cargo_auto_local_lib/
file_hashes_mod.rs

// file_hashes_mod.rs

//! Calculate file hashes.
//!
//! File hashes are used to check if a file has changed.
//! Commands like compile can then run only when a file has actually changed.
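//!
//! A minimal usage sketch, assuming this module is declared as `file_hashes_mod` at the
//! crate root and the crate-level path constants (e.g. `PATH_FILE_HASHES_JSON`) are set up:
//!
//! ```ignore
//! // the module path `crate::file_hashes_mod` is an assumption of this sketch
//! if crate::file_hashes_mod::is_project_changed() {
//!     // ... recompile automation_tasks_rs here ...
//!     // after a successful build, save the fresh hashes for the next comparison
//!     let vec_of_metadata = crate::file_hashes_mod::read_file_metadata();
//!     crate::file_hashes_mod::save_json_file_for_file_meta_data(vec_of_metadata);
//! }
//! ```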

use serde_derive::{Deserialize, Serialize};
use sha2::Digest;

// region: structs
/// Struct with file metadata.
#[derive(Serialize, Deserialize)]
pub struct FileMetaData {
    /// filename with path from Cargo.toml folder
    filename: String,
    /// hash of file
    filehash: String,
}

/// The struct represents the file automation_tasks_rs/.file_hashes.json.
#[derive(Serialize, Deserialize)]
pub struct FileHashes {
    /// vector of file metadata
    pub vec_file_metadata: Vec<FileMetaData>,
}

// endregion: structs

/// Check if the files in automation_tasks_rs have been modified.
///
/// The modified date of files is not usable when using git.  
/// A checkout makes the dates newer than they really are.  
/// Instead, a hash of each file is written in the same directory for later comparison.  
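///
/// A minimal sketch of the intended call (assuming the module path `crate::file_hashes_mod`
/// and that a previous run already saved automation_tasks_rs/.file_hashes.json):
///
/// ```ignore
/// // the module path is an assumption of this sketch
/// if crate::file_hashes_mod::is_project_changed() {
///     println!("some file has changed - recompile");
/// }
/// ```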
pub fn is_project_changed() -> bool {
    let vec_of_metadata = read_file_metadata();
    let js_struct = read_json_file(&crate::PATH_FILE_HASHES_JSON.to_string_lossy());
    // return true or false
    !are_all_files_equal(&vec_of_metadata, &js_struct.vec_file_metadata)
}

/// Check if all files are equal.
fn are_all_files_equal(vec_of_metadata: &[FileMetaData], js_vec_of_metadata: &[FileMetaData]) -> bool {
    // every freshly calculated file hash must have an identical entry in the json file
    vec_of_metadata
        .iter()
        .all(|x| js_vec_of_metadata.iter().any(|y| x.filename == y.filename && x.filehash == y.filehash))
}

/// Make a vector of file metadata.
pub fn read_file_metadata() -> Vec<FileMetaData> {
    let mut vec_of_metadata: Vec<FileMetaData> = Vec::new();

    // calculate hash of Cargo.toml
    let filehash = sha256_digest(&crate::PATH_CARGO_TOML).unwrap();
    vec_of_metadata.push(FileMetaData {
        filename: crate::PATH_CARGO_TOML.to_string_lossy().to_string(),
        filehash,
    });

    // calculate hash of the executable file
    let filehash = sha256_digest(&crate::PATH_TARGET_DEBUG_AUTOMATION_TASKS_RS).unwrap();
    vec_of_metadata.push(FileMetaData {
        filename: crate::PATH_TARGET_DEBUG_AUTOMATION_TASKS_RS.to_string_lossy().to_string(),
        filehash,
    });

    // all files in the src/ directory
    for entry in walkdir::WalkDir::new(crate::PATH_SRC.as_path()).into_iter().filter_map(Result::ok) {
        if entry.file_type().is_file() {
            let path = entry.path();
            // calculate hash of file
            let filehash = sha256_digest(path).unwrap();
            vec_of_metadata.push(FileMetaData {
                filename: path.to_string_lossy().to_string(),
                filehash,
            });
        }
    }
    vec_of_metadata
}

/// Read automation_tasks_rs/.file_hashes.json.
fn read_json_file(json_filepath: &str) -> FileHashes {
    let f = std::fs::read_to_string(json_filepath);

    match f {
        Ok(x) => {
            // check if the file has CRLF instead of LF. Such a file is unusable - create an empty struct
            if x.contains("\r\n") {
                // create an empty struct
                FileHashes {
                    vec_file_metadata: Vec::new(),
                }
            } else {
                // read the struct from the file content
                serde_json::from_str(x.as_str()).unwrap()
            }
        }
        Err(_error) => {
            // println!("Creating new file: {}", json_filepath);
            // the file does not exist yet - create an empty struct
            FileHashes {
                vec_file_metadata: Vec::new(),
            }
        }
    }
}

/// Calculate the hash for a file.
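///
/// A usage sketch (the path below is only illustrative; the result is a lowercase hex string):
///
/// ```ignore
/// // "Cargo.toml" is a hypothetical example path
/// let filehash = sha256_digest(std::path::Path::new("Cargo.toml")).unwrap();
/// println!("sha256: {}", filehash);
/// ```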
fn sha256_digest(path: &std::path::Path) -> anyhow::Result<String> {
    let file = std::fs::File::open(path)?;
    let mut reader = std::io::BufReader::new(file);
    let mut hasher = <sha2::Sha256 as sha2::Digest>::new();
    let mut buffer = [0; 1024];
    use std::io::Read;
    loop {
        let count = reader.read(&mut buffer)?;
        if count == 0 {
            break;
        }
        hasher.update(&buffer[..count]);
    }
    let digest = hasher.finalize();
    let hash_string = data_encoding::HEXLOWER.encode(digest.as_ref());
    // return
    Ok(hash_string)
}

/// Save the new file metadata.
pub fn save_json_file_for_file_meta_data(vec_of_metadata: Vec<FileMetaData>) {
    let x = FileHashes {
        vec_file_metadata: vec_of_metadata,
    };
    let y = serde_json::to_string_pretty(&x).unwrap();
    let json_filepath = crate::PATH_FILE_HASHES_JSON.as_path();
    let _f = std::fs::write(json_filepath, y);
}