Add command & more

Laurenz 2024-08-31 16:28:01 +02:00
parent 2c372b1038
commit 5081a4c815
Signed by: C0ffeeCode
SSH key fingerprint: SHA256:jnEltBNftC3wUZESLSMvM9zVPOkkevGRzqqoW2k2ORI
8 changed files with 402 additions and 176 deletions

Cargo.lock generated

@ -128,9 +128,9 @@ checksum = "8318a53db07bb3f8dca91a600466bdb3f2eaadeedfdbcf02e1accbad9271ba50"
[[package]]
name = "cc"
version = "1.1.14"
version = "1.1.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "50d2eb3cd3d1bf4529e31c215ee6f93ec5a3d536d9f578f93d9d33ee19562932"
checksum = "57b6a275aa2903740dc87da01c62040406b8812552e97129a63ea8850a17c6e6"
dependencies = [
"shlex",
]
@ -259,9 +259,9 @@ dependencies = [
[[package]]
name = "fastrand"
version = "2.1.0"
version = "2.1.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9fc0510504f03c51ada170672ac806f1f105a88aa97a5281117e1ddc3368e51a"
checksum = "e8c02a5121d4ea3eb16a80748c74f5549a5665e4c21333c6098f283870fbdea6"
[[package]]
name = "fnv"
@ -521,9 +521,9 @@ dependencies = [
[[package]]
name = "indexmap"
version = "2.4.0"
version = "2.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "93ead53efc7ea8ed3cfb0c79fc8023fbb782a5432b52830b6518941cebe6505c"
checksum = "68b900aa2f7301e21c36462b170ee99994de34dff39a4a6a528e80e7376d07e5"
dependencies = [
"equivalent",
"hashbrown",
@ -638,9 +638,9 @@ dependencies = [
[[package]]
name = "object"
version = "0.36.3"
version = "0.36.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "27b64972346851a39438c60b341ebc01bba47464ae329e55cf343eb93964efd9"
checksum = "084f1a5821ac4c651660a94a7153d27ac9d8a53736203f58b31945ded098070a"
dependencies = [
"memchr",
]
@ -823,9 +823,9 @@ checksum = "719b953e2095829ee67db738b3bfa9fa368c94900df327b3f07fe6e794d2fe1f"
[[package]]
name = "rustix"
version = "0.38.34"
version = "0.38.35"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "70dc5ec042f7a43c4a73241207cecc9873a06d45debb38b329f8541d85c2730f"
checksum = "a85d50532239da68e9addb745ba38ff4612a242c1c7ceea689c4bc7c2f43c36f"
dependencies = [
"bitflags",
"errno",
@ -865,9 +865,9 @@ checksum = "fc0a2ce646f8655401bb81e7927b812614bd5d91dbc968696be50603510fcaf0"
[[package]]
name = "rustls-webpki"
version = "0.102.6"
version = "0.102.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8e6b52d4fda176fd835fdc55a835d4a89b8499cad995885a21149d5ad62f852e"
checksum = "84678086bd54edf2b415183ed7a94d0efb049f1b646a33e22a36f3794be6ae56"
dependencies = [
"ring",
"rustls-pki-types",
@ -914,18 +914,18 @@ dependencies = [
[[package]]
name = "serde"
version = "1.0.208"
version = "1.0.209"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "cff085d2cb684faa248efb494c39b68e522822ac0de72ccf08109abde717cfb2"
checksum = "99fce0ffe7310761ca6bf9faf5115afbc19688edd00171d81b1bb1b116c63e09"
dependencies = [
"serde_derive",
]
[[package]]
name = "serde_derive"
version = "1.0.208"
version = "1.0.209"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "24008e81ff7613ed8e5ba0cfaf24e2c2f1e5b8a0495711e44fcd4882fca62bcf"
checksum = "a5831b979fd7b5439637af1752d535ff49f4860c0f341d1baeb6faf0f4242170"
dependencies = [
"proc-macro2",
"quote",
@ -934,9 +934,9 @@ dependencies = [
[[package]]
name = "serde_json"
version = "1.0.125"
version = "1.0.127"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "83c8e735a073ccf5be70aa8066aa984eaf2fa000db6c8d0100ae605b366d31ed"
checksum = "8043c06d9f82bd7271361ed64f415fe5e12a77fdb52e573e7f06a516dea329ad"
dependencies = [
"itoa",
"memchr",
@ -1027,9 +1027,9 @@ checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292"
[[package]]
name = "syn"
version = "2.0.75"
version = "2.0.77"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f6af063034fc1935ede7be0122941bafa9bacb949334d090b77ca98b5817c7d9"
checksum = "9f35bcdf61fd8e7be6caf75f429fdca8beb3ed76584befb503b1569faee373ed"
dependencies = [
"proc-macro2",
"quote",
@ -1096,9 +1096,9 @@ checksum = "1f3ccbac311fea05f86f61904b462b55fb3df8837a366dfc601a0161d0532f20"
[[package]]
name = "tokio"
version = "1.39.3"
version = "1.40.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9babc99b9923bfa4804bd74722ff02c0381021eafa4db9949217e3be8e84fff5"
checksum = "e2b070231665d27ad9ec9b8df639893f46727666c6767db40317fbe920a5d998"
dependencies = [
"backtrace",
"bytes",
@ -1106,21 +1106,9 @@ dependencies = [
"mio",
"pin-project-lite",
"socket2",
"tokio-macros",
"windows-sys 0.52.0",
]
[[package]]
name = "tokio-macros"
version = "2.4.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "693d596312e88961bc67d7f1f97af8a70227d9f90c31bba5806eec004978d752"
dependencies = [
"proc-macro2",
"quote",
"syn",
]
[[package]]
name = "tokio-native-tls"
version = "0.3.1"

Cargo.toml

@ -9,11 +9,15 @@ repository = "https://github.com/satoqz/lockman"
[dependencies]
clap = { version = "4.5.16", features = ["derive"] }
reqwest = "0.12.7"
serde = { version = "1.0.208", features = ["derive"] }
sha2 = "0.10.8"
tokio = { version = "1.39.3", features = ["macros"] }
serde = { version = "1.0.209", features = ["derive"] }
toml = "0.8.19"
sha2 = { version = "0.10.8" }
reqwest = "0.12.7"
tokio = { version = "1.40.0" }
[profile.dev.package.sha2]
# when unoptimized, hash calculation is very expensive for larger files
opt-level = 1
[lints.clippy]
# uninlined_format_args = { level = "warn", priority = -1 }
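
The dependency changes above also drop tokio's "macros" feature (the tokio-macros crate disappears from Cargo.lock accordingly), so #[tokio::main] is no longer available and async code has to block on a runtime built by hand, as download_command already does with rt.block_on. A minimal sketch of that pattern, assuming tokio's rt feature still reaches the crate transitively (presumably via reqwest):

// Editor sketch, not part of the commit: drive async code without #[tokio::main].
fn main() {
    let rt = tokio::runtime::Builder::new_current_thread()
        .enable_all()
        .build()
        .expect("failed to build tokio runtime");
    rt.block_on(async {
        println!("running inside a manually built runtime");
    });
}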

src/add_command.rs Normal file

@ -0,0 +1,77 @@
use std::{path::PathBuf, process::exit, str::FromStr};
use crate::{
check_command::calculate_file_hash,
cli::AddArgs,
lockfile::{load_project_files, update_lock_file, update_project_file},
};
pub fn add_command(args: AddArgs) {
let path = PathBuf::from_str(&args.path).expect("Failed to parse file path");
let (project_files, lock_files) = load_project_files();
let (mut project_files, mut lock_files) = (project_files.files, lock_files.locks);
if !path.is_file() {
println!(
"The following path does not point to a file:\n\t{:?}",
args.path
);
exit(1);
}
let hash = calculate_file_hash(&path);
// Insert to lock file map
if let Some(old_hash) = lock_files.get(&args.path) {
if *old_hash == hash {
eprintln!(
"File {} already on record, hashes do match",
args.path.clone()
);
} else if args.allow_override {
println!(
"Replaced hash for file {}:\n\tOld: {old_hash}\n\tNew: {hash}",
args.path
);
let hc = hash.clone();
lock_files.insert(args.path.clone(), hc);
} else {
println!(
"File already on record {}:\n\tKnown: {old_hash}\n\tNew: {hash}",
args.path
);
eprintln!("Specify flag \"--override\" to allow replacement");
exit(1);
}
} else {
lock_files.insert(args.path.clone(), hash);
}
// Insert to project file map
if let Some(old_url) = project_files.get(&args.path) {
// Path is already present
if let Some(new_url) = args.url {
if &new_url != old_url && args.allow_override {
println!(
"Replaced URL for file {}:\n\tOld: {old_url}\n\tNew: {new_url}",
args.path
);
project_files.insert(args.path, new_url);
} else if &new_url != old_url {
println!("File already on record with a different URL {}:\n\tKnown: {old_url}\n\tNew: {new_url}", args.path);
eprintln!("Specify flag \"--override\" to allow replacement");
exit(1);
} else {
eprintln!("File is already on record with the same URL");
}
} else {
// File is already known with URL but none is specified
eprintln!("Although no URL has been specified, the URL already known for the specific path has been kept:\n\tKnown URL: {old_url}");
}
} else {
// Path is new to project
project_files.insert(args.path, args.url.unwrap_or_default()); // TODO: Consider what to do
}
update_lock_file(lock_files);
update_project_file(project_files);
}

src/check_command.rs

@ -1,9 +1,4 @@
use std::{
fs::{self, File},
io,
path::Path,
process::exit,
};
use std::{fs::File, io, path::Path, process::exit};
use sha2::{Digest, Sha512};
@ -12,64 +7,116 @@ use crate::{
colors::{GREEN, RED, RESET, YELLOW},
CheckArgs,
},
lockfile::LockFileV1,
lockfile::{load_project_printing, FileLoadingResults, LockedFile3},
};
pub fn check_command(args: CheckArgs) {
let file = fs::read_to_string("Lockfile").expect("Lockfile not found");
let lf: LockFileV1 = toml::from_str(&file).unwrap();
let lf = lf.locks;
// TODO: Check files in lockfile are still specified in project file
let FileLoadingResults {
locked: locked_files,
unlocked: unlocked_files,
unaffiliated: unaffiliated_files,
} = load_project_printing();
let results = lf.iter().map(|i| (i.0, i.1, check_item(i)));
// Locked files
let locked_res = locked_files.iter().map(|i| check_item_cli(args, i));
for item in results {
match item.2 {
CheckResult::Ok => {
if !args.only_report_mismatches {
println!("{GREEN}OK{RESET}\t{}", item.0);
}
}
CheckResult::Invalid => {
println!("{RED}INVALID{RESET}\t{}", item.0);
if args.fast_fail {
eprintln!("Quitting as an invalid file was found and fast-fail is enabled.");
exit(5);
}
}
CheckResult::Absent => {
println!("{YELLOW}ABSENT{RESET}\t{}", item.0);
}
CheckResult::NotAFile => todo!(),
// Unlocked files
for (path, _) in &unlocked_files {
println!("{YELLOW}UNLOCKED{RESET}\t{path}");
}
let (mut ok, mut invalid, mut absent, mut not_a_file) = (0u32, 0u32, 0u32, 0u32);
for r in locked_res {
match r {
CheckResult::Ok(_) => ok += 1,
CheckResult::Invalid(_) => invalid += 1,
CheckResult::Absent => absent += 1,
CheckResult::NotAFile => not_a_file += 1,
}
}
eprintln!("\nResult: {} files without lock, {} files have locks but are unaffiliated (not in the Lockman file)", unlocked_files.len(), unaffiliated_files.len());
eprintln!("\t{ok} files are OK;");
eprintln!("\t{invalid} files have invalid hashes;");
if absent == 0 {
eprintln!("\tno files are absent.");
} else {
eprintln!("\t{absent} files are absent;");
}
if not_a_file != 0 {
eprintln!("\t{not_a_file} items are expected to be files but are not.");
}
}
pub fn check_item(item: (&String, &String)) -> CheckResult {
let path = Path::new(item.0);
fn check_item_cli(args: CheckArgs, item: &LockedFile3) -> CheckResult {
let res = check_item(item);
match res {
CheckResult::Ok(_) => {
if !args.only_report_mismatches {
println!("{GREEN}OK{RESET}\t{}", item.path);
}
}
CheckResult::Invalid(ref calculated_hash) => {
println!(
"{RED}INVALID{RESET}\t{}\n\tFound: {calculated_hash}\n\tWanted: {}",
item.path, item.expected_hash,
);
if args.fast_fail {
eprintln!("Quitting as an invalid file was found and fast-fail is enabled.");
exit(5);
}
}
CheckResult::Absent => {
println!("{YELLOW}ABSENT{RESET}\t{}", item.path);
if args.fast_fail {
eprintln!("Quitting as a file has been found absent and fast-fail is enabled.");
exit(6);
}
}
CheckResult::NotAFile => {
println!("{RED}WEIRD{RESET}\tNot a file: {}", item.path);
if args.fast_fail {
eprintln!("Quitting since an item has been found which is expected to be a file but is not and fast-fail is enabled.");
exit(7);
}
}
}
res
}
pub fn check_item(item: &LockedFile3) -> CheckResult {
let path: &Path = Path::new(&item.path);
if !path.exists() {
return CheckResult::Absent;
} else if !path.is_file() {
return CheckResult::NotAFile;
}
let calculated_hash = calculate_file_hash(path);
if calculated_hash == item.expected_hash {
CheckResult::Ok(calculated_hash)
} else {
CheckResult::Invalid(calculated_hash)
}
}
pub fn calculate_file_hash(path: &Path) -> String {
let mut file = File::open(path).unwrap();
let mut hasher = Sha512::new();
io::copy(&mut file, &mut hasher).unwrap();
let hash = hasher.finalize();
let hash = format!("{hash:x}");
if hash == *item.1 {
CheckResult::Ok
} else {
CheckResult::Invalid
}
let calculated_hash = format!("{hash:x}");
calculated_hash
}
#[derive(PartialEq, Eq)]
pub enum CheckResult {
Ok,
Invalid,
/// Correct hash
Ok(String),
/// Invalid hash
Invalid(String),
Absent,
NotAFile,
}
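
For reference, the streaming approach used by calculate_file_hash above, written as a self-contained sketch: the file is piped into the Sha512 hasher via io::copy, so even large files are hashed without being read into memory at once. This mirrors the function in the hunk but propagates errors with ? instead of unwrap; the path in main is only an example.

use std::{fs::File, io, path::Path};

use sha2::{Digest, Sha512};

fn sha512_hex(path: &Path) -> io::Result<String> {
    let mut file = File::open(path)?;
    let mut hasher = Sha512::new();
    // Sha512 implements io::Write, so io::copy feeds the file through in chunks.
    io::copy(&mut file, &mut hasher)?;
    Ok(format!("{:x}", hasher.finalize()))
}

fn main() -> io::Result<()> {
    // Example path; any readable file works here.
    println!("{}", sha512_hex(Path::new("Cargo.toml"))?);
    Ok(())
}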

src/cli.rs

@ -1,5 +1,3 @@
#![warn(clippy::restriction)]
use clap::{Args, Parser, Subcommand, ValueEnum};
pub mod colors {
@ -47,7 +45,9 @@ pub enum Commands {
/// Checks if the remote resources are the same as specified in the lockfile
CheckAvailability,
/// Adds a new file to the project, pinning its hash
Add,
Add(AddArgs),
/// Adds a new file to the project, downloading it and pinning its hash.
/// (TOFU: Trust-on-First-Use)
AddDownload,
}
@ -90,6 +90,21 @@ pub struct CheckArgs {
pub fast_fail: bool,
/// Omit files with valid hashes from the output
#[arg(default_value_t = true, long = "list-invalid-only")]
#[arg(default_value_t = false, long = "list-invalid-only")]
pub only_report_mismatches: bool,
}
#[derive(Args, Clone)]
pub struct AddArgs {
/// Specify which file to pin
#[arg(required = true)]
pub path: String,
/// Optionally specify a URL from which to obtain the resource
#[arg(required = false)]
pub url: Option<String>,
/// If enabled, the hash for the specified file is replaced if it does not match
#[arg(default_value_t = false, long = "override")]
pub allow_override: bool,
}
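
A hedged sketch of how the new Add(AddArgs) variant parses on the command line: path is a required positional, url an optional second positional, and --override a boolean flag. AddArgs is adapted from the hunk above (doc comments omitted); the Cli wrapper, the single-variant Commands enum, and the "lockman" invocation are illustrative assumptions, since the real top-level parser is not part of this diff.

use clap::{Args, Parser, Subcommand};

#[derive(Parser)]
struct Cli {
    #[command(subcommand)]
    command: Commands,
}

// Only the variant needed for this sketch; the real enum has more commands.
#[derive(Subcommand)]
enum Commands {
    Add(AddArgs),
}

// Adapted from the hunk above.
#[derive(Args, Clone)]
struct AddArgs {
    #[arg(required = true)]
    path: String,
    #[arg(required = false)]
    url: Option<String>,
    #[arg(default_value_t = false, long = "override")]
    allow_override: bool,
}

fn main() {
    // Hypothetical invocation: lockman add assets/data.bin https://example.org/data.bin --override
    let cli = Cli::parse_from([
        "lockman",
        "add",
        "assets/data.bin",
        "https://example.org/data.bin",
        "--override",
    ]);
    match cli.command {
        Commands::Add(args) => {
            assert_eq!(args.path, "assets/data.bin");
            assert_eq!(args.url.as_deref(), Some("https://example.org/data.bin"));
            assert!(args.allow_override);
        }
    }
}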

src/download_command.rs

@ -1,14 +1,12 @@
use std::{
fs::{self, File}, io::Write, path::PathBuf, process::exit, time::SystemTime
};
use std::{fs::File, io::Write, path::PathBuf, process::exit, time::SystemTime};
use reqwest::StatusCode;
use sha2::{Digest, Sha512};
use crate::{
check_command::{check_item, CheckResult},
cli::{DownloadArgs, DownloadExistingTreatment},
lockfile::{LockFileV1, ProjectFileV1},
cli::{colors::{GREEN, RED, RESET}, DownloadArgs, DownloadExistingTreatment},
lockfile::{load_project_printing, LockedFile3},
};
pub fn download_command(args: DownloadArgs) {
@ -21,83 +19,74 @@ pub fn download_command(args: DownloadArgs) {
rt.block_on(download_command_async(args));
}
pub async fn download_command_async(args: DownloadArgs) {
let lf = fs::read_to_string("Lockfile").expect("Lockfile not found");
let lf: LockFileV1 = toml::from_str(&lf).unwrap();
let lf = lf.locks;
async fn download_command_async(args: DownloadArgs) {
let res = load_project_printing();
let pf = fs::read_to_string("Projectfile").expect("Projectfile not found");
let pf: ProjectFileV1 = toml::from_str(&pf).unwrap();
let pf = pf.files;
for record in res.locked {
handle_locked_file(record, args).await;
}
}
let unlocked_files = pf
.iter()
.filter(|(path, _)| !lf.contains_key(*path))
.collect::<Vec<_>>();
let locked_files = pf
.iter()
.filter_map(|(path, url)| {
lf.get(path).map(|eh| LockedFile3 {
path: path.clone(),
url: url.clone(),
expected_hash: eh.clone(),
})
})
.collect::<Vec<_>>();
println!(
"There are {} files on record, {} are without lock, {} are locked.",
pf.len(),
unlocked_files.len(),
locked_files.len(),
);
for record in locked_files {
let check_res = check_item((&record.path, &record.expected_hash));
match check_res {
CheckResult::Ok => {
println!("OK:\t {}", &record.path);
async fn handle_locked_file(record: LockedFile3, args: DownloadArgs) {
let check_res = check_item(&record);
match check_res {
CheckResult::Ok(_) => {
println!("{GREEN}OK{RESET}:\t {}", &record.path);
}
CheckResult::Invalid(invalid_hash) => match args.existing_file_behavior {
DownloadExistingTreatment::ValidateReplace => {
println!(
"Downloading and replacing invalid file:\n\t{}",
&record.path
);
let _ = download_file(args, &record).await.unwrap();
println!("Replaced invalid file:\n\t{}", &record.path);
}
CheckResult::Invalid => match args.existing_file_behavior {
DownloadExistingTreatment::ValidateReplace => {
println!(
"Downloading and replacing invalid file:\n\t{}",
&record.path
);
let _ = download_file(args, &record).await;
println!("Replaced invalid file:\n\t{}", &record.path);
}
DownloadExistingTreatment::ValidateFail => {
println!("Existing file has an invalid hash:\n\t{}\n\texpected: {}\n\tfound: TODO ;)", record.path, record.expected_hash);
}
DownloadExistingTreatment::ValidateReport => todo!(),
DownloadExistingTreatment::Ignore => todo!(),
},
CheckResult::NotAFile => todo!(),
CheckResult::Absent => {
println!("Downloading absent file:\n\t{}", &record.path);
match download_file(args, &record).await {
Ok(_) => {
println!("Downloaded absent file:\n\t{}", &record.path);
}
Err(download_error) => {
match download_error {
DownloadError::ErrorResponse(sc) => {
println!("Received an error HTTP status code ({sc}) upon downloading file: \n\t{}", record.path);
exit(5);
},
DownloadError::HashMismatch { calculated, expected } => {
println!("Downloaded file has a different hash:\n\tfile:{} \n\texpected: {}\n\treceived: {}", record.path, expected, calculated);
exit(5);
},
DownloadError::IoError(err) => {
println!("An I/O error occurred while attempting to download a file: {}\n{err}", record.path);
},
}
},
}
DownloadExistingTreatment::ValidateFail => {
println!("Existing file has an invalid hash:\n\t{}\n\texpected: {}\n\tfound: {invalid_hash} ;)", record.path, record.expected_hash);
}
DownloadExistingTreatment::ValidateReport => todo!(),
DownloadExistingTreatment::Ignore => todo!(),
},
CheckResult::NotAFile => {
println!("{RED}ERROR{RESET}:\tPath {} exists but is a directory.\nThis is unsupported at the moment", record.path);
}
CheckResult::Absent => {
println!("Downloading absent file:\n\t{}", &record.path);
match download_file(args, &record).await {
Ok(_) => {
println!("Downloaded absent file:\n\t{}", &record.path);
}
Err(download_error) => handle_download_error(download_error, &record),
}
}
}
}
fn handle_download_error(download_error: DownloadError, record: &LockedFile3) {
match download_error {
DownloadError::ErrorResponse(sc) => {
println!(
"Received an error HTTP status code ({sc}) upon downloading file: \n\t{}",
record.path
);
exit(5);
}
DownloadError::HashMismatch {
calculated,
expected,
} => {
println!(
"Downloaded file has a different hash:\n\tfile:{} \n\texpected: {}\n\treceived: {}",
record.path, expected, calculated
);
exit(5);
}
DownloadError::IoError(err) => {
println!(
"An I/O error occurred while attempting to download a file: {}\n{err}",
record.path
);
}
}
}
@ -136,6 +125,7 @@ async fn download_file(args: DownloadArgs, record: &LockedFile3) -> Result<Strin
Ok(hash)
}
/// TODO: not in use yet
fn get_temp_dir() -> PathBuf {
let particle = SystemTime::now()
.duration_since(SystemTime::UNIX_EPOCH)
@ -147,13 +137,6 @@ fn get_temp_dir() -> PathBuf {
path
}
#[derive(Debug)]
struct LockedFile3 {
pub path: String,
pub url: String,
pub expected_hash: String,
}
#[derive(Debug)]
enum DownloadError {
ErrorResponse(StatusCode),
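
The body of download_file lies outside this hunk, so as an illustration of the verify-after-download idea it implements, here is a hedged, self-contained sketch (editor illustration, not the project's code): fetch the resource with reqwest, hash the bytes with Sha512, and compare against the expected hex digest from the lockfile. The URL and digest in main are placeholders, and the runtime is built by hand just as download_command does.

use sha2::{Digest, Sha512};

async fn fetch_and_verify(url: &str, expected_hex: &str) -> Result<Vec<u8>, String> {
    let response = reqwest::get(url).await.map_err(|e| e.to_string())?;
    if !response.status().is_success() {
        return Err(format!("HTTP error {} for {url}", response.status()));
    }
    let bytes = response.bytes().await.map_err(|e| e.to_string())?;
    let calculated = format!("{:x}", Sha512::digest(&bytes));
    if calculated == expected_hex {
        Ok(bytes.to_vec())
    } else {
        Err(format!(
            "hash mismatch:\n\texpected: {expected_hex}\n\tfound: {calculated}"
        ))
    }
}

fn main() {
    let rt = tokio::runtime::Builder::new_current_thread()
        .enable_all()
        .build()
        .expect("failed to build tokio runtime");
    // Placeholder URL and digest, for illustration only.
    match rt.block_on(fetch_and_verify("https://example.org/data.bin", "placeholder-digest")) {
        Ok(bytes) => println!("verified {} bytes", bytes.len()),
        Err(err) => eprintln!("{err}"),
    }
}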

src/lockfile.rs

@ -1,21 +1,132 @@
use std::collections::HashMap;
use std::{
collections::HashMap,
fs::{self, File},
io::Write,
};
use serde::{Deserialize, Serialize};
#[derive(Deserialize, Serialize, Debug)]
pub struct ProjectFileV1 {
#[serde(rename = "lockman")]
pub version: String,
const PROJECT_FILE_NAME: &str = "Lockman.toml";
const LOCK_FILE_NAME: &str = "Lockfile.toml";
/// Project file (Lockman.toml) file format
#[derive(Deserialize, Serialize, Debug)]
pub struct ProjectFile {
/// Map of file -> URL
/// TODO: Consider using `Vec` instead
pub files: HashMap<String, String>,
}
/// Lockfile file format
#[derive(Deserialize, Serialize, Debug)]
pub struct LockFileV1 {
pub struct LockFile {
#[serde(rename = "lockfile")]
pub version: String,
pub version: u8,
/// Map of file -> hash
/// TODO: Consider using `Vec` instead
pub locks: HashMap<String, String>,
}
/// Combined entry from the project file and the lockfile
#[derive(Debug)]
pub struct LockedFile3 {
pub path: String,
pub url: String,
pub expected_hash: String,
}
#[derive(Debug)]
pub struct FileLoadingResults {
pub locked: Vec<LockedFile3>,
pub unlocked: Vec<(String, String)>,
pub unaffiliated: Vec<(String, String)>,
}
pub fn load_project() -> FileLoadingResults {
let (project_file, lock_file) = load_project_files();
// TODO: check file support
let pf = project_file.files;
let lf = lock_file.locks;
let unlocked_files = pf
.iter()
.filter(|(path, _)| !lf.contains_key(*path))
.map(|(p, u)| (p.clone(), u.clone()))
.collect::<Vec<_>>();
let unaffiliated_files = lf
.iter()
.filter(|(path, _)| !pf.contains_key(*path))
.map(|(p, h)| (p.clone(), h.clone()))
.collect::<Vec<_>>();
let locked_files = pf
.iter()
.filter_map(|(path, url)| {
lf.get(path).map(|eh| LockedFile3 {
path: path.clone(),
url: url.clone(),
expected_hash: eh.clone(),
})
})
.collect::<Vec<_>>();
FileLoadingResults {
locked: locked_files,
unlocked: unlocked_files,
unaffiliated: unaffiliated_files,
}
}
pub fn load_project_files() -> (ProjectFile, LockFile) {
let project_file = fs::read_to_string(PROJECT_FILE_NAME).expect("Lockman.toml not found");
let lock_file = fs::read_to_string(LOCK_FILE_NAME).expect("Lockfile.toml not found");
let project_file: ProjectFile =
toml::from_str(&project_file).expect("Failed to parse Lockman file");
let lock_file: LockFile = toml::from_str(&lock_file).expect("Failed to parse Lockfile");
(project_file, lock_file)
}
#[inline]
pub fn load_project_printing() -> FileLoadingResults {
let result = load_project();
println!(
"There are {} locked files, {} are without lock, {} locks are unaffiliated.",
result.locked.len(),
result.unlocked.len(),
result.unaffiliated.len(),
);
result
}
pub fn update_project_file(files: HashMap<String, String>) {
let data = toml::to_string_pretty(&ProjectFile { files })
.expect("Failed to serialize Lockman.toml file");
save_file(PROJECT_FILE_NAME, &data);
}
pub fn update_lock_file(files: HashMap<String, String>) {
let data = toml::to_string_pretty(&LockFile {
version: 1,
locks: files,
})
.expect("Failed to serialize Lockfile.toml file");
save_file(LOCK_FILE_NAME, &data);
}
fn save_file(path: &str, data: &str) {
let mut file = File::create(path).expect("Failed to open file for writing");
file.write_all(data.as_bytes())
.expect("Writing to file failed");
eprintln!("Successfully written to {path}");
}
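
To make the on-disk formats concrete, here is a hedged round-trip sketch built from the two structs above: Lockman.toml holds a [files] table mapping path -> URL, while Lockfile.toml holds the lockfile version plus a [locks] table mapping path -> expected SHA-512 digest. The struct definitions are adapted from the hunk; the paths, URL, and digest values are made up.

use std::collections::HashMap;

use serde::{Deserialize, Serialize};

// Adapted from the hunk above (doc comments and pub omitted).
#[derive(Deserialize, Serialize, Debug)]
struct ProjectFile {
    files: HashMap<String, String>,
}

#[derive(Deserialize, Serialize, Debug)]
struct LockFile {
    #[serde(rename = "lockfile")]
    version: u8,
    locks: HashMap<String, String>,
}

fn main() {
    // Hypothetical Lockman.toml contents.
    let project: ProjectFile = toml::from_str(
        r#"
[files]
"assets/data.bin" = "https://example.org/data.bin"
"#,
    )
    .expect("failed to parse example project file");

    // Hypothetical Lockfile.toml contents; the digest is a placeholder.
    let lock: LockFile = toml::from_str(
        r#"
lockfile = 1

[locks]
"assets/data.bin" = "placeholder-sha512-hex-digest"
"#,
    )
    .expect("failed to parse example lockfile");

    assert_eq!(lock.version, 1);
    assert!(lock.locks.contains_key("assets/data.bin"));
    println!("{}", toml::to_string_pretty(&project).unwrap());
    println!("{}", toml::to_string_pretty(&lock).unwrap());
}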

src/main.rs

@ -1,6 +1,7 @@
use clap::Parser;
use cli::Commands;
mod add_command;
mod check_command;
mod cli;
mod download_command;
@ -13,7 +14,7 @@ fn main() {
Commands::Download(args) => download_command::download_command(args),
Commands::Check(args) => check_command::check_command(args),
Commands::CheckAvailability => todo!(),
Commands::Add => todo!(),
Commands::Add(args) => add_command::add_command(args),
Commands::AddDownload => todo!(),
}