Compare commits


2 commits (main...dev)

SHA1        Message             Date
5081a4c815  Add command & more  2024-08-31 16:28:01 +02:00
2c372b1038  initial mess        2024-08-24 22:40:09 +02:00
9 changed files with 2139 additions and 1 deletion

.gitignore (vendored): 1 changed line

@@ -1 +1,2 @@
.vscode/
/target

Cargo.lock (generated): 1504 changed lines

Diff suppressed because the file is too large.

Cargo.toml

@@ -1,6 +1,31 @@
[package]
name = "lockman"
description = "Management of dependencies: Downloads files and verifies integrity by hashes"
license = "MIT"
version = "0.1.0"
edition = "2021"
authors = ["C0ffeeCode", "Satoqz"]
repository = "https://github.com/satoqz/lockman"
[dependencies]
clap = { version = "4.5.16", features = ["derive"] }
serde = { version = "1.0.209", features = ["derive"] }
toml = "0.8.19"
sha2 = { version = "0.10.8" }
reqwest = "0.12.7"
tokio = { version = "1.40.0" }
[profile.dev.package.sha2]
# when unoptimized, hash calculation is very expensive for larger files
opt-level = 1
[lints.clippy]
# uninlined_format_args = { level = "warn", priority = -1 }
correctness = "warn"
suspicious = "warn"
complexity = "warn"
perf = "warn"
style = "warn"
pedantic = "warn"
# restriction = "warn"
# cargo = "warn"

src/add_command.rs (new file): 77 added lines

@@ -0,0 +1,77 @@
use std::{path::PathBuf, process::exit, str::FromStr};
use crate::{
check_command::calculate_file_hash,
cli::AddArgs,
lockfile::{load_project_files, update_lock_file, update_project_file},
};
pub fn add_command(args: AddArgs) {
let path = PathBuf::from_str(&args.path).expect("Failed to parse file path");
let (project_files, lock_files) = load_project_files();
let (mut project_files, mut lock_files) = (project_files.files, lock_files.locks);
if !path.is_file() {
println!(
"The following path does not point to a file:\n\t{:?}",
args.path
);
exit(1);
}
let hash = calculate_file_hash(&path);
// Insert to lock file map
if let Some(old_hash) = lock_files.get(&args.path) {
if *old_hash == hash {
eprintln!(
"File {} already on record, hashes do match",
args.path
);
} else if args.allow_override {
println!(
"Replaced hash for file {}:\n\tOld: {old_hash}\n\tNew: {hash}",
args.path
);
lock_files.insert(args.path.clone(), hash.clone());
} else {
println!(
"File already on record {}:\n\tKnown: {old_hash}\n\tNew: {hash}",
args.path
);
eprintln!("Specify flag \"--override\" to allow replacement");
exit(1);
}
} else {
lock_files.insert(args.path.clone(), hash);
}
// Insert to project file map
if let Some(old_url) = project_files.get(&args.path) {
// Path is already present
if let Some(new_url) = args.url {
if &new_url != old_url && args.allow_override {
println!(
"Replaced URL for file {}:\n\tOld: {old_url}\n\tNew: {new_url}",
args.path
);
project_files.insert(args.path, new_url);
} else if &new_url != old_url {
println!("File already on record with a different URL {}:\n\tKnown: {old_url}\n\tNew: {new_url}", args.path);
eprintln!("Specify flag \"--override\" to allow replacement");
exit(1);
} else {
eprintln!("File is already on record with the same URL");
}
} else {
// File is already known with a URL but none was specified
eprintln!("No URL was specified, so the URL already on record for this path has been kept:\n\tKnown URL: {old_url}");
}
} else {
// Path is new to project
project_files.insert(args.path, args.url.unwrap_or_default()); // TODO: Consider what to do
}
update_lock_file(lock_files);
update_project_file(project_files);
}
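The command above folds two decisions into one control flow: whether the path is already locked, and whether --override permits replacing the recorded hash. Below is a minimal sketch of that rule pulled out as a standalone, testable helper; `decide_lock_update`, `LockUpdate`, and the example path and hashes are hypothetical and not part of the commit.

use std::collections::HashMap;

#[derive(Debug, PartialEq, Eq)]
enum LockUpdate {
    AlreadyCurrent,     // same path, same hash: nothing to do
    Inserted,           // new path, or replacement permitted by --override
    RefusedWithoutFlag, // hash differs and --override was not given
}

fn decide_lock_update(
    locks: &mut HashMap<String, String>,
    path: &str,
    new_hash: String,
    allow_override: bool,
) -> LockUpdate {
    match locks.get(path) {
        Some(old) if *old == new_hash => LockUpdate::AlreadyCurrent,
        Some(_) if !allow_override => LockUpdate::RefusedWithoutFlag,
        _ => {
            locks.insert(path.to_string(), new_hash);
            LockUpdate::Inserted
        }
    }
}

fn main() {
    // Illustrative path and hashes only.
    let mut locks = HashMap::new();
    assert_eq!(
        decide_lock_update(&mut locks, "assets/data.bin", "abc".into(), false),
        LockUpdate::Inserted
    );
    assert_eq!(
        decide_lock_update(&mut locks, "assets/data.bin", "def".into(), false),
        LockUpdate::RefusedWithoutFlag
    );
}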

src/check_command.rs (new file): 122 added lines

@@ -0,0 +1,122 @@
use std::{fs::File, io, path::Path, process::exit};
use sha2::{Digest, Sha512};
use crate::{
cli::{
colors::{GREEN, RED, RESET, YELLOW},
CheckArgs,
},
lockfile::{load_project_printing, FileLoadingResults, LockedFile3},
};
pub fn check_command(args: CheckArgs) {
let FileLoadingResults {
locked: locked_files,
unlocked: unlocked_files,
unaffiliated: unaffiliated_files,
} = load_project_printing();
// Locked files
let locked_res = locked_files.iter().map(|i| check_item_cli(args, i));
// Unlocked files
for (path, _) in &unlocked_files {
println!("{YELLOW}UNLOCKED{RESET}\t{path}");
}
let (mut ok, mut invalid, mut absent, mut not_a_file) = (0u32, 0u32, 0u32, 0u32);
for r in locked_res {
match r {
CheckResult::Ok(_) => ok += 1,
CheckResult::Invalid(_) => invalid += 1,
CheckResult::Absent => absent += 1,
CheckResult::NotAFile => not_a_file += 1,
}
}
eprintln!("\nResult: {} files without lock, {} files have locks but are unaffiliated (not in the Lockman file)", unlocked_files.len(), unaffiliated_files.len());
eprintln!("\t{ok} files are OK;");
eprintln!("\t{invalid} files have invalid hashes;");
if absent == 0 {
eprintln!("\tno files are absent.");
} else {
eprintln!("\t{absent} files are absent;");
}
if not_a_file != 0 {
eprintln!("\t{not_a_file} items are expected to be files but are not.");
}
}
fn check_item_cli(args: CheckArgs, item: &LockedFile3) -> CheckResult {
let res = check_item(item);
match res {
CheckResult::Ok(_) => {
if !args.only_report_mismatches {
println!("{GREEN}OK{RESET}\t{}", item.path);
}
}
CheckResult::Invalid(ref calculated_hash) => {
println!(
"{RED}INVALID{RESET}\t{}\n\tFound: {calculated_hash}\n\tWanted: {}",
item.path, item.expected_hash,
);
if args.fast_fail {
eprintln!("Quitting as an invalid file was found and fast-fail is enabled.");
exit(5);
}
}
CheckResult::Absent => {
println!("{YELLOW}ABSENT{RESET}\t{}", item.path);
if args.fast_fail {
eprintln!("Quitting as a file has been found absent and fast-fail is enabled.");
exit(6);
}
}
CheckResult::NotAFile => {
println!("{RED}WEIRD{RESET}\tNot a file: {}", item.path);
if args.fast_fail {
eprintln!("Quitting since an item has been found which is expected to be a file but is not and fast-fail is enabled.");
exit(7);
}
}
}
res
}
pub fn check_item(item: &LockedFile3) -> CheckResult {
let path: &Path = Path::new(&item.path);
if !path.exists() {
return CheckResult::Absent;
} else if !path.is_file() {
return CheckResult::NotAFile;
}
let calculated_hash = calculate_file_hash(path);
if calculated_hash == item.expected_hash {
CheckResult::Ok(calculated_hash)
} else {
CheckResult::Invalid(calculated_hash)
}
}
pub fn calculate_file_hash(path: &Path) -> String {
let mut file = File::open(path).unwrap();
let mut hasher = Sha512::new();
io::copy(&mut file, &mut hasher).unwrap();
format!("{:x}", hasher.finalize())
}
#[derive(PartialEq, Eq)]
pub enum CheckResult {
/// Correct hash
Ok(String),
/// Invalid Hash
Invalid(String),
Absent,
NotAFile,
}
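calculate_file_hash above unwraps both the open and the copy, so an unreadable file aborts the whole check. A minimal sketch of a fallible variant, assuming the same sha2 0.10 streaming API, could look like this; `try_calculate_file_hash` and the example path are hypothetical.

use std::{fs::File, io, path::Path};

use sha2::{Digest, Sha512};

fn try_calculate_file_hash(path: &Path) -> io::Result<String> {
    let mut file = File::open(path)?;
    let mut hasher = Sha512::new();
    // Sha512 implements std::io::Write, so io::copy streams the file through
    // the hasher without loading it into memory.
    io::copy(&mut file, &mut hasher)?;
    Ok(format!("{:x}", hasher.finalize()))
}

fn main() -> io::Result<()> {
    // Illustrative path only.
    println!("{}", try_calculate_file_hash(Path::new("Cargo.toml"))?);
    Ok(())
}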

src/cli.rs (new file): 110 added lines

@@ -0,0 +1,110 @@
use clap::{Args, Parser, Subcommand, ValueEnum};
pub mod colors {
pub const RESET: &str = "\x1b[0m";
pub const RED: &str = "\x1b[31m";
pub const GREEN: &str = "\x1b[32m";
pub const YELLOW: &str = "\x1b[33m";
}
#[derive(Parser)]
#[command(version, about, long_about = None)]
#[command(propagate_version = true)]
pub struct Cli {
#[command(subcommand)]
pub command: Commands,
}
// #[derive(Args, Clone)]
// pub struct GlobalArgs {
// /// Path to the project file
// #[arg(long, default_value = "Project.toml")]
// pub project_file: String,
// /// Path to the lockfile
// #[arg(long, default_value = "Lockfile.toml")]
// pub lockfile: String,
// #[arg(short = 'f', long = "format", default_value = "human")]
// pub output_format: OutputFormat,
// }
/// Format of the output which is printed to standard output
#[derive(Clone, ValueEnum)]
pub enum OutputFormat {
/// Human readable output
Human,
}
#[derive(Subcommand)]
pub enum Commands {
/// Downloads files as specified in the project file and validates according to the lockfile
Download(DownloadArgs),
/// Checks the integrity of the local files to be in accordance with the lockfile
Check(CheckArgs),
/// Checks if the remote resources are the same as specified in the lockfile
CheckAvailability,
/// Adds a new file to the project, pinning its hash
Add(AddArgs),
/// Adds a new file to the project, downloading it and pinning its hash.
/// (TOFU: Trust-on-First-Use)
AddDownload,
}
#[derive(Args, Copy, Clone)]
pub struct DownloadArgs {
/// Quit on the first hash mismatch or unavailable resource
#[arg(default_value_t = false, long = "fast-fail")]
pub fast_fail: bool,
/// If true, write directly to the designated file; otherwise
/// the downloaded data is stored in a temporary place
/// and copied to the designated place once it is verified.
/// TODO: Perhaps in the future, files may be held in memory if they are not expected to be huge
#[arg(default_value_t = false, long = "direct-write")]
pub direct_write: bool,
/// How existing files ought to be handled
#[arg(value_enum, default_value_t = DownloadExistingTreatment::ValidateFail, long = "clash-behavior")]
pub existing_file_behavior: DownloadExistingTreatment,
}
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, ValueEnum)]
pub enum DownloadExistingTreatment {
/// Validate existing files and attempt to replace them if they are invalid
ValidateReplace,
/// Validate existing files and abort if they are invalid
/// TODO: Atomic? Not implemented
ValidateFail,
/// Validate existing files; ignore invalid ones and report them later on
ValidateReport,
/// Skip existing files; they are not validated
/// TODO: Not implemented
Ignore,
}
#[derive(Args, Copy, Clone)]
pub struct CheckArgs {
/// Quit on the first hash mismatch (not impacted by absent files)
#[arg(default_value_t = false, long = "fast-fail")]
pub fast_fail: bool,
/// Omit files with valid hashes from the output
#[arg(default_value_t = false, long = "list-invalid-only")]
pub only_report_mismatches: bool,
}
#[derive(Args, Clone)]
pub struct AddArgs {
/// Specify which file to pin
#[arg(required = true)]
pub path: String,
/// Optionally specify the URL from which to obtain the resource
#[arg(required = false)]
pub url: Option<String>,
/// If enabled, the hash recorded for the specified file is replaced if it no longer matches
#[arg(default_value_t = false, long = "override")]
pub allow_override: bool,
}
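Since the whole interface is derived, the parser can be exercised without a real process invocation via clap's Parser::parse_from. The following is a hedged sketch of a unit test that could sit at the bottom of src/cli.rs; the test module and argv values are illustrative, not part of the commit.

#[cfg(test)]
mod tests {
    use super::*;
    use clap::Parser;

    #[test]
    fn parses_check_flags() {
        // Equivalent to running: lockman check --fast-fail --list-invalid-only
        let cli = Cli::parse_from(["lockman", "check", "--fast-fail", "--list-invalid-only"]);
        match cli.command {
            Commands::Check(args) => {
                assert!(args.fast_fail);
                assert!(args.only_report_mismatches);
            }
            _ => panic!("expected the check subcommand"),
        }
    }
}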

src/download_command.rs (new file): 148 added lines

@@ -0,0 +1,148 @@
use std::{fs::File, io::Write, path::PathBuf, process::exit, time::SystemTime};
use reqwest::StatusCode;
use sha2::{Digest, Sha512};
use crate::{
check_command::{check_item, CheckResult},
cli::{colors::{GREEN, RED, RESET}, DownloadArgs, DownloadExistingTreatment},
lockfile::{load_project_printing, LockedFile3},
};
pub fn download_command(args: DownloadArgs) {
let rt = tokio::runtime::Builder::new_current_thread()
.enable_io()
.enable_time()
.build()
.expect("Failed to build tokio runtime");
rt.block_on(download_command_async(args));
}
async fn download_command_async(args: DownloadArgs) {
let res = load_project_printing();
for record in res.locked {
handle_locked_file(record, args).await;
}
}
async fn handle_locked_file(record: LockedFile3, args: DownloadArgs) {
let check_res = check_item(&record);
match check_res {
CheckResult::Ok(_) => {
println!("{GREEN}OK{RESET}:\t {}", &record.path);
}
CheckResult::Invalid(invalid_hash) => match args.existing_file_behavior {
DownloadExistingTreatment::ValidateReplace => {
println!(
"Downloading and replacing invalid file:\n\t{}",
&record.path
);
let _ = download_file(args, &record).await.unwrap();
println!("Replaced invalid file:\n\t{}", &record.path);
}
DownloadExistingTreatment::ValidateFail => {
println!("Existing file has an invalid hash:\n\t{}\n\texpected: {}\n\tfound: {invalid_hash} ;)", record.path, record.expected_hash);
}
DownloadExistingTreatment::ValidateReport => todo!(),
DownloadExistingTreatment::Ignore => todo!(),
},
CheckResult::NotAFile => {
println!("{RED}ERROR{RESET}:\tPath {} exists but is a directory.\nThis is unsupported at the moment", record.path);
}
CheckResult::Absent => {
println!("Downloading absent file:\n\t{}", &record.path);
match download_file(args, &record).await {
Ok(_) => {
println!("Downloaded absent file:\n\t{}", &record.path);
}
Err(download_error) => handle_download_error(download_error, &record),
}
}
}
}
fn handle_download_error(download_error: DownloadError, record: &LockedFile3) {
match download_error {
DownloadError::ErrorResponse(sc) => {
println!(
"Received an error HTTP status code ({sc}) upon downloading file: \n\t{}",
record.path
);
exit(5);
}
DownloadError::HashMismatch {
calculated,
expected,
} => {
println!(
"Downloaded file has a different hash:\n\tfile:{} \n\texpected: {}\n\treceived: {}",
record.path, expected, calculated
);
exit(5);
}
DownloadError::IoError(err) => {
println!(
"An I/O error occurred while attempting to download a file: {}\n{err}",
record.path
);
}
}
}
async fn download_file(args: DownloadArgs, record: &LockedFile3) -> Result<String, DownloadError> {
let mut res = reqwest::get(&record.url).await.unwrap();
if res.status() != StatusCode::OK {
return Err(DownloadError::ErrorResponse(res.status()));
}
let mut hasher = Sha512::new();
let mut file = if args.direct_write {
// TODO: Handle nonexistent paths
File::create(&record.path).unwrap()
} else {
todo!()
};
while let Some(chunk) = res.chunk().await.unwrap() {
hasher.write_all(&chunk).unwrap();
file.write_all(&chunk).unwrap();
}
let hash = hasher.finalize();
let hash = format!("{hash:x}");
if hash != record.expected_hash {
return Err(DownloadError::HashMismatch {
calculated: hash,
expected: record.expected_hash.clone(),
});
}
if let Err(err) = file.flush() {
return Err(DownloadError::IoError(err));
}
Ok(hash)
}
/// TODO: not in use yet
fn get_temp_dir() -> PathBuf {
let particle = SystemTime::now()
.duration_since(SystemTime::UNIX_EPOCH)
.unwrap_or_default()
.as_millis();
let particle = format!("{}-{particle}", env!("CARGO_BIN_NAME"));
let mut path = std::env::temp_dir();
path.push(particle);
path
}
#[derive(Debug)]
enum DownloadError {
ErrorResponse(StatusCode),
HashMismatch {
calculated: String,
expected: String,
},
IoError(std::io::Error),
}
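The non --direct-write branch and get_temp_dir are still todo!(). One possible shape for that path, offered as a hedged sketch rather than the intended implementation: `download_to_temp` is a hypothetical helper that would live next to download_file and reuse the items already in scope in this module.

async fn download_to_temp(record: &LockedFile3) -> Result<String, DownloadError> {
    let mut res = reqwest::get(&record.url).await.unwrap();
    if res.status() != StatusCode::OK {
        return Err(DownloadError::ErrorResponse(res.status()));
    }
    // Stream into <temp dir>/<file name> so a failed or corrupt download never
    // clobbers the file at record.path.
    let temp_dir = get_temp_dir();
    std::fs::create_dir_all(&temp_dir).map_err(DownloadError::IoError)?;
    let file_name = std::path::Path::new(&record.path)
        .file_name()
        .expect("locked path has no file name");
    let temp_path = temp_dir.join(file_name);
    let mut hasher = Sha512::new();
    let mut file = File::create(&temp_path).map_err(DownloadError::IoError)?;
    while let Some(chunk) = res.chunk().await.unwrap() {
        hasher.write_all(&chunk).unwrap();
        file.write_all(&chunk).map_err(DownloadError::IoError)?;
    }
    file.flush().map_err(DownloadError::IoError)?;
    let hash = format!("{:x}", hasher.finalize());
    if hash != record.expected_hash {
        return Err(DownloadError::HashMismatch {
            calculated: hash,
            expected: record.expected_hash.clone(),
        });
    }
    // fs::rename can fail across filesystems, so copy + remove is the portable move.
    std::fs::copy(&temp_path, &record.path).map_err(DownloadError::IoError)?;
    std::fs::remove_file(&temp_path).map_err(DownloadError::IoError)?;
    Ok(hash)
}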

src/lockfile.rs (new file): 132 added lines

@@ -0,0 +1,132 @@
use std::{
collections::HashMap,
fs::{self, File},
io::Write,
};
use serde::{Deserialize, Serialize};
const PROJECT_FILE_NAME: &str = "Lockman.toml";
const LOCK_FILE_NAME: &str = "Lockfile.toml";
/// Project file (Lockman.toml) format
#[derive(Deserialize, Serialize, Debug)]
pub struct ProjectFile {
/// Map of file -> URL
/// TODO: Consider using `Vec` instead
pub files: HashMap<String, String>,
}
/// Lockfile file format
#[derive(Deserialize, Serialize, Debug)]
pub struct LockFile {
#[serde(rename = "lockfile")]
pub version: u8,
/// Map of file -> hash
/// TODO: Consider using `Vec` instead
pub locks: HashMap<String, String>,
}
/// Combination of project file and lockfile data
#[derive(Debug)]
pub struct LockedFile3 {
pub path: String,
pub url: String,
pub expected_hash: String,
}
#[derive(Debug)]
pub struct FileLoadingResults {
pub locked: Vec<LockedFile3>,
pub unlocked: Vec<(String, String)>,
pub unaffiliated: Vec<(String, String)>,
}
pub fn load_project() -> FileLoadingResults {
let (project_file, lock_file) = load_project_files();
// TODO: check file support
let pf = project_file.files;
let lf = lock_file.locks;
let unlocked_files = pf
.iter()
.filter(|(path, _)| !lf.contains_key(*path))
.map(|(p, u)| (p.clone(), u.clone()))
.collect::<Vec<_>>();
let unaffiliated_files = lf
.iter()
.filter(|(path, _)| !pf.contains_key(*path))
.map(|(p, h)| (p.clone(), h.clone()))
.collect::<Vec<_>>();
let locked_files = pf
.iter()
.filter_map(|(path, url)| {
lf.get(path).map(|eh| LockedFile3 {
path: path.clone(),
url: url.clone(),
expected_hash: eh.clone(),
})
})
.collect::<Vec<_>>();
FileLoadingResults {
locked: locked_files,
unlocked: unlocked_files,
unaffiliated: unaffiliated_files,
}
}
pub fn load_project_files() -> (ProjectFile, LockFile) {
let project_file = fs::read_to_string(PROJECT_FILE_NAME).expect("Lockman.toml not found");
let lock_file = fs::read_to_string(LOCK_FILE_NAME).expect("Lockfile.toml not found");
let project_file: ProjectFile =
toml::from_str(&project_file).expect("Failed to parse Lockman file");
let lock_file: LockFile = toml::from_str(&lock_file).expect("Failed to parse Lockfile");
(project_file, lock_file)
}
#[inline]
pub fn load_project_printing() -> FileLoadingResults {
let result = load_project();
println!(
"There are {} locked files, {} are without lock, {} locks are unaffiliated.",
result.locked.len(),
result.unlocked.len(),
result.unaffiliated.len(),
);
result
}
pub fn update_project_file(files: HashMap<String, String>) {
let data = toml::to_string_pretty(&ProjectFile { files })
.expect("Failed to serialize Lockman.toml file");
save_file(PROJECT_FILE_NAME, &data);
}
pub fn update_lock_file(files: HashMap<String, String>) {
let data = toml::to_string_pretty(&LockFile {
version: 1,
locks: files,
})
.expect("Failed to serialize Lockfile.toml file");
save_file(LOCK_FILE_NAME, &data);
}
fn save_file(path: &str, data: &str) {
let mut file = File::create(path).expect("Failed to open file for writing");
file.write_all(data.as_bytes())
.expect("Writing to file failed");
eprintln!("Successfully written to {path}");
}
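For reference, here is a small standalone program showing what the two files end up looking like on disk when serialized through toml::to_string_pretty as above. The struct definitions are repeated so the sketch compiles on its own and are assumed to mirror those in src/lockfile.rs; the path, URL, and hash are invented for illustration.

use std::collections::HashMap;

use serde::Serialize;

#[derive(Serialize)]
struct ProjectFile {
    files: HashMap<String, String>,
}

#[derive(Serialize)]
struct LockFile {
    #[serde(rename = "lockfile")]
    version: u8,
    locks: HashMap<String, String>,
}

fn main() {
    let project = ProjectFile {
        files: HashMap::from([(
            "assets/data.bin".to_string(),
            "https://example.com/data.bin".to_string(),
        )]),
    };
    let lock = LockFile {
        version: 1,
        locks: HashMap::from([("assets/data.bin".to_string(), "<sha512 hex digest>".to_string())]),
    };
    // Lockman.toml: a [files] table mapping path -> URL.
    println!("{}", toml::to_string_pretty(&project).unwrap());
    // Lockfile.toml: `lockfile = 1` followed by a [locks] table mapping path -> SHA-512 hex digest.
    println!("{}", toml::to_string_pretty(&lock).unwrap());
}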

src/main.rs

@@ -1,3 +1,22 @@
use clap::Parser;
use cli::Commands;
mod add_command;
mod check_command;
mod cli;
mod download_command;
mod lockfile;
fn main() {
println!("Hello, world!");
let cli = cli::Cli::parse();
match cli.command {
Commands::Download(args) => download_command::download_command(args),
Commands::Check(args) => check_command::check_command(args),
Commands::CheckAvailability => todo!(),
Commands::Add(args) => add_command::add_command(args),
Commands::AddDownload => todo!(),
}
// TODO: Check project/lock schema version support
}
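On the remaining TODO: since LockFile already carries a schema number (the lockfile = 1 field), a version check could be as small as the sketch below, assuming it lives in main.rs; SUPPORTED_LOCKFILE_VERSION, ensure_supported_schema, and the exit code are hypothetical choices, not part of the commit.

const SUPPORTED_LOCKFILE_VERSION: u8 = 1;

fn ensure_supported_schema(lock_file: &lockfile::LockFile) {
    if lock_file.version != SUPPORTED_LOCKFILE_VERSION {
        eprintln!(
            "Unsupported lockfile schema version {} (this build supports {})",
            lock_file.version, SUPPORTED_LOCKFILE_VERSION
        );
        // Exit code chosen arbitrarily for the sketch.
        std::process::exit(2);
    }
}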