patch hash check

2025-10-17 12:53:20 +03:00
parent 561400ae3c
commit ac8b4d6f81
4 changed files with 32 additions and 17 deletions

View File

@@ -1,5 +1,5 @@
 build:
 cargo build --release --target x86_64-unknown-linux-gnu --package zsdiff_all --bin zspatch
 cargo build --release --target x86_64-unknown-linux-gnu --package zsdiff_all --bin zsdiff
-#cargo build --release --target x86_64-apple-darwin --package zdiff_all --bin zpatch
-#cargo build --release --target x86_64-pc-windows-gnu --package zdiff_all --bin zpatch
+cargo build --release --target x86_64-pc-windows-gnu --package zsdiff_all --bin zspatch
+cargo build --release --target x86_64-pc-windows-gnu --package zsdiff_all --bin zsdiff

View File

@@ -1,3 +1,4 @@
+use md5;
 use serde::{Deserialize, Serialize};
 use std::collections::HashMap;
 use std::{fs, io};
@@ -9,7 +10,7 @@ pub struct Zsdiff {
 }
 impl Zsdiff {
-    pub async fn from_vec(_data: Vec<Vec<u8>>) -> Result<Self, std::io::Error> {
+    pub async fn from_vec(_data: Vec<Vec<u8>>) -> Result<Self, io::Error> {
         let mut content = HashMap::new();
         for part in _data {
             let filename_size = u32::from_be_bytes(part[0..4].try_into().unwrap()) as usize;
@@ -56,7 +57,7 @@ pub struct Metadata {
     pub remove_files: Vec<String>,
 }
-pub async fn get_hash(data: &Vec<u8>) -> String {
+pub async fn get_hash(data: Vec<u8>) -> String {
     let hash = md5::compute(&data[..]);
     format!("{:x}", hash)
 }
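For context (not part of the commit): a minimal sketch of the digest format both tools rely on, assuming only the md5 crate imported above. The hex string produced this way is what zsdiff writes to the .md5 sidecar and what zspatch later compares against.

// Hypothetical helper, illustrative only: md5::compute returns an md5::Digest,
// and "{:x}" renders it as the 32-character lowercase hex string used as the
// sidecar / metadata hash value.
fn hex_digest(data: &[u8]) -> String {
    format!("{:x}", md5::compute(data))
}

fn main() {
    let a = hex_digest(b"same bytes");
    let b = hex_digest(b"same bytes");
    assert_eq!(a, b); // identical input always yields the identical hex string
    println!("{}", a);
}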

View File

@@ -2,6 +2,7 @@ mod utils;
 use clap::Parser;
 use std::collections::HashMap;
+use std::io::Write;
 use std::{fs, io};
 use utils::{Metadata, Zsdiff, get_hash};
 use walkdir::WalkDir;
@@ -21,7 +22,7 @@ async fn walk_dir(dir: String) -> HashMap<String, FileInfo> {
             continue;
         }
         let content = fs::read(path).unwrap();
-        let hash = get_hash(&content).await;
+        let hash = get_hash(content).await;
         let path_str = path.display().to_string();
         let file_info = FileInfo {
             relative_path: path_str[dir.len() + 1..].to_string(),
@@ -76,12 +77,10 @@ pub async fn zsdiff(
     let new_hashes = walk_dir(new).await;
     let compare_hashes = compare_hashes(old_hashes, new_hashes).await;
     let parts = compare_hashes.to_vec().await;
-    let file = fs::File::create(output_filename)?;
-    utils::compress_parts(parts, &file, level).await;
-    // let mut buf = Vec::new();
-    // file.read(&mut buf)?;
-    // let output_hash = get_hash(&buf).await;
-    // println!("{}", output_hash);
+    utils::compress_parts(parts, &fs::File::create(output_filename)?, level).await;
+    let output_hash = get_hash(fs::read(output_filename)?).await;
+    fs::File::create(format!("{}.md5", output_filename))?.write_all(output_hash.as_bytes())?;
+    println!("Zsdiff hash: {}", output_hash);
     Ok(())
 }
@@ -100,6 +99,5 @@ struct Args {
 #[tokio::main]
 async fn main() -> io::Result<()> {
     let args = Args::parse();
-    zsdiff(args.filename, args.old, args.new, args.compress_level).await?;
-    Ok(())
+    zsdiff(args.filename, args.old, args.new, args.compress_level).await
 }
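Not part of the commit, but for orientation: the "{output}.md5" sidecar written above is meant to be read back and compared against a freshly computed digest of the patch file. A minimal synchronous sketch of that consumer side (the name verify_sidecar is illustrative; the commit's real counterpart is the async check_hash added in the next file):

use std::{fs, io};

// Illustrative sketch: recompute the md5 of the patch payload and compare it with
// the bare hex string stored in "<patch>.md5" (written by zsdiff via write_all,
// with no trailing newline).
fn verify_sidecar(patch_path: &str) -> io::Result<()> {
    let payload = fs::read(patch_path)?;
    let expected = fs::read_to_string(format!("{}.md5", patch_path))?;
    let actual = format!("{:x}", md5::compute(&payload));
    if actual != expected {
        return Err(io::Error::new(io::ErrorKind::Other, "Hash mismatch"));
    }
    Ok(())
}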

View File

@@ -26,7 +26,17 @@ async fn extract_files(zsdiff: &Zsdiff, filename: &String) -> Result<String, io:
     Ok(tmp_dir_name)
 }
-async fn zpatch(filename: String, dest_dir: String) -> Result<(), io::Error> {
+async fn check_hash(filename: String) -> Result<(), io::Error> {
+    let file_data = read(format!("{}.zdiff", filename))?;
+    let hash_file = read(format!("{}.zdiff.md5", filename))?;
+    let hash = utils::get_hash(file_data).await;
+    if !String::from_utf8(hash_file).unwrap().eq(&hash) {
+        return Err(io::Error::new(io::ErrorKind::Other, "Hash mismatch"));
+    }
+    Ok(())
+}
+async fn zspatch(filename: String, dest_dir: String) -> Result<(), io::Error> {
     let filename = &format!("{}.zdiff", filename);
     let parts = utils::decompress_parts(read(filename)?).await?;
     let diff = Zsdiff::from_vec(parts).await?;
@@ -46,11 +56,12 @@ async fn zpatch(filename: String, dest_dir: String) -> Result<(), io::Error> {
     for (k, hash) in diff.metadata.hashes {
         let path = Path::new(&dest_dir).join(k);
         let content = read(path)?;
-        let fs_hash = utils::get_hash(&content).await;
+        let fs_hash = utils::get_hash(content).await;
         if !fs_hash.eq(&hash) {
             Err(io::Error::new(io::ErrorKind::Other, "Hash mismatch"))?
         }
     }
+    fs::remove_dir_all(tmp_dir_name).ok();
     Ok(())
 }
@@ -60,11 +71,16 @@ struct Args {
     filename: String,
     #[arg(short, long)]
     dest_dir: String,
+    #[arg(short, long)]
+    hash_check: bool,
 }
 #[tokio::main]
 async fn main() -> io::Result<()> {
     let args = Args::parse();
-    zpatch(args.filename, args.dest_dir).await?;
-    Ok(())
+    println!("{}", args.hash_check);
+    if args.hash_check {
+        check_hash(args.filename.clone()).await?;
+    }
+    zspatch(args.filename, args.dest_dir).await
 }