mod utils;

use crate::utils::Metadata;
use clap::{Arg, ArgAction, Command};
use std::path::{Path, PathBuf};
use std::{io, time};
use tokio::fs;
use tokio::io::AsyncWriteExt;
use utils::Zsdiff;

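// On-disk `.zdiff` layout, as read by `load_metadata` below (a sketch inferred
// from this file; the full container parsing lives in `utils::Zsdiff::from_vec`,
// which is not shown here):
//
//   bytes 0..2   u16, big-endian  metadata format version
//   bytes 2..6   u32, big-endian  metadata length in bytes
//   bytes 6..    JSON-encoded `Metadata`, followed by the archive payload
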
/// Creates a fresh `<dir_name>_tmp` working directory, replacing any
/// leftover one from a previous run.
async fn create_tmp_dir(dir_name: String) -> Result<String, io::Error> {
    let name = PathBuf::from(format!("{}_tmp", dir_name));
    if name.exists() {
        fs::remove_dir_all(&name).await?;
    }
    fs::create_dir(&name).await?;
    name.to_str().map(|s| s.to_string()).ok_or_else(|| {
        io::Error::new(
            io::ErrorKind::InvalidData,
            "Path contains invalid UTF-8 characters",
        )
    })
}

/// Reads only the metadata header of `<filename>.zdiff`, without parsing
/// the full archive.
async fn load_metadata(filename: String) -> Result<Metadata, io::Error> {
    let filepath = format!("{}.zdiff", filename);
    let data = fs::read(&filepath).await?;

    // Guard the fixed-size header reads so a truncated file errors out
    // instead of panicking on the slice conversions below.
    if data.len() < 6 {
        return Err(io::Error::new(
            io::ErrorKind::InvalidData,
            "File too short to contain a metadata header",
        ));
    }
    let meta_version = u16::from_be_bytes(data[..2].try_into().unwrap());
    if !utils::is_metadata_supported(meta_version) {
        return Err(io::Error::new(
            io::ErrorKind::InvalidData,
            "Metadata version not supported",
        ));
    }
    let meta_size = u32::from_be_bytes(data[2..6].try_into().unwrap()) as usize;
    if data.len() < meta_size + 6 {
        return Err(io::Error::new(
            io::ErrorKind::InvalidData,
            "Metadata length exceeds file size",
        ));
    }
    let metadata = serde_json::from_slice(&data[6..meta_size + 6])
        .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;

    Ok(metadata)
}

/// Loads and parses the full `<filename>.zdiff` archive.
async fn load_file(filename: String) -> Result<Zsdiff, io::Error> {
    let full_filename = format!("{}.zdiff", filename);
    let data = fs::read(&full_filename).await?;
    let zsdiff = Zsdiff::from_vec(data).await?;
    println!(
        ">>> Metadata files to remove: {}",
        zsdiff.metadata.remove_files.len()
    );
    println!(
        ">>> Metadata hashes to check: {}",
        zsdiff.metadata.hashes.len()
    );

    Ok(zsdiff)
}

/// Unpacks every file carried by the diff into a temporary directory and
/// returns that directory's path.
async fn extract_files(zsdiff: &Zsdiff, filename: String) -> Result<String, io::Error> {
    let tmp_dir_name = create_tmp_dir(filename).await?;
    let path = Path::new(&tmp_dir_name);

    for (i, (f, c)) in zsdiff.content.iter().enumerate() {
        println!(
            ">>> Processing file {}/{}: '{}'",
            i + 1,
            zsdiff.content.len(),
            f
        );
        let filepath = path.join(f);
        if let Some(parent) = filepath.parent() {
            fs::create_dir_all(parent).await?;
        }

        let mut file = fs::File::create(&filepath).await?;
        file.write_all(c).await?;
    }
    Ok(tmp_dir_name)
}

/// Verifies `<filename>.zdiff` against the sidecar `<filename>.zdiff.md5`
/// checksum file (expected line format: `<hash> <filename>`).
async fn check_hash(filename: String) -> Result<(), io::Error> {
    let file_data = fs::read(format!("{}.zdiff", filename)).await?;
    let hash_file = String::from_utf8(fs::read(format!("{}.zdiff.md5", filename)).await?)
        .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;
    let hash = utils::get_hash(file_data).await;
    // Keep only the hash column; the rest of the line names the file.
    let expected = hash_file.split_whitespace().next().unwrap_or("");
    if expected != hash {
        return Err(io::Error::new(
            io::ErrorKind::Other,
            format!("Hash mismatch. Expected {}, got {}", expected, hash),
        ));
    }
    println!(">>> Zsdiff hash: {}", hash);
    Ok(())
}

/// Applies the patch: extracts the diff, copies the new and changed files
/// into `dest_dir`, removes obsolete files and folders, then verifies hashes.
async fn zspatch(filename: String, dest_dir: String) -> Result<(), io::Error> {
    let now = time::Instant::now();
    let diff = load_file(filename.clone()).await?;
    let tmp_dir_name = extract_files(&diff, filename).await?;

    let files_to_copy: Vec<String> = diff.content.keys().cloned().collect();
    for name in &files_to_copy {
        let from_path = Path::new(&tmp_dir_name).join(name);
        let to_path = Path::new(&dest_dir).join(name);

        if !from_path.exists() {
            println!("ERROR: Source file doesn't exist: {:?}", from_path);
            continue;
        }

        if let Some(parent) = to_path.parent() {
            fs::create_dir_all(parent).await?;
        }

        fs::copy(&from_path, &to_path).await?;
    }

    for file in &diff.metadata.remove_files {
        let path = Path::new(&dest_dir).join(file);
        println!(">>> Removing file {}", path.display());

        if !path.exists() {
            println!("File doesn't exist, skipping");
            continue;
        }

        fs::remove_file(&path).await?;
    }

    println!(">>> Starting folder removal process <<<");
    println!(
        ">>> Folders to remove: {}",
        diff.metadata.remove_folders.len()
    );

    for folder in &diff.metadata.remove_folders {
        let path = Path::new(&dest_dir).join(folder);

        if !path.exists() {
            println!("Folder doesn't exist, skipping");
            continue;
        }

        fs::remove_dir_all(&path).await?;
    }

    println!(">>> Starting hash verification <<<");
    println!(">>> Files to verify: {}", diff.metadata.hashes.len());

    for (k, hash) in &diff.metadata.hashes {
        let path = Path::new(&dest_dir).join(k);

        match fs::read(&path).await {
            Ok(content) => {
                let fs_hash = utils::get_hash(content).await;
                if !fs_hash.eq(hash) {
                    println!(
                        "Hash mismatch. Expected {}, got {}. Path: {}",
                        hash,
                        fs_hash,
                        path.display()
                    );
                }
            }
            Err(e) => {
                println!("Can't read file for hash verification: {}", e);
            }
        }
    }

    fs::remove_dir_all(tmp_dir_name).await?;
    println!(">>> Patching done! <<<");
    println!(">>> Elapsed time: {:.2?}", now.elapsed());
    Ok(())
}

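// Example invocations (assuming the built binary is named `zspatch`; the
// actual name depends on the Cargo package, which is not shown here):
//
//   zspatch metadata -f update                      # inspect update.zdiff
//   zspatch patch -f update -d ./app --hash_check
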
#[tokio::main]
async fn main() -> io::Result<()> {
    let m = Command::new("ZsPatch")
        .author("ScuroNeko")
        .version("0.3.0")
        .about("Applies .zdiff patch archives to a destination directory")
        .subcommand_required(true)
        .arg_required_else_help(true)
        .subcommand(
            Command::new("metadata")
                .short_flag('m')
                .long_flag("metadata")
                .arg(
                    Arg::new("filename")
                        .short('f')
                        .long("filename")
                        .required(true)
                        .action(ArgAction::Set),
                ),
        )
        .subcommand(
            Command::new("patch")
                .short_flag('p')
                .arg(
                    Arg::new("filename")
                        .short('f')
                        .long("filename")
                        .required(true)
                        .action(ArgAction::Set),
                )
                .arg(
                    Arg::new("dest")
                        .short('d')
                        .long("dest")
                        .required(true)
                        .action(ArgAction::Set),
                )
                .arg(
                    Arg::new("hash_check")
                        .long("hash_check")
                        .required(false)
                        .action(ArgAction::SetTrue),
                ),
        )
        .get_matches();

    match m.subcommand() {
        Some(("metadata", meta_matches)) => {
            let filename: &String = meta_matches.get_one("filename").unwrap();
            let metadata = load_metadata(filename.clone()).await?;
            println!(">>> Compress level: {}", metadata.compress_level);
            println!(
                ">>> Encrypted?: {}",
                if metadata.encrypted { "Yes" } else { "No" }
            );
        }
        Some(("patch", patch_matches)) => {
            let filename: &String = patch_matches.get_one("filename").unwrap();
            let dest_dir: &String = patch_matches.get_one("dest").unwrap();
            if patch_matches.get_flag("hash_check") {
                // Propagate a failed pre-check instead of silently ignoring it;
                // the flag exists precisely to abort on a corrupt archive.
                check_hash(filename.clone()).await?;
            }
            zspatch(filename.clone(), dest_dir.clone()).await?;
        }
        _ => unreachable!("Subcommand is required"),
    }
    Ok(())
}

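// A minimal smoke test for `create_tmp_dir` -- a sketch, assuming tokio's
// `macros`/`rt` features (already implied by `#[tokio::main]` above) and a
// writable working directory.
#[cfg(test)]
mod tests {
    use super::*;

    #[tokio::test]
    async fn create_tmp_dir_creates_and_replaces() {
        let dir = create_tmp_dir("zspatch_test".to_string()).await.unwrap();
        assert!(Path::new(&dir).exists());
        // A second call must replace the directory instead of failing.
        let dir = create_tmp_dir("zspatch_test".to_string()).await.unwrap();
        assert!(Path::new(&dir).exists());
        fs::remove_dir_all(&dir).await.unwrap();
    }
}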