v0.3
src/zspatch.rs | 125
@@ -1,6 +1,7 @@
mod utils;

use clap::Parser;
use crate::utils::Metadata;
use clap::{Arg, ArgAction, ArgMatches, Command, Parser};
use std::path::{Path, PathBuf};
use std::{io, time};
use tokio::fs;
@@ -21,10 +22,28 @@ async fn create_tmp_dir(dir_name: String) -> Result<String, io::Error> {
    })
}

async fn load_metadata(filename: String) -> Result<Metadata, io::Error> {
    let filepath = format!("{}.zdiff", filename);
    let data = fs::read(&filepath).await?;

    let meta_version = u16::from_be_bytes(data[..2].try_into().unwrap());
    if !utils::is_metadata_supported(meta_version) {
        return Err(io::Error::new(
            io::ErrorKind::InvalidData,
            "Metadata version not supported",
        ));
    }
    let meta_size = u32::from_be_bytes(data[2..6].try_into().unwrap()) as usize;
    let meta_data = data[6..meta_size + 6].to_vec();
    let metadata = serde_json::from_slice(&meta_data[..])
        .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;

    Ok(metadata)
}

async fn load_file(filename: String) -> Result<Zsdiff, io::Error> {
    let full_filename = format!("{}.zdiff", filename);
    let compressed_data = fs::read(&full_filename).await?;
    let data = utils::decompress(compressed_data).await?;
    let data = fs::read(&full_filename).await?;
    let zsdiff = Zsdiff::from_vec(data).await?;
    println!(
        ">>> Metadata files to remove: {}",
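Read alongside the hunk above: load_metadata implies a small fixed header at the front of a .zdiff file, namely a big-endian u16 metadata format version, a big-endian u32 metadata length, and then that many bytes of JSON-encoded metadata. The standalone sketch below is not part of the commit; it only illustrates that layout, and MetadataSketch with its compress_level/encrypted fields is a stand-in inferred from the println! calls later in this diff, not the real utils::Metadata.

// Sketch only, not from the commit: parses the .zdiff header layout used by
// load_metadata above. MetadataSketch is an illustrative stand-in for
// utils::Metadata; requires the serde (with derive) and serde_json crates.
use serde::Deserialize;

#[derive(Debug, Deserialize)]
struct MetadataSketch {
    compress_level: u8,
    encrypted: bool,
}

fn parse_header(data: &[u8]) -> Result<(u16, MetadataSketch), std::io::Error> {
    use std::io::{Error, ErrorKind};
    if data.len() < 6 {
        return Err(Error::new(ErrorKind::UnexpectedEof, "header too short"));
    }
    // Bytes 0..2: metadata format version, big-endian.
    let version = u16::from_be_bytes(data[..2].try_into().unwrap());
    // Bytes 2..6: metadata length in bytes, big-endian.
    let meta_size = u32::from_be_bytes(data[2..6].try_into().unwrap()) as usize;
    // Bytes 6..6 + meta_size: JSON-encoded metadata.
    let meta_bytes = data
        .get(6..6 + meta_size)
        .ok_or_else(|| Error::new(ErrorKind::UnexpectedEof, "metadata truncated"))?;
    let metadata = serde_json::from_slice(meta_bytes)
        .map_err(|e| Error::new(ErrorKind::InvalidData, e))?;
    Ok((version, metadata))
}

fn main() -> Result<(), std::io::Error> {
    // Build a tiny in-memory header to exercise the parser.
    let json = br#"{"compress_level":3,"encrypted":false}"#;
    let mut buf = Vec::new();
    buf.extend_from_slice(&1u16.to_be_bytes());
    buf.extend_from_slice(&(json.len() as u32).to_be_bytes());
    buf.extend_from_slice(json);
    let (version, meta) = parse_header(&buf)?;
    println!("version {version}, {meta:?}");
    Ok(())
}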
@@ -77,15 +96,11 @@ async fn check_hash(filename: String) -> Result<(), io::Error> {
}

async fn zspatch(filename: String, dest_dir: String) -> Result<(), io::Error> {
    let now = time::Instant::now();
    let cloned = filename.clone();
    let diff = load_file(cloned).await.ok().unwrap();
    let tmp_dir_name = extract_files(&diff, filename).await?;
    let now = time::Instant::now();

    fs::File::create("metadata.json")
        .await?
        .write_all(serde_json::to_vec(&diff.metadata).unwrap().as_slice())
        .await?;
    let files_to_copy: Vec<String> = diff.content.keys().cloned().collect();
    for (_, name) in files_to_copy.iter().enumerate() {
        let from_path = Path::new(&tmp_dir_name).join(name);
@@ -156,7 +171,7 @@ async fn zspatch(filename: String, dest_dir: String) -> Result<(), io::Error> {
        }
    }

    // fs::remove_dir_all(tmp_dir_name).await?;
    fs::remove_dir_all(tmp_dir_name).await?;
    println!(">>> Patching done! <<<");
    println!(">>> Elapsed time: {:.2?}", now.elapsed());
    Ok(())
@@ -176,19 +191,85 @@ struct Args {

#[tokio::main]
async fn main() -> io::Result<()> {
    let args = Args::parse();
    let m = Command::new("ZsPatch")
        .author("ScuroNeko")
        .version("0.3.0")
        .about("Explains in brief what the program does")
        .subcommand_required(true)
        .arg_required_else_help(true)
        .after_help("")
        .subcommand(
            Command::new("metadata")
                .short_flag('m')
                .long_flag("metadata")
                .arg(
                    Arg::new("filename")
                        .short('f')
                        .long("filename")
                        .required(true)
                        .action(ArgAction::Set),
                ),
        )
        .subcommand(
            Command::new("patch")
                .short_flag('p')
                .arg(
                    Arg::new("filename")
                        .short('f')
                        .required(true)
                        .action(ArgAction::Set),
                )
                .arg(
                    Arg::new("dest")
                        .short('d')
                        .required(true)
                        .action(ArgAction::Set),
                )
                .arg(
                    Arg::new("hash_check")
                        .long("hash_check")
                        .required(false)
                        .action(ArgAction::SetTrue),
                ),
        )
        .get_matches();

    let filename = args.filename.clone();
    let dest_dir = args.dest_dir.clone();
    match m.subcommand() {
        Some(("metadata", meta_matches)) => {
            let filename: &String = meta_matches.get_one("filename").unwrap();
            let metadata = load_metadata(filename.clone()).await?;
            println!(">>> Compress level: {}", metadata.compress_level);
            println!(
                ">>> Encrypted?: {}",
                if metadata.encrypted { "Yes" } else { "No" }
            );
            return Ok(());
        }
        Some(("patch", patch_matches)) => {
            let filename: &String = patch_matches.get_one("filename").unwrap();
            let dest_dir: &String = patch_matches.get_one("dest").unwrap();
            if patch_matches.get_flag("hash_check") {
                check_hash(filename.clone()).await.ok();
            }
            zspatch(filename.clone(), dest_dir.clone()).await?;
        }
        _ => unreachable!("Subcommand is required"),
    }
    Ok(())

    if args.check_hash {
        check_hash(args.filename.clone()).await.ok();
    }
    if args.metadata {
        let diff = load_file(filename).await?;
        let metadata = diff.metadata;
        println!(">>> Compress level: {}", metadata.compress_level);
        return Ok(());
    }
    zspatch(filename, dest_dir).await
    // let args = Args::parse();
    //
    // let filename = args.filename.clone();
    // let dest_dir = args.dest_dir.clone();
    //
    // if args.check_hash {
    //     check_hash(args.filename.clone()).await.ok();
    // }
    // if args.metadata {
    //     let diff = load_file(filename).await?;
    //     let metadata = diff.metadata;
    //     println!(">>> Compress level: {}", metadata.compress_level);
    //     return Ok(());
    // }
    // zspatch(filename, dest_dir).await
}
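Assuming the crate builds to a binary named zspatch (the source lives in src/zspatch.rs; the Command is titled ZsPatch), the builder above accepts invocations such as zspatch metadata -f <file> and zspatch patch -f <file> -d <dest> --hash_check. The standalone sketch below is not part of the commit; it rebuilds a trimmed-down copy of the patch subcommand and checks with clap's try_get_matches_from that such an invocation parses.

// Sketch only, not from the commit: a trimmed-down copy of the "patch"
// subcommand above, driven through clap 4's try_get_matches_from to show
// how the arguments are read back out.
use clap::{Arg, ArgAction, Command};

fn cli() -> Command {
    Command::new("ZsPatch").subcommand_required(true).subcommand(
        Command::new("patch")
            .short_flag('p')
            .arg(Arg::new("filename").short('f').required(true).action(ArgAction::Set))
            .arg(Arg::new("dest").short('d').required(true).action(ArgAction::Set))
            .arg(Arg::new("hash_check").long("hash_check").action(ArgAction::SetTrue)),
    )
}

fn main() {
    // The binary name "zspatch" here is an assumption about the Cargo target.
    let m = cli()
        .try_get_matches_from(["zspatch", "patch", "-f", "update", "-d", "out", "--hash_check"])
        .expect("arguments should parse");
    let patch = m.subcommand_matches("patch").unwrap();
    assert_eq!(patch.get_one::<String>("filename").unwrap(), "update");
    assert_eq!(patch.get_one::<String>("dest").unwrap(), "out");
    assert!(patch.get_flag("hash_check"));
    println!("parsed: zspatch patch -f update -d out --hash_check");
}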