This commit is contained in:
2025-10-20 12:34:04 +03:00
parent 207675f522
commit a91bd3bafc
6 changed files with 560 additions and 57 deletions

View File

@@ -1,12 +1,23 @@
use crate::utils;
use md5;
use rsa::pkcs1::{DecodeRsaPrivateKey, EncodeRsaPrivateKey, EncodeRsaPublicKey};
use rsa::pkcs8::LineEnding;
use rsa::traits::PublicKeyParts;
use rsa::{Pkcs1v15Encrypt, RsaPrivateKey, RsaPublicKey};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::{fs, io};
use std::io;
use tokio::fs;
use tokio::io::AsyncWriteExt;
use zstd::{Decoder, Encoder};
/// Metadata format version written into every new `.zdiff` header.
const METADATA_VERSION: u16 = 1;
/// All metadata versions this build is still able to parse.
const SUPPORTED_VERSION: [u16; 1] = [1];

/// Returns `true` when `version` is a metadata version this build can read.
pub fn is_metadata_supported(version: u16) -> bool {
    SUPPORTED_VERSION.iter().any(|&supported| supported == version)
}
pub struct Zsdiff {
pub content: HashMap<String, Vec<u8>>,
pub metadata: Metadata,
@@ -31,8 +42,27 @@ impl Zsdiff {
index += meta_size;
println!(">>> File count: {}", metadata.file_count);
let data = _data;
let mut data = _data[index..].to_vec();
if metadata.encrypted {
println!(">>> Decrypting file");
let key_content = fs::read("./private.pem").await?;
let private_key = RsaPrivateKey::from_pkcs1_pem(
String::from_utf8(key_content)
.expect("Can't load key")
.as_str(),
)
.expect("Can't load private key");
let encrypter = Encrypter::from_private_key(private_key);
data = encrypter.decrypt(data).await;
println!(">>> Decrypting done");
}
println!(">>> Decompressing file");
let compressor = Compressor::new();
data = compressor.decompress(data).await?;
println!(">>> Decompressing done");
index = 0;
let mut content = HashMap::new();
while index < data.len() {
let filename_size =
@@ -52,12 +82,11 @@ impl Zsdiff {
Ok(Zsdiff { content, metadata })
}
pub async fn to_vec(&self) -> Vec<u8> {
pub async fn to_vec(&self, compress_level: i32, encrypt: bool) -> Vec<u8> {
let mut meta_bytes: Vec<u8> = Vec::new();
meta_bytes.extend(METADATA_VERSION.to_be_bytes());
meta_bytes.extend(METADATA_VERSION.to_be_bytes()); // u16
let meta = serde_json::to_vec(&self.metadata).unwrap();
meta_bytes.extend((meta.len() as u32).to_be_bytes());
meta_bytes.extend((meta.len() as u32).to_be_bytes()); // u32
meta_bytes.extend(meta);
let mut parts: Vec<u8> = Vec::new();
@@ -71,9 +100,43 @@ impl Zsdiff {
parts.extend(part)
}
let size_before = parts.len();
parts = self.compress(parts, compress_level).await;
if encrypt {
parts = self.encrypt(parts).await;
}
let size_after = parts.len();
println!(">>> Size before: {:.1?}KB", size_before / 1024);
println!(">>> Size after: {:.1?}KB", size_after / 1024);
println!(
">>> Compress ratio: {:.2?}%",
size_after as f64 / size_before as f64 * 100.0
);
let out = vec![meta_bytes, parts].concat();
out
}
/// Zstd-compresses `data` at compression `level`, logging progress.
///
/// # Panics
/// Panics if the underlying compressor fails (treated as unrecoverable).
async fn compress(&self, data: Vec<u8>, level: i32) -> Vec<u8> {
    println!(">>> Compressing");
    let compressed = Compressor::new()
        .compress(data, level)
        .await
        .expect("Can't compress data");
    println!(">>> Compressing done");
    compressed
}
/// RSA-encrypts `data` with a freshly generated key pair, then exports the
/// pair to disk (via `Encrypter::export`) so the archive can be decrypted
/// later.
///
/// # Panics
/// Panics if encryption or key export fails.
async fn encrypt(&self, data: Vec<u8>) -> Vec<u8> {
    println!(">>> Encrypting");
    let encrypter = Encrypter::new_pair();
    let ciphertext = encrypter.encrypt(data).await.expect("Can't encrypt data");
    encrypter.export().await.expect("Can't export keys");
    println!(">>> Encrypting done");
    ciphertext
}
}
#[derive(Serialize, Deserialize, Debug)]
@@ -84,6 +147,7 @@ pub struct Metadata {
pub remove_folders: Vec<String>,
pub file_count: u32,
pub compress_level: i32,
pub encrypted: bool,
}
pub async fn get_hash(data: Vec<u8>) -> String {
@@ -91,15 +155,108 @@ pub async fn get_hash(data: Vec<u8>) -> String {
format!("{:x}", hash)
}
pub async fn compress(input: Vec<u8>, output: &fs::File, level: i32) {
let mut encoder = Encoder::new(output, level).unwrap();
io::copy(&mut input.as_slice(), &mut encoder).unwrap();
encoder.finish().unwrap();
/// Thin stateless wrapper around zstd stream encoding/decoding for
/// in-memory byte buffers.
struct Compressor {}

impl Compressor {
    /// Creates a new (stateless) compressor.
    pub(crate) fn new() -> Self {
        Compressor {}
    }

    /// Compresses `input` with zstd at `level`, returning the compressed bytes.
    ///
    /// NOTE(review): the zstd work is synchronous even though this fn is
    /// `async` — it will occupy the executor thread for large inputs; confirm
    /// whether it should be moved to a blocking task.
    pub async fn compress(&self, input: Vec<u8>, level: i32) -> Result<Vec<u8>, io::Error> {
        let mut encoder = Encoder::new(Vec::new(), level)?;
        io::copy(&mut input.as_slice(), &mut encoder)?;
        encoder.finish()
    }

    /// Decompresses zstd-encoded `input` back into the original bytes.
    pub async fn decompress(&self, input: Vec<u8>) -> Result<Vec<u8>, io::Error> {
        let mut decoder = Decoder::new(input.as_slice())?;
        let mut out = Vec::new();
        io::copy(&mut decoder, &mut out)?;
        Ok(out)
    }
}
pub async fn decompress(input: Vec<u8>) -> Result<Vec<u8>, io::Error> {
let mut decoder = Decoder::new(&input[..])?;
let mut buf = Vec::new();
io::copy(&mut decoder, &mut buf)?;
Ok(buf)
/// RSA key pair used to encrypt/decrypt zsdiff payloads with PKCS#1 v1.5
/// padding, splitting the data into modulus-sized segments.
pub struct Encrypter {
    private_key: RsaPrivateKey,
    public_key: RsaPublicKey,
    /// Modulus size in **bytes**, as reported by `RsaPublicKey::size()`.
    key_size: usize,
}

impl Encrypter {
    /// Generates a fresh 2048-bit key pair.
    ///
    /// # Panics
    /// Panics if key generation fails.
    pub fn new_pair() -> Self {
        let mut rng = rand::rng();
        let bits = 2048;
        let private_key = RsaPrivateKey::new(&mut rng, bits).expect("failed to generate a key");
        let public_key = RsaPublicKey::from(&private_key);
        Self {
            // Store the modulus size in bytes, consistent with the other
            // constructors. (Previously this stored `bits` = 2048 while
            // `from_private_key`/`new` stored `size()` in bytes = 256.)
            key_size: public_key.size(),
            private_key,
            public_key,
        }
    }

    /// Builds an `Encrypter` from an existing private key, deriving the
    /// matching public key.
    pub fn from_private_key(private_key: RsaPrivateKey) -> Self {
        let public_key = RsaPublicKey::from(&private_key);
        Self {
            key_size: public_key.size(),
            public_key,
            private_key,
        }
    }

    /// Builds an `Encrypter` from an existing key pair.
    pub fn new(private_key: RsaPrivateKey, public_key: RsaPublicKey) -> Self {
        Self {
            key_size: public_key.size(),
            private_key,
            public_key,
        }
    }

    /// Encrypts `data` segment by segment; each plaintext segment is sized to
    /// fit the modulus minus the PKCS#1 v1.5 padding overhead.
    ///
    /// # Errors
    /// Returns an `io::Error` when an RSA segment fails to encrypt.
    pub async fn encrypt(&self, data: Vec<u8>) -> Result<Vec<u8>, io::Error> {
        let mut rng = rand::rng();
        let mut out = Vec::new();
        // PKCS#1 v1.5 requires at least 11 bytes of padding per block.
        let segment_size = self.public_key.size() - 11;
        for seg in data.chunks(segment_size) {
            let segment = self
                .public_key
                .encrypt(&mut rng, Pkcs1v15Encrypt, seg)
                // Propagate instead of panicking: this fn already returns Result.
                .map_err(|e| io::Error::new(io::ErrorKind::Other, e))?;
            out.extend(segment)
        }
        Ok(out)
    }

    /// Decrypts `data` segment by segment; each ciphertext segment is exactly
    /// one modulus (`size()` bytes) long.
    ///
    /// # Panics
    /// Panics if any segment fails to decrypt (e.g. wrong key or corrupt
    /// data) — the signature returns a bare `Vec<u8>`, so errors cannot be
    /// propagated without an interface change.
    pub async fn decrypt(&self, data: Vec<u8>) -> Vec<u8> {
        let mut out = Vec::new();
        let segment_size = self.public_key.size();
        for seg in data.chunks(segment_size) {
            let segment = self
                .private_key
                .decrypt(Pkcs1v15Encrypt, seg)
                .expect("failed to decrypt");
            out.extend(segment)
        }
        out
    }

    /// Writes the key pair as PKCS#1 PEM to `private.pem` / `public.pem` in
    /// the current working directory.
    ///
    /// # Errors
    /// Returns any I/O error from creating or writing the files.
    ///
    /// # Panics
    /// Panics if PEM encoding itself fails.
    pub async fn export(&self) -> Result<(), io::Error> {
        let private_bytes = self
            .private_key
            .to_pkcs1_pem(LineEnding::CRLF)
            .expect("failed to pem private key");
        let public_bytes = self
            .public_key
            .to_pkcs1_pem(LineEnding::CRLF)
            .expect("failed to pem public key");
        fs::File::create("private.pem")
            .await?
            .write_all(private_bytes.as_bytes())
            .await?;
        fs::File::create("public.pem")
            .await?
            .write_all(public_bytes.as_bytes())
            .await?;
        Ok(())
    }
}

View File

@@ -48,6 +48,7 @@ async fn compare_hashes(
old: HashMap<String, FileInfo>,
new: HashMap<String, FileInfo>,
compress_level: i32,
encrypted: bool,
) -> Zsdiff {
let mut diff_files: HashMap<String, Vec<u8>> = HashMap::new();
let mut remove_files: Vec<String> = vec![];
@@ -89,6 +90,7 @@ async fn compare_hashes(
remove_folders,
compress_level,
file_count: diff_files.len() as u32,
encrypted,
},
}
}
@@ -98,29 +100,24 @@ pub async fn zsdiff(
old: String,
new: String,
level: i32,
encrypt: bool,
) -> Result<(), io::Error> {
let now = time::Instant::now();
let output_filename = &format!("{}.zdiff", filename);
let old_hashes = walk_dir(old).await;
let new_hashes = walk_dir(new).await;
let compare_hashes = compare_hashes(old_hashes, new_hashes, level).await;
let parts = compare_hashes.to_vec().await;
let size_before = parts.len();
let now = time::Instant::now();
utils::compress(parts, &fs::File::create(output_filename)?, level).await;
let output_data = fs::read(output_filename)?;
let size_after = output_data.len();
let hash = get_hash(output_data).await;
let diff = compare_hashes(old_hashes, new_hashes, level, encrypt).await;
let mut file = fs::File::create(output_filename)?;
let data = diff.to_vec(level, encrypt).await;
file.write_all(&data[..])?;
let hash = get_hash(data).await;
let output_hash = format!("{} {}", hash.clone(), output_filename);
fs::File::create(format!("{}.md5", output_filename))?.write_all(output_hash.as_bytes())?;
let elapsed = now.elapsed();
println!("Zsdiff hash: {}", hash);
println!("Size before: {:.1?}KB", size_before / 1024);
println!("Size after: {:.1?}KB", size_after / 1024);
println!(
"Compress ratio: {:.2?}%",
size_after as f64 / size_before as f64 * 100.0
);
print!("Time elapsed: {:.2?}", elapsed);
println!(">>> Zsdiff hash: {}", hash);
println!("Time elapsed: {:.2?}", elapsed);
Ok(())
}
@@ -134,10 +131,19 @@ struct Args {
old: String,
#[arg(short, long)]
new: String,
#[arg(short, long)]
encrypt: bool,
}
#[tokio::main]
async fn main() -> io::Result<()> {
let args = Args::parse();
zsdiff(args.filename, args.old, args.new, args.compress_level).await
zsdiff(
args.filename,
args.old,
args.new,
args.compress_level,
args.encrypt,
)
.await
}

View File

@@ -1,6 +1,7 @@
mod utils;
use clap::Parser;
use crate::utils::Metadata;
use clap::{Arg, ArgAction, ArgMatches, Command, Parser};
use std::path::{Path, PathBuf};
use std::{io, time};
use tokio::fs;
@@ -21,10 +22,28 @@ async fn create_tmp_dir(dir_name: String) -> Result<String, io::Error> {
})
}
/// Reads only the metadata header of `<filename>.zdiff`.
///
/// Header layout: `[u16 version][u32 json length][json-encoded Metadata]`,
/// all integers big-endian.
///
/// # Errors
/// Returns `InvalidData` when the file is too short, the declared metadata
/// length runs past the end of the file, the version is unsupported, or the
/// JSON does not parse; propagates any I/O error from reading the file.
async fn load_metadata(filename: String) -> Result<Metadata, io::Error> {
    let filepath = format!("{}.zdiff", filename);
    let data = fs::read(&filepath).await?;
    // Guard before slicing: 2 bytes version + 4 bytes length. The previous
    // unchecked `data[..2]` / `data[2..6]` panicked on truncated files.
    if data.len() < 6 {
        return Err(io::Error::new(
            io::ErrorKind::InvalidData,
            "File too short to contain a metadata header",
        ));
    }
    let meta_version = u16::from_be_bytes(data[..2].try_into().unwrap());
    if !utils::is_metadata_supported(meta_version) {
        return Err(io::Error::new(
            io::ErrorKind::InvalidData,
            "Metadata version not supported",
        ));
    }
    let meta_size = u32::from_be_bytes(data[2..6].try_into().unwrap()) as usize;
    // Validate the declared length against the actual file size (checked_add
    // also guards against overflow on 32-bit targets).
    let meta_end = meta_size
        .checked_add(6)
        .filter(|&end| end <= data.len())
        .ok_or_else(|| {
            io::Error::new(io::ErrorKind::InvalidData, "Metadata length out of range")
        })?;
    // Deserialize straight from the slice; no intermediate `to_vec()` needed.
    let metadata = serde_json::from_slice(&data[6..meta_end])
        .map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;
    Ok(metadata)
}
async fn load_file(filename: String) -> Result<Zsdiff, io::Error> {
let full_filename = format!("{}.zdiff", filename);
let compressed_data = fs::read(&full_filename).await?;
let data = utils::decompress(compressed_data).await?;
let data = fs::read(&full_filename).await?;
let zsdiff = Zsdiff::from_vec(data).await?;
println!(
">>> Metadata files to remove: {}",
@@ -77,15 +96,11 @@ async fn check_hash(filename: String) -> Result<(), io::Error> {
}
async fn zspatch(filename: String, dest_dir: String) -> Result<(), io::Error> {
let now = time::Instant::now();
let cloned = filename.clone();
let diff = load_file(cloned).await.ok().unwrap();
let tmp_dir_name = extract_files(&diff, filename).await?;
let now = time::Instant::now();
fs::File::create("metadata.json")
.await?
.write_all(serde_json::to_vec(&diff.metadata).unwrap().as_slice())
.await?;
let files_to_copy: Vec<String> = diff.content.keys().cloned().collect();
for (_, name) in files_to_copy.iter().enumerate() {
let from_path = Path::new(&tmp_dir_name).join(name);
@@ -156,7 +171,7 @@ async fn zspatch(filename: String, dest_dir: String) -> Result<(), io::Error> {
}
}
// fs::remove_dir_all(tmp_dir_name).await?;
fs::remove_dir_all(tmp_dir_name).await?;
println!(">>> Patching done! <<<");
println!(">>> Elapsed time: {:.2?}", now.elapsed());
Ok(())
@@ -176,19 +191,85 @@ struct Args {
#[tokio::main]
async fn main() -> io::Result<()> {
let args = Args::parse();
let m = Command::new("ZsPatch")
.author("ScuroNeko")
.version("0.3.0")
.about("Explains in brief what the program does")
.subcommand_required(true)
.arg_required_else_help(true)
.after_help("")
.subcommand(
Command::new("metadata")
.short_flag('m')
.long_flag("metadata")
.arg(
Arg::new("filename")
.short('f')
.long("filename")
.required(true)
.action(ArgAction::Set),
),
)
.subcommand(
Command::new("patch")
.short_flag('p')
.arg(
Arg::new("filename")
.short('f')
.required(true)
.action(ArgAction::Set),
)
.arg(
Arg::new("dest")
.short('d')
.required(true)
.action(ArgAction::Set),
)
.arg(
Arg::new("hash_check")
.long("hash_check")
.required(false)
.action(ArgAction::SetTrue),
),
)
.get_matches();
let filename = args.filename.clone();
let dest_dir = args.dest_dir.clone();
match m.subcommand() {
Some(("metadata", meta_matches)) => {
let filename: &String = meta_matches.get_one("filename").unwrap();
let metadata = load_metadata(filename.clone()).await?;
println!(">>> Compress level: {}", metadata.compress_level);
println!(
">>> Encrypted?: {}",
if metadata.encrypted { "Yes" } else { "No" }
);
return Ok(());
}
Some(("patch", patch_matches)) => {
let filename: &String = patch_matches.get_one("filename").unwrap();
let dest_dir: &String = patch_matches.get_one("dest").unwrap();
if patch_matches.get_flag("hash_check") {
check_hash(filename.clone()).await.ok();
}
zspatch(filename.clone(), dest_dir.clone()).await?;
}
_ => unreachable!("Subcommand is required"),
}
Ok(())
if args.check_hash {
check_hash(args.filename.clone()).await.ok();
}
if args.metadata {
let diff = load_file(filename).await?;
let metadata = diff.metadata;
println!(">>> Compress level: {}", metadata.compress_level);
return Ok(());
}
zspatch(filename, dest_dir).await
// let args = Args::parse();
//
// let filename = args.filename.clone();
// let dest_dir = args.dest_dir.clone();
//
// if args.check_hash {
// check_hash(args.filename.clone()).await.ok();
// }
// if args.metadata {
// let diff = load_file(filename).await?;
// let metadata = diff.metadata;
// println!(">>> Compress level: {}", metadata.compress_level);
// return Ok(());
// }
// zspatch(filename, dest_dir).await
}