13 Commits
v0.1 ... main

Author SHA1 Message Date
771d276eb6 readme update 2025-10-20 13:03:13 +03:00
90b5d8a523 v0.3 2025-10-20 12:43:34 +03:00
a91bd3bafc v0.3 2025-10-20 12:34:04 +03:00
207675f522 v0.2 2025-10-20 00:58:43 +03:00
c4c0ad2e12 small fix in hash from zsdiff 2025-10-17 14:04:34 +03:00
54500810a0 v0.2.1 2025-10-17 14:01:13 +03:00
c93bf8f1d2 v0.2 2025-10-17 13:51:38 +03:00
df0b260c7b v0.2 2025-10-17 13:48:34 +03:00
c7fff59f6c cleanup 2025-10-17 13:16:55 +03:00
0f645d0689 readme 2025-10-17 13:13:18 +03:00
2bbcca5253 readme 2025-10-17 13:11:06 +03:00
e5f238a126 messages 2025-10-17 13:05:11 +03:00
ac8b4d6f81 patch hash check 2025-10-17 12:53:20 +03:00
7 changed files with 815 additions and 124 deletions

252
Cargo.lock generated
View File

@@ -52,6 +52,30 @@ dependencies = [
"windows-sys 0.60.2",
]
[[package]]
name = "autocfg"
version = "1.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "c08606f8c3cbf4ce6ec8e28fb0014a2c086708fe954eaa885384a6165172e7e8"
[[package]]
name = "base16ct"
version = "0.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d8b59d472eab27ade8d770dcb11da7201c11234bef9f82ce7aa517be028d462b"
[[package]]
name = "base64ct"
version = "1.8.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "55248b47b0caf0546f7988906588779981c43bb1bc9d0c44087278f80cdb44ba"
[[package]]
name = "bytes"
version = "1.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d71b6127be86fdcfddb610f7182ac57211d4b18a3e9c82eb2d17662f2227ad6a"
[[package]]
name = "cc"
version = "1.2.41"
@@ -70,6 +94,17 @@ version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9330f8b2ff13f34540b44e946ef35111825727b38d33286ef986142615121801"
[[package]]
name = "chacha20"
version = "0.10.0-rc.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "9bd162f2b8af3e0639d83f28a637e4e55657b7a74508dba5a9bf4da523d5c9e9"
dependencies = [
"cfg-if",
"cpufeatures",
"rand_core",
]
[[package]]
name = "clap"
version = "4.5.49"
@@ -116,6 +151,75 @@ version = "1.0.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b05b61dc5112cbb17e4b6cd61790d9845d13888356391624cbe7e41efeac1e75"
[[package]]
name = "const-oid"
version = "0.10.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0dabb6555f92fb9ee4140454eb5dcd14c7960e1225c6d1a6cc361f032947713e"
[[package]]
name = "cpufeatures"
version = "0.2.17"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "59ed5838eebb26a2bb2e58f6d5b5316989ae9d08bab10e0e6d103e656d1b0280"
dependencies = [
"libc",
]
[[package]]
name = "crypto-bigint"
version = "0.7.0-rc.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7f4b0fda9462026d53a3ef37c5ec283639ee8494a1a5401109c0e2a3fb4d490c"
dependencies = [
"num-traits",
"rand_core",
"serdect",
"subtle",
"zeroize",
]
[[package]]
name = "crypto-common"
version = "0.2.0-rc.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "6a8235645834fbc6832939736ce2f2d08192652269e11010a6240f61b908a1c6"
dependencies = [
"hybrid-array",
]
[[package]]
name = "crypto-primes"
version = "0.7.0-pre.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "25f2523fbb68811c8710829417ad488086720a6349e337c38d12fa81e09e50bf"
dependencies = [
"crypto-bigint",
"libm",
"rand_core",
]
[[package]]
name = "der"
version = "0.8.0-rc.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "e9d8dd2f26c86b27a2a8ea2767ec7f9df7a89516e4794e54ac01ee618dda3aa4"
dependencies = [
"const-oid",
"pem-rfc7468",
"zeroize",
]
[[package]]
name = "digest"
version = "0.11.0-rc.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "dac89f8a64533a9b0eaa73a68e424db0fb1fd6271c74cc0125336a05f090568d"
dependencies = [
"const-oid",
"crypto-common",
]
[[package]]
name = "find-msvc-tools"
version = "0.1.4"
@@ -140,6 +244,15 @@ version = "0.5.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2304e00983f87ffb38b55b444b5e3b60a884b5d30c0fca7d82fe33449bbe55ea"
[[package]]
name = "hybrid-array"
version = "0.4.5"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f471e0a81b2f90ffc0cb2f951ae04da57de8baa46fa99112b062a5173a5088d0"
dependencies = [
"typenum",
]
[[package]]
name = "is_terminal_polyfill"
version = "1.70.1"
@@ -168,6 +281,12 @@ version = "0.2.177"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2874a2af47a2325c2001a6e6fad9b16a53b802102b528163885171cf92b15976"
[[package]]
name = "libm"
version = "0.2.15"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f9fbbcab51052fe104eb5e5d351cf728d30a5be1fe14d9be8a3b097481fb97de"
[[package]]
name = "md5"
version = "0.8.0"
@@ -180,18 +299,56 @@ version = "2.7.6"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f52b00d39961fc5b2736ea853c9cc86238e165017a493d1d5c8eac6bdc4cc273"
[[package]]
name = "num-traits"
version = "0.2.19"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "071dfc062690e90b734c0b2273ce72ad0ffa95f0c74596bc250dcfd960262841"
dependencies = [
"autocfg",
]
[[package]]
name = "once_cell_polyfill"
version = "1.70.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a4895175b425cb1f87721b59f0f286c2092bd4af812243672510e1ac53e2e0ad"
[[package]]
name = "pem-rfc7468"
version = "1.0.0-rc.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "a8e58fab693c712c0d4e88f8eb3087b6521d060bcaf76aeb20cb192d809115ba"
dependencies = [
"base64ct",
]
[[package]]
name = "pin-project-lite"
version = "0.2.16"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "3b3cff922bd51709b605d9ead9aa71031d81447142d828eb4a6eba76fe619f9b"
[[package]]
name = "pkcs1"
version = "0.8.0-rc.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "986d2e952779af96ea048f160fd9194e1751b4faea78bcf3ceb456efe008088e"
dependencies = [
"der",
"spki",
]
[[package]]
name = "pkcs8"
version = "0.11.0-rc.7"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "93eac55f10aceed84769df670ea4a32d2ffad7399400d41ee1c13b1cd8e1b478"
dependencies = [
"der",
"spki",
]
[[package]]
name = "pkg-config"
version = "0.3.32"
@@ -222,6 +379,44 @@ version = "5.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "69cdb34c158ceb288df11e18b4bd39de994f6657d83847bdffdbd7f346754b0f"
[[package]]
name = "rand"
version = "0.10.0-rc.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "5ec474812b9de55111b29da8a1559f1718ef3dc20fa36f031f1b5d9e3836ad6c"
dependencies = [
"chacha20",
"rand_core",
]
[[package]]
name = "rand_core"
version = "0.9.3"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "99d9a13982dcf210057a8a78572b2217b667c3beacbf3a0d8b454f6f82837d38"
dependencies = [
"getrandom",
]
[[package]]
name = "rsa"
version = "0.10.0-rc.9"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "bf8955ab399f6426998fde6b76ae27233cce950705e758a6c17afd2f6d0e5d52"
dependencies = [
"const-oid",
"crypto-bigint",
"crypto-primes",
"digest",
"pkcs1",
"pkcs8",
"rand_core",
"signature",
"spki",
"subtle",
"zeroize",
]
[[package]]
name = "ryu"
version = "1.0.20"
@@ -280,12 +475,42 @@ dependencies = [
"serde_core",
]
[[package]]
name = "serdect"
version = "0.4.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "d3ef0e35b322ddfaecbc60f34ab448e157e48531288ee49fafbb053696b8ffe2"
dependencies = [
"base16ct",
"serde",
]
[[package]]
name = "shlex"
version = "1.3.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "0fda2ff0d084019ba4d7c6f371c95d8fd75ce3524c3cb8fb653a3023f6323e64"
[[package]]
name = "signature"
version = "3.0.0-rc.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "fc280a6ff65c79fbd6622f64d7127f32b85563bca8c53cd2e9141d6744a9056d"
dependencies = [
"digest",
"rand_core",
]
[[package]]
name = "spki"
version = "0.8.0-rc.4"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "8baeff88f34ed0691978ec34440140e1572b68c7dd4a495fd14a3dc1944daa80"
dependencies = [
"base64ct",
"der",
]
[[package]]
name = "strsim"
version = "0.11.1"
@@ -293,10 +518,16 @@ source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "7da8b5736845d9f2fcb837ea5d9e2628564b3b043a70948a3f0b778838c5fb4f"
[[package]]
name = "syn"
version = "2.0.106"
name = "subtle"
version = "2.6.1"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ede7c438028d4436d71104916910f5bb611972c5cfd7f89b8300a8186e6fada6"
checksum = "13c2bddecc57b384dee18652358fb23172facb8a2c51ccc10d74c157bdea3292"
[[package]]
name = "syn"
version = "2.0.107"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "2a26dbd934e5451d21ef060c018dae56fc073894c5a7896f882928a76e6d081b"
dependencies = [
"proc-macro2",
"quote",
@@ -309,6 +540,7 @@ version = "1.48.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "ff360e02eab121e0bc37a2d3b4d4dc622e6eda3a8e5253d5435ecf5bd4c68408"
dependencies = [
"bytes",
"pin-project-lite",
"tokio-macros",
]
@@ -324,6 +556,12 @@ dependencies = [
"syn",
]
[[package]]
name = "typenum"
version = "1.19.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "562d481066bde0658276a35467c4af00bdc6ee726305698a55b86e61d7ad82bb"
[[package]]
name = "unicode-ident"
version = "1.0.19"
@@ -459,12 +697,20 @@ version = "0.46.0"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "f17a85883d4e6d00e8a97c586de764dabcc06133f7f1d55dce5cdc070ad7fe59"
[[package]]
name = "zeroize"
version = "1.8.2"
source = "registry+https://github.com/rust-lang/crates.io-index"
checksum = "b97154e67e32c85465826e8bcc1c59429aaaf107c1e4a9e53c8d8ccd5eff88d0"
[[package]]
name = "zsdiff_all"
version = "0.1.0"
dependencies = [
"clap",
"md5",
"rand",
"rsa",
"serde",
"serde_json",
"tokio",

View File

@@ -3,6 +3,9 @@ name = "zsdiff_all"
version = "0.1.0"
edition = "2024"
[profile.dev]
opt-level = 3
[[bin]]
name = "zsdiff"
path = "src/zsdiff.rs"
@@ -13,9 +16,11 @@ path = "src/zspatch.rs"
[dependencies]
zstd = { version = "0.13" }
tokio = { version = "1.48", features = ["rt", "rt-multi-thread", "macros"] }
tokio = { version = "1.48", features = ["rt", "rt-multi-thread", "macros", "fs", "io-util"] }
md5 = "0.8"
serde = { version = "1.0", features = ["derive"] }
serde_json = "1.0"
walkdir = "2.5"
clap = { version = "4.5", features = ["derive"] }
rsa = { version = "0.10.0-rc.9", features = ["std"] }
rand = { version = "0.10.0-rc.0", features = ["thread_rng", "std"] }

View File

@@ -1,5 +1,12 @@
build:
all: build-linux build-win
build-linux:
cargo build --release --target x86_64-unknown-linux-gnu --package zsdiff_all --bin zspatch
cargo build --release --target x86_64-unknown-linux-gnu --package zsdiff_all --bin zsdiff
#cargo build --release --target x86_64-apple-darwin --package zdiff_all --bin zpatch
#cargo build --release --target x86_64-pc-windows-gnu --package zdiff_all --bin zpatch
build-win:
cargo build --release --target x86_64-pc-windows-gnu --package zsdiff_all --bin zspatch
cargo build --release --target x86_64-pc-windows-gnu --package zsdiff_all --bin zsdiff
clean:
rm -Rf target/

47
README.md Normal file
View File

@@ -0,0 +1,47 @@
# ZsDiff
A partial-update program. Uses [Zstandard](https://github.com/facebook/zstd).
`zsdiff` produces a [filename].zdiff file and [filename].zdiff.md5. It compares all file hashes from the old directory against the new one.
```
Usage: zsdiff [OPTIONS] --filename <FILENAME> --old <OLD> --new <NEW>
Options:
-f, --filename <FILENAME>
-c, --compress-level <COMPRESS_LEVEL> [default: 11]
-o, --old <OLD>
-n, --new <NEW>
-e, --encrypt
-h, --help Print help
```
`zspatch` extracts files from [filename].zdiff into [dest-dir]. If a file was deleted between old and new, `zspatch`
also deletes it from [dest-dir].
The --hash-check flag compares the computed hash of [filename].zdiff with the hash stored in [filename].zdiff.md5.
```
Usage: zspatch [OPTIONS] --filename <FILENAME> --dest-dir <DEST_DIR>
Options:
-f, --filename <FILENAME>
-d, --dest-dir <DEST_DIR>
-m, --metadata
-c, --check-hash
-h, --help Print help
```
```
metadata version: uint16 (2 bytes)
metadata size: uint32 (4 bytes)
n = 6
metadata content: bytes (n;m bytes)
m = n+6
(content -> rsa -> zstd (bytes) -> bytes) (m;o)
content entry: {
size: uint32 (4 bytes)
i = 4
j = i + size
content: (i;j bytes)
}
```

View File

@@ -1,93 +1,257 @@
use md5;
use rsa::pkcs1::{DecodeRsaPrivateKey, EncodeRsaPrivateKey, EncodeRsaPublicKey};
use rsa::pkcs8::LineEnding;
use rsa::traits::PublicKeyParts;
use rsa::{Pkcs1v15Encrypt, RsaPrivateKey, RsaPublicKey};
use serde::{Deserialize, Serialize};
use std::collections::HashMap;
use std::{fs, io};
use std::io;
use tokio::fs;
use tokio::io::AsyncWriteExt;
use zstd::{Decoder, Encoder};
const METADATA_VERSION: u16 = 1;
const SUPPORTED_VERSION: [u16; 1] = [1];
pub fn is_metadata_supported(version: u16) -> bool {
SUPPORTED_VERSION.contains(&version)
}
pub struct Zsdiff {
pub content: HashMap<String, Vec<u8>>,
pub metadata: Metadata,
}
impl Zsdiff {
pub async fn from_vec(_data: Vec<Vec<u8>>) -> Result<Self, std::io::Error> {
pub async fn from_vec(_data: Vec<u8>) -> Result<Self, io::Error> {
let meta_version = u16::from_be_bytes(_data[..2].try_into().unwrap());
println!(">>> Metadata version: {}", meta_version);
if !SUPPORTED_VERSION.contains(&meta_version) {
return Err(io::Error::new(
io::ErrorKind::Other,
"Metadata version mismatch",
));
}
let meta_size = u32::from_be_bytes(_data[2..6].try_into().unwrap()) as usize;
let mut index = 6;
let meta = _data[index..index + meta_size].to_vec();
let metadata: Metadata = serde_json::from_slice(&meta)?;
println!(">>> Metadata parsed successfully");
index += meta_size;
println!(">>> File count: {}", metadata.file_count);
let mut data = _data[index..].to_vec();
if metadata.encrypted {
println!(">>> Decrypting file");
let key_content = fs::read("./private.pem").await?;
let private_key = RsaPrivateKey::from_pkcs1_pem(
String::from_utf8(key_content)
.expect("Can't load key")
.as_str(),
)
.expect("Can't load private key");
let encrypter = Encrypter::from_private_key(private_key);
data = encrypter.decrypt(data).await;
println!(">>> Decrypting done");
}
println!(">>> Decompressing file");
let compressor = Compressor::new();
data = compressor.decompress(data).await?;
println!(">>> Decompressing done");
index = 0;
let mut content = HashMap::new();
for part in _data {
let filename_size = u32::from_be_bytes(part[0..4].try_into().unwrap()) as usize;
let filename = String::from_utf8(part[4..filename_size + 4].to_vec()).unwrap();
let cont = part[filename_size + 8..].to_vec();
while index < data.len() {
let filename_size =
u32::from_be_bytes(data[index..index + 4].try_into().unwrap()) as usize;
index += 4;
let filename = String::from_utf8(data[index..filename_size + index].to_vec()).unwrap();
index += filename_size;
let content_size =
u32::from_be_bytes(data[index..index + 4].try_into().unwrap()) as usize;
index += 4;
let cont = data[index..index + content_size].to_vec();
index += cont.len();
content.insert(filename, cont);
}
let meta = content.get("metadata.json").unwrap();
let metadata: Metadata = serde_json::from_slice(meta.as_slice())?;
content.remove("metadata.json");
Ok(Zsdiff { content, metadata })
}
pub async fn to_vec(&self) -> Vec<Vec<u8>> {
let mut parts: Vec<Vec<u8>> = Vec::new();
pub async fn to_vec(&self, compress_level: i32, encrypt: bool) -> Vec<u8> {
let mut meta_bytes: Vec<u8> = Vec::new();
meta_bytes.extend(METADATA_VERSION.to_be_bytes()); // u16
let meta = serde_json::to_vec(&self.metadata).unwrap();
meta_bytes.extend((meta.len() as u32).to_be_bytes()); // u32
meta_bytes.extend(meta);
let mut parts: Vec<u8> = Vec::new();
for (filename, content) in &self.content {
let filename_size: [u8; 4] = (filename.len() as u32).to_be_bytes();
let filename_encoded = vec![filename_size.as_slice(), filename.as_bytes()].concat();
let content_size: [u8; 4] = (content.len() as u32).to_be_bytes();
let content_encoded = vec![content_size.as_slice(), content.as_slice()].concat();
parts.push(vec![filename_encoded, content_encoded].concat())
let part = vec![filename_encoded, content_encoded].concat();
parts.extend(part)
}
let meta = serde_json::to_vec(&self.metadata).unwrap();
let meta_filename = "metadata.json";
let meta_filename_size = (meta_filename.len() as u32).to_be_bytes();
let meta_filename_encoded =
vec![meta_filename_size.as_slice(), meta_filename.as_bytes()].concat();
let size_before = parts.len();
parts = self.compress(parts, compress_level).await;
if encrypt {
parts = self.encrypt(parts).await;
}
let meta_size = (meta.len() as u32).to_be_bytes();
let meta_encoded = vec![meta_size.as_slice(), meta.as_slice()].concat();
parts.push(vec![meta_filename_encoded, meta_encoded].concat());
let size_after = parts.len();
println!(">>> Size before: {:.1?}KB", size_before / 1024);
println!(">>> Size after: {:.1?}KB", size_after / 1024);
println!(
">>> Compress ratio: {:.2?}%",
size_after as f64 / size_before as f64 * 100.0
);
parts
let out = vec![meta_bytes, parts].concat();
out
}
async fn compress(&self, data: Vec<u8>, level: i32) -> Vec<u8> {
let compressor = Compressor::new();
println!(">>> Compressing");
let _data = compressor
.compress(data, level)
.await
.expect("Can't compress data");
println!(">>> Compressing done");
_data
}
async fn encrypt(&self, data: Vec<u8>) -> Vec<u8> {
println!(">>> Encrypting");
let encrypter = Encrypter::new_pair();
let _data = encrypter.encrypt(data).await.expect("Can't encrypt data");
encrypter.export().await.expect("Can't export keys");
println!(">>> Encrypting done");
_data
}
}
#[derive(Serialize, Deserialize)]
#[derive(Serialize, Deserialize, Debug)]
pub struct Metadata {
pub(crate) diff_files: Vec<String>,
pub hashes: HashMap<String, String>,
pub remove_files: Vec<String>,
pub remove_folders: Vec<String>,
pub file_count: u32,
pub compress_level: i32,
pub encrypted: bool,
}
pub async fn get_hash(data: &Vec<u8>) -> String {
pub async fn get_hash(data: Vec<u8>) -> String {
let hash = md5::compute(&data[..]);
format!("{:x}", hash)
}
pub async fn compress_parts(input: Vec<Vec<u8>>, output: &fs::File, level: i32) {
let mut encoder = Encoder::new(output, level).unwrap();
for part in input.iter() {
io::copy(&mut &part[..], &mut encoder).unwrap();
struct Compressor {}
impl Compressor {
pub(crate) fn new() -> Self {
Compressor {}
}
encoder.finish().unwrap();
}
pub async fn decompress_parts(input: Vec<u8>) -> Result<Vec<Vec<u8>>, io::Error> {
pub async fn compress(&self, input: Vec<u8>, level: i32) -> Result<Vec<u8>, io::Error> {
let buf = Vec::new();
let mut encoder = Encoder::new(buf, level)?;
io::copy(&mut &input[..], &mut encoder)?;
encoder.finish()
}
pub async fn decompress(&self, input: Vec<u8>) -> Result<Vec<u8>, io::Error> {
let mut decoder = Decoder::new(&input[..])?;
let mut buf = Vec::new();
io::copy(&mut decoder, &mut buf)?;
let mut index = 0;
let mut parts: Vec<Vec<u8>> = Vec::new();
while index < buf.len() {
let filename_size = u32::from_be_bytes(buf[index..index + 4].try_into().unwrap()) as usize;
let filename = buf[index..index + filename_size + 4].to_vec();
index += 4 + filename_size;
let content_size = u32::from_be_bytes(buf[index..index + 4].try_into().unwrap()) as usize;
let content = buf[index..index + content_size + 4].to_vec();
index += content_size + 4;
let part = vec![filename, content].concat();
parts.push(part);
Ok(buf)
}
}
pub struct Encrypter {
private_key: RsaPrivateKey,
public_key: RsaPublicKey,
}
impl Encrypter {
pub fn new_pair() -> Self {
let mut rng = rand::rng();
let bits = 2048;
let private_key = RsaPrivateKey::new(&mut rng, bits).expect("failed to generate a key");
let public_key = RsaPublicKey::from(&private_key);
Self {
private_key,
public_key,
}
}
pub fn from_private_key(private_key: RsaPrivateKey) -> Self {
let public_key = RsaPublicKey::from(&private_key);
Self {
public_key,
private_key,
}
}
pub fn new(private_key: RsaPrivateKey, public_key: RsaPublicKey) -> Self {
Self {
private_key,
public_key,
}
}
pub async fn encrypt(&self, data: Vec<u8>) -> Result<Vec<u8>, io::Error> {
let mut rng = rand::rng();
let mut out = Vec::new();
let segment_size = self.public_key.size() - 11;
for seg in data.chunks(segment_size) {
let segment = self
.public_key
.encrypt(&mut rng, Pkcs1v15Encrypt, seg)
.expect("Can't encrypt segment");
out.extend(segment)
}
Ok(out)
}
pub async fn decrypt(&self, data: Vec<u8>) -> Vec<u8> {
let mut out = Vec::new();
let segment_size = self.public_key.size();
for seg in data.chunks(segment_size) {
let segment = self
.private_key
.decrypt(Pkcs1v15Encrypt, seg)
.expect("failed to decrypt");
out.extend(segment)
}
out
}
pub async fn export(&self) -> Result<(), io::Error> {
let private_bytes = self
.private_key
.to_pkcs1_pem(LineEnding::CRLF)
.expect("failed to pem private key");
let public_bytes = self
.public_key
.to_pkcs1_pem(LineEnding::CRLF)
.expect("failed to pem public key");
fs::File::create("private.pem")
.await?
.write_all(private_bytes.as_bytes())
.await?;
fs::File::create("public.pem")
.await?
.write_all(public_bytes.as_bytes())
.await?;
Ok(())
}
Ok(parts)
}

View File

@@ -2,14 +2,17 @@ mod utils;
use clap::Parser;
use std::collections::HashMap;
use std::{fs, io};
use std::io::Write;
use std::{fs, io, time};
use utils::{Metadata, Zsdiff, get_hash};
use walkdir::WalkDir;
#[derive(Debug)]
struct FileInfo {
path: String,
relative_path: String, // Without dir prefix
hash: String,
is_dir: bool,
}
async fn walk_dir(dir: String) -> HashMap<String, FileInfo> {
@@ -17,41 +20,63 @@ async fn walk_dir(dir: String) -> HashMap<String, FileInfo> {
for e in WalkDir::new(&dir) {
let e = e.unwrap();
let path = e.path();
if path.is_dir() {
if path.display().to_string().eq(&dir) {
continue;
}
let content = fs::read(path).unwrap();
let hash = get_hash(&content).await;
let content: Vec<u8>;
if path.is_dir() {
let path_str = path.display().to_string();
content = path_str.into_bytes();
} else {
content = fs::read(path).unwrap();
}
let hash = get_hash(content).await;
let path_str = path.display().to_string();
let file_info = FileInfo {
relative_path: path_str[dir.len() + 1..].to_string(),
path: path_str,
hash: hash.clone(),
is_dir: path.is_dir(),
};
hash_list.entry(hash).or_insert(file_info);
}
hash_list
}
async fn compare_hashes(old: HashMap<String, FileInfo>, new: HashMap<String, FileInfo>) -> Zsdiff {
async fn compare_hashes(
old: HashMap<String, FileInfo>,
new: HashMap<String, FileInfo>,
compress_level: i32,
encrypted: bool,
) -> Zsdiff {
let mut diff_files: HashMap<String, Vec<u8>> = HashMap::new();
let mut remove_files: Vec<String> = vec![];
let mut remove_folders: Vec<String> = vec![];
let mut hashes: HashMap<String, String> = HashMap::new();
for (_, info) in &old {
if info.is_dir {
remove_folders.push(info.relative_path.clone());
} else {
remove_files.push(info.relative_path.clone());
}
}
for (new_hash, new_fileinfo) in &new {
let old_fileinfo = old.get(new_hash);
if new_fileinfo.is_dir {
remove_folders.retain(|filename| !filename.eq(&new_fileinfo.relative_path));
} else {
remove_files.retain(|filename| !filename.eq(&new_fileinfo.relative_path));
}
if new_fileinfo.is_dir {
continue;
}
if old_fileinfo.is_none() {
let path = new_fileinfo.relative_path.clone();
diff_files.insert(path.clone(), fs::read(new_fileinfo.path.clone()).unwrap());
hashes.insert(
new_fileinfo.relative_path.clone(),
new_fileinfo.hash.clone(),
);
hashes.insert(path.clone(), new_fileinfo.hash.clone());
}
}
@@ -61,6 +86,10 @@ async fn compare_hashes(old: HashMap<String, FileInfo>, new: HashMap<String, Fil
diff_files: diff_files.keys().cloned().collect(),
hashes,
remove_files,
remove_folders,
compress_level,
file_count: diff_files.len() as u32,
encrypted,
},
}
}
@@ -70,22 +99,28 @@ pub async fn zsdiff(
old: String,
new: String,
level: i32,
encrypt: bool,
) -> Result<(), io::Error> {
let now = time::Instant::now();
let output_filename = &format!("{}.zdiff", filename);
let old_hashes = walk_dir(old).await;
let new_hashes = walk_dir(new).await;
let compare_hashes = compare_hashes(old_hashes, new_hashes).await;
let parts = compare_hashes.to_vec().await;
let file = fs::File::create(output_filename)?;
utils::compress_parts(parts, &file, level).await;
// let mut buf = Vec::new();
// file.read(&mut buf)?;
// let output_hash = get_hash(&buf).await;
// println!("{}", output_hash);
let diff = compare_hashes(old_hashes, new_hashes, level, encrypt).await;
let mut file = fs::File::create(output_filename)?;
let data = diff.to_vec(level, encrypt).await;
file.write_all(&data[..])?;
let hash = get_hash(data).await;
let output_hash = format!("{} {}", hash.clone(), output_filename);
fs::File::create(format!("{}.md5", output_filename))?.write_all(output_hash.as_bytes())?;
let elapsed = now.elapsed();
println!(">>> Zsdiff hash: {}", hash);
println!("Time elapsed: {:.2?}", elapsed);
Ok(())
}
#[derive(Parser, Debug)]
#[derive(Parser)]
struct Args {
#[arg(short, long)]
filename: String,
@@ -95,11 +130,19 @@ struct Args {
old: String,
#[arg(short, long)]
new: String,
#[arg(short, long)]
encrypt: bool,
}
#[tokio::main]
async fn main() -> io::Result<()> {
let args = Args::parse();
zsdiff(args.filename, args.old, args.new, args.compress_level).await?;
Ok(())
zsdiff(
args.filename,
args.old,
args.new,
args.compress_level,
args.encrypt,
)
.await
}

View File

@@ -1,70 +1,249 @@
mod utils;
use clap::Parser;
use std::fs::read;
use std::io::Write;
use std::path::Path;
use std::{fs, io};
use crate::utils::Metadata;
use clap::{Arg, ArgAction, Command, Parser};
use std::path::{Path, PathBuf};
use std::{io, time};
use tokio::fs;
use tokio::io::AsyncWriteExt;
use utils::Zsdiff;
async fn create_tmp_dir(dir_name: String) -> Result<String, io::Error> {
let name = format!("{}.tmp", dir_name);
fs::remove_dir_all(name.clone()).ok();
fs::DirBuilder::new().create(name.clone())?;
Ok(name)
let name = PathBuf::from(format!("{}_tmp", dir_name));
if name.exists() {
fs::remove_dir_all(&name).await?;
}
fs::create_dir(&name).await?;
name.to_str().map(|s| s.to_string()).ok_or_else(|| {
io::Error::new(
io::ErrorKind::InvalidData,
"Path contains invalid UTF-8 characters",
)
})
}
async fn extract_files(zsdiff: &Zsdiff, filename: &String) -> Result<String, io::Error> {
async fn load_metadata(filename: String) -> Result<Metadata, io::Error> {
let filepath = format!("{}.zdiff", filename);
let data = fs::read(&filepath).await?;
let meta_version = u16::from_be_bytes(data[..2].try_into().unwrap());
if !utils::is_metadata_supported(meta_version) {
return Err(io::Error::new(
io::ErrorKind::InvalidData,
"Metadata version not supported",
));
}
let meta_size = u32::from_be_bytes(data[2..6].try_into().unwrap()) as usize;
let meta_data = data[6..meta_size + 6].to_vec();
let metadata = serde_json::from_slice(&meta_data[..])
.map_err(|e| io::Error::new(io::ErrorKind::InvalidData, e))?;
Ok(metadata)
}
async fn load_file(filename: String) -> Result<Zsdiff, io::Error> {
let full_filename = format!("{}.zdiff", filename);
let data = fs::read(&full_filename).await?;
let zsdiff = Zsdiff::from_vec(data).await?;
println!(
">>> Metadata files to remove: {}",
zsdiff.metadata.remove_files.len()
);
println!(
">>> Metadata hashes to check: {}",
zsdiff.metadata.hashes.len()
);
Ok(zsdiff)
}
async fn extract_files(zsdiff: &Zsdiff, filename: String) -> Result<String, io::Error> {
let tmp_dir_name = create_tmp_dir(filename.to_string()).await?;
let path = Path::new(&tmp_dir_name);
fs::remove_dir_all(path).ok();
for (f, c) in zsdiff.content.iter() {
for (i, (f, c)) in zsdiff.content.iter().enumerate() {
println!(
">>> Processing file {}/{}: '{}'",
i + 1,
zsdiff.content.len(),
f
);
let filepath = path.join(f);
fs::create_dir_all(filepath.parent().unwrap())?;
fs::File::create(&filepath)?.write_all(c)?;
if let Some(parent) = filepath.parent() {
fs::create_dir_all(parent).await?;
}
let mut file = fs::File::create(&filepath).await?;
file.write_all(c).await?;
}
Ok(tmp_dir_name)
}
async fn zpatch(filename: String, dest_dir: String) -> Result<(), io::Error> {
let filename = &format!("{}.zdiff", filename);
let parts = utils::decompress_parts(read(filename)?).await?;
let diff = Zsdiff::from_vec(parts).await?;
let tmp_dir_name = extract_files(&diff, filename).await?;
for name in diff.content.keys().collect::<Vec<&String>>() {
let from_path = Path::new(&tmp_dir_name).join(name);
let to_path = Path::new(&dest_dir).join(name);
fs::create_dir_all(to_path.parent().unwrap())?;
fs::copy(from_path, to_path)?;
}
for file in diff.metadata.remove_files {
let path = Path::new(&dest_dir).join(file);
fs::remove_file(path).ok();
}
for (k, hash) in diff.metadata.hashes {
let path = Path::new(&dest_dir).join(k);
let content = read(path)?;
let fs_hash = utils::get_hash(&content).await;
if !fs_hash.eq(&hash) {
Err(io::Error::new(io::ErrorKind::Other, "Hash mismatch"))?
}
async fn check_hash(filename: String) -> Result<(), io::Error> {
let file_data = fs::read(format!("{}.zdiff", filename)).await?;
let mut hash_file =
String::from_utf8(fs::read(format!("{}.zdiff.md5", filename)).await?).unwrap();
let hash = utils::get_hash(file_data).await;
hash_file = hash_file.split(" ").next().unwrap().parse().unwrap();
if !hash_file.eq(&hash) {
return Err(io::Error::new(
io::ErrorKind::Other,
format!("Hash mismatch. Expected {}, got {}", hash_file, hash),
));
}
println!(">>> Zsdiff hash: {}", hash);
Ok(())
}
#[derive(Parser, Debug)]
struct Args {
#[arg(short, long)]
filename: String,
#[arg(short, long)]
dest_dir: String,
async fn zspatch(filename: String, dest_dir: String) -> Result<(), io::Error> {
let now = time::Instant::now();
let cloned = filename.clone();
let diff = load_file(cloned).await.ok().unwrap();
let tmp_dir_name = extract_files(&diff, filename).await?;
let files_to_copy: Vec<String> = diff.content.keys().cloned().collect();
for (_, name) in files_to_copy.iter().enumerate() {
let from_path = Path::new(&tmp_dir_name).join(name);
let to_path = Path::new(&dest_dir).join(name);
if !from_path.exists() {
println!("ERROR: Source file doesn't exist: {:?}", from_path);
continue;
}
if let Some(parent) = to_path.parent() {
fs::create_dir_all(parent).await?;
}
fs::copy(from_path.clone(), to_path.clone()).await?;
}
for file in &diff.metadata.remove_files {
let path = Path::new(&dest_dir).join(file);
println!(">>> Removing file {}", path.display());
if !path.exists() {
println!("File doesn't exist, skipping");
continue;
}
fs::remove_file(path.clone()).await?
}
println!(">>> Starting folder removal process <<<");
println!(
">>> Folders to remove: {}",
diff.metadata.remove_folders.len()
);
for folder in &diff.metadata.remove_folders {
let path = Path::new(&dest_dir).join(folder);
if !path.exists() {
println!("Folder doesn't exist, skipping");
continue;
}
fs::remove_dir_all(path.clone()).await?
}
println!(">>> Starting hash verification <<<");
println!(">>> Files to verify: {}", diff.metadata.hashes.len());
for (k, hash) in &diff.metadata.hashes {
let path = Path::new(&dest_dir).join(k);
match fs::read(path.clone()).await {
Ok(content) => {
let fs_hash = utils::get_hash(content).await;
if !fs_hash.eq(hash) {
println!(
"Hash mismatch. Expected {}, got {}. Path: {}",
hash,
fs_hash,
path.display()
);
}
}
Err(e) => {
println!("Can't read file for hash verification: {}", e);
}
}
}
fs::remove_dir_all(tmp_dir_name).await?;
println!(">>> Patching done! <<<");
println!(">>> Elapsed time: {:.2?}", now.elapsed());
Ok(())
}
#[tokio::main]
async fn main() -> io::Result<()> {
let args = Args::parse();
zpatch(args.filename, args.dest_dir).await?;
let m = Command::new("ZsPatch")
.author("ScuroNeko")
.version("0.3.0")
.about("Explains in brief what the program does")
.subcommand_required(true)
.arg_required_else_help(true)
.after_help("")
.subcommand(
Command::new("metadata")
.short_flag('m')
.long_flag("metadata")
.arg(
Arg::new("filename")
.short('f')
.long("filename")
.required(true)
.action(ArgAction::Set),
),
)
.subcommand(
Command::new("patch")
.short_flag('p')
.arg(
Arg::new("filename")
.short('f')
.long("filename")
.required(true)
.action(ArgAction::Set),
)
.arg(
Arg::new("dest")
.short('d')
.long("dest")
.required(true)
.action(ArgAction::Set),
)
.arg(
Arg::new("hash_check")
.long("hash_check")
.required(false)
.action(ArgAction::SetTrue),
),
)
.get_matches();
match m.subcommand() {
Some(("metadata", meta_matches)) => {
let filename: &String = meta_matches.get_one("filename").unwrap();
let metadata = load_metadata(filename.clone()).await?;
println!(">>> Compress level: {}", metadata.compress_level);
println!(
">>> Encrypted?: {}",
if metadata.encrypted { "Yes" } else { "No" }
);
return Ok(());
}
Some(("patch", patch_matches)) => {
let filename: &String = patch_matches.get_one("filename").unwrap();
let dest_dir: &String = patch_matches.get_one("dest").unwrap();
if patch_matches.get_flag("hash_check") {
check_hash(filename.clone()).await.ok();
}
zspatch(filename.clone(), dest_dir.clone()).await?;
}
_ => unreachable!("Subcommand is required"),
}
Ok(())
}