2025-10-20 00:58:43 +03:00
parent c4c0ad2e12
commit 207675f522
5 changed files with 205 additions and 99 deletions


@@ -4,58 +4,86 @@ use std::collections::HashMap;
use std::{fs, io};
use zstd::{Decoder, Encoder};
+const METADATA_VERSION: u16 = 1;
+const SUPPORTED_VERSION: [u16; 1] = [1];
pub struct Zsdiff {
pub content: HashMap<String, Vec<u8>>,
pub metadata: Metadata,
}
impl Zsdiff {
-    pub async fn from_vec(_data: Vec<Vec<u8>>) -> Result<Self, io::Error> {
+    pub async fn from_vec(_data: Vec<u8>) -> Result<Self, io::Error> {
+        let meta_version = u16::from_be_bytes(_data[..2].try_into().unwrap());
+        println!(">>> Metadata version: {}", meta_version);
+        if !SUPPORTED_VERSION.contains(&meta_version) {
+            return Err(io::Error::new(
+                io::ErrorKind::Other,
+                "Metadata version mismatch",
+            ));
+        }
+        let meta_size = u32::from_be_bytes(_data[2..6].try_into().unwrap()) as usize;
+        let mut index = 6;
+        let meta = _data[index..index + meta_size].to_vec();
+        let metadata: Metadata = serde_json::from_slice(&meta)?;
+        println!(">>> Metadata parsed successfully");
+        index += meta_size;
+        println!(">>> File count: {}", metadata.file_count);
+        let data = _data;
let mut content = HashMap::new();
-        for part in _data {
-            let filename_size = u32::from_be_bytes(part[0..4].try_into().unwrap()) as usize;
-            let filename = String::from_utf8(part[4..filename_size + 4].to_vec()).unwrap();
-            let cont = part[filename_size + 8..].to_vec();
+        while index < data.len() {
+            let filename_size =
+                u32::from_be_bytes(data[index..index + 4].try_into().unwrap()) as usize;
+            index += 4;
+            let filename = String::from_utf8(data[index..filename_size + index].to_vec()).unwrap();
+            index += filename_size;
+            let content_size =
+                u32::from_be_bytes(data[index..index + 4].try_into().unwrap()) as usize;
+            index += 4;
+            let cont = data[index..index + content_size].to_vec();
+            index += cont.len();
content.insert(filename, cont);
}
-        let meta = content.get("metadata.json").unwrap();
-        let metadata: Metadata = serde_json::from_slice(meta.as_slice())?;
-        content.remove("metadata.json");
Ok(Zsdiff { content, metadata })
}
-    pub async fn to_vec(&self) -> Vec<Vec<u8>> {
-        let mut parts: Vec<Vec<u8>> = Vec::new();
+    pub async fn to_vec(&self) -> Vec<u8> {
+        let mut meta_bytes: Vec<u8> = Vec::new();
+        meta_bytes.extend(METADATA_VERSION.to_be_bytes());
+        let meta = serde_json::to_vec(&self.metadata).unwrap();
+        meta_bytes.extend((meta.len() as u32).to_be_bytes());
+        meta_bytes.extend(meta);
+        let mut parts: Vec<u8> = Vec::new();
for (filename, content) in &self.content {
let filename_size: [u8; 4] = (filename.len() as u32).to_be_bytes();
let filename_encoded = vec![filename_size.as_slice(), filename.as_bytes()].concat();
let content_size: [u8; 4] = (content.len() as u32).to_be_bytes();
let content_encoded = vec![content_size.as_slice(), content.as_slice()].concat();
-            parts.push(vec![filename_encoded, content_encoded].concat())
+            let part = vec![filename_encoded, content_encoded].concat();
+            parts.extend(part)
}
-        let meta = serde_json::to_vec(&self.metadata).unwrap();
-        let meta_filename = "metadata.json";
-        let meta_filename_size = (meta_filename.len() as u32).to_be_bytes();
-        let meta_filename_encoded =
-            vec![meta_filename_size.as_slice(), meta_filename.as_bytes()].concat();
-        let meta_size = (meta.len() as u32).to_be_bytes();
-        let meta_encoded = vec![meta_size.as_slice(), meta.as_slice()].concat();
-        parts.push(vec![meta_filename_encoded, meta_encoded].concat());
-        parts
+        let out = vec![meta_bytes, parts].concat();
+        out
}
}
-#[derive(Serialize, Deserialize)]
+#[derive(Serialize, Deserialize, Debug)]
pub struct Metadata {
pub(crate) diff_files: Vec<String>,
pub hashes: HashMap<String, String>,
pub remove_files: Vec<String>,
pub remove_folders: Vec<String>,
pub file_count: u32,
pub compress_level: i32,
}
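
For orientation, a minimal round-trip sketch of the new single-buffer layout written by to_vec and parsed by from_vec: a u16 metadata version, a u32 metadata length, the metadata JSON, then repeated length-prefixed filename/content records. The field values, and the Metadata literal (only possible from inside the crate, since diff_files is pub(crate)), are assumptions for illustration and not part of this commit.

// Sketch only: assumes crate-internal code and some async executor; all values are made up.
async fn roundtrip_example() -> Result<(), std::io::Error> {
    let mut content = std::collections::HashMap::new();
    content.insert("hello.txt".to_string(), b"hello world".to_vec());
    let original = Zsdiff {
        content,
        metadata: Metadata {
            diff_files: vec!["hello.txt".to_string()],
            hashes: std::collections::HashMap::new(),
            remove_files: vec![],
            remove_folders: vec![],
            file_count: 1,
            compress_level: 3,
        },
    };
    // to_vec emits: version (2 bytes) | metadata length (4 bytes) | metadata JSON | file records
    let bytes = original.to_vec().await;
    // from_vec checks the version, reads the metadata header, then walks the file records
    let parsed = Zsdiff::from_vec(bytes).await?;
    assert_eq!(parsed.content["hello.txt"], b"hello world".to_vec());
    Ok(())
}
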
pub async fn get_hash(data: Vec<u8>) -> String {
@@ -63,33 +91,15 @@ pub async fn get_hash(data: Vec<u8>) -> String {
format!("{:x}", hash)
}
-pub async fn compress_parts(input: Vec<Vec<u8>>, output: &fs::File, level: i32) {
+pub async fn compress(input: Vec<u8>, output: &fs::File, level: i32) {
let mut encoder = Encoder::new(output, level).unwrap();
-    for part in input.iter() {
-        io::copy(&mut &part[..], &mut encoder).unwrap();
-    }
+    io::copy(&mut input.as_slice(), &mut encoder).unwrap();
encoder.finish().unwrap();
}
-pub async fn decompress_parts(input: Vec<u8>) -> Result<Vec<Vec<u8>>, io::Error> {
+pub async fn decompress(input: Vec<u8>) -> Result<Vec<u8>, io::Error> {
let mut decoder = Decoder::new(&input[..])?;
let mut buf = Vec::new();
io::copy(&mut decoder, &mut buf)?;
-    let mut index = 0;
-    let mut parts: Vec<Vec<u8>> = Vec::new();
-    while index < buf.len() {
-        let filename_size = u32::from_be_bytes(buf[index..index + 4].try_into().unwrap()) as usize;
-        let filename = buf[index..index + filename_size + 4].to_vec();
-        index += 4 + filename_size;
-        let content_size = u32::from_be_bytes(buf[index..index + 4].try_into().unwrap()) as usize;
-        let content = buf[index..index + content_size + 4].to_vec();
-        index += content_size + 4;
-        let part = vec![filename, content].concat();
-        parts.push(part);
-    }
-    Ok(parts)
+    Ok(buf)
}
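
A hypothetical end-to-end sketch of the reshaped compress/decompress pair: serialize with to_vec, stream the bytes into a file with compress, then read the file back, decompress, and re-parse with from_vec. The path "patch.zsdiff" and the surrounding async executor are assumptions; only the four public functions above come from this commit.

// Sketch: compress the container into a file and read it back.
async fn archive_roundtrip(zsdiff: &Zsdiff) -> Result<Zsdiff, std::io::Error> {
    let file = std::fs::File::create("patch.zsdiff")?;
    // compress streams the raw container bytes into the file through a zstd encoder
    compress(zsdiff.to_vec().await, &file, zsdiff.metadata.compress_level).await;
    // decompress returns the plain container bytes, which from_vec understands
    let compressed = std::fs::read("patch.zsdiff")?;
    let plain = decompress(compressed).await?;
    Zsdiff::from_vec(plain).await
}
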