v0.1
src/main.rs (53 lines removed)
@@ -1,53 +0,0 @@
-mod utils;
-mod zdiff;
-mod zpatch;
-
-use std::fs;
-use std::fs::read;
-use std::io;
-use std::path::Path;
-
-async fn zdiff(filename: &str, old: &str, new: &str) -> Result<(), io::Error> {
-    let output_filename = &format!("{}.zdiff", filename);
-    let old_hashes = zdiff::walk_dir(old.to_string()).await;
-    let new_hashes = zdiff::walk_dir(new.to_string()).await;
-    let compare_hashes = zdiff::compare_hashes(old_hashes, new_hashes).await;
-    let parts = compare_hashes.to_vec().await;
-    utils::compress_parts(parts, fs::File::create(output_filename)?, 11).await;
-    Ok(())
-}
-
-async fn zpatch(filename: &str, dest_dir: &str) -> Result<(), io::Error> {
-    let filename = &format!("{}.zdiff", filename);
-    let parts = utils::decompress_parts(read(filename)?).await?;
-    let zdiff = zdiff::Zdiff::from_vec(parts).await?;
-    let tmp_dir_name = zpatch::extract_files(&zdiff, filename).await?;
-    for name in zdiff.content.keys().collect::<Vec<&String>>() {
-        let from_path = Path::new(&tmp_dir_name).join(name);
-        let to_path = Path::new(&dest_dir).join(name);
-        // println!("{:?} {:?}", from_path, to_path);
-        fs::create_dir_all(to_path.parent().unwrap())?;
-        fs::copy(from_path, to_path)?;
-    }
-    for file in zdiff.metadata.remove_files {
-        let path = Path::new(&dest_dir).join(file);
-        fs::remove_file(path)?;
-    }
-
-    for (k, hash) in zdiff.metadata.hashes {
-        let path = Path::new(&dest_dir).join(k);
-        println!("path: {:?}", path);
-        let content = read(path)?;
-        let fs_hash = zdiff::get_hash(&content).await;
-        println!("{:?} {:?}", hash, fs_hash);
-    }
-    Ok(())
-}
-
-#[tokio::main]
-async fn main() -> io::Result<()> {
-    let filename = "test";
-    zdiff(filename, "test/old", "test/new").await?;
-    zpatch(filename, "old").await?;
-    Ok(())
-}
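For context on the hash comparison that this deleted main.rs only printed: the project hashes whole files with the md5 crate (as the diff shows in get_hash) and stores the digests as lowercase hex strings. A standalone illustration, not part of the commit, assuming the md5 crate is a dependency:

fn main() {
    let data = b"hello world";
    // md5::compute returns a 16-byte Digest; {:x} renders it as the
    // 32-character hex string that the .zdiff metadata stores per file.
    let digest = md5::compute(&data[..]);
    println!("{:x}", digest);
}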
src/utils.rs (62 changed lines)
@@ -1,7 +1,67 @@
+use serde::{Deserialize, Serialize};
+use std::collections::HashMap;
 use std::{fs, io};
 use zstd::{Decoder, Encoder};
 
-pub async fn compress_parts(input: Vec<Vec<u8>>, output: fs::File, level: i32) {
+pub struct Zdiff {
+    pub content: HashMap<String, Vec<u8>>,
+    pub metadata: Metadata,
+}
+
+impl Zdiff {
+    pub async fn from_vec(_data: Vec<Vec<u8>>) -> Result<Self, std::io::Error> {
+        let mut content = HashMap::new();
+        for part in _data {
+            let filename_size = u32::from_be_bytes(part[0..4].try_into().unwrap()) as usize;
+            let filename = String::from_utf8(part[4..filename_size + 4].to_vec()).unwrap();
+            let cont = part[filename_size + 8..].to_vec();
+            content.insert(filename, cont);
+        }
+        let meta = content.get("metadata.json").unwrap();
+        let metadata: Metadata = serde_json::from_slice(meta.as_slice())?;
+        content.remove("metadata.json");
+
+        Ok(Zdiff { content, metadata })
+    }
+
+    pub async fn to_vec(&self) -> Vec<Vec<u8>> {
+        let mut parts: Vec<Vec<u8>> = Vec::new();
+        for (filename, content) in &self.content {
+            let filename_size: [u8; 4] = (filename.len() as u32).to_be_bytes();
+            let filename_encoded = vec![filename_size.as_slice(), filename.as_bytes()].concat();
+
+            let content_size: [u8; 4] = (content.len() as u32).to_be_bytes();
+            let content_encoded = vec![content_size.as_slice(), content.as_slice()].concat();
+            parts.push(vec![filename_encoded, content_encoded].concat())
+        }
+
+        let meta = serde_json::to_vec(&self.metadata).unwrap();
+        let meta_filename = "metadata.json";
+        let meta_filename_size = (meta_filename.len() as u32).to_be_bytes();
+        let meta_filename_encoded =
+            vec![meta_filename_size.as_slice(), meta_filename.as_bytes()].concat();
+
+        let meta_size = (meta.len() as u32).to_be_bytes();
+        let meta_encoded = vec![meta_size.as_slice(), meta.as_slice()].concat();
+        parts.push(vec![meta_filename_encoded, meta_encoded].concat());
+
+        parts
+    }
+}
+
+#[derive(Serialize, Deserialize)]
+pub struct Metadata {
+    pub(crate) diff_files: Vec<String>,
+    pub hashes: HashMap<String, String>,
+    pub remove_files: Vec<String>,
+}
+
+pub async fn get_hash(data: &Vec<u8>) -> String {
+    let hash = md5::compute(&data[..]);
+    format!("{:x}", hash)
+}
+
+pub async fn compress_parts(input: Vec<Vec<u8>>, output: &fs::File, level: i32) {
     let mut encoder = Encoder::new(output, level).unwrap();
     for part in input.iter() {
         io::copy(&mut &part[..], &mut encoder).unwrap();
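The part layout used by Zdiff::to_vec and from_vec above is length-prefixed: a 4-byte big-endian filename length, the filename bytes, a 4-byte big-endian content length, then the content bytes. A standalone round-trip sketch of that layout (encode_part and decode_part are hypothetical helper names, not functions from the commit):

fn encode_part(filename: &str, content: &[u8]) -> Vec<u8> {
    // [4-byte BE name length][name bytes][4-byte BE content length][content bytes]
    let mut part = Vec::new();
    part.extend_from_slice(&(filename.len() as u32).to_be_bytes());
    part.extend_from_slice(filename.as_bytes());
    part.extend_from_slice(&(content.len() as u32).to_be_bytes());
    part.extend_from_slice(content);
    part
}

fn decode_part(part: &[u8]) -> (String, Vec<u8>) {
    let name_len = u32::from_be_bytes(part[0..4].try_into().unwrap()) as usize;
    let name = String::from_utf8(part[4..4 + name_len].to_vec()).unwrap();
    // from_vec skips the 4-byte content length and takes the rest of the part.
    let content = part[name_len + 8..].to_vec();
    (name, content)
}

fn main() {
    let part = encode_part("hello.txt", b"hello");
    let (name, content) = decode_part(&part);
    assert_eq!(name, "hello.txt");
    assert_eq!(content, b"hello".to_vec());
    println!("{} -> {} bytes", name, content.len());
}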
src/zdiff.rs (118 changed lines)
@@ -1,75 +1,18 @@
-use serde::{Deserialize, Serialize};
+mod utils;
 
+use clap::Parser;
 use std::collections::HashMap;
-use std::fs;
+use std::{fs, io};
+use utils::{Metadata, Zdiff, get_hash};
 use walkdir::WalkDir;
 
-#[derive(Debug)]
-pub struct Zdiff {
-    pub content: HashMap<String, Vec<u8>>,
-    pub metadata: Metadata,
-}
-
-impl Zdiff {
-    pub async fn from_vec(_data: Vec<Vec<u8>>) -> Result<Self, std::io::Error> {
-        let mut content = HashMap::new();
-        for part in _data {
-            let filename_size = u32::from_be_bytes(part[0..4].try_into().unwrap()) as usize;
-            let filename = String::from_utf8(part[4..filename_size + 4].to_vec()).unwrap();
-            let cont = part[filename_size + 8..].to_vec();
-            content.insert(filename, cont);
-        }
-        let meta = content.get("metadata.json").unwrap();
-        let metadata: Metadata = serde_json::from_slice(meta.as_slice())?;
-        content.remove("metadata.json");
-
-        Ok(Zdiff { content, metadata })
-    }
-
-    pub async fn to_vec(&self) -> Vec<Vec<u8>> {
-        let mut parts: Vec<Vec<u8>> = Vec::new();
-        for (filename, content) in &self.content {
-            let filename_size: [u8; 4] = (filename.len() as u32).to_be_bytes();
-            let filename_encoded = vec![filename_size.as_slice(), filename.as_bytes()].concat();
-
-            let content_size: [u8; 4] = (content.len() as u32).to_be_bytes();
-            let content_encoded = vec![content_size.as_slice(), content.as_slice()].concat();
-            parts.push(vec![filename_encoded, content_encoded].concat())
-        }
-
-        let meta = serde_json::to_vec(&self.metadata).unwrap();
-        let meta_filename = "metadata.json";
-        let meta_filename_size = (meta_filename.len() as u32).to_be_bytes();
-        let meta_filename_encoded =
-            vec![meta_filename_size.as_slice(), meta_filename.as_bytes()].concat();
-
-        let meta_size = (meta.len() as u32).to_be_bytes();
-        let meta_encoded = vec![meta_size.as_slice(), meta.as_slice()].concat();
-        parts.push(vec![meta_filename_encoded, meta_encoded].concat());
-
-        parts
-    }
-}
-
-#[derive(Serialize, Deserialize, Debug)]
-pub struct Metadata {
-    diff_files: Vec<String>,
-    pub hashes: HashMap<String, String>,
-    pub remove_files: Vec<String>,
-}
 
 #[derive(Debug)]
-pub struct FileInfo {
+struct FileInfo {
     path: String,
     relative_path: String, // Without dir prefix
     hash: String,
 }
 
-pub async fn get_hash(data: &Vec<u8>) -> String {
-    let hash = md5::compute(&data[..]);
-    format!("{:x}", hash)
-}
-
-pub async fn walk_dir(dir: String) -> HashMap<String, FileInfo> {
+async fn walk_dir(dir: String) -> HashMap<String, FileInfo> {
     let mut hash_list: HashMap<String, FileInfo> = HashMap::new();
     for e in WalkDir::new(&dir) {
         let e = e.unwrap();
@@ -79,22 +22,18 @@ pub async fn walk_dir(dir: String) -> HashMap<String, FileInfo> {
         }
         let content = fs::read(path).unwrap();
         let hash = get_hash(&content).await;
         // let filename = path.file_name().unwrap().to_str().unwrap().to_string();
         let path_str = path.display().to_string();
         let file_info = FileInfo {
             relative_path: path_str[dir.len() + 1..].to_string(),
             path: path_str,
             hash: hash.clone(),
         };
-        hash_list.insert(hash, file_info);
+        hash_list.entry(hash).or_insert(file_info);
     }
     hash_list
 }
 
-pub async fn compare_hashes(
-    old: HashMap<String, FileInfo>,
-    new: HashMap<String, FileInfo>,
-) -> Zdiff {
+async fn compare_hashes(old: HashMap<String, FileInfo>, new: HashMap<String, FileInfo>) -> Zdiff {
     let mut diff_files: HashMap<String, Vec<u8>> = HashMap::new();
     let mut remove_files: Vec<String> = vec![];
     let mut hashes: HashMap<String, String> = HashMap::new();
@@ -125,3 +64,42 @@
         },
     }
 }
+
+pub async fn zdiff(
+    filename: String,
+    old: String,
+    new: String,
+    level: i32,
+) -> Result<(), io::Error> {
+    let output_filename = &format!("{}.zdiff", filename);
+    let old_hashes = walk_dir(old).await;
+    let new_hashes = walk_dir(new).await;
+    let compare_hashes = compare_hashes(old_hashes, new_hashes).await;
+    let parts = compare_hashes.to_vec().await;
+    let file = fs::File::create(output_filename)?;
+    utils::compress_parts(parts, &file, level).await;
+    // let mut buf = Vec::new();
+    // file.read(&mut buf)?;
+    // let output_hash = get_hash(&buf).await;
+    // println!("{}", output_hash);
+    Ok(())
+}
+
+#[derive(Parser, Debug)]
+struct Args {
+    #[arg(short, long)]
+    filename: String,
+    #[arg(short, long, default_value_t = 11)]
+    compress_level: i32,
+    #[arg(short, long)]
+    old: String,
+    #[arg(short, long)]
+    new: String,
+}
+
+#[tokio::main]
+async fn main() -> io::Result<()> {
+    let args = Args::parse();
+    zdiff(args.filename, args.old, args.new, args.compress_level).await?;
+    Ok(())
+}
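One behavioral change in walk_dir above: hash_list is keyed by content hash, so two files with identical contents collide. insert() would let the last file seen win, while entry().or_insert() keeps the first one. A minimal standalone illustration of that std HashMap behavior (the key here is a placeholder, not a real digest):

use std::collections::HashMap;

fn main() {
    let mut hash_list: HashMap<&str, &str> = HashMap::new();
    hash_list.insert("same-content-hash", "a.txt");
    // or_insert leaves the existing entry untouched, so "a.txt" is kept
    // even though "b.txt" hashed to the same key.
    hash_list.entry("same-content-hash").or_insert("b.txt");
    assert_eq!(hash_list["same-content-hash"], "a.txt");
}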
src/zpatch.rs
@@ -1,20 +1,23 @@
-use crate::zdiff::Zdiff;
-use crate::zpatch;
-use std::fs;
+mod utils;
 
+use clap::Parser;
+use std::fs::read;
 use std::io::Write;
 use std::path::Path;
+use std::{fs, io};
+use utils::Zdiff;
 
-pub async fn create_tmp_dir(dir_name: String) -> Result<String, std::io::Error> {
+async fn create_tmp_dir(dir_name: String) -> Result<String, io::Error> {
     let name = format!("{}.tmp", dir_name);
-    fs::remove_dir_all(name.clone()).map_err(|_| std::io::ErrorKind::NotFound)?;
+    fs::remove_dir_all(name.clone()).ok();
     fs::DirBuilder::new().create(name.clone())?;
     Ok(name)
 }
 
-pub async fn extract_files(zdiff: &Zdiff, filename: &String) -> Result<String, std::io::Error> {
+async fn extract_files(zdiff: &Zdiff, filename: &String) -> Result<String, io::Error> {
     let tmp_dir_name = create_tmp_dir(filename.to_string()).await?;
     let path = Path::new(&tmp_dir_name);
-    fs::remove_dir_all(path)?;
+    fs::remove_dir_all(path).ok();
     for (f, c) in zdiff.content.iter() {
         let filepath = path.join(f);
         fs::create_dir_all(filepath.parent().unwrap())?;
@@ -22,3 +25,46 @@ pub async fn extract_files(zdiff: &Zdiff, filename: &String) -> Result<String, std::io::Error> {
     }
     Ok(tmp_dir_name)
 }
+
+async fn zpatch(filename: String, dest_dir: String) -> Result<(), io::Error> {
+    let filename = &format!("{}.zdiff", filename);
+    let parts = utils::decompress_parts(read(filename)?).await?;
+    let zdiff = Zdiff::from_vec(parts).await?;
+    let tmp_dir_name = extract_files(&zdiff, filename).await?;
+    for name in zdiff.content.keys().collect::<Vec<&String>>() {
+        let from_path = Path::new(&tmp_dir_name).join(name);
+        let to_path = Path::new(&dest_dir).join(name);
+        fs::create_dir_all(to_path.parent().unwrap())?;
+        fs::copy(from_path, to_path)?;
+    }
+
+    for file in zdiff.metadata.remove_files {
+        let path = Path::new(&dest_dir).join(file);
+        fs::remove_file(path).ok();
+    }
+
+    for (k, hash) in zdiff.metadata.hashes {
+        let path = Path::new(&dest_dir).join(k);
+        let content = read(path)?;
+        let fs_hash = utils::get_hash(&content).await;
+        if !fs_hash.eq(&hash) {
+            Err(io::Error::new(io::ErrorKind::Other, "Hash mismatch"))?
+        }
+    }
+    Ok(())
+}
+
+#[derive(Parser, Debug)]
+struct Args {
+    #[arg(short, long)]
+    filename: String,
+    #[arg(short, long)]
+    dest_dir: String,
+}
+
+#[tokio::main]
+async fn main() -> io::Result<()> {
+    let args = Args::parse();
+    zpatch(args.filename, args.dest_dir).await?;
+    Ok(())
+}
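zpatch relies on utils::decompress_parts, whose body lies outside the changed hunks and is not shown in this commit. A plausible counterpart to compress_parts, assuming the zstd stream is simply the concatenated length-prefixed parts, might look like the following sketch (this body is a guess for illustration, not code from the repository):

use std::io;

pub async fn decompress_parts(input: Vec<u8>) -> Result<Vec<Vec<u8>>, io::Error> {
    // Inflate the whole zstd stream, then re-split it on the 4-byte
    // big-endian filename/content length prefixes written by Zdiff::to_vec.
    let raw = zstd::decode_all(&input[..])?;
    let mut parts = Vec::new();
    let mut pos = 0;
    while pos + 8 <= raw.len() {
        let name_len = u32::from_be_bytes(raw[pos..pos + 4].try_into().unwrap()) as usize;
        let content_start = pos + 4 + name_len;
        let content_len =
            u32::from_be_bytes(raw[content_start..content_start + 4].try_into().unwrap()) as usize;
        let end = content_start + 4 + content_len;
        parts.push(raw[pos..end].to_vec());
        pos = end;
    }
    Ok(parts)
}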