Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension


Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1,615 changes: 924 additions & 691 deletions Cargo.lock

Large diffs are not rendered by default.

5 changes: 4 additions & 1 deletion Cargo.toml
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@
authors = ["Roman Schejbal <schejbalroman@gmail.com>"]
edition = "2021"
name = "syncbox"
version = "0.5.4"
version = "0.5.6"

[dependencies]
async-trait = "0.1.74"
Expand All @@ -26,3 +26,6 @@ ssh2 = "0.9.4"
suppaftp = {version = "5.2.2", features = ["async-native-tls"]}
tokio = {version = "1.34.0", features = ["full"]}
tokio-util = {version = "0.7.10", features = ["compat"]}

[dev-dependencies]
tempfile = "3"
142 changes: 141 additions & 1 deletion src/checksum_tree.rs
Original file line number Diff line number Diff line change
Expand Up @@ -162,7 +162,6 @@ impl DerefMut for ChecksumTree {
}

#[cfg(test)]

mod tests {
use super::*;

Expand Down Expand Up @@ -231,4 +230,145 @@ mod tests {
r#"{"version":"0.3.0","root":{"Directory":{"dirrr":{"Directory":{"DSC05947.ARW":{"File":"a4849b4f83f996ef9ce68b9f8561db4a991ab5f9dce3c52a45267c8e274bb73a"}}}}}}"#
);
}

#[test]
fn from_hashmap_single_file() {
    // A single "./file.txt" entry must become root -> "." -> "file.txt".
    let checksums = HashMap::from([("./file.txt".to_string(), "abc123".to_string())]);
    let tree: ChecksumTree = checksums.into();

    let ChecksumElement::Directory(root) = tree.as_ref().unwrap() else {
        panic!("expected Directory")
    };
    let ChecksumElement::Directory(dot) = root.get(".").unwrap() else {
        panic!("expected Directory")
    };
    let ChecksumElement::File(hash) = dot.get("file.txt").unwrap() else {
        panic!("expected File")
    };
    assert_eq!(hash, "abc123");
}

#[test]
fn from_hashmap_multiple_files_same_directory() {
    // Two files under "./" should land as siblings inside the "." directory.
    let checksums = HashMap::from([
        ("./a.txt".to_string(), "hash_a".to_string()),
        ("./b.txt".to_string(), "hash_b".to_string()),
    ]);
    let tree: ChecksumTree = checksums.into();

    let ChecksumElement::Directory(root) = tree.as_ref().unwrap() else {
        panic!("expected Directory")
    };
    let ChecksumElement::Directory(dot) = root.get(".").unwrap() else {
        panic!("expected Directory")
    };
    assert_eq!(dot.len(), 2);
    assert!(matches!(dot.get("a.txt"), Some(ChecksumElement::File(h)) if h == "hash_a"));
    assert!(matches!(dot.get("b.txt"), Some(ChecksumElement::File(h)) if h == "hash_b"));
}

#[test]
fn from_hashmap_deeply_nested() {
    // "./a/b/c/file.txt" must expand into the full chain of nested directories.
    let checksums = HashMap::from([("./a/b/c/file.txt".to_string(), "deep_hash".to_string())]);
    let tree: ChecksumTree = checksums.into();

    // Walk root -> "." -> "a" -> "b" -> "c", panicking on any non-directory node.
    let ChecksumElement::Directory(root) = tree.as_ref().unwrap() else { panic!() };
    let mut dir = root;
    for segment in [".", "a", "b", "c"] {
        let ChecksumElement::Directory(next) = dir.get(segment).unwrap() else { panic!() };
        dir = next;
    }
    assert!(matches!(dir.get("file.txt"), Some(ChecksumElement::File(h)) if h == "deep_hash"));
}

#[test]
fn from_hashmap_files_in_different_directories() {
    // Files under distinct top-level directories create sibling directory nodes.
    let checksums = HashMap::from([
        ("./dir1/file1.txt".to_string(), "hash1".to_string()),
        ("./dir2/file2.txt".to_string(), "hash2".to_string()),
    ]);
    let tree: ChecksumTree = checksums.into();

    let ChecksumElement::Directory(root) = tree.as_ref().unwrap() else { panic!() };
    let ChecksumElement::Directory(dot) = root.get(".").unwrap() else { panic!() };
    assert_eq!(dot.len(), 2);
    assert!(dot.contains_key("dir1"));
    assert!(dot.contains_key("dir2"));
}

#[test]
fn from_hashmap_roundtrip_through_reconciler() {
    use crate::reconciler::Reconciler;

    // Two trees built from the same checksum map must reconcile to no actions.
    let checksums = HashMap::from([
        ("./dir/file.txt".to_string(), "hash1".to_string()),
        ("./other/nested/file.txt".to_string(), "hash2".to_string()),
    ]);
    let current: ChecksumTree = checksums.clone().into();
    let previous: ChecksumTree = checksums.into();

    let actions = Reconciler::reconcile(current, &previous).unwrap();
    assert!(
        actions.is_empty(),
        "identical trees should produce zero actions"
    );
}

#[test]
fn gzip_roundtrip() {
    // Compress-then-decompress must preserve the serialized tree exactly.
    let checksums = HashMap::from([("./dir/file.txt".to_string(), "hash123".to_string())]);
    let original: ChecksumTree = checksums.into();

    let bytes = original.to_gzip().unwrap();
    let decoded = ChecksumTree::from_gzip(&bytes).unwrap();

    assert_eq!(
        serde_json::to_string(&original).unwrap(),
        serde_json::to_string(&decoded).unwrap()
    );
}

#[test]
fn gzip_roundtrip_empty_tree() {
    // Even a default (empty) tree must survive the gzip round-trip unchanged.
    let empty = ChecksumTree::default();

    let bytes = empty.to_gzip().unwrap();
    let decoded = ChecksumTree::from_gzip(&bytes).unwrap();

    assert_eq!(
        serde_json::to_string(&empty).unwrap(),
        serde_json::to_string(&decoded).unwrap()
    );
}
}
158 changes: 158 additions & 0 deletions src/config.rs
Original file line number Diff line number Diff line change
@@ -0,0 +1,158 @@
use clap::{
builder::{styling::AnsiColor, Styles},
Parser,
};

/// Builds the CLI color scheme: green for headers, usage, literals,
/// and placeholders alike.
fn get_styles() -> Styles {
    // `Style` is `Copy`, so one value serves all four slots.
    let green = AnsiColor::Green.on_default();
    Styles::styled()
        .header(green)
        .usage(green)
        .literal(green)
        .placeholder(green)
}

/// Fast sync with remote filesystem
#[derive(Parser, Debug, Clone)]
#[command(version, about, styles = get_styles())]
pub struct Args {
    // NOTE: fields below use plain `//` comments (not `///`) so clap's derived
    // help output stays exactly what the `help = ...` attributes define.

    // Location of the gzipped checksum snapshot read/written by the sync.
    #[arg(
        long,
        help = "Name of the checksum file",
        default_value = "./.syncbox.json.gz",
        env = "SYNCBOX_CHECKSUM_FILE"
    )]
    pub checksum_file: String,

    // Help-text grammar fixed: "only creates" -> "only create".
    #[arg(
        long,
        help = "Will skip execution and only create the checksum file",
        default_value_t = false
    )]
    pub checksum_only: bool,

    // 0 presumably disables intermittent uploads — confirm in the sync engine.
    #[arg(
        short,
        long,
        help = "Will upload checksum file every N files",
        default_value_t = 0,
        env = "SYNCBOX_INTERMITTENT_CHECKSUM_UPLOAD"
    )]
    pub intermittent_checksum_upload: usize,

    // Backend to sync against; see `TransportType` for the subcommands.
    #[command(subcommand)]
    pub transport: TransportType,

    #[arg(
        long,
        help = "Ignore corrupted checksum file and override",
        default_value_t = false
    )]
    pub force: bool,

    #[arg(
        short,
        long,
        help = "Concurrency limit for file operations",
        default_value_t = 1,
        env = "SYNCBOX_CONCURRENCY"
    )]
    pub concurrency: usize,

    // Unit name fixed: "MBs" -> "MB".
    #[arg(
        long,
        help = "Files of size below this threshold (in MB) will be read and digested using SHA256, the others will use metadata as the checksum",
        default_value_t = 100,
        env = "SYNCBOX_FILE_THRESHOLD"
    )]
    pub file_size_threshold: u64,

    // NOTE(review): no `help` text, so this renders blank in --help. Presumably
    // skips deletion of remote files missing locally — confirm before documenting.
    #[arg(short, long, default_value_t = false)]
    pub skip_removal: bool,

    #[arg(
        help = "Directory to diff against",
        default_value = ".",
        env = "SYNCBOX_DIRECTORY"
    )]
    pub directory: String,

    #[arg(long, help = "Skip first X actions", default_value_t = 0)]
    pub skip: usize,

    #[arg(
        long,
        help = "Maximum number of retry attempts for failed operations",
        default_value_t = 3,
        env = "SYNCBOX_MAX_RETRIES"
    )]
    pub max_retries: usize,

    // The two delays below appear to bound a backoff: start at
    // `initial_retry_delay` ms, cap at `max_retry_delay` s — TODO confirm
    // against the retry implementation (note the differing units).
    #[arg(
        long,
        help = "Initial retry delay in milliseconds",
        default_value_t = 500,
        env = "SYNCBOX_INITIAL_RETRY_DELAY"
    )]
    pub initial_retry_delay: u64,

    #[arg(
        long,
        help = "Maximum retry delay in seconds",
        default_value_t = 30,
        env = "SYNCBOX_MAX_RETRY_DELAY"
    )]
    pub max_retry_delay: u64,

    // NOTE(review): a bool flag with `default_value_t = true` cannot be turned
    // off on the command line without `action = ArgAction::Set` (to allow
    // `--enable-retry-transport=false`); as written only the env var can
    // disable it. Left unchanged to preserve the current CLI surface.
    #[arg(
        long,
        help = "Enable automatic retry for transport operations",
        default_value_t = true,
        env = "SYNCBOX_ENABLE_RETRY_TRANSPORT"
    )]
    pub enable_retry_transport: bool,
}

// Sync backend selection; each variant is a clap subcommand whose fields can
// also be supplied via the named environment variables.
// (Plain `//` comments on purpose: `///` doc comments would be picked up by
// clap's derive and change the generated help text.)
#[derive(Clone, Debug, Parser)]
pub enum TransportType {
    // Plain FTP; TLS is opt-in via --use-tls.
    // NOTE(review): fields here are `ftp_`-prefixed while Sftp/S3 fields are
    // not — inconsistent flag names (--ftp-host vs --host); worth unifying.
    Ftp {
        #[arg(long, env = "FTP_HOST")]
        ftp_host: String,
        #[arg(long, env = "FTP_USER")]
        ftp_user: String,
        #[arg(long, env = "FTP_PASS")]
        ftp_pass: String,
        // Remote working directory; defaults to the server's current dir.
        #[arg(long, default_value = ".", env = "FTP_DIR")]
        ftp_dir: String,
        #[arg(long, default_value_t = false, env = "FTP_USE_TLS")]
        use_tls: bool,
    },
    // SFTP (SSH file transfer); password auth only as declared here.
    Sftp {
        #[arg(long, env = "SFTP_HOST")]
        host: String,
        #[arg(long, env = "SFTP_USER")]
        user: String,
        #[arg(long, env = "SFTP_PASS")]
        pass: String,
        #[arg(long, default_value = ".", env = "SFTP_DIR")]
        dir: String,
    },
    // Copy to a local destination directory.
    Local {
        #[arg(long, short)]
        destination: String,
    },
    // Amazon S3 (or compatible) bucket upload.
    S3 {
        #[arg(long, env = "S3_BUCKET")]
        bucket: String,
        #[arg(long, env = "S3_REGION")]
        region: String,
        #[arg(long, env = "S3_ACCESS_KEY")]
        access_key: String,
        #[arg(long, env = "S3_SECRET_KEY")]
        secret_key: String,
        // Passed through as the object storage class, e.g. STANDARD.
        #[arg(long, default_value = "STANDARD", env = "S3_STORAGE_CLASS")]
        storage_class: String,
        #[arg(long, default_value = ".", env = "S3_DIRECTORY")]
        directory: String,
    },
    // No-op transport: plan without performing any transfer.
    Dry,
}
3 changes: 3 additions & 0 deletions src/lib.rs
Original file line number Diff line number Diff line change
@@ -1,4 +1,7 @@
//! syncbox library root: declares the crate's public modules.
pub mod checksum_tree;
pub mod config;
pub mod progress;
pub mod reconciler;
pub mod sync_engine;
pub mod transport;
pub mod utils;
Loading
Loading