From d94186c7b81516181adbe2f263050ab6ec8fb774 Mon Sep 17 00:00:00 2001 From: Joey Hines Date: Tue, 2 Feb 2021 21:21:55 -0600 Subject: [PATCH 1/4] First pass on adding remote backups + Added remote_backup.rs to handle remote backup logic + Added `remote` section to the config for optional remote backup setup + Using ssh2 rust library for SFTP support + small tweaks --- Cargo.lock | 87 ++++++++++++++++++++++++++++++++++++++++++++ Cargo.toml | 1 + src/backup.rs | 56 ++++++++++++++++++---------- src/config.rs | 22 ++++++++++- src/main.rs | 1 + src/remote_backup.rs | 47 ++++++++++++++++++++++++ 6 files changed, 193 insertions(+), 21 deletions(-) create mode 100644 src/remote_backup.rs diff --git a/Cargo.lock b/Cargo.lock index 4aeecd7..ee59b5e 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -43,6 +43,7 @@ dependencies = [ "regex", "reqwest", "serde 1.0.117", + "ssh2", "structopt", "tar", ] @@ -183,6 +184,15 @@ dependencies = [ "vec_map", ] +[[package]] +name = "cloudabi" +version = "0.0.3" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f" +dependencies = [ + "bitflags", +] + [[package]] name = "config" version = "0.9.3" @@ -579,6 +589,32 @@ version = "0.2.71" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "9457b06509d27052635f90d6466700c65095fdf75409b3fbdd903e988b886f49" +[[package]] +name = "libssh2-sys" +version = "0.2.21" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "e0186af0d8f171ae6b9c4c90ec51898bad5d08a2d5e470903a50d9ad8959cbee" +dependencies = [ + "cc", + "libc", + "libz-sys", + "openssl-sys", + "pkg-config", + "vcpkg", +] + +[[package]] +name = "libz-sys" +version = "1.1.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "602113192b08db8f38796c4e85c39e960c145965140e918018bcde1952429655" +dependencies = [ + "cc", + "libc", + "pkg-config", + "vcpkg", +] + [[package]] name = "linked-hash-map" version = "0.3.0" @@ -595,6 +631,15 @@ version = "0.5.3" source = "registry+https://github.com/rust-lang/crates.io-index" checksum = "8dd5a6d5999d9907cda8ed67bbd137d3af8085216c2ac62de5be860bd41f304a" +[[package]] +name = "lock_api" +version = "0.3.4" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "c4da24a77a3d8a6d4862d95f72e6fdb9c09a643ecdb402d754004a557f2bec75" +dependencies = [ + "scopeguard", +] + [[package]] name = "log" version = "0.4.11" @@ -815,6 +860,30 @@ dependencies = [ "vcpkg", ] +[[package]] +name = "parking_lot" +version = "0.10.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d3a704eb390aafdc107b0e392f56a82b668e3a71366993b5340f5833fd62505e" +dependencies = [ + "lock_api", + "parking_lot_core", +] + +[[package]] +name = "parking_lot_core" +version = "0.7.2" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d58c7c768d4ba344e3e8d72518ac13e259d7c7ade24167003b8488e10b6740a3" +dependencies = [ + "cfg-if 0.1.10", + "cloudabi", + "libc", + "redox_syscall", + "smallvec", + "winapi 0.3.8", +] + [[package]] name = "percent-encoding" version = "2.1.0" @@ -1050,6 +1119,12 @@ dependencies = [ "winapi 0.3.8", ] +[[package]] +name = "scopeguard" +version = "1.1.0" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd" + [[package]] name = "security-framework" version = "0.4.4" @@ -1168,6 +1243,18 @@ dependencies = [ "winapi 0.3.8", ] 
+[[package]] +name = "ssh2" +version = "0.9.1" +source = "registry+https://github.com/rust-lang/crates.io-index" +checksum = "d876d4d57f6bbf2245d43f7ec53759461f801a446d3693704aa6d27b257844d7" +dependencies = [ + "bitflags", + "libc", + "libssh2-sys", + "parking_lot", +] + [[package]] name = "strsim" version = "0.8.0" diff --git a/Cargo.toml b/Cargo.toml index c04c8e2..b784767 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -18,3 +18,4 @@ tar = "0.4.28" reqwest = { version = "0.10", features = ["blocking", "json"] } discord-hooks-rs = { git = "https://github.com/joeyahines/discord-hooks-rs" } anvil-region = "0.4.0" +ssh2 = "0.9.1" diff --git a/src/backup.rs b/src/backup.rs index 1bc4a46..df325d4 100644 --- a/src/backup.rs +++ b/src/backup.rs @@ -2,6 +2,7 @@ use crate::backup; use crate::config::{AlbatrossConfig, WorldConfig, WorldType}; use crate::discord::send_webhook; use crate::region::Region; +use crate::remote_backup::remote_backup; use chrono::{NaiveDateTime, Utc}; use flate2::read::GzDecoder; use flate2::write::GzEncoder; @@ -321,8 +322,42 @@ pub fn do_backup(cfg: AlbatrossConfig, output: Option) -> Result<(), st create_dir_all(tmp_dir.clone()).unwrap(); - send_webhook("**Albatross is swooping in to backup your worlds!**", &cfg); let timer = Instant::now(); + backup_worlds(&cfg, server_base_dir, worlds, &mut tmp_dir)?; + + backup::compress_backup(&tmp_dir, &output_archive)?; + + remove_dir_all(&tmp_dir)?; + + let backups_removed = remove_old_backups(&cfg.backup.output_dir, cfg.backup.backups_to_keep)?; + + if backups_removed > 0 { + let msg = format!( + "Albatross mistook **{}** of your old backups for some french fries and ate them!! SKRAWWWW", + backups_removed + ); + send_webhook(msg.as_str(), &cfg); + } + + if let Some(remote_backup_cfg) = &cfg.remote { + remote_backup(output_archive, remote_backup_cfg); + } + + let secs = timer.elapsed().as_secs(); + send_webhook( + format!("**Full backup completed in {}s**! *SKREEEEEEEEEE*", secs).as_str(), + &cfg, + ); + Ok(()) +} + +fn backup_worlds( + cfg: &AlbatrossConfig, + server_base_dir: PathBuf, + worlds: Vec, + tmp_dir: &mut PathBuf, +) -> Result<(), std::io::Error> { + send_webhook("**Albatross is swooping in to backup your worlds!**", &cfg); for world in worlds { let mut world_dir = server_base_dir.clone(); let world_name = world.world_name.clone(); @@ -363,24 +398,5 @@ pub fn do_backup(cfg: AlbatrossConfig, output: Option) -> Result<(), st } } - backup::compress_backup(&tmp_dir, &output_archive)?; - - remove_dir_all(&tmp_dir)?; - - let backups_removed = remove_old_backups(&cfg.backup.output_dir, cfg.backup.backups_to_keep)?; - - if backups_removed > 0 { - let msg = format!( - "Albatross mistook **{}** of your old backups for some french fries and ate them!! SKRAWWWW", - backups_removed - ); - send_webhook(msg.as_str(), &cfg); - } - - let secs = timer.elapsed().as_secs(); - send_webhook( - format!("**Full backup completed in {}s**! 
*SKREEEEEEEEEE*", secs).as_str(), - &cfg, - ); Ok(()) } diff --git a/src/config.rs b/src/config.rs index fe078a0..17c638d 100644 --- a/src/config.rs +++ b/src/config.rs @@ -24,7 +24,7 @@ impl From for WorldType { } } -/// Config for individual WorldConfig +/// Config for individual world configuration #[derive(Debug, Deserialize, Clone)] pub struct WorldConfig { pub world_name: String, @@ -41,11 +41,31 @@ pub struct BackupConfig { pub discord_webhook: Option, } +/// Config for remote backups +#[derive(Debug, Deserialize, Clone)] +pub struct RemoteBackupConfig { + /// Remote server address + pub sftp_server_addr: String, + /// Remote output directory + pub remote_dir: PathBuf, + /// Remote server username + pub username: String, + /// Public key for key auth + pub public_key: Option, + /// Private key for key auth + pub private_key: Option, + /// Password if using password auth + pub password: Option, + /// Remote backups to keep + pub backups_to_keep: u64, +} + /// Configs #[derive(Debug, Deserialize, Clone)] pub struct AlbatrossConfig { pub backup: BackupConfig, pub world_config: Option>, + pub remote: Option, } impl AlbatrossConfig { diff --git a/src/main.rs b/src/main.rs index c3d5a0e..3f85c76 100644 --- a/src/main.rs +++ b/src/main.rs @@ -6,6 +6,7 @@ mod chunk_coordinate; mod config; mod discord; mod region; +mod remote_backup; mod restore; use crate::backup::{convert_backup_to_sp, do_backup}; diff --git a/src/remote_backup.rs b/src/remote_backup.rs new file mode 100644 index 0000000..43e9f91 --- /dev/null +++ b/src/remote_backup.rs @@ -0,0 +1,47 @@ +use crate::config::RemoteBackupConfig; +use ssh2::Session; +use std::net::TcpStream; +use std::path::PathBuf; + +fn open_ssh_session(remote_config: &RemoteBackupConfig) -> Session { + let tcp = TcpStream::connect(&remote_config.sftp_server_addr).unwrap(); + let mut sess = Session::new().unwrap(); + sess.set_tcp_stream(tcp); + sess.handshake().unwrap(); + + if let Some(password) = &remote_config.password { + sess.userauth_password(&remote_config.username, password) + .unwrap(); + } else if let Some(key) = &remote_config.private_key { + let public_key = + remote_config + .public_key + .as_ref() + .map(|pub_key| pub_key.as_path().clone()); + sess.userauth_pubkey_file( + &remote_config.username, + public_key, + key, + None, + ) + .unwrap(); + } else { + panic!("No key provided") + } + + sess +} + +pub fn remote_backup(file: PathBuf, remote_config: &RemoteBackupConfig) { + let sess = open_ssh_session(remote_config); + + let remote_path = remote_config.remote_dir.join(file.file_name().unwrap()); + + let mut local_file = std::fs::File::open(&file).unwrap(); + + let sftp = sess.sftp().unwrap(); + + let mut remote_file = sftp.create(&remote_path).unwrap(); + + std::io::copy(&mut local_file, &mut remote_file).unwrap(); +} From dc005e460bca315517e169239b8d39e61dc78551 Mon Sep 17 00:00:00 2001 From: Joey Hines Date: Wed, 3 Feb 2021 17:02:01 -0600 Subject: [PATCH 2/4] Added better error handling + fmt + clippy --- src/backup.rs | 96 +++++++++++++++++++++++++++----------------- src/error.rs | 58 ++++++++++++++++++++++++++ src/main.rs | 1 + src/remote_backup.rs | 33 ++++++--------- src/restore.rs | 16 +++----- 5 files changed, 137 insertions(+), 67 deletions(-) create mode 100644 src/error.rs diff --git a/src/backup.rs b/src/backup.rs index df325d4..ac168cb 100644 --- a/src/backup.rs +++ b/src/backup.rs @@ -1,6 +1,7 @@ use crate::backup; use crate::config::{AlbatrossConfig, WorldConfig, WorldType}; use crate::discord::send_webhook; +use 
crate::error::Result; use crate::region::Region; use crate::remote_backup::remote_backup; use chrono::{NaiveDateTime, Utc}; @@ -25,11 +26,11 @@ pub fn backup_file( file_name: &str, mut world_path: PathBuf, mut backup_path: PathBuf, -) -> Result { +) -> Result { world_path.push(file_name); backup_path.push(file_name); - copy(world_path, backup_path) + Ok(copy(world_path, backup_path)?) } /// Backup a directory @@ -38,11 +39,7 @@ pub fn backup_file( /// * `dir_name` - directory name /// * `world_path` - path to the world folder /// * `backup_path` - path to the backup folder -pub fn backup_dir( - dir_name: &str, - world_path: &PathBuf, - backup_path: &PathBuf, -) -> Result { +pub fn backup_dir(dir_name: &str, world_path: &PathBuf, backup_path: &PathBuf) -> Result { let mut src_dir = world_path.clone(); src_dir.push(dir_name); let mut backup_dir = backup_path.clone(); @@ -74,7 +71,7 @@ pub fn backup_region( save_radius: u64, world_path: &PathBuf, backup_path: &PathBuf, -) -> Result { +) -> Result { let mut count: u64 = 0; let mut src_dir = world_path.clone(); src_dir.push(dir_name); @@ -112,7 +109,7 @@ pub fn backup_world( world_path: PathBuf, mut backup_path: PathBuf, world_config: &WorldConfig, -) -> Result { +) -> Result { let region_count; backup_path.push(&world_config.world_name); create_dir(backup_path.as_path())?; @@ -137,7 +134,7 @@ pub fn backup_overworld( world_path: PathBuf, backup_path: PathBuf, world_config: &WorldConfig, -) -> Result<(u64, u64), std::io::Error> { +) -> Result<(u64, u64)> { backup_dir("data", &world_path, &backup_path)?; backup_dir("stats", &world_path, &backup_path)?; @@ -162,7 +159,7 @@ pub fn backup_nether( world_path: PathBuf, backup_path: PathBuf, world_config: &WorldConfig, -) -> Result { +) -> Result { let mut nether_path = world_path; nether_path.push("DIM-1"); @@ -179,7 +176,7 @@ pub fn backup_end( world_path: PathBuf, backup_path: PathBuf, world_config: &WorldConfig, -) -> Result { +) -> Result { let mut end_path = world_path; end_path.push("DIM1"); @@ -191,7 +188,7 @@ pub fn backup_end( /// # Param /// * `tmp_dir`: tmp directory with the backed up files /// * `output_file`: output archive -pub fn compress_backup(tmp_dir: &PathBuf, output_file: &PathBuf) -> Result<(), std::io::Error> { +pub fn compress_backup(tmp_dir: &PathBuf, output_file: &PathBuf) -> Result<()> { let archive = File::create(output_file)?; let enc = GzEncoder::new(archive, Compression::default()); let mut tar_builder = tar::Builder::new(enc); @@ -199,7 +196,7 @@ pub fn compress_backup(tmp_dir: &PathBuf, output_file: &PathBuf) -> Result<(), s Ok(()) } -pub fn uncompress_backup(backup: &PathBuf) -> Result { +pub fn uncompress_backup(backup: &PathBuf) -> Result { let backup_file = File::open(backup)?; let dec = GzDecoder::new(backup_file); let mut extract = Archive::new(dec); @@ -219,7 +216,7 @@ pub fn convert_backup_to_sp( config: &AlbatrossConfig, backup: &PathBuf, output: &PathBuf, -) -> Result<(), std::io::Error> { +) -> Result<()> { let extract_path = uncompress_backup(backup)?; if let Some(worlds) = &config.world_config { @@ -255,13 +252,14 @@ pub fn convert_backup_to_sp( /// /// # Param /// * `archive_entry`: archive entry -fn get_time_from_file_name( - archive_entry: &DirEntry, -) -> Result, std::io::Error> { +fn get_time_from_file_name(archive_entry: &DirEntry) -> Result { let file_name = archive_entry.file_name().to_str().unwrap().to_string(); let name: Vec<&str> = file_name.split("_backup.tar.gz").collect(); - Ok(chrono::NaiveDateTime::parse_from_str(name[0], 
"%d-%m-%y_%H.%M.%S").ok()) + Ok(chrono::NaiveDateTime::parse_from_str( + name[0], + "%d-%m-%y_%H.%M.%S", + )?) } /// Removes the old backups from the ouput directory @@ -269,7 +267,7 @@ fn get_time_from_file_name( /// # Params /// * `output_dir` - output directory containing /// * `keep` - number of backups to keep -fn remove_old_backups(output_dir: &PathBuf, keep: u64) -> Result { +fn remove_old_backups(output_dir: &PathBuf, keep: u64) -> Result { let mut backups = vec![]; let mut num_of_removed_backups: usize = 0; @@ -285,8 +283,8 @@ fn remove_old_backups(output_dir: &PathBuf, keep: u64) -> Result keep as usize { backups.sort_by(|a, b| { - let a_time = get_time_from_file_name(a).unwrap().unwrap(); - let b_time = get_time_from_file_name(b).unwrap().unwrap(); + let a_time = get_time_from_file_name(a).unwrap(); + let b_time = get_time_from_file_name(b).unwrap(); b_time.cmp(&a_time) }); @@ -306,7 +304,7 @@ fn remove_old_backups(output_dir: &PathBuf, keep: u64) -> Result) -> Result<(), std::io::Error> { +pub fn do_backup(cfg: AlbatrossConfig, output: Option) -> Result<()> { let server_base_dir = cfg.backup.minecraft_dir.clone(); let worlds = cfg.world_config.clone().expect("No worlds configured"); let time_str = Utc::now().format("%d-%m-%y_%H.%M.%S").to_string(); @@ -320,27 +318,52 @@ pub fn do_backup(cfg: AlbatrossConfig, output: Option) -> Result<(), st tmp_dir.push("tmp"); remove_dir_all(&tmp_dir).ok(); - create_dir_all(tmp_dir.clone()).unwrap(); + create_dir_all(tmp_dir.clone())?; let timer = Instant::now(); - backup_worlds(&cfg, server_base_dir, worlds, &mut tmp_dir)?; - backup::compress_backup(&tmp_dir, &output_archive)?; + send_webhook("**Albatross is swooping in to backup your worlds!**", &cfg); + + backup_worlds(&cfg, server_base_dir, worlds, &mut tmp_dir).map_err(|e| { + send_webhook("Failed to copy worlds to backup location", &cfg); + println!("Failed to copy worlds: {}", e); + e + })?; + + backup::compress_backup(&tmp_dir, &output_archive).map_err(|e| { + send_webhook("Failed to compress backup", &cfg); + println!("Failed to compress backup: {}", e); + e + })?; remove_dir_all(&tmp_dir)?; - let backups_removed = remove_old_backups(&cfg.backup.output_dir, cfg.backup.backups_to_keep)?; - - if backups_removed > 0 { - let msg = format!( - "Albatross mistook **{}** of your old backups for some french fries and ate them!! SKRAWWWW", - backups_removed - ); - send_webhook(msg.as_str(), &cfg); + match remove_old_backups(&cfg.backup.output_dir, cfg.backup.backups_to_keep) { + Ok(backups_removed) => { + if backups_removed > 0 { + let msg = format!( + "Albatross mistook **{}** of your old backups for some french fries and ate them!! 
SKRAWWWW", + backups_removed + ); + send_webhook(msg.as_str(), &cfg); + } + } + Err(e) => { + send_webhook("Failed to remove old backups!", &cfg); + println!("Failed to remove old backups: {}", e) + } } if let Some(remote_backup_cfg) = &cfg.remote { - remote_backup(output_archive, remote_backup_cfg); + match remote_backup(output_archive, remote_backup_cfg) { + Ok(_) => { + send_webhook("Remote backup completed!", &cfg); + } + Err(e) => { + send_webhook("Remote backup failed!", &cfg); + println!("Remote backup failed with error: {}", e); + } + } } let secs = timer.elapsed().as_secs(); @@ -356,8 +379,7 @@ fn backup_worlds( server_base_dir: PathBuf, worlds: Vec, tmp_dir: &mut PathBuf, -) -> Result<(), std::io::Error> { - send_webhook("**Albatross is swooping in to backup your worlds!**", &cfg); +) -> Result<()> { for world in worlds { let mut world_dir = server_base_dir.clone(); let world_name = world.world_name.clone(); diff --git a/src/error.rs b/src/error.rs new file mode 100644 index 0000000..975e1b2 --- /dev/null +++ b/src/error.rs @@ -0,0 +1,58 @@ +use crate::region::RegionParseError; + +pub type Result = std::result::Result; + +#[derive(Debug)] +pub enum AlbatrossError { + FileError(std::io::Error), + SSHError(ssh2::Error), + ChunkParseError(crate::chunk_coordinate::ChunkCoordinateErr), + RegionParseError(crate::region::RegionParseError), + ChronoParseError(chrono::ParseError), +} + +impl std::error::Error for AlbatrossError {} + +impl std::fmt::Display for AlbatrossError { + fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { + match self { + AlbatrossError::FileError(e) => write!(f, "File I/O error: {}", e), + AlbatrossError::SSHError(e) => write!(f, "SSH error: {}", e), + AlbatrossError::ChunkParseError(e) => { + write!(f, "Unable to parse chunk coordinate: {}", e) + } + AlbatrossError::RegionParseError(e) => write!(f, "Unable to parse region name: {}", e), + AlbatrossError::ChronoParseError(e) => write!(f, "Unable to parse time: {}", e), + } + } +} + +impl From for AlbatrossError { + fn from(e: std::io::Error) -> Self { + AlbatrossError::FileError(e) + } +} + +impl From for AlbatrossError { + fn from(e: ssh2::Error) -> Self { + AlbatrossError::SSHError(e) + } +} + +impl From for AlbatrossError { + fn from(e: crate::chunk_coordinate::ChunkCoordinateErr) -> Self { + AlbatrossError::ChunkParseError(e) + } +} + +impl From for AlbatrossError { + fn from(e: RegionParseError) -> Self { + AlbatrossError::RegionParseError(e) + } +} + +impl From for AlbatrossError { + fn from(e: chrono::ParseError) -> Self { + AlbatrossError::ChronoParseError(e) + } +} diff --git a/src/main.rs b/src/main.rs index 3f85c76..a8303ec 100644 --- a/src/main.rs +++ b/src/main.rs @@ -5,6 +5,7 @@ mod backup; mod chunk_coordinate; mod config; mod discord; +mod error; mod region; mod remote_backup; mod restore; diff --git a/src/remote_backup.rs b/src/remote_backup.rs index 43e9f91..b16f16e 100644 --- a/src/remote_backup.rs +++ b/src/remote_backup.rs @@ -1,9 +1,10 @@ use crate::config::RemoteBackupConfig; +use crate::error::Result; use ssh2::Session; use std::net::TcpStream; use std::path::PathBuf; -fn open_ssh_session(remote_config: &RemoteBackupConfig) -> Session { +fn open_ssh_session(remote_config: &RemoteBackupConfig) -> Result { let tcp = TcpStream::connect(&remote_config.sftp_server_addr).unwrap(); let mut sess = Session::new().unwrap(); sess.set_tcp_stream(tcp); @@ -13,35 +14,27 @@ fn open_ssh_session(remote_config: &RemoteBackupConfig) -> Session { 
sess.userauth_password(&remote_config.username, password) .unwrap(); } else if let Some(key) = &remote_config.private_key { - let public_key = - remote_config - .public_key - .as_ref() - .map(|pub_key| pub_key.as_path().clone()); - sess.userauth_pubkey_file( - &remote_config.username, - public_key, - key, - None, - ) - .unwrap(); + let public_key = remote_config.public_key.as_deref(); + sess.userauth_pubkey_file(&remote_config.username, public_key, key, None)?; } else { panic!("No key provided") } - sess + Ok(sess) } -pub fn remote_backup(file: PathBuf, remote_config: &RemoteBackupConfig) { - let sess = open_ssh_session(remote_config); +pub fn remote_backup(file: PathBuf, remote_config: &RemoteBackupConfig) -> Result<()> { + let sess = open_ssh_session(remote_config)?; let remote_path = remote_config.remote_dir.join(file.file_name().unwrap()); - let mut local_file = std::fs::File::open(&file).unwrap(); + let mut local_file = std::fs::File::open(&file)?; - let sftp = sess.sftp().unwrap(); + let sftp = sess.sftp()?; - let mut remote_file = sftp.create(&remote_path).unwrap(); + let mut remote_file = sftp.create(&remote_path)?; - std::io::copy(&mut local_file, &mut remote_file).unwrap(); + std::io::copy(&mut local_file, &mut remote_file)?; + + Ok(()) } diff --git a/src/restore.rs b/src/restore.rs index 28d4581..590e470 100644 --- a/src/restore.rs +++ b/src/restore.rs @@ -1,7 +1,7 @@ use crate::backup::uncompress_backup; use crate::chunk_coordinate::ChunkCoordinate; +use crate::error::Result; use anvil_region::AnvilChunkProvider; -use std::error; use std::fs::remove_dir_all; use std::path::PathBuf; @@ -15,11 +15,7 @@ struct RestoreAccess { impl RestoreAccess { /// Create new RestoreAccess - pub fn new( - world_name: &str, - src_path: &PathBuf, - dest_path: &PathBuf, - ) -> Result { + pub fn new(world_name: &str, src_path: &PathBuf, dest_path: &PathBuf) -> Result { let src_path = uncompress_backup(src_path)?.join(world_name).join("region"); let dest_path = dest_path.join(world_name).join("region"); @@ -41,8 +37,8 @@ impl RestoreAccess { } /// Cleanup process - pub fn cleanup(self) -> Result<(), std::io::Error> { - remove_dir_all("tmp") + pub fn cleanup(self) -> Result<()> { + Ok(remove_dir_all("tmp")?) 
} } @@ -53,7 +49,7 @@ pub fn restore_range_from_backup( upper: ChunkCoordinate, backup_path: &PathBuf, minecraft_dir: &PathBuf, -) -> Result> { +) -> Result { let chunk_access = RestoreAccess::new(world_name, backup_path, minecraft_dir)?; let mut count = 0; @@ -74,7 +70,7 @@ pub fn restore_chunk_from_backup( chunk: ChunkCoordinate, backup_path: &PathBuf, minecraft_dir: &PathBuf, -) -> Result<(), Box> { +) -> Result<()> { let chunk_access = RestoreAccess::new(world_name, backup_path, minecraft_dir)?; chunk_access.copy_chunk(chunk.x, chunk.z); From 101a3f649a0d1bf85b63295485a6cd9ebf2d93b7 Mon Sep 17 00:00:00 2001 From: Joey Hines Date: Wed, 3 Feb 2021 18:46:33 -0600 Subject: [PATCH 3/4] Added remote backup cleanup + Didn't find a great way to make the cleanup generic, a just copy/paste pretty much for now + fmt + clippy --- Cargo.lock | 2 +- Cargo.toml | 2 +- README.md | 2 +- src/backup.rs | 22 +++++++++++++--------- src/error.rs | 2 ++ src/remote_backup.rs | 45 ++++++++++++++++++++++++++++++++++++++------ 6 files changed, 57 insertions(+), 18 deletions(-) diff --git a/Cargo.lock b/Cargo.lock index ee59b5e..dc47dcc 100644 --- a/Cargo.lock +++ b/Cargo.lock @@ -32,7 +32,7 @@ dependencies = [ [[package]] name = "albatross" -version = "0.3.0" +version = "0.4.0" dependencies = [ "anvil-region", "chrono", diff --git a/Cargo.toml b/Cargo.toml index b784767..09e3b90 100644 --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "albatross" -version = "0.3.0" +version = "0.4.0" authors = ["Joey Hines "] edition = "2018" diff --git a/README.md b/README.md index 43c924f..0a128fa 100644 --- a/README.md +++ b/README.md @@ -6,7 +6,7 @@ webhooks. Backups are compressed and stored as `tar.gz` archives. ## Help ``` -albatross 0.3.0 +albatross 0.4.0 Backup your Minecraft Server! USAGE: diff --git a/src/backup.rs b/src/backup.rs index ac168cb..321721e 100644 --- a/src/backup.rs +++ b/src/backup.rs @@ -248,18 +248,22 @@ pub fn convert_backup_to_sp( Ok(()) } -/// Get the time of the backup from a file name +pub fn get_time_from_file_name(file_name: &str) -> Result { + let time: Vec<&str> = file_name.split("_backup.tar.gz").collect(); + Ok(chrono::NaiveDateTime::parse_from_str( + time[0], + "%d-%m-%y_%H.%M.%S", + )?) +} + +/// Get the time of the backup from a directory entry /// /// # Param /// * `archive_entry`: archive entry -fn get_time_from_file_name(archive_entry: &DirEntry) -> Result { +fn get_time_from_dir_entry(archive_entry: &DirEntry) -> Result { let file_name = archive_entry.file_name().to_str().unwrap().to_string(); - let name: Vec<&str> = file_name.split("_backup.tar.gz").collect(); - Ok(chrono::NaiveDateTime::parse_from_str( - name[0], - "%d-%m-%y_%H.%M.%S", - )?) 
+ get_time_from_file_name(file_name.as_str()) } /// Removes the old backups from the ouput directory @@ -283,8 +287,8 @@ fn remove_old_backups(output_dir: &PathBuf, keep: u64) -> Result { if backups.len() > keep as usize { backups.sort_by(|a, b| { - let a_time = get_time_from_file_name(a).unwrap(); - let b_time = get_time_from_file_name(b).unwrap(); + let a_time = get_time_from_dir_entry(a).unwrap(); + let b_time = get_time_from_dir_entry(b).unwrap(); b_time.cmp(&a_time) }); diff --git a/src/error.rs b/src/error.rs index 975e1b2..95687bc 100644 --- a/src/error.rs +++ b/src/error.rs @@ -9,6 +9,7 @@ pub enum AlbatrossError { ChunkParseError(crate::chunk_coordinate::ChunkCoordinateErr), RegionParseError(crate::region::RegionParseError), ChronoParseError(chrono::ParseError), + NoSSHAuth, } impl std::error::Error for AlbatrossError {} @@ -23,6 +24,7 @@ impl std::fmt::Display for AlbatrossError { } AlbatrossError::RegionParseError(e) => write!(f, "Unable to parse region name: {}", e), AlbatrossError::ChronoParseError(e) => write!(f, "Unable to parse time: {}", e), + AlbatrossError::NoSSHAuth => write!(f, "No SSH auth methods provided in the config"), } } } diff --git a/src/remote_backup.rs b/src/remote_backup.rs index b16f16e..33c524f 100644 --- a/src/remote_backup.rs +++ b/src/remote_backup.rs @@ -1,28 +1,30 @@ +use crate::backup::get_time_from_file_name; use crate::config::RemoteBackupConfig; -use crate::error::Result; +use crate::error::{AlbatrossError, Result}; use ssh2::Session; use std::net::TcpStream; use std::path::PathBuf; +/// Open an SSH session fn open_ssh_session(remote_config: &RemoteBackupConfig) -> Result { - let tcp = TcpStream::connect(&remote_config.sftp_server_addr).unwrap(); - let mut sess = Session::new().unwrap(); + let tcp = TcpStream::connect(&remote_config.sftp_server_addr)?; + let mut sess = Session::new()?; sess.set_tcp_stream(tcp); sess.handshake().unwrap(); if let Some(password) = &remote_config.password { - sess.userauth_password(&remote_config.username, password) - .unwrap(); + sess.userauth_password(&remote_config.username, password)?; } else if let Some(key) = &remote_config.private_key { let public_key = remote_config.public_key.as_deref(); sess.userauth_pubkey_file(&remote_config.username, public_key, key, None)?; } else { - panic!("No key provided") + return Err(AlbatrossError::NoSSHAuth); } Ok(sess) } +/// Handle remote backup of a file pub fn remote_backup(file: PathBuf, remote_config: &RemoteBackupConfig) -> Result<()> { let sess = open_ssh_session(remote_config)?; @@ -36,5 +38,36 @@ pub fn remote_backup(file: PathBuf, remote_config: &RemoteBackupConfig) -> Resul std::io::copy(&mut local_file, &mut remote_file)?; + let files = sftp.readdir(&remote_config.remote_dir)?; + + let mut backups: Vec = files + .into_iter() + .map(|(file, _)| file) + .filter(|file| { + if let Some(ext) = file.extension() { + ext == "gz" + } else { + false + } + }) + .collect(); + + backups.sort_by(|file_a, file_b| { + let time_a = + get_time_from_file_name(file_a.file_name().unwrap().to_str().unwrap()).unwrap(); + let time_b = + get_time_from_file_name(file_b.file_name().unwrap().to_str().unwrap()).unwrap(); + + time_b.cmp(&time_a) + }); + + if backups.len() > remote_config.backups_to_keep as usize { + for _ in 0..(backups.len() - remote_config.backups_to_keep as usize) { + if let Some(backup_path) = backups.pop() { + sftp.unlink(&*backup_path)?; + } + } + } + Ok(()) } From 9c7f9906adcdfeadf6bff5e85cb3f8a14bf60595 Mon Sep 17 00:00:00 2001 From: Joey Hines Date: Thu, 4 Feb 2021 
16:21:31 -0600 Subject: [PATCH 4/4] Added remote section to example config in README.md --- README.md | 33 ++++++++++++++++++++++++++------- 1 file changed, 26 insertions(+), 7 deletions(-) diff --git a/README.md b/README.md index 5641f08..07ef4bc 100644 --- a/README.md +++ b/README.md @@ -1,8 +1,10 @@ # Albatross Back up what you care about in your Minecraft worlds. -Albatross backs up player files and region files within a certain configurable radius. It can also send Discord -webhooks. Backups are compressed and stored as `tar.gz` archives. +Albatross backs up player files and region files within a certain configurable radius. It can also send Discord +webhooks. Backups are compressed and stored as `tar.gz` archives. + +Backups can also be transferred to a remote server using SFTP. ## Help ``` @@ -40,14 +42,15 @@ Exporting a backup to a single player world: Restoring a single chunk (from -2,-2 to 2,2): -`albatorss -c test.toml restore world backups/04-11-20_01.51.27_backup.tar.gz sp.tar.gz (0,0)` +`albatorss -c test.toml restore world backups/04-11-20_01.51.27_backup.tar.gz sp.tar.gz` (0,0) Restoring a range of chunks (from -2,-2 to 2,2): -`albatorss -c test.toml restore world backups/04-11-20_01.51.27_backup.tar.gz sp.tar.gz (-2,-2) -u (2,2)` +`albatorss -c test.toml restore world backups/04-11-20_01.51.27_backup.tar.gz sp.tar.gz` (-2,-2) -u (2,2) ## Config ```toml +# Local Backup Config [backup] # Minecraft sever directory minecraft_dir = "/home/mc/server" @@ -55,13 +58,29 @@ minecraft_dir = "/home/mc/server" output_dir = "/home/mc/backups" # Number of backups to keep backups_to_keep = 10 -# Discord Webhook +# Optional Discord webhook discord_webhook = "https://discordapp.com/api/webhooks/" +# Optional remote backup config +[remote] +# SFTP server host:port +sftp_server_addr = "localhost:22" +# Remote directory +remote_dir = "/home/backup/" +# Remote user +username = "user" +# Password Auth +password = "cooluser123" +# Key Auth +#public_key = /home/user/.ssh/id_rsa.pub" +#private_key = /home/user/.ssh/id_rsa" +# Backups to keep on the remote host +backups_to_keep = 3 + # World config options [[world_config]] -# world name +# World name world_name = "world" -# world save radius (in blocks) +# World save radius (in blocks) save_radius = 8000 ``` \ No newline at end of file
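For key-based authentication, the `[remote]` section can omit `password` and supply a key pair instead; `open_ssh_session` tries `password` first and only falls back to `private_key`, with `public_key` being optional. A minimal sketch of that variant (host and key paths are illustrative):

```toml
[remote]
# SFTP server host:port (illustrative)
sftp_server_addr = "backup.example.com:22"
# Remote directory
remote_dir = "/home/backup/"
# Remote user
username = "user"
# Key auth: private_key is required, public_key may be omitted
public_key = "/home/user/.ssh/id_rsa.pub"
private_key = "/home/user/.ssh/id_rsa"
# Remote backups to keep after each upload
backups_to_keep = 3
```

Because the password branch is checked first, leave `password` unset when key authentication is intended.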