From 101a3f649a0d1bf85b63295485a6cd9ebf2d93b7 Mon Sep 17 00:00:00 2001
From: Joey Hines
Date: Wed, 3 Feb 2021 18:46:33 -0600
Subject: [PATCH] Added remote backup cleanup

+ Didn't find a great way to make the cleanup generic, just a copy/paste
  pretty much for now
+ fmt + clippy
---
 Cargo.lock           |  2 +-
 Cargo.toml           |  2 +-
 README.md            |  2 +-
 src/backup.rs        | 22 +++++++++++++---------
 src/error.rs         |  2 ++
 src/remote_backup.rs | 45 ++++++++++++++++++++++++++++++++++++++------
 6 files changed, 57 insertions(+), 18 deletions(-)

diff --git a/Cargo.lock b/Cargo.lock
index ee59b5e..dc47dcc 100644
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -32,7 +32,7 @@ dependencies = [
 
 [[package]]
 name = "albatross"
-version = "0.3.0"
+version = "0.4.0"
 dependencies = [
  "anvil-region",
  "chrono",
diff --git a/Cargo.toml b/Cargo.toml
index b784767..09e3b90 100644
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,6 +1,6 @@
 [package]
 name = "albatross"
-version = "0.3.0"
+version = "0.4.0"
 authors = ["Joey Hines "]
 edition = "2018"
 
diff --git a/README.md b/README.md
index 43c924f..0a128fa 100644
--- a/README.md
+++ b/README.md
@@ -6,7 +6,7 @@ webhooks. Backups are compressed and stored as `tar.gz` archives.
 
 ## Help
 ```
-albatross 0.3.0
+albatross 0.4.0
 Backup your Minecraft Server!
 
 USAGE:
diff --git a/src/backup.rs b/src/backup.rs
index ac168cb..321721e 100644
--- a/src/backup.rs
+++ b/src/backup.rs
@@ -248,18 +248,22 @@ pub fn convert_backup_to_sp(
     Ok(())
 }
 
-/// Get the time of the backup from a file name
+pub fn get_time_from_file_name(file_name: &str) -> Result<chrono::NaiveDateTime> {
+    let time: Vec<&str> = file_name.split("_backup.tar.gz").collect();
+    Ok(chrono::NaiveDateTime::parse_from_str(
+        time[0],
+        "%d-%m-%y_%H.%M.%S",
+    )?)
+}
+
+/// Get the time of the backup from a directory entry
 ///
 /// # Param
 /// * `archive_entry`: archive entry
-fn get_time_from_file_name(archive_entry: &DirEntry) -> Result<chrono::NaiveDateTime> {
+fn get_time_from_dir_entry(archive_entry: &DirEntry) -> Result<chrono::NaiveDateTime> {
     let file_name = archive_entry.file_name().to_str().unwrap().to_string();
-    let name: Vec<&str> = file_name.split("_backup.tar.gz").collect();
-    Ok(chrono::NaiveDateTime::parse_from_str(
-        name[0],
-        "%d-%m-%y_%H.%M.%S",
-    )?)
+    get_time_from_file_name(file_name.as_str())
 }
 
 /// Removes the old backups from the ouput directory
 ///
@@ -283,8 +287,8 @@ fn remove_old_backups(output_dir: &PathBuf, keep: u64) -> Result {
 
     if backups.len() > keep as usize {
         backups.sort_by(|a, b| {
-            let a_time = get_time_from_file_name(a).unwrap();
-            let b_time = get_time_from_file_name(b).unwrap();
+            let a_time = get_time_from_dir_entry(a).unwrap();
+            let b_time = get_time_from_dir_entry(b).unwrap();
             b_time.cmp(&a_time)
         });
 
diff --git a/src/error.rs b/src/error.rs
index 975e1b2..95687bc 100644
--- a/src/error.rs
+++ b/src/error.rs
@@ -9,6 +9,7 @@ pub enum AlbatrossError {
     ChunkParseError(crate::chunk_coordinate::ChunkCoordinateErr),
     RegionParseError(crate::region::RegionParseError),
     ChronoParseError(chrono::ParseError),
+    NoSSHAuth,
 }
 
 impl std::error::Error for AlbatrossError {}
@@ -23,6 +24,7 @@ impl std::fmt::Display for AlbatrossError {
             }
             AlbatrossError::RegionParseError(e) => write!(f, "Unable to parse region name: {}", e),
             AlbatrossError::ChronoParseError(e) => write!(f, "Unable to parse time: {}", e),
+            AlbatrossError::NoSSHAuth => write!(f, "No SSH auth methods provided in the config"),
         }
     }
 }
diff --git a/src/remote_backup.rs b/src/remote_backup.rs
index b16f16e..33c524f 100644
--- a/src/remote_backup.rs
+++ b/src/remote_backup.rs
@@ -1,28 +1,30 @@
+use crate::backup::get_time_from_file_name;
 use crate::config::RemoteBackupConfig;
-use crate::error::Result;
+use crate::error::{AlbatrossError, Result};
 use ssh2::Session;
 use std::net::TcpStream;
 use std::path::PathBuf;
 
+/// Open an SSH session
 fn open_ssh_session(remote_config: &RemoteBackupConfig) -> Result<Session> {
-    let tcp = TcpStream::connect(&remote_config.sftp_server_addr).unwrap();
-    let mut sess = Session::new().unwrap();
+    let tcp = TcpStream::connect(&remote_config.sftp_server_addr)?;
+    let mut sess = Session::new()?;
     sess.set_tcp_stream(tcp);
     sess.handshake().unwrap();
 
     if let Some(password) = &remote_config.password {
-        sess.userauth_password(&remote_config.username, password)
-            .unwrap();
+        sess.userauth_password(&remote_config.username, password)?;
     } else if let Some(key) = &remote_config.private_key {
         let public_key = remote_config.public_key.as_deref();
         sess.userauth_pubkey_file(&remote_config.username, public_key, key, None)?;
     } else {
-        panic!("No key provided")
+        return Err(AlbatrossError::NoSSHAuth);
     }
 
     Ok(sess)
 }
 
+/// Handle remote backup of a file
 pub fn remote_backup(file: PathBuf, remote_config: &RemoteBackupConfig) -> Result<()> {
     let sess = open_ssh_session(remote_config)?;
 
@@ -36,5 +38,36 @@ pub fn remote_backup(file: PathBuf, remote_config: &RemoteBackupConfig) -> Resul
 
     std::io::copy(&mut local_file, &mut remote_file)?;
 
+    let files = sftp.readdir(&remote_config.remote_dir)?;
+
+    let mut backups: Vec<PathBuf> = files
+        .into_iter()
+        .map(|(file, _)| file)
+        .filter(|file| {
+            if let Some(ext) = file.extension() {
+                ext == "gz"
+            } else {
+                false
+            }
+        })
+        .collect();
+
+    backups.sort_by(|file_a, file_b| {
+        let time_a =
+            get_time_from_file_name(file_a.file_name().unwrap().to_str().unwrap()).unwrap();
+        let time_b =
+            get_time_from_file_name(file_b.file_name().unwrap().to_str().unwrap()).unwrap();
+
+        time_b.cmp(&time_a)
+    });
+
+    if backups.len() > remote_config.backups_to_keep as usize {
+        for _ in 0..(backups.len() - remote_config.backups_to_keep as usize) {
+            if let Some(backup_path) = backups.pop() {
+                sftp.unlink(&*backup_path)?;
+            }
+        }
+    }
+
     Ok(())
 }
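
The commit message above notes that the old-backup cleanup could not easily be made generic, so the local path (src/backup.rs, DirEntry based) and the remote path (sftp, PathBuf based) each carry their own copy. One possible direction, not part of this patch, is to separate "which backups to drop" from "how they are deleted"; the sketch below only uses items this patch already exposes, and the helper name backups_to_remove is hypothetical:

use std::path::PathBuf;

use crate::backup::get_time_from_file_name;

/// Hypothetical helper: sort backup paths newest-first and return the
/// entries beyond `keep`, leaving the actual deletion (fs::remove_file
/// locally, Sftp::unlink remotely) to the caller.
fn backups_to_remove(mut backups: Vec<PathBuf>, keep: usize) -> Vec<PathBuf> {
    backups.sort_by(|a, b| {
        let time_a = get_time_from_file_name(a.file_name().unwrap().to_str().unwrap()).unwrap();
        let time_b = get_time_from_file_name(b.file_name().unwrap().to_str().unwrap()).unwrap();
        time_b.cmp(&time_a)
    });

    if backups.len() > keep {
        // Everything past the first `keep` (newest) entries is old.
        backups.split_off(keep)
    } else {
        Vec::new()
    }
}

The remote path would then just map its readdir results through this helper and unlink whatever comes back, and the local path could do the same with the paths of its directory entries.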