Added remote backup cleanup

+ Didn't find a great way to make the cleanup generic, so it's pretty much just a copy/paste for now
+ fmt + clippy
backup_error_fix
Joey Hines 2021-02-03 18:46:33 -06:00
parent dc005e460b
commit 101a3f649a
No known key found for this signature in database
GPG Key ID: 80F567B5C968F91B
6 changed files with 57 additions and 18 deletions

2
Cargo.lock generated
View File

@ -32,7 +32,7 @@ dependencies = [
[[package]]
name = "albatross"
version = "0.3.0"
version = "0.4.0"
dependencies = [
"anvil-region",
"chrono",

View File

@ -1,6 +1,6 @@
[package]
name = "albatross"
version = "0.3.0"
version = "0.4.0"
authors = ["Joey Hines <joey@ahines.net>"]
edition = "2018"

View File

@ -6,7 +6,7 @@ webhooks. Backups are compressed and stored as `tar.gz` archives.
## Help
```
albatross 0.3.0
albatross 0.4.0
Backup your Minecraft Server!
USAGE:

View File

@ -248,18 +248,22 @@ pub fn convert_backup_to_sp(
Ok(())
}
/// Get the time of the backup from a file name
pub fn get_time_from_file_name(file_name: &str) -> Result<NaiveDateTime> {
let time: Vec<&str> = file_name.split("_backup.tar.gz").collect();
Ok(chrono::NaiveDateTime::parse_from_str(
time[0],
"%d-%m-%y_%H.%M.%S",
)?)
}
/// Get the time of the backup from a directory entry
///
/// # Param
/// * `archive_entry`: archive entry
fn get_time_from_file_name(archive_entry: &DirEntry) -> Result<NaiveDateTime> {
fn get_time_from_dir_entry(archive_entry: &DirEntry) -> Result<NaiveDateTime> {
let file_name = archive_entry.file_name().to_str().unwrap().to_string();
let name: Vec<&str> = file_name.split("_backup.tar.gz").collect();
Ok(chrono::NaiveDateTime::parse_from_str(
name[0],
"%d-%m-%y_%H.%M.%S",
)?)
get_time_from_file_name(file_name.as_str())
}
/// Removes the old backups from the output directory
@ -283,8 +287,8 @@ fn remove_old_backups(output_dir: &PathBuf, keep: u64) -> Result<usize> {
if backups.len() > keep as usize {
backups.sort_by(|a, b| {
let a_time = get_time_from_file_name(a).unwrap();
let b_time = get_time_from_file_name(b).unwrap();
let a_time = get_time_from_dir_entry(a).unwrap();
let b_time = get_time_from_dir_entry(b).unwrap();
b_time.cmp(&a_time)
});

View File

@ -9,6 +9,7 @@ pub enum AlbatrossError {
ChunkParseError(crate::chunk_coordinate::ChunkCoordinateErr),
RegionParseError(crate::region::RegionParseError),
ChronoParseError(chrono::ParseError),
NoSSHAuth,
}
impl std::error::Error for AlbatrossError {}
@ -23,6 +24,7 @@ impl std::fmt::Display for AlbatrossError {
}
AlbatrossError::RegionParseError(e) => write!(f, "Unable to parse region name: {}", e),
AlbatrossError::ChronoParseError(e) => write!(f, "Unable to parse time: {}", e),
AlbatrossError::NoSSHAuth => write!(f, "No SSH auth methods provided in the config"),
}
}
}

View File

@ -1,28 +1,30 @@
use crate::backup::get_time_from_file_name;
use crate::config::RemoteBackupConfig;
use crate::error::Result;
use crate::error::{AlbatrossError, Result};
use ssh2::Session;
use std::net::TcpStream;
use std::path::PathBuf;
/// Open an SSH session
fn open_ssh_session(remote_config: &RemoteBackupConfig) -> Result<Session> {
let tcp = TcpStream::connect(&remote_config.sftp_server_addr).unwrap();
let mut sess = Session::new().unwrap();
let tcp = TcpStream::connect(&remote_config.sftp_server_addr)?;
let mut sess = Session::new()?;
sess.set_tcp_stream(tcp);
sess.handshake().unwrap();
if let Some(password) = &remote_config.password {
sess.userauth_password(&remote_config.username, password)
.unwrap();
sess.userauth_password(&remote_config.username, password)?;
} else if let Some(key) = &remote_config.private_key {
let public_key = remote_config.public_key.as_deref();
sess.userauth_pubkey_file(&remote_config.username, public_key, key, None)?;
} else {
panic!("No key provided")
return Err(AlbatrossError::NoSSHAuth);
}
Ok(sess)
}
/// Handle remote backup of a file
pub fn remote_backup(file: PathBuf, remote_config: &RemoteBackupConfig) -> Result<()> {
let sess = open_ssh_session(remote_config)?;
@ -36,5 +38,36 @@ pub fn remote_backup(file: PathBuf, remote_config: &RemoteBackupConfig) -> Resul
std::io::copy(&mut local_file, &mut remote_file)?;
let files = sftp.readdir(&remote_config.remote_dir)?;
let mut backups: Vec<PathBuf> = files
.into_iter()
.map(|(file, _)| file)
.filter(|file| {
if let Some(ext) = file.extension() {
ext == "gz"
} else {
false
}
})
.collect();
backups.sort_by(|file_a, file_b| {
let time_a =
get_time_from_file_name(file_a.file_name().unwrap().to_str().unwrap()).unwrap();
let time_b =
get_time_from_file_name(file_b.file_name().unwrap().to_str().unwrap()).unwrap();
time_b.cmp(&time_a)
});
if backups.len() > remote_config.backups_to_keep as usize {
for _ in 0..(backups.len() - remote_config.backups_to_keep as usize) {
if let Some(backup_path) = backups.pop() {
sftp.unlink(&*backup_path)?;
}
}
}
Ok(())
}