Merge pull request 'Remote Backups' (#7) from sftp into master

Reviewed-on: https://git.etztech.xyz/ZeroHD/Albatross/pulls/7
backup_error_fix
Joey Hines 2021-02-04 23:35:55 +01:00
commit 678dcb583e
9 changed files with 367 additions and 67 deletions

Cargo.lock generated

@@ -32,7 +32,7 @@ dependencies = [
 [[package]]
 name = "albatross"
-version = "0.3.0"
+version = "0.4.0"
 dependencies = [
  "anvil-region",
  "chrono",
@@ -43,6 +43,7 @@ dependencies = [
  "regex",
  "reqwest",
  "serde 1.0.117",
+ "ssh2",
  "structopt",
  "tar",
 ]
@@ -183,6 +184,15 @@ dependencies = [
  "vec_map",
 ]
 
+[[package]]
+name = "cloudabi"
+version = "0.0.3"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "ddfc5b9aa5d4507acaf872de71051dfd0e309860e88966e1051e462a077aac4f"
+dependencies = [
+ "bitflags",
+]
+
 [[package]]
 name = "config"
 version = "0.9.3"
@@ -579,6 +589,32 @@ version = "0.2.71"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "9457b06509d27052635f90d6466700c65095fdf75409b3fbdd903e988b886f49"
 
+[[package]]
+name = "libssh2-sys"
+version = "0.2.21"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "e0186af0d8f171ae6b9c4c90ec51898bad5d08a2d5e470903a50d9ad8959cbee"
+dependencies = [
+ "cc",
+ "libc",
+ "libz-sys",
+ "openssl-sys",
+ "pkg-config",
+ "vcpkg",
+]
+
+[[package]]
+name = "libz-sys"
+version = "1.1.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "602113192b08db8f38796c4e85c39e960c145965140e918018bcde1952429655"
+dependencies = [
+ "cc",
+ "libc",
+ "pkg-config",
+ "vcpkg",
+]
+
 [[package]]
 name = "linked-hash-map"
 version = "0.3.0"
@@ -595,6 +631,15 @@ version = "0.5.3"
 source = "registry+https://github.com/rust-lang/crates.io-index"
 checksum = "8dd5a6d5999d9907cda8ed67bbd137d3af8085216c2ac62de5be860bd41f304a"
 
+[[package]]
+name = "lock_api"
+version = "0.3.4"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "c4da24a77a3d8a6d4862d95f72e6fdb9c09a643ecdb402d754004a557f2bec75"
+dependencies = [
+ "scopeguard",
+]
+
 [[package]]
 name = "log"
 version = "0.4.11"
@@ -815,6 +860,30 @@ dependencies = [
  "vcpkg",
 ]
 
+[[package]]
+name = "parking_lot"
+version = "0.10.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d3a704eb390aafdc107b0e392f56a82b668e3a71366993b5340f5833fd62505e"
+dependencies = [
+ "lock_api",
+ "parking_lot_core",
+]
+
+[[package]]
+name = "parking_lot_core"
+version = "0.7.2"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d58c7c768d4ba344e3e8d72518ac13e259d7c7ade24167003b8488e10b6740a3"
+dependencies = [
+ "cfg-if 0.1.10",
+ "cloudabi",
+ "libc",
+ "redox_syscall",
+ "smallvec",
+ "winapi 0.3.8",
+]
+
 [[package]]
 name = "percent-encoding"
 version = "2.1.0"
@@ -1050,6 +1119,12 @@ dependencies = [
  "winapi 0.3.8",
 ]
 
+[[package]]
+name = "scopeguard"
+version = "1.1.0"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d29ab0c6d3fc0ee92fe66e2d99f700eab17a8d57d1c1d3b748380fb20baa78cd"
+
 [[package]]
 name = "security-framework"
 version = "0.4.4"
@@ -1168,6 +1243,18 @@ dependencies = [
  "winapi 0.3.8",
 ]
 
+[[package]]
+name = "ssh2"
+version = "0.9.1"
+source = "registry+https://github.com/rust-lang/crates.io-index"
+checksum = "d876d4d57f6bbf2245d43f7ec53759461f801a446d3693704aa6d27b257844d7"
+dependencies = [
+ "bitflags",
+ "libc",
+ "libssh2-sys",
+ "parking_lot",
+]
+
 [[package]]
 name = "strsim"
 version = "0.8.0"

Cargo.toml

@@ -1,6 +1,6 @@
 [package]
 name = "albatross"
-version = "0.3.0"
+version = "0.4.0"
 authors = ["Joey Hines <joey@ahines.net>"]
 edition = "2018"
@@ -18,3 +18,4 @@ tar = "0.4.28"
 reqwest = { version = "0.10", features = ["blocking", "json"] }
 discord-hooks-rs = { git = "https://github.com/joeyahines/discord-hooks-rs" }
 anvil-region = "0.4.0"
+ssh2 = "0.9.1"

README.md

@@ -4,9 +4,11 @@ Back up what you care about in your Minecraft worlds.
 Albatross backs up player files and region files within a certain configurable radius. It can also send Discord
 webhooks. Backups are compressed and stored as `tar.gz` archives.
+
+Backups can also be transferred to a remote server using SFTP.
 
 ## Help
 ```
-albatross 0.3.0
+albatross 0.4.0
 Backup your Minecraft Server!
 
 USAGE:
@@ -40,14 +42,15 @@ Exporting a backup to a single player world:
 Restoring a single chunk (from -2,-2 to 2,2):
 
-`albatorss -c test.toml restore world backups/04-11-20_01.51.27_backup.tar.gz sp.tar.gz (0,0)`
+`albatorss -c test.toml restore world backups/04-11-20_01.51.27_backup.tar.gz sp.tar.gz` (0,0)
 
 Restoring a range of chunks (from -2,-2 to 2,2):
 
-`albatorss -c test.toml restore world backups/04-11-20_01.51.27_backup.tar.gz sp.tar.gz (-2,-2) -u (2,2)`
+`albatorss -c test.toml restore world backups/04-11-20_01.51.27_backup.tar.gz sp.tar.gz` (-2,-2) -u (2,2)
 
 ## Config
 ```toml
+# Local Backup Config
 [backup]
 # Minecraft sever directory
 minecraft_dir = "/home/mc/server"
@@ -55,13 +58,29 @@ minecraft_dir = "/home/mc/server"
 output_dir = "/home/mc/backups"
 # Number of backups to keep
 backups_to_keep = 10
-# Discord Webhook
+# Optional Discord webhook
 discord_webhook = "https://discordapp.com/api/webhooks/"
+
+# Optional remote backup config
+[remote]
+# SFTP server host:port
+sftp_server_addr = "localhost:22"
+# Remote directory
+remote_dir = "/home/backup/"
+# Remote user
+username = "user"
+# Password Auth
+password = "cooluser123"
+# Key Auth
+#public_key = /home/user/.ssh/id_rsa.pub"
+#private_key = /home/user/.ssh/id_rsa"
+# Backups to keep on the remote host
+backups_to_keep = 3
 
 # World config options
 [[world_config]]
-# world name
+# World name
 world_name = "world"
-# world save radius (in blocks)
+# World save radius (in blocks)
 save_radius = 8000
 ```

src/backup.rs

@@ -1,7 +1,9 @@
 use crate::backup;
 use crate::config::{AlbatrossConfig, WorldConfig, WorldType};
 use crate::discord::send_webhook;
+use crate::error::Result;
 use crate::region::Region;
+use crate::remote_backup::remote_backup;
 use chrono::{NaiveDateTime, Utc};
 use flate2::read::GzDecoder;
 use flate2::write::GzEncoder;
@@ -24,11 +26,11 @@ pub fn backup_file(
     file_name: &str,
     mut world_path: PathBuf,
     mut backup_path: PathBuf,
-) -> Result<u64, std::io::Error> {
+) -> Result<u64> {
     world_path.push(file_name);
     backup_path.push(file_name);
 
-    copy(world_path, backup_path)
+    Ok(copy(world_path, backup_path)?)
 }
 
 /// Backup a directory
@@ -37,11 +39,7 @@ pub fn backup_file(
 /// * `dir_name` - directory name
 /// * `world_path` - path to the world folder
 /// * `backup_path` - path to the backup folder
-pub fn backup_dir(
-    dir_name: &str,
-    world_path: &PathBuf,
-    backup_path: &PathBuf,
-) -> Result<u64, std::io::Error> {
+pub fn backup_dir(dir_name: &str, world_path: &PathBuf, backup_path: &PathBuf) -> Result<u64> {
     let mut src_dir = world_path.clone();
     src_dir.push(dir_name);
     let mut backup_dir = backup_path.clone();
@@ -73,7 +71,7 @@ pub fn backup_region(
     save_radius: u64,
     world_path: &PathBuf,
     backup_path: &PathBuf,
-) -> Result<u64, std::io::Error> {
+) -> Result<u64> {
     let mut count: u64 = 0;
     let mut src_dir = world_path.clone();
     src_dir.push(dir_name);
@@ -111,7 +109,7 @@ pub fn backup_world(
     world_path: PathBuf,
     mut backup_path: PathBuf,
     world_config: &WorldConfig,
-) -> Result<u64, std::io::Error> {
+) -> Result<u64> {
     let region_count;
     backup_path.push(&world_config.world_name);
     create_dir(backup_path.as_path())?;
@@ -136,7 +134,7 @@ pub fn backup_overworld(
     world_path: PathBuf,
     backup_path: PathBuf,
     world_config: &WorldConfig,
-) -> Result<(u64, u64), std::io::Error> {
+) -> Result<(u64, u64)> {
     backup_dir("data", &world_path, &backup_path)?;
     backup_dir("stats", &world_path, &backup_path)?;
@@ -161,7 +159,7 @@ pub fn backup_nether(
     world_path: PathBuf,
     backup_path: PathBuf,
     world_config: &WorldConfig,
-) -> Result<u64, std::io::Error> {
+) -> Result<u64> {
     let mut nether_path = world_path;
     nether_path.push("DIM-1");
@@ -178,7 +176,7 @@ pub fn backup_end(
     world_path: PathBuf,
     backup_path: PathBuf,
     world_config: &WorldConfig,
-) -> Result<u64, std::io::Error> {
+) -> Result<u64> {
     let mut end_path = world_path;
     end_path.push("DIM1");
@@ -190,7 +188,7 @@ pub fn backup_end(
 /// # Param
 /// * `tmp_dir`: tmp directory with the backed up files
 /// * `output_file`: output archive
-pub fn compress_backup(tmp_dir: &PathBuf, output_file: &PathBuf) -> Result<(), std::io::Error> {
+pub fn compress_backup(tmp_dir: &PathBuf, output_file: &PathBuf) -> Result<()> {
     let archive = File::create(output_file)?;
     let enc = GzEncoder::new(archive, Compression::default());
     let mut tar_builder = tar::Builder::new(enc);
@@ -198,7 +196,7 @@ pub fn compress_backup(tmp_dir: &PathBuf, output_file: &PathBuf) -> Result<(), std::io::Error> {
     Ok(())
 }
 
-pub fn uncompress_backup(backup: &PathBuf) -> Result<PathBuf, std::io::Error> {
+pub fn uncompress_backup(backup: &PathBuf) -> Result<PathBuf> {
     let backup_file = File::open(backup)?;
     let dec = GzDecoder::new(backup_file);
     let mut extract = Archive::new(dec);
@@ -218,7 +216,7 @@ pub fn convert_backup_to_sp(
     config: &AlbatrossConfig,
     backup: &PathBuf,
     output: &PathBuf,
-) -> Result<(), std::io::Error> {
+) -> Result<()> {
     let extract_path = uncompress_backup(backup)?;
 
     if let Some(worlds) = &config.world_config {
@@ -250,17 +248,22 @@ pub fn convert_backup_to_sp(
     Ok(())
 }
 
-/// Get the time of the backup from a file name
+pub fn get_time_from_file_name(file_name: &str) -> Result<NaiveDateTime> {
+    let time: Vec<&str> = file_name.split("_backup.tar.gz").collect();
+
+    Ok(chrono::NaiveDateTime::parse_from_str(
+        time[0],
+        "%d-%m-%y_%H.%M.%S",
+    )?)
+}
+
+/// Get the time of the backup from a directory entry
 ///
 /// # Param
 /// * `archive_entry`: archive entry
-fn get_time_from_file_name(
-    archive_entry: &DirEntry,
-) -> Result<Option<NaiveDateTime>, std::io::Error> {
+fn get_time_from_dir_entry(archive_entry: &DirEntry) -> Result<NaiveDateTime> {
     let file_name = archive_entry.file_name().to_str().unwrap().to_string();
-    let name: Vec<&str> = file_name.split("_backup.tar.gz").collect();
-    Ok(chrono::NaiveDateTime::parse_from_str(name[0], "%d-%m-%y_%H.%M.%S").ok())
+
+    get_time_from_file_name(file_name.as_str())
 }
 
 /// Removes the old backups from the ouput directory
@@ -268,7 +271,7 @@ fn get_time_from_file_name(
 /// # Params
 /// * `output_dir` - output directory containing
 /// * `keep` - number of backups to keep
-fn remove_old_backups(output_dir: &PathBuf, keep: u64) -> Result<usize, std::io::Error> {
+fn remove_old_backups(output_dir: &PathBuf, keep: u64) -> Result<usize> {
     let mut backups = vec![];
     let mut num_of_removed_backups: usize = 0;
@@ -284,8 +287,8 @@ fn remove_old_backups(output_dir: &PathBuf, keep: u64) -> Result<usize, std::io::Error> {
     if backups.len() > keep as usize {
         backups.sort_by(|a, b| {
-            let a_time = get_time_from_file_name(a).unwrap().unwrap();
-            let b_time = get_time_from_file_name(b).unwrap().unwrap();
+            let a_time = get_time_from_dir_entry(a).unwrap();
+            let b_time = get_time_from_dir_entry(b).unwrap();
             b_time.cmp(&a_time)
         });
@@ -305,7 +308,7 @@ fn remove_old_backups(output_dir: &PathBuf, keep: u64) -> Result<usize, std::io::Error> {
 ///
 /// # Params
 /// * `cfg` - config file
-pub fn do_backup(cfg: AlbatrossConfig, output: Option<PathBuf>) -> Result<(), std::io::Error> {
+pub fn do_backup(cfg: AlbatrossConfig, output: Option<PathBuf>) -> Result<()> {
     let server_base_dir = cfg.backup.minecraft_dir.clone();
     let worlds = cfg.world_config.clone().expect("No worlds configured");
     let time_str = Utc::now().format("%d-%m-%y_%H.%M.%S").to_string();
@@ -319,10 +322,68 @@ pub fn do_backup(cfg: AlbatrossConfig, output: Option<PathBuf>) -> Result<(), std::io::Error> {
     tmp_dir.push("tmp");
     remove_dir_all(&tmp_dir).ok();
-    create_dir_all(tmp_dir.clone()).unwrap();
+    create_dir_all(tmp_dir.clone())?;
 
-    let timer = Instant::now();
     send_webhook("**Albatross is swooping in to backup your worlds!**", &cfg);
 
+    let timer = Instant::now();
+
+    backup_worlds(&cfg, server_base_dir, worlds, &mut tmp_dir).map_err(|e| {
+        send_webhook("Failed to copy worlds to backup location", &cfg);
+        println!("Failed to copy worlds: {}", e);
+        e
+    })?;
+
+    backup::compress_backup(&tmp_dir, &output_archive).map_err(|e| {
+        send_webhook("Failed to compress backup", &cfg);
+        println!("Failed to compress backup: {}", e);
+        e
+    })?;
+
+    remove_dir_all(&tmp_dir)?;
+
+    match remove_old_backups(&cfg.backup.output_dir, cfg.backup.backups_to_keep) {
+        Ok(backups_removed) => {
+            if backups_removed > 0 {
+                let msg = format!(
+                    "Albatross mistook **{}** of your old backups for some french fries and ate them!! SKRAWWWW",
+                    backups_removed
+                );
+                send_webhook(msg.as_str(), &cfg);
+            }
+        }
+        Err(e) => {
+            send_webhook("Failed to remove old backups!", &cfg);
+            println!("Failed to remove old backups: {}", e)
+        }
+    }
+
+    if let Some(remote_backup_cfg) = &cfg.remote {
+        match remote_backup(output_archive, remote_backup_cfg) {
+            Ok(_) => {
+                send_webhook("Remote backup completed!", &cfg);
+            }
+            Err(e) => {
+                send_webhook("Remote backup failed!", &cfg);
+                println!("Remote backup failed with error: {}", e);
+            }
+        }
+    }
+
+    let secs = timer.elapsed().as_secs();
+    send_webhook(
+        format!("**Full backup completed in {}s**! *SKREEEEEEEEEE*", secs).as_str(),
+        &cfg,
+    );
+
+    Ok(())
+}
+
+fn backup_worlds(
+    cfg: &AlbatrossConfig,
+    server_base_dir: PathBuf,
+    worlds: Vec<WorldConfig>,
+    tmp_dir: &mut PathBuf,
+) -> Result<()> {
     for world in worlds {
         let mut world_dir = server_base_dir.clone();
         let world_name = world.world_name.clone();
@@ -363,24 +424,5 @@ pub fn do_backup(cfg: AlbatrossConfig, output: Option<PathBuf>) -> Result<(), std::io::Error> {
         }
     }
 
-    backup::compress_backup(&tmp_dir, &output_archive)?;
-    remove_dir_all(&tmp_dir)?;
-
-    let backups_removed = remove_old_backups(&cfg.backup.output_dir, cfg.backup.backups_to_keep)?;
-    if backups_removed > 0 {
-        let msg = format!(
-            "Albatross mistook **{}** of your old backups for some french fries and ate them!! SKRAWWWW",
-            backups_removed
-        );
-        send_webhook(msg.as_str(), &cfg);
-    }
-
-    let secs = timer.elapsed().as_secs();
-    send_webhook(
-        format!("**Full backup completed in {}s**! *SKREEEEEEEEEE*", secs).as_str(),
-        &cfg,
-    );
-
     Ok(())
 }

src/config.rs

@@ -24,7 +24,7 @@ impl From<String> for WorldType {
     }
 }
 
-/// Config for individual WorldConfig
+/// Config for individual world configuration
 #[derive(Debug, Deserialize, Clone)]
 pub struct WorldConfig {
     pub world_name: String,
@@ -41,11 +41,31 @@ pub struct BackupConfig {
     pub discord_webhook: Option<String>,
 }
 
+/// Config for remote backups
+#[derive(Debug, Deserialize, Clone)]
+pub struct RemoteBackupConfig {
+    /// Remote server address
+    pub sftp_server_addr: String,
+    /// Remote output directory
+    pub remote_dir: PathBuf,
+    /// Remote server username
+    pub username: String,
+    /// Public key for key auth
+    pub public_key: Option<PathBuf>,
+    /// Private key for key auth
+    pub private_key: Option<PathBuf>,
+    /// Password if using password auth
+    pub password: Option<String>,
+    /// Remote backups to keep
+    pub backups_to_keep: u64,
+}
+
 /// Configs
 #[derive(Debug, Deserialize, Clone)]
 pub struct AlbatrossConfig {
     pub backup: BackupConfig,
     pub world_config: Option<Vec<WorldConfig>>,
+    pub remote: Option<RemoteBackupConfig>,
 }
 
 impl AlbatrossConfig {

src/error.rs 100644

@@ -0,0 +1,60 @@
+use crate::region::RegionParseError;
+
+pub type Result<T> = std::result::Result<T, AlbatrossError>;
+
+#[derive(Debug)]
+pub enum AlbatrossError {
+    FileError(std::io::Error),
+    SSHError(ssh2::Error),
+    ChunkParseError(crate::chunk_coordinate::ChunkCoordinateErr),
+    RegionParseError(crate::region::RegionParseError),
+    ChronoParseError(chrono::ParseError),
+    NoSSHAuth,
+}
+
+impl std::error::Error for AlbatrossError {}
+
+impl std::fmt::Display for AlbatrossError {
+    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+        match self {
+            AlbatrossError::FileError(e) => write!(f, "File I/O error: {}", e),
+            AlbatrossError::SSHError(e) => write!(f, "SSH error: {}", e),
+            AlbatrossError::ChunkParseError(e) => {
+                write!(f, "Unable to parse chunk coordinate: {}", e)
+            }
+            AlbatrossError::RegionParseError(e) => write!(f, "Unable to parse region name: {}", e),
+            AlbatrossError::ChronoParseError(e) => write!(f, "Unable to parse time: {}", e),
+            AlbatrossError::NoSSHAuth => write!(f, "No SSH auth methods provided in the config"),
+        }
+    }
+}
+
+impl From<std::io::Error> for AlbatrossError {
+    fn from(e: std::io::Error) -> Self {
+        AlbatrossError::FileError(e)
+    }
+}
+
+impl From<ssh2::Error> for AlbatrossError {
+    fn from(e: ssh2::Error) -> Self {
+        AlbatrossError::SSHError(e)
+    }
+}
+
+impl From<crate::chunk_coordinate::ChunkCoordinateErr> for AlbatrossError {
+    fn from(e: crate::chunk_coordinate::ChunkCoordinateErr) -> Self {
+        AlbatrossError::ChunkParseError(e)
+    }
+}
+
+impl From<crate::region::RegionParseError> for AlbatrossError {
+    fn from(e: RegionParseError) -> Self {
+        AlbatrossError::RegionParseError(e)
+    }
+}
+
+impl From<chrono::ParseError> for AlbatrossError {
+    fn from(e: chrono::ParseError) -> Self {
+        AlbatrossError::ChronoParseError(e)
+    }
+}
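
Editor's note: the new error module is the usual "crate-wide `Result` alias plus `From` conversions" pattern, which is what lets every `?` in the files above turn a `std::io::Error`, `ssh2::Error`, or `chrono::ParseError` into an `AlbatrossError` automatically. A minimal standalone sketch of the same pattern is below; `MyError` and `read_config` are illustrative stand-ins, not part of Albatross.

```rust
use std::fmt;

// Crate-wide alias: functions return Result<T> and the error type is implied.
pub type Result<T> = std::result::Result<T, MyError>;

#[derive(Debug)]
pub enum MyError {
    FileError(std::io::Error),
    NoSSHAuth,
}

impl fmt::Display for MyError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            MyError::FileError(e) => write!(f, "File I/O error: {}", e),
            MyError::NoSSHAuth => write!(f, "No SSH auth methods provided in the config"),
        }
    }
}

impl std::error::Error for MyError {}

// The From impl is what makes `?` convert io errors into MyError.
impl From<std::io::Error> for MyError {
    fn from(e: std::io::Error) -> Self {
        MyError::FileError(e)
    }
}

// Any function returning the alias can now use `?` on io::Result values,
// which is how backup.rs dropped the explicit Result<_, std::io::Error> types.
fn read_config(path: &str) -> Result<String> {
    Ok(std::fs::read_to_string(path)?)
}

fn main() {
    match read_config("albatross.toml") {
        Ok(cfg) => println!("read {} bytes", cfg.len()),
        Err(e) => eprintln!("{}", e),
    }
}
```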

src/main.rs

@@ -5,7 +5,9 @@ mod backup;
 mod chunk_coordinate;
 mod config;
 mod discord;
+mod error;
 mod region;
+mod remote_backup;
 mod restore;
 
 use crate::backup::{convert_backup_to_sp, do_backup};

src/remote_backup.rs

@@ -0,0 +1,73 @@
+use crate::backup::get_time_from_file_name;
+use crate::config::RemoteBackupConfig;
+use crate::error::{AlbatrossError, Result};
+use ssh2::Session;
+use std::net::TcpStream;
+use std::path::PathBuf;
+
+/// Open an SSH session
+fn open_ssh_session(remote_config: &RemoteBackupConfig) -> Result<Session> {
+    let tcp = TcpStream::connect(&remote_config.sftp_server_addr)?;
+    let mut sess = Session::new()?;
+
+    sess.set_tcp_stream(tcp);
+    sess.handshake().unwrap();
+
+    if let Some(password) = &remote_config.password {
+        sess.userauth_password(&remote_config.username, password)?;
+    } else if let Some(key) = &remote_config.private_key {
+        let public_key = remote_config.public_key.as_deref();
+        sess.userauth_pubkey_file(&remote_config.username, public_key, key, None)?;
+    } else {
+        return Err(AlbatrossError::NoSSHAuth);
+    }
+
+    Ok(sess)
+}
+
+/// Handle remote backup of a file
+pub fn remote_backup(file: PathBuf, remote_config: &RemoteBackupConfig) -> Result<()> {
+    let sess = open_ssh_session(remote_config)?;
+    let remote_path = remote_config.remote_dir.join(file.file_name().unwrap());
+
+    let mut local_file = std::fs::File::open(&file)?;
+
+    let sftp = sess.sftp()?;
+    let mut remote_file = sftp.create(&remote_path)?;
+
+    std::io::copy(&mut local_file, &mut remote_file)?;
+
+    let files = sftp.readdir(&remote_config.remote_dir)?;
+
+    let mut backups: Vec<PathBuf> = files
+        .into_iter()
+        .map(|(file, _)| file)
+        .filter(|file| {
+            if let Some(ext) = file.extension() {
+                ext == "gz"
+            } else {
+                false
+            }
+        })
+        .collect();
+
+    backups.sort_by(|file_a, file_b| {
+        let time_a =
+            get_time_from_file_name(file_a.file_name().unwrap().to_str().unwrap()).unwrap();
+        let time_b =
+            get_time_from_file_name(file_b.file_name().unwrap().to_str().unwrap()).unwrap();
+
+        time_b.cmp(&time_a)
+    });
+
+    if backups.len() > remote_config.backups_to_keep as usize {
+        for _ in 0..(backups.len() - remote_config.backups_to_keep as usize) {
+            if let Some(backup_path) = backups.pop() {
+                sftp.unlink(&*backup_path)?;
+            }
+        }
+    }
+
+    Ok(())
+}
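
Editor's note: the remote retention step above boils down to parsing the timestamp out of each `*_backup.tar.gz` name, sorting newest first, and deleting everything past `backups_to_keep`. A minimal standalone sketch of that logic follows, using chrono with the same `%d-%m-%y_%H.%M.%S` format; the file names are made up for illustration.

```rust
use chrono::NaiveDateTime;

// Parse the timestamp embedded in an archive name such as
// "04-11-20_01.51.27_backup.tar.gz" (the format do_backup uses).
fn backup_time(name: &str) -> Option<NaiveDateTime> {
    let stem = name.split("_backup.tar.gz").next()?;
    NaiveDateTime::parse_from_str(stem, "%d-%m-%y_%H.%M.%S").ok()
}

fn main() {
    let mut backups = vec![
        "04-11-20_01.51.27_backup.tar.gz".to_string(),
        "05-11-20_01.51.30_backup.tar.gz".to_string(),
        "06-11-20_01.51.02_backup.tar.gz".to_string(),
        "07-11-20_01.50.58_backup.tar.gz".to_string(),
    ];
    let keep = 3;

    // Newest first, like remote_backup's sort_by on parsed timestamps.
    backups.sort_by_key(|name| std::cmp::Reverse(backup_time(name)));

    // Everything past the first `keep` entries would be unlinked on the remote.
    for old in backups.iter().skip(keep) {
        println!("would delete {}", old);
    }
}
```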

src/restore.rs

@@ -1,7 +1,7 @@
 use crate::backup::uncompress_backup;
 use crate::chunk_coordinate::ChunkCoordinate;
+use crate::error::Result;
 use anvil_region::AnvilChunkProvider;
-use std::error;
 use std::fs::remove_dir_all;
 use std::path::PathBuf;
@@ -15,11 +15,7 @@ struct RestoreAccess {
 impl RestoreAccess {
     /// Create new RestoreAccess
-    pub fn new(
-        world_name: &str,
-        src_path: &PathBuf,
-        dest_path: &PathBuf,
-    ) -> Result<Self, std::io::Error> {
+    pub fn new(world_name: &str, src_path: &PathBuf, dest_path: &PathBuf) -> Result<Self> {
         let src_path = uncompress_backup(src_path)?.join(world_name).join("region");
         let dest_path = dest_path.join(world_name).join("region");
@@ -41,8 +37,8 @@ impl RestoreAccess {
     }
 
     /// Cleanup process
-    pub fn cleanup(self) -> Result<(), std::io::Error> {
-        remove_dir_all("tmp")
+    pub fn cleanup(self) -> Result<()> {
+        Ok(remove_dir_all("tmp")?)
     }
 }
@@ -53,7 +49,7 @@ pub fn restore_range_from_backup(
     upper: ChunkCoordinate,
     backup_path: &PathBuf,
     minecraft_dir: &PathBuf,
-) -> Result<u64, Box<dyn error::Error>> {
+) -> Result<u64> {
     let chunk_access = RestoreAccess::new(world_name, backup_path, minecraft_dir)?;
     let mut count = 0;
@@ -74,7 +70,7 @@ pub fn restore_chunk_from_backup(
     chunk: ChunkCoordinate,
     backup_path: &PathBuf,
     minecraft_dir: &PathBuf,
-) -> Result<(), Box<dyn error::Error>> {
+) -> Result<()> {
     let chunk_access = RestoreAccess::new(world_name, backup_path, minecraft_dir)?;
     chunk_access.copy_chunk(chunk.x, chunk.z);