433 lines
13 KiB
Python
433 lines
13 KiB
Python
"""Backup and restore service for configuration and data management."""
|
|
|
|
import json
|
|
import logging
|
|
import os
|
|
import shutil
|
|
import tarfile
|
|
from dataclasses import dataclass
|
|
from datetime import datetime
|
|
from pathlib import Path
|
|
from typing import Any, Dict, List, Optional
|
|
|
|
# Module-level logger; named after the module per logging convention.
logger = logging.getLogger(__name__)
|
|
|
|
|
|
@dataclass
class BackupInfo:
    """Information about a backup."""

    # Archive filename, e.g. "config_20240101_120000.tar.gz".
    name: str
    # Time the backup was created.
    timestamp: datetime
    # Size of the archive file on disk.
    size_bytes: int
    backup_type: str  # 'config', 'data', 'full'
    # Optional free-form description supplied by the caller.
    description: Optional[str] = None
|
|
|
|
|
|
class BackupService:
    """Service for managing backups and restores.

    Backups are gzip tarballs stored in ``backup_dir`` and named
    ``<prefix>_<YYYYmmdd_HHMMSS>.tar.gz`` where ``<prefix>`` is one of
    ``config``, ``database`` or ``full``.
    """

    # Same logger object as the module-level one (identical __name__);
    # held on the class so the service is self-contained.
    _logger = logging.getLogger(__name__)

    def __init__(
        self,
        backup_dir: str = "data/backups",
        config_dir: str = "data",
        database_path: str = "data/aniworld.db",
    ):
        """Initialize backup service.

        Args:
            backup_dir: Directory to store backups.
            config_dir: Directory containing configuration files.
            database_path: Path to the database file.
        """
        self.backup_dir = Path(backup_dir)
        self.config_dir = Path(config_dir)
        self.database_path = Path(database_path)

        # Ensure the backup directory exists up front so the backup
        # methods can assume it is writable.
        self.backup_dir.mkdir(parents=True, exist_ok=True)

    # ------------------------------------------------------------------
    # Internal helpers
    # ------------------------------------------------------------------

    def _make_backup(
        self,
        prefix: str,
        backup_type: str,
        files: List[Path],
        description: str,
    ) -> "BackupInfo":
        """Write a gzip tarball of *files* and return its metadata.

        Args:
            prefix: Filename prefix ('config', 'database' or 'full').
            backup_type: Type recorded on the returned BackupInfo.
            files: Candidate files; missing ones are silently skipped
                (best-effort, matching the original behaviour).
            description: Free-form description stored on the BackupInfo.

        Returns:
            BackupInfo: Metadata for the archive that was written.
        """
        timestamp = datetime.now()
        backup_name = (
            f"{prefix}_{timestamp.strftime('%Y%m%d_%H%M%S')}.tar.gz"
        )
        backup_path = self.backup_dir / backup_name

        with tarfile.open(backup_path, "w:gz") as tar:
            for src in files:
                if src.exists():
                    tar.add(src, arcname=src.name)

        return BackupInfo(
            name=backup_name,
            timestamp=timestamp,
            size_bytes=backup_path.stat().st_size,
            backup_type=backup_type,
            description=description,
        )

    @staticmethod
    def _safe_extract(tar: tarfile.TarFile, dest: Path) -> None:
        """Extract *tar* into *dest*, refusing members that escape it.

        Plain ``extractall`` follows member names such as
        ``../../etc/passwd`` (tar path traversal); validate every
        member before extracting.

        Raises:
            ValueError: If any member would land outside *dest*.
        """
        root = dest.resolve()
        for member in tar.getmembers():
            target = (root / member.name).resolve()
            if target != root and root not in target.parents:
                raise ValueError(
                    f"Unsafe member in archive: {member.name}"
                )
        tar.extractall(root)

    def _restore_member(
        self, backup_name: str, member_name: str, dest: Path
    ) -> bool:
        """Extract *member_name* from a backup archive and copy to *dest*.

        Args:
            backup_name: Archive filename inside ``backup_dir``.
            member_name: Name of the file inside the archive.
            dest: Destination path for the restored file.

        Returns:
            bool: False if the archive is missing; True otherwise (a
            member missing *inside* the archive is tolerated, matching
            the original best-effort behaviour).
        """
        backup_path = self.backup_dir / backup_name
        if not backup_path.exists():
            self._logger.error("Backup file not found: %s", backup_name)
            return False

        temp_dir = self.backup_dir / "temp_restore"
        temp_dir.mkdir(exist_ok=True)
        try:
            with tarfile.open(backup_path, "r:gz") as tar:
                self._safe_extract(tar, temp_dir)
            extracted = temp_dir / member_name
            if extracted.exists():
                shutil.copy(extracted, dest)
            return True
        finally:
            # Always remove the scratch directory, even when extraction
            # or the copy fails (the original leaked it on error).
            shutil.rmtree(temp_dir, ignore_errors=True)

    # ------------------------------------------------------------------
    # Backup creation
    # ------------------------------------------------------------------

    def backup_configuration(
        self, description: str = ""
    ) -> Optional["BackupInfo"]:
        """Create a configuration backup.

        Args:
            description: Optional description for the backup.

        Returns:
            BackupInfo: Information about the created backup, or None
            on failure.
        """
        try:
            info = self._make_backup(
                "config",
                "config",
                [self.config_dir / "config.json"],
                description,
            )
            self._logger.info(
                "Configuration backup created: %s", info.name
            )
            return info
        except Exception as e:
            self._logger.error(
                "Failed to create configuration backup: %s", e
            )
            return None

    def backup_database(
        self, description: str = ""
    ) -> Optional["BackupInfo"]:
        """Create a database backup.

        Args:
            description: Optional description for the backup.

        Returns:
            BackupInfo: Information about the created backup, or None
            if the database file is missing or the backup fails.
        """
        try:
            if not self.database_path.exists():
                self._logger.warning(
                    "Database file not found: %s", self.database_path
                )
                return None

            info = self._make_backup(
                "database", "data", [self.database_path], description
            )
            self._logger.info("Database backup created: %s", info.name)
            return info
        except Exception as e:
            self._logger.error(
                "Failed to create database backup: %s", e
            )
            return None

    def backup_full(
        self, description: str = ""
    ) -> Optional["BackupInfo"]:
        """Create a full system backup (config, database, queue).

        Args:
            description: Optional description for the backup.

        Returns:
            BackupInfo: Information about the created backup, or None
            on failure.
        """
        try:
            info = self._make_backup(
                "full",
                "full",
                [
                    self.config_dir / "config.json",
                    self.database_path,
                    self.config_dir / "download_queue.json",
                ],
                description,
            )
            self._logger.info("Full backup created: %s", info.name)
            return info
        except Exception as e:
            self._logger.error("Failed to create full backup: %s", e)
            return None

    # ------------------------------------------------------------------
    # Restore
    # ------------------------------------------------------------------

    def restore_configuration(self, backup_name: str) -> bool:
        """Restore configuration from backup.

        Args:
            backup_name: Name of the backup to restore.

        Returns:
            bool: True if restore was successful.
        """
        try:
            if not self._restore_member(
                backup_name,
                "config.json",
                self.config_dir / "config.json",
            ):
                return False
            self._logger.info(
                "Configuration restored from: %s", backup_name
            )
            return True
        except Exception as e:
            self._logger.error(
                "Failed to restore configuration: %s", e
            )
            return False

    def restore_database(self, backup_name: str) -> bool:
        """Restore database from backup.

        A safety copy of the current database is written next to it
        (``<name>.backup``) before the restore overwrites it.

        Args:
            backup_name: Name of the backup to restore.

        Returns:
            bool: True if restore was successful.
        """
        try:
            backup_path = self.backup_dir / backup_name
            if not backup_path.exists():
                self._logger.error(
                    "Backup file not found: %s", backup_name
                )
                return False

            # Keep a safety copy of the live database before touching it.
            if self.database_path.exists():
                current_backup = (
                    self.database_path.parent
                    / f"{self.database_path.name}.backup"
                )
                shutil.copy(self.database_path, current_backup)
                self._logger.info(
                    "Current database backed up to: %s", current_backup
                )

            if not self._restore_member(
                backup_name, self.database_path.name, self.database_path
            ):
                return False
            self._logger.info("Database restored from: %s", backup_name)
            return True
        except Exception as e:
            self._logger.error("Failed to restore database: %s", e)
            return False

    # ------------------------------------------------------------------
    # Housekeeping
    # ------------------------------------------------------------------

    def list_backups(
        self, backup_type: Optional[str] = None
    ) -> List[Dict[str, Any]]:
        """List available backups, newest first.

        Args:
            backup_type: Optional filter by backup type prefix
                ('config', 'database' or 'full').

        Returns:
            list: One dict per backup with keys
                name/type/size_bytes/created.
        """
        try:
            backups = []
            for backup_file in self.backup_dir.glob("*.tar.gz"):
                # Filenames look like "<type>_<YYYYmmdd_HHMMSS>.tar.gz".
                file_type, sep, rest = backup_file.name.partition("_")
                if not sep:
                    # Not a file this service produced; skip it instead
                    # of crashing the whole listing (the original raised
                    # IndexError here and returned [] for everything).
                    continue
                if backup_type and file_type != backup_type:
                    continue

                backups.append(
                    {
                        "name": backup_file.name,
                        "type": file_type,
                        "size_bytes": backup_file.stat().st_size,
                        "created": rest.replace(".tar.gz", ""),
                    }
                )

            return sorted(
                backups, key=lambda b: b["created"], reverse=True
            )
        except Exception as e:
            self._logger.error("Failed to list backups: %s", e)
            return []

    def delete_backup(self, backup_name: str) -> bool:
        """Delete a backup.

        Args:
            backup_name: Name of the backup to delete.

        Returns:
            bool: True if delete was successful.
        """
        try:
            backup_path = self.backup_dir / backup_name

            if not backup_path.exists():
                self._logger.warning("Backup not found: %s", backup_name)
                return False

            backup_path.unlink()
            self._logger.info("Backup deleted: %s", backup_name)
            return True
        except Exception as e:
            self._logger.error("Failed to delete backup: %s", e)
            return False

    def cleanup_old_backups(
        self, max_backups: int = 10, backup_type: Optional[str] = None
    ) -> int:
        """Remove old backups, keeping only the most recent ones.

        Args:
            max_backups: Maximum number of backups to keep.
            backup_type: Optional filter by backup type.

        Returns:
            int: Number of backups deleted.
        """
        try:
            backups = self.list_backups(backup_type)

            if len(backups) <= max_backups:
                return 0

            # list_backups is sorted newest-first, so everything past
            # max_backups is the oldest and safe to drop.
            deleted_count = 0
            for backup in backups[max_backups:]:
                if self.delete_backup(backup["name"]):
                    deleted_count += 1

            self._logger.info(
                "Cleaned up %d old backups", deleted_count
            )
            return deleted_count
        except Exception as e:
            self._logger.error("Failed to cleanup old backups: %s", e)
            return 0

    # ------------------------------------------------------------------
    # Import / export
    # ------------------------------------------------------------------

    def export_anime_data(self, output_file: str) -> bool:
        """Export anime library data to JSON.

        Args:
            output_file: Path to export file.

        Returns:
            bool: True if export was successful.
        """
        try:
            # Placeholder payload: integration with the anime service
            # (to supply real library data) is still TODO.
            export_data = {
                "timestamp": datetime.now().isoformat(),
                "anime_count": 0,
                "data": [],
            }

            with open(output_file, "w", encoding="utf-8") as f:
                json.dump(export_data, f, indent=2)

            self._logger.info("Anime data exported to: %s", output_file)
            return True
        except Exception as e:
            self._logger.error("Failed to export anime data: %s", e)
            return False

    def import_anime_data(self, input_file: str) -> bool:
        """Import anime library data from JSON.

        Args:
            input_file: Path to import file.

        Returns:
            bool: True if import was successful.
        """
        try:
            if not os.path.exists(input_file):
                self._logger.error(
                    "Import file not found: %s", input_file
                )
                return False

            with open(input_file, "r", encoding="utf-8") as f:
                json.load(f)  # Load and validate JSON

            # Integration with the anime service (to apply the imported
            # data) is still TODO.

            self._logger.info(
                "Anime data imported from: %s", input_file
            )
            return True
        except Exception as e:
            self._logger.error("Failed to import anime data: %s", e)
            return False
|
|
|
|
|
|
# Global backup service instance
# Created lazily on the first call to get_backup_service().
_backup_service: Optional[BackupService] = None
|
|
|
|
|
|
def get_backup_service() -> BackupService:
    """Get or create the global backup service instance.

    Returns:
        BackupService: The backup service instance.
    """
    global _backup_service
    service = _backup_service
    if service is None:
        # First call: build the singleton with default paths and cache it.
        service = BackupService()
        _backup_service = service
    return service
|