backup
src/server/services/analytics_service.py (new file, 422 lines)
@@ -0,0 +1,422 @@
"""Analytics service for downloads, popularity, and performance metrics.

This module provides comprehensive analytics tracking including download
statistics, series popularity analysis, storage usage trends, and
performance reporting.
"""

import json
import logging
from dataclasses import asdict, dataclass, field
from datetime import datetime, timedelta
from pathlib import Path
from typing import Any, Dict, List, Optional

import psutil
from sqlalchemy import case, func, select
from sqlalchemy.ext.asyncio import AsyncSession

from src.server.database.models import Download, DownloadStatus

logger = logging.getLogger(__name__)

ANALYTICS_FILE = Path("data") / "analytics.json"


@dataclass
class DownloadStats:
    """Download statistics snapshot."""

    total_downloads: int = 0
    successful_downloads: int = 0
    failed_downloads: int = 0
    total_bytes_downloaded: int = 0
    average_speed_mbps: float = 0.0
    success_rate: float = 0.0
    average_duration_seconds: float = 0.0


@dataclass
class SeriesPopularity:
    """Series popularity metrics."""

    series_name: str
    download_count: int
    total_size_bytes: int
    last_download: Optional[str] = None
    success_rate: float = 0.0


@dataclass
class StorageAnalysis:
    """Storage usage analysis."""

    total_storage_bytes: int = 0
    used_storage_bytes: int = 0
    free_storage_bytes: int = 0
    storage_percent_used: float = 0.0
    downloads_directory_size_bytes: int = 0
    cache_directory_size_bytes: int = 0
    logs_directory_size_bytes: int = 0


@dataclass
class PerformanceReport:
    """Performance metrics and trends."""

    period_start: str
    period_end: str
    downloads_per_hour: float = 0.0
    average_queue_size: float = 0.0
    peak_memory_usage_mb: float = 0.0
    average_cpu_percent: float = 0.0
    uptime_seconds: float = 0.0
    error_rate: float = 0.0
    samples: List[Dict[str, Any]] = field(default_factory=list)


class AnalyticsService:
    """Service for tracking and reporting analytics data."""

    def __init__(self):
        """Initialize the analytics service."""
        self.analytics_file = ANALYTICS_FILE
        self._ensure_analytics_file()

    def _ensure_analytics_file(self) -> None:
        """Ensure analytics file exists with default data."""
        if not self.analytics_file.exists():
            default_data = {
                "created_at": datetime.now().isoformat(),
                "last_updated": datetime.now().isoformat(),
                "download_stats": asdict(DownloadStats()),
                "series_popularity": [],
                "storage_history": [],
                "performance_samples": [],
            }
            self.analytics_file.write_text(json.dumps(default_data, indent=2))

    def _load_analytics(self) -> Dict[str, Any]:
        """Load analytics data from file."""
        try:
            return json.loads(self.analytics_file.read_text())
        except (FileNotFoundError, json.JSONDecodeError):
            # Recreate the file with defaults if it is missing or corrupted;
            # without the unlink a corrupted file would be re-read and fail again.
            self.analytics_file.unlink(missing_ok=True)
            self._ensure_analytics_file()
            return json.loads(self.analytics_file.read_text())

    def _save_analytics(self, data: Dict[str, Any]) -> None:
        """Save analytics data to file."""
        data["last_updated"] = datetime.now().isoformat()
        self.analytics_file.write_text(json.dumps(data, indent=2))

    async def get_download_stats(
        self, db: AsyncSession, days: int = 30
    ) -> DownloadStats:
        """Get download statistics for the specified period.

        Args:
            db: Database session
            days: Number of days to analyze

        Returns:
            DownloadStats with aggregated download data
        """
        cutoff_date = datetime.now() - timedelta(days=days)

        # Query downloads within period
        stmt = select(Download).where(
            Download.created_at >= cutoff_date
        )
        result = await db.execute(stmt)
        downloads = result.scalars().all()

        if not downloads:
            return DownloadStats()

        successful = [d for d in downloads
                      if d.status == DownloadStatus.COMPLETED]
        failed = [d for d in downloads
                  if d.status == DownloadStatus.FAILED]

        total_bytes = sum(d.size_bytes or 0 for d in successful)
        total_seconds = sum(
            d.duration_seconds or 0 for d in successful
        ) or 1

        avg_speed = (
            (total_bytes / (1024 * 1024)) / total_seconds
            if total_seconds > 0
            else 0.0
        )
        success_rate = (
            len(successful) / len(downloads) * 100 if downloads else 0.0
        )

        return DownloadStats(
            total_downloads=len(downloads),
            successful_downloads=len(successful),
            failed_downloads=len(failed),
            total_bytes_downloaded=total_bytes,
            average_speed_mbps=avg_speed,
            success_rate=success_rate,
            average_duration_seconds=total_seconds / len(successful)
            if successful
            else 0.0,
        )

    async def get_series_popularity(
        self, db: AsyncSession, limit: int = 10
    ) -> List[SeriesPopularity]:
        """Get most popular series by download count.

        Args:
            db: Database session
            limit: Maximum number of series to return

        Returns:
            List of SeriesPopularity objects
        """
        stmt = (
            select(
                Download.series_name,
                func.count(Download.id).label("download_count"),
                func.sum(Download.size_bytes).label("total_size"),
                func.max(Download.created_at).label("last_download"),
                func.sum(
                    case(
                        (Download.status == DownloadStatus.COMPLETED, 1),
                        else_=0,
                    )
                ).label("successful"),
            )
            .group_by(Download.series_name)
            .order_by(func.count(Download.id).desc())
            .limit(limit)
        )

        result = await db.execute(stmt)
        rows = result.all()

        popularity = []
        for row in rows:
            success_rate = 0.0
            if row.download_count > 0:
                success_rate = (
                    (row.successful or 0) / row.download_count * 100
                )

            popularity.append(
                SeriesPopularity(
                    series_name=row.series_name or "Unknown",
                    download_count=row.download_count or 0,
                    total_size_bytes=row.total_size or 0,
                    last_download=row.last_download.isoformat()
                    if row.last_download
                    else None,
                    success_rate=success_rate,
                )
            )

        return popularity

    def get_storage_analysis(self) -> StorageAnalysis:
        """Get current storage usage analysis.

        Returns:
            StorageAnalysis with storage breakdown
        """
        try:
            # Get disk usage for data directory
            disk = psutil.disk_usage("/")
            total = disk.total
            used = disk.used
            free = disk.free

            analysis = StorageAnalysis(
                total_storage_bytes=total,
                used_storage_bytes=used,
                free_storage_bytes=free,
                storage_percent_used=disk.percent,
                downloads_directory_size_bytes=self._get_dir_size(
                    Path("data")
                ),
                cache_directory_size_bytes=self._get_dir_size(
                    Path("data") / "cache"
                ),
                logs_directory_size_bytes=self._get_dir_size(
                    Path("logs")
                ),
            )

            return analysis

        except Exception as e:
            logger.error(f"Storage analysis failed: {e}")
            return StorageAnalysis()

    def _get_dir_size(self, path: Path) -> int:
        """Calculate total size of directory.

        Args:
            path: Directory path

        Returns:
            Total size in bytes
        """
        if not path.exists():
            return 0

        total = 0
        try:
            for item in path.rglob("*"):
                if item.is_file():
                    total += item.stat().st_size
        except (OSError, PermissionError):
            pass

        return total

    async def get_performance_report(
        self, db: AsyncSession, hours: int = 24
    ) -> PerformanceReport:
        """Get performance metrics for the specified period.

        Args:
            db: Database session
            hours: Number of hours to analyze

        Returns:
            PerformanceReport with performance metrics
        """
        cutoff_time = datetime.now() - timedelta(hours=hours)

        # Get download metrics
        stmt = select(Download).where(
            Download.created_at >= cutoff_time
        )
        result = await db.execute(stmt)
        downloads = result.scalars().all()

        downloads_per_hour = len(downloads) / max(hours, 1)

        # Get queue size over time (estimated from analytics)
        analytics = self._load_analytics()
        performance_samples = analytics.get("performance_samples", [])

        # Filter recent samples
        recent_samples = [
            s
            for s in performance_samples
            if datetime.fromisoformat(s.get("timestamp", "2000-01-01"))
            >= cutoff_time
        ]

        avg_queue = sum(
            s.get("queue_size", 0) for s in recent_samples
        ) / len(recent_samples) if recent_samples else 0.0

        # Get memory and CPU stats
        process = psutil.Process()
        memory_info = process.memory_info()
        peak_memory_mb = memory_info.rss / (1024 * 1024)

        cpu_percent = process.cpu_percent(interval=1)

        # Calculate error rate
        failed_count = sum(
            1 for d in downloads
            if d.status == DownloadStatus.FAILED
        )
        error_rate = (
            failed_count / len(downloads) * 100 if downloads else 0.0
        )

        # Get uptime
        boot_time = datetime.fromtimestamp(psutil.boot_time())
        uptime_seconds = (datetime.now() - boot_time).total_seconds()

        return PerformanceReport(
            period_start=cutoff_time.isoformat(),
            period_end=datetime.now().isoformat(),
            downloads_per_hour=downloads_per_hour,
            average_queue_size=avg_queue,
            peak_memory_usage_mb=peak_memory_mb,
            average_cpu_percent=cpu_percent,
            uptime_seconds=uptime_seconds,
            error_rate=error_rate,
            samples=recent_samples[-100:],  # Keep last 100 samples
        )

    def record_performance_sample(
        self,
        queue_size: int,
        active_downloads: int,
        cpu_percent: float,
        memory_mb: float,
    ) -> None:
        """Record a performance metric sample.

        Args:
            queue_size: Current queue size
            active_downloads: Number of active downloads
            cpu_percent: Current CPU usage percentage
            memory_mb: Current memory usage in MB
        """
        analytics = self._load_analytics()
        samples = analytics.get("performance_samples", [])

        sample = {
            "timestamp": datetime.now().isoformat(),
            "queue_size": queue_size,
            "active_downloads": active_downloads,
            "cpu_percent": cpu_percent,
            "memory_mb": memory_mb,
        }

        samples.append(sample)

        # Keep only recent samples (7 days worth at 1 sample per minute)
        max_samples = 7 * 24 * 60
        if len(samples) > max_samples:
            samples = samples[-max_samples:]

        analytics["performance_samples"] = samples
        self._save_analytics(analytics)

    async def generate_summary_report(
        self, db: AsyncSession
    ) -> Dict[str, Any]:
        """Generate comprehensive analytics summary.

        Args:
            db: Database session

        Returns:
            Summary report with all analytics
        """
        download_stats = await self.get_download_stats(db)
        series_popularity = await self.get_series_popularity(db, limit=5)
        storage = self.get_storage_analysis()
        performance = await self.get_performance_report(db)

        return {
            "timestamp": datetime.now().isoformat(),
            "download_stats": asdict(download_stats),
            "series_popularity": [
                asdict(s) for s in series_popularity
            ],
            "storage_analysis": asdict(storage),
            "performance_report": asdict(performance),
        }


_analytics_service_instance: Optional[AnalyticsService] = None


def get_analytics_service() -> AnalyticsService:
    """Get or create singleton analytics service instance.

    Returns:
        AnalyticsService instance
    """
    global _analytics_service_instance
    if _analytics_service_instance is None:
        _analytics_service_instance = AnalyticsService()
    return _analytics_service_instance
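A minimal usage sketch for the analytics service above (illustrative, not part of the commit). It assumes an async SQLAlchemy engine over the SQLite database suggested by backup_service.py; the database URL, the aiosqlite driver, and the SessionLocal factory are assumptions for the example only.

import asyncio

from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine

from src.server.services.analytics_service import get_analytics_service

# Assumed database location (mirrors the default path in backup_service.py).
engine = create_async_engine("sqlite+aiosqlite:///data/aniworld.db")
SessionLocal = async_sessionmaker(engine, expire_on_commit=False)


async def main() -> None:
    service = get_analytics_service()
    async with SessionLocal() as db:
        report = await service.generate_summary_report(db)
    print(report["download_stats"])


if __name__ == "__main__":
    asyncio.run(main())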
src/server/services/backup_service.py (new file, 432 lines)
@@ -0,0 +1,432 @@
"""Backup and restore service for configuration and data management."""

import json
import logging
import os
import shutil
import tarfile
from dataclasses import dataclass
from datetime import datetime
from pathlib import Path
from typing import Any, Dict, List, Optional

logger = logging.getLogger(__name__)


@dataclass
class BackupInfo:
    """Information about a backup."""

    name: str
    timestamp: datetime
    size_bytes: int
    backup_type: str  # 'config', 'data', 'full'
    description: Optional[str] = None


class BackupService:
    """Service for managing backups and restores."""

    def __init__(
        self,
        backup_dir: str = "data/backups",
        config_dir: str = "data",
        database_path: str = "data/aniworld.db",
    ):
        """Initialize backup service.

        Args:
            backup_dir: Directory to store backups.
            config_dir: Directory containing configuration files.
            database_path: Path to the database file.
        """
        self.backup_dir = Path(backup_dir)
        self.config_dir = Path(config_dir)
        self.database_path = Path(database_path)

        # Create backup directory if it doesn't exist
        self.backup_dir.mkdir(parents=True, exist_ok=True)

    def backup_configuration(
        self, description: str = ""
    ) -> Optional[BackupInfo]:
        """Create a configuration backup.

        Args:
            description: Optional description for the backup.

        Returns:
            BackupInfo: Information about the created backup.
        """
        try:
            timestamp = datetime.now()
            backup_name = (
                f"config_{timestamp.strftime('%Y%m%d_%H%M%S')}.tar.gz"
            )
            backup_path = self.backup_dir / backup_name

            with tarfile.open(backup_path, "w:gz") as tar:
                # Add configuration files
                config_files = [
                    self.config_dir / "config.json",
                ]

                for config_file in config_files:
                    if config_file.exists():
                        tar.add(config_file, arcname=config_file.name)

            size_bytes = backup_path.stat().st_size

            info = BackupInfo(
                name=backup_name,
                timestamp=timestamp,
                size_bytes=size_bytes,
                backup_type="config",
                description=description,
            )

            logger.info(f"Configuration backup created: {backup_name}")
            return info
        except Exception as e:
            logger.error(f"Failed to create configuration backup: {e}")
            return None

    def backup_database(
        self, description: str = ""
    ) -> Optional[BackupInfo]:
        """Create a database backup.

        Args:
            description: Optional description for the backup.

        Returns:
            BackupInfo: Information about the created backup.
        """
        try:
            if not self.database_path.exists():
                logger.warning(
                    f"Database file not found: {self.database_path}"
                )
                return None

            timestamp = datetime.now()
            backup_name = (
                f"database_{timestamp.strftime('%Y%m%d_%H%M%S')}.tar.gz"
            )
            backup_path = self.backup_dir / backup_name

            with tarfile.open(backup_path, "w:gz") as tar:
                tar.add(self.database_path, arcname=self.database_path.name)

            size_bytes = backup_path.stat().st_size

            info = BackupInfo(
                name=backup_name,
                timestamp=timestamp,
                size_bytes=size_bytes,
                backup_type="data",
                description=description,
            )

            logger.info(f"Database backup created: {backup_name}")
            return info
        except Exception as e:
            logger.error(f"Failed to create database backup: {e}")
            return None

    def backup_full(
        self, description: str = ""
    ) -> Optional[BackupInfo]:
        """Create a full system backup.

        Args:
            description: Optional description for the backup.

        Returns:
            BackupInfo: Information about the created backup.
        """
        try:
            timestamp = datetime.now()
            backup_name = f"full_{timestamp.strftime('%Y%m%d_%H%M%S')}.tar.gz"
            backup_path = self.backup_dir / backup_name

            with tarfile.open(backup_path, "w:gz") as tar:
                # Add configuration
                config_file = self.config_dir / "config.json"
                if config_file.exists():
                    tar.add(config_file, arcname=config_file.name)

                # Add database
                if self.database_path.exists():
                    tar.add(
                        self.database_path,
                        arcname=self.database_path.name,
                    )

                # Add download queue
                queue_file = self.config_dir / "download_queue.json"
                if queue_file.exists():
                    tar.add(queue_file, arcname=queue_file.name)

            size_bytes = backup_path.stat().st_size

            info = BackupInfo(
                name=backup_name,
                timestamp=timestamp,
                size_bytes=size_bytes,
                backup_type="full",
                description=description,
            )

            logger.info(f"Full backup created: {backup_name}")
            return info
        except Exception as e:
            logger.error(f"Failed to create full backup: {e}")
            return None

    def restore_configuration(self, backup_name: str) -> bool:
        """Restore configuration from backup.

        Args:
            backup_name: Name of the backup to restore.

        Returns:
            bool: True if restore was successful.
        """
        try:
            backup_path = self.backup_dir / backup_name

            if not backup_path.exists():
                logger.error(f"Backup file not found: {backup_name}")
                return False

            # Extract to temporary directory
            temp_dir = self.backup_dir / "temp_restore"
            temp_dir.mkdir(exist_ok=True)

            with tarfile.open(backup_path, "r:gz") as tar:
                tar.extractall(temp_dir)

            # Copy configuration file back
            config_file = temp_dir / "config.json"
            if config_file.exists():
                shutil.copy(config_file, self.config_dir / "config.json")

            # Cleanup
            shutil.rmtree(temp_dir)

            logger.info(f"Configuration restored from: {backup_name}")
            return True
        except Exception as e:
            logger.error(f"Failed to restore configuration: {e}")
            return False

    def restore_database(self, backup_name: str) -> bool:
        """Restore database from backup.

        Args:
            backup_name: Name of the backup to restore.

        Returns:
            bool: True if restore was successful.
        """
        try:
            backup_path = self.backup_dir / backup_name

            if not backup_path.exists():
                logger.error(f"Backup file not found: {backup_name}")
                return False

            # Create backup of current database
            if self.database_path.exists():
                current_backup = (
                    self.database_path.parent
                    / f"{self.database_path.name}.backup"
                )
                shutil.copy(self.database_path, current_backup)
                logger.info(f"Current database backed up to: {current_backup}")

            # Extract to temporary directory
            temp_dir = self.backup_dir / "temp_restore"
            temp_dir.mkdir(exist_ok=True)

            with tarfile.open(backup_path, "r:gz") as tar:
                tar.extractall(temp_dir)

            # Copy database file back
            db_file = temp_dir / self.database_path.name
            if db_file.exists():
                shutil.copy(db_file, self.database_path)

            # Cleanup
            shutil.rmtree(temp_dir)

            logger.info(f"Database restored from: {backup_name}")
            return True
        except Exception as e:
            logger.error(f"Failed to restore database: {e}")
            return False

    def list_backups(
        self, backup_type: Optional[str] = None
    ) -> List[Dict[str, Any]]:
        """List available backups.

        Args:
            backup_type: Optional filter by backup type.

        Returns:
            list: List of backup information.
        """
        try:
            backups = []

            for backup_file in sorted(self.backup_dir.glob("*.tar.gz")):
                # Extract type from filename
                filename = backup_file.name
                file_type = filename.split("_")[0]

                if backup_type and file_type != backup_type:
                    continue

                # Extract timestamp
                timestamp_str = (
                    filename.split("_", 1)[1].replace(".tar.gz", "")
                )

                backups.append(
                    {
                        "name": filename,
                        "type": file_type,
                        "size_bytes": backup_file.stat().st_size,
                        "created": timestamp_str,
                    }
                )

            return sorted(backups, key=lambda x: x["created"], reverse=True)
        except Exception as e:
            logger.error(f"Failed to list backups: {e}")
            return []

    def delete_backup(self, backup_name: str) -> bool:
        """Delete a backup.

        Args:
            backup_name: Name of the backup to delete.

        Returns:
            bool: True if delete was successful.
        """
        try:
            backup_path = self.backup_dir / backup_name

            if not backup_path.exists():
                logger.warning(f"Backup not found: {backup_name}")
                return False

            backup_path.unlink()
            logger.info(f"Backup deleted: {backup_name}")
            return True
        except Exception as e:
            logger.error(f"Failed to delete backup: {e}")
            return False

    def cleanup_old_backups(
        self, max_backups: int = 10, backup_type: Optional[str] = None
    ) -> int:
        """Remove old backups, keeping only the most recent ones.

        Args:
            max_backups: Maximum number of backups to keep.
            backup_type: Optional filter by backup type.

        Returns:
            int: Number of backups deleted.
        """
        try:
            backups = self.list_backups(backup_type)

            if len(backups) <= max_backups:
                return 0

            deleted_count = 0
            for backup in backups[max_backups:]:
                if self.delete_backup(backup["name"]):
                    deleted_count += 1

            logger.info(f"Cleaned up {deleted_count} old backups")
            return deleted_count
        except Exception as e:
            logger.error(f"Failed to cleanup old backups: {e}")
            return 0

    def export_anime_data(
        self, output_file: str
    ) -> bool:
        """Export anime library data to JSON.

        Args:
            output_file: Path to export file.

        Returns:
            bool: True if export was successful.
        """
        try:
            # This would integrate with the anime service
            # to export anime library data
            export_data = {
                "timestamp": datetime.now().isoformat(),
                "anime_count": 0,
                "data": [],
            }

            with open(output_file, "w") as f:
                json.dump(export_data, f, indent=2)

            logger.info(f"Anime data exported to: {output_file}")
            return True
        except Exception as e:
            logger.error(f"Failed to export anime data: {e}")
            return False

    def import_anime_data(self, input_file: str) -> bool:
        """Import anime library data from JSON.

        Args:
            input_file: Path to import file.

        Returns:
            bool: True if import was successful.
        """
        try:
            if not os.path.exists(input_file):
                logger.error(f"Import file not found: {input_file}")
                return False

            with open(input_file, "r") as f:
                json.load(f)  # Load and validate JSON

            # This would integrate with the anime service
            # to import anime library data

            logger.info(f"Anime data imported from: {input_file}")
            return True
        except Exception as e:
            logger.error(f"Failed to import anime data: {e}")
            return False


# Global backup service instance
_backup_service: Optional[BackupService] = None


def get_backup_service() -> BackupService:
    """Get or create the global backup service instance.

    Returns:
        BackupService: The backup service instance.
    """
    global _backup_service
    if _backup_service is None:
        _backup_service = BackupService()
    return _backup_service
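A brief usage sketch for the backup service (illustrative, not part of the commit); it only calls the public methods defined above with their default paths.

from src.server.services.backup_service import get_backup_service

service = get_backup_service()

# Take a full backup before an upgrade, then prune older full backups.
info = service.backup_full(description="pre-upgrade snapshot")
if info is not None:
    print(f"Created {info.name} ({info.size_bytes} bytes)")

removed = service.cleanup_old_backups(max_backups=10, backup_type="full")
print(f"Removed {removed} old full backups")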
src/server/services/monitoring_service.py (new file, 324 lines)
@@ -0,0 +1,324 @@
"""Monitoring service for system resource tracking and metrics collection."""

import logging
from dataclasses import dataclass, field
from datetime import datetime, timedelta
from typing import Any, Dict, List, Optional

import psutil
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession

from src.server.database.models import DownloadQueueItem

logger = logging.getLogger(__name__)


@dataclass
class QueueMetrics:
    """Download queue statistics and metrics."""

    total_items: int = 0
    pending_items: int = 0
    downloading_items: int = 0
    completed_items: int = 0
    failed_items: int = 0
    total_size_bytes: int = 0
    downloaded_bytes: int = 0
    average_speed_mbps: float = 0.0
    estimated_time_remaining: Optional[timedelta] = None
    success_rate: float = 0.0


@dataclass
class SystemMetrics:
    """System resource metrics at a point in time."""

    timestamp: datetime
    cpu_percent: float
    memory_percent: float
    memory_available_mb: float
    disk_percent: float
    disk_free_mb: float
    uptime_seconds: float


@dataclass
class ErrorMetrics:
    """Error tracking and statistics."""

    total_errors: int = 0
    errors_24h: int = 0
    most_common_errors: Dict[str, int] = field(default_factory=dict)
    last_error_time: Optional[datetime] = None
    error_rate_per_hour: float = 0.0


class MonitoringService:
    """Service for monitoring system resources and application metrics."""

    def __init__(self):
        """Initialize monitoring service."""
        self._error_log: List[tuple[datetime, str]] = []
        self._performance_samples: List[SystemMetrics] = []
        self._max_samples = 1440  # Keep 24 hours of minute samples

    def get_system_metrics(self) -> SystemMetrics:
        """Get current system resource metrics.

        Returns:
            SystemMetrics: Current system metrics.
        """
        try:
            import time

            cpu_percent = psutil.cpu_percent(interval=1)
            memory_info = psutil.virtual_memory()
            disk_info = psutil.disk_usage("/")
            boot_time = psutil.boot_time()
            uptime_seconds = time.time() - boot_time

            metrics = SystemMetrics(
                timestamp=datetime.now(),
                cpu_percent=cpu_percent,
                memory_percent=memory_info.percent,
                memory_available_mb=memory_info.available / (1024 * 1024),
                disk_percent=disk_info.percent,
                disk_free_mb=disk_info.free / (1024 * 1024),
                uptime_seconds=uptime_seconds,
            )

            # Store sample
            self._performance_samples.append(metrics)
            if len(self._performance_samples) > self._max_samples:
                self._performance_samples.pop(0)

            return metrics
        except Exception as e:
            logger.error(f"Failed to get system metrics: {e}")
            raise

    async def get_queue_metrics(self, db: AsyncSession) -> QueueMetrics:
        """Get download queue metrics.

        Args:
            db: Database session.

        Returns:
            QueueMetrics: Queue statistics and progress.
        """
        try:
            # Get all queue items
            result = await db.execute(select(DownloadQueueItem))
            items = result.scalars().all()

            if not items:
                return QueueMetrics()

            # Calculate metrics
            total_items = len(items)
            pending_items = sum(1 for i in items if i.status == "PENDING")
            downloading_items = sum(
                1 for i in items if i.status == "DOWNLOADING"
            )
            completed_items = sum(1 for i in items if i.status == "COMPLETED")
            failed_items = sum(1 for i in items if i.status == "FAILED")

            total_size_bytes = sum(
                (i.total_bytes or 0) for i in items
            )
            downloaded_bytes = sum(
                (i.downloaded_bytes or 0) for i in items
            )

            # Calculate average speed from active downloads
            speeds = [
                i.download_speed for i in items
                if i.status == "DOWNLOADING" and i.download_speed
            ]
            average_speed_mbps = (
                sum(speeds) / len(speeds) / (1024 * 1024) if speeds else 0
            )

            # Calculate success rate
            success_rate = (
                (completed_items / total_items * 100) if total_items > 0 else 0
            )

            # Estimate time remaining (average_speed_mbps is in MB/s, so the
            # remaining byte count must be converted to megabytes first).
            estimated_time_remaining = None
            if average_speed_mbps > 0 and total_size_bytes > downloaded_bytes:
                remaining_bytes = total_size_bytes - downloaded_bytes
                remaining_seconds = (
                    remaining_bytes / (average_speed_mbps * 1024 * 1024)
                )
                estimated_time_remaining = timedelta(seconds=remaining_seconds)

            return QueueMetrics(
                total_items=total_items,
                pending_items=pending_items,
                downloading_items=downloading_items,
                completed_items=completed_items,
                failed_items=failed_items,
                total_size_bytes=total_size_bytes,
                downloaded_bytes=downloaded_bytes,
                average_speed_mbps=average_speed_mbps,
                estimated_time_remaining=estimated_time_remaining,
                success_rate=success_rate,
            )
        except Exception as e:
            logger.error(f"Failed to get queue metrics: {e}")
            raise

    def log_error(self, error_message: str) -> None:
        """Log an error for tracking purposes.

        Args:
            error_message: The error message to log.
        """
        self._error_log.append((datetime.now(), error_message))
        logger.debug(f"Error logged: {error_message}")

    def get_error_metrics(self) -> ErrorMetrics:
        """Get error tracking metrics.

        Returns:
            ErrorMetrics: Error statistics and trends.
        """
        total_errors = len(self._error_log)

        # Get errors from last 24 hours
        cutoff_time = datetime.now() - timedelta(hours=24)
        recent_errors = [
            (time, msg) for time, msg in self._error_log
            if time >= cutoff_time
        ]
        errors_24h = len(recent_errors)

        # Count error types
        error_counts: Dict[str, int] = {}
        for _, msg in recent_errors:
            error_type = msg.split(":")[0]
            error_counts[error_type] = error_counts.get(error_type, 0) + 1

        # Sort by count
        most_common_errors = dict(
            sorted(error_counts.items(), key=lambda x: x[1], reverse=True)[:10]
        )

        # Get last error time
        last_error_time = (
            recent_errors[-1][0] if recent_errors else None
        )

        # Calculate error rate per hour
        error_rate_per_hour = (
            errors_24h / 24 if errors_24h > 0 else 0
        )

        return ErrorMetrics(
            total_errors=total_errors,
            errors_24h=errors_24h,
            most_common_errors=most_common_errors,
            last_error_time=last_error_time,
            error_rate_per_hour=error_rate_per_hour,
        )

    def get_performance_summary(self) -> Dict[str, Any]:
        """Get performance summary from collected samples.

        Returns:
            dict: Performance statistics.
        """
        if not self._performance_samples:
            return {}

        cpu_values = [m.cpu_percent for m in self._performance_samples]
        memory_values = [m.memory_percent for m in self._performance_samples]
        disk_values = [m.disk_percent for m in self._performance_samples]

        return {
            "cpu": {
                "current": cpu_values[-1],
                "average": sum(cpu_values) / len(cpu_values),
                "max": max(cpu_values),
                "min": min(cpu_values),
            },
            "memory": {
                "current": memory_values[-1],
                "average": sum(memory_values) / len(memory_values),
                "max": max(memory_values),
                "min": min(memory_values),
            },
            "disk": {
                "current": disk_values[-1],
                "average": sum(disk_values) / len(disk_values),
                "max": max(disk_values),
                "min": min(disk_values),
            },
            "sample_count": len(self._performance_samples),
        }

    async def get_comprehensive_status(
        self, db: AsyncSession
    ) -> Dict[str, Any]:
        """Get comprehensive system status summary.

        Args:
            db: Database session.

        Returns:
            dict: Complete system status.
        """
        try:
            system_metrics = self.get_system_metrics()
            queue_metrics = await self.get_queue_metrics(db)
            error_metrics = self.get_error_metrics()
            performance = self.get_performance_summary()

            return {
                "timestamp": datetime.now().isoformat(),
                "system": {
                    "cpu_percent": system_metrics.cpu_percent,
                    "memory_percent": system_metrics.memory_percent,
                    "disk_percent": system_metrics.disk_percent,
                    "uptime_seconds": system_metrics.uptime_seconds,
                },
                "queue": {
                    "total_items": queue_metrics.total_items,
                    "pending": queue_metrics.pending_items,
                    "downloading": queue_metrics.downloading_items,
                    "completed": queue_metrics.completed_items,
                    "failed": queue_metrics.failed_items,
                    "success_rate": round(queue_metrics.success_rate, 2),
                    "average_speed_mbps": round(
                        queue_metrics.average_speed_mbps, 2
                    ),
                },
                "errors": {
                    "total": error_metrics.total_errors,
                    "last_24h": error_metrics.errors_24h,
                    "rate_per_hour": round(
                        error_metrics.error_rate_per_hour, 2
                    ),
                    "most_common": error_metrics.most_common_errors,
                },
                "performance": performance,
            }
        except Exception as e:
            logger.error(f"Failed to get comprehensive status: {e}")
            raise


# Global monitoring service instance
_monitoring_service: Optional[MonitoringService] = None


def get_monitoring_service() -> MonitoringService:
    """Get or create the global monitoring service instance.

    Returns:
        MonitoringService: The monitoring service instance.
    """
    global _monitoring_service
    if _monitoring_service is None:
        _monitoring_service = MonitoringService()
    return _monitoring_service
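A short illustrative sketch (not part of the commit) of how the monitoring singleton could be polled from a background task. The one-minute cadence matches the `_max_samples` comment above; the task wiring itself is an assumption.

import asyncio

from src.server.services.monitoring_service import get_monitoring_service


async def sample_system_metrics() -> None:
    monitoring = get_monitoring_service()
    while True:
        # psutil's cpu_percent(interval=1) blocks for a second, so run the
        # sampling call in a worker thread to keep the event loop responsive.
        metrics = await asyncio.to_thread(monitoring.get_system_metrics)
        if metrics.cpu_percent > 90:
            monitoring.log_error(f"HighCPU: cpu at {metrics.cpu_percent:.1f}%")
        await asyncio.sleep(60)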