This commit is contained in:
2025-10-05 21:56:33 +02:00
parent d30aa7cfea
commit fe2df1514c
77 changed files with 82 additions and 12002 deletions

View File

@@ -0,0 +1,6 @@
"""
Infrastructure package for the Aniworld server.
This package contains repository implementations, database connections,
caching, and other infrastructure concerns.
"""

View File

@@ -0,0 +1,916 @@
"""
Database & Storage Management for AniWorld App
This module provides database schema management, data migration,
backup/restore functionality, and storage optimization.
"""
import os
import sqlite3
import json
import shutil
import time
import hashlib
import logging
import threading
import zipfile
import uuid
from datetime import datetime, timedelta
from typing import Dict, List, Optional, Any, Tuple
from dataclasses import dataclass, field
from contextlib import contextmanager
import glob
from pathlib import Path
@dataclass
class AnimeMetadata:
    """Represents anime metadata stored in database.

    Mirrors one row of the ``anime_metadata`` table; list/dict fields are
    serialized to JSON text columns by the repository layer.
    """
    anime_id: str                      # Primary key of anime_metadata
    name: str                          # Display name
    folder: str                        # On-disk folder name (UNIQUE in DB)
    key: Optional[str] = None          # Provider/site key, if known
    description: Optional[str] = None
    genres: List[str] = field(default_factory=list)  # Stored as JSON array
    release_year: Optional[int] = None
    status: str = 'ongoing'  # ongoing, completed, cancelled
    total_episodes: Optional[int] = None
    poster_url: Optional[str] = None
    last_updated: datetime = field(default_factory=datetime.now)
    created_at: datetime = field(default_factory=datetime.now)
    custom_metadata: Dict[str, Any] = field(default_factory=dict)  # Stored as JSON object
@dataclass
class EpisodeMetadata:
    """Represents episode metadata stored in database.

    Mirrors one row of the ``episode_metadata`` table; the combination of
    (anime_id, season, episode, language) is UNIQUE in the schema.
    """
    episode_id: str                    # Primary key of episode_metadata
    anime_id: str                      # FK -> anime_metadata.anime_id
    season: int
    episode: int
    title: Optional[str] = None
    description: Optional[str] = None
    duration_seconds: Optional[int] = None
    file_path: Optional[str] = None        # Local path once downloaded
    file_size_bytes: Optional[int] = None
    download_date: Optional[datetime] = None
    last_watched: Optional[datetime] = None
    watch_count: int = 0
    is_downloaded: bool = False
    quality: Optional[str] = None
    language: str = 'German Dub'
@dataclass
class BackupInfo:
    """Represents backup metadata.

    Persisted next to each backup file as ``<file>.backup_info.json`` by
    BackupManager.
    """
    backup_id: str                     # e.g. "full_20250101_120000"
    backup_path: str                   # Absolute/relative path to backup file
    backup_type: str  # full, incremental, metadata_only
    created_at: datetime
    size_bytes: int
    description: Optional[str] = None
    tables_included: List[str] = field(default_factory=list)
    checksum: Optional[str] = None     # SHA-256 hex digest of the backup file
class DatabaseManager:
    """Manage the SQLite database: base schema, migrations, and queries.

    A fresh connection is opened per operation (see :meth:`get_connection`),
    so instances can be shared as long as SQLite's file locking suffices.
    Construction creates the database file (and its directory) and brings
    the schema up to the latest migration version.
    """

    def __init__(self, db_path: str = "./data/aniworld.db"):
        self.db_path = db_path
        self.db_dir = os.path.dirname(db_path)
        self.logger = logging.getLogger(__name__)
        # Currently unused by this class; kept for callers that serialize
        # higher-level operations around the manager.
        self.lock = threading.Lock()
        # Create database directory.  Guarded: os.makedirs("") raises when
        # db_path is a bare filename with no directory component.
        if self.db_dir:
            os.makedirs(self.db_dir, exist_ok=True)
        # Initialize database, then apply any pending migrations.
        self.initialize_database()
        self.run_migrations()

    @contextmanager
    def get_connection(self):
        """Yield a sqlite3 connection; roll back on error, always close."""
        conn = None
        try:
            conn = sqlite3.connect(self.db_path, timeout=30)
            conn.row_factory = sqlite3.Row  # Enable dict-like access
            yield conn
        except Exception as e:
            if conn:
                conn.rollback()
            self.logger.error(f"Database connection error: {e}")
            raise
        finally:
            if conn:
                conn.close()

    def initialize_database(self):
        """Initialize database with base schema (schema_version table)."""
        with self.get_connection() as conn:
            conn.execute("""
                CREATE TABLE IF NOT EXISTS schema_version (
                    version INTEGER PRIMARY KEY,
                    applied_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                    description TEXT
                )
            """)
            # Record version 0 exactly once (idempotent on restarts).
            conn.execute("""
                INSERT OR IGNORE INTO schema_version (version, description)
                VALUES (0, 'Initial schema')
            """)
            conn.commit()

    def get_current_version(self) -> int:
        """Get current database schema version (0 if none recorded)."""
        with self.get_connection() as conn:
            cursor = conn.execute("SELECT MAX(version) FROM schema_version")
            result = cursor.fetchone()
            return result[0] if result and result[0] is not None else 0

    def run_migrations(self):
        """Apply all migrations newer than the current version, in order.

        Versions are sorted explicitly so correctness never depends on the
        insertion order of the dict returned by :meth:`get_migrations`.
        Each migration and its version record share one connection/commit.
        """
        current_version = self.get_current_version()
        migrations = self.get_migrations()
        for version in sorted(migrations):
            if version <= current_version:
                continue
            migration = migrations[version]
            self.logger.info(f"Running migration to version {version}")
            try:
                with self.get_connection() as conn:
                    migration['up'](conn)
                    # Record migration in the same transaction.
                    conn.execute("""
                        INSERT INTO schema_version (version, description)
                        VALUES (?, ?)
                    """, (version, migration['description']))
                    conn.commit()
                self.logger.info(f"Migration to version {version} completed")
            except Exception as e:
                self.logger.error(f"Migration to version {version} failed: {e}")
                raise

    def get_migrations(self) -> Dict[int, Dict[str, Any]]:
        """Define database migrations: version -> {description, up}."""
        return {
            1: {
                'description': 'Create anime metadata table',
                'up': self._migration_001_anime_table
            },
            2: {
                'description': 'Create episode metadata table',
                'up': self._migration_002_episode_table
            },
            3: {
                'description': 'Create download history table',
                'up': self._migration_003_download_history
            },
            4: {
                'description': 'Create user preferences table',
                'up': self._migration_004_user_preferences
            },
            5: {
                'description': 'Create storage locations table',
                'up': self._migration_005_storage_locations
            },
            6: {
                'description': 'Add indexes for performance',
                'up': self._migration_006_indexes
            }
        }

    def _migration_001_anime_table(self, conn: sqlite3.Connection):
        """Create anime metadata table."""
        conn.execute("""
            CREATE TABLE anime_metadata (
                anime_id TEXT PRIMARY KEY,
                name TEXT NOT NULL,
                folder TEXT NOT NULL UNIQUE,
                key TEXT,
                description TEXT,
                genres TEXT, -- JSON array
                release_year INTEGER,
                status TEXT DEFAULT 'ongoing',
                total_episodes INTEGER,
                poster_url TEXT,
                last_updated TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                custom_metadata TEXT -- JSON object
            )
        """)

    def _migration_002_episode_table(self, conn: sqlite3.Connection):
        """Create episode metadata table."""
        conn.execute("""
            CREATE TABLE episode_metadata (
                episode_id TEXT PRIMARY KEY,
                anime_id TEXT NOT NULL,
                season INTEGER NOT NULL,
                episode INTEGER NOT NULL,
                title TEXT,
                description TEXT,
                duration_seconds INTEGER,
                file_path TEXT,
                file_size_bytes INTEGER,
                download_date TIMESTAMP,
                last_watched TIMESTAMP,
                watch_count INTEGER DEFAULT 0,
                is_downloaded BOOLEAN DEFAULT FALSE,
                quality TEXT,
                language TEXT DEFAULT 'German Dub',
                FOREIGN KEY (anime_id) REFERENCES anime_metadata(anime_id),
                UNIQUE(anime_id, season, episode, language)
            )
        """)

    def _migration_003_download_history(self, conn: sqlite3.Connection):
        """Create download history table."""
        conn.execute("""
            CREATE TABLE download_history (
                download_id TEXT PRIMARY KEY,
                anime_id TEXT NOT NULL,
                season INTEGER NOT NULL,
                episode INTEGER NOT NULL,
                language TEXT NOT NULL,
                download_started TIMESTAMP NOT NULL,
                download_completed TIMESTAMP,
                download_status TEXT NOT NULL, -- started, completed, failed, cancelled
                file_size_bytes INTEGER,
                download_speed_mbps REAL,
                error_message TEXT,
                retry_count INTEGER DEFAULT 0,
                FOREIGN KEY (anime_id) REFERENCES anime_metadata(anime_id)
            )
        """)

    def _migration_004_user_preferences(self, conn: sqlite3.Connection):
        """Create user preferences table."""
        conn.execute("""
            CREATE TABLE user_preferences (
                key TEXT PRIMARY KEY,
                value TEXT NOT NULL, -- JSON value
                category TEXT NOT NULL,
                description TEXT,
                updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
            )
        """)

    def _migration_005_storage_locations(self, conn: sqlite3.Connection):
        """Create storage locations table."""
        conn.execute("""
            CREATE TABLE storage_locations (
                location_id TEXT PRIMARY KEY,
                anime_id TEXT,
                path TEXT NOT NULL,
                location_type TEXT NOT NULL, -- primary, backup, cache
                is_active BOOLEAN DEFAULT TRUE,
                free_space_bytes INTEGER,
                total_space_bytes INTEGER,
                last_checked TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                FOREIGN KEY (anime_id) REFERENCES anime_metadata(anime_id)
            )
        """)

    def _migration_006_indexes(self, conn: sqlite3.Connection):
        """Add indexes for performance (idempotent: 'already exists' ignored)."""
        indexes = [
            "CREATE INDEX idx_anime_name ON anime_metadata(name)",
            "CREATE INDEX idx_anime_folder ON anime_metadata(folder)",
            "CREATE INDEX idx_anime_status ON anime_metadata(status)",
            "CREATE INDEX idx_episode_anime_id ON episode_metadata(anime_id)",
            "CREATE INDEX idx_episode_season_episode ON episode_metadata(season, episode)",
            "CREATE INDEX idx_episode_downloaded ON episode_metadata(is_downloaded)",
            "CREATE INDEX idx_download_status ON download_history(download_status)",
            "CREATE INDEX idx_download_date ON download_history(download_started)",
            "CREATE INDEX idx_storage_active ON storage_locations(is_active)",
            "CREATE INDEX idx_storage_type ON storage_locations(location_type)"
        ]
        for index_sql in indexes:
            try:
                conn.execute(index_sql)
            except sqlite3.OperationalError as e:
                if "already exists" not in str(e):
                    raise

    def execute_query(self, query: str, params: tuple = ()) -> List[sqlite3.Row]:
        """Execute a SELECT query and return all result rows."""
        with self.get_connection() as conn:
            cursor = conn.execute(query, params)
            return cursor.fetchall()

    def execute_update(self, query: str, params: tuple = ()) -> int:
        """Execute an UPDATE/INSERT/DELETE, commit, and return affected rows."""
        with self.get_connection() as conn:
            cursor = conn.execute(query, params)
            conn.commit()
            return cursor.rowcount
class AnimeRepository:
    """Repository for anime data operations.

    Thin CRUD layer over the ``anime_metadata`` table; list/dict fields are
    (de)serialized to the JSON text columns.  All methods catch and log
    errors, returning False / None / [] instead of raising.
    """

    def __init__(self, db_manager: "DatabaseManager"):
        self.db = db_manager
        self.logger = logging.getLogger(__name__)

    def create_anime(self, metadata: "AnimeMetadata") -> bool:
        """Insert a new anime row; return True on success.

        last_updated/created_at use the SQL column defaults rather than the
        dataclass values.
        """
        try:
            query = """
                INSERT INTO anime_metadata (
                    anime_id, name, folder, key, description, genres,
                    release_year, status, total_episodes, poster_url,
                    custom_metadata
                ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            """
            params = (
                metadata.anime_id,
                metadata.name,
                metadata.folder,
                metadata.key,
                metadata.description,
                json.dumps(metadata.genres),
                metadata.release_year,
                metadata.status,
                metadata.total_episodes,
                metadata.poster_url,
                json.dumps(metadata.custom_metadata)
            )
            rows_affected = self.db.execute_update(query, params)
            return rows_affected > 0
        except Exception as e:
            self.logger.error(f"Failed to create anime {metadata.name}: {e}")
            return False

    def get_anime_by_folder(self, folder: str) -> Optional["AnimeMetadata"]:
        """Get anime by folder name; None if absent or on error."""
        try:
            query = """
                SELECT * FROM anime_metadata WHERE folder = ?
            """
            results = self.db.execute_query(query, (folder,))
            if results:
                return self._row_to_anime_metadata(results[0])
            return None
        except Exception as e:
            self.logger.error(f"Failed to get anime by folder {folder}: {e}")
            return None

    def get_all_anime(self, status_filter: Optional[str] = None) -> List["AnimeMetadata"]:
        """Get all anime ordered by name, optionally filtered by status."""
        try:
            if status_filter:
                query = "SELECT * FROM anime_metadata WHERE status = ? ORDER BY name"
                params = (status_filter,)
            else:
                query = "SELECT * FROM anime_metadata ORDER BY name"
                params = ()
            results = self.db.execute_query(query, params)
            return [self._row_to_anime_metadata(row) for row in results]
        except Exception as e:
            self.logger.error(f"Failed to get all anime: {e}")
            return []

    def update_anime(self, metadata: "AnimeMetadata") -> bool:
        """Update an existing anime row (keyed by anime_id)."""
        try:
            query = """
                UPDATE anime_metadata SET
                    name = ?, key = ?, description = ?, genres = ?,
                    release_year = ?, status = ?, total_episodes = ?,
                    poster_url = ?, last_updated = CURRENT_TIMESTAMP,
                    custom_metadata = ?
                WHERE anime_id = ?
            """
            params = (
                metadata.name,
                metadata.key,
                metadata.description,
                json.dumps(metadata.genres),
                metadata.release_year,
                metadata.status,
                metadata.total_episodes,
                metadata.poster_url,
                json.dumps(metadata.custom_metadata),
                metadata.anime_id
            )
            rows_affected = self.db.execute_update(query, params)
            return rows_affected > 0
        except Exception as e:
            self.logger.error(f"Failed to update anime {metadata.anime_id}: {e}")
            return False

    def delete_anime(self, anime_id: str) -> bool:
        """Delete an anime and all dependent rows in a single transaction.

        Child tables are cleared first to satisfy the foreign-key
        constraints.  All four statements share one connection and one
        commit, so a failure part-way leaves the database unchanged
        (previously each DELETE auto-committed separately).
        """
        try:
            with self.db.get_connection() as conn:
                conn.execute("DELETE FROM episode_metadata WHERE anime_id = ?", (anime_id,))
                conn.execute("DELETE FROM download_history WHERE anime_id = ?", (anime_id,))
                conn.execute("DELETE FROM storage_locations WHERE anime_id = ?", (anime_id,))
                cursor = conn.execute("DELETE FROM anime_metadata WHERE anime_id = ?", (anime_id,))
                conn.commit()
                return cursor.rowcount > 0
        except Exception as e:
            self.logger.error(f"Failed to delete anime {anime_id}: {e}")
            return False

    def search_anime(self, search_term: str) -> List["AnimeMetadata"]:
        """Search anime by substring of name or description."""
        try:
            query = """
                SELECT * FROM anime_metadata
                WHERE name LIKE ? OR description LIKE ?
                ORDER BY name
            """
            search_pattern = f"%{search_term}%"
            results = self.db.execute_query(query, (search_pattern, search_pattern))
            return [self._row_to_anime_metadata(row) for row in results]
        except Exception as e:
            self.logger.error(f"Failed to search anime: {e}")
            return []

    def _row_to_anime_metadata(self, row: sqlite3.Row) -> "AnimeMetadata":
        """Convert a database row to an AnimeMetadata object."""
        return AnimeMetadata(
            anime_id=row['anime_id'],
            name=row['name'],
            folder=row['folder'],
            key=row['key'],
            description=row['description'],
            genres=json.loads(row['genres'] or '[]'),
            release_year=row['release_year'],
            status=row['status'],
            total_episodes=row['total_episodes'],
            poster_url=row['poster_url'],
            last_updated=datetime.fromisoformat(row['last_updated']) if row['last_updated'] else datetime.now(),
            created_at=datetime.fromisoformat(row['created_at']) if row['created_at'] else datetime.now(),
            custom_metadata=json.loads(row['custom_metadata'] or '{}')
        )
class BackupManager:
    """Manage database backups and restore operations.

    Backups are written into ``backup_dir``.  Each backup file gets a
    sidecar ``<file>.backup_info.json`` holding its BackupInfo metadata
    (type, checksum, size, creation time), which is how backups are later
    listed, verified, and located for restore.
    """

    def __init__(self, db_manager: DatabaseManager, backup_dir: str = "./backups"):
        self.db = db_manager
        self.backup_dir = backup_dir
        self.logger = logging.getLogger(__name__)
        # Create backup directory
        os.makedirs(backup_dir, exist_ok=True)

    def create_full_backup(self, description: Optional[str] = None) -> Optional[BackupInfo]:
        """Create a full database backup (byte copy of the DB file).

        Returns the BackupInfo on success, None on failure.  NOTE(review):
        the copy is taken without any database lock — presumably safe only
        while no write is in flight; confirm against callers.
        """
        try:
            backup_id = f"full_{datetime.now().strftime('%Y%m%d_%H%M%S')}"
            backup_filename = f"{backup_id}.db"
            backup_path = os.path.join(self.backup_dir, backup_filename)
            # Copy database file
            shutil.copy2(self.db.db_path, backup_path)
            # Calculate checksum
            checksum = self._calculate_file_checksum(backup_path)
            # Get file size
            size_bytes = os.path.getsize(backup_path)
            # Get table list
            with self.db.get_connection() as conn:
                cursor = conn.execute("SELECT name FROM sqlite_master WHERE type='table'")
                tables = [row[0] for row in cursor.fetchall()]
            backup_info = BackupInfo(
                backup_id=backup_id,
                backup_path=backup_path,
                backup_type='full',
                created_at=datetime.now(),
                size_bytes=size_bytes,
                description=description or f"Full backup created on {datetime.now().strftime('%Y-%m-%d %H:%M')}",
                tables_included=tables,
                checksum=checksum
            )
            # Save backup metadata
            self._save_backup_metadata(backup_info)
            self.logger.info(f"Full backup created: {backup_id}")
            return backup_info
        except Exception as e:
            self.logger.error(f"Failed to create full backup: {e}")
            return None

    def create_metadata_backup(self, description: Optional[str] = None) -> Optional[BackupInfo]:
        """Create a metadata-only backup (selected tables exported as JSON)."""
        try:
            backup_id = f"metadata_{datetime.now().strftime('%Y%m%d_%H%M%S')}"
            backup_filename = f"{backup_id}.json"
            backup_path = os.path.join(self.backup_dir, backup_filename)
            # Export metadata as JSON (default=str stringifies datetimes etc.)
            metadata = self._export_metadata()
            with open(backup_path, 'w', encoding='utf-8') as f:
                json.dump(metadata, f, indent=2, default=str)
            # Calculate checksum
            checksum = self._calculate_file_checksum(backup_path)
            # Get file size
            size_bytes = os.path.getsize(backup_path)
            backup_info = BackupInfo(
                backup_id=backup_id,
                backup_path=backup_path,
                backup_type='metadata_only',
                created_at=datetime.now(),
                size_bytes=size_bytes,
                description=description or f"Metadata backup created on {datetime.now().strftime('%Y-%m-%d %H:%M')}",
                tables_included=['anime_metadata', 'episode_metadata', 'user_preferences'],
                checksum=checksum
            )
            # Save backup metadata
            self._save_backup_metadata(backup_info)
            self.logger.info(f"Metadata backup created: {backup_id}")
            return backup_info
        except Exception as e:
            self.logger.error(f"Failed to create metadata backup: {e}")
            return None

    def restore_backup(self, backup_id: str) -> bool:
        """Restore from a backup identified by backup_id.

        Verifies the backup's checksum first, then takes a fresh full
        backup of the current database as a safety net before overwriting
        anything.
        """
        try:
            backup_info = self._load_backup_metadata(backup_id)
            if not backup_info:
                self.logger.error(f"Backup not found: {backup_id}")
                return False
            if not os.path.exists(backup_info.backup_path):
                self.logger.error(f"Backup file not found: {backup_info.backup_path}")
                return False
            # Verify backup integrity
            if not self._verify_backup_integrity(backup_info):
                self.logger.error(f"Backup integrity check failed: {backup_id}")
                return False
            # Create a backup of current database before restore.
            # Return value intentionally unused — this is purely a safety net.
            current_backup = self.create_full_backup(f"Pre-restore backup before restoring {backup_id}")
            if backup_info.backup_type == 'full':
                # Replace database file
                shutil.copy2(backup_info.backup_path, self.db.db_path)
            elif backup_info.backup_type == 'metadata_only':
                # Restore metadata from JSON
                with open(backup_info.backup_path, 'r', encoding='utf-8') as f:
                    metadata = json.load(f)
                self._import_metadata(metadata)
            self.logger.info(f"Backup restored successfully: {backup_id}")
            return True
        except Exception as e:
            self.logger.error(f"Failed to restore backup {backup_id}: {e}")
            return False

    def list_backups(self) -> List[BackupInfo]:
        """List all available backups, newest first (from sidecar files)."""
        backups = []
        try:
            # Look for backup metadata files
            metadata_pattern = os.path.join(self.backup_dir, "*.backup_info.json")
            for metadata_file in glob.glob(metadata_pattern):
                try:
                    with open(metadata_file, 'r') as f:
                        backup_data = json.load(f)
                    backup_info = BackupInfo(
                        backup_id=backup_data['backup_id'],
                        backup_path=backup_data['backup_path'],
                        backup_type=backup_data['backup_type'],
                        created_at=datetime.fromisoformat(backup_data['created_at']),
                        size_bytes=backup_data['size_bytes'],
                        description=backup_data.get('description'),
                        tables_included=backup_data.get('tables_included', []),
                        checksum=backup_data.get('checksum')
                    )
                    backups.append(backup_info)
                except Exception as e:
                    # One unreadable sidecar must not hide the others.
                    self.logger.warning(f"Failed to load backup metadata from {metadata_file}: {e}")
            # Sort by creation date (newest first)
            backups.sort(key=lambda b: b.created_at, reverse=True)
        except Exception as e:
            self.logger.error(f"Failed to list backups: {e}")
        return backups

    def cleanup_old_backups(self, keep_days: int = 30, keep_count: int = 10):
        """Delete backups older than keep_days, always retaining keep_count.

        The newest keep_count backups survive regardless of age; beyond
        that, anything older than the cutoff is removed together with its
        metadata sidecar.
        """
        try:
            backups = self.list_backups()
            cutoff_date = datetime.now() - timedelta(days=keep_days)
            # Keep at least keep_count backups regardless of age
            backups_to_delete = []
            for i, backup in enumerate(backups):
                if i >= keep_count and backup.created_at < cutoff_date:
                    backups_to_delete.append(backup)
            for backup in backups_to_delete:
                try:
                    # Remove backup file
                    if os.path.exists(backup.backup_path):
                        os.remove(backup.backup_path)
                    # Remove metadata file
                    metadata_file = f"{backup.backup_path}.backup_info.json"
                    if os.path.exists(metadata_file):
                        os.remove(metadata_file)
                    self.logger.info(f"Removed old backup: {backup.backup_id}")
                except Exception as e:
                    self.logger.warning(f"Failed to remove backup {backup.backup_id}: {e}")
            if backups_to_delete:
                self.logger.info(f"Cleaned up {len(backups_to_delete)} old backups")
        except Exception as e:
            self.logger.error(f"Failed to cleanup old backups: {e}")

    def _export_metadata(self) -> Dict[str, Any]:
        """Export selected tables to a plain dict (rows as dicts)."""
        metadata = {
            'export_date': datetime.now().isoformat(),
            'schema_version': self.db.get_current_version(),
            'tables': {}
        }
        # Export specific tables
        tables_to_export = ['anime_metadata', 'episode_metadata', 'user_preferences', 'storage_locations']
        with self.db.get_connection() as conn:
            for table in tables_to_export:
                try:
                    cursor = conn.execute(f"SELECT * FROM {table}")
                    rows = cursor.fetchall()
                    # Convert rows to dictionaries
                    metadata['tables'][table] = [dict(row) for row in rows]
                except Exception as e:
                    self.logger.warning(f"Failed to export table {table}: {e}")
        return metadata

    def _import_metadata(self, metadata: Dict[str, Any]):
        """Import exported metadata back into the database.

        DESTRUCTIVE: each imported table is fully DELETEd before the rows
        are re-inserted.  Column names come from the first exported row.
        """
        with self.db.get_connection() as conn:
            for table_name, rows in metadata.get('tables', {}).items():
                if not rows:
                    continue
                try:
                    # Clear existing data (be careful!)
                    conn.execute(f"DELETE FROM {table_name}")
                    # Insert new data
                    if rows:
                        columns = list(rows[0].keys())
                        placeholders = ','.join(['?' for _ in columns])
                        insert_sql = f"INSERT INTO {table_name} ({','.join(columns)}) VALUES ({placeholders})"
                        for row in rows:
                            values = [row[col] for col in columns]
                            conn.execute(insert_sql, values)
                    conn.commit()
                    self.logger.info(f"Imported {len(rows)} rows to {table_name}")
                except Exception as e:
                    self.logger.error(f"Failed to import table {table_name}: {e}")
                    conn.rollback()
                    raise

    def _calculate_file_checksum(self, file_path: str) -> str:
        """Calculate SHA256 checksum of file (streamed in 4 KiB chunks)."""
        hash_sha256 = hashlib.sha256()
        with open(file_path, 'rb') as f:
            for chunk in iter(lambda: f.read(4096), b""):
                hash_sha256.update(chunk)
        return hash_sha256.hexdigest()

    def _verify_backup_integrity(self, backup_info: BackupInfo) -> bool:
        """Verify backup file integrity using its recorded checksum."""
        if not backup_info.checksum:
            return True  # No checksum to verify
        current_checksum = self._calculate_file_checksum(backup_info.backup_path)
        return current_checksum == backup_info.checksum

    def _save_backup_metadata(self, backup_info: BackupInfo):
        """Write the BackupInfo sidecar (``<backup file>.backup_info.json``)."""
        metadata_file = f"{backup_info.backup_path}.backup_info.json"
        metadata = {
            'backup_id': backup_info.backup_id,
            'backup_path': backup_info.backup_path,
            'backup_type': backup_info.backup_type,
            'created_at': backup_info.created_at.isoformat(),
            'size_bytes': backup_info.size_bytes,
            'description': backup_info.description,
            'tables_included': backup_info.tables_included,
            'checksum': backup_info.checksum
        }
        with open(metadata_file, 'w') as f:
            json.dump(metadata, f, indent=2)

    def _load_backup_metadata(self, backup_id: str) -> Optional[BackupInfo]:
        """Load a BackupInfo sidecar by backup_id; None if absent/unreadable.

        The glob matches the file extension between the id and the sidecar
        suffix (e.g. ``full_X.db.backup_info.json``).
        """
        # Look for metadata file
        metadata_pattern = os.path.join(self.backup_dir, f"{backup_id}.*.backup_info.json")
        metadata_files = glob.glob(metadata_pattern)
        if not metadata_files:
            return None
        try:
            with open(metadata_files[0], 'r') as f:
                backup_data = json.load(f)
            return BackupInfo(
                backup_id=backup_data['backup_id'],
                backup_path=backup_data['backup_path'],
                backup_type=backup_data['backup_type'],
                created_at=datetime.fromisoformat(backup_data['created_at']),
                size_bytes=backup_data['size_bytes'],
                description=backup_data.get('description'),
                tables_included=backup_data.get('tables_included', []),
                checksum=backup_data.get('checksum')
            )
        except Exception as e:
            self.logger.error(f"Failed to load backup metadata for {backup_id}: {e}")
            return None
class StorageManager:
    """Manage storage locations and usage monitoring.

    Thin layer over the ``storage_locations`` table: registering paths,
    refreshing disk-usage statistics, and aggregating a usage summary.
    """

    def __init__(self, db_manager: "DatabaseManager"):
        self.db = db_manager
        self.logger = logging.getLogger(__name__)

    def add_storage_location(self, path: str, location_type: str = 'primary',
                             anime_id: Optional[str] = None) -> str:
        """Register a new storage location and return its generated id."""
        location_id = str(uuid.uuid4())
        query = """
            INSERT INTO storage_locations
            (location_id, anime_id, path, location_type, is_active)
            VALUES (?, ?, ?, ?, ?)
        """
        self.db.execute_update(query, (location_id, anime_id, path, location_type, True))
        # Populate free/total space for the new location right away.
        self.update_storage_stats(location_id)
        return location_id

    def update_storage_stats(self, location_id: str):
        """Refresh free/total disk space for one location (best effort).

        Silently does nothing for unknown ids or nonexistent paths; errors
        are logged, never raised.
        """
        try:
            # Get location path
            query = "SELECT path FROM storage_locations WHERE location_id = ?"
            results = self.db.execute_query(query, (location_id,))
            if not results:
                return
            path = results[0]['path']
            if os.path.exists(path):
                # Get disk usage and persist it.
                stat = shutil.disk_usage(path)
                update_query = """
                    UPDATE storage_locations
                    SET free_space_bytes = ?, total_space_bytes = ?, last_checked = CURRENT_TIMESTAMP
                    WHERE location_id = ?
                """
                self.db.execute_update(update_query, (stat.free, stat.total, location_id))
        except Exception as e:
            self.logger.error(f"Failed to update storage stats for {location_id}: {e}")

    def get_storage_summary(self) -> Dict[str, Any]:
        """Aggregate free/total space per location type (active locations)."""
        query = """
            SELECT
                location_type,
                COUNT(*) as location_count,
                SUM(free_space_bytes) as total_free,
                SUM(total_space_bytes) as total_space
            FROM storage_locations
            WHERE is_active = 1
            GROUP BY location_type
        """
        results = self.db.execute_query(query)
        summary = {}
        for row in results:
            # SUM() yields NULL when every summed value is NULL (stats never
            # collected), so coalesce to 0 before any arithmetic — the old
            # code only did this for the *_gb fields and raised TypeError in
            # usage_percent.
            total_free = row['total_free'] or 0
            total_space = row['total_space'] or 0
            summary[row['location_type']] = {
                'location_count': row['location_count'],
                'total_free_gb': total_free / (1024**3),
                'total_space_gb': total_space / (1024**3),
                'usage_percent': ((total_space - total_free) / total_space * 100) if total_space else 0
            }
        return summary
# Global singleton instances shared by the rest of the app.
# NOTE: constructing DatabaseManager here opens/creates the database file
# (and its ./data directory) as an import-time side effect.
database_manager = DatabaseManager()
anime_repository = AnimeRepository(database_manager)
backup_manager = BackupManager(database_manager)
storage_manager = StorageManager(database_manager)
def init_database_system():
    """Initialize the database system.

    No-op kept for API symmetry: all real initialization happens when the
    module-level DatabaseManager singleton is constructed at import time.
    """
    return None
def cleanup_database_system():
    """Clean up database resources.

    No-op: SQLite connections are opened and closed per operation, so there
    is nothing to tear down at shutdown.
    """
    return None
# Export main components — the public API of this module (classes, the
# module-level singletons, and the lifecycle helpers).
__all__ = [
    'DatabaseManager',
    'AnimeRepository',
    'BackupManager',
    'StorageManager',
    'AnimeMetadata',
    'EpisodeMetadata',
    'BackupInfo',
    'database_manager',
    'anime_repository',
    'backup_manager',
    'storage_manager',
    'init_database_system',
    'cleanup_database_system'
]

View File

@@ -0,0 +1,537 @@
"""
REST API & Integration Module for AniWorld App
This module provides comprehensive REST API endpoints for external integrations,
webhook support, API authentication, and export functionality.
"""
import json
import csv
import io
import uuid
import hmac
import hashlib
import time
from datetime import datetime, timedelta
from typing import Dict, List, Optional, Any, Callable
from functools import wraps
import logging
import requests
import threading
from dataclasses import dataclass, field
from flask import Blueprint, request, jsonify, make_response, current_app
from werkzeug.security import generate_password_hash, check_password_hash
from auth import require_auth, optional_auth
from error_handler import handle_api_errors, RetryableError, NonRetryableError
@dataclass
class APIKey:
    """Represents an API key for external integrations.

    Only the hash of the raw key is retained; the raw key is returned once
    at creation time and verified via the stored hash.
    """
    key_id: str                        # Internal identifier (UUID)
    name: str                          # Human-readable label
    key_hash: str                      # Password-style hash of the raw key
    permissions: List[str]
    rate_limit_per_hour: int = 1000
    created_at: datetime = field(default_factory=datetime.now)
    last_used: Optional[datetime] = None  # Updated on successful validation
    is_active: bool = True             # False once revoked
@dataclass
class WebhookEndpoint:
    """Represents a webhook endpoint configuration."""
    webhook_id: str                    # Internal identifier (UUID)
    name: str
    url: str                           # Target URL for event POSTs
    events: List[str]                  # Event types this endpoint subscribes to
    secret: Optional[str] = None       # If set, used to sign payloads (HMAC-SHA256)
    is_active: bool = True
    retry_attempts: int = 3            # Delivery attempts before giving up
    created_at: datetime = field(default_factory=datetime.now)
    last_triggered: Optional[datetime] = None  # Last successful delivery
class APIKeyManager:
    """Manage API keys for external integrations.

    Keys live only in memory; all access to the key table and the per-hour
    rate counters is serialized through a single lock.
    """

    def __init__(self):
        self.api_keys: Dict[str, "APIKey"] = {}
        # key_id -> {ISO hour string: request count for that hour}
        self.rate_limits: Dict[str, Dict[str, int]] = {}
        self.lock = threading.Lock()
        self.logger = logging.getLogger(__name__)

    def create_api_key(self, name: str, permissions: List[str], rate_limit: int = 1000) -> tuple:
        """Create and register a new key; return (raw_key, key_id).

        The raw key is only available from this return value — afterwards
        just its hash is stored.
        """
        new_id = str(uuid.uuid4())
        raw_key = f"aniworld_{uuid.uuid4().hex}"
        record = APIKey(
            key_id=new_id,
            name=name,
            key_hash=generate_password_hash(raw_key),
            permissions=permissions,
            rate_limit_per_hour=rate_limit,
        )
        with self.lock:
            self.api_keys[new_id] = record
            self.logger.info(f"Created API key: {name} ({new_id})")
        return raw_key, new_id

    def validate_api_key(self, raw_key: str) -> Optional["APIKey"]:
        """Return the active key matching raw_key (touching last_used), else None."""
        with self.lock:
            for candidate in self.api_keys.values():
                if not candidate.is_active:
                    continue
                if check_password_hash(candidate.key_hash, raw_key):
                    candidate.last_used = datetime.now()
                    return candidate
            return None

    def check_rate_limit(self, key_id: str) -> bool:
        """Count one request against key_id's hourly quota.

        Returns False for unknown keys or when the current hour's counter
        has reached the key's limit; otherwise increments the counter and
        returns True.  Counters older than 24 hours are pruned on each call.
        """
        hour_start = datetime.now().replace(minute=0, second=0, microsecond=0)
        bucket = hour_start.isoformat()
        with self.lock:
            record = self.api_keys.get(key_id)
            if record is None:
                return False
            counters = self.rate_limits.setdefault(key_id, {})
            used = counters.get(bucket, 0)
            if used >= record.rate_limit_per_hour:
                return False
            counters[bucket] = used + 1
            # Drop stale hourly buckets to bound memory use.
            cutoff = hour_start - timedelta(hours=24)
            stale = [h for h in counters if datetime.fromisoformat(h) < cutoff]
            for h in stale:
                del counters[h]
            return True

    def revoke_api_key(self, key_id: str) -> bool:
        """Deactivate a key; True if the key existed."""
        with self.lock:
            record = self.api_keys.get(key_id)
            if record is None:
                return False
            record.is_active = False
            self.logger.info(f"Revoked API key: {key_id}")
            return True

    def list_api_keys(self) -> List[Dict[str, Any]]:
        """Summaries of all keys; the key hash is never exposed."""
        with self.lock:
            summaries = []
            for record in self.api_keys.values():
                summaries.append({
                    'key_id': record.key_id,
                    'name': record.name,
                    'permissions': record.permissions,
                    'rate_limit_per_hour': record.rate_limit_per_hour,
                    'created_at': record.created_at.isoformat(),
                    'last_used': record.last_used.isoformat() if record.last_used else None,
                    'is_active': record.is_active,
                })
            return summaries
class WebhookManager:
"""Manage webhook endpoints and delivery."""
def __init__(self):
self.webhooks: Dict[str, WebhookEndpoint] = {}
self.delivery_queue = []
self.delivery_thread = None
self.running = False
self.lock = threading.Lock()
self.logger = logging.getLogger(__name__)
def start(self):
"""Start webhook delivery service."""
if self.running:
return
self.running = True
self.delivery_thread = threading.Thread(target=self._delivery_loop, daemon=True)
self.delivery_thread.start()
self.logger.info("Webhook delivery service started")
def stop(self):
"""Stop webhook delivery service."""
self.running = False
if self.delivery_thread:
self.delivery_thread.join(timeout=5)
self.logger.info("Webhook delivery service stopped")
def create_webhook(self, name: str, url: str, events: List[str], secret: Optional[str] = None) -> str:
"""Create a new webhook endpoint."""
webhook_id = str(uuid.uuid4())
webhook = WebhookEndpoint(
webhook_id=webhook_id,
name=name,
url=url,
events=events,
secret=secret
)
with self.lock:
self.webhooks[webhook_id] = webhook
self.logger.info(f"Created webhook: {name} ({webhook_id})")
return webhook_id
def delete_webhook(self, webhook_id: str) -> bool:
"""Delete a webhook endpoint."""
with self.lock:
if webhook_id in self.webhooks:
del self.webhooks[webhook_id]
self.logger.info(f"Deleted webhook: {webhook_id}")
return True
return False
def trigger_event(self, event_type: str, data: Dict[str, Any]):
"""Trigger webhook event for all subscribed endpoints."""
event_data = {
'event': event_type,
'timestamp': datetime.now().isoformat(),
'data': data
}
with self.lock:
for webhook in self.webhooks.values():
if webhook.is_active and event_type in webhook.events:
self.delivery_queue.append((webhook, event_data))
self.logger.debug(f"Triggered webhook event: {event_type}")
def _delivery_loop(self):
"""Main delivery loop for webhook events."""
while self.running:
try:
if self.delivery_queue:
with self.lock:
webhook, event_data = self.delivery_queue.pop(0)
self._deliver_webhook(webhook, event_data)
else:
time.sleep(1)
except Exception as e:
self.logger.error(f"Error in webhook delivery loop: {e}")
time.sleep(1)
def _deliver_webhook(self, webhook: WebhookEndpoint, event_data: Dict[str, Any]):
"""Deliver webhook event to endpoint."""
for attempt in range(webhook.retry_attempts):
try:
headers = {'Content-Type': 'application/json'}
# Add signature if secret is provided
if webhook.secret:
payload = json.dumps(event_data)
signature = hmac.new(
webhook.secret.encode(),
payload.encode(),
hashlib.sha256
).hexdigest()
headers['X-Webhook-Signature'] = f"sha256={signature}"
response = requests.post(
webhook.url,
json=event_data,
headers=headers,
timeout=30
)
if response.status_code < 400:
webhook.last_triggered = datetime.now()
self.logger.debug(f"Webhook delivered successfully: {webhook.webhook_id}")
break
else:
self.logger.warning(f"Webhook delivery failed (HTTP {response.status_code}): {webhook.webhook_id}")
except Exception as e:
self.logger.error(f"Webhook delivery error (attempt {attempt + 1}): {e}")
if attempt < webhook.retry_attempts - 1:
time.sleep(2 ** attempt) # Exponential backoff
def list_webhooks(self) -> List[Dict[str, Any]]:
    """Return a JSON-serializable summary of every registered endpoint."""
    def summarize(hook):
        # Datetimes are rendered as ISO strings; a never-fired hook has
        # last_triggered = None.
        last = hook.last_triggered
        return {
            'webhook_id': hook.webhook_id,
            'name': hook.name,
            'url': hook.url,
            'events': hook.events,
            'is_active': hook.is_active,
            'created_at': hook.created_at.isoformat(),
            'last_triggered': last.isoformat() if last else None,
        }
    with self.lock:
        return [summarize(hook) for hook in self.webhooks.values()]
class ExportManager:
    """Manage data export functionality (JSON / CSV anime lists, statistics)."""

    def __init__(self, series_app=None):
        # series_app is expected to expose .List.GetList() -> iterable of
        # series objects; None is allowed and yields empty exports.
        self.series_app = series_app
        self.logger = logging.getLogger(__name__)

    def export_anime_list_json(self, include_missing_only: bool = False) -> Dict[str, Any]:
        """Export the anime list as a JSON-serializable dict.

        Args:
            include_missing_only: When True, skip series with no missing
                episodes recorded.

        Raises:
            RetryableError: If the export fails for any reason.
        """
        try:
            if not self.series_app or not self.series_app.List:
                return {'anime_list': [], 'metadata': {'count': 0}}
            anime_list = []
            for serie in self.series_app.List.GetList():
                # Fix: use getattr here — not every series object defines
                # episodeDict, and the original dereferenced it directly in
                # this filter while guarding with hasattr further down,
                # so such objects crashed the whole export.
                episode_dict = getattr(serie, 'episodeDict', None)
                if include_missing_only and not episode_dict:
                    continue
                anime_data = {
                    'name': serie.name or serie.folder,
                    'folder': serie.folder,
                    'key': getattr(serie, 'key', None),
                    'missing_episodes': {}
                }
                if episode_dict:
                    for season, episodes in episode_dict.items():
                        if episodes:
                            anime_data['missing_episodes'][str(season)] = list(episodes)
                anime_list.append(anime_data)
            return {
                'anime_list': anime_list,
                'metadata': {
                    'count': len(anime_list),
                    'exported_at': datetime.now().isoformat(),
                    'include_missing_only': include_missing_only
                }
            }
        except Exception as e:
            self.logger.error(f"Failed to export anime list as JSON: {e}")
            raise RetryableError(f"JSON export failed: {e}")

    def export_anime_list_csv(self, include_missing_only: bool = False) -> str:
        """Export the anime list as CSV text, one row per missing episode.

        Series with no missing episodes get a single row with empty
        season/episode columns and Missing = 'No'.

        Raises:
            RetryableError: If the export fails for any reason.
        """
        try:
            output = io.StringIO()
            writer = csv.writer(output)
            writer.writerow(['Name', 'Folder', 'Key', 'Season', 'Episode', 'Missing'])
            if not self.series_app or not self.series_app.List:
                return output.getvalue()
            for serie in self.series_app.List.GetList():
                # Same fix as the JSON export: tolerate series objects
                # without an episodeDict attribute.
                episode_dict = getattr(serie, 'episodeDict', None)
                if include_missing_only and not episode_dict:
                    continue
                name = serie.name or serie.folder
                folder = serie.folder
                key = getattr(serie, 'key', '')
                if episode_dict:
                    for season, episodes in episode_dict.items():
                        for episode in episodes:
                            writer.writerow([name, folder, key, season, episode, 'Yes'])
                else:
                    writer.writerow([name, folder, key, '', '', 'No'])
            return output.getvalue()
        except Exception as e:
            self.logger.error(f"Failed to export anime list as CSV: {e}")
            raise RetryableError(f"CSV export failed: {e}")

    def export_download_statistics(self) -> Dict[str, Any]:
        """Export download statistics and metrics.

        Raises:
            RetryableError: If the statistics cannot be gathered.
        """
        try:
            # Imported lazily so this module loads even when the optional
            # performance_optimizer component is unavailable.
            from performance_optimizer import download_manager
            stats = download_manager.get_statistics()
            return {
                'download_statistics': stats,
                'metadata': {
                    'exported_at': datetime.now().isoformat()
                }
            }
        except Exception as e:
            self.logger.error(f"Failed to export download statistics: {e}")
            raise RetryableError(f"Statistics export failed: {e}")
class NotificationService:
    """External notification service integration (Discord, Telegram)."""

    def __init__(self):
        # Maps service name -> config dict carrying a 'type' discriminator
        # plus type-specific fields (webhook_url, bot_token, chat_id).
        self.services = {}
        self.logger = logging.getLogger(__name__)

    def register_discord_webhook(self, webhook_url: str, name: str = "discord"):
        """Register a Discord webhook under *name* for later notifications."""
        self.services[name] = {
            'type': 'discord',
            'webhook_url': webhook_url,
        }
        self.logger.info(f"Registered Discord webhook: {name}")

    def register_telegram_bot(self, bot_token: str, chat_id: str, name: str = "telegram"):
        """Register a Telegram bot/chat pair under *name*."""
        self.services[name] = {
            'type': 'telegram',
            'bot_token': bot_token,
            'chat_id': chat_id,
        }
        self.logger.info(f"Registered Telegram bot: {name}")

    def send_notification(self, message: str, title: str = None, service_name: str = None):
        """Send *message* to one named service, or to all registered ones.

        Unknown service names are silently skipped; delivery failures are
        logged and never propagate to the caller.
        """
        targets = [service_name] if service_name else list(self.services.keys())
        for name in targets:
            service = self.services.get(name)
            if service is None:
                continue
            try:
                kind = service['type']
                if kind == 'discord':
                    self._send_discord_notification(service, message, title)
                elif kind == 'telegram':
                    self._send_telegram_notification(service, message, title)
            except Exception as e:
                self.logger.error(f"Failed to send notification via {name}: {e}")

    def _send_discord_notification(self, service: Dict, message: str, title: str = None):
        """POST a green embed to the stored Discord webhook URL."""
        embed = {
            'title': title or 'AniWorld Notification',
            'description': message,
            'color': 0x00ff00,
            'timestamp': datetime.now().isoformat(),
        }
        response = requests.post(service['webhook_url'], json={'embeds': [embed]}, timeout=10)
        response.raise_for_status()

    def _send_telegram_notification(self, service: Dict, message: str, title: str = None):
        """Send a Markdown-formatted message via the Telegram bot API."""
        text = f"*{title}*\n\n{message}" if title else message
        url = f"https://api.telegram.org/bot{service['bot_token']}/sendMessage"
        response = requests.post(
            url,
            json={
                'chat_id': service['chat_id'],
                'text': text,
                'parse_mode': 'Markdown',
            },
            timeout=10,
        )
        response.raise_for_status()
# Global instances
# Module-level singletons: created once at import time and shared by the
# decorator below and the init/cleanup lifecycle helpers.
api_key_manager = APIKeyManager()
webhook_manager = WebhookManager()
export_manager = ExportManager()
notification_service = NotificationService()
def require_api_key(permissions: List[str] = None):
    """Decorator factory: reject requests lacking a valid Bearer API key.

    Checks, in order: Authorization header format (401), key validity
    (401), the per-key rate limit (429), and — when *permissions* is
    given — that the key grants every required permission (403).  On
    success the validated key is exposed as ``request.api_key`` before
    the wrapped view runs.
    """
    def decorator(f):
        @wraps(f)
        def wrapper(*args, **kwargs):
            auth_header = request.headers.get('Authorization', '')
            if not auth_header.startswith('Bearer '):
                return jsonify({
                    'status': 'error',
                    'message': 'Invalid authorization header format'
                }), 401
            # Everything after the 'Bearer ' prefix is the raw key.
            validated_key = api_key_manager.validate_api_key(auth_header[7:])
            if not validated_key:
                return jsonify({
                    'status': 'error',
                    'message': 'Invalid API key'
                }), 401
            if not api_key_manager.check_rate_limit(validated_key.key_id):
                return jsonify({
                    'status': 'error',
                    'message': 'Rate limit exceeded'
                }), 429
            if permissions:
                missing = set(permissions) - set(validated_key.permissions)
                if missing:
                    return jsonify({
                        'status': 'error',
                        'message': f'Missing permissions: {", ".join(missing)}'
                    }), 403
            # Make the validated key available to the wrapped view.
            request.api_key = validated_key
            return f(*args, **kwargs)
        return wrapper
    return decorator
def init_api_integrations():
    """Initialize API integration services.

    Starts the webhook manager's background delivery loop; call once at
    application startup, paired with cleanup_api_integrations() on shutdown.
    """
    webhook_manager.start()
def cleanup_api_integrations():
    """Clean up API integration services.

    Stops the webhook manager's background delivery loop; call once at
    application shutdown.
    """
    webhook_manager.stop()
# Export main components: the public classes, their module-level singleton
# instances, the Flask auth decorator, and the startup/shutdown helpers.
__all__ = [
    'APIKeyManager',
    'WebhookManager',
    'ExportManager',
    'NotificationService',
    'api_key_manager',
    'webhook_manager',
    'export_manager',
    'notification_service',
    'require_api_key',
    'init_api_integrations',
    'cleanup_api_integrations'
]

View File

@@ -0,0 +1,40 @@
import logging
# Lazily-initialized module-level handles; setupLogger() fills these in once
# so repeated calls do not attach duplicate handlers.
console_handler = None
error_logger = None
noKeyFound_logger = None
noGerFound_logger = None

# Single source of truth for the log line format (the original repeated this
# literal in basicConfig and in the console formatter).
LOG_FORMAT = "%(asctime)s - %(levelname)s - %(funcName)s - %(message)s"


def _make_file_logger(name, path):
    """Create logger *name* with an ERROR-level file handler at *path*.

    delay=True defers opening the file until the first record is emitted,
    so importing this module no longer creates (or fails on) empty
    ``../*.log`` files.  NOTE(review): as in the original, the file handler
    gets no formatter, so file lines are bare messages — confirm before
    adding one.
    """
    logger = logging.getLogger(name)
    handler = logging.FileHandler(path, delay=True)
    handler.setLevel(logging.ERROR)
    logger.addHandler(handler)
    return logger


def setupLogger():
    """Configure root/console logging plus the three error log files.

    Idempotent: the module-level globals guard against attaching duplicate
    handlers when called more than once.
    """
    global console_handler, error_logger, noKeyFound_logger, noGerFound_logger
    logging.basicConfig(level=logging.INFO, format=LOG_FORMAT)
    if console_handler is None:
        console_handler = logging.StreamHandler()
        console_handler.setLevel(logging.INFO)
        console_handler.setFormatter(logging.Formatter(LOG_FORMAT))
        logging.getLogger().addHandler(console_handler)
        # Quieten noisy third-party loggers down to INFO.
        logging.getLogger("urllib3.connectionpool").setLevel(logging.INFO)
        logging.getLogger('charset_normalizer').setLevel(logging.INFO)
        logging.getLogger().setLevel(logging.INFO)
    if error_logger is None:
        error_logger = _make_file_logger("ErrorLog", "../errors.log")
    if noKeyFound_logger is None:
        noKeyFound_logger = _make_file_logger("NoKeyFound", "../NoKeyFound.log")
    if noGerFound_logger is None:
        noGerFound_logger = _make_file_logger("noGerFound", "../noGerFound.log")


# Configure logging as a side effect of importing this module.
setupLogger()

View File

@@ -0,0 +1,6 @@
"""
Repository package for data access layer.
This package contains repository implementations following the Repository pattern
for clean separation of data access logic from business logic.
"""