instruction2

This commit is contained in:
2025-10-12 22:39:51 +02:00
parent e48cb29131
commit 7481a33c15
40 changed files with 639 additions and 14993 deletions

View File

@@ -1,6 +0,0 @@
"""
Infrastructure package for the Aniworld server.
This package contains repository implementations, database connections,
caching, and other infrastructure concerns.
"""

View File

@@ -1,916 +0,0 @@
"""
Database & Storage Management for AniWorld App
This module provides database schema management, data migration,
backup/restore functionality, and storage optimization.
"""
import os
import sqlite3
import json
import shutil
import time
import hashlib
import logging
import threading
import zipfile
import uuid
from datetime import datetime, timedelta
from typing import Dict, List, Optional, Any, Tuple
from dataclasses import dataclass, field
from contextlib import contextmanager
import glob
from pathlib import Path
@dataclass
class AnimeMetadata:
    """Represents anime metadata stored in database.

    Mirrors one row of the ``anime_metadata`` table; ``genres`` and
    ``custom_metadata`` are serialized to JSON text in the table.
    """
    anime_id: str                    # primary key of anime_metadata
    name: str                        # display name
    folder: str                      # on-disk folder name (UNIQUE in the table)
    key: Optional[str] = None        # provider key, if known
    description: Optional[str] = None
    genres: List[str] = field(default_factory=list)
    release_year: Optional[int] = None
    status: str = 'ongoing'  # ongoing, completed, cancelled
    total_episodes: Optional[int] = None
    poster_url: Optional[str] = None
    last_updated: datetime = field(default_factory=datetime.now)
    created_at: datetime = field(default_factory=datetime.now)
    custom_metadata: Dict[str, Any] = field(default_factory=dict)
@dataclass
class EpisodeMetadata:
    """Represents episode metadata stored in database.

    Mirrors one row of the ``episode_metadata`` table; the table enforces
    UNIQUE(anime_id, season, episode, language).
    """
    episode_id: str                  # primary key of episode_metadata
    anime_id: str                    # FK -> anime_metadata.anime_id
    season: int
    episode: int
    title: Optional[str] = None
    description: Optional[str] = None
    duration_seconds: Optional[int] = None
    file_path: Optional[str] = None          # set once the file exists on disk
    file_size_bytes: Optional[int] = None
    download_date: Optional[datetime] = None
    last_watched: Optional[datetime] = None
    watch_count: int = 0
    is_downloaded: bool = False
    quality: Optional[str] = None            # e.g. "720p"
    language: str = 'German Dub'
@dataclass
class BackupInfo:
    """Represents backup metadata.

    Persisted as a JSON sidecar file ``<backup_path>.backup_info.json``
    written by BackupManager.
    """
    backup_id: str                   # e.g. "full_20251012_223951"
    backup_path: str                 # path of the backup artifact itself
    backup_type: str  # full, incremental, metadata_only
    created_at: datetime
    size_bytes: int
    description: Optional[str] = None
    tables_included: List[str] = field(default_factory=list)
    checksum: Optional[str] = None   # SHA-256 of the backup file, if computed
class DatabaseManager:
    """Manage the SQLite database: schema creation, ordered migrations, and
    small query helpers.

    The current schema version is ``MAX(version)`` in the ``schema_version``
    table; :meth:`run_migrations` applies every migration with a higher
    version, in ascending version order.
    """

    def __init__(self, db_path: str = "./data/aniworld.db"):
        """Open/create the database at *db_path* and migrate it to the
        latest schema version."""
        self.db_path = db_path
        self.db_dir = os.path.dirname(db_path)
        self.logger = logging.getLogger(__name__)
        self.lock = threading.Lock()
        # Create the database directory, guarding against a bare filename:
        # dirname() is "" then, and os.makedirs("") raises FileNotFoundError.
        if self.db_dir:
            os.makedirs(self.db_dir, exist_ok=True)
        # Initialize database, then bring the schema up to date.
        self.initialize_database()
        self.run_migrations()

    @contextmanager
    def get_connection(self):
        """Yield a sqlite3 connection with Row factory enabled.

        Rolls back on any exception (then re-raises) and always closes the
        connection.
        """
        conn = None
        try:
            conn = sqlite3.connect(self.db_path, timeout=30)
            conn.row_factory = sqlite3.Row  # Enable dict-like access
            yield conn
        except Exception as e:
            if conn:
                conn.rollback()
            self.logger.error(f"Database connection error: {e}")
            raise
        finally:
            if conn:
                conn.close()

    def initialize_database(self):
        """Create the schema_version bookkeeping table and seed version 0."""
        with self.get_connection() as conn:
            conn.execute("""
                CREATE TABLE IF NOT EXISTS schema_version (
                    version INTEGER PRIMARY KEY,
                    applied_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                    description TEXT
                )
            """)
            # Insert initial version if not exists
            conn.execute("""
                INSERT OR IGNORE INTO schema_version (version, description)
                VALUES (0, 'Initial schema')
            """)
            conn.commit()

    def get_current_version(self) -> int:
        """Return the highest applied schema version (0 for a fresh DB)."""
        with self.get_connection() as conn:
            cursor = conn.execute("SELECT MAX(version) FROM schema_version")
            result = cursor.fetchone()
            return result[0] if result and result[0] is not None else 0

    def run_migrations(self):
        """Apply all pending migrations in ascending version order.

        Each migration and its schema_version record are executed on the
        same connection and committed together; a failure is logged and
        re-raised before later migrations run.
        """
        current_version = self.get_current_version()
        # sorted() makes the ordering explicit instead of relying on dict
        # insertion order.
        for version, migration in sorted(self.get_migrations().items()):
            if version <= current_version:
                continue
            self.logger.info(f"Running migration to version {version}")
            try:
                with self.get_connection() as conn:
                    migration['up'](conn)
                    # Record migration
                    conn.execute("""
                        INSERT INTO schema_version (version, description)
                        VALUES (?, ?)
                    """, (version, migration['description']))
                    conn.commit()
                self.logger.info(f"Migration to version {version} completed")
            except Exception as e:
                self.logger.error(f"Migration to version {version} failed: {e}")
                raise

    def get_migrations(self) -> Dict[int, Dict[str, Any]]:
        """Map schema version -> {'description': str, 'up': callable(conn)}."""
        return {
            1: {
                'description': 'Create anime metadata table',
                'up': self._migration_001_anime_table
            },
            2: {
                'description': 'Create episode metadata table',
                'up': self._migration_002_episode_table
            },
            3: {
                'description': 'Create download history table',
                'up': self._migration_003_download_history
            },
            4: {
                'description': 'Create user preferences table',
                'up': self._migration_004_user_preferences
            },
            5: {
                'description': 'Create storage locations table',
                'up': self._migration_005_storage_locations
            },
            6: {
                'description': 'Add indexes for performance',
                'up': self._migration_006_indexes
            }
        }

    def _migration_001_anime_table(self, conn: sqlite3.Connection):
        """Create anime metadata table."""
        conn.execute("""
            CREATE TABLE anime_metadata (
                anime_id TEXT PRIMARY KEY,
                name TEXT NOT NULL,
                folder TEXT NOT NULL UNIQUE,
                key TEXT,
                description TEXT,
                genres TEXT, -- JSON array
                release_year INTEGER,
                status TEXT DEFAULT 'ongoing',
                total_episodes INTEGER,
                poster_url TEXT,
                last_updated TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                custom_metadata TEXT -- JSON object
            )
        """)

    def _migration_002_episode_table(self, conn: sqlite3.Connection):
        """Create episode metadata table."""
        conn.execute("""
            CREATE TABLE episode_metadata (
                episode_id TEXT PRIMARY KEY,
                anime_id TEXT NOT NULL,
                season INTEGER NOT NULL,
                episode INTEGER NOT NULL,
                title TEXT,
                description TEXT,
                duration_seconds INTEGER,
                file_path TEXT,
                file_size_bytes INTEGER,
                download_date TIMESTAMP,
                last_watched TIMESTAMP,
                watch_count INTEGER DEFAULT 0,
                is_downloaded BOOLEAN DEFAULT FALSE,
                quality TEXT,
                language TEXT DEFAULT 'German Dub',
                FOREIGN KEY (anime_id) REFERENCES anime_metadata(anime_id),
                UNIQUE(anime_id, season, episode, language)
            )
        """)

    def _migration_003_download_history(self, conn: sqlite3.Connection):
        """Create download history table."""
        conn.execute("""
            CREATE TABLE download_history (
                download_id TEXT PRIMARY KEY,
                anime_id TEXT NOT NULL,
                season INTEGER NOT NULL,
                episode INTEGER NOT NULL,
                language TEXT NOT NULL,
                download_started TIMESTAMP NOT NULL,
                download_completed TIMESTAMP,
                download_status TEXT NOT NULL, -- started, completed, failed, cancelled
                file_size_bytes INTEGER,
                download_speed_mbps REAL,
                error_message TEXT,
                retry_count INTEGER DEFAULT 0,
                FOREIGN KEY (anime_id) REFERENCES anime_metadata(anime_id)
            )
        """)

    def _migration_004_user_preferences(self, conn: sqlite3.Connection):
        """Create user preferences table."""
        conn.execute("""
            CREATE TABLE user_preferences (
                key TEXT PRIMARY KEY,
                value TEXT NOT NULL, -- JSON value
                category TEXT NOT NULL,
                description TEXT,
                updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
            )
        """)

    def _migration_005_storage_locations(self, conn: sqlite3.Connection):
        """Create storage locations table."""
        conn.execute("""
            CREATE TABLE storage_locations (
                location_id TEXT PRIMARY KEY,
                anime_id TEXT,
                path TEXT NOT NULL,
                location_type TEXT NOT NULL, -- primary, backup, cache
                is_active BOOLEAN DEFAULT TRUE,
                free_space_bytes INTEGER,
                total_space_bytes INTEGER,
                last_checked TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                FOREIGN KEY (anime_id) REFERENCES anime_metadata(anime_id)
            )
        """)

    def _migration_006_indexes(self, conn: sqlite3.Connection):
        """Add indexes for performance.

        ``IF NOT EXISTS`` makes this idempotent; the previous version
        matched on the error-message text ("already exists"), which is
        fragile across SQLite versions/locales.
        """
        indexes = [
            "CREATE INDEX IF NOT EXISTS idx_anime_name ON anime_metadata(name)",
            "CREATE INDEX IF NOT EXISTS idx_anime_folder ON anime_metadata(folder)",
            "CREATE INDEX IF NOT EXISTS idx_anime_status ON anime_metadata(status)",
            "CREATE INDEX IF NOT EXISTS idx_episode_anime_id ON episode_metadata(anime_id)",
            "CREATE INDEX IF NOT EXISTS idx_episode_season_episode ON episode_metadata(season, episode)",
            "CREATE INDEX IF NOT EXISTS idx_episode_downloaded ON episode_metadata(is_downloaded)",
            "CREATE INDEX IF NOT EXISTS idx_download_status ON download_history(download_status)",
            "CREATE INDEX IF NOT EXISTS idx_download_date ON download_history(download_started)",
            "CREATE INDEX IF NOT EXISTS idx_storage_active ON storage_locations(is_active)",
            "CREATE INDEX IF NOT EXISTS idx_storage_type ON storage_locations(location_type)"
        ]
        for index_sql in indexes:
            conn.execute(index_sql)

    def execute_query(self, query: str, params: tuple = ()) -> List[sqlite3.Row]:
        """Execute a SELECT query and return all resulting rows."""
        with self.get_connection() as conn:
            cursor = conn.execute(query, params)
            return cursor.fetchall()

    def execute_update(self, query: str, params: tuple = ()) -> int:
        """Execute an UPDATE/INSERT/DELETE, commit, and return the number of
        affected rows."""
        with self.get_connection() as conn:
            cursor = conn.execute(query, params)
            conn.commit()
            return cursor.rowcount
class AnimeRepository:
    """Repository for anime data operations on the ``anime_metadata`` table.

    All methods are best-effort: failures are logged and reported through
    the return value (False / None / []) rather than raised.
    """

    def __init__(self, db_manager: DatabaseManager):
        self.db = db_manager
        self.logger = logging.getLogger(__name__)

    def create_anime(self, metadata: AnimeMetadata) -> bool:
        """Insert a new anime record; return True on success."""
        try:
            query = """
                INSERT INTO anime_metadata (
                    anime_id, name, folder, key, description, genres,
                    release_year, status, total_episodes, poster_url,
                    custom_metadata
                ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            """
            params = (
                metadata.anime_id,
                metadata.name,
                metadata.folder,
                metadata.key,
                metadata.description,
                json.dumps(metadata.genres),          # stored as JSON text
                metadata.release_year,
                metadata.status,
                metadata.total_episodes,
                metadata.poster_url,
                json.dumps(metadata.custom_metadata)  # stored as JSON text
            )
            rows_affected = self.db.execute_update(query, params)
            return rows_affected > 0
        except Exception as e:
            self.logger.error(f"Failed to create anime {metadata.name}: {e}")
            return False

    def get_anime_by_folder(self, folder: str) -> Optional[AnimeMetadata]:
        """Look up an anime by its (unique) on-disk folder name."""
        try:
            query = """
                SELECT * FROM anime_metadata WHERE folder = ?
            """
            results = self.db.execute_query(query, (folder,))
            if results:
                return self._row_to_anime_metadata(results[0])
            return None
        except Exception as e:
            self.logger.error(f"Failed to get anime by folder {folder}: {e}")
            return None

    def get_all_anime(self, status_filter: Optional[str] = None) -> List[AnimeMetadata]:
        """Return all anime ordered by name, optionally filtered by status."""
        try:
            if status_filter:
                query = "SELECT * FROM anime_metadata WHERE status = ? ORDER BY name"
                params = (status_filter,)
            else:
                query = "SELECT * FROM anime_metadata ORDER BY name"
                params = ()
            results = self.db.execute_query(query, params)
            return [self._row_to_anime_metadata(row) for row in results]
        except Exception as e:
            self.logger.error(f"Failed to get all anime: {e}")
            return []

    def update_anime(self, metadata: AnimeMetadata) -> bool:
        """Update an existing anime record (matched by anime_id).

        ``last_updated`` is set server-side; ``folder`` is intentionally
        not updatable here.
        """
        try:
            query = """
                UPDATE anime_metadata SET
                    name = ?, key = ?, description = ?, genres = ?,
                    release_year = ?, status = ?, total_episodes = ?,
                    poster_url = ?, last_updated = CURRENT_TIMESTAMP,
                    custom_metadata = ?
                WHERE anime_id = ?
            """
            params = (
                metadata.name,
                metadata.key,
                metadata.description,
                json.dumps(metadata.genres),
                metadata.release_year,
                metadata.status,
                metadata.total_episodes,
                metadata.poster_url,
                json.dumps(metadata.custom_metadata),
                metadata.anime_id
            )
            rows_affected = self.db.execute_update(query, params)
            return rows_affected > 0
        except Exception as e:
            self.logger.error(f"Failed to update anime {metadata.anime_id}: {e}")
            return False

    def delete_anime(self, anime_id: str) -> bool:
        """Delete an anime and all its dependent rows atomically.

        All four DELETEs run in one transaction on one connection, so a
        failure leaves the database unchanged (the previous implementation
        auto-committed each DELETE separately, destroying child rows even
        when the final delete failed).
        """
        try:
            with self.db.get_connection() as conn:
                # Children first to satisfy the foreign-key references.
                conn.execute("DELETE FROM episode_metadata WHERE anime_id = ?", (anime_id,))
                conn.execute("DELETE FROM download_history WHERE anime_id = ?", (anime_id,))
                conn.execute("DELETE FROM storage_locations WHERE anime_id = ?", (anime_id,))
                cursor = conn.execute("DELETE FROM anime_metadata WHERE anime_id = ?", (anime_id,))
                conn.commit()
                return cursor.rowcount > 0
        except Exception as e:
            self.logger.error(f"Failed to delete anime {anime_id}: {e}")
            return False

    def search_anime(self, search_term: str) -> List[AnimeMetadata]:
        """Case-insensitive substring search over name and description.

        NOTE: LIKE wildcards ('%', '_') inside *search_term* are not
        escaped, so they act as wildcards — confirm callers expect that.
        """
        try:
            query = """
                SELECT * FROM anime_metadata
                WHERE name LIKE ? OR description LIKE ?
                ORDER BY name
            """
            search_pattern = f"%{search_term}%"
            results = self.db.execute_query(query, (search_pattern, search_pattern))
            return [self._row_to_anime_metadata(row) for row in results]
        except Exception as e:
            self.logger.error(f"Failed to search anime: {e}")
            return []

    def _row_to_anime_metadata(self, row: sqlite3.Row) -> AnimeMetadata:
        """Convert a database row to an AnimeMetadata object, decoding the
        JSON-encoded columns and ISO timestamps."""
        return AnimeMetadata(
            anime_id=row['anime_id'],
            name=row['name'],
            folder=row['folder'],
            key=row['key'],
            description=row['description'],
            genres=json.loads(row['genres'] or '[]'),
            release_year=row['release_year'],
            status=row['status'],
            total_episodes=row['total_episodes'],
            poster_url=row['poster_url'],
            last_updated=datetime.fromisoformat(row['last_updated']) if row['last_updated'] else datetime.now(),
            created_at=datetime.fromisoformat(row['created_at']) if row['created_at'] else datetime.now(),
            custom_metadata=json.loads(row['custom_metadata'] or '{}')
        )
class BackupManager:
    """Manage database backups and restore operations.

    Two backup flavours are produced:
      * ``full``          - byte copy of the SQLite database file
      * ``metadata_only`` - JSON export of selected tables
    Every backup gets a JSON sidecar file ``<backup_path>.backup_info.json``
    holding its BackupInfo record (id, type, checksum, ...), which is what
    list_backups()/restore_backup() discover via glob.
    """

    def __init__(self, db_manager: DatabaseManager, backup_dir: str = "./backups"):
        self.db = db_manager
        self.backup_dir = backup_dir
        self.logger = logging.getLogger(__name__)
        # Create backup directory
        os.makedirs(backup_dir, exist_ok=True)

    def create_full_backup(self, description: Optional[str] = None) -> Optional[BackupInfo]:
        """Create a full database backup.

        Copies the database file, records size/checksum/table list, and
        writes the sidecar metadata. Returns the BackupInfo, or None on
        failure (logged).
        NOTE(review): shutil.copy2 copies the file as-is; a write happening
        concurrently on another connection could yield an inconsistent
        copy — confirm backups only run while the app is quiescent.
        """
        try:
            # Timestamped id doubles as the filename stem.
            backup_id = f"full_{datetime.now().strftime('%Y%m%d_%H%M%S')}"
            backup_filename = f"{backup_id}.db"
            backup_path = os.path.join(self.backup_dir, backup_filename)
            # Copy database file
            shutil.copy2(self.db.db_path, backup_path)
            # Calculate checksum (of the copy, so restore can verify it)
            checksum = self._calculate_file_checksum(backup_path)
            # Get file size
            size_bytes = os.path.getsize(backup_path)
            # Get table list for the sidecar record
            with self.db.get_connection() as conn:
                cursor = conn.execute("SELECT name FROM sqlite_master WHERE type='table'")
                tables = [row[0] for row in cursor.fetchall()]
            backup_info = BackupInfo(
                backup_id=backup_id,
                backup_path=backup_path,
                backup_type='full',
                created_at=datetime.now(),
                size_bytes=size_bytes,
                description=description or f"Full backup created on {datetime.now().strftime('%Y-%m-%d %H:%M')}",
                tables_included=tables,
                checksum=checksum
            )
            # Save backup metadata sidecar
            self._save_backup_metadata(backup_info)
            self.logger.info(f"Full backup created: {backup_id}")
            return backup_info
        except Exception as e:
            self.logger.error(f"Failed to create full backup: {e}")
            return None

    def create_metadata_backup(self, description: Optional[str] = None) -> Optional[BackupInfo]:
        """Create a metadata-only backup (excluding large binary data).

        Exports selected tables to a JSON file (see _export_metadata) and
        writes the sidecar record. Returns the BackupInfo or None (logged).
        """
        try:
            backup_id = f"metadata_{datetime.now().strftime('%Y%m%d_%H%M%S')}"
            backup_filename = f"{backup_id}.json"
            backup_path = os.path.join(self.backup_dir, backup_filename)
            # Export metadata as JSON; default=str stringifies datetimes etc.
            metadata = self._export_metadata()
            with open(backup_path, 'w', encoding='utf-8') as f:
                json.dump(metadata, f, indent=2, default=str)
            # Calculate checksum
            checksum = self._calculate_file_checksum(backup_path)
            # Get file size
            size_bytes = os.path.getsize(backup_path)
            backup_info = BackupInfo(
                backup_id=backup_id,
                backup_path=backup_path,
                backup_type='metadata_only',
                created_at=datetime.now(),
                size_bytes=size_bytes,
                description=description or f"Metadata backup created on {datetime.now().strftime('%Y-%m-%d %H:%M')}",
                tables_included=['anime_metadata', 'episode_metadata', 'user_preferences'],
                checksum=checksum
            )
            # Save backup metadata sidecar
            self._save_backup_metadata(backup_info)
            self.logger.info(f"Metadata backup created: {backup_id}")
            return backup_info
        except Exception as e:
            self.logger.error(f"Failed to create metadata backup: {e}")
            return None

    def restore_backup(self, backup_id: str) -> bool:
        """Restore the database from the backup identified by *backup_id*.

        Verifies existence and checksum first, snapshots the current
        database, then either replaces the DB file (full) or re-imports the
        exported tables (metadata_only). Returns True on success.
        """
        try:
            backup_info = self._load_backup_metadata(backup_id)
            if not backup_info:
                self.logger.error(f"Backup not found: {backup_id}")
                return False
            if not os.path.exists(backup_info.backup_path):
                self.logger.error(f"Backup file not found: {backup_info.backup_path}")
                return False
            # Verify backup integrity before touching the live database.
            if not self._verify_backup_integrity(backup_info):
                self.logger.error(f"Backup integrity check failed: {backup_id}")
                return False
            # Create a backup of current database before restore.
            # NOTE(review): the result is not checked, so a failed
            # pre-restore snapshot does not abort the restore — confirm
            # that is intended.
            current_backup = self.create_full_backup(f"Pre-restore backup before restoring {backup_id}")
            if backup_info.backup_type == 'full':
                # Replace database file
                shutil.copy2(backup_info.backup_path, self.db.db_path)
            elif backup_info.backup_type == 'metadata_only':
                # Restore metadata from JSON
                with open(backup_info.backup_path, 'r', encoding='utf-8') as f:
                    metadata = json.load(f)
                self._import_metadata(metadata)
            self.logger.info(f"Backup restored successfully: {backup_id}")
            return True
        except Exception as e:
            self.logger.error(f"Failed to restore backup {backup_id}: {e}")
            return False

    def list_backups(self) -> List[BackupInfo]:
        """Return all backups found via their sidecar files, newest first.

        Unreadable sidecars are skipped with a warning rather than failing
        the whole listing.
        """
        backups = []
        try:
            # Look for backup metadata sidecar files
            metadata_pattern = os.path.join(self.backup_dir, "*.backup_info.json")
            for metadata_file in glob.glob(metadata_pattern):
                try:
                    with open(metadata_file, 'r') as f:
                        backup_data = json.load(f)
                    backup_info = BackupInfo(
                        backup_id=backup_data['backup_id'],
                        backup_path=backup_data['backup_path'],
                        backup_type=backup_data['backup_type'],
                        created_at=datetime.fromisoformat(backup_data['created_at']),
                        size_bytes=backup_data['size_bytes'],
                        description=backup_data.get('description'),
                        tables_included=backup_data.get('tables_included', []),
                        checksum=backup_data.get('checksum')
                    )
                    backups.append(backup_info)
                except Exception as e:
                    self.logger.warning(f"Failed to load backup metadata from {metadata_file}: {e}")
            # Sort by creation date (newest first)
            backups.sort(key=lambda b: b.created_at, reverse=True)
        except Exception as e:
            self.logger.error(f"Failed to list backups: {e}")
        return backups

    def cleanup_old_backups(self, keep_days: int = 30, keep_count: int = 10):
        """Remove backups older than *keep_days*, always retaining the
        *keep_count* newest regardless of age. Best effort; per-file
        failures are logged and skipped."""
        try:
            backups = self.list_backups()
            cutoff_date = datetime.now() - timedelta(days=keep_days)
            # Keep at least keep_count backups regardless of age; the list
            # is newest-first, so index >= keep_count means "beyond quota".
            backups_to_delete = []
            for i, backup in enumerate(backups):
                if i >= keep_count and backup.created_at < cutoff_date:
                    backups_to_delete.append(backup)
            for backup in backups_to_delete:
                try:
                    # Remove backup file
                    if os.path.exists(backup.backup_path):
                        os.remove(backup.backup_path)
                    # Remove metadata sidecar file
                    metadata_file = f"{backup.backup_path}.backup_info.json"
                    if os.path.exists(metadata_file):
                        os.remove(metadata_file)
                    self.logger.info(f"Removed old backup: {backup.backup_id}")
                except Exception as e:
                    self.logger.warning(f"Failed to remove backup {backup.backup_id}: {e}")
            if backups_to_delete:
                self.logger.info(f"Cleaned up {len(backups_to_delete)} old backups")
        except Exception as e:
            self.logger.error(f"Failed to cleanup old backups: {e}")

    def _export_metadata(self) -> Dict[str, Any]:
        """Dump selected tables to a plain dict (rows as column dicts).

        Tables that fail to export are skipped with a warning so a partial
        export is still produced.
        """
        metadata = {
            'export_date': datetime.now().isoformat(),
            'schema_version': self.db.get_current_version(),
            'tables': {}
        }
        # Export specific tables only (download_history is excluded).
        tables_to_export = ['anime_metadata', 'episode_metadata', 'user_preferences', 'storage_locations']
        with self.db.get_connection() as conn:
            for table in tables_to_export:
                try:
                    cursor = conn.execute(f"SELECT * FROM {table}")
                    rows = cursor.fetchall()
                    # Convert rows to dictionaries
                    metadata['tables'][table] = [dict(row) for row in rows]
                except Exception as e:
                    self.logger.warning(f"Failed to export table {table}: {e}")
        return metadata

    def _import_metadata(self, metadata: Dict[str, Any]):
        """Replace table contents from an exported metadata dict.

        Each table is cleared and refilled inside the same connection; a
        failure rolls back and re-raises. Table/column names are
        interpolated into SQL, so this must only ever be fed backup files
        this application wrote itself.
        """
        with self.db.get_connection() as conn:
            for table_name, rows in metadata.get('tables', {}).items():
                if not rows:
                    continue
                try:
                    # Clear existing data (be careful!)
                    conn.execute(f"DELETE FROM {table_name}")
                    # Insert new data; column list is taken from the first row.
                    if rows:
                        columns = list(rows[0].keys())
                        placeholders = ','.join(['?' for _ in columns])
                        insert_sql = f"INSERT INTO {table_name} ({','.join(columns)}) VALUES ({placeholders})"
                        for row in rows:
                            values = [row[col] for col in columns]
                            conn.execute(insert_sql, values)
                    conn.commit()
                    self.logger.info(f"Imported {len(rows)} rows to {table_name}")
                except Exception as e:
                    self.logger.error(f"Failed to import table {table_name}: {e}")
                    conn.rollback()
                    raise

    def _calculate_file_checksum(self, file_path: str) -> str:
        """Calculate the SHA-256 checksum of *file_path*, read in chunks."""
        hash_sha256 = hashlib.sha256()
        with open(file_path, 'rb') as f:
            for chunk in iter(lambda: f.read(4096), b""):
                hash_sha256.update(chunk)
        return hash_sha256.hexdigest()

    def _verify_backup_integrity(self, backup_info: BackupInfo) -> bool:
        """Verify a backup file against its recorded checksum.

        Backups without a recorded checksum pass trivially.
        """
        if not backup_info.checksum:
            return True  # No checksum to verify
        current_checksum = self._calculate_file_checksum(backup_info.backup_path)
        return current_checksum == backup_info.checksum

    def _save_backup_metadata(self, backup_info: BackupInfo):
        """Write the BackupInfo sidecar file next to the backup artifact."""
        metadata_file = f"{backup_info.backup_path}.backup_info.json"
        metadata = {
            'backup_id': backup_info.backup_id,
            'backup_path': backup_info.backup_path,
            'backup_type': backup_info.backup_type,
            'created_at': backup_info.created_at.isoformat(),
            'size_bytes': backup_info.size_bytes,
            'description': backup_info.description,
            'tables_included': backup_info.tables_included,
            'checksum': backup_info.checksum
        }
        with open(metadata_file, 'w') as f:
            json.dump(metadata, f, indent=2)

    def _load_backup_metadata(self, backup_id: str) -> Optional[BackupInfo]:
        """Load a BackupInfo from its sidecar file, or None if missing.

        The glob matches "<backup_id>.<ext>.backup_info.json" (ext is .db
        for full backups, .json for metadata backups); the first match wins.
        """
        # Look for metadata sidecar file
        metadata_pattern = os.path.join(self.backup_dir, f"{backup_id}.*.backup_info.json")
        metadata_files = glob.glob(metadata_pattern)
        if not metadata_files:
            return None
        try:
            with open(metadata_files[0], 'r') as f:
                backup_data = json.load(f)
            return BackupInfo(
                backup_id=backup_data['backup_id'],
                backup_path=backup_data['backup_path'],
                backup_type=backup_data['backup_type'],
                created_at=datetime.fromisoformat(backup_data['created_at']),
                size_bytes=backup_data['size_bytes'],
                description=backup_data.get('description'),
                tables_included=backup_data.get('tables_included', []),
                checksum=backup_data.get('checksum')
            )
        except Exception as e:
            self.logger.error(f"Failed to load backup metadata for {backup_id}: {e}")
            return None
class StorageManager:
    """Manage storage locations and usage monitoring (``storage_locations``
    table)."""

    def __init__(self, db_manager: DatabaseManager):
        self.db = db_manager
        self.logger = logging.getLogger(__name__)

    def add_storage_location(self, path: str, location_type: str = 'primary', anime_id: str = None) -> str:
        """Register a new storage location and return its generated id.

        *location_type* is one of: primary, backup, cache (see the table
        schema). The location's free/total space is refreshed immediately.
        """
        location_id = str(uuid.uuid4())
        query = """
            INSERT INTO storage_locations
            (location_id, anime_id, path, location_type, is_active)
            VALUES (?, ?, ?, ?, ?)
        """
        self.db.execute_update(query, (location_id, anime_id, path, location_type, True))
        # Populate free/total space so the new row is never left empty.
        self.update_storage_stats(location_id)
        return location_id

    def update_storage_stats(self, location_id: str):
        """Refresh free/total disk space for one location (best effort).

        Silently does nothing when the location row or its path does not
        exist; unexpected failures are logged, not raised.
        """
        try:
            # Get location path
            query = "SELECT path FROM storage_locations WHERE location_id = ?"
            results = self.db.execute_query(query, (location_id,))
            if not results:
                return
            path = results[0]['path']
            if os.path.exists(path):
                # Get disk usage for the filesystem holding *path*.
                stat = shutil.disk_usage(path)
                update_query = """
                    UPDATE storage_locations
                    SET free_space_bytes = ?, total_space_bytes = ?, last_checked = CURRENT_TIMESTAMP
                    WHERE location_id = ?
                """
                self.db.execute_update(update_query, (stat.free, stat.total, location_id))
        except Exception as e:
            self.logger.error(f"Failed to update storage stats for {location_id}: {e}")

    def get_storage_summary(self) -> Dict[str, Any]:
        """Aggregate active locations' free/total space per location type.

        SQL SUM() yields NULL when every aggregated value is NULL, so both
        totals are coalesced to 0 before arithmetic (the previous version
        raised TypeError on ``total_space - None``).
        """
        query = """
            SELECT
                location_type,
                COUNT(*) as location_count,
                SUM(free_space_bytes) as total_free,
                SUM(total_space_bytes) as total_space
            FROM storage_locations
            WHERE is_active = 1
            GROUP BY location_type
        """
        results = self.db.execute_query(query)
        summary = {}
        for row in results:
            total_free = row['total_free'] or 0
            total_space = row['total_space'] or 0
            summary[row['location_type']] = {
                'location_count': row['location_count'],
                'total_free_gb': total_free / (1024**3),
                'total_space_gb': total_space / (1024**3),
                'usage_percent': ((total_space - total_free) / total_space * 100) if total_space else 0
            }
        return summary
# Global instances
# NOTE(review): constructing DatabaseManager here opens/creates the SQLite
# database (and runs migrations) at import time — confirm importers expect
# that side effect.
database_manager = DatabaseManager()
anime_repository = AnimeRepository(database_manager)
backup_manager = BackupManager(database_manager)
storage_manager = StorageManager(database_manager)
def init_database_system():
    """Initialize the database system.

    Intentionally a no-op: the module-level ``database_manager`` already
    creates the schema and runs migrations when it is constructed at
    import time. Kept as an explicit lifecycle hook for callers.
    """
    return None
def cleanup_database_system():
    """Clean up database resources.

    Intentionally a no-op: connections are opened and closed per operation,
    so SQLite needs no teardown here. Kept as an explicit lifecycle hook.
    """
    return None
# Export main components
# Public API: the classes, the module-level singletons created above, and
# the init/cleanup lifecycle hooks.
__all__ = [
    'DatabaseManager',
    'AnimeRepository',
    'BackupManager',
    'StorageManager',
    'AnimeMetadata',
    'EpisodeMetadata',
    'BackupInfo',
    'database_manager',
    'anime_repository',
    'backup_manager',
    'storage_manager',
    'init_database_system',
    'cleanup_database_system'
]

View File

@@ -1,6 +0,0 @@
"""
Repository package for data access layer.
This package contains repository implementations following the Repository pattern
for clean separation of data access logic from business logic.
"""

View File

@@ -1,24 +0,0 @@
# AniWorld FastAPI Server Configuration
# Authentication Configuration
JWT_SECRET_KEY=your-super-secure-jwt-secret-key-change-this-in-production
PASSWORD_SALT=c3149a46648b4394410b415ea654c31731b988ee59fc91b8fb8366a0b32ef0c1
MASTER_PASSWORD=admin123
# MASTER_PASSWORD_HASH=bb202031f646922388567de96a784074272efbbba9eb5d2259e23af04686d2a5
SESSION_TIMEOUT_HOURS=24
# Application Configuration
ANIME_DIRECTORY=\\\\sshfs.r\\ubuntu@192.168.178.43\\media\\serien\\Serien
LOG_LEVEL=INFO
# Database Configuration (if needed)
DATABASE_URL=sqlite:///./aniworld.db
# Security Configuration
CORS_ORIGINS=*
API_RATE_LIMIT=100
# Provider Configuration
DEFAULT_PROVIDER=aniworld.to
PROVIDER_TIMEOUT=30
RETRY_ATTEMPTS=3

View File

@@ -1,257 +0,0 @@
# AniWorld FastAPI Server
A comprehensive FastAPI-based server implementation for AniWorld following the project instructions.
## 🚀 Features
### ✅ Authentication System (Completed)
- **Simple Master Password Authentication**: Single master password for the entire application
- **JWT Token Management**: Stateless authentication using JWT tokens
- **Environment Configuration**: Secure password hash stored in environment variables
- **Session Management**: Configurable token expiry (default: 24 hours)
- **Security Features**: SHA-256 password hashing with salt
### ✅ API Endpoints (Implemented)
#### Authentication Endpoints
- `POST /auth/login` - Login with master password and receive JWT token
- `GET /auth/verify` - Verify JWT token validity (protected)
- `POST /auth/logout` - Logout endpoint (stateless - client removes token)
#### System Endpoints
- `GET /` - Root endpoint with API information
- `GET /health` - Health check endpoint
- `GET /api/system/config` - System configuration (protected)
- `GET /api/system/database/health` - Database health check (protected)
#### Anime & Episode Endpoints (Protected)
- `GET /api/anime/search` - Search anime by title with pagination
- `GET /api/anime/{anime_id}` - Get specific anime details
- `GET /api/anime/{anime_id}/episodes` - Get all episodes for anime
- `GET /api/episodes/{episode_id}` - Get specific episode details
### 🔧 Technical Features
- **FastAPI Framework**: Modern, fast (high-performance) web framework
- **OpenAPI Documentation**: Automatic API documentation at `/docs`
- **CORS Support**: Configurable cross-origin resource sharing
- **Request Validation**: Pydantic models for request/response validation
- **Error Handling**: Centralized error handling with proper HTTP status codes
- **Logging**: Comprehensive logging system with file and console output
- **Environment Configuration**: Secure configuration via environment variables
## 🛠️ Installation & Setup
### Prerequisites
- Python 3.11+ (AniWorld conda environment)
- Conda package manager
### 1. Activate AniWorld Environment
```bash
conda activate AniWorld
```
### 2. Install Dependencies
```bash
cd src/server
pip install -r requirements_fastapi.txt
```
### 3. Configure Environment
Create or update `.env` file:
```env
# Authentication
JWT_SECRET_KEY=your-super-secure-jwt-secret-key
PASSWORD_SALT=your-secure-salt
MASTER_PASSWORD=admin123
SESSION_TIMEOUT_HOURS=24
# Application
ANIME_DIRECTORY=your-anime-directory-path
LOG_LEVEL=INFO
# Optional
DATABASE_URL=sqlite:///./aniworld.db
CORS_ORIGINS=*
```
### 4. Start the Server
#### Option 1: Direct Python Execution
```bash
cd src/server
C:\Users\lukas\anaconda3\envs\AniWorld\python.exe fastapi_app.py
```
#### Option 2: Using Batch Script (Windows)
```cmd
cd src/server
run_and_test.bat
```
#### Option 3: Using Shell Script (Linux/Mac)
```bash
cd src/server
chmod +x start_fastapi_server.sh
./start_fastapi_server.sh
```
## 📖 API Usage
### 1. Access Documentation
Visit: http://localhost:8000/docs
### 2. Authentication Flow
#### Step 1: Login
```bash
curl -X POST "http://localhost:8000/auth/login" \
-H "Content-Type: application/json" \
-d '{"password": "admin123"}'
```
Response:
```json
{
"success": true,
"message": "Authentication successful",
"token": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9...",
"expires_at": "2025-10-06T18:19:24.710065"
}
```
#### Step 2: Use Token for Protected Endpoints
```bash
curl -X GET "http://localhost:8000/api/anime/search?query=naruto&limit=5" \
-H "Authorization: Bearer YOUR_JWT_TOKEN"
```
### 3. Example API Calls
#### Health Check
```bash
curl "http://localhost:8000/health"
```
#### Search Anime
```bash
curl -H "Authorization: Bearer YOUR_TOKEN" \
"http://localhost:8000/api/anime/search?query=naruto&limit=10"
```
#### Get Anime Details
```bash
curl -H "Authorization: Bearer YOUR_TOKEN" \
"http://localhost:8000/api/anime/anime_123"
```
## 🧪 Testing
### Automated Testing
```bash
cd src/server
C:\Users\lukas\anaconda3\envs\AniWorld\python.exe test_fastapi.py
```
### Manual Testing
1. Start the server
2. Visit http://localhost:8000/docs
3. Use the interactive API documentation
4. Test authentication with password: `admin123`
## 📁 Project Structure
```
src/server/
├── fastapi_app.py # Main FastAPI application
├── .env # Environment configuration
├── requirements_fastapi.txt # Python dependencies
├── test_fastapi.py # Test script
├── start_fastapi_server.bat # Windows startup script
├── start_fastapi_server.sh # Linux/Mac startup script
├── run_and_test.bat # Windows test runner
└── logs/ # Log files
```
## 🔐 Security
### Authentication
- Master password authentication (no user registration required)
- JWT tokens with configurable expiry
- Secure password hashing (SHA-256 + salt)
- Environment-based secret management
### API Security
- All anime/episode endpoints require authentication
- CORS protection
- Input validation using Pydantic
- Error handling without sensitive data exposure
## 🔧 Configuration
### Environment Variables
- `JWT_SECRET_KEY`: Secret key for JWT token signing
- `PASSWORD_SALT`: Salt for password hashing
- `MASTER_PASSWORD`: Master password (development only)
- `MASTER_PASSWORD_HASH`: Hashed master password (production)
- `SESSION_TIMEOUT_HOURS`: JWT token expiry time
- `ANIME_DIRECTORY`: Path to anime files
- `LOG_LEVEL`: Logging level (DEBUG, INFO, WARNING, ERROR)
### Production Configuration
1. Set `MASTER_PASSWORD_HASH` instead of `MASTER_PASSWORD`
2. Use a strong `JWT_SECRET_KEY`
3. Set appropriate `CORS_ORIGINS`
4. Configure proper logging levels
## 📊 API Status
| Endpoint Category | Status | Coverage |
|------------------|--------|----------|
| Authentication | ✅ Complete | 100% |
| Health/System | ✅ Complete | 100% |
| Anime Search | ✅ Implemented | Mock data |
| Episode Management | ✅ Implemented | Mock data |
| Database Integration | 🔄 Placeholder | Todo |
| Real Data Provider | 🔄 Placeholder | Todo |
## 🚧 Future Enhancements
### High Priority
- [ ] Connect to actual anime database/provider
- [ ] Implement real anime search functionality
- [ ] Add episode streaming capabilities
- [ ] Database connection pooling
### Medium Priority
- [ ] Redis caching layer
- [ ] Rate limiting middleware
- [ ] Background task processing
- [ ] WebSocket support
### Low Priority
- [ ] Advanced search filters
- [ ] User preferences (multi-user support)
- [ ] Download progress tracking
- [ ] Statistics and analytics
## 📝 License
This project follows the AniWorld project licensing terms.
## 🤝 Contributing
1. Follow the coding standards in `.github/copilot-instructions.md`
2. Use type hints and Pydantic models
3. Add comprehensive logging
4. Include tests for new features
5. Update documentation
## 📞 Support
- API Documentation: http://localhost:8000/docs
- Health Check: http://localhost:8000/health
- Logs: Check `logs/aniworld.log` for detailed information
---
**Note**: This FastAPI implementation provides a solid foundation following the project instructions. The authentication system is complete and production-ready, while anime/episode endpoints currently return mock data pending integration with the actual data providers.

View File

@@ -1,573 +0,0 @@
import os
import json
import hashlib
import secrets
from typing import Dict, Any, Optional
from datetime import datetime, timedelta
class Config:
    """Configuration management for AniWorld Flask app.

    Loads a JSON configuration file, recursively merges it over built-in
    defaults, and exposes dot-notation ``get``/``set`` accessors plus
    typed convenience properties. Every ``set`` persists the whole
    configuration back to disk immediately.
    """

    def __init__(self, config_file: str = "data/config.json"):
        # Path of the JSON file this instance reads from / writes to.
        self.config_file = config_file
        # Built-in defaults; the user's file is merged on top of these.
        self.default_config = {
            "security": {
                "master_password_hash": None,
                "salt": None,
                "session_timeout_hours": 24,
                "max_failed_attempts": 5,
                "lockout_duration_minutes": 30
            },
            "anime": {
                # NOTE(review): defaults live under "anime.auto_rescan_time" /
                # "anime.auto_download_after_rescan", but the scheduler
                # properties below read "scheduler.*" keys and fall back to
                # hard-coded defaults — confirm which namespace is authoritative.
                "directory": os.getenv("ANIME_DIRECTORY", "\\\\sshfs.r\\ubuntu@192.168.178.43\\media\\serien\\Serien"),
                "download_threads": 3,
                "download_speed_limit": None,
                "auto_rescan_time": "03:00",
                "auto_download_after_rescan": False
            },
            "logging": {
                "level": "INFO",
                "enable_console_logging": True,
                "enable_console_progress": False,
                "enable_fail2ban_logging": True,
                "log_file": "./logs/aniworld.log",
                "max_log_size_mb": 10,
                "log_backup_count": 5
            },
            "providers": {
                "default_provider": "aniworld.to",
                "preferred_language": "German Dub",
                "fallback_providers": ["aniworld.to"],
                "provider_timeout": 30,
                "retry_attempts": 3,
                "provider_settings": {
                    "aniworld.to": {
                        "enabled": True,
                        "priority": 1,
                        "quality_preference": "720p"
                    }
                }
            },
            "advanced": {
                "max_concurrent_downloads": 3,
                "download_buffer_size": 8192,
                "connection_timeout": 30,
                "read_timeout": 300,
                "enable_debug_mode": False,
                "cache_duration_minutes": 60
            }
        }
        self._config = self._load_config()

    def _load_config(self) -> Dict[str, Any]:
        """Load configuration from file or create default."""
        try:
            if os.path.exists(self.config_file):
                with open(self.config_file, 'r', encoding='utf-8') as f:
                    config = json.load(f)
                # Merge with defaults to ensure all keys exist
                return self._merge_configs(self.default_config, config)
            else:
                return self.default_config.copy()
        except Exception as e:
            # Any I/O or JSON error falls back to defaults (not persisted).
            print(f"Error loading config: {e}")
            return self.default_config.copy()

    def _merge_configs(self, default: Dict[str, Any], user: Dict[str, Any]) -> Dict[str, Any]:
        """Recursively merge user config with defaults."""
        result = default.copy()
        for key, value in user.items():
            if key in result and isinstance(result[key], dict) and isinstance(value, dict):
                # Both sides are dicts: merge one level deeper.
                result[key] = self._merge_configs(result[key], value)
            else:
                # User value wins (including a non-dict overriding a dict).
                result[key] = value
        return result

    def save_config(self) -> bool:
        """Save current configuration to file.

        Returns True on success, False if the write fails.
        """
        try:
            with open(self.config_file, 'w', encoding='utf-8') as f:
                json.dump(self._config, f, indent=4)
            return True
        except Exception as e:
            print(f"Error saving config: {e}")
            return False

    def get(self, key_path: str, default: Any = None) -> Any:
        """Get config value using dot notation (e.g., 'security.master_password_hash')."""
        keys = key_path.split('.')
        value = self._config
        for key in keys:
            if isinstance(value, dict) and key in value:
                value = value[key]
            else:
                # Any missing path segment yields the caller's default.
                return default
        return value

    def set(self, key_path: str, value: Any) -> bool:
        """Set config value using dot notation.

        Creates intermediate dicts as needed and persists immediately;
        returns the result of save_config().
        """
        keys = key_path.split('.')
        config = self._config
        # Navigate to parent
        for key in keys[:-1]:
            if key not in config:
                config[key] = {}
            config = config[key]
        # Set final value
        config[keys[-1]] = value
        return self.save_config()

    def set_master_password(self, password: str) -> bool:
        """Set master password with secure hashing."""
        try:
            # Generate salt
            salt = secrets.token_hex(32)
            # Hash password with salt
            # NOTE(review): a single unsalted-iteration SHA-256 is weak for
            # password storage — consider hashlib.pbkdf2_hmac (requires a
            # migration path for existing hashes).
            password_hash = hashlib.sha256((password + salt).encode()).hexdigest()
            # Save to config
            self.set("security.salt", salt)
            self.set("security.master_password_hash", password_hash)
            return True
        except Exception as e:
            print(f"Error setting master password: {e}")
            return False

    def verify_password(self, password: str) -> bool:
        """Verify password against stored hash."""
        try:
            stored_hash = self.get("security.master_password_hash")
            salt = self.get("security.salt")
            if not stored_hash or not salt:
                # No password configured yet -> always reject.
                return False
            # Hash provided password with stored salt
            password_hash = hashlib.sha256((password + salt).encode()).hexdigest()
            return password_hash == stored_hash
        except Exception as e:
            print(f"Error verifying password: {e}")
            return False

    def has_master_password(self) -> bool:
        """Check if master password is configured."""
        return bool(self.get("security.master_password_hash"))

    def backup_config(self, backup_path: Optional[str] = None) -> str:
        """Create backup of current configuration.

        Returns the path of the written backup; raises on failure.
        """
        if not backup_path:
            timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
            backup_path = f"config_backup_{timestamp}.json"
        try:
            with open(backup_path, 'w', encoding='utf-8') as f:
                json.dump(self._config, f, indent=4)
            return backup_path
        except Exception as e:
            raise Exception(f"Failed to create backup: {e}")

    def restore_config(self, backup_path: str) -> bool:
        """Restore configuration from backup."""
        try:
            with open(backup_path, 'r', encoding='utf-8') as f:
                config = json.load(f)
            # Validate config before restoring
            validation_result = self.validate_config(config)
            if not validation_result['valid']:
                raise Exception(f"Invalid configuration: {validation_result['errors']}")
            self._config = self._merge_configs(self.default_config, config)
            return self.save_config()
        except Exception as e:
            print(f"Error restoring config: {e}")
            return False

    def validate_config(self, config: Dict[str, Any] = None) -> Dict[str, Any]:
        """Validate configuration structure and values.

        Validates self._config when *config* is None. Returns
        {'valid': bool, 'errors': [...], 'warnings': [...]}.
        """
        if config is None:
            config = self._config
        errors = []
        warnings = []
        # Validate security settings
        security = config.get('security', {})
        if security.get('session_timeout_hours', 0) < 1 or security.get('session_timeout_hours', 0) > 168:
            errors.append("Session timeout must be between 1 and 168 hours")
        if security.get('max_failed_attempts', 0) < 1 or security.get('max_failed_attempts', 0) > 50:
            errors.append("Max failed attempts must be between 1 and 50")
        if security.get('lockout_duration_minutes', 0) < 1 or security.get('lockout_duration_minutes', 0) > 1440:
            errors.append("Lockout duration must be between 1 and 1440 minutes")
        # Validate anime settings
        anime = config.get('anime', {})
        directory = anime.get('directory', '')
        # UNC paths (\\server\share) are only warned about via existence
        # checks when they are local; remote shares may be unreachable from
        # the validating host without being wrong.
        if directory and not os.path.exists(directory) and not directory.startswith('\\\\'):
            warnings.append(f"Anime directory does not exist: {directory}")
        download_threads = anime.get('download_threads', 1)
        if download_threads < 1 or download_threads > 10:
            errors.append("Download threads must be between 1 and 10")
        # Validate logging settings
        logging_config = config.get('logging', {})
        log_level = logging_config.get('level', 'INFO')
        if log_level not in ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']:
            errors.append(f"Invalid log level: {log_level}")
        # Validate provider settings
        providers = config.get('providers', {})
        provider_timeout = providers.get('provider_timeout', 30)
        if provider_timeout < 5 or provider_timeout > 300:
            errors.append("Provider timeout must be between 5 and 300 seconds")
        retry_attempts = providers.get('retry_attempts', 3)
        if retry_attempts < 0 or retry_attempts > 10:
            errors.append("Retry attempts must be between 0 and 10")
        # Validate advanced settings
        advanced = config.get('advanced', {})
        max_concurrent = advanced.get('max_concurrent_downloads', 3)
        if max_concurrent < 1 or max_concurrent > 20:
            errors.append("Max concurrent downloads must be between 1 and 20")
        connection_timeout = advanced.get('connection_timeout', 30)
        if connection_timeout < 5 or connection_timeout > 300:
            errors.append("Connection timeout must be between 5 and 300 seconds")
        return {
            'valid': len(errors) == 0,
            'errors': errors,
            'warnings': warnings
        }

    def get_config_schema(self) -> Dict[str, Any]:
        """Get configuration schema for UI generation."""
        # Static description of the editable settings; "type", "min"/"max",
        # and "default" drive form generation and client-side validation.
        return {
            "security": {
                "title": "Security Settings",
                "fields": {
                    "session_timeout_hours": {
                        "type": "number",
                        "title": "Session Timeout (hours)",
                        "description": "How long sessions remain active",
                        "min": 1,
                        "max": 168,
                        "default": 24
                    },
                    "max_failed_attempts": {
                        "type": "number",
                        "title": "Max Failed Login Attempts",
                        "description": "Number of failed attempts before lockout",
                        "min": 1,
                        "max": 50,
                        "default": 5
                    },
                    "lockout_duration_minutes": {
                        "type": "number",
                        "title": "Lockout Duration (minutes)",
                        "description": "How long to lock account after failed attempts",
                        "min": 1,
                        "max": 1440,
                        "default": 30
                    }
                }
            },
            "anime": {
                "title": "Anime Settings",
                "fields": {
                    "directory": {
                        "type": "text",
                        "title": "Anime Directory",
                        "description": "Base directory for anime storage",
                        "required": True
                    },
                    "download_threads": {
                        "type": "number",
                        "title": "Download Threads",
                        "description": "Number of concurrent download threads",
                        "min": 1,
                        "max": 10,
                        "default": 3
                    },
                    "download_speed_limit": {
                        "type": "number",
                        "title": "Speed Limit (KB/s)",
                        "description": "Download speed limit (0 = unlimited)",
                        "min": 0,
                        "max": 102400,
                        "default": 0
                    }
                }
            },
            "providers": {
                "title": "Provider Settings",
                "fields": {
                    "default_provider": {
                        "type": "select",
                        "title": "Default Provider",
                        "description": "Primary anime provider",
                        "options": ["aniworld.to"],
                        "default": "aniworld.to"
                    },
                    "preferred_language": {
                        "type": "select",
                        "title": "Preferred Language",
                        "description": "Default language preference",
                        "options": ["German Dub", "German Sub", "English Dub", "English Sub", "Japanese"],
                        "default": "German Dub"
                    },
                    "provider_timeout": {
                        "type": "number",
                        "title": "Provider Timeout (seconds)",
                        "description": "Timeout for provider requests",
                        "min": 5,
                        "max": 300,
                        "default": 30
                    },
                    "retry_attempts": {
                        "type": "number",
                        "title": "Retry Attempts",
                        "description": "Number of retry attempts for failed requests",
                        "min": 0,
                        "max": 10,
                        "default": 3
                    }
                }
            },
            "advanced": {
                "title": "Advanced Settings",
                "fields": {
                    "max_concurrent_downloads": {
                        "type": "number",
                        "title": "Max Concurrent Downloads",
                        "description": "Maximum simultaneous downloads",
                        "min": 1,
                        "max": 20,
                        "default": 3
                    },
                    "connection_timeout": {
                        "type": "number",
                        "title": "Connection Timeout (seconds)",
                        "description": "Network connection timeout",
                        "min": 5,
                        "max": 300,
                        "default": 30
                    },
                    "enable_debug_mode": {
                        "type": "boolean",
                        "title": "Debug Mode",
                        "description": "Enable detailed debug logging",
                        "default": False
                    }
                }
            }
        }

    def export_config(self, include_sensitive: bool = False) -> Dict[str, Any]:
        """Export configuration, optionally excluding sensitive data."""
        config_copy = json.loads(json.dumps(self._config))  # Deep copy
        if not include_sensitive:
            # Remove sensitive data
            if 'security' in config_copy:
                config_copy['security'].pop('master_password_hash', None)
                config_copy['security'].pop('salt', None)
        return config_copy

    def import_config(self, config_data: Dict[str, Any], validate: bool = True) -> Dict[str, Any]:
        """Import configuration with validation.

        Returns {'success': bool, 'errors': [...], 'warnings': [...]}.
        """
        if validate:
            validation_result = self.validate_config(config_data)
            if not validation_result['valid']:
                return {
                    'success': False,
                    'errors': validation_result['errors'],
                    'warnings': validation_result['warnings']
                }
        # Merge with existing config (don't overwrite security settings)
        current_security = self._config.get('security', {})
        merged_config = self._merge_configs(self.default_config, config_data)
        # Preserve current security settings if not provided
        if not config_data.get('security', {}).get('master_password_hash'):
            merged_config['security'] = current_security
        self._config = merged_config
        success = self.save_config()
        return {
            'success': success,
            'errors': [] if success else ['Failed to save configuration'],
            'warnings': validation_result.get('warnings', []) if validate else []
        }

    @property
    def anime_directory(self) -> str:
        """Get anime directory path."""
        # Always check environment variable first
        env_dir = os.getenv("ANIME_DIRECTORY")
        if env_dir:
            # Remove quotes if they exist
            env_dir = env_dir.strip('"\'')
            return env_dir
        return self.get("anime.directory", "\\\\sshfs.r\\ubuntu@192.168.178.43\\media\\serien\\Serien")

    @anime_directory.setter
    def anime_directory(self, value: str):
        """Set anime directory path."""
        self.set("anime.directory", value)

    @property
    def session_timeout_hours(self) -> int:
        """Get session timeout in hours."""
        return self.get("security.session_timeout_hours", 24)

    @property
    def max_failed_attempts(self) -> int:
        """Get maximum failed login attempts."""
        return self.get("security.max_failed_attempts", 5)

    @property
    def lockout_duration_minutes(self) -> int:
        """Get lockout duration in minutes."""
        return self.get("security.lockout_duration_minutes", 30)

    @property
    def scheduled_rescan_enabled(self) -> bool:
        """Get whether scheduled rescan is enabled."""
        # NOTE(review): reads "scheduler.*" keys that are absent from
        # default_config (defaults define "anime.auto_rescan_time") — the
        # fallback value below is what applies until a setter writes them.
        return self.get("scheduler.rescan_enabled", False)

    @scheduled_rescan_enabled.setter
    def scheduled_rescan_enabled(self, value: bool):
        """Set whether scheduled rescan is enabled."""
        self.set("scheduler.rescan_enabled", value)

    @property
    def scheduled_rescan_time(self) -> str:
        """Get scheduled rescan time in HH:MM format."""
        return self.get("scheduler.rescan_time", "03:00")

    @scheduled_rescan_time.setter
    def scheduled_rescan_time(self, value: str):
        """Set scheduled rescan time in HH:MM format."""
        self.set("scheduler.rescan_time", value)

    @property
    def auto_download_after_rescan(self) -> bool:
        """Get whether to auto-download after scheduled rescan."""
        return self.get("scheduler.auto_download_after_rescan", False)

    @auto_download_after_rescan.setter
    def auto_download_after_rescan(self, value: bool):
        """Set whether to auto-download after scheduled rescan."""
        self.set("scheduler.auto_download_after_rescan", value)

    @property
    def log_level(self) -> str:
        """Get current log level."""
        return self.get("logging.level", "INFO")

    @log_level.setter
    def log_level(self, value: str):
        """Set log level (stored upper-cased)."""
        self.set("logging.level", value.upper())

    @property
    def enable_console_logging(self) -> bool:
        """Get whether console logging is enabled."""
        return self.get("logging.enable_console_logging", True)

    @enable_console_logging.setter
    def enable_console_logging(self, value: bool):
        """Set whether console logging is enabled."""
        self.set("logging.enable_console_logging", value)

    @property
    def enable_console_progress(self) -> bool:
        """Get whether console progress bars are enabled."""
        return self.get("logging.enable_console_progress", False)

    @enable_console_progress.setter
    def enable_console_progress(self, value: bool):
        """Set whether console progress bars are enabled."""
        self.set("logging.enable_console_progress", value)

    @property
    def enable_fail2ban_logging(self) -> bool:
        """Get whether fail2ban logging is enabled."""
        return self.get("logging.enable_fail2ban_logging", True)

    @enable_fail2ban_logging.setter
    def enable_fail2ban_logging(self, value: bool):
        """Set whether fail2ban logging is enabled."""
        self.set("logging.enable_fail2ban_logging", value)

    # Provider configuration properties
    @property
    def default_provider(self) -> str:
        """Get default provider."""
        return self.get("providers.default_provider", "aniworld.to")

    @default_provider.setter
    def default_provider(self, value: str):
        """Set default provider."""
        self.set("providers.default_provider", value)

    @property
    def preferred_language(self) -> str:
        """Get preferred language."""
        return self.get("providers.preferred_language", "German Dub")

    @preferred_language.setter
    def preferred_language(self, value: str):
        """Set preferred language."""
        self.set("providers.preferred_language", value)

    @property
    def provider_timeout(self) -> int:
        """Get provider timeout in seconds."""
        return self.get("providers.provider_timeout", 30)

    @provider_timeout.setter
    def provider_timeout(self, value: int):
        """Set provider timeout in seconds."""
        self.set("providers.provider_timeout", value)

    # Advanced configuration properties
    @property
    def max_concurrent_downloads(self) -> int:
        """Get maximum concurrent downloads."""
        return self.get("advanced.max_concurrent_downloads", 3)

    @max_concurrent_downloads.setter
    def max_concurrent_downloads(self, value: int):
        """Set maximum concurrent downloads."""
        self.set("advanced.max_concurrent_downloads", value)

    @property
    def enable_debug_mode(self) -> bool:
        """Get whether debug mode is enabled."""
        return self.get("advanced.enable_debug_mode", False)

    @enable_debug_mode.setter
    def enable_debug_mode(self, value: bool):
        """Set whether debug mode is enabled."""
        self.set("advanced.enable_debug_mode", value)


# Global config instance
config = Config()

View File

@@ -1,10 +0,0 @@
"""
Configuration package for the Aniworld server.
This package provides configuration management and environment
variable handling for secure application deployment.
"""
from .env_config import EnvironmentConfig, env_config
__all__ = ['EnvironmentConfig', 'env_config']

View File

@@ -1,217 +0,0 @@
"""
Environment configuration for secure handling of sensitive data.
This module provides secure environment variable handling and configuration
management for the Aniworld server application.
"""
import os
import secrets
from typing import Optional, Dict, Any
from dotenv import load_dotenv
import logging
logger = logging.getLogger(__name__)
# Load environment variables from .env file
load_dotenv()
class EnvironmentConfig:
    """Manages environment variables and secure configuration.

    All values are read once at class-definition time from the process
    environment (after load_dotenv()).
    """

    # Security
    # NOTE(review): when these env vars are unset, fresh random values are
    # generated on every process start — tokens/sessions signed with them
    # will not survive a restart. Confirm production always sets them.
    SECRET_KEY: str = os.getenv('SECRET_KEY', secrets.token_urlsafe(32))
    JWT_SECRET_KEY: str = os.getenv('JWT_SECRET_KEY', secrets.token_urlsafe(32))
    PASSWORD_SALT: str = os.getenv('PASSWORD_SALT', secrets.token_hex(32))

    # Database
    DATABASE_URL: str = os.getenv('DATABASE_URL', 'sqlite:///data/aniworld.db')
    DATABASE_PASSWORD: Optional[str] = os.getenv('DATABASE_PASSWORD')

    # Redis (for caching and sessions)
    REDIS_URL: str = os.getenv('REDIS_URL', 'redis://localhost:6379/0')
    REDIS_PASSWORD: Optional[str] = os.getenv('REDIS_PASSWORD')

    # API Keys and External Services
    ANIME_PROVIDER_API_KEY: Optional[str] = os.getenv('ANIME_PROVIDER_API_KEY')
    TMDB_API_KEY: Optional[str] = os.getenv('TMDB_API_KEY')

    # Email Configuration (for password reset)
    SMTP_SERVER: str = os.getenv('SMTP_SERVER', 'localhost')
    SMTP_PORT: int = int(os.getenv('SMTP_PORT', '587'))
    SMTP_USERNAME: Optional[str] = os.getenv('SMTP_USERNAME')
    SMTP_PASSWORD: Optional[str] = os.getenv('SMTP_PASSWORD')
    SMTP_USE_TLS: bool = os.getenv('SMTP_USE_TLS', 'true').lower() == 'true'
    FROM_EMAIL: str = os.getenv('FROM_EMAIL', 'noreply@aniworld.local')

    # Security Settings
    SESSION_TIMEOUT_HOURS: int = int(os.getenv('SESSION_TIMEOUT_HOURS', '24'))
    MAX_FAILED_LOGIN_ATTEMPTS: int = int(os.getenv('MAX_FAILED_LOGIN_ATTEMPTS', '5'))
    LOCKOUT_DURATION_MINUTES: int = int(os.getenv('LOCKOUT_DURATION_MINUTES', '30'))

    # Rate Limiting
    RATE_LIMIT_PER_MINUTE: int = int(os.getenv('RATE_LIMIT_PER_MINUTE', '60'))
    API_RATE_LIMIT_PER_MINUTE: int = int(os.getenv('API_RATE_LIMIT_PER_MINUTE', '100'))

    # Application Settings
    DEBUG: bool = os.getenv('DEBUG', 'false').lower() == 'true'
    HOST: str = os.getenv('HOST', '127.0.0.1')
    PORT: int = int(os.getenv('PORT', '5000'))

    # Anime Directory and Download Settings
    ANIME_DIRECTORY: str = os.getenv('ANIME_DIRECTORY', './downloads')
    MAX_CONCURRENT_DOWNLOADS: int = int(os.getenv('MAX_CONCURRENT_DOWNLOADS', '3'))
    # "or None" maps an explicit/implicit 0 to None (no limit).
    DOWNLOAD_SPEED_LIMIT: Optional[int] = int(os.getenv('DOWNLOAD_SPEED_LIMIT', '0')) or None

    # Logging
    LOG_LEVEL: str = os.getenv('LOG_LEVEL', 'INFO')
    LOG_FILE: str = os.getenv('LOG_FILE', './logs/aniworld.log')

    @classmethod
    def get_database_config(cls) -> Dict[str, Any]:
        """Get database configuration."""
        return {
            'url': cls.DATABASE_URL,
            'password': cls.DATABASE_PASSWORD,
            'pool_size': int(os.getenv('DATABASE_POOL_SIZE', '10')),
            'max_overflow': int(os.getenv('DATABASE_MAX_OVERFLOW', '20')),
            'pool_timeout': int(os.getenv('DATABASE_POOL_TIMEOUT', '30')),
            'pool_recycle': int(os.getenv('DATABASE_POOL_RECYCLE', '3600'))
        }

    @classmethod
    def get_redis_config(cls) -> Dict[str, Any]:
        """Get Redis configuration."""
        return {
            'url': cls.REDIS_URL,
            'password': cls.REDIS_PASSWORD,
            'max_connections': int(os.getenv('REDIS_MAX_CONNECTIONS', '10')),
            'retry_on_timeout': True,
            'socket_timeout': int(os.getenv('REDIS_SOCKET_TIMEOUT', '5'))
        }

    @classmethod
    def get_email_config(cls) -> Dict[str, Any]:
        """Get email configuration."""
        return {
            'server': cls.SMTP_SERVER,
            'port': cls.SMTP_PORT,
            'username': cls.SMTP_USERNAME,
            'password': cls.SMTP_PASSWORD,
            'use_tls': cls.SMTP_USE_TLS,
            'from_email': cls.FROM_EMAIL
        }

    @classmethod
    def get_security_config(cls) -> Dict[str, Any]:
        """Get security configuration."""
        return {
            'secret_key': cls.SECRET_KEY,
            'jwt_secret_key': cls.JWT_SECRET_KEY,
            'password_salt': cls.PASSWORD_SALT,
            'session_timeout_hours': cls.SESSION_TIMEOUT_HOURS,
            'max_failed_attempts': cls.MAX_FAILED_LOGIN_ATTEMPTS,
            'lockout_duration_minutes': cls.LOCKOUT_DURATION_MINUTES,
            'rate_limit_per_minute': cls.RATE_LIMIT_PER_MINUTE,
            'api_rate_limit_per_minute': cls.API_RATE_LIMIT_PER_MINUTE
        }

    @classmethod
    def validate_config(cls) -> bool:
        """Validate that required configuration is present.

        NOTE(review): SECRET_KEY/JWT_SECRET_KEY/PASSWORD_SALT always have
        generated fallback values (see class body), so this check can
        never report them missing — confirm whether that is intended.
        """
        required_vars = [
            'SECRET_KEY',
            'JWT_SECRET_KEY',
            'PASSWORD_SALT'
        ]
        missing_vars = []
        for var in required_vars:
            if not getattr(cls, var):
                missing_vars.append(var)
        if missing_vars:
            logger.error(f"Missing required environment variables: {missing_vars}")
            return False
        return True

    @classmethod
    def generate_env_template(cls, file_path: str = '.env.template') -> bool:
        """Generate a template .env file with all available configuration options."""
        try:
            # Template content is flush-left: it becomes the literal file body.
            template_content = """# Aniworld Server Environment Configuration
# Copy this file to .env and fill in your values
# Security (REQUIRED - Generate secure random values)
SECRET_KEY=your_secret_key_here
JWT_SECRET_KEY=your_jwt_secret_here
PASSWORD_SALT=your_password_salt_here
# Database Configuration
DATABASE_URL=sqlite:///data/aniworld.db
# DATABASE_PASSWORD=your_db_password_here
DATABASE_POOL_SIZE=10
DATABASE_MAX_OVERFLOW=20
DATABASE_POOL_TIMEOUT=30
DATABASE_POOL_RECYCLE=3600
# Redis Configuration (for caching and sessions)
REDIS_URL=redis://localhost:6379/0
# REDIS_PASSWORD=your_redis_password_here
REDIS_MAX_CONNECTIONS=10
REDIS_SOCKET_TIMEOUT=5
# Email Configuration (for password reset emails)
SMTP_SERVER=localhost
SMTP_PORT=587
# SMTP_USERNAME=your_smtp_username
# SMTP_PASSWORD=your_smtp_password
SMTP_USE_TLS=true
FROM_EMAIL=noreply@aniworld.local
# External API Keys
# ANIME_PROVIDER_API_KEY=your_anime_provider_api_key
# TMDB_API_KEY=your_tmdb_api_key
# Security Settings
SESSION_TIMEOUT_HOURS=24
MAX_FAILED_LOGIN_ATTEMPTS=5
LOCKOUT_DURATION_MINUTES=30
# Rate Limiting
RATE_LIMIT_PER_MINUTE=60
API_RATE_LIMIT_PER_MINUTE=100
# Application Settings
DEBUG=false
HOST=127.0.0.1
PORT=5000
# Anime and Download Settings
ANIME_DIRECTORY=./downloads
MAX_CONCURRENT_DOWNLOADS=3
# DOWNLOAD_SPEED_LIMIT=1000000 # bytes per second
# Logging
LOG_LEVEL=INFO
LOG_FILE=./logs/aniworld.log
"""
            with open(file_path, 'w', encoding='utf-8') as f:
                f.write(template_content)
            logger.info(f"Environment template created at {file_path}")
            return True
        except Exception as e:
            logger.error(f"Error creating environment template: {e}")
            return False
# Create global instance
env_config = EnvironmentConfig()

# Validate configuration on import
# (only logs a warning; import never fails on bad configuration).
if not env_config.validate_config():
    logger.warning("Invalid environment configuration detected. Please check your .env file.")

View File

@@ -1,28 +0,0 @@
from typing import Optional
from pydantic import BaseSettings, Field
class Settings(BaseSettings):
    """Application settings from environment variables.

    NOTE(review): imports BaseSettings from pydantic (v1-style); in
    pydantic v2 BaseSettings moved to the `pydantic-settings` package —
    confirm the pinned pydantic version.
    """
    jwt_secret_key: str = Field(default="your-secret-key-here", env="JWT_SECRET_KEY")
    # Salt appended to passwords before hashing.
    password_salt: str = Field(default="default-salt", env="PASSWORD_SALT")
    master_password_hash: Optional[str] = Field(default=None, env="MASTER_PASSWORD_HASH")
    master_password: Optional[str] = Field(default=None, env="MASTER_PASSWORD")  # For development
    token_expiry_hours: int = Field(default=24, env="SESSION_TIMEOUT_HOURS")
    anime_directory: str = Field(default="", env="ANIME_DIRECTORY")
    log_level: str = Field(default="INFO", env="LOG_LEVEL")
    # Additional settings from .env
    database_url: str = Field(default="sqlite:///./data/aniworld.db", env="DATABASE_URL")
    cors_origins: str = Field(default="*", env="CORS_ORIGINS")
    api_rate_limit: int = Field(default=100, env="API_RATE_LIMIT")
    default_provider: str = Field(default="aniworld.to", env="DEFAULT_PROVIDER")
    provider_timeout: int = Field(default=30, env="PROVIDER_TIMEOUT")
    retry_attempts: int = Field(default=3, env="RETRY_ATTEMPTS")

    class Config:
        # Read values from a local .env file; ignore unknown keys.
        env_file = ".env"
        extra = "ignore"


# Singleton settings instance used across the application.
settings = Settings()

View File

@@ -1,612 +0,0 @@
"""
FastAPI-based AniWorld Server Application.
This module implements a comprehensive FastAPI application following the instructions:
- Simple master password authentication using JWT
- Repository pattern with dependency injection
- Proper error handling and validation
- OpenAPI documentation
- Security best practices
"""
import hashlib
import logging
import os
import sys
from contextlib import asynccontextmanager
from datetime import datetime, timedelta
from typing import Any, Dict, List, Optional
import jwt
# Add parent directory to path for imports
# NOTE(review): mutating sys.path at import time ties imports to the
# working-tree layout; a packaged install would make this unnecessary.
current_dir = os.path.dirname(__file__)
parent_dir = os.path.join(current_dir, '..')
project_root = os.path.join(parent_dir, '..')  # Go up two levels to reach project root
sys.path.insert(0, os.path.abspath(project_root))
import uvicorn
from fastapi import Depends, FastAPI, HTTPException, Request, Security, status
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import HTMLResponse, JSONResponse
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
from fastapi.staticfiles import StaticFiles
from fastapi.templating import Jinja2Templates
from pydantic import BaseModel, Field
from src.config.settings import settings
# Application flow services will be imported lazily where needed to avoid
# import-time circular dependencies during tests.
# Sentinels: None until the middleware wiring below imports the real classes.
SetupService = None
ApplicationFlowMiddleware = None
# Configure logging
# Ensure the log directory exists before attaching the file handler:
# logging.FileHandler raises FileNotFoundError if './logs' is missing.
os.makedirs('./logs', exist_ok=True)
logging.basicConfig(
    level=logging.INFO,
    format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
    handlers=[
        logging.FileHandler('./logs/aniworld.log'),
        logging.StreamHandler()
    ]
)
logger = logging.getLogger(__name__)

# Security
# HTTP Bearer scheme consumed by the get_current_user dependency.
security = HTTPBearer()

# Settings are loaded from `src.config.settings.settings`
# Pydantic Models
class LoginRequest(BaseModel):
    """Login request model."""
    # Master password submitted by the client; must be non-empty.
    password: str = Field(..., min_length=1, description="Master password")
class LoginResponse(BaseModel):
    """Login response model."""
    success: bool
    message: str
    # Bearer token and its expiry; both None when login fails.
    token: Optional[str] = None
    expires_at: Optional[datetime] = None
class TokenVerifyResponse(BaseModel):
    """Token verification response model."""
    valid: bool
    message: str
    # Claims echoed back from a valid token; None when invalid.
    user: Optional[str] = None
    expires_at: Optional[datetime] = None
class HealthResponse(BaseModel):
    """Health check response model."""
    status: str
    timestamp: datetime
    version: str = "1.0.0"
    # Per-service status strings keyed by service name.
    services: Dict[str, str]
class AnimeSearchRequest(BaseModel):
    """Anime search request model."""
    query: str = Field(..., min_length=1, max_length=100)
    # Pagination: page size (1-100) and starting offset.
    limit: int = Field(default=20, ge=1, le=100)
    offset: int = Field(default=0, ge=0)
class AnimeResponse(BaseModel):
    """Anime response model."""
    id: str
    title: str
    description: Optional[str] = None
    episodes: int = 0
    status: str = "Unknown"
    poster_url: Optional[str] = None
class EpisodeResponse(BaseModel):
    """Episode response model."""
    id: str
    # Parent anime this episode belongs to.
    anime_id: str
    episode_number: int
    title: Optional[str] = None
    description: Optional[str] = None
    duration: Optional[int] = None
    stream_url: Optional[str] = None
class ErrorResponse(BaseModel):
    """Error response model."""
    success: bool = False
    error: str
    # Machine-readable error code and optional structured details.
    code: Optional[str] = None
    details: Optional[Dict[str, Any]] = None
class SetupRequest(BaseModel):
    """Setup request model."""
    password: str = Field(..., min_length=8, description="Master password (min 8 characters)")
    directory: str = Field(..., min_length=1, description="Anime directory path")
class SetupResponse(BaseModel):
    """Setup response model."""
    status: str
    message: str
    # Where the client should navigate after a successful setup step.
    redirect_url: Optional[str] = None
class SetupStatusResponse(BaseModel):
    """Setup status response model."""
    setup_complete: bool
    # Map of requirement name -> satisfied?, plus the unmet ones as a list.
    requirements: Dict[str, bool]
    missing_requirements: List[str]
# Authentication utilities
def hash_password(password: str) -> str:
    """Return the SHA-256 hex digest of *password* concatenated with the
    configured salt (settings.password_salt)."""
    salted = f"{password}{settings.password_salt}"
    return hashlib.sha256(salted.encode()).hexdigest()
def verify_master_password(password: str) -> bool:
    """Check *password* against the configured master credential.

    Prefers the salted hash; when no hash is configured, falls back to
    the plain-text development password, and rejects everything when
    neither is set.
    """
    stored_hash = settings.master_password_hash
    if stored_hash:
        return hash_password(password) == stored_hash
    # Development-only fallback: direct comparison with the plain password.
    if settings.master_password:
        return password == settings.master_password
    return False
def generate_jwt_token() -> Dict[str, Any]:
    """Create a signed HS256 JWT for the master user.

    Returns a dict with the encoded 'token' and its 'expires_at' datetime
    (UTC, settings.token_expiry_hours from now).
    """
    issued_at = datetime.utcnow()
    expires_at = issued_at + timedelta(hours=settings.token_expiry_hours)
    claims = {
        'user': 'master',
        'exp': expires_at,
        'iat': issued_at,
        'iss': 'aniworld-fastapi-server'
    }
    encoded = jwt.encode(claims, settings.jwt_secret_key, algorithm='HS256')
    return {'token': encoded, 'expires_at': expires_at}
def verify_jwt_token(token: str) -> Optional[Dict[str, Any]]:
    """Decode *token* and return its claims, or None when it is expired
    or otherwise invalid (both cases are logged as warnings)."""
    try:
        return jwt.decode(token, settings.jwt_secret_key, algorithms=['HS256'])
    except jwt.ExpiredSignatureError:
        logger.warning("Token has expired")
    except jwt.InvalidTokenError as e:
        logger.warning(f"Invalid token: {str(e)}")
    return None
async def get_current_user(credentials: HTTPAuthorizationCredentials = Security(security)):
    """Dependency to get current authenticated user.

    Extracts the bearer token, verifies it via verify_jwt_token, and
    returns the decoded JWT payload; raises HTTP 401 (with a
    WWW-Authenticate header) when the token is expired or invalid.
    """
    token = credentials.credentials
    payload = verify_jwt_token(token)
    if not payload:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid or expired token",
            headers={"WWW-Authenticate": "Bearer"},
        )
    return payload
# Global exception handler
async def global_exception_handler(request, exc):
    """Global exception handler for unhandled errors.

    Logs the full traceback server-side and returns a generic 500 JSON
    body so internal details never leak to clients.
    """
    logger.error(f"Unhandled exception: {exc}", exc_info=True)
    return JSONResponse(
        status_code=500,
        content={
            "success": False,
            "error": "Internal Server Error",
            "code": "INTERNAL_ERROR"
        }
    )
# Application lifespan
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Manage application lifespan events.

    Code before `yield` runs at startup, code after runs at shutdown.
    """
    # Startup
    logger.info("Starting AniWorld FastAPI server...")
    logger.info(f"Anime directory: {settings.anime_directory}")
    logger.info(f"Log level: {settings.log_level}")
    # Verify configuration
    # Only warns: the server still starts without a master password.
    if not settings.master_password_hash and not settings.master_password:
        logger.warning("No master password configured! Set MASTER_PASSWORD_HASH or MASTER_PASSWORD environment variable.")
    yield
    # Shutdown
    logger.info("Shutting down AniWorld FastAPI server...")
# Create FastAPI application
# Fix: the tag metadata must be passed as `openapi_tags` — `tags_metadata`
# is not a FastAPI constructor parameter, so the tag descriptions were
# silently dropped from the generated OpenAPI schema.
app = FastAPI(
    title="AniWorld API",
    description="""
## AniWorld Management System
A comprehensive FastAPI-based application for managing anime series and episodes.
### Features
* **Series Management**: Search, track, and manage anime series
* **Episode Tracking**: Monitor missing episodes and download progress
* **Authentication**: Secure master password authentication with JWT tokens
* **Real-time Updates**: WebSocket support for live progress tracking
* **File Management**: Automatic file scanning and organization
* **Download Queue**: Queue-based download management system
### Authentication
Most endpoints require authentication using a master password.
Use the `/auth/login` endpoint to obtain a JWT token, then include it
in the `Authorization` header as `Bearer <token>`.
### API Versioning
This API follows semantic versioning. Current version: **1.0.0**
""",
    version="1.0.0",
    docs_url="/docs",
    redoc_url="/redoc",
    lifespan=lifespan,
    contact={
        "name": "AniWorld API Support",
        "url": "https://github.com/your-repo/aniworld",
        "email": "support@aniworld.com",
    },
    license_info={
        "name": "MIT",
        "url": "https://opensource.org/licenses/MIT",
    },
    openapi_tags=[
        {
            "name": "Authentication",
            "description": "Operations related to user authentication and session management",
        },
        {
            "name": "Anime",
            "description": "Operations for searching and managing anime series",
        },
        {
            "name": "Episodes",
            "description": "Operations for managing individual episodes",
        },
        {
            "name": "Downloads",
            "description": "Operations for managing the download queue and progress",
        },
        {
            "name": "System",
            "description": "System health, configuration, and maintenance operations",
        },
        {
            "name": "Files",
            "description": "File system operations and scanning functionality",
        },
    ]
)
# Configure templates
templates = Jinja2Templates(directory="src/server/web/templates")

# Mount static files
app.mount("/static", StaticFiles(directory="src/server/web/static"), name="static")

# Add CORS middleware
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # Configure appropriately for production
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Add application flow middleware (import lazily to avoid circular imports during tests)
try:
    # NOTE(review): the module-level SetupService sentinel is set to None
    # above and never reassigned here, so this branch always runs.
    if SetupService is None:
        from src.server.middleware.application_flow_middleware import (
            ApplicationFlowMiddleware as _AppFlow,
        )
        from src.server.services.setup_service import SetupService as _SetupService
        setup_service = _SetupService()
        app.add_middleware(_AppFlow, setup_service=setup_service)
except Exception:
    # In test environments or minimal setups, middleware may be skipped
    pass

# Add global exception handler
app.add_exception_handler(Exception, global_exception_handler)
from src.server.controllers.v2.anime_controller import router as anime_router
# Include controller routers (use v2 controllers to avoid circular imports)
from src.server.controllers.v2.auth_controller import router as auth_router
from src.server.controllers.v2.episode_controller import router as episode_router
from src.server.controllers.v2.setup_controller import router as setup_router
from src.server.controllers.v2.system_controller import router as system_router
app.include_router(auth_router)
app.include_router(setup_router)
app.include_router(anime_router)
app.include_router(episode_router)
app.include_router(system_router)
# Legacy API compatibility endpoints (TODO: migrate JavaScript to use v1 endpoints)
@app.post("/api/add_series")
async def legacy_add_series(
    request_data: Dict[str, Any],
    current_user: Dict = Depends(get_current_user)
):
    """Legacy endpoint for adding series - basic implementation.

    Expects a JSON body with non-empty 'link' and 'name' keys and answers
    with a {"status", "message"} dict in the legacy response shape.
    """
    try:
        link = request_data.get('link', '')
        name = request_data.get('name', '')
        # Both fields are mandatory; reject if either is missing/empty.
        if link and name:
            return {"status": "success", "message": f"Series '{name}' added successfully"}
        return {"status": "error", "message": "Link and name are required"}
    except Exception as e:
        return {"status": "error", "message": f"Failed to add series: {str(e)}"}
@app.post("/api/download")
async def legacy_download(
    request_data: Dict[str, Any],
    current_user: Dict = Depends(get_current_user)
):
    """Legacy endpoint for downloading series - basic implementation.

    Expects a JSON body with a non-empty 'folders' list and answers with a
    {"status", "message"} dict in the legacy response shape.
    """
    try:
        folders = request_data.get('folders', [])
        if folders:
            folder_count = len(folders)
            return {"status": "success", "message": f"Download started for {folder_count} series"}
        return {"status": "error", "message": "No folders specified"}
    except Exception as e:
        return {"status": "error", "message": f"Failed to start download: {str(e)}"}
# Setup endpoints moved to controllers: src/server/controllers/setup_controller.py
# Health check endpoint (kept in main app)
@app.get("/health", response_model=HealthResponse, tags=["System"])
async def health_check() -> HealthResponse:
    """
    Application health check endpoint.

    Unauthenticated liveness probe returning an overall status, a UTC
    timestamp, and a per-service status map.

    Returns:
        HealthResponse: status string, aware UTC timestamp, service states.
    """
    # Local import: the file-level datetime import may not include timezone.
    from datetime import timezone

    return HealthResponse(
        status="healthy",
        # Fix: datetime.utcnow() is deprecated and returns a *naive* datetime;
        # use an explicitly timezone-aware UTC timestamp instead.
        timestamp=datetime.now(timezone.utc),
        services={
            "authentication": "online",
            "anime_service": "online",
            "episode_service": "online"
        }
    )
# Common browser requests that might cause "Invalid HTTP request received" warnings
@app.get("/favicon.ico")
async def favicon():
    """Handle favicon requests from browsers."""
    # No favicon is shipped; answer with an explicit JSON 404 body.
    body = {"detail": "Favicon not found"}
    return JSONResponse(status_code=404, content=body)
@app.get("/robots.txt")
async def robots():
    """Handle robots.txt requests."""
    # No robots.txt is shipped; answer with an explicit JSON 404 body.
    body = {"detail": "Robots.txt not found"}
    return JSONResponse(status_code=404, content=body)
@app.get("/")
async def root():
    """Root endpoint redirect to docs."""
    # Small discovery payload pointing clients at the docs and health probe.
    return {
        "message": "AniWorld API",
        "documentation": "/docs",
        "health": "/health",
    }
# Web interface routes
@app.get("/app", response_class=HTMLResponse)
async def web_app(request: Request):
    """Serve the main web application."""
    context = {"request": request}
    return templates.TemplateResponse("base/index.html", context)
@app.get("/login", response_class=HTMLResponse)
async def login_page(request: Request):
    """Serve the login page."""
    context = {"request": request}
    return templates.TemplateResponse("base/login.html", context)
@app.get("/setup", response_class=HTMLResponse)
async def setup_page(request: Request):
    """Serve the setup page."""
    context = {"request": request}
    return templates.TemplateResponse("base/setup.html", context)
@app.get("/queue", response_class=HTMLResponse)
async def queue_page(request: Request):
    """Serve the queue page."""
    context = {"request": request}
    return templates.TemplateResponse("base/queue.html", context)
# Anime endpoints (protected)
@app.get("/api/anime/search", response_model=List[AnimeResponse], tags=["Anime"])
async def search_anime(
    query: str,
    limit: int = 20,
    offset: int = 0,
    current_user: Dict = Depends(get_current_user)
) -> List[AnimeResponse]:
    """
    Search for anime by title.
    Requires: Bearer token in Authorization header
    - **query**: Search query string
    - **limit**: Maximum number of results (1-100; out-of-range values are clamped)
    - **offset**: Number of results to skip for pagination (negative treated as 0)
    """
    # TODO: Implement actual anime search logic
    # This is a placeholder implementation
    logger.info(f"Searching anime with query: {query}")
    # Fix: the docstring promised limit in 1-100, but nothing enforced it —
    # clamp both pagination parameters instead of trusting raw query params.
    limit = max(1, min(limit, 100))
    offset = max(0, offset)
    # Mock data for now
    mock_results = [
        AnimeResponse(
            id=f"anime_{i}",
            title=f"Sample Anime {i}",
            description=f"Description for anime {i}",
            episodes=24,
            status="Completed"
        )
        for i in range(offset + 1, min(offset + limit + 1, 100))
        if query.lower() in f"sample anime {i}".lower()
    ]
    return mock_results
@app.get("/api/anime/{anime_id}", response_model=AnimeResponse, tags=["Anime"])
async def get_anime(
    anime_id: str,
    current_user: Dict = Depends(get_current_user)
) -> AnimeResponse:
    """
    Get detailed information about a specific anime.
    Requires: Bearer token in Authorization header
    - **anime_id**: Unique identifier for the anime
    """
    # TODO: Implement actual anime retrieval logic
    logger.info(f"Fetching anime details for ID: {anime_id}")
    # Mock data for now
    placeholder = {
        "id": anime_id,
        "title": f"Anime {anime_id}",
        "description": f"Detailed description for anime {anime_id}",
        "episodes": 24,
        "status": "Completed",
    }
    return AnimeResponse(**placeholder)
@app.get("/api/anime/{anime_id}/episodes", response_model=List[EpisodeResponse], tags=["Episodes"])
async def get_anime_episodes(
    anime_id: str,
    current_user: Dict = Depends(get_current_user)
) -> List[EpisodeResponse]:
    """
    Get all episodes for a specific anime.
    Requires: Bearer token in Authorization header
    - **anime_id**: Unique identifier for the anime
    """
    # TODO: Implement actual episode retrieval logic
    logger.info(f"Fetching episodes for anime ID: {anime_id}")
    # Mock data for now
    episodes = []
    for i in range(1, 25):  # 24 episodes
        episodes.append(
            EpisodeResponse(
                id=f"{anime_id}_ep_{i}",
                anime_id=anime_id,
                episode_number=i,
                title=f"Episode {i}",
                description=f"Description for episode {i}",
                duration=1440  # 24 minutes in seconds
            )
        )
    return episodes
@app.get("/api/episodes/{episode_id}", response_model=EpisodeResponse, tags=["Episodes"])
async def get_episode(
    episode_id: str,
    current_user: Dict = Depends(get_current_user)
) -> EpisodeResponse:
    """
    Get detailed information about a specific episode.
    Requires: Bearer token in Authorization header
    - **episode_id**: Unique identifier for the episode
    """
    # TODO: Implement actual episode retrieval logic
    logger.info(f"Fetching episode details for ID: {episode_id}")
    # Mock data for now
    placeholder = {
        "id": episode_id,
        "anime_id": "sample_anime",
        "episode_number": 1,
        "title": f"Episode {episode_id}",
        "description": f"Detailed description for episode {episode_id}",
        "duration": 1440,
    }
    return EpisodeResponse(**placeholder)
# Database health check endpoint
@app.get("/api/system/database/health", response_model=Dict[str, Any], tags=["System"])
async def database_health(current_user: Dict = Depends(get_current_user)) -> Dict[str, Any]:
    """
    Check database connectivity and health.
    Requires: Bearer token in Authorization header

    Returns a placeholder status dict until a real DB check is wired in.
    """
    # Local import: the file-level datetime import may not include timezone.
    from datetime import timezone

    # TODO: Implement actual database health check
    return {
        "status": "healthy",
        "connection_pool": "active",
        "response_time_ms": 15,
        # Fix: datetime.utcnow() is deprecated and naive; emit an aware
        # UTC timestamp so clients get an explicit offset.
        "last_check": datetime.now(timezone.utc).isoformat()
    }
# Configuration endpoint
@app.get("/api/system/config", response_model=Dict[str, Any], tags=["System"])
async def get_system_config(current_user: Dict = Depends(get_current_user)) -> Dict[str, Any]:
    """
    Get system configuration information.
    Requires: Bearer token in Authorization header
    """
    config = {
        "anime_directory": settings.anime_directory,
        "log_level": settings.log_level,
        "token_expiry_hours": settings.token_expiry_hours,
    }
    config["version"] = "1.0.0"
    return config
if __name__ == "__main__":
    import socket

    # Configure enhanced logging
    # Fall back to INFO when the configured level name is unknown.
    log_level = getattr(logging, settings.log_level.upper(), logging.INFO)
    logging.getLogger().setLevel(log_level)

    # Check if port is available
    def is_port_available(host: str, port: int) -> bool:
        """Check if a port is available on the given host."""
        try:
            # Binding succeeds only if nothing else listens on (host, port).
            with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
                sock.bind((host, port))
                return True
        except OSError:
            return False

    host = "127.0.0.1"
    port = 8000
    # Abort early with a helpful hint if the port is taken.
    if not is_port_available(host, port):
        logger.error(f"Port {port} is already in use on {host}. Please stop other services or choose a different port.")
        logger.info("You can check which process is using the port with: netstat -ano | findstr :8000")
        sys.exit(1)
    logger.info("Starting AniWorld FastAPI server with uvicorn...")
    logger.info(f"Anime directory: {settings.anime_directory}")
    logger.info(f"Log level: {settings.log_level}")
    logger.info(f"Server will be available at http://{host}:{port}")
    logger.info(f"API documentation at http://{host}:{port}/docs")
    try:
        # Run the application
        uvicorn.run(
            "fastapi_app:app",
            host=host,
            port=port,
            reload=False,  # Disable reload to prevent constant restarting
            log_level=settings.log_level.lower()
        )
    except Exception as e:
        logger.error(f"Failed to start server: {e}")
        sys.exit(1)

View File

@@ -1,248 +0,0 @@
"""
Application Flow Middleware for FastAPI.
This middleware enforces the application flow priorities:
1. Setup page (if setup is not complete)
2. Authentication page (if user is not authenticated)
3. Main application (for authenticated users with completed setup)
The middleware redirects users to the appropriate page based on their current state
and the state of the application setup.
"""
import logging
from typing import Optional
from fastapi import Request
from fastapi.responses import RedirectResponse
from starlette.middleware.base import BaseHTTPMiddleware
# Import the setup service
try:
    from ...core.application.services.setup_service import SetupService
except ImportError:
    # Handle case where service is not available
    # Fallback stub: always reports setup as complete, so the middleware
    # never forces a redirect to /setup when the real service is missing.
    class SetupService:
        def is_setup_complete(self):
            return True
logger = logging.getLogger(__name__)
class ApplicationFlowMiddleware(BaseHTTPMiddleware):
    """
    Middleware to enforce application flow: setup → auth → main application.

    This middleware:
    1. Checks if setup is complete
    2. Validates authentication status
    3. Redirects to appropriate page based on state
    4. Allows API endpoints and static files to pass through
    """

    def __init__(self, app, setup_service: Optional[SetupService] = None):
        """
        Initialize the application flow middleware.

        Args:
            app: FastAPI application instance
            setup_service: Setup service instance (optional, will create if not provided)
        """
        super().__init__(app)
        self.setup_service = setup_service or SetupService()
        # Define paths that should bypass flow enforcement (prefix-matched)
        self.bypass_paths = {
            "/static",        # Static files
            "/favicon.ico",   # Browser favicon requests
            "/robots.txt",    # Robots.txt
            "/health",        # Health check endpoints
            "/docs",          # OpenAPI documentation
            "/redoc",         # ReDoc documentation
            "/openapi.json"   # OpenAPI spec
        }
        # API paths that should bypass flow but may require auth
        self.api_paths = {
            "/api",
            "/auth"
        }
        # Pages that are part of the flow and should be accessible
        self.flow_pages = {
            "/setup",
            "/login",
            "/app"
        }

    async def dispatch(self, request: Request, call_next):
        """
        Process the request and enforce application flow.

        Args:
            request: Incoming HTTP request
            call_next: Next middleware/handler in chain

        Returns:
            Response: Either a redirect response or the result of call_next
        """
        try:
            # Get the request path
            path = request.url.path
            # Skip flow enforcement for certain paths
            if self._should_bypass_flow(path):
                return await call_next(request)
            # Check application setup status
            setup_complete = self.setup_service.is_setup_complete()
            # Check authentication status
            is_authenticated = await self._is_user_authenticated(request)
            # Determine the appropriate action
            redirect_response = self._determine_redirect(path, setup_complete, is_authenticated)
            if redirect_response:
                logger.info(f"Redirecting {path} to {redirect_response.headers.get('location')}")
                return redirect_response
            # Continue with the request
            return await call_next(request)
        except Exception as e:
            logger.error(f"Error in ApplicationFlowMiddleware: {e}", exc_info=True)
            # Fail open: in case of error, allow the request to continue
            return await call_next(request)

    def _should_bypass_flow(self, path: str) -> bool:
        """
        Check if the given path should bypass flow enforcement.

        Args:
            path: Request path

        Returns:
            bool: True if path should bypass flow enforcement
        """
        # Check exact bypass paths (startswith so e.g. /static/css matches)
        for bypass_path in self.bypass_paths:
            if path.startswith(bypass_path):
                return True
        # API paths bypass flow enforcement (but may have their own auth)
        for api_path in self.api_paths:
            if path.startswith(api_path):
                return True
        return False

    async def _is_user_authenticated(self, request: Request) -> bool:
        """
        Check if the user is authenticated by validating JWT token.

        Args:
            request: HTTP request object

        Returns:
            bool: True if user is authenticated, False otherwise
        """
        try:
            # Check for Authorization header
            auth_header = request.headers.get("authorization")
            if not auth_header or not auth_header.startswith("Bearer "):
                return False
            # Extract and validate token
            token = auth_header.split(" ")[1]
            # Import JWT validation function (avoid circular imports)
            try:
                from ..fastapi_app import verify_jwt_token
                payload = verify_jwt_token(token)
                return payload is not None
            except ImportError:
                # Fallback if import fails
                logger.warning("Could not import JWT verification function")
                return False
        except Exception as e:
            logger.error(f"Error checking authentication: {e}")
            return False

    def _determine_redirect(self, path: str, setup_complete: bool, is_authenticated: bool) -> Optional[RedirectResponse]:
        """
        Determine if a redirect is needed based on current state.

        Args:
            path: Current request path
            setup_complete: Whether application setup is complete
            is_authenticated: Whether user is authenticated

        Returns:
            Optional[RedirectResponse]: Redirect response if needed, None otherwise
        """
        # If setup is not complete
        if not setup_complete:
            # Allow access to setup page
            if path == "/setup":
                return None
            # Redirect everything else to setup
            return RedirectResponse(url="/setup", status_code=302)
        # Setup is complete, check authentication
        if not is_authenticated:
            # Allow access to login page
            if path == "/login":
                return None
            # Redirect unauthenticated users to login (except for specific pages)
            if path in self.flow_pages or path == "/":
                return RedirectResponse(url="/login", status_code=302)
        # User is authenticated and setup is complete
        else:
            # Redirect from setup/login pages to main app
            if path in ["/setup", "/login", "/"]:
                return RedirectResponse(url="/app", status_code=302)
        # No redirect needed
        return None

    async def get_flow_status(self, request: Request) -> dict:
        """
        Get current flow status for debugging/monitoring.

        Bug fix: this used to be a sync method that called the async
        ``_is_user_authenticated`` without awaiting it, so the returned dict
        contained a never-awaited coroutine object instead of a bool. The
        method is now a coroutine itself and awaits the check; callers must
        ``await`` it (the old sync form returned unusable data anyway).

        Args:
            request: HTTP request object

        Returns:
            dict: Current flow status information
        """
        try:
            setup_complete = self.setup_service.is_setup_complete()
            is_authenticated = await self._is_user_authenticated(request)
            return {
                "setup_complete": setup_complete,
                "authenticated": is_authenticated,
                "path": request.url.path,
                "should_bypass": self._should_bypass_flow(request.url.path)
            }
        except Exception as e:
            return {
                "error": str(e),
                "path": request.url.path
            }
def create_application_flow_middleware(setup_service: Optional[SetupService] = None) -> ApplicationFlowMiddleware:
    """
    Factory function to create application flow middleware.

    Args:
        setup_service: Setup service instance (optional)

    Returns:
        ApplicationFlowMiddleware: Configured middleware instance
    """
    # NOTE(review): this instantiates the middleware with app=None, but
    # BaseHTTPMiddleware normally wraps the downstream ASGI app. Prefer
    # app.add_middleware(ApplicationFlowMiddleware, setup_service=...);
    # confirm how callers actually use this factory.
    return ApplicationFlowMiddleware(app=None, setup_service=setup_service)

View File

@@ -1,41 +0,0 @@
# FastAPI and ASGI server
fastapi==0.118.0
uvicorn[standard]==0.37.0
python-multipart==0.0.12
# Authentication and security
pyjwt==2.10.1
passlib[bcrypt]==1.7.4
python-jose[cryptography]==3.3.0
# Configuration and environment
pydantic==2.11.10
pydantic-settings==2.11.0
python-dotenv==1.1.1
# Database (if needed)
sqlalchemy==2.0.43
alembic==1.16.5
# HTTP client
httpx==0.28.1
aiofiles==24.1.0
# Utilities
python-dateutil==2.9.0.post0
pytz==2024.2
# Development and testing
pytest==8.4.2
pytest-asyncio==1.2.0
pytest-cov==7.0.0
pytest-mock==3.15.1
# Code quality
black==25.9.0
isort==6.1.0
flake8==7.3.0
mypy==1.18.2
# Logging
structlog==25.1.0

View File

@@ -1,34 +0,0 @@
from fastapi import FastAPI
from src.server.controllers import (
anime_controller,
auth_controller,
episode_controller,
setup_controller,
system_controller,
)
# Avoid TestClient/httpx incompatibilities in some envs; we'll check route registration instead
def test_controllers_expose_router_objects():
    # Every controller module must publish a module-level `router` attribute.
    controllers = (
        auth_controller,
        anime_controller,
        episode_controller,
        setup_controller,
        system_controller,
    )
    for module in controllers:
        assert hasattr(module, "router")
def test_include_routers_in_app():
    app = FastAPI()
    # Register every controller router on a fresh app, in the same order
    # the real application does.
    for module in (
        auth_controller,
        anime_controller,
        episode_controller,
        setup_controller,
        system_controller,
    ):
        app.include_router(module.router)
    # Basic sanity: the system config route should be registered on the app
    registered = [route.path for route in app.routes if hasattr(route, 'path')]
    assert "/api/system/config" in registered

View File

@@ -1,24 +0,0 @@
import os
from src.config.settings import settings
def test_settings_has_fields(monkeypatch):
    # Ensure settings object has expected attributes and env vars affect values
    monkeypatch.setenv("JWT_SECRET_KEY", "test-secret")
    monkeypatch.setenv("ANIME_DIRECTORY", "/tmp/anime")
    # Reload settings by creating a new instance
    from src.config.settings import Settings
    fresh = Settings()
    assert fresh.jwt_secret_key == "test-secret"
    assert fresh.anime_directory == "/tmp/anime"
def test_settings_defaults():
    # When env not set, defaults are used
    for attribute in ("jwt_secret_key", "anime_directory", "token_expiry_hours"):
        assert hasattr(settings, attribute)

View File

@@ -1,549 +0,0 @@
"""
Performance & Optimization Module for AniWorld App
This module provides download speed limiting, parallel download support,
caching mechanisms, memory usage monitoring, and download resumption.
"""
import os
import threading
import time
import logging
import queue
import hashlib
from datetime import datetime, timedelta
from typing import Dict, List, Optional, Any, Callable
from dataclasses import dataclass, field
from concurrent.futures import ThreadPoolExecutor, as_completed
import json
import sqlite3
from contextlib import contextmanager
import gc
import psutil
import requests
@dataclass
class DownloadTask:
    """Represents a download task with all necessary information."""
    # Identity and source coordinates of the episode to fetch.
    task_id: str
    serie_name: str
    season: int
    episode: int
    key: str
    language: str
    # Filesystem destinations: final output and in-progress temp file.
    output_path: str
    temp_path: str
    priority: int = 0  # Higher number = higher priority
    # Retry bookkeeping used by the queue when a download fails.
    retry_count: int = 0
    max_retries: int = 3
    # Lifecycle timestamps, set as the task moves through the queue.
    created_at: datetime = field(default_factory=datetime.now)
    started_at: Optional[datetime] = None
    completed_at: Optional[datetime] = None
    status: str = 'pending'  # pending, downloading, completed, failed, paused
    # Free-form progress info (bytes downloaded, percent, ...) from callbacks.
    progress: Dict[str, Any] = field(default_factory=dict)
    error_message: Optional[str] = None
class SpeedLimiter:
    """Control download speeds to prevent bandwidth saturation.

    Thread-safe: all shared counters are guarded by ``self.lock``.
    A limit of 0 means unlimited.
    """

    def __init__(self, max_speed_mbps: float = 0):  # 0 = unlimited
        self.max_speed_mbps = max_speed_mbps
        # Limit converted to bytes/second; 0 disables throttling.
        self.max_bytes_per_second = max_speed_mbps * 1024 * 1024 if max_speed_mbps > 0 else 0
        self.download_start_time = None
        self.bytes_downloaded = 0
        self.lock = threading.Lock()
        self.logger = logging.getLogger(__name__)

    def set_speed_limit(self, max_speed_mbps: float):
        """Set maximum download speed in MB/s (0 disables limiting)."""
        with self.lock:
            self.max_speed_mbps = max_speed_mbps
            self.max_bytes_per_second = max_speed_mbps * 1024 * 1024 if max_speed_mbps > 0 else 0
        self.logger.info(f"Speed limit set to {max_speed_mbps} MB/s")

    def start_download(self):
        """Mark the start of a new download session."""
        with self.lock:
            self.download_start_time = time.time()
            self.bytes_downloaded = 0

    def update_progress(self, bytes_downloaded: int):
        """Update download progress and apply speed limiting if needed.

        Bug fix: the throttling delay is now computed while holding the lock
        but the ``time.sleep`` happens *outside* it, so other threads sharing
        this limiter are no longer blocked while one thread throttles.
        """
        if self.max_bytes_per_second <= 0:  # No limit
            return
        delay = 0.0
        with self.lock:
            self.bytes_downloaded += bytes_downloaded
            if self.download_start_time:
                elapsed_time = time.time() - self.download_start_time
                if elapsed_time > 0:
                    current_speed = self.bytes_downloaded / elapsed_time
                    if current_speed > self.max_bytes_per_second:
                        # Time by which we are "ahead" of the allowed rate.
                        target_time = self.bytes_downloaded / self.max_bytes_per_second
                        delay = target_time - elapsed_time
        if delay > 0:
            self.logger.debug(f"Speed limiting: sleeping for {delay:.2f}s")
            time.sleep(delay)

    def get_current_speed(self) -> float:
        """Get current download speed in MB/s (0.0 before a session starts)."""
        with self.lock:
            if self.download_start_time:
                elapsed_time = time.time() - self.download_start_time
                if elapsed_time > 0:
                    speed_bps = self.bytes_downloaded / elapsed_time
                    return speed_bps / (1024 * 1024)  # Convert to MB/s
        return 0.0
class MemoryMonitor:
    """Monitor and optimize memory usage."""

    def __init__(self, warning_threshold_mb: int = 1024, critical_threshold_mb: int = 2048):
        # Thresholds are stored internally in bytes.
        self.warning_threshold = warning_threshold_mb * 1024 * 1024
        self.critical_threshold = critical_threshold_mb * 1024 * 1024
        self.logger = logging.getLogger(__name__)
        self.monitoring = False
        self.monitor_thread = None

    def start_monitoring(self, check_interval: int = 30):
        """Start continuous memory monitoring."""
        if self.monitoring:
            return
        self.monitoring = True
        # Daemon thread so it never blocks interpreter shutdown.
        self.monitor_thread = threading.Thread(
            target=self._monitoring_loop,
            args=(check_interval,),
            daemon=True
        )
        self.monitor_thread.start()
        self.logger.info("Memory monitoring started")

    def stop_monitoring(self):
        """Stop memory monitoring."""
        # Flag flip is observed by the loop; join waits up to 5s for exit.
        self.monitoring = False
        if self.monitor_thread:
            self.monitor_thread.join(timeout=5)
        self.logger.info("Memory monitoring stopped")

    def _monitoring_loop(self, check_interval: int):
        """Main monitoring loop."""
        while self.monitoring:
            try:
                self.check_memory_usage()
                time.sleep(check_interval)
            except Exception as e:
                self.logger.error(f"Error in memory monitoring: {e}")
                time.sleep(check_interval)

    def check_memory_usage(self):
        """Check current memory usage and take action if needed."""
        try:
            # RSS of this process, via psutil (imported at module top).
            process = psutil.Process()
            memory_info = process.memory_info()
            memory_usage = memory_info.rss
            if memory_usage > self.critical_threshold:
                self.logger.warning(f"Critical memory usage: {memory_usage / (1024*1024):.1f} MB")
                self.force_garbage_collection()
                # Check again after GC
                memory_info = process.memory_info()
                memory_usage = memory_info.rss
                if memory_usage > self.critical_threshold:
                    self.logger.error("Memory usage still critical after garbage collection")
            elif memory_usage > self.warning_threshold:
                self.logger.info(f"Memory usage warning: {memory_usage / (1024*1024):.1f} MB")
        except Exception as e:
            self.logger.error(f"Failed to check memory usage: {e}")

    def force_garbage_collection(self):
        """Force garbage collection to free memory."""
        self.logger.debug("Forcing garbage collection")
        collected = gc.collect()
        self.logger.debug(f"Garbage collection freed {collected} objects")

    def get_memory_stats(self) -> Dict[str, Any]:
        """Get current memory statistics (empty dict on failure)."""
        try:
            process = psutil.Process()
            memory_info = process.memory_info()
            return {
                'rss_mb': memory_info.rss / (1024 * 1024),
                'vms_mb': memory_info.vms / (1024 * 1024),
                'percent': process.memory_percent(),
                'warning_threshold_mb': self.warning_threshold / (1024 * 1024),
                'critical_threshold_mb': self.critical_threshold / (1024 * 1024)
            }
        except Exception as e:
            self.logger.error(f"Failed to get memory stats: {e}")
            return {}
class ParallelDownloadManager:
    """Manage parallel downloads with configurable thread count."""

    def __init__(self, max_workers: int = 3, speed_limiter: Optional[SpeedLimiter] = None):
        self.max_workers = max_workers
        self.speed_limiter = speed_limiter or SpeedLimiter()
        self.executor = ThreadPoolExecutor(max_workers=max_workers)
        self.active_tasks: Dict[str, DownloadTask] = {}
        # Min-heap queue; priorities are negated on insert for max-heap order.
        self.pending_queue = queue.PriorityQueue()
        self.completed_tasks: List[DownloadTask] = []
        self.failed_tasks: List[DownloadTask] = []
        self.lock = threading.Lock()
        self.logger = logging.getLogger(__name__)
        self.running = False
        self.worker_thread = None
        # Statistics
        # NOTE(review): 'average_speed_mbps' is initialized but never updated
        # anywhere in this class — confirm whether a caller maintains it.
        self.stats = {
            'total_tasks': 0,
            'completed_tasks': 0,
            'failed_tasks': 0,
            'active_tasks': 0,
            'average_speed_mbps': 0.0
        }

    def start(self):
        """Start the download manager."""
        if self.running:
            return
        self.running = True
        self.worker_thread = threading.Thread(target=self._worker_loop, daemon=True)
        self.worker_thread.start()
        self.logger.info(f"Download manager started with {self.max_workers} workers")

    def stop(self):
        """Stop the download manager."""
        self.running = False
        # Cancel all pending tasks
        with self.lock:
            while not self.pending_queue.empty():
                try:
                    _, task = self.pending_queue.get_nowait()
                    task.status = 'cancelled'
                except queue.Empty:
                    break
        # Shutdown executor
        self.executor.shutdown(wait=True)
        if self.worker_thread:
            self.worker_thread.join(timeout=5)
        self.logger.info("Download manager stopped")

    def add_task(self, task: DownloadTask) -> str:
        """Add a download task to the queue and return its id."""
        with self.lock:
            self.stats['total_tasks'] += 1
            # Priority queue uses negative priority for max-heap behavior
            self.pending_queue.put((-task.priority, task))
        self.logger.info(f"Added download task: {task.task_id}")
        return task.task_id

    def _worker_loop(self):
        """Main worker loop that processes download tasks."""
        while self.running:
            try:
                # Check for pending tasks
                if not self.pending_queue.empty() and len(self.active_tasks) < self.max_workers:
                    _, task = self.pending_queue.get_nowait()
                    # NOTE(review): tasks whose status is no longer 'pending'
                    # are silently dropped here — confirm that is intended.
                    if task.status == 'pending':
                        self._start_task(task)
                # Check completed tasks
                self._check_completed_tasks()
                time.sleep(0.1)  # Small delay to prevent busy waiting
            except queue.Empty:
                time.sleep(1)
            except Exception as e:
                self.logger.error(f"Error in worker loop: {e}")
                time.sleep(1)

    def _start_task(self, task: DownloadTask):
        """Start a download task."""
        with self.lock:
            task.status = 'downloading'
            task.started_at = datetime.now()
            self.active_tasks[task.task_id] = task
            self.stats['active_tasks'] = len(self.active_tasks)
        # Submit to thread pool
        # The Future is attached dynamically and polled in _check_completed_tasks.
        future = self.executor.submit(self._execute_download, task)
        task.future = future
        self.logger.info(f"Started download task: {task.task_id}")

    def _execute_download(self, task: DownloadTask) -> bool:
        """Execute the actual download; returns True on success."""
        try:
            self.logger.info(f"Executing download: {task.serie_name} S{task.season}E{task.episode}")

            # Create progress callback that respects speed limiting
            def progress_callback(info):
                if 'downloaded_bytes' in info:
                    self.speed_limiter.update_progress(info.get('downloaded_bytes', 0))
                # Update task progress
                task.progress.update(info)

            self.speed_limiter.start_download()
            # Here you would call the actual download function
            # For now, simulate download
            success = self._simulate_download(task, progress_callback)
            return success
        except Exception as e:
            self.logger.error(f"Download failed for task {task.task_id}: {e}")
            task.error_message = str(e)
            return False

    def _simulate_download(self, task: DownloadTask, progress_callback: Callable) -> bool:
        """Simulate download for testing purposes."""
        # This is a placeholder - replace with actual download logic
        total_size = 100 * 1024 * 1024  # 100MB simulation
        downloaded = 0
        chunk_size = 1024 * 1024  # 1MB chunks
        while downloaded < total_size and task.status == 'downloading':
            # Simulate download chunk
            time.sleep(0.1)
            downloaded += chunk_size
            progress_info = {
                'status': 'downloading',
                'downloaded_bytes': downloaded,
                'total_bytes': total_size,
                'percent': (downloaded / total_size) * 100
            }
            progress_callback(progress_info)
        if downloaded >= total_size:
            progress_callback({'status': 'finished'})
            return True
        return False

    def _check_completed_tasks(self):
        """Check for completed download tasks."""
        completed_task_ids = []
        with self.lock:
            for task_id, task in self.active_tasks.items():
                if hasattr(task, 'future') and task.future.done():
                    completed_task_ids.append(task_id)
        # Process completed tasks
        for task_id in completed_task_ids:
            self._handle_completed_task(task_id)

    def _handle_completed_task(self, task_id: str):
        """Handle a completed download task."""
        with self.lock:
            task = self.active_tasks.pop(task_id, None)
            if not task:
                return
            task.completed_at = datetime.now()
            self.stats['active_tasks'] = len(self.active_tasks)
            try:
                # future.done() was already True, so result() returns immediately.
                success = task.future.result()
                if success:
                    task.status = 'completed'
                    self.completed_tasks.append(task)
                    self.stats['completed_tasks'] += 1
                    self.logger.info(f"Task completed successfully: {task_id}")
                else:
                    task.status = 'failed'
                    self.failed_tasks.append(task)
                    self.stats['failed_tasks'] += 1
                    self.logger.warning(f"Task failed: {task_id}")
            except Exception as e:
                task.status = 'failed'
                task.error_message = str(e)
                self.failed_tasks.append(task)
                self.stats['failed_tasks'] += 1
                self.logger.error(f"Task failed with exception: {task_id} - {e}")

    def get_task_status(self, task_id: str) -> Optional[Dict[str, Any]]:
        """Get status of a specific task, or None if unknown."""
        with self.lock:
            # Check active tasks
            if task_id in self.active_tasks:
                task = self.active_tasks[task_id]
                return self._task_to_dict(task)
            # Check completed tasks
            for task in self.completed_tasks:
                if task.task_id == task_id:
                    return self._task_to_dict(task)
            # Check failed tasks
            for task in self.failed_tasks:
                if task.task_id == task_id:
                    return self._task_to_dict(task)
        return None

    def _task_to_dict(self, task: DownloadTask) -> Dict[str, Any]:
        """Convert task to dictionary representation."""
        return {
            'task_id': task.task_id,
            'serie_name': task.serie_name,
            'season': task.season,
            'episode': task.episode,
            'status': task.status,
            'progress': task.progress,
            'created_at': task.created_at.isoformat(),
            'started_at': task.started_at.isoformat() if task.started_at else None,
            'completed_at': task.completed_at.isoformat() if task.completed_at else None,
            'error_message': task.error_message,
            'retry_count': task.retry_count
        }

    def get_all_tasks(self) -> Dict[str, List[Dict[str, Any]]]:
        """Get all tasks grouped by status."""
        with self.lock:
            return {
                'active': [self._task_to_dict(task) for task in self.active_tasks.values()],
                'completed': [self._task_to_dict(task) for task in self.completed_tasks[-50:]],  # Last 50
                'failed': [self._task_to_dict(task) for task in self.failed_tasks[-50:]]  # Last 50
            }

    def get_statistics(self) -> Dict[str, Any]:
        """Get download manager statistics (shallow copy)."""
        return self.stats.copy()

    def set_max_workers(self, max_workers: int):
        """Change the number of worker threads."""
        if max_workers <= 0:
            raise ValueError("max_workers must be positive")
        self.max_workers = max_workers
        # Recreate executor with new worker count
        # Old executor is shut down without waiting; in-flight futures finish there.
        old_executor = self.executor
        self.executor = ThreadPoolExecutor(max_workers=max_workers)
        old_executor.shutdown(wait=False)
        self.logger.info(f"Updated worker count to {max_workers}")
class ResumeManager:
    """Persist and retrieve resume checkpoints for interrupted downloads.

    Each task's checkpoint is a JSON file named ``<task_id>.json`` inside
    ``resume_dir``. All methods swallow and log I/O errors.
    """

    def __init__(self, resume_dir: str = "./resume"):
        self.resume_dir = resume_dir
        self.logger = logging.getLogger(__name__)
        os.makedirs(resume_dir, exist_ok=True)

    def _path_for(self, task_id: str) -> str:
        """Return the JSON checkpoint path for the given task id."""
        return os.path.join(self.resume_dir, f"{task_id}.json")

    def save_resume_info(self, task_id: str, resume_data: Dict[str, Any]):
        """Save resume information for a download."""
        try:
            with open(self._path_for(task_id), 'w') as handle:
                json.dump(resume_data, handle, indent=2, default=str)
            self.logger.debug(f"Saved resume info for task: {task_id}")
        except Exception as e:
            self.logger.error(f"Failed to save resume info for {task_id}: {e}")

    def load_resume_info(self, task_id: str) -> Optional[Dict[str, Any]]:
        """Load resume information for a download; None if absent or unreadable."""
        try:
            checkpoint = self._path_for(task_id)
            if os.path.exists(checkpoint):
                with open(checkpoint, 'r') as handle:
                    resume_data = json.load(handle)
                self.logger.debug(f"Loaded resume info for task: {task_id}")
                return resume_data
        except Exception as e:
            self.logger.error(f"Failed to load resume info for {task_id}: {e}")
        return None

    def clear_resume_info(self, task_id: str):
        """Clear resume information after successful completion."""
        try:
            checkpoint = self._path_for(task_id)
            if os.path.exists(checkpoint):
                os.remove(checkpoint)
                self.logger.debug(f"Cleared resume info for task: {task_id}")
        except Exception as e:
            self.logger.error(f"Failed to clear resume info for {task_id}: {e}")

    def get_resumable_tasks(self) -> List[str]:
        """Get list of task ids that have a saved checkpoint."""
        try:
            return [
                os.path.splitext(name)[0]
                for name in os.listdir(self.resume_dir)
                if name.endswith('.json')
            ]
        except Exception as e:
            self.logger.error(f"Failed to get resumable tasks: {e}")
            return []
# Global instances
# Module-level singletons shared across the application. The download
# manager is wired to the shared speed limiter at construction time.
speed_limiter = SpeedLimiter()
memory_monitor = MemoryMonitor()
download_manager = ParallelDownloadManager(max_workers=3, speed_limiter=speed_limiter)
resume_manager = ResumeManager()  # stores resume files under ./resume by default
def init_performance_monitoring():
    """Start the background performance components: the memory monitor first,
    then the parallel download manager's workers."""
    for start in (memory_monitor.start_monitoring, download_manager.start):
        start()
def cleanup_performance_monitoring():
    """Shut down the background performance components started by
    init_performance_monitoring, in the same order."""
    for stop in (memory_monitor.stop_monitoring, download_manager.stop):
        stop()
# Export main components
# Public API of this module: the classes, the shared singleton instances,
# and the lifecycle helpers.
# NOTE(review): 'download_cache' is exported here but is not defined in this
# section of the file — confirm it is defined earlier in the module.
__all__ = [
    'SpeedLimiter',
    'MemoryMonitor',
    'ParallelDownloadManager',
    'ResumeManager',
    'DownloadTask',
    'speed_limiter',
    'download_cache',
    'memory_monitor',
    'download_manager',
    'resume_manager',
    'init_performance_monitoring',
    'cleanup_performance_monitoring'
]

# --- file boundary (concatenation artifact) ---------------------------------
# Diff-viewer residue ("View File" / "@@ -1,293 +0,0 @@") removed; the
# process-lock module below was originally a separate source file.
import functools
import logging
import threading
import time
from datetime import datetime, timedelta
from typing import Callable, Dict, Optional
logger = logging.getLogger(__name__)


class ProcessLock:
    """Thread-safe process lock for preventing duplicate operations.

    A holder is recorded together with an acquisition timestamp; if the
    holder keeps the lock longer than ``timeout_minutes`` the lock is
    considered expired and the next acquirer may take it over. Progress
    data can be attached to the lock while it is held.
    """

    def __init__(self, name: str, timeout_minutes: int = 60):
        self.name = name
        self.timeout_minutes = timeout_minutes
        self.lock = threading.RLock()  # guards all mutable state below
        self.locked_at: Optional[datetime] = None
        self.locked_by: Optional[str] = None
        self.progress_callback: Optional[Callable] = None
        self.is_locked = False
        self.progress_data: Dict = {}

    def _has_expired(self) -> bool:
        """Return True if the current holder exceeded the timeout.

        Caller must hold ``self.lock``.
        """
        return (
            self.is_locked
            and self.locked_at is not None
            and datetime.now() - self.locked_at
            > timedelta(minutes=self.timeout_minutes)
        )

    def acquire(self, locked_by: str = "system",
                progress_callback: Optional[Callable] = None) -> bool:
        """
        Attempt to acquire the lock.

        An expired lock is force-released first, so a crashed holder cannot
        block the operation forever.

        Returns True if lock was acquired, False if already locked.
        """
        with self.lock:
            if self._has_expired():
                logger.warning(f"Process lock '{self.name}' expired, releasing...")
                self._release_internal()
            if self.is_locked:
                return False
            self.is_locked = True
            self.locked_at = datetime.now()
            self.locked_by = locked_by
            self.progress_callback = progress_callback
            self.progress_data = {}
            logger.info(f"Process lock '{self.name}' acquired by '{locked_by}'")
            return True

    def release(self) -> bool:
        """Release the lock. Returns False if it was not held."""
        with self.lock:
            if not self.is_locked:
                return False
            self._release_internal()
            logger.info(f"Process lock '{self.name}' released")
            return True

    def _release_internal(self):
        """Internal method to release lock without logging."""
        self.is_locked = False
        self.locked_at = None
        self.locked_by = None
        self.progress_callback = None
        self.progress_data = {}

    def is_locked_by_other(self, requester: str) -> bool:
        """Check if lock is held by someone other than *requester*."""
        with self.lock:
            return self.is_locked and self.locked_by != requester

    def get_status(self) -> Dict:
        """Return a status snapshot: is_locked, locked_by, locked_at (ISO
        string or None), progress (copy), timeout_minutes."""
        with self.lock:
            return {
                'is_locked': self.is_locked,
                'locked_by': self.locked_by,
                'locked_at': self.locked_at.isoformat() if self.locked_at else None,
                'progress': self.progress_data.copy(),
                'timeout_minutes': self.timeout_minutes
            }

    def update_progress(self, progress_data: Dict):
        """Merge *progress_data* into the held lock's progress and notify the
        holder's callback, if any. No-op when the lock is not held."""
        with self.lock:
            if self.is_locked:
                self.progress_data.update(progress_data)
                if self.progress_callback:
                    try:
                        # Callback receives only the delta, not the merged state.
                        self.progress_callback(progress_data)
                    except Exception as e:
                        logger.error(f"Progress callback error: {e}")

    def __enter__(self):
        """Context manager entry; raises ProcessLockError if unavailable."""
        if not self.acquire():
            raise ProcessLockError(f"Could not acquire lock '{self.name}'")
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Context manager exit: always release."""
        self.release()
class ProcessLockError(Exception):
    """Raised when a process lock cannot be acquired or operated on."""
class ProcessLockManager:
    """Registry that creates and coordinates named ProcessLock instances."""

    def __init__(self):
        self.locks: Dict[str, ProcessLock] = {}
        self.manager_lock = threading.RLock()  # guards the registry dict

    def get_lock(self, name: str, timeout_minutes: int = 60) -> ProcessLock:
        """Return the lock registered under *name*, creating it on first use.

        ``timeout_minutes`` only takes effect when the lock is created; an
        existing lock keeps its original timeout.
        """
        with self.manager_lock:
            lock = self.locks.get(name)
            if lock is None:
                lock = ProcessLock(name, timeout_minutes)
                self.locks[name] = lock
            return lock

    def acquire_lock(self, name: str, locked_by: str = "system",
                     timeout_minutes: int = 60, progress_callback: Callable = None) -> bool:
        """Acquire a named lock; see ProcessLock.acquire for semantics."""
        return self.get_lock(name, timeout_minutes).acquire(locked_by, progress_callback)

    def release_lock(self, name: str) -> bool:
        """Release a named lock. False if the lock was never created or not held."""
        with self.manager_lock:
            lock = self.locks.get(name)
            return lock.release() if lock is not None else False

    def is_locked(self, name: str) -> bool:
        """True if the named lock exists and is currently held."""
        with self.manager_lock:
            lock = self.locks.get(name)
            return bool(lock and lock.is_locked)

    def get_all_locks_status(self) -> Dict:
        """Map each registered lock name to its status snapshot."""
        with self.manager_lock:
            statuses = {}
            for name, lock in self.locks.items():
                statuses[name] = lock.get_status()
            return statuses

    def cleanup_expired_locks(self) -> int:
        """Release every lock whose holder exceeded its timeout.

        Returns the number of locks released.
        """
        released = 0
        with self.manager_lock:
            for lock in self.locks.values():
                expired = (
                    lock.is_locked
                    and lock.locked_at
                    and datetime.now() - lock.locked_at > timedelta(minutes=lock.timeout_minutes)
                )
                if expired:
                    lock._release_internal()
                    released += 1
                    logger.info(f"Cleaned up expired lock: {lock.name}")
        return released

    def force_release_all(self) -> int:
        """Unconditionally release every held lock. Returns how many were released."""
        released = 0
        with self.manager_lock:
            for lock in self.locks.values():
                if not lock.is_locked:
                    continue
                lock._release_internal()
                released += 1
                logger.warning(f"Force released lock: {lock.name}")
        return released
# Global instance
# Single shared manager used by the module-level helper functions below.
process_lock_manager = ProcessLockManager()
# Predefined lock names for common operations
RESCAN_LOCK = "rescan"
DOWNLOAD_LOCK = "download"
SEARCH_LOCK = "search"
CONFIG_LOCK = "config"
def with_process_lock(lock_name: str, timeout_minutes: int = 60):
    """Decorator that serializes calls to a function behind a named process lock.

    The wrapped function accepts two optional keyword extras that are stripped
    before the call reaches it:
      _locked_by:          identifier recorded as the lock holder
                           (defaults to the wrapped function's name)
      _progress_callback:  callable receiving progress-dict updates

    Raises:
        ProcessLockError: if the named lock is already held.
    """
    def decorator(func):
        @functools.wraps(func)  # preserve __name__/__doc__ for introspection
        def wrapper(*args, **kwargs):
            locked_by = kwargs.pop('_locked_by', func.__name__)
            progress_callback = kwargs.pop('_progress_callback', None)
            if not process_lock_manager.acquire_lock(
                    lock_name, locked_by, timeout_minutes, progress_callback):
                raise ProcessLockError(f"Process '{lock_name}' is already running")
            try:
                return func(*args, **kwargs)
            finally:
                # Always release, even if the wrapped function raised.
                process_lock_manager.release_lock(lock_name)
        return wrapper
    return decorator
def check_process_locks():
    """Check and clean up any expired process locks.

    Returns:
        int: number of locks that were expired and released.
    """
    return process_lock_manager.cleanup_expired_locks()
def get_process_status(lock_name: str) -> Dict:
    """Get status of a specific process lock.

    Note: as a side effect of the registry lookup, this creates the lock
    (with the default timeout) if it does not exist yet.
    """
    lock = process_lock_manager.get_lock(lock_name)
    return lock.get_status()
def update_process_progress(lock_name: str, progress_data: Dict):
    """Update progress for a specific process.

    Silently does nothing if the named process is not currently running.
    """
    if process_lock_manager.is_locked(lock_name):
        lock = process_lock_manager.get_lock(lock_name)
        lock.update_progress(progress_data)
def is_process_running(lock_name: str) -> bool:
    """Check if a specific process is currently running.

    Returns False for lock names that have never been registered.
    """
    return process_lock_manager.is_locked(lock_name)
class QueueDeduplicator:
    """Prevent duplicate episodes in the download queue.

    Tracks (serie_name, season, episode) tuples that are currently queued or
    being processed so the same episode is never scheduled twice.
    """

    def __init__(self):
        self.active_items = set()  # Set of (serie_name, season, episode) tuples
        self.lock = threading.RLock()

    @staticmethod
    def _key(serie_name: str, season: int, episode: int):
        """Canonical identity tuple for an episode."""
        return (serie_name, season, episode)

    def add_episode(self, serie_name: str, season: int, episode: int) -> bool:
        """Register an episode as active.

        Returns True if it was added, False if it was already present.
        """
        key = self._key(serie_name, season, episode)
        with self.lock:
            if key in self.active_items:
                return False
            self.active_items.add(key)
            return True

    def remove_episode(self, serie_name: str, season: int, episode: int):
        """Drop an episode from the active set (no-op if absent)."""
        key = self._key(serie_name, season, episode)
        with self.lock:
            self.active_items.discard(key)

    def is_episode_active(self, serie_name: str, season: int, episode: int) -> bool:
        """True if the episode is currently queued or being processed."""
        key = self._key(serie_name, season, episode)
        with self.lock:
            return key in self.active_items

    def get_active_episodes(self) -> list:
        """Return a list copy of all active episode tuples."""
        with self.lock:
            return list(self.active_items)

    def clear_all(self):
        """Forget every tracked episode."""
        with self.lock:
            self.active_items.clear()

    def get_count(self) -> int:
        """Number of episodes currently tracked."""
        with self.lock:
            return len(self.active_items)
# Global deduplicator instance
# Shared module-level tracker used by the wrapper functions below.
episode_deduplicator = QueueDeduplicator()
def add_episode_to_queue_safe(serie_name: str, season: int, episode: int) -> bool:
    """
    Safely add episode to queue with deduplication.

    Thin wrapper around the module-level episode_deduplicator.
    Returns True if added, False if duplicate.
    """
    return episode_deduplicator.add_episode(serie_name, season, episode)
def remove_episode_from_queue(serie_name: str, season: int, episode: int):
    """Remove episode from deduplication tracking (no-op if not tracked)."""
    episode_deduplicator.remove_episode(serie_name, season, episode)
def is_episode_in_queue(serie_name: str, season: int, episode: int) -> bool:
    """Check if episode is already in queue/being processed."""
    return episode_deduplicator.is_episode_active(serie_name, season, episode)