cleanup
This commit is contained in:
@@ -1,149 +0,0 @@
|
||||
# --- Global UTF-8 logging setup (fix UnicodeEncodeError) ---
|
||||
import sys
|
||||
import logging
|
||||
import os
|
||||
from datetime import datetime
|
||||
|
||||
# Add the parent directory to sys.path to import our modules
|
||||
# This must be done before any local imports
|
||||
current_dir = os.path.dirname(__file__)
|
||||
parent_dir = os.path.join(current_dir, '..')
|
||||
sys.path.insert(0, os.path.abspath(parent_dir))
|
||||
|
||||
from flask import Flask, render_template, request, jsonify, redirect, url_for
|
||||
import logging
|
||||
import atexit
|
||||
# Import config
|
||||
# Import config
# EAFP: try the project-level config module first; fall back to a minimal
# in-file stub so the server can still boot in a bare checkout.
try:
    from config import config
except ImportError:
    # Fallback config
    class Config:
        # Default library location and log verbosity used only when the
        # real config module is absent.
        anime_directory = "./downloads"
        log_level = "INFO"
    config = Config()
|
||||
|
||||
# Simple auth decorators as fallbacks
|
||||
def require_auth(f):
    """No-op authentication decorator stub.

    Wraps *f* without performing any credential check, so real
    authentication can later be dropped in without touching the routes.
    """
    from functools import wraps

    @wraps(f)
    def wrapper(*args, **kwargs):
        # Pure pass-through: the wrapped view runs unconditionally.
        return f(*args, **kwargs)

    return wrapper
|
||||
|
||||
def optional_auth(f):
    """Identity decorator stub: optional auth is currently unenforced."""
    return f
|
||||
|
||||
|
||||
# Placeholder for missing services
|
||||
class MockScheduler:
    """Inert stand-in for the real scheduler service.

    Both lifecycle hooks are no-ops that return None, matching the
    interface the app expects from the eventual implementation.
    """

    def start_scheduler(self):
        """No-op start hook."""

    def stop_scheduler(self):
        """No-op stop hook."""
|
||||
|
||||
def init_scheduler(config, socketio=None, app=None):
    """Return a no-op scheduler instance.

    *config*, *socketio* and *app* are accepted for signature
    compatibility with the real initializer but are currently unused.
    """
    return MockScheduler()
|
||||
|
||||
def init_series_app(verbose=False):
    """Mock series-app initializer.

    Logs an informational note when *verbose* is set; otherwise does
    nothing. Returns None either way.
    """
    if not verbose:
        return
    logging.info("Series app initialized (mock)")
|
||||
|
||||
|
||||
# Flask app wired to the project's web/ asset tree instead of the defaults.
app = Flask(__name__,
            template_folder='web/templates/base',
            static_folder='web/static')
# Random secret per process: existing sessions are invalidated on restart.
app.config['SECRET_KEY'] = os.urandom(24)
app.config['PERMANENT_SESSION_LIFETIME'] = 86400  # 24 hours
|
||||
|
||||
# Error handler for API routes to return JSON instead of HTML
|
||||
@app.errorhandler(404)
def handle_api_not_found(error):
    """Handle 404s: JSON payload for /api/ paths, default page otherwise."""
    if not request.path.startswith('/api/'):
        # For non-API routes, let Flask handle it normally
        return error
    payload = {
        'success': False,
        'error': 'API endpoint not found',
        'path': request.path,
    }
    return jsonify(payload), 404
|
||||
|
||||
# Global error handler to log any unhandled exceptions
|
||||
@app.errorhandler(Exception)
def handle_exception(e):
    """Log every unhandled exception; JSON 500 for API paths, text otherwise."""
    logging.error("Unhandled exception occurred: %s", e, exc_info=True)
    if request.path.startswith('/api/'):
        body = {'success': False, 'error': 'Internal Server Error'}
        return jsonify(body), 500
    return "Internal Server Error", 500
|
||||
|
||||
# Register cleanup functions
@atexit.register
def cleanup_on_exit():
    """Clean up resources on application exit.

    Registered with atexit so it runs on normal interpreter shutdown.
    Additional cleanup steps will be added as features are implemented.
    """
    try:
        logging.info("Application cleanup completed")
    except Exception as e:
        # Lazy %-formatting: the message is only built if actually emitted.
        logging.error("Error during cleanup: %s", e)
|
||||
|
||||
# Basic routes since blueprints are missing
|
||||
@app.route('/')
def index():
    """Landing endpoint: report server identity and status as JSON."""
    info = {
        'message': 'AniWorld Flask Server',
        'version': '1.0.0',
        'status': 'running',
    }
    return jsonify(info)
|
||||
|
||||
@app.route('/health')
def health():
    """Health-check endpoint with a timestamp and per-service status."""
    services = {
        'flask': 'online',
        'config': 'loaded',
    }
    return jsonify({
        'status': 'healthy',
        'timestamp': datetime.now().isoformat(),
        'services': services,
    })
|
||||
|
||||
@app.route('/api/auth/login', methods=['POST'])
def login():
    """Mock login endpoint.

    Expects a JSON body with a ``password`` field and returns a placeholder
    token on success, or a 401 JSON error on failure.

    SECURITY: the credential is hard-coded ('admin123') and the token is a
    static placeholder — this must be replaced with real authentication
    before any deployment.
    """
    import hmac

    data = request.get_json()
    supplied = (data or {}).get('password')
    # Constant-time comparison avoids leaking length/prefix information
    # through response timing on this untrusted input.
    if isinstance(supplied, str) and hmac.compare_digest(supplied, 'admin123'):
        return jsonify({
            'success': True,
            'message': 'Login successful',
            'token': 'mock-jwt-token'
        })
    return jsonify({'success': False, 'error': 'Invalid password'}), 401
|
||||
|
||||
# Initialize scheduler (currently the no-op MockScheduler from init_scheduler)
scheduler = init_scheduler(config)
|
||||
|
||||
if __name__ == '__main__':
    # Configure basic logging
    logging.basicConfig(
        level=logging.INFO,
        format='%(asctime)s - %(name)s - %(levelname)s - %(message)s'
    )
    logger = logging.getLogger(__name__)
    logger.info("Basic logging system initialized")

    # Werkzeug's reloader re-executes this module in a child process where
    # WERKZEUG_RUN_MAIN == 'true'; this guard keeps the startup messages
    # and the scheduler in the parent process only.
    if os.environ.get('WERKZEUG_RUN_MAIN') != 'true':
        logger.info("Starting Aniworld Flask server...")
        logger.info(f"Anime directory: {config.anime_directory}")
        logger.info(f"Log level: {config.log_level}")

        scheduler.start_scheduler()
        init_series_app(verbose=True)
        logger.info("Server will be available at http://localhost:5000")

    try:
        # Run Flask app (debug=True enables the reloader; see guard above)
        app.run(debug=True, host='0.0.0.0', port=5000)
    finally:
        # Clean shutdown
        # NOTE(review): at module level locals() is globals(); the check
        # works but is unusual — scheduler is always bound here.
        if 'scheduler' in locals() and scheduler:
            scheduler.stop_scheduler()
            logger.info("Scheduler stopped")
|
||||
@@ -1,42 +0,0 @@
|
||||
import sys
|
||||
import os
|
||||
import logging
|
||||
|
||||
from src.core.SerieScanner import SerieScanner
|
||||
from src.core.entities.SerieList import SerieList
|
||||
from src.core.providers.provider_factory import Loaders
|
||||
|
||||
|
||||
class SeriesApp:
    """Facade tying together the scanner, provider loader, and series list."""

    # Counts constructed instances so the "please wait" banner is printed
    # only for the first one. BUG FIX: the original read this attribute
    # without ever defining or incrementing it, which raised
    # AttributeError on the first construction.
    _initialization_count = 0

    def __init__(self, directory_to_search: str):
        SeriesApp._initialization_count += 1
        # Only show initialization message for the first instance
        if SeriesApp._initialization_count <= 1:
            print("Please wait while initializing...")

        self.progress = None
        self.directory_to_search = directory_to_search
        self.Loaders = Loaders()
        # presumably "aniworld.to" selects the site-specific provider —
        # TODO confirm against provider_factory.Loaders
        self.loader = self.Loaders.GetLoader(key="aniworld.to")
        self.SerieScanner = SerieScanner(directory_to_search, self.loader)

        self.List = SerieList(self.directory_to_search)
        self.__InitList__()

    def __InitList__(self):
        # Refresh the cached list of series that still have missing episodes.
        self.series_list = self.List.GetMissingEpisode()

    def search(self, words: str) -> list:
        """Search the provider for *words* and return the raw result list."""
        return self.loader.Search(words)

    def download(self, serieFolder: str, season: int, episode: int, key: str, callback) -> None:
        """Download one episode ("German Dub") into the library directory.

        NOTE(review): the original annotated this ``-> bool`` but discarded
        the loader's result and returned None; the annotation is corrected
        here and the runtime behavior is kept unchanged.
        """
        self.loader.Download(self.directory_to_search, serieFolder, season, episode, key, "German Dub", callback)

    def ReScan(self, callback):
        """Re-scan the library directory and rebuild the series list."""
        self.SerieScanner.Reinit()
        self.SerieScanner.Scan(callback)

        self.List = SerieList(self.directory_to_search)
        self.__InitList__()
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,981 +0,0 @@
|
||||
"""
|
||||
User Preferences and Settings Persistence Manager
|
||||
|
||||
This module provides user preferences management, settings persistence,
|
||||
and customization options for the AniWorld web interface.
|
||||
"""
|
||||
|
||||
import json
|
||||
import os
|
||||
from typing import Dict, Any, Optional
|
||||
from datetime import datetime
|
||||
from flask import Blueprint, request, jsonify, session
|
||||
|
||||
class UserPreferencesManager:
|
||||
"""Manages user preferences and settings persistence."""
|
||||
|
||||
    def __init__(self, app=None):
        """Create the manager; wire into *app* now or later via init_app().

        Args:
            app: Optional Flask application. When omitted, the manager runs
                on in-memory defaults until init_app() is called.
        """
        self.app = app
        # Relative fallback path; init_app() replaces it with a location
        # under the app's instance_path.
        self.preferences_file = 'data/user_preferences.json'
        self.preferences = {}  # Initialize preferences attribute
        # Canonical schema and defaults; loaded user prefs are merged onto
        # this structure so every expected key always exists.
        self.default_preferences = {
            'ui': {
                'theme': 'auto',  # 'light', 'dark', 'auto'
                'density': 'comfortable',  # 'compact', 'comfortable', 'spacious'
                'language': 'en',
                'animations_enabled': True,
                'sidebar_collapsed': False,
                'grid_view': True,
                'items_per_page': 20
            },
            'downloads': {
                'auto_download': False,
                'download_quality': 'best',
                'concurrent_downloads': 3,
                'retry_failed': True,
                'notification_sound': True,
                'auto_organize': True
            },
            'notifications': {
                'browser_notifications': True,
                'email_notifications': False,
                'webhook_notifications': False,
                'notification_types': {
                    'download_complete': True,
                    'download_error': True,
                    'series_updated': False,
                    'system_alerts': True
                }
            },
            'keyboard_shortcuts': {
                'enabled': True,
                'shortcuts': {
                    'search': 'ctrl+f',
                    'download': 'ctrl+d',
                    'refresh': 'f5',
                    'select_all': 'ctrl+a',
                    'help': 'f1',
                    'settings': 'ctrl+comma'
                }
            },
            'advanced': {
                'debug_mode': False,
                'performance_mode': False,
                'cache_enabled': True,
                'auto_backup': True,
                'log_level': 'info'
            }
        }

        # Initialize with defaults if no app provided
        if app is None:
            # NOTE(review): .copy() is shallow — the nested dicts remain
            # shared with default_preferences, so later in-place edits can
            # mutate the defaults too. Consider copy.deepcopy.
            self.preferences = self.default_preferences.copy()
        else:
            self.init_app(app)
|
||||
|
||||
def init_app(self, app):
|
||||
"""Initialize with Flask app."""
|
||||
self.app = app
|
||||
self.preferences_file = os.path.join(app.instance_path, 'data/user_preferences.json')
|
||||
|
||||
# Ensure instance path exists
|
||||
os.makedirs(app.instance_path, exist_ok=True)
|
||||
|
||||
# Load or create preferences file
|
||||
self.load_preferences()
|
||||
|
||||
def load_preferences(self) -> Dict[str, Any]:
|
||||
"""Load preferences from file."""
|
||||
try:
|
||||
if os.path.exists(self.preferences_file):
|
||||
with open(self.preferences_file, 'r', encoding='utf-8') as f:
|
||||
loaded_prefs = json.load(f)
|
||||
|
||||
# Merge with defaults to ensure all keys exist
|
||||
self.preferences = self.merge_preferences(self.default_preferences, loaded_prefs)
|
||||
else:
|
||||
self.preferences = self.default_preferences.copy()
|
||||
self.save_preferences()
|
||||
|
||||
except Exception as e:
|
||||
print(f"Error loading preferences: {e}")
|
||||
self.preferences = self.default_preferences.copy()
|
||||
|
||||
return self.preferences
|
||||
|
||||
def save_preferences(self) -> bool:
|
||||
"""Save preferences to file."""
|
||||
try:
|
||||
with open(self.preferences_file, 'w', encoding='utf-8') as f:
|
||||
json.dump(self.preferences, f, indent=2, ensure_ascii=False)
|
||||
return True
|
||||
except Exception as e:
|
||||
print(f"Error saving preferences: {e}")
|
||||
return False
|
||||
|
||||
def merge_preferences(self, defaults: Dict, user_prefs: Dict) -> Dict:
|
||||
"""Recursively merge user preferences with defaults."""
|
||||
result = defaults.copy()
|
||||
|
||||
for key, value in user_prefs.items():
|
||||
if key in result and isinstance(result[key], dict) and isinstance(value, dict):
|
||||
result[key] = self.merge_preferences(result[key], value)
|
||||
else:
|
||||
result[key] = value
|
||||
|
||||
return result
|
||||
|
||||
def get_preference(self, key: str, default: Any = None) -> Any:
|
||||
"""Get a specific preference using dot notation (e.g., 'ui.theme')."""
|
||||
keys = key.split('.')
|
||||
value = self.preferences
|
||||
|
||||
try:
|
||||
for k in keys:
|
||||
value = value[k]
|
||||
return value
|
||||
except (KeyError, TypeError):
|
||||
return default
|
||||
|
||||
def set_preference(self, key: str, value: Any) -> bool:
|
||||
"""Set a specific preference using dot notation."""
|
||||
keys = key.split('.')
|
||||
pref_dict = self.preferences
|
||||
|
||||
try:
|
||||
# Navigate to parent dictionary
|
||||
for k in keys[:-1]:
|
||||
if k not in pref_dict:
|
||||
pref_dict[k] = {}
|
||||
pref_dict = pref_dict[k]
|
||||
|
||||
# Set the value
|
||||
pref_dict[keys[-1]] = value
|
||||
|
||||
# Save to file
|
||||
return self.save_preferences()
|
||||
|
||||
except Exception as e:
|
||||
print(f"Error setting preference {key}: {e}")
|
||||
return False
|
||||
|
||||
def reset_preferences(self) -> bool:
|
||||
"""Reset all preferences to defaults."""
|
||||
self.preferences = self.default_preferences.copy()
|
||||
return self.save_preferences()
|
||||
|
||||
def export_preferences(self) -> str:
|
||||
"""Export preferences as JSON string."""
|
||||
try:
|
||||
return json.dumps(self.preferences, indent=2, ensure_ascii=False)
|
||||
except Exception as e:
|
||||
print(f"Error exporting preferences: {e}")
|
||||
return "{}"
|
||||
|
||||
def import_preferences(self, json_data: str) -> bool:
|
||||
"""Import preferences from JSON string."""
|
||||
try:
|
||||
imported_prefs = json.loads(json_data)
|
||||
self.preferences = self.merge_preferences(self.default_preferences, imported_prefs)
|
||||
return self.save_preferences()
|
||||
except Exception as e:
|
||||
print(f"Error importing preferences: {e}")
|
||||
return False
|
||||
|
||||
def get_user_session_preferences(self) -> Dict[str, Any]:
|
||||
"""Get preferences for current user session."""
|
||||
# For now, return global preferences
|
||||
# In the future, could be user-specific
|
||||
return self.preferences.copy()
|
||||
|
||||
def get_preferences_js(self):
|
||||
"""Generate JavaScript code for preferences management."""
|
||||
return f"""
|
||||
// AniWorld User Preferences Manager
|
||||
class UserPreferencesManager {{
|
||||
constructor() {{
|
||||
this.preferences = {json.dumps(self.preferences)};
|
||||
this.defaultPreferences = {json.dumps(self.default_preferences)};
|
||||
this.changeListeners = new Map();
|
||||
this.init();
|
||||
}}
|
||||
|
||||
init() {{
|
||||
this.loadFromServer();
|
||||
this.applyPreferences();
|
||||
this.setupPreferencesUI();
|
||||
this.setupAutoSave();
|
||||
}}
|
||||
|
||||
async loadFromServer() {{
|
||||
try {{
|
||||
const response = await fetch('/api/preferences');
|
||||
if (response.ok) {{
|
||||
this.preferences = await response.json();
|
||||
this.applyPreferences();
|
||||
}}
|
||||
}} catch (error) {{
|
||||
console.error('Error loading preferences:', error);
|
||||
}}
|
||||
}}
|
||||
|
||||
async saveToServer() {{
|
||||
try {{
|
||||
const response = await fetch('/api/preferences', {{
|
||||
method: 'PUT',
|
||||
headers: {{
|
||||
'Content-Type': 'application/json'
|
||||
}},
|
||||
body: JSON.stringify(this.preferences)
|
||||
}});
|
||||
|
||||
if (!response.ok) {{
|
||||
console.error('Error saving preferences to server');
|
||||
}}
|
||||
}} catch (error) {{
|
||||
console.error('Error saving preferences:', error);
|
||||
}}
|
||||
}}
|
||||
|
||||
get(key, defaultValue = null) {{
|
||||
const keys = key.split('.');
|
||||
let value = this.preferences;
|
||||
|
||||
try {{
|
||||
for (const k of keys) {{
|
||||
value = value[k];
|
||||
}}
|
||||
return value !== undefined ? value : defaultValue;
|
||||
}} catch (error) {{
|
||||
return defaultValue;
|
||||
}}
|
||||
}}
|
||||
|
||||
set(key, value, save = true) {{
|
||||
const keys = key.split('.');
|
||||
let obj = this.preferences;
|
||||
|
||||
// Navigate to parent object
|
||||
for (let i = 0; i < keys.length - 1; i++) {{
|
||||
const k = keys[i];
|
||||
if (!obj[k] || typeof obj[k] !== 'object') {{
|
||||
obj[k] = {{}};
|
||||
}}
|
||||
obj = obj[k];
|
||||
}}
|
||||
|
||||
// Set the value
|
||||
const lastKey = keys[keys.length - 1];
|
||||
const oldValue = obj[lastKey];
|
||||
obj[lastKey] = value;
|
||||
|
||||
// Apply the change immediately
|
||||
this.applyPreference(key, value);
|
||||
|
||||
// Notify listeners
|
||||
this.notifyChangeListeners(key, value, oldValue);
|
||||
|
||||
// Save to server
|
||||
if (save) {{
|
||||
this.saveToServer();
|
||||
}}
|
||||
|
||||
// Store in localStorage as backup
|
||||
localStorage.setItem('aniworld_preferences', JSON.stringify(this.preferences));
|
||||
}}
|
||||
|
||||
applyPreferences() {{
|
||||
// Apply all preferences
|
||||
this.applyTheme();
|
||||
this.applyUIPreferences();
|
||||
this.applyKeyboardShortcuts();
|
||||
this.applyNotificationSettings();
|
||||
}}
|
||||
|
||||
applyPreference(key, value) {{
|
||||
// Apply individual preference change
|
||||
if (key.startsWith('ui.theme')) {{
|
||||
this.applyTheme();
|
||||
}} else if (key.startsWith('ui.')) {{
|
||||
this.applyUIPreferences();
|
||||
}} else if (key.startsWith('keyboard_shortcuts.')) {{
|
||||
this.applyKeyboardShortcuts();
|
||||
}} else if (key.startsWith('notifications.')) {{
|
||||
this.applyNotificationSettings();
|
||||
}}
|
||||
}}
|
||||
|
||||
applyTheme() {{
|
||||
const theme = this.get('ui.theme', 'auto');
|
||||
const html = document.documentElement;
|
||||
|
||||
html.classList.remove('theme-light', 'theme-dark');
|
||||
|
||||
if (theme === 'auto') {{
|
||||
// Use system preference
|
||||
const prefersDark = window.matchMedia('(prefers-color-scheme: dark)').matches;
|
||||
html.classList.add(prefersDark ? 'theme-dark' : 'theme-light');
|
||||
}} else {{
|
||||
html.classList.add(`theme-${{theme}}`);
|
||||
}}
|
||||
|
||||
// Update Bootstrap theme
|
||||
html.setAttribute('data-bs-theme', theme === 'dark' || (theme === 'auto' && window.matchMedia('(prefers-color-scheme: dark)').matches) ? 'dark' : 'light');
|
||||
}}
|
||||
|
||||
applyUIPreferences() {{
|
||||
const density = this.get('ui.density', 'comfortable');
|
||||
const animations = this.get('ui.animations_enabled', true);
|
||||
const gridView = this.get('ui.grid_view', true);
|
||||
|
||||
// Apply UI density
|
||||
document.body.className = document.body.className.replace(/density-\\w+/g, '');
|
||||
document.body.classList.add(`density-${{density}}`);
|
||||
|
||||
// Apply animations
|
||||
if (!animations) {{
|
||||
document.body.classList.add('no-animations');
|
||||
}} else {{
|
||||
document.body.classList.remove('no-animations');
|
||||
}}
|
||||
|
||||
// Apply view mode
|
||||
const viewToggle = document.querySelector('.view-toggle');
|
||||
if (viewToggle) {{
|
||||
viewToggle.classList.toggle('grid-view', gridView);
|
||||
viewToggle.classList.toggle('list-view', !gridView);
|
||||
}}
|
||||
}}
|
||||
|
||||
applyKeyboardShortcuts() {{
|
||||
const enabled = this.get('keyboard_shortcuts.enabled', true);
|
||||
const shortcuts = this.get('keyboard_shortcuts.shortcuts', {{}});
|
||||
|
||||
if (window.keyboardManager) {{
|
||||
window.keyboardManager.setEnabled(enabled);
|
||||
window.keyboardManager.updateShortcuts(shortcuts);
|
||||
}}
|
||||
}}
|
||||
|
||||
applyNotificationSettings() {{
|
||||
const browserNotifications = this.get('notifications.browser_notifications', true);
|
||||
|
||||
// Request notification permission if needed
|
||||
if (browserNotifications && 'Notification' in window && Notification.permission === 'default') {{
|
||||
Notification.requestPermission();
|
||||
}}
|
||||
}}
|
||||
|
||||
setupPreferencesUI() {{
|
||||
this.createSettingsModal();
|
||||
this.bindSettingsEvents();
|
||||
}}
|
||||
|
||||
createSettingsModal() {{
|
||||
const existingModal = document.getElementById('preferences-modal');
|
||||
if (existingModal) return;
|
||||
|
||||
const modal = document.createElement('div');
|
||||
modal.id = 'preferences-modal';
|
||||
modal.className = 'modal fade';
|
||||
modal.innerHTML = `
|
||||
<div class="modal-dialog modal-lg">
|
||||
<div class="modal-content">
|
||||
<div class="modal-header">
|
||||
<h5 class="modal-title">Preferences</h5>
|
||||
<button type="button" class="btn-close" data-bs-dismiss="modal"></button>
|
||||
</div>
|
||||
<div class="modal-body">
|
||||
<ul class="nav nav-tabs mb-3">
|
||||
<li class="nav-item">
|
||||
<a class="nav-link active" data-bs-toggle="tab" href="#ui-tab">Interface</a>
|
||||
</li>
|
||||
<li class="nav-item">
|
||||
<a class="nav-link" data-bs-toggle="tab" href="#downloads-tab">Downloads</a>
|
||||
</li>
|
||||
<li class="nav-item">
|
||||
<a class="nav-link" data-bs-toggle="tab" href="#notifications-tab">Notifications</a>
|
||||
</li>
|
||||
<li class="nav-item">
|
||||
<a class="nav-link" data-bs-toggle="tab" href="#shortcuts-tab">Shortcuts</a>
|
||||
</li>
|
||||
<li class="nav-item">
|
||||
<a class="nav-link" data-bs-toggle="tab" href="#advanced-tab">Advanced</a>
|
||||
</li>
|
||||
</ul>
|
||||
<div class="tab-content">
|
||||
${{this.createUITab()}}
|
||||
${{this.createDownloadsTab()}}
|
||||
${{this.createNotificationsTab()}}
|
||||
${{this.createShortcutsTab()}}
|
||||
${{this.createAdvancedTab()}}
|
||||
</div>
|
||||
</div>
|
||||
<div class="modal-footer">
|
||||
<button type="button" class="btn btn-secondary" data-bs-dismiss="modal">Close</button>
|
||||
<button type="button" class="btn btn-outline-danger" id="reset-preferences">Reset to Defaults</button>
|
||||
<button type="button" class="btn btn-outline-primary" id="export-preferences">Export</button>
|
||||
<button type="button" class="btn btn-outline-primary" id="import-preferences">Import</button>
|
||||
<button type="button" class="btn btn-primary" id="save-preferences">Save</button>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
`;
|
||||
|
||||
document.body.appendChild(modal);
|
||||
}}
|
||||
|
||||
createUITab() {{
|
||||
return `
|
||||
<div class="tab-pane fade show active" id="ui-tab">
|
||||
<div class="row">
|
||||
<div class="col-md-6">
|
||||
<div class="mb-3">
|
||||
<label class="form-label">Theme</label>
|
||||
<select class="form-select" id="pref-theme">
|
||||
<option value="auto">Auto (System)</option>
|
||||
<option value="light">Light</option>
|
||||
<option value="dark">Dark</option>
|
||||
</select>
|
||||
</div>
|
||||
<div class="mb-3">
|
||||
<label class="form-label">UI Density</label>
|
||||
<select class="form-select" id="pref-density">
|
||||
<option value="compact">Compact</option>
|
||||
<option value="comfortable">Comfortable</option>
|
||||
<option value="spacious">Spacious</option>
|
||||
</select>
|
||||
</div>
|
||||
<div class="mb-3">
|
||||
<label class="form-label">Language</label>
|
||||
<select class="form-select" id="pref-language">
|
||||
<option value="en">English</option>
|
||||
<option value="de">German</option>
|
||||
<option value="ja">Japanese</option>
|
||||
</select>
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-md-6">
|
||||
<div class="mb-3">
|
||||
<label class="form-label">Items per page</label>
|
||||
<select class="form-select" id="pref-items-per-page">
|
||||
<option value="10">10</option>
|
||||
<option value="20">20</option>
|
||||
<option value="50">50</option>
|
||||
<option value="100">100</option>
|
||||
</select>
|
||||
</div>
|
||||
<div class="form-check mb-3">
|
||||
<input class="form-check-input" type="checkbox" id="pref-animations">
|
||||
<label class="form-check-label" for="pref-animations">
|
||||
Enable animations
|
||||
</label>
|
||||
</div>
|
||||
<div class="form-check mb-3">
|
||||
<input class="form-check-input" type="checkbox" id="pref-grid-view">
|
||||
<label class="form-check-label" for="pref-grid-view">
|
||||
Default to grid view
|
||||
</label>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
`;
|
||||
}}
|
||||
|
||||
createDownloadsTab() {{
|
||||
return `
|
||||
<div class="tab-pane fade" id="downloads-tab">
|
||||
<div class="row">
|
||||
<div class="col-md-6">
|
||||
<div class="mb-3">
|
||||
<label class="form-label">Download Quality</label>
|
||||
<select class="form-select" id="pref-download-quality">
|
||||
<option value="best">Best Available</option>
|
||||
<option value="1080p">1080p</option>
|
||||
<option value="720p">720p</option>
|
||||
<option value="480p">480p</option>
|
||||
</select>
|
||||
</div>
|
||||
<div class="mb-3">
|
||||
<label class="form-label">Concurrent Downloads</label>
|
||||
<input type="number" class="form-control" id="pref-concurrent-downloads" min="1" max="10">
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-md-6">
|
||||
<div class="form-check mb-3">
|
||||
<input class="form-check-input" type="checkbox" id="pref-auto-download">
|
||||
<label class="form-check-label" for="pref-auto-download">
|
||||
Auto-download new episodes
|
||||
</label>
|
||||
</div>
|
||||
<div class="form-check mb-3">
|
||||
<input class="form-check-input" type="checkbox" id="pref-retry-failed">
|
||||
<label class="form-check-label" for="pref-retry-failed">
|
||||
Retry failed downloads
|
||||
</label>
|
||||
</div>
|
||||
<div class="form-check mb-3">
|
||||
<input class="form-check-input" type="checkbox" id="pref-auto-organize">
|
||||
<label class="form-check-label" for="pref-auto-organize">
|
||||
Auto-organize downloads
|
||||
</label>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
`;
|
||||
}}
|
||||
|
||||
createNotificationsTab() {{
|
||||
return `
|
||||
<div class="tab-pane fade" id="notifications-tab">
|
||||
<div class="row">
|
||||
<div class="col-md-6">
|
||||
<h6>General</h6>
|
||||
<div class="form-check mb-3">
|
||||
<input class="form-check-input" type="checkbox" id="pref-browser-notifications">
|
||||
<label class="form-check-label" for="pref-browser-notifications">
|
||||
Browser notifications
|
||||
</label>
|
||||
</div>
|
||||
<div class="form-check mb-3">
|
||||
<input class="form-check-input" type="checkbox" id="pref-notification-sound">
|
||||
<label class="form-check-label" for="pref-notification-sound">
|
||||
Notification sound
|
||||
</label>
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-md-6">
|
||||
<h6>Notification Types</h6>
|
||||
<div class="form-check mb-2">
|
||||
<input class="form-check-input" type="checkbox" id="pref-notify-download-complete">
|
||||
<label class="form-check-label" for="pref-notify-download-complete">
|
||||
Download complete
|
||||
</label>
|
||||
</div>
|
||||
<div class="form-check mb-2">
|
||||
<input class="form-check-input" type="checkbox" id="pref-notify-download-error">
|
||||
<label class="form-check-label" for="pref-notify-download-error">
|
||||
Download errors
|
||||
</label>
|
||||
</div>
|
||||
<div class="form-check mb-2">
|
||||
<input class="form-check-input" type="checkbox" id="pref-notify-series-updated">
|
||||
<label class="form-check-label" for="pref-notify-series-updated">
|
||||
Series updates
|
||||
</label>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
`;
|
||||
}}
|
||||
|
||||
createShortcutsTab() {{
|
||||
return `
|
||||
<div class="tab-pane fade" id="shortcuts-tab">
|
||||
<div class="form-check mb-3">
|
||||
<input class="form-check-input" type="checkbox" id="pref-shortcuts-enabled">
|
||||
<label class="form-check-label" for="pref-shortcuts-enabled">
|
||||
Enable keyboard shortcuts
|
||||
</label>
|
||||
</div>
|
||||
<div id="shortcuts-list">
|
||||
<!-- Shortcuts will be populated dynamically -->
|
||||
</div>
|
||||
</div>
|
||||
`;
|
||||
}}
|
||||
|
||||
createAdvancedTab() {{
|
||||
return `
|
||||
<div class="tab-pane fade" id="advanced-tab">
|
||||
<div class="row">
|
||||
<div class="col-md-6">
|
||||
<div class="form-check mb-3">
|
||||
<input class="form-check-input" type="checkbox" id="pref-debug-mode">
|
||||
<label class="form-check-label" for="pref-debug-mode">
|
||||
Debug mode
|
||||
</label>
|
||||
</div>
|
||||
<div class="form-check mb-3">
|
||||
<input class="form-check-input" type="checkbox" id="pref-performance-mode">
|
||||
<label class="form-check-label" for="pref-performance-mode">
|
||||
Performance mode
|
||||
</label>
|
||||
</div>
|
||||
</div>
|
||||
<div class="col-md-6">
|
||||
<div class="form-check mb-3">
|
||||
<input class="form-check-input" type="checkbox" id="pref-cache-enabled">
|
||||
<label class="form-check-label" for="pref-cache-enabled">
|
||||
Enable caching
|
||||
</label>
|
||||
</div>
|
||||
<div class="form-check mb-3">
|
||||
<input class="form-check-input" type="checkbox" id="pref-auto-backup">
|
||||
<label class="form-check-label" for="pref-auto-backup">
|
||||
Auto backup settings
|
||||
</label>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
`;
|
||||
}}
|
||||
|
||||
bindSettingsEvents() {{
|
||||
// Theme system preference listener
|
||||
window.matchMedia('(prefers-color-scheme: dark)').addEventListener('change', () => {{
|
||||
if (this.get('ui.theme') === 'auto') {{
|
||||
this.applyTheme();
|
||||
}}
|
||||
}});
|
||||
|
||||
// Settings modal events will be bound when modal is shown
|
||||
document.addEventListener('show.bs.modal', (e) => {{
|
||||
if (e.target.id === 'preferences-modal') {{
|
||||
this.populateSettingsForm();
|
||||
}}
|
||||
}});
|
||||
}}
|
||||
|
||||
populateSettingsForm() {{
|
||||
// Populate form fields with current preferences
|
||||
const fields = [
|
||||
{{ id: 'pref-theme', key: 'ui.theme' }},
|
||||
{{ id: 'pref-density', key: 'ui.density' }},
|
||||
{{ id: 'pref-language', key: 'ui.language' }},
|
||||
{{ id: 'pref-items-per-page', key: 'ui.items_per_page' }},
|
||||
{{ id: 'pref-animations', key: 'ui.animations_enabled' }},
|
||||
{{ id: 'pref-grid-view', key: 'ui.grid_view' }},
|
||||
{{ id: 'pref-download-quality', key: 'downloads.download_quality' }},
|
||||
{{ id: 'pref-concurrent-downloads', key: 'downloads.concurrent_downloads' }},
|
||||
{{ id: 'pref-auto-download', key: 'downloads.auto_download' }},
|
||||
{{ id: 'pref-retry-failed', key: 'downloads.retry_failed' }},
|
||||
{{ id: 'pref-auto-organize', key: 'downloads.auto_organize' }},
|
||||
{{ id: 'pref-browser-notifications', key: 'notifications.browser_notifications' }},
|
||||
{{ id: 'pref-notification-sound', key: 'downloads.notification_sound' }},
|
||||
{{ id: 'pref-shortcuts-enabled', key: 'keyboard_shortcuts.enabled' }},
|
||||
{{ id: 'pref-debug-mode', key: 'advanced.debug_mode' }},
|
||||
{{ id: 'pref-performance-mode', key: 'advanced.performance_mode' }},
|
||||
{{ id: 'pref-cache-enabled', key: 'advanced.cache_enabled' }},
|
||||
{{ id: 'pref-auto-backup', key: 'advanced.auto_backup' }}
|
||||
];
|
||||
|
||||
fields.forEach(field => {{
|
||||
const element = document.getElementById(field.id);
|
||||
if (element) {{
|
||||
const value = this.get(field.key);
|
||||
if (element.type === 'checkbox') {{
|
||||
element.checked = value;
|
||||
}} else {{
|
||||
element.value = value;
|
||||
}}
|
||||
}}
|
||||
}});
|
||||
}}
|
||||
|
||||
setupAutoSave() {{
|
||||
// Auto-save preferences on change
|
||||
document.addEventListener('change', (e) => {{
|
||||
if (e.target.id && e.target.id.startsWith('pref-')) {{
|
||||
this.saveFormValue(e.target);
|
||||
}}
|
||||
}});
|
||||
}}
|
||||
|
||||
saveFormValue(element) {{
|
||||
const keyMap = {{
|
||||
'pref-theme': 'ui.theme',
|
||||
'pref-density': 'ui.density',
|
||||
'pref-language': 'ui.language',
|
||||
'pref-items-per-page': 'ui.items_per_page',
|
||||
'pref-animations': 'ui.animations_enabled',
|
||||
'pref-grid-view': 'ui.grid_view',
|
||||
'pref-download-quality': 'downloads.download_quality',
|
||||
'pref-concurrent-downloads': 'downloads.concurrent_downloads',
|
||||
'pref-auto-download': 'downloads.auto_download',
|
||||
'pref-retry-failed': 'downloads.retry_failed',
|
||||
'pref-auto-organize': 'downloads.auto_organize',
|
||||
'pref-browser-notifications': 'notifications.browser_notifications',
|
||||
'pref-notification-sound': 'downloads.notification_sound',
|
||||
'pref-shortcuts-enabled': 'keyboard_shortcuts.enabled',
|
||||
'pref-debug-mode': 'advanced.debug_mode',
|
||||
'pref-performance-mode': 'advanced.performance_mode',
|
||||
'pref-cache-enabled': 'advanced.cache_enabled',
|
||||
'pref-auto-backup': 'advanced.auto_backup'
|
||||
}};
|
||||
|
||||
const key = keyMap[element.id];
|
||||
if (key) {{
|
||||
let value = element.type === 'checkbox' ? element.checked : element.value;
|
||||
if (element.type === 'number') {{
|
||||
value = parseInt(value, 10);
|
||||
}}
|
||||
this.set(key, value);
|
||||
}}
|
||||
}}
|
||||
|
||||
showPreferences() {{
|
||||
const modal = document.getElementById('preferences-modal');
|
||||
if (modal) {{
|
||||
const bsModal = new bootstrap.Modal(modal);
|
||||
bsModal.show();
|
||||
}}
|
||||
}}
|
||||
|
||||
onPreferenceChange(key, callback) {{
|
||||
if (!this.changeListeners.has(key)) {{
|
||||
this.changeListeners.set(key, []);
|
||||
}}
|
||||
this.changeListeners.get(key).push(callback);
|
||||
}}
|
||||
|
||||
notifyChangeListeners(key, newValue, oldValue) {{
|
||||
const listeners = this.changeListeners.get(key) || [];
|
||||
listeners.forEach(callback => {{
|
||||
try {{
|
||||
callback(newValue, oldValue, key);
|
||||
}} catch (error) {{
|
||||
console.error('Error in preference change listener:', error);
|
||||
}}
|
||||
}});
|
||||
}}
|
||||
|
||||
reset() {{
|
||||
this.preferences = JSON.parse(JSON.stringify(this.defaultPreferences));
|
||||
this.applyPreferences();
|
||||
this.saveToServer();
|
||||
localStorage.removeItem('aniworld_preferences');
|
||||
}}
|
||||
|
||||
export() {{
|
||||
const data = JSON.stringify(this.preferences, null, 2);
|
||||
const blob = new Blob([data], {{ type: 'application/json' }});
|
||||
const url = URL.createObjectURL(blob);
|
||||
|
||||
const a = document.createElement('a');
|
||||
a.href = url;
|
||||
a.download = 'aniworld_preferences.json';
|
||||
document.body.appendChild(a);
|
||||
a.click();
|
||||
document.body.removeChild(a);
|
||||
URL.revokeObjectURL(url);
|
||||
}}
|
||||
|
||||
import(file) {{
|
||||
return new Promise((resolve, reject) => {{
|
||||
const reader = new FileReader();
|
||||
reader.onload = (e) => {{
|
||||
try {{
|
||||
const imported = JSON.parse(e.target.result);
|
||||
this.preferences = this.mergePreferences(this.defaultPreferences, imported);
|
||||
this.applyPreferences();
|
||||
this.saveToServer();
|
||||
resolve(true);
|
||||
}} catch (error) {{
|
||||
reject(error);
|
||||
}}
|
||||
}};
|
||||
reader.onerror = reject;
|
||||
reader.readAsText(file);
|
||||
}});
|
||||
}}
|
||||
|
||||
mergePreferences(defaults, userPrefs) {{
|
||||
const result = {{ ...defaults }};
|
||||
|
||||
for (const [key, value] of Object.entries(userPrefs)) {{
|
||||
if (key in result && typeof result[key] === 'object' && typeof value === 'object') {{
|
||||
result[key] = this.mergePreferences(result[key], value);
|
||||
}} else {{
|
||||
result[key] = value;
|
||||
}}
|
||||
}}
|
||||
|
||||
return result;
|
||||
}}
|
||||
}}
|
||||
|
||||
// Initialize preferences when DOM is loaded
|
||||
document.addEventListener('DOMContentLoaded', () => {{
|
||||
window.preferencesManager = new UserPreferencesManager();
|
||||
}});
|
||||
"""
|
||||
|
||||
def get_css(self):
|
||||
"""Generate CSS for user preferences."""
|
||||
return """
|
||||
/* User Preferences Styles */
|
||||
.density-compact {
|
||||
--spacing: 0.5rem;
|
||||
--font-size: 0.875rem;
|
||||
}
|
||||
|
||||
.density-comfortable {
|
||||
--spacing: 1rem;
|
||||
--font-size: 1rem;
|
||||
}
|
||||
|
||||
.density-spacious {
|
||||
--spacing: 1.5rem;
|
||||
--font-size: 1.125rem;
|
||||
}
|
||||
|
||||
.no-animations * {
|
||||
animation-duration: 0s !important;
|
||||
transition-duration: 0s !important;
|
||||
}
|
||||
|
||||
.theme-light {
|
||||
--bs-body-bg: #ffffff;
|
||||
--bs-body-color: #212529;
|
||||
--bs-primary: #0d6efd;
|
||||
}
|
||||
|
||||
.theme-dark {
|
||||
--bs-body-bg: #121212;
|
||||
--bs-body-color: #e9ecef;
|
||||
--bs-primary: #0d6efd;
|
||||
}
|
||||
|
||||
#preferences-modal .nav-tabs {
|
||||
border-bottom: 1px solid var(--bs-border-color);
|
||||
}
|
||||
|
||||
#preferences-modal .tab-pane {
|
||||
min-height: 300px;
|
||||
}
|
||||
|
||||
.preference-group {
|
||||
margin-bottom: 2rem;
|
||||
}
|
||||
|
||||
.preference-group h6 {
|
||||
color: var(--bs-secondary);
|
||||
margin-bottom: 1rem;
|
||||
}
|
||||
|
||||
/* Responsive preferences modal */
|
||||
@media (max-width: 768px) {
|
||||
#preferences-modal .modal-dialog {
|
||||
max-width: 95vw;
|
||||
margin: 0.5rem;
|
||||
}
|
||||
|
||||
#preferences-modal .nav-tabs {
|
||||
flex-wrap: wrap;
|
||||
}
|
||||
|
||||
#preferences-modal .nav-link {
|
||||
font-size: 0.875rem;
|
||||
padding: 0.5rem;
|
||||
}
|
||||
}
|
||||
"""
|
||||
|
||||
|
||||
# Create the preferences API blueprint
|
||||
preferences_bp = Blueprint('preferences', __name__, url_prefix='/api')
|
||||
|
||||
# Global preferences manager instance
|
||||
preferences_manager = UserPreferencesManager()
|
||||
|
||||
@preferences_bp.route('/preferences', methods=['GET'])
|
||||
def get_preferences():
|
||||
"""Get user preferences."""
|
||||
try:
|
||||
return jsonify(preferences_manager.get_user_session_preferences())
|
||||
except Exception as e:
|
||||
return jsonify({'error': str(e)}), 500
|
||||
|
||||
@preferences_bp.route('/preferences', methods=['PUT'])
|
||||
def update_preferences():
|
||||
"""Update user preferences."""
|
||||
try:
|
||||
data = request.get_json()
|
||||
preferences_manager.preferences = preferences_manager.merge_preferences(
|
||||
preferences_manager.default_preferences,
|
||||
data
|
||||
)
|
||||
|
||||
if preferences_manager.save_preferences():
|
||||
return jsonify({'success': True, 'message': 'Preferences updated'})
|
||||
else:
|
||||
return jsonify({'error': 'Failed to save preferences'}), 500
|
||||
|
||||
except Exception as e:
|
||||
return jsonify({'error': str(e)}), 500
|
||||
|
||||
@preferences_bp.route('/preferences/<key>', methods=['GET'])
|
||||
def get_preference(key):
|
||||
"""Get a specific preference."""
|
||||
try:
|
||||
value = preferences_manager.get_preference(key)
|
||||
return jsonify({'key': key, 'value': value})
|
||||
except Exception as e:
|
||||
return jsonify({'error': str(e)}), 500
|
||||
|
||||
@preferences_bp.route('/preferences/<key>', methods=['PUT'])
|
||||
def set_preference(key):
|
||||
"""Set a specific preference."""
|
||||
try:
|
||||
data = request.get_json()
|
||||
value = data.get('value')
|
||||
|
||||
if preferences_manager.set_preference(key, value):
|
||||
return jsonify({'success': True, 'key': key, 'value': value})
|
||||
else:
|
||||
return jsonify({'error': 'Failed to set preference'}), 500
|
||||
|
||||
except Exception as e:
|
||||
return jsonify({'error': str(e)}), 500
|
||||
|
||||
@preferences_bp.route('/preferences/reset', methods=['POST'])
|
||||
def reset_preferences():
|
||||
"""Reset preferences to defaults."""
|
||||
try:
|
||||
if preferences_manager.reset_preferences():
|
||||
return jsonify({'success': True, 'message': 'Preferences reset to defaults'})
|
||||
else:
|
||||
return jsonify({'error': 'Failed to reset preferences'}), 500
|
||||
except Exception as e:
|
||||
return jsonify({'error': str(e)}), 500
|
||||
|
||||
@preferences_bp.route('/preferences/export', methods=['GET'])
|
||||
def export_preferences():
|
||||
"""Export preferences as JSON file."""
|
||||
try:
|
||||
from flask import Response
|
||||
json_data = preferences_manager.export_preferences()
|
||||
|
||||
return Response(
|
||||
json_data,
|
||||
mimetype='application/json',
|
||||
headers={'Content-Disposition': 'attachment; filename=aniworld_preferences.json'}
|
||||
)
|
||||
except Exception as e:
|
||||
return jsonify({'error': str(e)}), 500
|
||||
|
||||
@preferences_bp.route('/preferences/import', methods=['POST'])
|
||||
def import_preferences():
|
||||
"""Import preferences from JSON file."""
|
||||
try:
|
||||
if 'file' not in request.files:
|
||||
return jsonify({'error': 'No file provided'}), 400
|
||||
|
||||
file = request.files['file']
|
||||
if file.filename == '':
|
||||
return jsonify({'error': 'No file selected'}), 400
|
||||
|
||||
json_data = file.read().decode('utf-8')
|
||||
|
||||
if preferences_manager.import_preferences(json_data):
|
||||
return jsonify({'success': True, 'message': 'Preferences imported successfully'})
|
||||
else:
|
||||
return jsonify({'error': 'Failed to import preferences'}), 500
|
||||
|
||||
except Exception as e:
|
||||
return jsonify({'error': str(e)}), 500
|
||||
@@ -1,565 +0,0 @@
|
||||
"""
|
||||
System Health Monitoring for AniWorld App
|
||||
|
||||
This module provides comprehensive system health checks and monitoring
|
||||
for the anime downloading application.
|
||||
"""
|
||||
|
||||
import psutil
|
||||
import logging
|
||||
import threading
|
||||
import time
|
||||
from typing import Dict, List, Optional, Any
|
||||
from datetime import datetime, timedelta
|
||||
from dataclasses import dataclass
|
||||
from flask import Blueprint, jsonify, request
|
||||
import os
|
||||
import socket
|
||||
import requests
|
||||
from auth import require_auth, optional_auth
|
||||
|
||||
|
||||
@dataclass
|
||||
class HealthMetric:
|
||||
"""Represents a health metric measurement."""
|
||||
name: str
|
||||
value: Any
|
||||
unit: str
|
||||
status: str # 'healthy', 'warning', 'critical'
|
||||
threshold_warning: Optional[float] = None
|
||||
threshold_critical: Optional[float] = None
|
||||
timestamp: Optional[datetime] = None
|
||||
|
||||
def __post_init__(self):
|
||||
if self.timestamp is None:
|
||||
self.timestamp = datetime.now()
|
||||
|
||||
|
||||
class SystemHealthMonitor:
|
||||
"""Monitor system health metrics and performance."""
|
||||
|
||||
def __init__(self, check_interval: int = 60):
|
||||
self.check_interval = check_interval
|
||||
self.logger = logging.getLogger(__name__)
|
||||
self.metrics_history: Dict[str, List[HealthMetric]] = {}
|
||||
self.alerts: List[Dict] = []
|
||||
self.monitoring_enabled = True
|
||||
self.monitor_thread = None
|
||||
self._lock = threading.Lock()
|
||||
|
||||
# Configurable thresholds
|
||||
self.thresholds = {
|
||||
'cpu_percent': {'warning': 80.0, 'critical': 95.0},
|
||||
'memory_percent': {'warning': 85.0, 'critical': 95.0},
|
||||
'disk_percent': {'warning': 90.0, 'critical': 98.0},
|
||||
'disk_free_gb': {'warning': 5.0, 'critical': 1.0},
|
||||
'network_latency_ms': {'warning': 1000, 'critical': 5000},
|
||||
}
|
||||
|
||||
def start_monitoring(self):
|
||||
"""Start continuous health monitoring."""
|
||||
if self.monitor_thread and self.monitor_thread.is_alive():
|
||||
self.logger.warning("Health monitoring already running")
|
||||
return
|
||||
|
||||
self.monitoring_enabled = True
|
||||
self.monitor_thread = threading.Thread(target=self._monitoring_loop, daemon=True)
|
||||
self.monitor_thread.start()
|
||||
self.logger.info("System health monitoring started")
|
||||
|
||||
def stop_monitoring(self):
|
||||
"""Stop health monitoring."""
|
||||
self.monitoring_enabled = False
|
||||
if self.monitor_thread:
|
||||
self.monitor_thread.join(timeout=5)
|
||||
self.logger.info("System health monitoring stopped")
|
||||
|
||||
def _monitoring_loop(self):
|
||||
"""Main monitoring loop."""
|
||||
while self.monitoring_enabled:
|
||||
try:
|
||||
self.collect_all_metrics()
|
||||
time.sleep(self.check_interval)
|
||||
except Exception as e:
|
||||
self.logger.error(f"Error in monitoring loop: {e}", exc_info=True)
|
||||
time.sleep(self.check_interval)
|
||||
|
||||
def collect_all_metrics(self):
|
||||
"""Collect all health metrics."""
|
||||
metrics = []
|
||||
|
||||
# System metrics
|
||||
metrics.extend(self.get_cpu_metrics())
|
||||
metrics.extend(self.get_memory_metrics())
|
||||
metrics.extend(self.get_disk_metrics())
|
||||
metrics.extend(self.get_network_metrics())
|
||||
|
||||
# Application metrics
|
||||
metrics.extend(self.get_process_metrics())
|
||||
|
||||
# Store metrics
|
||||
with self._lock:
|
||||
for metric in metrics:
|
||||
if metric.name not in self.metrics_history:
|
||||
self.metrics_history[metric.name] = []
|
||||
|
||||
self.metrics_history[metric.name].append(metric)
|
||||
|
||||
# Keep only last 24 hours of data
|
||||
cutoff = datetime.now() - timedelta(hours=24)
|
||||
self.metrics_history[metric.name] = [
|
||||
m for m in self.metrics_history[metric.name]
|
||||
if m.timestamp > cutoff
|
||||
]
|
||||
|
||||
# Check for alerts
|
||||
self._check_alert_conditions(metric)
|
||||
|
||||
def get_cpu_metrics(self) -> List[HealthMetric]:
|
||||
"""Get CPU-related metrics."""
|
||||
metrics = []
|
||||
|
||||
# CPU usage percentage
|
||||
cpu_percent = psutil.cpu_percent(interval=1)
|
||||
status = self._get_status_for_metric('cpu_percent', cpu_percent)
|
||||
metrics.append(HealthMetric(
|
||||
name='cpu_percent',
|
||||
value=cpu_percent,
|
||||
unit='%',
|
||||
status=status,
|
||||
threshold_warning=self.thresholds['cpu_percent']['warning'],
|
||||
threshold_critical=self.thresholds['cpu_percent']['critical']
|
||||
))
|
||||
|
||||
# CPU count
|
||||
metrics.append(HealthMetric(
|
||||
name='cpu_count',
|
||||
value=psutil.cpu_count(),
|
||||
unit='cores',
|
||||
status='healthy'
|
||||
))
|
||||
|
||||
# Load average (Unix-like systems only)
|
||||
try:
|
||||
load_avg = psutil.getloadavg()
|
||||
metrics.append(HealthMetric(
|
||||
name='load_average_1m',
|
||||
value=load_avg[0],
|
||||
unit='',
|
||||
status='healthy'
|
||||
))
|
||||
except AttributeError:
|
||||
pass # Not available on Windows
|
||||
|
||||
return metrics
|
||||
|
||||
def get_memory_metrics(self) -> List[HealthMetric]:
|
||||
"""Get memory-related metrics."""
|
||||
metrics = []
|
||||
|
||||
# Virtual memory
|
||||
memory = psutil.virtual_memory()
|
||||
status = self._get_status_for_metric('memory_percent', memory.percent)
|
||||
|
||||
metrics.append(HealthMetric(
|
||||
name='memory_percent',
|
||||
value=memory.percent,
|
||||
unit='%',
|
||||
status=status,
|
||||
threshold_warning=self.thresholds['memory_percent']['warning'],
|
||||
threshold_critical=self.thresholds['memory_percent']['critical']
|
||||
))
|
||||
|
||||
metrics.append(HealthMetric(
|
||||
name='memory_total_gb',
|
||||
value=round(memory.total / (1024**3), 2),
|
||||
unit='GB',
|
||||
status='healthy'
|
||||
))
|
||||
|
||||
metrics.append(HealthMetric(
|
||||
name='memory_available_gb',
|
||||
value=round(memory.available / (1024**3), 2),
|
||||
unit='GB',
|
||||
status='healthy'
|
||||
))
|
||||
|
||||
# Swap memory
|
||||
swap = psutil.swap_memory()
|
||||
if swap.total > 0:
|
||||
metrics.append(HealthMetric(
|
||||
name='swap_percent',
|
||||
value=swap.percent,
|
||||
unit='%',
|
||||
status='warning' if swap.percent > 50 else 'healthy'
|
||||
))
|
||||
|
||||
return metrics
|
||||
|
||||
def get_disk_metrics(self) -> List[HealthMetric]:
|
||||
"""Get disk-related metrics."""
|
||||
metrics = []
|
||||
|
||||
# Check main disk partitions
|
||||
partitions = psutil.disk_partitions()
|
||||
for partition in partitions:
|
||||
if 'cdrom' in partition.opts or partition.fstype == '':
|
||||
continue
|
||||
|
||||
try:
|
||||
usage = psutil.disk_usage(partition.mountpoint)
|
||||
disk_percent = (usage.used / usage.total) * 100
|
||||
free_gb = usage.free / (1024**3)
|
||||
|
||||
# Disk usage percentage
|
||||
status_percent = self._get_status_for_metric('disk_percent', disk_percent)
|
||||
device_name = partition.device.replace(":", "").replace("\\", "")
|
||||
metrics.append(HealthMetric(
|
||||
name=f'disk_percent_{device_name}',
|
||||
value=round(disk_percent, 1),
|
||||
unit='%',
|
||||
status=status_percent,
|
||||
threshold_warning=self.thresholds['disk_percent']['warning'],
|
||||
threshold_critical=self.thresholds['disk_percent']['critical']
|
||||
))
|
||||
|
||||
# Free space in GB
|
||||
status_free = 'critical' if free_gb < self.thresholds['disk_free_gb']['critical'] \
|
||||
else 'warning' if free_gb < self.thresholds['disk_free_gb']['warning'] \
|
||||
else 'healthy'
|
||||
|
||||
metrics.append(HealthMetric(
|
||||
name=f'disk_free_gb_{device_name}',
|
||||
value=round(free_gb, 2),
|
||||
unit='GB',
|
||||
status=status_free,
|
||||
threshold_warning=self.thresholds['disk_free_gb']['warning'],
|
||||
threshold_critical=self.thresholds['disk_free_gb']['critical']
|
||||
))
|
||||
|
||||
except PermissionError:
|
||||
continue
|
||||
|
||||
# Disk I/O
|
||||
try:
|
||||
disk_io = psutil.disk_io_counters()
|
||||
if disk_io:
|
||||
metrics.append(HealthMetric(
|
||||
name='disk_read_mb',
|
||||
value=round(disk_io.read_bytes / (1024**2), 2),
|
||||
unit='MB',
|
||||
status='healthy'
|
||||
))
|
||||
|
||||
metrics.append(HealthMetric(
|
||||
name='disk_write_mb',
|
||||
value=round(disk_io.write_bytes / (1024**2), 2),
|
||||
unit='MB',
|
||||
status='healthy'
|
||||
))
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return metrics
|
||||
|
||||
def get_network_metrics(self) -> List[HealthMetric]:
|
||||
"""Get network-related metrics."""
|
||||
metrics = []
|
||||
|
||||
# Network I/O
|
||||
try:
|
||||
net_io = psutil.net_io_counters()
|
||||
if net_io:
|
||||
metrics.append(HealthMetric(
|
||||
name='network_sent_mb',
|
||||
value=round(net_io.bytes_sent / (1024**2), 2),
|
||||
unit='MB',
|
||||
status='healthy'
|
||||
))
|
||||
|
||||
metrics.append(HealthMetric(
|
||||
name='network_recv_mb',
|
||||
value=round(net_io.bytes_recv / (1024**2), 2),
|
||||
unit='MB',
|
||||
status='healthy'
|
||||
))
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# Network connectivity test
|
||||
try:
|
||||
start_time = time.time()
|
||||
socket.create_connection(("8.8.8.8", 53), timeout=5)
|
||||
latency = (time.time() - start_time) * 1000 # Convert to ms
|
||||
|
||||
status = self._get_status_for_metric('network_latency_ms', latency)
|
||||
metrics.append(HealthMetric(
|
||||
name='network_latency_ms',
|
||||
value=round(latency, 2),
|
||||
unit='ms',
|
||||
status=status,
|
||||
threshold_warning=self.thresholds['network_latency_ms']['warning'],
|
||||
threshold_critical=self.thresholds['network_latency_ms']['critical']
|
||||
))
|
||||
except Exception:
|
||||
metrics.append(HealthMetric(
|
||||
name='network_latency_ms',
|
||||
value=-1,
|
||||
unit='ms',
|
||||
status='critical'
|
||||
))
|
||||
|
||||
return metrics
|
||||
|
||||
def get_process_metrics(self) -> List[HealthMetric]:
|
||||
"""Get process-specific metrics."""
|
||||
metrics = []
|
||||
|
||||
try:
|
||||
# Current process metrics
|
||||
process = psutil.Process()
|
||||
|
||||
# Process CPU usage
|
||||
cpu_percent = process.cpu_percent()
|
||||
metrics.append(HealthMetric(
|
||||
name='process_cpu_percent',
|
||||
value=cpu_percent,
|
||||
unit='%',
|
||||
status='warning' if cpu_percent > 50 else 'healthy'
|
||||
))
|
||||
|
||||
# Process memory usage
|
||||
memory_info = process.memory_info()
|
||||
memory_mb = memory_info.rss / (1024**2)
|
||||
metrics.append(HealthMetric(
|
||||
name='process_memory_mb',
|
||||
value=round(memory_mb, 2),
|
||||
unit='MB',
|
||||
status='warning' if memory_mb > 1024 else 'healthy' # Warning if > 1GB
|
||||
))
|
||||
|
||||
# Process threads
|
||||
threads = process.num_threads()
|
||||
metrics.append(HealthMetric(
|
||||
name='process_threads',
|
||||
value=threads,
|
||||
unit='',
|
||||
status='warning' if threads > 50 else 'healthy'
|
||||
))
|
||||
|
||||
# Process connections
|
||||
try:
|
||||
connections = len(process.connections())
|
||||
metrics.append(HealthMetric(
|
||||
name='process_connections',
|
||||
value=connections,
|
||||
unit='',
|
||||
status='warning' if connections > 100 else 'healthy'
|
||||
))
|
||||
except psutil.AccessDenied:
|
||||
pass
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"Failed to get process metrics: {e}")
|
||||
|
||||
return metrics
|
||||
|
||||
def _get_status_for_metric(self, metric_name: str, value: float) -> str:
|
||||
"""Determine status based on thresholds."""
|
||||
if metric_name in self.thresholds:
|
||||
thresholds = self.thresholds[metric_name]
|
||||
if value >= thresholds['critical']:
|
||||
return 'critical'
|
||||
elif value >= thresholds['warning']:
|
||||
return 'warning'
|
||||
return 'healthy'
|
||||
|
||||
def _check_alert_conditions(self, metric: HealthMetric):
|
||||
"""Check if metric triggers an alert."""
|
||||
if metric.status in ['critical', 'warning']:
|
||||
alert = {
|
||||
'timestamp': metric.timestamp.isoformat(),
|
||||
'metric_name': metric.name,
|
||||
'value': metric.value,
|
||||
'unit': metric.unit,
|
||||
'status': metric.status,
|
||||
'message': f"{metric.name} is {metric.status}: {metric.value}{metric.unit}"
|
||||
}
|
||||
|
||||
with self._lock:
|
||||
self.alerts.append(alert)
|
||||
|
||||
# Keep only last 100 alerts
|
||||
if len(self.alerts) > 100:
|
||||
self.alerts = self.alerts[-100:]
|
||||
|
||||
def get_current_health_status(self) -> Dict[str, Any]:
|
||||
"""Get current system health status."""
|
||||
with self._lock:
|
||||
latest_metrics = {}
|
||||
for name, history in self.metrics_history.items():
|
||||
if history:
|
||||
latest_metrics[name] = {
|
||||
'value': history[-1].value,
|
||||
'unit': history[-1].unit,
|
||||
'status': history[-1].status,
|
||||
'timestamp': history[-1].timestamp.isoformat()
|
||||
}
|
||||
|
||||
# Calculate overall health status
|
||||
statuses = [metric['status'] for metric in latest_metrics.values()]
|
||||
if 'critical' in statuses:
|
||||
overall_status = 'critical'
|
||||
elif 'warning' in statuses:
|
||||
overall_status = 'warning'
|
||||
else:
|
||||
overall_status = 'healthy'
|
||||
|
||||
return {
|
||||
'overall_status': overall_status,
|
||||
'metrics': latest_metrics,
|
||||
'recent_alerts': self.alerts[-10:], # Last 10 alerts
|
||||
'timestamp': datetime.now().isoformat()
|
||||
}
|
||||
|
||||
def get_metric_history(self, metric_name: str, hours: int = 24) -> List[Dict]:
|
||||
"""Get history for a specific metric."""
|
||||
with self._lock:
|
||||
if metric_name not in self.metrics_history:
|
||||
return []
|
||||
|
||||
cutoff = datetime.now() - timedelta(hours=hours)
|
||||
history = [
|
||||
{
|
||||
'value': m.value,
|
||||
'status': m.status,
|
||||
'timestamp': m.timestamp.isoformat()
|
||||
}
|
||||
for m in self.metrics_history[metric_name]
|
||||
if m.timestamp > cutoff
|
||||
]
|
||||
|
||||
return history
|
||||
|
||||
|
||||
# Blueprint for health endpoints
|
||||
health_bp = Blueprint('health', __name__)
|
||||
|
||||
# Global health monitor instance
|
||||
health_monitor = SystemHealthMonitor()
|
||||
|
||||
|
||||
@health_bp.route('/api/health/status')
|
||||
@optional_auth
|
||||
def get_health_status():
|
||||
"""Get current system health status."""
|
||||
try:
|
||||
status = health_monitor.get_current_health_status()
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'data': status
|
||||
})
|
||||
except Exception as e:
|
||||
return jsonify({
|
||||
'status': 'error',
|
||||
'message': str(e)
|
||||
}), 500
|
||||
|
||||
|
||||
@health_bp.route('/api/health/metrics/<metric_name>')
|
||||
@optional_auth
|
||||
def get_metric_history(metric_name):
|
||||
"""Get history for a specific metric."""
|
||||
try:
|
||||
hours = int(request.args.get('hours', 24))
|
||||
history = health_monitor.get_metric_history(metric_name, hours)
|
||||
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'data': {
|
||||
'metric_name': metric_name,
|
||||
'history': history
|
||||
}
|
||||
})
|
||||
except Exception as e:
|
||||
return jsonify({
|
||||
'status': 'error',
|
||||
'message': str(e)
|
||||
}), 500
|
||||
|
||||
|
||||
@health_bp.route('/api/health/alerts')
|
||||
@optional_auth
|
||||
def get_health_alerts():
|
||||
"""Get recent health alerts."""
|
||||
try:
|
||||
with health_monitor._lock:
|
||||
alerts = health_monitor.alerts[-50:] # Last 50 alerts
|
||||
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'data': {
|
||||
'alerts': alerts,
|
||||
'count': len(alerts)
|
||||
}
|
||||
})
|
||||
except Exception as e:
|
||||
return jsonify({
|
||||
'status': 'error',
|
||||
'message': str(e)
|
||||
}), 500
|
||||
|
||||
|
||||
@health_bp.route('/api/health/start', methods=['POST'])
|
||||
@require_auth
|
||||
def start_health_monitoring():
|
||||
"""Start health monitoring."""
|
||||
try:
|
||||
health_monitor.start_monitoring()
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'message': 'Health monitoring started'
|
||||
})
|
||||
except Exception as e:
|
||||
return jsonify({
|
||||
'status': 'error',
|
||||
'message': str(e)
|
||||
}), 500
|
||||
|
||||
|
||||
@health_bp.route('/api/health/stop', methods=['POST'])
|
||||
@require_auth
|
||||
def stop_health_monitoring():
|
||||
"""Stop health monitoring."""
|
||||
try:
|
||||
health_monitor.stop_monitoring()
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'message': 'Health monitoring stopped'
|
||||
})
|
||||
except Exception as e:
|
||||
return jsonify({
|
||||
'status': 'error',
|
||||
'message': str(e)
|
||||
}), 500
|
||||
|
||||
|
||||
def init_health_monitoring():
|
||||
"""Initialize and start health monitoring."""
|
||||
health_monitor.start_monitoring()
|
||||
|
||||
|
||||
def cleanup_health_monitoring():
|
||||
"""Clean up health monitoring resources."""
|
||||
health_monitor.stop_monitoring()
|
||||
|
||||
|
||||
# Export main components
|
||||
__all__ = [
|
||||
'SystemHealthMonitor',
|
||||
'HealthMetric',
|
||||
'health_bp',
|
||||
'health_monitor',
|
||||
'init_health_monitoring',
|
||||
'cleanup_health_monitoring'
|
||||
]
|
||||
@@ -1,303 +0,0 @@
|
||||
from flask import Blueprint, render_template, request, jsonify
|
||||
from web.controllers.auth_controller import optional_auth
|
||||
import threading
|
||||
import time
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
# Create blueprint for download queue management
|
||||
download_queue_bp = Blueprint('download_queue', __name__)
|
||||
|
||||
# Global download queue state
|
||||
download_queue_state = {
|
||||
'active_downloads': [],
|
||||
'pending_queue': [],
|
||||
'completed_downloads': [],
|
||||
'failed_downloads': [],
|
||||
'queue_lock': threading.Lock(),
|
||||
'statistics': {
|
||||
'total_items': 0,
|
||||
'completed_items': 0,
|
||||
'failed_items': 0,
|
||||
'estimated_time_remaining': None,
|
||||
'current_speed': '0 MB/s',
|
||||
'average_speed': '0 MB/s'
|
||||
}
|
||||
}
|
||||
|
||||
@download_queue_bp.route('/queue')
|
||||
@optional_auth
|
||||
def queue_page():
|
||||
"""Download queue management page."""
|
||||
return render_template('queue.html')
|
||||
|
||||
@download_queue_bp.route('/api/queue/status')
|
||||
@optional_auth
|
||||
def get_queue_status():
|
||||
"""Get detailed download queue status."""
|
||||
with download_queue_state['queue_lock']:
|
||||
# Calculate ETA
|
||||
eta = None
|
||||
if download_queue_state['active_downloads']:
|
||||
active_download = download_queue_state['active_downloads'][0]
|
||||
if 'progress' in active_download and active_download['progress'].get('speed_mbps', 0) > 0:
|
||||
remaining_items = len(download_queue_state['pending_queue'])
|
||||
avg_speed = active_download['progress']['speed_mbps']
|
||||
# Rough estimation: assume 500MB per episode
|
||||
estimated_mb_remaining = remaining_items * 500
|
||||
eta_seconds = estimated_mb_remaining / avg_speed if avg_speed > 0 else None
|
||||
if eta_seconds:
|
||||
eta = datetime.now() + timedelta(seconds=eta_seconds)
|
||||
|
||||
return jsonify({
|
||||
'active_downloads': download_queue_state['active_downloads'],
|
||||
'pending_queue': download_queue_state['pending_queue'],
|
||||
'completed_downloads': download_queue_state['completed_downloads'][-10:], # Last 10
|
||||
'failed_downloads': download_queue_state['failed_downloads'][-10:], # Last 10
|
||||
'statistics': {
|
||||
**download_queue_state['statistics'],
|
||||
'eta': eta.isoformat() if eta else None
|
||||
}
|
||||
})
|
||||
|
||||
@download_queue_bp.route('/api/queue/clear', methods=['POST'])
|
||||
@optional_auth
|
||||
def clear_queue():
|
||||
"""Clear completed and failed downloads from queue."""
|
||||
try:
|
||||
data = request.get_json() or {}
|
||||
queue_type = data.get('type', 'completed') # 'completed', 'failed', or 'all'
|
||||
|
||||
with download_queue_state['queue_lock']:
|
||||
if queue_type == 'completed' or queue_type == 'all':
|
||||
download_queue_state['completed_downloads'].clear()
|
||||
|
||||
if queue_type == 'failed' or queue_type == 'all':
|
||||
download_queue_state['failed_downloads'].clear()
|
||||
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'message': f'Cleared {queue_type} downloads'
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
return jsonify({
|
||||
'status': 'error',
|
||||
'message': str(e)
|
||||
}), 500
|
||||
|
||||
@download_queue_bp.route('/api/queue/retry', methods=['POST'])
|
||||
@optional_auth
|
||||
def retry_failed_download():
|
||||
"""Retry a failed download."""
|
||||
try:
|
||||
data = request.get_json()
|
||||
download_id = data.get('id')
|
||||
|
||||
if not download_id:
|
||||
return jsonify({
|
||||
'status': 'error',
|
||||
'message': 'Download ID is required'
|
||||
}), 400
|
||||
|
||||
with download_queue_state['queue_lock']:
|
||||
# Find failed download
|
||||
failed_download = None
|
||||
for i, download in enumerate(download_queue_state['failed_downloads']):
|
||||
if download['id'] == download_id:
|
||||
failed_download = download_queue_state['failed_downloads'].pop(i)
|
||||
break
|
||||
|
||||
if not failed_download:
|
||||
return jsonify({
|
||||
'status': 'error',
|
||||
'message': 'Failed download not found'
|
||||
}), 404
|
||||
|
||||
# Reset download status and add back to queue
|
||||
failed_download['status'] = 'queued'
|
||||
failed_download['error'] = None
|
||||
failed_download['retry_count'] = failed_download.get('retry_count', 0) + 1
|
||||
download_queue_state['pending_queue'].append(failed_download)
|
||||
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'message': 'Download added back to queue'
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
return jsonify({
|
||||
'status': 'error',
|
||||
'message': str(e)
|
||||
}), 500
|
||||
|
||||
@download_queue_bp.route('/api/queue/remove', methods=['POST'])
|
||||
@optional_auth
|
||||
def remove_from_queue():
|
||||
"""Remove an item from the pending queue."""
|
||||
try:
|
||||
data = request.get_json()
|
||||
download_id = data.get('id')
|
||||
|
||||
if not download_id:
|
||||
return jsonify({
|
||||
'status': 'error',
|
||||
'message': 'Download ID is required'
|
||||
}), 400
|
||||
|
||||
with download_queue_state['queue_lock']:
|
||||
# Find and remove from pending queue
|
||||
removed = False
|
||||
for i, download in enumerate(download_queue_state['pending_queue']):
|
||||
if download['id'] == download_id:
|
||||
download_queue_state['pending_queue'].pop(i)
|
||||
removed = True
|
||||
break
|
||||
|
||||
if not removed:
|
||||
return jsonify({
|
||||
'status': 'error',
|
||||
'message': 'Download not found in queue'
|
||||
}), 404
|
||||
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'message': 'Download removed from queue'
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
return jsonify({
|
||||
'status': 'error',
|
||||
'message': str(e)
|
||||
}), 500
|
||||
|
||||
@download_queue_bp.route('/api/queue/reorder', methods=['POST'])
|
||||
@optional_auth
|
||||
def reorder_queue():
|
||||
"""Reorder items in the pending queue."""
|
||||
try:
|
||||
data = request.get_json()
|
||||
new_order = data.get('order') # Array of download IDs in new order
|
||||
|
||||
if not new_order or not isinstance(new_order, list):
|
||||
return jsonify({
|
||||
'status': 'error',
|
||||
'message': 'Valid order array is required'
|
||||
}), 400
|
||||
|
||||
with download_queue_state['queue_lock']:
|
||||
# Create new queue based on the provided order
|
||||
old_queue = download_queue_state['pending_queue'].copy()
|
||||
new_queue = []
|
||||
|
||||
# Add items in the specified order
|
||||
for download_id in new_order:
|
||||
for download in old_queue:
|
||||
if download['id'] == download_id:
|
||||
new_queue.append(download)
|
||||
break
|
||||
|
||||
# Add any remaining items that weren't in the new order
|
||||
for download in old_queue:
|
||||
if download not in new_queue:
|
||||
new_queue.append(download)
|
||||
|
||||
download_queue_state['pending_queue'] = new_queue
|
||||
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'message': 'Queue reordered successfully'
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
return jsonify({
|
||||
'status': 'error',
|
||||
'message': str(e)
|
||||
}), 500
|
||||
|
||||
# Helper functions for queue management
|
||||
def add_to_download_queue(serie_name, episode_info, priority='normal'):
    """Add a download to the queue and return its generated id.

    High-priority entries jump to the front of the pending queue; every
    other priority is appended at the end.
    """
    import uuid

    # Fresh queue entry with zeroed progress; timestamps are ISO strings.
    entry = {
        'id': str(uuid.uuid4()),
        'serie_name': serie_name,
        'episode': episode_info,
        'status': 'queued',
        'priority': priority,
        'added_at': datetime.now().isoformat(),
        'started_at': None,
        'completed_at': None,
        'error': None,
        'retry_count': 0,
        'progress': {
            'percent': 0,
            'downloaded_mb': 0,
            'total_mb': 0,
            'speed_mbps': 0,
            'eta_seconds': None
        }
    }

    with download_queue_state['queue_lock']:
        pending = download_queue_state['pending_queue']
        # Insert based on priority
        if priority == 'high':
            pending.insert(0, entry)
        else:
            pending.append(entry)
        download_queue_state['statistics']['total_items'] += 1

    return entry['id']
|
||||
|
||||
def update_download_progress(download_id, progress_data):
    """Update progress for an active download.

    Also refreshes the queue-wide current-speed statistic whenever the
    update carries a 'speed_mbps' value.  Unknown ids are ignored.
    """
    with download_queue_state['queue_lock']:
        target = next(
            (d for d in download_queue_state['active_downloads']
             if d['id'] == download_id),
            None,
        )
        if target is None:
            return

        target['progress'].update(progress_data)

        # Update global statistics
        if 'speed_mbps' in progress_data:
            download_queue_state['statistics']['current_speed'] = f"{progress_data['speed_mbps']:.1f} MB/s"
|
||||
|
||||
def move_download_to_completed(download_id, success=True, error=None):
    """Move a download from the active list to completed or failed.

    Stamps 'completed_at', updates the entry's status/progress, and bumps
    the matching counter in the global statistics.  Unknown ids are a no-op.
    """
    with download_queue_state['queue_lock']:
        active = download_queue_state['active_downloads']

        download = None
        for idx, entry in enumerate(active):
            if entry['id'] == download_id:
                download = active.pop(idx)
                break

        if download is None:
            return

        download['completed_at'] = datetime.now().isoformat()

        if success:
            download['status'] = 'completed'
            download['progress']['percent'] = 100
            download_queue_state['completed_downloads'].append(download)
            download_queue_state['statistics']['completed_items'] += 1
        else:
            download['status'] = 'failed'
            download['error'] = error
            download_queue_state['failed_downloads'].append(download)
            download_queue_state['statistics']['failed_items'] += 1
|
||||
|
||||
def start_next_download(max_concurrent=3):
    """Move the next queued download to the active state.

    Args:
        max_concurrent: Maximum number of simultaneously active downloads.
            Defaults to 3, matching the previously hard-coded cap, so all
            existing callers are unaffected.

    Returns:
        The promoted download dict, or None when the queue is empty or the
        concurrency limit has been reached.
    """
    with download_queue_state['queue_lock']:
        if (download_queue_state['pending_queue']
                and len(download_queue_state['active_downloads']) < max_concurrent):
            download = download_queue_state['pending_queue'].pop(0)
            download['status'] = 'downloading'
            download['started_at'] = datetime.now().isoformat()
            download_queue_state['active_downloads'].append(download)
            return download
    return None
|
||||
|
||||
def get_queue_statistics():
    """Return a snapshot copy of the current queue statistics."""
    with download_queue_state['queue_lock']:
        snapshot = download_queue_state['statistics'].copy()
    return snapshot
|
||||
@@ -1,252 +0,0 @@
|
||||
import threading
|
||||
import time
|
||||
import schedule
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Optional, Callable, Dict, Any
|
||||
import logging
|
||||
from shared.utils.process_utils import (with_process_lock, RESCAN_LOCK,
|
||||
ProcessLockError, is_process_running)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class ScheduledOperations:
    """Handle scheduled operations like automatic rescans and downloads.

    Wraps the global ``schedule`` module with a daemon background thread.
    NOTE(review): ``schedule`` keeps module-global job state, so only one
    instance of this class should be active per process — confirm callers
    never create two started instances.
    """

    def __init__(self, config_manager, socketio=None):
        self.config = config_manager
        self.socketio = socketio
        self.scheduler_thread = None
        self.running = False
        # Callbacks are injected by the application after construction.
        self.rescan_callback: Optional[Callable] = None
        self.download_callback: Optional[Callable] = None
        self.last_scheduled_rescan: Optional[datetime] = None

        # Load scheduled rescan settings (fall back to safe defaults when
        # the config object does not define them)
        self.scheduled_rescan_enabled = getattr(self.config, 'scheduled_rescan_enabled', False)
        self.scheduled_rescan_time = getattr(self.config, 'scheduled_rescan_time', '03:00')
        self.auto_download_after_rescan = getattr(self.config, 'auto_download_after_rescan', False)

    def set_rescan_callback(self, callback: Callable):
        """Set callback function for performing rescan operations."""
        self.rescan_callback = callback

    def set_download_callback(self, callback: Callable):
        """Set callback function for performing download operations."""
        self.download_callback = callback

    def start_scheduler(self):
        """Start the background scheduler thread (no-op if already running)."""
        if self.running:
            logger.warning("Scheduler is already running")
            return

        self.running = True
        # Daemon thread: does not block interpreter shutdown.
        self.scheduler_thread = threading.Thread(target=self._scheduler_loop, daemon=True)
        self.scheduler_thread.start()
        logger.info("Scheduled operations started")

    def stop_scheduler(self):
        """Stop the background scheduler and clear all scheduled jobs."""
        self.running = False
        schedule.clear()
        if self.scheduler_thread and self.scheduler_thread.is_alive():
            # The loop sleeps up to 60s, so a 5s join may time out; the
            # daemon thread then exits on its next wake-up.
            self.scheduler_thread.join(timeout=5)
        logger.info("Scheduled operations stopped")

    def _scheduler_loop(self):
        """Main scheduler loop that runs in the background thread."""
        self._setup_scheduled_jobs()

        while self.running:
            try:
                schedule.run_pending()
                time.sleep(60)  # Check every minute
            except Exception as e:
                # Keep the loop alive on unexpected errors.
                logger.error(f"Scheduler error: {e}")
                time.sleep(60)

    def _setup_scheduled_jobs(self):
        """(Re)register all scheduled jobs based on current configuration."""
        schedule.clear()

        if self.scheduled_rescan_enabled and self.scheduled_rescan_time:
            try:
                schedule.every().day.at(self.scheduled_rescan_time).do(self._perform_scheduled_rescan)
                logger.info(f"Scheduled daily rescan at {self.scheduled_rescan_time}")
            except Exception as e:
                logger.error(f"Error setting up scheduled rescan: {e}")

    def _perform_scheduled_rescan(self):
        """Perform the scheduled rescan operation under the process lock.

        Emits socket.io progress events when a socketio instance is set,
        skips if a rescan is already running, and optionally kicks off the
        auto-download in a separate thread afterwards.
        """
        try:
            logger.info("Starting scheduled rescan...")

            # Emit scheduled rescan started event
            if self.socketio:
                self.socketio.emit('scheduled_rescan_started')

            # Check if rescan is already running
            if is_process_running(RESCAN_LOCK):
                logger.warning("Rescan is already running, skipping scheduled rescan")
                if self.socketio:
                    self.socketio.emit('scheduled_rescan_skipped', {
                        'reason': 'Rescan already in progress'
                    })
                return

            # Perform the rescan using the process lock
            @with_process_lock(RESCAN_LOCK, timeout_minutes=180)
            def perform_rescan():
                self.last_scheduled_rescan = datetime.now()

                if self.rescan_callback:
                    result = self.rescan_callback()
                    logger.info("Scheduled rescan completed successfully")

                    if self.socketio:
                        self.socketio.emit('scheduled_rescan_completed', {
                            'timestamp': self.last_scheduled_rescan.isoformat(),
                            'result': result
                        })

                    # Auto-start download if configured (in its own thread
                    # so the rescan lock is released first)
                    if self.auto_download_after_rescan and self.download_callback:
                        logger.info("Starting auto-download after scheduled rescan")
                        threading.Thread(
                            target=self._perform_auto_download,
                            daemon=True
                        ).start()
                else:
                    logger.warning("No rescan callback configured")

            perform_rescan(_locked_by='scheduled_operation')

        except ProcessLockError:
            logger.warning("Could not acquire rescan lock for scheduled operation")
            if self.socketio:
                self.socketio.emit('scheduled_rescan_error', {
                    'error': 'Could not acquire rescan lock'
                })
        except Exception as e:
            logger.error(f"Scheduled rescan failed: {e}")
            if self.socketio:
                self.socketio.emit('scheduled_rescan_error', {
                    'error': str(e)
                })

    def _perform_auto_download(self):
        """Perform automatic download after a scheduled rescan."""
        try:
            # Wait a bit after rescan to let the UI update
            time.sleep(10)

            if self.download_callback:
                # Find series with missing episodes and start the download
                logger.info("Starting auto-download of missing episodes")
                result = self.download_callback()

                if self.socketio:
                    self.socketio.emit('auto_download_started', {
                        'timestamp': datetime.now().isoformat(),
                        'result': result
                    })
            else:
                logger.warning("No download callback configured for auto-download")

        except Exception as e:
            logger.error(f"Auto-download after scheduled rescan failed: {e}")
            if self.socketio:
                self.socketio.emit('auto_download_error', {
                    'error': str(e)
                })

    def update_scheduled_rescan_config(self, enabled: bool, time_str: str, auto_download: bool = False):
        """Update and persist the scheduled rescan configuration.

        Args:
            enabled: Whether the daily rescan is active.
            time_str: Daily run time in 'HH:MM' (validated when enabled).
            auto_download: Start a download run after each rescan.

        Raises:
            ValueError: If time_str is not in HH:MM format.
        """
        try:
            # Validate time format
            if enabled and time_str:
                datetime.strptime(time_str, '%H:%M')

            # Update in-memory configuration
            self.scheduled_rescan_enabled = enabled
            self.scheduled_rescan_time = time_str
            self.auto_download_after_rescan = auto_download

            # Persist to config
            self.config.scheduled_rescan_enabled = enabled
            self.config.scheduled_rescan_time = time_str
            self.config.auto_download_after_rescan = auto_download
            self.config.save_config()

            # Restart scheduler jobs with new settings
            if self.running:
                self._setup_scheduled_jobs()

            logger.info(f"Updated scheduled rescan config: enabled={enabled}, time={time_str}, auto_download={auto_download}")
            return True

        except ValueError as e:
            logger.error(f"Invalid time format: {time_str}")
            raise ValueError(f"Invalid time format. Use HH:MM format.")
        except Exception as e:
            logger.error(f"Error updating scheduled rescan config: {e}")
            raise

    def get_scheduled_rescan_config(self) -> Dict[str, Any]:
        """Get current scheduled rescan configuration including next/last run."""
        next_run = None
        if self.scheduled_rescan_enabled and self.scheduled_rescan_time:
            try:
                # Calculate the next run time from today's configured slot
                now = datetime.now()
                today_run = datetime.strptime(f"{now.strftime('%Y-%m-%d')} {self.scheduled_rescan_time}", '%Y-%m-%d %H:%M')

                if now > today_run:
                    # Next run is tomorrow
                    next_run = today_run + timedelta(days=1)
                else:
                    # Next run is today
                    next_run = today_run

            except Exception as e:
                logger.error(f"Error calculating next run time: {e}")

        return {
            'enabled': self.scheduled_rescan_enabled,
            'time': self.scheduled_rescan_time,
            'auto_download_after_rescan': self.auto_download_after_rescan,
            'next_run': next_run.isoformat() if next_run else None,
            'last_run': self.last_scheduled_rescan.isoformat() if self.last_scheduled_rescan else None,
            'is_running': self.running
        }

    def trigger_manual_scheduled_rescan(self):
        """Manually trigger a scheduled rescan (for testing purposes)."""
        logger.info("Manually triggering scheduled rescan")
        threading.Thread(target=self._perform_scheduled_rescan, daemon=True).start()

    def get_next_scheduled_jobs(self) -> list:
        """Get a list of all scheduled jobs with their next run times."""
        jobs = []
        for job in schedule.jobs:
            jobs.append({
                'job_func': job.job_func.__name__ if hasattr(job.job_func, '__name__') else str(job.job_func),
                'next_run': job.next_run.isoformat() if job.next_run else None,
                'interval': str(job.interval),
                'unit': job.unit
            })
        return jobs
|
||||
|
||||
|
||||
# Global scheduler instance (module-level singleton; set by init_scheduler)
scheduled_operations = None


def init_scheduler(config_manager, socketio=None):
    """Initialize the global scheduler singleton.

    Replaces any previously created instance without stopping it — the
    caller is responsible for lifecycle (start_scheduler/stop_scheduler).
    """
    global scheduled_operations
    scheduled_operations = ScheduledOperations(config_manager, socketio)
    return scheduled_operations


def get_scheduler():
    """Get the global scheduler instance (None until init_scheduler runs)."""
    return scheduled_operations
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,10 +0,0 @@
|
||||
"""
|
||||
Configuration package for the Aniworld server.
|
||||
|
||||
This package provides configuration management and environment
|
||||
variable handling for secure application deployment.
|
||||
"""
|
||||
|
||||
from .env_config import EnvironmentConfig, env_config
|
||||
|
||||
__all__ = ['EnvironmentConfig', 'env_config']
|
||||
@@ -1,217 +0,0 @@
|
||||
"""
|
||||
Environment configuration for secure handling of sensitive data.
|
||||
|
||||
This module provides secure environment variable handling and configuration
|
||||
management for the Aniworld server application.
|
||||
"""
|
||||
|
||||
import os
|
||||
import secrets
|
||||
from typing import Optional, Dict, Any
|
||||
from dotenv import load_dotenv
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Load environment variables from .env file
|
||||
load_dotenv()
|
||||
|
||||
|
||||
class EnvironmentConfig:
    """Manages environment variables and secure configuration.

    All values are read from the process environment (after load_dotenv)
    once, at class-definition time.
    """

    # Security
    # NOTE(review): each secret falls back to a freshly generated random
    # value, so every restart without the env var set invalidates existing
    # sessions/tokens — confirm this is intended for production.
    SECRET_KEY: str = os.getenv('SECRET_KEY', secrets.token_urlsafe(32))
    JWT_SECRET_KEY: str = os.getenv('JWT_SECRET_KEY', secrets.token_urlsafe(32))
    PASSWORD_SALT: str = os.getenv('PASSWORD_SALT', secrets.token_hex(32))

    # Database
    DATABASE_URL: str = os.getenv('DATABASE_URL', 'sqlite:///data/aniworld.db')
    DATABASE_PASSWORD: Optional[str] = os.getenv('DATABASE_PASSWORD')

    # Redis (for caching and sessions)
    REDIS_URL: str = os.getenv('REDIS_URL', 'redis://localhost:6379/0')
    REDIS_PASSWORD: Optional[str] = os.getenv('REDIS_PASSWORD')

    # API Keys and External Services
    ANIME_PROVIDER_API_KEY: Optional[str] = os.getenv('ANIME_PROVIDER_API_KEY')
    TMDB_API_KEY: Optional[str] = os.getenv('TMDB_API_KEY')

    # Email Configuration (for password reset)
    SMTP_SERVER: str = os.getenv('SMTP_SERVER', 'localhost')
    SMTP_PORT: int = int(os.getenv('SMTP_PORT', '587'))
    SMTP_USERNAME: Optional[str] = os.getenv('SMTP_USERNAME')
    SMTP_PASSWORD: Optional[str] = os.getenv('SMTP_PASSWORD')
    SMTP_USE_TLS: bool = os.getenv('SMTP_USE_TLS', 'true').lower() == 'true'
    FROM_EMAIL: str = os.getenv('FROM_EMAIL', 'noreply@aniworld.local')

    # Security Settings
    SESSION_TIMEOUT_HOURS: int = int(os.getenv('SESSION_TIMEOUT_HOURS', '24'))
    MAX_FAILED_LOGIN_ATTEMPTS: int = int(os.getenv('MAX_FAILED_LOGIN_ATTEMPTS', '5'))
    LOCKOUT_DURATION_MINUTES: int = int(os.getenv('LOCKOUT_DURATION_MINUTES', '30'))

    # Rate Limiting
    RATE_LIMIT_PER_MINUTE: int = int(os.getenv('RATE_LIMIT_PER_MINUTE', '60'))
    API_RATE_LIMIT_PER_MINUTE: int = int(os.getenv('API_RATE_LIMIT_PER_MINUTE', '100'))

    # Application Settings
    DEBUG: bool = os.getenv('DEBUG', 'false').lower() == 'true'
    HOST: str = os.getenv('HOST', '127.0.0.1')
    PORT: int = int(os.getenv('PORT', '5000'))

    # Anime Directory and Download Settings
    ANIME_DIRECTORY: str = os.getenv('ANIME_DIRECTORY', './downloads')
    MAX_CONCURRENT_DOWNLOADS: int = int(os.getenv('MAX_CONCURRENT_DOWNLOADS', '3'))
    # 0 (the default) is normalized to None, meaning "no speed limit"
    DOWNLOAD_SPEED_LIMIT: Optional[int] = int(os.getenv('DOWNLOAD_SPEED_LIMIT', '0')) or None

    # Logging
    LOG_LEVEL: str = os.getenv('LOG_LEVEL', 'INFO')
    LOG_FILE: str = os.getenv('LOG_FILE', 'logs/aniworld.log')

    @classmethod
    def get_database_config(cls) -> Dict[str, Any]:
        """Get database configuration (URL, password, pool settings)."""
        return {
            'url': cls.DATABASE_URL,
            'password': cls.DATABASE_PASSWORD,
            'pool_size': int(os.getenv('DATABASE_POOL_SIZE', '10')),
            'max_overflow': int(os.getenv('DATABASE_MAX_OVERFLOW', '20')),
            'pool_timeout': int(os.getenv('DATABASE_POOL_TIMEOUT', '30')),
            'pool_recycle': int(os.getenv('DATABASE_POOL_RECYCLE', '3600'))
        }

    @classmethod
    def get_redis_config(cls) -> Dict[str, Any]:
        """Get Redis configuration (URL, password, connection limits)."""
        return {
            'url': cls.REDIS_URL,
            'password': cls.REDIS_PASSWORD,
            'max_connections': int(os.getenv('REDIS_MAX_CONNECTIONS', '10')),
            'retry_on_timeout': True,
            'socket_timeout': int(os.getenv('REDIS_SOCKET_TIMEOUT', '5'))
        }

    @classmethod
    def get_email_config(cls) -> Dict[str, Any]:
        """Get SMTP email configuration."""
        return {
            'server': cls.SMTP_SERVER,
            'port': cls.SMTP_PORT,
            'username': cls.SMTP_USERNAME,
            'password': cls.SMTP_PASSWORD,
            'use_tls': cls.SMTP_USE_TLS,
            'from_email': cls.FROM_EMAIL
        }

    @classmethod
    def get_security_config(cls) -> Dict[str, Any]:
        """Get security configuration (secrets, lockout, rate limits)."""
        return {
            'secret_key': cls.SECRET_KEY,
            'jwt_secret_key': cls.JWT_SECRET_KEY,
            'password_salt': cls.PASSWORD_SALT,
            'session_timeout_hours': cls.SESSION_TIMEOUT_HOURS,
            'max_failed_attempts': cls.MAX_FAILED_LOGIN_ATTEMPTS,
            'lockout_duration_minutes': cls.LOCKOUT_DURATION_MINUTES,
            'rate_limit_per_minute': cls.RATE_LIMIT_PER_MINUTE,
            'api_rate_limit_per_minute': cls.API_RATE_LIMIT_PER_MINUTE
        }

    @classmethod
    def validate_config(cls) -> bool:
        """Validate that required configuration is present.

        NOTE(review): the checked attributes always carry generated
        fallbacks, so this can never report them missing — confirm whether
        the intent was to check os.getenv(var) instead.
        """
        required_vars = [
            'SECRET_KEY',
            'JWT_SECRET_KEY',
            'PASSWORD_SALT'
        ]

        missing_vars = []
        for var in required_vars:
            if not getattr(cls, var):
                missing_vars.append(var)

        if missing_vars:
            logger.error(f"Missing required environment variables: {missing_vars}")
            return False

        return True

    @classmethod
    def generate_env_template(cls, file_path: str = '.env.template') -> bool:
        """Generate a template .env file with all available configuration options.

        Returns True on success, False (after logging) on any write error.
        """
        try:
            template_content = """# Aniworld Server Environment Configuration
# Copy this file to .env and fill in your values

# Security (REQUIRED - Generate secure random values)
SECRET_KEY=your_secret_key_here
JWT_SECRET_KEY=your_jwt_secret_here
PASSWORD_SALT=your_password_salt_here

# Database Configuration
DATABASE_URL=sqlite:///data/aniworld.db
# DATABASE_PASSWORD=your_db_password_here
DATABASE_POOL_SIZE=10
DATABASE_MAX_OVERFLOW=20
DATABASE_POOL_TIMEOUT=30
DATABASE_POOL_RECYCLE=3600

# Redis Configuration (for caching and sessions)
REDIS_URL=redis://localhost:6379/0
# REDIS_PASSWORD=your_redis_password_here
REDIS_MAX_CONNECTIONS=10
REDIS_SOCKET_TIMEOUT=5

# Email Configuration (for password reset emails)
SMTP_SERVER=localhost
SMTP_PORT=587
# SMTP_USERNAME=your_smtp_username
# SMTP_PASSWORD=your_smtp_password
SMTP_USE_TLS=true
FROM_EMAIL=noreply@aniworld.local

# External API Keys
# ANIME_PROVIDER_API_KEY=your_anime_provider_api_key
# TMDB_API_KEY=your_tmdb_api_key

# Security Settings
SESSION_TIMEOUT_HOURS=24
MAX_FAILED_LOGIN_ATTEMPTS=5
LOCKOUT_DURATION_MINUTES=30

# Rate Limiting
RATE_LIMIT_PER_MINUTE=60
API_RATE_LIMIT_PER_MINUTE=100

# Application Settings
DEBUG=false
HOST=127.0.0.1
PORT=5000

# Anime and Download Settings
ANIME_DIRECTORY=./downloads
MAX_CONCURRENT_DOWNLOADS=3
# DOWNLOAD_SPEED_LIMIT=1000000 # bytes per second

# Logging
LOG_LEVEL=INFO
LOG_FILE=logs/aniworld.log
"""

            with open(file_path, 'w', encoding='utf-8') as f:
                f.write(template_content)

            logger.info(f"Environment template created at {file_path}")
            return True
        except Exception as e:
            logger.error(f"Error creating environment template: {e}")
            return False
|
||||
|
||||
|
||||
# Create global instance (attributes are class-level, so this is cheap)
env_config = EnvironmentConfig()

# Validate configuration on import; only warns, never aborts startup
if not env_config.validate_config():
    logger.warning("Invalid environment configuration detected. Please check your .env file.")
|
||||
@@ -1,6 +0,0 @@
|
||||
"""
|
||||
Data access layer for the Aniworld server.
|
||||
|
||||
This package contains data managers and repositories for handling
|
||||
database operations and data persistence.
|
||||
"""
|
||||
Binary file not shown.
@@ -1,264 +0,0 @@
|
||||
"""
|
||||
API Key management functionality.
|
||||
|
||||
This module handles API key management including:
|
||||
- API key creation and validation
|
||||
- API key permissions
|
||||
- API key revocation
|
||||
"""
|
||||
|
||||
import secrets
|
||||
import hashlib
|
||||
import logging
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Dict, List, Any, Optional
|
||||
import sqlite3
|
||||
import os
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class APIKeyManager:
    """Manages API keys for users.

    Keys are stored as SHA-256 hashes in SQLite; the plaintext key is only
    ever returned once, at creation time.
    """

    def __init__(self, db_path: str = None):
        """Initialize API key manager with database connection."""
        if db_path is None:
            # Default to a database in the data directory
            data_dir = os.path.dirname(__file__)
            db_path = os.path.join(data_dir, 'aniworld.db')

        self.db_path = db_path
        self._init_database()

    def _init_database(self):
        """Initialize database tables if they don't exist.

        Raises on any SQLite error so startup fails loudly.
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                conn.execute('''
                    CREATE TABLE IF NOT EXISTS api_keys (
                        id INTEGER PRIMARY KEY AUTOINCREMENT,
                        user_id INTEGER NOT NULL,
                        name TEXT NOT NULL,
                        key_hash TEXT UNIQUE NOT NULL,
                        permissions TEXT DEFAULT 'read',
                        created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                        last_used TIMESTAMP,
                        expires_at TIMESTAMP,
                        is_active BOOLEAN DEFAULT 1,
                        FOREIGN KEY (user_id) REFERENCES users (id)
                    )
                ''')

                conn.execute('''
                    CREATE TABLE IF NOT EXISTS api_key_usage (
                        id INTEGER PRIMARY KEY AUTOINCREMENT,
                        api_key_id INTEGER NOT NULL,
                        endpoint TEXT NOT NULL,
                        ip_address TEXT,
                        user_agent TEXT,
                        created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                        FOREIGN KEY (api_key_id) REFERENCES api_keys (id)
                    )
                ''')
                conn.commit()
                logger.info("API key database tables initialized")
        except Exception as e:
            logger.error(f"Error initializing API key database: {e}")
            raise

    def _hash_api_key(self, api_key: str) -> str:
        """Hash API key for secure storage (unsalted SHA-256 of the key)."""
        return hashlib.sha256(api_key.encode()).hexdigest()

    def create_api_key(self, user_id: int, name: str, permissions: str = 'read',
                       expires_days: int = None) -> Dict[str, Any]:
        """Create a new API key for a user.

        Returns a dict that includes the plaintext key — this is the only
        time the key is available; only its hash is persisted.
        """
        try:
            # Generate secure API key
            api_key = f"ak_{secrets.token_urlsafe(32)}"
            key_hash = self._hash_api_key(api_key)

            # Calculate expiry if specified
            expires_at = None
            if expires_days:
                expires_at = datetime.now() + timedelta(days=expires_days)

            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.execute('''
                    INSERT INTO api_keys (user_id, name, key_hash, permissions, expires_at)
                    VALUES (?, ?, ?, ?, ?)
                ''', (user_id, name, key_hash, permissions, expires_at))

                api_key_id = cursor.lastrowid
                conn.commit()

            logger.info(f"Created API key '{name}' for user {user_id}")

            return {
                'id': api_key_id,
                'key': api_key,  # Only returned once!
                'name': name,
                'permissions': permissions,
                'expires_at': expires_at.isoformat() if expires_at else None,
                'created_at': datetime.now().isoformat()
            }
        except Exception as e:
            logger.error(f"Error creating API key for user {user_id}: {e}")
            raise

    def validate_api_key(self, api_key: str) -> Optional[Dict[str, Any]]:
        """Validate an API key and return its row (plus username) if valid.

        A key is valid when active, unexpired, and its owner is active.
        Returns None on invalid keys or lookup errors.
        """
        try:
            key_hash = self._hash_api_key(api_key)

            with sqlite3.connect(self.db_path) as conn:
                conn.row_factory = sqlite3.Row
                cursor = conn.execute('''
                    SELECT ak.*, u.username FROM api_keys ak
                    JOIN users u ON ak.user_id = u.id
                    WHERE ak.key_hash = ?
                    AND ak.is_active = 1
                    AND (ak.expires_at IS NULL OR ak.expires_at > ?)
                    AND u.is_active = 1
                ''', (key_hash, datetime.now()))

                key_row = cursor.fetchone()
                if key_row:
                    key_info = dict(key_row)
                    # Update last used timestamp
                    self._update_last_used(key_info['id'])
                    return key_info

                return None
        except Exception as e:
            logger.error(f"Error validating API key: {e}")
            return None

    def get_user_api_keys(self, user_id: int) -> List[Dict[str, Any]]:
        """Get all API keys for a user (without the actual key values)."""
        try:
            with sqlite3.connect(self.db_path) as conn:
                conn.row_factory = sqlite3.Row
                cursor = conn.execute('''
                    SELECT id, name, permissions, created_at, last_used, expires_at, is_active
                    FROM api_keys
                    WHERE user_id = ?
                    ORDER BY created_at DESC
                ''', (user_id,))

                return [dict(row) for row in cursor.fetchall()]
        except Exception as e:
            logger.error(f"Error getting API keys for user {user_id}: {e}")
            return []

    def revoke_api_key(self, key_id: int, user_id: int = None) -> bool:
        """Revoke (deactivate) an API key.

        When user_id is given, the key is only revoked if it belongs to
        that user. Returns True if a row was updated.
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                # If user_id is provided, ensure the key belongs to the user
                if user_id:
                    cursor = conn.execute('''
                        UPDATE api_keys
                        SET is_active = 0
                        WHERE id = ? AND user_id = ?
                    ''', (key_id, user_id))
                else:
                    cursor = conn.execute('''
                        UPDATE api_keys
                        SET is_active = 0
                        WHERE id = ?
                    ''', (key_id,))

                success = cursor.rowcount > 0
                conn.commit()

                if success:
                    logger.info(f"Revoked API key ID {key_id}")

                return success
        except Exception as e:
            logger.error(f"Error revoking API key {key_id}: {e}")
            return False

    def _update_last_used(self, api_key_id: int):
        """Update the last-used timestamp for an API key (best-effort)."""
        try:
            with sqlite3.connect(self.db_path) as conn:
                conn.execute('''
                    UPDATE api_keys
                    SET last_used = CURRENT_TIMESTAMP
                    WHERE id = ?
                ''', (api_key_id,))
                conn.commit()
        except Exception as e:
            logger.error(f"Error updating last used for API key {api_key_id}: {e}")

    def log_api_usage(self, api_key_id: int, endpoint: str, ip_address: str = None,
                      user_agent: str = None):
        """Log one API-key request for later usage statistics (best-effort)."""
        try:
            with sqlite3.connect(self.db_path) as conn:
                conn.execute('''
                    INSERT INTO api_key_usage (api_key_id, endpoint, ip_address, user_agent)
                    VALUES (?, ?, ?, ?)
                ''', (api_key_id, endpoint, ip_address, user_agent))
                conn.commit()
        except Exception as e:
            logger.error(f"Error logging API usage: {e}")

    def get_api_usage_stats(self, api_key_id: int, days: int = 30) -> Dict[str, Any]:
        """Get usage statistics for an API key over the last `days` days.

        Returns zeroed stats (never raises) on lookup errors.
        """
        try:
            since_date = datetime.now() - timedelta(days=days)

            with sqlite3.connect(self.db_path) as conn:
                conn.row_factory = sqlite3.Row

                # Total requests
                cursor = conn.execute('''
                    SELECT COUNT(*) as total_requests
                    FROM api_key_usage
                    WHERE api_key_id = ? AND created_at > ?
                ''', (api_key_id, since_date))
                total_requests = cursor.fetchone()['total_requests']

                # Requests by endpoint
                cursor = conn.execute('''
                    SELECT endpoint, COUNT(*) as requests
                    FROM api_key_usage
                    WHERE api_key_id = ? AND created_at > ?
                    GROUP BY endpoint
                    ORDER BY requests DESC
                ''', (api_key_id, since_date))
                endpoints = [dict(row) for row in cursor.fetchall()]

                return {
                    'total_requests': total_requests,
                    'endpoints': endpoints,
                    'period_days': days
                }
        except Exception as e:
            logger.error(f"Error getting API usage stats for key {api_key_id}: {e}")
            return {'total_requests': 0, 'endpoints': [], 'period_days': days}

    def cleanup_expired_keys(self):
        """Deactivate all expired API keys; returns the number cleaned up."""
        try:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.execute('''
                    UPDATE api_keys
                    SET is_active = 0
                    WHERE expires_at <= ? AND is_active = 1
                ''', (datetime.now(),))

                cleaned_count = cursor.rowcount
                conn.commit()

                if cleaned_count > 0:
                    logger.info(f"Cleaned up {cleaned_count} expired API keys")

                return cleaned_count
        except Exception as e:
            logger.error(f"Error cleaning up expired API keys: {e}")
            return 0
|
||||
@@ -1,49 +0,0 @@
|
||||
{
|
||||
"security": {
|
||||
"master_password_hash": "1353f6d9db7090c302864c2d6437dc11cc96cd66d59d7737d1b345603fdbdfda",
|
||||
"salt": "a25e23440d681cef2d75c0adb6de0913359a1d8b9f98f9747fc75f53c79c4bd4",
|
||||
"session_timeout_hours": 24,
|
||||
"max_failed_attempts": 5,
|
||||
"lockout_duration_minutes": 30
|
||||
},
|
||||
"anime": {
|
||||
"directory": "\\\\sshfs.r\\ubuntu@192.168.178.43\\media\\serien\\Serien",
|
||||
"download_threads": 3,
|
||||
"download_speed_limit": null,
|
||||
"auto_rescan_time": "03:00",
|
||||
"auto_download_after_rescan": false
|
||||
},
|
||||
"logging": {
|
||||
"level": "INFO",
|
||||
"enable_console_logging": true,
|
||||
"enable_console_progress": false,
|
||||
"enable_fail2ban_logging": true,
|
||||
"log_file": "aniworld.log",
|
||||
"max_log_size_mb": 10,
|
||||
"log_backup_count": 5
|
||||
},
|
||||
"providers": {
|
||||
"default_provider": "aniworld.to",
|
||||
"preferred_language": "German Dub",
|
||||
"fallback_providers": [
|
||||
"aniworld.to"
|
||||
],
|
||||
"provider_timeout": 30,
|
||||
"retry_attempts": 3,
|
||||
"provider_settings": {
|
||||
"aniworld.to": {
|
||||
"enabled": true,
|
||||
"priority": 1,
|
||||
"quality_preference": "720p"
|
||||
}
|
||||
}
|
||||
},
|
||||
"advanced": {
|
||||
"max_concurrent_downloads": 3,
|
||||
"download_buffer_size": 8192,
|
||||
"connection_timeout": 30,
|
||||
"read_timeout": 300,
|
||||
"enable_debug_mode": false,
|
||||
"cache_duration_minutes": 60
|
||||
}
|
||||
}
|
||||
@@ -1,216 +0,0 @@
|
||||
"""
|
||||
Session management functionality.
|
||||
|
||||
This module handles user session management including:
|
||||
- Session creation and validation
|
||||
- Session expiry handling
|
||||
- Session cleanup
|
||||
"""
|
||||
|
||||
import secrets
|
||||
import time
|
||||
import logging
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Dict, List, Any, Optional
|
||||
import sqlite3
|
||||
import os
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class SessionManager:
    """Manages user sessions stored in a SQLite database.

    Provides session creation (standard and extended lifetime), token
    validation with last-activity tracking, session destruction (single
    and per-user bulk), and cleanup of expired sessions.
    """

    def __init__(self, db_path: str = None):
        """Initialize session manager with database connection.

        Args:
            db_path: Path to the SQLite database file.  Defaults to
                ``aniworld.db`` in this module's directory.
        """
        if db_path is None:
            # Default to a database in the data directory
            data_dir = os.path.dirname(__file__)
            db_path = os.path.join(data_dir, 'aniworld.db')

        self.db_path = db_path
        # Instance logger keeps the class self-contained and mirrors the
        # ``self.logger`` convention used by DatabaseManager.
        self.logger = logging.getLogger(__name__)
        self._init_database()

    @staticmethod
    def _ts(dt: datetime) -> str:
        """Serialize a datetime for storage/comparison in SQLite.

        A space-separated ISO-8601 string compares correctly as text
        against values produced by SQLite's CURRENT_TIMESTAMP and avoids
        sqlite3's implicit datetime adapter, which is deprecated since
        Python 3.12.
        """
        return dt.isoformat(sep=' ')

    def _init_database(self):
        """Initialize database tables if they don't exist.

        Raises:
            Exception: re-raised after logging if table creation fails.
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                conn.execute('''
                    CREATE TABLE IF NOT EXISTS user_sessions (
                        id INTEGER PRIMARY KEY AUTOINCREMENT,
                        user_id INTEGER NOT NULL,
                        session_token TEXT UNIQUE NOT NULL,
                        expires_at TIMESTAMP NOT NULL,
                        created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                        last_activity TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                        ip_address TEXT,
                        user_agent TEXT,
                        is_active BOOLEAN DEFAULT 1,
                        FOREIGN KEY (user_id) REFERENCES users (id)
                    )
                ''')
                conn.commit()
            self.logger.info("Session database tables initialized")
        except Exception as e:
            self.logger.error(f"Error initializing session database: {e}")
            raise

    def create_session(self, user_id: int, extended: bool = False) -> str:
        """Create new session for user.

        Args:
            user_id: ID of the user the session belongs to.
            extended: If True the session lives 30 days ("remember me")
                instead of the default 7.

        Returns:
            The opaque, URL-safe session token.

        Raises:
            Exception: re-raised after logging if the insert fails.
        """
        try:
            session_token = secrets.token_urlsafe(32)

            # Set expiry based on extended flag
            lifetime = timedelta(days=30) if extended else timedelta(days=7)
            expires_at = datetime.now() + lifetime

            with sqlite3.connect(self.db_path) as conn:
                conn.execute('''
                    INSERT INTO user_sessions (user_id, session_token, expires_at)
                    VALUES (?, ?, ?)
                ''', (user_id, session_token, self._ts(expires_at)))
                conn.commit()

            self.logger.info(f"Created session for user {user_id}, expires at {expires_at}")
            return session_token
        except Exception as e:
            self.logger.error(f"Error creating session for user {user_id}: {e}")
            raise

    def validate_session(self, session_token: str) -> Optional[Dict[str, Any]]:
        """Validate session token and return session info if valid.

        A valid session is active and not yet expired; validation also
        bumps the session's last-activity timestamp.

        Returns:
            Session row as a dict, or None if invalid/expired/errored.
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                conn.row_factory = sqlite3.Row
                cursor = conn.execute('''
                    SELECT * FROM user_sessions
                    WHERE session_token = ? AND expires_at > ? AND is_active = 1
                ''', (session_token, self._ts(datetime.now())))

                session_row = cursor.fetchone()
                if session_row:
                    session_info = dict(session_row)
                    # Update last activity
                    self.update_session_activity(session_token)
                    return session_info

            return None
        except Exception as e:
            self.logger.error(f"Error validating session: {e}")
            return None

    def get_session_info(self, session_token: str) -> Optional[Dict[str, Any]]:
        """Get session information without updating activity.

        Returns the row as a dict with an extra ``expired`` flag (1/0),
        or None if the token is unknown or a database error occurs.
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                conn.row_factory = sqlite3.Row
                cursor = conn.execute('''
                    SELECT *, CASE
                        WHEN expires_at <= ? THEN 1
                        ELSE 0
                    END as expired
                    FROM user_sessions
                    WHERE session_token = ?
                ''', (self._ts(datetime.now()), session_token))

                session_row = cursor.fetchone()
                return dict(session_row) if session_row else None
        except Exception as e:
            self.logger.error(f"Error getting session info: {e}")
            return None

    def update_session_activity(self, session_token: str) -> bool:
        """Update session last activity timestamp.

        Returns:
            True if a row was updated, False otherwise.
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.execute('''
                    UPDATE user_sessions
                    SET last_activity = CURRENT_TIMESTAMP
                    WHERE session_token = ?
                ''', (session_token,))

                success = cursor.rowcount > 0
                conn.commit()
                return success
        except Exception as e:
            self.logger.error(f"Error updating session activity: {e}")
            return False

    def destroy_session(self, session_token: str) -> bool:
        """Destroy (deactivate) session.

        Returns:
            True if a session was deactivated, False otherwise.
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.execute('''
                    UPDATE user_sessions
                    SET is_active = 0
                    WHERE session_token = ?
                ''', (session_token,))

                success = cursor.rowcount > 0
                conn.commit()

                if success:
                    # Log only a token prefix: session tokens are bearer
                    # credentials and must never be written in full to logs.
                    self.logger.info(f"Session destroyed: {session_token[:8]}...")

                return success
        except Exception as e:
            self.logger.error(f"Error destroying session: {e}")
            return False

    def destroy_all_sessions(self, user_id: int) -> bool:
        """Destroy all sessions for a user.

        Returns:
            True on success (even if no sessions existed), False on error.
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.execute('''
                    UPDATE user_sessions
                    SET is_active = 0
                    WHERE user_id = ?
                ''', (user_id,))

                sessions_destroyed = cursor.rowcount
                conn.commit()

                self.logger.info(f"Destroyed {sessions_destroyed} sessions for user {user_id}")
                return True
        except Exception as e:
            self.logger.error(f"Error destroying all sessions for user {user_id}: {e}")
            return False

    def get_user_sessions(self, user_id: int) -> List[Dict[str, Any]]:
        """Get all active sessions for a user, most recently active first.

        Returns:
            List of session rows as dicts; empty list on error.
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                conn.row_factory = sqlite3.Row
                cursor = conn.execute('''
                    SELECT * FROM user_sessions
                    WHERE user_id = ? AND is_active = 1
                    ORDER BY last_activity DESC
                ''', (user_id,))

                return [dict(row) for row in cursor.fetchall()]
        except Exception as e:
            self.logger.error(f"Error getting user sessions for user {user_id}: {e}")
            return []

    def cleanup_expired_sessions(self):
        """Deactivate all sessions whose expiry has passed.

        Returns:
            Number of sessions deactivated (0 on error).
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.execute('''
                    UPDATE user_sessions
                    SET is_active = 0
                    WHERE expires_at <= ? AND is_active = 1
                ''', (self._ts(datetime.now()),))

                cleaned_count = cursor.rowcount
                conn.commit()

                if cleaned_count > 0:
                    self.logger.info(f"Cleaned up {cleaned_count} expired sessions")

                return cleaned_count
        except Exception as e:
            self.logger.error(f"Error cleaning up expired sessions: {e}")
            return 0
|
||||
@@ -1,369 +0,0 @@
|
||||
"""
|
||||
User management functionality.
|
||||
|
||||
This module handles all user-related database operations including:
|
||||
- User authentication
|
||||
- User registration
|
||||
- Password management
|
||||
- User profile management
|
||||
"""
|
||||
|
||||
import hashlib
|
||||
import secrets
|
||||
import time
|
||||
import logging
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Dict, List, Any, Optional
|
||||
from dataclasses import dataclass
|
||||
import sqlite3
|
||||
import os
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass
class User:
    """User data model (mirrors a row of the ``users`` table)."""
    id: int                                 # primary key
    username: str                           # unique login name
    email: str                              # unique e-mail address
    password_hash: str                      # "salt:hexdigest" as produced by UserManager._hash_password
    full_name: Optional[str] = None
    created_at: Optional[datetime] = None   # set by the database on insert
    updated_at: Optional[datetime] = None   # bumped on every update
    is_active: bool = True                  # soft-delete flag: False means deactivated
    role: str = 'user'                      # authorization role — presumably 'user'/'admin'; TODO confirm valid values
|
||||
|
||||
|
||||
class UserManager:
    """Manages user data and operations.

    Handles authentication, registration, profile updates, password
    changes/resets and an activity audit log, all backed by SQLite.
    """

    # Columns that may be modified through update_user().  Keys outside
    # this set are rejected so caller-supplied kwargs can never be
    # interpolated into SQL (prevents column-name injection).
    _UPDATABLE_COLUMNS = frozenset({'username', 'email', 'full_name', 'is_active', 'role'})

    def __init__(self, db_path: str = None):
        """Initialize user manager with database connection.

        Args:
            db_path: Path to the SQLite database file.  Defaults to
                ``aniworld.db`` in this module's directory.
        """
        if db_path is None:
            # Default to a database in the data directory
            data_dir = os.path.dirname(__file__)
            db_path = os.path.join(data_dir, 'aniworld.db')

        self.db_path = db_path
        # Instance logger keeps the class self-contained and mirrors the
        # ``self.logger`` convention used by DatabaseManager.
        self.logger = logging.getLogger(__name__)
        self._init_database()

    @staticmethod
    def _ts(dt: datetime) -> str:
        """Serialize a datetime for storage/comparison in SQLite.

        Avoids sqlite3's implicit datetime adapter (deprecated since
        Python 3.12); space-separated ISO text compares correctly
        against CURRENT_TIMESTAMP values.
        """
        return dt.isoformat(sep=' ')

    def _init_database(self):
        """Initialize database tables if they don't exist.

        Creates ``users``, ``password_reset_tokens`` and
        ``user_activity``.

        Raises:
            Exception: re-raised after logging if creation fails.
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                conn.execute('''
                    CREATE TABLE IF NOT EXISTS users (
                        id INTEGER PRIMARY KEY AUTOINCREMENT,
                        username TEXT UNIQUE NOT NULL,
                        email TEXT UNIQUE NOT NULL,
                        password_hash TEXT NOT NULL,
                        full_name TEXT,
                        created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                        updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                        is_active BOOLEAN DEFAULT 1,
                        role TEXT DEFAULT 'user'
                    )
                ''')

                conn.execute('''
                    CREATE TABLE IF NOT EXISTS password_reset_tokens (
                        id INTEGER PRIMARY KEY AUTOINCREMENT,
                        user_id INTEGER NOT NULL,
                        token TEXT UNIQUE NOT NULL,
                        expires_at TIMESTAMP NOT NULL,
                        used BOOLEAN DEFAULT 0,
                        created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                        FOREIGN KEY (user_id) REFERENCES users (id)
                    )
                ''')

                conn.execute('''
                    CREATE TABLE IF NOT EXISTS user_activity (
                        id INTEGER PRIMARY KEY AUTOINCREMENT,
                        user_id INTEGER NOT NULL,
                        action TEXT NOT NULL,
                        details TEXT,
                        ip_address TEXT,
                        user_agent TEXT,
                        created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                        FOREIGN KEY (user_id) REFERENCES users (id)
                    )
                ''')

                conn.commit()
            self.logger.info("User database tables initialized")
        except Exception as e:
            self.logger.error(f"Error initializing user database: {e}")
            raise

    def _hash_password(self, password: str) -> str:
        """Hash password using SHA-256 with a random per-user salt.

        Returns ``"salt:hexdigest"``.

        NOTE(review): a single unsalted-iteration SHA-256 is weak for
        password storage; ``hashlib.pbkdf2_hmac`` (or argon2/bcrypt)
        would be stronger, but switching would invalidate every stored
        hash, so the format is kept for compatibility.
        """
        salt = secrets.token_hex(32)
        password_hash = hashlib.sha256((password + salt).encode()).hexdigest()
        return f"{salt}:{password_hash}"

    def _verify_password(self, password: str, stored_hash: str) -> bool:
        """Verify password against stored ``"salt:hexdigest"`` hash.

        Returns False (rather than raising) for malformed stored hashes.
        """
        import hmac  # local import: stdlib, used only here

        try:
            salt, password_hash = stored_hash.split(':', 1)
        except ValueError:
            return False
        computed_hash = hashlib.sha256((password + salt).encode()).hexdigest()
        # Constant-time comparison prevents timing side-channels on the
        # hash comparison.
        return hmac.compare_digest(computed_hash, password_hash)

    def authenticate_user(self, username: str, password: str) -> Optional[Dict[str, Any]]:
        """Authenticate user with username/email and password.

        Args:
            username: Username OR e-mail address (both are accepted).
            password: Plain-text password to verify.

        Returns:
            The user row as a dict WITHOUT the password hash, or None
            if the credentials are invalid or an error occurs.
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                conn.row_factory = sqlite3.Row
                cursor = conn.execute('''
                    SELECT * FROM users
                    WHERE (username = ? OR email = ?) AND is_active = 1
                ''', (username, username))

                user_row = cursor.fetchone()
                if not user_row:
                    return None

                user = dict(user_row)
                if self._verify_password(password, user['password_hash']):
                    # Log successful authentication
                    self._log_user_activity(user['id'], 'login', 'Successful authentication')
                    # Remove password hash from returned data
                    del user['password_hash']
                    return user

                return None
        except Exception as e:
            self.logger.error(f"Error during authentication: {e}")
            return None

    def _get_user_by_column(self, column: str, value: Any, label: str) -> Optional[Dict[str, Any]]:
        """Fetch one user row by an exact-match column (internal helper).

        ``column`` is always a hard-coded identifier from the public
        getters below, never caller input.  Strips the password hash
        before returning.
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                conn.row_factory = sqlite3.Row
                cursor = conn.execute(f'SELECT * FROM users WHERE {column} = ?', (value,))
                user_row = cursor.fetchone()

                if user_row:
                    user = dict(user_row)
                    del user['password_hash']  # Remove sensitive data
                    return user
                return None
        except Exception as e:
            self.logger.error(f"Error getting user by {label} {value}: {e}")
            return None

    def get_user_by_id(self, user_id: int) -> Optional[Dict[str, Any]]:
        """Get user by ID (password hash removed); None if not found."""
        return self._get_user_by_column('id', user_id, 'ID')

    def get_user_by_username(self, username: str) -> Optional[Dict[str, Any]]:
        """Get user by username (password hash removed); None if not found."""
        return self._get_user_by_column('username', username, 'username')

    def get_user_by_email(self, email: str) -> Optional[Dict[str, Any]]:
        """Get user by email (password hash removed); None if not found."""
        return self._get_user_by_column('email', email, 'email')

    def create_user(self, username: str, email: str, password: str, full_name: str = None) -> Optional[int]:
        """Create new user.

        Returns:
            The new user's row ID, or None if username/email already
            exist or another error occurs.
        """
        try:
            password_hash = self._hash_password(password)

            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.execute('''
                    INSERT INTO users (username, email, password_hash, full_name)
                    VALUES (?, ?, ?, ?)
                ''', (username, email, password_hash, full_name))

                user_id = cursor.lastrowid
                conn.commit()

            self._log_user_activity(user_id, 'register', 'New user account created')
            self.logger.info(f"Created new user: {username} (ID: {user_id})")
            return user_id
        except sqlite3.IntegrityError as e:
            # UNIQUE constraint on username/email
            self.logger.warning(f"User creation failed - duplicate data: {e}")
            return None
        except Exception as e:
            self.logger.error(f"Error creating user: {e}")
            return None

    def update_user(self, user_id: int, **kwargs) -> bool:
        """Update user information.

        Only whitelisted profile columns may be changed; ``id`` and
        ``password_hash`` are always ignored (use change_password for
        the latter).

        Returns:
            True if a row was updated (or nothing to change), False on
            rejected fields, missing user, or error.
        """
        try:
            # Remove sensitive fields that shouldn't be updated this way
            kwargs.pop('password_hash', None)
            kwargs.pop('id', None)

            if not kwargs:
                return True

            # Reject unknown fields instead of interpolating arbitrary
            # caller-supplied keys into the SQL statement.
            rejected = set(kwargs) - self._UPDATABLE_COLUMNS
            if rejected:
                self.logger.warning(f"Rejected non-updatable fields for user {user_id}: {sorted(rejected)}")
                return False

            # Build dynamic query (keys are validated above)
            set_clause = ', '.join(f"{key} = ?" for key in kwargs)
            values = list(kwargs.values()) + [user_id]

            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.execute(f'''
                    UPDATE users
                    SET {set_clause}, updated_at = CURRENT_TIMESTAMP
                    WHERE id = ?
                ''', values)

                success = cursor.rowcount > 0
                conn.commit()

            if success:
                self._log_user_activity(user_id, 'profile_update', f'Updated fields: {list(kwargs.keys())}')

            return success
        except Exception as e:
            self.logger.error(f"Error updating user {user_id}: {e}")
            return False

    def delete_user(self, user_id: int) -> bool:
        """Soft delete user (deactivate).

        Returns:
            True if the user was deactivated, False otherwise.
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.execute('''
                    UPDATE users
                    SET is_active = 0, updated_at = CURRENT_TIMESTAMP
                    WHERE id = ?
                ''', (user_id,))

                success = cursor.rowcount > 0
                conn.commit()

            if success:
                self._log_user_activity(user_id, 'account_deleted', 'User account deactivated')

            return success
        except Exception as e:
            self.logger.error(f"Error deleting user {user_id}: {e}")
            return False

    def change_password(self, user_id: int, new_password: str) -> bool:
        """Change user password.

        Returns:
            True if the password was changed, False otherwise.
        """
        try:
            password_hash = self._hash_password(new_password)

            with sqlite3.connect(self.db_path) as conn:
                cursor = conn.execute('''
                    UPDATE users
                    SET password_hash = ?, updated_at = CURRENT_TIMESTAMP
                    WHERE id = ?
                ''', (password_hash, user_id))

                success = cursor.rowcount > 0
                conn.commit()

            if success:
                self._log_user_activity(user_id, 'password_change', 'Password changed')

            return success
        except Exception as e:
            self.logger.error(f"Error changing password for user {user_id}: {e}")
            return False

    def create_password_reset_token(self, user_id: int) -> str:
        """Create a single-use password reset token (1 hour expiry).

        Returns:
            The URL-safe token string.

        Raises:
            Exception: re-raised after logging if the insert fails.
        """
        try:
            token = secrets.token_urlsafe(32)
            expires_at = datetime.now() + timedelta(hours=1)  # 1 hour expiry

            with sqlite3.connect(self.db_path) as conn:
                conn.execute('''
                    INSERT INTO password_reset_tokens (user_id, token, expires_at)
                    VALUES (?, ?, ?)
                ''', (user_id, token, self._ts(expires_at)))
                conn.commit()

            self._log_user_activity(user_id, 'password_reset_request', 'Password reset token created')
            return token
        except Exception as e:
            self.logger.error(f"Error creating password reset token for user {user_id}: {e}")
            raise

    def verify_reset_token(self, token: str) -> Optional[int]:
        """Verify password reset token and return user ID if valid.

        A valid token is unexpired and unused; verification consumes it
        (marks it used), so a token verifies at most once.
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                conn.row_factory = sqlite3.Row
                cursor = conn.execute('''
                    SELECT user_id FROM password_reset_tokens
                    WHERE token = ? AND expires_at > ? AND used = 0
                ''', (token, self._ts(datetime.now())))

                result = cursor.fetchone()
                if result:
                    user_id = result['user_id']

                    # Mark token as used
                    conn.execute('''
                        UPDATE password_reset_tokens
                        SET used = 1
                        WHERE token = ?
                    ''', (token,))
                    conn.commit()

                    return user_id

                return None
        except Exception as e:
            self.logger.error(f"Error verifying reset token: {e}")
            return None

    def get_user_activity(self, user_id: int, limit: int = 50, offset: int = 0) -> List[Dict[str, Any]]:
        """Get user activity log, newest first.

        Args:
            user_id: User whose activity to fetch.
            limit: Maximum rows to return.
            offset: Rows to skip (for pagination).

        Returns:
            List of activity rows as dicts; empty list on error.
        """
        try:
            with sqlite3.connect(self.db_path) as conn:
                conn.row_factory = sqlite3.Row
                cursor = conn.execute('''
                    SELECT * FROM user_activity
                    WHERE user_id = ?
                    ORDER BY created_at DESC
                    LIMIT ? OFFSET ?
                ''', (user_id, limit, offset))

                return [dict(row) for row in cursor.fetchall()]
        except Exception as e:
            self.logger.error(f"Error getting user activity for user {user_id}: {e}")
            return []

    def _log_user_activity(self, user_id: int, action: str, details: str = None,
                           ip_address: str = None, user_agent: str = None):
        """Log user activity (best-effort: failures are logged, not raised)."""
        try:
            with sqlite3.connect(self.db_path) as conn:
                conn.execute('''
                    INSERT INTO user_activity (user_id, action, details, ip_address, user_agent)
                    VALUES (?, ?, ?, ?, ?)
                ''', (user_id, action, details, ip_address, user_agent))
                conn.commit()
        except Exception as e:
            self.logger.error(f"Error logging user activity: {e}")
|
||||
@@ -1,48 +0,0 @@
|
||||
{
|
||||
"ui": {
|
||||
"theme": "auto",
|
||||
"density": "comfortable",
|
||||
"language": "en",
|
||||
"animations_enabled": true,
|
||||
"sidebar_collapsed": false,
|
||||
"grid_view": true,
|
||||
"items_per_page": 20
|
||||
},
|
||||
"downloads": {
|
||||
"auto_download": false,
|
||||
"download_quality": "best",
|
||||
"concurrent_downloads": 3,
|
||||
"retry_failed": true,
|
||||
"notification_sound": true,
|
||||
"auto_organize": true
|
||||
},
|
||||
"notifications": {
|
||||
"browser_notifications": true,
|
||||
"email_notifications": false,
|
||||
"webhook_notifications": false,
|
||||
"notification_types": {
|
||||
"download_complete": true,
|
||||
"download_error": true,
|
||||
"series_updated": false,
|
||||
"system_alerts": true
|
||||
}
|
||||
},
|
||||
"keyboard_shortcuts": {
|
||||
"enabled": true,
|
||||
"shortcuts": {
|
||||
"search": "ctrl+f",
|
||||
"download": "ctrl+d",
|
||||
"refresh": "f5",
|
||||
"select_all": "ctrl+a",
|
||||
"help": "f1",
|
||||
"settings": "ctrl+comma"
|
||||
}
|
||||
},
|
||||
"advanced": {
|
||||
"debug_mode": false,
|
||||
"performance_mode": false,
|
||||
"cache_enabled": true,
|
||||
"auto_backup": true,
|
||||
"log_level": "info"
|
||||
}
|
||||
}
|
||||
@@ -347,6 +347,22 @@ async def health_check() -> HealthResponse:
|
||||
}
|
||||
)
|
||||
|
||||
# Common browser requests that might cause "Invalid HTTP request received" warnings
|
||||
@app.get("/favicon.ico")
|
||||
async def favicon():
|
||||
"""Handle favicon requests from browsers."""
|
||||
return JSONResponse(status_code=404, content={"detail": "Favicon not found"})
|
||||
|
||||
@app.get("/robots.txt")
|
||||
async def robots():
|
||||
"""Handle robots.txt requests."""
|
||||
return JSONResponse(status_code=404, content={"detail": "Robots.txt not found"})
|
||||
|
||||
@app.get("/")
|
||||
async def root():
|
||||
"""Root endpoint redirect to docs."""
|
||||
return {"message": "AniWorld API", "documentation": "/docs", "health": "/health"}
|
||||
|
||||
# Anime endpoints (protected)
|
||||
@app.get("/api/anime/search", response_model=List[AnimeResponse], tags=["Anime"])
|
||||
async def search_anime(
|
||||
@@ -487,35 +503,46 @@ async def get_system_config(current_user: Dict = Depends(get_current_user)) -> D
|
||||
"version": "1.0.0"
|
||||
}
|
||||
|
||||
# Root endpoint
|
||||
@app.get("/", tags=["System"])
|
||||
async def root():
|
||||
"""
|
||||
Root endpoint with basic API information.
|
||||
"""
|
||||
return {
|
||||
"message": "AniWorld FastAPI Server",
|
||||
"version": "1.0.0",
|
||||
"docs": "/docs",
|
||||
"health": "/health"
|
||||
}
|
||||
|
||||
if __name__ == "__main__":
|
||||
import socket
|
||||
|
||||
# Configure enhanced logging
|
||||
log_level = getattr(logging, settings.log_level.upper(), logging.INFO)
|
||||
logging.getLogger().setLevel(log_level)
|
||||
|
||||
# Check if port is available
|
||||
def is_port_available(host: str, port: int) -> bool:
|
||||
"""Check if a port is available on the given host."""
|
||||
try:
|
||||
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
|
||||
sock.bind((host, port))
|
||||
return True
|
||||
except OSError:
|
||||
return False
|
||||
|
||||
host = "127.0.0.1"
|
||||
port = 8000
|
||||
|
||||
if not is_port_available(host, port):
|
||||
logger.error(f"Port {port} is already in use on {host}. Please stop other services or choose a different port.")
|
||||
logger.info("You can check which process is using the port with: netstat -ano | findstr :8000")
|
||||
sys.exit(1)
|
||||
|
||||
logger.info("Starting AniWorld FastAPI server with uvicorn...")
|
||||
logger.info(f"Anime directory: {settings.anime_directory}")
|
||||
logger.info(f"Log level: {settings.log_level}")
|
||||
logger.info("Server will be available at http://127.0.0.1:8000")
|
||||
logger.info("API documentation at http://127.0.0.1:8000/docs")
|
||||
logger.info(f"Server will be available at http://{host}:{port}")
|
||||
logger.info(f"API documentation at http://{host}:{port}/docs")
|
||||
|
||||
# Run the application
|
||||
uvicorn.run(
|
||||
"fastapi_app:app",
|
||||
host="127.0.0.1",
|
||||
port=8000,
|
||||
reload=False, # Disable reload to prevent constant restarting
|
||||
log_level=settings.log_level.lower()
|
||||
)
|
||||
try:
|
||||
# Run the application
|
||||
uvicorn.run(
|
||||
"fastapi_app:app",
|
||||
host=host,
|
||||
port=port,
|
||||
reload=False, # Disable reload to prevent constant restarting
|
||||
log_level=settings.log_level.lower()
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to start server: {e}")
|
||||
sys.exit(1)
|
||||
@@ -1,6 +0,0 @@
|
||||
"""
|
||||
Infrastructure package for the Aniworld server.
|
||||
|
||||
This package contains repository implementations, database connections,
|
||||
caching, and other infrastructure concerns.
|
||||
"""
|
||||
@@ -1,916 +0,0 @@
|
||||
"""
|
||||
Database & Storage Management for AniWorld App
|
||||
|
||||
This module provides database schema management, data migration,
|
||||
backup/restore functionality, and storage optimization.
|
||||
"""
|
||||
|
||||
import os
|
||||
import sqlite3
|
||||
import json
|
||||
import shutil
|
||||
import time
|
||||
import hashlib
|
||||
import logging
|
||||
import threading
|
||||
import zipfile
|
||||
import uuid
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Dict, List, Optional, Any, Tuple
|
||||
from dataclasses import dataclass, field
|
||||
from contextlib import contextmanager
|
||||
import glob
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
@dataclass
class AnimeMetadata:
    """Represents anime metadata stored in database (``anime_metadata`` table)."""
    anime_id: str                                                # primary key
    name: str                                                    # display name
    folder: str                                                  # unique on-disk folder name
    key: Optional[str] = None                                    # provider key — purpose not evident here; TODO confirm
    description: Optional[str] = None
    genres: List[str] = field(default_factory=list)              # stored as JSON array in the DB
    release_year: Optional[int] = None
    status: str = 'ongoing'  # ongoing, completed, cancelled
    total_episodes: Optional[int] = None
    poster_url: Optional[str] = None
    last_updated: datetime = field(default_factory=datetime.now)
    created_at: datetime = field(default_factory=datetime.now)
    custom_metadata: Dict[str, Any] = field(default_factory=dict)  # stored as JSON object in the DB
|
||||
|
||||
|
||||
@dataclass
class EpisodeMetadata:
    """Represents episode metadata stored in database (``episode_metadata`` table)."""
    episode_id: str                           # primary key
    anime_id: str                             # FK to AnimeMetadata.anime_id
    season: int
    episode: int
    title: Optional[str] = None
    description: Optional[str] = None
    duration_seconds: Optional[int] = None
    file_path: Optional[str] = None           # local path once downloaded
    file_size_bytes: Optional[int] = None
    download_date: Optional[datetime] = None
    last_watched: Optional[datetime] = None
    watch_count: int = 0
    is_downloaded: bool = False
    quality: Optional[str] = None             # presumably e.g. '720p' — TODO confirm format
    language: str = 'German Dub'
|
||||
|
||||
|
||||
@dataclass
class BackupInfo:
    """Represents backup metadata."""
    backup_id: str
    backup_path: str                                      # location of the backup archive
    backup_type: str  # full, incremental, metadata_only
    created_at: datetime
    size_bytes: int
    description: Optional[str] = None
    tables_included: List[str] = field(default_factory=list)
    checksum: Optional[str] = None                        # integrity checksum of the archive — algorithm not evident here
|
||||
|
||||
|
||||
class DatabaseManager:
|
||||
"""Manage SQLite database with migrations and maintenance."""
|
||||
|
||||
    def __init__(self, db_path: str = "./data/aniworld.db"):
        """Create the manager, ensure the DB directory exists, and bring
        the schema up to date.

        Args:
            db_path: Location of the SQLite database file.
        """
        self.db_path = db_path
        self.db_dir = os.path.dirname(db_path)
        self.logger = logging.getLogger(__name__)
        # Lock for callers that need serialized access; connections
        # themselves are created per operation.
        self.lock = threading.Lock()

        # Create database directory
        os.makedirs(self.db_dir, exist_ok=True)

        # Initialize database (creates the schema_version bookkeeping table)
        self.initialize_database()

        # Run migrations (applies any schema versions newer than current)
        self.run_migrations()
|
||||
|
||||
    @contextmanager
    def get_connection(self):
        """Get database connection with proper error handling.

        Yields:
            sqlite3.Connection with ``row_factory = sqlite3.Row`` so rows
            support dict-like access.

        On any exception the transaction is rolled back, the error is
        logged and re-raised; the connection is always closed on exit.
        """
        conn = None
        try:
            # 30 s busy timeout: waits for other writers instead of
            # failing immediately with "database is locked".
            conn = sqlite3.connect(self.db_path, timeout=30)
            conn.row_factory = sqlite3.Row  # Enable dict-like access
            yield conn
        except Exception as e:
            if conn:
                conn.rollback()
            self.logger.error(f"Database connection error: {e}")
            raise
        finally:
            if conn:
                conn.close()
|
||||
|
||||
    def initialize_database(self):
        """Initialize database with base schema.

        Creates the ``schema_version`` bookkeeping table and seeds it
        with version 0 so ``get_current_version`` always has a row.
        Idempotent: uses IF NOT EXISTS / INSERT OR IGNORE.
        """
        with self.get_connection() as conn:
            # Create schema version table
            conn.execute("""
                CREATE TABLE IF NOT EXISTS schema_version (
                    version INTEGER PRIMARY KEY,
                    applied_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                    description TEXT
                )
            """)

            # Insert initial version if not exists
            conn.execute("""
                INSERT OR IGNORE INTO schema_version (version, description)
                VALUES (0, 'Initial schema')
            """)

            conn.commit()
|
||||
|
||||
    def get_current_version(self) -> int:
        """Get current database schema version.

        Returns:
            Highest applied version number, or 0 if none recorded.
        """
        with self.get_connection() as conn:
            cursor = conn.execute("SELECT MAX(version) FROM schema_version")
            result = cursor.fetchone()
            # MAX() returns a single row with NULL when the table is empty.
            return result[0] if result and result[0] is not None else 0
|
||||
|
||||
def run_migrations(self):
|
||||
"""Run database migrations."""
|
||||
current_version = self.get_current_version()
|
||||
migrations = self.get_migrations()
|
||||
|
||||
for version, migration in migrations.items():
|
||||
if version > current_version:
|
||||
self.logger.info(f"Running migration to version {version}")
|
||||
try:
|
||||
with self.get_connection() as conn:
|
||||
migration['up'](conn)
|
||||
|
||||
# Record migration
|
||||
conn.execute("""
|
||||
INSERT INTO schema_version (version, description)
|
||||
VALUES (?, ?)
|
||||
""", (version, migration['description']))
|
||||
|
||||
conn.commit()
|
||||
self.logger.info(f"Migration to version {version} completed")
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"Migration to version {version} failed: {e}")
|
||||
raise
|
||||
|
||||
    def get_migrations(self) -> Dict[int, Dict[str, Any]]:
        """Define database migrations.

        Returns:
            Mapping of version number -> {'description': str,
            'up': callable(conn)}.  Consumed by ``run_migrations``.
        """
        return {
            1: {
                'description': 'Create anime metadata table',
                'up': self._migration_001_anime_table
            },
            2: {
                'description': 'Create episode metadata table',
                'up': self._migration_002_episode_table
            },
            3: {
                'description': 'Create download history table',
                'up': self._migration_003_download_history
            },
            4: {
                'description': 'Create user preferences table',
                'up': self._migration_004_user_preferences
            },
            5: {
                'description': 'Create storage locations table',
                'up': self._migration_005_storage_locations
            },
            6: {
                'description': 'Add indexes for performance',
                'up': self._migration_006_indexes
            }
        }
|
||||
|
||||
    def _migration_001_anime_table(self, conn: sqlite3.Connection):
        """Create anime metadata table.

        No IF NOT EXISTS: migrations run at most once per version, and a
        pre-existing table should fail loudly rather than be silently
        reused.
        """
        conn.execute("""
            CREATE TABLE anime_metadata (
                anime_id TEXT PRIMARY KEY,
                name TEXT NOT NULL,
                folder TEXT NOT NULL UNIQUE,
                key TEXT,
                description TEXT,
                genres TEXT, -- JSON array
                release_year INTEGER,
                status TEXT DEFAULT 'ongoing',
                total_episodes INTEGER,
                poster_url TEXT,
                last_updated TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                custom_metadata TEXT -- JSON object
            )
        """)
|
||||
|
||||
    def _migration_002_episode_table(self, conn: sqlite3.Connection):
        """Create episode metadata table.

        One row per (anime, season, episode, language) combination,
        enforced by the UNIQUE constraint.
        """
        conn.execute("""
            CREATE TABLE episode_metadata (
                episode_id TEXT PRIMARY KEY,
                anime_id TEXT NOT NULL,
                season INTEGER NOT NULL,
                episode INTEGER NOT NULL,
                title TEXT,
                description TEXT,
                duration_seconds INTEGER,
                file_path TEXT,
                file_size_bytes INTEGER,
                download_date TIMESTAMP,
                last_watched TIMESTAMP,
                watch_count INTEGER DEFAULT 0,
                is_downloaded BOOLEAN DEFAULT FALSE,
                quality TEXT,
                language TEXT DEFAULT 'German Dub',
                FOREIGN KEY (anime_id) REFERENCES anime_metadata(anime_id),
                UNIQUE(anime_id, season, episode, language)
            )
        """)
|
||||
|
||||
def _migration_003_download_history(self, conn: sqlite3.Connection):
    """Create the download_history table (migration 3).

    Append-only log of download attempts; download_status tracks the
    lifecycle (started, completed, failed, cancelled).
    """
    conn.execute("""
        CREATE TABLE download_history (
            download_id TEXT PRIMARY KEY,
            anime_id TEXT NOT NULL,
            season INTEGER NOT NULL,
            episode INTEGER NOT NULL,
            language TEXT NOT NULL,
            download_started TIMESTAMP NOT NULL,
            download_completed TIMESTAMP,
            download_status TEXT NOT NULL, -- started, completed, failed, cancelled
            file_size_bytes INTEGER,
            download_speed_mbps REAL,
            error_message TEXT,
            retry_count INTEGER DEFAULT 0,
            FOREIGN KEY (anime_id) REFERENCES anime_metadata(anime_id)
        )
    """)
def _migration_004_user_preferences(self, conn: sqlite3.Connection):
    """Create the user_preferences table (migration 4).

    Simple key/value store; value holds JSON text so arbitrary setting
    shapes can be persisted without schema changes.
    """
    conn.execute("""
        CREATE TABLE user_preferences (
            key TEXT PRIMARY KEY,
            value TEXT NOT NULL, -- JSON value
            category TEXT NOT NULL,
            description TEXT,
            updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
        )
    """)
def _migration_005_storage_locations(self, conn: sqlite3.Connection):
    """Create the storage_locations table (migration 5).

    Tracks filesystem locations (primary/backup/cache) with cached disk
    usage figures; anime_id is nullable for shared locations.
    """
    conn.execute("""
        CREATE TABLE storage_locations (
            location_id TEXT PRIMARY KEY,
            anime_id TEXT,
            path TEXT NOT NULL,
            location_type TEXT NOT NULL, -- primary, backup, cache
            is_active BOOLEAN DEFAULT TRUE,
            free_space_bytes INTEGER,
            total_space_bytes INTEGER,
            last_checked TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
            FOREIGN KEY (anime_id) REFERENCES anime_metadata(anime_id)
        )
    """)
def _migration_006_indexes(self, conn: sqlite3.Connection):
    """Add query-performance indexes (migration 6).

    Idempotent: an index that already exists is silently skipped, while
    any other OperationalError (e.g. a missing table) is re-raised.
    """
    index_statements = (
        "CREATE INDEX idx_anime_name ON anime_metadata(name)",
        "CREATE INDEX idx_anime_folder ON anime_metadata(folder)",
        "CREATE INDEX idx_anime_status ON anime_metadata(status)",
        "CREATE INDEX idx_episode_anime_id ON episode_metadata(anime_id)",
        "CREATE INDEX idx_episode_season_episode ON episode_metadata(season, episode)",
        "CREATE INDEX idx_episode_downloaded ON episode_metadata(is_downloaded)",
        "CREATE INDEX idx_download_status ON download_history(download_status)",
        "CREATE INDEX idx_download_date ON download_history(download_started)",
        "CREATE INDEX idx_storage_active ON storage_locations(is_active)",
        "CREATE INDEX idx_storage_type ON storage_locations(location_type)",
    )

    for statement in index_statements:
        try:
            conn.execute(statement)
        except sqlite3.OperationalError as exc:
            # Tolerate re-running the migration; surface anything else.
            if "already exists" not in str(exc):
                raise
def execute_query(self, query: str, params: tuple = ()) -> List[sqlite3.Row]:
    """Run a read-only SQL statement and return every matching row."""
    with self.get_connection() as connection:
        return connection.execute(query, params).fetchall()
def execute_update(self, query: str, params: tuple = ()) -> int:
    """Run a mutating SQL statement, commit, and return the affected row count."""
    with self.get_connection() as connection:
        result_cursor = connection.execute(query, params)
        connection.commit()
        return result_cursor.rowcount
class AnimeRepository:
    """Repository for anime data operations.

    Thin CRUD layer over the anime_metadata table. Every public method
    swallows exceptions, logs them, and returns a neutral value
    (False / None / []), so callers never see database errors directly.
    """

    def __init__(self, db_manager: DatabaseManager):
        # Shared query/update executor; repository owns no connection itself.
        self.db = db_manager
        self.logger = logging.getLogger(__name__)

    def create_anime(self, metadata: AnimeMetadata) -> bool:
        """Insert a new anime record.

        genres and custom_metadata are serialized to JSON text to match
        the schema. Returns True only when a row was actually inserted.
        """
        try:
            query = """
                INSERT INTO anime_metadata (
                    anime_id, name, folder, key, description, genres,
                    release_year, status, total_episodes, poster_url,
                    custom_metadata
                ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            """

            params = (
                metadata.anime_id,
                metadata.name,
                metadata.folder,
                metadata.key,
                metadata.description,
                json.dumps(metadata.genres),
                metadata.release_year,
                metadata.status,
                metadata.total_episodes,
                metadata.poster_url,
                json.dumps(metadata.custom_metadata)
            )

            rows_affected = self.db.execute_update(query, params)
            return rows_affected > 0

        except Exception as e:
            self.logger.error(f"Failed to create anime {metadata.name}: {e}")
            return False

    def get_anime_by_folder(self, folder: str) -> Optional[AnimeMetadata]:
        """Look up one anime by folder name (folder is UNIQUE in the schema)."""
        try:
            query = """
                SELECT * FROM anime_metadata WHERE folder = ?
            """

            results = self.db.execute_query(query, (folder,))

            if results:
                row = results[0]
                return self._row_to_anime_metadata(row)

            return None

        except Exception as e:
            self.logger.error(f"Failed to get anime by folder {folder}: {e}")
            return None

    def get_all_anime(self, status_filter: Optional[str] = None) -> List[AnimeMetadata]:
        """Return all anime ordered by name, optionally filtered by status."""
        try:
            if status_filter:
                query = "SELECT * FROM anime_metadata WHERE status = ? ORDER BY name"
                params = (status_filter,)
            else:
                query = "SELECT * FROM anime_metadata ORDER BY name"
                params = ()

            results = self.db.execute_query(query, params)

            return [self._row_to_anime_metadata(row) for row in results]

        except Exception as e:
            self.logger.error(f"Failed to get all anime: {e}")
            return []

    def update_anime(self, metadata: AnimeMetadata) -> bool:
        """Update an existing anime by anime_id.

        folder and created_at are intentionally not touched; last_updated
        is refreshed by the database. Returns False if the id is unknown.
        """
        try:
            query = """
                UPDATE anime_metadata SET
                    name = ?, key = ?, description = ?, genres = ?,
                    release_year = ?, status = ?, total_episodes = ?,
                    poster_url = ?, last_updated = CURRENT_TIMESTAMP,
                    custom_metadata = ?
                WHERE anime_id = ?
            """

            params = (
                metadata.name,
                metadata.key,
                metadata.description,
                json.dumps(metadata.genres),
                metadata.release_year,
                metadata.status,
                metadata.total_episodes,
                metadata.poster_url,
                json.dumps(metadata.custom_metadata),
                metadata.anime_id
            )

            rows_affected = self.db.execute_update(query, params)
            return rows_affected > 0

        except Exception as e:
            self.logger.error(f"Failed to update anime {metadata.anime_id}: {e}")
            return False

    def delete_anime(self, anime_id: str) -> bool:
        """Delete an anime and all dependent rows (episodes, history, storage)."""
        try:
            # Delete episodes first (foreign key constraint)
            self.db.execute_update("DELETE FROM episode_metadata WHERE anime_id = ?", (anime_id,))
            self.db.execute_update("DELETE FROM download_history WHERE anime_id = ?", (anime_id,))
            self.db.execute_update("DELETE FROM storage_locations WHERE anime_id = ?", (anime_id,))

            # Delete anime
            rows_affected = self.db.execute_update("DELETE FROM anime_metadata WHERE anime_id = ?", (anime_id,))

            return rows_affected > 0

        except Exception as e:
            self.logger.error(f"Failed to delete anime {anime_id}: {e}")
            return False

    def search_anime(self, search_term: str) -> List[AnimeMetadata]:
        """Substring search over name and description via LIKE.

        NOTE(review): '%' and '_' inside search_term act as SQL wildcards
        because the term is embedded unescaped in the LIKE pattern —
        confirm this is intended.
        """
        try:
            query = """
                SELECT * FROM anime_metadata
                WHERE name LIKE ? OR description LIKE ?
                ORDER BY name
            """

            search_pattern = f"%{search_term}%"
            results = self.db.execute_query(query, (search_pattern, search_pattern))

            return [self._row_to_anime_metadata(row) for row in results]

        except Exception as e:
            self.logger.error(f"Failed to search anime: {e}")
            return []

    def _row_to_anime_metadata(self, row: sqlite3.Row) -> AnimeMetadata:
        """Convert a database row into an AnimeMetadata object.

        JSON columns decode with empty-collection fallbacks; NULL
        timestamps fall back to datetime.now() rather than None.
        """
        return AnimeMetadata(
            anime_id=row['anime_id'],
            name=row['name'],
            folder=row['folder'],
            key=row['key'],
            description=row['description'],
            genres=json.loads(row['genres'] or '[]'),
            release_year=row['release_year'],
            status=row['status'],
            total_episodes=row['total_episodes'],
            poster_url=row['poster_url'],
            last_updated=datetime.fromisoformat(row['last_updated']) if row['last_updated'] else datetime.now(),
            created_at=datetime.fromisoformat(row['created_at']) if row['created_at'] else datetime.now(),
            custom_metadata=json.loads(row['custom_metadata'] or '{}')
        )
class BackupManager:
    """Manage database backups and restore operations.

    Two backup flavors: 'full' (byte copy of the SQLite file) and
    'metadata_only' (selected tables exported to JSON). Each backup gets
    a sidecar ``<path>.backup_info.json`` file with its metadata and a
    SHA256 checksum used for integrity verification before restore.
    """

    def __init__(self, db_manager: DatabaseManager, backup_dir: str = "./backups"):
        self.db = db_manager
        self.backup_dir = backup_dir
        self.logger = logging.getLogger(__name__)

        # Create backup directory (no-op if it already exists)
        os.makedirs(backup_dir, exist_ok=True)

    def create_full_backup(self, description: Optional[str] = None) -> Optional[BackupInfo]:
        """Create a full database backup (file copy). Returns None on failure.

        NOTE(review): copies the live SQLite file with shutil.copy2 without
        any locking/online-backup API — assumes no concurrent writer. Confirm.
        """
        try:
            backup_id = f"full_{datetime.now().strftime('%Y%m%d_%H%M%S')}"
            backup_filename = f"{backup_id}.db"
            backup_path = os.path.join(self.backup_dir, backup_filename)

            # Copy database file
            shutil.copy2(self.db.db_path, backup_path)

            # Calculate checksum
            checksum = self._calculate_file_checksum(backup_path)

            # Get file size
            size_bytes = os.path.getsize(backup_path)

            # Get table list
            with self.db.get_connection() as conn:
                cursor = conn.execute("SELECT name FROM sqlite_master WHERE type='table'")
                tables = [row[0] for row in cursor.fetchall()]

            backup_info = BackupInfo(
                backup_id=backup_id,
                backup_path=backup_path,
                backup_type='full',
                created_at=datetime.now(),
                size_bytes=size_bytes,
                description=description or f"Full backup created on {datetime.now().strftime('%Y-%m-%d %H:%M')}",
                tables_included=tables,
                checksum=checksum
            )

            # Save backup metadata
            self._save_backup_metadata(backup_info)

            self.logger.info(f"Full backup created: {backup_id}")
            return backup_info

        except Exception as e:
            self.logger.error(f"Failed to create full backup: {e}")
            return None

    def create_metadata_backup(self, description: Optional[str] = None) -> Optional[BackupInfo]:
        """Create a metadata-only backup (selected tables exported to JSON)."""
        try:
            backup_id = f"metadata_{datetime.now().strftime('%Y%m%d_%H%M%S')}"
            backup_filename = f"{backup_id}.json"
            backup_path = os.path.join(self.backup_dir, backup_filename)

            # Export metadata as JSON
            metadata = self._export_metadata()

            with open(backup_path, 'w', encoding='utf-8') as f:
                # default=str stringifies non-JSON types (e.g. datetimes)
                json.dump(metadata, f, indent=2, default=str)

            # Calculate checksum
            checksum = self._calculate_file_checksum(backup_path)

            # Get file size
            size_bytes = os.path.getsize(backup_path)

            backup_info = BackupInfo(
                backup_id=backup_id,
                backup_path=backup_path,
                backup_type='metadata_only',
                created_at=datetime.now(),
                size_bytes=size_bytes,
                description=description or f"Metadata backup created on {datetime.now().strftime('%Y-%m-%d %H:%M')}",
                tables_included=['anime_metadata', 'episode_metadata', 'user_preferences'],
                checksum=checksum
            )

            # Save backup metadata
            self._save_backup_metadata(backup_info)

            self.logger.info(f"Metadata backup created: {backup_id}")
            return backup_info

        except Exception as e:
            self.logger.error(f"Failed to create metadata backup: {e}")
            return None

    def restore_backup(self, backup_id: str) -> bool:
        """Restore from a backup.

        Verifies existence and checksum first, then takes a safety backup
        of the current database before overwriting/importing anything.
        """
        try:
            backup_info = self._load_backup_metadata(backup_id)
            if not backup_info:
                self.logger.error(f"Backup not found: {backup_id}")
                return False

            if not os.path.exists(backup_info.backup_path):
                self.logger.error(f"Backup file not found: {backup_info.backup_path}")
                return False

            # Verify backup integrity
            if not self._verify_backup_integrity(backup_info):
                self.logger.error(f"Backup integrity check failed: {backup_id}")
                return False

            # Create a backup of current database before restore
            # NOTE(review): the result is unused — a failed safety backup
            # does not abort the restore. Confirm this is intended.
            current_backup = self.create_full_backup(f"Pre-restore backup before restoring {backup_id}")

            if backup_info.backup_type == 'full':
                # Replace database file
                shutil.copy2(backup_info.backup_path, self.db.db_path)

            elif backup_info.backup_type == 'metadata_only':
                # Restore metadata from JSON
                with open(backup_info.backup_path, 'r', encoding='utf-8') as f:
                    metadata = json.load(f)

                self._import_metadata(metadata)

            self.logger.info(f"Backup restored successfully: {backup_id}")
            return True

        except Exception as e:
            self.logger.error(f"Failed to restore backup {backup_id}: {e}")
            return False

    def list_backups(self) -> List[BackupInfo]:
        """List all available backups, newest first.

        Built from the sidecar metadata files; unreadable ones are logged
        and skipped rather than failing the whole listing.
        """
        backups = []

        try:
            # Look for backup metadata files
            metadata_pattern = os.path.join(self.backup_dir, "*.backup_info.json")

            for metadata_file in glob.glob(metadata_pattern):
                try:
                    with open(metadata_file, 'r') as f:
                        backup_data = json.load(f)

                    backup_info = BackupInfo(
                        backup_id=backup_data['backup_id'],
                        backup_path=backup_data['backup_path'],
                        backup_type=backup_data['backup_type'],
                        created_at=datetime.fromisoformat(backup_data['created_at']),
                        size_bytes=backup_data['size_bytes'],
                        description=backup_data.get('description'),
                        tables_included=backup_data.get('tables_included', []),
                        checksum=backup_data.get('checksum')
                    )

                    backups.append(backup_info)

                except Exception as e:
                    self.logger.warning(f"Failed to load backup metadata from {metadata_file}: {e}")

            # Sort by creation date (newest first)
            backups.sort(key=lambda b: b.created_at, reverse=True)

        except Exception as e:
            self.logger.error(f"Failed to list backups: {e}")

        return backups

    def cleanup_old_backups(self, keep_days: int = 30, keep_count: int = 10):
        """Clean up old backup files.

        A backup is removed only when it is BOTH older than keep_days AND
        beyond the keep_count newest — the newest keep_count always survive.
        """
        try:
            backups = self.list_backups()
            cutoff_date = datetime.now() - timedelta(days=keep_days)

            # Keep at least keep_count backups regardless of age
            backups_to_delete = []

            for i, backup in enumerate(backups):
                if i >= keep_count and backup.created_at < cutoff_date:
                    backups_to_delete.append(backup)

            for backup in backups_to_delete:
                try:
                    # Remove backup file
                    if os.path.exists(backup.backup_path):
                        os.remove(backup.backup_path)

                    # Remove metadata file
                    metadata_file = f"{backup.backup_path}.backup_info.json"
                    if os.path.exists(metadata_file):
                        os.remove(metadata_file)

                    self.logger.info(f"Removed old backup: {backup.backup_id}")

                except Exception as e:
                    self.logger.warning(f"Failed to remove backup {backup.backup_id}: {e}")

            if backups_to_delete:
                self.logger.info(f"Cleaned up {len(backups_to_delete)} old backups")

        except Exception as e:
            self.logger.error(f"Failed to cleanup old backups: {e}")

    def _export_metadata(self) -> Dict[str, Any]:
        """Export selected tables to a plain dictionary for JSON backup."""
        metadata = {
            'export_date': datetime.now().isoformat(),
            'schema_version': self.db.get_current_version(),
            'tables': {}
        }

        # Export specific tables
        tables_to_export = ['anime_metadata', 'episode_metadata', 'user_preferences', 'storage_locations']

        with self.db.get_connection() as conn:
            for table in tables_to_export:
                try:
                    cursor = conn.execute(f"SELECT * FROM {table}")
                    rows = cursor.fetchall()

                    # Convert rows to dictionaries
                    metadata['tables'][table] = [dict(row) for row in rows]

                except Exception as e:
                    self.logger.warning(f"Failed to export table {table}: {e}")

        return metadata

    def _import_metadata(self, metadata: Dict[str, Any]):
        """Import a _export_metadata() dictionary back into the database.

        DESTRUCTIVE: each imported table is fully DELETEd before insert.
        A failure rolls back and re-raises so the caller sees the error.
        """
        with self.db.get_connection() as conn:
            for table_name, rows in metadata.get('tables', {}).items():
                if not rows:
                    continue

                try:
                    # Clear existing data (be careful!)
                    conn.execute(f"DELETE FROM {table_name}")

                    # Insert new data
                    if rows:
                        columns = list(rows[0].keys())
                        placeholders = ','.join(['?' for _ in columns])
                        insert_sql = f"INSERT INTO {table_name} ({','.join(columns)}) VALUES ({placeholders})"

                        for row in rows:
                            values = [row[col] for col in columns]
                            conn.execute(insert_sql, values)

                    conn.commit()
                    self.logger.info(f"Imported {len(rows)} rows to {table_name}")

                except Exception as e:
                    self.logger.error(f"Failed to import table {table_name}: {e}")
                    conn.rollback()
                    raise

    def _calculate_file_checksum(self, file_path: str) -> str:
        """Calculate the SHA256 checksum of a file, streamed in 4 KiB chunks."""
        hash_sha256 = hashlib.sha256()
        with open(file_path, 'rb') as f:
            for chunk in iter(lambda: f.read(4096), b""):
                hash_sha256.update(chunk)
        return hash_sha256.hexdigest()

    def _verify_backup_integrity(self, backup_info: BackupInfo) -> bool:
        """Verify backup file integrity; trivially True when no checksum stored."""
        if not backup_info.checksum:
            return True  # No checksum to verify

        current_checksum = self._calculate_file_checksum(backup_info.backup_path)
        return current_checksum == backup_info.checksum

    def _save_backup_metadata(self, backup_info: BackupInfo):
        """Write the sidecar ``<backup_path>.backup_info.json`` metadata file."""
        metadata_file = f"{backup_info.backup_path}.backup_info.json"

        metadata = {
            'backup_id': backup_info.backup_id,
            'backup_path': backup_info.backup_path,
            'backup_type': backup_info.backup_type,
            'created_at': backup_info.created_at.isoformat(),
            'size_bytes': backup_info.size_bytes,
            'description': backup_info.description,
            'tables_included': backup_info.tables_included,
            'checksum': backup_info.checksum
        }

        with open(metadata_file, 'w') as f:
            json.dump(metadata, f, indent=2)

    def _load_backup_metadata(self, backup_id: str) -> Optional[BackupInfo]:
        """Load one backup's sidecar metadata by id; None if absent/unreadable."""
        # Look for metadata file
        metadata_pattern = os.path.join(self.backup_dir, f"{backup_id}.*.backup_info.json")
        metadata_files = glob.glob(metadata_pattern)

        if not metadata_files:
            return None

        try:
            with open(metadata_files[0], 'r') as f:
                backup_data = json.load(f)

            return BackupInfo(
                backup_id=backup_data['backup_id'],
                backup_path=backup_data['backup_path'],
                backup_type=backup_data['backup_type'],
                created_at=datetime.fromisoformat(backup_data['created_at']),
                size_bytes=backup_data['size_bytes'],
                description=backup_data.get('description'),
                tables_included=backup_data.get('tables_included', []),
                checksum=backup_data.get('checksum')
            )

        except Exception as e:
            self.logger.error(f"Failed to load backup metadata for {backup_id}: {e}")
            return None
class StorageManager:
    """Manage storage locations and usage monitoring.

    Persists locations in the storage_locations table and caches disk
    usage figures (free/total bytes) per location.
    """

    def __init__(self, db_manager: DatabaseManager):
        """Bind the manager to the shared database access layer."""
        self.db = db_manager
        self.logger = logging.getLogger(__name__)

    def add_storage_location(self, path: str, location_type: str = 'primary', anime_id: Optional[str] = None) -> str:
        """Register a new storage location and return its generated id.

        Args:
            path: Filesystem path of the location.
            location_type: 'primary', 'backup' or 'cache' (per schema).
            anime_id: Optional anime this location is dedicated to.

        Returns:
            The new location's UUID string.
        """
        location_id = str(uuid.uuid4())

        query = """
            INSERT INTO storage_locations
            (location_id, anime_id, path, location_type, is_active)
            VALUES (?, ?, ?, ?, ?)
        """

        self.db.execute_update(query, (location_id, anime_id, path, location_type, True))

        # Populate free/total space right away so the row is never stale.
        self.update_storage_stats(location_id)

        return location_id

    def update_storage_stats(self, location_id: str):
        """Refresh cached free/total disk space for one location (best effort).

        Unknown ids and missing paths are silently ignored; any error is
        logged rather than raised.
        """
        try:
            # Get location path
            query = "SELECT path FROM storage_locations WHERE location_id = ?"
            results = self.db.execute_query(query, (location_id,))

            if not results:
                return

            path = results[0]['path']

            if os.path.exists(path):
                # Get disk usage
                stat = shutil.disk_usage(path)

                # Update database
                update_query = """
                    UPDATE storage_locations
                    SET free_space_bytes = ?, total_space_bytes = ?, last_checked = CURRENT_TIMESTAMP
                    WHERE location_id = ?
                """

                self.db.execute_update(update_query, (stat.free, stat.total, location_id))

        except Exception as e:
            self.logger.error(f"Failed to update storage stats for {location_id}: {e}")

    def get_storage_summary(self) -> Dict[str, Any]:
        """Aggregate active locations by type.

        Returns:
            Mapping location_type -> {location_count, total_free_gb,
            total_space_gb, usage_percent}. NULL space sums (stats never
            collected) are treated as 0.
        """
        query = """
            SELECT
                location_type,
                COUNT(*) as location_count,
                SUM(free_space_bytes) as total_free,
                SUM(total_space_bytes) as total_space
            FROM storage_locations
            WHERE is_active = 1
            GROUP BY location_type
        """

        results = self.db.execute_query(query)

        summary = {}
        for row in results:
            # BUGFIX: coalesce NULL sums before arithmetic. Previously a
            # group with total_space set but total_free NULL raised
            # TypeError (int - None) when computing usage_percent.
            total_free = row['total_free'] or 0
            total_space = row['total_space'] or 0
            summary[row['location_type']] = {
                'location_count': row['location_count'],
                'total_free_gb': total_free / (1024**3),
                'total_space_gb': total_space / (1024**3),
                'usage_percent': ((total_space - total_free) / total_space * 100) if total_space else 0
            }

        return summary
||||
# Global instances
# Module-level singletons constructed at import time and shared by all
# importers (also exported via __all__ later in this module).
# NOTE(review): DatabaseManager() presumably opens/creates the SQLite file
# as an import-time side effect — confirm before moving this module.
database_manager = DatabaseManager()
anime_repository = AnimeRepository(database_manager)
backup_manager = BackupManager(database_manager)
storage_manager = StorageManager(database_manager)
def init_database_system():
    """Initialize the database system.

    Intentionally a no-op: the module-level DatabaseManager performs all
    setup when it is constructed. The hook exists for lifecycle symmetry
    with cleanup_database_system().
    """
    return None
||||
def cleanup_database_system():
    """Clean up database resources.

    Intentionally a no-op: SQLite needs no explicit teardown here. Kept
    as an explicit shutdown hook for callers.
    """
    return None
# Export main components
# Declares the module's public API: the four classes, their data types,
# the shared singleton instances, and the lifecycle helpers.
__all__ = [
    'DatabaseManager',
    'AnimeRepository',
    'BackupManager',
    'StorageManager',
    'AnimeMetadata',
    'EpisodeMetadata',
    'BackupInfo',
    'database_manager',
    'anime_repository',
    'backup_manager',
    'storage_manager',
    'init_database_system',
    'cleanup_database_system'
]
||||
537
src/server/infrastructure/external/api_client.py
vendored
537
src/server/infrastructure/external/api_client.py
vendored
@@ -1,537 +0,0 @@
|
||||
"""
|
||||
REST API & Integration Module for AniWorld App
|
||||
|
||||
This module provides comprehensive REST API endpoints for external integrations,
|
||||
webhook support, API authentication, and export functionality.
|
||||
"""
|
||||
|
||||
import json
|
||||
import csv
|
||||
import io
|
||||
import uuid
|
||||
import hmac
|
||||
import hashlib
|
||||
import time
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Dict, List, Optional, Any, Callable
|
||||
from functools import wraps
|
||||
import logging
|
||||
import requests
|
||||
import threading
|
||||
from dataclasses import dataclass, field
|
||||
|
||||
from flask import Blueprint, request, jsonify, make_response, current_app
|
||||
from werkzeug.security import generate_password_hash, check_password_hash
|
||||
|
||||
from auth import require_auth, optional_auth
|
||||
from error_handler import handle_api_errors, RetryableError, NonRetryableError
|
||||
|
||||
|
||||
@dataclass
class APIKey:
    """Represents an API key for external integrations.

    Only a password hash of the raw key is stored (see
    APIKeyManager.create_api_key); the raw key itself is never retained.
    """
    key_id: str                        # internal identifier (UUID string)
    name: str                          # human-readable label
    key_hash: str                      # werkzeug password hash of the raw key
    permissions: List[str]             # permission strings granted to this key
    rate_limit_per_hour: int = 1000    # max requests per clock hour
    created_at: datetime = field(default_factory=datetime.now)
    last_used: Optional[datetime] = None   # set on each successful validation
    is_active: bool = True             # False once revoked
@dataclass
class WebhookEndpoint:
    """Represents a webhook endpoint configuration.

    When secret is set, deliveries carry an HMAC-SHA256 signature in the
    X-Webhook-Signature header (see WebhookManager._deliver_webhook).
    """
    webhook_id: str                    # internal identifier (UUID string)
    name: str                          # human-readable label
    url: str                           # destination POSTed to on events
    events: List[str]                  # event type strings subscribed to
    secret: Optional[str] = None       # optional HMAC signing secret
    is_active: bool = True             # inactive endpoints receive nothing
    retry_attempts: int = 3            # delivery attempts before giving up
    created_at: datetime = field(default_factory=datetime.now)
    last_triggered: Optional[datetime] = None   # last successful delivery
class APIKeyManager:
    """Manage API keys for external integrations.

    Keys are held in memory only (no persistence shown here); all access
    is serialized through a single lock since the web layer may call in
    from multiple threads.
    """

    def __init__(self):
        # key_id -> APIKey record
        self.api_keys: Dict[str, APIKey] = {}
        self.rate_limits: Dict[str, Dict[str, int]] = {}  # key_id -> {hour: count}
        self.lock = threading.Lock()
        self.logger = logging.getLogger(__name__)

    def create_api_key(self, name: str, permissions: List[str], rate_limit: int = 1000) -> tuple:
        """Create a new API key and return (raw_key, key_id).

        The raw key is returned exactly once; only its werkzeug password
        hash is stored, so it cannot be recovered afterwards.
        """
        key_id = str(uuid.uuid4())
        raw_key = f"aniworld_{uuid.uuid4().hex}"
        key_hash = generate_password_hash(raw_key)

        api_key = APIKey(
            key_id=key_id,
            name=name,
            key_hash=key_hash,
            permissions=permissions,
            rate_limit_per_hour=rate_limit
        )

        with self.lock:
            self.api_keys[key_id] = api_key

        self.logger.info(f"Created API key: {name} ({key_id})")
        return raw_key, key_id

    def validate_api_key(self, raw_key: str) -> Optional[APIKey]:
        """Validate a raw key and return the matching active APIKey, or None.

        Updates last_used on success. NOTE(review): linear scan with one
        password-hash check per stored key — fine for a handful of keys,
        slow if many are ever created.
        """
        with self.lock:
            for api_key in self.api_keys.values():
                if api_key.is_active and check_password_hash(api_key.key_hash, raw_key):
                    api_key.last_used = datetime.now()
                    return api_key
            return None

    def check_rate_limit(self, key_id: str) -> bool:
        """Check (and count) one request against the key's hourly limit.

        Buckets requests per clock hour; a successful check increments the
        current bucket and prunes buckets older than 24 hours. Returns
        False for unknown key ids or when the limit is reached.
        """
        current_hour = datetime.now().replace(minute=0, second=0, microsecond=0)

        with self.lock:
            if key_id not in self.api_keys:
                return False

            api_key = self.api_keys[key_id]

            if key_id not in self.rate_limits:
                self.rate_limits[key_id] = {}

            hour_key = current_hour.isoformat()
            current_count = self.rate_limits[key_id].get(hour_key, 0)

            if current_count >= api_key.rate_limit_per_hour:
                return False

            self.rate_limits[key_id][hour_key] = current_count + 1

            # Clean old entries (keep only last 24 hours)
            cutoff = current_hour - timedelta(hours=24)
            for hour_key in list(self.rate_limits[key_id].keys()):
                if datetime.fromisoformat(hour_key) < cutoff:
                    del self.rate_limits[key_id][hour_key]

            return True

    def revoke_api_key(self, key_id: str) -> bool:
        """Revoke an API key. The record is deactivated, not deleted."""
        with self.lock:
            if key_id in self.api_keys:
                self.api_keys[key_id].is_active = False
                self.logger.info(f"Revoked API key: {key_id}")
                return True
            return False

    def list_api_keys(self) -> List[Dict[str, Any]]:
        """List all API keys (without sensitive data such as the hash)."""
        with self.lock:
            return [
                {
                    'key_id': key.key_id,
                    'name': key.name,
                    'permissions': key.permissions,
                    'rate_limit_per_hour': key.rate_limit_per_hour,
                    'created_at': key.created_at.isoformat(),
                    'last_used': key.last_used.isoformat() if key.last_used else None,
                    'is_active': key.is_active
                }
                for key in self.api_keys.values()
            ]
class WebhookManager:
|
||||
"""Manage webhook endpoints and delivery."""
|
||||
|
||||
def __init__(self):
|
||||
self.webhooks: Dict[str, WebhookEndpoint] = {}
|
||||
self.delivery_queue = []
|
||||
self.delivery_thread = None
|
||||
self.running = False
|
||||
self.lock = threading.Lock()
|
||||
self.logger = logging.getLogger(__name__)
|
||||
|
||||
def start(self):
|
||||
"""Start webhook delivery service."""
|
||||
if self.running:
|
||||
return
|
||||
|
||||
self.running = True
|
||||
self.delivery_thread = threading.Thread(target=self._delivery_loop, daemon=True)
|
||||
self.delivery_thread.start()
|
||||
self.logger.info("Webhook delivery service started")
|
||||
|
||||
def stop(self):
|
||||
"""Stop webhook delivery service."""
|
||||
self.running = False
|
||||
if self.delivery_thread:
|
||||
self.delivery_thread.join(timeout=5)
|
||||
self.logger.info("Webhook delivery service stopped")
|
||||
|
||||
def create_webhook(self, name: str, url: str, events: List[str], secret: Optional[str] = None) -> str:
|
||||
"""Create a new webhook endpoint."""
|
||||
webhook_id = str(uuid.uuid4())
|
||||
|
||||
webhook = WebhookEndpoint(
|
||||
webhook_id=webhook_id,
|
||||
name=name,
|
||||
url=url,
|
||||
events=events,
|
||||
secret=secret
|
||||
)
|
||||
|
||||
with self.lock:
|
||||
self.webhooks[webhook_id] = webhook
|
||||
|
||||
self.logger.info(f"Created webhook: {name} ({webhook_id})")
|
||||
return webhook_id
|
||||
|
||||
def delete_webhook(self, webhook_id: str) -> bool:
|
||||
"""Delete a webhook endpoint."""
|
||||
with self.lock:
|
||||
if webhook_id in self.webhooks:
|
||||
del self.webhooks[webhook_id]
|
||||
self.logger.info(f"Deleted webhook: {webhook_id}")
|
||||
return True
|
||||
return False
|
||||
|
||||
def trigger_event(self, event_type: str, data: Dict[str, Any]):
|
||||
"""Trigger webhook event for all subscribed endpoints."""
|
||||
event_data = {
|
||||
'event': event_type,
|
||||
'timestamp': datetime.now().isoformat(),
|
||||
'data': data
|
||||
}
|
||||
|
||||
with self.lock:
|
||||
for webhook in self.webhooks.values():
|
||||
if webhook.is_active and event_type in webhook.events:
|
||||
self.delivery_queue.append((webhook, event_data))
|
||||
|
||||
self.logger.debug(f"Triggered webhook event: {event_type}")
|
||||
|
||||
def _delivery_loop(self):
|
||||
"""Main delivery loop for webhook events."""
|
||||
while self.running:
|
||||
try:
|
||||
if self.delivery_queue:
|
||||
with self.lock:
|
||||
webhook, event_data = self.delivery_queue.pop(0)
|
||||
|
||||
self._deliver_webhook(webhook, event_data)
|
||||
else:
|
||||
time.sleep(1)
|
||||
except Exception as e:
|
||||
self.logger.error(f"Error in webhook delivery loop: {e}")
|
||||
time.sleep(1)
|
||||
|
||||
def _deliver_webhook(self, webhook: WebhookEndpoint, event_data: Dict[str, Any]):
    """Deliver one event payload to an endpoint, retrying with backoff.

    The JSON payload is serialized exactly once; when the endpoint has a
    secret, the HMAC-SHA256 signature is computed over those same bytes
    and those bytes are sent as the request body. (The original signed
    ``json.dumps(event_data)`` but let ``requests`` re-serialize via
    ``json=``, so signed bytes and sent bytes could diverge.) Exponential
    backoff now applies to HTTP error responses as well as transport
    exceptions — previously only exceptions backed off.
    """
    payload = json.dumps(event_data)
    headers = {'Content-Type': 'application/json'}
    if webhook.secret:
        signature = hmac.new(
            webhook.secret.encode(),
            payload.encode(),
            hashlib.sha256
        ).hexdigest()
        headers['X-Webhook-Signature'] = f"sha256={signature}"

    for attempt in range(webhook.retry_attempts):
        try:
            # Send the exact bytes that were signed, not a re-serialization.
            response = requests.post(
                webhook.url,
                data=payload.encode(),
                headers=headers,
                timeout=30
            )

            if response.status_code < 400:
                webhook.last_triggered = datetime.now()
                self.logger.debug(f"Webhook delivered successfully: {webhook.webhook_id}")
                break
            self.logger.warning(f"Webhook delivery failed (HTTP {response.status_code}): {webhook.webhook_id}")
        except Exception as e:
            self.logger.error(f"Webhook delivery error (attempt {attempt + 1}): {e}")

        # Exponential backoff before the next attempt (HTTP failure or exception).
        if attempt < webhook.retry_attempts - 1:
            time.sleep(2 ** attempt)
||||
def list_webhooks(self) -> List[Dict[str, Any]]:
    """Return a snapshot of all webhook endpoints as plain dictionaries."""
    def serialize(endpoint):
        last = endpoint.last_triggered
        return {
            'webhook_id': endpoint.webhook_id,
            'name': endpoint.name,
            'url': endpoint.url,
            'events': endpoint.events,
            'is_active': endpoint.is_active,
            'created_at': endpoint.created_at.isoformat(),
            'last_triggered': last.isoformat() if last else None
        }

    with self.lock:
        return [serialize(endpoint) for endpoint in self.webhooks.values()]
||||
class ExportManager:
    """Export anime-list data and download statistics in various formats."""

    def __init__(self, series_app=None):
        # Optional handle to the series application; export methods degrade
        # gracefully to empty results when it is absent.
        self.series_app = series_app
        self.logger = logging.getLogger(__name__)

    def export_anime_list_json(self, include_missing_only: bool = False) -> Dict[str, Any]:
        """Export the anime list as a JSON-serializable dictionary.

        Args:
            include_missing_only: When True, series without missing
                episodes are skipped.

        Raises:
            RetryableError: If the export fails for any reason.
        """
        try:
            if not self.series_app or not self.series_app.List:
                return {'anime_list': [], 'metadata': {'count': 0}}

            exported = []
            for serie in self.series_app.List.GetList():
                # Skip series without missing episodes if the filter is on.
                if include_missing_only and not serie.episodeDict:
                    continue

                entry = {
                    'name': serie.name or serie.folder,
                    'folder': serie.folder,
                    'key': getattr(serie, 'key', None),
                    'missing_episodes': {}
                }
                episode_dict = getattr(serie, 'episodeDict', None)
                if episode_dict:
                    for season, episodes in episode_dict.items():
                        if episodes:
                            entry['missing_episodes'][str(season)] = list(episodes)
                exported.append(entry)

            return {
                'anime_list': exported,
                'metadata': {
                    'count': len(exported),
                    'exported_at': datetime.now().isoformat(),
                    'include_missing_only': include_missing_only
                }
            }
        except Exception as e:
            self.logger.error(f"Failed to export anime list as JSON: {e}")
            raise RetryableError(f"JSON export failed: {e}")

    def export_anime_list_csv(self, include_missing_only: bool = False) -> str:
        """Export the anime list as CSV text (one row per missing episode).

        Raises:
            RetryableError: If the export fails for any reason.
        """
        try:
            buffer = io.StringIO()
            writer = csv.writer(buffer)
            writer.writerow(['Name', 'Folder', 'Key', 'Season', 'Episode', 'Missing'])

            if not self.series_app or not self.series_app.List:
                return buffer.getvalue()

            for serie in self.series_app.List.GetList():
                # Skip series without missing episodes if the filter is on.
                if include_missing_only and not serie.episodeDict:
                    continue

                name = serie.name or serie.folder
                folder = serie.folder
                key = getattr(serie, 'key', '')
                episode_dict = getattr(serie, 'episodeDict', None)
                if episode_dict:
                    for season, episodes in episode_dict.items():
                        for episode in episodes:
                            writer.writerow([name, folder, key, season, episode, 'Yes'])
                else:
                    writer.writerow([name, folder, key, '', '', 'No'])

            return buffer.getvalue()
        except Exception as e:
            self.logger.error(f"Failed to export anime list as CSV: {e}")
            raise RetryableError(f"CSV export failed: {e}")

    def export_download_statistics(self) -> Dict[str, Any]:
        """Export download statistics from the global download manager.

        Raises:
            RetryableError: If the statistics cannot be collected.
        """
        try:
            # Imported lazily so this module loads even when the
            # performance optimizer is unavailable.
            from performance_optimizer import download_manager

            return {
                'download_statistics': download_manager.get_statistics(),
                'metadata': {
                    'exported_at': datetime.now().isoformat()
                }
            }
        except Exception as e:
            self.logger.error(f"Failed to export download statistics: {e}")
            raise RetryableError(f"Statistics export failed: {e}")
||||
|
||||
class NotificationService:
    """Dispatch notifications to registered external services (Discord/Telegram)."""

    def __init__(self):
        # Mapping of service name -> service configuration dict.
        self.services = {}
        self.logger = logging.getLogger(__name__)

    def register_discord_webhook(self, webhook_url: str, name: str = "discord"):
        """Register a Discord webhook target under *name*."""
        self.services[name] = {'type': 'discord', 'webhook_url': webhook_url}
        self.logger.info(f"Registered Discord webhook: {name}")

    def register_telegram_bot(self, bot_token: str, chat_id: str, name: str = "telegram"):
        """Register a Telegram bot target under *name*."""
        self.services[name] = {
            'type': 'telegram',
            'bot_token': bot_token,
            'chat_id': chat_id
        }
        self.logger.info(f"Registered Telegram bot: {name}")

    def send_notification(self, message: str, title: str = None, service_name: str = None):
        """Send *message* to one named service, or to every registered service.

        Delivery errors are logged per-service and never propagated.
        """
        targets = [service_name] if service_name else list(self.services.keys())
        for name in targets:
            service = self.services.get(name)
            if service is None:
                continue
            try:
                if service['type'] == 'discord':
                    self._send_discord_notification(service, message, title)
                elif service['type'] == 'telegram':
                    self._send_telegram_notification(service, message, title)
            except Exception as e:
                self.logger.error(f"Failed to send notification via {name}: {e}")

    def _send_discord_notification(self, service: Dict, message: str, title: str = None):
        """POST an embed payload to the configured Discord webhook URL."""
        embed = {
            'title': title or 'AniWorld Notification',
            'description': message,
            'color': 0x00ff00,
            'timestamp': datetime.now().isoformat()
        }
        response = requests.post(service['webhook_url'], json={'embeds': [embed]}, timeout=10)
        response.raise_for_status()

    def _send_telegram_notification(self, service: Dict, message: str, title: str = None):
        """Send a Markdown message through the Telegram bot API."""
        text = f"*{title}*\n\n{message}" if title else message
        payload = {
            'chat_id': service['chat_id'],
            'text': text,
            'parse_mode': 'Markdown'
        }
        endpoint = f"https://api.telegram.org/bot{service['bot_token']}/sendMessage"
        response = requests.post(endpoint, json=payload, timeout=10)
        response.raise_for_status()
|
||||
# Global instances
# Module-level singletons shared by the whole application; constructed at
# import time so decorators and init/cleanup helpers below can reference them.
api_key_manager = APIKeyManager()
webhook_manager = WebhookManager()
export_manager = ExportManager()
notification_service = NotificationService()
|
||||
|
||||
|
||||
def require_api_key(permissions: List[str] = None):
    """Decorator factory enforcing Bearer API-key auth on a view function.

    Validates the ``Authorization`` header, applies per-key rate limiting
    and optional permission checks, then stores the validated key object
    on ``request.api_key`` for the wrapped view.

    Args:
        permissions: Permission names the key must hold; None accepts any
            valid key.
    """
    def decorator(f):
        @wraps(f)
        def decorated_function(*args, **kwargs):
            auth_header = request.headers.get('Authorization', '')
            if not auth_header.startswith('Bearer '):
                return jsonify({
                    'status': 'error',
                    'message': 'Invalid authorization header format'
                }), 401

            token = auth_header[7:]  # Strip the 'Bearer ' prefix.
            validated_key = api_key_manager.validate_api_key(token)
            if not validated_key:
                return jsonify({
                    'status': 'error',
                    'message': 'Invalid API key'
                }), 401

            if not api_key_manager.check_rate_limit(validated_key.key_id):
                return jsonify({
                    'status': 'error',
                    'message': 'Rate limit exceeded'
                }), 429

            if permissions:
                missing_permissions = set(permissions) - set(validated_key.permissions)
                if missing_permissions:
                    return jsonify({
                        'status': 'error',
                        'message': f'Missing permissions: {", ".join(missing_permissions)}'
                    }), 403

            # Expose the validated key to the view via the request context.
            request.api_key = validated_key
            return f(*args, **kwargs)
        return decorated_function
    return decorator
||||
|
||||
def init_api_integrations():
    """Initialize API integration services.

    Currently this starts the global webhook delivery thread.
    """
    webhook_manager.start()
|
||||
|
||||
def cleanup_api_integrations():
    """Clean up API integration services.

    Stops the global webhook delivery thread started by init_api_integrations().
    """
    webhook_manager.stop()
||||
|
||||
|
||||
# Export main components
# Public API of this module (consumed via `from <module> import *`).
__all__ = [
    'APIKeyManager',
    'WebhookManager',
    'ExportManager',
    'NotificationService',
    'api_key_manager',
    'webhook_manager',
    'export_manager',
    'notification_service',
    'require_api_key',
    'init_api_integrations',
    'cleanup_api_integrations'
]
|
||||
@@ -1,40 +0,0 @@
|
||||
import logging
|
||||
|
||||
# Module-level handler/logger singletons so repeated setupLogger() calls
# never attach duplicate handlers.
console_handler = None
error_logger = None
noKeyFound_logger = None
noGerFound_logger = None

_LOG_FORMAT = '%(asctime)s - %(levelname)s - %(funcName)s - %(message)s'


def _file_logger(name, path):
    """Create a named logger with an ERROR-level file handler.

    The handler uses delay=True so the log file is only created when the
    first record is actually emitted (the original opened all three files
    eagerly at import time), and gets an explicit formatter (the original
    file handlers had none, so records were written bare).
    """
    file_logger = logging.getLogger(name)
    handler = logging.FileHandler(path, delay=True)
    handler.setLevel(logging.ERROR)
    handler.setFormatter(logging.Formatter(_LOG_FORMAT))
    file_logger.addHandler(handler)
    return file_logger


def setupLogger():
    """Configure console logging plus the error/NoKeyFound/noGerFound file loggers.

    Idempotent via the module-level singletons. Fix: the original called
    logging.basicConfig (which installs a root StreamHandler) and then
    added a second StreamHandler, so every console record was printed
    twice; we now reuse the handler basicConfig installed.
    """
    global console_handler, error_logger, noKeyFound_logger, noGerFound_logger

    # Configure logging (installs a root StreamHandler when none exists).
    logging.basicConfig(level=logging.INFO, format=_LOG_FORMAT)

    if console_handler is None:
        root = logging.getLogger()
        if root.handlers:
            # Reuse basicConfig's handler instead of adding a duplicate.
            console_handler = root.handlers[0]
        else:
            console_handler = logging.StreamHandler()
            console_handler.setFormatter(logging.Formatter(_LOG_FORMAT))
            root.addHandler(console_handler)
        console_handler.setLevel(logging.INFO)
        logging.getLogger("urllib3.connectionpool").setLevel(logging.INFO)
        logging.getLogger('charset_normalizer').setLevel(logging.INFO)
        root.setLevel(logging.INFO)

    if error_logger is None:
        error_logger = _file_logger("ErrorLog", "../errors.log")

    if noKeyFound_logger is None:
        noKeyFound_logger = _file_logger("NoKeyFound", "../NoKeyFound.log")

    if noGerFound_logger is None:
        noGerFound_logger = _file_logger("noGerFound", "../noGerFound.log")


setupLogger()
|
||||
@@ -1,6 +0,0 @@
|
||||
"""
|
||||
Repository package for data access layer.
|
||||
|
||||
This package contains repository implementations following the Repository pattern
|
||||
for clean separation of data access logic from business logic.
|
||||
"""
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,20 +0,0 @@
|
||||
@echo off
REM Start the FastAPI server and run a simple test
REM NOTE(review): the repo path and the anaconda python path below are
REM machine-specific -- confirm/parameterize before reusing this script.

echo Starting AniWorld FastAPI Server...
cd /d "D:\repo\Aniworld\src\server"

REM Start server in background
start "AniWorld Server" cmd /k "C:\Users\lukas\anaconda3\envs\AniWorld\python.exe fastapi_app.py"

REM Wait a moment for server to start
timeout /t 5

REM Test the server
echo Testing the server...
C:\Users\lukas\anaconda3\envs\AniWorld\python.exe test_fastapi.py

echo.
echo FastAPI server should be running in the other window.
echo Visit http://localhost:8000/docs to see the API documentation.
pause
|
||||
@@ -1,549 +0,0 @@
|
||||
"""
|
||||
Performance & Optimization Module for AniWorld App
|
||||
|
||||
This module provides download speed limiting, parallel download support,
|
||||
caching mechanisms, memory usage monitoring, and download resumption.
|
||||
"""
|
||||
|
||||
import os
|
||||
import threading
|
||||
import time
|
||||
import logging
|
||||
import queue
|
||||
import hashlib
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Dict, List, Optional, Any, Callable
|
||||
from dataclasses import dataclass, field
|
||||
from concurrent.futures import ThreadPoolExecutor, as_completed
|
||||
import json
|
||||
import sqlite3
|
||||
from contextlib import contextmanager
|
||||
import gc
|
||||
import psutil
|
||||
import requests
|
||||
|
||||
|
||||
@dataclass
class DownloadTask:
    """Represents a download task with all necessary information.

    Instances additionally gain a dynamic ``future`` attribute when the
    ParallelDownloadManager submits them to its thread pool.
    """
    task_id: str
    serie_name: str
    season: int
    episode: int
    key: str  # presumably the provider lookup key -- TODO confirm against caller
    language: str
    output_path: str
    temp_path: str
    priority: int = 0  # Higher number = higher priority
    retry_count: int = 0
    max_retries: int = 3
    created_at: datetime = field(default_factory=datetime.now)
    started_at: Optional[datetime] = None
    completed_at: Optional[datetime] = None
    status: str = 'pending'  # pending, downloading, completed, failed, paused
    progress: Dict[str, Any] = field(default_factory=dict)
    error_message: Optional[str] = None
||||
|
||||
|
||||
class SpeedLimiter:
    """Throttle download speed to a configurable maximum (MB/s).

    Thread-safe: all shared counters are read/written under ``self.lock``.
    The throttling sleep itself happens *outside* the lock — the original
    slept while holding it, stalling every other thread that touched the
    limiter (set_speed_limit, get_current_speed, other downloaders) for
    the whole throttle delay.
    """

    def __init__(self, max_speed_mbps: float = 0):  # 0 = unlimited
        self.max_speed_mbps = max_speed_mbps
        self.max_bytes_per_second = max_speed_mbps * 1024 * 1024 if max_speed_mbps > 0 else 0
        self.download_start_time = None  # set by start_download()
        self.bytes_downloaded = 0
        self.lock = threading.Lock()
        self.logger = logging.getLogger(__name__)

    def set_speed_limit(self, max_speed_mbps: float):
        """Set maximum download speed in MB/s (0 disables limiting)."""
        with self.lock:
            self.max_speed_mbps = max_speed_mbps
            self.max_bytes_per_second = max_speed_mbps * 1024 * 1024 if max_speed_mbps > 0 else 0
        self.logger.info(f"Speed limit set to {max_speed_mbps} MB/s")

    def start_download(self):
        """Mark the start of a new download session (resets the counters)."""
        with self.lock:
            self.download_start_time = time.time()
            self.bytes_downloaded = 0

    def update_progress(self, bytes_downloaded: int):
        """Record newly downloaded bytes and sleep if over the speed limit.

        The required delay is computed while holding the lock, but the
        sleep runs after the lock is released.
        """
        delay = 0.0
        with self.lock:
            if self.max_bytes_per_second <= 0:  # No limit configured
                return
            self.bytes_downloaded += bytes_downloaded

            if self.download_start_time:
                elapsed_time = time.time() - self.download_start_time
                if elapsed_time > 0:
                    current_speed = self.bytes_downloaded / elapsed_time
                    if current_speed > self.max_bytes_per_second:
                        # Time the transfer *should* have taken at the cap.
                        target_time = self.bytes_downloaded / self.max_bytes_per_second
                        delay = target_time - elapsed_time

        if delay > 0:
            self.logger.debug(f"Speed limiting: sleeping for {delay:.2f}s")
            time.sleep(delay)

    def get_current_speed(self) -> float:
        """Return the session's average download speed in MB/s."""
        with self.lock:
            if self.download_start_time:
                elapsed_time = time.time() - self.download_start_time
                if elapsed_time > 0:
                    return (self.bytes_downloaded / elapsed_time) / (1024 * 1024)
        return 0.0
||||
|
||||
class MemoryMonitor:
    """Periodically watch process memory and force GC above thresholds."""

    def __init__(self, warning_threshold_mb: int = 1024, critical_threshold_mb: int = 2048):
        # Thresholds are stored internally in bytes.
        self.warning_threshold = warning_threshold_mb * 1024 * 1024
        self.critical_threshold = critical_threshold_mb * 1024 * 1024
        self.logger = logging.getLogger(__name__)
        self.monitoring = False
        self.monitor_thread = None

    def start_monitoring(self, check_interval: int = 30):
        """Start the background monitoring thread (no-op if already running)."""
        if self.monitoring:
            return

        self.monitoring = True
        self.monitor_thread = threading.Thread(
            target=self._monitoring_loop,
            args=(check_interval,),
            daemon=True
        )
        self.monitor_thread.start()
        self.logger.info("Memory monitoring started")

    def stop_monitoring(self):
        """Signal the monitor thread to stop and wait briefly for it."""
        self.monitoring = False
        if self.monitor_thread:
            self.monitor_thread.join(timeout=5)
        self.logger.info("Memory monitoring stopped")

    def _monitoring_loop(self, check_interval: int):
        """Poll memory usage every *check_interval* seconds until stopped."""
        while self.monitoring:
            try:
                self.check_memory_usage()
            except Exception as e:
                self.logger.error(f"Error in memory monitoring: {e}")
            time.sleep(check_interval)

    def check_memory_usage(self):
        """Check RSS against thresholds; force a GC pass when critical."""
        try:
            process = psutil.Process()
            usage = process.memory_info().rss

            if usage > self.critical_threshold:
                self.logger.warning(f"Critical memory usage: {usage / (1024*1024):.1f} MB")
                self.force_garbage_collection()

                # Re-measure after GC to see whether it actually helped.
                usage = process.memory_info().rss
                if usage > self.critical_threshold:
                    self.logger.error("Memory usage still critical after garbage collection")
            elif usage > self.warning_threshold:
                self.logger.info(f"Memory usage warning: {usage / (1024*1024):.1f} MB")
        except Exception as e:
            self.logger.error(f"Failed to check memory usage: {e}")

    def force_garbage_collection(self):
        """Run a full gc.collect() pass and log how many objects were freed."""
        self.logger.debug("Forcing garbage collection")
        freed = gc.collect()
        self.logger.debug(f"Garbage collection freed {freed} objects")

    def get_memory_stats(self) -> Dict[str, Any]:
        """Return current RSS/VMS usage and the configured thresholds (MB)."""
        try:
            process = psutil.Process()
            info = process.memory_info()
            mb = 1024 * 1024
            return {
                'rss_mb': info.rss / mb,
                'vms_mb': info.vms / mb,
                'percent': process.memory_percent(),
                'warning_threshold_mb': self.warning_threshold / mb,
                'critical_threshold_mb': self.critical_threshold / mb
            }
        except Exception as e:
            self.logger.error(f"Failed to get memory stats: {e}")
            return {}
|
||||
|
||||
|
||||
class ParallelDownloadManager:
    """Manage parallel downloads with configurable thread count.

    A dispatcher thread (started via start()) pulls tasks off a priority
    queue and submits them to a ThreadPoolExecutor; finished futures are
    reaped back into completed/failed lists. Shared state is guarded by
    ``self.lock``.
    """

    def __init__(self, max_workers: int = 3, speed_limiter: Optional[SpeedLimiter] = None):
        self.max_workers = max_workers
        self.speed_limiter = speed_limiter or SpeedLimiter()
        self.executor = ThreadPoolExecutor(max_workers=max_workers)
        # Task bookkeeping: running tasks by id, queued tasks by priority,
        # and append-only histories of finished/failed tasks.
        self.active_tasks: Dict[str, DownloadTask] = {}
        self.pending_queue = queue.PriorityQueue()
        self.completed_tasks: List[DownloadTask] = []
        self.failed_tasks: List[DownloadTask] = []
        self.lock = threading.Lock()
        self.logger = logging.getLogger(__name__)
        self.running = False
        self.worker_thread = None  # dispatcher thread created by start()

        # Statistics
        self.stats = {
            'total_tasks': 0,
            'completed_tasks': 0,
            'failed_tasks': 0,
            'active_tasks': 0,
            'average_speed_mbps': 0.0
        }

    def start(self):
        """Start the download manager (idempotent)."""
        if self.running:
            return

        self.running = True
        self.worker_thread = threading.Thread(target=self._worker_loop, daemon=True)
        self.worker_thread.start()
        self.logger.info(f"Download manager started with {self.max_workers} workers")

    def stop(self):
        """Stop the download manager.

        NOTE(review): executor.shutdown(wait=True) blocks until in-flight
        downloads finish; active task statuses are never set to 'cancelled',
        so stop() can take as long as the slowest running download.
        """
        self.running = False

        # Cancel all pending tasks
        with self.lock:
            while not self.pending_queue.empty():
                try:
                    _, task = self.pending_queue.get_nowait()
                    task.status = 'cancelled'
                except queue.Empty:
                    break

        # Shutdown executor
        self.executor.shutdown(wait=True)

        if self.worker_thread:
            self.worker_thread.join(timeout=5)

        self.logger.info("Download manager stopped")

    def add_task(self, task: DownloadTask) -> str:
        """Add a download task to the queue and return its id.

        NOTE(review): entries are (-priority, task) tuples; two tasks with
        equal priority make the heap compare DownloadTask objects, which
        define no ordering — consider adding a monotonic tiebreaker.
        """
        with self.lock:
            self.stats['total_tasks'] += 1
            # Priority queue uses negative priority for max-heap behavior
            self.pending_queue.put((-task.priority, task))
            self.logger.info(f"Added download task: {task.task_id}")
            return task.task_id

    def _worker_loop(self):
        """Main worker loop that processes download tasks."""
        while self.running:
            try:
                # Check for pending tasks
                if not self.pending_queue.empty() and len(self.active_tasks) < self.max_workers:
                    _, task = self.pending_queue.get_nowait()

                    if task.status == 'pending':
                        self._start_task(task)

                # Check completed tasks
                self._check_completed_tasks()

                time.sleep(0.1)  # Small delay to prevent busy waiting

            except queue.Empty:
                time.sleep(1)
            except Exception as e:
                self.logger.error(f"Error in worker loop: {e}")
                time.sleep(1)

    def _start_task(self, task: DownloadTask):
        """Mark a task as running and submit it to the thread pool."""
        with self.lock:
            task.status = 'downloading'
            task.started_at = datetime.now()
            self.active_tasks[task.task_id] = task
            self.stats['active_tasks'] = len(self.active_tasks)

        # Submit to thread pool
        future = self.executor.submit(self._execute_download, task)
        # 'future' is attached dynamically; DownloadTask declares no such field.
        task.future = future

        self.logger.info(f"Started download task: {task.task_id}")

    def _execute_download(self, task: DownloadTask) -> bool:
        """Execute the actual download; returns True on success."""
        try:
            self.logger.info(f"Executing download: {task.serie_name} S{task.season}E{task.episode}")

            # Create progress callback that respects speed limiting
            def progress_callback(info):
                if 'downloaded_bytes' in info:
                    self.speed_limiter.update_progress(info.get('downloaded_bytes', 0))

                # Update task progress
                task.progress.update(info)

            self.speed_limiter.start_download()

            # Here you would call the actual download function
            # For now, simulate download
            success = self._simulate_download(task, progress_callback)

            return success

        except Exception as e:
            self.logger.error(f"Download failed for task {task.task_id}: {e}")
            task.error_message = str(e)
            return False

    def _simulate_download(self, task: DownloadTask, progress_callback: Callable) -> bool:
        """Simulate download for testing purposes."""
        # This is a placeholder - replace with actual download logic
        total_size = 100 * 1024 * 1024  # 100MB simulation
        downloaded = 0
        chunk_size = 1024 * 1024  # 1MB chunks

        while downloaded < total_size and task.status == 'downloading':
            # Simulate download chunk
            time.sleep(0.1)
            downloaded += chunk_size

            progress_info = {
                'status': 'downloading',
                'downloaded_bytes': downloaded,
                'total_bytes': total_size,
                'percent': (downloaded / total_size) * 100
            }

            progress_callback(progress_info)

        if downloaded >= total_size:
            progress_callback({'status': 'finished'})
            return True

        return False

    def _check_completed_tasks(self):
        """Collect ids of tasks whose futures finished, then reap them."""
        completed_task_ids = []

        with self.lock:
            for task_id, task in self.active_tasks.items():
                if hasattr(task, 'future') and task.future.done():
                    completed_task_ids.append(task_id)

        # Process completed tasks outside the lock; _handle_completed_task
        # re-acquires it (self.lock is not reentrant).
        for task_id in completed_task_ids:
            self._handle_completed_task(task_id)

    def _handle_completed_task(self, task_id: str):
        """Move a finished task into the completed or failed list."""
        with self.lock:
            task = self.active_tasks.pop(task_id, None)
            if not task:
                return

            task.completed_at = datetime.now()
            self.stats['active_tasks'] = len(self.active_tasks)

            try:
                # future.done() was already True, so result() returns immediately.
                success = task.future.result()

                if success:
                    task.status = 'completed'
                    self.completed_tasks.append(task)
                    self.stats['completed_tasks'] += 1
                    self.logger.info(f"Task completed successfully: {task_id}")
                else:
                    task.status = 'failed'
                    self.failed_tasks.append(task)
                    self.stats['failed_tasks'] += 1
                    self.logger.warning(f"Task failed: {task_id}")

            except Exception as e:
                task.status = 'failed'
                task.error_message = str(e)
                self.failed_tasks.append(task)
                self.stats['failed_tasks'] += 1
                self.logger.error(f"Task failed with exception: {task_id} - {e}")

    def get_task_status(self, task_id: str) -> Optional[Dict[str, Any]]:
        """Get status of a specific task (active, completed, or failed)."""
        with self.lock:
            # Check active tasks
            if task_id in self.active_tasks:
                task = self.active_tasks[task_id]
                return self._task_to_dict(task)

            # Check completed tasks
            for task in self.completed_tasks:
                if task.task_id == task_id:
                    return self._task_to_dict(task)

            # Check failed tasks
            for task in self.failed_tasks:
                if task.task_id == task_id:
                    return self._task_to_dict(task)

            return None

    def _task_to_dict(self, task: DownloadTask) -> Dict[str, Any]:
        """Convert task to dictionary representation."""
        return {
            'task_id': task.task_id,
            'serie_name': task.serie_name,
            'season': task.season,
            'episode': task.episode,
            'status': task.status,
            'progress': task.progress,
            'created_at': task.created_at.isoformat(),
            'started_at': task.started_at.isoformat() if task.started_at else None,
            'completed_at': task.completed_at.isoformat() if task.completed_at else None,
            'error_message': task.error_message,
            'retry_count': task.retry_count
        }

    def get_all_tasks(self) -> Dict[str, List[Dict[str, Any]]]:
        """Get all tasks grouped by status."""
        with self.lock:
            return {
                'active': [self._task_to_dict(task) for task in self.active_tasks.values()],
                'completed': [self._task_to_dict(task) for task in self.completed_tasks[-50:]],  # Last 50
                'failed': [self._task_to_dict(task) for task in self.failed_tasks[-50:]]  # Last 50
            }

    def get_statistics(self) -> Dict[str, Any]:
        """Get download manager statistics (shallow copy of the stats dict)."""
        return self.stats.copy()

    def set_max_workers(self, max_workers: int):
        """Change the number of worker threads.

        Raises:
            ValueError: If max_workers is not positive.
        """
        if max_workers <= 0:
            raise ValueError("max_workers must be positive")

        self.max_workers = max_workers

        # Recreate executor with new worker count
        # NOTE(review): shutdown(wait=False) lets old workers finish their
        # current tasks in the background before the old pool disappears.
        old_executor = self.executor
        self.executor = ThreadPoolExecutor(max_workers=max_workers)
        old_executor.shutdown(wait=False)

        self.logger.info(f"Updated worker count to {max_workers}")
|
||||
|
||||
|
||||
class ResumeManager:
    """Persist and restore resume metadata for interrupted downloads.

    Each task gets one JSON file named ``<task_id>.json`` inside
    ``resume_dir``. All file errors are logged, never raised.
    """

    def __init__(self, resume_dir: str = "./resume"):
        self.resume_dir = resume_dir
        self.logger = logging.getLogger(__name__)
        os.makedirs(resume_dir, exist_ok=True)

    def _resume_path(self, task_id: str) -> str:
        # Location of the per-task resume file.
        return os.path.join(self.resume_dir, f"{task_id}.json")

    def save_resume_info(self, task_id: str, resume_data: Dict[str, Any]):
        """Write resume metadata for *task_id* to disk."""
        try:
            with open(self._resume_path(task_id), 'w') as f:
                json.dump(resume_data, f, indent=2, default=str)
            self.logger.debug(f"Saved resume info for task: {task_id}")
        except Exception as e:
            self.logger.error(f"Failed to save resume info for {task_id}: {e}")

    def load_resume_info(self, task_id: str) -> Optional[Dict[str, Any]]:
        """Read resume metadata for *task_id*; None when absent or unreadable."""
        try:
            path = self._resume_path(task_id)
            if os.path.exists(path):
                with open(path, 'r') as f:
                    data = json.load(f)
                self.logger.debug(f"Loaded resume info for task: {task_id}")
                return data
        except Exception as e:
            self.logger.error(f"Failed to load resume info for {task_id}: {e}")
        return None

    def clear_resume_info(self, task_id: str):
        """Delete resume metadata after a successful completion."""
        try:
            path = self._resume_path(task_id)
            if os.path.exists(path):
                os.remove(path)
                self.logger.debug(f"Cleared resume info for task: {task_id}")
        except Exception as e:
            self.logger.error(f"Failed to clear resume info for {task_id}: {e}")

    def get_resumable_tasks(self) -> List[str]:
        """List task ids that currently have saved resume metadata."""
        try:
            return [os.path.splitext(f)[0]
                    for f in os.listdir(self.resume_dir)
                    if f.endswith('.json')]
        except Exception as e:
            self.logger.error(f"Failed to get resumable tasks: {e}")
            return []
|
||||
|
||||
|
||||
# Global instances
# Module-level singletons wired together at import time; the download
# manager shares the global speed limiter.
speed_limiter = SpeedLimiter()
memory_monitor = MemoryMonitor()
download_manager = ParallelDownloadManager(max_workers=3, speed_limiter=speed_limiter)
resume_manager = ResumeManager()
|
||||
|
||||
|
||||
def init_performance_monitoring():
    """Initialize performance monitoring components.

    Starts the global memory monitor thread and the download manager's
    dispatcher thread.
    """
    memory_monitor.start_monitoring()
    download_manager.start()
|
||||
|
||||
|
||||
def cleanup_performance_monitoring():
    """Clean up performance monitoring components.

    Stops the global memory monitor and the parallel download manager.
    Counterpart of init_performance_monitoring(); call once at shutdown.
    """
    memory_monitor.stop_monitoring()
    download_manager.stop()
# Export main components
# NOTE(review): 'download_cache' is exported below but is not defined in the
# visible part of this module — confirm it exists earlier in the file,
# otherwise `from <module> import *` will raise AttributeError.
__all__ = [
    'SpeedLimiter',
    'MemoryMonitor',
    'ParallelDownloadManager',
    'ResumeManager',
    'DownloadTask',
    'speed_limiter',
    'download_cache',
    'memory_monitor',
    'download_manager',
    'resume_manager',
    'init_performance_monitoring',
    'cleanup_performance_monitoring'
]
|
||||
@@ -1,293 +0,0 @@
|
||||
import threading
|
||||
import time
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Dict, Optional, Callable
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
class ProcessLock:
    """Thread-safe process lock for preventing duplicate operations.

    A named, mutex-guarded flag with an expiry: a lock held longer than
    ``timeout_minutes`` is considered stale and is silently released on the
    next acquire attempt. The lock also carries optional progress data and a
    progress callback so the running operation can report status to
    interested observers. Usable as a context manager.
    """

    def __init__(self, name: str, timeout_minutes: int = 60):
        self.name = name
        self.timeout_minutes = timeout_minutes
        self.lock = threading.RLock()  # guards all mutable state below
        self.locked_at: Optional[datetime] = None
        self.locked_by: Optional[str] = None
        self.progress_callback: Optional[Callable] = None
        self.is_locked = False
        self.progress_data = {}

    def acquire(self, locked_by: str = "system", progress_callback: Optional[Callable] = None) -> bool:
        """
        Attempt to acquire the lock.
        Returns True if lock was acquired, False if already locked.
        An expired holder (older than timeout_minutes) is evicted first.
        """
        with self.lock:
            # Check if lock has expired
            if self.is_locked and self.locked_at:
                if datetime.now() - self.locked_at > timedelta(minutes=self.timeout_minutes):
                    logger.warning(f"Process lock '{self.name}' expired, releasing...")
                    self._release_internal()

            if self.is_locked:
                return False

            self.is_locked = True
            self.locked_at = datetime.now()
            self.locked_by = locked_by
            self.progress_callback = progress_callback
            self.progress_data = {}

            logger.info(f"Process lock '{self.name}' acquired by '{locked_by}'")
            return True

    def release(self) -> bool:
        """Release the lock. Returns False when it was not held."""
        with self.lock:
            if not self.is_locked:
                return False

            self._release_internal()
            logger.info(f"Process lock '{self.name}' released")
            return True

    def _release_internal(self):
        """Internal method to release lock without logging."""
        self.is_locked = False
        self.locked_at = None
        self.locked_by = None
        self.progress_callback = None
        self.progress_data = {}

    def is_locked_by_other(self, requester: str) -> bool:
        """Check if lock is held by someone other than requester."""
        with self.lock:
            return self.is_locked and self.locked_by != requester

    def get_status(self) -> Dict:
        """Get current lock status as a plain, serializable dict (copies
        progress so callers cannot mutate internal state)."""
        with self.lock:
            return {
                'is_locked': self.is_locked,
                'locked_by': self.locked_by,
                'locked_at': self.locked_at.isoformat() if self.locked_at else None,
                'progress': self.progress_data.copy(),
                'timeout_minutes': self.timeout_minutes
            }

    def update_progress(self, progress_data: Dict):
        """Update progress data for this lock and notify the callback, if any.
        Silently ignored when the lock is not currently held."""
        with self.lock:
            if self.is_locked:
                self.progress_data.update(progress_data)
                if self.progress_callback:
                    try:
                        self.progress_callback(progress_data)
                    except Exception as e:
                        # A faulty observer must not break the locked operation.
                        logger.error(f"Progress callback error: {e}")

    def __enter__(self):
        """Context manager entry. Raises ProcessLockError if already held."""
        if not self.acquire():
            raise ProcessLockError(f"Could not acquire lock '{self.name}'")
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Context manager exit: always releases the lock."""
        self.release()
class ProcessLockError(Exception):
    """Raised when a process lock cannot be acquired or operated on."""
class ProcessLockManager:
    """Global manager for all process locks.

    Lazily creates one ProcessLock per name and exposes convenience helpers
    to acquire/release/inspect them. Access to the registry itself is
    serialized by ``manager_lock``; each ProcessLock guards its own state.
    """

    def __init__(self):
        self.locks: Dict[str, ProcessLock] = {}
        self.manager_lock = threading.RLock()

    def get_lock(self, name: str, timeout_minutes: int = 60) -> ProcessLock:
        """Get or create a process lock.

        ``timeout_minutes`` only takes effect when the lock is first
        created; an existing lock keeps its original timeout.
        """
        with self.manager_lock:
            if name not in self.locks:
                self.locks[name] = ProcessLock(name, timeout_minutes)
            return self.locks[name]

    def acquire_lock(self, name: str, locked_by: str = "system",
                     timeout_minutes: int = 60, progress_callback: Optional[Callable] = None) -> bool:
        """Acquire a named lock. Returns True if it was acquired."""
        lock = self.get_lock(name, timeout_minutes)
        return lock.acquire(locked_by, progress_callback)

    def release_lock(self, name: str) -> bool:
        """Release a named lock. False when unknown or not held."""
        with self.manager_lock:
            if name in self.locks:
                return self.locks[name].release()
            return False

    def is_locked(self, name: str) -> bool:
        """Check if a named lock is currently held (False for unknown names)."""
        with self.manager_lock:
            if name in self.locks:
                return self.locks[name].is_locked
            return False

    def get_all_locks_status(self) -> Dict:
        """Get status of all locks, keyed by lock name."""
        with self.manager_lock:
            return {
                name: lock.get_status()
                for name, lock in self.locks.items()
            }

    def cleanup_expired_locks(self) -> int:
        """Clean up any expired locks. Returns number of locks cleaned up."""
        cleaned_count = 0
        with self.manager_lock:
            for lock in self.locks.values():
                if lock.is_locked and lock.locked_at:
                    if datetime.now() - lock.locked_at > timedelta(minutes=lock.timeout_minutes):
                        # Bypass release() to avoid its "released" info log;
                        # the cleanup message below is logged instead.
                        lock._release_internal()
                        cleaned_count += 1
                        logger.info(f"Cleaned up expired lock: {lock.name}")

        return cleaned_count

    def force_release_all(self) -> int:
        """Force release all locks. Returns number of locks released.

        Emergency/shutdown path: releases regardless of holder or age.
        """
        released_count = 0
        with self.manager_lock:
            for lock in self.locks.values():
                if lock.is_locked:
                    lock._release_internal()
                    released_count += 1
                    logger.warning(f"Force released lock: {lock.name}")

        return released_count
# Global instance
# Single shared registry used by the module-level helper functions below.
process_lock_manager = ProcessLockManager()

# Predefined lock names for common operations
# Use these constants instead of raw strings so lock names stay consistent.
RESCAN_LOCK = "rescan"
DOWNLOAD_LOCK = "download"
SEARCH_LOCK = "search"
CONFIG_LOCK = "config"
def with_process_lock(lock_name: str, timeout_minutes: int = 60):
    """Decorator to protect functions with process locks.

    The wrapped function runs only when the named lock can be acquired;
    otherwise ProcessLockError is raised. Callers may pass the reserved
    keyword arguments ``_locked_by`` (defaults to the function's name) and
    ``_progress_callback``; both are stripped before the wrapped call.

    Fix vs. previous version: the wrapper is now decorated with
    functools.wraps, so the wrapped function keeps its __name__, __doc__
    and other metadata (the bare wrapper previously shadowed them).
    """
    import functools

    def decorator(func):
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            locked_by = kwargs.pop('_locked_by', func.__name__)
            progress_callback = kwargs.pop('_progress_callback', None)

            if not process_lock_manager.acquire_lock(lock_name, locked_by, timeout_minutes, progress_callback):
                raise ProcessLockError(f"Process '{lock_name}' is already running")

            try:
                return func(*args, **kwargs)
            finally:
                # Always release, even when the wrapped call raises.
                process_lock_manager.release_lock(lock_name)

        return wrapper
    return decorator
def check_process_locks():
    """Check and clean up any expired process locks.

    Returns the number of locks that were expired and released.
    """
    return process_lock_manager.cleanup_expired_locks()
def get_process_status(lock_name: str) -> Dict:
    """Get status of a specific process lock.

    Note: this creates the lock (unlocked) if it does not exist yet,
    so it never raises for unknown names.
    """
    lock = process_lock_manager.get_lock(lock_name)
    return lock.get_status()
def update_process_progress(lock_name: str, progress_data: Dict):
    """Update progress for a specific process.

    Silently does nothing when the named lock is not currently held.
    """
    if process_lock_manager.is_locked(lock_name):
        lock = process_lock_manager.get_lock(lock_name)
        lock.update_progress(progress_data)
def is_process_running(lock_name: str) -> bool:
    """Check if a specific process is currently running (its lock is held)."""
    return process_lock_manager.is_locked(lock_name)
class QueueDeduplicator:
    """Prevent duplicate episodes in download queue.

    Tracks episodes as (serie_name, season, episode) tuples in a set;
    all operations are serialized by a re-entrant lock.
    """

    def __init__(self):
        # Keys are (serie_name, season, episode) tuples.
        self.active_items = set()
        self.lock = threading.RLock()

    def add_episode(self, serie_name: str, season: int, episode: int) -> bool:
        """Register an episode as active.

        Returns True when newly added, False when it was already tracked
        (i.e. a duplicate).
        """
        key = (serie_name, season, episode)
        with self.lock:
            if key in self.active_items:
                return False
            self.active_items.add(key)
            return True

    def remove_episode(self, serie_name: str, season: int, episode: int):
        """Stop tracking an episode (no-op when it is not tracked)."""
        with self.lock:
            self.active_items.discard((serie_name, season, episode))

    def is_episode_active(self, serie_name: str, season: int, episode: int) -> bool:
        """Return True while the episode is being processed."""
        with self.lock:
            return (serie_name, season, episode) in self.active_items

    def get_active_episodes(self) -> list:
        """Return a snapshot list of every tracked episode tuple."""
        with self.lock:
            return list(self.active_items)

    def clear_all(self):
        """Forget every tracked episode."""
        with self.lock:
            self.active_items.clear()

    def get_count(self) -> int:
        """Return how many episodes are currently tracked."""
        with self.lock:
            return len(self.active_items)
# Global deduplicator instance
# Shared by the module-level queue helper functions below.
episode_deduplicator = QueueDeduplicator()
def add_episode_to_queue_safe(serie_name: str, season: int, episode: int) -> bool:
    """
    Safely add episode to queue with deduplication.
    Returns True if added, False if duplicate.
    """
    return episode_deduplicator.add_episode(serie_name, season, episode)
def remove_episode_from_queue(serie_name: str, season: int, episode: int):
    """Remove episode from deduplication tracking (no-op if untracked)."""
    episode_deduplicator.remove_episode(serie_name, season, episode)
def is_episode_in_queue(serie_name: str, season: int, episode: int) -> bool:
    """Check if episode is already in queue/being processed."""
    return episode_deduplicator.is_episode_active(serie_name, season, episode)
@@ -1,33 +0,0 @@
|
||||
@echo off
REM AniWorld FastAPI Server Startup Script for Windows
REM This script activates the conda environment and starts the FastAPI server

echo Starting AniWorld FastAPI Server...

REM Activate conda environment
echo Activating AniWorld conda environment...
call conda activate AniWorld

REM Change to server directory
REM %~dp0 expands to the directory containing this script
cd /d "%~dp0"

REM Set environment variables for development
REM Adds the repo root (two levels up) so Python can import the project
set PYTHONPATH=%PYTHONPATH%;%CD%\..\..

REM Check if .env file exists
if not exist ".env" (
    echo Warning: .env file not found. Using default configuration.
)

REM Install/update FastAPI dependencies if needed
echo Checking FastAPI dependencies...
pip install -r requirements_fastapi.txt

REM Start the FastAPI server with uvicorn
echo Starting FastAPI server on http://localhost:8000
echo API documentation available at http://localhost:8000/docs
echo Press Ctrl+C to stop the server

python fastapi_app.py

pause
@@ -1,32 +0,0 @@
|
||||
#!/bin/bash

# AniWorld FastAPI Server Startup Script
# This script activates the conda environment and starts the FastAPI server

echo "Starting AniWorld FastAPI Server..."

# Activate conda environment
echo "Activating AniWorld conda environment..."
# NOTE(review): `source activate` is the legacy conda activation form;
# confirm it still works with the installed conda version.
source activate AniWorld

# Change to server directory (the directory containing this script)
cd "$(dirname "$0")"

# Set environment variables for development
# Adds the repo root (two levels up) so Python can import the project
export PYTHONPATH="${PYTHONPATH}:$(pwd)/../.."

# Check if .env file exists
if [ ! -f ".env" ]; then
    echo "Warning: .env file not found. Using default configuration."
fi

# Install/update FastAPI dependencies if needed
echo "Checking FastAPI dependencies..."
pip install -r requirements_fastapi.txt

# Start the FastAPI server with uvicorn
echo "Starting FastAPI server on http://localhost:8000"
echo "API documentation available at http://localhost:8000/docs"
echo "Press Ctrl+C to stop the server"

python fastapi_app.py
@@ -1,22 +0,0 @@
|
||||
@echo off
REM AniWorld Web Manager startup script (Flask app).
echo Starting AniWorld Web Manager...
echo.

REM Check if environment variable is set
if "%ANIME_DIRECTORY%"=="" (
    echo WARNING: ANIME_DIRECTORY environment variable not set!
    echo Using default directory: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien
    echo.
    echo To set your own directory, run:
    echo set ANIME_DIRECTORY="\\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien"
    echo.
    pause
)

REM Change to server directory
REM %~dp0 expands to the folder containing this script
cd /d "%~dp0"

REM Start the Flask application
python app.py

pause
@@ -1,21 +0,0 @@
|
||||
#!/bin/bash
# AniWorld Web Manager startup script (Flask app).

echo "Starting AniWorld Web Manager..."
echo

# Check if environment variable is set
if [ -z "$ANIME_DIRECTORY" ]; then
    echo "WARNING: ANIME_DIRECTORY environment variable not set!"
    # NOTE(review): the default shown below is a Windows UNC path; confirm it
    # is meaningful on the platforms this shell script targets.
    echo "Using default directory: \\\\sshfs.r\\ubuntu@192.168.178.43\\media\\serien\\Serien"
    echo
    echo "To set your own directory, run:"
    echo "export ANIME_DIRECTORY=\"/path/to/your/anime/directory\""
    echo
    read -p "Press Enter to continue..."
fi

# Change to server directory (the directory containing this script)
cd "$(dirname "$0")"

# Start the Flask application
python app.py
@@ -1,109 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Simple test script for the AniWorld FastAPI server.
|
||||
"""
|
||||
|
||||
import requests
|
||||
import json
|
||||
|
||||
BASE_URL = "http://localhost:8000"
|
||||
|
||||
def test_health():
    """Exercise GET /health; print the result and return True on HTTP 200."""
    print("Testing /health endpoint...")
    try:
        resp = requests.get(f"{BASE_URL}/health")
        print(f"Status: {resp.status_code}")
        print(f"Response: {json.dumps(resp.json(), indent=2)}")
        return resp.status_code == 200
    except Exception as exc:
        print(f"Error: {exc}")
        return False
def test_root():
    """Exercise GET /; print the result and return True on HTTP 200."""
    print("\nTesting / endpoint...")
    try:
        resp = requests.get(f"{BASE_URL}/")
        print(f"Status: {resp.status_code}")
        print(f"Response: {json.dumps(resp.json(), indent=2)}")
        return resp.status_code == 200
    except Exception as exc:
        print(f"Error: {exc}")
        return False
def test_login():
    """Test the login endpoint.

    POSTs the development password to /auth/login and returns the issued
    token string on success, or None on any failure.
    """
    print("\nTesting /auth/login endpoint...")
    try:
        # Test with correct password
        # NOTE(review): 'admin123' is a hard-coded dev credential — confirm it
        # matches the server's test configuration.
        data = {"password": "admin123"}
        response = requests.post(f"{BASE_URL}/auth/login", json=data)
        print(f"Status: {response.status_code}")
        response_data = response.json()
        print(f"Response: {json.dumps(response_data, indent=2, default=str)}")

        if response.status_code == 200:
            return response_data.get("token")
        return None
    except Exception as e:
        print(f"Error: {e}")
        return None
def test_protected_endpoint(token):
    """Test a protected endpoint with the token.

    Sends GET /auth/verify with a Bearer token; returns True on HTTP 200.
    """
    print("\nTesting /auth/verify endpoint (protected)...")
    try:
        headers = {"Authorization": f"Bearer {token}"}
        response = requests.get(f"{BASE_URL}/auth/verify", headers=headers)
        print(f"Status: {response.status_code}")
        print(f"Response: {json.dumps(response.json(), indent=2, default=str)}")
        return response.status_code == 200
    except Exception as e:
        print(f"Error: {e}")
        return False
def test_anime_search(token):
    """Test the anime search endpoint.

    Sends GET /api/anime/search with a Bearer token and a sample query;
    returns True on HTTP 200.
    """
    print("\nTesting /api/anime/search endpoint (protected)...")
    try:
        headers = {"Authorization": f"Bearer {token}"}
        params = {"query": "naruto", "limit": 5}
        response = requests.get(f"{BASE_URL}/api/anime/search", headers=headers, params=params)
        print(f"Status: {response.status_code}")
        print(f"Response: {json.dumps(response.json(), indent=2)}")
        return response.status_code == 200
    except Exception as e:
        print(f"Error: {e}")
        return False
if __name__ == "__main__":
    # Smoke-test driver: exercises public endpoints first, then logs in and
    # exercises the protected endpoints with the returned token.
    print("AniWorld FastAPI Server Test")
    print("=" * 40)

    # Test public endpoints
    health_ok = test_health()
    root_ok = test_root()

    # Test authentication
    token = test_login()

    if token:
        # Test protected endpoints
        verify_ok = test_protected_endpoint(token)
        search_ok = test_anime_search(token)

        print("\n" + "=" * 40)
        print("Test Results:")
        print(f"Health endpoint: {'✓' if health_ok else '✗'}")
        print(f"Root endpoint: {'✓' if root_ok else '✗'}")
        print(f"Login endpoint: {'✓' if token else '✗'}")
        print(f"Token verification: {'✓' if verify_ok else '✗'}")
        print(f"Anime search: {'✓' if search_ok else '✗'}")

        if all([health_ok, root_ok, token, verify_ok, search_ok]):
            print("\n🎉 All tests passed! The FastAPI server is working correctly.")
        else:
            print("\n❌ Some tests failed. Check the output above for details.")
    else:
        # Without a token the protected-endpoint results cannot be reported.
        print("\n❌ Authentication failed. Cannot test protected endpoints.")
Reference in New Issue
Block a user