removed old stuff
This commit is contained in:
parent 8e885dd40b
commit 539dd80e14
@@ -1,981 +0,0 @@
"""
User Preferences and Settings Persistence Manager

This module provides user preferences management, settings persistence,
and customization options for the AniWorld web interface.
"""

import json
import os
from typing import Dict, Any, Optional
from datetime import datetime
from flask import Blueprint, request, jsonify, session


class UserPreferencesManager:
    """Manages user preferences and settings persistence."""

    def __init__(self, app=None):
        self.app = app
        self.preferences_file = 'data/user_preferences.json'
        self.preferences = {}  # Initialize preferences attribute
        self.default_preferences = {
            'ui': {
                'theme': 'auto',  # 'light', 'dark', 'auto'
                'density': 'comfortable',  # 'compact', 'comfortable', 'spacious'
                'language': 'en',
                'animations_enabled': True,
                'sidebar_collapsed': False,
                'grid_view': True,
                'items_per_page': 20
            },
            'downloads': {
                'auto_download': False,
                'download_quality': 'best',
                'concurrent_downloads': 3,
                'retry_failed': True,
                'notification_sound': True,
                'auto_organize': True
            },
            'notifications': {
                'browser_notifications': True,
                'email_notifications': False,
                'webhook_notifications': False,
                'notification_types': {
                    'download_complete': True,
                    'download_error': True,
                    'series_updated': False,
                    'system_alerts': True
                }
            },
            'keyboard_shortcuts': {
                'enabled': True,
                'shortcuts': {
                    'search': 'ctrl+f',
                    'download': 'ctrl+d',
                    'refresh': 'f5',
                    'select_all': 'ctrl+a',
                    'help': 'f1',
                    'settings': 'ctrl+comma'
                }
            },
            'advanced': {
                'debug_mode': False,
                'performance_mode': False,
                'cache_enabled': True,
                'auto_backup': True,
                'log_level': 'info'
            }
        }

        # Initialize with defaults if no app provided
        if app is None:
            self.preferences = self.default_preferences.copy()
        else:
            self.init_app(app)

    def init_app(self, app):
        """Initialize with Flask app."""
        self.app = app
        self.preferences_file = os.path.join(app.instance_path, 'data/user_preferences.json')

        # Ensure instance path exists
        os.makedirs(app.instance_path, exist_ok=True)

        # Load or create preferences file
        self.load_preferences()

    def load_preferences(self) -> Dict[str, Any]:
        """Load preferences from file."""
        try:
            if os.path.exists(self.preferences_file):
                with open(self.preferences_file, 'r', encoding='utf-8') as f:
                    loaded_prefs = json.load(f)

                # Merge with defaults to ensure all keys exist
                self.preferences = self.merge_preferences(self.default_preferences, loaded_prefs)
            else:
                self.preferences = self.default_preferences.copy()
                self.save_preferences()

        except Exception as e:
            print(f"Error loading preferences: {e}")
            self.preferences = self.default_preferences.copy()

        return self.preferences

    def save_preferences(self) -> bool:
        """Save preferences to file."""
        try:
            with open(self.preferences_file, 'w', encoding='utf-8') as f:
                json.dump(self.preferences, f, indent=2, ensure_ascii=False)
            return True
        except Exception as e:
            print(f"Error saving preferences: {e}")
            return False

    def merge_preferences(self, defaults: Dict, user_prefs: Dict) -> Dict:
        """Recursively merge user preferences with defaults."""
        result = defaults.copy()

        for key, value in user_prefs.items():
            if key in result and isinstance(result[key], dict) and isinstance(value, dict):
                result[key] = self.merge_preferences(result[key], value)
            else:
                result[key] = value

        return result

    def get_preference(self, key: str, default: Any = None) -> Any:
        """Get a specific preference using dot notation (e.g., 'ui.theme')."""
        keys = key.split('.')
        value = self.preferences

        try:
            for k in keys:
                value = value[k]
            return value
        except (KeyError, TypeError):
            return default

    def set_preference(self, key: str, value: Any) -> bool:
        """Set a specific preference using dot notation."""
        keys = key.split('.')
        pref_dict = self.preferences

        try:
            # Navigate to parent dictionary
            for k in keys[:-1]:
                if k not in pref_dict:
                    pref_dict[k] = {}
                pref_dict = pref_dict[k]

            # Set the value
            pref_dict[keys[-1]] = value

            # Save to file
            return self.save_preferences()

        except Exception as e:
            print(f"Error setting preference {key}: {e}")
            return False

    def reset_preferences(self) -> bool:
        """Reset all preferences to defaults."""
        self.preferences = self.default_preferences.copy()
        return self.save_preferences()

    def export_preferences(self) -> str:
        """Export preferences as JSON string."""
        try:
            return json.dumps(self.preferences, indent=2, ensure_ascii=False)
        except Exception as e:
            print(f"Error exporting preferences: {e}")
            return "{}"

    def import_preferences(self, json_data: str) -> bool:
        """Import preferences from JSON string."""
        try:
            imported_prefs = json.loads(json_data)
            self.preferences = self.merge_preferences(self.default_preferences, imported_prefs)
            return self.save_preferences()
        except Exception as e:
            print(f"Error importing preferences: {e}")
            return False

    def get_user_session_preferences(self) -> Dict[str, Any]:
        """Get preferences for current user session."""
        # For now, return global preferences
        # In the future, could be user-specific
        return self.preferences.copy()

    def get_preferences_js(self):
        """Generate JavaScript code for preferences management."""
        return f"""
// AniWorld User Preferences Manager
class UserPreferencesManager {{
    constructor() {{
        this.preferences = {json.dumps(self.preferences)};
        this.defaultPreferences = {json.dumps(self.default_preferences)};
        this.changeListeners = new Map();
        this.init();
    }}

    init() {{
        this.loadFromServer();
        this.applyPreferences();
        this.setupPreferencesUI();
        this.setupAutoSave();
    }}

    async loadFromServer() {{
        try {{
            const response = await fetch('/api/preferences');
            if (response.ok) {{
                this.preferences = await response.json();
                this.applyPreferences();
            }}
        }} catch (error) {{
            console.error('Error loading preferences:', error);
        }}
    }}

    async saveToServer() {{
        try {{
            const response = await fetch('/api/preferences', {{
                method: 'PUT',
                headers: {{
                    'Content-Type': 'application/json'
                }},
                body: JSON.stringify(this.preferences)
            }});

            if (!response.ok) {{
                console.error('Error saving preferences to server');
            }}
        }} catch (error) {{
            console.error('Error saving preferences:', error);
        }}
    }}

    get(key, defaultValue = null) {{
        const keys = key.split('.');
        let value = this.preferences;

        try {{
            for (const k of keys) {{
                value = value[k];
            }}
            return value !== undefined ? value : defaultValue;
        }} catch (error) {{
            return defaultValue;
        }}
    }}

    set(key, value, save = true) {{
        const keys = key.split('.');
        let obj = this.preferences;

        // Navigate to parent object
        for (let i = 0; i < keys.length - 1; i++) {{
            const k = keys[i];
            if (!obj[k] || typeof obj[k] !== 'object') {{
                obj[k] = {{}};
            }}
            obj = obj[k];
        }}

        // Set the value
        const lastKey = keys[keys.length - 1];
        const oldValue = obj[lastKey];
        obj[lastKey] = value;

        // Apply the change immediately
        this.applyPreference(key, value);

        // Notify listeners
        this.notifyChangeListeners(key, value, oldValue);

        // Save to server
        if (save) {{
            this.saveToServer();
        }}

        // Store in localStorage as backup
        localStorage.setItem('aniworld_preferences', JSON.stringify(this.preferences));
    }}

    applyPreferences() {{
        // Apply all preferences
        this.applyTheme();
        this.applyUIPreferences();
        this.applyKeyboardShortcuts();
        this.applyNotificationSettings();
    }}

    applyPreference(key, value) {{
        // Apply individual preference change
        if (key.startsWith('ui.theme')) {{
            this.applyTheme();
        }} else if (key.startsWith('ui.')) {{
            this.applyUIPreferences();
        }} else if (key.startsWith('keyboard_shortcuts.')) {{
            this.applyKeyboardShortcuts();
        }} else if (key.startsWith('notifications.')) {{
            this.applyNotificationSettings();
        }}
    }}

    applyTheme() {{
        const theme = this.get('ui.theme', 'auto');
        const html = document.documentElement;

        html.classList.remove('theme-light', 'theme-dark');

        if (theme === 'auto') {{
            // Use system preference
            const prefersDark = window.matchMedia('(prefers-color-scheme: dark)').matches;
            html.classList.add(prefersDark ? 'theme-dark' : 'theme-light');
        }} else {{
            html.classList.add(`theme-${{theme}}`);
        }}

        // Update Bootstrap theme
        html.setAttribute('data-bs-theme', theme === 'dark' || (theme === 'auto' && window.matchMedia('(prefers-color-scheme: dark)').matches) ? 'dark' : 'light');
    }}

    applyUIPreferences() {{
        const density = this.get('ui.density', 'comfortable');
        const animations = this.get('ui.animations_enabled', true);
        const gridView = this.get('ui.grid_view', true);

        // Apply UI density
        document.body.className = document.body.className.replace(/density-\\w+/g, '');
        document.body.classList.add(`density-${{density}}`);

        // Apply animations
        if (!animations) {{
            document.body.classList.add('no-animations');
        }} else {{
            document.body.classList.remove('no-animations');
        }}

        // Apply view mode
        const viewToggle = document.querySelector('.view-toggle');
        if (viewToggle) {{
            viewToggle.classList.toggle('grid-view', gridView);
            viewToggle.classList.toggle('list-view', !gridView);
        }}
    }}

    applyKeyboardShortcuts() {{
        const enabled = this.get('keyboard_shortcuts.enabled', true);
        const shortcuts = this.get('keyboard_shortcuts.shortcuts', {{}});

        if (window.keyboardManager) {{
            window.keyboardManager.setEnabled(enabled);
            window.keyboardManager.updateShortcuts(shortcuts);
        }}
    }}

    applyNotificationSettings() {{
        const browserNotifications = this.get('notifications.browser_notifications', true);

        // Request notification permission if needed
        if (browserNotifications && 'Notification' in window && Notification.permission === 'default') {{
            Notification.requestPermission();
        }}
    }}

    setupPreferencesUI() {{
        this.createSettingsModal();
        this.bindSettingsEvents();
    }}

    createSettingsModal() {{
        const existingModal = document.getElementById('preferences-modal');
        if (existingModal) return;

        const modal = document.createElement('div');
        modal.id = 'preferences-modal';
        modal.className = 'modal fade';
        modal.innerHTML = `
            <div class="modal-dialog modal-lg">
                <div class="modal-content">
                    <div class="modal-header">
                        <h5 class="modal-title">Preferences</h5>
                        <button type="button" class="btn-close" data-bs-dismiss="modal"></button>
                    </div>
                    <div class="modal-body">
                        <ul class="nav nav-tabs mb-3">
                            <li class="nav-item">
                                <a class="nav-link active" data-bs-toggle="tab" href="#ui-tab">Interface</a>
                            </li>
                            <li class="nav-item">
                                <a class="nav-link" data-bs-toggle="tab" href="#downloads-tab">Downloads</a>
                            </li>
                            <li class="nav-item">
                                <a class="nav-link" data-bs-toggle="tab" href="#notifications-tab">Notifications</a>
                            </li>
                            <li class="nav-item">
                                <a class="nav-link" data-bs-toggle="tab" href="#shortcuts-tab">Shortcuts</a>
                            </li>
                            <li class="nav-item">
                                <a class="nav-link" data-bs-toggle="tab" href="#advanced-tab">Advanced</a>
                            </li>
                        </ul>
                        <div class="tab-content">
                            ${{this.createUITab()}}
                            ${{this.createDownloadsTab()}}
                            ${{this.createNotificationsTab()}}
                            ${{this.createShortcutsTab()}}
                            ${{this.createAdvancedTab()}}
                        </div>
                    </div>
                    <div class="modal-footer">
                        <button type="button" class="btn btn-secondary" data-bs-dismiss="modal">Close</button>
                        <button type="button" class="btn btn-outline-danger" id="reset-preferences">Reset to Defaults</button>
                        <button type="button" class="btn btn-outline-primary" id="export-preferences">Export</button>
                        <button type="button" class="btn btn-outline-primary" id="import-preferences">Import</button>
                        <button type="button" class="btn btn-primary" id="save-preferences">Save</button>
                    </div>
                </div>
            </div>
        `;

        document.body.appendChild(modal);
    }}

    createUITab() {{
        return `
            <div class="tab-pane fade show active" id="ui-tab">
                <div class="row">
                    <div class="col-md-6">
                        <div class="mb-3">
                            <label class="form-label">Theme</label>
                            <select class="form-select" id="pref-theme">
                                <option value="auto">Auto (System)</option>
                                <option value="light">Light</option>
                                <option value="dark">Dark</option>
                            </select>
                        </div>
                        <div class="mb-3">
                            <label class="form-label">UI Density</label>
                            <select class="form-select" id="pref-density">
                                <option value="compact">Compact</option>
                                <option value="comfortable">Comfortable</option>
                                <option value="spacious">Spacious</option>
                            </select>
                        </div>
                        <div class="mb-3">
                            <label class="form-label">Language</label>
                            <select class="form-select" id="pref-language">
                                <option value="en">English</option>
                                <option value="de">German</option>
                                <option value="ja">Japanese</option>
                            </select>
                        </div>
                    </div>
                    <div class="col-md-6">
                        <div class="mb-3">
                            <label class="form-label">Items per page</label>
                            <select class="form-select" id="pref-items-per-page">
                                <option value="10">10</option>
                                <option value="20">20</option>
                                <option value="50">50</option>
                                <option value="100">100</option>
                            </select>
                        </div>
                        <div class="form-check mb-3">
                            <input class="form-check-input" type="checkbox" id="pref-animations">
                            <label class="form-check-label" for="pref-animations">
                                Enable animations
                            </label>
                        </div>
                        <div class="form-check mb-3">
                            <input class="form-check-input" type="checkbox" id="pref-grid-view">
                            <label class="form-check-label" for="pref-grid-view">
                                Default to grid view
                            </label>
                        </div>
                    </div>
                </div>
            </div>
        `;
    }}

    createDownloadsTab() {{
        return `
            <div class="tab-pane fade" id="downloads-tab">
                <div class="row">
                    <div class="col-md-6">
                        <div class="mb-3">
                            <label class="form-label">Download Quality</label>
                            <select class="form-select" id="pref-download-quality">
                                <option value="best">Best Available</option>
                                <option value="1080p">1080p</option>
                                <option value="720p">720p</option>
                                <option value="480p">480p</option>
                            </select>
                        </div>
                        <div class="mb-3">
                            <label class="form-label">Concurrent Downloads</label>
                            <input type="number" class="form-control" id="pref-concurrent-downloads" min="1" max="10">
                        </div>
                    </div>
                    <div class="col-md-6">
                        <div class="form-check mb-3">
                            <input class="form-check-input" type="checkbox" id="pref-auto-download">
                            <label class="form-check-label" for="pref-auto-download">
                                Auto-download new episodes
                            </label>
                        </div>
                        <div class="form-check mb-3">
                            <input class="form-check-input" type="checkbox" id="pref-retry-failed">
                            <label class="form-check-label" for="pref-retry-failed">
                                Retry failed downloads
                            </label>
                        </div>
                        <div class="form-check mb-3">
                            <input class="form-check-input" type="checkbox" id="pref-auto-organize">
                            <label class="form-check-label" for="pref-auto-organize">
                                Auto-organize downloads
                            </label>
                        </div>
                    </div>
                </div>
            </div>
        `;
    }}

    createNotificationsTab() {{
        return `
            <div class="tab-pane fade" id="notifications-tab">
                <div class="row">
                    <div class="col-md-6">
                        <h6>General</h6>
                        <div class="form-check mb-3">
                            <input class="form-check-input" type="checkbox" id="pref-browser-notifications">
                            <label class="form-check-label" for="pref-browser-notifications">
                                Browser notifications
                            </label>
                        </div>
                        <div class="form-check mb-3">
                            <input class="form-check-input" type="checkbox" id="pref-notification-sound">
                            <label class="form-check-label" for="pref-notification-sound">
                                Notification sound
                            </label>
                        </div>
                    </div>
                    <div class="col-md-6">
                        <h6>Notification Types</h6>
                        <div class="form-check mb-2">
                            <input class="form-check-input" type="checkbox" id="pref-notify-download-complete">
                            <label class="form-check-label" for="pref-notify-download-complete">
                                Download complete
                            </label>
                        </div>
                        <div class="form-check mb-2">
                            <input class="form-check-input" type="checkbox" id="pref-notify-download-error">
                            <label class="form-check-label" for="pref-notify-download-error">
                                Download errors
                            </label>
                        </div>
                        <div class="form-check mb-2">
                            <input class="form-check-input" type="checkbox" id="pref-notify-series-updated">
                            <label class="form-check-label" for="pref-notify-series-updated">
                                Series updates
                            </label>
                        </div>
                    </div>
                </div>
            </div>
        `;
    }}

    createShortcutsTab() {{
        return `
            <div class="tab-pane fade" id="shortcuts-tab">
                <div class="form-check mb-3">
                    <input class="form-check-input" type="checkbox" id="pref-shortcuts-enabled">
                    <label class="form-check-label" for="pref-shortcuts-enabled">
                        Enable keyboard shortcuts
                    </label>
                </div>
                <div id="shortcuts-list">
                    <!-- Shortcuts will be populated dynamically -->
                </div>
            </div>
        `;
    }}

    createAdvancedTab() {{
        return `
            <div class="tab-pane fade" id="advanced-tab">
                <div class="row">
                    <div class="col-md-6">
                        <div class="form-check mb-3">
                            <input class="form-check-input" type="checkbox" id="pref-debug-mode">
                            <label class="form-check-label" for="pref-debug-mode">
                                Debug mode
                            </label>
                        </div>
                        <div class="form-check mb-3">
                            <input class="form-check-input" type="checkbox" id="pref-performance-mode">
                            <label class="form-check-label" for="pref-performance-mode">
                                Performance mode
                            </label>
                        </div>
                    </div>
                    <div class="col-md-6">
                        <div class="form-check mb-3">
                            <input class="form-check-input" type="checkbox" id="pref-cache-enabled">
                            <label class="form-check-label" for="pref-cache-enabled">
                                Enable caching
                            </label>
                        </div>
                        <div class="form-check mb-3">
                            <input class="form-check-input" type="checkbox" id="pref-auto-backup">
                            <label class="form-check-label" for="pref-auto-backup">
                                Auto backup settings
                            </label>
                        </div>
                    </div>
                </div>
            </div>
        `;
    }}

    bindSettingsEvents() {{
        // Theme system preference listener
        window.matchMedia('(prefers-color-scheme: dark)').addEventListener('change', () => {{
            if (this.get('ui.theme') === 'auto') {{
                this.applyTheme();
            }}
        }});

        // Settings modal events will be bound when modal is shown
        document.addEventListener('show.bs.modal', (e) => {{
            if (e.target.id === 'preferences-modal') {{
                this.populateSettingsForm();
            }}
        }});
    }}

    populateSettingsForm() {{
        // Populate form fields with current preferences
        const fields = [
            {{ id: 'pref-theme', key: 'ui.theme' }},
            {{ id: 'pref-density', key: 'ui.density' }},
            {{ id: 'pref-language', key: 'ui.language' }},
            {{ id: 'pref-items-per-page', key: 'ui.items_per_page' }},
            {{ id: 'pref-animations', key: 'ui.animations_enabled' }},
            {{ id: 'pref-grid-view', key: 'ui.grid_view' }},
            {{ id: 'pref-download-quality', key: 'downloads.download_quality' }},
            {{ id: 'pref-concurrent-downloads', key: 'downloads.concurrent_downloads' }},
            {{ id: 'pref-auto-download', key: 'downloads.auto_download' }},
            {{ id: 'pref-retry-failed', key: 'downloads.retry_failed' }},
            {{ id: 'pref-auto-organize', key: 'downloads.auto_organize' }},
            {{ id: 'pref-browser-notifications', key: 'notifications.browser_notifications' }},
            {{ id: 'pref-notification-sound', key: 'downloads.notification_sound' }},
            {{ id: 'pref-shortcuts-enabled', key: 'keyboard_shortcuts.enabled' }},
            {{ id: 'pref-debug-mode', key: 'advanced.debug_mode' }},
            {{ id: 'pref-performance-mode', key: 'advanced.performance_mode' }},
            {{ id: 'pref-cache-enabled', key: 'advanced.cache_enabled' }},
            {{ id: 'pref-auto-backup', key: 'advanced.auto_backup' }}
        ];

        fields.forEach(field => {{
            const element = document.getElementById(field.id);
            if (element) {{
                const value = this.get(field.key);
                if (element.type === 'checkbox') {{
                    element.checked = value;
                }} else {{
                    element.value = value;
                }}
            }}
        }});
    }}

    setupAutoSave() {{
        // Auto-save preferences on change
        document.addEventListener('change', (e) => {{
            if (e.target.id && e.target.id.startsWith('pref-')) {{
                this.saveFormValue(e.target);
            }}
        }});
    }}

    saveFormValue(element) {{
        const keyMap = {{
            'pref-theme': 'ui.theme',
            'pref-density': 'ui.density',
            'pref-language': 'ui.language',
            'pref-items-per-page': 'ui.items_per_page',
            'pref-animations': 'ui.animations_enabled',
            'pref-grid-view': 'ui.grid_view',
            'pref-download-quality': 'downloads.download_quality',
            'pref-concurrent-downloads': 'downloads.concurrent_downloads',
            'pref-auto-download': 'downloads.auto_download',
            'pref-retry-failed': 'downloads.retry_failed',
            'pref-auto-organize': 'downloads.auto_organize',
            'pref-browser-notifications': 'notifications.browser_notifications',
            'pref-notification-sound': 'downloads.notification_sound',
            'pref-shortcuts-enabled': 'keyboard_shortcuts.enabled',
            'pref-debug-mode': 'advanced.debug_mode',
            'pref-performance-mode': 'advanced.performance_mode',
            'pref-cache-enabled': 'advanced.cache_enabled',
            'pref-auto-backup': 'advanced.auto_backup'
        }};

        const key = keyMap[element.id];
        if (key) {{
            let value = element.type === 'checkbox' ? element.checked : element.value;
            if (element.type === 'number') {{
                value = parseInt(value, 10);
            }}
            this.set(key, value);
        }}
    }}

    showPreferences() {{
        const modal = document.getElementById('preferences-modal');
        if (modal) {{
            const bsModal = new bootstrap.Modal(modal);
            bsModal.show();
        }}
    }}

    onPreferenceChange(key, callback) {{
        if (!this.changeListeners.has(key)) {{
            this.changeListeners.set(key, []);
        }}
        this.changeListeners.get(key).push(callback);
    }}

    notifyChangeListeners(key, newValue, oldValue) {{
        const listeners = this.changeListeners.get(key) || [];
        listeners.forEach(callback => {{
            try {{
                callback(newValue, oldValue, key);
            }} catch (error) {{
                console.error('Error in preference change listener:', error);
            }}
        }});
    }}

    reset() {{
        this.preferences = JSON.parse(JSON.stringify(this.defaultPreferences));
        this.applyPreferences();
        this.saveToServer();
        localStorage.removeItem('aniworld_preferences');
    }}

    export() {{
        const data = JSON.stringify(this.preferences, null, 2);
        const blob = new Blob([data], {{ type: 'application/json' }});
        const url = URL.createObjectURL(blob);

        const a = document.createElement('a');
        a.href = url;
        a.download = 'aniworld_preferences.json';
        document.body.appendChild(a);
        a.click();
        document.body.removeChild(a);
        URL.revokeObjectURL(url);
    }}

    import(file) {{
        return new Promise((resolve, reject) => {{
            const reader = new FileReader();
            reader.onload = (e) => {{
                try {{
                    const imported = JSON.parse(e.target.result);
                    this.preferences = this.mergePreferences(this.defaultPreferences, imported);
                    this.applyPreferences();
                    this.saveToServer();
                    resolve(true);
                }} catch (error) {{
                    reject(error);
                }}
            }};
            reader.onerror = reject;
            reader.readAsText(file);
        }});
    }}

    mergePreferences(defaults, userPrefs) {{
        const result = {{ ...defaults }};

        for (const [key, value] of Object.entries(userPrefs)) {{
            if (key in result && typeof result[key] === 'object' && typeof value === 'object') {{
                result[key] = this.mergePreferences(result[key], value);
            }} else {{
                result[key] = value;
            }}
        }}

        return result;
    }}
}}

// Initialize preferences when DOM is loaded
document.addEventListener('DOMContentLoaded', () => {{
    window.preferencesManager = new UserPreferencesManager();
}});
"""

    def get_css(self):
        """Generate CSS for user preferences."""
        return """
/* User Preferences Styles */
.density-compact {
    --spacing: 0.5rem;
    --font-size: 0.875rem;
}

.density-comfortable {
    --spacing: 1rem;
    --font-size: 1rem;
}

.density-spacious {
    --spacing: 1.5rem;
    --font-size: 1.125rem;
}

.no-animations * {
    animation-duration: 0s !important;
    transition-duration: 0s !important;
}

.theme-light {
    --bs-body-bg: #ffffff;
    --bs-body-color: #212529;
    --bs-primary: #0d6efd;
}

.theme-dark {
    --bs-body-bg: #121212;
    --bs-body-color: #e9ecef;
    --bs-primary: #0d6efd;
}

#preferences-modal .nav-tabs {
    border-bottom: 1px solid var(--bs-border-color);
}

#preferences-modal .tab-pane {
    min-height: 300px;
}

.preference-group {
    margin-bottom: 2rem;
}

.preference-group h6 {
    color: var(--bs-secondary);
    margin-bottom: 1rem;
}

/* Responsive preferences modal */
@media (max-width: 768px) {
    #preferences-modal .modal-dialog {
        max-width: 95vw;
        margin: 0.5rem;
    }

    #preferences-modal .nav-tabs {
        flex-wrap: wrap;
    }

    #preferences-modal .nav-link {
        font-size: 0.875rem;
        padding: 0.5rem;
    }
}
"""


# Create the preferences API blueprint
preferences_bp = Blueprint('preferences', __name__, url_prefix='/api')

# Global preferences manager instance
preferences_manager = UserPreferencesManager()

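# A minimal sketch of how the blueprint and manager might be wired into an app
# factory; the `create_app` name is an assumption, not part of this module:
#
#     from flask import Flask
#
#     def create_app():
#         app = Flask(__name__, instance_relative_config=True)
#         preferences_manager.init_app(app)        # persists to <instance_path>/data/user_preferences.json
#         app.register_blueprint(preferences_bp)   # exposes the /api/preferences endpoints below
#         return app
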
@preferences_bp.route('/preferences', methods=['GET'])
def get_preferences():
    """Get user preferences."""
    try:
        return jsonify(preferences_manager.get_user_session_preferences())
    except Exception as e:
        return jsonify({'error': str(e)}), 500


@preferences_bp.route('/preferences', methods=['PUT'])
def update_preferences():
    """Update user preferences."""
    try:
        data = request.get_json()
        preferences_manager.preferences = preferences_manager.merge_preferences(
            preferences_manager.default_preferences,
            data
        )

        if preferences_manager.save_preferences():
            return jsonify({'success': True, 'message': 'Preferences updated'})
        else:
            return jsonify({'error': 'Failed to save preferences'}), 500

    except Exception as e:
        return jsonify({'error': str(e)}), 500


@preferences_bp.route('/preferences/<key>', methods=['GET'])
def get_preference(key):
    """Get a specific preference."""
    try:
        value = preferences_manager.get_preference(key)
        return jsonify({'key': key, 'value': value})
    except Exception as e:
        return jsonify({'error': str(e)}), 500


@preferences_bp.route('/preferences/<key>', methods=['PUT'])
def set_preference(key):
    """Set a specific preference."""
    try:
        data = request.get_json()
        value = data.get('value')

        if preferences_manager.set_preference(key, value):
            return jsonify({'success': True, 'key': key, 'value': value})
        else:
            return jsonify({'error': 'Failed to set preference'}), 500

    except Exception as e:
        return jsonify({'error': str(e)}), 500


@preferences_bp.route('/preferences/reset', methods=['POST'])
def reset_preferences():
    """Reset preferences to defaults."""
    try:
        if preferences_manager.reset_preferences():
            return jsonify({'success': True, 'message': 'Preferences reset to defaults'})
        else:
            return jsonify({'error': 'Failed to reset preferences'}), 500
    except Exception as e:
        return jsonify({'error': str(e)}), 500


@preferences_bp.route('/preferences/export', methods=['GET'])
def export_preferences():
    """Export preferences as JSON file."""
    try:
        from flask import Response
        json_data = preferences_manager.export_preferences()

        return Response(
            json_data,
            mimetype='application/json',
            headers={'Content-Disposition': 'attachment; filename=aniworld_preferences.json'}
        )
    except Exception as e:
        return jsonify({'error': str(e)}), 500


@preferences_bp.route('/preferences/import', methods=['POST'])
def import_preferences():
    """Import preferences from JSON file."""
    try:
        if 'file' not in request.files:
            return jsonify({'error': 'No file provided'}), 400

        file = request.files['file']
        if file.filename == '':
            return jsonify({'error': 'No file selected'}), 400

        json_data = file.read().decode('utf-8')

        if preferences_manager.import_preferences(json_data):
            return jsonify({'success': True, 'message': 'Preferences imported successfully'})
        else:
            return jsonify({'error': 'Failed to import preferences'}), 500

    except Exception as e:
        return jsonify({'error': str(e)}), 500

@@ -1,565 +0,0 @@
"""
System Health Monitoring for AniWorld App

This module provides comprehensive system health checks and monitoring
for the anime downloading application.
"""

import psutil
import logging
import threading
import time
from typing import Dict, List, Optional, Any
from datetime import datetime, timedelta
from dataclasses import dataclass
from flask import Blueprint, jsonify, request
import os
import socket
import requests
from auth import require_auth, optional_auth


@dataclass
class HealthMetric:
    """Represents a health metric measurement."""
    name: str
    value: Any
    unit: str
    status: str  # 'healthy', 'warning', 'critical'
    threshold_warning: Optional[float] = None
    threshold_critical: Optional[float] = None
    timestamp: Optional[datetime] = None

    def __post_init__(self):
        if self.timestamp is None:
            self.timestamp = datetime.now()


class SystemHealthMonitor:
    """Monitor system health metrics and performance."""

    def __init__(self, check_interval: int = 60):
        self.check_interval = check_interval
        self.logger = logging.getLogger(__name__)
        self.metrics_history: Dict[str, List[HealthMetric]] = {}
        self.alerts: List[Dict] = []
        self.monitoring_enabled = True
        self.monitor_thread = None
        # Re-entrant lock: _check_alert_conditions re-acquires it while
        # collect_all_metrics already holds it, so a plain Lock would deadlock.
        self._lock = threading.RLock()

        # Configurable thresholds
        self.thresholds = {
            'cpu_percent': {'warning': 80.0, 'critical': 95.0},
            'memory_percent': {'warning': 85.0, 'critical': 95.0},
            'disk_percent': {'warning': 90.0, 'critical': 98.0},
            'disk_free_gb': {'warning': 5.0, 'critical': 1.0},
            'network_latency_ms': {'warning': 1000, 'critical': 5000},
        }

    def start_monitoring(self):
        """Start continuous health monitoring."""
        if self.monitor_thread and self.monitor_thread.is_alive():
            self.logger.warning("Health monitoring already running")
            return

        self.monitoring_enabled = True
        self.monitor_thread = threading.Thread(target=self._monitoring_loop, daemon=True)
        self.monitor_thread.start()
        self.logger.info("System health monitoring started")

    def stop_monitoring(self):
        """Stop health monitoring."""
        self.monitoring_enabled = False
        if self.monitor_thread:
            self.monitor_thread.join(timeout=5)
        self.logger.info("System health monitoring stopped")

    def _monitoring_loop(self):
        """Main monitoring loop."""
        while self.monitoring_enabled:
            try:
                self.collect_all_metrics()
                time.sleep(self.check_interval)
            except Exception as e:
                self.logger.error(f"Error in monitoring loop: {e}", exc_info=True)
                time.sleep(self.check_interval)

    def collect_all_metrics(self):
        """Collect all health metrics."""
        metrics = []

        # System metrics
        metrics.extend(self.get_cpu_metrics())
        metrics.extend(self.get_memory_metrics())
        metrics.extend(self.get_disk_metrics())
        metrics.extend(self.get_network_metrics())

        # Application metrics
        metrics.extend(self.get_process_metrics())

        # Store metrics
        with self._lock:
            for metric in metrics:
                if metric.name not in self.metrics_history:
                    self.metrics_history[metric.name] = []

                self.metrics_history[metric.name].append(metric)

                # Keep only last 24 hours of data
                cutoff = datetime.now() - timedelta(hours=24)
                self.metrics_history[metric.name] = [
                    m for m in self.metrics_history[metric.name]
                    if m.timestamp > cutoff
                ]

                # Check for alerts
                self._check_alert_conditions(metric)

    def get_cpu_metrics(self) -> List[HealthMetric]:
        """Get CPU-related metrics."""
        metrics = []

        # CPU usage percentage
        cpu_percent = psutil.cpu_percent(interval=1)
        status = self._get_status_for_metric('cpu_percent', cpu_percent)
        metrics.append(HealthMetric(
            name='cpu_percent',
            value=cpu_percent,
            unit='%',
            status=status,
            threshold_warning=self.thresholds['cpu_percent']['warning'],
            threshold_critical=self.thresholds['cpu_percent']['critical']
        ))

        # CPU count
        metrics.append(HealthMetric(
            name='cpu_count',
            value=psutil.cpu_count(),
            unit='cores',
            status='healthy'
        ))

        # Load average (Unix-like systems only)
        try:
            load_avg = psutil.getloadavg()
            metrics.append(HealthMetric(
                name='load_average_1m',
                value=load_avg[0],
                unit='',
                status='healthy'
            ))
        except AttributeError:
            pass  # Not available on Windows

        return metrics

    def get_memory_metrics(self) -> List[HealthMetric]:
        """Get memory-related metrics."""
        metrics = []

        # Virtual memory
        memory = psutil.virtual_memory()
        status = self._get_status_for_metric('memory_percent', memory.percent)

        metrics.append(HealthMetric(
            name='memory_percent',
            value=memory.percent,
            unit='%',
            status=status,
            threshold_warning=self.thresholds['memory_percent']['warning'],
            threshold_critical=self.thresholds['memory_percent']['critical']
        ))

        metrics.append(HealthMetric(
            name='memory_total_gb',
            value=round(memory.total / (1024**3), 2),
            unit='GB',
            status='healthy'
        ))

        metrics.append(HealthMetric(
            name='memory_available_gb',
            value=round(memory.available / (1024**3), 2),
            unit='GB',
            status='healthy'
        ))

        # Swap memory
        swap = psutil.swap_memory()
        if swap.total > 0:
            metrics.append(HealthMetric(
                name='swap_percent',
                value=swap.percent,
                unit='%',
                status='warning' if swap.percent > 50 else 'healthy'
            ))

        return metrics

    def get_disk_metrics(self) -> List[HealthMetric]:
        """Get disk-related metrics."""
        metrics = []

        # Check main disk partitions
        partitions = psutil.disk_partitions()
        for partition in partitions:
            if 'cdrom' in partition.opts or partition.fstype == '':
                continue

            try:
                usage = psutil.disk_usage(partition.mountpoint)
                disk_percent = (usage.used / usage.total) * 100
                free_gb = usage.free / (1024**3)

                # Disk usage percentage
                status_percent = self._get_status_for_metric('disk_percent', disk_percent)
                device_name = partition.device.replace(":", "").replace("\\", "")
                metrics.append(HealthMetric(
                    name=f'disk_percent_{device_name}',
                    value=round(disk_percent, 1),
                    unit='%',
                    status=status_percent,
                    threshold_warning=self.thresholds['disk_percent']['warning'],
                    threshold_critical=self.thresholds['disk_percent']['critical']
                ))

                # Free space in GB
                status_free = 'critical' if free_gb < self.thresholds['disk_free_gb']['critical'] \
                    else 'warning' if free_gb < self.thresholds['disk_free_gb']['warning'] \
                    else 'healthy'

                metrics.append(HealthMetric(
                    name=f'disk_free_gb_{device_name}',
                    value=round(free_gb, 2),
                    unit='GB',
                    status=status_free,
                    threshold_warning=self.thresholds['disk_free_gb']['warning'],
                    threshold_critical=self.thresholds['disk_free_gb']['critical']
                ))

            except PermissionError:
                continue

        # Disk I/O
        try:
            disk_io = psutil.disk_io_counters()
            if disk_io:
                metrics.append(HealthMetric(
                    name='disk_read_mb',
                    value=round(disk_io.read_bytes / (1024**2), 2),
                    unit='MB',
                    status='healthy'
                ))

                metrics.append(HealthMetric(
                    name='disk_write_mb',
                    value=round(disk_io.write_bytes / (1024**2), 2),
                    unit='MB',
                    status='healthy'
                ))
        except Exception:
            pass

        return metrics

    def get_network_metrics(self) -> List[HealthMetric]:
        """Get network-related metrics."""
        metrics = []

        # Network I/O
        try:
            net_io = psutil.net_io_counters()
            if net_io:
                metrics.append(HealthMetric(
                    name='network_sent_mb',
                    value=round(net_io.bytes_sent / (1024**2), 2),
                    unit='MB',
                    status='healthy'
                ))

                metrics.append(HealthMetric(
                    name='network_recv_mb',
                    value=round(net_io.bytes_recv / (1024**2), 2),
                    unit='MB',
                    status='healthy'
                ))
        except Exception:
            pass

        # Network connectivity test
        try:
            start_time = time.time()
            socket.create_connection(("8.8.8.8", 53), timeout=5)
            latency = (time.time() - start_time) * 1000  # Convert to ms

            status = self._get_status_for_metric('network_latency_ms', latency)
            metrics.append(HealthMetric(
                name='network_latency_ms',
                value=round(latency, 2),
                unit='ms',
                status=status,
                threshold_warning=self.thresholds['network_latency_ms']['warning'],
                threshold_critical=self.thresholds['network_latency_ms']['critical']
            ))
        except Exception:
            metrics.append(HealthMetric(
                name='network_latency_ms',
                value=-1,
                unit='ms',
                status='critical'
            ))

        return metrics

    def get_process_metrics(self) -> List[HealthMetric]:
        """Get process-specific metrics."""
        metrics = []

        try:
            # Current process metrics
            process = psutil.Process()

            # Process CPU usage
            cpu_percent = process.cpu_percent()
            metrics.append(HealthMetric(
                name='process_cpu_percent',
                value=cpu_percent,
                unit='%',
                status='warning' if cpu_percent > 50 else 'healthy'
            ))

            # Process memory usage
            memory_info = process.memory_info()
            memory_mb = memory_info.rss / (1024**2)
            metrics.append(HealthMetric(
                name='process_memory_mb',
                value=round(memory_mb, 2),
                unit='MB',
                status='warning' if memory_mb > 1024 else 'healthy'  # Warning if > 1GB
            ))

            # Process threads
            threads = process.num_threads()
            metrics.append(HealthMetric(
                name='process_threads',
                value=threads,
                unit='',
                status='warning' if threads > 50 else 'healthy'
            ))

            # Process connections
            try:
                connections = len(process.connections())
                metrics.append(HealthMetric(
                    name='process_connections',
                    value=connections,
                    unit='',
                    status='warning' if connections > 100 else 'healthy'
                ))
            except psutil.AccessDenied:
                pass

        except Exception as e:
            self.logger.error(f"Failed to get process metrics: {e}")

        return metrics

    def _get_status_for_metric(self, metric_name: str, value: float) -> str:
        """Determine status based on thresholds."""
        if metric_name in self.thresholds:
            thresholds = self.thresholds[metric_name]
            if value >= thresholds['critical']:
                return 'critical'
            elif value >= thresholds['warning']:
                return 'warning'
        return 'healthy'

    def _check_alert_conditions(self, metric: HealthMetric):
        """Check if metric triggers an alert."""
        if metric.status in ['critical', 'warning']:
            alert = {
                'timestamp': metric.timestamp.isoformat(),
                'metric_name': metric.name,
                'value': metric.value,
                'unit': metric.unit,
                'status': metric.status,
                'message': f"{metric.name} is {metric.status}: {metric.value}{metric.unit}"
            }

            with self._lock:
                self.alerts.append(alert)

                # Keep only last 100 alerts
                if len(self.alerts) > 100:
                    self.alerts = self.alerts[-100:]

    def get_current_health_status(self) -> Dict[str, Any]:
        """Get current system health status."""
        with self._lock:
            latest_metrics = {}
            for name, history in self.metrics_history.items():
                if history:
                    latest_metrics[name] = {
                        'value': history[-1].value,
                        'unit': history[-1].unit,
                        'status': history[-1].status,
                        'timestamp': history[-1].timestamp.isoformat()
                    }

            # Calculate overall health status
            statuses = [metric['status'] for metric in latest_metrics.values()]
            if 'critical' in statuses:
                overall_status = 'critical'
            elif 'warning' in statuses:
                overall_status = 'warning'
            else:
                overall_status = 'healthy'

            return {
                'overall_status': overall_status,
                'metrics': latest_metrics,
                'recent_alerts': self.alerts[-10:],  # Last 10 alerts
                'timestamp': datetime.now().isoformat()
            }

    def get_metric_history(self, metric_name: str, hours: int = 24) -> List[Dict]:
        """Get history for a specific metric."""
        with self._lock:
            if metric_name not in self.metrics_history:
                return []

            cutoff = datetime.now() - timedelta(hours=hours)
            history = [
                {
                    'value': m.value,
                    'status': m.status,
                    'timestamp': m.timestamp.isoformat()
                }
                for m in self.metrics_history[metric_name]
                if m.timestamp > cutoff
            ]

            return history


# Blueprint for health endpoints
health_bp = Blueprint('health', __name__)

# Global health monitor instance
health_monitor = SystemHealthMonitor()

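# A minimal sketch of starting the monitor alongside the app and tearing it
# down on exit; the `app` object and the atexit wiring are assumptions, not
# part of this module:
#
#     import atexit
#
#     app.register_blueprint(health_bp)
#     init_health_monitoring()                    # starts the daemon monitoring thread
#     atexit.register(cleanup_health_monitoring)  # stops and joins the thread on shutdown
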
@health_bp.route('/api/health/status')
@optional_auth
def get_health_status():
    """Get current system health status."""
    try:
        status = health_monitor.get_current_health_status()
        return jsonify({
            'status': 'success',
            'data': status
        })
    except Exception as e:
        return jsonify({
            'status': 'error',
            'message': str(e)
        }), 500


@health_bp.route('/api/health/metrics/<metric_name>')
@optional_auth
def get_metric_history(metric_name):
    """Get history for a specific metric."""
    try:
        hours = int(request.args.get('hours', 24))
        history = health_monitor.get_metric_history(metric_name, hours)

        return jsonify({
            'status': 'success',
            'data': {
                'metric_name': metric_name,
                'history': history
            }
        })
    except Exception as e:
        return jsonify({
            'status': 'error',
            'message': str(e)
        }), 500


@health_bp.route('/api/health/alerts')
@optional_auth
def get_health_alerts():
    """Get recent health alerts."""
    try:
        with health_monitor._lock:
            alerts = health_monitor.alerts[-50:]  # Last 50 alerts

        return jsonify({
            'status': 'success',
            'data': {
                'alerts': alerts,
                'count': len(alerts)
            }
        })
    except Exception as e:
        return jsonify({
            'status': 'error',
            'message': str(e)
        }), 500


@health_bp.route('/api/health/start', methods=['POST'])
@require_auth
def start_health_monitoring():
    """Start health monitoring."""
    try:
        health_monitor.start_monitoring()
        return jsonify({
            'status': 'success',
            'message': 'Health monitoring started'
        })
    except Exception as e:
        return jsonify({
            'status': 'error',
            'message': str(e)
        }), 500


@health_bp.route('/api/health/stop', methods=['POST'])
@require_auth
def stop_health_monitoring():
    """Stop health monitoring."""
    try:
        health_monitor.stop_monitoring()
        return jsonify({
            'status': 'success',
            'message': 'Health monitoring stopped'
        })
    except Exception as e:
        return jsonify({
            'status': 'error',
            'message': str(e)
        }), 500


def init_health_monitoring():
    """Initialize and start health monitoring."""
    health_monitor.start_monitoring()


def cleanup_health_monitoring():
    """Clean up health monitoring resources."""
    health_monitor.stop_monitoring()


# Export main components
__all__ = [
    'SystemHealthMonitor',
    'HealthMetric',
    'health_bp',
    'health_monitor',
    'init_health_monitoring',
    'cleanup_health_monitoring'
]

@@ -1,303 +0,0 @@
from flask import Blueprint, render_template, request, jsonify
from web.controllers.auth_controller import optional_auth
import threading
import time
from datetime import datetime, timedelta

# Create blueprint for download queue management
download_queue_bp = Blueprint('download_queue', __name__)

# Global download queue state
download_queue_state = {
    'active_downloads': [],
    'pending_queue': [],
    'completed_downloads': [],
    'failed_downloads': [],
    'queue_lock': threading.Lock(),
    'statistics': {
        'total_items': 0,
        'completed_items': 0,
        'failed_items': 0,
        'estimated_time_remaining': None,
        'current_speed': '0 MB/s',
        'average_speed': '0 MB/s'
    }
}

@download_queue_bp.route('/queue')
|
|
||||||
@optional_auth
|
|
||||||
def queue_page():
|
|
||||||
"""Download queue management page."""
|
|
||||||
return render_template('queue.html')
|
|
||||||
|
|
||||||
@download_queue_bp.route('/api/queue/status')
|
|
||||||
@optional_auth
|
|
||||||
def get_queue_status():
|
|
||||||
"""Get detailed download queue status."""
|
|
||||||
with download_queue_state['queue_lock']:
|
|
||||||
# Calculate ETA
|
|
||||||
eta = None
|
|
||||||
if download_queue_state['active_downloads']:
|
|
||||||
active_download = download_queue_state['active_downloads'][0]
|
|
||||||
if 'progress' in active_download and active_download['progress'].get('speed_mbps', 0) > 0:
|
|
||||||
remaining_items = len(download_queue_state['pending_queue'])
|
|
||||||
avg_speed = active_download['progress']['speed_mbps']
|
|
||||||
# Rough estimation: assume 500MB per episode
|
|
||||||
estimated_mb_remaining = remaining_items * 500
|
|
||||||
eta_seconds = estimated_mb_remaining / avg_speed if avg_speed > 0 else None
|
|
||||||
if eta_seconds:
|
|
||||||
eta = datetime.now() + timedelta(seconds=eta_seconds)
|
|
||||||
|
|
||||||
return jsonify({
|
|
||||||
'active_downloads': download_queue_state['active_downloads'],
|
|
||||||
'pending_queue': download_queue_state['pending_queue'],
|
|
||||||
'completed_downloads': download_queue_state['completed_downloads'][-10:], # Last 10
|
|
||||||
'failed_downloads': download_queue_state['failed_downloads'][-10:], # Last 10
|
|
||||||
'statistics': {
|
|
||||||
**download_queue_state['statistics'],
|
|
||||||
'eta': eta.isoformat() if eta else None
|
|
||||||
}
|
|
||||||
})
|
|
||||||
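The ETA above is a coarse heuristic: every pending episode is assumed to weigh roughly 500 MB, and the current speed of the first active download stands in for overall throughput. A worked example with illustrative numbers only:

# Worked example of the ETA heuristic above (illustrative numbers only).
remaining_items = 10                              # episodes still queued
avg_speed = 5.0                                   # MB/s reported by the active download
estimated_mb_remaining = remaining_items * 500    # 5000 MB
eta_seconds = estimated_mb_remaining / avg_speed  # 1000 s, i.e. roughly 17 minutes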
@download_queue_bp.route('/api/queue/clear', methods=['POST'])
@optional_auth
def clear_queue():
    """Clear completed and failed downloads from queue."""
    try:
        data = request.get_json() or {}
        queue_type = data.get('type', 'completed')  # 'completed', 'failed', or 'all'

        with download_queue_state['queue_lock']:
            if queue_type == 'completed' or queue_type == 'all':
                download_queue_state['completed_downloads'].clear()

            if queue_type == 'failed' or queue_type == 'all':
                download_queue_state['failed_downloads'].clear()

        return jsonify({
            'status': 'success',
            'message': f'Cleared {queue_type} downloads'
        })

    except Exception as e:
        return jsonify({
            'status': 'error',
            'message': str(e)
        }), 500


@download_queue_bp.route('/api/queue/retry', methods=['POST'])
@optional_auth
def retry_failed_download():
    """Retry a failed download."""
    try:
        data = request.get_json()
        download_id = data.get('id')

        if not download_id:
            return jsonify({
                'status': 'error',
                'message': 'Download ID is required'
            }), 400

        with download_queue_state['queue_lock']:
            # Find failed download
            failed_download = None
            for i, download in enumerate(download_queue_state['failed_downloads']):
                if download['id'] == download_id:
                    failed_download = download_queue_state['failed_downloads'].pop(i)
                    break

            if not failed_download:
                return jsonify({
                    'status': 'error',
                    'message': 'Failed download not found'
                }), 404

            # Reset download status and add back to queue
            failed_download['status'] = 'queued'
            failed_download['error'] = None
            failed_download['retry_count'] = failed_download.get('retry_count', 0) + 1
            download_queue_state['pending_queue'].append(failed_download)

        return jsonify({
            'status': 'success',
            'message': 'Download added back to queue'
        })

    except Exception as e:
        return jsonify({
            'status': 'error',
            'message': str(e)
        }), 500


@download_queue_bp.route('/api/queue/remove', methods=['POST'])
@optional_auth
def remove_from_queue():
    """Remove an item from the pending queue."""
    try:
        data = request.get_json()
        download_id = data.get('id')

        if not download_id:
            return jsonify({
                'status': 'error',
                'message': 'Download ID is required'
            }), 400

        with download_queue_state['queue_lock']:
            # Find and remove from pending queue
            removed = False
            for i, download in enumerate(download_queue_state['pending_queue']):
                if download['id'] == download_id:
                    download_queue_state['pending_queue'].pop(i)
                    removed = True
                    break

            if not removed:
                return jsonify({
                    'status': 'error',
                    'message': 'Download not found in queue'
                }), 404

        return jsonify({
            'status': 'success',
            'message': 'Download removed from queue'
        })

    except Exception as e:
        return jsonify({
            'status': 'error',
            'message': str(e)
        }), 500


@download_queue_bp.route('/api/queue/reorder', methods=['POST'])
@optional_auth
def reorder_queue():
    """Reorder items in the pending queue."""
    try:
        data = request.get_json()
        new_order = data.get('order')  # Array of download IDs in new order

        if not new_order or not isinstance(new_order, list):
            return jsonify({
                'status': 'error',
                'message': 'Valid order array is required'
            }), 400

        with download_queue_state['queue_lock']:
            # Create new queue based on the provided order
            old_queue = download_queue_state['pending_queue'].copy()
            new_queue = []

            # Add items in the specified order
            for download_id in new_order:
                for download in old_queue:
                    if download['id'] == download_id:
                        new_queue.append(download)
                        break

            # Add any remaining items that weren't in the new order
            for download in old_queue:
                if download not in new_queue:
                    new_queue.append(download)

            download_queue_state['pending_queue'] = new_queue

        return jsonify({
            'status': 'success',
            'message': 'Queue reordered successfully'
        })

    except Exception as e:
        return jsonify({
            'status': 'error',
            'message': str(e)
        }), 500


# Helper functions for queue management
def add_to_download_queue(serie_name, episode_info, priority='normal'):
    """Add a download to the queue."""
    import uuid

    download_item = {
        'id': str(uuid.uuid4()),
        'serie_name': serie_name,
        'episode': episode_info,
        'status': 'queued',
        'priority': priority,
        'added_at': datetime.now().isoformat(),
        'started_at': None,
        'completed_at': None,
        'error': None,
        'retry_count': 0,
        'progress': {
            'percent': 0,
            'downloaded_mb': 0,
            'total_mb': 0,
            'speed_mbps': 0,
            'eta_seconds': None
        }
    }

    with download_queue_state['queue_lock']:
        # Insert based on priority
        if priority == 'high':
            download_queue_state['pending_queue'].insert(0, download_item)
        else:
            download_queue_state['pending_queue'].append(download_item)

        download_queue_state['statistics']['total_items'] += 1

    return download_item['id']


def update_download_progress(download_id, progress_data):
    """Update progress for an active download."""
    with download_queue_state['queue_lock']:
        for download in download_queue_state['active_downloads']:
            if download['id'] == download_id:
                download['progress'].update(progress_data)

                # Update global statistics
                if 'speed_mbps' in progress_data:
                    download_queue_state['statistics']['current_speed'] = f"{progress_data['speed_mbps']:.1f} MB/s"

                break


def move_download_to_completed(download_id, success=True, error=None):
    """Move download from active to completed/failed."""
    with download_queue_state['queue_lock']:
        download = None
        for i, item in enumerate(download_queue_state['active_downloads']):
            if item['id'] == download_id:
                download = download_queue_state['active_downloads'].pop(i)
                break

        if download:
            download['completed_at'] = datetime.now().isoformat()

            if success:
                download['status'] = 'completed'
                download['progress']['percent'] = 100
                download_queue_state['completed_downloads'].append(download)
                download_queue_state['statistics']['completed_items'] += 1
            else:
                download['status'] = 'failed'
                download['error'] = error
                download_queue_state['failed_downloads'].append(download)
                download_queue_state['statistics']['failed_items'] += 1


def start_next_download():
    """Move next queued download to active state."""
    with download_queue_state['queue_lock']:
        if download_queue_state['pending_queue'] and len(download_queue_state['active_downloads']) < 3:  # Max 3 concurrent
            download = download_queue_state['pending_queue'].pop(0)
            download['status'] = 'downloading'
            download['started_at'] = datetime.now().isoformat()
            download_queue_state['active_downloads'].append(download)
            return download
    return None


def get_queue_statistics():
    """Get current queue statistics."""
    with download_queue_state['queue_lock']:
        return download_queue_state['statistics'].copy()
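The helper functions above are intended to be driven by a download worker rather than called from the HTTP routes. A minimal sketch of such a loop follows, assuming a perform_download(item, progress_cb) function exists elsewhere in the project; that function and the polling interval are assumptions.

# Hypothetical worker loop; perform_download() is a placeholder, not part of this module.
import time


def download_worker():
    while True:
        item = start_next_download()   # None if the queue is empty or 3 downloads are active
        if item is None:
            time.sleep(5)
            continue
        try:
            perform_download(
                item,
                progress_cb=lambda data: update_download_progress(item['id'], data),
            )
            move_download_to_completed(item['id'], success=True)
        except Exception as exc:
            move_download_to_completed(item['id'], success=False, error=str(exc))

Note that add_to_download_queue() returns the generated UUID, so callers can correlate later status updates with the item they enqueued.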
@ -1,252 +0,0 @@
import threading
import time
import schedule
from datetime import datetime, timedelta
from typing import Optional, Callable, Dict, Any
import logging
from shared.utils.process_utils import (with_process_lock, RESCAN_LOCK,
                                         ProcessLockError, is_process_running)

logger = logging.getLogger(__name__)


class ScheduledOperations:
    """Handle scheduled operations like automatic rescans and downloads."""

    def __init__(self, config_manager, socketio=None):
        self.config = config_manager
        self.socketio = socketio
        self.scheduler_thread = None
        self.running = False
        self.rescan_callback: Optional[Callable] = None
        self.download_callback: Optional[Callable] = None
        self.last_scheduled_rescan: Optional[datetime] = None

        # Load scheduled rescan settings
        self.scheduled_rescan_enabled = getattr(self.config, 'scheduled_rescan_enabled', False)
        self.scheduled_rescan_time = getattr(self.config, 'scheduled_rescan_time', '03:00')
        self.auto_download_after_rescan = getattr(self.config, 'auto_download_after_rescan', False)

    def set_rescan_callback(self, callback: Callable):
        """Set callback function for performing rescan operations."""
        self.rescan_callback = callback

    def set_download_callback(self, callback: Callable):
        """Set callback function for performing download operations."""
        self.download_callback = callback

    def start_scheduler(self):
        """Start the background scheduler thread."""
        if self.running:
            logger.warning("Scheduler is already running")
            return

        self.running = True
        self.scheduler_thread = threading.Thread(target=self._scheduler_loop, daemon=True)
        self.scheduler_thread.start()
        logger.info("Scheduled operations started")

    def stop_scheduler(self):
        """Stop the background scheduler."""
        self.running = False
        schedule.clear()
        if self.scheduler_thread and self.scheduler_thread.is_alive():
            self.scheduler_thread.join(timeout=5)
        logger.info("Scheduled operations stopped")

    def _scheduler_loop(self):
        """Main scheduler loop that runs in background thread."""
        self._setup_scheduled_jobs()

        while self.running:
            try:
                schedule.run_pending()
                time.sleep(60)  # Check every minute
            except Exception as e:
                logger.error(f"Scheduler error: {e}")
                time.sleep(60)

    def _setup_scheduled_jobs(self):
        """Setup all scheduled jobs based on configuration."""
        schedule.clear()

        if self.scheduled_rescan_enabled and self.scheduled_rescan_time:
            try:
                schedule.every().day.at(self.scheduled_rescan_time).do(self._perform_scheduled_rescan)
                logger.info(f"Scheduled daily rescan at {self.scheduled_rescan_time}")
            except Exception as e:
                logger.error(f"Error setting up scheduled rescan: {e}")

    def _perform_scheduled_rescan(self):
        """Perform the scheduled rescan operation."""
        try:
            logger.info("Starting scheduled rescan...")

            # Emit scheduled rescan started event
            if self.socketio:
                self.socketio.emit('scheduled_rescan_started')

            # Check if rescan is already running
            if is_process_running(RESCAN_LOCK):
                logger.warning("Rescan is already running, skipping scheduled rescan")
                if self.socketio:
                    self.socketio.emit('scheduled_rescan_skipped', {
                        'reason': 'Rescan already in progress'
                    })
                return

            # Perform the rescan using process lock
            @with_process_lock(RESCAN_LOCK, timeout_minutes=180)
            def perform_rescan():
                self.last_scheduled_rescan = datetime.now()

                if self.rescan_callback:
                    result = self.rescan_callback()
                    logger.info("Scheduled rescan completed successfully")

                    if self.socketio:
                        self.socketio.emit('scheduled_rescan_completed', {
                            'timestamp': self.last_scheduled_rescan.isoformat(),
                            'result': result
                        })

                    # Auto-start download if configured
                    if self.auto_download_after_rescan and self.download_callback:
                        logger.info("Starting auto-download after scheduled rescan")
                        threading.Thread(
                            target=self._perform_auto_download,
                            daemon=True
                        ).start()
                else:
                    logger.warning("No rescan callback configured")

            perform_rescan(_locked_by='scheduled_operation')

        except ProcessLockError:
            logger.warning("Could not acquire rescan lock for scheduled operation")
            if self.socketio:
                self.socketio.emit('scheduled_rescan_error', {
                    'error': 'Could not acquire rescan lock'
                })
        except Exception as e:
            logger.error(f"Scheduled rescan failed: {e}")
            if self.socketio:
                self.socketio.emit('scheduled_rescan_error', {
                    'error': str(e)
                })

    def _perform_auto_download(self):
        """Perform automatic download after scheduled rescan."""
        try:
            # Wait a bit after rescan to let UI update
            time.sleep(10)

            if self.download_callback:
                # Find series with missing episodes and start download
                logger.info("Starting auto-download of missing episodes")
                result = self.download_callback()

                if self.socketio:
                    self.socketio.emit('auto_download_started', {
                        'timestamp': datetime.now().isoformat(),
                        'result': result
                    })
            else:
                logger.warning("No download callback configured for auto-download")

        except Exception as e:
            logger.error(f"Auto-download after scheduled rescan failed: {e}")
            if self.socketio:
                self.socketio.emit('auto_download_error', {
                    'error': str(e)
                })

    def update_scheduled_rescan_config(self, enabled: bool, time_str: str, auto_download: bool = False):
        """Update scheduled rescan configuration."""
        try:
            # Validate time format
            if enabled and time_str:
                datetime.strptime(time_str, '%H:%M')

            # Update configuration
            self.scheduled_rescan_enabled = enabled
            self.scheduled_rescan_time = time_str
            self.auto_download_after_rescan = auto_download

            # Save to config
            self.config.scheduled_rescan_enabled = enabled
            self.config.scheduled_rescan_time = time_str
            self.config.auto_download_after_rescan = auto_download
            self.config.save_config()

            # Restart scheduler with new settings
            if self.running:
                self._setup_scheduled_jobs()

            logger.info(f"Updated scheduled rescan config: enabled={enabled}, time={time_str}, auto_download={auto_download}")
            return True

        except ValueError:
            logger.error(f"Invalid time format: {time_str}")
            raise ValueError("Invalid time format. Use HH:MM format.")
        except Exception as e:
            logger.error(f"Error updating scheduled rescan config: {e}")
            raise

    def get_scheduled_rescan_config(self) -> Dict[str, Any]:
        """Get current scheduled rescan configuration."""
        next_run = None
        if self.scheduled_rescan_enabled and self.scheduled_rescan_time:
            try:
                # Calculate next run time
                now = datetime.now()
                today_run = datetime.strptime(f"{now.strftime('%Y-%m-%d')} {self.scheduled_rescan_time}", '%Y-%m-%d %H:%M')

                if now > today_run:
                    # Next run is tomorrow
                    next_run = today_run + timedelta(days=1)
                else:
                    # Next run is today
                    next_run = today_run

            except Exception as e:
                logger.error(f"Error calculating next run time: {e}")

        return {
            'enabled': self.scheduled_rescan_enabled,
            'time': self.scheduled_rescan_time,
            'auto_download_after_rescan': self.auto_download_after_rescan,
            'next_run': next_run.isoformat() if next_run else None,
            'last_run': self.last_scheduled_rescan.isoformat() if self.last_scheduled_rescan else None,
            'is_running': self.running
        }

    def trigger_manual_scheduled_rescan(self):
        """Manually trigger a scheduled rescan (for testing purposes)."""
        logger.info("Manually triggering scheduled rescan")
        threading.Thread(target=self._perform_scheduled_rescan, daemon=True).start()

    def get_next_scheduled_jobs(self) -> list:
        """Get list of all scheduled jobs with their next run times."""
        jobs = []
        for job in schedule.jobs:
            jobs.append({
                'job_func': job.job_func.__name__ if hasattr(job.job_func, '__name__') else str(job.job_func),
                'next_run': job.next_run.isoformat() if job.next_run else None,
                'interval': str(job.interval),
                'unit': job.unit
            })
        return jobs


# Global scheduler instance
scheduled_operations = None


def init_scheduler(config_manager, socketio=None):
    """Initialize the global scheduler."""
    global scheduled_operations
    scheduled_operations = ScheduledOperations(config_manager, socketio)
    return scheduled_operations


def get_scheduler():
    """Get the global scheduler instance."""
    return scheduled_operations
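A rough sketch of how the global scheduler might have been wired at application startup; config_manager, socketio, rescan_library() and download_missing_episodes() are placeholders for whatever the surrounding application provides, not names from this module.

# Hypothetical startup wiring under the assumptions stated above.
scheduler = init_scheduler(config_manager, socketio=socketio)
scheduler.set_rescan_callback(rescan_library)
scheduler.set_download_callback(download_missing_episodes)
scheduler.start_scheduler()

# Later, e.g. from a settings endpoint: enable a nightly rescan at 03:30
# that also triggers downloads of missing episodes afterwards.
scheduler.update_scheduled_rescan_config(True, '03:30', auto_download=True)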
File diff suppressed because it is too large
@ -1,268 +0,0 @@
"""
Setup service for detecting and managing application setup state.

This service determines if the application is properly configured and set up,
following the application flow pattern: setup → auth → main application.
"""

import json
import logging
import sqlite3
from datetime import datetime
from pathlib import Path
from typing import Any, Dict, List, Optional

logger = logging.getLogger(__name__)


class SetupService:
    """Service for managing application setup detection and configuration."""

    def __init__(self, config_path: str = "data/config.json", db_path: str = "data/aniworld.db"):
        """Initialize the setup service with configuration and database paths."""
        self.config_path = Path(config_path)
        self.db_path = Path(db_path)
        self._config_cache: Optional[Dict[str, Any]] = None

    def is_setup_complete(self) -> bool:
        """
        Check if the application setup is complete.

        Setup is considered complete if:
        1. Configuration file exists and is valid
        2. Database exists and is accessible
        3. Master password is configured
        4. Setup completion flag is set (if present)

        Returns:
            bool: True if setup is complete, False otherwise
        """
        try:
            # Check if configuration file exists and is valid
            if not self._is_config_valid():
                logger.info("Setup incomplete: Configuration file is missing or invalid")
                return False

            # Check if database exists and is accessible
            if not self._is_database_accessible():
                logger.info("Setup incomplete: Database is not accessible")
                return False

            # Check if master password is configured
            if not self._is_master_password_configured():
                logger.info("Setup incomplete: Master password is not configured")
                return False

            # Check for explicit setup completion flag
            config = self.get_config()
            if config and config.get("setup", {}).get("completed") is False:
                logger.info("Setup incomplete: Setup completion flag is False")
                return False

            logger.debug("Setup validation complete: All checks passed")
            return True

        except Exception as e:
            logger.error(f"Error checking setup completion: {e}")
            return False

    def _is_config_valid(self) -> bool:
        """Check if the configuration file exists and contains valid JSON."""
        try:
            if not self.config_path.exists():
                return False

            config = self.get_config()
            return config is not None and isinstance(config, dict)

        except Exception as e:
            logger.error(f"Configuration validation error: {e}")
            return False

    def _is_database_accessible(self) -> bool:
        """Check if the database exists and is accessible."""
        try:
            if not self.db_path.exists():
                return False

            # Try to connect and perform a simple query
            with sqlite3.connect(str(self.db_path)) as conn:
                cursor = conn.cursor()
                cursor.execute("SELECT name FROM sqlite_master WHERE type='table' LIMIT 1")
                return True

        except Exception as e:
            logger.error(f"Database accessibility check failed: {e}")
            return False

    def _is_master_password_configured(self) -> bool:
        """Check if master password is properly configured."""
        try:
            config = self.get_config()
            if not config:
                return False

            security_config = config.get("security", {})

            # Check if password hash exists
            password_hash = security_config.get("master_password_hash")
            salt = security_config.get("salt")

            return bool(password_hash and salt and len(password_hash) > 0 and len(salt) > 0)

        except Exception as e:
            logger.error(f"Master password configuration check failed: {e}")
            return False

    def get_config(self, force_reload: bool = False) -> Optional[Dict[str, Any]]:
        """
        Get the configuration data from the config file.

        Args:
            force_reload: If True, reload config from file even if cached

        Returns:
            dict: Configuration data or None if not accessible
        """
        try:
            if self._config_cache is None or force_reload:
                if not self.config_path.exists():
                    return None

                with open(self.config_path, 'r', encoding='utf-8') as f:
                    self._config_cache = json.load(f)

            return self._config_cache

        except Exception as e:
            logger.error(f"Error loading configuration: {e}")
            return None

    def mark_setup_complete(self, config_updates: Optional[Dict[str, Any]] = None) -> bool:
        """
        Mark the setup as completed and optionally update configuration.

        Args:
            config_updates: Additional configuration updates to apply

        Returns:
            bool: True if successful, False otherwise
        """
        try:
            config = self.get_config() or {}

            # Update configuration with any provided updates
            if config_updates:
                config.update(config_updates)

            # Set setup completion flag
            if "setup" not in config:
                config["setup"] = {}
            config["setup"]["completed"] = True
            config["setup"]["completed_at"] = str(datetime.utcnow())

            # Save updated configuration
            return self._save_config(config)

        except Exception as e:
            logger.error(f"Error marking setup as complete: {e}")
            return False

    def reset_setup(self) -> bool:
        """
        Reset the setup completion status (for development/testing).

        Returns:
            bool: True if successful, False otherwise
        """
        try:
            config = self.get_config()
            if not config:
                return False

            # Remove or set setup completion flag to false
            if "setup" in config:
                config["setup"]["completed"] = False

            return self._save_config(config)

        except Exception as e:
            logger.error(f"Error resetting setup: {e}")
            return False

    def _save_config(self, config: Dict[str, Any]) -> bool:
        """Save configuration to file."""
        try:
            # Ensure directory exists
            self.config_path.parent.mkdir(parents=True, exist_ok=True)

            # Save configuration
            with open(self.config_path, 'w', encoding='utf-8') as f:
                json.dump(config, f, indent=4, ensure_ascii=False)

            # Clear cache to force reload on next access
            self._config_cache = None

            logger.info(f"Configuration saved to {self.config_path}")
            return True

        except Exception as e:
            logger.error(f"Error saving configuration: {e}")
            return False

    def get_setup_requirements(self) -> Dict[str, bool]:
        """
        Get detailed breakdown of setup requirements and their status.

        Returns:
            dict: Dictionary with requirement names and their completion status
        """
        config = self.get_config()
        return {
            "config_file_exists": self.config_path.exists(),
            "config_file_valid": self._is_config_valid(),
            "database_exists": self.db_path.exists(),
            "database_accessible": self._is_database_accessible(),
            "master_password_configured": self._is_master_password_configured(),
            "setup_marked_complete": bool(config and config.get("setup", {}).get("completed", True))
        }

    def get_missing_requirements(self) -> List[str]:
        """
        Get list of missing setup requirements.

        Returns:
            list: List of missing requirement descriptions
        """
        requirements = self.get_setup_requirements()
        missing = []

        if not requirements["config_file_exists"]:
            missing.append("Configuration file is missing")
        elif not requirements["config_file_valid"]:
            missing.append("Configuration file is invalid or corrupted")

        if not requirements["database_exists"]:
            missing.append("Database file is missing")
        elif not requirements["database_accessible"]:
            missing.append("Database is not accessible or corrupted")

        if not requirements["master_password_configured"]:
            missing.append("Master password is not configured")

        if not requirements["setup_marked_complete"]:
            missing.append("Setup process was not completed")

        return missing


# Convenience functions for easy import
def is_setup_complete() -> bool:
    """Convenience function to check if setup is complete."""
    service = SetupService()
    return service.is_setup_complete()


def get_setup_service() -> SetupService:
    """Get a configured setup service instance."""
    return SetupService()
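Putting the service to use as a request guard could look roughly like the sketch below; the endpoint names and the exact shape of data/config.json are assumptions inferred from the checks above, not part of the removed code.

# Hypothetical before_request guard built on the SetupService class defined above.
from flask import Flask, redirect, request, url_for

app = Flask(__name__)
setup_service = SetupService()

# Minimal config.json shape the checks above expect (values are placeholders):
# {
#     "security": {"master_password_hash": "<hash>", "salt": "<salt>"},
#     "setup": {"completed": true, "completed_at": "2024-01-01 00:00:00"}
# }


@app.before_request
def enforce_setup():
    # Let the setup pages themselves through, otherwise force the wizard first.
    if request.endpoint and request.endpoint.startswith('setup'):
        return None
    if not setup_service.is_setup_complete():
        return redirect(url_for('setup.index'))  # assumed setup blueprint endpoint
    return None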