From 539dd80e142f071065c365b1c95d43ed26cd9f6c Mon Sep 17 00:00:00 2001 From: Lukas Date: Sun, 12 Oct 2025 23:45:02 +0200 Subject: [PATCH] removed old stff --- src/server/services/bulk_service.py | 1122 ----------------- src/server/services/config_service.py | 981 --------------- src/server/services/monitoring_service.py | 565 --------- src/server/services/queue_service.py | 303 ----- src/server/services/scheduler_service.py | 252 ---- src/server/services/search_service.py | 1361 --------------------- src/server/services/setup_service.py | 268 ---- 7 files changed, 4852 deletions(-) delete mode 100644 src/server/services/bulk_service.py delete mode 100644 src/server/services/config_service.py delete mode 100644 src/server/services/monitoring_service.py delete mode 100644 src/server/services/queue_service.py delete mode 100644 src/server/services/scheduler_service.py delete mode 100644 src/server/services/search_service.py delete mode 100644 src/server/services/setup_service.py diff --git a/src/server/services/bulk_service.py b/src/server/services/bulk_service.py deleted file mode 100644 index 04f6f62..0000000 --- a/src/server/services/bulk_service.py +++ /dev/null @@ -1,1122 +0,0 @@ -""" -Bulk Operations Manager for Multiple Series Management - -This module provides bulk operation capabilities for managing multiple series -simultaneously, including batch downloads, deletions, updates, and organization. -""" - -from typing import List, Dict, Any, Optional, Set -import asyncio -import json -import os -from datetime import datetime -import threading -from concurrent.futures import ThreadPoolExecutor -import time - -class BulkOperationsManager: - """Manages bulk operations for multiple series.""" - - def __init__(self, app=None): - self.app = app - self.active_operations = {} - self.operation_history = [] - self.max_concurrent_operations = 5 - self.executor = ThreadPoolExecutor(max_workers=self.max_concurrent_operations) - - def init_app(self, app): - """Initialize with Flask app.""" - self.app = app - - def get_bulk_operations_js(self): - """Generate JavaScript code for bulk operations functionality.""" - return """ -// AniWorld Bulk Operations Manager -class BulkOperationsManager { - constructor() { - this.selectedItems = new Set(); - this.operations = new Map(); - this.init(); - } - - init() { - this.setupSelectionControls(); - this.setupBulkActions(); - this.setupOperationProgress(); - this.setupKeyboardShortcuts(); - } - - setupSelectionControls() { - // Add selection checkboxes to series items - const seriesItems = document.querySelectorAll('.series-item, .anime-card'); - seriesItems.forEach(item => { - this.addSelectionCheckbox(item); - }); - - // Add bulk selection controls - this.createBulkSelectionBar(); - - // Setup click handlers - document.addEventListener('click', this.handleItemClick.bind(this)); - document.addEventListener('change', this.handleCheckboxChange.bind(this)); - } - - addSelectionCheckbox(item) { - if (item.querySelector('.bulk-select-checkbox')) return; - - const checkbox = document.createElement('input'); - checkbox.type = 'checkbox'; - checkbox.className = 'bulk-select-checkbox form-check-input position-absolute'; - checkbox.style.top = '10px'; - checkbox.style.left = '10px'; - checkbox.style.zIndex = '10'; - checkbox.dataset.itemId = item.dataset.seriesId || item.dataset.id; - - item.style.position = 'relative'; - item.appendChild(checkbox); - } - - createBulkSelectionBar() { - const existingBar = document.querySelector('.bulk-selection-bar'); - if (existingBar) 
return;
-
-        const selectionBar = document.createElement('div');
-        selectionBar.className = 'bulk-selection-bar bg-primary text-white p-3 mb-3 rounded d-none';
-        selectionBar.innerHTML = `
-            <div class="row align-items-center">
-                <div class="col-md-6">
-                    <span class="selection-count fw-bold">0 items selected</span>
-                    <div class="btn-group btn-group-sm ms-3" role="group">
-                        <button type="button" class="btn btn-light" id="select-all">Select All</button>
-                        <button type="button" class="btn btn-light" id="select-none">Select None</button>
-                        <button type="button" class="btn btn-light" id="select-visible">Select Visible</button>
-                    </div>
-                </div>
-                <div class="col-md-6 text-end">
-                    <div class="btn-group" role="group">
-                        <button type="button" class="btn btn-success" id="bulk-download">Download</button>
-                        <button type="button" class="btn btn-info" id="bulk-update">Update</button>
-                        <button type="button" class="btn btn-warning" id="bulk-organize">Organize</button>
-                        <button type="button" class="btn btn-secondary" id="bulk-export">Export</button>
-                        <button type="button" class="btn btn-danger" id="bulk-delete">Delete</button>
-                    </div>
-                </div>
-            </div>
- `; - - const mainContent = document.querySelector('.main-content, .container-fluid'); - if (mainContent) { - mainContent.insertBefore(selectionBar, mainContent.firstChild); - } - } - - setupBulkActions() { - // Bulk action button handlers - document.addEventListener('click', (e) => { - if (e.target.id === 'select-all') this.selectAll(); - else if (e.target.id === 'select-none') this.selectNone(); - else if (e.target.id === 'select-visible') this.selectVisible(); - else if (e.target.id === 'bulk-download') this.bulkDownload(); - else if (e.target.id === 'bulk-update') this.bulkUpdate(); - else if (e.target.id === 'bulk-organize') this.bulkOrganize(); - else if (e.target.id === 'bulk-export') this.bulkExport(); - else if (e.target.id === 'bulk-delete') this.bulkDelete(); - }); - } - - setupOperationProgress() { - // Create progress tracking container - const progressContainer = document.createElement('div'); - progressContainer.id = 'bulk-progress-container'; - progressContainer.className = 'position-fixed bottom-0 end-0 p-3'; - progressContainer.style.zIndex = '9998'; - document.body.appendChild(progressContainer); - } - - setupKeyboardShortcuts() { - document.addEventListener('keydown', (e) => { - if (e.ctrlKey || e.metaKey) { - switch(e.key) { - case 'a': - if (this.selectedItems.size > 0) { - e.preventDefault(); - this.selectAll(); - } - break; - case 'd': - if (this.selectedItems.size > 0) { - e.preventDefault(); - this.bulkDownload(); - } - break; - } - } - }); - } - - handleItemClick(e) { - const item = e.target.closest('.series-item, .anime-card'); - if (!item) return; - - // Handle shift+click for range selection - if (e.shiftKey && this.selectedItems.size > 0) { - this.selectRange(item); - } - } - - handleCheckboxChange(e) { - if (!e.target.classList.contains('bulk-select-checkbox')) return; - - const itemId = e.target.dataset.itemId; - const item = e.target.closest('.series-item, .anime-card'); - - if (e.target.checked) { - this.selectedItems.add(itemId); - item.classList.add('selected'); - } else { - this.selectedItems.delete(itemId); - item.classList.remove('selected'); - } - - this.updateSelectionUI(); - } - - selectAll() { - const checkboxes = document.querySelectorAll('.bulk-select-checkbox'); - checkboxes.forEach(checkbox => { - checkbox.checked = true; - this.selectedItems.add(checkbox.dataset.itemId); - checkbox.closest('.series-item, .anime-card').classList.add('selected'); - }); - this.updateSelectionUI(); - } - - selectNone() { - const checkboxes = document.querySelectorAll('.bulk-select-checkbox'); - checkboxes.forEach(checkbox => { - checkbox.checked = false; - checkbox.closest('.series-item, .anime-card').classList.remove('selected'); - }); - this.selectedItems.clear(); - this.updateSelectionUI(); - } - - selectVisible() { - const visibleItems = document.querySelectorAll('.series-item:not(.d-none), .anime-card:not(.d-none)'); - visibleItems.forEach(item => { - const checkbox = item.querySelector('.bulk-select-checkbox'); - if (checkbox) { - checkbox.checked = true; - this.selectedItems.add(checkbox.dataset.itemId); - item.classList.add('selected'); - } - }); - this.updateSelectionUI(); - } - - selectRange(endItem) { - const items = Array.from(document.querySelectorAll('.series-item, .anime-card')); - const selectedItems = Array.from(document.querySelectorAll('.series-item.selected, .anime-card.selected')); - - if (selectedItems.length === 0) return; - - const lastSelected = selectedItems[selectedItems.length - 1]; - const startIndex = items.indexOf(lastSelected); - 
const endIndex = items.indexOf(endItem); - - const min = Math.min(startIndex, endIndex); - const max = Math.max(startIndex, endIndex); - - for (let i = min; i <= max; i++) { - const item = items[i]; - const checkbox = item.querySelector('.bulk-select-checkbox'); - if (checkbox) { - checkbox.checked = true; - this.selectedItems.add(checkbox.dataset.itemId); - item.classList.add('selected'); - } - } - - this.updateSelectionUI(); - } - - updateSelectionUI() { - const count = this.selectedItems.size; - const selectionBar = document.querySelector('.bulk-selection-bar'); - const countElement = document.querySelector('.selection-count'); - - if (count > 0) { - selectionBar.classList.remove('d-none'); - countElement.textContent = `${count} item${count === 1 ? '' : 's'} selected`; - } else { - selectionBar.classList.add('d-none'); - } - } - - async bulkDownload() { - if (this.selectedItems.size === 0) return; - - const confirmed = await this.confirmOperation( - 'Bulk Download', - `Download ${this.selectedItems.size} selected series?` - ); - - if (!confirmed) return; - - const operationId = this.startOperation('download', Array.from(this.selectedItems)); - - try { - const response = await fetch('/api/bulk/download', { - method: 'POST', - headers: { - 'Content-Type': 'application/json' - }, - body: JSON.stringify({ - operation_id: operationId, - series_ids: Array.from(this.selectedItems) - }) - }); - - const result = await response.json(); - - if (result.success) { - this.trackOperation(operationId, result.task_id); - } else { - this.showError('Failed to start bulk download'); - this.completeOperation(operationId, false); - } - } catch (error) { - this.showError('Error starting bulk download: ' + error.message); - this.completeOperation(operationId, false); - } - } - - async bulkUpdate() { - if (this.selectedItems.size === 0) return; - - const confirmed = await this.confirmOperation( - 'Bulk Update', - `Update metadata for ${this.selectedItems.size} selected series?` - ); - - if (!confirmed) return; - - const operationId = this.startOperation('update', Array.from(this.selectedItems)); - - try { - const response = await fetch('/api/bulk/update', { - method: 'POST', - headers: { - 'Content-Type': 'application/json' - }, - body: JSON.stringify({ - operation_id: operationId, - series_ids: Array.from(this.selectedItems) - }) - }); - - const result = await response.json(); - - if (result.success) { - this.trackOperation(operationId, result.task_id); - } else { - this.showError('Failed to start bulk update'); - this.completeOperation(operationId, false); - } - } catch (error) { - this.showError('Error starting bulk update: ' + error.message); - this.completeOperation(operationId, false); - } - } - - async bulkOrganize() { - if (this.selectedItems.size === 0) return; - - const options = await this.showOrganizeModal(); - if (!options) return; - - const operationId = this.startOperation('organize', Array.from(this.selectedItems)); - - try { - const response = await fetch('/api/bulk/organize', { - method: 'POST', - headers: { - 'Content-Type': 'application/json' - }, - body: JSON.stringify({ - operation_id: operationId, - series_ids: Array.from(this.selectedItems), - options: options - }) - }); - - const result = await response.json(); - - if (result.success) { - this.trackOperation(operationId, result.task_id); - } else { - this.showError('Failed to start bulk organization'); - this.completeOperation(operationId, false); - } - } catch (error) { - this.showError('Error starting bulk organization: ' + 
error.message); - this.completeOperation(operationId, false); - } - } - - async bulkExport() { - if (this.selectedItems.size === 0) return; - - const format = await this.showExportModal(); - if (!format) return; - - const operationId = this.startOperation('export', Array.from(this.selectedItems)); - - try { - const response = await fetch('/api/bulk/export', { - method: 'POST', - headers: { - 'Content-Type': 'application/json' - }, - body: JSON.stringify({ - operation_id: operationId, - series_ids: Array.from(this.selectedItems), - format: format - }) - }); - - if (response.ok) { - const blob = await response.blob(); - this.downloadFile(blob, `series_export_${Date.now()}.${format}`); - this.completeOperation(operationId, true); - } else { - this.showError('Failed to export series data'); - this.completeOperation(operationId, false); - } - } catch (error) { - this.showError('Error exporting series data: ' + error.message); - this.completeOperation(operationId, false); - } - } - - async bulkDelete() { - if (this.selectedItems.size === 0) return; - - const confirmed = await this.confirmOperation( - 'Bulk Delete', - `Permanently delete ${this.selectedItems.size} selected series?\\n\\nThis action cannot be undone!`, - 'danger' - ); - - if (!confirmed) return; - - const operationId = this.startOperation('delete', Array.from(this.selectedItems)); - - try { - const response = await fetch('/api/bulk/delete', { - method: 'DELETE', - headers: { - 'Content-Type': 'application/json' - }, - body: JSON.stringify({ - operation_id: operationId, - series_ids: Array.from(this.selectedItems) - }) - }); - - const result = await response.json(); - - if (result.success) { - this.trackOperation(operationId, result.task_id); - // Remove deleted items from selection - this.selectedItems.clear(); - this.updateSelectionUI(); - } else { - this.showError('Failed to start bulk deletion'); - this.completeOperation(operationId, false); - } - } catch (error) { - this.showError('Error starting bulk deletion: ' + error.message); - this.completeOperation(operationId, false); - } - } - - startOperation(type, itemIds) { - const operationId = 'op_' + Date.now() + '_' + Math.random().toString(36).substr(2, 9); - - this.operations.set(operationId, { - type: type, - itemIds: itemIds, - startTime: Date.now(), - status: 'running' - }); - - this.showOperationProgress(operationId, type, itemIds.length); - - return operationId; - } - - trackOperation(operationId, taskId) { - const operation = this.operations.get(operationId); - if (!operation) return; - - operation.taskId = taskId; - - // Poll for progress updates - const pollInterval = setInterval(async () => { - try { - const response = await fetch(`/api/bulk/status/${taskId}`); - const status = await response.json(); - - this.updateOperationProgress(operationId, status); - - if (status.complete) { - clearInterval(pollInterval); - this.completeOperation(operationId, status.success); - } - } catch (error) { - console.error('Error polling operation status:', error); - clearInterval(pollInterval); - this.completeOperation(operationId, false); - } - }, 1000); - - operation.pollInterval = pollInterval; - } - - showOperationProgress(operationId, type, count) { - const progressContainer = document.getElementById('bulk-progress-container'); - - const progressCard = document.createElement('div'); - progressCard.id = `progress-${operationId}`; - progressCard.className = 'card mb-2'; - progressCard.style.width = '300px'; - progressCard.innerHTML = ` -
-            <div class="card-body p-2">
-                <div class="d-flex justify-content-between align-items-center">
-                    <small class="fw-bold">${type.charAt(0).toUpperCase() + type.slice(1)} ${count} items</small>
-                    <button type="button" class="btn-close" aria-label="Cancel" onclick="window.bulkOpsManager.cancelOperation('${operationId}')"></button>
-                </div>
-                <div class="progress progress-sm my-1">
-                    <div class="progress-bar progress-bar-striped progress-bar-animated" role="progressbar" style="width: 0%"></div>
-                </div>
-                <small class="operation-status text-muted">Starting...</small>
-            </div>
- `; - - progressContainer.appendChild(progressCard); - } - - updateOperationProgress(operationId, status) { - const progressCard = document.getElementById(`progress-${operationId}`); - if (!progressCard) return; - - const progressBar = progressCard.querySelector('.progress-bar'); - const statusText = progressCard.querySelector('.operation-status'); - - const percentage = Math.round((status.completed / status.total) * 100); - progressBar.style.width = `${percentage}%`; - - statusText.textContent = status.message || `${status.completed}/${status.total} completed`; - - if (status.error) { - progressBar.classList.add('bg-danger'); - statusText.textContent = 'Error: ' + status.error; - } - } - - completeOperation(operationId, success) { - const operation = this.operations.get(operationId); - if (!operation) return; - - // Clear polling if active - if (operation.pollInterval) { - clearInterval(operation.pollInterval); - } - - operation.status = success ? 'completed' : 'failed'; - operation.endTime = Date.now(); - - const progressCard = document.getElementById(`progress-${operationId}`); - if (progressCard) { - const progressBar = progressCard.querySelector('.progress-bar'); - const statusText = progressCard.querySelector('.operation-status'); - - progressBar.classList.remove('progress-bar-animated', 'progress-bar-striped'); - - if (success) { - progressBar.classList.add('bg-success'); - progressBar.style.width = '100%'; - statusText.textContent = 'Completed successfully'; - } else { - progressBar.classList.add('bg-danger'); - statusText.textContent = 'Operation failed'; - } - - // Auto-remove after 5 seconds - setTimeout(() => { - if (progressCard.parentNode) { - progressCard.parentNode.removeChild(progressCard); - } - }, 5000); - } - - this.operations.delete(operationId); - } - - cancelOperation(operationId) { - const operation = this.operations.get(operationId); - if (!operation) return; - - if (operation.taskId) { - fetch(`/api/bulk/cancel/${operation.taskId}`, { - method: 'POST' - }); - } - - this.completeOperation(operationId, false); - } - - confirmOperation(title, message, type = 'primary') { - return new Promise((resolve) => { - const modal = document.createElement('div'); - modal.className = 'modal fade'; - modal.innerHTML = ` - - `; - - document.body.appendChild(modal); - const bsModal = new bootstrap.Modal(modal); - bsModal.show(); - - modal.querySelector('#confirm-operation').addEventListener('click', () => { - resolve(true); - bsModal.hide(); - }); - - modal.addEventListener('hidden.bs.modal', () => { - if (modal.parentNode) { - document.body.removeChild(modal); - } - resolve(false); - }); - }); - } - - showOrganizeModal() { - return new Promise((resolve) => { - const modal = document.createElement('div'); - modal.className = 'modal fade'; - modal.innerHTML = ` - - `; - - document.body.appendChild(modal); - const bsModal = new bootstrap.Modal(modal); - bsModal.show(); - - modal.querySelector('#confirm-organize').addEventListener('click', () => { - const options = { - method: modal.querySelector('#organize-method').value, - targetDir: modal.querySelector('#target-dir').value, - createSymlinks: modal.querySelector('#create-symlinks').checked - }; - resolve(options); - bsModal.hide(); - }); - - modal.addEventListener('hidden.bs.modal', () => { - if (modal.parentNode) { - document.body.removeChild(modal); - } - resolve(null); - }); - }); - } - - showExportModal() { - return new Promise((resolve) => { - const modal = document.createElement('div'); - modal.className = 'modal fade'; - 
modal.innerHTML = ` - - `; - - document.body.appendChild(modal); - const bsModal = new bootstrap.Modal(modal); - bsModal.show(); - - modal.querySelector('#confirm-export').addEventListener('click', () => { - const format = modal.querySelector('input[name="export-format"]:checked').value; - resolve(format); - bsModal.hide(); - }); - - modal.addEventListener('hidden.bs.modal', () => { - if (modal.parentNode) { - document.body.removeChild(modal); - } - resolve(null); - }); - }); - } - - downloadFile(blob, filename) { - const url = URL.createObjectURL(blob); - const a = document.createElement('a'); - a.href = url; - a.download = filename; - document.body.appendChild(a); - a.click(); - document.body.removeChild(a); - URL.revokeObjectURL(url); - } - - showError(message) { - const toast = document.createElement('div'); - toast.className = 'toast align-items-center text-white bg-danger'; - toast.innerHTML = ` -
-            <div class="d-flex">
-                <div class="toast-body">${message}</div>
-                <button type="button" class="btn-close btn-close-white me-2 m-auto" data-bs-dismiss="toast" aria-label="Close"></button>
-            </div>
- `; - - let toastContainer = document.querySelector('.toast-container'); - if (!toastContainer) { - toastContainer = document.createElement('div'); - toastContainer.className = 'toast-container position-fixed bottom-0 end-0 p-3'; - document.body.appendChild(toastContainer); - } - - toastContainer.appendChild(toast); - const bsToast = new bootstrap.Toast(toast); - bsToast.show(); - - toast.addEventListener('hidden.bs.toast', () => { - if (toast.parentNode) { - toastContainer.removeChild(toast); - } - }); - } -} - -// Initialize bulk operations when DOM is loaded -document.addEventListener('DOMContentLoaded', () => { - window.bulkOpsManager = new BulkOperationsManager(); -}); -""" - - def get_css(self): - """Generate CSS styles for bulk operations.""" - return """ -/* Bulk Operations Styles */ -.bulk-selection-bar { - animation: slideDown 0.3s ease-out; - border-left: 4px solid #0d6efd; -} - -@keyframes slideDown { - from { - opacity: 0; - transform: translateY(-20px); - } - to { - opacity: 1; - transform: translateY(0); - } -} - -.bulk-select-checkbox { - opacity: 0; - transition: opacity 0.2s ease; -} - -.series-item:hover .bulk-select-checkbox, -.anime-card:hover .bulk-select-checkbox, -.bulk-select-checkbox:checked { - opacity: 1; -} - -.series-item.selected, -.anime-card.selected { - background-color: rgba(13, 110, 253, 0.1); - border: 2px solid #0d6efd; - border-radius: 8px; -} - -#bulk-progress-container { - max-height: 400px; - overflow-y: auto; -} - -.progress-sm { - height: 0.5rem; -} - -.operation-status { - font-size: 0.75rem; -} - -.btn-group .btn { - white-space: nowrap; -} - -/* Mobile responsiveness */ -@media (max-width: 768px) { - .bulk-selection-bar .row { - flex-direction: column; - } - - .bulk-selection-bar .col-md-6 { - margin-bottom: 1rem; - text-align: center; - } - - .bulk-selection-bar .text-end { - text-align: center !important; - } - - .btn-group { - flex-wrap: wrap; - justify-content: center; - } - - .btn-group .btn { - margin: 0.25rem; - } -} - -/* Accessibility */ -.bulk-select-checkbox:focus { - box-shadow: 0 0 0 0.2rem rgba(13, 110, 253, 0.25); -} - -@media (prefers-reduced-motion: reduce) { - .bulk-selection-bar { - animation: none; - } -} -""" - - async def bulk_download(self, series_ids: List[str], operation_id: str) -> Dict[str, Any]: - """Execute bulk download operation.""" - try: - results = [] - total = len(series_ids) - completed = 0 - - for series_id in series_ids: - try: - # Update progress - await self.update_operation_progress(operation_id, completed, total, f"Downloading {series_id}") - - # Simulate download (replace with actual download logic) - await asyncio.sleep(1) # Simulated work - - results.append({ - 'series_id': series_id, - 'status': 'success', - 'message': 'Download completed' - }) - - completed += 1 - - except Exception as e: - results.append({ - 'series_id': series_id, - 'status': 'error', - 'message': str(e) - }) - - await self.update_operation_progress(operation_id, total, total, "All downloads completed") - - return { - 'success': True, - 'results': results, - 'completed': completed, - 'total': total - } - - except Exception as e: - return { - 'success': False, - 'error': str(e), - 'completed': completed, - 'total': total - } - - async def bulk_update(self, series_ids: List[str], operation_id: str) -> Dict[str, Any]: - """Execute bulk update operation.""" - try: - results = [] - total = len(series_ids) - completed = 0 - - for series_id in series_ids: - try: - await self.update_operation_progress(operation_id, completed, total, 
f"Updating {series_id}") - - # Simulate update (replace with actual update logic) - await asyncio.sleep(0.5) - - results.append({ - 'series_id': series_id, - 'status': 'success', - 'message': 'Metadata updated' - }) - - completed += 1 - - except Exception as e: - results.append({ - 'series_id': series_id, - 'status': 'error', - 'message': str(e) - }) - - await self.update_operation_progress(operation_id, total, total, "All updates completed") - - return { - 'success': True, - 'results': results, - 'completed': completed, - 'total': total - } - - except Exception as e: - return { - 'success': False, - 'error': str(e), - 'completed': completed, - 'total': total - } - - async def bulk_organize(self, series_ids: List[str], options: Dict[str, Any], operation_id: str) -> Dict[str, Any]: - """Execute bulk organize operation.""" - try: - results = [] - total = len(series_ids) - completed = 0 - - method = options.get('method', 'genre') - target_dir = options.get('targetDir', '') - create_symlinks = options.get('createSymlinks', False) - - for series_id in series_ids: - try: - await self.update_operation_progress(operation_id, completed, total, f"Organizing {series_id}") - - # Simulate organization (replace with actual logic) - await asyncio.sleep(0.3) - - results.append({ - 'series_id': series_id, - 'status': 'success', - 'message': f'Organized by {method}' - }) - - completed += 1 - - except Exception as e: - results.append({ - 'series_id': series_id, - 'status': 'error', - 'message': str(e) - }) - - await self.update_operation_progress(operation_id, total, total, "Organization completed") - - return { - 'success': True, - 'results': results, - 'completed': completed, - 'total': total - } - - except Exception as e: - return { - 'success': False, - 'error': str(e), - 'completed': completed, - 'total': total - } - - async def bulk_delete(self, series_ids: List[str], operation_id: str) -> Dict[str, Any]: - """Execute bulk delete operation.""" - try: - results = [] - total = len(series_ids) - completed = 0 - - for series_id in series_ids: - try: - await self.update_operation_progress(operation_id, completed, total, f"Deleting {series_id}") - - # Simulate deletion (replace with actual deletion logic) - await asyncio.sleep(0.2) - - results.append({ - 'series_id': series_id, - 'status': 'success', - 'message': 'Series deleted' - }) - - completed += 1 - - except Exception as e: - results.append({ - 'series_id': series_id, - 'status': 'error', - 'message': str(e) - }) - - await self.update_operation_progress(operation_id, total, total, "Deletion completed") - - return { - 'success': True, - 'results': results, - 'completed': completed, - 'total': total - } - - except Exception as e: - return { - 'success': False, - 'error': str(e), - 'completed': completed, - 'total': total - } - - async def export_series_data(self, series_ids: List[str], format: str) -> bytes: - """Export series data in specified format.""" - # This would implement actual data export logic - # For now, return dummy data - - if format == 'json': - data = { - 'series': [{'id': sid, 'name': f'Series {sid}'} for sid in series_ids], - 'exported_at': datetime.now().isoformat() - } - return json.dumps(data, indent=2).encode('utf-8') - - elif format == 'csv': - import csv - import io - - output = io.StringIO() - writer = csv.writer(output) - writer.writerow(['ID', 'Name', 'Status']) - - for sid in series_ids: - writer.writerow([sid, f'Series {sid}', 'Active']) - - return output.getvalue().encode('utf-8') - - elif format == 'xml': - xml_data = 
'\n\n' - for sid in series_ids: - xml_data += f' \n' - xml_data += '' - - return xml_data.encode('utf-8') - - return b'' - - async def update_operation_progress(self, operation_id: str, completed: int, total: int, message: str): - """Update operation progress (implement with WebSocket or polling).""" - # This would send progress updates to the frontend - # For now, just store the progress - pass - - -# Export the bulk operations manager -bulk_operations_manager = BulkOperationsManager() \ No newline at end of file diff --git a/src/server/services/config_service.py b/src/server/services/config_service.py deleted file mode 100644 index 94a0a72..0000000 --- a/src/server/services/config_service.py +++ /dev/null @@ -1,981 +0,0 @@ -""" -User Preferences and Settings Persistence Manager - -This module provides user preferences management, settings persistence, -and customization options for the AniWorld web interface. -""" - -import json -import os -from typing import Dict, Any, Optional -from datetime import datetime -from flask import Blueprint, request, jsonify, session - -class UserPreferencesManager: - """Manages user preferences and settings persistence.""" - - def __init__(self, app=None): - self.app = app - self.preferences_file = 'data/user_preferences.json' - self.preferences = {} # Initialize preferences attribute - self.default_preferences = { - 'ui': { - 'theme': 'auto', # 'light', 'dark', 'auto' - 'density': 'comfortable', # 'compact', 'comfortable', 'spacious' - 'language': 'en', - 'animations_enabled': True, - 'sidebar_collapsed': False, - 'grid_view': True, - 'items_per_page': 20 - }, - 'downloads': { - 'auto_download': False, - 'download_quality': 'best', - 'concurrent_downloads': 3, - 'retry_failed': True, - 'notification_sound': True, - 'auto_organize': True - }, - 'notifications': { - 'browser_notifications': True, - 'email_notifications': False, - 'webhook_notifications': False, - 'notification_types': { - 'download_complete': True, - 'download_error': True, - 'series_updated': False, - 'system_alerts': True - } - }, - 'keyboard_shortcuts': { - 'enabled': True, - 'shortcuts': { - 'search': 'ctrl+f', - 'download': 'ctrl+d', - 'refresh': 'f5', - 'select_all': 'ctrl+a', - 'help': 'f1', - 'settings': 'ctrl+comma' - } - }, - 'advanced': { - 'debug_mode': False, - 'performance_mode': False, - 'cache_enabled': True, - 'auto_backup': True, - 'log_level': 'info' - } - } - - # Initialize with defaults if no app provided - if app is None: - self.preferences = self.default_preferences.copy() - else: - self.init_app(app) - - def init_app(self, app): - """Initialize with Flask app.""" - self.app = app - self.preferences_file = os.path.join(app.instance_path, 'data/user_preferences.json') - - # Ensure instance path exists - os.makedirs(app.instance_path, exist_ok=True) - - # Load or create preferences file - self.load_preferences() - - def load_preferences(self) -> Dict[str, Any]: - """Load preferences from file.""" - try: - if os.path.exists(self.preferences_file): - with open(self.preferences_file, 'r', encoding='utf-8') as f: - loaded_prefs = json.load(f) - - # Merge with defaults to ensure all keys exist - self.preferences = self.merge_preferences(self.default_preferences, loaded_prefs) - else: - self.preferences = self.default_preferences.copy() - self.save_preferences() - - except Exception as e: - print(f"Error loading preferences: {e}") - self.preferences = self.default_preferences.copy() - - return self.preferences - - def save_preferences(self) -> bool: - """Save preferences to 
file.""" - try: - with open(self.preferences_file, 'w', encoding='utf-8') as f: - json.dump(self.preferences, f, indent=2, ensure_ascii=False) - return True - except Exception as e: - print(f"Error saving preferences: {e}") - return False - - def merge_preferences(self, defaults: Dict, user_prefs: Dict) -> Dict: - """Recursively merge user preferences with defaults.""" - result = defaults.copy() - - for key, value in user_prefs.items(): - if key in result and isinstance(result[key], dict) and isinstance(value, dict): - result[key] = self.merge_preferences(result[key], value) - else: - result[key] = value - - return result - - def get_preference(self, key: str, default: Any = None) -> Any: - """Get a specific preference using dot notation (e.g., 'ui.theme').""" - keys = key.split('.') - value = self.preferences - - try: - for k in keys: - value = value[k] - return value - except (KeyError, TypeError): - return default - - def set_preference(self, key: str, value: Any) -> bool: - """Set a specific preference using dot notation.""" - keys = key.split('.') - pref_dict = self.preferences - - try: - # Navigate to parent dictionary - for k in keys[:-1]: - if k not in pref_dict: - pref_dict[k] = {} - pref_dict = pref_dict[k] - - # Set the value - pref_dict[keys[-1]] = value - - # Save to file - return self.save_preferences() - - except Exception as e: - print(f"Error setting preference {key}: {e}") - return False - - def reset_preferences(self) -> bool: - """Reset all preferences to defaults.""" - self.preferences = self.default_preferences.copy() - return self.save_preferences() - - def export_preferences(self) -> str: - """Export preferences as JSON string.""" - try: - return json.dumps(self.preferences, indent=2, ensure_ascii=False) - except Exception as e: - print(f"Error exporting preferences: {e}") - return "{}" - - def import_preferences(self, json_data: str) -> bool: - """Import preferences from JSON string.""" - try: - imported_prefs = json.loads(json_data) - self.preferences = self.merge_preferences(self.default_preferences, imported_prefs) - return self.save_preferences() - except Exception as e: - print(f"Error importing preferences: {e}") - return False - - def get_user_session_preferences(self) -> Dict[str, Any]: - """Get preferences for current user session.""" - # For now, return global preferences - # In the future, could be user-specific - return self.preferences.copy() - - def get_preferences_js(self): - """Generate JavaScript code for preferences management.""" - return f""" -// AniWorld User Preferences Manager -class UserPreferencesManager {{ - constructor() {{ - this.preferences = {json.dumps(self.preferences)}; - this.defaultPreferences = {json.dumps(self.default_preferences)}; - this.changeListeners = new Map(); - this.init(); - }} - - init() {{ - this.loadFromServer(); - this.applyPreferences(); - this.setupPreferencesUI(); - this.setupAutoSave(); - }} - - async loadFromServer() {{ - try {{ - const response = await fetch('/api/preferences'); - if (response.ok) {{ - this.preferences = await response.json(); - this.applyPreferences(); - }} - }} catch (error) {{ - console.error('Error loading preferences:', error); - }} - }} - - async saveToServer() {{ - try {{ - const response = await fetch('/api/preferences', {{ - method: 'PUT', - headers: {{ - 'Content-Type': 'application/json' - }}, - body: JSON.stringify(this.preferences) - }}); - - if (!response.ok) {{ - console.error('Error saving preferences to server'); - }} - }} catch (error) {{ - console.error('Error saving 
preferences:', error); - }} - }} - - get(key, defaultValue = null) {{ - const keys = key.split('.'); - let value = this.preferences; - - try {{ - for (const k of keys) {{ - value = value[k]; - }} - return value !== undefined ? value : defaultValue; - }} catch (error) {{ - return defaultValue; - }} - }} - - set(key, value, save = true) {{ - const keys = key.split('.'); - let obj = this.preferences; - - // Navigate to parent object - for (let i = 0; i < keys.length - 1; i++) {{ - const k = keys[i]; - if (!obj[k] || typeof obj[k] !== 'object') {{ - obj[k] = {{}}; - }} - obj = obj[k]; - }} - - // Set the value - const lastKey = keys[keys.length - 1]; - const oldValue = obj[lastKey]; - obj[lastKey] = value; - - // Apply the change immediately - this.applyPreference(key, value); - - // Notify listeners - this.notifyChangeListeners(key, value, oldValue); - - // Save to server - if (save) {{ - this.saveToServer(); - }} - - // Store in localStorage as backup - localStorage.setItem('aniworld_preferences', JSON.stringify(this.preferences)); - }} - - applyPreferences() {{ - // Apply all preferences - this.applyTheme(); - this.applyUIPreferences(); - this.applyKeyboardShortcuts(); - this.applyNotificationSettings(); - }} - - applyPreference(key, value) {{ - // Apply individual preference change - if (key.startsWith('ui.theme')) {{ - this.applyTheme(); - }} else if (key.startsWith('ui.')) {{ - this.applyUIPreferences(); - }} else if (key.startsWith('keyboard_shortcuts.')) {{ - this.applyKeyboardShortcuts(); - }} else if (key.startsWith('notifications.')) {{ - this.applyNotificationSettings(); - }} - }} - - applyTheme() {{ - const theme = this.get('ui.theme', 'auto'); - const html = document.documentElement; - - html.classList.remove('theme-light', 'theme-dark'); - - if (theme === 'auto') {{ - // Use system preference - const prefersDark = window.matchMedia('(prefers-color-scheme: dark)').matches; - html.classList.add(prefersDark ? 'theme-dark' : 'theme-light'); - }} else {{ - html.classList.add(`theme-${{theme}}`); - }} - - // Update Bootstrap theme - html.setAttribute('data-bs-theme', theme === 'dark' || (theme === 'auto' && window.matchMedia('(prefers-color-scheme: dark)').matches) ? 
'dark' : 'light'); - }} - - applyUIPreferences() {{ - const density = this.get('ui.density', 'comfortable'); - const animations = this.get('ui.animations_enabled', true); - const gridView = this.get('ui.grid_view', true); - - // Apply UI density - document.body.className = document.body.className.replace(/density-\\w+/g, ''); - document.body.classList.add(`density-${{density}}`); - - // Apply animations - if (!animations) {{ - document.body.classList.add('no-animations'); - }} else {{ - document.body.classList.remove('no-animations'); - }} - - // Apply view mode - const viewToggle = document.querySelector('.view-toggle'); - if (viewToggle) {{ - viewToggle.classList.toggle('grid-view', gridView); - viewToggle.classList.toggle('list-view', !gridView); - }} - }} - - applyKeyboardShortcuts() {{ - const enabled = this.get('keyboard_shortcuts.enabled', true); - const shortcuts = this.get('keyboard_shortcuts.shortcuts', {{}}); - - if (window.keyboardManager) {{ - window.keyboardManager.setEnabled(enabled); - window.keyboardManager.updateShortcuts(shortcuts); - }} - }} - - applyNotificationSettings() {{ - const browserNotifications = this.get('notifications.browser_notifications', true); - - // Request notification permission if needed - if (browserNotifications && 'Notification' in window && Notification.permission === 'default') {{ - Notification.requestPermission(); - }} - }} - - setupPreferencesUI() {{ - this.createSettingsModal(); - this.bindSettingsEvents(); - }} - - createSettingsModal() {{ - const existingModal = document.getElementById('preferences-modal'); - if (existingModal) return; - - const modal = document.createElement('div'); - modal.id = 'preferences-modal'; - modal.className = 'modal fade'; - modal.innerHTML = ` - - `; - - document.body.appendChild(modal); - }} - - createUITab() {{ - return ` -
-            <div class="row">
-                <div class="col-md-6">
-                    <div class="mb-3">
-                        <label class="form-label" for="pref-theme">Theme</label>
-                        <select class="form-select" id="pref-theme">
-                            <option value="auto">Auto</option><option value="light">Light</option><option value="dark">Dark</option>
-                        </select>
-                    </div>
-                    <div class="mb-3">
-                        <label class="form-label" for="pref-density">Density</label>
-                        <select class="form-select" id="pref-density">
-                            <option value="compact">Compact</option><option value="comfortable">Comfortable</option><option value="spacious">Spacious</option>
-                        </select>
-                    </div>
-                    <div class="mb-3">
-                        <label class="form-label" for="pref-language">Language</label>
-                        <select class="form-select" id="pref-language">
-                            <option value="en">English</option>
-                        </select>
-                    </div>
-                </div>
-                <div class="col-md-6">
-                    <div class="mb-3">
-                        <label class="form-label" for="pref-items-per-page">Items per page</label>
-                        <input type="number" class="form-control" id="pref-items-per-page" min="5" max="100">
-                    </div>
-                    <div class="form-check form-switch mb-3">
-                        <input class="form-check-input" type="checkbox" id="pref-animations">
-                        <label class="form-check-label" for="pref-animations">Enable animations</label>
-                    </div>
-                    <div class="form-check form-switch mb-3">
-                        <input class="form-check-input" type="checkbox" id="pref-grid-view">
-                        <label class="form-check-label" for="pref-grid-view">Grid view</label>
-                    </div>
-                </div>
-            </div>
- `; - }} - - createDownloadsTab() {{ - return ` -
-            <div class="row">
-                <div class="col-md-6">
-                    <div class="mb-3">
-                        <label class="form-label" for="pref-download-quality">Download quality</label>
-                        <select class="form-select" id="pref-download-quality">
-                            <option value="best">Best available</option><option value="1080p">1080p</option><option value="720p">720p</option><option value="480p">480p</option>
-                        </select>
-                    </div>
-                    <div class="mb-3">
-                        <label class="form-label" for="pref-concurrent-downloads">Concurrent downloads</label>
-                        <input type="number" class="form-control" id="pref-concurrent-downloads" min="1" max="10">
-                    </div>
-                </div>
-                <div class="col-md-6">
-                    <div class="form-check form-switch mb-3">
-                        <input class="form-check-input" type="checkbox" id="pref-auto-download">
-                        <label class="form-check-label" for="pref-auto-download">Automatically download new episodes</label>
-                    </div>
-                    <div class="form-check form-switch mb-3">
-                        <input class="form-check-input" type="checkbox" id="pref-retry-failed">
-                        <label class="form-check-label" for="pref-retry-failed">Retry failed downloads</label>
-                    </div>
-                    <div class="form-check form-switch mb-3">
-                        <input class="form-check-input" type="checkbox" id="pref-auto-organize">
-                        <label class="form-check-label" for="pref-auto-organize">Organize downloads automatically</label>
-                    </div>
-                </div>
-            </div>
- `; - }} - - createNotificationsTab() {{ - return ` -
-            <div class="row">
-                <div class="col-md-6 preference-group">
-                    <h6>General</h6>
-                    <div class="form-check form-switch mb-2">
-                        <input class="form-check-input" type="checkbox" id="pref-browser-notifications">
-                        <label class="form-check-label" for="pref-browser-notifications">Browser notifications</label>
-                    </div>
-                    <div class="form-check form-switch mb-2">
-                        <input class="form-check-input" type="checkbox" id="pref-notification-sound">
-                        <label class="form-check-label" for="pref-notification-sound">Notification sound</label>
-                    </div>
-                </div>
-                <div class="col-md-6 preference-group">
-                    <h6>Notification Types</h6>
-                    <div class="form-check form-switch mb-2">
-                        <input class="form-check-input" type="checkbox" id="pref-notify-download-complete">
-                        <label class="form-check-label" for="pref-notify-download-complete">Download complete</label>
-                    </div>
-                    <div class="form-check form-switch mb-2">
-                        <input class="form-check-input" type="checkbox" id="pref-notify-download-error">
-                        <label class="form-check-label" for="pref-notify-download-error">Download errors</label>
-                    </div>
-                    <div class="form-check form-switch mb-2">
-                        <input class="form-check-input" type="checkbox" id="pref-notify-system-alerts">
-                        <label class="form-check-label" for="pref-notify-system-alerts">System alerts</label>
-                    </div>
-                </div>
-            </div>
- `; - }} - - createShortcutsTab() {{ - return ` -
-            <div class="form-check form-switch mb-3">
-                <input class="form-check-input" type="checkbox" id="pref-shortcuts-enabled">
-                <label class="form-check-label" for="pref-shortcuts-enabled">Enable keyboard shortcuts</label>
-            </div>
-            <p class="text-muted small mb-0">Default shortcuts: Ctrl+F search, Ctrl+D download, F5 refresh, Ctrl+A select all, F1 help, Ctrl+, settings.</p>
- `; - }} - - createAdvancedTab() {{ - return ` -
-            <div class="row">
-                <div class="col-md-6">
-                    <div class="form-check form-switch mb-3">
-                        <input class="form-check-input" type="checkbox" id="pref-debug-mode">
-                        <label class="form-check-label" for="pref-debug-mode">Debug mode</label>
-                    </div>
-                    <div class="form-check form-switch mb-3">
-                        <input class="form-check-input" type="checkbox" id="pref-performance-mode">
-                        <label class="form-check-label" for="pref-performance-mode">Performance mode</label>
-                    </div>
-                </div>
-                <div class="col-md-6">
-                    <div class="form-check form-switch mb-3">
-                        <input class="form-check-input" type="checkbox" id="pref-cache-enabled">
-                        <label class="form-check-label" for="pref-cache-enabled">Enable caching</label>
-                    </div>
-                    <div class="form-check form-switch mb-3">
-                        <input class="form-check-input" type="checkbox" id="pref-auto-backup">
-                        <label class="form-check-label" for="pref-auto-backup">Automatic backups</label>
-                    </div>
-                </div>
-            </div>
- `; - }} - - bindSettingsEvents() {{ - // Theme system preference listener - window.matchMedia('(prefers-color-scheme: dark)').addEventListener('change', () => {{ - if (this.get('ui.theme') === 'auto') {{ - this.applyTheme(); - }} - }}); - - // Settings modal events will be bound when modal is shown - document.addEventListener('show.bs.modal', (e) => {{ - if (e.target.id === 'preferences-modal') {{ - this.populateSettingsForm(); - }} - }}); - }} - - populateSettingsForm() {{ - // Populate form fields with current preferences - const fields = [ - {{ id: 'pref-theme', key: 'ui.theme' }}, - {{ id: 'pref-density', key: 'ui.density' }}, - {{ id: 'pref-language', key: 'ui.language' }}, - {{ id: 'pref-items-per-page', key: 'ui.items_per_page' }}, - {{ id: 'pref-animations', key: 'ui.animations_enabled' }}, - {{ id: 'pref-grid-view', key: 'ui.grid_view' }}, - {{ id: 'pref-download-quality', key: 'downloads.download_quality' }}, - {{ id: 'pref-concurrent-downloads', key: 'downloads.concurrent_downloads' }}, - {{ id: 'pref-auto-download', key: 'downloads.auto_download' }}, - {{ id: 'pref-retry-failed', key: 'downloads.retry_failed' }}, - {{ id: 'pref-auto-organize', key: 'downloads.auto_organize' }}, - {{ id: 'pref-browser-notifications', key: 'notifications.browser_notifications' }}, - {{ id: 'pref-notification-sound', key: 'downloads.notification_sound' }}, - {{ id: 'pref-shortcuts-enabled', key: 'keyboard_shortcuts.enabled' }}, - {{ id: 'pref-debug-mode', key: 'advanced.debug_mode' }}, - {{ id: 'pref-performance-mode', key: 'advanced.performance_mode' }}, - {{ id: 'pref-cache-enabled', key: 'advanced.cache_enabled' }}, - {{ id: 'pref-auto-backup', key: 'advanced.auto_backup' }} - ]; - - fields.forEach(field => {{ - const element = document.getElementById(field.id); - if (element) {{ - const value = this.get(field.key); - if (element.type === 'checkbox') {{ - element.checked = value; - }} else {{ - element.value = value; - }} - }} - }}); - }} - - setupAutoSave() {{ - // Auto-save preferences on change - document.addEventListener('change', (e) => {{ - if (e.target.id && e.target.id.startsWith('pref-')) {{ - this.saveFormValue(e.target); - }} - }}); - }} - - saveFormValue(element) {{ - const keyMap = {{ - 'pref-theme': 'ui.theme', - 'pref-density': 'ui.density', - 'pref-language': 'ui.language', - 'pref-items-per-page': 'ui.items_per_page', - 'pref-animations': 'ui.animations_enabled', - 'pref-grid-view': 'ui.grid_view', - 'pref-download-quality': 'downloads.download_quality', - 'pref-concurrent-downloads': 'downloads.concurrent_downloads', - 'pref-auto-download': 'downloads.auto_download', - 'pref-retry-failed': 'downloads.retry_failed', - 'pref-auto-organize': 'downloads.auto_organize', - 'pref-browser-notifications': 'notifications.browser_notifications', - 'pref-notification-sound': 'downloads.notification_sound', - 'pref-shortcuts-enabled': 'keyboard_shortcuts.enabled', - 'pref-debug-mode': 'advanced.debug_mode', - 'pref-performance-mode': 'advanced.performance_mode', - 'pref-cache-enabled': 'advanced.cache_enabled', - 'pref-auto-backup': 'advanced.auto_backup' - }}; - - const key = keyMap[element.id]; - if (key) {{ - let value = element.type === 'checkbox' ? 
element.checked : element.value; - if (element.type === 'number') {{ - value = parseInt(value, 10); - }} - this.set(key, value); - }} - }} - - showPreferences() {{ - const modal = document.getElementById('preferences-modal'); - if (modal) {{ - const bsModal = new bootstrap.Modal(modal); - bsModal.show(); - }} - }} - - onPreferenceChange(key, callback) {{ - if (!this.changeListeners.has(key)) {{ - this.changeListeners.set(key, []); - }} - this.changeListeners.get(key).push(callback); - }} - - notifyChangeListeners(key, newValue, oldValue) {{ - const listeners = this.changeListeners.get(key) || []; - listeners.forEach(callback => {{ - try {{ - callback(newValue, oldValue, key); - }} catch (error) {{ - console.error('Error in preference change listener:', error); - }} - }}); - }} - - reset() {{ - this.preferences = JSON.parse(JSON.stringify(this.defaultPreferences)); - this.applyPreferences(); - this.saveToServer(); - localStorage.removeItem('aniworld_preferences'); - }} - - export() {{ - const data = JSON.stringify(this.preferences, null, 2); - const blob = new Blob([data], {{ type: 'application/json' }}); - const url = URL.createObjectURL(blob); - - const a = document.createElement('a'); - a.href = url; - a.download = 'aniworld_preferences.json'; - document.body.appendChild(a); - a.click(); - document.body.removeChild(a); - URL.revokeObjectURL(url); - }} - - import(file) {{ - return new Promise((resolve, reject) => {{ - const reader = new FileReader(); - reader.onload = (e) => {{ - try {{ - const imported = JSON.parse(e.target.result); - this.preferences = this.mergePreferences(this.defaultPreferences, imported); - this.applyPreferences(); - this.saveToServer(); - resolve(true); - }} catch (error) {{ - reject(error); - }} - }}; - reader.onerror = reject; - reader.readAsText(file); - }}); - }} - - mergePreferences(defaults, userPrefs) {{ - const result = {{ ...defaults }}; - - for (const [key, value] of Object.entries(userPrefs)) {{ - if (key in result && typeof result[key] === 'object' && typeof value === 'object') {{ - result[key] = this.mergePreferences(result[key], value); - }} else {{ - result[key] = value; - }} - }} - - return result; - }} -}} - -// Initialize preferences when DOM is loaded -document.addEventListener('DOMContentLoaded', () => {{ - window.preferencesManager = new UserPreferencesManager(); -}}); -""" - - def get_css(self): - """Generate CSS for user preferences.""" - return """ -/* User Preferences Styles */ -.density-compact { - --spacing: 0.5rem; - --font-size: 0.875rem; -} - -.density-comfortable { - --spacing: 1rem; - --font-size: 1rem; -} - -.density-spacious { - --spacing: 1.5rem; - --font-size: 1.125rem; -} - -.no-animations * { - animation-duration: 0s !important; - transition-duration: 0s !important; -} - -.theme-light { - --bs-body-bg: #ffffff; - --bs-body-color: #212529; - --bs-primary: #0d6efd; -} - -.theme-dark { - --bs-body-bg: #121212; - --bs-body-color: #e9ecef; - --bs-primary: #0d6efd; -} - -#preferences-modal .nav-tabs { - border-bottom: 1px solid var(--bs-border-color); -} - -#preferences-modal .tab-pane { - min-height: 300px; -} - -.preference-group { - margin-bottom: 2rem; -} - -.preference-group h6 { - color: var(--bs-secondary); - margin-bottom: 1rem; -} - -/* Responsive preferences modal */ -@media (max-width: 768px) { - #preferences-modal .modal-dialog { - max-width: 95vw; - margin: 0.5rem; - } - - #preferences-modal .nav-tabs { - flex-wrap: wrap; - } - - #preferences-modal .nav-link { - font-size: 0.875rem; - padding: 0.5rem; - } -} -""" - - -# 
Create the preferences API blueprint -preferences_bp = Blueprint('preferences', __name__, url_prefix='/api') - -# Global preferences manager instance -preferences_manager = UserPreferencesManager() - -@preferences_bp.route('/preferences', methods=['GET']) -def get_preferences(): - """Get user preferences.""" - try: - return jsonify(preferences_manager.get_user_session_preferences()) - except Exception as e: - return jsonify({'error': str(e)}), 500 - -@preferences_bp.route('/preferences', methods=['PUT']) -def update_preferences(): - """Update user preferences.""" - try: - data = request.get_json() - preferences_manager.preferences = preferences_manager.merge_preferences( - preferences_manager.default_preferences, - data - ) - - if preferences_manager.save_preferences(): - return jsonify({'success': True, 'message': 'Preferences updated'}) - else: - return jsonify({'error': 'Failed to save preferences'}), 500 - - except Exception as e: - return jsonify({'error': str(e)}), 500 - -@preferences_bp.route('/preferences/', methods=['GET']) -def get_preference(key): - """Get a specific preference.""" - try: - value = preferences_manager.get_preference(key) - return jsonify({'key': key, 'value': value}) - except Exception as e: - return jsonify({'error': str(e)}), 500 - -@preferences_bp.route('/preferences/', methods=['PUT']) -def set_preference(key): - """Set a specific preference.""" - try: - data = request.get_json() - value = data.get('value') - - if preferences_manager.set_preference(key, value): - return jsonify({'success': True, 'key': key, 'value': value}) - else: - return jsonify({'error': 'Failed to set preference'}), 500 - - except Exception as e: - return jsonify({'error': str(e)}), 500 - -@preferences_bp.route('/preferences/reset', methods=['POST']) -def reset_preferences(): - """Reset preferences to defaults.""" - try: - if preferences_manager.reset_preferences(): - return jsonify({'success': True, 'message': 'Preferences reset to defaults'}) - else: - return jsonify({'error': 'Failed to reset preferences'}), 500 - except Exception as e: - return jsonify({'error': str(e)}), 500 - -@preferences_bp.route('/preferences/export', methods=['GET']) -def export_preferences(): - """Export preferences as JSON file.""" - try: - from flask import Response - json_data = preferences_manager.export_preferences() - - return Response( - json_data, - mimetype='application/json', - headers={'Content-Disposition': 'attachment; filename=aniworld_preferences.json'} - ) - except Exception as e: - return jsonify({'error': str(e)}), 500 - -@preferences_bp.route('/preferences/import', methods=['POST']) -def import_preferences(): - """Import preferences from JSON file.""" - try: - if 'file' not in request.files: - return jsonify({'error': 'No file provided'}), 400 - - file = request.files['file'] - if file.filename == '': - return jsonify({'error': 'No file selected'}), 400 - - json_data = file.read().decode('utf-8') - - if preferences_manager.import_preferences(json_data): - return jsonify({'success': True, 'message': 'Preferences imported successfully'}) - else: - return jsonify({'error': 'Failed to import preferences'}), 500 - - except Exception as e: - return jsonify({'error': str(e)}), 500 \ No newline at end of file diff --git a/src/server/services/monitoring_service.py b/src/server/services/monitoring_service.py deleted file mode 100644 index 2e145c8..0000000 --- a/src/server/services/monitoring_service.py +++ /dev/null @@ -1,565 +0,0 @@ -""" -System Health Monitoring for AniWorld App - -This module 
provides comprehensive system health checks and monitoring -for the anime downloading application. -""" - -import psutil -import logging -import threading -import time -from typing import Dict, List, Optional, Any -from datetime import datetime, timedelta -from dataclasses import dataclass -from flask import Blueprint, jsonify, request -import os -import socket -import requests -from auth import require_auth, optional_auth - - -@dataclass -class HealthMetric: - """Represents a health metric measurement.""" - name: str - value: Any - unit: str - status: str # 'healthy', 'warning', 'critical' - threshold_warning: Optional[float] = None - threshold_critical: Optional[float] = None - timestamp: Optional[datetime] = None - - def __post_init__(self): - if self.timestamp is None: - self.timestamp = datetime.now() - - -class SystemHealthMonitor: - """Monitor system health metrics and performance.""" - - def __init__(self, check_interval: int = 60): - self.check_interval = check_interval - self.logger = logging.getLogger(__name__) - self.metrics_history: Dict[str, List[HealthMetric]] = {} - self.alerts: List[Dict] = [] - self.monitoring_enabled = True - self.monitor_thread = None - self._lock = threading.Lock() - - # Configurable thresholds - self.thresholds = { - 'cpu_percent': {'warning': 80.0, 'critical': 95.0}, - 'memory_percent': {'warning': 85.0, 'critical': 95.0}, - 'disk_percent': {'warning': 90.0, 'critical': 98.0}, - 'disk_free_gb': {'warning': 5.0, 'critical': 1.0}, - 'network_latency_ms': {'warning': 1000, 'critical': 5000}, - } - - def start_monitoring(self): - """Start continuous health monitoring.""" - if self.monitor_thread and self.monitor_thread.is_alive(): - self.logger.warning("Health monitoring already running") - return - - self.monitoring_enabled = True - self.monitor_thread = threading.Thread(target=self._monitoring_loop, daemon=True) - self.monitor_thread.start() - self.logger.info("System health monitoring started") - - def stop_monitoring(self): - """Stop health monitoring.""" - self.monitoring_enabled = False - if self.monitor_thread: - self.monitor_thread.join(timeout=5) - self.logger.info("System health monitoring stopped") - - def _monitoring_loop(self): - """Main monitoring loop.""" - while self.monitoring_enabled: - try: - self.collect_all_metrics() - time.sleep(self.check_interval) - except Exception as e: - self.logger.error(f"Error in monitoring loop: {e}", exc_info=True) - time.sleep(self.check_interval) - - def collect_all_metrics(self): - """Collect all health metrics.""" - metrics = [] - - # System metrics - metrics.extend(self.get_cpu_metrics()) - metrics.extend(self.get_memory_metrics()) - metrics.extend(self.get_disk_metrics()) - metrics.extend(self.get_network_metrics()) - - # Application metrics - metrics.extend(self.get_process_metrics()) - - # Store metrics - with self._lock: - for metric in metrics: - if metric.name not in self.metrics_history: - self.metrics_history[metric.name] = [] - - self.metrics_history[metric.name].append(metric) - - # Keep only last 24 hours of data - cutoff = datetime.now() - timedelta(hours=24) - self.metrics_history[metric.name] = [ - m for m in self.metrics_history[metric.name] - if m.timestamp > cutoff - ] - - # Check for alerts - self._check_alert_conditions(metric) - - def get_cpu_metrics(self) -> List[HealthMetric]: - """Get CPU-related metrics.""" - metrics = [] - - # CPU usage percentage - cpu_percent = psutil.cpu_percent(interval=1) - status = self._get_status_for_metric('cpu_percent', cpu_percent) - 
metrics.append(HealthMetric( - name='cpu_percent', - value=cpu_percent, - unit='%', - status=status, - threshold_warning=self.thresholds['cpu_percent']['warning'], - threshold_critical=self.thresholds['cpu_percent']['critical'] - )) - - # CPU count - metrics.append(HealthMetric( - name='cpu_count', - value=psutil.cpu_count(), - unit='cores', - status='healthy' - )) - - # Load average (Unix-like systems only) - try: - load_avg = psutil.getloadavg() - metrics.append(HealthMetric( - name='load_average_1m', - value=load_avg[0], - unit='', - status='healthy' - )) - except AttributeError: - pass # Not available on Windows - - return metrics - - def get_memory_metrics(self) -> List[HealthMetric]: - """Get memory-related metrics.""" - metrics = [] - - # Virtual memory - memory = psutil.virtual_memory() - status = self._get_status_for_metric('memory_percent', memory.percent) - - metrics.append(HealthMetric( - name='memory_percent', - value=memory.percent, - unit='%', - status=status, - threshold_warning=self.thresholds['memory_percent']['warning'], - threshold_critical=self.thresholds['memory_percent']['critical'] - )) - - metrics.append(HealthMetric( - name='memory_total_gb', - value=round(memory.total / (1024**3), 2), - unit='GB', - status='healthy' - )) - - metrics.append(HealthMetric( - name='memory_available_gb', - value=round(memory.available / (1024**3), 2), - unit='GB', - status='healthy' - )) - - # Swap memory - swap = psutil.swap_memory() - if swap.total > 0: - metrics.append(HealthMetric( - name='swap_percent', - value=swap.percent, - unit='%', - status='warning' if swap.percent > 50 else 'healthy' - )) - - return metrics - - def get_disk_metrics(self) -> List[HealthMetric]: - """Get disk-related metrics.""" - metrics = [] - - # Check main disk partitions - partitions = psutil.disk_partitions() - for partition in partitions: - if 'cdrom' in partition.opts or partition.fstype == '': - continue - - try: - usage = psutil.disk_usage(partition.mountpoint) - disk_percent = (usage.used / usage.total) * 100 - free_gb = usage.free / (1024**3) - - # Disk usage percentage - status_percent = self._get_status_for_metric('disk_percent', disk_percent) - device_name = partition.device.replace(":", "").replace("\\", "") - metrics.append(HealthMetric( - name=f'disk_percent_{device_name}', - value=round(disk_percent, 1), - unit='%', - status=status_percent, - threshold_warning=self.thresholds['disk_percent']['warning'], - threshold_critical=self.thresholds['disk_percent']['critical'] - )) - - # Free space in GB - status_free = 'critical' if free_gb < self.thresholds['disk_free_gb']['critical'] \ - else 'warning' if free_gb < self.thresholds['disk_free_gb']['warning'] \ - else 'healthy' - - metrics.append(HealthMetric( - name=f'disk_free_gb_{device_name}', - value=round(free_gb, 2), - unit='GB', - status=status_free, - threshold_warning=self.thresholds['disk_free_gb']['warning'], - threshold_critical=self.thresholds['disk_free_gb']['critical'] - )) - - except PermissionError: - continue - - # Disk I/O - try: - disk_io = psutil.disk_io_counters() - if disk_io: - metrics.append(HealthMetric( - name='disk_read_mb', - value=round(disk_io.read_bytes / (1024**2), 2), - unit='MB', - status='healthy' - )) - - metrics.append(HealthMetric( - name='disk_write_mb', - value=round(disk_io.write_bytes / (1024**2), 2), - unit='MB', - status='healthy' - )) - except Exception: - pass - - return metrics - - def get_network_metrics(self) -> List[HealthMetric]: - """Get network-related metrics.""" - metrics = [] - - # Network 
I/O - try: - net_io = psutil.net_io_counters() - if net_io: - metrics.append(HealthMetric( - name='network_sent_mb', - value=round(net_io.bytes_sent / (1024**2), 2), - unit='MB', - status='healthy' - )) - - metrics.append(HealthMetric( - name='network_recv_mb', - value=round(net_io.bytes_recv / (1024**2), 2), - unit='MB', - status='healthy' - )) - except Exception: - pass - - # Network connectivity test - try: - start_time = time.time() - socket.create_connection(("8.8.8.8", 53), timeout=5) - latency = (time.time() - start_time) * 1000 # Convert to ms - - status = self._get_status_for_metric('network_latency_ms', latency) - metrics.append(HealthMetric( - name='network_latency_ms', - value=round(latency, 2), - unit='ms', - status=status, - threshold_warning=self.thresholds['network_latency_ms']['warning'], - threshold_critical=self.thresholds['network_latency_ms']['critical'] - )) - except Exception: - metrics.append(HealthMetric( - name='network_latency_ms', - value=-1, - unit='ms', - status='critical' - )) - - return metrics - - def get_process_metrics(self) -> List[HealthMetric]: - """Get process-specific metrics.""" - metrics = [] - - try: - # Current process metrics - process = psutil.Process() - - # Process CPU usage - cpu_percent = process.cpu_percent() - metrics.append(HealthMetric( - name='process_cpu_percent', - value=cpu_percent, - unit='%', - status='warning' if cpu_percent > 50 else 'healthy' - )) - - # Process memory usage - memory_info = process.memory_info() - memory_mb = memory_info.rss / (1024**2) - metrics.append(HealthMetric( - name='process_memory_mb', - value=round(memory_mb, 2), - unit='MB', - status='warning' if memory_mb > 1024 else 'healthy' # Warning if > 1GB - )) - - # Process threads - threads = process.num_threads() - metrics.append(HealthMetric( - name='process_threads', - value=threads, - unit='', - status='warning' if threads > 50 else 'healthy' - )) - - # Process connections - try: - connections = len(process.connections()) - metrics.append(HealthMetric( - name='process_connections', - value=connections, - unit='', - status='warning' if connections > 100 else 'healthy' - )) - except psutil.AccessDenied: - pass - - except Exception as e: - self.logger.error(f"Failed to get process metrics: {e}") - - return metrics - - def _get_status_for_metric(self, metric_name: str, value: float) -> str: - """Determine status based on thresholds.""" - if metric_name in self.thresholds: - thresholds = self.thresholds[metric_name] - if value >= thresholds['critical']: - return 'critical' - elif value >= thresholds['warning']: - return 'warning' - return 'healthy' - - def _check_alert_conditions(self, metric: HealthMetric): - """Check if metric triggers an alert.""" - if metric.status in ['critical', 'warning']: - alert = { - 'timestamp': metric.timestamp.isoformat(), - 'metric_name': metric.name, - 'value': metric.value, - 'unit': metric.unit, - 'status': metric.status, - 'message': f"{metric.name} is {metric.status}: {metric.value}{metric.unit}" - } - - with self._lock: - self.alerts.append(alert) - - # Keep only last 100 alerts - if len(self.alerts) > 100: - self.alerts = self.alerts[-100:] - - def get_current_health_status(self) -> Dict[str, Any]: - """Get current system health status.""" - with self._lock: - latest_metrics = {} - for name, history in self.metrics_history.items(): - if history: - latest_metrics[name] = { - 'value': history[-1].value, - 'unit': history[-1].unit, - 'status': history[-1].status, - 'timestamp': history[-1].timestamp.isoformat() - } - - # 
Calculate overall health status - statuses = [metric['status'] for metric in latest_metrics.values()] - if 'critical' in statuses: - overall_status = 'critical' - elif 'warning' in statuses: - overall_status = 'warning' - else: - overall_status = 'healthy' - - return { - 'overall_status': overall_status, - 'metrics': latest_metrics, - 'recent_alerts': self.alerts[-10:], # Last 10 alerts - 'timestamp': datetime.now().isoformat() - } - - def get_metric_history(self, metric_name: str, hours: int = 24) -> List[Dict]: - """Get history for a specific metric.""" - with self._lock: - if metric_name not in self.metrics_history: - return [] - - cutoff = datetime.now() - timedelta(hours=hours) - history = [ - { - 'value': m.value, - 'status': m.status, - 'timestamp': m.timestamp.isoformat() - } - for m in self.metrics_history[metric_name] - if m.timestamp > cutoff - ] - - return history - - -# Blueprint for health endpoints -health_bp = Blueprint('health', __name__) - -# Global health monitor instance -health_monitor = SystemHealthMonitor() - - -@health_bp.route('/api/health/status') -@optional_auth -def get_health_status(): - """Get current system health status.""" - try: - status = health_monitor.get_current_health_status() - return jsonify({ - 'status': 'success', - 'data': status - }) - except Exception as e: - return jsonify({ - 'status': 'error', - 'message': str(e) - }), 500 - - -@health_bp.route('/api/health/metrics/') -@optional_auth -def get_metric_history(metric_name): - """Get history for a specific metric.""" - try: - hours = int(request.args.get('hours', 24)) - history = health_monitor.get_metric_history(metric_name, hours) - - return jsonify({ - 'status': 'success', - 'data': { - 'metric_name': metric_name, - 'history': history - } - }) - except Exception as e: - return jsonify({ - 'status': 'error', - 'message': str(e) - }), 500 - - -@health_bp.route('/api/health/alerts') -@optional_auth -def get_health_alerts(): - """Get recent health alerts.""" - try: - with health_monitor._lock: - alerts = health_monitor.alerts[-50:] # Last 50 alerts - - return jsonify({ - 'status': 'success', - 'data': { - 'alerts': alerts, - 'count': len(alerts) - } - }) - except Exception as e: - return jsonify({ - 'status': 'error', - 'message': str(e) - }), 500 - - -@health_bp.route('/api/health/start', methods=['POST']) -@require_auth -def start_health_monitoring(): - """Start health monitoring.""" - try: - health_monitor.start_monitoring() - return jsonify({ - 'status': 'success', - 'message': 'Health monitoring started' - }) - except Exception as e: - return jsonify({ - 'status': 'error', - 'message': str(e) - }), 500 - - -@health_bp.route('/api/health/stop', methods=['POST']) -@require_auth -def stop_health_monitoring(): - """Stop health monitoring.""" - try: - health_monitor.stop_monitoring() - return jsonify({ - 'status': 'success', - 'message': 'Health monitoring stopped' - }) - except Exception as e: - return jsonify({ - 'status': 'error', - 'message': str(e) - }), 500 - - -def init_health_monitoring(): - """Initialize and start health monitoring.""" - health_monitor.start_monitoring() - - -def cleanup_health_monitoring(): - """Clean up health monitoring resources.""" - health_monitor.stop_monitoring() - - -# Export main components -__all__ = [ - 'SystemHealthMonitor', - 'HealthMetric', - 'health_bp', - 'health_monitor', - 'init_health_monitoring', - 'cleanup_health_monitoring' -] \ No newline at end of file diff --git a/src/server/services/queue_service.py b/src/server/services/queue_service.py 
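# Illustrative sketch, not part of the patch: how the removed monitoring service
# was typically wired into the Flask app. The create_app() factory, the atexit
# hook and the import path (assumes src/ is on sys.path) are assumptions for
# illustration; health_bp, init_health_monitoring and cleanup_health_monitoring
# are the deleted module's documented exports.
import atexit

from flask import Flask

from server.services.monitoring_service import (
    health_bp,
    init_health_monitoring,
    cleanup_health_monitoring,
)


def create_app() -> Flask:
    app = Flask(__name__)
    app.register_blueprint(health_bp)            # exposes the /api/health/* endpoints
    init_health_monitoring()                     # starts the background sampling thread
    atexit.register(cleanup_health_monitoring)   # stops the monitor on interpreter exit
    return app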
deleted file mode 100644 index 816b55a..0000000 --- a/src/server/services/queue_service.py +++ /dev/null @@ -1,303 +0,0 @@ -from flask import Blueprint, render_template, request, jsonify -from web.controllers.auth_controller import optional_auth -import threading -import time -from datetime import datetime, timedelta - -# Create blueprint for download queue management -download_queue_bp = Blueprint('download_queue', __name__) - -# Global download queue state -download_queue_state = { - 'active_downloads': [], - 'pending_queue': [], - 'completed_downloads': [], - 'failed_downloads': [], - 'queue_lock': threading.Lock(), - 'statistics': { - 'total_items': 0, - 'completed_items': 0, - 'failed_items': 0, - 'estimated_time_remaining': None, - 'current_speed': '0 MB/s', - 'average_speed': '0 MB/s' - } -} - -@download_queue_bp.route('/queue') -@optional_auth -def queue_page(): - """Download queue management page.""" - return render_template('queue.html') - -@download_queue_bp.route('/api/queue/status') -@optional_auth -def get_queue_status(): - """Get detailed download queue status.""" - with download_queue_state['queue_lock']: - # Calculate ETA - eta = None - if download_queue_state['active_downloads']: - active_download = download_queue_state['active_downloads'][0] - if 'progress' in active_download and active_download['progress'].get('speed_mbps', 0) > 0: - remaining_items = len(download_queue_state['pending_queue']) - avg_speed = active_download['progress']['speed_mbps'] - # Rough estimation: assume 500MB per episode - estimated_mb_remaining = remaining_items * 500 - eta_seconds = estimated_mb_remaining / avg_speed if avg_speed > 0 else None - if eta_seconds: - eta = datetime.now() + timedelta(seconds=eta_seconds) - - return jsonify({ - 'active_downloads': download_queue_state['active_downloads'], - 'pending_queue': download_queue_state['pending_queue'], - 'completed_downloads': download_queue_state['completed_downloads'][-10:], # Last 10 - 'failed_downloads': download_queue_state['failed_downloads'][-10:], # Last 10 - 'statistics': { - **download_queue_state['statistics'], - 'eta': eta.isoformat() if eta else None - } - }) - -@download_queue_bp.route('/api/queue/clear', methods=['POST']) -@optional_auth -def clear_queue(): - """Clear completed and failed downloads from queue.""" - try: - data = request.get_json() or {} - queue_type = data.get('type', 'completed') # 'completed', 'failed', or 'all' - - with download_queue_state['queue_lock']: - if queue_type == 'completed' or queue_type == 'all': - download_queue_state['completed_downloads'].clear() - - if queue_type == 'failed' or queue_type == 'all': - download_queue_state['failed_downloads'].clear() - - return jsonify({ - 'status': 'success', - 'message': f'Cleared {queue_type} downloads' - }) - - except Exception as e: - return jsonify({ - 'status': 'error', - 'message': str(e) - }), 500 - -@download_queue_bp.route('/api/queue/retry', methods=['POST']) -@optional_auth -def retry_failed_download(): - """Retry a failed download.""" - try: - data = request.get_json() - download_id = data.get('id') - - if not download_id: - return jsonify({ - 'status': 'error', - 'message': 'Download ID is required' - }), 400 - - with download_queue_state['queue_lock']: - # Find failed download - failed_download = None - for i, download in enumerate(download_queue_state['failed_downloads']): - if download['id'] == download_id: - failed_download = download_queue_state['failed_downloads'].pop(i) - break - - if not failed_download: - return jsonify({ - 'status': 
'error', - 'message': 'Failed download not found' - }), 404 - - # Reset download status and add back to queue - failed_download['status'] = 'queued' - failed_download['error'] = None - failed_download['retry_count'] = failed_download.get('retry_count', 0) + 1 - download_queue_state['pending_queue'].append(failed_download) - - return jsonify({ - 'status': 'success', - 'message': 'Download added back to queue' - }) - - except Exception as e: - return jsonify({ - 'status': 'error', - 'message': str(e) - }), 500 - -@download_queue_bp.route('/api/queue/remove', methods=['POST']) -@optional_auth -def remove_from_queue(): - """Remove an item from the pending queue.""" - try: - data = request.get_json() - download_id = data.get('id') - - if not download_id: - return jsonify({ - 'status': 'error', - 'message': 'Download ID is required' - }), 400 - - with download_queue_state['queue_lock']: - # Find and remove from pending queue - removed = False - for i, download in enumerate(download_queue_state['pending_queue']): - if download['id'] == download_id: - download_queue_state['pending_queue'].pop(i) - removed = True - break - - if not removed: - return jsonify({ - 'status': 'error', - 'message': 'Download not found in queue' - }), 404 - - return jsonify({ - 'status': 'success', - 'message': 'Download removed from queue' - }) - - except Exception as e: - return jsonify({ - 'status': 'error', - 'message': str(e) - }), 500 - -@download_queue_bp.route('/api/queue/reorder', methods=['POST']) -@optional_auth -def reorder_queue(): - """Reorder items in the pending queue.""" - try: - data = request.get_json() - new_order = data.get('order') # Array of download IDs in new order - - if not new_order or not isinstance(new_order, list): - return jsonify({ - 'status': 'error', - 'message': 'Valid order array is required' - }), 400 - - with download_queue_state['queue_lock']: - # Create new queue based on the provided order - old_queue = download_queue_state['pending_queue'].copy() - new_queue = [] - - # Add items in the specified order - for download_id in new_order: - for download in old_queue: - if download['id'] == download_id: - new_queue.append(download) - break - - # Add any remaining items that weren't in the new order - for download in old_queue: - if download not in new_queue: - new_queue.append(download) - - download_queue_state['pending_queue'] = new_queue - - return jsonify({ - 'status': 'success', - 'message': 'Queue reordered successfully' - }) - - except Exception as e: - return jsonify({ - 'status': 'error', - 'message': str(e) - }), 500 - -# Helper functions for queue management -def add_to_download_queue(serie_name, episode_info, priority='normal'): - """Add a download to the queue.""" - import uuid - - download_item = { - 'id': str(uuid.uuid4()), - 'serie_name': serie_name, - 'episode': episode_info, - 'status': 'queued', - 'priority': priority, - 'added_at': datetime.now().isoformat(), - 'started_at': None, - 'completed_at': None, - 'error': None, - 'retry_count': 0, - 'progress': { - 'percent': 0, - 'downloaded_mb': 0, - 'total_mb': 0, - 'speed_mbps': 0, - 'eta_seconds': None - } - } - - with download_queue_state['queue_lock']: - # Insert based on priority - if priority == 'high': - download_queue_state['pending_queue'].insert(0, download_item) - else: - download_queue_state['pending_queue'].append(download_item) - - download_queue_state['statistics']['total_items'] += 1 - - return download_item['id'] - -def update_download_progress(download_id, progress_data): - """Update progress for an 
active download.""" - with download_queue_state['queue_lock']: - for download in download_queue_state['active_downloads']: - if download['id'] == download_id: - download['progress'].update(progress_data) - - # Update global statistics - if 'speed_mbps' in progress_data: - download_queue_state['statistics']['current_speed'] = f"{progress_data['speed_mbps']:.1f} MB/s" - - break - -def move_download_to_completed(download_id, success=True, error=None): - """Move download from active to completed/failed.""" - with download_queue_state['queue_lock']: - download = None - for i, item in enumerate(download_queue_state['active_downloads']): - if item['id'] == download_id: - download = download_queue_state['active_downloads'].pop(i) - break - - if download: - download['completed_at'] = datetime.now().isoformat() - - if success: - download['status'] = 'completed' - download['progress']['percent'] = 100 - download_queue_state['completed_downloads'].append(download) - download_queue_state['statistics']['completed_items'] += 1 - else: - download['status'] = 'failed' - download['error'] = error - download_queue_state['failed_downloads'].append(download) - download_queue_state['statistics']['failed_items'] += 1 - -def start_next_download(): - """Move next queued download to active state.""" - with download_queue_state['queue_lock']: - if download_queue_state['pending_queue'] and len(download_queue_state['active_downloads']) < 3: # Max 3 concurrent - download = download_queue_state['pending_queue'].pop(0) - download['status'] = 'downloading' - download['started_at'] = datetime.now().isoformat() - download_queue_state['active_downloads'].append(download) - return download - return None - -def get_queue_statistics(): - """Get current queue statistics.""" - with download_queue_state['queue_lock']: - return download_queue_state['statistics'].copy() \ No newline at end of file diff --git a/src/server/services/scheduler_service.py b/src/server/services/scheduler_service.py deleted file mode 100644 index 8167ddf..0000000 --- a/src/server/services/scheduler_service.py +++ /dev/null @@ -1,252 +0,0 @@ -import threading -import time -import schedule -from datetime import datetime, timedelta -from typing import Optional, Callable, Dict, Any -import logging -from shared.utils.process_utils import (with_process_lock, RESCAN_LOCK, - ProcessLockError, is_process_running) - -logger = logging.getLogger(__name__) - -class ScheduledOperations: - """Handle scheduled operations like automatic rescans and downloads.""" - - def __init__(self, config_manager, socketio=None): - self.config = config_manager - self.socketio = socketio - self.scheduler_thread = None - self.running = False - self.rescan_callback: Optional[Callable] = None - self.download_callback: Optional[Callable] = None - self.last_scheduled_rescan: Optional[datetime] = None - - # Load scheduled rescan settings - self.scheduled_rescan_enabled = getattr(self.config, 'scheduled_rescan_enabled', False) - self.scheduled_rescan_time = getattr(self.config, 'scheduled_rescan_time', '03:00') - self.auto_download_after_rescan = getattr(self.config, 'auto_download_after_rescan', False) - - def set_rescan_callback(self, callback: Callable): - """Set callback function for performing rescan operations.""" - self.rescan_callback = callback - - def set_download_callback(self, callback: Callable): - """Set callback function for performing download operations.""" - self.download_callback = callback - - def start_scheduler(self): - """Start the background scheduler thread.""" - if 
self.running: - logger.warning("Scheduler is already running") - return - - self.running = True - self.scheduler_thread = threading.Thread(target=self._scheduler_loop, daemon=True) - self.scheduler_thread.start() - logger.info("Scheduled operations started") - - def stop_scheduler(self): - """Stop the background scheduler.""" - self.running = False - schedule.clear() - if self.scheduler_thread and self.scheduler_thread.is_alive(): - self.scheduler_thread.join(timeout=5) - logger.info("Scheduled operations stopped") - - def _scheduler_loop(self): - """Main scheduler loop that runs in background thread.""" - self._setup_scheduled_jobs() - - while self.running: - try: - schedule.run_pending() - time.sleep(60) # Check every minute - except Exception as e: - logger.error(f"Scheduler error: {e}") - time.sleep(60) - - def _setup_scheduled_jobs(self): - """Setup all scheduled jobs based on configuration.""" - schedule.clear() - - if self.scheduled_rescan_enabled and self.scheduled_rescan_time: - try: - schedule.every().day.at(self.scheduled_rescan_time).do(self._perform_scheduled_rescan) - logger.info(f"Scheduled daily rescan at {self.scheduled_rescan_time}") - except Exception as e: - logger.error(f"Error setting up scheduled rescan: {e}") - - def _perform_scheduled_rescan(self): - """Perform the scheduled rescan operation.""" - try: - logger.info("Starting scheduled rescan...") - - # Emit scheduled rescan started event - if self.socketio: - self.socketio.emit('scheduled_rescan_started') - - # Check if rescan is already running - if is_process_running(RESCAN_LOCK): - logger.warning("Rescan is already running, skipping scheduled rescan") - if self.socketio: - self.socketio.emit('scheduled_rescan_skipped', { - 'reason': 'Rescan already in progress' - }) - return - - # Perform the rescan using process lock - @with_process_lock(RESCAN_LOCK, timeout_minutes=180) - def perform_rescan(): - self.last_scheduled_rescan = datetime.now() - - if self.rescan_callback: - result = self.rescan_callback() - logger.info("Scheduled rescan completed successfully") - - if self.socketio: - self.socketio.emit('scheduled_rescan_completed', { - 'timestamp': self.last_scheduled_rescan.isoformat(), - 'result': result - }) - - # Auto-start download if configured - if self.auto_download_after_rescan and self.download_callback: - logger.info("Starting auto-download after scheduled rescan") - threading.Thread( - target=self._perform_auto_download, - daemon=True - ).start() - else: - logger.warning("No rescan callback configured") - - perform_rescan(_locked_by='scheduled_operation') - - except ProcessLockError: - logger.warning("Could not acquire rescan lock for scheduled operation") - if self.socketio: - self.socketio.emit('scheduled_rescan_error', { - 'error': 'Could not acquire rescan lock' - }) - except Exception as e: - logger.error(f"Scheduled rescan failed: {e}") - if self.socketio: - self.socketio.emit('scheduled_rescan_error', { - 'error': str(e) - }) - - def _perform_auto_download(self): - """Perform automatic download after scheduled rescan.""" - try: - # Wait a bit after rescan to let UI update - time.sleep(10) - - if self.download_callback: - # Find series with missing episodes and start download - logger.info("Starting auto-download of missing episodes") - result = self.download_callback() - - if self.socketio: - self.socketio.emit('auto_download_started', { - 'timestamp': datetime.now().isoformat(), - 'result': result - }) - else: - logger.warning("No download callback configured for auto-download") - - except 
Exception as e: - logger.error(f"Auto-download after scheduled rescan failed: {e}") - if self.socketio: - self.socketio.emit('auto_download_error', { - 'error': str(e) - }) - - def update_scheduled_rescan_config(self, enabled: bool, time_str: str, auto_download: bool = False): - """Update scheduled rescan configuration.""" - try: - # Validate time format - if enabled and time_str: - datetime.strptime(time_str, '%H:%M') - - # Update configuration - self.scheduled_rescan_enabled = enabled - self.scheduled_rescan_time = time_str - self.auto_download_after_rescan = auto_download - - # Save to config - self.config.scheduled_rescan_enabled = enabled - self.config.scheduled_rescan_time = time_str - self.config.auto_download_after_rescan = auto_download - self.config.save_config() - - # Restart scheduler with new settings - if self.running: - self._setup_scheduled_jobs() - - logger.info(f"Updated scheduled rescan config: enabled={enabled}, time={time_str}, auto_download={auto_download}") - return True - - except ValueError as e: - logger.error(f"Invalid time format: {time_str}") - raise ValueError(f"Invalid time format. Use HH:MM format.") - except Exception as e: - logger.error(f"Error updating scheduled rescan config: {e}") - raise - - def get_scheduled_rescan_config(self) -> Dict[str, Any]: - """Get current scheduled rescan configuration.""" - next_run = None - if self.scheduled_rescan_enabled and self.scheduled_rescan_time: - try: - # Calculate next run time - now = datetime.now() - today_run = datetime.strptime(f"{now.strftime('%Y-%m-%d')} {self.scheduled_rescan_time}", '%Y-%m-%d %H:%M') - - if now > today_run: - # Next run is tomorrow - next_run = today_run + timedelta(days=1) - else: - # Next run is today - next_run = today_run - - except Exception as e: - logger.error(f"Error calculating next run time: {e}") - - return { - 'enabled': self.scheduled_rescan_enabled, - 'time': self.scheduled_rescan_time, - 'auto_download_after_rescan': self.auto_download_after_rescan, - 'next_run': next_run.isoformat() if next_run else None, - 'last_run': self.last_scheduled_rescan.isoformat() if self.last_scheduled_rescan else None, - 'is_running': self.running - } - - def trigger_manual_scheduled_rescan(self): - """Manually trigger a scheduled rescan (for testing purposes).""" - logger.info("Manually triggering scheduled rescan") - threading.Thread(target=self._perform_scheduled_rescan, daemon=True).start() - - def get_next_scheduled_jobs(self) -> list: - """Get list of all scheduled jobs with their next run times.""" - jobs = [] - for job in schedule.jobs: - jobs.append({ - 'job_func': job.job_func.__name__ if hasattr(job.job_func, '__name__') else str(job.job_func), - 'next_run': job.next_run.isoformat() if job.next_run else None, - 'interval': str(job.interval), - 'unit': job.unit - }) - return jobs - - -# Global scheduler instance -scheduled_operations = None - -def init_scheduler(config_manager, socketio=None): - """Initialize the global scheduler.""" - global scheduled_operations - scheduled_operations = ScheduledOperations(config_manager, socketio) - return scheduled_operations - -def get_scheduler(): - """Get the global scheduler instance.""" - return scheduled_operations \ No newline at end of file diff --git a/src/server/services/search_service.py b/src/server/services/search_service.py deleted file mode 100644 index f60b06e..0000000 --- a/src/server/services/search_service.py +++ /dev/null @@ -1,1361 +0,0 @@ -""" -Advanced Search and Filters Manager - -This module provides advanced search 
functionality, filtering capabilities, -and search result management for the AniWorld web interface. -""" - -from typing import Dict, List, Any, Optional, Set -import re -from datetime import datetime, timedelta -from flask import Blueprint, request, jsonify -import json - -class AdvancedSearchManager: - """Manages advanced search and filtering functionality.""" - - def __init__(self, app=None): - self.app = app - self.search_history = [] - self.saved_searches = {} - self.filter_presets = { - 'recent': { - 'name': 'Recently Added', - 'filters': { - 'date_added': {'operator': 'last_days', 'value': 7} - } - }, - 'downloading': { - 'name': 'Currently Downloading', - 'filters': { - 'status': {'operator': 'equals', 'value': 'downloading'} - } - }, - 'completed': { - 'name': 'Completed Series', - 'filters': { - 'status': {'operator': 'equals', 'value': 'completed'} - } - }, - 'high_rated': { - 'name': 'Highly Rated', - 'filters': { - 'rating': {'operator': 'greater_than', 'value': 8.0} - } - } - } - - def init_app(self, app): - """Initialize with Flask app.""" - self.app = app - - def get_search_js(self): - """Generate JavaScript code for advanced search functionality.""" - return f""" -// AniWorld Advanced Search Manager -class AdvancedSearchManager {{ - constructor() {{ - this.currentFilters = {{}}; - this.searchHistory = []; - this.savedSearches = {{}}; - this.filterPresets = {json.dumps(self.filter_presets)}; - this.searchResults = []; - this.sortBy = 'name'; - this.sortOrder = 'asc'; - this.init(); - }} - - init() {{ - this.createSearchInterface(); - this.setupSearchEvents(); - this.loadSearchHistory(); - this.loadSavedSearches(); - this.setupKeyboardShortcuts(); - }} - - createSearchInterface() {{ - this.createSearchBar(); - this.createAdvancedFilters(); - this.createSearchResults(); - this.createQuickFilters(); - }} - - createSearchBar() {{ - const existingSearch = document.querySelector('.advanced-search-bar'); - if (existingSearch) return; - - const searchContainer = document.createElement('div'); - searchContainer.className = 'advanced-search-bar mb-4'; - searchContainer.innerHTML = ` -
-
-
- - - - -
-
-
- -
-
-
- - - -
-
-
- `; - - // Insert at the top of main content - const mainContent = document.querySelector('.main-content, .container-fluid'); - if (mainContent) {{ - mainContent.insertBefore(searchContainer, mainContent.firstChild); - }} - }} - - createAdvancedFilters() {{ - const filtersModal = document.createElement('div'); - filtersModal.id = 'advanced-filters-modal'; - filtersModal.className = 'modal fade'; - filtersModal.innerHTML = ` - - `; - - document.body.appendChild(filtersModal); - - // Populate filter presets - this.populateFilterPresets(); - }} - - createSearchResults() {{ - const existingResults = document.querySelector('.search-results-container'); - if (existingResults) return; - - const resultsContainer = document.createElement('div'); - resultsContainer.className = 'search-results-container'; - resultsContainer.innerHTML = ` -
-
- 0 results - -
-
- - -
-
-
- -
-
- -
- `; - - const mainContent = document.querySelector('.main-content, .container-fluid'); - if (mainContent) {{ - mainContent.appendChild(resultsContainer); - }} - }} - - createQuickFilters() {{ - const quickFiltersContainer = document.createElement('div'); - quickFiltersContainer.className = 'quick-filters mb-3'; - quickFiltersContainer.innerHTML = ` -
- Quick Filters: - - - - -
-
- -
- `; - - const searchBar = document.querySelector('.advanced-search-bar'); - if (searchBar) {{ - searchBar.parentNode.insertBefore(quickFiltersContainer, searchBar.nextSibling); - }} - }} - - setupSearchEvents() {{ - // Search input events - const searchInput = document.getElementById('search-input'); - if (searchInput) {{ - searchInput.addEventListener('input', this.handleSearchInput.bind(this)); - searchInput.addEventListener('keypress', (e) => {{ - if (e.key === 'Enter') {{ - this.performSearch(); - }} - }}); - }} - - // Search button - document.getElementById('search-btn')?.addEventListener('click', this.performSearch.bind(this)); - - // Advanced filters button - document.getElementById('advanced-filters-btn')?.addEventListener('click', this.showAdvancedFilters.bind(this)); - - // Clear search button - document.getElementById('clear-search-btn')?.addEventListener('click', this.clearSearch.bind(this)); - - // Save search button - document.getElementById('save-search-btn')?.addEventListener('click', this.saveCurrentSearch.bind(this)); - - // Sort controls - document.getElementById('sort-by')?.addEventListener('change', this.handleSortChange.bind(this)); - document.getElementById('sort-order-btn')?.addEventListener('click', this.toggleSortOrder.bind(this)); - - // Quick filter tags - document.addEventListener('click', (e) => {{ - if (e.target.classList.contains('filter-tag')) {{ - this.applyFilterPreset(e.target.dataset.preset); - }} - }}); - - // Filter date change - document.getElementById('filter-date-added')?.addEventListener('change', (e) => {{ - const customRange = document.getElementById('custom-date-range'); - if (e.target.value === 'custom') {{ - customRange.classList.remove('d-none'); - }} else {{ - customRange.classList.add('d-none'); - }} - }}); - - // Apply filters button - document.getElementById('apply-filters-btn')?.addEventListener('click', this.applyAdvancedFilters.bind(this)); - - // Clear filters button - document.getElementById('clear-filters-btn')?.addEventListener('click', this.clearAllFilters.bind(this)); - }} - - setupKeyboardShortcuts() {{ - document.addEventListener('keydown', (e) => {{ - if (e.ctrlKey || e.metaKey) {{ - switch(e.key) {{ - case 'f': - e.preventDefault(); - document.getElementById('search-input')?.focus(); - break; - case 'k': - e.preventDefault(); - this.showAdvancedFilters(); - break; - }} - }} - }}); - }} - - handleSearchInput(e) {{ - const query = e.target.value; - - // Show suggestions after 2 characters - if (query.length >= 2) {{ - this.showSearchSuggestions(query); - }} else {{ - this.hideSearchSuggestions(); - }} - }} - - showSearchSuggestions(query) {{ - // Implement search suggestions - // This would call an API to get suggestions - fetch(`/api/search/suggestions?q=${{encodeURIComponent(query)}}`) - .then(response => response.json()) - .then(data => {{ - this.displaySuggestions(data.suggestions); - }}) - .catch(error => console.error('Error fetching suggestions:', error)); - }} - - displaySuggestions(suggestions) {{ - // Display search suggestions dropdown - let suggestionsDropdown = document.getElementById('search-suggestions'); - if (!suggestionsDropdown) {{ - suggestionsDropdown = document.createElement('div'); - suggestionsDropdown.id = 'search-suggestions'; - suggestionsDropdown.className = 'search-suggestions dropdown-menu show'; - - const searchInput = document.getElementById('search-input'); - searchInput.parentNode.appendChild(suggestionsDropdown); - }} - - suggestionsDropdown.innerHTML = suggestions.map(suggestion => ` - - - 
${{suggestion.text}} - ${{suggestion.type}} - - `).join(''); - - // Add click handlers - suggestionsDropdown.querySelectorAll('.suggestion-item').forEach(item => {{ - item.addEventListener('click', (e) => {{ - e.preventDefault(); - document.getElementById('search-input').value = item.dataset.value; - this.performSearch(); - this.hideSearchSuggestions(); - }}); - }}); - }} - - hideSearchSuggestions() {{ - const suggestionsDropdown = document.getElementById('search-suggestions'); - if (suggestionsDropdown) {{ - suggestionsDropdown.remove(); - }} - }} - - async performSearch() {{ - const query = document.getElementById('search-input').value; - const searchType = document.getElementById('search-type').value; - - if (!query.trim()) {{ - this.clearResults(); - return; - }} - - // Add to search history - this.addToSearchHistory(query); - - // Show loading - this.showSearchLoading(); - - try {{ - const searchParams = {{ - query: query, - type: searchType, - filters: this.currentFilters, - sort_by: this.sortBy, - sort_order: this.sortOrder - }}; - - const response = await fetch('/api/search', {{ - method: 'POST', - headers: {{ - 'Content-Type': 'application/json' - }}, - body: JSON.stringify(searchParams) - }}); - - const data = await response.json(); - - if (data.success) {{ - this.searchResults = data.results; - this.displaySearchResults(data); - this.updateSearchInfo(query, data.total_results); - }} else {{ - this.showSearchError(data.error); - }} - }} catch (error) {{ - console.error('Search error:', error); - this.showSearchError('Search failed. Please try again.'); - }} - }} - - showSearchLoading() {{ - const resultsContainer = document.getElementById('search-results'); - if (resultsContainer) {{ - resultsContainer.innerHTML = ` -
-
- Searching... -
-

Searching...

-
- `; - }} - }} - - displaySearchResults(data) {{ - const resultsContainer = document.getElementById('search-results'); - if (!resultsContainer) return; - - if (data.results.length === 0) {{ - resultsContainer.innerHTML = ` -
- -

No results found. Try adjusting your search terms or filters.

-
- `; - return; - }} - - const resultsHTML = data.results.map(item => ` -
-
-
-
-
${{item.name}}
-
- ${{item.genre || 'Unknown'}} - ${{item.year || 'N/A'}} - ${{item.episodes || 0}} episodes - ${{item.rating ? ` ${{item.rating}}` : ''}} -
-
-
-
- - -
-
-
-
-
- `).join(''); - - resultsContainer.innerHTML = resultsHTML; - - // Update pagination if needed - this.updatePagination(data); - }} - - updateSearchInfo(query, totalResults) {{ - const resultsCount = document.querySelector('.results-count'); - const searchQuery = document.querySelector('.search-query'); - - if (resultsCount) {{ - resultsCount.textContent = `${{totalResults}} result${{totalResults === 1 ? '' : 's'}}`; - }} - - if (searchQuery) {{ - searchQuery.textContent = query ? `for "${{query}}"` : ''; - }} - }} - - showSearchError(error) {{ - const resultsContainer = document.getElementById('search-results'); - if (resultsContainer) {{ - resultsContainer.innerHTML = ` -
- - ${{error}} -
- `; - }} - }} - - clearResults() {{ - const resultsContainer = document.getElementById('search-results'); - if (resultsContainer) {{ - resultsContainer.innerHTML = ''; - }} - this.updateSearchInfo('', 0); - }} - - clearSearch() {{ - document.getElementById('search-input').value = ''; - document.getElementById('search-type').value = 'all'; - this.currentFilters = {{}}; - this.clearResults(); - this.updateActiveFilters(); - this.hideSearchSuggestions(); - }} - - applyFilterPreset(presetName) {{ - const preset = this.filterPresets[presetName]; - if (preset) {{ - this.currentFilters = {{ ...preset.filters }}; - this.updateActiveFilters(); - this.performSearch(); - }} - }} - - showAdvancedFilters() {{ - const modal = document.getElementById('advanced-filters-modal'); - if (modal) {{ - const bsModal = new bootstrap.Modal(modal); - bsModal.show(); - }} - }} - - applyAdvancedFilters() {{ - // Collect filter values from the modal - const filters = {{}}; - - // Genre filter - const genre = document.getElementById('filter-genre'); - if (genre.selectedOptions.length > 0) {{ - filters.genre = Array.from(genre.selectedOptions).map(o => o.value); - }} - - // Year range - const yearFrom = document.getElementById('filter-year-from').value; - const yearTo = document.getElementById('filter-year-to').value; - if (yearFrom || yearTo) {{ - filters.year_range = {{ from: yearFrom, to: yearTo }}; - }} - - // Rating range - const ratingMin = document.getElementById('filter-rating-min').value; - const ratingMax = document.getElementById('filter-rating-max').value; - if (ratingMin || ratingMax) {{ - filters.rating_range = {{ min: ratingMin, max: ratingMax }}; - }} - - // Status - const status = document.getElementById('filter-status'); - if (status.selectedOptions.length > 0) {{ - filters.status = Array.from(status.selectedOptions).map(o => o.value); - }} - - // Episode count range - const episodesMin = document.getElementById('filter-episodes-min').value; - const episodesMax = document.getElementById('filter-episodes-max').value; - if (episodesMin || episodesMax) {{ - filters.episodes_range = {{ min: episodesMin, max: episodesMax }}; - }} - - // Date added - const dateAdded = document.getElementById('filter-date-added').value; - if (dateAdded) {{ - if (dateAdded === 'custom') {{ - const dateFrom = document.getElementById('filter-date-from').value; - const dateTo = document.getElementById('filter-date-to').value; - if (dateFrom || dateTo) {{ - filters.date_range = {{ from: dateFrom, to: dateTo }}; - }} - }} else {{ - filters.date_added = dateAdded; - }} - }} - - this.currentFilters = filters; - this.updateActiveFilters(); - - // Close modal and perform search - const modal = bootstrap.Modal.getInstance(document.getElementById('advanced-filters-modal')); - modal.hide(); - - this.performSearch(); - }} - - clearAllFilters() {{ - this.currentFilters = {{}}; - - // Clear form fields - document.getElementById('filter-genre').selectedIndex = -1; - document.getElementById('filter-year-from').value = ''; - document.getElementById('filter-year-to').value = ''; - document.getElementById('filter-rating-min').value = ''; - document.getElementById('filter-rating-max').value = ''; - document.getElementById('filter-status').selectedIndex = -1; - document.getElementById('filter-episodes-min').value = ''; - document.getElementById('filter-episodes-max').value = ''; - document.getElementById('filter-date-added').value = ''; - document.getElementById('filter-date-from').value = ''; - document.getElementById('filter-date-to').value = 
''; - - this.updateActiveFilters(); - this.performSearch(); - }} - - updateActiveFilters() {{ - const activeFiltersContainer = document.getElementById('active-filters'); - if (!activeFiltersContainer) return; - - const filterTags = []; - - for (const [key, value] of Object.entries(this.currentFilters)) {{ - let filterText = ''; - - switch(key) {{ - case 'genre': - filterText = `Genre: ${{value.join(', ')}}`; - break; - case 'year_range': - filterText = `Year: ${{value.from || '?'}} - ${{value.to || '?'}}`; - break; - case 'rating_range': - filterText = `Rating: ${{value.min || '?'}} - ${{value.max || '?'}}`; - break; - case 'status': - filterText = `Status: ${{value.join(', ')}}`; - break; - case 'episodes_range': - filterText = `Episodes: ${{value.min || '?'}} - ${{value.max || '?'}}`; - break; - case 'date_added': - filterText = `Added: ${{value}}`; - break; - case 'date_range': - filterText = `Date: ${{value.from || '?'}} - ${{value.to || '?'}}`; - break; - }} - - if (filterText) {{ - filterTags.push(` - - ${{filterText}} - - - `); - }} - }} - - if (filterTags.length > 0) {{ - activeFiltersContainer.innerHTML = ` -
- Active filters: - ${{filterTags.join('')}} -
- `; - }} else {{ - activeFiltersContainer.innerHTML = ''; - }} - }} - - removeFilter(key) {{ - delete this.currentFilters[key]; - this.updateActiveFilters(); - this.performSearch(); - }} - - handleSortChange(e) {{ - this.sortBy = e.target.value; - this.performSearch(); - }} - - toggleSortOrder() {{ - this.sortOrder = this.sortOrder === 'asc' ? 'desc' : 'asc'; - - const btn = document.getElementById('sort-order-btn'); - const icon = btn.querySelector('i'); - - if (this.sortOrder === 'desc') {{ - icon.className = 'fas fa-sort-alpha-up'; - }} else {{ - icon.className = 'fas fa-sort-alpha-down'; - }} - - this.performSearch(); - }} - - addToSearchHistory(query) {{ - // Remove if already exists - this.searchHistory = this.searchHistory.filter(item => item.query !== query); - - // Add to beginning - this.searchHistory.unshift({{ - query: query, - timestamp: Date.now(), - filters: {{ ...this.currentFilters }} - }}); - - // Keep only last 10 - if (this.searchHistory.length > 10) {{ - this.searchHistory = this.searchHistory.slice(0, 10); - }} - - this.saveSearchHistory(); - this.updateSearchHistoryMenu(); - }} - - loadSearchHistory() {{ - const stored = localStorage.getItem('aniworld_search_history'); - if (stored) {{ - try {{ - this.searchHistory = JSON.parse(stored); - }} catch (error) {{ - console.error('Error loading search history:', error); - this.searchHistory = []; - }} - }} - this.updateSearchHistoryMenu(); - }} - - saveSearchHistory() {{ - localStorage.setItem('aniworld_search_history', JSON.stringify(this.searchHistory)); - }} - - updateSearchHistoryMenu() {{ - const menu = document.getElementById('search-history-menu'); - if (!menu) return; - - if (this.searchHistory.length === 0) {{ - menu.innerHTML = '
  • No recent searches
  • '; - return; - }} - - const historyItems = this.searchHistory.map(item => ` -
  • - -
    - ${{item.query}} - ${{this.formatTimestamp(item.timestamp)}} -
    -
    -
  • - `).join(''); - - menu.innerHTML = ` -
  • - ${{historyItems}} -
  • -
  • Clear History
  • - `; - - // Add click handlers - menu.querySelectorAll('.history-item').forEach(item => {{ - item.addEventListener('click', (e) => {{ - e.preventDefault(); - document.getElementById('search-input').value = item.dataset.query; - this.currentFilters = JSON.parse(item.dataset.filters); - this.updateActiveFilters(); - this.performSearch(); - }}); - }}); - }} - - clearSearchHistory() {{ - this.searchHistory = []; - this.saveSearchHistory(); - this.updateSearchHistoryMenu(); - }} - - saveCurrentSearch() {{ - const query = document.getElementById('search-input').value; - if (!query.trim()) return; - - const name = prompt('Enter a name for this search:'); - if (!name) return; - - const searchId = Date.now().toString(); - this.savedSearches[searchId] = {{ - name: name, - query: query, - filters: {{ ...this.currentFilters }}, - created: Date.now() - }}; - - this.saveSavedSearches(); - this.populateSavedSearches(); - - this.showToast('Search saved successfully', 'success'); - }} - - loadSavedSearches() {{ - const stored = localStorage.getItem('aniworld_saved_searches'); - if (stored) {{ - try {{ - this.savedSearches = JSON.parse(stored); - }} catch (error) {{ - console.error('Error loading saved searches:', error); - this.savedSearches = {{}}; - }} - }} - }} - - saveSavedSearches() {{ - localStorage.setItem('aniworld_saved_searches', JSON.stringify(this.savedSearches)); - }} - - populateSavedSearches() {{ - const container = document.getElementById('saved-searches-list'); - if (!container) return; - - if (Object.keys(this.savedSearches).length === 0) {{ - container.innerHTML = '

    No saved searches

    '; - return; - }} - - const searches = Object.entries(this.savedSearches).map(([id, search]) => ` -
    -
    -
    - ${{search.name}} -
    ${{search.query}}
    -
    ${{this.formatTimestamp(search.created)}}
    -
    -
    - - -
    -
    -
    - `).join(''); - - container.innerHTML = searches; - }} - - loadSavedSearch(searchId) {{ - const search = this.savedSearches[searchId]; - if (!search) return; - - document.getElementById('search-input').value = search.query; - this.currentFilters = {{ ...search.filters }}; - this.updateActiveFilters(); - this.performSearch(); - - // Close modal - const modal = bootstrap.Modal.getInstance(document.getElementById('advanced-filters-modal')); - if (modal) {{ - modal.hide(); - }} - }} - - deleteSavedSearch(searchId) {{ - if (confirm('Are you sure you want to delete this saved search?')) {{ - delete this.savedSearches[searchId]; - this.saveSavedSearches(); - this.populateSavedSearches(); - }} - }} - - populateFilterPresets() {{ - const container = document.getElementById('filter-presets'); - if (!container) return; - - const presets = Object.entries(this.filterPresets).map(([key, preset]) => ` - - `).join(''); - - container.innerHTML = presets; - }} - - formatTimestamp(timestamp) {{ - const date = new Date(timestamp); - const now = new Date(); - const diff = now - date; - - if (diff < 60000) return 'Just now'; - if (diff < 3600000) return `${{Math.floor(diff / 60000)}}m ago`; - if (diff < 86400000) return `${{Math.floor(diff / 3600000)}}h ago`; - - return date.toLocaleDateString(); - }} - - showToast(message, type = 'info') {{ - // Create and show a toast notification - const toast = document.createElement('div'); - toast.className = `toast align-items-center text-white bg-${{type === 'error' ? 'danger' : type}}`; - toast.innerHTML = ` -
    -
    ${{message}}
    - -
    - `; - - let toastContainer = document.querySelector('.toast-container'); - if (!toastContainer) {{ - toastContainer = document.createElement('div'); - toastContainer.className = 'toast-container position-fixed bottom-0 end-0 p-3'; - document.body.appendChild(toastContainer); - }} - - toastContainer.appendChild(toast); - const bsToast = new bootstrap.Toast(toast); - bsToast.show(); - - toast.addEventListener('hidden.bs.toast', () => {{ - if (toast.parentNode) {{ - toastContainer.removeChild(toast); - }} - }}); - }} -}} - -// Initialize advanced search when DOM is loaded -document.addEventListener('DOMContentLoaded', () => {{ - window.advancedSearch = new AdvancedSearchManager(); -}}); -""" - - def get_css(self): - """Generate CSS for advanced search functionality.""" - return """ -/* Advanced Search Styles */ -.advanced-search-bar { - background: var(--bs-light); - border-radius: 8px; - padding: 1rem; - border: 1px solid var(--bs-border-color); -} - -.search-suggestions { - position: absolute; - top: 100%; - left: 0; - right: 0; - z-index: 1000; - max-height: 300px; - overflow-y: auto; -} - -.suggestion-item { - display: flex; - justify-content: between; - align-items: center; -} - -.suggestion-item i { - margin-right: 0.5rem; - width: 16px; -} - -.quick-filters .filter-tag.active { - background-color: var(--bs-primary); - color: white; -} - -.active-filters .badge { - display: inline-flex; - align-items: center; -} - -.active-filters .btn-close { - --bs-btn-close-bg: none; -} - -.search-result-item { - transition: transform 0.2s ease, box-shadow 0.2s ease; -} - -.search-result-item:hover { - transform: translateY(-2px); - box-shadow: 0 4px 8px rgba(0,0,0,0.1); -} - -.search-info .results-count { - font-weight: 600; - color: var(--bs-primary); -} - -.sort-controls { - display: flex; - align-items: center; - gap: 0.5rem; -} - -#advanced-filters-modal .modal-dialog { - max-width: 800px; -} - -#advanced-filters-modal h6 { - color: var(--bs-primary); - border-bottom: 1px solid var(--bs-border-color); - padding-bottom: 0.5rem; - margin-bottom: 1rem; -} - -.saved-search-item { - background: var(--bs-light); -} - -.saved-search-item:hover { - background: var(--bs-secondary-bg); -} - -/* Search loading animation */ -.search-results .spinner-border { - width: 3rem; - height: 3rem; -} - -/* Responsive design */ -@media (max-width: 768px) { - .advanced-search-bar .row { - gap: 0.5rem; - } - - .advanced-search-bar .col-md-3, - .advanced-search-bar .col-md-6 { - flex: 1 1 100%; - max-width: 100%; - } - - .quick-filters { - flex-direction: column; - align-items: flex-start; - } - - .quick-filters > div { - flex-wrap: wrap; - } - - .search-result-item .row { - flex-direction: column; - } - - .search-result-item .text-end { - text-align: start !important; - margin-top: 0.5rem; - } - - .sort-controls { - justify-content: space-between; - width: 100%; - } -} - -/* Dark theme support */ -[data-bs-theme="dark"] .advanced-search-bar { - background: var(--bs-dark); - border-color: var(--bs-border-color-translucent); -} - -[data-bs-theme="dark"] .search-suggestions { - background: var(--bs-dark); - border-color: var(--bs-border-color-translucent); -} - -[data-bs-theme="dark"] .saved-search-item { - background: var(--bs-dark); -} - -/* Animation for search results */ -@keyframes fadeInUp { - from { - opacity: 0; - transform: translateY(20px); - } - to { - opacity: 1; - transform: translateY(0); - } -} - -.search-result-item { - animation: fadeInUp 0.3s ease-out; -} - -/* Accessibility improvements */ 
-.search-suggestions .dropdown-item:focus { - background-color: var(--bs-primary); - color: var(--bs-white); -} - -@media (prefers-reduced-motion: reduce) { - .search-result-item, - .search-result-item:hover { - transition: none; - transform: none; - } - - .search-result-item { - animation: none; - } -} -""" - - def search_series(self, query: str, search_type: str = 'all', filters: Dict[str, Any] = None, - sort_by: str = 'name', sort_order: str = 'asc') -> Dict[str, Any]: - """Search for series based on query and filters.""" - try: - # This would implement actual search logic - # For now, return mock data - - results = [ - { - 'id': '1', - 'name': 'Attack on Titan', - 'genre': 'Action', - 'year': 2013, - 'episodes': 75, - 'rating': 9.0, - 'status': 'completed' - }, - { - 'id': '2', - 'name': 'Death Note', - 'genre': 'Thriller', - 'year': 2006, - 'episodes': 37, - 'rating': 9.1, - 'status': 'completed' - } - ] - - # Apply search query filtering - if query: - results = [r for r in results if query.lower() in r['name'].lower()] - - # Apply filters if provided - if filters: - if 'genre' in filters: - genres = filters['genre'] if isinstance(filters['genre'], list) else [filters['genre']] - results = [r for r in results if r.get('genre') in genres] - - if 'status' in filters: - statuses = filters['status'] if isinstance(filters['status'], list) else [filters['status']] - results = [r for r in results if r.get('status') in statuses] - - if 'year_range' in filters: - year_range = filters['year_range'] - if year_range.get('from'): - results = [r for r in results if r.get('year', 0) >= int(year_range['from'])] - if year_range.get('to'): - results = [r for r in results if r.get('year', 0) <= int(year_range['to'])] - - # Apply sorting - if sort_by in ['name', 'year', 'rating', 'episodes']: - reverse = (sort_order == 'desc') - results.sort(key=lambda x: x.get(sort_by, ''), reverse=reverse) - - return { - 'success': True, - 'results': results, - 'total_results': len(results), - 'query': query, - 'filters': filters or {} - } - - except Exception as e: - return { - 'success': False, - 'error': str(e), - 'results': [], - 'total_results': 0 - } - - def get_search_suggestions(self, query: str) -> List[Dict[str, Any]]: - """Get search suggestions for a query.""" - suggestions = [] - - # Mock suggestions - replace with actual implementation - if 'attack' in query.lower(): - suggestions.append({ - 'text': 'Attack on Titan', - 'value': 'Attack on Titan', - 'type': 'Series', - 'icon': 'fa-film' - }) - - if 'action' in query.lower(): - suggestions.append({ - 'text': 'Action', - 'value': 'Action', - 'type': 'Genre', - 'icon': 'fa-tags' - }) - - return suggestions - - -# Create search API blueprint -search_bp = Blueprint('search', __name__, url_prefix='/api') - -# Global search manager instance -search_manager = AdvancedSearchManager() - -@search_bp.route('/search', methods=['POST']) -def search(): - """Perform search with query and filters.""" - try: - data = request.get_json() - query = data.get('query', '') - search_type = data.get('type', 'all') - filters = data.get('filters', {}) - sort_by = data.get('sort_by', 'name') - sort_order = data.get('sort_order', 'asc') - - result = search_manager.search_series(query, search_type, filters, sort_by, sort_order) - return jsonify(result) - - except Exception as e: - return jsonify({ - 'success': False, - 'error': str(e), - 'results': [], - 'total_results': 0 - }), 500 - -@search_bp.route('/search/suggestions', methods=['GET']) -def get_suggestions(): - """Get search 
suggestions.""" - try: - query = request.args.get('q', '') - suggestions = search_manager.get_search_suggestions(query) - - return jsonify({ - 'success': True, - 'suggestions': suggestions - }) - - except Exception as e: - return jsonify({ - 'success': False, - 'error': str(e), - 'suggestions': [] - }), 500 - - -# Export the search manager -advanced_search_manager = AdvancedSearchManager() \ No newline at end of file diff --git a/src/server/services/setup_service.py b/src/server/services/setup_service.py deleted file mode 100644 index 76cd5ca..0000000 --- a/src/server/services/setup_service.py +++ /dev/null @@ -1,268 +0,0 @@ -""" -Setup service for detecting and managing application setup state. - -This service determines if the application is properly configured and set up, -following the application flow pattern: setup → auth → main application. -""" - -import json -import logging -import sqlite3 -from datetime import datetime -from pathlib import Path -from typing import Any, Dict, List, Optional - -logger = logging.getLogger(__name__) - - -class SetupService: - """Service for managing application setup detection and configuration.""" - - def __init__(self, config_path: str = "data/config.json", db_path: str = "data/aniworld.db"): - """Initialize the setup service with configuration and database paths.""" - self.config_path = Path(config_path) - self.db_path = Path(db_path) - self._config_cache: Optional[Dict[str, Any]] = None - - def is_setup_complete(self) -> bool: - """ - Check if the application setup is complete. - - Setup is considered complete if: - 1. Configuration file exists and is valid - 2. Database exists and is accessible - 3. Master password is configured - 4. Setup completion flag is set (if present) - - Returns: - bool: True if setup is complete, False otherwise - """ - try: - # Check if configuration file exists and is valid - if not self._is_config_valid(): - logger.info("Setup incomplete: Configuration file is missing or invalid") - return False - - # Check if database exists and is accessible - if not self._is_database_accessible(): - logger.info("Setup incomplete: Database is not accessible") - return False - - # Check if master password is configured - if not self._is_master_password_configured(): - logger.info("Setup incomplete: Master password is not configured") - return False - - # Check for explicit setup completion flag - config = self.get_config() - if config and config.get("setup", {}).get("completed") is False: - logger.info("Setup incomplete: Setup completion flag is False") - return False - - logger.debug("Setup validation complete: All checks passed") - return True - - except Exception as e: - logger.error(f"Error checking setup completion: {e}") - return False - - def _is_config_valid(self) -> bool: - """Check if the configuration file exists and contains valid JSON.""" - try: - if not self.config_path.exists(): - return False - - config = self.get_config() - return config is not None and isinstance(config, dict) - - except Exception as e: - logger.error(f"Configuration validation error: {e}") - return False - - def _is_database_accessible(self) -> bool: - """Check if the database exists and is accessible.""" - try: - if not self.db_path.exists(): - return False - - # Try to connect and perform a simple query - with sqlite3.connect(str(self.db_path)) as conn: - cursor = conn.cursor() - cursor.execute("SELECT name FROM sqlite_master WHERE type='table' LIMIT 1") - return True - - except Exception as e: - logger.error(f"Database accessibility check 
failed: {e}") - return False - - def _is_master_password_configured(self) -> bool: - """Check if master password is properly configured.""" - try: - config = self.get_config() - if not config: - return False - - security_config = config.get("security", {}) - - # Check if password hash exists - password_hash = security_config.get("master_password_hash") - salt = security_config.get("salt") - - return bool(password_hash and salt and len(password_hash) > 0 and len(salt) > 0) - - except Exception as e: - logger.error(f"Master password configuration check failed: {e}") - return False - - def get_config(self, force_reload: bool = False) -> Optional[Dict[str, Any]]: - """ - Get the configuration data from the config file. - - Args: - force_reload: If True, reload config from file even if cached - - Returns: - dict: Configuration data or None if not accessible - """ - try: - if self._config_cache is None or force_reload: - if not self.config_path.exists(): - return None - - with open(self.config_path, 'r', encoding='utf-8') as f: - self._config_cache = json.load(f) - - return self._config_cache - - except Exception as e: - logger.error(f"Error loading configuration: {e}") - return None - - def mark_setup_complete(self, config_updates: Optional[Dict[str, Any]] = None) -> bool: - """ - Mark the setup as completed and optionally update configuration. - - Args: - config_updates: Additional configuration updates to apply - - Returns: - bool: True if successful, False otherwise - """ - try: - config = self.get_config() or {} - - # Update configuration with any provided updates - if config_updates: - config.update(config_updates) - - # Set setup completion flag - if "setup" not in config: - config["setup"] = {} - config["setup"]["completed"] = True - config["setup"]["completed_at"] = str(datetime.utcnow()) - - # Save updated configuration - return self._save_config(config) - - except Exception as e: - logger.error(f"Error marking setup as complete: {e}") - return False - - def reset_setup(self) -> bool: - """ - Reset the setup completion status (for development/testing). - - Returns: - bool: True if successful, False otherwise - """ - try: - config = self.get_config() - if not config: - return False - - # Remove or set setup completion flag to false - if "setup" in config: - config["setup"]["completed"] = False - - return self._save_config(config) - - except Exception as e: - logger.error(f"Error resetting setup: {e}") - return False - - def _save_config(self, config: Dict[str, Any]) -> bool: - """Save configuration to file.""" - try: - # Ensure directory exists - self.config_path.parent.mkdir(parents=True, exist_ok=True) - - # Save configuration - with open(self.config_path, 'w', encoding='utf-8') as f: - json.dump(config, f, indent=4, ensure_ascii=False) - - # Clear cache to force reload on next access - self._config_cache = None - - logger.info(f"Configuration saved to {self.config_path}") - return True - - except Exception as e: - logger.error(f"Error saving configuration: {e}") - return False - - def get_setup_requirements(self) -> Dict[str, bool]: - """ - Get detailed breakdown of setup requirements and their status. 
- - Returns: - dict: Dictionary with requirement names and their completion status - """ - config = self.get_config() - return { - "config_file_exists": self.config_path.exists(), - "config_file_valid": self._is_config_valid(), - "database_exists": self.db_path.exists(), - "database_accessible": self._is_database_accessible(), - "master_password_configured": self._is_master_password_configured(), - "setup_marked_complete": bool(config and config.get("setup", {}).get("completed", True)) - } - - def get_missing_requirements(self) -> List[str]: - """ - Get list of missing setup requirements. - - Returns: - list: List of missing requirement descriptions - """ - requirements = self.get_setup_requirements() - missing = [] - - if not requirements["config_file_exists"]: - missing.append("Configuration file is missing") - elif not requirements["config_file_valid"]: - missing.append("Configuration file is invalid or corrupted") - - if not requirements["database_exists"]: - missing.append("Database file is missing") - elif not requirements["database_accessible"]: - missing.append("Database is not accessible or corrupted") - - if not requirements["master_password_configured"]: - missing.append("Master password is not configured") - - if not requirements["setup_marked_complete"]: - missing.append("Setup process was not completed") - - return missing - - -# Convenience functions for easy import -def is_setup_complete() -> bool: - """Convenience function to check if setup is complete.""" - service = SetupService() - return service.is_setup_complete() - - -def get_setup_service() -> SetupService: - """Get a configured setup service instance.""" - return SetupService()
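# Illustrative sketch, not part of the patch: how the removed setup gate fit the
# setup -> auth -> main application flow. The "setup" blueprint/endpoint names,
# the before_request hook and the import path (assumes src/ is on sys.path) are
# assumptions; SetupService, is_setup_complete() and get_missing_requirements()
# are the deleted module's documented API.
from flask import Flask, redirect, request, url_for

from server.services.setup_service import SetupService, is_setup_complete

app = Flask(__name__)


@app.before_request
def enforce_setup():
    # Let the setup pages and static assets through; gate everything else.
    endpoint = request.endpoint or ""
    if endpoint.startswith("setup.") or endpoint == "static":
        return None
    if not is_setup_complete():
        # Log what is still missing so the setup UI can display it.
        missing = SetupService().get_missing_requirements()
        app.logger.info("Setup incomplete: %s", ", ".join(missing))
        return redirect(url_for("setup.setup_page"))
    return None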
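# Illustrative sketch, not part of the patch: the lifecycle of one item through the
# removed queue_service helpers. The series/episode values are made up and the
# import path assumes src/ is on sys.path; the four helper functions and their
# signatures come from the deleted module.
from server.services.queue_service import (
    add_to_download_queue,
    move_download_to_completed,
    start_next_download,
    update_download_progress,
)

# Enqueue an episode; "high" priority items are inserted at the front of the pending queue.
download_id = add_to_download_queue(
    "Attack on Titan", {"season": 1, "episode": 5}, priority="high"
)

# Promote the next pending item to active (the module caps this at 3 concurrent downloads).
active_item = start_next_download()

# Report progress; speed_mbps also feeds the queue-wide current_speed statistic.
update_download_progress(download_id, {"percent": 42, "downloaded_mb": 210, "speed_mbps": 6.5})

# On completion the item moves to completed_downloads (or failed_downloads on error).
move_download_to_completed(download_id, success=True)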