moved routing

This commit is contained in:
2025-09-29 14:13:15 +02:00
parent b73210a3c9
commit 423b77033c
20 changed files with 2328 additions and 1267 deletions

View File

@@ -19,7 +19,7 @@ def get_logging_config():
"""Get current logging configuration."""
try:
# Import here to avoid circular imports
from logging_config import logging_config as log_config
from server.infrastructure.logging.config import logging_config as log_config
config_data = {
'log_level': config.log_level,
@@ -67,7 +67,7 @@ def update_logging_config():
# Update runtime logging level
try:
from logging_config import logging_config as log_config
from server.infrastructure.logging.config import logging_config as log_config
log_config.update_log_level(config.log_level)
except ImportError:
# Fallback for basic logging
@@ -99,7 +99,7 @@ def update_logging_config():
def list_log_files():
"""Get list of available log files."""
try:
from logging_config import logging_config as log_config
from server.infrastructure.logging.config import logging_config as log_config
log_files = log_config.get_log_files()
@@ -200,7 +200,7 @@ def cleanup_logs():
days = int(data.get('days', 30))
days = max(1, min(days, 365)) # Limit between 1-365 days
from logging_config import logging_config as log_config
from server.infrastructure.logging.config import logging_config as log_config
cleaned_files = log_config.cleanup_old_logs(days)
logger.info(f"Cleaned up {len(cleaned_files)} old log files (older than {days} days)")
@@ -232,14 +232,14 @@ def test_logging():
# Test fail2ban logging
try:
from logging_config import log_auth_failure
from server.infrastructure.logging.config import log_auth_failure
log_auth_failure("127.0.0.1", "test_user")
except ImportError:
pass
# Test download progress logging
try:
from logging_config import log_download_progress
from server.infrastructure.logging.config import log_download_progress
log_download_progress("Test Series", "S01E01", 50.0, "1.2 MB/s", "5m 30s")
except ImportError:
pass

View File

@@ -64,7 +64,7 @@ class SessionManager:
if config.enable_fail2ban_logging:
try:
# Import here to avoid circular imports
from logging_config import log_auth_failure
from server.infrastructure.logging.config import log_auth_failure
log_auth_failure(ip_address, username)
except ImportError:
# Fallback to simple logging if new system not available

View File

@@ -0,0 +1,42 @@
"""
Routes package for Aniworld web application.
"""

# Re-export whichever blueprints import cleanly; optional route modules
# that are absent are tolerated so a partial deployment can still start.
__all__ = []

try:
    from .auth_routes import auth_bp, auth_api_bp
    __all__.extend(['auth_bp', 'auth_api_bp'])
except ImportError:
    pass

try:
    from .api_routes import api_bp
    __all__.append('api_bp')
except ImportError:
    pass

try:
    from .main_routes import main_bp
    __all__.append('main_bp')
except ImportError:
    pass

try:
    from .static_routes import static_bp
    __all__.append('static_bp')
except ImportError:
    pass

try:
    from .diagnostic_routes import diagnostic_bp
    __all__.append('diagnostic_bp')
except ImportError:
    pass

try:
    from .config_routes import config_bp
    __all__.append('config_bp')
except ImportError:
    pass

View File

@@ -0,0 +1,827 @@
"""
API routes for series management, downloads, and operations.
"""
from flask import Blueprint, request, jsonify
from flask_socketio import emit
import threading
from datetime import datetime
from functools import wraps
from web.controllers.auth_controller import optional_auth, require_auth
api_bp = Blueprint('api', __name__, url_prefix='/api')
# Global variables to store app state
series_app = None  # SeriesApp instance, populated by init_series_app()
is_scanning = False  # True while a rescan background thread is active
is_downloading = False  # True while the download queue is being processed
should_stop_downloads = False  # Cooperative stop flag read by the download loop
# Placeholder process lock constants and functions
RESCAN_LOCK = "rescan"
DOWNLOAD_LOCK = "download"
CLEANUP_LOCK = "cleanup"

# Simple in-memory process lock registry: lock name -> metadata dict.
# NOTE(review): relies on the GIL making the check-then-set in
# acquire_lock atomic enough for this use — confirm if moving to
# multiple worker processes.
_active_locks = {}


def is_process_running(lock_name):
    """Return True if the named process lock is currently held."""
    return lock_name in _active_locks


def acquire_lock(lock_name, locked_by="system"):
    """Acquire a process lock.

    Raises:
        ProcessLockError: if the lock is already held.
    """
    if lock_name in _active_locks:
        raise ProcessLockError(f"Process {lock_name} is already running")
    _active_locks[lock_name] = {
        'locked_by': locked_by,
        'timestamp': datetime.now()
    }


def release_lock(lock_name):
    """Release a process lock (no-op if it is not held)."""
    if lock_name in _active_locks:
        del _active_locks[lock_name]


class ProcessLockError(Exception):
    """Raised when a process lock is already held by someone else."""
    pass


def with_process_lock(lock_name, timeout_minutes=30):
    """Decorator that runs the wrapped function under the named lock.

    The wrapped function accepts an optional ``_locked_by`` keyword,
    recorded for bookkeeping only. ``timeout_minutes`` is currently
    unused (placeholder for a future stale-lock timeout).

    Bug fix: the original released the lock in ``finally`` even when
    ``acquire_lock`` raised because ANOTHER caller held it — silently
    stealing that caller's lock. The lock is now released only when
    this call actually acquired it.
    """
    def decorator(f):
        @wraps(f)
        def decorated_function(*args, **kwargs):
            # Extract locked_by from kwargs if provided
            locked_by = kwargs.pop('_locked_by', 'system')
            acquire_lock(lock_name, locked_by)  # may raise ProcessLockError
            try:
                return f(*args, **kwargs)
            finally:
                release_lock(lock_name)
        return decorated_function
    return decorator
# Simple decorator to replace handle_api_errors
def handle_api_errors(f):
    """Wrap an endpoint so uncaught exceptions become a JSON 500 response."""
    @wraps(f)
    def decorated_function(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        except Exception as exc:  # boundary handler: report, don't crash
            return jsonify({'status': 'error', 'message': str(exc)}), 500
    return decorated_function
def init_series_app():
    """Initialize the module-level SeriesApp from the configured directory."""
    global series_app
    # Local imports avoid a circular dependency at module load time.
    from config import config
    from main import SeriesApp
    series_app = SeriesApp(config.anime_directory)
    return series_app
# SocketIO instance; injected from app.py via set_socketio().
socketio = None


def set_socketio(socket_instance):
    """Set the socketio instance for this blueprint."""
    global socketio
    socketio = socket_instance
@api_bp.route('/config/directory', methods=['POST'])
@require_auth
def update_directory():
    """Update anime directory configuration."""
    try:
        from config import config
        payload = request.get_json()
        new_directory = payload.get('directory')
        if not new_directory:
            return jsonify({
                'success': False,
                'error': 'Directory is required'
            }), 400
        # Persist the new directory, then rebuild the SeriesApp against it.
        config.anime_directory = new_directory
        config.save_config()
        init_series_app()
        return jsonify({
            'success': True,
            'message': 'Directory updated successfully',
            'directory': new_directory
        })
    except Exception as e:
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500
@api_bp.route('/series', methods=['GET'])
@optional_auth
def get_series():
    """Get all series data."""
    try:
        if series_app is None or series_app.List is None:
            return jsonify({
                'status': 'success',
                'series': [],
                'total_series': 0,
                'message': 'No series data available. Please perform a scan to load series.'
            })
        series_data = []
        for serie in series_app.List.GetList():
            # NOTE(review): total and missing use the same sum over
            # episodeDict — presumably the dict holds missing episodes
            # only; confirm against the scanner.
            episode_count = sum(
                len(episodes) for episodes in serie.episodeDict.values()
            )
            series_data.append({
                'folder': serie.folder,
                'name': serie.name or serie.folder,
                'total_episodes': episode_count,
                'missing_episodes': episode_count,
                'status': 'ongoing',
                'episodes': dict(serie.episodeDict.items())
            })
        return jsonify({
            'status': 'success',
            'series': series_data,
            'total_series': len(series_data)
        })
    except Exception as e:
        # Deliberately report success with an empty list: a 500 here
        # caused client-side reload loops.
        print(f"Error in get_series: {e}")
        return jsonify({
            'status': 'success',
            'series': [],
            'total_series': 0,
            'message': 'Error loading series data. Please try rescanning.'
        })
@api_bp.route('/search', methods=['POST'])
@optional_auth
@handle_api_errors
def search_series():
    """Search for series online."""
    try:
        payload = request.get_json()
        if not payload or 'query' not in payload:
            return jsonify({
                'status': 'error',
                'message': 'Search query is required'
            }), 400
        query = payload['query'].strip()
        if not query:
            return jsonify({
                'status': 'error',
                'message': 'Search query cannot be empty'
            }), 400
        if series_app is None:
            return jsonify({
                'status': 'error',
                'message': 'Series application not initialized'
            }), 500
        search_results = series_app.search(query)
        # Keep only well-formed {name, link} entries for the frontend.
        results = [
            {'name': item['name'], 'link': item['link']}
            for item in (search_results or [])
            if isinstance(item, dict) and 'name' in item and 'link' in item
        ]
        return jsonify({
            'status': 'success',
            'results': results,
            'total': len(results)
        })
    except Exception as e:
        return jsonify({
            'status': 'error',
            'message': f'Search failed: {str(e)}'
        }), 500
@api_bp.route('/add_series', methods=['POST'])
@optional_auth
@handle_api_errors
def add_series():
    """Add a new series to the collection."""
    try:
        from server.core.entities.series import Serie
        payload = request.get_json()
        if not payload:
            return jsonify({
                'status': 'error',
                'message': 'Request data is required'
            }), 400
        if 'link' not in payload or 'name' not in payload:
            return jsonify({
                'status': 'error',
                'message': 'Both link and name are required'
            }), 400
        link = payload['link'].strip()
        name = payload['name'].strip()
        if not link or not name:
            return jsonify({
                'status': 'error',
                'message': 'Link and name cannot be empty'
            }), 400
        if series_app is None:
            return jsonify({
                'status': 'error',
                'message': 'Series application not initialized'
            }), 500
        # Create and register the new series; empty episode dict until a scan.
        new_serie = Serie(link, name, "aniworld.to", link, {})
        series_app.List.add(new_serie)
        return jsonify({
            'status': 'success',
            'message': f'Series "{name}" added successfully'
        })
    except Exception as e:
        return jsonify({
            'status': 'error',
            'message': f'Failed to add series: {str(e)}'
        }), 500
@api_bp.route('/rescan', methods=['POST'])
@optional_auth
def rescan_series():
    """Rescan/reinit the series directory in a background thread."""
    global is_scanning
    # Refuse if a rescan is already in flight (lock or legacy flag).
    if is_process_running(RESCAN_LOCK) or is_scanning:
        return jsonify({
            'status': 'error',
            'message': 'Rescan is already running. Please wait for it to complete.',
            'is_running': True
        }), 409

    def scan_thread():
        global is_scanning
        try:
            # Process lock prevents duplicate rescans across requests.
            @with_process_lock(RESCAN_LOCK, timeout_minutes=120)
            def perform_rescan():
                global is_scanning
                is_scanning = True
                try:
                    from server.core.entities import SerieList
                    if socketio:
                        socketio.emit('scan_started')
                    # Reinit and walk the directory, streaming progress.
                    series_app.SerieScanner.Reinit()
                    series_app.SerieScanner.Scan(
                        lambda folder, counter: socketio.emit('scan_progress', {
                            'folder': folder,
                            'counter': counter
                        }) if socketio else None
                    )
                    # Refresh the in-memory series list.
                    series_app.List = SerieList.SerieList(series_app.directory_to_search)
                    series_app.__InitList__()
                    if socketio:
                        socketio.emit('scan_completed')
                except Exception as e:
                    if socketio:
                        socketio.emit('scan_error', {'message': str(e)})
                    raise
                finally:
                    is_scanning = False

            perform_rescan(_locked_by='web_interface')
        except ProcessLockError:
            if socketio:
                socketio.emit('scan_error', {'message': 'Rescan is already running'})
        except Exception as e:
            if socketio:
                socketio.emit('scan_error', {'message': str(e)})

    threading.Thread(target=scan_thread, daemon=True).start()
    return jsonify({
        'status': 'success',
        'message': 'Rescan started'
    })
# Download endpoint - adds items to queue
@api_bp.route('/download', methods=['POST'])
@optional_auth
def download_series():
    """Add selected series to download queue."""
    try:
        payload = request.get_json()
        if not payload or 'folders' not in payload:
            return jsonify({
                'status': 'error',
                'message': 'Folders list is required'
            }), 400
        folders = payload['folders']
        if not folders:
            return jsonify({
                'status': 'error',
                'message': 'No series selected'
            }), 400

        from application.services.queue_service import add_to_download_queue

        added_count = 0
        for folder in folders:
            try:
                # Locate the matching serie in the loaded list, if any.
                serie = None
                if series_app and series_app.List:
                    for candidate in series_app.List.GetList():
                        if candidate.folder == folder:
                            serie = candidate
                            break
                if serie is None:
                    # Unknown folder: queue a "check required" marker entry.
                    add_to_download_queue(
                        serie_name=folder,
                        episode_info={
                            'folder': folder,
                            'episode_number': 'Unknown',
                            'title': 'Serie Check Required',
                            'url': '',
                            'serie_name': folder
                        },
                        priority='normal'
                    )
                    added_count += 1
                elif serie.episodeDict:
                    # One queue entry per missing season/episode pair.
                    display_name = serie.name or folder
                    for season, episodes in serie.episodeDict.items():
                        for episode in episodes:
                            add_to_download_queue(
                                serie_name=display_name,
                                episode_info={
                                    'folder': folder,
                                    'season': season,
                                    'episode_number': episode,
                                    'title': f'S{season:02d}E{episode:02d}',
                                    'url': '',  # populated during actual download
                                    'serie_name': display_name
                                },
                                priority='normal'
                            )
                            added_count += 1
                else:
                    # No missing episodes: queue a completion marker entry.
                    display_name = serie.name or folder
                    add_to_download_queue(
                        serie_name=display_name,
                        episode_info={
                            'folder': folder,
                            'season': None,
                            'episode_number': 'Complete',
                            'title': 'No missing episodes',
                            'url': '',
                            'serie_name': display_name
                        },
                        priority='normal'
                    )
                    added_count += 1
            except Exception as e:
                print(f"Error processing folder {folder}: {e}")
                continue

        if added_count > 0:
            return jsonify({
                'status': 'success',
                'message': f'Added {added_count} items to download queue'
            })
        return jsonify({
            'status': 'error',
            'message': 'No items could be added to the queue'
        }), 400
    except Exception as e:
        return jsonify({
            'status': 'error',
            'message': f'Failed to add to queue: {str(e)}'
        }), 500
@api_bp.route('/queue/start', methods=['POST'])
@optional_auth
def start_download_queue():
    """Start processing the download queue in a background thread."""
    global is_downloading, should_stop_downloads
    # Refuse if a download run is already in flight (lock or legacy flag).
    if is_process_running(DOWNLOAD_LOCK) or is_downloading:
        return jsonify({
            'status': 'error',
            'message': 'Download is already running. Please wait for it to complete.',
            'is_running': True
        }), 409

    def download_thread():
        global is_downloading, should_stop_downloads
        should_stop_downloads = False  # reset stop flag when starting
        try:
            # Process lock prevents duplicate download runs.
            @with_process_lock(DOWNLOAD_LOCK, timeout_minutes=720)  # 12 hours max
            def perform_downloads():
                global is_downloading, should_stop_downloads
                is_downloading = True
                try:
                    from application.services.queue_service import (
                        start_next_download,
                        move_download_to_completed,
                        update_download_progress,
                    )
                    if socketio:
                        socketio.emit('download_started')
                    # Drain the queue until empty or a stop is requested.
                    while True:
                        if should_stop_downloads:
                            should_stop_downloads = False  # reset the flag
                            break
                        current_download = start_next_download()
                        if not current_download:
                            break  # no more items in queue
                        try:
                            if socketio:
                                socketio.emit('download_progress', {
                                    'id': current_download['id'],
                                    'serie': current_download['serie_name'],
                                    'episode': current_download['episode']['episode_number'],
                                    'status': 'downloading'
                                })
                            # Find the serie in our series list to get the key.
                            serie = None
                            if series_app and series_app.List:
                                for s in series_app.List.GetList():
                                    if s.folder == current_download['episode']['folder']:
                                        serie = s
                                        break
                            if not serie:
                                raise Exception(f"Serie not found: {current_download['episode']['folder']}")
                            if not hasattr(serie, 'key') or not serie.key:
                                raise Exception(f"Serie '{serie.name or serie.folder}' has no valid key. Please rescan or search for this series first.")
                            if current_download['episode']['episode_number'] == 'Complete':
                                # Marker entry: nothing to download, complete it now.
                                move_download_to_completed(current_download['id'], success=True)
                                if socketio:
                                    socketio.emit('download_completed', {
                                        'id': current_download['id'],
                                        'serie': current_download['serie_name'],
                                        'episode': 'No missing episodes'
                                    })
                                continue

                            def progress_callback(d):
                                """Relay downloader progress dicts to queue state and UI."""
                                global should_stop_downloads
                                if should_stop_downloads:
                                    return
                                if d['status'] == 'downloading':
                                    total = d.get('total_bytes') or d.get('total_bytes_estimate')
                                    downloaded = d.get('downloaded_bytes', 0)
                                    if total and downloaded:
                                        percent = (downloaded / total) * 100
                                        speed_bytes_per_sec = d.get('speed', 0) or 0
                                        # Convert bytes/s to megabits/s for display.
                                        speed_mbps = (speed_bytes_per_sec * 8) / (1024 * 1024) if speed_bytes_per_sec else 0
                                        eta_seconds = 0
                                        if speed_bytes_per_sec > 0:
                                            remaining_bytes = total - downloaded
                                            eta_seconds = remaining_bytes / speed_bytes_per_sec
                                        update_download_progress(current_download['id'], {
                                            'percent': percent,
                                            'speed_mbps': speed_mbps,
                                            'eta_seconds': eta_seconds,
                                            'downloaded_bytes': downloaded,
                                            'total_bytes': total
                                        })
                                        if socketio:
                                            socketio.emit('download_progress', {
                                                'id': current_download['id'],
                                                'serie': current_download['serie_name'],
                                                'episode': current_download['episode']['episode_number'],
                                                'progress': percent,
                                                'speed_mbps': speed_mbps,
                                                'eta_seconds': eta_seconds
                                            })
                                    else:
                                        # Total size unknown: report raw MB downloaded.
                                        downloaded_mb = downloaded / (1024 * 1024) if downloaded else 0
                                        if socketio:
                                            socketio.emit('download_progress', {
                                                'id': current_download['id'],
                                                'serie': current_download['serie_name'],
                                                'episode': current_download['episode']['episode_number'],
                                                'progress': 0,
                                                'downloaded_mb': downloaded_mb
                                            })
                                elif d['status'] == 'finished':
                                    update_download_progress(current_download['id'], {
                                        'percent': 100,
                                        'speed_mbps': 0,
                                        'eta_seconds': 0
                                    })

                            loader = series_app.Loaders.GetLoader(key="aniworld.to")
                            # Honor a stop request before starting this item.
                            if should_stop_downloads:
                                move_download_to_completed(current_download['id'], success=False, error='Download stopped by user')
                                if socketio:
                                    socketio.emit('download_stopped', {
                                        'message': 'Download queue stopped by user'
                                    })
                                should_stop_downloads = False
                                break
                            season = current_download['episode']['season']
                            episode_num = current_download['episode']['episode_number']
                            try:
                                episode_num = int(episode_num)
                            except (ValueError, TypeError):
                                raise Exception(f"Invalid episode number: {episode_num}")
                            if season is None:
                                season = 1  # default to season 1
                            try:
                                season = int(season)
                            except (ValueError, TypeError):
                                raise Exception(f"Invalid season number: {season}")
                            print(f"Starting download: {serie.name} S{season:02d}E{episode_num:02d}")
                            if not loader.IsLanguage(season, episode_num, serie.key):
                                raise Exception(f"Episode S{season:02d}E{episode_num:02d} not available in German Dub")
                            # Up to 3 attempts with a short pause between retries.
                            success = False
                            for attempt in range(3):
                                if should_stop_downloads:
                                    break
                                try:
                                    success = loader.Download(
                                        baseDirectory=series_app.directory_to_search,
                                        serieFolder=serie.folder,
                                        season=season,
                                        episode=episode_num,
                                        key=serie.key,
                                        language="German Dub",
                                        progress_callback=progress_callback
                                    )
                                    if success:
                                        break
                                except Exception as e:
                                    if attempt == 2:  # last attempt
                                        raise e
                                    import time
                                    time.sleep(2)  # wait before retry
                            if should_stop_downloads:
                                move_download_to_completed(current_download['id'], success=False, error='Download stopped by user')
                                if socketio:
                                    socketio.emit('download_stopped', {
                                        'message': 'Download queue stopped by user'
                                    })
                                should_stop_downloads = False
                                break
                            if success:
                                move_download_to_completed(current_download['id'], success=True)
                                if socketio:
                                    socketio.emit('download_completed', {
                                        'id': current_download['id'],
                                        'serie': current_download['serie_name'],
                                        'episode': current_download['episode']['episode_number']
                                    })
                            else:
                                raise Exception("Download failed after all retry attempts")
                        except Exception as e:
                            # Mark this item failed and move on to the next.
                            move_download_to_completed(current_download['id'], success=False, error=str(e))
                            if socketio:
                                socketio.emit('download_error', {
                                    'id': current_download['id'],
                                    'serie': current_download['serie_name'],
                                    'episode': current_download['episode']['episode_number'],
                                    'error': str(e)
                                })
                    if socketio:
                        socketio.emit('download_queue_completed')
                except Exception as e:
                    if socketio:
                        socketio.emit('download_error', {'message': str(e)})
                    raise
                finally:
                    is_downloading = False

            perform_downloads(_locked_by='web_interface')
        except ProcessLockError:
            if socketio:
                socketio.emit('download_error', {'message': 'Download is already running'})
        except Exception as e:
            if socketio:
                socketio.emit('download_error', {'message': str(e)})

    threading.Thread(target=download_thread, daemon=True).start()
    return jsonify({
        'status': 'success',
        'message': 'Download queue processing started'
    })
@api_bp.route('/queue/stop', methods=['POST'])
@optional_auth
def stop_download_queue():
    """Request a graceful stop of the download queue."""
    global is_downloading, should_stop_downloads
    # Nothing to stop if no download run is active.
    if not is_downloading and not is_process_running(DOWNLOAD_LOCK):
        return jsonify({
            'status': 'error',
            'message': 'No download is currently running'
        }), 400
    # Only set the cooperative flag; the worker thread clears
    # is_downloading itself, avoiding a race with an in-flight download.
    should_stop_downloads = True
    if socketio:
        socketio.emit('download_stop_requested')
    return jsonify({
        'status': 'success',
        'message': 'Download stop requested. Downloads will stop gracefully.'
    })
@api_bp.route('/status', methods=['GET'])
@handle_api_errors
@optional_auth
def get_status():
    """Get current system status.

    Returns the configured anime directory and a placeholder series
    count (not yet wired to the scanner).

    Fix: removed a dead inner try/except that wrapped a constant
    assignment (``series_count = 0``) and could never raise.
    """
    import os
    try:
        # Get anime directory from environment or config
        anime_directory = os.environ.get('ANIME_DIRECTORY', 'Not configured')
        # Placeholder until the actual series scanner count is wired in.
        series_count = 0
        return jsonify({
            'success': True,
            'directory': anime_directory,
            'series_count': series_count,
            'timestamp': datetime.now().isoformat()
        })
    except Exception as e:
        return jsonify({
            'success': False,
            'error': str(e),
            'directory': 'Error',
            'series_count': 0
        })
@api_bp.route('/process/locks/status', methods=['GET'])
@handle_api_errors
@optional_auth
def process_locks_status():
    """Get current process lock status for rescan and download."""
    try:
        # Build a status entry per lock from the in-memory registry.
        def _lock_entry(lock_name):
            held = is_process_running(lock_name)
            return {
                'is_locked': held,
                'locked_by': 'system' if held else None,
                'lock_time': None  # could be extended to track actual lock times
            }

        locks = {
            'rescan': _lock_entry(RESCAN_LOCK),
            'download': _lock_entry(DOWNLOAD_LOCK)
        }
        return jsonify({
            'success': True,
            'locks': locks,
            'timestamp': datetime.now().isoformat()
        })
    except Exception as e:
        return jsonify({
            'success': False,
            'error': str(e),
            'locks': {
                'rescan': {'is_locked': False, 'locked_by': None, 'lock_time': None},
                'download': {'is_locked': False, 'locked_by': None, 'lock_time': None}
            }
        })
# Initialize the series app when the blueprint is loaded; a failure
# leaves series_app as None and endpoints degrade gracefully.
try:
    init_series_app()
except Exception as e:
    print(f"Failed to initialize series app in API blueprint: {e}")
    series_app = None

View File

@@ -0,0 +1,132 @@
"""
Authentication routes.
"""
from flask import Blueprint, render_template, request, jsonify, redirect, url_for
from web.controllers.auth_controller import session_manager, require_auth
# Create separate blueprints for API and page routes
auth_bp = Blueprint('auth', __name__)  # page routes: /login, /setup
auth_api_bp = Blueprint('auth_api', __name__, url_prefix='/api/auth')  # JSON endpoints
# Import config at module level to avoid circular imports
from config import config
def init_series_app():
    """Build and return a SeriesApp for the configured anime directory."""
    from main import SeriesApp
    return SeriesApp(config.anime_directory)
# API Routes
@auth_api_bp.route('/setup', methods=['POST'])
def auth_setup():
    """Complete initial setup."""
    # Setup is one-shot: refuse once a master password exists.
    if config.has_master_password():
        return jsonify({
            'status': 'error',
            'message': 'Setup already completed'
        }), 400
    try:
        payload = request.get_json()
        password = payload.get('password')
        directory = payload.get('directory')
        if not password or len(password) < 8:
            return jsonify({
                'status': 'error',
                'message': 'Password must be at least 8 characters long'
            }), 400
        if not directory:
            return jsonify({
                'status': 'error',
                'message': 'Directory is required'
            }), 400
        # Persist master password and directory, then rebuild the app.
        config.set_master_password(password)
        config.anime_directory = directory
        config.save_config()
        init_series_app()
        return jsonify({
            'status': 'success',
            'message': 'Setup completed successfully'
        })
    except Exception as e:
        return jsonify({
            'status': 'error',
            'message': str(e)
        }), 500
@auth_api_bp.route('/login', methods=['POST'])
def auth_login():
    """Authenticate user."""
    try:
        payload = request.get_json()
        password = payload.get('password')
        if not password:
            return jsonify({
                'status': 'error',
                'message': 'Password is required'
            }), 400
        # Delegate verification (and lockout bookkeeping) to the session manager.
        result = session_manager.login(password, request.remote_addr)
        return jsonify(result)
    except Exception as e:
        return jsonify({
            'status': 'error',
            'message': str(e)
        }), 500
@auth_api_bp.route('/logout', methods=['POST'])
@require_auth
def auth_logout():
    """Logout user and invalidate the current session."""
    session_manager.logout()
    return jsonify({
        'status': 'success',
        'message': 'Logged out successfully'
    })
@auth_api_bp.route('/status', methods=['GET'])
def auth_status():
    """Get authentication status for the current client."""
    return jsonify({
        'authenticated': session_manager.is_authenticated(),
        'has_master_password': config.has_master_password(),
        'setup_required': not config.has_master_password(),
        'session_info': session_manager.get_session_info()
    })
# Page Routes (Non-API)
@auth_bp.route('/login')
def login():
    """Login page; redirects to setup if unconfigured, to index if logged in."""
    if not config.has_master_password():
        return redirect(url_for('auth.setup'))
    if session_manager.is_authenticated():
        return redirect(url_for('main.index'))
    return render_template(
        'login.html',
        session_timeout=config.session_timeout_hours,
        max_attempts=config.max_failed_attempts,
        lockout_duration=config.lockout_duration_minutes
    )
@auth_bp.route('/setup')
def setup():
    """Initial setup page; redirects to login once setup is done."""
    if config.has_master_password():
        return redirect(url_for('auth.login'))
    return render_template('setup.html', current_directory=config.anime_directory)

View File

@@ -0,0 +1,191 @@
"""
Configuration management routes.
"""
from flask import Blueprint, jsonify, request
from datetime import datetime
from functools import wraps
from web.controllers.auth_controller import optional_auth, require_auth
# All config endpoints below hang off /api (e.g. /api/logging/config).
config_bp = Blueprint('config', __name__, url_prefix='/api')
# Simple decorator to handle API errors
def handle_api_errors(f):
    """Wrap an endpoint so uncaught exceptions become a JSON 500 response."""
    @wraps(f)
    def decorated_function(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        except Exception as exc:  # boundary handler: report, don't crash
            return jsonify({'status': 'error', 'message': str(exc)}), 500
    return decorated_function
# Scheduler configuration endpoints
@config_bp.route('/scheduler/config', methods=['GET'])
@handle_api_errors
@optional_auth
def get_scheduler_config():
    """Get scheduler configuration (static placeholder values)."""
    return jsonify({
        'success': True,
        'config': {
            'enabled': False,
            'time': '03:00',
            'auto_download_after_rescan': False,
            'next_run': None,
            'last_run': None,
            'is_running': False
        }
    })
@config_bp.route('/scheduler/config', methods=['POST'])
@handle_api_errors
@optional_auth
def set_scheduler_config():
    """Set scheduler configuration (placeholder: nothing is persisted)."""
    return jsonify({
        'success': True,
        'message': 'Scheduler configuration saved (placeholder)'
    })
# Logging configuration endpoints
@config_bp.route('/logging/config', methods=['GET'])
@handle_api_errors
@optional_auth
def get_logging_config():
    """Get logging configuration (static placeholder values)."""
    return jsonify({
        'success': True,
        'config': {
            'log_level': 'INFO',
            'enable_console_logging': True,
            'enable_console_progress': True,
            'enable_fail2ban_logging': False
        }
    })
@config_bp.route('/logging/config', methods=['POST'])
@handle_api_errors
@optional_auth
def set_logging_config():
    """Set logging configuration (placeholder: nothing is persisted)."""
    return jsonify({
        'success': True,
        'message': 'Logging configuration saved (placeholder)'
    })
@config_bp.route('/logging/files', methods=['GET'])
@handle_api_errors
@optional_auth
def get_log_files():
    """Get available log files (placeholder: always empty)."""
    return jsonify({
        'success': True,
        'files': []
    })
@config_bp.route('/logging/test', methods=['POST'])
@handle_api_errors
@optional_auth
def test_logging():
    """Test logging functionality (placeholder: no log lines emitted)."""
    return jsonify({
        'success': True,
        'message': 'Test logging completed (placeholder)'
    })
@config_bp.route('/logging/cleanup', methods=['POST'])
@handle_api_errors
@optional_auth
def cleanup_logs():
    """Clean up old log files (placeholder: reports but deletes nothing)."""
    payload = request.get_json()
    # Default retention window is 30 days when no body is supplied.
    days = payload.get('days', 30) if payload else 30
    return jsonify({
        'success': True,
        'message': f'Log files older than {days} days have been cleaned up (placeholder)'
    })
@config_bp.route('/logging/files/<filename>/tail')
@handle_api_errors
@optional_auth
def tail_log_file(filename):
    """Get the tail of a log file (placeholder: no file is read).

    Fix: the content string contained the literal "(unknown)" — an
    extraction artifact — where the requested filename belongs.
    """
    lines = request.args.get('lines', 100, type=int)
    return jsonify({
        'success': True,
        'content': f'Last {lines} lines of {filename} (placeholder)',
        'filename': filename
    })
# Advanced configuration endpoints
@config_bp.route('/config/section/advanced', methods=['GET'])
@handle_api_errors
@optional_auth
def get_advanced_config():
    """Get advanced configuration (static placeholder values)."""
    return jsonify({
        'success': True,
        'config': {
            'max_concurrent_downloads': 3,
            'provider_timeout': 30,
            'enable_debug_mode': False
        }
    })
@config_bp.route('/config/section/advanced', methods=['POST'])
@handle_api_errors
@optional_auth
def set_advanced_config():
    """Set advanced configuration (placeholder: payload is not persisted)."""
    data = request.get_json()
    # Persistence of the posted config is not implemented yet.
    return jsonify({
        'success': True,
        'message': 'Advanced configuration saved successfully'
    })
# Configuration backup endpoints
@config_bp.route('/config/backup', methods=['POST'])
@handle_api_errors
@optional_auth
def create_config_backup():
    """Create a configuration backup (placeholder: only names a file)."""
    return jsonify({
        'success': True,
        'message': 'Configuration backup created successfully',
        'filename': f'config_backup_{datetime.now().strftime("%Y%m%d_%H%M%S")}.json'
    })
@config_bp.route('/config/backups', methods=['GET'])
@handle_api_errors
@optional_auth
def get_config_backups():
    """Get list of configuration backups (placeholder: always empty)."""
    return jsonify({
        'success': True,
        'backups': []  # would normally list actual backup files
    })
@config_bp.route('/config/backup/<filename>/restore', methods=['POST'])
@handle_api_errors
@optional_auth
def restore_config_backup(filename):
    """Restore a configuration backup (placeholder: nothing is restored).

    Fix: the f-string contained the literal "(unknown)" — an extraction
    artifact — instead of interpolating the requested filename.
    """
    return jsonify({
        'success': True,
        'message': f'Configuration restored from {filename}'
    })
@config_bp.route('/config/backup/<filename>/download', methods=['GET'])
@handle_api_errors
@optional_auth
def download_config_backup(filename):
    """Download a configuration backup file (placeholder: no file served)."""
    return jsonify({
        'success': True,
        'message': 'Backup download endpoint (placeholder)'
    })

View File

@@ -0,0 +1,176 @@
"""
Diagnostic and monitoring routes.
"""
from flask import Blueprint, jsonify, request
from datetime import datetime
from functools import wraps
from web.controllers.auth_controller import optional_auth, require_auth
# Diagnostics endpoints below hang off /api/diagnostics (network, errors).
diagnostic_bp = Blueprint('diagnostic', __name__, url_prefix='/api/diagnostics')
# Simple decorator to handle API errors
def handle_api_errors(f):
    """Wrap an endpoint so uncaught exceptions become a JSON 500 response."""
    @wraps(f)
    def decorated_function(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        except Exception as exc:  # boundary handler: report, don't crash
            return jsonify({'status': 'error', 'message': str(exc)}), 500
    return decorated_function
# Placeholder objects for missing modules
class PlaceholderNetworkChecker:
    """Stand-in network checker used while the real module is unavailable.

    Always reports an optimistic, connected network state.
    """

    def get_network_status(self):
        """Return a static network status snapshot (placeholder values)."""
        snapshot = {
            "status": "unknown",
            "connected": True,
            "ping_ms": 0,
            "dns_working": True,
        }
        return snapshot

    def check_url_reachability(self, url):
        """Pretend every URL is reachable (placeholder behavior)."""
        return True
class PlaceholderErrorManager:
    """Stand-in error-recovery manager with empty tracking state."""

    def __init__(self):
        # No recorded errors, blacklisted URLs, or retry counters yet.
        self.error_history = []
        self.blacklisted_urls = {}
        self.retry_counts = {}
class PlaceholderHealthMonitor:
    """Stand-in health monitor reporting fixed, healthy metrics."""

    def get_current_health_status(self):
        """Return a static health snapshot (placeholder values)."""
        return dict(
            status="healthy",
            uptime="1h 30m",
            memory_usage="45%",
            cpu_usage="12%",
        )
class RetryableError(Exception):
    """Placeholder exception for retryable errors.

    Raised by diagnostic routes when an operation fails in a way that a
    caller could reasonably retry (e.g. transient network problems).
    """
    pass
# Module-level singletons backing the diagnostic endpoints below; all are
# placeholders until the real monitoring modules are wired in.
network_health_checker = PlaceholderNetworkChecker()
error_recovery_manager = PlaceholderErrorManager()
health_monitor = PlaceholderHealthMonitor()
# Placeholder process lock constants and functions
RESCAN_LOCK = "rescan"
DOWNLOAD_LOCK = "download"

# Simple in-memory process lock registry: lock name -> holder.
_active_locks = {}


def is_process_running(lock_name):
    """Return True when the named process lock is currently held."""
    return lock_name in _active_locks
@diagnostic_bp.route('/network')
@handle_api_errors
@optional_auth
def network_diagnostics():
    """Get network diagnostics and connectivity status."""
    try:
        status = network_health_checker.get_network_status()
        # Probe the AniWorld site specifically and attach the result.
        reachable = network_health_checker.check_url_reachability(
            "https://aniworld.to")
        status['aniworld_reachable'] = reachable
        return jsonify({'status': 'success', 'data': status})
    except Exception as exc:
        raise RetryableError(f"Network diagnostics failed: {exc}")
@diagnostic_bp.route('/errors')
@handle_api_errors
@optional_auth
def get_error_history():
    """Get recent error history."""
    try:
        history = error_recovery_manager.error_history
        payload = {
            'recent_errors': history[-50:],  # last 50 errors only
            'total_errors': len(history),
            'blacklisted_urls': list(error_recovery_manager.blacklisted_urls.keys()),
        }
        return jsonify({'status': 'success', 'data': payload})
    except Exception as exc:
        raise RetryableError(f"Error history retrieval failed: {exc}")
@diagnostic_bp.route('/system-status')
@handle_api_errors
@optional_auth
def system_status_summary():
    """Get comprehensive system status summary.

    Aggregates health, network, process-lock, and error statistics into a
    single JSON payload for the diagnostics dashboard.
    """
    try:
        # Get health status
        health_status = health_monitor.get_current_health_status()
        # Get network status
        network_status = network_health_checker.get_network_status()
        # Get process status
        process_status = {
            'rescan_running': is_process_running(RESCAN_LOCK),
            'download_running': is_process_running(DOWNLOAD_LOCK)
        }

        now = datetime.now()

        def _is_recent(error):
            """True when the error's timestamp falls within the last hour."""
            timestamp = error.get('timestamp', now.isoformat())
            # Bug fix: use total_seconds() instead of .seconds — the latter is
            # only the seconds *component* of the timedelta (0-86399) and
            # wraps around every day, misclassifying old errors as recent.
            return (now - datetime.fromisoformat(timestamp)).total_seconds() < 3600

        # Get error statistics
        error_stats = {
            'total_errors': len(error_recovery_manager.error_history),
            'recent_errors': len([e for e in error_recovery_manager.error_history
                                  if _is_recent(e)]),
            'blacklisted_urls': len(error_recovery_manager.blacklisted_urls)
        }
        return jsonify({
            'status': 'success',
            'data': {
                'health': health_status,
                'network': network_status,
                'processes': process_status,
                'errors': error_stats,
                'timestamp': now.isoformat()
            }
        })
    except Exception as e:
        raise RetryableError(f"System status retrieval failed: {e}")
# Recovery routes
@diagnostic_bp.route('/recovery/clear-blacklist', methods=['POST'])
@handle_api_errors
@require_auth
def clear_blacklist():
    """Clear URL blacklist."""
    try:
        blacklist = error_recovery_manager.blacklisted_urls
        blacklist.clear()
        return jsonify({
            'status': 'success',
            'message': 'URL blacklist cleared successfully',
        })
    except Exception as exc:
        raise RetryableError(f"Blacklist clearing failed: {exc}")
@diagnostic_bp.route('/recovery/retry-counts')
@handle_api_errors
@optional_auth
def get_retry_counts():
    """Get retry statistics."""
    try:
        counts = error_recovery_manager.retry_counts
        return jsonify({
            'status': 'success',
            'data': {
                'retry_counts': counts,
                'total_retries': sum(counts.values()),
            },
        })
    except Exception as exc:
        raise RetryableError(f"Retry statistics retrieval failed: {exc}")

View File

@@ -0,0 +1,30 @@
"""
Main application routes.
"""
from flask import Blueprint, render_template, redirect, url_for
from web.controllers.auth_controller import optional_auth
main_bp = Blueprint('main', __name__)
# Placeholder process lock constants and functions
RESCAN_LOCK = "rescan"
DOWNLOAD_LOCK = "download"

# Simple in-memory process lock registry: lock name -> holder.
_active_locks = {}


def is_process_running(lock_name):
    """Return True when the named process lock is currently held."""
    return lock_name in _active_locks
@main_bp.route('/')
@optional_auth
def index():
    """Render the main page, passing current background-process status."""
    status = {
        'rescan_running': is_process_running(RESCAN_LOCK),
        'download_running': is_process_running(DOWNLOAD_LOCK),
    }
    return render_template('index.html', process_status=status)

View File

@@ -0,0 +1,145 @@
"""
Static file and JavaScript routes for UX features.
"""
from flask import Blueprint, Response
static_bp = Blueprint('static', __name__)
# Create placeholder managers for missing modules
class PlaceholderManager:
    """Placeholder manager for missing UX modules.

    Every accessor returns an empty string so the routes serving the
    corresponding JS/CSS assets respond with empty (but valid) content.
    """

    def _empty_asset(self):
        # Shared stub body: all assets are empty until real modules exist.
        return ""

    def get_shortcuts_js(self):
        return self._empty_asset()

    def get_drag_drop_js(self):
        return self._empty_asset()

    def get_bulk_operations_js(self):
        return self._empty_asset()

    def get_preferences_js(self):
        return self._empty_asset()

    def get_search_js(self):
        return self._empty_asset()

    def get_undo_redo_js(self):
        return self._empty_asset()

    def get_mobile_responsive_js(self):
        return self._empty_asset()

    def get_touch_gesture_js(self):
        return self._empty_asset()

    def get_accessibility_js(self):
        return self._empty_asset()

    def get_screen_reader_js(self):
        return self._empty_asset()

    def get_contrast_js(self):
        return self._empty_asset()

    def get_multiscreen_js(self):
        return self._empty_asset()

    def get_css(self):
        return self._empty_asset()

    def get_contrast_css(self):
        return self._empty_asset()

    def get_multiscreen_css(self):
        return self._empty_asset()
# Create placeholder instances
# One module-level singleton per UX feature area; each currently serves
# empty JS/CSS content until the real manager modules are wired in.
keyboard_manager = PlaceholderManager()
drag_drop_manager = PlaceholderManager()
bulk_operations_manager = PlaceholderManager()
preferences_manager = PlaceholderManager()
advanced_search_manager = PlaceholderManager()
undo_redo_manager = PlaceholderManager()
mobile_responsive_manager = PlaceholderManager()
touch_gesture_manager = PlaceholderManager()
accessibility_manager = PlaceholderManager()
screen_reader_manager = PlaceholderManager()
color_contrast_manager = PlaceholderManager()
multi_screen_manager = PlaceholderManager()
# UX JavaScript routes
@static_bp.route('/static/js/keyboard-shortcuts.js')
def keyboard_shortcuts_js():
    """Serve keyboard shortcuts JavaScript."""
    return Response(keyboard_manager.get_shortcuts_js(),
                    mimetype='application/javascript')


@static_bp.route('/static/js/drag-drop.js')
def drag_drop_js():
    """Serve drag and drop JavaScript."""
    return Response(drag_drop_manager.get_drag_drop_js(),
                    mimetype='application/javascript')


@static_bp.route('/static/js/bulk-operations.js')
def bulk_operations_js():
    """Serve bulk operations JavaScript."""
    return Response(bulk_operations_manager.get_bulk_operations_js(),
                    mimetype='application/javascript')


@static_bp.route('/static/js/user-preferences.js')
def user_preferences_js():
    """Serve user preferences JavaScript."""
    return Response(preferences_manager.get_preferences_js(),
                    mimetype='application/javascript')


@static_bp.route('/static/js/advanced-search.js')
def advanced_search_js():
    """Serve advanced search JavaScript."""
    return Response(advanced_search_manager.get_search_js(),
                    mimetype='application/javascript')


@static_bp.route('/static/js/undo-redo.js')
def undo_redo_js():
    """Serve undo/redo JavaScript."""
    return Response(undo_redo_manager.get_undo_redo_js(),
                    mimetype='application/javascript')
# Mobile & Accessibility JavaScript routes
@static_bp.route('/static/js/mobile-responsive.js')
def mobile_responsive_js():
    """Serve mobile responsive JavaScript."""
    return Response(mobile_responsive_manager.get_mobile_responsive_js(),
                    mimetype='application/javascript')


@static_bp.route('/static/js/touch-gestures.js')
def touch_gestures_js():
    """Serve touch gestures JavaScript."""
    return Response(touch_gesture_manager.get_touch_gesture_js(),
                    mimetype='application/javascript')


@static_bp.route('/static/js/accessibility-features.js')
def accessibility_features_js():
    """Serve accessibility features JavaScript."""
    return Response(accessibility_manager.get_accessibility_js(),
                    mimetype='application/javascript')


@static_bp.route('/static/js/screen-reader-support.js')
def screen_reader_support_js():
    """Serve screen reader support JavaScript."""
    return Response(screen_reader_manager.get_screen_reader_js(),
                    mimetype='application/javascript')


@static_bp.route('/static/js/color-contrast-compliance.js')
def color_contrast_compliance_js():
    """Serve color contrast compliance JavaScript."""
    return Response(color_contrast_manager.get_contrast_js(),
                    mimetype='application/javascript')


@static_bp.route('/static/js/multi-screen-support.js')
def multi_screen_support_js():
    """Serve multi-screen support JavaScript."""
    return Response(multi_screen_manager.get_multiscreen_js(),
                    mimetype='application/javascript')
@static_bp.route('/static/css/ux-features.css')
def ux_features_css():
    """Serve UX features CSS.

    Concatenates the CSS from every placeholder UX manager into one
    stylesheet response. All managers currently return empty strings, so
    the served stylesheet is effectively just the comment headers.
    """
    css_content = f"""
    /* Keyboard shortcuts don't require additional CSS */
    {drag_drop_manager.get_css()}
    {bulk_operations_manager.get_css()}
    {preferences_manager.get_css()}
    {advanced_search_manager.get_css()}
    {undo_redo_manager.get_css()}
    /* Mobile & Accessibility CSS */
    {mobile_responsive_manager.get_css()}
    {touch_gesture_manager.get_css()}
    {accessibility_manager.get_css()}
    {screen_reader_manager.get_css()}
    {color_contrast_manager.get_contrast_css()}
    {multi_screen_manager.get_multiscreen_css()}
    """
    return Response(css_content, mimetype='text/css')

View File

@@ -0,0 +1,54 @@
"""
WebSocket event handlers for real-time updates.
"""
from flask_socketio import emit
# Placeholder process lock constants and functions
RESCAN_LOCK = "rescan"
DOWNLOAD_LOCK = "download"

# Simple in-memory process lock registry: lock name -> holder.
_active_locks = {}


def is_process_running(lock_name):
    """Return True when the named process lock is currently held."""
    return lock_name in _active_locks
def register_socketio_handlers(socketio):
    """Register WebSocket event handlers on the given SocketIO instance.

    Args:
        socketio: The Flask-SocketIO server object to attach handlers to.
    """

    @socketio.on('connect')
    def handle_connect():
        """Send initial status to a newly connected client."""
        emit('status', {
            'message': 'Connected to server',
            'processes': {
                'rescan_running': is_process_running(RESCAN_LOCK),
                'download_running': is_process_running(DOWNLOAD_LOCK)
            }
        })

    @socketio.on('disconnect')
    def handle_disconnect():
        """Handle client disconnection."""
        print('Client disconnected')

    @socketio.on('get_status')
    def handle_get_status():
        """Respond to a client status request with process/series info."""
        # Import series_app from the main module if available
        try:
            from main import SeriesApp  # noqa: F401 — presence check only
            # This would need to be properly initialized
            series_count = 0  # Placeholder
        except Exception:
            # Bug fix: was a bare `except:`, which also swallows
            # SystemExit/KeyboardInterrupt. Keep the best-effort fallback
            # but only for ordinary exceptions.
            series_count = 0
        emit('status_update', {
            'processes': {
                'rescan_running': is_process_running(RESCAN_LOCK),
                'download_running': is_process_running(DOWNLOAD_LOCK)
            },
            'series_count': series_count
        })

View File

@@ -212,11 +212,36 @@ class AniWorldApp {
});
// Handle download progress updates pushed from the server.
// NOTE(review): this span contained interleaved old/new lines from a botched
// diff merge (an unclosed `if (data.total_bytes) {` and a stray
// `this.updateStatus(...)`), making it syntactically invalid; reconstructed
// the intended post-change handler.
this.socket.on('download_progress', (data) => {
    let status = '';
    let percent = 0;

    if (data.progress !== undefined) {
        // Preferred payload: server sends a precomputed percentage.
        percent = data.progress;
        status = `Downloading: ${percent.toFixed(1)}%`;

        // Add speed information if available
        if (data.speed_mbps && data.speed_mbps > 0) {
            status += ` (${data.speed_mbps.toFixed(1)} Mbps)`;
        }

        // Add ETA information if available
        if (data.eta_seconds && data.eta_seconds > 0) {
            const eta = this.formatETA(data.eta_seconds);
            status += ` - ETA: ${eta}`;
        }
    } else if (data.total_bytes) {
        // Fallback: derive the percentage from byte counts.
        percent = ((data.downloaded_bytes || 0) / data.total_bytes * 100);
        status = `Downloading: ${percent.toFixed(1)}%`;
    } else if (data.downloaded_mb !== undefined) {
        // No total known: report the absolute amount downloaded.
        status = `Downloaded: ${data.downloaded_mb.toFixed(1)} MB`;
    } else {
        // Last resort: show whatever percent string the server provided.
        status = `Downloading: ${data.percent || '0%'}`;
    }

    if (percent > 0) {
        this.updateProgress(percent, status);
    } else {
        this.updateStatus(status);
    }
});
@@ -1980,6 +2005,25 @@ class AniWorldApp {
console.log('Mobile & Accessibility features initialized');
}
formatETA(seconds) {
if (!seconds || seconds <= 0) return '---';
if (seconds < 60) {
return `${Math.round(seconds)}s`;
} else if (seconds < 3600) {
const minutes = Math.round(seconds / 60);
return `${minutes}m`;
} else if (seconds < 86400) {
const hours = Math.floor(seconds / 3600);
const minutes = Math.round((seconds % 3600) / 60);
return `${hours}h ${minutes}m`;
} else {
const days = Math.floor(seconds / 86400);
const hours = Math.round((seconds % 86400) / 3600);
return `${days}d ${hours}h`;
}
}
}
// Initialize the application when DOM is loaded