refactoring

This commit is contained in:
2025-09-29 21:18:42 +02:00
parent 1719a36f57
commit e477780ed6
105 changed files with 568 additions and 6646 deletions

View File

@@ -1,53 +0,0 @@
# Flask Configuration
FLASK_ENV=development
FLASK_APP=app.py
SECRET_KEY=your-secret-key-here
DEBUG=True
# Database Configuration
DATABASE_URL=sqlite:///data/database/anime.db
DATABASE_POOL_SIZE=10
DATABASE_TIMEOUT=30
# API Configuration
API_KEY=your-api-key
API_RATE_LIMIT=100
API_TIMEOUT=30
# Cache Configuration
CACHE_TYPE=simple
REDIS_URL=redis://localhost:6379/0
CACHE_TIMEOUT=300
# Logging Configuration
LOG_LEVEL=INFO
LOG_FORMAT=detailed
LOG_FILE_MAX_SIZE=10MB
LOG_BACKUP_COUNT=5
# Security Configuration
SESSION_TIMEOUT=3600
CSRF_TOKEN_TIMEOUT=3600
MAX_LOGIN_ATTEMPTS=5
LOGIN_LOCKOUT_DURATION=900
# Download Configuration
DOWNLOAD_PATH=/downloads
MAX_CONCURRENT_DOWNLOADS=5
DOWNLOAD_TIMEOUT=1800
RETRY_ATTEMPTS=3
# Provider Configuration
PROVIDER_TIMEOUT=30
PROVIDER_RETRIES=3
USER_AGENT=AniWorld-Downloader/1.0
# Notification Configuration
DISCORD_WEBHOOK_URL=
TELEGRAM_BOT_TOKEN=
TELEGRAM_CHAT_ID=
# Monitoring Configuration
HEALTH_CHECK_INTERVAL=60
METRICS_ENABLED=True
PERFORMANCE_MONITORING=True

View File

@@ -1,146 +0,0 @@
# AniWorld Web Manager
A modern Flask-based web application for managing anime downloads with a beautiful Fluent UI design.
## Features
**Anime Search**
- Real-time search with auto-suggest
- Easy addition of series from search results
- Clear search functionality
**Series Management**
- Grid layout with card-based display
- Shows missing episodes count
- Multi-select with checkboxes
- Select all/deselect all functionality
**Download Management**
- Background downloading with progress tracking
- Pause, resume, and cancel functionality
- Real-time status updates via WebSocket
**Modern UI**
- Fluent UI design system (Windows 11 style)
- Dark and light theme support
- Responsive design for desktop and mobile
- Smooth animations and transitions
**Localization**
- Support for multiple languages (English, German)
- Easy to add new languages
- Resource-based text management
**Real-time Updates**
- WebSocket connection for live updates
- Toast notifications for user feedback
- Status panel with progress tracking
## Setup
1. **Install Dependencies**
```bash
pip install Flask Flask-SocketIO eventlet
```
2. **Environment Configuration**
Set the `ANIME_DIRECTORY` environment variable to your anime storage path:
```bash
# Windows
set "ANIME_DIRECTORY=Z:\media\serien\Serien"
# Linux/Mac
export ANIME_DIRECTORY="/path/to/your/anime/directory"
```
3. **Run the Application**
```bash
cd src/server
python app.py
```
4. **Access the Web Interface**
Open your browser and navigate to: `http://localhost:5000`
## Usage
### Searching and Adding Anime
1. Use the search bar to find anime
2. Browse search results
3. Click "Add" to add series to your collection
### Managing Downloads
1. Select series using checkboxes
2. Click "Download Selected" to start downloading
3. Monitor progress in the status panel
4. Use pause/resume/cancel controls as needed
### Theme and Language
- Click the moon/sun icon to toggle between light and dark themes
- Language is automatically detected from browser settings
- Supports English and German out of the box
### Configuration
- Click the "Config" button to view current settings
- Shows anime directory path, series count, and connection status
## File Structure
```
src/server/
├── app.py # Main Flask application
├── templates/
│ └── index.html # Main HTML template
├── static/
│ ├── css/
│ │ └── styles.css # Fluent UI styles
│ └── js/
│ ├── app.js # Main application logic
│ └── localization.js # Multi-language support
```
## API Endpoints
- `GET /` - Main web interface
- `GET /api/series` - Get all series with missing episodes
- `POST /api/search` - Search for anime
- `POST /api/add_series` - Add series to collection
- `POST /api/download` - Start downloading selected series
- `POST /api/rescan` - Rescan anime directory
- `GET /api/status` - Get application status
- `POST /api/download/pause` - Pause current download
- `POST /api/download/resume` - Resume paused download
- `POST /api/download/cancel` - Cancel current download
## WebSocket Events
- `connect` - Client connection established
- `scan_started` - Directory scan initiated
- `scan_progress` - Scan progress update
- `scan_completed` - Scan finished successfully
- `download_started` - Download initiated
- `download_progress` - Download progress update
- `download_completed` - Download finished
- `download_paused` - Download paused
- `download_resumed` - Download resumed
- `download_cancelled` - Download cancelled
## Security Features
- Input validation on all API endpoints
- No exposure of internal stack traces
- Secure WebSocket connections
- Environment-based configuration
## Browser Compatibility
- Modern browsers with ES6+ support
- WebSocket support required
- Responsive design works on mobile devices
## Development Notes
- Uses existing `SeriesApp` class without modifications
- Maintains compatibility with original CLI application
- Thread-safe download management
- Proper error handling and user feedback

View File

@@ -1,109 +0,0 @@
# Route Organization Summary
This document describes the reorganization of routes from a single `app.py` file into separate blueprint files for better organization and maintainability.
## New File Structure
```
src/server/web/routes/
├── __init__.py # Package initialization with graceful imports
├── main_routes.py # Main page routes (index)
├── auth_routes.py # Authentication routes (login, setup, API auth)
├── api_routes.py # Core API routes (series, search, download, rescan)
├── static_routes.py # Static file routes (JS/CSS for UX features)
├── diagnostic_routes.py # Diagnostic and monitoring routes
├── config_routes.py # Configuration management routes
└── websocket_handlers.py # WebSocket event handlers
```
## Route Categories
### 1. Main Routes (`main_routes.py`)
- `/` - Main index page
### 2. Authentication Routes (`auth_routes.py`)
Contains two blueprints:
- **auth_bp**: Page routes (`/login`, `/setup`)
- **auth_api_bp**: API routes (`/api/auth/*`)
### 3. API Routes (`api_routes.py`)
- `/api/series` - Get series data
- `/api/search` - Search for series
- `/api/add_series` - Add new series
- `/api/rescan` - Rescan series directory
- `/api/download` - Add to download queue
- `/api/queue/start` - Start download queue
- `/api/queue/stop` - Stop download queue
- `/api/status` - Get system status
- `/api/process/locks/status` - Get process lock status
- `/api/config/directory` - Update directory configuration
### 4. Static Routes (`static_routes.py`)
- `/static/js/*` - JavaScript files for UX features
- `/static/css/*` - CSS files for styling
### 5. Diagnostic Routes (`diagnostic_routes.py`)
- `/api/diagnostics/network` - Network diagnostics
- `/api/diagnostics/errors` - Error history
- `/api/diagnostics/system-status` - System status summary
- `/api/diagnostics/recovery/*` - Recovery endpoints
### 6. Config Routes (`config_routes.py`)
- `/api/scheduler/config` - Scheduler configuration
- `/api/logging/config` - Logging configuration
- `/api/config/section/advanced` - Advanced configuration
- `/api/config/backup*` - Configuration backup management
### 7. WebSocket Handlers (`websocket_handlers.py`)
- `connect` - Client connection handler
- `disconnect` - Client disconnection handler
- `get_status` - Status request handler
## Changes Made to `app.py`
1. **Removed Routes**: All route definitions have been moved to their respective blueprint files
2. **Added Imports**: Import statements for the new route blueprints
3. **Blueprint Registration**: Register all blueprints with the Flask app
4. **Global Variables**: Moved to appropriate route files where they're used
5. **Placeholder Classes**: Moved to relevant route files
6. **WebSocket Integration**: Set up socketio instance sharing with API routes
## Benefits
1. **Better Organization**: Routes are grouped by functionality
2. **Maintainability**: Easier to find and modify specific route logic
3. **Separation of Concerns**: Each file has a specific responsibility
4. **Scalability**: Easy to add new routes in appropriate files
5. **Testing**: Individual route groups can be tested separately
6. **Code Reuse**: Common functionality can be shared between route files
## Usage
The Flask app now imports and registers all blueprints:
```python
from web.routes import (
auth_bp, auth_api_bp, api_bp, main_bp, static_bp,
diagnostic_bp, config_bp
)
app.register_blueprint(main_bp)
app.register_blueprint(auth_bp)
app.register_blueprint(auth_api_bp)
app.register_blueprint(api_bp)
app.register_blueprint(static_bp)
app.register_blueprint(diagnostic_bp)
app.register_blueprint(config_bp)
```
## Error Handling
The `__init__.py` file includes graceful import handling, so if any route file has import errors, the application will continue to function with the available routes.
## Future Enhancements
- Add route-specific middleware
- Implement route-level caching
- Add route-specific rate limiting
- Create route-specific documentation
- Add route-specific testing

View File

@@ -1 +0,0 @@
# Server package

View File

@@ -1,23 +1,7 @@
# --- Global UTF-8 logging setup (fix UnicodeEncodeError) ---
import sys
import io
import logging
try:
if hasattr(sys.stdout, 'reconfigure'):
sys.stdout.reconfigure(encoding='utf-8', errors='replace')
handler = logging.StreamHandler(sys.stdout)
else:
utf8_stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8', errors='replace')
handler = logging.StreamHandler(utf8_stdout)
handler.setFormatter(logging.Formatter('[%(asctime)s] %(levelname)s: %(message)s', datefmt='%H:%M:%S'))
root_logger = logging.getLogger()
root_logger.handlers = []
root_logger.addHandler(handler)
root_logger.setLevel(logging.INFO)
except Exception:
logging.basicConfig(stream=sys.stdout, format='[%(asctime)s] %(levelname)s: %(message)s', datefmt='%H:%M:%S')
import os
import threading
from datetime import datetime
@@ -33,30 +17,16 @@ from flask_socketio import SocketIO, emit
import logging
import atexit
from Main import SeriesApp
from src.cli.Main import SeriesApp
# --- Fix Unicode logging error for Windows console ---
import sys
import io
# --- Robust Unicode logging for Windows console ---
try:
if hasattr(sys.stdout, 'reconfigure'):
handler = logging.StreamHandler(sys.stdout)
handler.setFormatter(logging.Formatter('%(levelname)s: %(message)s'))
handler.stream.reconfigure(encoding='utf-8')
logging.getLogger().handlers = [handler]
else:
# Fallback for older Python versions
utf8_stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8', errors='replace')
handler = logging.StreamHandler(utf8_stdout)
handler.setFormatter(logging.Formatter('%(levelname)s: %(message)s'))
logging.getLogger().handlers = [handler]
except Exception:
# Last resort fallback
logging.basicConfig(stream=sys.stdout, format='%(levelname)s: %(message)s')
from server.core.entities.series import Serie
from server.core.entities import SerieList
from server.infrastructure.file_system import SerieScanner
from server.core import SerieScanner
from server.infrastructure.providers.provider_factory import Loaders
from web.controllers.auth_controller import session_manager, require_auth, optional_auth
from config import config
@@ -81,11 +51,6 @@ from shared.utils.process_utils import (with_process_lock, RESCAN_LOCK, DOWNLOAD
# Import error handling and monitoring modules
from web.middleware.error_handler import handle_api_errors
# Performance optimization modules - not yet implemented
# API integration and database modules - not yet implemented
# User experience and accessibility modules - not yet implemented
app = Flask(__name__,
template_folder='web/templates/base',
static_folder='web/static')
@@ -106,6 +71,66 @@ def handle_api_not_found(error):
# For non-API routes, let Flask handle it normally
return error
# Global error handler to log any unhandled exceptions
@app.errorhandler(Exception)
def handle_exception(e):
    """Last-resort handler: log any unhandled exception with its traceback,
    then answer API requests with a JSON 500 and everything else with plain text.
    Internal details are never exposed to the client."""
    logging.error("Unhandled exception occurred: %s", e, exc_info=True)
    is_api_request = request.path.startswith('/api/')
    if not is_api_request:
        return "Internal Server Error", 500
    return jsonify({'success': False, 'error': 'Internal Server Error'}), 500
# Register cleanup functions
@atexit.register
def cleanup_on_exit():
    """Release application resources at interpreter shutdown (atexit hook).

    Currently only logs; feature-specific teardown will be added here as the
    corresponding features are implemented. Never raises.
    """
    try:
        # Placeholder body — real cleanup hooks land here with their features.
        logging.info("Application cleanup completed")
    except Exception as e:
        logging.error(f"Error during cleanup: {e}")
def rescan_callback():
    """Callback for scheduled rescan operations.

    Re-initializes the scanner held by the module-level ``series_app``, runs a
    full scan, then rebuilds ``series_app.List`` from the configured search
    directory so the in-memory view matches the disk state.

    Returns:
        dict: ``{"status": "success", "message": ...}`` when the rescan
        completes.

    Raises:
        Exception: wraps any failure from the scan or list-refresh steps.
    """
    try:
        # Reinit and scan
        series_app.SerieScanner.Reinit()
        series_app.SerieScanner.Scan()
        # Refresh the series list
        series_app.List = SerieList.SerieList(series_app.directory_to_search)
        # NOTE(review): calling a dunder-named method from outside the object —
        # presumably repopulates the list's derived state; confirm intent.
        series_app.__InitList__()
        return {"status": "success", "message": "Scheduled rescan completed"}
    except Exception as e:
        raise Exception(f"Scheduled rescan failed: {e}")
def download_callback():
    """Callback for auto-download after scheduled rescan.

    Inspects the module-level ``series_app`` list and collects the series that
    still have missing episodes (a non-empty ``episodeDict`` is treated as
    "has missing episodes" — matching the rescan logic in this file). The
    actual download kick-off is not implemented yet; only a summary of what
    would be downloaded is returned.

    Returns:
        dict: status payload with ``status`` and ``message`` keys, plus
        ``series_count`` when a download would start.

    Raises:
        Exception: wraps any failure; chained to the original cause so the
        scheduler log keeps the root traceback.
    """
    try:
        if not series_app or not series_app.List:
            return {"status": "skipped", "message": "No series data available"}
        # Find series with missing episodes.
        series_with_missing = [
            serie for serie in series_app.List.GetList() if serie.episodeDict
        ]
        if not series_with_missing:
            return {"status": "skipped", "message": "No series with missing episodes found"}
        # Note: Actual download implementation would go here
        # For now, just return the count of series that would be downloaded
        return {
            "status": "started",
            "message": f"Auto-download initiated for {len(series_with_missing)} series",
            "series_count": len(series_with_missing)
        }
    except Exception as e:
        # "from e" preserves the original traceback for the scheduler's error log.
        raise Exception(f"Auto-download failed: {e}") from e
# Register all blueprints
app.register_blueprint(download_queue_bp)
app.register_blueprint(main_bp)
@@ -120,35 +145,6 @@ app.register_blueprint(process_bp)
app.register_blueprint(scheduler_bp)
app.register_blueprint(logging_bp)
app.register_blueprint(health_bp)
# Additional blueprints will be registered when features are implemented
# Additional feature initialization will be added when features are implemented
# Global variables are now managed in their respective route files
# Keep only series_app for backward compatibility
series_app = None
def init_series_app(verbose: bool = True):
    """Initialize the SeriesApp with configuration directory.

    Builds a ``SeriesApp`` rooted at ``config.anime_directory`` and stores it
    in the module-level ``series_app`` global (kept for backward
    compatibility with older route code).

    Args:
        verbose: when True, print progress/diagnostic messages to stdout.

    Returns:
        The initialized ``SeriesApp`` instance, or ``None`` if initialization
        failed (the traceback is printed rather than raised so startup can
        continue degraded).
    """
    global series_app
    try:
        directory_to_search = config.anime_directory
        if verbose:
            print(f"Initializing SeriesApp with directory: {directory_to_search}")
        series_app = SeriesApp(directory_to_search)
        if verbose:
            print(f"SeriesApp initialized successfully. List length: {len(series_app.List.GetList()) if series_app.List else 'No List'}")
        return series_app
    except Exception as e:
        # Deliberately swallow: a bad directory should not crash the web app.
        print(f"Error initializing SeriesApp: {e}")
        import traceback
        traceback.print_exc()
        return None
def get_series_app():
    """Return the module-level SeriesApp instance (``None`` before init)."""
    # Reading a module global needs no ``global`` declaration.
    return series_app
# Register WebSocket handlers
register_socketio_handlers(socketio)
@@ -159,110 +155,17 @@ set_socketio(socketio)
# Initialize scheduler
scheduler = init_scheduler(config, socketio)
def setup_scheduler_callbacks():
"""Setup callbacks for scheduler operations."""
def rescan_callback():
"""Callback for scheduled rescan operations."""
try:
# Reinit and scan
series_app.SerieScanner.Reinit()
series_app.SerieScanner.Scan()
# Refresh the series list
series_app.List = SerieList.SerieList(series_app.directory_to_search)
series_app.__InitList__()
return {"status": "success", "message": "Scheduled rescan completed"}
except Exception as e:
raise Exception(f"Scheduled rescan failed: {e}")
def download_callback():
"""Callback for auto-download after scheduled rescan."""
try:
if not series_app or not series_app.List:
return {"status": "skipped", "message": "No series data available"}
# Find series with missing episodes
series_with_missing = []
for serie in series_app.List.GetList():
if serie.episodeDict:
series_with_missing.append(serie)
if not series_with_missing:
return {"status": "skipped", "message": "No series with missing episodes found"}
# Note: Actual download implementation would go here
# For now, just return the count of series that would be downloaded
return {
"status": "started",
"message": f"Auto-download initiated for {len(series_with_missing)} series",
"series_count": len(series_with_missing)
}
except Exception as e:
raise Exception(f"Auto-download failed: {e}")
scheduler.set_rescan_callback(rescan_callback)
scheduler.set_download_callback(download_callback)
# Setup scheduler callbacks
setup_scheduler_callbacks()
# Advanced system initialization will be added when features are implemented
# Register cleanup functions
@atexit.register
def cleanup_on_exit():
"""Clean up resources on application exit."""
try:
# Additional cleanup functions will be added when features are implemented
logging.info("Application cleanup completed")
except Exception as e:
logging.error(f"Error during cleanup: {e}")
scheduler.set_rescan_callback(rescan_callback)
scheduler.set_download_callback(download_callback)
if __name__ == '__main__':
# Only run initialization and logging setup in the main process
# This prevents duplicate initialization when Flask debug reloader starts
# Configure enhanced logging system first
try:
from server.infrastructure.logging.config import get_logger, logging_config
logger = get_logger(__name__, 'webapp')
logger.info("Enhanced logging system initialized")
except ImportError:
# Fallback to basic logging
logging.basicConfig(level=logging.INFO)
logger = logging.getLogger(__name__)
logger.warning("Using fallback logging - enhanced logging not available")
if __name__ == '__main__':
# Configure enhanced logging system first
try:
from server.infrastructure.logging.config import get_logger, logging_config
logger = get_logger(__name__, 'webapp')
logger.info("Enhanced logging system initialized")
except ImportError:
# Fallback to basic logging with UTF-8 support
import logging
logging.basicConfig(
level=logging.INFO,
format='[%(asctime)s] %(levelname)s: %(message)s',
datefmt='%H:%M:%S',
handlers=[
logging.StreamHandler(sys.stdout)
]
)
logger = logging.getLogger(__name__)
logger.warning("Using fallback logging - enhanced logging not available")
# Try to configure console for UTF-8 on Windows
try:
if hasattr(sys.stdout, 'reconfigure'):
sys.stdout.reconfigure(encoding='utf-8', errors='replace')
except Exception:
pass
from server.infrastructure.logging.config import get_logger, logging_config
logger = get_logger(__name__, 'webapp')
logger.info("Enhanced logging system initialized")
# Only run startup messages and scheduler in the parent process
if os.environ.get('WERKZEUG_RUN_MAIN') != 'true':
@@ -270,17 +173,9 @@ if __name__ == '__main__':
logger.info(f"Anime directory: {config.anime_directory}")
logger.info(f"Log level: {config.log_level}")
# Start scheduler if enabled
if hasattr(config, 'scheduled_rescan_enabled') and config.scheduled_rescan_enabled:
logger.info(f"Starting scheduler - daily rescan at {getattr(config, 'scheduled_rescan_time', '03:00')}")
scheduler.start_scheduler()
else:
logger.info("Scheduled operations disabled")
scheduler.start_scheduler()
init_series_app(verbose=True)
logger.info("Server will be available at http://localhost:5000")
else:
# Initialize the series app only in the reloader child process (the actual working process)
init_series_app(verbose=True)
try:
# Run with SocketIO

View File

@@ -1,823 +0,0 @@
import os
import sys
import threading
from datetime import datetime
from flask import Flask, render_template, request, jsonify, redirect, url_for
from flask_socketio import SocketIO, emit
import logging
import atexit
# Add the parent directory to sys.path to import our modules
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
from ..main import SeriesApp
from .core.entities.series import Serie
from .core.entities import SerieList
from .infrastructure.file_system import SerieScanner
from .infrastructure.providers.provider_factory import Loaders
from .web.controllers.auth_controller import session_manager, require_auth, optional_auth
from .config import config
from .application.services.queue_service import download_queue_bp
# TODO: Fix these imports
# from process_api import process_bp
# from scheduler_api import scheduler_bp
# from logging_api import logging_bp
# from config_api import config_bp
# from scheduler import init_scheduler, get_scheduler
# from process_locks import (with_process_lock, RESCAN_LOCK, DOWNLOAD_LOCK,
# ProcessLockError, is_process_running, check_process_locks)
# TODO: Fix these imports
# # Import new error handling and health monitoring modules
# from error_handler import (
# handle_api_errors, error_recovery_manager, recovery_strategies,
# network_health_checker, NetworkError, DownloadError, RetryableError
# )
# from health_monitor import health_bp, health_monitor, init_health_monitoring, cleanup_health_monitoring
# Import performance optimization modules
from performance_optimizer import (
init_performance_monitoring, cleanup_performance_monitoring,
speed_limiter, download_cache, memory_monitor, download_manager
)
from performance_api import performance_bp
# Import API integration modules
from api_integration import (
init_api_integrations, cleanup_api_integrations,
webhook_manager, export_manager, notification_service
)
from api_endpoints import api_integration_bp
# Import database management modules
from database_manager import (
database_manager, anime_repository, backup_manager, storage_manager,
init_database_system, cleanup_database_system
)
from database_api import database_bp
# Import health check endpoints
from health_endpoints import health_bp
# Import user experience modules
from keyboard_shortcuts import keyboard_manager
from drag_drop import drag_drop_manager
from bulk_operations import bulk_operations_manager
from user_preferences import preferences_manager, preferences_bp
from advanced_search import advanced_search_manager, search_bp
from undo_redo_manager import undo_redo_manager, undo_redo_bp
# Import Mobile & Accessibility modules
from mobile_responsive import mobile_responsive_manager
from touch_gestures import touch_gesture_manager
from accessibility_features import accessibility_manager
from screen_reader_support import screen_reader_manager
from color_contrast_compliance import color_contrast_manager
from multi_screen_support import multi_screen_manager
app = Flask(__name__)
app.config['SECRET_KEY'] = os.urandom(24)
app.config['PERMANENT_SESSION_LIFETIME'] = 86400 # 24 hours
socketio = SocketIO(app, cors_allowed_origins="*")
# Register blueprints
app.register_blueprint(download_queue_bp)
app.register_blueprint(process_bp)
app.register_blueprint(scheduler_bp)
app.register_blueprint(logging_bp)
app.register_blueprint(config_bp)
app.register_blueprint(health_bp)
app.register_blueprint(performance_bp)
app.register_blueprint(api_integration_bp)
app.register_blueprint(database_bp)
# Note: health_endpoints blueprint already imported above as health_bp, no need to register twice
# Register bulk operations API
from bulk_api import bulk_api_bp
app.register_blueprint(bulk_api_bp)
# Register user preferences API
app.register_blueprint(preferences_bp)
# Register advanced search API
app.register_blueprint(search_bp)
# Register undo/redo API
app.register_blueprint(undo_redo_bp)
# Register Mobile & Accessibility APIs
app.register_blueprint(color_contrast_manager.get_contrast_api_blueprint())
# Initialize user experience features
# keyboard_manager doesn't need init_app - it's a simple utility class
bulk_operations_manager.init_app(app)
preferences_manager.init_app(app)
advanced_search_manager.init_app(app)
undo_redo_manager.init_app(app)
# Initialize Mobile & Accessibility features
mobile_responsive_manager.init_app(app)
touch_gesture_manager.init_app(app)
accessibility_manager.init_app(app)
screen_reader_manager.init_app(app)
color_contrast_manager.init_app(app)
multi_screen_manager.init_app(app)
# Global variables to store app state
series_app = None
is_scanning = False
is_downloading = False
is_paused = False
download_thread = None
download_progress = {}
download_queue = []
current_downloading = None
download_stats = {
'total_series': 0,
'completed_series': 0,
'current_episode': None,
'total_episodes': 0,
'completed_episodes': 0
}
def init_series_app():
"""Initialize the SeriesApp with configuration directory."""
global series_app
directory_to_search = config.anime_directory
series_app = SeriesApp(directory_to_search)
return series_app
# Initialize the app on startup
init_series_app()
# Initialize scheduler
scheduler = init_scheduler(config, socketio)
def setup_scheduler_callbacks():
"""Setup callbacks for scheduler operations."""
def rescan_callback():
"""Callback for scheduled rescan operations."""
try:
# Reinit and scan
series_app.SerieScanner.Reinit()
series_app.SerieScanner.Scan()
# Refresh the series list
series_app.List = SerieList.SerieList(series_app.directory_to_search)
series_app.__InitList__()
return {"status": "success", "message": "Scheduled rescan completed"}
except Exception as e:
raise Exception(f"Scheduled rescan failed: {e}")
def download_callback():
"""Callback for auto-download after scheduled rescan."""
try:
if not series_app or not series_app.List:
return {"status": "skipped", "message": "No series data available"}
# Find series with missing episodes
series_with_missing = []
for serie in series_app.List.GetList():
if serie.episodeDict:
series_with_missing.append(serie)
if not series_with_missing:
return {"status": "skipped", "message": "No series with missing episodes found"}
# Note: Actual download implementation would go here
# For now, just return the count of series that would be downloaded
return {
"status": "started",
"message": f"Auto-download initiated for {len(series_with_missing)} series",
"series_count": len(series_with_missing)
}
except Exception as e:
raise Exception(f"Auto-download failed: {e}")
scheduler.set_rescan_callback(rescan_callback)
scheduler.set_download_callback(download_callback)
# Setup scheduler callbacks
setup_scheduler_callbacks()
# Initialize error handling and health monitoring
try:
init_health_monitoring()
logging.info("Health monitoring initialized successfully")
except Exception as e:
logging.error(f"Failed to initialize health monitoring: {e}")
# Initialize performance monitoring
try:
init_performance_monitoring()
logging.info("Performance monitoring initialized successfully")
except Exception as e:
logging.error(f"Failed to initialize performance monitoring: {e}")
# Initialize API integrations
try:
init_api_integrations()
# Set export manager's series app reference
export_manager.series_app = series_app
logging.info("API integrations initialized successfully")
except Exception as e:
logging.error(f"Failed to initialize API integrations: {e}")
# Initialize database system
try:
init_database_system()
logging.info("Database system initialized successfully")
except Exception as e:
logging.error(f"Failed to initialize database system: {e}")
# Register cleanup functions
@atexit.register
def cleanup_on_exit():
"""Clean up resources on application exit."""
try:
cleanup_health_monitoring()
cleanup_performance_monitoring()
cleanup_api_integrations()
cleanup_database_system()
logging.info("Application cleanup completed")
except Exception as e:
logging.error(f"Error during cleanup: {e}")
# UX JavaScript and CSS routes
@app.route('/static/js/keyboard-shortcuts.js')
def keyboard_shortcuts_js():
"""Serve keyboard shortcuts JavaScript."""
from flask import Response
js_content = keyboard_manager.get_shortcuts_js()
return Response(js_content, mimetype='application/javascript')
@app.route('/static/js/drag-drop.js')
def drag_drop_js():
"""Serve drag and drop JavaScript."""
from flask import Response
js_content = drag_drop_manager.get_drag_drop_js()
return Response(js_content, mimetype='application/javascript')
@app.route('/static/js/bulk-operations.js')
def bulk_operations_js():
"""Serve bulk operations JavaScript."""
from flask import Response
js_content = bulk_operations_manager.get_bulk_operations_js()
return Response(js_content, mimetype='application/javascript')
@app.route('/static/js/user-preferences.js')
def user_preferences_js():
"""Serve user preferences JavaScript."""
from flask import Response
js_content = preferences_manager.get_preferences_js()
return Response(js_content, mimetype='application/javascript')
@app.route('/static/js/advanced-search.js')
def advanced_search_js():
"""Serve advanced search JavaScript."""
from flask import Response
js_content = advanced_search_manager.get_search_js()
return Response(js_content, mimetype='application/javascript')
@app.route('/static/js/undo-redo.js')
def undo_redo_js():
"""Serve undo/redo JavaScript."""
from flask import Response
js_content = undo_redo_manager.get_undo_redo_js()
return Response(js_content, mimetype='application/javascript')
# Mobile & Accessibility JavaScript routes
@app.route('/static/js/mobile-responsive.js')
def mobile_responsive_js():
"""Serve mobile responsive JavaScript."""
from flask import Response
js_content = mobile_responsive_manager.get_mobile_responsive_js()
return Response(js_content, mimetype='application/javascript')
@app.route('/static/js/touch-gestures.js')
def touch_gestures_js():
"""Serve touch gestures JavaScript."""
from flask import Response
js_content = touch_gesture_manager.get_touch_gesture_js()
return Response(js_content, mimetype='application/javascript')
@app.route('/static/js/accessibility-features.js')
def accessibility_features_js():
"""Serve accessibility features JavaScript."""
from flask import Response
js_content = accessibility_manager.get_accessibility_js()
return Response(js_content, mimetype='application/javascript')
@app.route('/static/js/screen-reader-support.js')
def screen_reader_support_js():
"""Serve screen reader support JavaScript."""
from flask import Response
js_content = screen_reader_manager.get_screen_reader_js()
return Response(js_content, mimetype='application/javascript')
@app.route('/static/js/color-contrast-compliance.js')
def color_contrast_compliance_js():
"""Serve color contrast compliance JavaScript."""
from flask import Response
js_content = color_contrast_manager.get_contrast_js()
return Response(js_content, mimetype='application/javascript')
@app.route('/static/js/multi-screen-support.js')
def multi_screen_support_js():
"""Serve multi-screen support JavaScript."""
from flask import Response
js_content = multi_screen_manager.get_multiscreen_js()
return Response(js_content, mimetype='application/javascript')
@app.route('/static/css/ux-features.css')
def ux_features_css():
"""Serve UX features CSS."""
from flask import Response
css_content = f"""
/* Keyboard shortcuts don't require additional CSS */
{drag_drop_manager.get_css()}
{bulk_operations_manager.get_css()}
{preferences_manager.get_css()}
{advanced_search_manager.get_css()}
{undo_redo_manager.get_css()}
/* Mobile & Accessibility CSS */
{mobile_responsive_manager.get_css()}
{touch_gesture_manager.get_css()}
{accessibility_manager.get_css()}
{screen_reader_manager.get_css()}
{color_contrast_manager.get_contrast_css()}
{multi_screen_manager.get_multiscreen_css()}
"""
return Response(css_content, mimetype='text/css')
@app.route('/')
@optional_auth
def index():
"""Main page route."""
# Check process status
process_status = {
'rescan_running': is_process_running(RESCAN_LOCK),
'download_running': is_process_running(DOWNLOAD_LOCK)
}
return render_template('index.html', process_status=process_status)
# Authentication routes
@app.route('/login')
def login():
"""Login page."""
if not config.has_master_password():
return redirect(url_for('setup'))
if session_manager.is_authenticated():
return redirect(url_for('index'))
return render_template('login.html',
session_timeout=config.session_timeout_hours,
max_attempts=config.max_failed_attempts,
lockout_duration=config.lockout_duration_minutes)
@app.route('/setup')
def setup():
    """Render the first-run setup page; bounce to login once setup is done."""
    if config.has_master_password():
        return redirect(url_for('login'))
    return render_template('setup.html',
                           current_directory=config.anime_directory)
@app.route('/api/auth/setup', methods=['POST'])
def auth_setup():
    """Complete initial setup: store the master password and anime directory.

    Returns 400 when setup was already completed or the payload is invalid,
    500 when persisting the configuration fails.
    """
    if config.has_master_password():
        return jsonify({
            'status': 'error',
            'message': 'Setup already completed'
        }), 400
    # silent=True: a missing/non-JSON body yields None instead of raising,
    # so we can answer 400 rather than a generic 500 (old code crashed with
    # AttributeError on None.get).
    data = request.get_json(silent=True) or {}
    password = data.get('password')
    directory = data.get('directory')
    if not password or len(password) < 8:
        return jsonify({
            'status': 'error',
            'message': 'Password must be at least 8 characters long'
        }), 400
    if not directory:
        return jsonify({
            'status': 'error',
            'message': 'Directory is required'
        }), 400
    try:
        # Set master password and directory, then persist.
        config.set_master_password(password)
        config.anime_directory = directory
        config.save_config()
        # Reinitialize series app with the new directory.
        init_series_app()
        return jsonify({
            'status': 'success',
            'message': 'Setup completed successfully'
        })
    except Exception as e:
        return jsonify({
            'status': 'error',
            'message': str(e)
        }), 500
@app.route('/api/auth/login', methods=['POST'])
def auth_login():
    """Authenticate the user against the master password."""
    try:
        payload = request.get_json()
        password = payload.get('password')
        if not password:
            return jsonify({
                'status': 'error',
                'message': 'Password is required'
            }), 400
        # The session manager handles verification and lockout bookkeeping.
        result = session_manager.login(password, request.remote_addr)
        return jsonify(result)
    except Exception as e:
        return jsonify({'status': 'error', 'message': str(e)}), 500
@app.route('/api/auth/logout', methods=['POST'])
@require_auth
def auth_logout():
    """Terminate the current authenticated session."""
    session_manager.logout()
    response = {'status': 'success', 'message': 'Logged out successfully'}
    return jsonify(response)
@app.route('/api/auth/status', methods=['GET'])
def auth_status():
    """Report authentication/session state for the frontend."""
    has_password = config.has_master_password()
    return jsonify({
        'authenticated': session_manager.is_authenticated(),
        'has_master_password': has_password,
        'setup_required': not has_password,
        'session_info': session_manager.get_session_info(),
    })
@app.route('/api/config/directory', methods=['POST'])
@require_auth
def update_directory():
    """Persist a new anime directory and re-initialize the series app."""
    try:
        payload = request.get_json()
        target = payload.get('directory')
        if not target:
            return jsonify({
                'status': 'error',
                'message': 'Directory is required'
            }), 400
        config.anime_directory = target
        config.save_config()
        # Rebuild in-memory series state from the new location.
        init_series_app()
        return jsonify({
            'status': 'success',
            'message': 'Directory updated successfully',
            'directory': target,
        })
    except Exception as e:
        return jsonify({'status': 'error', 'message': str(e)}), 500
@app.route('/api/series', methods=['GET'])
@optional_auth
def get_series():
    """Get all series data.

    Always returns HTTP 200 with status 'success' — even on failure — so the
    frontend does not enter a reload loop; an explanatory message is attached
    when the list is empty or loading failed.
    """
    try:
        if series_app is None or series_app.List is None:
            return jsonify({
                'status': 'success',
                'series': [],
                'total_series': 0,
                'message': 'No series data available. Please perform a scan to load series.'
            })
        # Get series data
        series_data = []
        for serie in series_app.List.GetList():
            series_data.append({
                'folder': serie.folder,
                'name': serie.name or serie.folder,
                # NOTE(review): total_episodes and missing_episodes are computed
                # from the same sum over episodeDict (which holds the *missing*
                # episodes per season) — total_episodes looks wrong; confirm the
                # intended source for the true total.
                'total_episodes': sum(len(episodes) for episodes in serie.episodeDict.values()),
                'missing_episodes': sum(len(episodes) for episodes in serie.episodeDict.values()),
                # Status is hard-coded; no completion tracking exists here.
                'status': 'ongoing',
                'episodes': {
                    season: episodes
                    for season, episodes in serie.episodeDict.items()
                }
            })
        return jsonify({
            'status': 'success',
            'series': series_data,
            'total_series': len(series_data)
        })
    except Exception as e:
        # Log the error but don't return 500 to prevent page reload loops
        print(f"Error in get_series: {e}")
        return jsonify({
            'status': 'success',
            'series': [],
            'total_series': 0,
            'message': 'Error loading series data. Please try rescanning.'
        })
@app.route('/api/rescan', methods=['POST'])
@optional_auth
def rescan_series():
    """Rescan/reinit the series directory.

    Starts the scan in a daemon thread and returns immediately; progress and
    completion are reported to clients via SocketIO events ('scan_started',
    'scan_progress', 'scan_completed', 'scan_error'). Duplicate scans are
    prevented both by the cross-process RESCAN_LOCK and the in-process
    is_scanning flag.
    """
    global is_scanning
    # Check if rescan is already running using process lock
    if is_process_running(RESCAN_LOCK) or is_scanning:
        return jsonify({
            'status': 'error',
            'message': 'Rescan is already running. Please wait for it to complete.',
            'is_running': True
        }), 409
    def scan_thread():
        # Runs in a background thread: wraps the actual rescan in the
        # process lock so a second web instance cannot start one in parallel.
        global is_scanning
        try:
            # Use process lock to prevent duplicate rescans
            @with_process_lock(RESCAN_LOCK, timeout_minutes=120)
            def perform_rescan():
                global is_scanning
                is_scanning = True
                try:
                    # Emit scanning started
                    socketio.emit('scan_started')
                    # Reinit and scan
                    series_app.SerieScanner.Reinit()
                    series_app.SerieScanner.Scan(lambda folder, counter:
                        socketio.emit('scan_progress', {
                            'folder': folder,
                            'counter': counter
                        })
                    )
                    # Refresh the series list
                    series_app.List = SerieList.SerieList(series_app.directory_to_search)
                    series_app.__InitList__()
                    # Emit scan completed
                    socketio.emit('scan_completed')
                except Exception as e:
                    socketio.emit('scan_error', {'message': str(e)})
                    raise
                finally:
                    # Always clear the in-process flag, even on failure.
                    is_scanning = False
            perform_rescan(_locked_by='web_interface')
        except ProcessLockError:
            socketio.emit('scan_error', {'message': 'Rescan is already running'})
        except Exception as e:
            socketio.emit('scan_error', {'message': str(e)})
    # Start scan in background thread
    threading.Thread(target=scan_thread, daemon=True).start()
    return jsonify({
        'status': 'success',
        'message': 'Rescan started'
    })
# Basic download endpoint - simplified for now
@app.route('/api/download', methods=['POST'])
@optional_auth
def download_series():
    """Download selected series (placeholder until the queue system lands)."""
    global is_downloading
    # Refuse to start while another download is active (process lock or flag).
    busy = is_process_running(DOWNLOAD_LOCK) or is_downloading
    if busy:
        return jsonify({
            'status': 'error',
            'message': 'Download is already running. Please wait for it to complete.',
            'is_running': True,
        }), 409
    return jsonify({
        'status': 'success',
        'message': 'Download functionality will be implemented with queue system',
    })
# WebSocket events for real-time updates
@socketio.on('connect')
def handle_connect():
    """Greet a newly connected client with the current process state."""
    processes = {
        'rescan_running': is_process_running(RESCAN_LOCK),
        'download_running': is_process_running(DOWNLOAD_LOCK),
    }
    emit('status', {'message': 'Connected to server', 'processes': processes})
@socketio.on('disconnect')
def handle_disconnect():
    """Handle client disconnection (console notice only; no state to clean up)."""
    print('Client disconnected')
@socketio.on('get_status')
def handle_get_status():
    """Push a status snapshot (process flags + series count) to the requester."""
    series_total = 0
    if series_app and series_app.List:
        series_total = len(series_app.List.GetList())
    emit('status_update', {
        'processes': {
            'rescan_running': is_process_running(RESCAN_LOCK),
            'download_running': is_process_running(DOWNLOAD_LOCK),
        },
        'series_count': series_total,
    })
# Error Recovery and Diagnostics Endpoints
@app.route('/api/diagnostics/network')
@handle_api_errors
@optional_auth
def network_diagnostics():
    """Report general network health plus AniWorld reachability."""
    try:
        status = network_health_checker.get_network_status()
        # Probe the download site specifically on top of the generic checks.
        status['aniworld_reachable'] = network_health_checker.check_url_reachability(
            "https://aniworld.to")
        return jsonify({'status': 'success', 'data': status})
    except Exception as e:
        raise RetryableError(f"Network diagnostics failed: {e}")
@app.route('/api/diagnostics/errors')
@handle_api_errors
@optional_auth
def get_error_history():
    """Return the 50 most recent recorded errors plus a blacklist summary."""
    try:
        history = error_recovery_manager.error_history
        payload = {
            'recent_errors': history[-50:],  # newest 50 entries
            'total_errors': len(history),
            'blacklisted_urls': list(error_recovery_manager.blacklisted_urls.keys()),
        }
        return jsonify({'status': 'success', 'data': payload})
    except Exception as e:
        raise RetryableError(f"Error history retrieval failed: {e}")
@app.route('/api/recovery/clear-blacklist', methods=['POST'])
@handle_api_errors
@require_auth
def clear_blacklist():
    """Drop every blacklisted URL so future downloads may retry them."""
    try:
        error_recovery_manager.blacklisted_urls.clear()
        return jsonify({
            'status': 'success',
            'message': 'URL blacklist cleared successfully',
        })
    except Exception as e:
        raise RetryableError(f"Blacklist clearing failed: {e}")
@app.route('/api/recovery/retry-counts')
@handle_api_errors
@optional_auth
def get_retry_counts():
    """Expose per-target retry counters and their grand total."""
    try:
        counts = error_recovery_manager.retry_counts
        return jsonify({
            'status': 'success',
            'data': {
                'retry_counts': counts,
                'total_retries': sum(counts.values()),
            },
        })
    except Exception as e:
        raise RetryableError(f"Retry statistics retrieval failed: {e}")
@app.route('/api/diagnostics/system-status')
@handle_api_errors
@optional_auth
def system_status_summary():
    """Get a comprehensive system status summary.

    Aggregates health-monitor output, network diagnostics, background-process
    flags and error statistics into one response for the dashboard.
    """
    try:
        # Get health status
        health_status = health_monitor.get_current_health_status()
        # Get network status
        network_status = network_health_checker.get_network_status()
        # Get process status
        process_status = {
            'rescan_running': is_process_running(RESCAN_LOCK),
            'download_running': is_process_running(DOWNLOAD_LOCK)
        }
        now = datetime.now()
        # BUG FIX: timedelta.seconds only holds the sub-day remainder, so an
        # error from days ago could still count as "recent". total_seconds()
        # gives the true one-hour window.
        recent_errors = [
            e for e in error_recovery_manager.error_history
            if (now - datetime.fromisoformat(e['timestamp'])).total_seconds() < 3600
        ]
        error_stats = {
            'total_errors': len(error_recovery_manager.error_history),
            'recent_errors': len(recent_errors),
            'blacklisted_urls': len(error_recovery_manager.blacklisted_urls)
        }
        return jsonify({
            'status': 'success',
            'data': {
                'health': health_status,
                'network': network_status,
                'processes': process_status,
                'errors': error_stats,
                'timestamp': now.isoformat()
            }
        })
    except Exception as e:
        raise RetryableError(f"System status retrieval failed: {e}")
if __name__ == '__main__':
    # Clean up any expired locks on startup
    check_process_locks()
    # Configure enhanced logging system
    try:
        from logging_config import get_logger, logging_config
        logger = get_logger(__name__, 'webapp')
        logger.info("Enhanced logging system initialized")
    except ImportError:
        # Fallback to basic logging
        logging.basicConfig(level=logging.INFO)
        logger = logging.getLogger(__name__)
        logger.warning("Using fallback logging - enhanced logging not available")
    logger.info("Starting Aniworld Flask server...")
    logger.info(f"Anime directory: {config.anime_directory}")
    logger.info(f"Log level: {config.log_level}")
    # Start scheduler if enabled
    if config.scheduled_rescan_enabled:
        logger.info(f"Starting scheduler - daily rescan at {config.scheduled_rescan_time}")
        scheduler.start_scheduler()
    else:
        logger.info("Scheduled operations disabled")
    logger.info("Server will be available at http://localhost:5000")
    # NOTE(review): debug=True with host 0.0.0.0 exposes the Werkzeug debugger
    # to the whole network — confirm this entry point is development-only.
    try:
        # Run with SocketIO
        socketio.run(app, debug=True, host='0.0.0.0', port=5000, allow_unsafe_werkzeug=True)
    finally:
        # Clean shutdown
        if scheduler:
            scheduler.stop_scheduler()
            logger.info("Scheduler stopped")

View File

@@ -1,3 +0,0 @@
"""
Application services layer for business logic coordination.
"""

View File

@@ -16,7 +16,7 @@ class UserPreferencesManager:
def __init__(self, app=None):
self.app = app
self.preferences_file = 'user_preferences.json'
self.preferences_file = 'data/user_preferences.json'
self.preferences = {} # Initialize preferences attribute
self.default_preferences = {
'ui': {
@@ -76,7 +76,7 @@ class UserPreferencesManager:
def init_app(self, app):
"""Initialize with Flask app."""
self.app = app
self.preferences_file = os.path.join(app.instance_path, 'user_preferences.json')
self.preferences_file = os.path.join(app.instance_path, 'data/user_preferences.json')
# Ensure instance path exists
os.makedirs(app.instance_path, exist_ok=True)

View File

View File

@@ -9,7 +9,7 @@ from datetime import datetime, timedelta
class Config:
"""Configuration management for AniWorld Flask app."""
def __init__(self, config_file: str = "config.json"):
def __init__(self, config_file: str = "data/config.json"):
self.config_file = config_file
self.default_config = {
"security": {

View File

@@ -1,11 +0,0 @@
"""
Core module for AniWorld application.
Contains domain entities, interfaces, use cases, and exceptions.
"""
from . import entities
from . import exceptions
from . import interfaces
from . import use_cases
__all__ = ['entities', 'exceptions', 'interfaces', 'use_cases']

View File

@@ -1,56 +0,0 @@
import os
import json
import logging
from .series import Serie
class SerieList:
    """In-memory registry of Serie objects, keyed by folder name.

    On construction the base directory is scanned and every folder that
    contains a ``data`` file is loaded into ``folderDict``.
    """

    def __init__(self, basePath: str):
        self.directory = basePath
        # folder name -> Serie loaded from that folder's "data" file
        self.folderDict: dict[str, Serie] = {}
        self.load_series()

    def add(self, serie: Serie):
        """Register *serie* and persist it to disk unless its key is already known."""
        if self.contains(serie.key):
            return
        animePath = os.path.join(self.directory, serie.folder)
        dataPath = os.path.join(animePath, "data")
        os.makedirs(animePath, exist_ok=True)
        if not os.path.isfile(dataPath):
            serie.save_to_file(dataPath)
        self.folderDict[serie.folder] = serie  # fixed: stray semicolon removed

    def contains(self, key: str) -> bool:
        """Return True if a series with the given key is already registered."""
        return any(serie.key == key for serie in self.folderDict.values())

    def load_series(self):
        """Scan folders under the base directory and load each ``data`` file."""
        logging.info(f"Scanning anime folders in: {self.directory}")
        for anime_folder in os.listdir(self.directory):
            # renamed from the misleading 'anime_path': this is the data file path
            data_path = os.path.join(self.directory, anime_folder, "data")
            if os.path.isfile(data_path):
                logging.debug(f"Found data folder: {data_path}")
                self.load_data(anime_folder, data_path)
            else:
                logging.warning(f"Skipping {anime_folder} - No data folder found")

    def load_data(self, anime_folder, data_path):
        """Load one serialized Serie; failures are logged, never raised."""
        try:
            self.folderDict[anime_folder] = Serie.load_from_file(data_path)
            logging.debug(f"Successfully loaded {data_path} for {anime_folder}")
        except Exception as e:
            logging.error(f"Failed to load {data_path} in {anime_folder}: {e}")

    def GetMissingEpisode(self):
        """Find all series with a non-empty episodeDict (i.e. missing episodes)."""
        return [serie for serie in self.folderDict.values() if serie.episodeDict]

    def GetList(self):
        """Get all series in the list."""
        return list(self.folderDict.values())
#k = AnimeList("\\\\sshfs.r\\ubuntu@192.168.178.43\\media\\serien\\Serien")
#bbabab = k.GetMissingEpisode()
#print(bbabab)

View File

@@ -1,8 +0,0 @@
"""
Domain entities for the AniWorld application.
"""
from .SerieList import SerieList
from .series import Serie
__all__ = ['SerieList', 'Serie']

View File

@@ -1,82 +0,0 @@
import json
class Serie:
    """A tracked anime series and its missing episodes.

    ``episodeDict`` maps season number -> list of episode numbers that are
    still missing locally (per the scanner that populated it).
    """

    def __init__(self, key: str, name: str, site: str, folder: str, episodeDict: dict[int, list[int]]):
        self._key = key
        self._name = name
        self._site = site
        self._folder = folder
        self._episodeDict = episodeDict

    def __str__(self):
        """String representation of Serie object"""
        return f"Serie(key='{self.key}', name='{self.name}', site='{self.site}', folder='{self.folder}', episodeDict={self.episodeDict})"

    @property
    def key(self) -> str:
        return self._key

    @key.setter
    def key(self, value: str):
        self._key = value

    @property
    def name(self) -> str:
        return self._name

    @name.setter
    def name(self, value: str):
        self._name = value

    @property
    def site(self) -> str:
        return self._site

    @site.setter
    def site(self, value: str):
        self._site = value

    @property
    def folder(self) -> str:
        return self._folder

    @folder.setter
    def folder(self, value: str):
        self._folder = value

    @property
    def episodeDict(self) -> dict[int, list[int]]:
        return self._episodeDict

    @episodeDict.setter
    def episodeDict(self, value: dict[int, list[int]]):
        self._episodeDict = value

    def to_dict(self):
        """Convert Serie object to dictionary for JSON serialization."""
        return {
            "key": self.key,
            "name": self.name,
            "site": self.site,
            "folder": self.folder,
            # JSON object keys must be strings; stringify season numbers.
            "episodeDict": {str(k): list(v) for k, v in self.episodeDict.items()}
        }

    @staticmethod
    def from_dict(data: dict):
        """Create a Serie object from a dictionary (inverse of to_dict)."""
        episode_dict = {int(k): v for k, v in data["episodeDict"].items()}  # keys back to int
        return Serie(data["key"], data["name"], data["site"], data["folder"], episode_dict)

    def save_to_file(self, filename: str):
        """Save Serie object to a JSON file.

        Fixed: an explicit UTF-8 encoding is used so non-ASCII titles are not
        corrupted by the platform's locale encoding (notably on Windows).
        """
        with open(filename, "w", encoding="utf-8") as file:
            json.dump(self.to_dict(), file, indent=4)

    @classmethod
    def load_from_file(cls, filename: str) -> "Serie":
        """Load Serie object from a JSON file (UTF-8, matching save_to_file)."""
        with open(filename, "r", encoding="utf-8") as file:
            data = json.load(file)
        return cls.from_dict(data)

View File

@@ -1,7 +0,0 @@
class NoKeyFoundException(Exception):
    """Exception raised when an anime key cannot be found."""
    pass
class MatchNotFoundError(Exception):
    """Exception raised when an expected pattern (e.g. SxxEyy season/episode marker) cannot be matched."""
    pass

View File

@@ -1,3 +0,0 @@
"""
Domain exceptions for the AniWorld application.
"""

View File

@@ -1,3 +0,0 @@
"""
Domain interfaces and contracts for the AniWorld application.
"""

View File

@@ -1,12 +0,0 @@
from server.infrastructure.providers.streaming.Provider import Provider
from server.infrastructure.providers.streaming.voe import VOE
class Providers:
    """Lookup table mapping provider names to streaming-provider instances."""
    def __init__(self):
        # Only the VOE provider is registered here at present.
        self.dict = {"VOE": VOE()}
    def GetProvider(self, key: str) -> Provider:
        """Return the provider registered under *key*; raises KeyError if unknown."""
        return self.dict[key]

View File

@@ -1,3 +0,0 @@
"""
Business use cases for the AniWorld application.
"""

View File

@@ -1,7 +1,7 @@
{
"security": {
"master_password_hash": "37b5bb3de81bce2d9c17e4f775536d618bdcb0f34aba599cc55b82b087a7ade7",
"salt": "f8e09fa3f58d7ffece5d194108cb8c32bf0ad4da10e79d4bae4ef12dfce8ab57",
"master_password_hash": "1353f6d9db7090c302864c2d6437dc11cc96cd66d59d7737d1b345603fdbdfda",
"salt": "a25e23440d681cef2d75c0adb6de0913359a1d8b9f98f9747fc75f53c79c4bd4",
"session_timeout_hours": 24,
"max_failed_attempts": 5,
"lockout_duration_minutes": 30

View File

@@ -1,3 +0,0 @@
"""
Infrastructure layer for external concerns implementation.
"""

View File

@@ -1,131 +0,0 @@
import os
import re
import logging
from server.core.entities.series import Serie
import traceback
from server.infrastructure.logging.GlobalLogger import error_logger, noKeyFound_logger
from server.core.exceptions.Exceptions import NoKeyFoundException, MatchNotFoundError
from server.infrastructure.providers.base_provider import Loader
class SerieScanner:
    """Scans the anime library on disk and determines missing episodes.

    For every series folder the scanner reads its key (or serialized data),
    asks the loader how many episodes each season should have, and records
    which episodes are absent among the local .mp4 files.
    """

    def __init__(self, basePath: str, loader: Loader):
        self.directory = basePath
        self.folderDict: dict[str, Serie] = {}  # series key -> scanned Serie
        self.loader = loader
        logging.info(f"Initialized Loader with base path: {self.directory}")

    def Reinit(self):
        """Reset all state gathered by a previous Scan()."""
        self.folderDict = {}

    def is_null_or_whitespace(self, s):
        """Return True when *s* is None, empty, or whitespace only."""
        return s is None or s.strip() == ""

    def GetTotalToScan(self):
        """Number of series folders a full Scan() would visit."""
        return sum(1 for _ in self.__find_mp4_files())

    def Scan(self, callback):
        """Scan every series folder and persist its missing-episode map.

        ``callback(folder, counter)`` is invoked once per folder for progress
        reporting. Per-folder errors are logged and the scan continues.
        """
        logging.info("Starting process to load missing episodes")
        counter = 0
        for folder, mp4_files in self.__find_mp4_files():
            try:
                counter += 1
                callback(folder, counter)
                serie = self.__ReadDataFromFile(folder)
                if serie is not None and not self.is_null_or_whitespace(serie.key):
                    missings, site = self.__GetMissingEpisodesAndSeason(serie.key, mp4_files)
                    serie.episodeDict = missings
                    serie.folder = folder
                    serie.save_to_file(os.path.join(self.directory, folder, 'data'))
                    if serie.key in self.folderDict:
                        # BUG FIX: was logging.ERROR(...) — logging.ERROR is an
                        # int constant, so calling it raised TypeError.
                        logging.error(f"duplication found: {serie.key}")
                    self.folderDict[serie.key] = serie
                    noKeyFound_logger.info(f"Saved Serie: '{str(serie)}'")
            except NoKeyFoundException as nkfe:
                # BUG FIX: was NoKeyFoundException.error(...), which does not
                # exist; log through the dedicated no-key logger instead.
                noKeyFound_logger.error(f"Error processing folder '{folder}': {nkfe}")
            except Exception as e:
                error_logger.error(
                    f"Folder: '{folder}' - Unexpected error processing folder "
                    f"'{folder}': {e} \n {traceback.format_exc()}")
                continue

    def __find_mp4_files(self):
        """Yield (series_folder, [mp4 paths]) for each directory in the library."""
        logging.info("Scanning for .mp4 files")
        for anime_name in os.listdir(self.directory):
            anime_path = os.path.join(self.directory, anime_name)
            if os.path.isdir(anime_path):
                mp4_files = []
                for root, _, files in os.walk(anime_path):
                    for file in files:
                        if file.endswith(".mp4"):
                            mp4_files.append(os.path.join(root, file))
                # mp4_files is already [] when nothing was found; the old
                # `mp4_files if has_files else []` conditional was redundant.
                yield anime_name, mp4_files

    def __remove_year(self, input_string: str):
        """Strip a trailing '(YYYY)' year marker from a series title."""
        cleaned_string = re.sub(r'\(\d{4}\)', '', input_string).strip()
        logging.debug(f"Removed year from '{input_string}' -> '{cleaned_string}'")
        return cleaned_string

    def __ReadDataFromFile(self, folder_name: str):
        """Build a Serie for *folder_name* from its 'key' or 'data' file.

        Returns None when the folder carries neither file.
        """
        folder_path = os.path.join(self.directory, folder_name)
        key_file = os.path.join(folder_path, 'key')
        serie_file = os.path.join(folder_path, 'data')
        if os.path.exists(key_file):
            with open(key_file, 'r') as file:
                key = file.read().strip()
            logging.info(f"Key found for folder '{folder_name}': {key}")
            return Serie(key, "", "aniworld.to", folder_name, dict())
        if os.path.exists(serie_file):
            # Cleanup: the old code opened the file here only for
            # load_from_file() to reopen it; open it once inside the loader.
            logging.info(f"load serie_file from '{folder_name}': {serie_file}")
            return Serie.load_from_file(serie_file)
        return None

    def __GetEpisodeAndSeason(self, filename: str):
        """Extract (season, episode) ints from an 'SxxEyy' marker in *filename*."""
        match = re.search(r'S(\d+)E(\d+)', filename)
        if match:
            season = int(match.group(1))
            episode = int(match.group(2))
            # BUG FIX: log lines previously printed a literal '(unknown)'
            # instead of the filename being parsed.
            logging.debug(f"Extracted season {season}, episode {episode} from '{filename}'")
            return season, episode
        logging.error(f"Failed to find season/episode pattern in '{filename}'")
        raise MatchNotFoundError("Season and episode pattern not found in the filename.")

    def __GetEpisodesAndSeasons(self, mp4_files: list):
        """Group present episode numbers by season for the given .mp4 paths."""
        episodes_dict = {}
        for file in mp4_files:
            season, episode = self.__GetEpisodeAndSeason(file)
            episodes_dict.setdefault(season, []).append(episode)
        return episodes_dict

    def __GetMissingEpisodesAndSeason(self, key: str, mp4_files: list):
        """Return ({season: [missing episode numbers]}, site) for series *key*."""
        # Expected episode count per season, according to the provider.
        expected_dict = self.loader.get_season_episode_count(key)
        filedict = self.__GetEpisodesAndSeasons(mp4_files)
        episodes_dict = {}
        for season, expected_count in expected_dict.items():
            existing_episodes = filedict.get(season, [])
            missing_episodes = [
                ep for ep in range(1, expected_count + 1)
                if ep not in existing_episodes and self.loader.IsLanguage(season, ep, key)
            ]
            if missing_episodes:
                episodes_dict[season] = missing_episodes
        return episodes_dict, "aniworld.to"

View File

@@ -1,353 +0,0 @@
"""
Logging configuration for AniWorld Flask application.
Provides structured logging with different handlers for console, file, and fail2ban.
"""
import logging
import logging.handlers
import os
import sys
from datetime import datetime
from typing import Optional
from config import config
class UnicodeStreamHandler(logging.StreamHandler):
    """Custom stream handler that safely handles Unicode characters.

    Windows consoles often use legacy code pages (e.g. cp1252) that cannot
    represent every log character; this handler degrades gracefully instead
    of raising UnicodeEncodeError from inside the logging machinery.
    """
    def __init__(self, stream=None):
        super().__init__(stream)
    def emit(self, record):
        """Write the formatted record, replacing unencodable characters."""
        try:
            msg = self.format(record)
            stream = self.stream
            # Handle Unicode encoding issues on Windows
            if hasattr(stream, 'encoding') and stream.encoding:
                try:
                    # Try to encode with the stream's encoding
                    encoded_msg = msg.encode(stream.encoding, errors='replace').decode(stream.encoding)
                    stream.write(encoded_msg + self.terminator)
                except (UnicodeEncodeError, UnicodeDecodeError):
                    # Fallback: replace problematic characters
                    safe_msg = msg.encode('ascii', errors='replace').decode('ascii')
                    stream.write(safe_msg + self.terminator)
            else:
                # No encoding info, write directly but catch errors
                try:
                    stream.write(msg + self.terminator)
                except UnicodeEncodeError:
                    # Last resort: ASCII-only output
                    safe_msg = msg.encode('ascii', errors='replace').decode('ascii')
                    stream.write(safe_msg + self.terminator)
            self.flush()
        except RecursionError:
            # Per logging convention, recursion errors must propagate.
            raise
        except Exception:
            self.handleError(record)
class Fail2BanFormatter(logging.Formatter):
    """Formatter emitting fail2ban-parsable authentication-failure lines."""

    def format(self, record):
        tagged = hasattr(record, 'client_ip') and hasattr(record, 'username')
        if not tagged:
            # Records without the auth attributes fall back to normal formatting.
            return super().format(record)
        # Line shape consumed by the fail2ban filter regex.
        return f"authentication failure for [{record.client_ip}] user [{record.username}]"
class StructuredFormatter(logging.Formatter):
    """Formatter producing 'timestamp - LEVEL - component - function - message' lines."""

    def format(self, record):
        # LogRecord only gains `asctime` through a base-class format() call;
        # synthesize it here since we assemble the line ourselves.
        if not hasattr(record, 'asctime'):
            record.asctime = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
        component = getattr(record, 'component', record.name)
        try:
            message = record.getMessage()
        except (UnicodeEncodeError, UnicodeDecodeError):
            message = str(record.msg)
        line = (f"{record.asctime} - {record.levelname:8} - "
                f"{component:15} - {record.funcName:20} - {message}")
        if record.exc_info:
            line = f"{line}\n{self.formatException(record.exc_info)}"
        return line
class ConsoleOnlyFormatter(logging.Formatter):
    """Compact console formatter: '[HH:MM:SS] LEVEL: message', ASCII-safe."""

    def format(self, record):
        stamp = datetime.now().strftime('%H:%M:%S')
        try:
            text = record.getMessage()
            if isinstance(text, str):
                # Console encodings vary (especially on Windows); degrade any
                # non-ASCII characters to '?' rather than risk an exception.
                text = text.encode('ascii', errors='replace').decode('ascii')
        except (UnicodeEncodeError, UnicodeDecodeError):
            text = str(record.msg)
        return f"[{stamp}] {record.levelname}: {text}"
class LoggingConfig:
    """Centralized logging configuration manager.

    Wires up four channels: a rotating main application log, an optional
    console handler, a fail2ban-compatible auth-failure log, and a separate
    download-progress log. All files live under ``logs/``.
    """
    def __init__(self):
        self.log_directory = "logs"
        self.main_log_file = "aniworld.log"
        self.auth_log_file = "auth_failures.log"
        self.download_log_file = "downloads.log"
        # Create logs directory if it doesn't exist
        os.makedirs(self.log_directory, exist_ok=True)
        # Configure loggers
        self._setup_loggers()
    def _setup_loggers(self):
        """Setup all loggers with appropriate handlers and formatters."""
        # Get log level from config
        log_level = getattr(config, 'log_level', 'INFO')
        console_logging = getattr(config, 'enable_console_logging', True)
        console_progress = getattr(config, 'enable_console_progress', False)
        # Convert string log level to logging constant
        numeric_level = getattr(logging, log_level.upper(), logging.INFO)
        # Clear existing handlers
        logging.root.handlers.clear()
        # Root logger configuration
        root_logger = logging.getLogger()
        root_logger.setLevel(logging.DEBUG)  # Capture everything, filter at handler level
        # File handler for main application log
        file_handler = logging.handlers.RotatingFileHandler(
            os.path.join(self.log_directory, self.main_log_file),
            maxBytes=10*1024*1024,  # 10MB
            backupCount=5
        )
        file_handler.setLevel(logging.DEBUG)
        file_handler.setFormatter(StructuredFormatter())
        # Console handler (optional, controlled by config)
        if console_logging:
            console_handler = UnicodeStreamHandler(sys.stdout)
            console_handler.setLevel(numeric_level)
            console_handler.setFormatter(ConsoleOnlyFormatter())
            root_logger.addHandler(console_handler)
        root_logger.addHandler(file_handler)
        # Fail2ban authentication logger
        self._setup_auth_logger()
        # Download progress logger (separate from console)
        self._setup_download_logger()
        # Configure third-party library loggers to reduce noise
        self._configure_third_party_loggers()
        # Suppress progress bars in console if disabled
        if not console_progress:
            self._suppress_progress_output()
    def _setup_auth_logger(self):
        """Setup dedicated logger for authentication failures (fail2ban compatible)."""
        auth_logger = logging.getLogger('auth_failures')
        auth_logger.setLevel(logging.INFO)
        auth_logger.propagate = False  # Don't propagate to root logger
        # File handler for authentication failures
        auth_handler = logging.handlers.RotatingFileHandler(
            os.path.join(self.log_directory, self.auth_log_file),
            maxBytes=5*1024*1024,  # 5MB
            backupCount=3
        )
        auth_handler.setLevel(logging.INFO)
        auth_handler.setFormatter(Fail2BanFormatter())
        auth_logger.addHandler(auth_handler)
    def _setup_download_logger(self):
        """Setup dedicated logger for download progress (separate from console)."""
        download_logger = logging.getLogger('download_progress')
        download_logger.setLevel(logging.INFO)
        download_logger.propagate = False  # Don't propagate to root logger
        # File handler for download progress
        download_handler = logging.handlers.RotatingFileHandler(
            os.path.join(self.log_directory, self.download_log_file),
            maxBytes=20*1024*1024,  # 20MB
            backupCount=3
        )
        download_handler.setLevel(logging.INFO)
        download_handler.setFormatter(StructuredFormatter())
        download_logger.addHandler(download_handler)
    def _configure_third_party_loggers(self):
        """Configure third-party library loggers to reduce noise."""
        # Suppress noisy third-party loggers
        noisy_loggers = [
            'urllib3.connectionpool',
            'charset_normalizer',
            'requests.packages.urllib3',
            'werkzeug',
            'socketio.server',
            'engineio.server'
        ]
        for logger_name in noisy_loggers:
            logger = logging.getLogger(logger_name)
            logger.setLevel(logging.WARNING)
    def _suppress_progress_output(self):
        """Suppress progress bar output from console."""
        # This will be used to control progress bar display
        # The actual progress bars should check this setting
        pass
    def get_logger(self, name: str, component: Optional[str] = None) -> logging.Logger:
        """Get a logger instance with optional component name.

        When *component* is given, the returned adapter injects it into every
        record so StructuredFormatter can display it in the component column.
        """
        logger = logging.getLogger(name)
        # Add component info for structured logging
        if component:
            # Create a custom LoggerAdapter to add component info
            class ComponentAdapter(logging.LoggerAdapter):
                def process(self, msg, kwargs):
                    return msg, kwargs
                def _log(self, level, msg, args, exc_info=None, extra=None, stack_info=False):
                    if extra is None:
                        extra = {}
                    extra['component'] = component
                    return self.logger._log(level, msg, args, exc_info, extra, stack_info)
            return ComponentAdapter(logger, {})
        return logger
    def log_auth_failure(self, client_ip: str, username: str = "unknown"):
        """Log authentication failure in fail2ban compatible format."""
        auth_logger = logging.getLogger('auth_failures')
        # Create log record with custom attributes
        record = logging.LogRecord(
            name='auth_failures',
            level=logging.INFO,
            pathname='',
            lineno=0,
            msg='Authentication failure',
            args=(),
            exc_info=None
        )
        # client_ip/username are what Fail2BanFormatter keys on.
        record.client_ip = client_ip
        record.username = username
        auth_logger.handle(record)
    def log_download_progress(self, series_name: str, episode: str, progress: float,
                              speed: str = "", eta: str = ""):
        """Log download progress to dedicated download log."""
        download_logger = logging.getLogger('download_progress')
        message = f"Downloading {series_name} - {episode} - Progress: {progress:.1f}%"
        if speed:
            message += f" - Speed: {speed}"
        if eta:
            message += f" - ETA: {eta}"
        download_logger.info(message)
    def update_log_level(self, level: str):
        """Update the log level for console output.

        Returns True on success, False when *level* is not a valid level name.
        """
        try:
            numeric_level = getattr(logging, level.upper())
            # Update console handler level
            root_logger = logging.getLogger()
            for handler in root_logger.handlers:
                if isinstance(handler, logging.StreamHandler) and handler.stream == sys.stdout:
                    handler.setLevel(numeric_level)
                    break
            # Update config
            config.set('logging.level', level.upper())
            return True
        except AttributeError:
            return False
    def get_log_files(self):
        """Get list of current log files with their sizes."""
        log_files = []
        for filename in os.listdir(self.log_directory):
            if filename.endswith('.log'):
                file_path = os.path.join(self.log_directory, filename)
                file_size = os.path.getsize(file_path)
                file_modified = datetime.fromtimestamp(os.path.getmtime(file_path))
                log_files.append({
                    'name': filename,
                    'size': file_size,
                    'size_mb': round(file_size / (1024 * 1024), 2),
                    'modified': file_modified.isoformat(),
                    'path': file_path
                })
        return log_files
    def cleanup_old_logs(self, days: int = 30):
        """Clean up log files older than specified days.

        The main ``aniworld.log`` (and its rotations) is always preserved.
        Returns the list of removed file names.
        """
        import time
        cutoff_time = time.time() - (days * 24 * 60 * 60)
        cleaned_files = []
        for filename in os.listdir(self.log_directory):
            if filename.endswith('.log') and not filename.startswith('aniworld.log'):
                file_path = os.path.join(self.log_directory, filename)
                if os.path.getmtime(file_path) < cutoff_time:
                    try:
                        os.remove(file_path)
                        cleaned_files.append(filename)
                    except OSError:
                        # Best effort: a locked/in-use file is simply skipped.
                        pass
        return cleaned_files
# Global logging configuration instance
logging_config = LoggingConfig()
def get_logger(name: str, component: Optional[str] = None) -> logging.Logger:
"""Convenience function to get a logger instance."""
return logging_config.get_logger(name, component)
def log_auth_failure(client_ip: str, username: str = "unknown"):
"""Convenience function to log authentication failure."""
logging_config.log_auth_failure(client_ip, username)
def log_download_progress(series_name: str, episode: str, progress: float,
                          speed: str = "", eta: str = ""):
    """Convenience function to log download progress.

    Delegates to the module-wide ``logging_config`` singleton.
    """
    logging_config.log_download_progress(series_name, episode, progress, speed, eta)

View File

@@ -1,343 +0,0 @@
import os
import re
import logging
import json
import requests
import html
from urllib.parse import quote
from bs4 import BeautifulSoup
from fake_useragent import UserAgent
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry
from server.infrastructure.providers.base_provider import Loader
from server.core.interfaces.providers import Providers
from yt_dlp import YoutubeDL
import shutil
# Read timeout from environment variable, default to 600 seconds (10 minutes)
timeout = int(os.getenv("DOWNLOAD_TIMEOUT", 600))

# Shared formatter for both error log files.
_error_formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')

# Dedicated log for failed downloads.  delay=True postpones creating the
# file until the first record is actually emitted.
download_error_logger = logging.getLogger("DownloadErrors")
download_error_handler = logging.FileHandler("../../download_errors.log", delay=True)
download_error_handler.setLevel(logging.ERROR)
download_error_handler.setFormatter(_error_formatter)
if not download_error_logger.handlers:
    # Bug fix: the handler was previously created but never attached,
    # so records logged to "DownloadErrors" never reached the file.
    download_error_logger.addHandler(download_error_handler)

# Dedicated log for episodes whose stream key could not be resolved.
noKeyFound_logger = logging.getLogger("NoKeyFound")
noKeyFound_handler = logging.FileHandler("../../NoKeyFound.log", delay=True)
noKeyFound_handler.setLevel(logging.ERROR)
noKeyFound_handler.setFormatter(_error_formatter)
if not noKeyFound_logger.handlers:
    noKeyFound_logger.addHandler(noKeyFound_handler)
class AniworldLoader(Loader):
    """Loader for aniworld.to: search, language checks and episode downloads.

    HTML responses are cached per anime key (``_KeyHTMLDict``) and per
    episode (``_EpisodeHTMLDict``); :meth:`ClearCache` drops both caches
    to keep memory bounded between series.
    """

    def __init__(self):
        # Provider names in the order they are tried during a download.
        self.SUPPORTED_PROVIDERS = ["VOE", "Doodstream", "Vidmoly", "Vidoza", "SpeedFiles", "Streamtape", "Luluvdo"]
        # Browser-like headers so aniworld.to serves regular desktop pages.
        self.AniworldHeaders = {
            "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8",
            "accept-encoding": "gzip, deflate, br, zstd",
            "accept-language": "de,de-DE;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6",
            "cache-control": "max-age=0",
            "priority": "u=0, i",
            "sec-ch-ua": '"Chromium";v="136", "Microsoft Edge";v="136", "Not.A/Brand";v="99"',
            "sec-ch-ua-mobile": "?0",
            "sec-ch-ua-platform": '"Windows"',
            "sec-fetch-dest": "document",
            "sec-fetch-mode": "navigate",
            "sec-fetch-site": "none",
            "sec-fetch-user": "?1",
            "upgrade-insecure-requests": "1",
            "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36 Edg/136.0.0.0"
        }
        # Characters that are illegal in file names on common file systems.
        self.INVALID_PATH_CHARS = ['<', '>', ':', '"', '/', '\\', '|', '?', '*', '&']
        self.RANDOM_USER_AGENT = UserAgent().random
        self.LULUVDO_USER_AGENT = "Mozilla/5.0 (Android 15; Mobile; rv:132.0) Gecko/132.0 Firefox/132.0"
        # Extra HTTP headers some hosts require before serving the stream.
        self.PROVIDER_HEADERS = {
            "Vidmoly": ['Referer: "https://vidmoly.to"'],
            "Doodstream": ['Referer: "https://dood.li/"'],
            "VOE": [f'User-Agent: {self.RANDOM_USER_AGENT}'],
            "Luluvdo": [
                f'User-Agent: {self.LULUVDO_USER_AGENT}',
                'Accept-Language: de-DE,de;q=0.9,en-US;q=0.8,en;q=0.7',
                'Origin: "https://luluvdo.com"',
                'Referer: "https://luluvdo.com/"'
            ]}
        self.ANIWORLD_TO = "https://aniworld.to"
        self.session = requests.Session()
        # Retry transient server errors with exponential backoff (1s, 2s, 4s, ...).
        retries = Retry(
            total=5,
            backoff_factor=1,
            status_forcelist=[500, 502, 503, 504],
            allowed_methods=["GET"]
        )
        self.session.mount("https://", HTTPAdapter(max_retries=retries))
        self.DEFAULT_REQUEST_TIMEOUT = 30
        self._KeyHTMLDict = {}
        self._EpisodeHTMLDict = {}
        self.Providers = Providers()

    def ClearCache(self):
        """Drop all cached series and episode HTML responses."""
        self._KeyHTMLDict = {}
        self._EpisodeHTMLDict = {}

    def RemoveFromCache(self):
        """Drop only the cached episode HTML responses."""
        self._EpisodeHTMLDict = {}

    def Search(self, word: str) -> list:
        """Search aniworld.to for *word* and return the raw result list."""
        search_url = f"{self.ANIWORLD_TO}/ajax/seriesSearch?keyword={quote(word)}"
        return self.fetch_anime_list(search_url)

    def fetch_anime_list(self, url: str) -> list:
        """Fetch and JSON-decode a search endpoint response.

        The endpoint occasionally returns HTML-escaped or BOM-prefixed
        payloads, so a cleanup pass is attempted before giving up.

        Raises:
            ValueError: if the payload cannot be parsed as JSON.
        """
        response = self.session.get(url, timeout=self.DEFAULT_REQUEST_TIMEOUT)
        response.raise_for_status()
        clean_text = response.text.strip()
        try:
            decoded_data = json.loads(html.unescape(clean_text))
            return decoded_data if isinstance(decoded_data, list) else []
        except json.JSONDecodeError:
            try:
                # Strip a UTF-8 BOM and control characters, then retry.
                clean_text = clean_text.encode('utf-8').decode('utf-8-sig')
                clean_text = re.sub(r'[\x00-\x1F\x7F-\x9F]', '', clean_text)
                decoded_data = json.loads(clean_text)
                return decoded_data if isinstance(decoded_data, list) else []
            except json.JSONDecodeError as exc:
                raise ValueError("Could not get valid anime: ") from exc

    def _GetLanguageKey(self, language: str) -> int:
        """Map a language label to the site's numeric code (0 = unknown)."""
        language_map = {"German Dub": 1, "English Sub": 2, "German Sub": 3}
        return language_map.get(language, 0)

    def IsLanguage(self, season: int, episode: int, key: str, language: str = "German Dub") -> bool:
        """Return True if the episode offers *language*.

        Language codes used by the site:
            1: German Dub, 2: English Sub, 3: German Sub
        """
        languageCode = self._GetLanguageKey(language)
        episode_soup = BeautifulSoup(self._GetEpisodeHTML(season, episode, key).content, 'html.parser')
        change_language_box_div = episode_soup.find(
            'div', class_='changeLanguageBox')
        available = []
        if change_language_box_div:
            for img in change_language_box_div.find_all('img'):
                lang_key = img.get('data-lang-key')
                if lang_key and lang_key.isdigit():
                    available.append(int(lang_key))
        return languageCode in available

    def Download(self, baseDirectory: str, serieFolder: str, season: int, episode: int, key: str, language: str = "German Dub", progress_callback: callable = None) -> bool:
        """Download one episode (or movie when ``season == 0``) as MP4.

        The file is first written to ``./Temp/`` and then copied into
        ``baseDirectory/serieFolder/Season {season}/``.

        Returns:
            True when a file was produced, False when no provider yielded
            a working link.  The HTML caches are always cleared afterwards.
        """
        sanitized_anime_title = ''.join(
            char for char in self.GetTitle(key) if char not in self.INVALID_PATH_CHARS
        )
        if season == 0:
            # Season 0 is the site's convention for movies/specials.
            output_file = (
                f"{sanitized_anime_title} - "
                f"Movie {episode:02} - "
                f"({language}).mp4"
            )
        else:
            output_file = (
                f"{sanitized_anime_title} - "
                f"S{season:02}E{episode:03} - "
                f"({language}).mp4"
            )
        folderPath = os.path.join(baseDirectory, serieFolder, f"Season {season}")
        output_path = os.path.join(folderPath, output_file)
        os.makedirs(os.path.dirname(output_path), exist_ok=True)
        temp_dir = "./Temp/"
        # Bug fix: create the directory itself, not its dirname.
        os.makedirs(temp_dir, exist_ok=True)
        temp_Path = os.path.join(temp_dir, output_file)
        success = False
        try:
            for _provider in self.SUPPORTED_PROVIDERS:
                resolved = self._get_direct_link_from_provider(season, episode, key, language)
                if resolved is None:
                    # Bug fix: previously a missing link caused a TypeError
                    # when unpacking None.
                    continue
                link, header = resolved
                ydl_opts = {
                    'fragment_retries': float('inf'),
                    'outtmpl': temp_Path,
                    'quiet': True,
                    'no_warnings': True,
                    'progress_with_newline': False,
                    'nocheckcertificate': True,
                }
                if header:
                    ydl_opts['http_headers'] = header
                if progress_callback:
                    ydl_opts['progress_hooks'] = [progress_callback]
                with YoutubeDL(ydl_opts) as ydl:
                    ydl.download([link])
                if os.path.exists(temp_Path):
                    # Copy into the library, then clean up the temp file.
                    shutil.copy(temp_Path, output_path)
                    os.remove(temp_Path)
                    success = True
                    break
        finally:
            self.ClearCache()
        # Bug fix: the method is declared -> bool but never returned a value.
        return success

    def GetSiteKey(self) -> str:
        """Return the site identifier used by the loader registry."""
        return "aniworld.to"

    def GetTitle(self, key: str) -> str:
        """Return the series title for *key*, or "" when it cannot be parsed."""
        soup = BeautifulSoup(self._GetKeyHTML(key).content, 'html.parser')
        title_div = soup.find('div', class_='series-title')
        if title_div:
            h1 = title_div.find('h1')
            span = h1.find('span') if h1 else None
            if span:
                return span.text
        return ""

    def _GetKeyHTML(self, key: str):
        """Return the (cached) series overview page response for *key*."""
        if key not in self._KeyHTMLDict:
            self._KeyHTMLDict[key] = self.session.get(
                f"{self.ANIWORLD_TO}/anime/stream/{key}",
                timeout=self.DEFAULT_REQUEST_TIMEOUT
            )
        return self._KeyHTMLDict[key]

    def _GetEpisodeHTML(self, season: int, episode: int, key: str):
        """Return the (cached) episode page response.

        Bug fix: the cache is keyed by ``(key, season, episode)``; the old
        code tested membership with ``key`` alone, so the cache never hit
        (and would have raised KeyError if it had).
        """
        cache_key = (key, season, episode)
        if cache_key in self._EpisodeHTMLDict:
            return self._EpisodeHTMLDict[cache_key]
        link = (
            f"{self.ANIWORLD_TO}/anime/stream/{key}/"
            f"staffel-{season}/episode-{episode}"
        )
        self._EpisodeHTMLDict[cache_key] = self.session.get(
            link, timeout=self.DEFAULT_REQUEST_TIMEOUT
        )
        return self._EpisodeHTMLDict[cache_key]

    def _get_provider_from_html(self, season: int, episode: int, key: str) -> dict:
        """Parse the episode page into ``{provider: {lang_code: redirect_url}}``.

        Example:
            {
                'VOE': {1: 'https://aniworld.to/redirect/1766412',
                        2: 'https://aniworld.to/redirect/1766405'},
                'Doodstream': {1: 'https://aniworld.to/redirect/1987922'},
            }
        """
        soup = BeautifulSoup(self._GetEpisodeHTML(season, episode, key).content, 'html.parser')
        providers = {}
        episode_links = soup.find_all(
            'li', class_=lambda x: x and x.startswith('episodeLink')
        )
        for link in episode_links:
            provider_name_tag = link.find('h4')
            provider_name = provider_name_tag.text.strip() if provider_name_tag else None
            redirect_link_tag = link.find('a', class_='watchEpisode')
            redirect_link = redirect_link_tag['href'] if redirect_link_tag else None
            lang_key = link.get('data-lang-key')
            lang_key = int(lang_key) if lang_key and lang_key.isdigit() else None
            if provider_name and redirect_link and lang_key:
                providers.setdefault(provider_name, {})[lang_key] = f"{self.ANIWORLD_TO}{redirect_link}"
        return providers

    def _get_redirect_link(self, season: int, episode: int, key: str, language: str = "German Dub"):
        """Return ``(redirect_url, provider_name)`` or None when unavailable."""
        languageCode = self._GetLanguageKey(language)
        if self.IsLanguage(season, episode, key, language):
            for provider_name, lang_dict in self._get_provider_from_html(season, episode, key).items():
                if languageCode in lang_dict:
                    return lang_dict[languageCode], provider_name
        return None

    def _get_embeded_link(self, season: int, episode: int, key: str, language: str = "German Dub"):
        """Follow the redirect URL and return the host's embed URL, or None."""
        resolved = self._get_redirect_link(season, episode, key, language)
        if resolved is None:
            # Bug fix: the old code unpacked None here and crashed.
            return None
        redirect_link, _provider_name = resolved
        return self.session.get(
            redirect_link, timeout=self.DEFAULT_REQUEST_TIMEOUT,
            headers={'User-Agent': self.RANDOM_USER_AGENT}).url

    def _get_direct_link_from_provider(self, season: int, episode: int, key: str, language: str = "German Dub"):
        """Resolve the embed URL to a direct ``(link, headers)`` pair, or None.

        NOTE(review): only the VOE resolver is currently wired up; the
        other SUPPORTED_PROVIDERS would need their own resolvers here.
        """
        embeded_link = self._get_embeded_link(season, episode, key, language)
        if embeded_link is None:
            return None
        return self.Providers.GetProvider("VOE").GetLink(embeded_link, self.DEFAULT_REQUEST_TIMEOUT)

    def get_season_episode_count(self, slug: str) -> dict:
        """Return ``{season_number: episode_count}`` for the series *slug*."""
        base_url = f"{self.ANIWORLD_TO}/anime/stream/{slug}/"
        # Use the shared session so the retry/backoff policy applies here too.
        response = self.session.get(base_url, timeout=self.DEFAULT_REQUEST_TIMEOUT)
        soup = BeautifulSoup(response.content, 'html.parser')
        season_meta = soup.find('meta', itemprop='numberOfSeasons')
        number_of_seasons = int(season_meta['content']) if season_meta else 0
        episode_counts = {}
        for season in range(1, number_of_seasons + 1):
            season_url = f"{base_url}staffel-{season}"
            response = self.session.get(season_url, timeout=self.DEFAULT_REQUEST_TIMEOUT)
            soup = BeautifulSoup(response.content, 'html.parser')
            unique_links = set(
                link['href']
                for link in soup.find_all('a', href=True)
                if f"staffel-{season}/episode-" in link['href']
            )
            episode_counts[season] = len(unique_links)
        return episode_counts

View File

@@ -1,27 +0,0 @@
from abc import ABC, abstractmethod


class Loader(ABC):
    """Abstract interface implemented by every streaming-site loader."""

    @abstractmethod
    def Search(self, word: str) -> list:
        """Return a list of search results for *word*."""

    @abstractmethod
    def IsLanguage(self, season: int, episode: int, key: str, language: str = "German Dub") -> bool:
        """Return True if the episode is available in *language*."""

    @abstractmethod
    def Download(self, baseDirectory: str, serieFolder: str, season: int, episode: int, key: str,
                 language: str = "German Dub", progress_callback: callable = None) -> bool:
        """Download one episode below *baseDirectory*; return success.

        Signature fix: implementations accept a *language* argument, so the
        abstract method now declares it as well.
        """

    @abstractmethod
    def GetSiteKey(self) -> str:
        """Return the unique site identifier (e.g. ``"aniworld.to"``)."""

    @abstractmethod
    def GetTitle(self, key: str) -> str:
        """Return the display title for the anime identified by *key*.

        Signature fix: implementations take the anime *key*; the abstract
        method now declares it as well.
        """

    @abstractmethod
    def get_season_episode_count(self, slug: str) -> dict:
        """Return a mapping of season number to episode count for *slug*."""

View File

@@ -1,671 +0,0 @@
"""
Enhanced AniWorld Loader with Error Handling and Recovery
This module extends the original AniWorldLoader with comprehensive
error handling, retry mechanisms, and recovery strategies.
"""
import os
import re
import logging
import json
import requests
import html
from urllib.parse import quote
import time
import hashlib
from typing import Optional, Dict, Any, Callable
from bs4 import BeautifulSoup
from fake_useragent import UserAgent
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry
from yt_dlp import YoutubeDL
import shutil
from server.infrastructure.providers.base_provider import Loader
from server.core.interfaces.providers import Providers
from error_handler import (
with_error_recovery,
recovery_strategies,
NetworkError,
DownloadError,
RetryableError,
NonRetryableError,
file_corruption_detector
)
class EnhancedAniWorldLoader(Loader):
    """Enhanced AniWorld loader with comprehensive error handling.

    Wraps all network and download operations in the recovery helpers from
    ``error_handler`` (retry decorators, recovery strategies, corruption
    detection) and keeps per-run download statistics.
    """

    def __init__(self):
        super().__init__()
        self.logger = logging.getLogger(__name__)
        # Provider names in the order they are tried during a download.
        self.SUPPORTED_PROVIDERS = ["VOE", "Doodstream", "Vidmoly", "Vidoza", "SpeedFiles", "Streamtape", "Luluvdo"]
        # Browser-like headers so aniworld.to serves regular desktop pages.
        self.AniworldHeaders = {
            "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8",
            "accept-encoding": "gzip, deflate, br, zstd",
            "accept-language": "de,de-DE;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6",
            "cache-control": "max-age=0",
            "priority": "u=0, i",
            "sec-ch-ua": '"Chromium";v="136", "Microsoft Edge";v="136", "Not.A/Brand";v="99"',
            "sec-ch-ua-mobile": "?0",
            "sec-ch-ua-platform": '"Windows"',
            "sec-fetch-dest": "document",
            "sec-fetch-mode": "navigate",
            "sec-fetch-site": "none",
            "sec-fetch-user": "?1",
            "upgrade-insecure-requests": "1",
            "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36 Edg/136.0.0.0"
        }
        # Characters that are illegal in file names on common file systems.
        self.INVALID_PATH_CHARS = ['<', '>', ':', '"', '/', '\\', '|', '?', '*', '&']
        self.RANDOM_USER_AGENT = UserAgent().random
        self.LULUVDO_USER_AGENT = "Mozilla/5.0 (Android 15; Mobile; rv:132.0) Gecko/132.0 Firefox/132.0"
        # Extra HTTP headers some hosts require before serving the stream.
        self.PROVIDER_HEADERS = {
            "Vidmoly": ['Referer: "https://vidmoly.to"'],
            "Doodstream": ['Referer: "https://dood.li/"'],
            "VOE": [f'User-Agent: {self.RANDOM_USER_AGENT}'],
            "Luluvdo": [
                f'User-Agent: {self.LULUVDO_USER_AGENT}',
                'Accept-Language: de-DE,de;q=0.9,en-US;q=0.8,en;q=0.7',
                'Origin: "https://luluvdo.com"',
                'Referer: "https://luluvdo.com/"'
            ]
        }
        self.ANIWORLD_TO = "https://aniworld.to"
        self.DEFAULT_REQUEST_TIMEOUT = 30
        # Initialize session with enhanced retry configuration
        self.session = self._create_robust_session()
        # Cache dictionaries (series page by key, episode page by tuple key)
        self._KeyHTMLDict = {}
        self._EpisodeHTMLDict = {}
        # Provider manager
        self.Providers = Providers()
        # Download statistics
        self.download_stats = {
            'total_downloads': 0,
            'successful_downloads': 0,
            'failed_downloads': 0,
            'retried_downloads': 0
        }
        # Read timeout from environment variable
        self.download_timeout = int(os.getenv("DOWNLOAD_TIMEOUT", 600))
        # Setup logging
        self._setup_logging()

    def _create_robust_session(self) -> requests.Session:
        """Create a session with robust retry and error handling configuration."""
        session = requests.Session()
        # Enhanced retry strategy
        retries = Retry(
            total=5,
            backoff_factor=2,  # More aggressive backoff
            status_forcelist=[408, 429, 500, 502, 503, 504, 520, 521, 522, 523, 524],
            allowed_methods=["GET", "POST", "HEAD"],
            raise_on_status=False  # Handle status errors manually
        )
        adapter = HTTPAdapter(
            max_retries=retries,
            pool_connections=10,
            pool_maxsize=20,
            pool_block=True
        )
        session.mount("https://", adapter)
        session.mount("http://", adapter)
        # Set default headers
        session.headers.update(self.AniworldHeaders)
        return session

    def _setup_logging(self):
        """Setup specialized logging for download errors and missing keys."""
        # NOTE(review): log paths are relative to the process CWD, not this
        # module's location — confirm where these files actually land.
        # Download error logger
        self.download_error_logger = logging.getLogger("DownloadErrors")
        download_error_handler = logging.FileHandler("../../download_errors.log")
        download_error_handler.setLevel(logging.ERROR)
        download_error_formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
        )
        download_error_handler.setFormatter(download_error_formatter)
        if not self.download_error_logger.handlers:
            self.download_error_logger.addHandler(download_error_handler)
        self.download_error_logger.setLevel(logging.ERROR)
        # No key found logger
        self.nokey_logger = logging.getLogger("NoKeyFound")
        nokey_handler = logging.FileHandler("../../NoKeyFound.log")
        nokey_handler.setLevel(logging.ERROR)
        nokey_handler.setFormatter(download_error_formatter)
        if not self.nokey_logger.handlers:
            self.nokey_logger.addHandler(nokey_handler)
        self.nokey_logger.setLevel(logging.ERROR)

    def ClearCache(self):
        """Clear all cached data."""
        self._KeyHTMLDict.clear()
        self._EpisodeHTMLDict.clear()
        self.logger.debug("Cache cleared")

    def RemoveFromCache(self):
        """Remove episode HTML cache."""
        self._EpisodeHTMLDict.clear()
        self.logger.debug("Episode cache cleared")

    @with_error_recovery(max_retries=3, context="anime_search")
    def Search(self, word: str) -> list:
        """Search for anime with error handling."""
        if not word or not word.strip():
            raise ValueError("Search term cannot be empty")
        search_url = f"{self.ANIWORLD_TO}/ajax/seriesSearch?keyword={quote(word)}"
        try:
            return self._fetch_anime_list_with_recovery(search_url)
        except Exception as e:
            self.logger.error(f"Search failed for term '{word}': {e}")
            raise RetryableError(f"Search failed: {e}") from e

    def _fetch_anime_list_with_recovery(self, url: str) -> list:
        """Fetch anime list with comprehensive error handling."""
        try:
            response = recovery_strategies.handle_network_failure(
                self.session.get,
                url,
                timeout=self.DEFAULT_REQUEST_TIMEOUT
            )
            if not response.ok:
                # Map HTTP status codes onto the retryable/non-retryable split.
                if response.status_code == 404:
                    raise NonRetryableError(f"URL not found: {url}")
                elif response.status_code == 403:
                    raise NonRetryableError(f"Access forbidden: {url}")
                elif response.status_code >= 500:
                    raise RetryableError(f"Server error {response.status_code}")
                else:
                    raise RetryableError(f"HTTP error {response.status_code}")
            return self._parse_anime_response(response.text)
        except (requests.RequestException, ConnectionError) as e:
            raise NetworkError(f"Network error during anime search: {e}") from e

    def _parse_anime_response(self, response_text: str) -> list:
        """Parse anime search response with error handling."""
        if not response_text or not response_text.strip():
            raise ValueError("Empty response from server")
        clean_text = response_text.strip()
        # Try multiple parsing strategies: plain unescape, BOM strip,
        # control-character strip — in that order.
        parsing_strategies = [
            lambda text: json.loads(html.unescape(text)),
            lambda text: json.loads(text.encode('utf-8').decode('utf-8-sig')),
            lambda text: json.loads(re.sub(r'[\x00-\x1F\x7F-\x9F]', '', text))
        ]
        for i, strategy in enumerate(parsing_strategies):
            try:
                decoded_data = strategy(clean_text)
                if isinstance(decoded_data, list):
                    self.logger.debug(f"Successfully parsed anime response with strategy {i + 1}")
                    return decoded_data
                else:
                    self.logger.warning(f"Strategy {i + 1} returned non-list data: {type(decoded_data)}")
            except json.JSONDecodeError as e:
                self.logger.debug(f"Parsing strategy {i + 1} failed: {e}")
                continue
        raise ValueError("Could not parse anime search response with any strategy")

    def _GetLanguageKey(self, language: str) -> int:
        """Get numeric language code (0 when the label is unknown)."""
        language_map = {
            "German Dub": 1,
            "English Sub": 2,
            "German Sub": 3
        }
        return language_map.get(language, 0)

    @with_error_recovery(max_retries=2, context="language_check")
    def IsLanguage(self, season: int, episode: int, key: str, language: str = "German Dub") -> bool:
        """Check if episode is available in specified language with error handling."""
        try:
            languageCode = self._GetLanguageKey(language)
            if languageCode == 0:
                raise ValueError(f"Unknown language: {language}")
            episode_response = self._GetEpisodeHTML(season, episode, key)
            soup = BeautifulSoup(episode_response.content, 'html.parser')
            change_language_box_div = soup.find('div', class_='changeLanguageBox')
            if not change_language_box_div:
                self.logger.debug(f"No language box found for {key} S{season}E{episode}")
                return False
            img_tags = change_language_box_div.find_all('img')
            available_languages = []
            for img in img_tags:
                lang_key = img.get('data-lang-key')
                if lang_key and lang_key.isdigit():
                    available_languages.append(int(lang_key))
            is_available = languageCode in available_languages
            self.logger.debug(f"Language check for {key} S{season}E{episode} - "
                              f"Requested: {languageCode}, Available: {available_languages}, "
                              f"Result: {is_available}")
            return is_available
        except Exception as e:
            # NOTE(review): this also wraps the ValueError for an unknown
            # language into a RetryableError — retrying cannot fix that case.
            self.logger.error(f"Language check failed for {key} S{season}E{episode}: {e}")
            raise RetryableError(f"Language check failed: {e}") from e

    def Download(self, baseDirectory: str, serieFolder: str, season: int, episode: int,
                 key: str, language: str = "German Dub", progress_callback: Callable = None) -> bool:
        """Download episode with comprehensive error handling and recovery.

        Skips the download when a valid file already exists; removes and
        re-downloads files that fail the corruption check.  Caches are
        always cleared afterwards.
        """
        self.download_stats['total_downloads'] += 1
        try:
            # Validate inputs
            if not all([baseDirectory, serieFolder, key]):
                raise ValueError("Missing required parameters for download")
            if season < 0 or episode < 0:
                raise ValueError("Season and episode must be non-negative")
            # Prepare file paths
            sanitized_anime_title = ''.join(
                char for char in self.GetTitle(key) if char not in self.INVALID_PATH_CHARS
            )
            if not sanitized_anime_title:
                sanitized_anime_title = f"Unknown_{key}"
            # Generate output filename (season 0 is the movie convention)
            if season == 0:
                output_file = f"{sanitized_anime_title} - Movie {episode:02} - ({language}).mp4"
            else:
                output_file = f"{sanitized_anime_title} - S{season:02}E{episode:03} - ({language}).mp4"
            # Create directory structure
            folder_path = os.path.join(baseDirectory, serieFolder, f"Season {season}")
            output_path = os.path.join(folder_path, output_file)
            # Check if file already exists and is valid
            if os.path.exists(output_path):
                if file_corruption_detector.is_valid_video_file(output_path):
                    self.logger.info(f"File already exists and is valid: {output_file}")
                    self.download_stats['successful_downloads'] += 1
                    return True
                else:
                    self.logger.warning(f"Existing file appears corrupted, removing: {output_path}")
                    try:
                        os.remove(output_path)
                    except Exception as e:
                        self.logger.error(f"Failed to remove corrupted file: {e}")
            os.makedirs(folder_path, exist_ok=True)
            # Create temp directory
            temp_dir = "./Temp/"
            os.makedirs(temp_dir, exist_ok=True)
            temp_path = os.path.join(temp_dir, output_file)
            # Attempt download with recovery strategies
            success = self._download_with_recovery(
                season, episode, key, language, temp_path, output_path, progress_callback
            )
            if success:
                self.download_stats['successful_downloads'] += 1
                self.logger.info(f"Successfully downloaded: {output_file}")
            else:
                self.download_stats['failed_downloads'] += 1
                self.download_error_logger.error(
                    f"Download failed for {key} S{season}E{episode} ({language})"
                )
            return success
        except Exception as e:
            self.download_stats['failed_downloads'] += 1
            self.download_error_logger.error(
                f"Download error for {key} S{season}E{episode}: {e}", exc_info=True
            )
            raise DownloadError(f"Download failed: {e}") from e
        finally:
            self.ClearCache()

    def _download_with_recovery(self, season: int, episode: int, key: str, language: str,
                                temp_path: str, output_path: str, progress_callback: Callable) -> bool:
        """Attempt download with multiple providers and recovery strategies.

        NOTE(review): the loop iterates over SUPPORTED_PROVIDERS, but
        _get_direct_link_from_provider always resolves via VOE, so every
        iteration fetches the same link; provider_name is log-only here.
        """
        for provider_name in self.SUPPORTED_PROVIDERS:
            try:
                self.logger.info(f"Attempting download with provider: {provider_name}")
                # Get download link and headers for provider
                link, headers = recovery_strategies.handle_network_failure(
                    self._get_direct_link_from_provider,
                    season, episode, key, language
                )
                if not link:
                    self.logger.warning(f"No download link found for provider: {provider_name}")
                    continue
                # Configure yt-dlp options
                ydl_opts = {
                    'fragment_retries': float('inf'),
                    'outtmpl': temp_path,
                    'quiet': True,
                    'no_warnings': True,
                    'progress_with_newline': False,
                    'nocheckcertificate': True,
                    'socket_timeout': self.download_timeout,
                    'http_chunk_size': 1024 * 1024,  # 1MB chunks
                }
                if headers:
                    ydl_opts['http_headers'] = headers
                if progress_callback:
                    ydl_opts['progress_hooks'] = [progress_callback]
                # Perform download with recovery
                # NOTE(review): assumes handle_download_failure(func, path,
                # *args) forwards only *args to func — confirm against the
                # error_handler module.
                success = recovery_strategies.handle_download_failure(
                    self._perform_ytdl_download,
                    temp_path,
                    ydl_opts,
                    link
                )
                if success and os.path.exists(temp_path):
                    # Verify downloaded file
                    if file_corruption_detector.is_valid_video_file(temp_path):
                        # Move to final location
                        shutil.copy2(temp_path, output_path)
                        # Clean up temp file
                        try:
                            os.remove(temp_path)
                        except Exception as e:
                            self.logger.warning(f"Failed to remove temp file: {e}")
                        return True
                    else:
                        self.logger.warning(f"Downloaded file failed validation: {temp_path}")
                        try:
                            os.remove(temp_path)
                        except Exception:
                            pass
            except Exception as e:
                self.logger.warning(f"Provider {provider_name} failed: {e}")
                self.download_stats['retried_downloads'] += 1
                continue
        return False

    def _perform_ytdl_download(self, ydl_opts: Dict[str, Any], link: str) -> bool:
        """Perform actual download using yt-dlp."""
        try:
            with YoutubeDL(ydl_opts) as ydl:
                ydl.download([link])
            return True
        except Exception as e:
            self.logger.error(f"yt-dlp download failed: {e}")
            raise DownloadError(f"Download failed: {e}") from e

    @with_error_recovery(max_retries=2, context="get_title")
    def GetTitle(self, key: str) -> str:
        """Get anime title with error handling."""
        try:
            soup = BeautifulSoup(self._GetKeyHTML(key).content, 'html.parser')
            title_div = soup.find('div', class_='series-title')
            if title_div:
                title_span = title_div.find('h1')
                if title_span:
                    span = title_span.find('span')
                    if span:
                        return span.text.strip()
            self.logger.warning(f"Could not extract title for key: {key}")
            return f"Unknown_Title_{key}"
        except Exception as e:
            self.logger.error(f"Failed to get title for key {key}: {e}")
            raise RetryableError(f"Title extraction failed: {e}") from e

    def GetSiteKey(self) -> str:
        """Get site identifier."""
        return "aniworld.to"

    @with_error_recovery(max_retries=2, context="get_key_html")
    def _GetKeyHTML(self, key: str):
        """Get cached HTML for anime key."""
        if key in self._KeyHTMLDict:
            return self._KeyHTMLDict[key]
        try:
            url = f"{self.ANIWORLD_TO}/anime/stream/{key}"
            response = recovery_strategies.handle_network_failure(
                self.session.get,
                url,
                timeout=self.DEFAULT_REQUEST_TIMEOUT
            )
            if not response.ok:
                if response.status_code == 404:
                    self.nokey_logger.error(f"Anime key not found: {key}")
                    raise NonRetryableError(f"Anime key not found: {key}")
                else:
                    raise RetryableError(f"HTTP error {response.status_code} for key {key}")
            self._KeyHTMLDict[key] = response
            return self._KeyHTMLDict[key]
        except Exception as e:
            self.logger.error(f"Failed to get HTML for key {key}: {e}")
            raise

    @with_error_recovery(max_retries=2, context="get_episode_html")
    def _GetEpisodeHTML(self, season: int, episode: int, key: str):
        """Get cached HTML for specific episode (keyed by key/season/episode)."""
        cache_key = (key, season, episode)
        if cache_key in self._EpisodeHTMLDict:
            return self._EpisodeHTMLDict[cache_key]
        try:
            url = f"{self.ANIWORLD_TO}/anime/stream/{key}/staffel-{season}/episode-{episode}"
            response = recovery_strategies.handle_network_failure(
                self.session.get,
                url,
                timeout=self.DEFAULT_REQUEST_TIMEOUT
            )
            if not response.ok:
                if response.status_code == 404:
                    raise NonRetryableError(f"Episode not found: {key} S{season}E{episode}")
                else:
                    raise RetryableError(f"HTTP error {response.status_code} for episode")
            self._EpisodeHTMLDict[cache_key] = response
            return self._EpisodeHTMLDict[cache_key]
        except Exception as e:
            self.logger.error(f"Failed to get episode HTML for {key} S{season}E{episode}: {e}")
            raise

    def _get_provider_from_html(self, season: int, episode: int, key: str) -> dict:
        """Extract providers from HTML with error handling.

        Returns ``{provider_name: {lang_code: redirect_url}}``.
        """
        try:
            soup = BeautifulSoup(self._GetEpisodeHTML(season, episode, key).content, 'html.parser')
            providers = {}
            episode_links = soup.find_all(
                'li', class_=lambda x: x and x.startswith('episodeLink')
            )
            if not episode_links:
                self.logger.warning(f"No episode links found for {key} S{season}E{episode}")
                return providers
            for link in episode_links:
                provider_name_tag = link.find('h4')
                provider_name = provider_name_tag.text.strip() if provider_name_tag else None
                redirect_link_tag = link.find('a', class_='watchEpisode')
                redirect_link = redirect_link_tag['href'] if redirect_link_tag else None
                lang_key = link.get('data-lang-key')
                lang_key = int(lang_key) if lang_key and lang_key.isdigit() else None
                if provider_name and redirect_link and lang_key:
                    if provider_name not in providers:
                        providers[provider_name] = {}
                    providers[provider_name][lang_key] = f"{self.ANIWORLD_TO}{redirect_link}"
            self.logger.debug(f"Found {len(providers)} providers for {key} S{season}E{episode}")
            return providers
        except Exception as e:
            self.logger.error(f"Failed to parse providers from HTML: {e}")
            raise RetryableError(f"Provider parsing failed: {e}") from e

    def _get_redirect_link(self, season: int, episode: int, key: str, language: str = "German Dub"):
        """Get redirect link for episode with error handling.

        Returns a ``(redirect_url, provider_name)`` tuple or raises
        NonRetryableError when the language/provider is unavailable.
        """
        languageCode = self._GetLanguageKey(language)
        if not self.IsLanguage(season, episode, key, language):
            raise NonRetryableError(f"Language {language} not available for {key} S{season}E{episode}")
        providers = self._get_provider_from_html(season, episode, key)
        for provider_name, lang_dict in providers.items():
            if languageCode in lang_dict:
                return lang_dict[languageCode], provider_name
        raise NonRetryableError(f"No provider found for {language} in {key} S{season}E{episode}")

    def _get_embeded_link(self, season: int, episode: int, key: str, language: str = "German Dub"):
        """Get embedded link with error handling."""
        try:
            redirect_link, provider_name = self._get_redirect_link(season, episode, key, language)
            response = recovery_strategies.handle_network_failure(
                self.session.get,
                redirect_link,
                timeout=self.DEFAULT_REQUEST_TIMEOUT,
                headers={'User-Agent': self.RANDOM_USER_AGENT}
            )
            return response.url
        except Exception as e:
            self.logger.error(f"Failed to get embedded link: {e}")
            raise

    def _get_direct_link_from_provider(self, season: int, episode: int, key: str, language: str = "German Dub"):
        """Get direct download link from provider with error handling."""
        try:
            embedded_link = self._get_embeded_link(season, episode, key, language)
            if not embedded_link:
                raise NonRetryableError("No embedded link found")
            # Use VOE provider as default (could be made configurable)
            provider = self.Providers.GetProvider("VOE")
            if not provider:
                raise NonRetryableError("VOE provider not available")
            return provider.GetLink(embedded_link, self.DEFAULT_REQUEST_TIMEOUT)
        except Exception as e:
            self.logger.error(f"Failed to get direct link from provider: {e}")
            raise

    @with_error_recovery(max_retries=2, context="get_season_episode_count")
    def get_season_episode_count(self, slug: str) -> dict:
        """Get episode count per season with error handling.

        NOTE(review): uses the bare ``requests`` module here, bypassing
        the retry-configured session — possibly intentional, confirm.
        """
        try:
            base_url = f"{self.ANIWORLD_TO}/anime/stream/{slug}/"
            response = recovery_strategies.handle_network_failure(
                requests.get,
                base_url,
                timeout=self.DEFAULT_REQUEST_TIMEOUT
            )
            soup = BeautifulSoup(response.content, 'html.parser')
            season_meta = soup.find('meta', itemprop='numberOfSeasons')
            number_of_seasons = int(season_meta['content']) if season_meta else 0
            episode_counts = {}
            for season in range(1, number_of_seasons + 1):
                season_url = f"{base_url}staffel-{season}"
                season_response = recovery_strategies.handle_network_failure(
                    requests.get,
                    season_url,
                    timeout=self.DEFAULT_REQUEST_TIMEOUT
                )
                season_soup = BeautifulSoup(season_response.content, 'html.parser')
                episode_links = season_soup.find_all('a', href=True)
                unique_links = set(
                    link['href']
                    for link in episode_links
                    if f"staffel-{season}/episode-" in link['href']
                )
                episode_counts[season] = len(unique_links)
            return episode_counts
        except Exception as e:
            self.logger.error(f"Failed to get episode counts for {slug}: {e}")
            raise RetryableError(f"Episode count retrieval failed: {e}") from e

    def get_download_statistics(self) -> Dict[str, Any]:
        """Get download statistics (counters plus derived success rate)."""
        stats = self.download_stats.copy()
        stats['success_rate'] = (
            (stats['successful_downloads'] / stats['total_downloads'] * 100)
            if stats['total_downloads'] > 0 else 0
        )
        return stats

    def reset_statistics(self):
        """Reset download statistics."""
        self.download_stats = {
            'total_downloads': 0,
            'successful_downloads': 0,
            'failed_downloads': 0,
            'retried_downloads': 0
        }
# Backward-compatibility shim so code importing the old class name keeps working.
class AniworldLoader(EnhancedAniWorldLoader):
    """Alias: existing imports of ``AniworldLoader`` transparently receive
    the enhanced implementation."""

View File

@@ -1,10 +0,0 @@
from server.infrastructure.providers.aniworld_provider import AniworldLoader
from server.infrastructure.providers.base_provider import Loader
class Loaders:
    """Registry mapping a streaming-site hostname to its loader."""

    def __init__(self):
        # One loader instance per supported host.
        self.dict = {"aniworld.to": AniworldLoader()}

    def GetLoader(self, key: str) -> Loader:
        """Return the loader registered for *key*; raises KeyError if unknown."""
        registry = self.dict
        return registry[key]

View File

@@ -1,7 +0,0 @@
from abc import ABC, abstractmethod
class Provider(ABC):
    """Abstract base for hoster providers that resolve embed pages."""

    @abstractmethod
    def GetLink(self, embededLink: str, DEFAULT_REQUEST_TIMEOUT: int) -> (str, [str]):
        """Resolve *embededLink* to a direct media URL.

        NOTE(review): the ``(str, [str])`` annotation is not a valid typing
        construct; implementations return either a bare link string or a
        ``(link, headers)`` pair — presumably ``tuple[str, dict]`` was
        intended. Confirm against implementations before changing.
        """
        pass

View File

@@ -1,59 +0,0 @@
import re
import random
import time
from fake_useragent import UserAgent
import requests
from .Provider import Provider
class Doodstream(Provider):
    """Provider that resolves Doodstream embed pages to direct links."""

    def __init__(self):
        # One random User-Agent per instance, reused for all requests.
        self.RANDOM_USER_AGENT = UserAgent().random

    def GetLink(self, embededLink: str, DEFAULT_REQUEST_TIMEOUT: int) -> str:
        """Resolve a Doodstream embed URL to a direct, tokenized video link.

        Raises:
            ValueError: if the pass_md5 URL or the token cannot be found.
        """
        headers = {
            'User-Agent': self.RANDOM_USER_AGENT,
            'Referer': 'https://dood.li/'
        }

        def first_group(pattern, content):
            # First capture group of *pattern* in *content*, or None.
            found = re.search(pattern, content)
            return found.group(1) if found else None

        def random_token(length=10):
            alphabet = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789'
            return ''.join(random.choice(alphabet) for _ in range(length))

        page = requests.get(
            embededLink,
            headers=headers,
            timeout=DEFAULT_REQUEST_TIMEOUT,
            verify=False
        )
        page.raise_for_status()

        pass_md5_url = first_group(r"\$\.get\('([^']*\/pass_md5\/[^']*)'", page.text)
        if not pass_md5_url:
            raise ValueError(
                f'pass_md5 URL not found using {embededLink}.')
        full_md5_url = f"https://dood.li{pass_md5_url}"

        token = first_group(r"token=([a-zA-Z0-9]+)", page.text)
        if not token:
            raise ValueError(f'Token not found using {embededLink}.')

        md5_response = requests.get(
            full_md5_url, headers=headers,
            timeout=DEFAULT_REQUEST_TIMEOUT, verify=False)
        md5_response.raise_for_status()

        # The pass_md5 endpoint answers with the base of the video URL;
        # append a random suffix plus the token and current-time expiry.
        video_base_url = md5_response.text.strip()
        return (
            f"{video_base_url}{random_token(10)}"
            f"?token={token}&expiry={int(time.time())}"
        )

View File

@@ -1,51 +0,0 @@
import re
import requests
# import jsbeautifier.unpackers.packer as packer
from aniworld import config
REDIRECT_REGEX = re.compile(
r'<iframe *(?:[^>]+ )?src=(?:\'([^\']+)\'|"([^"]+)")[^>]*>')
SCRIPT_REGEX = re.compile(
r'(?s)<script\s+[^>]*?data-cfasync=["\']?false["\']?[^>]*>(.+?)</script>')
VIDEO_URL_REGEX = re.compile(r'file:\s*"([^"]+\.m3u8[^"]*)"')
# TODO Implement this script fully
def get_direct_link_from_filemoon(embeded_filemoon_link: str):
    """Resolve a Filemoon embed link to a direct .m3u8 video URL.

    Follows an optional iframe redirect, then unpacks the obfuscated
    (``eval(...)``-packed) player script to find the stream URL.

    Raises:
        RuntimeError: if the optional ``jsbeautifier`` dependency is missing.
        Exception: if no video link can be extracted.
    """
    # BUG FIX: the module-level ``packer`` import is commented out, so the
    # unpack step below crashed with a NameError. Import lazily and fail
    # with a clear message when the optional dependency is absent.
    try:
        import jsbeautifier.unpackers.packer as packer
    except ImportError as exc:
        raise RuntimeError(
            "jsbeautifier is required to unpack Filemoon player scripts"
        ) from exc

    session = requests.Session()
    session.verify = False
    headers = {
        "User-Agent": config.RANDOM_USER_AGENT,
        "Referer": embeded_filemoon_link,
    }
    response = session.get(embeded_filemoon_link, headers=headers)
    source = response.text

    # Some embeds are a thin iframe wrapper around the real player page.
    match = REDIRECT_REGEX.search(source)
    if match:
        redirect_url = match.group(1) or match.group(2)
        response = session.get(redirect_url, headers=headers)
        source = response.text

    for script_match in SCRIPT_REGEX.finditer(source):
        script_content = script_match.group(1).strip()
        if not script_content.startswith("eval("):
            continue
        if packer.detect(script_content):
            unpacked = packer.unpack(script_content)
            video_match = VIDEO_URL_REGEX.search(unpacked)
            if video_match:
                return video_match.group(1)
    raise Exception("No Video link found!")


if __name__ == '__main__':
    url = input("Enter Filemoon Link: ")
    print(get_direct_link_from_filemoon(url))

View File

@@ -1,90 +0,0 @@
import re
import json
import sys
import requests
from aniworld.config import DEFAULT_REQUEST_TIMEOUT
def fetch_page_content(url):
    """Download *url* and return its body text, or None on any request error."""
    try:
        response = requests.get(url, timeout=DEFAULT_REQUEST_TIMEOUT)
        response.raise_for_status()
    except requests.exceptions.RequestException as e:
        # Best-effort: report and let the caller handle the None.
        print(f"Failed to fetch the page content: {e}")
        return None
    return response.text
def extract_video_data(page_content):
    """Parse the ``videos_manifest`` JSON object embedded in *page_content*.

    Finds the single line mentioning ``videos_manifest`` and decodes the
    JSON object spanning that line's first ``{`` through its last ``}``.

    Raises:
        ValueError: if no such line exists.
    """
    manifest_line = re.search(r'^.*videos_manifest.*$', page_content, re.MULTILINE)
    if manifest_line is None:
        raise ValueError("Failed to extract video manifest from the response.")
    line = manifest_line.group(0)
    payload = line[line.find('{'):line.rfind('}') + 1]
    return json.loads(payload)
def get_streams(url):
    """Return the video name and available streams for a hanime.tv URL."""
    manifest = extract_video_data(fetch_page_content(url))
    video = manifest['state']['data']['video']
    return {
        "name": video['hentai_video']['name'],
        # Only the first server's stream list is exposed.
        "streams": video['videos_manifest']['servers'][0]['streams'],
    }
def display_streams(streams):
    """Print a numbered list of stream qualities, marking premium-only ones."""
    if not streams:
        print("No streams available.")
        return
    print("Available qualities:")
    for index, stream in enumerate(streams, 1):
        premium_tag = "" if stream['is_guest_allowed'] else "(Premium)"
        print(
            f"{index}. {stream['width']}x{stream['height']}\t"
            f"({stream['filesize_mbs']}MB) {premium_tag}")
def get_user_selection(streams):
    """Prompt for a 1-based stream number; return its 0-based index or None."""
    try:
        choice = int(input("Select a stream: ").strip()) - 1
    except ValueError:
        print("Invalid input.")
        return None
    if 0 <= choice < len(streams):
        return choice
    print("Invalid selection.")
    return None
def get_direct_link_from_hanime(url=None):
    """Interactively resolve a hanime.tv URL to its M3U8 stream URL.

    Reads the URL from argv or stdin when not supplied; prints the result
    instead of returning it. Ctrl-C exits quietly.
    """
    try:
        if url is None:
            url = sys.argv[1] if len(sys.argv) > 1 else input(
                "Please enter the hanime.tv video URL: ").strip()
        try:
            video_data = get_streams(url)
            print(f"Video: {video_data['name']}")
            print('*' * 40)
            display_streams(video_data['streams'])
            # Re-prompt until the user picks a valid stream.
            selected_index = None
            while selected_index is None:
                selected_index = get_user_selection(video_data['streams'])
            print(f"M3U8 URL: {video_data['streams'][selected_index]['url']}")
        except ValueError as e:
            print(f"Error: {e}")
    except KeyboardInterrupt:
        pass


if __name__ == "__main__":
    get_direct_link_from_hanime()

View File

@@ -1,35 +0,0 @@
import requests
import json
from urllib.parse import urlparse
# TODO Doesn't work on download yet and has to be implemented
def get_direct_link_from_loadx(embeded_loadx_link: str):
    """Resolve a LoadX embed link to a direct video URL.

    Follows the embed redirect to discover the player host and file hash,
    then asks the player endpoint for the video source.

    Raises:
        ValueError: if the redirect path is malformed or no source is found.
    """
    # HEAD with redirects reveals the canonical player URL.
    response = requests.head(
        embeded_loadx_link, allow_redirects=True, verify=False)
    parsed_url = urlparse(response.url)
    path_parts = parsed_url.path.split("/")
    if len(path_parts) < 3:
        raise ValueError("Invalid path!")
    id_hash = path_parts[2]
    host = parsed_url.netloc

    post_url = f"https://{host}/player/index.php?data={id_hash}&do=getVideo"
    headers = {"X-Requested-With": "XMLHttpRequest"}
    response = requests.post(post_url, headers=headers, verify=False)
    data = json.loads(response.text)
    # BUG FIX: removed leftover debug ``print(data)`` that polluted stdout.

    video_url = data.get("videoSource")
    if not video_url:
        raise ValueError("No Video link found!")
    return video_url


if __name__ == '__main__':
    url = input("Enter Loadx Link: ")
    print(get_direct_link_from_loadx(url))

View File

@@ -1,39 +0,0 @@
import re
import requests
from aniworld import config
def get_direct_link_from_luluvdo(embeded_luluvdo_link, arguments=None):
    """Resolve a Luluvdo embed link to a direct video URL.

    Args:
        embeded_luluvdo_link: Embed URL whose last path segment is the file code.
        arguments: Optional CLI namespace; when its ``action`` is "Download",
            an Accept-Language header is added (required for downloads).

    Raises:
        ValueError: if the page yields no file link.
    """
    luluvdo_id = embeded_luluvdo_link.split('/')[-1]
    filelink = (
        f"https://luluvdo.com/dl?op=embed&file_code={luluvdo_id}&embed=1&referer=luluvdo.com&adb=0"
    )
    # The User-Agent needs to be the same as the direct-link ones to work
    headers = {
        "Origin": "https://luluvdo.com",
        "Referer": "https://luluvdo.com/",
        "User-Agent": config.LULUVDO_USER_AGENT
    }
    # BUG FIX: the default ``arguments=None`` previously crashed with an
    # AttributeError on ``arguments.action``; guard against None.
    if arguments is not None and arguments.action == "Download":
        headers["Accept-Language"] = "de-DE,de;q=0.9,en-US;q=0.8,en;q=0.7"
    response = requests.get(filelink, headers=headers,
                            timeout=config.DEFAULT_REQUEST_TIMEOUT)
    if response.status_code == 200:
        pattern = r'file:\s*"([^"]+)"'
        matches = re.findall(pattern, str(response.text))
        if matches:
            return matches[0]
    raise ValueError("No match found")


if __name__ == '__main__':
    url = input("Enter Luluvdo Link: ")
    print(get_direct_link_from_luluvdo(url))

View File

@@ -1,43 +0,0 @@
import re
import base64
import requests
from aniworld.config import DEFAULT_REQUEST_TIMEOUT, RANDOM_USER_AGENT
SPEEDFILES_PATTERN = re.compile(r'var _0x5opu234 = "(?P<encoded_data>.*?)";')
def get_direct_link_from_speedfiles(embeded_speedfiles_link):
    """Resolve a SpeedFiles embed link to a direct video URL.

    Fetches the embed page and reverses the site's multi-step string
    obfuscation (base64 / case-swap / reverse / hex / character shift)
    to recover the direct link.

    Raises:
        ValueError: if the server reports itself down or the obfuscated
            payload cannot be located on the page.
    """
    response = requests.get(
        embeded_speedfiles_link,
        timeout=DEFAULT_REQUEST_TIMEOUT,
        headers={'User-Agent': RANDOM_USER_AGENT}
    )
    if "<span class=\"inline-block\">Web server is down</span>" in response.text:
        raise ValueError(
            "The SpeedFiles server is currently down.\n"
            "Please try again later or choose a different hoster."
        )
    match = SPEEDFILES_PATTERN.search(response.text)
    if not match:
        raise ValueError("Pattern not found in the response.")
    encoded_data = match.group("encoded_data")
    # The steps below undo the site's encoder in exact reverse order;
    # the step order is load-bearing — do not reorder.
    decoded = base64.b64decode(encoded_data).decode()
    decoded = decoded.swapcase()[::-1]
    decoded = base64.b64decode(decoded).decode()[::-1]
    # Interpret the string as pairs of hex digits, one character per pair.
    decoded_hex = ''.join(chr(int(decoded[i:i + 2], 16))
                          for i in range(0, len(decoded), 2))
    # Undo a Caesar-style +3 character shift.
    shifted = ''.join(chr(ord(char) - 3) for char in decoded_hex)
    result = base64.b64decode(shifted.swapcase()[::-1]).decode()
    return result
if __name__ == '__main__':
    speedfiles_link = input("Enter Speedfiles Link: ")
    print(get_direct_link_from_speedfiles(
        embeded_speedfiles_link=speedfiles_link))

View File

@@ -1,2 +0,0 @@
def get_direct_link_from_streamtape(embeded_streamtape_link: str) -> str:
    """Resolve a Streamtape embed link to a direct video URL.

    Not implemented yet. The previous stub silently returned ``None``,
    which callers could mistake for a valid link; fail loudly instead.

    Raises:
        NotImplementedError: always, until Streamtape support is written.
    """
    raise NotImplementedError("Streamtape support is not implemented yet")

View File

@@ -1,34 +0,0 @@
import re
import requests
from bs4 import BeautifulSoup
from aniworld.config import DEFAULT_REQUEST_TIMEOUT, RANDOM_USER_AGENT
def get_direct_link_from_vidmoly(embeded_vidmoly_link: str):
    """Resolve a Vidmoly embed link by scraping the inline player scripts.

    Raises:
        ValueError: if no ``file: "https://..."`` entry is found.
    """
    response = requests.get(
        embeded_vidmoly_link,
        headers={'User-Agent': RANDOM_USER_AGENT},
        timeout=DEFAULT_REQUEST_TIMEOUT
    )
    html_content = response.text
    soup = BeautifulSoup(html_content, 'html.parser')
    pattern = r'file:\s*"(https?://.*?)"'
    # The direct link is embedded in an inline script as: file: "https://..."
    for script in soup.find_all('script'):
        if not script.string:
            continue
        found = re.search(pattern, script.string)
        if found:
            return found.group(1)
    raise ValueError("No direct link found.")


if __name__ == '__main__':
    link = input("Enter Vidmoly Link: ")
    print('Note: --referer "https://vidmoly.to"')
    print(get_direct_link_from_vidmoly(embeded_vidmoly_link=link))

View File

@@ -1,29 +0,0 @@
import re
import requests
from bs4 import BeautifulSoup
from aniworld.config import DEFAULT_REQUEST_TIMEOUT, RANDOM_USER_AGENT
def get_direct_link_from_vidoza(embeded_vidoza_link: str) -> str:
    """Resolve a Vidoza embed link by reading the sourcesCode player config.

    Raises:
        ValueError: if no ``src: "..."`` entry is found in any script tag.
    """
    response = requests.get(
        embeded_vidoza_link,
        headers={'User-Agent': RANDOM_USER_AGENT},
        timeout=DEFAULT_REQUEST_TIMEOUT
    )
    soup = BeautifulSoup(response.content, "html.parser")
    for script in soup.find_all('script'):
        if 'sourcesCode:' not in script.text:
            continue
        src_match = re.search(r'src: "(.*?)"', script.text)
        if src_match:
            return src_match.group(1)
    raise ValueError("No direct link found.")


if __name__ == '__main__':
    link = input("Enter Vidoza Link: ")
    print(get_direct_link_from_vidoza(embeded_vidoza_link=link))

View File

@@ -1,113 +0,0 @@
import re
import base64
import json
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry
import requests
from bs4 import BeautifulSoup
from fake_useragent import UserAgent
from .Provider import Provider
# Compile regex patterns once for better performance
REDIRECT_PATTERN = re.compile(r"https?://[^'\"<>]+")
B64_PATTERN = re.compile(r"var a168c='([^']+)'")
HLS_PATTERN = re.compile(r"'hls': '(?P<hls>[^']+)'")
class VOE(Provider):
    """Provider that resolves VOE embed pages to direct HLS links."""

    def __init__(self):
        # One random User-Agent per instance, reused for all requests.
        self.RANDOM_USER_AGENT = UserAgent().random
        self.Header = {
            "User-Agent": self.RANDOM_USER_AGENT
        }

    def GetLink(self, embededLink: str, DEFAULT_REQUEST_TIMEOUT: int) -> (str, [str]):
        """Resolve *embededLink*; return a ``(direct_link, headers)`` pair.

        Tries three extraction strategies in order: the JSON script tag,
        the base64 ``a168c`` variable, then the ``'hls'`` source.

        Raises:
            ValueError: if no redirect or no video source can be found.
        """
        self.session = requests.Session()
        # Configure retries with backoff
        retries = Retry(
            total=5,  # Number of retries
            backoff_factor=1,  # Delay multiplier (1s, 2s, 4s, ...)
            status_forcelist=[500, 502, 503, 504],  # Retry for specific HTTP errors
            allowed_methods=["GET"]
        )
        adapter = HTTPAdapter(max_retries=retries)
        self.session.mount("https://", adapter)
        # BUG FIX: the timeout parameter was previously clobbered by a
        # hard-coded ``DEFAULT_REQUEST_TIMEOUT = 30``; honor the caller's value.
        response = self.session.get(
            embededLink,
            headers={'User-Agent': self.RANDOM_USER_AGENT},
            timeout=DEFAULT_REQUEST_TIMEOUT
        )
        # Use the module-level compiled pattern instead of re-compiling.
        redirect = REDIRECT_PATTERN.search(response.text)
        if not redirect:
            raise ValueError("No redirect found.")
        redirect_url = redirect.group(0)
        parts = redirect_url.strip().split("/")
        self.Header["Referer"] = f"{parts[0]}//{parts[2]}/"
        response = self.session.get(
            redirect_url, headers={'User-Agent': self.RANDOM_USER_AGENT})
        html = response.content

        # Method 1: Extract from script tag.
        # BUG FIX: a failure here (missing tag -> AttributeError, or decode
        # error) used to propagate and skip methods 2 and 3 entirely.
        try:
            extracted = self.extract_voe_from_script(html)
        except Exception:
            extracted = None
        if extracted:
            return extracted, self.Header

        # Method 2: Extract from base64 encoded variable
        htmlText = html.decode('utf-8')
        b64_match = B64_PATTERN.search(htmlText)
        if b64_match:
            decoded = base64.b64decode(b64_match.group(1)).decode()[::-1]
            source = json.loads(decoded).get("source")
            if source:
                return source, self.Header

        # Method 3: Extract HLS source
        hls_match = HLS_PATTERN.search(htmlText)
        if hls_match:
            return base64.b64decode(hls_match.group("hls")).decode(), self.Header

        # BUG FIX: previously fell through and implicitly returned None.
        raise ValueError("No video source found on VOE page.")

    def shift_letters(self, input_str):
        """Apply ROT13 to ASCII letters; other characters pass through."""
        result = ''
        for c in input_str:
            code = ord(c)
            if 65 <= code <= 90:
                code = (code - 65 + 13) % 26 + 65
            elif 97 <= code <= 122:
                code = (code - 97 + 13) % 26 + 97
            result += chr(code)
        return result

    def replace_junk(self, input_str):
        """Replace the known junk separator sequences with underscores."""
        junk_parts = ['@$', '^^', '~@', '%?', '*~', '!!', '#&']
        for part in junk_parts:
            input_str = re.sub(re.escape(part), '_', input_str)
        return input_str

    def shift_back(self, s, n):
        """Shift every character of *s* back by *n* code points."""
        return ''.join(chr(ord(c) - n) for c in s)

    def decode_voe_string(self, encoded):
        """Undo VOE's layered obfuscation and return the decoded JSON object.

        The step order mirrors the site's JS encoder in reverse —
        do not reorder.
        """
        step1 = self.shift_letters(encoded)
        step2 = self.replace_junk(step1).replace('_', '')
        step3 = base64.b64decode(step2).decode()
        step4 = self.shift_back(step3, 3)
        step5 = base64.b64decode(step4[::-1]).decode()
        return json.loads(step5)

    def extract_voe_from_script(self, html):
        """Extract the ``source`` URL from the application/json script tag.

        Raises when the tag is absent or malformed; ``GetLink`` treats that
        as a signal to try the other extraction methods.
        """
        soup = BeautifulSoup(html, "html.parser")
        script = soup.find("script", type="application/json")
        return self.decode_voe_string(script.text[2:-2])["source"]

View File

@@ -9328,3 +9328,465 @@
2025-09-29 15:56:13 - INFO - application.services.scheduler_service - stop_scheduler - Scheduled operations stopped
2025-09-29 15:56:13 - INFO - __main__ - <module> - Scheduler stopped
2025-09-29 15:56:13 - INFO - root - cleanup_on_exit - Application cleanup completed
2025-09-29 16:18:51 - INFO - __main__ - <module> - Enhanced logging system initialized
2025-09-29 16:18:51 - INFO - __main__ - <module> - Enhanced logging system initialized
2025-09-29 16:18:51 - INFO - __main__ - <module> - Starting Aniworld Flask server...
2025-09-29 16:18:51 - INFO - __main__ - <module> - Anime directory: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien
2025-09-29 16:18:51 - INFO - __main__ - <module> - Log level: INFO
2025-09-29 16:18:51 - INFO - __main__ - <module> - Scheduled operations disabled
2025-09-29 16:18:51 - INFO - __main__ - <module> - Server will be available at http://localhost:5000
2025-09-29 16:18:53 - INFO - __main__ - <module> - Enhanced logging system initialized
2025-09-29 16:18:53 - INFO - __main__ - <module> - Enhanced logging system initialized
2025-09-29 16:18:53 - INFO - root - __init__ - Initialized Loader with base path: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien
2025-09-29 16:18:53 - INFO - root - load_series - Scanning anime folders in: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien
2025-09-29 16:18:53 - WARNING - root - load_series - Skipping .deletedByTMM - No data folder found
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\2.5 Dimensional Seduction (2024)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\2.5 Dimensional Seduction (2024)\data for 2.5 Dimensional Seduction (2024)
2025-09-29 16:18:53 - WARNING - root - load_series - Skipping 25-dimensional-seduction - No data folder found
2025-09-29 16:18:53 - WARNING - root - load_series - Skipping 25-sai no Joshikousei (2018) - No data folder found
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\7th Time Loop The Villainess Enjoys a Carefree Life Married to Her Worst Enemy! (2024)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\7th Time Loop The Villainess Enjoys a Carefree Life Married to Her Worst Enemy! (2024)\data for 7th Time Loop The Villainess Enjoys a Carefree Life Married to Her Worst Enemy! (2024)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\9-nine-rulers-crown\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\9-nine-rulers-crown\data for 9-nine-rulers-crown
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\A Couple of Cuckoos (2022)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\A Couple of Cuckoos (2022)\data for A Couple of Cuckoos (2022)
2025-09-29 16:18:53 - WARNING - root - load_series - Skipping A Time Called You (2023) - No data folder found
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\A.I.C.O. Incarnation (2018)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\A.I.C.O. Incarnation (2018)\data for A.I.C.O. Incarnation (2018)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Aesthetica of a Rogue Hero (2012)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Aesthetica of a Rogue Hero (2012)\data for Aesthetica of a Rogue Hero (2012)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Alya Sometimes Hides Her Feelings in Russian (2024)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Alya Sometimes Hides Her Feelings in Russian (2024)\data for Alya Sometimes Hides Her Feelings in Russian (2024)
2025-09-29 16:18:53 - WARNING - root - load_series - Skipping American Horror Story (2011) - No data folder found
2025-09-29 16:18:53 - WARNING - root - load_series - Skipping Andor (2022) - No data folder found
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Angels of Death (2018)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Angels of Death (2018)\data for Angels of Death (2018)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Aokana Four Rhythm Across the Blue (2016)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Aokana Four Rhythm Across the Blue (2016)\data for Aokana Four Rhythm Across the Blue (2016)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Arifureta (2019)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Arifureta (2019)\data for Arifureta (2019)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\As a Reincarnated Aristocrat, I'll Use My Appraisal Skill to Rise in the World (2024)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\As a Reincarnated Aristocrat, I'll Use My Appraisal Skill to Rise in the World (2024)\data for As a Reincarnated Aristocrat, I'll Use My Appraisal Skill to Rise in the World (2024)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\BOFURI I Don't Want to Get Hurt, so I'll Max Out My Defense. (2020)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\BOFURI I Don't Want to Get Hurt, so I'll Max Out My Defense. (2020)\data for BOFURI I Don't Want to Get Hurt, so I'll Max Out My Defense. (2020)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Black Butler (2008)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Black Butler (2008)\data for Black Butler (2008)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Black Clover (2017)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Black Clover (2017)\data for Black Clover (2017)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blast of Tempest (2012)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blast of Tempest (2012)\data for Blast of Tempest (2012)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blood Lad (2013)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blood Lad (2013)\data for Blood Lad (2013)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blue Box (2024)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blue Box (2024)\data for Blue Box (2024)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blue Exorcist (2011)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blue Exorcist (2011)\data for Blue Exorcist (2011)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Bogus Skill Fruitmaster About That Time I Became Able to Eat Unlimited Numbers of Skill Fruits (That Kill You) (2025)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Bogus Skill Fruitmaster About That Time I Became Able to Eat Unlimited Numbers of Skill Fruits (That Kill You) (2025)\data for Bogus Skill Fruitmaster About That Time I Became Able to Eat Unlimited Numbers of Skill Fruits (That Kill You) (2025)
2025-09-29 16:18:53 - WARNING - root - load_series - Skipping Boys Over Flowers (2009) - No data folder found
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Burst Angel (2004)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Burst Angel (2004)\data for Burst Angel (2004)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\By the Grace of the Gods (2020)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\By the Grace of the Gods (2020)\data for By the Grace of the Gods (2020)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Call of the Night (2022)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Call of the Night (2022)\data for Call of the Night (2022)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Campfire Cooking in Another World with My Absurd Skill (2023)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Campfire Cooking in Another World with My Absurd Skill (2023)\data for Campfire Cooking in Another World with My Absurd Skill (2023)
2025-09-29 16:18:53 - WARNING - root - load_series - Skipping Celebrity (2023) - No data folder found
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Chainsaw Man (2022)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Chainsaw Man (2022)\data for Chainsaw Man (2022)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Charlotte (2015)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Charlotte (2015)\data for Charlotte (2015)
2025-09-29 16:18:53 - WARNING - root - load_series - Skipping Cherish the Day (2020) - No data folder found
2025-09-29 16:18:53 - WARNING - root - load_series - Skipping Chernobyl (2019) - No data folder found
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Chillin in Another World with Level 2 Super Cheat Powers (2024)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Chillin in Another World with Level 2 Super Cheat Powers (2024)\data for Chillin in Another World with Level 2 Super Cheat Powers (2024)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Clannad (2007)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Clannad (2007)\data for Clannad (2007)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Classroom of the Elite (2017)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Classroom of the Elite (2017)\data for Classroom of the Elite (2017)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Clevatess (2025)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Clevatess (2025)\data for Clevatess (2025)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\DAN DA DAN (2024)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\DAN DA DAN (2024)\data for DAN DA DAN (2024)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Danmachi Is It Wrong to Try to Pick Up Girls in a Dungeon (2015)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Danmachi Is It Wrong to Try to Pick Up Girls in a Dungeon (2015)\data for Danmachi Is It Wrong to Try to Pick Up Girls in a Dungeon (2015)
2025-09-29 16:18:53 - WARNING - root - load_series - Skipping Das Buch von Boba Fett (2021) - No data folder found
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Date a Live (2013)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Date a Live (2013)\data for Date a Live (2013)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dead Mount Death Play (2023)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dead Mount Death Play (2023)\data for Dead Mount Death Play (2023)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Deadman Wonderland (2011)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Deadman Wonderland (2011)\data for Deadman Wonderland (2011)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dealing with Mikadono Sisters Is a Breeze (2025)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dealing with Mikadono Sisters Is a Breeze (2025)\data for Dealing with Mikadono Sisters Is a Breeze (2025)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Delicious in Dungeon (2024)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Delicious in Dungeon (2024)\data for Delicious in Dungeon (2024)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Demon Lord, Retry! (2019)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Demon Lord, Retry! (2019)\data for Demon Lord, Retry! (2019)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Demon Slave - The Chained Soldier (2024)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Demon Slave - The Chained Soldier (2024)\data for Demon Slave - The Chained Soldier (2024)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Demon Slayer Kimetsu no Yaiba (2019)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Demon Slayer Kimetsu no Yaiba (2019)\data for Demon Slayer Kimetsu no Yaiba (2019)
2025-09-29 16:18:53 - WARNING - root - load_series - Skipping Der Herr der Ringe Die Ringe der Macht (2022) - No data folder found
2025-09-29 16:18:53 - WARNING - root - load_series - Skipping Devil in Ohio (2022) - No data folder found
2025-09-29 16:18:53 - WARNING - root - load_series - Skipping Die Bibel (2013) - No data folder found
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Die Tagebücher der Apothekerin (2023)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Die Tagebücher der Apothekerin (2023)\data for Die Tagebücher der Apothekerin (2023)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Domestic Girlfriend (2019)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Domestic Girlfriend (2019)\data for Domestic Girlfriend (2019)
2025-09-29 16:18:53 - WARNING - root - load_series - Skipping Doona! (2023) - No data folder found
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dr. STONE (2019)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dr. STONE (2019)\data for Dr. STONE (2019)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dragonball Super (2015)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dragonball Super (2015)\data for Dragonball Super (2015)
2025-09-29 16:18:53 - WARNING - root - load_series - Skipping Failure Frame I Became the Strongest and Annihilated Everything With Low-Level Spells (2024) - No data folder found
2025-09-29 16:18:53 - WARNING - root - load_series - Skipping Fallout (2024) - No data folder found
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Farming Life in Another World (2023)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Farming Life in Another World (2023)\data for Farming Life in Another World (2023)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Frieren - Nach dem Ende der Reise (2023)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Frieren - Nach dem Ende der Reise (2023)\data for Frieren - Nach dem Ende der Reise (2023)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Fruits Basket (2019)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Fruits Basket (2019)\data for Fruits Basket (2019)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Gachiakuta (2025)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Gachiakuta (2025)\data for Gachiakuta (2025)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Gate (2015)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Gate (2015)\data for Gate (2015)
2025-09-29 16:18:53 - WARNING - root - load_series - Skipping Generation der Verdammten (2014) - No data folder found
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Girls und Panzer (2012)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Girls und Panzer (2012)\data for Girls und Panzer (2012)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Gleipnir (2020)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Gleipnir (2020)\data for Gleipnir (2020)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Golden Time (2013)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Golden Time (2013)\data for Golden Time (2013)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Grimgar, Ashes and Illusions (2016)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Grimgar, Ashes and Illusions (2016)\data for Grimgar, Ashes and Illusions (2016)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Harem in the Labyrinth of Another World (2022)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Harem in the Labyrinth of Another World (2022)\data for Harem in the Labyrinth of Another World (2022)
2025-09-29 16:18:53 - WARNING - root - load_series - Skipping Highschool D×D (2012) - No data folder found
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Hinamatsuri (2018)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Hinamatsuri (2018)\data for Hinamatsuri (2018)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I Got a Cheat Skill in Another World and Became Unrivaled in The Real World Too (2023)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I Got a Cheat Skill in Another World and Became Unrivaled in The Real World Too (2023)\data for I Got a Cheat Skill in Another World and Became Unrivaled in The Real World Too (2023)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I Parry Everything What Do You Mean Im the Strongest Im Not Even an Adventurer Yet! (2024)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I Parry Everything What Do You Mean Im the Strongest Im Not Even an Adventurer Yet! (2024)\data for I Parry Everything What Do You Mean Im the Strongest Im Not Even an Adventurer Yet! (2024)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I'm the Evil Lord of an Intergalactic Empire! (2025)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I'm the Evil Lord of an Intergalactic Empire! (2025)\data for I'm the Evil Lord of an Intergalactic Empire! (2025)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I've Been Killing Slimes for 300 Years and Maxed Out My Level (2021)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I've Been Killing Slimes for 300 Years and Maxed Out My Level (2021)\data for I've Been Killing Slimes for 300 Years and Maxed Out My Level (2021)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\In the Land of Leadale (2022)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\In the Land of Leadale (2022)\data for In the Land of Leadale (2022)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Ishura (2024)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Ishura (2024)\data for Ishura (2024)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Ill Become a Villainess Who Goes Down in History (2024)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Ill Become a Villainess Who Goes Down in History (2024)\data for Ill Become a Villainess Who Goes Down in History (2024)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\JUJUTSU KAISEN (2020)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\JUJUTSU KAISEN (2020)\data for JUJUTSU KAISEN (2020)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kaguya-sama Love is War (2019)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kaguya-sama Love is War (2019)\data for Kaguya-sama Love is War (2019)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kaiju No. 8 (20200)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kaiju No. 8 (20200)\data for Kaiju No. 8 (20200)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\KamiKatsu Meine Arbeit als Missionar in einer gottlosen Welt (2023)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\KamiKatsu Meine Arbeit als Missionar in einer gottlosen Welt (2023)\data for KamiKatsu Meine Arbeit als Missionar in einer gottlosen Welt (2023)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Knight's & Magic (2017)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Knight's & Magic (2017)\data for Knight's & Magic (2017)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kombattanten werden entsandt! (2021)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kombattanten werden entsandt! (2021)\data for Kombattanten werden entsandt! (2021)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\KonoSuba An Explosion on This Wonderful World! (2023)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\KonoSuba An Explosion on This Wonderful World! (2023)\data for KonoSuba An Explosion on This Wonderful World! (2023)
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Konosuba God's Blessing on This Wonderful World! (2016)\data
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Konosuba God's Blessing on This Wonderful World! (2016)\data for Konosuba God's Blessing on This Wonderful World! (2016)
2025-09-29 16:18:53 - WARNING - root - load_series - Skipping Krieg der Welten (2019) - No data folder found
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kuma Kuma Kuma Bear (2020)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kuma Kuma Kuma Bear (2020)\data for Kuma Kuma Kuma Bear (2020)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Log Horizon (2013)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Log Horizon (2013)\data for Log Horizon (2013)
2025-09-29 16:18:54 - WARNING - root - load_series - Skipping Loki (2021) - No data folder found
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Loner Life in Another World (2024)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Loner Life in Another World (2024)\data for Loner Life in Another World (2024)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Lord of Mysteries (2025)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Lord of Mysteries (2025)\data for Lord of Mysteries (2025)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Lycoris Recoil (2022)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Lycoris Recoil (2022)\data for Lycoris Recoil (2022)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Magic Maker How to Make Magic in Another World (2025)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Magic Maker How to Make Magic in Another World (2025)\data for Magic Maker How to Make Magic in Another World (2025)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Magical Girl Site (2018)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Magical Girl Site (2018)\data for Magical Girl Site (2018)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Management of a Novice Alchemist (2022)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Management of a Novice Alchemist (2022)\data for Management of a Novice Alchemist (2022)
2025-09-29 16:18:54 - WARNING - root - load_series - Skipping Marianne (2019) - No data folder found
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Meine Wiedergeburt als Schleim in einer anderen Welt (2018)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Meine Wiedergeburt als Schleim in einer anderen Welt (2018)\data for Meine Wiedergeburt als Schleim in einer anderen Welt (2018)
2025-09-29 16:18:54 - WARNING - root - load_series - Skipping Midnight Mass (2021) - No data folder found
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Mirai Nikki (2011)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Mirai Nikki (2011)\data for Mirai Nikki (2011)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Miss Kobayashi's Dragon Maid (2017)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Miss Kobayashi's Dragon Maid (2017)\data for Miss Kobayashi's Dragon Maid (2017)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Mob Psycho 100 (2016)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Mob Psycho 100 (2016)\data for Mob Psycho 100 (2016)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\More than a Married Couple, but Not Lovers (2022)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\More than a Married Couple, but Not Lovers (2022)\data for More than a Married Couple, but Not Lovers (2022)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Mushoku Tensei Jobless Reincarnation (2021)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Mushoku Tensei Jobless Reincarnation (2021)\data for Mushoku Tensei Jobless Reincarnation (2021)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Hero Academia Vigilantes (2025)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Hero Academia Vigilantes (2025)\data for My Hero Academia Vigilantes (2025)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Instant Death Ability Is So Overpowered, No One in This Other World Stands a Chance Against Me! (2024)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Instant Death Ability Is So Overpowered, No One in This Other World Stands a Chance Against Me! (2024)\data for My Instant Death Ability Is So Overpowered, No One in This Other World Stands a Chance Against Me! (2024)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Isekai Life (2022)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Isekai Life (2022)\data for My Isekai Life (2022)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Life as Inukai-san's Dog (2023)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Life as Inukai-san's Dog (2023)\data for My Life as Inukai-san's Dog (2023)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Unique Skill Makes Me OP even at Level 1 (2023)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Unique Skill Makes Me OP even at Level 1 (2023)\data for My Unique Skill Makes Me OP even at Level 1 (2023)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\New Saga (2025)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\New Saga (2025)\data for New Saga (2025)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Nina the Starry Bride (2024)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Nina the Starry Bride (2024)\data for Nina the Starry Bride (2024)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Nisekoi Liebe, Lügen & Yakuza (2014)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Nisekoi Liebe, Lügen & Yakuza (2014)\data for Nisekoi Liebe, Lügen & Yakuza (2014)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\No Game No Life (2014)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\No Game No Life (2014)\data for No Game No Life (2014)
2025-09-29 16:18:54 - WARNING - root - load_series - Skipping Obi-Wan Kenobi (2022) - No data folder found
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Orange (2016)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Orange (2016)\data for Orange (2016)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Peach Boy Riverside (2021)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Peach Boy Riverside (2021)\data for Peach Boy Riverside (2021)
2025-09-29 16:18:54 - WARNING - root - load_series - Skipping Penny Dreadful (2014) - No data folder found
2025-09-29 16:18:54 - WARNING - root - load_series - Skipping Planet Erde II Eine Erde - viele Welten (2016) - No data folder found
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Plastic Memories (2015)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Plastic Memories (2015)\data for Plastic Memories (2015)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Ragna Crimson (2023)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Ragna Crimson (2023)\data for Ragna Crimson (2023)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Rascal Does Not Dream of Bunny Girl Senpai (2018)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Rascal Does Not Dream of Bunny Girl Senpai (2018)\data for Rascal Does Not Dream of Bunny Girl Senpai (2018)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\ReMonster (2024)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\ReMonster (2024)\data for ReMonster (2024)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\ReZERO - Starting Life in Another World (2016)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\ReZERO - Starting Life in Another World (2016)\data for ReZERO - Starting Life in Another World (2016)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Reborn as a Vending Machine, I Now Wander the Dungeon (2023)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Reborn as a Vending Machine, I Now Wander the Dungeon (2023)\data for Reborn as a Vending Machine, I Now Wander the Dungeon (2023)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Redo of Healer (2021)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Redo of Healer (2021)\data for Redo of Healer (2021)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Rick and Morty (2013)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Rick and Morty (2013)\data for Rick and Morty (2013)
2025-09-29 16:18:54 - WARNING - root - load_series - Skipping Rocket & Groot (2017) - No data folder found
2025-09-29 16:18:54 - WARNING - root - load_series - Skipping Romulus (2020) - No data folder found
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Saga of Tanya the Evil (2017)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Saga of Tanya the Evil (2017)\data for Saga of Tanya the Evil (2017)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Seirei Gensouki Spirit Chronicles (2021)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Seirei Gensouki Spirit Chronicles (2021)\data for Seirei Gensouki Spirit Chronicles (2021)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Shangri-La Frontier (2023)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Shangri-La Frontier (2023)\data for Shangri-La Frontier (2023)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\She Professed Herself Pupil of the Wise Man (2022)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\She Professed Herself Pupil of the Wise Man (2022)\data for She Professed Herself Pupil of the Wise Man (2022)
2025-09-29 16:18:54 - WARNING - root - load_series - Skipping She-Hulk Die Anwältin (2022) - No data folder found
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Solo Leveling (2024)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Solo Leveling (2024)\data for Solo Leveling (2024)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Spice and Wolf (2008)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Spice and Wolf (2008)\data for Spice and Wolf (2008)
2025-09-29 16:18:54 - WARNING - root - load_series - Skipping Star Trek Discovery (2017) - No data folder found
2025-09-29 16:18:54 - WARNING - root - load_series - Skipping Stargate (1997) - No data folder found
2025-09-29 16:18:54 - WARNING - root - load_series - Skipping Stargate Atlantis (2004) - No data folder found
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Steins;Gate (2011)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Steins;Gate (2011)\data for Steins;Gate (2011)
2025-09-29 16:18:54 - WARNING - root - load_series - Skipping Sweet Tooth (2021) - No data folder found
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Sword of the Demon Hunter Kijin Gen (2025)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Sword of the Demon Hunter Kijin Gen (2025)\data for Sword of the Demon Hunter Kijin Gen (2025)
2025-09-29 16:18:54 - WARNING - root - load_series - Skipping Tales from the Loop (2020) - No data folder found
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tamako Market (2013)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tamako Market (2013)\data for Tamako Market (2013)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Ancient Magus' Bride (2017)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Ancient Magus' Bride (2017)\data for The Ancient Magus' Bride (2017)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Demon Sword Master of Excalibur Academy (2023)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Demon Sword Master of Excalibur Academy (2023)\data for The Demon Sword Master of Excalibur Academy (2023)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Devil is a Part-Timer! (2013)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Devil is a Part-Timer! (2013)\data for The Devil is a Part-Timer! (2013)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Dreaming Boy is a Realist (2023)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Dreaming Boy is a Realist (2023)\data for The Dreaming Boy is a Realist (2023)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Dungeon of Black Company (2021)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Dungeon of Black Company (2021)\data for The Dungeon of Black Company (2021)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Eminence in Shadow (2022)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Eminence in Shadow (2022)\data for The Eminence in Shadow (2022)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Familiar of Zero (2006)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Familiar of Zero (2006)\data for The Familiar of Zero (2006)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Faraway Paladin (2021)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Faraway Paladin (2021)\data for The Faraway Paladin (2021)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Gorilla Gods Go-To Girl (2025)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Gorilla Gods Go-To Girl (2025)\data for The Gorilla Gods Go-To Girl (2025)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Hidden Dungeon Only I Can Enter (2021)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Hidden Dungeon Only I Can Enter (2021)\data for The Hidden Dungeon Only I Can Enter (2021)
2025-09-29 16:18:54 - WARNING - root - load_series - Skipping The Last of Us (2023) - No data folder found
2025-09-29 16:18:54 - WARNING - root - load_series - Skipping The Man in the High Castle (2015) - No data folder found
2025-09-29 16:18:54 - WARNING - root - load_series - Skipping The Mandalorian (2019) - No data folder found
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Quintessential Quintuplets (2019)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Quintessential Quintuplets (2019)\data for The Quintessential Quintuplets (2019)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Saints Magic Power is Omnipotent (2021)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Saints Magic Power is Omnipotent (2021)\data for The Saints Magic Power is Omnipotent (2021)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Too-Perfect Saint Tossed Aside by My Fiance and Sold to Another Kingdom (2025)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Too-Perfect Saint Tossed Aside by My Fiance and Sold to Another Kingdom (2025)\data for The Too-Perfect Saint Tossed Aside by My Fiance and Sold to Another Kingdom (2025)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Unaware Atelier Meister (2025)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Unaware Atelier Meister (2025)\data for The Unaware Atelier Meister (2025)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Weakest Tamer Began a Journey to Pick Up Trash (2024)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Weakest Tamer Began a Journey to Pick Up Trash (2024)\data for The Weakest Tamer Began a Journey to Pick Up Trash (2024)
2025-09-29 16:18:54 - WARNING - root - load_series - Skipping The Witcher (2019) - No data folder found
2025-09-29 16:18:54 - WARNING - root - load_series - Skipping The World's Finest Assassin Gets Reincarnated in Another World as an Aristocrat (2021) - No data folder found
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\To Your Eternity (2021)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\To Your Eternity (2021)\data for To Your Eternity (2021)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tomo-chan Is a Girl! (2023)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tomo-chan Is a Girl! (2023)\data for Tomo-chan Is a Girl! (2023)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tonikawa Over the Moon for You (2020)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tonikawa Over the Moon for You (2020)\data for Tonikawa Over the Moon for You (2020)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tsukimichi Moonlit Fantasy (2021)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tsukimichi Moonlit Fantasy (2021)\data for Tsukimichi Moonlit Fantasy (2021)
2025-09-29 16:18:54 - WARNING - root - load_series - Skipping Unidentified - Die wahren X-Akten (2019) - No data folder found
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Unnamed Memory (2024)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Unnamed Memory (2024)\data for Unnamed Memory (2024)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Vom Landei zum Schwertheiligen (2025)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Vom Landei zum Schwertheiligen (2025)\data for Vom Landei zum Schwertheiligen (2025)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\WIND BREAKER (2024)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\WIND BREAKER (2024)\data for WIND BREAKER (2024)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\WITCH WATCH (2025)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\WITCH WATCH (2025)\data for WITCH WATCH (2025)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Wolf Girl & Black Prince (2014)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Wolf Girl & Black Prince (2014)\data for Wolf Girl & Black Prince (2014)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Worlds End Harem (2022)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Worlds End Harem (2022)\data for Worlds End Harem (2022)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Zom 100 Bucket List of the Dead (2023)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Zom 100 Bucket List of the Dead (2023)\data for Zom 100 Bucket List of the Dead (2023)
2025-09-29 16:18:54 - WARNING - root - load_series - Skipping a-couple-of-cuckoos - No data folder found
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\a-ninja-and-an-assassin-under-one-roof\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\a-ninja-and-an-assassin-under-one-roof\data for a-ninja-and-an-assassin-under-one-roof
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\a-nobodys-way-up-to-an-exploration-hero\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\a-nobodys-way-up-to-an-exploration-hero\data for a-nobodys-way-up-to-an-exploration-hero
2025-09-29 16:18:54 - WARNING - root - load_series - Skipping a-silent-voice - No data folder found
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\am-i-actually-the-strongest\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\am-i-actually-the-strongest\data for am-i-actually-the-strongest
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\anne-shirley\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\anne-shirley\data for anne-shirley
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\apocalypse-bringer-mynoghra\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\apocalypse-bringer-mynoghra\data for apocalypse-bringer-mynoghra
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\banished-from-the-heros-party-i-decided-to-live-a-quiet-life-in-the-countryside\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\banished-from-the-heros-party-i-decided-to-live-a-quiet-life-in-the-countryside\data for banished-from-the-heros-party-i-decided-to-live-a-quiet-life-in-the-countryside
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\beheneko the elf girls cat is secretly an s ranked monster (2025) (2025)\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\beheneko the elf girls cat is secretly an s ranked monster (2025) (2025)\data for beheneko the elf girls cat is secretly an s ranked monster (2025) (2025)
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\berserk-of-gluttony\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\berserk-of-gluttony\data for berserk-of-gluttony
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\black-summoner\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\black-summoner\data for black-summoner
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\boarding-school-juliet\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\boarding-school-juliet\data for boarding-school-juliet
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\buddy-daddies\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\buddy-daddies\data for buddy-daddies
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\can-a-boy-girl-friendship-survive\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\can-a-boy-girl-friendship-survive\data for can-a-boy-girl-friendship-survive
2025-09-29 16:18:54 - WARNING - root - load_series - Skipping chillin-in-another-world-with-level-2-super-cheat-powers - No data folder found
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\chillin-in-my-30s-after-getting-fired-from-the-demon-kings-army\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\chillin-in-my-30s-after-getting-fired-from-the-demon-kings-army\data for chillin-in-my-30s-after-getting-fired-from-the-demon-kings-army
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\choujin koukousei tachi wa isekai de mo yoyuu de ikinuku you desu\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\choujin koukousei tachi wa isekai de mo yoyuu de ikinuku you desu\data for choujin koukousei tachi wa isekai de mo yoyuu de ikinuku you desu
2025-09-29 16:18:54 - WARNING - root - load_series - Skipping clevatess - No data folder found
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\compass-20-animation-project\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\compass-20-animation-project\data for compass-20-animation-project
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\dragon-raja-the-blazing-dawn\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\dragon-raja-the-blazing-dawn\data for dragon-raja-the-blazing-dawn
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\dragonar-academy\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\dragonar-academy\data for dragonar-academy
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\drugstore-in-another-world-the-slow-life-of-a-cheat-pharmacist\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\drugstore-in-another-world-the-slow-life-of-a-cheat-pharmacist\data for drugstore-in-another-world-the-slow-life-of-a-cheat-pharmacist
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\fluffy-paradise\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\fluffy-paradise\data for fluffy-paradise
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\food-for-the-soul\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\food-for-the-soul\data for food-for-the-soul
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\handyman-saitou-in-another-world\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\handyman-saitou-in-another-world\data for handyman-saitou-in-another-world
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\i-shall-survive-using-potions\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\i-shall-survive-using-potions\data for i-shall-survive-using-potions
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\im-giving-the-disgraced-noble-lady-i-rescued-a-crash-course-in-naughtiness\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\im-giving-the-disgraced-noble-lady-i-rescued-a-crash-course-in-naughtiness\data for im-giving-the-disgraced-noble-lady-i-rescued-a-crash-course-in-naughtiness
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\killing-bites\data
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\killing-bites\data for killing-bites
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\love-flops\data
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\love-flops\data for love-flops
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\magic-maker-how-to-make-magic-in-another-world\data
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\magic-maker-how-to-make-magic-in-another-world\data for magic-maker-how-to-make-magic-in-another-world
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\muhyo-rojis-bureau-of-supernatural-investigation\data
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\muhyo-rojis-bureau-of-supernatural-investigation\data for muhyo-rojis-bureau-of-supernatural-investigation
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\my-roommate-is-a-cat\data
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\my-roommate-is-a-cat\data for my-roommate-is-a-cat
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\nukitashi-the-animation\data
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\nukitashi-the-animation\data for nukitashi-the-animation
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\outbreak-company\data
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\outbreak-company\data for outbreak-company
2025-09-29 16:18:55 - WARNING - root - load_series - Skipping plastic-memories - No data folder found
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\pseudo-harem\data
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\pseudo-harem\data for pseudo-harem
2025-09-29 16:18:55 - WARNING - root - load_series - Skipping rent-a-girlfriend - No data folder found
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\sasaki-and-peeps\data
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\sasaki-and-peeps\data for sasaki-and-peeps
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\scooped-up-by-an-s-rank-adventurer\data
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\scooped-up-by-an-s-rank-adventurer\data for scooped-up-by-an-s-rank-adventurer
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\secrets-of-the-silent-witch\data
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\secrets-of-the-silent-witch\data for secrets-of-the-silent-witch
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\seton-academy-join-the-pack\data
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\seton-academy-join-the-pack\data for seton-academy-join-the-pack
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\shachibato-president-its-time-for-battle\data
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\shachibato-president-its-time-for-battle\data for shachibato-president-its-time-for-battle
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\skeleton-knight-in-another-world\data
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\skeleton-knight-in-another-world\data for skeleton-knight-in-another-world
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\sugar-apple-fairy-tale\data
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\sugar-apple-fairy-tale\data for sugar-apple-fairy-tale
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\summer-pockets\data
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\summer-pockets\data for summer-pockets
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\suppose-a-kid-from-the-last-dungeon-boonies-moved-to-a-starter-town\data
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\suppose-a-kid-from-the-last-dungeon-boonies-moved-to-a-starter-town\data for suppose-a-kid-from-the-last-dungeon-boonies-moved-to-a-starter-town
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-beginning-after-the-end\data
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-beginning-after-the-end\data for the-beginning-after-the-end
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-brilliant-healers-new-life-in-the-shadows\data
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-brilliant-healers-new-life-in-the-shadows\data for the-brilliant-healers-new-life-in-the-shadows
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-daily-life-of-a-middle-aged-online-shopper-in-another-world\data
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-daily-life-of-a-middle-aged-online-shopper-in-another-world\data for the-daily-life-of-a-middle-aged-online-shopper-in-another-world
2025-09-29 16:18:55 - WARNING - root - load_series - Skipping the-familiar-of-zero - No data folder found
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-fragrant-flower-blooms-with-dignity\data
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-fragrant-flower-blooms-with-dignity\data for the-fragrant-flower-blooms-with-dignity
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-great-cleric\data
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-great-cleric\data for the-great-cleric
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-new-chronicles-of-extraordinary-beings-preface\data
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-new-chronicles-of-extraordinary-beings-preface\data for the-new-chronicles-of-extraordinary-beings-preface
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-shiunji-family-children\data
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-shiunji-family-children\data for the-shiunji-family-children
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-shy-hero-and-the-assassin-princesses\data
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-shy-hero-and-the-assassin-princesses\data for the-shy-hero-and-the-assassin-princesses
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-testament-of-sister-new-devil\data
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-testament-of-sister-new-devil\data for the-testament-of-sister-new-devil
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-unwanted-undead-adventurer\data
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-unwanted-undead-adventurer\data for the-unwanted-undead-adventurer
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-water-magician\data
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-water-magician\data for the-water-magician
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-worlds-finest-assassin-gets-reincarnated-in-another-world-as-an-aristocrat\data
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-worlds-finest-assassin-gets-reincarnated-in-another-world-as-an-aristocrat\data for the-worlds-finest-assassin-gets-reincarnated-in-another-world-as-an-aristocrat
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-wrong-way-to-use-healing-magic\data
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-wrong-way-to-use-healing-magic\data for the-wrong-way-to-use-healing-magic
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\theres-no-freaking-way-ill-be-your-lover-unless\data
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\theres-no-freaking-way-ill-be-your-lover-unless\data for theres-no-freaking-way-ill-be-your-lover-unless
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\to-be-hero-x\data
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\to-be-hero-x\data for to-be-hero-x
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\tougen-anki\data
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\tougen-anki\data for tougen-anki
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\uglymug-epicfighter\data
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\uglymug-epicfighter\data for uglymug-epicfighter
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\valkyrie-drive-mermaid\data
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\valkyrie-drive-mermaid\data for valkyrie-drive-mermaid
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\wandering-witch-the-journey-of-elaina\data
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\wandering-witch-the-journey-of-elaina\data for wandering-witch-the-journey-of-elaina
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\war-god-system-im-counting-on-you\data
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\war-god-system-im-counting-on-you\data for war-god-system-im-counting-on-you
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\welcome-to-japan-ms-elf\data
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\welcome-to-japan-ms-elf\data for welcome-to-japan-ms-elf
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\welcome-to-the-outcasts-restaurant\data
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\welcome-to-the-outcasts-restaurant\data for welcome-to-the-outcasts-restaurant
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\yandere-dark-elf-she-chased-me-all-the-way-from-another-world\data
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\yandere-dark-elf-she-chased-me-all-the-way-from-another-world\data for yandere-dark-elf-she-chased-me-all-the-way-from-another-world
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Übel Blatt (2025)\data
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Übel Blatt (2025)\data for Übel Blatt (2025)
2025-09-29 16:18:55 - WARNING - werkzeug - _log - * Debugger is active!
2025-09-29 16:19:21 - DEBUG - schedule - clear - Deleting *all* jobs

View File

@@ -1,205 +0,0 @@
import os
import sys
import logging
from flask import Flask, request, jsonify, render_template, redirect, url_for, session, send_from_directory
from flask_socketio import SocketIO, emit
import atexit
import signal
import time
from datetime import datetime
# Add the parent directory to sys.path to import our modules
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
from main import SeriesApp
from server.core.entities.series import Serie
from server.core.entities import SerieList
from server.infrastructure.file_system import SerieScanner
from server.infrastructure.providers.provider_factory import Loaders
from web.controllers.auth_controller import session_manager, require_auth, optional_auth
from config import config
from application.services.queue_service import download_queue_bp
app = Flask(__name__)
# SECRET_KEY is regenerated on every start, so sessions do not survive restarts.
app.config['SECRET_KEY'] = os.urandom(24)
app.config['PERMANENT_SESSION_LIFETIME'] = 86400  # 24 hours
socketio = SocketIO(app, cors_allowed_origins="*")
# Register essential blueprints only
app.register_blueprint(download_queue_bp)
# Initialize series application
# Global SeriesApp instance; populated lazily by init_series_app().
series_app = None
# UNC path to the sshfs-mounted media share; override via ANIME_DIRECTORY env var.
anime_directory = os.getenv("ANIME_DIRECTORY", "\\\\sshfs.r\\ubuntu@192.168.178.43\\media\\serien\\Serien")
def create_app():
    """Configure logging and return the module-level Flask application.

    Note: this does not build a new app; it returns the shared `app`
    created at import time.
    """
    logging.basicConfig(level=logging.INFO)
    log = logging.getLogger(__name__)
    log.info("Starting Aniworld Flask server...")
    return app
def init_series_app():
    """Create the global SeriesApp, falling back to a stub on failure.

    On any initialization error a minimal stand-in object is installed so
    route handlers can still probe `series_app.List` without crashing.
    """
    global series_app
    log = logging.getLogger(__name__)
    try:
        log.info(f"Initializing series app with directory: {anime_directory}")
        series_app = SeriesApp(anime_directory)
        log.info("Series app initialized successfully")
    except Exception as e:
        log.error(f"Failed to initialize series app: {e}")
        # Create a minimal fallback with the attributes the routes read.
        series_app = type('SeriesApp', (), {
            'List': None,
            'directory_to_search': anime_directory,
        })()
@app.route('/')
@optional_auth
def index():
    """Render the main application page (authentication optional)."""
    return render_template('base/index.html')
@app.route('/login')
def login():
    """Render the login page (no auth required, by design)."""
    return render_template('base/login.html')
@app.route('/api/auth/login', methods=['POST'])
def api_login():
    """Authenticate a client from a JSON payload.

    Expects a body like ``{"password": "..."}``.  Validation is delegated
    to the session manager, which also receives the client IP (presumably
    for rate limiting / lockout tracking -- confirm in session_manager).
    Returns the manager's result dict as JSON, or HTTP 500 on any error.
    """
    try:
        data = request.get_json()
        password = data.get('password', '')
        result = session_manager.login(password, request.remote_addr)
        return jsonify(result)
    except Exception as e:
        # Any failure (malformed JSON, internal error) surfaces as HTTP 500.
        return jsonify({'status': 'error', 'message': str(e)}), 500
@app.route('/api/auth/logout', methods=['POST'])
def api_logout():
    """Terminate the current session; always reports success."""
    session_manager.logout()
    return jsonify({'status': 'success', 'message': 'Logged out successfully'})
@app.route('/api/auth/status')
@optional_auth
def auth_status():
    """Report the current session's authentication state.

    Falls back to 'guest' when no user is stored in the session.
    """
    return jsonify({
        'authenticated': session_manager.is_authenticated(),
        'user': session.get('user', 'guest'),
        'login_time': session.get('login_time'),
        'session_info': session_manager.get_session_info()
    })
@app.route('/api/series', methods=['GET'])
@optional_auth
def get_series():
    """Return every known series as JSON.

    Deliberately always responds with HTTP 200 and status 'success' --
    even on internal errors -- because a 500 here caused client-side
    reload loops; problems are reported via the 'message' field instead.
    """
    try:
        if series_app is None or series_app.List is None:
            return jsonify({
                'status': 'success',
                'series': [],
                'total_series': 0,
                'message': 'No series data available. Please perform a scan to load series.'
            })
        series_data = []
        for serie in series_app.List.GetList():
            # Guard: older Serie objects may lack episodeDict entirely.
            episodes = serie.episodeDict if getattr(serie, 'episodeDict', None) else {}
            episode_count = sum(len(eps) for eps in episodes.values())
            series_data.append({
                'folder': serie.folder,
                'name': serie.name or serie.folder,
                'total_episodes': episode_count,
                # NOTE(review): mirrors the original behaviour where "missing"
                # equals the total count -- presumably a placeholder until real
                # missing-episode tracking exists; confirm before relying on it.
                'missing_episodes': episode_count,
                'status': 'ongoing',
                'episodes': dict(episodes),
            })
        return jsonify({
            'status': 'success',
            'series': series_data,
            'total_series': len(series_data)
        })
    except Exception as e:
        # Log the error but don't return 500 to prevent page reload loops.
        logging.getLogger(__name__).error(f"Error in get_series: {e}")
        return jsonify({
            'status': 'success',
            'series': [],
            'total_series': 0,
            'message': 'Error loading series data. Please try rescanning.'
        })
@app.route('/api/preferences', methods=['GET'])
@optional_auth
def get_preferences():
    """Get user preferences.

    NOTE(review): these are hard-coded defaults; there is no persisted
    per-user preference store behind this endpoint yet.
    """
    # Return basic preferences for now
    return jsonify({
        'theme': 'dark',
        'language': 'en',
        'auto_refresh': True,
        'notifications': True
    })
# Basic health status endpoint
@app.route('/api/process/locks/status')
@optional_auth
def process_locks_status():
    """Get process lock status.

    NOTE(review): always reports every lock as free -- a static placeholder
    until real lock tracking is wired in.
    """
    return jsonify({
        'rescan_locked': False,
        'download_locked': False,
        'cleanup_locked': False,
        'message': 'All processes available'
    })
# Undo/Redo status endpoint
@app.route('/api/undo-redo/status')
@optional_auth
def undo_redo_status():
    """Get undo/redo status.

    NOTE(review): static placeholder -- no undo/redo stack exists yet.
    """
    return jsonify({
        'can_undo': False,
        'can_redo': False,
        'undo_count': 0,
        'redo_count': 0,
        'last_action': None
    })
# Static file serving
@app.route('/static/<path:filename>')
def static_files(filename):
    """Serve static assets from web/static.

    send_from_directory rejects path-traversal attempts in *filename*.
    """
    return send_from_directory('web/static', filename)
def cleanup_on_exit():
    """Cleanup function to run on application exit.

    Currently only logs; hook real resource teardown in here as needed.
    """
    logger = logging.getLogger(__name__)
    logger.info("Application cleanup completed")

# Register cleanup function
atexit.register(cleanup_on_exit)
if __name__ == '__main__':
    # Initialize series app before serving any requests.
    init_series_app()
    # Start the application.  debug=True plus allow_unsafe_werkzeug=True run
    # Werkzeug's development server under SocketIO -- not for production use.
    print("Server will be available at http://localhost:5000")
    socketio.run(app, debug=True, host='0.0.0.0', port=5000, allow_unsafe_werkzeug=True)

View File

@@ -1,83 +0,0 @@
@echo off
REM Test Runner Script for AniWorld Testing Pipeline (Windows)
REM This script provides an easy way to run the AniWorld test suite on Windows
REM Usage: run_tests.bat [unit|integration|performance|coverage|load|all|basic]

echo AniWorld Test Suite Runner
echo ==========================

REM Check if we're in the right directory
if not exist "test_pipeline.py" (
    echo Error: Please run this script from the src\server directory
    exit /b 1
)

REM Get test type parameter (default to basic)
set TEST_TYPE=%1
if "%TEST_TYPE%"=="" set TEST_TYPE=basic

echo Running test type: %TEST_TYPE%
echo.

if "%TEST_TYPE%"=="unit" (
    echo Running Unit Tests Only
    python test_pipeline.py --unit
    goto :end
)
if "%TEST_TYPE%"=="integration" (
    echo Running Integration Tests Only
    python test_pipeline.py --integration
    goto :end
)
if "%TEST_TYPE%"=="performance" (
    echo Running Performance Tests Only
    python test_pipeline.py --performance
    goto :end
)
if "%TEST_TYPE%"=="coverage" (
    echo Running Code Coverage Analysis
    python test_pipeline.py --coverage
    goto :end
)
if "%TEST_TYPE%"=="load" (
    echo Running Load Tests
    python test_pipeline.py --load
    goto :end
)
if "%TEST_TYPE%"=="all" (
    echo Running Complete Test Pipeline
    python test_pipeline.py --all
    goto :end
)

REM Default case - basic tests
echo Running Basic Test Suite (Unit + Integration)
echo.
echo Running Unit Tests...
python test_pipeline.py --unit
set unit_result=%errorlevel%
echo.
echo Running Integration Tests...
python test_pipeline.py --integration
set integration_result=%errorlevel%
echo.
echo ==========================================
REM BUGFIX: the original used `if A==0 if B==0 (...) else (...)`.  In cmd the
REM `else` binds to the INNER `if`, so when unit tests failed NEITHER branch
REM ran and the script fell through to :end exiting 0 (silent false success).
REM Aggregate into an explicit flag instead.
set SUITE_FAILED=0
if not %unit_result%==0 set SUITE_FAILED=1
if not %integration_result%==0 set SUITE_FAILED=1
if %SUITE_FAILED%==0 (
    echo ✅ Basic Test Suite: ALL TESTS PASSED
    exit /b 0
) else (
    echo ❌ Basic Test Suite: SOME TESTS FAILED
    exit /b 1
)

:end
echo.
echo Test execution completed!
echo Check the output above for detailed results.

View File

@@ -1,81 +0,0 @@
#!/bin/bash
# Test Runner Script for AniWorld Testing Pipeline
# This script provides an easy way to run the AniWorld test suite
#
# Usage: ./run_tests.sh [unit|integration|performance|coverage|load|all|basic]
# Defaults to "basic" (unit + integration) when no argument is given.

echo "AniWorld Test Suite Runner"
echo "=========================="

# Check if we're in the right directory
if [ ! -f "test_pipeline.py" ]; then
    echo "Error: Please run this script from the src/server directory"
    exit 1
fi

# Function to run tests with error handling.
# $1 - human-readable test name, $2 - command line to execute (eval'd).
# Propagates the command's success/failure so callers can aggregate results.
run_test() {
    local test_name="$1"
    local command="$2"
    echo ""
    echo "Running $test_name..."
    echo "----------------------------------------"
    if eval "$command"; then
        echo "$test_name completed successfully"
        return 0
    else
        echo "$test_name failed"
        return 1
    fi
}

# Default to running basic tests
TEST_TYPE="${1:-basic}"

case "$TEST_TYPE" in
    "unit")
        echo "Running Unit Tests Only"
        run_test "Unit Tests" "python test_pipeline.py --unit"
        ;;
    "integration")
        echo "Running Integration Tests Only"
        run_test "Integration Tests" "python test_pipeline.py --integration"
        ;;
    "performance")
        echo "Running Performance Tests Only"
        run_test "Performance Tests" "python test_pipeline.py --performance"
        ;;
    "coverage")
        echo "Running Code Coverage Analysis"
        run_test "Code Coverage" "python test_pipeline.py --coverage"
        ;;
    "load")
        echo "Running Load Tests"
        run_test "Load Tests" "python test_pipeline.py --load"
        ;;
    "all")
        echo "Running Complete Test Pipeline"
        run_test "Full Pipeline" "python test_pipeline.py --all"
        ;;
    "basic"|*)
        # Unknown arguments also land here, so any typo runs the basic suite.
        echo "Running Basic Test Suite (Unit + Integration)"
        success=true
        run_test "Unit Tests" "python test_pipeline.py --unit" || success=false
        run_test "Integration Tests" "python test_pipeline.py --integration" || success=false
        echo ""
        echo "=========================================="
        if [ "$success" = true ]; then
            echo "✅ Basic Test Suite: ALL TESTS PASSED"
            exit 0
        else
            echo "❌ Basic Test Suite: SOME TESTS FAILED"
            exit 1
        fi
        ;;
esac

echo ""
echo "Test execution completed!"
echo "Check the output above for detailed results."

View File

@@ -1,3 +0,0 @@
"""
Shared utilities and constants for the AniWorld application.
"""

View File

@@ -1,56 +0,0 @@
import os
import hashlib
from collections import defaultdict
def compute_hash(filepath, chunk_size=8192):
    """Return the SHA-256 hex digest of *filepath*, or None on read errors.

    The file is consumed in *chunk_size* pieces so arbitrarily large files
    never need to fit in memory.
    """
    digest = hashlib.sha256()
    try:
        with open(filepath, 'rb') as fh:
            while True:
                block = fh.read(chunk_size)
                if not block:
                    break
                digest.update(block)
    except Exception as e:
        print(f"Error reading {filepath}: {e}")
        return None
    return digest.hexdigest()
def find_duplicates(root_dir):
    """Group duplicate .mp4 files under *root_dir* by content hash.

    Returns a dict mapping SHA-256 digest -> list of file paths sharing
    that digest (only groups with more than one member).  Files are first
    bucketed by size so hashing is only paid for potential duplicates.
    """
    # Step 1: Group files by size
    by_size = defaultdict(list)
    for dirpath, _, filenames in os.walk(root_dir):
        for name in filenames:
            if not name.lower().endswith('.mp4'):
                continue
            path = os.path.join(dirpath, name)
            try:
                by_size[os.path.getsize(path)].append(path)
            except Exception as e:
                print(f"Error accessing {path}: {e}")
    # Step 2: Within size groups, group by hash
    duplicates = defaultdict(list)
    for candidates in by_size.values():
        if len(candidates) < 2:
            continue
        by_hash = defaultdict(list)
        for path in candidates:
            digest = compute_hash(path)
            if digest:
                by_hash[digest].append(path)
        for digest, paths in by_hash.items():
            if len(paths) > 1:
                duplicates[digest].extend(paths)
    return duplicates
# Example usage
if __name__ == "__main__":
    # Scan the sshfs-mounted network share; adjust the UNC path as needed.
    folder_to_scan = "\\\\sshfs.r\\ubuntu@192.168.178.43\\media\\serien\\Serien"
    dupes = find_duplicates(folder_to_scan)
    for hash_val, files in dupes.items():
        print(f"\nDuplicate group (hash: {hash_val}):")
        for f in files:
            print(f" {f}")

View File

@@ -101,238 +101,6 @@ class SpeedLimiter:
return speed_bps / (1024 * 1024) # Convert to MB/s
return 0.0
class DownloadCache:
    """Caching system for frequently accessed data.

    Payloads are stored as individual ``*.cache`` files under ``cache_dir``;
    bookkeeping (expiry, access counts, sizes, metadata) lives in a SQLite
    database alongside them.  Public operations serialize on one
    process-wide lock, so instances are safe to share between threads of a
    single process (NOT across processes -- the file writes are unguarded
    against other processes).
    """
    def __init__(self, cache_dir: str = "./cache", max_size_mb: int = 500):
        self.cache_dir = cache_dir
        self.max_size_bytes = max_size_mb * 1024 * 1024
        self.cache_db = os.path.join(cache_dir, 'cache.db')
        self.lock = threading.Lock()
        self.logger = logging.getLogger(__name__)
        # Create cache directory
        os.makedirs(cache_dir, exist_ok=True)
        # Initialize database
        self._init_database()
        # Clean expired entries on startup
        self._cleanup_expired()
    def _init_database(self):
        """Initialize cache database (idempotent: CREATE ... IF NOT EXISTS)."""
        with sqlite3.connect(self.cache_db) as conn:
            conn.execute("""
                CREATE TABLE IF NOT EXISTS cache_entries (
                    key TEXT PRIMARY KEY,
                    file_path TEXT,
                    created_at TIMESTAMP,
                    expires_at TIMESTAMP,
                    access_count INTEGER DEFAULT 0,
                    size_bytes INTEGER,
                    metadata TEXT
                )
            """)
            conn.execute("""
                CREATE INDEX IF NOT EXISTS idx_expires_at ON cache_entries(expires_at)
            """)
            conn.execute("""
                CREATE INDEX IF NOT EXISTS idx_access_count ON cache_entries(access_count)
            """)
    def _generate_key(self, data: str) -> str:
        """Generate cache key from data.

        MD5 is used purely to derive a filesystem-safe key, not for
        security; collisions are astronomically unlikely for cache keys.
        """
        return hashlib.md5(data.encode()).hexdigest()
    def put(self, key: str, data: bytes, ttl_seconds: int = 3600, metadata: Optional[Dict] = None):
        """Store *data* in the cache under *key* with a TTL.

        Overwrites any existing entry for the same key (INSERT OR REPLACE).
        Failures are logged, never raised -- caching is best-effort.
        """
        with self.lock:
            try:
                cache_key = self._generate_key(key)
                file_path = os.path.join(self.cache_dir, f"{cache_key}.cache")
                # Write data to file
                with open(file_path, 'wb') as f:
                    f.write(data)
                # Store metadata in database
                expires_at = datetime.now() + timedelta(seconds=ttl_seconds)
                with sqlite3.connect(self.cache_db) as conn:
                    conn.execute("""
                        INSERT OR REPLACE INTO cache_entries
                        (key, file_path, created_at, expires_at, size_bytes, metadata)
                        VALUES (?, ?, ?, ?, ?, ?)
                    """, (
                        cache_key, file_path, datetime.now(), expires_at,
                        len(data), json.dumps(metadata or {})
                    ))
                # Clean up if cache is too large
                self._cleanup_if_needed()
                self.logger.debug(f"Cached data for key: {key} (size: {len(data)} bytes)")
            except Exception as e:
                self.logger.error(f"Failed to cache data for key {key}: {e}")
    def get(self, key: str) -> Optional[bytes]:
        """Retrieve cached bytes for *key*, or None on miss/expiry/error.

        Also bumps the entry's access_count (used by LRU-ish eviction) and
        self-heals rows whose backing file has vanished.
        NOTE(review): expiry compares datetime values inside SQLite via the
        default adapter's string form -- works because ISO timestamps sort
        lexicographically; confirm if the adapter is ever changed.
        """
        with self.lock:
            try:
                cache_key = self._generate_key(key)
                with sqlite3.connect(self.cache_db) as conn:
                    cursor = conn.execute("""
                        SELECT file_path, expires_at FROM cache_entries
                        WHERE key = ? AND expires_at > ?
                    """, (cache_key, datetime.now()))
                    row = cursor.fetchone()
                    if not row:
                        return None
                    file_path, _ = row
                    # Update access count
                    conn.execute("""
                        UPDATE cache_entries SET access_count = access_count + 1
                        WHERE key = ?
                    """, (cache_key,))
                    # Read and return data
                    if os.path.exists(file_path):
                        with open(file_path, 'rb') as f:
                            data = f.read()
                        self.logger.debug(f"Cache hit for key: {key}")
                        return data
                    else:
                        # File missing, remove from database
                        conn.execute("DELETE FROM cache_entries WHERE key = ?", (cache_key,))
            except Exception as e:
                self.logger.error(f"Failed to retrieve cached data for key {key}: {e}")
            return None
    def _cleanup_expired(self):
        """Remove expired cache entries (files first, then their rows)."""
        try:
            with sqlite3.connect(self.cache_db) as conn:
                # Get expired entries
                cursor = conn.execute("""
                    SELECT key, file_path FROM cache_entries
                    WHERE expires_at <= ?
                """, (datetime.now(),))
                expired_entries = cursor.fetchall()
                # Remove files and database entries
                for cache_key, file_path in expired_entries:
                    try:
                        if os.path.exists(file_path):
                            os.remove(file_path)
                    except Exception as e:
                        self.logger.warning(f"Failed to remove expired cache file {file_path}: {e}")
                # Remove from database
                conn.execute("DELETE FROM cache_entries WHERE expires_at <= ?", (datetime.now(),))
                if expired_entries:
                    self.logger.info(f"Cleaned up {len(expired_entries)} expired cache entries")
        except Exception as e:
            self.logger.error(f"Failed to cleanup expired cache entries: {e}")
    def _cleanup_if_needed(self):
        """Evict least-accessed entries when total size exceeds the limit.

        Evicts in (access_count ASC, created_at ASC) order until the cache
        is back under 80% of max_size_bytes, leaving headroom for new puts.
        """
        try:
            with sqlite3.connect(self.cache_db) as conn:
                # Calculate total cache size
                cursor = conn.execute("SELECT SUM(size_bytes) FROM cache_entries")
                total_size = cursor.fetchone()[0] or 0
                if total_size > self.max_size_bytes:
                    # Remove least accessed entries until under limit
                    cursor = conn.execute("""
                        SELECT key, file_path, size_bytes FROM cache_entries
                        ORDER BY access_count ASC, created_at ASC
                    """)
                    removed_size = 0
                    target_size = self.max_size_bytes * 0.8  # Remove until 80% full
                    for cache_key, file_path, size_bytes in cursor:
                        try:
                            if os.path.exists(file_path):
                                os.remove(file_path)
                            conn.execute("DELETE FROM cache_entries WHERE key = ?", (cache_key,))
                            removed_size += size_bytes
                            if total_size - removed_size <= target_size:
                                break
                        except Exception as e:
                            self.logger.warning(f"Failed to remove cache file {file_path}: {e}")
                    if removed_size > 0:
                        self.logger.info(f"Cache cleanup: removed {removed_size / (1024*1024):.1f} MB")
        except Exception as e:
            self.logger.error(f"Failed to cleanup cache: {e}")
    def clear(self):
        """Clear entire cache: delete every backing file, then all rows."""
        with self.lock:
            try:
                with sqlite3.connect(self.cache_db) as conn:
                    cursor = conn.execute("SELECT file_path FROM cache_entries")
                    for (file_path,) in cursor:
                        try:
                            if os.path.exists(file_path):
                                os.remove(file_path)
                        except Exception as e:
                            self.logger.warning(f"Failed to remove cache file {file_path}: {e}")
                    conn.execute("DELETE FROM cache_entries")
                self.logger.info("Cache cleared successfully")
            except Exception as e:
                self.logger.error(f"Failed to clear cache: {e}")
    def get_stats(self) -> Dict[str, Any]:
        """Get cache statistics (entry count, sizes in MB, access counts).

        Returns an empty dict on failure rather than raising.
        """
        try:
            with sqlite3.connect(self.cache_db) as conn:
                cursor = conn.execute("""
                    SELECT
                        COUNT(*) as entry_count,
                        SUM(size_bytes) as total_size,
                        SUM(access_count) as total_accesses,
                        AVG(access_count) as avg_accesses
                    FROM cache_entries
                """)
                row = cursor.fetchone()
                return {
                    'entry_count': row[0] or 0,
                    'total_size_mb': (row[1] or 0) / (1024 * 1024),
                    'total_accesses': row[2] or 0,
                    'avg_accesses': row[3] or 0,
                    'max_size_mb': self.max_size_bytes / (1024 * 1024)
                }
        except Exception as e:
            self.logger.error(f"Failed to get cache stats: {e}")
            return {}
class MemoryMonitor:
"""Monitor and optimize memory usage."""
@@ -747,7 +515,6 @@ class ResumeManager:
# Global instances
# Module-level singletons created at import time and shared by the rest of
# the module (they are also re-exported via __all__ further below).
speed_limiter = SpeedLimiter()
download_cache = DownloadCache()
memory_monitor = MemoryMonitor()
# The manager is wired to the shared limiter above.
# NOTE(review): max_workers is hard-coded to 3 — confirm this default is
# intended rather than configuration-driven.
download_manager = ParallelDownloadManager(max_workers=3, speed_limiter=speed_limiter)
resume_manager = ResumeManager()
@@ -768,7 +535,6 @@ def cleanup_performance_monitoring():
# Export main components
__all__ = [
'SpeedLimiter',
'DownloadCache',
'MemoryMonitor',
'ParallelDownloadManager',
'ResumeManager',

File diff suppressed because it is too large Load Diff

View File

@@ -1,38 +0,0 @@
#!/usr/bin/env python3
"""
Simple script to test the API endpoint without crashing the server.
"""
import requests
import json
import time
def test_api():
    """Manual smoke test: fetch /api/series and print a short summary.

    Purely side-effecting (prints to stdout); never raises — request and
    parsing failures are caught and reported.
    """
    url = "http://localhost:5000/api/series"
    try:
        print("Testing API endpoint...")
        response = requests.get(url, timeout=30)
        print(f"Status Code: {response.status_code}")
        if response.status_code != 200:
            # Non-200: dump the raw body and stop.
            print(f"Error: {response.text}")
            return
        data = response.json()
        print(f"Response status: {data.get('status', 'unknown')}")
        print(f"Total series: {data.get('total_series', 0)}")
        print(f"Message: {data.get('message', 'No message')}")
        series = data.get('series', [])
        if not series:
            print("No series found in response")
            return
        # Show only a small preview of the result set.
        print("\nFirst 3 series:")
        for i, serie in enumerate(series[:3]):
            print(f" {i+1}. {serie.get('name', 'Unknown')} ({serie.get('folder', 'Unknown folder')})")
    except requests.exceptions.RequestException as e:
        print(f"Request failed: {e}")
    except Exception as e:
        print(f"Error: {e}")
# Run the smoke test only when this script is executed directly,
# not when it is imported.
if __name__ == "__main__":
    test_api()

View File

@@ -1,3 +0,0 @@
"""
Web presentation layer with controllers, middleware, and templates.
"""

View File

@@ -1 +0,0 @@
# Web controllers - Flask blueprints

View File

@@ -1 +0,0 @@
# Admin controllers

View File

@@ -1 +0,0 @@
# API endpoints version 1

View File

@@ -1 +0,0 @@
# API middleware

View File

@@ -1 +0,0 @@
# Web middleware

View File

@@ -79,7 +79,7 @@ def init_series_app():
"""Initialize the SeriesApp with configuration directory."""
global series_app
from config import config
from Main import SeriesApp
from src.cli.Main import SeriesApp
directory_to_search = config.anime_directory
series_app = SeriesApp(directory_to_search)
return series_app

View File

@@ -1,14 +0,0 @@
"""
WSGI entry point for production deployment.
This file is used by WSGI servers like Gunicorn, uWSGI, etc.
"""
from src.server.app import create_app
# Create the Flask application instance
application = create_app()
app = application # Some WSGI servers expect 'app' variable
if __name__ == "__main__":
# This is for development only
app.run(debug=False)