refactoring
This commit is contained in:
@@ -5,7 +5,7 @@ from server.infrastructure.providers import aniworld_provider
|
||||
|
||||
from rich.progress import Progress
|
||||
from server.core.entities import SerieList
|
||||
from server.infrastructure.file_system.SerieScanner import SerieScanner
|
||||
from src.server.core.SerieScanner import SerieScanner
|
||||
from server.infrastructure.providers.provider_factory import Loaders
|
||||
from server.core.entities.series import Serie
|
||||
import time
|
||||
@@ -1,3 +0,0 @@
|
||||
"""
|
||||
Command line interface for the AniWorld application.
|
||||
"""
|
||||
@@ -462,3 +462,30 @@
|
||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\yandere-dark-elf-she-chased-me-all-the-way-from-another-world\data for yandere-dark-elf-she-chased-me-all-the-way-from-another-world
|
||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Übel Blatt (2025)\data
|
||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Übel Blatt (2025)\data for Übel Blatt (2025)
|
||||
2025-09-29 20:23:13 - INFO - __main__ - <module> - Enhanced logging system initialized
|
||||
2025-09-29 20:23:13 - INFO - __main__ - <module> - Starting Aniworld Flask server...
|
||||
2025-09-29 20:23:13 - INFO - __main__ - <module> - Anime directory: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien
|
||||
2025-09-29 20:23:13 - INFO - __main__ - <module> - Log level: INFO
|
||||
2025-09-29 20:23:13 - INFO - __main__ - <module> - Scheduled operations disabled
|
||||
2025-09-29 20:23:13 - INFO - __main__ - <module> - Server will be available at http://localhost:5000
|
||||
2025-09-29 20:23:16 - INFO - __main__ - <module> - Enhanced logging system initialized
|
||||
2025-09-29 20:23:16 - INFO - root - __init__ - Initialized Loader with base path: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien
|
||||
2025-09-29 20:23:16 - INFO - root - load_series - Scanning anime folders in: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien
|
||||
2025-09-29 20:23:16 - ERROR - root - init_series_app - Error initializing SeriesApp:
|
||||
Traceback (most recent call last):
|
||||
File "D:\repo\Aniworld/src/server/app.py", line 145, in init_series_app
|
||||
series_app = SeriesApp(directory_to_search)
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
File "D:\repo\Aniworld\src\Main.py", line 54, in __init__
|
||||
self.List = SerieList(self.directory_to_search)
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
File "D:\repo\Aniworld\src\server\core\entities\SerieList.py", line 9, in __init__
|
||||
self.load_series()
|
||||
File "D:\repo\Aniworld\src\server\core\entities\SerieList.py", line 29, in load_series
|
||||
for anime_folder in os.listdir(self.directory):
|
||||
^^^^^^^^^^^^^^^^^^^^^^^^^^
|
||||
FileNotFoundError: [WinError 53] Der Netzwerkpfad wurde nicht gefunden: '\\\\sshfs.r\\ubuntu@192.168.178.43\\media\\serien\\Serien'
|
||||
2025-09-29 20:23:16 - WARNING - werkzeug - _log - * Debugger is active!
|
||||
2025-09-29 20:33:06 - DEBUG - schedule - clear - Deleting *all* jobs
|
||||
2025-09-29 20:33:06 - INFO - application.services.scheduler_service - stop_scheduler - Scheduled operations stopped
|
||||
2025-09-29 20:33:06 - INFO - __main__ - <module> - Scheduler stopped
|
||||
@@ -1,53 +0,0 @@
|
||||
# Flask Configuration
|
||||
FLASK_ENV=development
|
||||
FLASK_APP=app.py
|
||||
SECRET_KEY=your-secret-key-here
|
||||
DEBUG=True
|
||||
|
||||
# Database Configuration
|
||||
DATABASE_URL=sqlite:///data/database/anime.db
|
||||
DATABASE_POOL_SIZE=10
|
||||
DATABASE_TIMEOUT=30
|
||||
|
||||
# API Configuration
|
||||
API_KEY=your-api-key
|
||||
API_RATE_LIMIT=100
|
||||
API_TIMEOUT=30
|
||||
|
||||
# Cache Configuration
|
||||
CACHE_TYPE=simple
|
||||
REDIS_URL=redis://localhost:6379/0
|
||||
CACHE_TIMEOUT=300
|
||||
|
||||
# Logging Configuration
|
||||
LOG_LEVEL=INFO
|
||||
LOG_FORMAT=detailed
|
||||
LOG_FILE_MAX_SIZE=10MB
|
||||
LOG_BACKUP_COUNT=5
|
||||
|
||||
# Security Configuration
|
||||
SESSION_TIMEOUT=3600
|
||||
CSRF_TOKEN_TIMEOUT=3600
|
||||
MAX_LOGIN_ATTEMPTS=5
|
||||
LOGIN_LOCKOUT_DURATION=900
|
||||
|
||||
# Download Configuration
|
||||
DOWNLOAD_PATH=/downloads
|
||||
MAX_CONCURRENT_DOWNLOADS=5
|
||||
DOWNLOAD_TIMEOUT=1800
|
||||
RETRY_ATTEMPTS=3
|
||||
|
||||
# Provider Configuration
|
||||
PROVIDER_TIMEOUT=30
|
||||
PROVIDER_RETRIES=3
|
||||
USER_AGENT=AniWorld-Downloader/1.0
|
||||
|
||||
# Notification Configuration
|
||||
DISCORD_WEBHOOK_URL=
|
||||
TELEGRAM_BOT_TOKEN=
|
||||
TELEGRAM_CHAT_ID=
|
||||
|
||||
# Monitoring Configuration
|
||||
HEALTH_CHECK_INTERVAL=60
|
||||
METRICS_ENABLED=True
|
||||
PERFORMANCE_MONITORING=True
|
||||
@@ -1,146 +0,0 @@
|
||||
# AniWorld Web Manager
|
||||
|
||||
A modern Flask-based web application for managing anime downloads with a beautiful Fluent UI design.
|
||||
|
||||
## Features
|
||||
|
||||
✅ **Anime Search**
|
||||
- Real-time search with auto-suggest
|
||||
- Easy addition of series from search results
|
||||
- Clear search functionality
|
||||
|
||||
✅ **Series Management**
|
||||
- Grid layout with card-based display
|
||||
- Shows missing episodes count
|
||||
- Multi-select with checkboxes
|
||||
- Select all/deselect all functionality
|
||||
|
||||
✅ **Download Management**
|
||||
- Background downloading with progress tracking
|
||||
- Pause, resume, and cancel functionality
|
||||
- Real-time status updates via WebSocket
|
||||
|
||||
✅ **Modern UI**
|
||||
- Fluent UI design system (Windows 11 style)
|
||||
- Dark and light theme support
|
||||
- Responsive design for desktop and mobile
|
||||
- Smooth animations and transitions
|
||||
|
||||
✅ **Localization**
|
||||
- Support for multiple languages (English, German)
|
||||
- Easy to add new languages
|
||||
- Resource-based text management
|
||||
|
||||
✅ **Real-time Updates**
|
||||
- WebSocket connection for live updates
|
||||
- Toast notifications for user feedback
|
||||
- Status panel with progress tracking
|
||||
|
||||
## Setup
|
||||
|
||||
1. **Install Dependencies**
|
||||
```bash
|
||||
pip install Flask Flask-SocketIO eventlet
|
||||
```
|
||||
|
||||
2. **Environment Configuration**
|
||||
Set the `ANIME_DIRECTORY` environment variable to your anime storage path:
|
||||
```bash
|
||||
# Windows
|
||||
set ANIME_DIRECTORY="Z:\media\serien\Serien"
|
||||
|
||||
# Linux/Mac
|
||||
export ANIME_DIRECTORY="/path/to/your/anime/directory"
|
||||
```
|
||||
|
||||
3. **Run the Application**
|
||||
```bash
|
||||
cd src/server
|
||||
python app.py
|
||||
```
|
||||
|
||||
4. **Access the Web Interface**
|
||||
Open your browser and navigate to: `http://localhost:5000`
|
||||
|
||||
## Usage
|
||||
|
||||
### Searching and Adding Anime
|
||||
1. Use the search bar to find anime
|
||||
2. Browse search results
|
||||
3. Click "Add" to add series to your collection
|
||||
|
||||
### Managing Downloads
|
||||
1. Select series using checkboxes
|
||||
2. Click "Download Selected" to start downloading
|
||||
3. Monitor progress in the status panel
|
||||
4. Use pause/resume/cancel controls as needed
|
||||
|
||||
### Theme and Language
|
||||
- Click the moon/sun icon to toggle between light and dark themes
|
||||
- Language is automatically detected from browser settings
|
||||
- Supports English and German out of the box
|
||||
|
||||
### Configuration
|
||||
- Click the "Config" button to view current settings
|
||||
- Shows anime directory path, series count, and connection status
|
||||
|
||||
## File Structure
|
||||
|
||||
```
|
||||
src/server/
|
||||
├── app.py # Main Flask application
|
||||
├── templates/
|
||||
│ └── index.html # Main HTML template
|
||||
├── static/
|
||||
│ ├── css/
|
||||
│ │ └── styles.css # Fluent UI styles
|
||||
│ └── js/
|
||||
│ ├── app.js # Main application logic
|
||||
│ └── localization.js # Multi-language support
|
||||
```
|
||||
|
||||
## API Endpoints
|
||||
|
||||
- `GET /` - Main web interface
|
||||
- `GET /api/series` - Get all series with missing episodes
|
||||
- `POST /api/search` - Search for anime
|
||||
- `POST /api/add_series` - Add series to collection
|
||||
- `POST /api/download` - Start downloading selected series
|
||||
- `POST /api/rescan` - Rescan anime directory
|
||||
- `GET /api/status` - Get application status
|
||||
- `POST /api/download/pause` - Pause current download
|
||||
- `POST /api/download/resume` - Resume paused download
|
||||
- `POST /api/download/cancel` - Cancel current download
|
||||
|
||||
## WebSocket Events
|
||||
|
||||
- `connect` - Client connection established
|
||||
- `scan_started` - Directory scan initiated
|
||||
- `scan_progress` - Scan progress update
|
||||
- `scan_completed` - Scan finished successfully
|
||||
- `download_started` - Download initiated
|
||||
- `download_progress` - Download progress update
|
||||
- `download_completed` - Download finished
|
||||
- `download_paused` - Download paused
|
||||
- `download_resumed` - Download resumed
|
||||
- `download_cancelled` - Download cancelled
|
||||
|
||||
## Security Features
|
||||
|
||||
- Input validation on all API endpoints
|
||||
- No exposure of internal stack traces
|
||||
- Secure WebSocket connections
|
||||
- Environment-based configuration
|
||||
|
||||
## Browser Compatibility
|
||||
|
||||
- Modern browsers with ES6+ support
|
||||
- WebSocket support required
|
||||
- Responsive design works on mobile devices
|
||||
|
||||
## Development Notes
|
||||
|
||||
- Uses existing `SeriesApp` class without modifications
|
||||
- Maintains compatibility with original CLI application
|
||||
- Thread-safe download management
|
||||
- Proper error handling and user feedback
|
||||
@@ -1,109 +0,0 @@
|
||||
# Route Organization Summary
|
||||
|
||||
This document describes the reorganization of routes from a single `app.py` file into separate blueprint files for better organization and maintainability.
|
||||
|
||||
## New File Structure
|
||||
|
||||
```
|
||||
src/server/web/routes/
|
||||
├── __init__.py # Package initialization with graceful imports
|
||||
├── main_routes.py # Main page routes (index)
|
||||
├── auth_routes.py # Authentication routes (login, setup, API auth)
|
||||
├── api_routes.py # Core API routes (series, search, download, rescan)
|
||||
├── static_routes.py # Static file routes (JS/CSS for UX features)
|
||||
├── diagnostic_routes.py # Diagnostic and monitoring routes
|
||||
├── config_routes.py # Configuration management routes
|
||||
└── websocket_handlers.py # WebSocket event handlers
|
||||
```
|
||||
|
||||
## Route Categories
|
||||
|
||||
### 1. Main Routes (`main_routes.py`)
|
||||
- `/` - Main index page
|
||||
|
||||
### 2. Authentication Routes (`auth_routes.py`)
|
||||
Contains two blueprints:
|
||||
- **auth_bp**: Page routes (`/login`, `/setup`)
|
||||
- **auth_api_bp**: API routes (`/api/auth/*`)
|
||||
|
||||
### 3. API Routes (`api_routes.py`)
|
||||
- `/api/series` - Get series data
|
||||
- `/api/search` - Search for series
|
||||
- `/api/add_series` - Add new series
|
||||
- `/api/rescan` - Rescan series directory
|
||||
- `/api/download` - Add to download queue
|
||||
- `/api/queue/start` - Start download queue
|
||||
- `/api/queue/stop` - Stop download queue
|
||||
- `/api/status` - Get system status
|
||||
- `/api/process/locks/status` - Get process lock status
|
||||
- `/api/config/directory` - Update directory configuration
|
||||
|
||||
### 4. Static Routes (`static_routes.py`)
|
||||
- `/static/js/*` - JavaScript files for UX features
|
||||
- `/static/css/*` - CSS files for styling
|
||||
|
||||
### 5. Diagnostic Routes (`diagnostic_routes.py`)
|
||||
- `/api/diagnostics/network` - Network diagnostics
|
||||
- `/api/diagnostics/errors` - Error history
|
||||
- `/api/diagnostics/system-status` - System status summary
|
||||
- `/api/diagnostics/recovery/*` - Recovery endpoints
|
||||
|
||||
### 6. Config Routes (`config_routes.py`)
|
||||
- `/api/scheduler/config` - Scheduler configuration
|
||||
- `/api/logging/config` - Logging configuration
|
||||
- `/api/config/section/advanced` - Advanced configuration
|
||||
- `/api/config/backup*` - Configuration backup management
|
||||
|
||||
### 7. WebSocket Handlers (`websocket_handlers.py`)
|
||||
- `connect` - Client connection handler
|
||||
- `disconnect` - Client disconnection handler
|
||||
- `get_status` - Status request handler
|
||||
|
||||
## Changes Made to `app.py`
|
||||
|
||||
1. **Removed Routes**: All route definitions have been moved to their respective blueprint files
|
||||
2. **Added Imports**: Import statements for the new route blueprints
|
||||
3. **Blueprint Registration**: Register all blueprints with the Flask app
|
||||
4. **Global Variables**: Moved to appropriate route files where they're used
|
||||
5. **Placeholder Classes**: Moved to relevant route files
|
||||
6. **WebSocket Integration**: Set up socketio instance sharing with API routes
|
||||
|
||||
## Benefits
|
||||
|
||||
1. **Better Organization**: Routes are grouped by functionality
|
||||
2. **Maintainability**: Easier to find and modify specific route logic
|
||||
3. **Separation of Concerns**: Each file has a specific responsibility
|
||||
4. **Scalability**: Easy to add new routes in appropriate files
|
||||
5. **Testing**: Individual route groups can be tested separately
|
||||
6. **Code Reuse**: Common functionality can be shared between route files
|
||||
|
||||
## Usage
|
||||
|
||||
The Flask app now imports and registers all blueprints:
|
||||
|
||||
```python
|
||||
from web.routes import (
|
||||
auth_bp, auth_api_bp, api_bp, main_bp, static_bp,
|
||||
diagnostic_bp, config_bp
|
||||
)
|
||||
|
||||
app.register_blueprint(main_bp)
|
||||
app.register_blueprint(auth_bp)
|
||||
app.register_blueprint(auth_api_bp)
|
||||
app.register_blueprint(api_bp)
|
||||
app.register_blueprint(static_bp)
|
||||
app.register_blueprint(diagnostic_bp)
|
||||
app.register_blueprint(config_bp)
|
||||
```
|
||||
|
||||
## Error Handling
|
||||
|
||||
The `__init__.py` file includes graceful import handling, so if any route file has import errors, the application will continue to function with the available routes.
|
||||
|
||||
## Future Enhancements
|
||||
|
||||
- Add route-specific middleware
|
||||
- Implement route-level caching
|
||||
- Add route-specific rate limiting
|
||||
- Create route-specific documentation
|
||||
- Add route-specific testing
|
||||
@@ -1 +0,0 @@
|
||||
# Server package
|
||||
@@ -1,23 +1,7 @@
|
||||
|
||||
# --- Global UTF-8 logging setup (fix UnicodeEncodeError) ---
|
||||
import sys
|
||||
import io
|
||||
import logging
|
||||
try:
|
||||
if hasattr(sys.stdout, 'reconfigure'):
|
||||
sys.stdout.reconfigure(encoding='utf-8', errors='replace')
|
||||
handler = logging.StreamHandler(sys.stdout)
|
||||
else:
|
||||
utf8_stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8', errors='replace')
|
||||
handler = logging.StreamHandler(utf8_stdout)
|
||||
handler.setFormatter(logging.Formatter('[%(asctime)s] %(levelname)s: %(message)s', datefmt='%H:%M:%S'))
|
||||
root_logger = logging.getLogger()
|
||||
root_logger.handlers = []
|
||||
root_logger.addHandler(handler)
|
||||
root_logger.setLevel(logging.INFO)
|
||||
except Exception:
|
||||
logging.basicConfig(stream=sys.stdout, format='[%(asctime)s] %(levelname)s: %(message)s', datefmt='%H:%M:%S')
|
||||
|
||||
import os
|
||||
import threading
|
||||
from datetime import datetime
|
||||
@@ -33,30 +17,16 @@ from flask_socketio import SocketIO, emit
|
||||
import logging
|
||||
import atexit
|
||||
|
||||
from Main import SeriesApp
|
||||
from src.cli.Main import SeriesApp
|
||||
|
||||
# --- Fix Unicode logging error for Windows console ---
|
||||
import sys
|
||||
import io
|
||||
# --- Robust Unicode logging for Windows console ---
|
||||
try:
|
||||
if hasattr(sys.stdout, 'reconfigure'):
|
||||
handler = logging.StreamHandler(sys.stdout)
|
||||
handler.setFormatter(logging.Formatter('%(levelname)s: %(message)s'))
|
||||
handler.stream.reconfigure(encoding='utf-8')
|
||||
logging.getLogger().handlers = [handler]
|
||||
else:
|
||||
# Fallback for older Python versions
|
||||
utf8_stdout = io.TextIOWrapper(sys.stdout.buffer, encoding='utf-8', errors='replace')
|
||||
handler = logging.StreamHandler(utf8_stdout)
|
||||
handler.setFormatter(logging.Formatter('%(levelname)s: %(message)s'))
|
||||
logging.getLogger().handlers = [handler]
|
||||
except Exception:
|
||||
# Last resort fallback
|
||||
logging.basicConfig(stream=sys.stdout, format='%(levelname)s: %(message)s')
|
||||
|
||||
|
||||
from server.core.entities.series import Serie
|
||||
from server.core.entities import SerieList
|
||||
from server.infrastructure.file_system import SerieScanner
|
||||
from server.core import SerieScanner
|
||||
from server.infrastructure.providers.provider_factory import Loaders
|
||||
from web.controllers.auth_controller import session_manager, require_auth, optional_auth
|
||||
from config import config
|
||||
@@ -81,11 +51,6 @@ from shared.utils.process_utils import (with_process_lock, RESCAN_LOCK, DOWNLOAD
|
||||
# Import error handling and monitoring modules
|
||||
from web.middleware.error_handler import handle_api_errors
|
||||
|
||||
# Performance optimization modules - not yet implemented
|
||||
|
||||
# API integration and database modules - not yet implemented
|
||||
# User experience and accessibility modules - not yet implemented
|
||||
|
||||
app = Flask(__name__,
|
||||
template_folder='web/templates/base',
|
||||
static_folder='web/static')
|
||||
@@ -106,6 +71,66 @@ def handle_api_not_found(error):
|
||||
# For non-API routes, let Flask handle it normally
|
||||
return error
|
||||
|
||||
# Global error handler to log any unhandled exceptions
|
||||
@app.errorhandler(Exception)
|
||||
def handle_exception(e):
|
||||
logging.error("Unhandled exception occurred: %s", e, exc_info=True)
|
||||
if request.path.startswith('/api/'):
|
||||
return jsonify({'success': False, 'error': 'Internal Server Error'}), 500
|
||||
return "Internal Server Error", 500
|
||||
|
||||
# Register cleanup functions
|
||||
@atexit.register
|
||||
def cleanup_on_exit():
|
||||
"""Clean up resources on application exit."""
|
||||
try:
|
||||
# Additional cleanup functions will be added when features are implemented
|
||||
logging.info("Application cleanup completed")
|
||||
except Exception as e:
|
||||
logging.error(f"Error during cleanup: {e}")
|
||||
|
||||
|
||||
def rescan_callback():
|
||||
"""Callback for scheduled rescan operations."""
|
||||
try:
|
||||
# Reinit and scan
|
||||
series_app.SerieScanner.Reinit()
|
||||
series_app.SerieScanner.Scan()
|
||||
|
||||
# Refresh the series list
|
||||
series_app.List = SerieList.SerieList(series_app.directory_to_search)
|
||||
series_app.__InitList__()
|
||||
|
||||
return {"status": "success", "message": "Scheduled rescan completed"}
|
||||
except Exception as e:
|
||||
raise Exception(f"Scheduled rescan failed: {e}")
|
||||
|
||||
def download_callback():
|
||||
"""Callback for auto-download after scheduled rescan."""
|
||||
try:
|
||||
if not series_app or not series_app.List:
|
||||
return {"status": "skipped", "message": "No series data available"}
|
||||
|
||||
# Find series with missing episodes
|
||||
series_with_missing = []
|
||||
for serie in series_app.List.GetList():
|
||||
if serie.episodeDict:
|
||||
series_with_missing.append(serie)
|
||||
|
||||
if not series_with_missing:
|
||||
return {"status": "skipped", "message": "No series with missing episodes found"}
|
||||
|
||||
# Note: Actual download implementation would go here
|
||||
# For now, just return the count of series that would be downloaded
|
||||
return {
|
||||
"status": "started",
|
||||
"message": f"Auto-download initiated for {len(series_with_missing)} series",
|
||||
"series_count": len(series_with_missing)
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise Exception(f"Auto-download failed: {e}")
|
||||
|
||||
# Register all blueprints
|
||||
app.register_blueprint(download_queue_bp)
|
||||
app.register_blueprint(main_bp)
|
||||
@@ -120,35 +145,6 @@ app.register_blueprint(process_bp)
|
||||
app.register_blueprint(scheduler_bp)
|
||||
app.register_blueprint(logging_bp)
|
||||
app.register_blueprint(health_bp)
|
||||
# Additional blueprints will be registered when features are implemented
|
||||
|
||||
# Additional feature initialization will be added when features are implemented
|
||||
|
||||
# Global variables are now managed in their respective route files
|
||||
# Keep only series_app for backward compatibility
|
||||
series_app = None
|
||||
|
||||
def init_series_app(verbose=True):
|
||||
"""Initialize the SeriesApp with configuration directory."""
|
||||
global series_app
|
||||
try:
|
||||
directory_to_search = config.anime_directory
|
||||
if verbose:
|
||||
print(f"Initializing SeriesApp with directory: {directory_to_search}")
|
||||
series_app = SeriesApp(directory_to_search)
|
||||
if verbose:
|
||||
print(f"SeriesApp initialized successfully. List length: {len(series_app.List.GetList()) if series_app.List else 'No List'}")
|
||||
return series_app
|
||||
except Exception as e:
|
||||
print(f"Error initializing SeriesApp: {e}")
|
||||
import traceback
|
||||
traceback.print_exc()
|
||||
return None
|
||||
|
||||
def get_series_app():
|
||||
"""Get the current series app instance."""
|
||||
global series_app
|
||||
return series_app
|
||||
|
||||
# Register WebSocket handlers
|
||||
register_socketio_handlers(socketio)
|
||||
@@ -159,110 +155,17 @@ set_socketio(socketio)
|
||||
|
||||
# Initialize scheduler
|
||||
scheduler = init_scheduler(config, socketio)
|
||||
|
||||
def setup_scheduler_callbacks():
|
||||
"""Setup callbacks for scheduler operations."""
|
||||
|
||||
def rescan_callback():
|
||||
"""Callback for scheduled rescan operations."""
|
||||
try:
|
||||
# Reinit and scan
|
||||
series_app.SerieScanner.Reinit()
|
||||
series_app.SerieScanner.Scan()
|
||||
|
||||
# Refresh the series list
|
||||
series_app.List = SerieList.SerieList(series_app.directory_to_search)
|
||||
series_app.__InitList__()
|
||||
|
||||
return {"status": "success", "message": "Scheduled rescan completed"}
|
||||
except Exception as e:
|
||||
raise Exception(f"Scheduled rescan failed: {e}")
|
||||
|
||||
def download_callback():
|
||||
"""Callback for auto-download after scheduled rescan."""
|
||||
try:
|
||||
if not series_app or not series_app.List:
|
||||
return {"status": "skipped", "message": "No series data available"}
|
||||
|
||||
# Find series with missing episodes
|
||||
series_with_missing = []
|
||||
for serie in series_app.List.GetList():
|
||||
if serie.episodeDict:
|
||||
series_with_missing.append(serie)
|
||||
|
||||
if not series_with_missing:
|
||||
return {"status": "skipped", "message": "No series with missing episodes found"}
|
||||
|
||||
# Note: Actual download implementation would go here
|
||||
# For now, just return the count of series that would be downloaded
|
||||
return {
|
||||
"status": "started",
|
||||
"message": f"Auto-download initiated for {len(series_with_missing)} series",
|
||||
"series_count": len(series_with_missing)
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise Exception(f"Auto-download failed: {e}")
|
||||
|
||||
scheduler.set_rescan_callback(rescan_callback)
|
||||
scheduler.set_download_callback(download_callback)
|
||||
|
||||
# Setup scheduler callbacks
|
||||
setup_scheduler_callbacks()
|
||||
|
||||
# Advanced system initialization will be added when features are implemented
|
||||
|
||||
# Register cleanup functions
|
||||
@atexit.register
|
||||
def cleanup_on_exit():
|
||||
"""Clean up resources on application exit."""
|
||||
try:
|
||||
# Additional cleanup functions will be added when features are implemented
|
||||
logging.info("Application cleanup completed")
|
||||
except Exception as e:
|
||||
logging.error(f"Error during cleanup: {e}")
|
||||
|
||||
scheduler.set_rescan_callback(rescan_callback)
|
||||
scheduler.set_download_callback(download_callback)
|
||||
|
||||
if __name__ == '__main__':
|
||||
# Only run initialization and logging setup in the main process
|
||||
# This prevents duplicate initialization when Flask debug reloader starts
|
||||
|
||||
# Configure enhanced logging system first
|
||||
try:
|
||||
from server.infrastructure.logging.config import get_logger, logging_config
|
||||
logger = get_logger(__name__, 'webapp')
|
||||
logger.info("Enhanced logging system initialized")
|
||||
except ImportError:
|
||||
# Fallback to basic logging
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
logger = logging.getLogger(__name__)
|
||||
logger.warning("Using fallback logging - enhanced logging not available")
|
||||
|
||||
if __name__ == '__main__':
|
||||
# Configure enhanced logging system first
|
||||
try:
|
||||
from server.infrastructure.logging.config import get_logger, logging_config
|
||||
logger = get_logger(__name__, 'webapp')
|
||||
logger.info("Enhanced logging system initialized")
|
||||
except ImportError:
|
||||
# Fallback to basic logging with UTF-8 support
|
||||
import logging
|
||||
logging.basicConfig(
|
||||
level=logging.INFO,
|
||||
format='[%(asctime)s] %(levelname)s: %(message)s',
|
||||
datefmt='%H:%M:%S',
|
||||
handlers=[
|
||||
logging.StreamHandler(sys.stdout)
|
||||
]
|
||||
)
|
||||
logger = logging.getLogger(__name__)
|
||||
logger.warning("Using fallback logging - enhanced logging not available")
|
||||
|
||||
# Try to configure console for UTF-8 on Windows
|
||||
try:
|
||||
if hasattr(sys.stdout, 'reconfigure'):
|
||||
sys.stdout.reconfigure(encoding='utf-8', errors='replace')
|
||||
except Exception:
|
||||
pass
|
||||
from server.infrastructure.logging.config import get_logger, logging_config
|
||||
logger = get_logger(__name__, 'webapp')
|
||||
logger.info("Enhanced logging system initialized")
|
||||
|
||||
|
||||
# Only run startup messages and scheduler in the parent process
|
||||
if os.environ.get('WERKZEUG_RUN_MAIN') != 'true':
|
||||
@@ -270,17 +173,9 @@ if __name__ == '__main__':
|
||||
logger.info(f"Anime directory: {config.anime_directory}")
|
||||
logger.info(f"Log level: {config.log_level}")
|
||||
|
||||
# Start scheduler if enabled
|
||||
if hasattr(config, 'scheduled_rescan_enabled') and config.scheduled_rescan_enabled:
|
||||
logger.info(f"Starting scheduler - daily rescan at {getattr(config, 'scheduled_rescan_time', '03:00')}")
|
||||
scheduler.start_scheduler()
|
||||
else:
|
||||
logger.info("Scheduled operations disabled")
|
||||
|
||||
scheduler.start_scheduler()
|
||||
init_series_app(verbose=True)
|
||||
logger.info("Server will be available at http://localhost:5000")
|
||||
else:
|
||||
# Initialize the series app only in the reloader child process (the actual working process)
|
||||
init_series_app(verbose=True)
|
||||
|
||||
try:
|
||||
# Run with SocketIO
|
||||
|
||||
@@ -1,823 +0,0 @@
|
||||
import os
|
||||
import sys
|
||||
import threading
|
||||
from datetime import datetime
|
||||
from flask import Flask, render_template, request, jsonify, redirect, url_for
|
||||
from flask_socketio import SocketIO, emit
|
||||
import logging
|
||||
import atexit
|
||||
|
||||
# Add the parent directory to sys.path to import our modules
|
||||
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
|
||||
|
||||
from ..main import SeriesApp
|
||||
from .core.entities.series import Serie
|
||||
from .core.entities import SerieList
|
||||
from .infrastructure.file_system import SerieScanner
|
||||
from .infrastructure.providers.provider_factory import Loaders
|
||||
from .web.controllers.auth_controller import session_manager, require_auth, optional_auth
|
||||
from .config import config
|
||||
from .application.services.queue_service import download_queue_bp
|
||||
# TODO: Fix these imports
|
||||
# from process_api import process_bp
|
||||
# from scheduler_api import scheduler_bp
|
||||
# from logging_api import logging_bp
|
||||
# from config_api import config_bp
|
||||
# from scheduler import init_scheduler, get_scheduler
|
||||
# from process_locks import (with_process_lock, RESCAN_LOCK, DOWNLOAD_LOCK,
|
||||
# ProcessLockError, is_process_running, check_process_locks)
|
||||
|
||||
# TODO: Fix these imports
|
||||
# # Import new error handling and health monitoring modules
|
||||
# from error_handler import (
|
||||
# handle_api_errors, error_recovery_manager, recovery_strategies,
|
||||
# network_health_checker, NetworkError, DownloadError, RetryableError
|
||||
# )
|
||||
# from health_monitor import health_bp, health_monitor, init_health_monitoring, cleanup_health_monitoring
|
||||
|
||||
# Import performance optimization modules
|
||||
from performance_optimizer import (
|
||||
init_performance_monitoring, cleanup_performance_monitoring,
|
||||
speed_limiter, download_cache, memory_monitor, download_manager
|
||||
)
|
||||
from performance_api import performance_bp
|
||||
|
||||
# Import API integration modules
|
||||
from api_integration import (
|
||||
init_api_integrations, cleanup_api_integrations,
|
||||
webhook_manager, export_manager, notification_service
|
||||
)
|
||||
from api_endpoints import api_integration_bp
|
||||
|
||||
# Import database management modules
|
||||
from database_manager import (
|
||||
database_manager, anime_repository, backup_manager, storage_manager,
|
||||
init_database_system, cleanup_database_system
|
||||
)
|
||||
from database_api import database_bp
|
||||
|
||||
# Import health check endpoints
|
||||
from health_endpoints import health_bp
|
||||
|
||||
# Import user experience modules
|
||||
from keyboard_shortcuts import keyboard_manager
|
||||
from drag_drop import drag_drop_manager
|
||||
from bulk_operations import bulk_operations_manager
|
||||
from user_preferences import preferences_manager, preferences_bp
|
||||
from advanced_search import advanced_search_manager, search_bp
|
||||
from undo_redo_manager import undo_redo_manager, undo_redo_bp
|
||||
|
||||
# Import Mobile & Accessibility modules
|
||||
from mobile_responsive import mobile_responsive_manager
|
||||
from touch_gestures import touch_gesture_manager
|
||||
from accessibility_features import accessibility_manager
|
||||
from screen_reader_support import screen_reader_manager
|
||||
from color_contrast_compliance import color_contrast_manager
|
||||
from multi_screen_support import multi_screen_manager
|
||||
|
||||
app = Flask(__name__)
|
||||
app.config['SECRET_KEY'] = os.urandom(24)
|
||||
app.config['PERMANENT_SESSION_LIFETIME'] = 86400 # 24 hours
|
||||
socketio = SocketIO(app, cors_allowed_origins="*")
|
||||
|
||||
# Register blueprints
|
||||
app.register_blueprint(download_queue_bp)
|
||||
app.register_blueprint(process_bp)
|
||||
app.register_blueprint(scheduler_bp)
|
||||
app.register_blueprint(logging_bp)
|
||||
app.register_blueprint(config_bp)
|
||||
app.register_blueprint(health_bp)
|
||||
app.register_blueprint(performance_bp)
|
||||
app.register_blueprint(api_integration_bp)
|
||||
app.register_blueprint(database_bp)
|
||||
# Note: health_endpoints blueprint already imported above as health_bp, no need to register twice
|
||||
|
||||
# Register bulk operations API
|
||||
from bulk_api import bulk_api_bp
|
||||
app.register_blueprint(bulk_api_bp)
|
||||
|
||||
# Register user preferences API
|
||||
app.register_blueprint(preferences_bp)
|
||||
|
||||
# Register advanced search API
|
||||
app.register_blueprint(search_bp)
|
||||
|
||||
# Register undo/redo API
|
||||
app.register_blueprint(undo_redo_bp)
|
||||
|
||||
# Register Mobile & Accessibility APIs
|
||||
app.register_blueprint(color_contrast_manager.get_contrast_api_blueprint())
|
||||
|
||||
# Initialize user experience features
|
||||
# keyboard_manager doesn't need init_app - it's a simple utility class
|
||||
bulk_operations_manager.init_app(app)
|
||||
preferences_manager.init_app(app)
|
||||
advanced_search_manager.init_app(app)
|
||||
undo_redo_manager.init_app(app)
|
||||
|
||||
# Initialize Mobile & Accessibility features
|
||||
mobile_responsive_manager.init_app(app)
|
||||
touch_gesture_manager.init_app(app)
|
||||
accessibility_manager.init_app(app)
|
||||
screen_reader_manager.init_app(app)
|
||||
color_contrast_manager.init_app(app)
|
||||
multi_screen_manager.init_app(app)
|
||||
|
||||
# Global variables to store app state
|
||||
series_app = None
|
||||
is_scanning = False
|
||||
is_downloading = False
|
||||
is_paused = False
|
||||
download_thread = None
|
||||
download_progress = {}
|
||||
download_queue = []
|
||||
current_downloading = None
|
||||
download_stats = {
|
||||
'total_series': 0,
|
||||
'completed_series': 0,
|
||||
'current_episode': None,
|
||||
'total_episodes': 0,
|
||||
'completed_episodes': 0
|
||||
}
|
||||
|
||||
def init_series_app():
|
||||
"""Initialize the SeriesApp with configuration directory."""
|
||||
global series_app
|
||||
directory_to_search = config.anime_directory
|
||||
series_app = SeriesApp(directory_to_search)
|
||||
return series_app
|
||||
|
||||
# Initialize the app on startup
|
||||
init_series_app()
|
||||
|
||||
# Initialize scheduler
|
||||
scheduler = init_scheduler(config, socketio)
|
||||
|
||||
def setup_scheduler_callbacks():
|
||||
"""Setup callbacks for scheduler operations."""
|
||||
|
||||
def rescan_callback():
|
||||
"""Callback for scheduled rescan operations."""
|
||||
try:
|
||||
# Reinit and scan
|
||||
series_app.SerieScanner.Reinit()
|
||||
series_app.SerieScanner.Scan()
|
||||
|
||||
# Refresh the series list
|
||||
series_app.List = SerieList.SerieList(series_app.directory_to_search)
|
||||
series_app.__InitList__()
|
||||
|
||||
return {"status": "success", "message": "Scheduled rescan completed"}
|
||||
except Exception as e:
|
||||
raise Exception(f"Scheduled rescan failed: {e}")
|
||||
|
||||
def download_callback():
|
||||
"""Callback for auto-download after scheduled rescan."""
|
||||
try:
|
||||
if not series_app or not series_app.List:
|
||||
return {"status": "skipped", "message": "No series data available"}
|
||||
|
||||
# Find series with missing episodes
|
||||
series_with_missing = []
|
||||
for serie in series_app.List.GetList():
|
||||
if serie.episodeDict:
|
||||
series_with_missing.append(serie)
|
||||
|
||||
if not series_with_missing:
|
||||
return {"status": "skipped", "message": "No series with missing episodes found"}
|
||||
|
||||
# Note: Actual download implementation would go here
|
||||
# For now, just return the count of series that would be downloaded
|
||||
return {
|
||||
"status": "started",
|
||||
"message": f"Auto-download initiated for {len(series_with_missing)} series",
|
||||
"series_count": len(series_with_missing)
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise Exception(f"Auto-download failed: {e}")
|
||||
|
||||
scheduler.set_rescan_callback(rescan_callback)
|
||||
scheduler.set_download_callback(download_callback)
|
||||
|
||||
# Setup scheduler callbacks
|
||||
setup_scheduler_callbacks()
|
||||
|
||||
# Initialize error handling and health monitoring
|
||||
try:
|
||||
init_health_monitoring()
|
||||
logging.info("Health monitoring initialized successfully")
|
||||
except Exception as e:
|
||||
logging.error(f"Failed to initialize health monitoring: {e}")
|
||||
|
||||
# Initialize performance monitoring
|
||||
try:
|
||||
init_performance_monitoring()
|
||||
logging.info("Performance monitoring initialized successfully")
|
||||
except Exception as e:
|
||||
logging.error(f"Failed to initialize performance monitoring: {e}")
|
||||
|
||||
# Initialize API integrations
|
||||
try:
|
||||
init_api_integrations()
|
||||
# Set export manager's series app reference
|
||||
export_manager.series_app = series_app
|
||||
logging.info("API integrations initialized successfully")
|
||||
except Exception as e:
|
||||
logging.error(f"Failed to initialize API integrations: {e}")
|
||||
|
||||
# Initialize database system
|
||||
try:
|
||||
init_database_system()
|
||||
logging.info("Database system initialized successfully")
|
||||
except Exception as e:
|
||||
logging.error(f"Failed to initialize database system: {e}")
|
||||
|
||||
# Register cleanup functions
|
||||
@atexit.register
|
||||
def cleanup_on_exit():
|
||||
"""Clean up resources on application exit."""
|
||||
try:
|
||||
cleanup_health_monitoring()
|
||||
cleanup_performance_monitoring()
|
||||
cleanup_api_integrations()
|
||||
cleanup_database_system()
|
||||
logging.info("Application cleanup completed")
|
||||
except Exception as e:
|
||||
logging.error(f"Error during cleanup: {e}")
|
||||
|
||||
# UX JavaScript and CSS routes
|
||||
@app.route('/static/js/keyboard-shortcuts.js')
|
||||
def keyboard_shortcuts_js():
|
||||
"""Serve keyboard shortcuts JavaScript."""
|
||||
from flask import Response
|
||||
js_content = keyboard_manager.get_shortcuts_js()
|
||||
return Response(js_content, mimetype='application/javascript')
|
||||
|
||||
@app.route('/static/js/drag-drop.js')
|
||||
def drag_drop_js():
|
||||
"""Serve drag and drop JavaScript."""
|
||||
from flask import Response
|
||||
js_content = drag_drop_manager.get_drag_drop_js()
|
||||
return Response(js_content, mimetype='application/javascript')
|
||||
|
||||
@app.route('/static/js/bulk-operations.js')
|
||||
def bulk_operations_js():
|
||||
"""Serve bulk operations JavaScript."""
|
||||
from flask import Response
|
||||
js_content = bulk_operations_manager.get_bulk_operations_js()
|
||||
return Response(js_content, mimetype='application/javascript')
|
||||
|
||||
@app.route('/static/js/user-preferences.js')
|
||||
def user_preferences_js():
|
||||
"""Serve user preferences JavaScript."""
|
||||
from flask import Response
|
||||
js_content = preferences_manager.get_preferences_js()
|
||||
return Response(js_content, mimetype='application/javascript')
|
||||
|
||||
@app.route('/static/js/advanced-search.js')
|
||||
def advanced_search_js():
|
||||
"""Serve advanced search JavaScript."""
|
||||
from flask import Response
|
||||
js_content = advanced_search_manager.get_search_js()
|
||||
return Response(js_content, mimetype='application/javascript')
|
||||
|
||||
@app.route('/static/js/undo-redo.js')
|
||||
def undo_redo_js():
|
||||
"""Serve undo/redo JavaScript."""
|
||||
from flask import Response
|
||||
js_content = undo_redo_manager.get_undo_redo_js()
|
||||
return Response(js_content, mimetype='application/javascript')
|
||||
|
||||
# Mobile & Accessibility JavaScript routes
|
||||
@app.route('/static/js/mobile-responsive.js')
|
||||
def mobile_responsive_js():
|
||||
"""Serve mobile responsive JavaScript."""
|
||||
from flask import Response
|
||||
js_content = mobile_responsive_manager.get_mobile_responsive_js()
|
||||
return Response(js_content, mimetype='application/javascript')
|
||||
|
||||
@app.route('/static/js/touch-gestures.js')
|
||||
def touch_gestures_js():
|
||||
"""Serve touch gestures JavaScript."""
|
||||
from flask import Response
|
||||
js_content = touch_gesture_manager.get_touch_gesture_js()
|
||||
return Response(js_content, mimetype='application/javascript')
|
||||
|
||||
@app.route('/static/js/accessibility-features.js')
|
||||
def accessibility_features_js():
|
||||
"""Serve accessibility features JavaScript."""
|
||||
from flask import Response
|
||||
js_content = accessibility_manager.get_accessibility_js()
|
||||
return Response(js_content, mimetype='application/javascript')
|
||||
|
||||
@app.route('/static/js/screen-reader-support.js')
|
||||
def screen_reader_support_js():
|
||||
"""Serve screen reader support JavaScript."""
|
||||
from flask import Response
|
||||
js_content = screen_reader_manager.get_screen_reader_js()
|
||||
return Response(js_content, mimetype='application/javascript')
|
||||
|
||||
@app.route('/static/js/color-contrast-compliance.js')
|
||||
def color_contrast_compliance_js():
|
||||
"""Serve color contrast compliance JavaScript."""
|
||||
from flask import Response
|
||||
js_content = color_contrast_manager.get_contrast_js()
|
||||
return Response(js_content, mimetype='application/javascript')
|
||||
|
||||
@app.route('/static/js/multi-screen-support.js')
|
||||
def multi_screen_support_js():
|
||||
"""Serve multi-screen support JavaScript."""
|
||||
from flask import Response
|
||||
js_content = multi_screen_manager.get_multiscreen_js()
|
||||
return Response(js_content, mimetype='application/javascript')
|
||||
|
||||
@app.route('/static/css/ux-features.css')
|
||||
def ux_features_css():
|
||||
"""Serve UX features CSS."""
|
||||
from flask import Response
|
||||
css_content = f"""
|
||||
/* Keyboard shortcuts don't require additional CSS */
|
||||
|
||||
{drag_drop_manager.get_css()}
|
||||
|
||||
{bulk_operations_manager.get_css()}
|
||||
|
||||
{preferences_manager.get_css()}
|
||||
|
||||
{advanced_search_manager.get_css()}
|
||||
|
||||
{undo_redo_manager.get_css()}
|
||||
|
||||
/* Mobile & Accessibility CSS */
|
||||
{mobile_responsive_manager.get_css()}
|
||||
|
||||
{touch_gesture_manager.get_css()}
|
||||
|
||||
{accessibility_manager.get_css()}
|
||||
|
||||
{screen_reader_manager.get_css()}
|
||||
|
||||
{color_contrast_manager.get_contrast_css()}
|
||||
|
||||
{multi_screen_manager.get_multiscreen_css()}
|
||||
"""
|
||||
return Response(css_content, mimetype='text/css')
|
||||
|
||||
@app.route('/')
|
||||
@optional_auth
|
||||
def index():
|
||||
"""Main page route."""
|
||||
# Check process status
|
||||
process_status = {
|
||||
'rescan_running': is_process_running(RESCAN_LOCK),
|
||||
'download_running': is_process_running(DOWNLOAD_LOCK)
|
||||
}
|
||||
return render_template('index.html', process_status=process_status)
|
||||
|
||||
# Authentication routes
|
||||
@app.route('/login')
|
||||
def login():
|
||||
"""Login page."""
|
||||
if not config.has_master_password():
|
||||
return redirect(url_for('setup'))
|
||||
|
||||
if session_manager.is_authenticated():
|
||||
return redirect(url_for('index'))
|
||||
|
||||
return render_template('login.html',
|
||||
session_timeout=config.session_timeout_hours,
|
||||
max_attempts=config.max_failed_attempts,
|
||||
lockout_duration=config.lockout_duration_minutes)
|
||||
|
||||
@app.route('/setup')
|
||||
def setup():
|
||||
"""Initial setup page."""
|
||||
if config.has_master_password():
|
||||
return redirect(url_for('login'))
|
||||
|
||||
return render_template('setup.html', current_directory=config.anime_directory)
|
||||
|
||||
@app.route('/api/auth/setup', methods=['POST'])
|
||||
def auth_setup():
|
||||
"""Complete initial setup."""
|
||||
if config.has_master_password():
|
||||
return jsonify({
|
||||
'status': 'error',
|
||||
'message': 'Setup already completed'
|
||||
}), 400
|
||||
|
||||
try:
|
||||
data = request.get_json()
|
||||
password = data.get('password')
|
||||
directory = data.get('directory')
|
||||
|
||||
if not password or len(password) < 8:
|
||||
return jsonify({
|
||||
'status': 'error',
|
||||
'message': 'Password must be at least 8 characters long'
|
||||
}), 400
|
||||
|
||||
if not directory:
|
||||
return jsonify({
|
||||
'status': 'error',
|
||||
'message': 'Directory is required'
|
||||
}), 400
|
||||
|
||||
# Set master password and directory
|
||||
config.set_master_password(password)
|
||||
config.anime_directory = directory
|
||||
config.save_config()
|
||||
|
||||
# Reinitialize series app with new directory
|
||||
init_series_app()
|
||||
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'message': 'Setup completed successfully'
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
return jsonify({
|
||||
'status': 'error',
|
||||
'message': str(e)
|
||||
}), 500
|
||||
|
||||
@app.route('/api/auth/login', methods=['POST'])
|
||||
def auth_login():
|
||||
"""Authenticate user."""
|
||||
try:
|
||||
data = request.get_json()
|
||||
password = data.get('password')
|
||||
|
||||
if not password:
|
||||
return jsonify({
|
||||
'status': 'error',
|
||||
'message': 'Password is required'
|
||||
}), 400
|
||||
|
||||
# Verify password using session manager
|
||||
result = session_manager.login(password, request.remote_addr)
|
||||
|
||||
return jsonify(result)
|
||||
|
||||
except Exception as e:
|
||||
return jsonify({
|
||||
'status': 'error',
|
||||
'message': str(e)
|
||||
}), 500
|
||||
|
||||
@app.route('/api/auth/logout', methods=['POST'])
|
||||
@require_auth
|
||||
def auth_logout():
|
||||
"""Logout user."""
|
||||
session_manager.logout()
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'message': 'Logged out successfully'
|
||||
})
|
||||
|
||||
@app.route('/api/auth/status', methods=['GET'])
|
||||
def auth_status():
|
||||
"""Get authentication status."""
|
||||
return jsonify({
|
||||
'authenticated': session_manager.is_authenticated(),
|
||||
'has_master_password': config.has_master_password(),
|
||||
'setup_required': not config.has_master_password(),
|
||||
'session_info': session_manager.get_session_info()
|
||||
})
|
||||
|
||||
@app.route('/api/config/directory', methods=['POST'])
|
||||
@require_auth
|
||||
def update_directory():
|
||||
"""Update anime directory configuration."""
|
||||
try:
|
||||
data = request.get_json()
|
||||
new_directory = data.get('directory')
|
||||
|
||||
if not new_directory:
|
||||
return jsonify({
|
||||
'status': 'error',
|
||||
'message': 'Directory is required'
|
||||
}), 400
|
||||
|
||||
# Update configuration
|
||||
config.anime_directory = new_directory
|
||||
config.save_config()
|
||||
|
||||
# Reinitialize series app
|
||||
init_series_app()
|
||||
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'message': 'Directory updated successfully',
|
||||
'directory': new_directory
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
return jsonify({
|
||||
'status': 'error',
|
||||
'message': str(e)
|
||||
}), 500
|
||||
|
||||
@app.route('/api/series', methods=['GET'])
|
||||
@optional_auth
|
||||
def get_series():
|
||||
"""Get all series data."""
|
||||
try:
|
||||
if series_app is None or series_app.List is None:
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'series': [],
|
||||
'total_series': 0,
|
||||
'message': 'No series data available. Please perform a scan to load series.'
|
||||
})
|
||||
|
||||
# Get series data
|
||||
series_data = []
|
||||
for serie in series_app.List.GetList():
|
||||
series_data.append({
|
||||
'folder': serie.folder,
|
||||
'name': serie.name or serie.folder,
|
||||
'total_episodes': sum(len(episodes) for episodes in serie.episodeDict.values()),
|
||||
'missing_episodes': sum(len(episodes) for episodes in serie.episodeDict.values()),
|
||||
'status': 'ongoing',
|
||||
'episodes': {
|
||||
season: episodes
|
||||
for season, episodes in serie.episodeDict.items()
|
||||
}
|
||||
})
|
||||
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'series': series_data,
|
||||
'total_series': len(series_data)
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
# Log the error but don't return 500 to prevent page reload loops
|
||||
print(f"Error in get_series: {e}")
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'series': [],
|
||||
'total_series': 0,
|
||||
'message': 'Error loading series data. Please try rescanning.'
|
||||
})
|
||||
|
||||
@app.route('/api/rescan', methods=['POST'])
|
||||
@optional_auth
|
||||
def rescan_series():
|
||||
"""Rescan/reinit the series directory."""
|
||||
global is_scanning
|
||||
|
||||
# Check if rescan is already running using process lock
|
||||
if is_process_running(RESCAN_LOCK) or is_scanning:
|
||||
return jsonify({
|
||||
'status': 'error',
|
||||
'message': 'Rescan is already running. Please wait for it to complete.',
|
||||
'is_running': True
|
||||
}), 409
|
||||
|
||||
def scan_thread():
|
||||
global is_scanning
|
||||
|
||||
try:
|
||||
# Use process lock to prevent duplicate rescans
|
||||
@with_process_lock(RESCAN_LOCK, timeout_minutes=120)
|
||||
def perform_rescan():
|
||||
global is_scanning
|
||||
is_scanning = True
|
||||
|
||||
try:
|
||||
# Emit scanning started
|
||||
socketio.emit('scan_started')
|
||||
|
||||
# Reinit and scan
|
||||
series_app.SerieScanner.Reinit()
|
||||
series_app.SerieScanner.Scan(lambda folder, counter:
|
||||
socketio.emit('scan_progress', {
|
||||
'folder': folder,
|
||||
'counter': counter
|
||||
})
|
||||
)
|
||||
|
||||
# Refresh the series list
|
||||
series_app.List = SerieList.SerieList(series_app.directory_to_search)
|
||||
series_app.__InitList__()
|
||||
|
||||
# Emit scan completed
|
||||
socketio.emit('scan_completed')
|
||||
|
||||
except Exception as e:
|
||||
socketio.emit('scan_error', {'message': str(e)})
|
||||
raise
|
||||
finally:
|
||||
is_scanning = False
|
||||
|
||||
perform_rescan(_locked_by='web_interface')
|
||||
|
||||
except ProcessLockError:
|
||||
socketio.emit('scan_error', {'message': 'Rescan is already running'})
|
||||
except Exception as e:
|
||||
socketio.emit('scan_error', {'message': str(e)})
|
||||
|
||||
# Start scan in background thread
|
||||
threading.Thread(target=scan_thread, daemon=True).start()
|
||||
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'message': 'Rescan started'
|
||||
})
|
||||
|
||||
# Basic download endpoint - simplified for now
|
||||
@app.route('/api/download', methods=['POST'])
|
||||
@optional_auth
|
||||
def download_series():
|
||||
"""Download selected series."""
|
||||
global is_downloading
|
||||
|
||||
# Check if download is already running using process lock
|
||||
if is_process_running(DOWNLOAD_LOCK) or is_downloading:
|
||||
return jsonify({
|
||||
'status': 'error',
|
||||
'message': 'Download is already running. Please wait for it to complete.',
|
||||
'is_running': True
|
||||
}), 409
|
||||
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'message': 'Download functionality will be implemented with queue system'
|
||||
})
|
||||
|
||||
# WebSocket events for real-time updates
|
||||
@socketio.on('connect')
|
||||
def handle_connect():
|
||||
"""Handle client connection."""
|
||||
emit('status', {
|
||||
'message': 'Connected to server',
|
||||
'processes': {
|
||||
'rescan_running': is_process_running(RESCAN_LOCK),
|
||||
'download_running': is_process_running(DOWNLOAD_LOCK)
|
||||
}
|
||||
})
|
||||
|
||||
@socketio.on('disconnect')
|
||||
def handle_disconnect():
|
||||
"""Handle client disconnection."""
|
||||
print('Client disconnected')
|
||||
|
||||
@socketio.on('get_status')
|
||||
def handle_get_status():
|
||||
"""Handle status request."""
|
||||
emit('status_update', {
|
||||
'processes': {
|
||||
'rescan_running': is_process_running(RESCAN_LOCK),
|
||||
'download_running': is_process_running(DOWNLOAD_LOCK)
|
||||
},
|
||||
'series_count': len(series_app.List.GetList()) if series_app and series_app.List else 0
|
||||
})
|
||||
|
||||
# Error Recovery and Diagnostics Endpoints
|
||||
@app.route('/api/diagnostics/network')
|
||||
@handle_api_errors
|
||||
@optional_auth
|
||||
def network_diagnostics():
|
||||
"""Get network diagnostics and connectivity status."""
|
||||
try:
|
||||
network_status = network_health_checker.get_network_status()
|
||||
|
||||
# Test AniWorld connectivity
|
||||
aniworld_reachable = network_health_checker.check_url_reachability("https://aniworld.to")
|
||||
network_status['aniworld_reachable'] = aniworld_reachable
|
||||
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'data': network_status
|
||||
})
|
||||
except Exception as e:
|
||||
raise RetryableError(f"Network diagnostics failed: {e}")
|
||||
|
||||
@app.route('/api/diagnostics/errors')
|
||||
@handle_api_errors
|
||||
@optional_auth
|
||||
def get_error_history():
|
||||
"""Get recent error history."""
|
||||
try:
|
||||
recent_errors = error_recovery_manager.error_history[-50:] # Last 50 errors
|
||||
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'data': {
|
||||
'recent_errors': recent_errors,
|
||||
'total_errors': len(error_recovery_manager.error_history),
|
||||
'blacklisted_urls': list(error_recovery_manager.blacklisted_urls.keys())
|
||||
}
|
||||
})
|
||||
except Exception as e:
|
||||
raise RetryableError(f"Error history retrieval failed: {e}")
|
||||
|
||||
@app.route('/api/recovery/clear-blacklist', methods=['POST'])
|
||||
@handle_api_errors
|
||||
@require_auth
|
||||
def clear_blacklist():
|
||||
"""Clear URL blacklist."""
|
||||
try:
|
||||
error_recovery_manager.blacklisted_urls.clear()
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'message': 'URL blacklist cleared successfully'
|
||||
})
|
||||
except Exception as e:
|
||||
raise RetryableError(f"Blacklist clearing failed: {e}")
|
||||
|
||||
@app.route('/api/recovery/retry-counts')
|
||||
@handle_api_errors
|
||||
@optional_auth
|
||||
def get_retry_counts():
|
||||
"""Get retry statistics."""
|
||||
try:
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'data': {
|
||||
'retry_counts': error_recovery_manager.retry_counts,
|
||||
'total_retries': sum(error_recovery_manager.retry_counts.values())
|
||||
}
|
||||
})
|
||||
except Exception as e:
|
||||
raise RetryableError(f"Retry statistics retrieval failed: {e}")
|
||||
|
||||
@app.route('/api/diagnostics/system-status')
|
||||
@handle_api_errors
|
||||
@optional_auth
|
||||
def system_status_summary():
|
||||
"""Get comprehensive system status summary."""
|
||||
try:
|
||||
# Get health status
|
||||
health_status = health_monitor.get_current_health_status()
|
||||
|
||||
# Get network status
|
||||
network_status = network_health_checker.get_network_status()
|
||||
|
||||
# Get process status
|
||||
process_status = {
|
||||
'rescan_running': is_process_running(RESCAN_LOCK),
|
||||
'download_running': is_process_running(DOWNLOAD_LOCK)
|
||||
}
|
||||
|
||||
# Get error statistics
|
||||
error_stats = {
|
||||
'total_errors': len(error_recovery_manager.error_history),
|
||||
'recent_errors': len([e for e in error_recovery_manager.error_history
|
||||
if (datetime.now() - datetime.fromisoformat(e['timestamp'])).seconds < 3600]),
|
||||
'blacklisted_urls': len(error_recovery_manager.blacklisted_urls)
|
||||
}
|
||||
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'data': {
|
||||
'health': health_status,
|
||||
'network': network_status,
|
||||
'processes': process_status,
|
||||
'errors': error_stats,
|
||||
'timestamp': datetime.now().isoformat()
|
||||
}
|
||||
})
|
||||
except Exception as e:
|
||||
raise RetryableError(f"System status retrieval failed: {e}")
|
||||
|
||||
if __name__ == '__main__':
|
||||
# Clean up any expired locks on startup
|
||||
check_process_locks()
|
||||
|
||||
# Configure enhanced logging system
|
||||
try:
|
||||
from logging_config import get_logger, logging_config
|
||||
logger = get_logger(__name__, 'webapp')
|
||||
logger.info("Enhanced logging system initialized")
|
||||
except ImportError:
|
||||
# Fallback to basic logging
|
||||
logging.basicConfig(level=logging.INFO)
|
||||
logger = logging.getLogger(__name__)
|
||||
logger.warning("Using fallback logging - enhanced logging not available")
|
||||
|
||||
logger.info("Starting Aniworld Flask server...")
|
||||
logger.info(f"Anime directory: {config.anime_directory}")
|
||||
logger.info(f"Log level: {config.log_level}")
|
||||
|
||||
# Start scheduler if enabled
|
||||
if config.scheduled_rescan_enabled:
|
||||
logger.info(f"Starting scheduler - daily rescan at {config.scheduled_rescan_time}")
|
||||
scheduler.start_scheduler()
|
||||
else:
|
||||
logger.info("Scheduled operations disabled")
|
||||
|
||||
logger.info("Server will be available at http://localhost:5000")
|
||||
|
||||
try:
|
||||
# Run with SocketIO
|
||||
socketio.run(app, debug=True, host='0.0.0.0', port=5000, allow_unsafe_werkzeug=True)
|
||||
finally:
|
||||
# Clean shutdown
|
||||
if scheduler:
|
||||
scheduler.stop_scheduler()
|
||||
logger.info("Scheduler stopped")
|
||||
@@ -1,3 +0,0 @@
|
||||
"""
|
||||
Application services layer for business logic coordination.
|
||||
"""
|
||||
@@ -16,7 +16,7 @@ class UserPreferencesManager:
|
||||
|
||||
def __init__(self, app=None):
|
||||
self.app = app
|
||||
self.preferences_file = 'user_preferences.json'
|
||||
self.preferences_file = 'data/user_preferences.json'
|
||||
self.preferences = {} # Initialize preferences attribute
|
||||
self.default_preferences = {
|
||||
'ui': {
|
||||
@@ -76,7 +76,7 @@ class UserPreferencesManager:
|
||||
def init_app(self, app):
|
||||
"""Initialize with Flask app."""
|
||||
self.app = app
|
||||
self.preferences_file = os.path.join(app.instance_path, 'user_preferences.json')
|
||||
self.preferences_file = os.path.join(app.instance_path, 'data/user_preferences.json')
|
||||
|
||||
# Ensure instance path exists
|
||||
os.makedirs(app.instance_path, exist_ok=True)
|
||||
|
||||
0
src/server/cache/__init__.py
vendored
0
src/server/cache/__init__.py
vendored
@@ -1,49 +0,0 @@
|
||||
{
|
||||
"security": {
|
||||
"master_password_hash": "37b5bb3de81bce2d9c17e4f775536d618bdcb0f34aba599cc55b82b087a7ade7",
|
||||
"salt": "f8e09fa3f58d7ffece5d194108cb8c32bf0ad4da10e79d4bae4ef12dfce8ab57",
|
||||
"session_timeout_hours": 24,
|
||||
"max_failed_attempts": 5,
|
||||
"lockout_duration_minutes": 30
|
||||
},
|
||||
"anime": {
|
||||
"directory": "\\\\sshfs.r\\ubuntu@192.168.178.43\\media\\serien\\Serien",
|
||||
"download_threads": 3,
|
||||
"download_speed_limit": null,
|
||||
"auto_rescan_time": "03:00",
|
||||
"auto_download_after_rescan": false
|
||||
},
|
||||
"logging": {
|
||||
"level": "INFO",
|
||||
"enable_console_logging": true,
|
||||
"enable_console_progress": false,
|
||||
"enable_fail2ban_logging": true,
|
||||
"log_file": "aniworld.log",
|
||||
"max_log_size_mb": 10,
|
||||
"log_backup_count": 5
|
||||
},
|
||||
"providers": {
|
||||
"default_provider": "aniworld.to",
|
||||
"preferred_language": "German Dub",
|
||||
"fallback_providers": [
|
||||
"aniworld.to"
|
||||
],
|
||||
"provider_timeout": 30,
|
||||
"retry_attempts": 3,
|
||||
"provider_settings": {
|
||||
"aniworld.to": {
|
||||
"enabled": true,
|
||||
"priority": 1,
|
||||
"quality_preference": "720p"
|
||||
}
|
||||
}
|
||||
},
|
||||
"advanced": {
|
||||
"max_concurrent_downloads": 3,
|
||||
"download_buffer_size": 8192,
|
||||
"connection_timeout": 30,
|
||||
"read_timeout": 300,
|
||||
"enable_debug_mode": false,
|
||||
"cache_duration_minutes": 60
|
||||
}
|
||||
}
|
||||
@@ -9,7 +9,7 @@ from datetime import datetime, timedelta
|
||||
class Config:
|
||||
"""Configuration management for AniWorld Flask app."""
|
||||
|
||||
def __init__(self, config_file: str = "config.json"):
|
||||
def __init__(self, config_file: str = "data/config.json"):
|
||||
self.config_file = config_file
|
||||
self.default_config = {
|
||||
"security": {
|
||||
|
||||
@@ -1,8 +0,0 @@
|
||||
"""
|
||||
Domain entities for the AniWorld application.
|
||||
"""
|
||||
|
||||
from .SerieList import SerieList
|
||||
from .series import Serie
|
||||
|
||||
__all__ = ['SerieList', 'Serie']
|
||||
@@ -1,3 +0,0 @@
|
||||
"""
|
||||
Domain exceptions for the AniWorld application.
|
||||
"""
|
||||
@@ -1,3 +0,0 @@
|
||||
"""
|
||||
Domain interfaces and contracts for the AniWorld application.
|
||||
"""
|
||||
@@ -1,3 +0,0 @@
|
||||
"""
|
||||
Business use cases for the AniWorld application.
|
||||
"""
|
||||
@@ -1,3 +0,0 @@
|
||||
"""
|
||||
Infrastructure layer for external concerns implementation.
|
||||
"""
|
||||
@@ -1,353 +0,0 @@
|
||||
"""
|
||||
Logging configuration for AniWorld Flask application.
|
||||
Provides structured logging with different handlers for console, file, and fail2ban.
|
||||
"""
|
||||
|
||||
import logging
|
||||
import logging.handlers
|
||||
import os
|
||||
import sys
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
from config import config
|
||||
|
||||
|
||||
class UnicodeStreamHandler(logging.StreamHandler):
|
||||
"""Custom stream handler that safely handles Unicode characters."""
|
||||
|
||||
def __init__(self, stream=None):
|
||||
super().__init__(stream)
|
||||
|
||||
def emit(self, record):
|
||||
try:
|
||||
msg = self.format(record)
|
||||
stream = self.stream
|
||||
|
||||
# Handle Unicode encoding issues on Windows
|
||||
if hasattr(stream, 'encoding') and stream.encoding:
|
||||
try:
|
||||
# Try to encode with the stream's encoding
|
||||
encoded_msg = msg.encode(stream.encoding, errors='replace').decode(stream.encoding)
|
||||
stream.write(encoded_msg + self.terminator)
|
||||
except (UnicodeEncodeError, UnicodeDecodeError):
|
||||
# Fallback: replace problematic characters
|
||||
safe_msg = msg.encode('ascii', errors='replace').decode('ascii')
|
||||
stream.write(safe_msg + self.terminator)
|
||||
else:
|
||||
# No encoding info, write directly but catch errors
|
||||
try:
|
||||
stream.write(msg + self.terminator)
|
||||
except UnicodeEncodeError:
|
||||
# Last resort: ASCII-only output
|
||||
safe_msg = msg.encode('ascii', errors='replace').decode('ascii')
|
||||
stream.write(safe_msg + self.terminator)
|
||||
|
||||
self.flush()
|
||||
except RecursionError:
|
||||
raise
|
||||
except Exception:
|
||||
self.handleError(record)
|
||||
|
||||
|
||||
class Fail2BanFormatter(logging.Formatter):
|
||||
"""Custom formatter for fail2ban compatible authentication failure logs."""
|
||||
|
||||
def format(self, record):
|
||||
if hasattr(record, 'client_ip') and hasattr(record, 'username'):
|
||||
# Format: "authentication failure for [IP] user [username]"
|
||||
return f"authentication failure for [{record.client_ip}] user [{record.username}]"
|
||||
return super().format(record)
|
||||
|
||||
|
||||
class StructuredFormatter(logging.Formatter):
|
||||
"""Enhanced formatter for structured logging with consistent format."""
|
||||
|
||||
def format(self, record):
|
||||
# Add timestamp if not present
|
||||
if not hasattr(record, 'asctime'):
|
||||
record.asctime = datetime.now().strftime('%Y-%m-%d %H:%M:%S')
|
||||
|
||||
# Add component info
|
||||
component = getattr(record, 'component', record.name)
|
||||
|
||||
# Safely get message and handle Unicode
|
||||
try:
|
||||
message = record.getMessage()
|
||||
except (UnicodeEncodeError, UnicodeDecodeError):
|
||||
message = str(record.msg)
|
||||
|
||||
# Format: timestamp - level - component - function - message
|
||||
formatted = f"{record.asctime} - {record.levelname:8} - {component:15} - {record.funcName:20} - {message}"
|
||||
|
||||
# Add exception info if present
|
||||
if record.exc_info:
|
||||
formatted += f"\n{self.formatException(record.exc_info)}"
|
||||
|
||||
return formatted
|
||||
|
||||
|
||||
class ConsoleOnlyFormatter(logging.Formatter):
|
||||
"""Minimal formatter for console output - only essential information."""
|
||||
|
||||
def format(self, record):
|
||||
# Only show timestamp, level and message for console
|
||||
timestamp = datetime.now().strftime('%H:%M:%S')
|
||||
try:
|
||||
message = record.getMessage()
|
||||
# Ensure the message can be safely encoded
|
||||
if isinstance(message, str):
|
||||
# Replace problematic Unicode characters with safe alternatives
|
||||
message = message.encode('ascii', errors='replace').decode('ascii')
|
||||
except (UnicodeEncodeError, UnicodeDecodeError):
|
||||
message = str(record.msg)
|
||||
|
||||
return f"[{timestamp}] {record.levelname}: {message}"
|
||||
|
||||
|
||||
class LoggingConfig:
    """Centralized logging configuration manager.

    Instantiating this class has global side effects: it creates the ``logs``
    directory, clears and reconfigures the root logger's handlers, and wires
    up two dedicated non-propagating loggers ('auth_failures' and
    'download_progress') with rotating file handlers.
    """

    def __init__(self) -> None:
        # Relative paths: files end up under the process working directory.
        self.log_directory = "logs"
        self.main_log_file = "aniworld.log"
        self.auth_log_file = "auth_failures.log"
        self.download_log_file = "downloads.log"

        # Create logs directory if it doesn't exist
        os.makedirs(self.log_directory, exist_ok=True)

        # Configure loggers
        self._setup_loggers()

    def _setup_loggers(self) -> None:
        """Setup all loggers with appropriate handlers and formatters."""

        # Get log level from config
        # NOTE(review): `config` is a module-level object defined elsewhere;
        # getattr defaults make missing settings non-fatal.
        log_level = getattr(config, 'log_level', 'INFO')
        console_logging = getattr(config, 'enable_console_logging', True)
        console_progress = getattr(config, 'enable_console_progress', False)

        # Convert string log level to logging constant
        # (unknown level names silently fall back to INFO).
        numeric_level = getattr(logging, log_level.upper(), logging.INFO)

        # Clear existing handlers
        # Only the root logger is reset here; the dedicated loggers below
        # would accumulate duplicate handlers if this class is instantiated
        # more than once — TODO confirm single instantiation.
        logging.root.handlers.clear()

        # Root logger configuration
        root_logger = logging.getLogger()
        root_logger.setLevel(logging.DEBUG)  # Capture everything, filter at handler level

        # File handler for main application log
        file_handler = logging.handlers.RotatingFileHandler(
            os.path.join(self.log_directory, self.main_log_file),
            maxBytes=10*1024*1024,  # 10MB
            backupCount=5
        )
        file_handler.setLevel(logging.DEBUG)
        file_handler.setFormatter(StructuredFormatter())

        # Console handler (optional, controlled by config)
        # UnicodeStreamHandler is defined elsewhere in this file; presumably a
        # StreamHandler subclass hardened against encoding errors — verify.
        if console_logging:
            console_handler = UnicodeStreamHandler(sys.stdout)
            console_handler.setLevel(numeric_level)
            console_handler.setFormatter(ConsoleOnlyFormatter())
            root_logger.addHandler(console_handler)

        root_logger.addHandler(file_handler)

        # Fail2ban authentication logger
        self._setup_auth_logger()

        # Download progress logger (separate from console)
        self._setup_download_logger()

        # Configure third-party library loggers to reduce noise
        self._configure_third_party_loggers()

        # Suppress progress bars in console if disabled
        if not console_progress:
            self._suppress_progress_output()

    def _setup_auth_logger(self) -> None:
        """Setup dedicated logger for authentication failures (fail2ban compatible)."""
        auth_logger = logging.getLogger('auth_failures')
        auth_logger.setLevel(logging.INFO)
        auth_logger.propagate = False  # Don't propagate to root logger

        # File handler for authentication failures
        auth_handler = logging.handlers.RotatingFileHandler(
            os.path.join(self.log_directory, self.auth_log_file),
            maxBytes=5*1024*1024,  # 5MB
            backupCount=3
        )
        auth_handler.setLevel(logging.INFO)
        # Fail2BanFormatter is defined earlier in this file; it emits the
        # "authentication failure for [ip] user [name]" line fail2ban matches.
        auth_handler.setFormatter(Fail2BanFormatter())

        auth_logger.addHandler(auth_handler)

    def _setup_download_logger(self) -> None:
        """Setup dedicated logger for download progress (separate from console)."""
        download_logger = logging.getLogger('download_progress')
        download_logger.setLevel(logging.INFO)
        download_logger.propagate = False  # Don't propagate to root logger

        # File handler for download progress
        download_handler = logging.handlers.RotatingFileHandler(
            os.path.join(self.log_directory, self.download_log_file),
            maxBytes=20*1024*1024,  # 20MB
            backupCount=3
        )
        download_handler.setLevel(logging.INFO)
        download_handler.setFormatter(StructuredFormatter())

        download_logger.addHandler(download_handler)

    def _configure_third_party_loggers(self) -> None:
        """Configure third-party library loggers to reduce noise."""
        # Suppress noisy third-party loggers
        noisy_loggers = [
            'urllib3.connectionpool',
            'charset_normalizer',
            'requests.packages.urllib3',
            'werkzeug',
            'socketio.server',
            'engineio.server'
        ]

        # Raising each logger's own level drops DEBUG/INFO chatter before it
        # reaches any handler.
        for logger_name in noisy_loggers:
            logger = logging.getLogger(logger_name)
            logger.setLevel(logging.WARNING)

    def _suppress_progress_output(self) -> None:
        """Suppress progress bar output from console."""
        # This will be used to control progress bar display
        # The actual progress bars should check this setting
        pass

    def get_logger(self, name: str, component: Optional[str] = None) -> logging.Logger:
        """Get a logger instance with optional component name.

        When *component* is given, the returned object is a LoggerAdapter that
        injects ``extra['component']`` into every record, which
        StructuredFormatter then prints in its component column.
        """
        logger = logging.getLogger(name)

        # Add component info for structured logging
        if component:
            # Create a custom LoggerAdapter to add component info
            class ComponentAdapter(logging.LoggerAdapter):
                def process(self, msg, kwargs):
                    return msg, kwargs

                # Overrides the adapter's private _log to force the component
                # into `extra`; relies on LoggerAdapter internals — fragile
                # across Python versions, TODO confirm against target version.
                def _log(self, level, msg, args, exc_info=None, extra=None, stack_info=False):
                    if extra is None:
                        extra = {}
                    extra['component'] = component
                    return self.logger._log(level, msg, args, exc_info, extra, stack_info)

            return ComponentAdapter(logger, {})

        return logger

    def log_auth_failure(self, client_ip: str, username: str = "unknown"):
        """Log authentication failure in fail2ban compatible format."""
        auth_logger = logging.getLogger('auth_failures')

        # Create log record with custom attributes
        record = logging.LogRecord(
            name='auth_failures',
            level=logging.INFO,
            pathname='',
            lineno=0,
            msg='Authentication failure',
            args=(),
            exc_info=None
        )
        # Fail2BanFormatter reads these two attributes off the record.
        record.client_ip = client_ip
        record.username = username

        auth_logger.handle(record)

    def log_download_progress(self, series_name: str, episode: str, progress: float,
                              speed: str = "", eta: str = ""):
        """Log download progress to dedicated download log."""
        download_logger = logging.getLogger('download_progress')

        message = f"Downloading {series_name} - {episode} - Progress: {progress:.1f}%"
        if speed:
            message += f" - Speed: {speed}"
        if eta:
            message += f" - ETA: {eta}"

        download_logger.info(message)

    def update_log_level(self, level: str) -> bool:
        """Update the log level for console output.

        Returns True on success, False if *level* is not a valid level name.
        """
        try:
            # No default: an unknown name raises AttributeError (caught below).
            numeric_level = getattr(logging, level.upper())

            # Update console handler level
            # Only the first stdout StreamHandler is adjusted.
            root_logger = logging.getLogger()
            for handler in root_logger.handlers:
                if isinstance(handler, logging.StreamHandler) and handler.stream == sys.stdout:
                    handler.setLevel(numeric_level)
                    break

            # Update config
            config.set('logging.level', level.upper())
            return True

        except AttributeError:
            return False

    def get_log_files(self) -> list:
        """Get list of current log files with their sizes.

        Returns a list of dicts with name, size (bytes), size_mb, ISO-format
        modification time, and full path. Rotated backups (``*.log.1`` …) are
        excluded by the ``.log`` suffix filter.
        """
        log_files = []

        for filename in os.listdir(self.log_directory):
            if filename.endswith('.log'):
                file_path = os.path.join(self.log_directory, filename)
                file_size = os.path.getsize(file_path)
                file_modified = datetime.fromtimestamp(os.path.getmtime(file_path))

                log_files.append({
                    'name': filename,
                    'size': file_size,
                    'size_mb': round(file_size / (1024 * 1024), 2),
                    'modified': file_modified.isoformat(),
                    'path': file_path
                })

        return log_files

    def cleanup_old_logs(self, days: int = 30) -> list:
        """Clean up log files older than specified days.

        The main ``aniworld.log`` is never removed. Returns the filenames
        deleted; removal errors are deliberately ignored (best effort).
        """
        import time

        cutoff_time = time.time() - (days * 24 * 60 * 60)
        cleaned_files = []

        for filename in os.listdir(self.log_directory):
            if filename.endswith('.log') and not filename.startswith('aniworld.log'):
                file_path = os.path.join(self.log_directory, filename)
                if os.path.getmtime(file_path) < cutoff_time:
                    try:
                        os.remove(file_path)
                        cleaned_files.append(filename)
                    except OSError:
                        # Best effort: a locked/vanished file is not an error here.
                        pass

        return cleaned_files
|
||||
|
||||
|
||||
# Global logging configuration instance
# NOTE: created at import time, so importing this module has the side effect
# of creating the "logs" directory and reconfiguring the root logger.
logging_config = LoggingConfig()
|
||||
|
||||
def get_logger(name: str, component: Optional[str] = None) -> logging.Logger:
    """Module-level shortcut that delegates to the global logging config."""
    # Keep a single point of truth for logger construction in LoggingConfig.
    logger = logging_config.get_logger(name, component)
    return logger
|
||||
|
||||
def log_auth_failure(client_ip: str, username: str = "unknown"):
    """Forward an authentication-failure event to the global logging config."""
    # Thin delegation wrapper; record construction lives in LoggingConfig.
    logging_config.log_auth_failure(client_ip, username)
|
||||
|
||||
def log_download_progress(series_name: str, episode: str, progress: float,
                          speed: str = "", eta: str = ""):
    """Relay download-progress details to the global logging configuration."""
    # Delegates so callers need not hold a LoggingConfig reference themselves.
    logging_config.log_download_progress(series_name, episode, progress, speed, eta)
|
||||
@@ -9328,3 +9328,465 @@
|
||||
2025-09-29 15:56:13 - INFO - application.services.scheduler_service - stop_scheduler - Scheduled operations stopped
|
||||
2025-09-29 15:56:13 - INFO - __main__ - <module> - Scheduler stopped
|
||||
2025-09-29 15:56:13 - INFO - root - cleanup_on_exit - Application cleanup completed
|
||||
2025-09-29 16:18:51 - INFO - __main__ - <module> - Enhanced logging system initialized
|
||||
2025-09-29 16:18:51 - INFO - __main__ - <module> - Enhanced logging system initialized
|
||||
2025-09-29 16:18:51 - INFO - __main__ - <module> - Starting Aniworld Flask server...
|
||||
2025-09-29 16:18:51 - INFO - __main__ - <module> - Anime directory: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien
|
||||
2025-09-29 16:18:51 - INFO - __main__ - <module> - Log level: INFO
|
||||
2025-09-29 16:18:51 - INFO - __main__ - <module> - Scheduled operations disabled
|
||||
2025-09-29 16:18:51 - INFO - __main__ - <module> - Server will be available at http://localhost:5000
|
||||
2025-09-29 16:18:53 - INFO - __main__ - <module> - Enhanced logging system initialized
|
||||
2025-09-29 16:18:53 - INFO - __main__ - <module> - Enhanced logging system initialized
|
||||
2025-09-29 16:18:53 - INFO - root - __init__ - Initialized Loader with base path: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien
|
||||
2025-09-29 16:18:53 - INFO - root - load_series - Scanning anime folders in: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien
|
||||
2025-09-29 16:18:53 - WARNING - root - load_series - Skipping .deletedByTMM - No data folder found
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\2.5 Dimensional Seduction (2024)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\2.5 Dimensional Seduction (2024)\data for 2.5 Dimensional Seduction (2024)
|
||||
2025-09-29 16:18:53 - WARNING - root - load_series - Skipping 25-dimensional-seduction - No data folder found
|
||||
2025-09-29 16:18:53 - WARNING - root - load_series - Skipping 25-sai no Joshikousei (2018) - No data folder found
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\7th Time Loop The Villainess Enjoys a Carefree Life Married to Her Worst Enemy! (2024)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\7th Time Loop The Villainess Enjoys a Carefree Life Married to Her Worst Enemy! (2024)\data for 7th Time Loop The Villainess Enjoys a Carefree Life Married to Her Worst Enemy! (2024)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\9-nine-rulers-crown\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\9-nine-rulers-crown\data for 9-nine-rulers-crown
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\A Couple of Cuckoos (2022)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\A Couple of Cuckoos (2022)\data for A Couple of Cuckoos (2022)
|
||||
2025-09-29 16:18:53 - WARNING - root - load_series - Skipping A Time Called You (2023) - No data folder found
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\A.I.C.O. Incarnation (2018)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\A.I.C.O. Incarnation (2018)\data for A.I.C.O. Incarnation (2018)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Aesthetica of a Rogue Hero (2012)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Aesthetica of a Rogue Hero (2012)\data for Aesthetica of a Rogue Hero (2012)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Alya Sometimes Hides Her Feelings in Russian (2024)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Alya Sometimes Hides Her Feelings in Russian (2024)\data for Alya Sometimes Hides Her Feelings in Russian (2024)
|
||||
2025-09-29 16:18:53 - WARNING - root - load_series - Skipping American Horror Story (2011) - No data folder found
|
||||
2025-09-29 16:18:53 - WARNING - root - load_series - Skipping Andor (2022) - No data folder found
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Angels of Death (2018)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Angels of Death (2018)\data for Angels of Death (2018)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Aokana Four Rhythm Across the Blue (2016)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Aokana Four Rhythm Across the Blue (2016)\data for Aokana Four Rhythm Across the Blue (2016)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Arifureta (2019)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Arifureta (2019)\data for Arifureta (2019)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\As a Reincarnated Aristocrat, I'll Use My Appraisal Skill to Rise in the World (2024)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\As a Reincarnated Aristocrat, I'll Use My Appraisal Skill to Rise in the World (2024)\data for As a Reincarnated Aristocrat, I'll Use My Appraisal Skill to Rise in the World (2024)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\BOFURI I Don't Want to Get Hurt, so I'll Max Out My Defense. (2020)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\BOFURI I Don't Want to Get Hurt, so I'll Max Out My Defense. (2020)\data for BOFURI I Don't Want to Get Hurt, so I'll Max Out My Defense. (2020)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Black Butler (2008)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Black Butler (2008)\data for Black Butler (2008)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Black Clover (2017)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Black Clover (2017)\data for Black Clover (2017)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blast of Tempest (2012)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blast of Tempest (2012)\data for Blast of Tempest (2012)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blood Lad (2013)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blood Lad (2013)\data for Blood Lad (2013)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blue Box (2024)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blue Box (2024)\data for Blue Box (2024)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blue Exorcist (2011)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blue Exorcist (2011)\data for Blue Exorcist (2011)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Bogus Skill Fruitmaster About That Time I Became Able to Eat Unlimited Numbers of Skill Fruits (That Kill You) (2025)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Bogus Skill Fruitmaster About That Time I Became Able to Eat Unlimited Numbers of Skill Fruits (That Kill You) (2025)\data for Bogus Skill Fruitmaster About That Time I Became Able to Eat Unlimited Numbers of Skill Fruits (That Kill You) (2025)
|
||||
2025-09-29 16:18:53 - WARNING - root - load_series - Skipping Boys Over Flowers (2009) - No data folder found
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Burst Angel (2004)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Burst Angel (2004)\data for Burst Angel (2004)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\By the Grace of the Gods (2020)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\By the Grace of the Gods (2020)\data for By the Grace of the Gods (2020)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Call of the Night (2022)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Call of the Night (2022)\data for Call of the Night (2022)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Campfire Cooking in Another World with My Absurd Skill (2023)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Campfire Cooking in Another World with My Absurd Skill (2023)\data for Campfire Cooking in Another World with My Absurd Skill (2023)
|
||||
2025-09-29 16:18:53 - WARNING - root - load_series - Skipping Celebrity (2023) - No data folder found
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Chainsaw Man (2022)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Chainsaw Man (2022)\data for Chainsaw Man (2022)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Charlotte (2015)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Charlotte (2015)\data for Charlotte (2015)
|
||||
2025-09-29 16:18:53 - WARNING - root - load_series - Skipping Cherish the Day (2020) - No data folder found
|
||||
2025-09-29 16:18:53 - WARNING - root - load_series - Skipping Chernobyl (2019) - No data folder found
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Chillin’ in Another World with Level 2 Super Cheat Powers (2024)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Chillin’ in Another World with Level 2 Super Cheat Powers (2024)\data for Chillin’ in Another World with Level 2 Super Cheat Powers (2024)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Clannad (2007)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Clannad (2007)\data for Clannad (2007)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Classroom of the Elite (2017)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Classroom of the Elite (2017)\data for Classroom of the Elite (2017)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Clevatess (2025)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Clevatess (2025)\data for Clevatess (2025)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\DAN DA DAN (2024)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\DAN DA DAN (2024)\data for DAN DA DAN (2024)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Danmachi Is It Wrong to Try to Pick Up Girls in a Dungeon (2015)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Danmachi Is It Wrong to Try to Pick Up Girls in a Dungeon (2015)\data for Danmachi Is It Wrong to Try to Pick Up Girls in a Dungeon (2015)
|
||||
2025-09-29 16:18:53 - WARNING - root - load_series - Skipping Das Buch von Boba Fett (2021) - No data folder found
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Date a Live (2013)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Date a Live (2013)\data for Date a Live (2013)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dead Mount Death Play (2023)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dead Mount Death Play (2023)\data for Dead Mount Death Play (2023)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Deadman Wonderland (2011)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Deadman Wonderland (2011)\data for Deadman Wonderland (2011)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dealing with Mikadono Sisters Is a Breeze (2025)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dealing with Mikadono Sisters Is a Breeze (2025)\data for Dealing with Mikadono Sisters Is a Breeze (2025)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Delicious in Dungeon (2024)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Delicious in Dungeon (2024)\data for Delicious in Dungeon (2024)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Demon Lord, Retry! (2019)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Demon Lord, Retry! (2019)\data for Demon Lord, Retry! (2019)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Demon Slave - The Chained Soldier (2024)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Demon Slave - The Chained Soldier (2024)\data for Demon Slave - The Chained Soldier (2024)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Demon Slayer Kimetsu no Yaiba (2019)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Demon Slayer Kimetsu no Yaiba (2019)\data for Demon Slayer Kimetsu no Yaiba (2019)
|
||||
2025-09-29 16:18:53 - WARNING - root - load_series - Skipping Der Herr der Ringe Die Ringe der Macht (2022) - No data folder found
|
||||
2025-09-29 16:18:53 - WARNING - root - load_series - Skipping Devil in Ohio (2022) - No data folder found
|
||||
2025-09-29 16:18:53 - WARNING - root - load_series - Skipping Die Bibel (2013) - No data folder found
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Die Tagebücher der Apothekerin (2023)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Die Tagebücher der Apothekerin (2023)\data for Die Tagebücher der Apothekerin (2023)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Domestic Girlfriend (2019)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Domestic Girlfriend (2019)\data for Domestic Girlfriend (2019)
|
||||
2025-09-29 16:18:53 - WARNING - root - load_series - Skipping Doona! (2023) - No data folder found
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dr. STONE (2019)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dr. STONE (2019)\data for Dr. STONE (2019)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dragonball Super (2015)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dragonball Super (2015)\data for Dragonball Super (2015)
|
||||
2025-09-29 16:18:53 - WARNING - root - load_series - Skipping Failure Frame I Became the Strongest and Annihilated Everything With Low-Level Spells (2024) - No data folder found
|
||||
2025-09-29 16:18:53 - WARNING - root - load_series - Skipping Fallout (2024) - No data folder found
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Farming Life in Another World (2023)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Farming Life in Another World (2023)\data for Farming Life in Another World (2023)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Frieren - Nach dem Ende der Reise (2023)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Frieren - Nach dem Ende der Reise (2023)\data for Frieren - Nach dem Ende der Reise (2023)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Fruits Basket (2019)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Fruits Basket (2019)\data for Fruits Basket (2019)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Gachiakuta (2025)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Gachiakuta (2025)\data for Gachiakuta (2025)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Gate (2015)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Gate (2015)\data for Gate (2015)
|
||||
2025-09-29 16:18:53 - WARNING - root - load_series - Skipping Generation der Verdammten (2014) - No data folder found
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Girls und Panzer (2012)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Girls und Panzer (2012)\data for Girls und Panzer (2012)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Gleipnir (2020)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Gleipnir (2020)\data for Gleipnir (2020)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Golden Time (2013)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Golden Time (2013)\data for Golden Time (2013)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Grimgar, Ashes and Illusions (2016)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Grimgar, Ashes and Illusions (2016)\data for Grimgar, Ashes and Illusions (2016)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Harem in the Labyrinth of Another World (2022)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Harem in the Labyrinth of Another World (2022)\data for Harem in the Labyrinth of Another World (2022)
|
||||
2025-09-29 16:18:53 - WARNING - root - load_series - Skipping Highschool D×D (2012) - No data folder found
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Hinamatsuri (2018)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Hinamatsuri (2018)\data for Hinamatsuri (2018)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I Got a Cheat Skill in Another World and Became Unrivaled in The Real World Too (2023)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I Got a Cheat Skill in Another World and Became Unrivaled in The Real World Too (2023)\data for I Got a Cheat Skill in Another World and Became Unrivaled in The Real World Too (2023)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I Parry Everything What Do You Mean I’m the Strongest I’m Not Even an Adventurer Yet! (2024)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I Parry Everything What Do You Mean I’m the Strongest I’m Not Even an Adventurer Yet! (2024)\data for I Parry Everything What Do You Mean I’m the Strongest I’m Not Even an Adventurer Yet! (2024)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I'm the Evil Lord of an Intergalactic Empire! (2025)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I'm the Evil Lord of an Intergalactic Empire! (2025)\data for I'm the Evil Lord of an Intergalactic Empire! (2025)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I've Been Killing Slimes for 300 Years and Maxed Out My Level (2021)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I've Been Killing Slimes for 300 Years and Maxed Out My Level (2021)\data for I've Been Killing Slimes for 300 Years and Maxed Out My Level (2021)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\In the Land of Leadale (2022)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\In the Land of Leadale (2022)\data for In the Land of Leadale (2022)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Ishura (2024)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Ishura (2024)\data for Ishura (2024)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I’ll Become a Villainess Who Goes Down in History (2024)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I’ll Become a Villainess Who Goes Down in History (2024)\data for I’ll Become a Villainess Who Goes Down in History (2024)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\JUJUTSU KAISEN (2020)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\JUJUTSU KAISEN (2020)\data for JUJUTSU KAISEN (2020)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kaguya-sama Love is War (2019)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kaguya-sama Love is War (2019)\data for Kaguya-sama Love is War (2019)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kaiju No. 8 (20200)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kaiju No. 8 (20200)\data for Kaiju No. 8 (20200)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\KamiKatsu Meine Arbeit als Missionar in einer gottlosen Welt (2023)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\KamiKatsu Meine Arbeit als Missionar in einer gottlosen Welt (2023)\data for KamiKatsu Meine Arbeit als Missionar in einer gottlosen Welt (2023)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Knight's & Magic (2017)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Knight's & Magic (2017)\data for Knight's & Magic (2017)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kombattanten werden entsandt! (2021)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kombattanten werden entsandt! (2021)\data for Kombattanten werden entsandt! (2021)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\KonoSuba – An Explosion on This Wonderful World! (2023)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\KonoSuba – An Explosion on This Wonderful World! (2023)\data for KonoSuba – An Explosion on This Wonderful World! (2023)
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Konosuba God's Blessing on This Wonderful World! (2016)\data
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Konosuba God's Blessing on This Wonderful World! (2016)\data for Konosuba God's Blessing on This Wonderful World! (2016)
|
||||
2025-09-29 16:18:53 - WARNING - root - load_series - Skipping Krieg der Welten (2019) - No data folder found
|
||||
2025-09-29 16:18:53 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kuma Kuma Kuma Bear (2020)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kuma Kuma Kuma Bear (2020)\data for Kuma Kuma Kuma Bear (2020)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Log Horizon (2013)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Log Horizon (2013)\data for Log Horizon (2013)
|
||||
2025-09-29 16:18:54 - WARNING - root - load_series - Skipping Loki (2021) - No data folder found
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Loner Life in Another World (2024)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Loner Life in Another World (2024)\data for Loner Life in Another World (2024)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Lord of Mysteries (2025)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Lord of Mysteries (2025)\data for Lord of Mysteries (2025)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Lycoris Recoil (2022)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Lycoris Recoil (2022)\data for Lycoris Recoil (2022)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Magic Maker How to Make Magic in Another World (2025)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Magic Maker How to Make Magic in Another World (2025)\data for Magic Maker How to Make Magic in Another World (2025)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Magical Girl Site (2018)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Magical Girl Site (2018)\data for Magical Girl Site (2018)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Management of a Novice Alchemist (2022)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Management of a Novice Alchemist (2022)\data for Management of a Novice Alchemist (2022)
|
||||
2025-09-29 16:18:54 - WARNING - root - load_series - Skipping Marianne (2019) - No data folder found
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Meine Wiedergeburt als Schleim in einer anderen Welt (2018)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Meine Wiedergeburt als Schleim in einer anderen Welt (2018)\data for Meine Wiedergeburt als Schleim in einer anderen Welt (2018)
|
||||
2025-09-29 16:18:54 - WARNING - root - load_series - Skipping Midnight Mass (2021) - No data folder found
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Mirai Nikki (2011)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Mirai Nikki (2011)\data for Mirai Nikki (2011)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Miss Kobayashi's Dragon Maid (2017)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Miss Kobayashi's Dragon Maid (2017)\data for Miss Kobayashi's Dragon Maid (2017)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Mob Psycho 100 (2016)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Mob Psycho 100 (2016)\data for Mob Psycho 100 (2016)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\More than a Married Couple, but Not Lovers (2022)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\More than a Married Couple, but Not Lovers (2022)\data for More than a Married Couple, but Not Lovers (2022)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Mushoku Tensei Jobless Reincarnation (2021)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Mushoku Tensei Jobless Reincarnation (2021)\data for Mushoku Tensei Jobless Reincarnation (2021)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Hero Academia Vigilantes (2025)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Hero Academia Vigilantes (2025)\data for My Hero Academia Vigilantes (2025)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Instant Death Ability Is So Overpowered, No One in This Other World Stands a Chance Against Me! (2024)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Instant Death Ability Is So Overpowered, No One in This Other World Stands a Chance Against Me! (2024)\data for My Instant Death Ability Is So Overpowered, No One in This Other World Stands a Chance Against Me! (2024)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Isekai Life (2022)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Isekai Life (2022)\data for My Isekai Life (2022)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Life as Inukai-san's Dog (2023)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Life as Inukai-san's Dog (2023)\data for My Life as Inukai-san's Dog (2023)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Unique Skill Makes Me OP even at Level 1 (2023)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Unique Skill Makes Me OP even at Level 1 (2023)\data for My Unique Skill Makes Me OP even at Level 1 (2023)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\New Saga (2025)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\New Saga (2025)\data for New Saga (2025)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Nina the Starry Bride (2024)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Nina the Starry Bride (2024)\data for Nina the Starry Bride (2024)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Nisekoi Liebe, Lügen & Yakuza (2014)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Nisekoi Liebe, Lügen & Yakuza (2014)\data for Nisekoi Liebe, Lügen & Yakuza (2014)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\No Game No Life (2014)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\No Game No Life (2014)\data for No Game No Life (2014)
|
||||
2025-09-29 16:18:54 - WARNING - root - load_series - Skipping Obi-Wan Kenobi (2022) - No data folder found
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Orange (2016)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Orange (2016)\data for Orange (2016)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Peach Boy Riverside (2021)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Peach Boy Riverside (2021)\data for Peach Boy Riverside (2021)
|
||||
2025-09-29 16:18:54 - WARNING - root - load_series - Skipping Penny Dreadful (2014) - No data folder found
|
||||
2025-09-29 16:18:54 - WARNING - root - load_series - Skipping Planet Erde II Eine Erde - viele Welten (2016) - No data folder found
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Plastic Memories (2015)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Plastic Memories (2015)\data for Plastic Memories (2015)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Ragna Crimson (2023)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Ragna Crimson (2023)\data for Ragna Crimson (2023)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Rascal Does Not Dream of Bunny Girl Senpai (2018)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Rascal Does Not Dream of Bunny Girl Senpai (2018)\data for Rascal Does Not Dream of Bunny Girl Senpai (2018)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\ReMonster (2024)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\ReMonster (2024)\data for ReMonster (2024)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\ReZERO - Starting Life in Another World (2016)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\ReZERO - Starting Life in Another World (2016)\data for ReZERO - Starting Life in Another World (2016)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Reborn as a Vending Machine, I Now Wander the Dungeon (2023)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Reborn as a Vending Machine, I Now Wander the Dungeon (2023)\data for Reborn as a Vending Machine, I Now Wander the Dungeon (2023)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Redo of Healer (2021)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Redo of Healer (2021)\data for Redo of Healer (2021)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Rick and Morty (2013)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Rick and Morty (2013)\data for Rick and Morty (2013)
|
||||
2025-09-29 16:18:54 - WARNING - root - load_series - Skipping Rocket & Groot (2017) - No data folder found
|
||||
2025-09-29 16:18:54 - WARNING - root - load_series - Skipping Romulus (2020) - No data folder found
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Saga of Tanya the Evil (2017)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Saga of Tanya the Evil (2017)\data for Saga of Tanya the Evil (2017)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Seirei Gensouki Spirit Chronicles (2021)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Seirei Gensouki Spirit Chronicles (2021)\data for Seirei Gensouki Spirit Chronicles (2021)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Shangri-La Frontier (2023)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Shangri-La Frontier (2023)\data for Shangri-La Frontier (2023)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\She Professed Herself Pupil of the Wise Man (2022)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\She Professed Herself Pupil of the Wise Man (2022)\data for She Professed Herself Pupil of the Wise Man (2022)
|
||||
2025-09-29 16:18:54 - WARNING - root - load_series - Skipping She-Hulk Die Anwältin (2022) - No data folder found
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Solo Leveling (2024)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Solo Leveling (2024)\data for Solo Leveling (2024)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Spice and Wolf (2008)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Spice and Wolf (2008)\data for Spice and Wolf (2008)
|
||||
2025-09-29 16:18:54 - WARNING - root - load_series - Skipping Star Trek Discovery (2017) - No data folder found
|
||||
2025-09-29 16:18:54 - WARNING - root - load_series - Skipping Stargate (1997) - No data folder found
|
||||
2025-09-29 16:18:54 - WARNING - root - load_series - Skipping Stargate Atlantis (2004) - No data folder found
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Steins;Gate (2011)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Steins;Gate (2011)\data for Steins;Gate (2011)
|
||||
2025-09-29 16:18:54 - WARNING - root - load_series - Skipping Sweet Tooth (2021) - No data folder found
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Sword of the Demon Hunter Kijin Gen (2025)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Sword of the Demon Hunter Kijin Gen (2025)\data for Sword of the Demon Hunter Kijin Gen (2025)
|
||||
2025-09-29 16:18:54 - WARNING - root - load_series - Skipping Tales from the Loop (2020) - No data folder found
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tamako Market (2013)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tamako Market (2013)\data for Tamako Market (2013)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Ancient Magus' Bride (2017)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Ancient Magus' Bride (2017)\data for The Ancient Magus' Bride (2017)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Demon Sword Master of Excalibur Academy (2023)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Demon Sword Master of Excalibur Academy (2023)\data for The Demon Sword Master of Excalibur Academy (2023)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Devil is a Part-Timer! (2013)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Devil is a Part-Timer! (2013)\data for The Devil is a Part-Timer! (2013)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Dreaming Boy is a Realist (2023)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Dreaming Boy is a Realist (2023)\data for The Dreaming Boy is a Realist (2023)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Dungeon of Black Company (2021)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Dungeon of Black Company (2021)\data for The Dungeon of Black Company (2021)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Eminence in Shadow (2022)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Eminence in Shadow (2022)\data for The Eminence in Shadow (2022)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Familiar of Zero (2006)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Familiar of Zero (2006)\data for The Familiar of Zero (2006)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Faraway Paladin (2021)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Faraway Paladin (2021)\data for The Faraway Paladin (2021)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Gorilla God’s Go-To Girl (2025)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Gorilla God’s Go-To Girl (2025)\data for The Gorilla God’s Go-To Girl (2025)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Hidden Dungeon Only I Can Enter (2021)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Hidden Dungeon Only I Can Enter (2021)\data for The Hidden Dungeon Only I Can Enter (2021)
|
||||
2025-09-29 16:18:54 - WARNING - root - load_series - Skipping The Last of Us (2023) - No data folder found
|
||||
2025-09-29 16:18:54 - WARNING - root - load_series - Skipping The Man in the High Castle (2015) - No data folder found
|
||||
2025-09-29 16:18:54 - WARNING - root - load_series - Skipping The Mandalorian (2019) - No data folder found
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Quintessential Quintuplets (2019)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Quintessential Quintuplets (2019)\data for The Quintessential Quintuplets (2019)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Saint’s Magic Power is Omnipotent (2021)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Saint’s Magic Power is Omnipotent (2021)\data for The Saint’s Magic Power is Omnipotent (2021)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Too-Perfect Saint Tossed Aside by My Fiance and Sold to Another Kingdom (2025)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Too-Perfect Saint Tossed Aside by My Fiance and Sold to Another Kingdom (2025)\data for The Too-Perfect Saint Tossed Aside by My Fiance and Sold to Another Kingdom (2025)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Unaware Atelier Meister (2025)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Unaware Atelier Meister (2025)\data for The Unaware Atelier Meister (2025)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Weakest Tamer Began a Journey to Pick Up Trash (2024)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Weakest Tamer Began a Journey to Pick Up Trash (2024)\data for The Weakest Tamer Began a Journey to Pick Up Trash (2024)
|
||||
2025-09-29 16:18:54 - WARNING - root - load_series - Skipping The Witcher (2019) - No data folder found
|
||||
2025-09-29 16:18:54 - WARNING - root - load_series - Skipping The World's Finest Assassin Gets Reincarnated in Another World as an Aristocrat (2021) - No data folder found
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\To Your Eternity (2021)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\To Your Eternity (2021)\data for To Your Eternity (2021)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tomo-chan Is a Girl! (2023)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tomo-chan Is a Girl! (2023)\data for Tomo-chan Is a Girl! (2023)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tonikawa Over the Moon for You (2020)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tonikawa Over the Moon for You (2020)\data for Tonikawa Over the Moon for You (2020)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tsukimichi Moonlit Fantasy (2021)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tsukimichi Moonlit Fantasy (2021)\data for Tsukimichi Moonlit Fantasy (2021)
|
||||
2025-09-29 16:18:54 - WARNING - root - load_series - Skipping Unidentified - Die wahren X-Akten (2019) - No data folder found
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Unnamed Memory (2024)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Unnamed Memory (2024)\data for Unnamed Memory (2024)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Vom Landei zum Schwertheiligen (2025)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Vom Landei zum Schwertheiligen (2025)\data for Vom Landei zum Schwertheiligen (2025)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\WIND BREAKER (2024)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\WIND BREAKER (2024)\data for WIND BREAKER (2024)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\WITCH WATCH (2025)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\WITCH WATCH (2025)\data for WITCH WATCH (2025)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Wolf Girl & Black Prince (2014)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Wolf Girl & Black Prince (2014)\data for Wolf Girl & Black Prince (2014)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\World’s End Harem (2022)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\World’s End Harem (2022)\data for World’s End Harem (2022)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Zom 100 Bucket List of the Dead (2023)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Zom 100 Bucket List of the Dead (2023)\data for Zom 100 Bucket List of the Dead (2023)
|
||||
2025-09-29 16:18:54 - WARNING - root - load_series - Skipping a-couple-of-cuckoos - No data folder found
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\a-ninja-and-an-assassin-under-one-roof\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\a-ninja-and-an-assassin-under-one-roof\data for a-ninja-and-an-assassin-under-one-roof
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\a-nobodys-way-up-to-an-exploration-hero\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\a-nobodys-way-up-to-an-exploration-hero\data for a-nobodys-way-up-to-an-exploration-hero
|
||||
2025-09-29 16:18:54 - WARNING - root - load_series - Skipping a-silent-voice - No data folder found
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\am-i-actually-the-strongest\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\am-i-actually-the-strongest\data for am-i-actually-the-strongest
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\anne-shirley\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\anne-shirley\data for anne-shirley
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\apocalypse-bringer-mynoghra\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\apocalypse-bringer-mynoghra\data for apocalypse-bringer-mynoghra
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\banished-from-the-heros-party-i-decided-to-live-a-quiet-life-in-the-countryside\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\banished-from-the-heros-party-i-decided-to-live-a-quiet-life-in-the-countryside\data for banished-from-the-heros-party-i-decided-to-live-a-quiet-life-in-the-countryside
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\beheneko the elf girls cat is secretly an s ranked monster (2025) (2025)\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\beheneko the elf girls cat is secretly an s ranked monster (2025) (2025)\data for beheneko the elf girls cat is secretly an s ranked monster (2025) (2025)
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\berserk-of-gluttony\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\berserk-of-gluttony\data for berserk-of-gluttony
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\black-summoner\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\black-summoner\data for black-summoner
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\boarding-school-juliet\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\boarding-school-juliet\data for boarding-school-juliet
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\buddy-daddies\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\buddy-daddies\data for buddy-daddies
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\can-a-boy-girl-friendship-survive\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\can-a-boy-girl-friendship-survive\data for can-a-boy-girl-friendship-survive
|
||||
2025-09-29 16:18:54 - WARNING - root - load_series - Skipping chillin-in-another-world-with-level-2-super-cheat-powers - No data folder found
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\chillin-in-my-30s-after-getting-fired-from-the-demon-kings-army\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\chillin-in-my-30s-after-getting-fired-from-the-demon-kings-army\data for chillin-in-my-30s-after-getting-fired-from-the-demon-kings-army
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\choujin koukousei tachi wa isekai de mo yoyuu de ikinuku you desu\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\choujin koukousei tachi wa isekai de mo yoyuu de ikinuku you desu\data for choujin koukousei tachi wa isekai de mo yoyuu de ikinuku you desu
|
||||
2025-09-29 16:18:54 - WARNING - root - load_series - Skipping clevatess - No data folder found
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\compass-20-animation-project\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\compass-20-animation-project\data for compass-20-animation-project
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\dragon-raja-the-blazing-dawn\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\dragon-raja-the-blazing-dawn\data for dragon-raja-the-blazing-dawn
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\dragonar-academy\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\dragonar-academy\data for dragonar-academy
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\drugstore-in-another-world-the-slow-life-of-a-cheat-pharmacist\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\drugstore-in-another-world-the-slow-life-of-a-cheat-pharmacist\data for drugstore-in-another-world-the-slow-life-of-a-cheat-pharmacist
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\fluffy-paradise\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\fluffy-paradise\data for fluffy-paradise
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\food-for-the-soul\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\food-for-the-soul\data for food-for-the-soul
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\handyman-saitou-in-another-world\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\handyman-saitou-in-another-world\data for handyman-saitou-in-another-world
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\i-shall-survive-using-potions\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\i-shall-survive-using-potions\data for i-shall-survive-using-potions
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\im-giving-the-disgraced-noble-lady-i-rescued-a-crash-course-in-naughtiness\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\im-giving-the-disgraced-noble-lady-i-rescued-a-crash-course-in-naughtiness\data for im-giving-the-disgraced-noble-lady-i-rescued-a-crash-course-in-naughtiness
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\killing-bites\data
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\killing-bites\data for killing-bites
|
||||
2025-09-29 16:18:54 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\love-flops\data
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\love-flops\data for love-flops
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\magic-maker-how-to-make-magic-in-another-world\data
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\magic-maker-how-to-make-magic-in-another-world\data for magic-maker-how-to-make-magic-in-another-world
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\muhyo-rojis-bureau-of-supernatural-investigation\data
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\muhyo-rojis-bureau-of-supernatural-investigation\data for muhyo-rojis-bureau-of-supernatural-investigation
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\my-roommate-is-a-cat\data
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\my-roommate-is-a-cat\data for my-roommate-is-a-cat
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\nukitashi-the-animation\data
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\nukitashi-the-animation\data for nukitashi-the-animation
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\outbreak-company\data
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\outbreak-company\data for outbreak-company
|
||||
2025-09-29 16:18:55 - WARNING - root - load_series - Skipping plastic-memories - No data folder found
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\pseudo-harem\data
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\pseudo-harem\data for pseudo-harem
|
||||
2025-09-29 16:18:55 - WARNING - root - load_series - Skipping rent-a-girlfriend - No data folder found
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\sasaki-and-peeps\data
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\sasaki-and-peeps\data for sasaki-and-peeps
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\scooped-up-by-an-s-rank-adventurer\data
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\scooped-up-by-an-s-rank-adventurer\data for scooped-up-by-an-s-rank-adventurer
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\secrets-of-the-silent-witch\data
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\secrets-of-the-silent-witch\data for secrets-of-the-silent-witch
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\seton-academy-join-the-pack\data
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\seton-academy-join-the-pack\data for seton-academy-join-the-pack
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\shachibato-president-its-time-for-battle\data
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\shachibato-president-its-time-for-battle\data for shachibato-president-its-time-for-battle
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\skeleton-knight-in-another-world\data
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\skeleton-knight-in-another-world\data for skeleton-knight-in-another-world
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\sugar-apple-fairy-tale\data
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\sugar-apple-fairy-tale\data for sugar-apple-fairy-tale
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\summer-pockets\data
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\summer-pockets\data for summer-pockets
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\suppose-a-kid-from-the-last-dungeon-boonies-moved-to-a-starter-town\data
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\suppose-a-kid-from-the-last-dungeon-boonies-moved-to-a-starter-town\data for suppose-a-kid-from-the-last-dungeon-boonies-moved-to-a-starter-town
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-beginning-after-the-end\data
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-beginning-after-the-end\data for the-beginning-after-the-end
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-brilliant-healers-new-life-in-the-shadows\data
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-brilliant-healers-new-life-in-the-shadows\data for the-brilliant-healers-new-life-in-the-shadows
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-daily-life-of-a-middle-aged-online-shopper-in-another-world\data
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-daily-life-of-a-middle-aged-online-shopper-in-another-world\data for the-daily-life-of-a-middle-aged-online-shopper-in-another-world
|
||||
2025-09-29 16:18:55 - WARNING - root - load_series - Skipping the-familiar-of-zero - No data folder found
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-fragrant-flower-blooms-with-dignity\data
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-fragrant-flower-blooms-with-dignity\data for the-fragrant-flower-blooms-with-dignity
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-great-cleric\data
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-great-cleric\data for the-great-cleric
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-new-chronicles-of-extraordinary-beings-preface\data
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-new-chronicles-of-extraordinary-beings-preface\data for the-new-chronicles-of-extraordinary-beings-preface
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-shiunji-family-children\data
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-shiunji-family-children\data for the-shiunji-family-children
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-shy-hero-and-the-assassin-princesses\data
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-shy-hero-and-the-assassin-princesses\data for the-shy-hero-and-the-assassin-princesses
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-testament-of-sister-new-devil\data
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-testament-of-sister-new-devil\data for the-testament-of-sister-new-devil
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-unwanted-undead-adventurer\data
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-unwanted-undead-adventurer\data for the-unwanted-undead-adventurer
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-water-magician\data
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-water-magician\data for the-water-magician
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-worlds-finest-assassin-gets-reincarnated-in-another-world-as-an-aristocrat\data
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-worlds-finest-assassin-gets-reincarnated-in-another-world-as-an-aristocrat\data for the-worlds-finest-assassin-gets-reincarnated-in-another-world-as-an-aristocrat
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-wrong-way-to-use-healing-magic\data
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-wrong-way-to-use-healing-magic\data for the-wrong-way-to-use-healing-magic
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\theres-no-freaking-way-ill-be-your-lover-unless\data
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\theres-no-freaking-way-ill-be-your-lover-unless\data for theres-no-freaking-way-ill-be-your-lover-unless
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\to-be-hero-x\data
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\to-be-hero-x\data for to-be-hero-x
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\tougen-anki\data
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\tougen-anki\data for tougen-anki
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\uglymug-epicfighter\data
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\uglymug-epicfighter\data for uglymug-epicfighter
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\valkyrie-drive-mermaid\data
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\valkyrie-drive-mermaid\data for valkyrie-drive-mermaid
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\wandering-witch-the-journey-of-elaina\data
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\wandering-witch-the-journey-of-elaina\data for wandering-witch-the-journey-of-elaina
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\war-god-system-im-counting-on-you\data
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\war-god-system-im-counting-on-you\data for war-god-system-im-counting-on-you
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\welcome-to-japan-ms-elf\data
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\welcome-to-japan-ms-elf\data for welcome-to-japan-ms-elf
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\welcome-to-the-outcasts-restaurant\data
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\welcome-to-the-outcasts-restaurant\data for welcome-to-the-outcasts-restaurant
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\yandere-dark-elf-she-chased-me-all-the-way-from-another-world\data
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\yandere-dark-elf-she-chased-me-all-the-way-from-another-world\data for yandere-dark-elf-she-chased-me-all-the-way-from-another-world
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Übel Blatt (2025)\data
|
||||
2025-09-29 16:18:55 - DEBUG - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Übel Blatt (2025)\data for Übel Blatt (2025)
|
||||
2025-09-29 16:18:55 - WARNING - werkzeug - _log - * Debugger is active!
|
||||
2025-09-29 16:19:21 - DEBUG - schedule - clear - Deleting *all* jobs
|
||||
|
||||
@@ -1,205 +0,0 @@
|
||||
import os
|
||||
import sys
|
||||
import logging
|
||||
from flask import Flask, request, jsonify, render_template, redirect, url_for, session, send_from_directory
|
||||
from flask_socketio import SocketIO, emit
|
||||
import atexit
|
||||
import signal
|
||||
import time
|
||||
from datetime import datetime
|
||||
|
||||
# Add the parent directory to sys.path to import our modules
|
||||
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..'))
|
||||
|
||||
from main import SeriesApp
|
||||
from server.core.entities.series import Serie
|
||||
from server.core.entities import SerieList
|
||||
from server.infrastructure.file_system import SerieScanner
|
||||
from server.infrastructure.providers.provider_factory import Loaders
|
||||
from web.controllers.auth_controller import session_manager, require_auth, optional_auth
|
||||
from config import config
|
||||
from application.services.queue_service import download_queue_bp
|
||||
|
||||
# --- Flask application setup -------------------------------------------------
app = Flask(__name__)
# Random secret generated per process: all sessions are invalidated on every
# restart. NOTE(review): presumably intentional for this deployment — confirm.
app.config['SECRET_KEY'] = os.urandom(24)
app.config['PERMANENT_SESSION_LIFETIME'] = 86400  # 24 hours
# Socket.IO with CORS open to any origin.
socketio = SocketIO(app, cors_allowed_origins="*")

# Register essential blueprints only
app.register_blueprint(download_queue_bp)

# Initialize series application
# Populated lazily by init_series_app(); routes must tolerate None.
series_app = None
# Default is a Windows UNC path onto an SSHFS mount; override via the
# ANIME_DIRECTORY environment variable.
anime_directory = os.getenv("ANIME_DIRECTORY", "\\\\sshfs.r\\ubuntu@192.168.178.43\\media\\serien\\Serien")
||||
def create_app():
    """Configure root logging and return the module-level Flask application."""
    logging.basicConfig(level=logging.INFO)
    log = logging.getLogger(__name__)
    log.info("Starting Aniworld Flask server...")
    return app
||||
def init_series_app():
    """Initialize the global SeriesApp instance.

    On failure, installs a minimal stand-in object exposing the attributes
    the route handlers rely on (``List`` is ``None``,
    ``directory_to_search`` set) so they degrade gracefully instead of
    crashing on a missing global.
    """
    global series_app
    # Fetch the logger once instead of separately in the try and except
    # branches (the original duplicated this call).
    logger = logging.getLogger(__name__)
    try:
        logger.info(f"Initializing series app with directory: {anime_directory}")
        series_app = SeriesApp(anime_directory)
        logger.info("Series app initialized successfully")
    except Exception as e:
        # logger.exception records the full traceback, not just the message,
        # which logger.error alone dropped.
        logger.exception(f"Failed to initialize series app: {e}")
        # Create a minimal fallback with the attributes routes rely on.
        series_app = type('SeriesApp', (), {
            'List': None,
            'directory_to_search': anime_directory
        })()
@app.route('/')
@optional_auth
def index():
    """Serve the single-page application shell."""
    return render_template('base/index.html')
||||
@app.route('/login')
def login():
    """Serve the login page."""
    return render_template('base/login.html')
||||
@app.route('/api/auth/login', methods=['POST'])
def api_login():
    """Handle login requests.

    Expects a JSON body with a ``password`` field; delegates validation to
    the session manager and returns its result as JSON. Errors are reported
    as a JSON payload with HTTP 500.
    """
    try:
        # silent=True makes a missing or malformed JSON body yield None
        # instead of raising; `or {}` then lets the password default apply.
        # (The original crashed with AttributeError on `None.get` and
        # answered 500 for any request without a JSON body.)
        data = request.get_json(silent=True) or {}
        password = data.get('password', '')

        result = session_manager.login(password, request.remote_addr)

        return jsonify(result)
    except Exception as e:
        return jsonify({'status': 'error', 'message': str(e)}), 500
||||
@app.route('/api/auth/logout', methods=['POST'])
def api_logout():
    """Terminate the current session and confirm via JSON."""
    session_manager.logout()
    payload = {'status': 'success', 'message': 'Logged out successfully'}
    return jsonify(payload)
||||
@app.route('/api/auth/status')
@optional_auth
def auth_status():
    """Report the current authentication and session state as JSON."""
    status = {
        'authenticated': session_manager.is_authenticated(),
        'user': session.get('user', 'guest'),
        'login_time': session.get('login_time'),
        'session_info': session_manager.get_session_info(),
    }
    return jsonify(status)
||||
@app.route('/api/series', methods=['GET'])
@optional_auth
def get_series():
    """Return all known series as a JSON payload.

    Always answers with status 'success' — even on internal errors — so the
    frontend does not enter a reload loop on a 500 response.
    """
    try:
        if series_app is None or series_app.List is None:
            return jsonify({
                'status': 'success',
                'series': [],
                'total_series': 0,
                'message': 'No series data available. Please perform a scan to load series.'
            })

        series_data = [_serialize_serie(serie) for serie in series_app.List.GetList()]

        return jsonify({
            'status': 'success',
            'series': series_data,
            'total_series': len(series_data)
        })

    except Exception as e:
        # Log the error (with traceback) but don't return 500 to prevent
        # page reload loops. The original used print(); the rest of this
        # file logs via the logging module.
        logging.getLogger(__name__).exception(f"Error in get_series: {e}")
        return jsonify({
            'status': 'success',
            'series': [],
            'total_series': 0,
            'message': 'Error loading series data. Please try rescanning.'
        })


def _serialize_serie(serie):
    """Build the JSON-ready dict for one serie.

    Evaluates the episode-dict guard once instead of three times per serie
    as the original inline code did.
    """
    episodes = serie.episodeDict if hasattr(serie, 'episodeDict') and serie.episodeDict else {}
    episode_count = sum(len(eps) for eps in episodes.values())
    return {
        'folder': serie.folder,
        'name': serie.name or serie.folder,
        'total_episodes': episode_count,
        # NOTE(review): the original reported the identical number for total
        # and missing episodes. Preserved here for compatibility, but this
        # looks like a bug — "missing" should presumably be derived
        # differently. TODO confirm with the data model.
        'missing_episodes': episode_count,
        'status': 'ongoing',
        'episodes': dict(episodes),  # shallow copy, {} when no episode data
    }
||||
@app.route('/api/preferences', methods=['GET'])
|
||||
@optional_auth
|
||||
def get_preferences():
|
||||
"""Get user preferences."""
|
||||
# Return basic preferences for now
|
||||
return jsonify({
|
||||
'theme': 'dark',
|
||||
'language': 'en',
|
||||
'auto_refresh': True,
|
||||
'notifications': True
|
||||
})
|
||||
|
||||
# Basic health status endpoint
|
||||
@app.route('/api/process/locks/status')
|
||||
@optional_auth
|
||||
def process_locks_status():
|
||||
"""Get process lock status."""
|
||||
return jsonify({
|
||||
'rescan_locked': False,
|
||||
'download_locked': False,
|
||||
'cleanup_locked': False,
|
||||
'message': 'All processes available'
|
||||
})
|
||||
|
||||
# Undo/Redo status endpoint
@app.route('/api/undo-redo/status')
@optional_auth
def undo_redo_status():
    """Report undo/redo availability (static placeholder values for now)."""
    history_state = {
        'can_undo': False,
        'can_redo': False,
        'undo_count': 0,
        'redo_count': 0,
        'last_action': None,
    }
    return jsonify(history_state)
|
||||
|
||||
# Static file serving
@app.route('/static/<path:filename>')
def static_files(filename):
    """Serve a static asset from the web/static directory."""
    static_root = 'web/static'
    return send_from_directory(static_root, filename)
|
||||
|
||||
def cleanup_on_exit():
    """Log that application shutdown housekeeping has finished."""
    logging.getLogger(__name__).info("Application cleanup completed")


# Run the cleanup hook when the interpreter shuts down.
atexit.register(cleanup_on_exit)


if __name__ == '__main__':
    # Build the global SeriesApp before accepting requests.
    init_series_app()

    print("Server will be available at http://localhost:5000")
    socketio.run(app, debug=True, host='0.0.0.0', port=5000,
                 allow_unsafe_werkzeug=True)
|
||||
@@ -1,83 +0,0 @@
|
||||
@echo off
REM Test Runner Script for AniWorld Testing Pipeline (Windows)
REM This script provides an easy way to run the AniWorld test suite on Windows

echo AniWorld Test Suite Runner
echo ==========================

REM Check if we're in the right directory
if not exist "test_pipeline.py" (
    echo Error: Please run this script from the src\server directory
    exit /b 1
)

REM Get test type parameter (default to basic)
set TEST_TYPE=%1
if "%TEST_TYPE%"=="" set TEST_TYPE=basic

echo Running test type: %TEST_TYPE%
echo.

if "%TEST_TYPE%"=="unit" (
    echo Running Unit Tests Only
    python test_pipeline.py --unit
    goto :end
)

if "%TEST_TYPE%"=="integration" (
    echo Running Integration Tests Only
    python test_pipeline.py --integration
    goto :end
)

if "%TEST_TYPE%"=="performance" (
    echo Running Performance Tests Only
    python test_pipeline.py --performance
    goto :end
)

if "%TEST_TYPE%"=="coverage" (
    echo Running Code Coverage Analysis
    python test_pipeline.py --coverage
    goto :end
)

if "%TEST_TYPE%"=="load" (
    echo Running Load Tests
    python test_pipeline.py --load
    goto :end
)

if "%TEST_TYPE%"=="all" (
    echo Running Complete Test Pipeline
    python test_pipeline.py --all
    goto :end
)

REM Default case - basic tests
echo Running Basic Test Suite (Unit + Integration)
echo.

echo Running Unit Tests...
python test_pipeline.py --unit
set unit_result=%errorlevel%

echo.
echo Running Integration Tests...
python test_pipeline.py --integration
set integration_result=%errorlevel%

echo.
echo ==========================================
REM Fixed: the original "if a if b ( ) else ( )" form binds the else to the
REM inner if only, so a unit-test failure skipped BOTH branches (no message,
REM no exit code).  Explicit guard jumps make every outcome deterministic.
if not %unit_result%==0 goto :basic_failed
if not %integration_result%==0 goto :basic_failed
echo ✅ Basic Test Suite: ALL TESTS PASSED
exit /b 0

:basic_failed
echo ❌ Basic Test Suite: SOME TESTS FAILED
exit /b 1

:end
echo.
echo Test execution completed!
echo Check the output above for detailed results.
||||
@@ -1,81 +0,0 @@
|
||||
#!/bin/bash
# Test Runner Script for AniWorld Testing Pipeline
# This script provides an easy way to run the AniWorld test suite

echo "AniWorld Test Suite Runner"
echo "=========================="

# Check if we're in the right directory
if [ ! -f "test_pipeline.py" ]; then
    echo "Error: Please run this script from the src/server directory"
    exit 1
fi

# Run one named test command, announcing the result.
# Returns the command's success/failure so callers can propagate it.
run_test() {
    local test_name="$1"
    local command="$2"

    echo ""
    echo "Running $test_name..."
    echo "----------------------------------------"

    if eval "$command"; then
        echo "✅ $test_name completed successfully"
        return 0
    else
        echo "❌ $test_name failed"
        return 1
    fi
}

# Default to running basic tests
TEST_TYPE="${1:-basic}"

# Fixed: run_test's exit status was previously discarded in every
# single-type branch, so the script exited 0 even when tests failed.
status=0

case "$TEST_TYPE" in
    "unit")
        echo "Running Unit Tests Only"
        run_test "Unit Tests" "python test_pipeline.py --unit" || status=1
        ;;
    "integration")
        echo "Running Integration Tests Only"
        run_test "Integration Tests" "python test_pipeline.py --integration" || status=1
        ;;
    "performance")
        echo "Running Performance Tests Only"
        run_test "Performance Tests" "python test_pipeline.py --performance" || status=1
        ;;
    "coverage")
        echo "Running Code Coverage Analysis"
        run_test "Code Coverage" "python test_pipeline.py --coverage" || status=1
        ;;
    "load")
        echo "Running Load Tests"
        run_test "Load Tests" "python test_pipeline.py --load" || status=1
        ;;
    "all")
        echo "Running Complete Test Pipeline"
        run_test "Full Pipeline" "python test_pipeline.py --all" || status=1
        ;;
    "basic"|*)
        echo "Running Basic Test Suite (Unit + Integration)"
        success=true

        run_test "Unit Tests" "python test_pipeline.py --unit" || success=false
        run_test "Integration Tests" "python test_pipeline.py --integration" || success=false

        echo ""
        echo "=========================================="
        if [ "$success" = true ]; then
            echo "✅ Basic Test Suite: ALL TESTS PASSED"
            exit 0
        else
            echo "❌ Basic Test Suite: SOME TESTS FAILED"
            exit 1
        fi
        ;;
esac

echo ""
echo "Test execution completed!"
echo "Check the output above for detailed results."
exit $status
|
||||
@@ -1,3 +0,0 @@
|
||||
"""
|
||||
Shared utilities and constants for the AniWorld application.
|
||||
"""
|
||||
@@ -1,56 +0,0 @@
|
||||
import os
|
||||
import hashlib
|
||||
from collections import defaultdict
|
||||
|
||||
|
||||
def compute_hash(filepath, chunk_size=8192):
    """Return the SHA-256 hex digest of *filepath*, or None on read error.

    The file is streamed in *chunk_size*-byte chunks so arbitrarily large
    files can be hashed without loading them into memory.
    """
    sha256 = hashlib.sha256()
    try:
        with open(filepath, 'rb') as f:
            for chunk in iter(lambda: f.read(chunk_size), b''):
                sha256.update(chunk)
    except OSError as e:
        # Narrowed from a bare Exception: only I/O failures (missing file,
        # permissions, network share drop) are expected here.
        print(f"Error reading {filepath}: {e}")
        return None
    return sha256.hexdigest()
|
||||
|
||||
|
||||
def find_duplicates(root_dir):
    """Group identical .mp4 files under *root_dir* by content hash.

    Returns a mapping of sha256 digest -> list of file paths, containing
    only groups with more than one member.  Files are first bucketed by
    size so hashing only happens for potential duplicates.
    """
    by_size = defaultdict(list)

    # Step 1: Group files by size
    for dirpath, _, filenames in os.walk(root_dir):
        for name in filenames:
            if not name.lower().endswith('.mp4'):
                continue
            full_path = os.path.join(dirpath, name)
            try:
                by_size[os.path.getsize(full_path)].append(full_path)
            except Exception as e:
                print(f"Error accessing {full_path}: {e}")

    # Step 2: Within size groups, group by hash
    duplicates = defaultdict(list)
    for candidates in by_size.values():
        if len(candidates) < 2:
            continue
        by_hash = defaultdict(list)
        for candidate in candidates:
            digest = compute_hash(candidate)
            if digest:
                by_hash[digest].append(candidate)
        for digest, paths in by_hash.items():
            if len(paths) > 1:
                duplicates[digest].extend(paths)

    return duplicates
|
||||
|
||||
|
||||
# Example usage
if __name__ == "__main__":
    folder_to_scan = "\\\\sshfs.r\\ubuntu@192.168.178.43\\media\\serien\\Serien"
    duplicate_groups = find_duplicates(folder_to_scan)
    for hash_val, files in duplicate_groups.items():
        print(f"\nDuplicate group (hash: {hash_val}):")
        for path in files:
            print(f"  {path}")
|
||||
@@ -101,238 +101,6 @@ class SpeedLimiter:
|
||||
return speed_bps / (1024 * 1024) # Convert to MB/s
|
||||
return 0.0
|
||||
|
||||
|
||||
class DownloadCache:
    """Caching system for frequently accessed data.

    Payloads are stored as individual files under ``cache_dir`` while entry
    bookkeeping (expiry, access counts, sizes, metadata) lives in a small
    SQLite database next to them.  Public operations take ``self.lock`` so a
    single instance can be shared across threads.
    """

    def __init__(self, cache_dir: str = "./cache", max_size_mb: int = 500):
        self.cache_dir = cache_dir
        self.max_size_bytes = max_size_mb * 1024 * 1024
        self.cache_db = os.path.join(cache_dir, 'cache.db')
        self.lock = threading.Lock()
        self.logger = logging.getLogger(__name__)

        # The directory must exist before SQLite can create the db file.
        os.makedirs(cache_dir, exist_ok=True)
        self._init_database()
        # Drop anything that expired while the process was not running.
        self._cleanup_expired()

    def _init_database(self):
        """Create the cache table and its lookup indexes if missing."""
        with sqlite3.connect(self.cache_db) as conn:
            conn.execute("""
                CREATE TABLE IF NOT EXISTS cache_entries (
                    key TEXT PRIMARY KEY,
                    file_path TEXT,
                    created_at TIMESTAMP,
                    expires_at TIMESTAMP,
                    access_count INTEGER DEFAULT 0,
                    size_bytes INTEGER,
                    metadata TEXT
                )
            """)
            conn.execute("""
                CREATE INDEX IF NOT EXISTS idx_expires_at ON cache_entries(expires_at)
            """)
            conn.execute("""
                CREATE INDEX IF NOT EXISTS idx_access_count ON cache_entries(access_count)
            """)

    def _generate_key(self, data: str) -> str:
        """Derive the internal cache key (MD5 hex digest) for *data*.

        MD5 is used purely as a filename/key hash, not for security.
        """
        return hashlib.md5(data.encode()).hexdigest()

    def put(self, key: str, data: bytes, ttl_seconds: int = 3600,
            metadata: Optional[Dict] = None):
        """Store *data* under *key* with a TTL; failures are logged, not raised."""
        with self.lock:
            try:
                cache_key = self._generate_key(key)
                file_path = os.path.join(self.cache_dir, f"{cache_key}.cache")

                # Payload goes to disk first, bookkeeping second.
                with open(file_path, 'wb') as f:
                    f.write(data)

                expires_at = datetime.now() + timedelta(seconds=ttl_seconds)
                with sqlite3.connect(self.cache_db) as conn:
                    conn.execute("""
                        INSERT OR REPLACE INTO cache_entries
                        (key, file_path, created_at, expires_at, size_bytes, metadata)
                        VALUES (?, ?, ?, ?, ?, ?)
                    """, (
                        cache_key, file_path, datetime.now(), expires_at,
                        len(data), json.dumps(metadata or {})
                    ))

                # Evict least-used entries if we went over the size budget.
                self._cleanup_if_needed()

                self.logger.debug(f"Cached data for key: {key} (size: {len(data)} bytes)")

            except Exception as e:
                self.logger.error(f"Failed to cache data for key {key}: {e}")

    def get(self, key: str) -> Optional[bytes]:
        """Return the cached bytes for *key*, or None on miss/expiry/error."""
        with self.lock:
            try:
                cache_key = self._generate_key(key)

                with sqlite3.connect(self.cache_db) as conn:
                    cur = conn.execute("""
                        SELECT file_path, expires_at FROM cache_entries
                        WHERE key = ? AND expires_at > ?
                    """, (cache_key, datetime.now()))

                    record = cur.fetchone()
                    if record is None:
                        return None

                    file_path = record[0]

                    conn.execute("""
                        UPDATE cache_entries SET access_count = access_count + 1
                        WHERE key = ?
                    """, (cache_key,))

                    if os.path.exists(file_path):
                        with open(file_path, 'rb') as f:
                            payload = f.read()

                        self.logger.debug(f"Cache hit for key: {key}")
                        return payload

                    # Backing file vanished: drop the stale database row.
                    conn.execute("DELETE FROM cache_entries WHERE key = ?", (cache_key,))

            except Exception as e:
                self.logger.error(f"Failed to retrieve cached data for key {key}: {e}")

            return None

    def _cleanup_expired(self):
        """Remove all entries (files and rows) whose TTL has elapsed."""
        try:
            with sqlite3.connect(self.cache_db) as conn:
                cur = conn.execute("""
                    SELECT key, file_path FROM cache_entries
                    WHERE expires_at <= ?
                """, (datetime.now(),))
                expired_entries = cur.fetchall()

                for cache_key, file_path in expired_entries:
                    try:
                        if os.path.exists(file_path):
                            os.remove(file_path)
                    except Exception as e:
                        self.logger.warning(f"Failed to remove expired cache file {file_path}: {e}")

                conn.execute("DELETE FROM cache_entries WHERE expires_at <= ?", (datetime.now(),))

                if expired_entries:
                    self.logger.info(f"Cleaned up {len(expired_entries)} expired cache entries")

        except Exception as e:
            self.logger.error(f"Failed to cleanup expired cache entries: {e}")

    def _cleanup_if_needed(self):
        """Evict least-accessed, oldest entries until the cache is ~80% full."""
        try:
            with sqlite3.connect(self.cache_db) as conn:
                cur = conn.execute("SELECT SUM(size_bytes) FROM cache_entries")
                total_size = cur.fetchone()[0] or 0

                if total_size <= self.max_size_bytes:
                    return

                victims = conn.execute("""
                    SELECT key, file_path, size_bytes FROM cache_entries
                    ORDER BY access_count ASC, created_at ASC
                """).fetchall()

                removed_size = 0
                target_size = self.max_size_bytes * 0.8  # Remove until 80% full

                for cache_key, file_path, size_bytes in victims:
                    try:
                        if os.path.exists(file_path):
                            os.remove(file_path)

                        conn.execute("DELETE FROM cache_entries WHERE key = ?", (cache_key,))
                        removed_size += size_bytes

                        if total_size - removed_size <= target_size:
                            break

                    except Exception as e:
                        self.logger.warning(f"Failed to remove cache file {file_path}: {e}")

                if removed_size > 0:
                    self.logger.info(f"Cache cleanup: removed {removed_size / (1024*1024):.1f} MB")

        except Exception as e:
            self.logger.error(f"Failed to cleanup cache: {e}")

    def clear(self):
        """Delete every cached payload file and truncate the bookkeeping table."""
        with self.lock:
            try:
                with sqlite3.connect(self.cache_db) as conn:
                    for (file_path,) in conn.execute("SELECT file_path FROM cache_entries"):
                        try:
                            if os.path.exists(file_path):
                                os.remove(file_path)
                        except Exception as e:
                            self.logger.warning(f"Failed to remove cache file {file_path}: {e}")

                    conn.execute("DELETE FROM cache_entries")

                self.logger.info("Cache cleared successfully")

            except Exception as e:
                self.logger.error(f"Failed to clear cache: {e}")

    def get_stats(self) -> Dict[str, Any]:
        """Return aggregate cache statistics, or {} if the query fails."""
        try:
            with sqlite3.connect(self.cache_db) as conn:
                cur = conn.execute("""
                    SELECT
                        COUNT(*) as entry_count,
                        SUM(size_bytes) as total_size,
                        SUM(access_count) as total_accesses,
                        AVG(access_count) as avg_accesses
                    FROM cache_entries
                """)
                row = cur.fetchone()

            return {
                'entry_count': row[0] or 0,
                'total_size_mb': (row[1] or 0) / (1024 * 1024),
                'total_accesses': row[2] or 0,
                'avg_accesses': row[3] or 0,
                'max_size_mb': self.max_size_bytes / (1024 * 1024)
            }

        except Exception as e:
            self.logger.error(f"Failed to get cache stats: {e}")
            return {}
||||
|
||||
|
||||
class MemoryMonitor:
|
||||
"""Monitor and optimize memory usage."""
|
||||
|
||||
@@ -747,7 +515,6 @@ class ResumeManager:
|
||||
|
||||
# Global instances
|
||||
speed_limiter = SpeedLimiter()
|
||||
download_cache = DownloadCache()
|
||||
memory_monitor = MemoryMonitor()
|
||||
download_manager = ParallelDownloadManager(max_workers=3, speed_limiter=speed_limiter)
|
||||
resume_manager = ResumeManager()
|
||||
@@ -768,7 +535,6 @@ def cleanup_performance_monitoring():
|
||||
# Export main components
|
||||
__all__ = [
|
||||
'SpeedLimiter',
|
||||
'DownloadCache',
|
||||
'MemoryMonitor',
|
||||
'ParallelDownloadManager',
|
||||
'ResumeManager',
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
@@ -1,38 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Simple script to test the API endpoint without crashing the server.
|
||||
"""
|
||||
import requests
|
||||
import json
|
||||
import time
|
||||
|
||||
def test_api():
    """Hit /api/series on the local dev server and print a summary."""
    url = "http://localhost:5000/api/series"
    try:
        print("Testing API endpoint...")
        response = requests.get(url, timeout=30)
        print(f"Status Code: {response.status_code}")

        if response.status_code != 200:
            print(f"Error: {response.text}")
            return

        payload = response.json()
        print(f"Response status: {payload.get('status', 'unknown')}")
        print(f"Total series: {payload.get('total_series', 0)}")
        print(f"Message: {payload.get('message', 'No message')}")

        # Print first few series
        series_list = payload.get('series', [])
        if not series_list:
            print("No series found in response")
            return

        print(f"\nFirst 3 series:")
        for index, entry in enumerate(series_list[:3]):
            print(f"  {index+1}. {entry.get('name', 'Unknown')} ({entry.get('folder', 'Unknown folder')})")

    except requests.exceptions.RequestException as e:
        print(f"Request failed: {e}")
    except Exception as e:
        print(f"Error: {e}")


if __name__ == "__main__":
    test_api()
|
||||
@@ -1,3 +0,0 @@
|
||||
"""
|
||||
Web presentation layer with controllers, middleware, and templates.
|
||||
"""
|
||||
@@ -1 +0,0 @@
|
||||
# Web controllers - Flask blueprints
|
||||
@@ -1 +0,0 @@
|
||||
# Admin controllers
|
||||
@@ -1 +0,0 @@
|
||||
# API endpoints version 1
|
||||
@@ -1 +0,0 @@
|
||||
# API middleware
|
||||
@@ -1 +0,0 @@
|
||||
# Web middleware
|
||||
@@ -79,7 +79,7 @@ def init_series_app():
|
||||
"""Initialize the SeriesApp with configuration directory."""
|
||||
global series_app
|
||||
from config import config
|
||||
from Main import SeriesApp
|
||||
from src.cli.Main import SeriesApp
|
||||
directory_to_search = config.anime_directory
|
||||
series_app = SeriesApp(directory_to_search)
|
||||
return series_app
|
||||
|
||||
@@ -1,14 +0,0 @@
|
||||
"""
|
||||
WSGI entry point for production deployment.
|
||||
This file is used by WSGI servers like Gunicorn, uWSGI, etc.
|
||||
"""
|
||||
|
||||
from src.server.app import create_app
|
||||
|
||||
# Create the Flask application instance
|
||||
application = create_app()
|
||||
app = application # Some WSGI servers expect 'app' variable
|
||||
|
||||
if __name__ == "__main__":
|
||||
# This is for development only
|
||||
app.run(debug=False)
|
||||
Reference in New Issue
Block a user