diff --git a/.coverage b/.coverage index b338456..1b41ab9 100644 Binary files a/.coverage and b/.coverage differ diff --git a/README.md b/README.md new file mode 100644 index 0000000..a78b12c --- /dev/null +++ b/README.md @@ -0,0 +1,140 @@ +# Aniworld Download Manager + +A web-based anime download manager with REST API, WebSocket real-time updates, and a modern web interface. + +## Features + +- Web interface for managing anime library +- REST API for programmatic access +- WebSocket real-time progress updates +- Download queue with priority management +- Automatic library scanning for missing episodes +- JWT-based authentication +- SQLite database for persistence + +## Quick Start + +### Prerequisites + +- Python 3.10+ +- Conda (recommended) or virtualenv + +### Installation + +1. Clone the repository: + +```bash +git clone https://github.com/your-repo/aniworld.git +cd aniworld +``` + +2. Create and activate conda environment: + +```bash +conda create -n AniWorld python=3.10 +conda activate AniWorld +``` + +3. Install dependencies: + +```bash +pip install -r requirements.txt +``` + +4. Start the server: + +```bash +python -m uvicorn src.server.fastapi_app:app --host 127.0.0.1 --port 8000 +``` + +5. Open http://127.0.0.1:8000 in your browser + +### First-Time Setup + +1. Navigate to http://127.0.0.1:8000/setup +2. Set a master password (minimum 8 characters, mixed case, number, special character) +3. Configure your anime directory path +4. Login with your master password + +## Documentation + +| Document | Description | +| ---------------------------------------------- | -------------------------------- | +| [docs/API.md](docs/API.md) | REST API and WebSocket reference | +| [docs/ARCHITECTURE.md](docs/ARCHITECTURE.md) | System architecture and design | +| [docs/CONFIGURATION.md](docs/CONFIGURATION.md) | Configuration options | +| [docs/DATABASE.md](docs/DATABASE.md) | Database schema | +| [docs/DEVELOPMENT.md](docs/DEVELOPMENT.md) | Developer setup guide | +| [docs/TESTING.md](docs/TESTING.md) | Testing guidelines | + +## Project Structure + +``` +src/ ++-- cli/ # CLI interface (legacy) ++-- config/ # Application settings ++-- core/ # Domain logic +| +-- SeriesApp.py # Main application facade +| +-- SerieScanner.py # Directory scanning +| +-- entities/ # Domain entities +| +-- providers/ # External provider adapters ++-- server/ # FastAPI web server + +-- api/ # REST API endpoints + +-- services/ # Business logic + +-- models/ # Pydantic models + +-- database/ # SQLAlchemy ORM + +-- middleware/ # Auth, rate limiting +``` + +## API Endpoints + +| Endpoint | Description | +| ------------------------------ | -------------------------------- | +| `POST /api/auth/login` | Authenticate and get JWT token | +| `GET /api/anime` | List anime with missing episodes | +| `GET /api/anime/search?query=` | Search for anime | +| `POST /api/queue/add` | Add episodes to download queue | +| `POST /api/queue/start` | Start queue processing | +| `GET /api/queue/status` | Get queue status | +| `WS /ws/connect` | WebSocket for real-time updates | + +See [docs/API.md](docs/API.md) for complete API reference. 
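+
+The sketch below shows a typical session against these endpoints: log in, then list series with missing episodes. It is illustrative only: it assumes the server runs at the default address, that the `requests` package is available, and it uses the field names documented in [docs/API.md](docs/API.md).
+
+```python
+# Hedged usage sketch: authenticate, then list series with missing episodes.
+import requests
+
+BASE = "http://127.0.0.1:8000"
+
+# Log in with the master password and keep the JWT from the response
+resp = requests.post(f"{BASE}/api/auth/login",
+                     json={"password": "your-master-password", "remember": False})
+resp.raise_for_status()
+token = resp.json()["access_token"]
+
+# Authenticated request: list anime with missing episodes
+headers = {"Authorization": f"Bearer {token}"}
+for serie in requests.get(f"{BASE}/api/anime", headers=headers).json():
+    print(serie["key"], serie["missing_episodes"])
+```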
+ +## Configuration + +Environment variables (via `.env` file): + +| Variable | Default | Description | +| ----------------- | ------------------------------ | ---------------------- | +| `JWT_SECRET_KEY` | (random) | Secret for JWT signing | +| `DATABASE_URL` | `sqlite:///./data/aniworld.db` | Database connection | +| `ANIME_DIRECTORY` | (empty) | Path to anime library | +| `LOG_LEVEL` | `INFO` | Logging level | + +See [docs/CONFIGURATION.md](docs/CONFIGURATION.md) for all options. + +## Running Tests + +```bash +# Run all tests +conda run -n AniWorld python -m pytest tests/ -v + +# Run unit tests only +conda run -n AniWorld python -m pytest tests/unit/ -v + +# Run integration tests +conda run -n AniWorld python -m pytest tests/integration/ -v +``` + +## Technology Stack + +- **Web Framework**: FastAPI 0.104.1 +- **Database**: SQLite + SQLAlchemy 2.0 +- **Auth**: JWT (python-jose) + passlib +- **Validation**: Pydantic 2.5 +- **Logging**: structlog +- **Testing**: pytest + pytest-asyncio + +## License + +MIT License diff --git a/SERVER_COMMANDS.md b/SERVER_COMMANDS.md deleted file mode 100644 index d0a0c7c..0000000 --- a/SERVER_COMMANDS.md +++ /dev/null @@ -1,215 +0,0 @@ -# Server Management Commands - -Quick reference for starting, stopping, and managing the Aniworld server. - -## Start Server - -### Using the start script (Recommended) - -```bash -./start_server.sh -``` - -### Using conda directly - -```bash -conda run -n AniWorld python run_server.py -``` - -### Using uvicorn directly - -```bash -conda run -n AniWorld python -m uvicorn src.server.fastapi_app:app --host 127.0.0.1 --port 8000 --reload -``` - -## Stop Server - -### Using the stop script (Recommended) - -```bash -./stop_server.sh -``` - -### Manual commands - -**Kill uvicorn processes:** - -```bash -pkill -f "uvicorn.*fastapi_app:app" -``` - -**Kill process on port 8000:** - -```bash -lsof -ti:8000 | xargs kill -9 -``` - -**Kill run_server.py processes:** - -```bash -pkill -f "run_server.py" -``` - -## Check Server Status - -**Check if port 8000 is in use:** - -```bash -lsof -i:8000 -``` - -**Check for running uvicorn processes:** - -```bash -ps aux | grep uvicorn -``` - -**Check server is responding:** - -```bash -curl http://127.0.0.1:8000/api/health -``` - -## Restart Server - -```bash -./stop_server.sh && ./start_server.sh -``` - -## Common Issues - -### "Address already in use" Error - -**Problem:** Port 8000 is already occupied - -**Solution:** - -```bash -./stop_server.sh -# or -lsof -ti:8000 | xargs kill -9 -``` - -### Server not responding - -**Check logs:** - -```bash -tail -f logs/app.log -``` - -**Check if process is running:** - -```bash -ps aux | grep uvicorn -``` - -### Cannot connect to server - -**Verify server is running:** - -```bash -curl http://127.0.0.1:8000/api/health -``` - -**Check firewall:** - -```bash -sudo ufw status -``` - -## Development Mode - -**Run with auto-reload:** - -```bash -./start_server.sh # Already includes --reload -``` - -**Run with custom port:** - -```bash -conda run -n AniWorld python -m uvicorn src.server.fastapi_app:app --host 127.0.0.1 --port 8080 --reload -``` - -**Run with debug logging:** - -```bash -export LOG_LEVEL=DEBUG -./start_server.sh -``` - -## Production Mode - -**Run without auto-reload:** - -```bash -conda run -n AniWorld python -m uvicorn src.server.fastapi_app:app --host 0.0.0.0 --port 8000 --workers 4 -``` - -**Run with systemd (Linux):** - -```bash -sudo systemctl start aniworld -sudo systemctl stop aniworld -sudo systemctl restart aniworld -sudo systemctl 
status aniworld -``` - -## URLs - -- **Web Interface:** http://127.0.0.1:8000 -- **API Documentation:** http://127.0.0.1:8000/api/docs -- **Login Page:** http://127.0.0.1:8000/login -- **Queue Management:** http://127.0.0.1:8000/queue -- **Health Check:** http://127.0.0.1:8000/api/health - -## Default Credentials - -- **Password:** `Hallo123!` - -## Log Files - -- **Application logs:** `logs/app.log` -- **Download logs:** `logs/downloads/` -- **Error logs:** Check console output or systemd journal - -## Quick Troubleshooting - -| Symptom | Solution | -| ------------------------ | ------------------------------------ | -| Port already in use | `./stop_server.sh` | -| Server won't start | Check `logs/app.log` | -| 404 errors | Verify URL and check routing | -| WebSocket not connecting | Check server is running and firewall | -| Slow responses | Check system resources (`htop`) | -| Database errors | Check `data/` directory permissions | - -## Environment Variables - -```bash -# Set log level -export LOG_LEVEL=DEBUG|INFO|WARNING|ERROR - -# Set server port -export PORT=8000 - -# Set host -export HOST=127.0.0.1 - -# Set workers (production) -export WORKERS=4 -``` - -## Related Scripts - -- `start_server.sh` - Start the server -- `stop_server.sh` - Stop the server -- `run_server.py` - Python server runner -- `scripts/setup.py` - Initial setup - -## More Information - -- [User Guide](docs/user_guide.md) -- [API Reference](docs/api_reference.md) -- [Deployment Guide](docs/deployment.md) diff --git a/__pycache__/Loader.cpython-310.pyc b/__pycache__/Loader.cpython-310.pyc deleted file mode 100644 index 90bd0ff..0000000 Binary files a/__pycache__/Loader.cpython-310.pyc and /dev/null differ diff --git a/data/aniworld.db-shm b/data/aniworld.db-shm new file mode 100644 index 0000000..ecba76f Binary files /dev/null and b/data/aniworld.db-shm differ diff --git a/data/aniworld.db-wal b/data/aniworld.db-wal new file mode 100644 index 0000000..f5171de Binary files /dev/null and b/data/aniworld.db-wal differ diff --git a/data/config.json b/data/config.json index 4346592..264a02d 100644 --- a/data/config.json +++ b/data/config.json @@ -17,7 +17,7 @@ "keep_days": 30 }, "other": { - "master_password_hash": "$pbkdf2-sha256$29000$854zxnhvzXmPsVbqvXduTQ$G0HVRAt3kyO5eFwvo.ILkpX9JdmyXYJ9MNPTS/UxAGk", + "master_password_hash": "$pbkdf2-sha256$29000$o/R.b.0dYwzhfG/t/R9DSA$kQAcjHoByVaftRAT1OaZg5rILdhMSDNS6uIz67jwdOo", "anime_directory": "/mnt/server/serien/Serien/" }, "version": "1.0.0" diff --git a/data/config_backups/config_backup_20251128_161248.json b/data/config_backups/config_backup_20251128_161248.json deleted file mode 100644 index ca736ff..0000000 --- a/data/config_backups/config_backup_20251128_161248.json +++ /dev/null @@ -1,24 +0,0 @@ -{ - "name": "Aniworld", - "data_dir": "data", - "scheduler": { - "enabled": true, - "interval_minutes": 60 - }, - "logging": { - "level": "INFO", - "file": null, - "max_bytes": null, - "backup_count": 3 - }, - "backup": { - "enabled": false, - "path": "data/backups", - "keep_days": 30 - }, - "other": { - "master_password_hash": "$pbkdf2-sha256$29000$VCqllLL2vldKyTmHkJIyZg$jNllpzlpENdgCslmS.tG.PGxRZ9pUnrqFEQFveDEcYk", - "anime_directory": "/mnt/server/serien/Serien/" - }, - "version": "1.0.0" -} \ No newline at end of file diff --git a/data/config_backups/config_backup_20251128_161448.json b/data/config_backups/config_backup_20251128_161448.json deleted file mode 100644 index 8bc5e7a..0000000 --- a/data/config_backups/config_backup_20251128_161448.json +++ /dev/null @@ -1,24 
+0,0 @@ -{ - "name": "Aniworld", - "data_dir": "data", - "scheduler": { - "enabled": true, - "interval_minutes": 60 - }, - "logging": { - "level": "INFO", - "file": null, - "max_bytes": null, - "backup_count": 3 - }, - "backup": { - "enabled": false, - "path": "data/backups", - "keep_days": 30 - }, - "other": { - "master_password_hash": "$pbkdf2-sha256$29000$3/t/7733PkdoTckZQyildA$Nz9SdX2ZgqBwyzhQ9FGNcnzG1X.TW9oce3sDxJbVSdY", - "anime_directory": "/mnt/server/serien/Serien/" - }, - "version": "1.0.0" -} \ No newline at end of file diff --git a/data/download_queue.json b/data/download_queue.json deleted file mode 100644 index 5cffb6f..0000000 --- a/data/download_queue.json +++ /dev/null @@ -1,327 +0,0 @@ -{ - "pending": [ - { - "id": "ae6424dc-558b-4946-9f07-20db1a09bf33", - "serie_id": "test-series-2", - "serie_folder": "Another Series (2024)", - "serie_name": "Another Series", - "episode": { - "season": 1, - "episode": 1, - "title": null - }, - "status": "pending", - "priority": "HIGH", - "added_at": "2025-11-28T17:54:38.593236Z", - "started_at": null, - "completed_at": null, - "progress": null, - "error": null, - "retry_count": 0, - "source_url": null - }, - { - "id": "011c2038-9fe3-41cb-844f-ce50c40e415f", - "serie_id": "series-high", - "serie_folder": "Series High (2024)", - "serie_name": "Series High", - "episode": { - "season": 1, - "episode": 1, - "title": null - }, - "status": "pending", - "priority": "HIGH", - "added_at": "2025-11-28T17:54:38.632289Z", - "started_at": null, - "completed_at": null, - "progress": null, - "error": null, - "retry_count": 0, - "source_url": null - }, - { - "id": "0eee56e0-414d-4cd7-8da7-b5a139abd8b5", - "serie_id": "series-normal", - "serie_folder": "Series Normal (2024)", - "serie_name": "Series Normal", - "episode": { - "season": 1, - "episode": 1, - "title": null - }, - "status": "pending", - "priority": "NORMAL", - "added_at": "2025-11-28T17:54:38.635082Z", - "started_at": null, - "completed_at": null, - "progress": null, - "error": null, - "retry_count": 0, - "source_url": null - }, - { - "id": "eea9f4f3-98e5-4041-9fc6-92e3d4c6fee6", - "serie_id": "series-low", - "serie_folder": "Series Low (2024)", - "serie_name": "Series Low", - "episode": { - "season": 1, - "episode": 1, - "title": null - }, - "status": "pending", - "priority": "LOW", - "added_at": "2025-11-28T17:54:38.637038Z", - "started_at": null, - "completed_at": null, - "progress": null, - "error": null, - "retry_count": 0, - "source_url": null - }, - { - "id": "b6f84ea9-86c8-4cc9-90e5-c7c6ce10c593", - "serie_id": "test-series", - "serie_folder": "Test Series (2024)", - "serie_name": "Test Series", - "episode": { - "season": 1, - "episode": 1, - "title": null - }, - "status": "pending", - "priority": "NORMAL", - "added_at": "2025-11-28T17:54:38.801266Z", - "started_at": null, - "completed_at": null, - "progress": null, - "error": null, - "retry_count": 0, - "source_url": null - }, - { - "id": "412aa28d-9763-41ef-913d-3d63919f9346", - "serie_id": "test-series", - "serie_folder": "Test Series (2024)", - "serie_name": "Test Series", - "episode": { - "season": 1, - "episode": 1, - "title": null - }, - "status": "pending", - "priority": "NORMAL", - "added_at": "2025-11-28T17:54:38.867939Z", - "started_at": null, - "completed_at": null, - "progress": null, - "error": null, - "retry_count": 0, - "source_url": null - }, - { - "id": "3a036824-2d14-41dd-81b8-094dd322a137", - "serie_id": "invalid-series", - "serie_folder": "Invalid Series (2024)", - "serie_name": "Invalid Series", - "episode": { - 
"season": 99, - "episode": 99, - "title": null - }, - "status": "pending", - "priority": "NORMAL", - "added_at": "2025-11-28T17:54:38.935125Z", - "started_at": null, - "completed_at": null, - "progress": null, - "error": null, - "retry_count": 0, - "source_url": null - }, - { - "id": "1f4108ed-5488-4f46-ad5b-fe27e3b04790", - "serie_id": "test-series", - "serie_folder": "Test Series (2024)", - "serie_name": "Test Series", - "episode": { - "season": 1, - "episode": 1, - "title": null - }, - "status": "pending", - "priority": "NORMAL", - "added_at": "2025-11-28T17:54:38.968296Z", - "started_at": null, - "completed_at": null, - "progress": null, - "error": null, - "retry_count": 0, - "source_url": null - }, - { - "id": "5e880954-1a9f-450a-8008-5b9d6ac07d66", - "serie_id": "series-2", - "serie_folder": "Series 2 (2024)", - "serie_name": "Series 2", - "episode": { - "season": 1, - "episode": 1, - "title": null - }, - "status": "pending", - "priority": "NORMAL", - "added_at": "2025-11-28T17:54:39.055885Z", - "started_at": null, - "completed_at": null, - "progress": null, - "error": null, - "retry_count": 0, - "source_url": null - }, - { - "id": "2415ac21-509b-4d71-b5b9-b824116d6785", - "serie_id": "series-0", - "serie_folder": "Series 0 (2024)", - "serie_name": "Series 0", - "episode": { - "season": 1, - "episode": 1, - "title": null - }, - "status": "pending", - "priority": "NORMAL", - "added_at": "2025-11-28T17:54:39.056795Z", - "started_at": null, - "completed_at": null, - "progress": null, - "error": null, - "retry_count": 0, - "source_url": null - }, - { - "id": "716f9823-d59a-4b04-863b-c75fd54bc464", - "serie_id": "series-1", - "serie_folder": "Series 1 (2024)", - "serie_name": "Series 1", - "episode": { - "season": 1, - "episode": 1, - "title": null - }, - "status": "pending", - "priority": "NORMAL", - "added_at": "2025-11-28T17:54:39.057486Z", - "started_at": null, - "completed_at": null, - "progress": null, - "error": null, - "retry_count": 0, - "source_url": null - }, - { - "id": "36ad4323-daa9-49c4-97e8-a0aec0cca7a1", - "serie_id": "series-4", - "serie_folder": "Series 4 (2024)", - "serie_name": "Series 4", - "episode": { - "season": 1, - "episode": 1, - "title": null - }, - "status": "pending", - "priority": "NORMAL", - "added_at": "2025-11-28T17:54:39.058179Z", - "started_at": null, - "completed_at": null, - "progress": null, - "error": null, - "retry_count": 0, - "source_url": null - }, - { - "id": "695ee7a9-42bb-4953-9a8a-10bd7f533369", - "serie_id": "series-3", - "serie_folder": "Series 3 (2024)", - "serie_name": "Series 3", - "episode": { - "season": 1, - "episode": 1, - "title": null - }, - "status": "pending", - "priority": "NORMAL", - "added_at": "2025-11-28T17:54:39.058816Z", - "started_at": null, - "completed_at": null, - "progress": null, - "error": null, - "retry_count": 0, - "source_url": null - }, - { - "id": "aa948908-c410-42ec-85d6-a0298d7d95a5", - "serie_id": "persistent-series", - "serie_folder": "Persistent Series (2024)", - "serie_name": "Persistent Series", - "episode": { - "season": 1, - "episode": 1, - "title": null - }, - "status": "pending", - "priority": "NORMAL", - "added_at": "2025-11-28T17:54:39.152427Z", - "started_at": null, - "completed_at": null, - "progress": null, - "error": null, - "retry_count": 0, - "source_url": null - }, - { - "id": "2537f20e-f394-4c68-81d5-48be3c0c402a", - "serie_id": "ws-series", - "serie_folder": "WebSocket Series (2024)", - "serie_name": "WebSocket Series", - "episode": { - "season": 1, - "episode": 1, - "title": null - }, - 
"status": "pending", - "priority": "NORMAL", - "added_at": "2025-11-28T17:54:39.219061Z", - "started_at": null, - "completed_at": null, - "progress": null, - "error": null, - "retry_count": 0, - "source_url": null - }, - { - "id": "aaaf3b05-cce8-47d5-b350-59c5d72533ad", - "serie_id": "workflow-series", - "serie_folder": "Workflow Test Series (2024)", - "serie_name": "Workflow Test Series", - "episode": { - "season": 1, - "episode": 1, - "title": null - }, - "status": "pending", - "priority": "HIGH", - "added_at": "2025-11-28T17:54:39.254462Z", - "started_at": null, - "completed_at": null, - "progress": null, - "error": null, - "retry_count": 0, - "source_url": null - } - ], - "active": [], - "failed": [], - "timestamp": "2025-11-28T17:54:39.259761+00:00" -} \ No newline at end of file diff --git a/diagrams/README.md b/diagrams/README.md new file mode 100644 index 0000000..1e6144d --- /dev/null +++ b/diagrams/README.md @@ -0,0 +1,23 @@ +# Architecture Diagrams + +This directory contains architecture diagram source files for the Aniworld documentation. + +## Diagrams + +### System Architecture (Mermaid) + +See [system-architecture.mmd](system-architecture.mmd) for the system overview diagram. + +### Rendering + +Diagrams can be rendered using: + +- Mermaid Live Editor: https://mermaid.live/ +- VS Code Mermaid extension +- GitHub/GitLab native Mermaid support + +## Formats + +- `.mmd` - Mermaid diagram source files +- `.svg` - Exported vector graphics (add when needed) +- `.png` - Exported raster graphics (add when needed) diff --git a/diagrams/download-flow.mmd b/diagrams/download-flow.mmd new file mode 100644 index 0000000..66800ea --- /dev/null +++ b/diagrams/download-flow.mmd @@ -0,0 +1,44 @@ +%%{init: {'theme': 'base'}}%% +sequenceDiagram + participant Client + participant FastAPI + participant AuthMiddleware + participant DownloadService + participant ProgressService + participant WebSocketService + participant SeriesApp + participant Database + + Note over Client,Database: Download Flow + + %% Add to queue + Client->>FastAPI: POST /api/queue/add + FastAPI->>AuthMiddleware: Validate JWT + AuthMiddleware-->>FastAPI: OK + FastAPI->>DownloadService: add_to_queue() + DownloadService->>Database: save_item() + Database-->>DownloadService: item_id + DownloadService-->>FastAPI: [item_ids] + FastAPI-->>Client: 201 Created + + %% Start queue + Client->>FastAPI: POST /api/queue/start + FastAPI->>AuthMiddleware: Validate JWT + AuthMiddleware-->>FastAPI: OK + FastAPI->>DownloadService: start_queue_processing() + + loop For each pending item + DownloadService->>SeriesApp: download_episode() + + loop Progress updates + SeriesApp->>ProgressService: emit("progress_updated") + ProgressService->>WebSocketService: broadcast_to_room() + WebSocketService-->>Client: WebSocket message + end + + SeriesApp-->>DownloadService: completed + DownloadService->>Database: update_status() + end + + DownloadService-->>FastAPI: OK + FastAPI-->>Client: 200 OK diff --git a/diagrams/system-architecture.mmd b/diagrams/system-architecture.mmd new file mode 100644 index 0000000..6445d57 --- /dev/null +++ b/diagrams/system-architecture.mmd @@ -0,0 +1,82 @@ +%%{init: {'theme': 'base', 'themeVariables': { 'primaryColor': '#4a90d9'}}}%% +flowchart TB + subgraph Clients["Client Layer"] + Browser["Web Browser
<br/>(HTML/CSS/JS)"]
+        CLI["CLI Client<br/>(Main.py)"]
+    end
+
+    subgraph Server["Server Layer (FastAPI)"]
+        direction TB
+        Middleware["Middleware<br/>Auth, Rate Limit, Error Handler"]
+
+        subgraph API["API Routers"]
+            AuthAPI["/api/auth"]
+            AnimeAPI["/api/anime"]
+            QueueAPI["/api/queue"]
+            ConfigAPI["/api/config"]
+            SchedulerAPI["/api/scheduler"]
+            HealthAPI["/health"]
+            WebSocketAPI["/ws"]
+        end
+
+        subgraph Services["Services"]
+            AuthService["AuthService"]
+            AnimeService["AnimeService"]
+            DownloadService["DownloadService"]
+            ConfigService["ConfigService"]
+            ProgressService["ProgressService"]
+            WebSocketService["WebSocketService"]
+        end
+    end
+
+    subgraph Core["Core Layer"]
+        SeriesApp["SeriesApp"]
+        SerieScanner["SerieScanner"]
+        SerieList["SerieList"]
+    end
+
+    subgraph Data["Data Layer"]
+        SQLite[(SQLite<br/>aniworld.db)]
+        ConfigJSON[(config.json)]
+        FileSystem[(File System<br/>Anime Directory)]
+    end
+
+    subgraph External["External"]
+        Provider["Anime Provider<br/>(aniworld.to)"]
+    end
+
+    %% Client connections
+    Browser -->|HTTP/WebSocket| Middleware
+    CLI -->|Direct| SeriesApp
+
+    %% Middleware to API
+    Middleware --> API
+
+    %% API to Services
+    AuthAPI --> AuthService
+    AnimeAPI --> AnimeService
+    QueueAPI --> DownloadService
+    ConfigAPI --> ConfigService
+    SchedulerAPI --> AnimeService
+    WebSocketAPI --> WebSocketService
+
+    %% Services to Core
+    AnimeService --> SeriesApp
+    DownloadService --> SeriesApp
+
+    %% Services to Data
+    AuthService --> ConfigJSON
+    ConfigService --> ConfigJSON
+    DownloadService --> SQLite
+    AnimeService --> SQLite
+
+    %% Core to Data
+    SeriesApp --> SerieScanner
+    SeriesApp --> SerieList
+    SerieScanner --> FileSystem
+    SerieScanner --> Provider
+
+    %% Event flow
+    ProgressService -.->|Events| WebSocketService
+    DownloadService -.->|Progress| ProgressService
+    WebSocketService -.->|Broadcast| Browser
diff --git a/docs/API.md b/docs/API.md
new file mode 100644
index 0000000..76a3d6b
--- /dev/null
+++ b/docs/API.md
@@ -0,0 +1,1194 @@
+# API Documentation
+
+## Document Purpose
+
+This document provides a comprehensive REST API and WebSocket reference for the Aniworld application.
+
+Source: [src/server/fastapi_app.py](../src/server/fastapi_app.py#L1-L252)
+
+---
+
+## 1. API Overview
+
+### Base URL and Versioning
+
+| Environment         | Base URL                          |
+| ------------------- | --------------------------------- |
+| Local Development   | `http://127.0.0.1:8000`           |
+| API Documentation   | `http://127.0.0.1:8000/api/docs`  |
+| ReDoc Documentation | `http://127.0.0.1:8000/api/redoc` |
+
+The API does not use versioning prefixes. All endpoints are available under `/api/*`.
+
+Source: [src/server/fastapi_app.py](../src/server/fastapi_app.py#L177-L184)
+
+### Authentication
+
+The API uses JWT Bearer Token authentication.
+
+**Header Format:**
+
+```
+Authorization: Bearer <token>
+```
+
+**Public Endpoints (no authentication required):**
+
+- `/api/auth/*` - Authentication endpoints
+- `/api/health` - Health check endpoints
+- `/api/docs`, `/api/redoc` - API documentation
+- `/static/*` - Static files
+- `/`, `/login`, `/setup`, `/queue` - UI pages
+
+Source: [src/server/middleware/auth.py](../src/server/middleware/auth.py#L39-L52)
+
+### Content Types
+
+| Direction | Content-Type                  |
+| --------- | ----------------------------- |
+| Request   | `application/json`            |
+| Response  | `application/json`            |
+| WebSocket | `application/json` (messages) |
+
+### Common Headers
+
+| Header          | Required | Description                          |
+| --------------- | -------- | ------------------------------------ |
+| `Authorization` | Yes\*    | Bearer token for protected endpoints |
+| `Content-Type`  | Yes      | `application/json` for POST/PUT      |
+| `Origin`        | No       | Required for CORS preflight          |
+
+\*Not required for public endpoints listed above.
+
+---
+
+## 2. Authentication Endpoints
+
+Prefix: `/api/auth`
+
+Source: [src/server/api/auth.py](../src/server/api/auth.py#L1-L180)
+
+### POST /api/auth/setup
+
+Initial setup endpoint to configure the master password. Can only be called once.
+
+**Request Body:**
+
+```json
+{
+  "master_password": "string (min 8 chars, mixed case, number, special char)",
+  "anime_directory": "string (optional, path to anime folder)"
+}
+```
+
+**Response (201 Created):**
+
+```json
+{
+  "status": "ok"
+}
+```
+
+**Errors:**
+
+- `400 Bad Request` - Master password already configured or invalid password
+
+Source: [src/server/api/auth.py](../src/server/api/auth.py#L28-L90)
+
+### POST /api/auth/login
+
+Validate master password and return JWT token.
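+
+A hedged invocation sketch follows (Python with `requests`, assumed available); the formal request and response schemas, plus error codes, are listed below.
+
+```python
+# Hedged sketch: log in and handle the documented error cases.
+import requests
+
+resp = requests.post(
+    "http://127.0.0.1:8000/api/auth/login",
+    json={"password": "your-master-password", "remember": False},
+)
+if resp.status_code == 401:
+    print("Invalid credentials")
+elif resp.status_code == 429:
+    print("Account locked after repeated failures; retry later")
+else:
+    resp.raise_for_status()
+    print("Bearer token:", resp.json()["access_token"])
+```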
+ +**Request Body:** + +```json +{ + "password": "string", + "remember": false +} +``` + +**Response (200 OK):** + +```json +{ + "access_token": "eyJ...", + "token_type": "bearer", + "expires_at": "2025-12-14T10:30:00Z" +} +``` + +**Errors:** + +- `401 Unauthorized` - Invalid credentials +- `429 Too Many Requests` - Account locked due to failed attempts + +Source: [src/server/api/auth.py](../src/server/api/auth.py#L93-L124) + +### POST /api/auth/logout + +Logout by revoking token. + +**Response (200 OK):** + +```json +{ + "status": "ok", + "message": "Logged out successfully" +} +``` + +Source: [src/server/api/auth.py](../src/server/api/auth.py#L127-L140) + +### GET /api/auth/status + +Return whether master password is configured and if caller is authenticated. + +**Response (200 OK):** + +```json +{ + "configured": true, + "authenticated": true +} +``` + +Source: [src/server/api/auth.py](../src/server/api/auth.py#L157-L162) + +--- + +## 3. Anime Endpoints + +Prefix: `/api/anime` + +Source: [src/server/api/anime.py](../src/server/api/anime.py#L1-L812) + +### Series Identifier Convention + +The API uses two identifier fields: + +| Field | Purpose | Example | +| -------- | ---------------------------------------------------- | -------------------------- | +| `key` | **Primary identifier** - provider-assigned, URL-safe | `"attack-on-titan"` | +| `folder` | Metadata only - filesystem folder name | `"Attack on Titan (2013)"` | + +Use `key` for all API operations. The `folder` field is for display purposes only. + +### GET /api/anime/status + +Get anime library status information. + +**Authentication:** Required + +**Response (200 OK):** + +```json +{ + "directory": "/path/to/anime", + "series_count": 42 +} +``` + +Source: [src/server/api/anime.py](../src/server/api/anime.py#L28-L58) + +### GET /api/anime + +List library series that have missing episodes. + +**Authentication:** Required + +**Query Parameters:** +| Parameter | Type | Default | Description | +|-----------|------|---------|-------------| +| `page` | int | 1 | Page number (must be positive) | +| `per_page` | int | 20 | Items per page (max 1000) | +| `sort_by` | string | null | Sort field: `title`, `id`, `name`, `missing_episodes` | +| `filter` | string | null | Filter term | + +**Response (200 OK):** + +```json +[ + { + "key": "beheneko-the-elf-girls-cat", + "name": "Beheneko", + "site": "aniworld.to", + "folder": "beheneko the elf girls cat (2025)", + "missing_episodes": { "1": [1, 2, 3, 4] }, + "link": "" + } +] +``` + +Source: [src/server/api/anime.py](../src/server/api/anime.py#L155-L303) + +### GET /api/anime/search + +Search the provider for anime series matching a query. + +**Authentication:** Not required + +**Query Parameters:** +| Parameter | Type | Required | Description | +|-----------|------|----------|-------------| +| `query` | string | Yes | Search term (max 200 chars) | + +**Response (200 OK):** + +```json +[ + { + "key": "attack-on-titan", + "name": "Attack on Titan", + "site": "aniworld.to", + "folder": "Attack on Titan (2013)", + "missing_episodes": {}, + "link": "https://aniworld.to/anime/stream/attack-on-titan" + } +] +``` + +Source: [src/server/api/anime.py](../src/server/api/anime.py#L431-L474) + +### POST /api/anime/search + +Search via POST body. 
+ +**Request Body:** + +```json +{ + "query": "attack on titan" +} +``` + +**Response:** Same as GET /api/anime/search + +Source: [src/server/api/anime.py](../src/server/api/anime.py#L477-L495) + +### POST /api/anime/add + +Add a new series to the library with automatic database persistence, folder creation, and episode scanning. + +**Authentication:** Required + +**Request Body:** + +```json +{ + "link": "https://aniworld.to/anime/stream/attack-on-titan", + "name": "Attack on Titan" +} +``` + +**Response (200 OK):** + +```json +{ + "status": "success", + "message": "Successfully added series: Attack on Titan", + "key": "attack-on-titan", + "folder": "Attack on Titan", + "db_id": 1, + "missing_episodes": ["1-1", "1-2", "1-3"], + "total_missing": 3 +} +``` + +**Enhanced Flow:** + +1. Validates the request (link format, name) +2. Creates Serie object with sanitized folder name +3. Saves to database via AnimeDBService +4. Creates folder using sanitized display name (not internal key) +5. Performs targeted episode scan for this anime only +6. Returns response with missing episodes count + +**Folder Name Sanitization:** + +- Removes invalid filesystem characters: `< > : " / \ | ? *` +- Trims leading/trailing whitespace and dots +- Preserves Unicode characters (for Japanese titles) +- Example: `"Attack on Titan: Final Season"` → `"Attack on Titan Final Season"` + +Source: [src/server/api/anime.py](../src/server/api/anime.py#L604-L710) + +### POST /api/anime/rescan + +Trigger a rescan of the local library. + +**Authentication:** Required + +**Response (200 OK):** + +```json +{ + "success": true, + "message": "Rescan started successfully" +} +``` + +Source: [src/server/api/anime.py](../src/server/api/anime.py#L306-L337) + +### GET /api/anime/{anime_id} + +Return detailed information about a specific series. + +**Authentication:** Not required + +**Path Parameters:** +| Parameter | Description | +|-----------|-------------| +| `anime_id` | Series `key` (primary) or `folder` (deprecated fallback) | + +**Response (200 OK):** + +```json +{ + "key": "attack-on-titan", + "title": "Attack on Titan", + "folder": "Attack on Titan (2013)", + "episodes": ["1-1", "1-2", "1-3"], + "description": null +} +``` + +Source: [src/server/api/anime.py](../src/server/api/anime.py#L713-L793) + +--- + +## 4. Download Queue Endpoints + +Prefix: `/api/queue` + +Source: [src/server/api/download.py](../src/server/api/download.py#L1-L529) + +### GET /api/queue/status + +Get current download queue status and statistics. + +**Authentication:** Required + +**Response (200 OK):** + +```json +{ + "status": { + "is_running": false, + "is_paused": false, + "active_downloads": [], + "pending_queue": [], + "completed_downloads": [], + "failed_downloads": [] + }, + "statistics": { + "total_items": 5, + "pending_count": 3, + "active_count": 1, + "completed_count": 1, + "failed_count": 0, + "total_downloaded_mb": 1024.5, + "average_speed_mbps": 2.5, + "estimated_time_remaining": 3600 + } +} +``` + +Source: [src/server/api/download.py](../src/server/api/download.py#L21-L56) + +### POST /api/queue/add + +Add episodes to the download queue. 
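+
+A hedged end-to-end sketch (Python with `requests`, assumed available): enqueue two episodes, then start processing. The authentication requirement and the formal request schema follow below.
+
+```python
+# Hedged sketch: add episodes to the queue, then start queue processing.
+import requests
+
+BASE = "http://127.0.0.1:8000"
+token = "..."  # JWT obtained from POST /api/auth/login
+headers = {"Authorization": f"Bearer {token}"}
+
+payload = {
+    "serie_id": "attack-on-titan",
+    "serie_folder": "Attack on Titan (2013)",
+    "serie_name": "Attack on Titan",
+    "episodes": [
+        {"season": 1, "episode": 1, "title": "Episode 1"},
+        {"season": 1, "episode": 2, "title": "Episode 2"},
+    ],
+    "priority": "NORMAL",
+}
+resp = requests.post(f"{BASE}/api/queue/add", json=payload, headers=headers)
+resp.raise_for_status()  # expect 201 Created
+print(resp.json()["item_ids"])
+
+# Begin processing the pending queue
+requests.post(f"{BASE}/api/queue/start", headers=headers).raise_for_status()
+```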
+ +**Authentication:** Required + +**Request Body:** + +```json +{ + "serie_id": "attack-on-titan", + "serie_folder": "Attack on Titan (2013)", + "serie_name": "Attack on Titan", + "episodes": [ + { "season": 1, "episode": 1, "title": "Episode 1" }, + { "season": 1, "episode": 2, "title": "Episode 2" } + ], + "priority": "NORMAL" +} +``` + +**Priority Values:** `LOW`, `NORMAL`, `HIGH` + +**Response (201 Created):** + +```json +{ + "status": "success", + "message": "Added 2 episode(s) to download queue", + "added_items": ["uuid1", "uuid2"], + "item_ids": ["uuid1", "uuid2"], + "failed_items": [] +} +``` + +Source: [src/server/api/download.py](../src/server/api/download.py#L59-L120) + +### POST /api/queue/start + +Start automatic queue processing. + +**Authentication:** Required + +**Response (200 OK):** + +```json +{ + "status": "success", + "message": "Queue processing started" +} +``` + +Source: [src/server/api/download.py](../src/server/api/download.py#L293-L331) + +### POST /api/queue/stop + +Stop processing new downloads from queue. + +**Authentication:** Required + +**Response (200 OK):** + +```json +{ + "status": "success", + "message": "Queue processing stopped (current download will continue)" +} +``` + +Source: [src/server/api/download.py](../src/server/api/download.py#L334-L387) + +### POST /api/queue/pause + +Pause queue processing (alias for stop). + +**Authentication:** Required + +**Response (200 OK):** + +```json +{ + "status": "success", + "message": "Queue processing paused" +} +``` + +Source: [src/server/api/download.py](../src/server/api/download.py#L416-L445) + +### DELETE /api/queue/{item_id} + +Remove a specific item from the download queue. + +**Authentication:** Required + +**Path Parameters:** +| Parameter | Description | +|-----------|-------------| +| `item_id` | Download item UUID | + +**Response (204 No Content)** + +Source: [src/server/api/download.py](../src/server/api/download.py#L225-L256) + +### DELETE /api/queue + +Remove multiple items from the download queue. + +**Authentication:** Required + +**Request Body:** + +```json +{ + "item_ids": ["uuid1", "uuid2"] +} +``` + +**Response (204 No Content)** + +Source: [src/server/api/download.py](../src/server/api/download.py#L259-L290) + +### DELETE /api/queue/completed + +Clear completed downloads from history. + +**Authentication:** Required + +**Response (200 OK):** + +```json +{ + "status": "success", + "message": "Cleared 5 completed item(s)", + "count": 5 +} +``` + +Source: [src/server/api/download.py](../src/server/api/download.py#L123-L149) + +### DELETE /api/queue/failed + +Clear failed downloads from history. + +**Authentication:** Required + +**Response (200 OK):** + +```json +{ + "status": "success", + "message": "Cleared 2 failed item(s)", + "count": 2 +} +``` + +Source: [src/server/api/download.py](../src/server/api/download.py#L152-L178) + +### DELETE /api/queue/pending + +Clear all pending downloads from the queue. + +**Authentication:** Required + +**Response (200 OK):** + +```json +{ + "status": "success", + "message": "Removed 10 pending item(s)", + "count": 10 +} +``` + +Source: [src/server/api/download.py](../src/server/api/download.py#L181-L207) + +### POST /api/queue/reorder + +Reorder items in the pending queue. 
+ +**Authentication:** Required + +**Request Body:** + +```json +{ + "item_ids": ["uuid3", "uuid1", "uuid2"] +} +``` + +**Response (200 OK):** + +```json +{ + "status": "success", + "message": "Queue reordered with 3 items" +} +``` + +Source: [src/server/api/download.py](../src/server/api/download.py#L448-L477) + +### POST /api/queue/retry + +Retry failed downloads. + +**Authentication:** Required + +**Request Body:** + +```json +{ + "item_ids": ["uuid1", "uuid2"] +} +``` + +Pass empty `item_ids` array to retry all failed items. + +**Response (200 OK):** + +```json +{ + "status": "success", + "message": "Retrying 2 failed item(s)", + "retried_count": 2, + "retried_ids": ["uuid1", "uuid2"] +} +``` + +Source: [src/server/api/download.py](../src/server/api/download.py#L480-L514) + +--- + +## 5. Configuration Endpoints + +Prefix: `/api/config` + +Source: [src/server/api/config.py](../src/server/api/config.py#L1-L374) + +### GET /api/config + +Return current application configuration. + +**Authentication:** Required + +**Response (200 OK):** + +```json +{ + "name": "Aniworld", + "data_dir": "data", + "scheduler": { + "enabled": true, + "interval_minutes": 60 + }, + "logging": { + "level": "INFO", + "file": null, + "max_bytes": null, + "backup_count": 3 + }, + "backup": { + "enabled": false, + "path": "data/backups", + "keep_days": 30 + }, + "other": {} +} +``` + +Source: [src/server/api/config.py](../src/server/api/config.py#L16-L27) + +### PUT /api/config + +Apply an update to the configuration. + +**Authentication:** Required + +**Request Body:** + +```json +{ + "scheduler": { + "enabled": true, + "interval_minutes": 30 + }, + "logging": { + "level": "DEBUG" + } +} +``` + +**Response (200 OK):** Updated configuration object + +Source: [src/server/api/config.py](../src/server/api/config.py#L30-L47) + +### POST /api/config/validate + +Validate a configuration without applying it. + +**Authentication:** Required + +**Request Body:** Full `AppConfig` object + +**Response (200 OK):** + +```json +{ + "valid": true, + "errors": [] +} +``` + +Source: [src/server/api/config.py](../src/server/api/config.py#L50-L64) + +### GET /api/config/backups + +List all available configuration backups. + +**Authentication:** Required + +**Response (200 OK):** + +```json +[ + { + "name": "config_backup_20251213_090130.json", + "size": 1024, + "created": "2025-12-13T09:01:30Z" + } +] +``` + +Source: [src/server/api/config.py](../src/server/api/config.py#L67-L81) + +### POST /api/config/backups + +Create a backup of the current configuration. + +**Authentication:** Required + +**Query Parameters:** +| Parameter | Type | Required | Description | +|-----------|------|----------|-------------| +| `name` | string | No | Custom backup name | + +**Response (200 OK):** + +```json +{ + "name": "config_backup_20251213_090130.json", + "message": "Backup created successfully" +} +``` + +Source: [src/server/api/config.py](../src/server/api/config.py#L84-L102) + +### POST /api/config/backups/{backup_name}/restore + +Restore configuration from a backup. + +**Authentication:** Required + +**Response (200 OK):** Restored configuration object + +Source: [src/server/api/config.py](../src/server/api/config.py#L105-L123) + +### DELETE /api/config/backups/{backup_name} + +Delete a configuration backup. 
+ +**Authentication:** Required + +**Response (200 OK):** + +```json +{ + "message": "Backup 'config_backup_20251213.json' deleted successfully" +} +``` + +Source: [src/server/api/config.py](../src/server/api/config.py#L126-L142) + +### POST /api/config/directory + +Update anime directory configuration. + +**Authentication:** Required + +**Request Body:** + +```json +{ + "directory": "/path/to/anime" +} +``` + +**Response (200 OK):** + +```json +{ + "message": "Anime directory updated successfully", + "synced_series": 15 +} +``` + +Source: [src/server/api/config.py](../src/server/api/config.py#L189-L247) + +--- + +## 6. Scheduler Endpoints + +Prefix: `/api/scheduler` + +Source: [src/server/api/scheduler.py](../src/server/api/scheduler.py#L1-L122) + +### GET /api/scheduler/config + +Get current scheduler configuration. + +**Authentication:** Required + +**Response (200 OK):** + +```json +{ + "enabled": true, + "interval_minutes": 60 +} +``` + +Source: [src/server/api/scheduler.py](../src/server/api/scheduler.py#L22-L42) + +### POST /api/scheduler/config + +Update scheduler configuration. + +**Authentication:** Required + +**Request Body:** + +```json +{ + "enabled": true, + "interval_minutes": 30 +} +``` + +**Response (200 OK):** Updated scheduler configuration + +Source: [src/server/api/scheduler.py](../src/server/api/scheduler.py#L45-L75) + +### POST /api/scheduler/trigger-rescan + +Manually trigger a library rescan. + +**Authentication:** Required + +**Response (200 OK):** + +```json +{ + "success": true, + "message": "Rescan started successfully" +} +``` + +Source: [src/server/api/scheduler.py](../src/server/api/scheduler.py#L78-L122) + +--- + +## 7. Health Check Endpoints + +Prefix: `/health` + +Source: [src/server/api/health.py](../src/server/api/health.py#L1-L267) + +### GET /health + +Basic health check endpoint. + +**Authentication:** Not required + +**Response (200 OK):** + +```json +{ + "status": "healthy", + "timestamp": "2025-12-13T10:30:00.000Z", + "version": "1.0.0" +} +``` + +Source: [src/server/api/health.py](../src/server/api/health.py#L151-L161) + +### GET /health/detailed + +Comprehensive health check with database, filesystem, and system metrics. + +**Authentication:** Not required + +**Response (200 OK):** + +```json +{ + "status": "healthy", + "timestamp": "2025-12-13T10:30:00.000Z", + "version": "1.0.0", + "dependencies": { + "database": { + "status": "healthy", + "connection_time_ms": 1.5, + "message": "Database connection successful" + }, + "filesystem": { + "status": "healthy", + "data_dir_writable": true, + "logs_dir_writable": true + }, + "system": { + "cpu_percent": 25.0, + "memory_percent": 45.0, + "memory_available_mb": 8192.0, + "disk_percent": 60.0, + "disk_free_mb": 102400.0, + "uptime_seconds": 86400.0 + } + }, + "startup_time": "2025-12-13T08:00:00.000Z" +} +``` + +Source: [src/server/api/health.py](../src/server/api/health.py#L164-L200) + +--- + +## 8. 
WebSocket Protocol + +Endpoint: `/ws/connect` + +Source: [src/server/api/websocket.py](../src/server/api/websocket.py#L1-L260) + +### Connection + +**URL:** `ws://127.0.0.1:8000/ws/connect` + +**Query Parameters:** +| Parameter | Required | Description | +|-----------|----------|-------------| +| `token` | No | JWT token for authenticated access | + +### Message Types + +| Type | Direction | Description | +| ------------------- | ---------------- | -------------------------- | +| `connected` | Server -> Client | Connection confirmation | +| `ping` | Client -> Server | Keepalive request | +| `pong` | Server -> Client | Keepalive response | +| `download_progress` | Server -> Client | Download progress update | +| `download_complete` | Server -> Client | Download completed | +| `download_failed` | Server -> Client | Download failed | +| `download_added` | Server -> Client | Item added to queue | +| `download_removed` | Server -> Client | Item removed from queue | +| `queue_status` | Server -> Client | Queue status update | +| `queue_started` | Server -> Client | Queue processing started | +| `queue_stopped` | Server -> Client | Queue processing stopped | +| `scan_progress` | Server -> Client | Library scan progress | +| `scan_complete` | Server -> Client | Library scan completed | +| `system_info` | Server -> Client | System information message | +| `error` | Server -> Client | Error message | + +Source: [src/server/models/websocket.py](../src/server/models/websocket.py#L25-L57) + +### Room Subscriptions + +Clients can join/leave rooms to receive specific updates. + +**Join Room:** + +```json +{ + "action": "join", + "data": { "room": "downloads" } +} +``` + +**Leave Room:** + +```json +{ + "action": "leave", + "data": { "room": "downloads" } +} +``` + +**Available Rooms:** + +- `downloads` - Download progress and status updates + +### Server Message Format + +```json +{ + "type": "download_progress", + "timestamp": "2025-12-13T10:30:00.000Z", + "data": { + "download_id": "uuid-here", + "key": "attack-on-titan", + "folder": "Attack on Titan (2013)", + "percent": 45.2, + "speed_mbps": 2.5, + "eta_seconds": 180 + } +} +``` + +### WebSocket Status Endpoint + +**GET /ws/status** + +Returns WebSocket service status. + +**Response (200 OK):** + +```json +{ + "status": "operational", + "active_connections": 5, + "supported_message_types": [ + "download_progress", + "download_complete", + "download_failed", + "queue_status", + "connected", + "ping", + "pong", + "error" + ] +} +``` + +Source: [src/server/api/websocket.py](../src/server/api/websocket.py#L238-L260) + +--- + +## 9. 
Data Models
+
+### Download Item
+
+```json
+{
+  "id": "uuid-string",
+  "serie_id": "attack-on-titan",
+  "serie_folder": "Attack on Titan (2013)",
+  "serie_name": "Attack on Titan",
+  "episode": {
+    "season": 1,
+    "episode": 1,
+    "title": "To You, in 2000 Years"
+  },
+  "status": "pending",
+  "priority": "NORMAL",
+  "added_at": "2025-12-13T10:00:00Z",
+  "started_at": null,
+  "completed_at": null,
+  "progress": null,
+  "error": null,
+  "retry_count": 0,
+  "source_url": null
+}
+```
+
+**Status Values:** `pending`, `downloading`, `paused`, `completed`, `failed`, `cancelled`
+
+**Priority Values:** `LOW`, `NORMAL`, `HIGH`
+
+Source: [src/server/models/download.py](../src/server/models/download.py#L63-L118)
+
+### Episode Identifier
+
+```json
+{
+  "season": 1,
+  "episode": 1,
+  "title": "Episode Title"
+}
+```
+
+Source: [src/server/models/download.py](../src/server/models/download.py#L36-L41)
+
+### Download Progress
+
+```json
+{
+  "percent": 45.2,
+  "downloaded_mb": 256.0,
+  "total_mb": 512.0,
+  "speed_mbps": 2.5,
+  "eta_seconds": 180
+}
+```
+
+Source: [src/server/models/download.py](../src/server/models/download.py#L44-L60)
+
+---
+
+## 10. Error Handling
+
+### HTTP Status Codes
+
+| Code | Meaning               | When Used                         |
+| ---- | --------------------- | --------------------------------- |
+| 200  | OK                    | Successful request                |
+| 201  | Created               | Resource created                  |
+| 204  | No Content            | Successful deletion               |
+| 400  | Bad Request           | Invalid request body/parameters   |
+| 401  | Unauthorized          | Missing or invalid authentication |
+| 403  | Forbidden             | Insufficient permissions          |
+| 404  | Not Found             | Resource does not exist           |
+| 409  | Conflict              | Resource conflict                 |
+| 422  | Unprocessable Entity  | Validation error                  |
+| 429  | Too Many Requests     | Rate limit exceeded               |
+| 500  | Internal Server Error | Server-side error                 |
+
+### Error Response Format
+
+```json
+{
+  "success": false,
+  "error": "VALIDATION_ERROR",
+  "message": "Human-readable error message",
+  "details": {
+    "field": "Additional context"
+  },
+  "request_id": "uuid-for-tracking"
+}
+```
+
+Source: [src/server/middleware/error_handler.py](../src/server/middleware/error_handler.py#L26-L56)
+
+### Common Error Codes
+
+| Error Code             | HTTP Status | Description                    |
+| ---------------------- | ----------- | ------------------------------ |
+| `AUTHENTICATION_ERROR` | 401         | Invalid or missing credentials |
+| `AUTHORIZATION_ERROR`  | 403         | Insufficient permissions       |
+| `VALIDATION_ERROR`     | 422         | Request validation failed      |
+| `NOT_FOUND_ERROR`      | 404         | Resource not found             |
+| `CONFLICT_ERROR`       | 409         | Resource conflict              |
+| `RATE_LIMIT_ERROR`     | 429         | Rate limit exceeded            |
+
+---
+
+## 11. Rate Limiting
+
+### Authentication Endpoints
+
+| Endpoint               | Limit      | Window     |
+| ---------------------- | ---------- | ---------- |
+| `POST /api/auth/login` | 5 requests | 60 seconds |
+| `POST /api/auth/setup` | 5 requests | 60 seconds |
+
+Source: [src/server/middleware/auth.py](../src/server/middleware/auth.py#L143-L162)
+
+### Origin-Based Limiting
+
+Requests from the same origin are limited to 60 per minute, across all endpoints.
+
+Source: [src/server/middleware/auth.py](../src/server/middleware/auth.py#L115-L133)
+
+### Rate Limit Response
+
+```json
+{
+  "detail": "Too many authentication attempts, try again later"
+}
+```
+
+HTTP Status: 429 Too Many Requests
+
+---
+
+## 12. Pagination
+
+The anime list endpoint supports pagination.
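+
+A hedged sketch of walking every page (Python with `requests`, assumed available; parameter names per the table below). The endpoint returns a bare JSON array, so an empty page is used as the stop condition (an assumption, since no total count is documented).
+
+```python
+# Hedged sketch: iterate through all pages of GET /api/anime.
+import requests
+
+BASE = "http://127.0.0.1:8000"
+token = "..."  # JWT obtained from POST /api/auth/login
+headers = {"Authorization": f"Bearer {token}"}
+
+page = 1
+while True:
+    batch = requests.get(
+        f"{BASE}/api/anime",
+        params={"page": page, "per_page": 50},
+        headers=headers,
+    ).json()
+    if not batch:
+        break  # empty page -> no more results (assumed; no total is returned)
+    for serie in batch:
+        print(serie["key"])
+    page += 1
+```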
+ +**Query Parameters:** +| Parameter | Default | Max | Description | +|-----------|---------|-----|-------------| +| `page` | 1 | - | Page number (1-indexed) | +| `per_page` | 20 | 1000 | Items per page | + +**Example:** + +``` +GET /api/anime?page=2&per_page=50 +``` + +Source: [src/server/api/anime.py](../src/server/api/anime.py#L180-L220) diff --git a/docs/ARCHITECTURE.md b/docs/ARCHITECTURE.md new file mode 100644 index 0000000..0cf45be --- /dev/null +++ b/docs/ARCHITECTURE.md @@ -0,0 +1,625 @@ +# Architecture Documentation + +## Document Purpose + +This document describes the system architecture of the Aniworld anime download manager. + +--- + +## 1. System Overview + +Aniworld is a web-based anime download manager built with Python, FastAPI, and SQLite. It provides a REST API and WebSocket interface for managing anime libraries, downloading episodes, and tracking progress. + +### High-Level Architecture + +``` ++------------------+ +------------------+ +------------------+ +| Web Browser | | CLI Client | | External | +| (Frontend) | | (Main.py) | | Providers | ++--------+---------+ +--------+---------+ +--------+---------+ + | | | + | HTTP/WebSocket | Direct | HTTP + | | | ++--------v---------+ +--------v---------+ +--------v---------+ +| | | | | | +| FastAPI <-----> Core Layer <-----> Provider | +| Server Layer | | (SeriesApp) | | Adapters | +| | | | | | ++--------+---------+ +--------+---------+ +------------------+ + | | + | | ++--------v---------+ +--------v---------+ +| | | | +| SQLite DB | | File System | +| (aniworld.db) | | (data/*.json) | +| | | | ++------------------+ +------------------+ +``` + +Source: [src/server/fastapi_app.py](../src/server/fastapi_app.py#L1-L252) + +--- + +## 2. Architectural Layers + +### 2.1 CLI Layer (`src/cli/`) + +Legacy command-line interface for direct interaction with the core layer. + +| Component | File | Purpose | +| --------- | ----------------------------- | --------------- | +| Main | [Main.py](../src/cli/Main.py) | CLI entry point | + +### 2.2 Server Layer (`src/server/`) + +FastAPI-based REST API and WebSocket server. 
+ +``` +src/server/ ++-- fastapi_app.py # Application entry point, lifespan management ++-- api/ # API route handlers +| +-- anime.py # /api/anime/* endpoints +| +-- auth.py # /api/auth/* endpoints +| +-- config.py # /api/config/* endpoints +| +-- download.py # /api/queue/* endpoints +| +-- scheduler.py # /api/scheduler/* endpoints +| +-- websocket.py # /ws/* WebSocket handlers +| +-- health.py # /health/* endpoints ++-- controllers/ # Page controllers for HTML rendering +| +-- page_controller.py # UI page routes +| +-- health_controller.py# Health check route +| +-- error_controller.py # Error pages (404, 500) ++-- services/ # Business logic +| +-- anime_service.py # Anime operations +| +-- auth_service.py # Authentication +| +-- config_service.py # Configuration management +| +-- download_service.py # Download queue management +| +-- progress_service.py # Progress tracking +| +-- websocket_service.py# WebSocket broadcasting +| +-- queue_repository.py # Database persistence ++-- models/ # Pydantic models +| +-- auth.py # Auth request/response models +| +-- config.py # Configuration models +| +-- download.py # Download queue models +| +-- websocket.py # WebSocket message models ++-- middleware/ # Request processing +| +-- auth.py # JWT validation, rate limiting +| +-- error_handler.py # Exception handlers +| +-- setup_redirect.py # Setup flow redirect ++-- database/ # SQLAlchemy ORM +| +-- connection.py # Database connection +| +-- models.py # ORM models +| +-- service.py # Database service ++-- utils/ # Utility modules +| +-- filesystem.py # Folder sanitization, path safety +| +-- validators.py # Input validation utilities +| +-- dependencies.py # FastAPI dependency injection ++-- web/ # Static files and templates + +-- static/ # CSS, JS, images + +-- templates/ # Jinja2 templates +``` + +Source: [src/server/](../src/server/) + +### 2.2.1 Frontend Architecture (`src/server/web/static/`) + +The frontend uses a modular architecture with no build step required. CSS and JavaScript files are organized by responsibility. + +#### CSS Structure + +``` +src/server/web/static/css/ ++-- styles.css # Entry point with @import statements ++-- base/ +| +-- variables.css # CSS custom properties (colors, fonts, spacing) +| +-- reset.css # CSS reset and normalize styles +| +-- typography.css # Font styles, headings, text utilities ++-- components/ +| +-- buttons.css # All button styles +| +-- cards.css # Card and panel components +| +-- forms.css # Form inputs, labels, validation styles +| +-- modals.css # Modal and overlay styles +| +-- navigation.css # Header, nav, sidebar styles +| +-- progress.css # Progress bars, loading indicators +| +-- notifications.css # Toast, alerts, messages +| +-- tables.css # Table and list styles +| +-- status.css # Status badges and indicators ++-- pages/ +| +-- login.css # Login page specific styles +| +-- index.css # Index/library page specific styles +| +-- queue.css # Queue page specific styles ++-- utilities/ + +-- animations.css # Keyframes and animation classes + +-- responsive.css # Media queries and breakpoints + +-- helpers.css # Utility classes (hidden, flex, spacing) +``` + +#### JavaScript Structure + +JavaScript uses the IIFE pattern with a shared `AniWorld` namespace for browser compatibility without build tools. 
+ +``` +src/server/web/static/js/ ++-- shared/ # Shared utilities used by all pages +| +-- constants.js # API endpoints, localStorage keys, defaults +| +-- auth.js # Token management (getToken, setToken, checkAuth) +| +-- api-client.js # Fetch wrapper with auto-auth headers +| +-- theme.js # Dark/light theme toggle +| +-- ui-utils.js # Toast notifications, format helpers +| +-- websocket-client.js # Socket.IO wrapper ++-- index/ # Index page modules +| +-- series-manager.js # Series list rendering and filtering +| +-- selection-manager.js# Multi-select and bulk download +| +-- search.js # Series search functionality +| +-- scan-manager.js # Library rescan operations +| +-- scheduler-config.js # Scheduler configuration +| +-- logging-config.js # Logging configuration +| +-- advanced-config.js # Advanced settings +| +-- main-config.js # Main configuration and backup +| +-- config-manager.js # Config modal orchestrator +| +-- socket-handler.js # WebSocket event handlers +| +-- app-init.js # Application initialization ++-- queue/ # Queue page modules + +-- queue-api.js # Queue API interactions + +-- queue-renderer.js # Queue list rendering + +-- progress-handler.js # Download progress updates + +-- queue-socket-handler.js # WebSocket events for queue + +-- queue-init.js # Queue page initialization +``` + +#### Module Pattern + +All JavaScript modules follow the IIFE pattern with namespace: + +```javascript +var AniWorld = window.AniWorld || {}; + +AniWorld.ModuleName = (function () { + "use strict"; + + // Private variables and functions + + // Public API + return { + init: init, + publicMethod: publicMethod, + }; +})(); +``` + +Source: [src/server/web/static/](../src/server/web/static/) + +### 2.3 Core Layer (`src/core/`) + +Domain logic for anime series management. + +``` +src/core/ ++-- SeriesApp.py # Main application facade ++-- SerieScanner.py # Directory scanning, targeted single-series scan ++-- entities/ # Domain entities +| +-- series.py # Serie class with sanitized_folder property +| +-- SerieList.py # SerieList collection with sanitized folder support ++-- providers/ # External provider adapters +| +-- base_provider.py # Loader interface +| +-- provider_factory.py # Provider registry ++-- interfaces/ # Abstract interfaces +| +-- callbacks.py # Progress callback system ++-- exceptions/ # Domain exceptions + +-- Exceptions.py # Custom exceptions +``` + +**Key Components:** + +| Component | Purpose | +| -------------- | -------------------------------------------------------------------------- | +| `SeriesApp` | Main application facade for anime operations | +| `SerieScanner` | Scans directories for anime; `scan_single_series()` for targeted scans | +| `Serie` | Domain entity with `sanitized_folder` property for filesystem-safe names | +| `SerieList` | Collection management with automatic folder creation using sanitized names | + +Source: [src/core/](../src/core/) + +### 2.4 Infrastructure Layer (`src/infrastructure/`) + +Cross-cutting concerns. + +``` +src/infrastructure/ ++-- logging/ # Structured logging setup ++-- security/ # Security utilities +``` + +### 2.5 Configuration Layer (`src/config/`) + +Application settings management. + +| Component | File | Purpose | +| --------- | ---------------------------------------- | ------------------------------- | +| Settings | [settings.py](../src/config/settings.py) | Environment-based configuration | + +Source: [src/config/settings.py](../src/config/settings.py#L1-L96) + +--- + +## 11. 
Graceful Shutdown + +The application implements a comprehensive graceful shutdown mechanism that ensures data integrity and proper cleanup when the server is stopped via Ctrl+C (SIGINT) or SIGTERM. + +### 11.1 Shutdown Sequence + +``` +1. SIGINT/SIGTERM received + +-- Uvicorn catches signal + +-- Stops accepting new requests + +2. FastAPI lifespan shutdown triggered + +-- 30 second total timeout + +3. WebSocket shutdown (5s timeout) + +-- Broadcast {"type": "server_shutdown"} to all clients + +-- Close each connection with code 1001 (Going Away) + +-- Clear connection tracking data + +4. Download service stop (10s timeout) + +-- Set shutdown flag + +-- Persist active download as "pending" in database + +-- Cancel active download task + +-- Shutdown ThreadPoolExecutor with wait + +5. Progress service cleanup + +-- Clear event subscribers + +-- Clear active progress tracking + +6. Database cleanup (10s timeout) + +-- SQLite: Run PRAGMA wal_checkpoint(TRUNCATE) + +-- Dispose async engine + +-- Dispose sync engine + +7. Process exits cleanly +``` + +Source: [src/server/fastapi_app.py](../src/server/fastapi_app.py#L142-L210) + +### 11.2 Key Components + +| Component | File | Shutdown Method | +| ------------------- | ------------------------------------------------------------------- | ------------------------------ | +| WebSocket Service | [websocket_service.py](../src/server/services/websocket_service.py) | `shutdown(timeout=5.0)` | +| Download Service | [download_service.py](../src/server/services/download_service.py) | `stop(timeout=10.0)` | +| Database Connection | [connection.py](../src/server/database/connection.py) | `close_db()` | +| Uvicorn Config | [run_server.py](../run_server.py) | `timeout_graceful_shutdown=30` | +| Stop Script | [stop_server.sh](../stop_server.sh) | SIGTERM with fallback | + +### 11.3 Data Integrity Guarantees + +1. **Active downloads preserved**: In-progress downloads are saved as "pending" and can resume on restart. + +2. **Database WAL flushed**: SQLite WAL checkpoint ensures all writes are in the main database file. + +3. **WebSocket clients notified**: Clients receive shutdown message before connection closes. + +4. **Thread pool cleanup**: Background threads complete or are gracefully cancelled. + +### 11.4 Manual Stop + +```bash +# Graceful stop via script (sends SIGTERM, waits up to 30s) +./stop_server.sh + +# Or press Ctrl+C in terminal running the server +``` + +Source: [stop_server.sh](../stop_server.sh#L1-L80) + +--- + +## 3. Component Interactions + +### 3.1 Request Flow (REST API) + +``` +1. Client sends HTTP request +2. AuthMiddleware validates JWT token (if required) +3. Rate limiter checks request frequency +4. FastAPI router dispatches to endpoint handler +5. Endpoint calls service layer +6. Service layer uses core layer or database +7. Response returned as JSON +``` + +Source: [src/server/middleware/auth.py](../src/server/middleware/auth.py#L1-L209) + +### 3.2 Download Flow + +``` +1. POST /api/queue/add + +-- DownloadService.add_to_queue() + +-- QueueRepository.save_item() -> SQLite + +2. POST /api/queue/start + +-- DownloadService.start_queue_processing() + +-- Process pending items sequentially + +-- ProgressService emits events + +-- WebSocketService broadcasts to clients + +3. 
During download: + +-- ProgressService.emit("progress_updated") + +-- WebSocketService.broadcast_to_room() + +-- Client receives WebSocket message +``` + +Source: [src/server/services/download_service.py](../src/server/services/download_service.py#L1-L150) + +### 3.3 WebSocket Event Flow + +``` +1. Client connects to /ws/connect +2. Server sends "connected" message +3. Client joins room: {"action": "join", "data": {"room": "downloads"}} +4. ProgressService emits events +5. WebSocketService broadcasts to room subscribers +6. Client receives real-time updates +``` + +Source: [src/server/api/websocket.py](../src/server/api/websocket.py#L1-L260) + +--- + +## 4. Design Patterns + +### 4.1 Repository Pattern + +Database access is abstracted through repository classes. + +```python +# QueueRepository provides CRUD for download items +class QueueRepository: + async def save_item(self, item: DownloadItem) -> None: ... + async def get_all_items(self) -> List[DownloadItem]: ... + async def delete_item(self, item_id: str) -> bool: ... +``` + +Source: [src/server/services/queue_repository.py](../src/server/services/queue_repository.py) + +### 4.2 Dependency Injection + +FastAPI's `Depends()` provides constructor injection. + +```python +@router.get("/status") +async def get_status( + download_service: DownloadService = Depends(get_download_service), +): + ... +``` + +Source: [src/server/utils/dependencies.py](../src/server/utils/dependencies.py) + +### 4.3 Event-Driven Architecture + +Progress updates use an event subscription model. + +```python +# ProgressService publishes events +progress_service.emit("progress_updated", event) + +# WebSocketService subscribes +progress_service.subscribe("progress_updated", ws_handler) +``` + +Source: [src/server/fastapi_app.py](../src/server/fastapi_app.py#L98-L108) + +### 4.4 Singleton Pattern + +Services use module-level singletons for shared state. + +```python +# In download_service.py +_download_service_instance: Optional[DownloadService] = None + +def get_download_service() -> DownloadService: + global _download_service_instance + if _download_service_instance is None: + _download_service_instance = DownloadService(...) + return _download_service_instance +``` + +Source: [src/server/services/download_service.py](../src/server/services/download_service.py) + +--- + +## 5. Data Flow + +### 5.1 Series Identifier Convention + +The system uses two identifier fields: + +| Field | Type | Purpose | Example | +| -------- | -------- | -------------------------------------- | -------------------------- | +| `key` | Primary | Provider-assigned, URL-safe identifier | `"attack-on-titan"` | +| `folder` | Metadata | Filesystem folder name (display only) | `"Attack on Titan (2013)"` | + +All API operations use `key`. The `folder` is for filesystem operations only. 
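+
+A hedged sketch of what this convention looks like in calling code, using the `AnimeSeriesService.get_by_key()` lookup documented in [DATABASE.md](DATABASE.md); the session handling and helper name here are simplified for illustration:
+
+```python
+from sqlalchemy.ext.asyncio import AsyncSession
+
+from src.server.database.service import AnimeSeriesService
+
+
+async def resolve_display_folder(db: AsyncSession, key: str) -> str | None:
+    """Look a series up by its key; read folder only for display."""
+    series = await AnimeSeriesService.get_by_key(db, key)
+    if series is None:
+        return None
+    # `folder` is metadata: fine for UI labels and filesystem paths,
+    # never as a lookup argument.
+    return series.folder
+```
+
+The field definitions backing this convention are linked below.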
+
+Source: [src/server/database/models.py](../src/server/database/models.py#L26-L50)
+
+### 5.2 Database Schema
+
+```
++----------------+       +----------------+       +--------------------+
+| anime_series   |       | episodes       |       | download_queue_item|
++----------------+       +----------------+       +--------------------+
+| id (PK)        |<--+   | id (PK)        |   +-->| id (PK)            |
+| key (unique)   |   |   | series_id (FK) |---+   | series_id (FK)     |
+| name           |   +---| season         |       | status             |
+| site           |       | episode_number |       | priority           |
+| folder         |       | title          |       | progress_percent   |
+| created_at     |       | is_downloaded  |       | added_at           |
+| updated_at     |       | file_path      |       | started_at         |
++----------------+       +----------------+       +--------------------+
+```
+
+Source: [src/server/database/models.py](../src/server/database/models.py#L1-L200)
+
+### 5.3 Configuration Storage
+
+Configuration is stored in `data/config.json`:
+
+```json
+{
+  "name": "Aniworld",
+  "data_dir": "data",
+  "scheduler": { "enabled": true, "interval_minutes": 60 },
+  "logging": { "level": "INFO" },
+  "backup": { "enabled": false, "path": "data/backups" },
+  "other": {
+    "master_password_hash": "$pbkdf2-sha256$...",
+    "anime_directory": "/path/to/anime"
+  }
+}
+```
+
+Source: [data/config.json](../data/config.json)
+
+---
+
+## 6. Technology Stack
+
+| Layer         | Technology          | Version | Purpose                |
+| ------------- | ------------------- | ------- | ---------------------- |
+| Web Framework | FastAPI             | 0.104.1 | REST API, WebSocket    |
+| ASGI Server   | Uvicorn             | 0.24.0  | HTTP server            |
+| Database      | SQLite + SQLAlchemy | 2.0.35  | Persistence            |
+| Auth          | python-jose         | 3.3.0   | JWT tokens             |
+| Password      | passlib             | 1.7.4   | pbkdf2-sha256 hashing  |
+| Validation    | Pydantic            | 2.5.0   | Data models            |
+| Templates     | Jinja2              | 3.1.2   | HTML rendering         |
+| Logging       | structlog           | 24.1.0  | Structured logging     |
+| Testing       | pytest              | 7.4.3   | Unit/integration tests |
+
+Source: [requirements.txt](../requirements.txt)
+
+---
+
+## 7. Scalability Considerations
+
+### Current Limitations
+
+1. **Single-process deployment**: In-memory rate limiting and session state are not shared across processes.
+
+2. **SQLite database**: Not suitable for high concurrency. Consider PostgreSQL for production.
+
+3. **Sequential downloads**: Only one download is processed at a time, by design.
+
+### Recommended Improvements for Scale
+
+| Concern        | Current         | Recommended       |
+| -------------- | --------------- | ----------------- |
+| Rate limiting  | In-memory dict  | Redis             |
+| Session store  | In-memory       | Redis or database |
+| Database       | SQLite          | PostgreSQL        |
+| Task queue     | In-memory deque | Celery + Redis    |
+| Load balancing | None            | Nginx/HAProxy     |
+
+---
+
+## 8. Integration Points
+
+### 8.1 External Providers
+
+The system integrates with anime streaming providers via the Loader interface.
+
+```python
+class Loader(ABC):
+    @abstractmethod
+    def search(self, query: str) -> List[Serie]: ...
+
+    @abstractmethod
+    def get_episodes(self, serie: Serie) -> Dict[int, List[int]]: ...
+```
+
+Source: [src/core/providers/base_provider.py](../src/core/providers/base_provider.py)
+
+### 8.2 Filesystem Integration
+
+The scanner reads anime directories to detect downloaded episodes.
+
+```python
+SerieScanner(
+    basePath="/path/to/anime",  # Anime library directory
+    loader=provider,            # Provider for metadata
+    db_session=session          # Optional database
+)
+```
+
+Source: [src/core/SerieScanner.py](../src/core/SerieScanner.py#L59-L96)
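+
+To make the provider seam concrete, the sketch below shows a minimal stand-in provider against the `Loader` interface from Section 8.1. It is a test-double-style illustration: the `Serie` import path follows the tree in Section 2.3, the canned data is invented, and real providers are registered through `provider_factory.py`, whose API is not shown here.
+
+```python
+from typing import Dict, List
+
+from src.core.entities.series import Serie
+from src.core.providers.base_provider import Loader
+
+
+class DummyProvider(Loader):
+    """Canned-data provider, e.g. for exercising SerieScanner in tests."""
+
+    def search(self, query: str) -> List[Serie]:
+        # A real provider would query the remote site and map results
+        # into Serie entities; the empty list keeps this sketch honest.
+        return []
+
+    def get_episodes(self, serie: Serie) -> Dict[int, List[int]]:
+        # Map season number -> available episode numbers.
+        return {1: list(range(1, 13))}
+```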
+
+---
+
+## 9. Security Architecture
+
+### 9.1 Authentication Flow
+
+```
+1. User sets master password via POST /api/auth/setup
+2. Password hashed with pbkdf2_sha256 (via passlib)
+3. Hash stored in config.json
+4. Login validates password, returns JWT token
+5. JWT contains: session_id, user, created_at, expires_at
+6. Subsequent requests include: Authorization: Bearer <token>
+```
+
+Source: [src/server/services/auth_service.py](../src/server/services/auth_service.py#L1-L150)
+
+### 9.2 Password Requirements
+
+- Minimum 8 characters
+- Mixed case (upper and lower)
+- At least one number
+- At least one special character
+
+Source: [src/server/services/auth_service.py](../src/server/services/auth_service.py#L97-L125)
+
+### 9.3 Rate Limiting
+
+| Endpoint                     | Limit       | Window     |
+| ---------------------------- | ----------- | ---------- |
+| `/api/auth/login`            | 5 requests  | 60 seconds |
+| `/api/auth/setup`            | 5 requests  | 60 seconds |
+| All other endpoints (global) | 60 requests | 60 seconds |
+
+Source: [src/server/middleware/auth.py](../src/server/middleware/auth.py#L54-L68)
+
+---
+
+## 10. Deployment Modes
+
+### 10.1 Development
+
+```bash
+# Run with hot reload
+python -m uvicorn src.server.fastapi_app:app --reload
+```
+
+### 10.2 Production
+
+```bash
+# Via conda environment
+conda run -n AniWorld python -m uvicorn src.server.fastapi_app:app \
+    --host 127.0.0.1 --port 8000
+```
+
+### 10.3 Configuration
+
+Environment variables (via `.env` or shell):
+
+| Variable          | Default                        | Description            |
+| ----------------- | ------------------------------ | ---------------------- |
+| `JWT_SECRET_KEY`  | Random                         | Secret for JWT signing |
+| `DATABASE_URL`    | `sqlite:///./data/aniworld.db` | Database connection    |
+| `ANIME_DIRECTORY` | (empty)                        | Path to anime library  |
+| `LOG_LEVEL`       | `INFO`                         | Logging level          |
+| `CORS_ORIGINS`    | `localhost:3000,8000`          | Allowed CORS origins   |
+
+Source: [src/config/settings.py](../src/config/settings.py#L1-L96)
diff --git a/docs/CHANGELOG.md b/docs/CHANGELOG.md
new file mode 100644
index 0000000..a6e52ed
--- /dev/null
+++ b/docs/CHANGELOG.md
@@ -0,0 +1,105 @@
+# Changelog
+
+## Document Purpose
+
+This document tracks all notable changes to the Aniworld project.
+
+### What This Document Contains
+
+- **Version History**: All released versions with dates
+- **Added Features**: New functionality in each release
+- **Changed Features**: Modifications to existing features
+- **Deprecated Features**: Features marked for removal
+- **Removed Features**: Features removed from the codebase
+- **Fixed Bugs**: Bug fixes with issue references
+- **Security Fixes**: Security-related changes
+- **Breaking Changes**: Changes requiring user action
+
+### What This Document Does NOT Contain
+
+- Internal refactoring details (unless user-facing)
+- Commit-level changes
+- Work-in-progress features
+- Roadmap or planned features
+
+### Target Audience
+
+- All users and stakeholders
+- Operators planning upgrades
+- Developers tracking changes
+- Support personnel
+
+---
+
+## Format
+
+This changelog follows [Keep a Changelog](https://keepachangelog.com/) principles and adheres to [Semantic Versioning](https://semver.org/).
+ +## Sections for Each Release + +```markdown +## [Version] - YYYY-MM-DD + +### Added + +- New features + +### Changed + +- Changes to existing functionality + +### Deprecated + +- Features that will be removed in future versions + +### Removed + +- Features removed in this release + +### Fixed + +- Bug fixes + +### Security + +- Security-related fixes +``` + +--- + +## Unreleased + +_Changes that are in development but not yet released._ + +### Added + +- **Enhanced Anime Add Flow**: Automatic database persistence, targeted episode scanning, and folder creation with sanitized names +- Filesystem utility module (`src/server/utils/filesystem.py`) with `sanitize_folder_name()`, `is_safe_path()`, and `create_safe_folder()` functions +- `Serie.sanitized_folder` property for generating filesystem-safe folder names from display names +- `SerieScanner.scan_single_series()` method for targeted scanning of individual anime without full library rescan +- Add series API response now includes `missing_episodes` list and `total_missing` count +- Database transaction support with `@transactional` decorator and `atomic()` context manager +- Transaction propagation modes (REQUIRED, REQUIRES_NEW, NESTED) for fine-grained control +- Savepoint support for nested transactions with partial rollback capability +- `TransactionManager` helper class for manual transaction control +- Bulk operations: `bulk_mark_downloaded`, `bulk_delete`, `clear_all` for batch processing +- `rotate_session` atomic operation for secure session rotation +- Transaction utilities: `is_session_in_transaction`, `get_session_transaction_depth` +- `get_transactional_session` for sessions without auto-commit + +### Changed + +- `QueueRepository.save_item()` now uses atomic transactions for data consistency +- `QueueRepository.clear_all()` now uses atomic transactions for all-or-nothing behavior +- Service layer documentation updated to reflect transaction-aware design + +### Fixed + +- Scan status indicator now correctly shows running state after page reload during active scan +- Improved reliability of process status updates in the UI header + +--- + +## Version History + +_To be documented as versions are released._ diff --git a/docs/CONFIGURATION.md b/docs/CONFIGURATION.md new file mode 100644 index 0000000..3b341c8 --- /dev/null +++ b/docs/CONFIGURATION.md @@ -0,0 +1,298 @@ +# Configuration Reference + +## Document Purpose + +This document provides a comprehensive reference for all configuration options in the Aniworld application. + +--- + +## 1. Configuration Overview + +### Configuration Sources + +Aniworld uses a layered configuration system: + +1. **Environment Variables** (highest priority) +2. **`.env` file** in project root +3. **`data/config.json`** file +4. **Default values** (lowest priority) + +### Loading Mechanism + +Configuration is loaded at application startup via Pydantic Settings. + +```python +# src/config/settings.py +class Settings(BaseSettings): + model_config = SettingsConfigDict(env_file=".env", extra="ignore") +``` + +Source: [src/config/settings.py](../src/config/settings.py#L1-L96) + +--- + +## 2. Environment Variables + +### Authentication Settings + +| Variable | Type | Default | Description | +| ----------------------- | ------ | ---------------- | ------------------------------------------------------------------- | +| `JWT_SECRET_KEY` | string | (random) | Secret key for JWT token signing. Auto-generated if not set. | +| `PASSWORD_SALT` | string | `"default-salt"` | Salt for password hashing. 
| +| `MASTER_PASSWORD_HASH` | string | (none) | Pre-hashed master password. Loaded from config.json if not set. | +| `MASTER_PASSWORD` | string | (none) | **DEVELOPMENT ONLY** - Plaintext password. Never use in production. | +| `SESSION_TIMEOUT_HOURS` | int | `24` | JWT token expiry time in hours. | + +Source: [src/config/settings.py](../src/config/settings.py#L13-L42) + +### Server Settings + +| Variable | Type | Default | Description | +| ----------------- | ------ | -------------------------------- | --------------------------------------------------------------------- | +| `ANIME_DIRECTORY` | string | `""` | Path to anime library directory. | +| `LOG_LEVEL` | string | `"INFO"` | Logging level: DEBUG, INFO, WARNING, ERROR, CRITICAL. | +| `DATABASE_URL` | string | `"sqlite:///./data/aniworld.db"` | Database connection string. | +| `CORS_ORIGINS` | string | `"http://localhost:3000"` | Comma-separated allowed CORS origins. Use `*` for localhost defaults. | +| `API_RATE_LIMIT` | int | `100` | Maximum API requests per minute. | + +Source: [src/config/settings.py](../src/config/settings.py#L43-L68) + +### Provider Settings + +| Variable | Type | Default | Description | +| ------------------ | ------ | --------------- | --------------------------------------------- | +| `DEFAULT_PROVIDER` | string | `"aniworld.to"` | Default anime provider. | +| `PROVIDER_TIMEOUT` | int | `30` | HTTP timeout for provider requests (seconds). | +| `RETRY_ATTEMPTS` | int | `3` | Number of retry attempts for failed requests. | + +Source: [src/config/settings.py](../src/config/settings.py#L69-L79) + +--- + +## 3. Configuration File (config.json) + +Location: `data/config.json` + +### File Structure + +```json +{ + "name": "Aniworld", + "data_dir": "data", + "scheduler": { + "enabled": true, + "interval_minutes": 60 + }, + "logging": { + "level": "INFO", + "file": null, + "max_bytes": null, + "backup_count": 3 + }, + "backup": { + "enabled": false, + "path": "data/backups", + "keep_days": 30 + }, + "other": { + "master_password_hash": "$pbkdf2-sha256$...", + "anime_directory": "/path/to/anime" + }, + "version": "1.0.0" +} +``` + +Source: [data/config.json](../data/config.json) + +--- + +## 4. Configuration Sections + +### 4.1 General Settings + +| Field | Type | Default | Description | +| ---------- | ------ | ------------ | ------------------------------ | +| `name` | string | `"Aniworld"` | Application name. | +| `data_dir` | string | `"data"` | Base directory for data files. | + +Source: [src/server/models/config.py](../src/server/models/config.py#L62-L66) + +### 4.2 Scheduler Settings + +Controls automatic library rescanning. + +| Field | Type | Default | Description | +| ---------------------------- | ---- | ------- | -------------------------------------------- | +| `scheduler.enabled` | bool | `true` | Enable/disable automatic scans. | +| `scheduler.interval_minutes` | int | `60` | Minutes between automatic scans. Minimum: 1. | + +Source: [src/server/models/config.py](../src/server/models/config.py#L5-L12) + +### 4.3 Logging Settings + +| Field | Type | Default | Description | +| ---------------------- | ------ | -------- | ------------------------------------------------- | +| `logging.level` | string | `"INFO"` | Log level: DEBUG, INFO, WARNING, ERROR, CRITICAL. | +| `logging.file` | string | `null` | Optional log file path. | +| `logging.max_bytes` | int | `null` | Maximum log file size for rotation. | +| `logging.backup_count` | int | `3` | Number of rotated log files to keep. 
| + +Source: [src/server/models/config.py](../src/server/models/config.py#L27-L46) + +### 4.4 Backup Settings + +| Field | Type | Default | Description | +| ------------------ | ------ | ---------------- | -------------------------------- | +| `backup.enabled` | bool | `false` | Enable automatic config backups. | +| `backup.path` | string | `"data/backups"` | Directory for backup files. | +| `backup.keep_days` | int | `30` | Days to retain backups. | + +Source: [src/server/models/config.py](../src/server/models/config.py#L15-L24) + +### 4.5 Other Settings (Dynamic) + +The `other` field stores arbitrary settings. + +| Key | Type | Description | +| ---------------------- | ------ | --------------------------------------- | +| `master_password_hash` | string | Hashed master password (pbkdf2-sha256). | +| `anime_directory` | string | Path to anime library. | +| `advanced` | object | Advanced configuration options. | + +--- + +## 5. Configuration Precedence + +Settings are resolved in this order (first match wins): + +1. Environment variable (e.g., `ANIME_DIRECTORY`) +2. `.env` file in project root +3. `data/config.json` (for dynamic settings) +4. Code defaults in `Settings` class + +--- + +## 6. Validation Rules + +### Password Requirements + +Master password must meet all criteria: + +- Minimum 8 characters +- At least one uppercase letter +- At least one lowercase letter +- At least one digit +- At least one special character + +Source: [src/server/services/auth_service.py](../src/server/services/auth_service.py#L97-L125) + +### Logging Level Validation + +Must be one of: `DEBUG`, `INFO`, `WARNING`, `ERROR`, `CRITICAL` + +Source: [src/server/models/config.py](../src/server/models/config.py#L43-L47) + +### Backup Path Validation + +If `backup.enabled` is `true`, `backup.path` must be set. + +Source: [src/server/models/config.py](../src/server/models/config.py#L87-L91) + +--- + +## 7. Example Configurations + +### Minimal Development Setup + +**.env file:** + +``` +LOG_LEVEL=DEBUG +ANIME_DIRECTORY=/home/user/anime +``` + +### Production Setup + +**.env file:** + +``` +JWT_SECRET_KEY=your-secure-random-key-here +DATABASE_URL=postgresql+asyncpg://user:pass@localhost/aniworld +LOG_LEVEL=WARNING +CORS_ORIGINS=https://your-domain.com +API_RATE_LIMIT=60 +``` + +### Docker Setup + +```yaml +# docker-compose.yml +environment: + - JWT_SECRET_KEY=${JWT_SECRET_KEY} + - DATABASE_URL=sqlite:///./data/aniworld.db + - ANIME_DIRECTORY=/media/anime + - LOG_LEVEL=INFO +volumes: + - ./data:/app/data + - /media/anime:/media/anime:ro +``` + +--- + +## 8. Configuration Backup Management + +### Automatic Backups + +Backups are created automatically before config changes when `backup.enabled` is `true`. + +Location: `data/config_backups/` + +Naming: `config_backup_YYYYMMDD_HHMMSS.json` + +### Manual Backup via API + +```bash +# Create backup +curl -X POST http://localhost:8000/api/config/backups \ + -H "Authorization: Bearer $TOKEN" + +# List backups +curl http://localhost:8000/api/config/backups \ + -H "Authorization: Bearer $TOKEN" + +# Restore backup +curl -X POST http://localhost:8000/api/config/backups/config_backup_20251213.json/restore \ + -H "Authorization: Bearer $TOKEN" +``` + +Source: [src/server/api/config.py](../src/server/api/config.py#L67-L142) + +--- + +## 9. Troubleshooting + +### Configuration Not Loading + +1. Check file permissions on `data/config.json` +2. Verify JSON syntax with a validator +3. Check logs for Pydantic validation errors + +### Environment Variable Not Working + +1. 
Ensure variable name matches exactly (case-sensitive) +2. Check `.env` file location (project root) +3. Restart application after changes + +### Master Password Issues + +1. Password hash is stored in `config.json` under `other.master_password_hash` +2. Delete this field to reset (requires re-setup) +3. Check hash format starts with `$pbkdf2-sha256$` + +--- + +## 10. Related Documentation + +- [API.md](API.md) - Configuration API endpoints +- [DEVELOPMENT.md](DEVELOPMENT.md) - Development environment setup +- [ARCHITECTURE.md](ARCHITECTURE.md) - Configuration service architecture diff --git a/docs/DATABASE.md b/docs/DATABASE.md new file mode 100644 index 0000000..dc0ac0e --- /dev/null +++ b/docs/DATABASE.md @@ -0,0 +1,421 @@ +# Database Documentation + +## Document Purpose + +This document describes the database schema, models, and data layer of the Aniworld application. + +--- + +## 1. Database Overview + +### Technology + +- **Database Engine**: SQLite 3 (default), PostgreSQL supported +- **ORM**: SQLAlchemy 2.0 with async support (aiosqlite) +- **Location**: `data/aniworld.db` (configurable via `DATABASE_URL`) + +Source: [src/config/settings.py](../src/config/settings.py#L53-L55) + +### Connection Configuration + +```python +# Default connection string +DATABASE_URL = "sqlite+aiosqlite:///./data/aniworld.db" + +# PostgreSQL alternative +DATABASE_URL = "postgresql+asyncpg://user:pass@localhost/aniworld" +``` + +Source: [src/server/database/connection.py](../src/server/database/connection.py) + +--- + +## 2. Entity Relationship Diagram + +``` ++-------------------+ +-------------------+ +------------------------+ +| anime_series | | episodes | | download_queue_item | ++-------------------+ +-------------------+ +------------------------+ +| id (PK) |<--+ | id (PK) | +-->| id (PK, VARCHAR) | +| key (UNIQUE) | | | series_id (FK)----+---+ | series_id (FK)---------+ +| name | +---| | | status | +| site | | season | | priority | +| folder | | episode_number | | season | +| created_at | | title | | episode | +| updated_at | | file_path | | progress_percent | ++-------------------+ | is_downloaded | | error_message | + | created_at | | retry_count | + | updated_at | | added_at | + +-------------------+ | started_at | + | completed_at | + | created_at | + | updated_at | + +------------------------+ +``` + +--- + +## 3. Table Schemas + +### 3.1 anime_series + +Stores anime series metadata. 
+ +| Column | Type | Constraints | Description | +| ------------ | ------------- | -------------------------- | ------------------------------------------------------- | +| `id` | INTEGER | PRIMARY KEY, AUTOINCREMENT | Internal database ID | +| `key` | VARCHAR(255) | UNIQUE, NOT NULL, INDEX | **Primary identifier** - provider-assigned URL-safe key | +| `name` | VARCHAR(500) | NOT NULL, INDEX | Display name of the series | +| `site` | VARCHAR(500) | NOT NULL | Provider site URL | +| `folder` | VARCHAR(1000) | NOT NULL | Filesystem folder name (metadata only) | +| `created_at` | DATETIME | NOT NULL, DEFAULT NOW | Record creation timestamp | +| `updated_at` | DATETIME | NOT NULL, ON UPDATE NOW | Last update timestamp | + +**Identifier Convention:** + +- `key` is the **primary identifier** for all operations (e.g., `"attack-on-titan"`) +- `folder` is **metadata only** for filesystem operations (e.g., `"Attack on Titan (2013)"`) +- `id` is used only for database relationships + +Source: [src/server/database/models.py](../src/server/database/models.py#L23-L87) + +### 3.2 episodes + +Stores individual episode information. + +| Column | Type | Constraints | Description | +| ---------------- | ------------- | ---------------------------- | ----------------------------- | +| `id` | INTEGER | PRIMARY KEY, AUTOINCREMENT | Internal database ID | +| `series_id` | INTEGER | FOREIGN KEY, NOT NULL, INDEX | Reference to anime_series.id | +| `season` | INTEGER | NOT NULL | Season number (1-based) | +| `episode_number` | INTEGER | NOT NULL | Episode number within season | +| `title` | VARCHAR(500) | NULLABLE | Episode title if known | +| `file_path` | VARCHAR(1000) | NULLABLE | Local file path if downloaded | +| `is_downloaded` | BOOLEAN | NOT NULL, DEFAULT FALSE | Download status flag | +| `created_at` | DATETIME | NOT NULL, DEFAULT NOW | Record creation timestamp | +| `updated_at` | DATETIME | NOT NULL, ON UPDATE NOW | Last update timestamp | + +**Foreign Key:** + +- `series_id` -> `anime_series.id` (ON DELETE CASCADE) + +Source: [src/server/database/models.py](../src/server/database/models.py#L122-L181) + +### 3.3 download_queue_item + +Stores download queue items with status tracking. 
+ +| Column | Type | Constraints | Description | +| ------------------ | ------------- | --------------------------- | ------------------------------ | +| `id` | VARCHAR(36) | PRIMARY KEY | UUID identifier | +| `series_id` | INTEGER | FOREIGN KEY, NOT NULL | Reference to anime_series.id | +| `season` | INTEGER | NOT NULL | Season number | +| `episode` | INTEGER | NOT NULL | Episode number | +| `status` | VARCHAR(20) | NOT NULL, DEFAULT 'pending' | Download status | +| `priority` | VARCHAR(10) | NOT NULL, DEFAULT 'NORMAL' | Queue priority | +| `progress_percent` | FLOAT | NULLABLE | Download progress (0-100) | +| `error_message` | TEXT | NULLABLE | Error description if failed | +| `retry_count` | INTEGER | NOT NULL, DEFAULT 0 | Number of retry attempts | +| `source_url` | VARCHAR(2000) | NULLABLE | Download source URL | +| `added_at` | DATETIME | NOT NULL, DEFAULT NOW | When added to queue | +| `started_at` | DATETIME | NULLABLE | When download started | +| `completed_at` | DATETIME | NULLABLE | When download completed/failed | +| `created_at` | DATETIME | NOT NULL, DEFAULT NOW | Record creation timestamp | +| `updated_at` | DATETIME | NOT NULL, ON UPDATE NOW | Last update timestamp | + +**Status Values:** `pending`, `downloading`, `paused`, `completed`, `failed`, `cancelled` + +**Priority Values:** `LOW`, `NORMAL`, `HIGH` + +**Foreign Key:** + +- `series_id` -> `anime_series.id` (ON DELETE CASCADE) + +Source: [src/server/database/models.py](../src/server/database/models.py#L200-L300) + +--- + +## 4. Indexes + +| Table | Index Name | Columns | Purpose | +| --------------------- | ----------------------- | ----------- | --------------------------------- | +| `anime_series` | `ix_anime_series_key` | `key` | Fast lookup by primary identifier | +| `anime_series` | `ix_anime_series_name` | `name` | Search by name | +| `episodes` | `ix_episodes_series_id` | `series_id` | Join with series | +| `download_queue_item` | `ix_download_series_id` | `series_id` | Filter by series | +| `download_queue_item` | `ix_download_status` | `status` | Filter by status | + +--- + +## 5. Model Layer + +### 5.1 SQLAlchemy ORM Models + +```python +# src/server/database/models.py + +class AnimeSeries(Base, TimestampMixin): + __tablename__ = "anime_series" + + id: Mapped[int] = mapped_column(Integer, primary_key=True) + key: Mapped[str] = mapped_column(String(255), unique=True, index=True) + name: Mapped[str] = mapped_column(String(500), index=True) + site: Mapped[str] = mapped_column(String(500)) + folder: Mapped[str] = mapped_column(String(1000)) + + episodes: Mapped[List["Episode"]] = relationship( + "Episode", back_populates="series", cascade="all, delete-orphan" + ) +``` + +Source: [src/server/database/models.py](../src/server/database/models.py#L23-L87) + +### 5.2 Pydantic API Models + +```python +# src/server/models/download.py + +class DownloadItem(BaseModel): + id: str + serie_id: str # Maps to anime_series.key + serie_folder: str # Metadata only + serie_name: str + episode: EpisodeIdentifier + status: DownloadStatus + priority: DownloadPriority +``` + +Source: [src/server/models/download.py](../src/server/models/download.py#L63-L118) + +### 5.3 Model Mapping + +| API Field | Database Column | Notes | +| -------------- | --------------------- | ------------------ | +| `serie_id` | `anime_series.key` | Primary identifier | +| `serie_folder` | `anime_series.folder` | Metadata only | +| `serie_name` | `anime_series.name` | Display name | + +--- + +## 6. 
Transaction Support + +### 6.1 Overview + +The database layer provides comprehensive transaction support to ensure data consistency across compound operations. All write operations can be wrapped in explicit transactions. + +Source: [src/server/database/transaction.py](../src/server/database/transaction.py) + +### 6.2 Transaction Utilities + +| Component | Type | Description | +| ------------------------- | ----------------- | ---------------------------------------- | +| `@transactional` | Decorator | Wraps function in transaction boundary | +| `atomic()` | Async context mgr | Provides atomic operation block | +| `atomic_sync()` | Sync context mgr | Sync version of atomic() | +| `TransactionContext` | Class | Explicit sync transaction control | +| `AsyncTransactionContext` | Class | Explicit async transaction control | +| `TransactionManager` | Class | Helper for manual transaction management | + +### 6.3 Transaction Propagation Modes + +| Mode | Behavior | +| -------------- | ------------------------------------------------ | +| `REQUIRED` | Use existing transaction or create new (default) | +| `REQUIRES_NEW` | Always create new transaction | +| `NESTED` | Create savepoint within existing transaction | + +### 6.4 Usage Examples + +**Using @transactional decorator:** + +```python +from src.server.database.transaction import transactional + +@transactional() +async def compound_operation(db: AsyncSession, data: dict): + # All operations commit together or rollback on error + series = await AnimeSeriesService.create(db, ...) + episode = await EpisodeService.create(db, series_id=series.id, ...) + return series, episode +``` + +**Using atomic() context manager:** + +```python +from src.server.database.transaction import atomic + +async def some_function(db: AsyncSession): + async with atomic(db) as tx: + await operation1(db) + await operation2(db) + # Auto-commits on success, rolls back on exception +``` + +**Using savepoints for partial rollback:** + +```python +async with atomic(db) as tx: + await outer_operation(db) + + async with tx.savepoint() as sp: + await risky_operation(db) + if error_condition: + await sp.rollback() # Only rollback nested ops + + await final_operation(db) # Still executes +``` + +Source: [src/server/database/transaction.py](../src/server/database/transaction.py) + +### 6.5 Connection Module Additions + +| Function | Description | +| ------------------------------- | -------------------------------------------- | +| `get_transactional_session` | Session without auto-commit for transactions | +| `TransactionManager` | Helper class for manual transaction control | +| `is_session_in_transaction` | Check if session is in active transaction | +| `get_session_transaction_depth` | Get nesting depth of transactions | + +Source: [src/server/database/connection.py](../src/server/database/connection.py) + +--- + +## 7. Repository Pattern + +The `QueueRepository` class provides data access abstraction. + +```python +class QueueRepository: + async def save_item(self, item: DownloadItem) -> None: + """Save or update a download item (atomic operation).""" + + async def get_all_items(self) -> List[DownloadItem]: + """Get all items from database.""" + + async def delete_item(self, item_id: str) -> bool: + """Delete item by ID.""" + + async def clear_all(self) -> int: + """Clear all items (atomic operation).""" +``` + +Note: Compound operations (`save_item`, `clear_all`) are wrapped in `atomic()` transactions. 
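+
+A simplified sketch of that wrapping, with the ORM row handling reduced to assumed private helpers (see the actual implementation at the source link below):
+
+```python
+from src.server.database.transaction import atomic
+
+
+class QueueRepository:
+    def __init__(self, db):
+        self._db = db
+
+    async def save_item(self, item) -> None:
+        # The read-modify-write runs inside one atomic() block, so a
+        # failure mid-way rolls everything back instead of persisting
+        # a half-updated row.
+        async with atomic(self._db):
+            existing = await self._load_row(item.id)    # assumed helper
+            if existing is None:
+                await self._insert_row(item)            # assumed helper
+            else:
+                await self._update_row(existing, item)  # assumed helper
+```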
+
+Source: [src/server/services/queue_repository.py](../src/server/services/queue_repository.py)
+
+---
+
+## 8. Database Service
+
+The `AnimeSeriesService` provides async CRUD operations.
+
+```python
+class AnimeSeriesService:
+    @staticmethod
+    async def create(
+        db: AsyncSession,
+        key: str,
+        name: str,
+        site: str,
+        folder: str
+    ) -> AnimeSeries:
+        """Create a new anime series."""
+
+    @staticmethod
+    async def get_by_key(
+        db: AsyncSession,
+        key: str
+    ) -> Optional[AnimeSeries]:
+        """Get series by primary key identifier."""
+```
+
+### Bulk Operations
+
+Services provide bulk operations for transaction-safe batch processing:
+
+| Service                | Method                 | Description                        |
+| ---------------------- | ---------------------- | ---------------------------------- |
+| `EpisodeService`       | `bulk_mark_downloaded` | Mark multiple episodes at once     |
+| `DownloadQueueService` | `bulk_delete`          | Delete multiple queue items        |
+| `DownloadQueueService` | `clear_all`            | Clear entire queue                 |
+| `UserSessionService`   | `rotate_session`       | Atomically revoke old + create new |
+| `UserSessionService`   | `cleanup_expired`      | Bulk delete expired sessions       |
+
+Source: [src/server/database/service.py](../src/server/database/service.py)
+
+---
+
+## 9. Data Integrity Rules
+
+### Validation Constraints
+
+| Field                     | Rule                     | Error Message                         |
+| ------------------------- | ------------------------ | ------------------------------------- |
+| `anime_series.key`        | Non-empty, max 255 chars | "Series key cannot be empty"          |
+| `anime_series.name`       | Non-empty, max 500 chars | "Series name cannot be empty"         |
+| `episodes.season`         | 0-1000                   | "Season number must be non-negative"  |
+| `episodes.episode_number` | 0-10000                  | "Episode number must be non-negative" |
+
+Source: [src/server/database/models.py](../src/server/database/models.py#L89-L119)
+
+### Cascade Rules
+
+- Deleting `anime_series` deletes all related `episodes` and `download_queue_item` rows
+
+---
+
+## 10. Migration Strategy
+
+Currently, SQLAlchemy's `create_all()` is used for schema creation.
+
+```python
+# src/server/database/connection.py
+async def init_db():
+    async with engine.begin() as conn:
+        await conn.run_sync(Base.metadata.create_all)
+```
+
+For production migrations, Alembic is recommended but not yet implemented.
+
+Source: [src/server/database/connection.py](../src/server/database/connection.py)
+
+---
+
+## 11. Common Query Patterns
+
+### Get all series with missing episodes
+
+```python
+from sqlalchemy import select
+from sqlalchemy.orm import selectinload
+
+result = await db.execute(
+    select(AnimeSeries).options(selectinload(AnimeSeries.episodes))
+)
+for serie in result.scalars():
+    missing = [e for e in serie.episodes if not e.is_downloaded]
+```
+
+### Get pending downloads ordered by priority
+
+```python
+from sqlalchemy import case, select
+
+items = await db.execute(
+    select(DownloadQueueItem)
+    .where(DownloadQueueItem.status == "pending")
+    .order_by(
+        case(
+            (DownloadQueueItem.priority == "HIGH", 1),
+            (DownloadQueueItem.priority == "NORMAL", 2),
+            (DownloadQueueItem.priority == "LOW", 3),
+        ),
+        DownloadQueueItem.added_at
+    )
+)
+```
+
+---
+
+## 12.
Database Location + +| Environment | Default Location | +| ----------- | ------------------------------------------------- | +| Development | `./data/aniworld.db` | +| Production | Via `DATABASE_URL` environment variable | +| Testing | In-memory SQLite (`sqlite+aiosqlite:///:memory:`) | diff --git a/docs/DEVELOPMENT.md b/docs/DEVELOPMENT.md new file mode 100644 index 0000000..dc18406 --- /dev/null +++ b/docs/DEVELOPMENT.md @@ -0,0 +1,64 @@ +# Development Guide + +## Document Purpose + +This document provides guidance for developers working on the Aniworld project. + +### What This Document Contains + +- **Prerequisites**: Required software and tools +- **Environment Setup**: Step-by-step local development setup +- **Project Structure**: Source code organization explanation +- **Development Workflow**: Branch strategy, commit conventions +- **Coding Standards**: Style guide, linting, formatting +- **Running the Application**: Development server, CLI usage +- **Debugging Tips**: Common debugging approaches +- **IDE Configuration**: VS Code settings, recommended extensions +- **Contributing Guidelines**: How to submit changes +- **Code Review Process**: Review checklist and expectations + +### What This Document Does NOT Contain + +- Production deployment (see [DEPLOYMENT.md](DEPLOYMENT.md)) +- API reference (see [API.md](API.md)) +- Architecture decisions (see [ARCHITECTURE.md](ARCHITECTURE.md)) +- Test writing guides (see [TESTING.md](TESTING.md)) +- Security guidelines (see [SECURITY.md](SECURITY.md)) + +### Target Audience + +- New Developers joining the project +- Contributors (internal and external) +- Anyone setting up a development environment + +--- + +## Sections to Document + +1. Prerequisites + - Python version + - Conda environment + - Node.js (if applicable) + - Git +2. Getting Started + - Clone repository + - Setup conda environment + - Install dependencies + - Configuration setup +3. Project Structure Overview +4. Development Server + - Starting FastAPI server + - Hot reload configuration + - Debug mode +5. CLI Development +6. Code Style + - PEP 8 compliance + - Type hints requirements + - Docstring format + - Import organization +7. Git Workflow + - Branch naming + - Commit message format + - Pull request process +8. Common Development Tasks +9. Troubleshooting Development Issues diff --git a/docs/README.md b/docs/README.md new file mode 100644 index 0000000..5c57b87 --- /dev/null +++ b/docs/README.md @@ -0,0 +1,39 @@ +# Aniworld Documentation + +## Overview + +This directory contains all documentation for the Aniworld anime download manager project. 
+ +## Documentation Structure + +| Document | Purpose | Target Audience | +| ---------------------------------------- | ---------------------------------------------- | ---------------------------------- | +| [ARCHITECTURE.md](ARCHITECTURE.md) | System architecture and design decisions | Architects, Senior Developers | +| [API.md](API.md) | REST API reference and WebSocket documentation | Frontend Developers, API Consumers | +| [DEVELOPMENT.md](DEVELOPMENT.md) | Developer setup and contribution guide | All Developers | +| [DEPLOYMENT.md](DEPLOYMENT.md) | Deployment and operations guide | DevOps, System Administrators | +| [DATABASE.md](DATABASE.md) | Database schema and data models | Backend Developers | +| [TESTING.md](TESTING.md) | Testing strategy and guidelines | QA Engineers, Developers | +| [SECURITY.md](SECURITY.md) | Security considerations and guidelines | Security Engineers, All Developers | +| [CONFIGURATION.md](CONFIGURATION.md) | Configuration options reference | Operators, Developers | +| [CHANGELOG.md](CHANGELOG.md) | Version history and changes | All Stakeholders | +| [TROUBLESHOOTING.md](TROUBLESHOOTING.md) | Common issues and solutions | Support, Operators | +| [features.md](features.md) | Feature list and capabilities | Product Owners, Users | +| [instructions.md](instructions.md) | AI agent development instructions | AI Agents, Developers | + +## Documentation Standards + +- All documentation uses Markdown format +- Keep documentation up-to-date with code changes +- Include code examples where applicable +- Use clear, concise language +- Include diagrams for complex concepts (use Mermaid syntax) + +## Contributing to Documentation + +When adding or updating documentation: + +1. Follow the established format in each document +2. Update the README.md if adding new documents +3. Ensure cross-references are valid +4. Review for spelling and grammar diff --git a/docs/TESTING.md b/docs/TESTING.md new file mode 100644 index 0000000..4cf745b --- /dev/null +++ b/docs/TESTING.md @@ -0,0 +1,71 @@ +# Testing Documentation + +## Document Purpose + +This document describes the testing strategy, guidelines, and practices for the Aniworld project. + +### What This Document Contains + +- **Testing Strategy**: Overall approach to quality assurance +- **Test Categories**: Unit, integration, API, performance, security tests +- **Test Structure**: Organization of test files and directories +- **Writing Tests**: Guidelines for writing effective tests +- **Fixtures and Mocking**: Shared test utilities and mock patterns +- **Running Tests**: Commands and configurations +- **Coverage Requirements**: Minimum coverage thresholds +- **CI/CD Integration**: How tests run in automation +- **Test Data Management**: Managing test fixtures and data +- **Best Practices**: Do's and don'ts for testing + +### What This Document Does NOT Contain + +- Production deployment (see [DEPLOYMENT.md](DEPLOYMENT.md)) +- Security audit procedures (see [SECURITY.md](SECURITY.md)) +- Bug tracking and issue management +- Performance benchmarking results + +### Target Audience + +- Developers writing tests +- QA Engineers +- CI/CD Engineers +- Code reviewers + +--- + +## Sections to Document + +1. Testing Philosophy + - Test pyramid approach + - Quality gates +2. Test Categories + - Unit Tests (`tests/unit/`) + - Integration Tests (`tests/integration/`) + - API Tests (`tests/api/`) + - Frontend Tests (`tests/frontend/`) + - Performance Tests (`tests/performance/`) + - Security Tests (`tests/security/`) +3. 
Test Structure and Naming + - File naming conventions + - Test function naming + - Test class organization +4. Running Tests + - pytest commands + - Running specific tests + - Verbose output + - Coverage reports +5. Fixtures and Conftest + - Shared fixtures + - Database fixtures + - Mock services +6. Mocking Guidelines + - What to mock + - Mock patterns + - External service mocks +7. Coverage Requirements +8. CI/CD Integration +9. Writing Good Tests + - Arrange-Act-Assert pattern + - Test isolation + - Edge cases +10. Common Pitfalls to Avoid diff --git a/features.md b/docs/features.md similarity index 100% rename from features.md rename to docs/features.md diff --git a/docs/identifier_standardization_validation.md b/docs/identifier_standardization_validation.md deleted file mode 100644 index 9e0d05e..0000000 --- a/docs/identifier_standardization_validation.md +++ /dev/null @@ -1,426 +0,0 @@ -# Series Identifier Standardization - Validation Instructions - -## Overview - -This document provides comprehensive instructions for AI agents to validate the **Series Identifier Standardization** change across the Aniworld codebase. The change standardizes `key` as the primary identifier for series and relegates `folder` to metadata-only status. - -## Summary of the Change - -| Field | Purpose | Usage | -| -------- | ------------------------------------------------------------------------------ | --------------------------------------------------------------- | -| `key` | **Primary Identifier** - Provider-assigned, URL-safe (e.g., `attack-on-titan`) | All lookups, API operations, database queries, WebSocket events | -| `folder` | **Metadata Only** - Filesystem folder name (e.g., `Attack on Titan (2013)`) | Display purposes, filesystem operations only | -| `id` | **Database Primary Key** - Internal auto-increment integer | Database relationships only | - ---- - -## Validation Checklist - -### Phase 2: Application Layer Services - -**Files to validate:** - -1. **`src/server/services/anime_service.py`** - - - [ ] Class docstring explains `key` vs `folder` convention - - [ ] All public methods accept `key` parameter for series identification - - [ ] No methods accept `folder` as an identifier parameter - - [ ] Event handler methods document key/folder convention - - [ ] Progress tracking uses `key` in progress IDs where possible - -2. **`src/server/services/download_service.py`** - - - [ ] `DownloadItem` uses `serie_id` (which should be the `key`) - - [ ] `serie_folder` is documented as metadata only - - [ ] Queue operations look up series by `key` not `folder` - - [ ] Persistence format includes `serie_id` as the key identifier - -3. **`src/server/services/websocket_service.py`** - - - [ ] Module docstring explains key/folder convention - - [ ] Broadcast methods include `key` in message payloads - - [ ] `folder` is documented as optional/display only - - [ ] Event broadcasts use `key` as primary identifier - -4. **`src/server/services/scan_service.py`** - - - [ ] Scan operations use `key` for identification - - [ ] Progress events include `key` field - -5. 
**`src/server/services/progress_service.py`** - - [ ] Progress tracking includes `key` in metadata where applicable - -**Validation Commands:** - -```bash -# Check service layer for folder-based lookups -grep -rn "by_folder\|folder.*=.*identifier\|folder.*lookup" src/server/services/ --include="*.py" - -# Verify key is used in services -grep -rn "serie_id\|series_key\|key.*identifier" src/server/services/ --include="*.py" -``` - ---- - -### Phase 3: API Endpoints and Responses - -**Files to validate:** - -1. **`src/server/api/anime.py`** - - - [ ] `AnimeSummary` model has `key` field with proper description - - [ ] `AnimeDetail` model has `key` field with proper description - - [ ] API docstrings explain `key` is the primary identifier - - [ ] `folder` field descriptions state "metadata only" - - [ ] Endpoint paths use `key` parameter (e.g., `/api/anime/{key}`) - - [ ] No endpoints use `folder` as path parameter for lookups - -2. **`src/server/api/download.py`** - - - [ ] Download endpoints use `serie_id` (key) for operations - - [ ] Request models document key/folder convention - - [ ] Response models include `key` as primary identifier - -3. **`src/server/models/anime.py`** - - - [ ] Module docstring explains identifier convention - - [ ] `AnimeSeriesResponse` has `key` field properly documented - - [ ] `SearchResult` has `key` field properly documented - - [ ] Field validators normalize `key` to lowercase - - [ ] `folder` fields document metadata-only purpose - -4. **`src/server/models/download.py`** - - - [ ] `DownloadItem` has `serie_id` documented as the key - - [ ] `serie_folder` documented as metadata only - - [ ] Field descriptions are clear about primary vs metadata - -5. **`src/server/models/websocket.py`** - - [ ] Module docstring explains key/folder convention - - [ ] Message models document `key` as primary identifier - - [ ] `folder` documented as optional display metadata - -**Validation Commands:** - -```bash -# Check API endpoints for folder-based paths -grep -rn "folder.*Path\|/{folder}" src/server/api/ --include="*.py" - -# Verify key is used in endpoints -grep -rn "/{key}\|series_key\|serie_id" src/server/api/ --include="*.py" - -# Check model field descriptions -grep -rn "Field.*description.*identifier\|Field.*description.*key\|Field.*description.*folder" src/server/models/ --include="*.py" -``` - ---- - -### Phase 4: Frontend Integration - -**Files to validate:** - -1. **`src/server/web/static/js/app.js`** - - - [ ] `selectedSeries` Set uses `key` values, not `folder` - - [ ] `seriesData` array comments indicate `key` as primary identifier - - [ ] Selection operations use `key` property - - [ ] API calls pass `key` for series identification - - [ ] WebSocket message handlers extract `key` from data - - [ ] No code uses `folder` for series lookups - -2. **`src/server/web/static/js/queue.js`** - - - [ ] Queue items reference series by `key` or `serie_id` - - [ ] WebSocket handlers extract `key` from messages - - [ ] UI operations use `key` for identification - - [ ] `serie_folder` used only for display - -3. **`src/server/web/static/js/websocket_client.js`** - - - [ ] Message handling preserves `key` field - - [ ] No transformation that loses `key` information - -4. 
**HTML Templates** (`src/server/web/templates/`) - - [ ] Data attributes use `key` for identification (e.g., `data-key`) - - [ ] No `data-folder` used for identification purposes - - [ ] Display uses `folder` or `name` appropriately - -**Validation Commands:** - -```bash -# Check JavaScript for folder-based lookups -grep -rn "\.folder\s*==\|folder.*identifier\|getByFolder" src/server/web/static/js/ --include="*.js" - -# Check data attributes in templates -grep -rn "data-key\|data-folder\|data-series" src/server/web/templates/ --include="*.html" -``` - ---- - -### Phase 5: Database Operations - -**Files to validate:** - -1. **`src/server/database/models.py`** - - - [ ] `AnimeSeries` model has `key` column with unique constraint - - [ ] `key` column is indexed - - [ ] Model docstring explains identifier convention - - [ ] `folder` column docstring states "metadata only" - - [ ] Validators check `key` is not empty - - [ ] No `folder` uniqueness constraint (unless intentional) - -2. **`src/server/database/service.py`** - - - [ ] `AnimeSeriesService` has `get_by_key()` method - - [ ] Class docstring explains lookup convention - - [ ] No `get_by_folder()` without deprecation - - [ ] All CRUD operations use `key` for identification - - [ ] Logging uses `key` in messages - -3. **`src/server/database/migrations/`** - - [ ] Migration files maintain `key` as unique, indexed column - - [ ] No migrations that use `folder` as identifier - -**Validation Commands:** - -```bash -# Check database models -grep -rn "unique=True\|index=True" src/server/database/models.py - -# Check service lookups -grep -rn "get_by_key\|get_by_folder\|filter.*key\|filter.*folder" src/server/database/service.py -``` - ---- - -### Phase 6: WebSocket Events - -**Files to validate:** - -1. **All WebSocket broadcast calls** should include `key` in payload: - - - `download_progress` → includes `key` - - `download_complete` → includes `key` - - `download_failed` → includes `key` - - `scan_progress` → includes `key` (where applicable) - - `queue_status` → items include `key` - -2. **Message format validation:** - ```json - { - "type": "download_progress", - "data": { - "key": "attack-on-titan", // PRIMARY - always present - "folder": "Attack on Titan (2013)", // OPTIONAL - display only - "progress": 45.5, - ... - } - } - ``` - -**Validation Commands:** - -```bash -# Check WebSocket broadcast calls -grep -rn "broadcast.*key\|send_json.*key" src/server/services/ --include="*.py" - -# Check message construction -grep -rn '"key":\|"folder":' src/server/services/ --include="*.py" -``` - ---- - -### Phase 7: Test Coverage - -**Test files to validate:** - -1. **`tests/unit/test_serie_class.py`** - - - [ ] Tests for key validation (empty, whitespace, None) - - [ ] Tests for key as primary identifier - - [ ] Tests for folder as metadata only - -2. **`tests/unit/test_anime_service.py`** - - - [ ] Service tests use `key` for operations - - [ ] Mock objects have proper `key` attributes - -3. **`tests/unit/test_database_models.py`** - - - [ ] Tests for `key` uniqueness constraint - - [ ] Tests for `key` validation - -4. **`tests/unit/test_database_service.py`** - - - [ ] Tests for `get_by_key()` method - - [ ] No tests for deprecated folder lookups - -5. **`tests/api/test_anime_endpoints.py`** - - - [ ] API tests use `key` in requests - - [ ] Mock `FakeSerie` has proper `key` attribute - - [ ] Comments explain key/folder convention - -6. 
**`tests/unit/test_websocket_service.py`** - - [ ] WebSocket tests verify `key` in messages - - [ ] Broadcast tests include `key` in payload - -**Validation Commands:** - -```bash -# Run all tests -conda run -n AniWorld python -m pytest tests/ -v --tb=short - -# Run specific test files -conda run -n AniWorld python -m pytest tests/unit/test_serie_class.py -v -conda run -n AniWorld python -m pytest tests/unit/test_database_models.py -v -conda run -n AniWorld python -m pytest tests/api/test_anime_endpoints.py -v - -# Search tests for identifier usage -grep -rn "key.*identifier\|folder.*metadata" tests/ --include="*.py" -``` - ---- - -## Common Issues to Check - -### 1. Inconsistent Naming - -Look for inconsistent parameter names: - -- `serie_key` vs `series_key` vs `key` -- `serie_id` should refer to `key`, not database `id` -- `serie_folder` vs `folder` - -### 2. Missing Documentation - -Check that ALL models, services, and APIs document: - -- What `key` is and how to use it -- That `folder` is metadata only - -### 3. Legacy Code Patterns - -Search for deprecated patterns: - -```python -# Bad - using folder for lookup -series = get_by_folder(folder_name) - -# Good - using key for lookup -series = get_by_key(series_key) -``` - -### 4. API Response Consistency - -Verify all API responses include: - -- `key` field (primary identifier) -- `folder` field (optional, for display) - -### 5. Frontend Data Flow - -Verify the frontend: - -- Stores `key` in selection sets -- Passes `key` to API calls -- Uses `folder` only for display - ---- - -## Deprecation Warnings - -The following should have deprecation warnings (for removal in v3.0.0): - -1. Any `get_by_folder()` or `GetByFolder()` methods -2. Any API endpoints that accept `folder` as a lookup parameter -3. Any frontend code that uses `folder` for identification - -**Example deprecation:** - -```python -import warnings - -def get_by_folder(self, folder: str): - """DEPRECATED: Use get_by_key() instead.""" - warnings.warn( - "get_by_folder() is deprecated, use get_by_key(). " - "Will be removed in v3.0.0", - DeprecationWarning, - stacklevel=2 - ) - # ... implementation -``` - ---- - -## Automated Validation Script - -Run this script to perform automated checks: - -```bash -#!/bin/bash -# identifier_validation.sh - -echo "=== Series Identifier Standardization Validation ===" -echo "" - -echo "1. Checking core entities..." -grep -rn "PRIMARY IDENTIFIER\|metadata only" src/core/entities/ --include="*.py" | head -20 - -echo "" -echo "2. Checking for deprecated folder lookups..." -grep -rn "get_by_folder\|GetByFolder" src/ --include="*.py" - -echo "" -echo "3. Checking API models for key field..." -grep -rn 'key.*Field\|Field.*key' src/server/models/ --include="*.py" | head -20 - -echo "" -echo "4. Checking database models..." -grep -rn "key.*unique\|key.*index" src/server/database/models.py - -echo "" -echo "5. Checking frontend key usage..." -grep -rn "selectedSeries\|\.key\|data-key" src/server/web/static/js/ --include="*.js" | head -20 - -echo "" -echo "6. Running tests..." -conda run -n AniWorld python -m pytest tests/unit/test_serie_class.py -v --tb=short - -echo "" -echo "=== Validation Complete ===" -``` - ---- - -## Expected Results - -After validation, you should confirm: - -1. ✅ All core entities use `key` as primary identifier -2. ✅ All services look up series by `key` -3. ✅ All API endpoints use `key` for operations -4. ✅ All database queries use `key` for lookups -5. ✅ Frontend uses `key` for selection and API calls -6. 
✅ WebSocket events include `key` in payload -7. ✅ All tests pass -8. ✅ Documentation clearly explains the convention -9. ✅ Deprecation warnings exist for legacy patterns - ---- - -## Sign-off - -Once validation is complete, update this section: - -- [x] Phase 1: Core Entities - Validated by: **AI Agent** Date: **28 Nov 2025** -- [x] Phase 2: Services - Validated by: **AI Agent** Date: **28 Nov 2025** -- [ ] Phase 3: API - Validated by: **\_\_\_** Date: **\_\_\_** -- [ ] Phase 4: Frontend - Validated by: **\_\_\_** Date: **\_\_\_** -- [ ] Phase 5: Database - Validated by: **\_\_\_** Date: **\_\_\_** -- [ ] Phase 6: WebSocket - Validated by: **\_\_\_** Date: **\_\_\_** -- [ ] Phase 7: Tests - Validated by: **\_\_\_** Date: **\_\_\_** - -**Final Approval:** \***\*\*\*\*\***\_\_\_\***\*\*\*\*\*** Date: **\*\***\_**\*\*** diff --git a/docs/infrastructure.md b/docs/infrastructure.md deleted file mode 100644 index 9fd262c..0000000 --- a/docs/infrastructure.md +++ /dev/null @@ -1,337 +0,0 @@ -# Aniworld Web Application Infrastructure - -```bash -conda activate AniWorld -``` - -## Project Structure - -``` -src/ -├── core/ # Core application logic -│ ├── SeriesApp.py # Main application class -│ ├── SerieScanner.py # Directory scanner -│ ├── entities/ # Domain entities (series.py, SerieList.py) -│ ├── interfaces/ # Abstract interfaces (providers.py, callbacks.py) -│ ├── providers/ # Content providers (aniworld, streaming) -│ └── exceptions/ # Custom exceptions -├── server/ # FastAPI web application -│ ├── fastapi_app.py # Main FastAPI application -│ ├── controllers/ # Route controllers (health, page, error) -│ ├── api/ # API routes (auth, config, anime, download, websocket) -│ ├── models/ # Pydantic models -│ ├── services/ # Business logic services -│ ├── database/ # SQLAlchemy ORM layer -│ ├── utils/ # Utilities (dependencies, templates, security) -│ └── web/ # Frontend (templates, static assets) -├── cli/ # CLI application -data/ # Config, database, queue state -logs/ # Application logs -tests/ # Test suites -``` - -## Technology Stack - -| Layer | Technology | -| --------- | ---------------------------------------------- | -| Backend | FastAPI, Uvicorn, SQLAlchemy, SQLite, Pydantic | -| Frontend | HTML5, CSS3, Vanilla JS, Bootstrap 5, HTMX | -| Security | JWT (python-jose), bcrypt (passlib) | -| Real-time | Native WebSocket | - -## Series Identifier Convention - -Throughout the codebase, three identifiers are used for anime series: - -| Identifier | Type | Purpose | Example | -| ---------- | --------------- | ----------------------------------------------------------- | -------------------------- | -| `key` | Unique, Indexed | **PRIMARY** - All lookups, API operations, WebSocket events | `"attack-on-titan"` | -| `folder` | String | Display/filesystem metadata only (never for lookups) | `"Attack on Titan (2013)"` | -| `id` | Primary Key | Internal database key for relationships | `1`, `42` | - -### Key Format Requirements - -- **Lowercase only**: No uppercase letters allowed -- **URL-safe**: Only alphanumeric characters and hyphens -- **Hyphen-separated**: Words separated by single hyphens -- **No leading/trailing hyphens**: Must start and end with alphanumeric -- **No consecutive hyphens**: `attack--titan` is invalid - -**Valid examples**: `"attack-on-titan"`, `"one-piece"`, `"86-eighty-six"`, `"re-zero"` -**Invalid examples**: `"Attack On Titan"`, `"attack_on_titan"`, `"attack on titan"` - -### Migration Notes - -- **Backward Compatibility**: API endpoints accepting `anime_id` will check `key` 
first, then fall back to `folder` lookup -- **Deprecation**: Folder-based lookups are deprecated and will be removed in a future version -- **New Code**: Always use `key` for identification; `folder` is metadata only - -## API Endpoints - -### Authentication (`/api/auth`) - -- `POST /login` - Master password authentication (returns JWT) -- `POST /logout` - Invalidate session -- `GET /status` - Check authentication status - -### Configuration (`/api/config`) - -- `GET /` - Get configuration -- `PUT /` - Update configuration -- `POST /validate` - Validate without applying -- `GET /backups` - List backups -- `POST /backups/{name}/restore` - Restore backup - -### Anime (`/api/anime`) - -- `GET /` - List anime with missing episodes (returns `key` as identifier) -- `GET /{anime_id}` - Get anime details (accepts `key` or `folder` for backward compatibility) -- `POST /search` - Search for anime (returns `key` as identifier) -- `POST /add` - Add new series (extracts `key` from link URL) -- `POST /rescan` - Trigger library rescan - -**Response Models:** - -- `AnimeSummary`: `key` (primary identifier), `name`, `site`, `folder` (metadata), `missing_episodes`, `link` -- `AnimeDetail`: `key` (primary identifier), `title`, `folder` (metadata), `episodes`, `description` - -### Download Queue (`/api/queue`) - -- `GET /status` - Queue status and statistics -- `POST /add` - Add episodes to queue -- `DELETE /{item_id}` - Remove item -- `POST /start` | `/stop` | `/pause` | `/resume` - Queue control -- `POST /retry` - Retry failed downloads -- `DELETE /completed` - Clear completed items - -**Request Models:** - -- `DownloadRequest`: `serie_id` (key, primary identifier), `serie_folder` (filesystem path), `serie_name` (display), `episodes`, `priority` - -**Response Models:** - -- `DownloadItem`: `id`, `serie_id` (key), `serie_folder` (metadata), `serie_name`, `episode`, `status`, `progress` -- `QueueStatus`: `is_running`, `is_paused`, `active_downloads`, `pending_queue`, `completed_downloads`, `failed_downloads` - -### WebSocket (`/ws/connect`) - -Real-time updates for downloads, scans, and queue operations. 
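Before the room and message-type reference below, a minimal Python sketch of consuming these events; the `websockets` library, the query-string token handshake, and everything outside the documented payload are assumptions, not part of this project's API:

```python
import asyncio
import json

import websockets  # third-party client; an assumption, not a project dependency


async def listen(token: str) -> None:
    # Passing the JWT via query string is an assumption; adjust to the
    # real /ws/connect handshake.
    uri = f"ws://127.0.0.1:8000/ws/connect?token={token}"
    async with websockets.connect(uri) as ws:
        async for raw in ws:
            msg = json.loads(raw)
            data = msg.get("data", {})
            if msg.get("type") == "download_progress":
                # 'key' is the primary identifier; 'folder' is display metadata only.
                print(f"{data['key']}: {data['percent']:.1f}%")


asyncio.run(listen("YOUR_JWT"))
```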
- -**Rooms**: `downloads`, `download_progress`, `scan_progress` - -**Message Types**: `download_progress`, `download_complete`, `download_failed`, `queue_status`, `scan_progress`, `scan_complete`, `scan_failed` - -**Series Identifier in Messages:** -All series-related WebSocket events include `key` as the primary identifier in their data payload: - -```json -{ - "type": "download_progress", - "timestamp": "2025-10-17T10:30:00.000Z", - "data": { - "download_id": "abc123", - "key": "attack-on-titan", - "folder": "Attack on Titan (2013)", - "percent": 45.2, - "speed_mbps": 2.5, - "eta_seconds": 180 - } -} -``` - -## Database Models - -| Model | Purpose | -| ----------------- | ---------------------------------------- | -| AnimeSeries | Series metadata (key, name, folder, etc) | -| Episode | Episodes linked to series | -| DownloadQueueItem | Queue items with status and progress | -| UserSession | JWT sessions with expiry | - -**Mixins**: `TimestampMixin` (created_at, updated_at), `SoftDeleteMixin` - -### AnimeSeries Identifier Fields - -| Field | Type | Purpose | -| -------- | --------------- | ------------------------------------------------- | -| `id` | Primary Key | Internal database key for relationships | -| `key` | Unique, Indexed | **PRIMARY IDENTIFIER** for all lookups | -| `folder` | String | Filesystem metadata only (not for identification) | - -**Database Service Methods:** - -- `AnimeSeriesService.get_by_key(key)` - **Primary lookup method** -- `AnimeSeriesService.get_by_id(id)` - Internal lookup by database ID -- No `get_by_folder()` method exists - folder is never used for lookups - -## Core Services - -### SeriesApp (`src/core/SeriesApp.py`) - -Main engine for anime series management with async support, progress callbacks, and cancellation. - -### Callback System (`src/core/interfaces/callbacks.py`) - -- `ProgressCallback`, `ErrorCallback`, `CompletionCallback` -- Context classes include `key` + optional `folder` fields -- Thread-safe `CallbackManager` for multiple callback registration - -### Services (`src/server/services/`) - -| Service | Purpose | -| ---------------- | ----------------------------------------- | -| AnimeService | Series management, scans (uses SeriesApp) | -| DownloadService | Queue management, download execution | -| ScanService | Library scan operations with callbacks | -| ProgressService | Centralized progress tracking + WebSocket | -| WebSocketService | Real-time connection management | -| AuthService | JWT authentication, rate limiting | -| ConfigService | Configuration persistence with backups | - -## Validation Utilities (`src/server/utils/validators.py`) - -Provides data validation functions for ensuring data integrity across the application. 
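As a point of reference for the key checks described below, here is a minimal sketch of the documented format rules (lowercase alphanumerics, single hyphens, no leading/trailing/consecutive hyphens); the actual `validate_series_key` in `validators.py` may differ in details such as error handling:

```python
import re

# One or more lowercase alphanumeric runs separated by single hyphens.
_KEY_RE = re.compile(r"^[a-z0-9]+(?:-[a-z0-9]+)*$")


def validate_series_key(key: str) -> str:
    """Return the key unchanged if it matches the documented format."""
    if not _KEY_RE.fullmatch(key):
        raise ValueError(f"Invalid series key: {key!r}")
    return key


assert validate_series_key("attack-on-titan") == "attack-on-titan"
assert validate_series_key("86-eighty-six") == "86-eighty-six"
# validate_series_key("attack--titan")  # raises ValueError (consecutive hyphens)
```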
- -### Series Key Validation - -- **`validate_series_key(key)`**: Validates key format (URL-safe, lowercase, hyphens only) - - Valid: `"attack-on-titan"`, `"one-piece"`, `"86-eighty-six"` - - Invalid: `"Attack On Titan"`, `"attack_on_titan"`, `"attack on titan"` -- **`validate_series_key_or_folder(identifier, allow_folder=True)`**: Backward-compatible validation - - Returns tuple `(identifier, is_key)` where `is_key` indicates if it's a valid key format - - Set `allow_folder=False` to require strict key format - -### Other Validators - -| Function | Purpose | -| --------------------------- | ------------------------------------------ | -| `validate_series_name` | Series display name validation | -| `validate_episode_range` | Episode range validation (1-1000) | -| `validate_download_quality` | Quality setting (360p-1080p, best, worst) | -| `validate_language` | Language codes (ger-sub, ger-dub, etc.) | -| `validate_anime_url` | Aniworld.to/s.to URL validation | -| `validate_backup_name` | Backup filename validation | -| `validate_config_data` | Configuration data structure validation | -| `sanitize_filename` | Sanitize filenames for safe filesystem use | - -## Template Helpers (`src/server/utils/template_helpers.py`) - -Provides utilities for template rendering and series data preparation. - -### Core Functions - -| Function | Purpose | -| -------------------------- | --------------------------------- | -| `get_base_context` | Base context for all templates | -| `render_template` | Render template with context | -| `validate_template_exists` | Check if template file exists | -| `list_available_templates` | List all available template files | - -### Series Context Helpers - -All series helpers use `key` as the primary identifier: - -| Function | Purpose | -| ----------------------------------- | ---------------------------------------------- | -| `prepare_series_context` | Prepare series data for templates (uses `key`) | -| `get_series_by_key` | Find series by `key` (not `folder`) | -| `filter_series_by_missing_episodes` | Filter series with missing episodes | - -**Example Usage:** - -```python -from src.server.utils.template_helpers import prepare_series_context - -series_data = [ - {"key": "attack-on-titan", "name": "Attack on Titan", "folder": "Attack on Titan (2013)"}, - {"key": "one-piece", "name": "One Piece", "folder": "One Piece (1999)"} -] -prepared = prepare_series_context(series_data, sort_by="name") -# Returns sorted list using 'key' as identifier -``` - -## Frontend - -### Static Files - -- CSS: `styles.css` (Fluent UI design), `ux_features.css` (accessibility) -- JS: `app.js`, `queue.js`, `websocket_client.js`, accessibility modules - -### WebSocket Client - -Native WebSocket wrapper with Socket.IO-compatible API: - -```javascript -const socket = io(); -socket.join("download_progress"); -socket.on("download_progress", (data) => { - /* ... */ -}); -``` - -### Authentication - -JWT tokens stored in localStorage, included as `Authorization: Bearer <token>`. 
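To illustrate the flow end to end, a hedged sketch of logging in against `POST /api/auth/login` and reusing the token; the request and response field names (`password`, `access_token`) are assumptions based on common JWT conventions, so check docs/API.md for the authoritative schema:

```python
import requests

BASE_URL = "http://127.0.0.1:8000"

# Field names are assumed; see docs/API.md for the real request/response schema.
resp = requests.post(
    f"{BASE_URL}/api/auth/login",
    json={"password": "your-master-password"},
)
resp.raise_for_status()
token = resp.json()["access_token"]

# Every authenticated call carries the JWT as a Bearer header.
headers = {"Authorization": f"Bearer {token}"}
anime = requests.get(f"{BASE_URL}/api/anime", headers=headers).json()
print([item["key"] for item in anime])  # 'key' is the primary identifier
```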
- -## Testing - -```bash -# All tests -conda run -n AniWorld python -m pytest tests/ -v - -# Unit tests only -conda run -n AniWorld python -m pytest tests/unit/ -v - -# API tests -conda run -n AniWorld python -m pytest tests/api/ -v -``` - -## Production Notes - -### Current (Single-Process) - -- SQLite with WAL mode -- In-memory WebSocket connections -- File-based config and queue persistence - -### Multi-Process Deployment - -- Switch to PostgreSQL/MySQL -- Move WebSocket registry to Redis -- Use distributed locking for queue operations -- Consider Redis for session/cache storage - -## Code Examples - -### API Usage with Key Identifier - -```python -# Fetching anime list - response includes 'key' as identifier -response = requests.get("/api/anime", headers={"Authorization": f"Bearer {token}"}) -anime_list = response.json() -# Each item has: key="attack-on-titan", folder="Attack on Titan (2013)", ... - -# Fetching specific anime by key (preferred) -response = requests.get("/api/anime/attack-on-titan", headers={"Authorization": f"Bearer {token}"}) - -# Adding to download queue using key -download_request = { - "serie_id": "attack-on-titan", # Use key, not folder - "serie_folder": "Attack on Titan (2013)", # Metadata for filesystem - "serie_name": "Attack on Titan", - "episodes": ["S01E01", "S01E02"], - "priority": 1 -} -response = requests.post("/api/queue/add", json=download_request, headers=headers) -``` - -### WebSocket Event Handling - -```javascript -// WebSocket events always include 'key' as identifier -socket.on("download_progress", (data) => { - const key = data.key; // Primary identifier: "attack-on-titan" - const folder = data.folder; // Metadata: "Attack on Titan (2013)" - updateProgressBar(key, data.percent); -}); -``` diff --git a/instructions.md b/docs/instructions.md similarity index 87% rename from instructions.md rename to docs/instructions.md index 6f5248e..6a09568 100644 --- a/instructions.md +++ b/docs/instructions.md @@ -75,7 +75,7 @@ conda run -n AniWorld python -m uvicorn src.server.fastapi_app:app --host 127.0. --- -## Final Implementation Notes +## Implementation Notes 1. **Incremental Development**: Implement features incrementally, testing each component thoroughly before moving to the next 2. **Code Review**: Review all generated code for adherence to project standards @@ -100,23 +100,10 @@ For each task completed: - [ ] Performance validated - [ ] Code reviewed - [ ] Task marked as complete in instructions.md -- [ ] Infrastructure.md updated +- [ ] Infrastructure.md updated and other docs - [ ] Changes committed to git; keep your messages in git short and clear - [ ] Take the next task --- -### Prerequisites - -1. Server is running: `conda run -n AniWorld python -m uvicorn src.server.fastapi_app:app --host 127.0.0.1 --port 8000 --reload` -2. Password: `Hallo123!` -3. 
Login via browser at `http://127.0.0.1:8000/login` - -### Notes - -- This is a simplification that removes complexity while maintaining core functionality -- Improves user experience with explicit manual control -- Easier to understand, test, and maintain -- Good foundation for future enhancements if needed - ---- +## TODO List: diff --git a/requirements.txt b/requirements.txt index fe6fe32..dab5a18 100644 --- a/requirements.txt +++ b/requirements.txt @@ -14,5 +14,4 @@ pytest==7.4.3 pytest-asyncio==0.21.1 httpx==0.25.2 sqlalchemy>=2.0.35 -alembic==1.13.0 aiosqlite>=0.19.0 \ No newline at end of file diff --git a/run_server.py b/run_server.py index 39f173c..ed82e76 100644 --- a/run_server.py +++ b/run_server.py @@ -2,7 +2,8 @@ """ Startup script for the Aniworld FastAPI application. -This script starts the application with proper logging configuration. +This script starts the application with proper logging configuration +and graceful shutdown support via Ctrl+C (SIGINT) or SIGTERM. """ import uvicorn @@ -15,6 +16,11 @@ if __name__ == "__main__": # Run the application with logging. # Only watch .py files in src/, explicitly exclude __pycache__. # This prevents reload loops from .pyc compilation. + # + # Graceful shutdown: + # - Ctrl+C (SIGINT) or SIGTERM triggers graceful shutdown + # - timeout_graceful_shutdown ensures shutdown completes within 30s + # - The FastAPI lifespan handler orchestrates cleanup in proper order uvicorn.run( "src.server.fastapi_app:app", host="127.0.0.1", @@ -24,4 +30,5 @@ if __name__ == "__main__": reload_includes=["*.py"], reload_excludes=["*/__pycache__/*", "*.pyc"], log_config=log_config, + timeout_graceful_shutdown=30, # Allow 30s for graceful shutdown ) diff --git a/scripts/start.sh b/scripts/start.sh index 186a105..498a94f 100644 --- a/scripts/start.sh +++ b/scripts/start.sh @@ -7,7 +7,7 @@ # installs dependencies, sets up the database, and starts the application. # # Usage: -# ./start.sh [development|production] [--no-install] [--no-migrate] +# ./start.sh [development|production] [--no-install] # # Environment Variables: # ENVIRONMENT: 'development' or 'production' (default: development) @@ -28,7 +28,6 @@ PROJECT_ROOT="$(dirname "$SCRIPT_DIR")" CONDA_ENV="${CONDA_ENV:-AniWorld}" ENVIRONMENT="${1:-development}" INSTALL_DEPS="${INSTALL_DEPS:-true}" -RUN_MIGRATIONS="${RUN_MIGRATIONS:-true}" PORT="${PORT:-8000}" HOST="${HOST:-127.0.0.1}" @@ -104,20 +103,6 @@ install_dependencies() { log_success "Dependencies installed." } -# Run database migrations -run_migrations() { - if [[ "$RUN_MIGRATIONS" != "true" ]]; then - log_warning "Skipping database migrations." - return - fi - - log_info "Running database migrations..." - cd "$PROJECT_ROOT" - conda run -n "$CONDA_ENV" \ - python -m alembic upgrade head 2>/dev/null || log_warning "No migrations to run." - log_success "Database migrations completed." -} - # Initialize database init_database() { log_info "Initializing database..." 
@@ -220,10 +205,6 @@ main() { INSTALL_DEPS="false" shift ;; - --no-migrate) - RUN_MIGRATIONS="false" - shift - ;; *) ENVIRONMENT="$1" shift @@ -237,7 +218,6 @@ main() { create_env_file install_dependencies init_database - run_migrations start_application } diff --git a/src/__pycache__/Exceptions.cpython-310.pyc b/src/__pycache__/Exceptions.cpython-310.pyc deleted file mode 100644 index a7c467e..0000000 Binary files a/src/__pycache__/Exceptions.cpython-310.pyc and /dev/null differ diff --git a/src/__pycache__/GlobalLogger.cpython-310.pyc b/src/__pycache__/GlobalLogger.cpython-310.pyc deleted file mode 100644 index d19e475..0000000 Binary files a/src/__pycache__/GlobalLogger.cpython-310.pyc and /dev/null differ diff --git a/src/__pycache__/Serie.cpython-310.pyc b/src/__pycache__/Serie.cpython-310.pyc deleted file mode 100644 index 8428bb6..0000000 Binary files a/src/__pycache__/Serie.cpython-310.pyc and /dev/null differ diff --git a/src/cli/Main.py b/src/cli/Main.py deleted file mode 100644 index 1a3a32a..0000000 --- a/src/cli/Main.py +++ /dev/null @@ -1,316 +0,0 @@ -"""Command-line interface for the Aniworld anime download manager.""" - -import logging -import os -from typing import Optional, Sequence - -from rich.progress import Progress - -from src.core.entities.series import Serie -from src.core.SeriesApp import SeriesApp as CoreSeriesApp - -LOG_FORMAT = "%(asctime)s - %(levelname)s - %(name)s - %(message)s" - -logger = logging.getLogger(__name__) - - -class SeriesCLI: - """Thin wrapper around :class:`SeriesApp` providing an interactive CLI.""" - - def __init__(self, directory_to_search: str) -> None: - print("Please wait while initializing...") - self.directory_to_search = directory_to_search - self.series_app = CoreSeriesApp(directory_to_search) - - self._progress: Optional[Progress] = None - self._overall_task_id: Optional[int] = None - self._series_task_id: Optional[int] = None - self._episode_task_id: Optional[int] = None - self._scan_task_id: Optional[int] = None - - # ------------------------------------------------------------------ - # Utility helpers - # ------------------------------------------------------------------ - def _get_series_list(self) -> Sequence[Serie]: - """Return the currently cached series with missing episodes.""" - return self.series_app.get_series_list() - - # ------------------------------------------------------------------ - # Display & selection - # ------------------------------------------------------------------ - def display_series(self) -> None: - """Print all series with assigned numbers.""" - series = self._get_series_list() - if not series: - print("\nNo series with missing episodes were found.") - return - - print("\nCurrent result:") - for index, serie in enumerate(series, start=1): - name = (serie.name or "").strip() - label = name if name else serie.folder - print(f"{index}. {label}") - - def get_user_selection(self) -> Optional[Sequence[Serie]]: - """Prompt the user to select one or more series for download.""" - series = list(self._get_series_list()) - if not series: - print("No series available for download.") - return None - - self.display_series() - prompt = ( - "\nSelect series by number (e.g. '1', '1,2' or 'all') " - "or type 'exit' to return: " - ) - selection = input(prompt).strip().lower() - - if selection in {"exit", ""}: - return None - - if selection == "all": - return series - - try: - indexes = [ - int(value.strip()) - 1 - for value in selection.split(",") - ] - except ValueError: - print("Invalid selection. 
Returning to main menu.") - return None - - chosen = [ - series[i] - for i in indexes - if 0 <= i < len(series) - ] - - if not chosen: - print("No valid series selected.") - return None - - return chosen - - # ------------------------------------------------------------------ - # Download logic - # ------------------------------------------------------------------ - def download_series(self, series: Sequence[Serie]) -> None: - """Download all missing episodes for the provided series list.""" - total_episodes = sum( - len(episodes) - for serie in series - for episodes in serie.episodeDict.values() - ) - - if total_episodes == 0: - print("Selected series do not contain missing episodes.") - return - - self._progress = Progress() - with self._progress: - self._overall_task_id = self._progress.add_task( - "[red]Processing...", total=total_episodes - ) - self._series_task_id = self._progress.add_task( - "[green]Current series", total=1 - ) - self._episode_task_id = self._progress.add_task( - "[gray]Download", total=100 - ) - - for serie in series: - serie_total = sum(len(eps) for eps in serie.episodeDict.values()) - self._progress.update( - self._series_task_id, - total=max(serie_total, 1), - completed=0, - description=f"[green]{serie.folder}", - ) - - for season, episodes in serie.episodeDict.items(): - for episode in episodes: - if not self.series_app.loader.is_language( - season, episode, serie.key - ): - logger.info( - "Skipping %s S%02dE%02d because the desired language is unavailable", - serie.folder, - season, - episode, - ) - continue - - result = self.series_app.download( - serieFolder=serie.folder, - season=season, - episode=episode, - key=serie.key, - callback=self._update_download_progress, - ) - - if not result.success: - logger.error("Download failed: %s", result.message) - - self._progress.advance(self._overall_task_id) - self._progress.advance(self._series_task_id) - self._progress.update( - self._episode_task_id, - completed=0, - description="[gray]Waiting...", - ) - - self._progress = None - self.series_app.refresh_series_list() - - def _update_download_progress(self, percent: float) -> None: - """Update the episode progress bar based on download progress.""" - if not self._progress or self._episode_task_id is None: - return - - description = f"[gray]Download: {percent:.1f}%" - self._progress.update( - self._episode_task_id, - completed=percent, - description=description, - ) - - # ------------------------------------------------------------------ - # Rescan logic - # ------------------------------------------------------------------ - def rescan(self) -> None: - """Trigger a rescan of the anime directory using the core app.""" - total_to_scan = self.series_app.SerieScanner.get_total_to_scan() - total_to_scan = max(total_to_scan, 1) - - self._progress = Progress() - with self._progress: - self._scan_task_id = self._progress.add_task( - "[red]Scanning folders...", - total=total_to_scan, - ) - - result = self.series_app.ReScan( - callback=self._wrap_scan_callback(total_to_scan) - ) - - self._progress = None - self._scan_task_id = None - - if result.success: - print(result.message) - else: - print(f"Scan failed: {result.message}") - - def _wrap_scan_callback(self, total: int): - """Create a callback that updates the scan progress bar.""" - - def _callback(folder: str, current: int) -> None: - if not self._progress or self._scan_task_id is None: - return - - self._progress.update( - self._scan_task_id, - completed=min(current, total), - description=f"[green]{folder}", - ) - - 
return _callback - - # ------------------------------------------------------------------ - # Search & add logic - # ------------------------------------------------------------------ - def search_mode(self) -> None: - """Search for a series and add it to the local list if chosen.""" - query = input("Enter search string: ").strip() - if not query: - return - - results = self.series_app.search(query) - if not results: - print("No results found. Returning to main menu.") - return - - print("\nSearch results:") - for index, result in enumerate(results, start=1): - print(f"{index}. {result.get('name', 'Unknown')}") - - selection = input( - "\nSelect an option by number or press to cancel: " - ).strip() - - if selection == "": - return - - try: - chosen_index = int(selection) - 1 - except ValueError: - print("Invalid input. Returning to main menu.") - return - - if not (0 <= chosen_index < len(results)): - print("Invalid selection. Returning to main menu.") - return - - chosen = results[chosen_index] - serie = Serie( - chosen.get("link", ""), - chosen.get("name", "Unknown"), - "aniworld.to", - chosen.get("link", ""), - {}, - ) - self.series_app.List.add(serie) - self.series_app.refresh_series_list() - print(f"Added '{serie.name}' to the local catalogue.") - - # ------------------------------------------------------------------ - # Main loop - # ------------------------------------------------------------------ - def run(self) -> None: - """Run the interactive CLI loop.""" - while True: - action = input( - "\nChoose action ('s' for search, 'i' for rescan, 'd' for download, 'q' to quit): " - ).strip().lower() - - if action == "s": - self.search_mode() - elif action == "i": - print("\nRescanning series...\n") - self.rescan() - elif action == "d": - selected_series = self.get_user_selection() - if selected_series: - self.download_series(selected_series) - elif action in {"q", "quit", "exit"}: - print("Goodbye!") - break - else: - print("Unknown command. Please choose 's', 'i', 'd', or 'q'.") - - -def configure_logging() -> None: - """Set up a basic logging configuration for the CLI.""" - logging.basicConfig(level=logging.INFO, format=LOG_FORMAT) - logging.getLogger("urllib3.connectionpool").setLevel(logging.ERROR) - logging.getLogger("charset_normalizer").setLevel(logging.ERROR) - - -def main() -> None: - """Entry point for the CLI application.""" - configure_logging() - - default_dir = os.getenv("ANIME_DIRECTORY") - if not default_dir: - print( - "Environment variable ANIME_DIRECTORY is not set. Please configure it to the base anime directory." - ) - return - - app = SeriesCLI(default_dir) - app.run() - - -if __name__ == "__main__": - main() diff --git a/src/core/SerieScanner.py b/src/core/SerieScanner.py index f5acbf1..082dfef 100644 --- a/src/core/SerieScanner.py +++ b/src/core/SerieScanner.py @@ -3,25 +3,24 @@ SerieScanner - Scans directories for anime series and missing episodes. This module provides functionality to scan anime directories, identify missing episodes, and report progress through callback interfaces. + +Note: + This module is pure domain logic. Database operations are handled + by the service layer (AnimeService). 
""" +from __future__ import annotations import logging import os import re import traceback import uuid -from typing import Callable, Iterable, Iterator, Optional +from typing import Iterable, Iterator, Optional + +from events import Events from src.core.entities.series import Serie from src.core.exceptions.Exceptions import MatchNotFoundError, NoKeyFoundException -from src.core.interfaces.callbacks import ( - CallbackManager, - CompletionContext, - ErrorContext, - OperationType, - ProgressContext, - ProgressPhase, -) from src.core.providers.base_provider import Loader logger = logging.getLogger(__name__) @@ -34,13 +33,22 @@ class SerieScanner: Scans directories for anime series and identifies missing episodes. Supports progress callbacks for real-time scanning updates. + + Note: + This class is pure domain logic. Database operations are handled + by the service layer (AnimeService). Scan results are stored + in keyDict and can be retrieved after scanning. + + Example: + scanner = SerieScanner("/path/to/anime", loader) + scanner.scan() + # Results are in scanner.keyDict """ def __init__( self, basePath: str, loader: Loader, - callback_manager: Optional[CallbackManager] = None ) -> None: """ Initialize the SerieScanner. @@ -67,18 +75,84 @@ class SerieScanner: self.directory: str = abs_path self.keyDict: dict[str, Serie] = {} self.loader: Loader = loader - self._callback_manager: CallbackManager = ( - callback_manager or CallbackManager() - ) self._current_operation_id: Optional[str] = None + self.events = Events() + + self.events.on_progress = [] + self.events.on_error = [] + self.events.on_completion = [] logger.info("Initialized SerieScanner with base path: %s", abs_path) + + def _safe_call_event(self, event_handler, data: dict) -> None: + """Safely call an event handler if it exists. + + Args: + event_handler: Event handler attribute (e.g., self.events.on_progress) + data: Data dictionary to pass to the event handler + """ + if event_handler: + try: + # Event handlers are stored as lists, iterate over them + for handler in event_handler: + handler(data) + except Exception as e: + logger.error("Error calling event handler: %s", e, exc_info=True) - @property - def callback_manager(self) -> CallbackManager: - """Get the callback manager instance.""" - return self._callback_manager + def subscribe_on_progress(self, handler): + """ + Subscribe a handler to an event. + Args: + handler: Callable to handle the event + """ + if handler not in self.events.on_progress: + self.events.on_progress.append(handler) + def unsubscribe_on_progress(self, handler): + """ + Unsubscribe a handler from an event. + Args: + handler: Callable to remove + """ + if handler in self.events.on_progress: + self.events.on_progress.remove(handler) + + def subscribe_on_error(self, handler): + """ + Subscribe a handler to an event. + Args: + handler: Callable to handle the event + """ + if handler not in self.events.on_error: + self.events.on_error.append(handler) + + def unsubscribe_on_error(self, handler): + """ + Unsubscribe a handler from an event. + Args: + handler: Callable to remove + """ + if handler in self.events.on_error: + self.events.on_error.remove(handler) + + def subscribe_on_completion(self, handler): + """ + Subscribe a handler to an event. + Args: + handler: Callable to handle the event + """ + if handler not in self.events.on_completion: + self.events.on_completion.append(handler) + + def unsubscribe_on_completion(self, handler): + """ + Unsubscribe a handler from an event. 
+ Args: + handler: Callable to remove + """ + if handler in self.events.on_completion: + self.events.on_completion.remove(handler) + def reinit(self) -> None: """Reinitialize the series dictionary (keyed by serie.key).""" self.keyDict: dict[str, Serie] = {} @@ -92,15 +166,12 @@ class SerieScanner: result = self.__find_mp4_files() return sum(1 for _ in result) - def scan( - self, - callback: Optional[Callable[[str, int], None]] = None - ) -> None: + def scan(self) -> None: """ Scan directories for anime series and missing episodes. - Args: - callback: Optional legacy callback function (folder, count) + Results are stored in self.keyDict and can be retrieved after + scanning. Data files are also saved to disk for persistence. Raises: Exception: If scan fails critically @@ -111,16 +182,16 @@ class SerieScanner: logger.info("Starting scan for missing episodes") # Notify scan starting - self._callback_manager.notify_progress( - ProgressContext( - operation_type=OperationType.SCAN, - operation_id=self._current_operation_id, - phase=ProgressPhase.STARTING, - current=0, - total=0, - percentage=0.0, - message="Initializing scan" - ) + self._safe_call_event( + self.events.on_progress, + { + "operation_id": self._current_operation_id, + "phase": "STARTING", + "current": 0, + "total": 0, + "percentage": 0.0, + "message": "Initializing scan" + } ) try: @@ -144,27 +215,20 @@ class SerieScanner: else: percentage = 0.0 - # Progress is surfaced both through the callback manager - # (for the web/UI layer) and, for compatibility, through a - # legacy callback that updates CLI progress bars. # Notify progress - self._callback_manager.notify_progress( - ProgressContext( - operation_type=OperationType.SCAN, - operation_id=self._current_operation_id, - phase=ProgressPhase.IN_PROGRESS, - current=counter, - total=total_to_scan, - percentage=percentage, - message=f"Scanning: {folder}", - details=f"Found {len(mp4_files)} episodes" - ) + self._safe_call_event( + self.events.on_progress, + { + "operation_id": self._current_operation_id, + "phase": "IN_PROGRESS", + "current": counter, + "total": total_to_scan, + "percentage": percentage, + "message": f"Scanning: {folder}", + "details": f"Found {len(mp4_files)} episodes" + } ) - # Call legacy callback if provided - if callback: - callback(folder, counter) - serie = self.__read_data_from_file(folder) if ( serie is not None @@ -211,15 +275,15 @@ class SerieScanner: error_msg = f"Error processing folder '{folder}': {nkfe}" logger.error(error_msg) - self._callback_manager.notify_error( - ErrorContext( - operation_type=OperationType.SCAN, - operation_id=self._current_operation_id, - error=nkfe, - message=error_msg, - recoverable=True, - metadata={"folder": folder, "key": None} - ) + self._safe_call_event( + self.events.on_error, + { + "operation_id": self._current_operation_id, + "error": nkfe, + "message": error_msg, + "recoverable": True, + "metadata": {"folder": folder, "key": None} + } ) except Exception as e: # Log error and notify via callback @@ -233,30 +297,30 @@ class SerieScanner: traceback.format_exc() ) - self._callback_manager.notify_error( - ErrorContext( - operation_type=OperationType.SCAN, - operation_id=self._current_operation_id, - error=e, - message=error_msg, - recoverable=True, - metadata={"folder": folder, "key": None} - ) + self._safe_call_event( + self.events.on_error, + { + "operation_id": self._current_operation_id, + "error": e, + "message": error_msg, + "recoverable": True, + "metadata": {"folder": folder, "key": None} + } ) continue # Notify scan 
completion - self._callback_manager.notify_completion( - CompletionContext( - operation_type=OperationType.SCAN, - operation_id=self._current_operation_id, - success=True, - message=f"Scan completed. Processed {counter} folders.", - statistics={ + self._safe_call_event( + self.events.on_completion, + { + "operation_id": self._current_operation_id, + "success": True, + "message": f"Scan completed. Processed {counter} folders.", + "statistics": { "total_folders": counter, "series_found": len(self.keyDict) } - ) + } ) logger.info( @@ -270,23 +334,23 @@ class SerieScanner: error_msg = f"Critical scan error: {e}" logger.error("%s\n%s", error_msg, traceback.format_exc()) - self._callback_manager.notify_error( - ErrorContext( - operation_type=OperationType.SCAN, - operation_id=self._current_operation_id, - error=e, - message=error_msg, - recoverable=False - ) + self._safe_call_event( + self.events.on_error, + { + "operation_id": self._current_operation_id, + "error": e, + "message": error_msg, + "recoverable": False + } ) - self._callback_manager.notify_completion( - CompletionContext( - operation_type=OperationType.SCAN, - operation_id=self._current_operation_id, - success=False, - message=error_msg - ) + self._safe_call_event( + self.events.on_completion, + { + "operation_id": self._current_operation_id, + "success": False, + "message": error_msg + } ) raise @@ -306,16 +370,6 @@ class SerieScanner: has_files = True yield anime_name, mp4_files if has_files else [] - def __remove_year(self, input_string: str) -> str: - """Remove year information from input string.""" - cleaned_string = re.sub(r'\(\d{4}\)', '', input_string).strip() - logger.debug( - "Removed year from '%s' -> '%s'", - input_string, - cleaned_string - ) - return cleaned_string - def __read_data_from_file(self, folder_name: str) -> Optional[Serie]: """Read serie data from file or key file. @@ -442,3 +496,185 @@ class SerieScanner: episodes_dict[season] = missing_episodes return episodes_dict, "aniworld.to" + + def scan_single_series( + self, + key: str, + folder: str, + ) -> dict[int, list[int]]: + """ + Scan a single series for missing episodes. + + This method performs a targeted scan for only the specified series, + without triggering a full library rescan. It fetches available + episodes from the provider and compares with local files. + + Args: + key: The unique provider key for the series + folder: The filesystem folder name where the series is stored + + Returns: + dict[int, list[int]]: Dictionary mapping season numbers to lists + of missing episode numbers. Empty dict if no missing episodes. + + Raises: + ValueError: If key or folder is empty + + Example: + >>> scanner = SerieScanner("/path/to/anime", loader) + >>> missing = scanner.scan_single_series( + ... "attack-on-titan", + ... "Attack on Titan" + ... 
) + >>> print(missing) + {1: [5, 6, 7], 2: [1, 2]} + """ + if not key or not key.strip(): + raise ValueError("Series key cannot be empty") + if not folder or not folder.strip(): + raise ValueError("Series folder cannot be empty") + + logger.info( + "Starting targeted scan for series: %s (folder: %s)", + key, + folder + ) + + # Generate unique operation ID for this targeted scan + operation_id = str(uuid.uuid4()) + # Notify scan starting + self._safe_call_event( + self.events.on_progress, + { + "operation_id": operation_id, + "phase": "STARTING", + "current": 0, + "total": 1, + "percentage": 0.0, + "message": f"Scanning series: {folder}", + "details": f"Key: {key}" + } + ) + + try: + # Get the folder path + folder_path = os.path.join(self.directory, folder) + + # Check if folder exists + if not os.path.isdir(folder_path): + logger.info( + "Series folder does not exist yet: %s - " + "will scan for available episodes from provider", + folder_path + ) + mp4_files: list[str] = [] + else: + # Find existing MP4 files in the folder + mp4_files = [] + for root, _, files in os.walk(folder_path): + for file in files: + if file.endswith(".mp4"): + mp4_files.append(os.path.join(root, file)) + + logger.debug( + "Found %d existing MP4 files in folder %s", + len(mp4_files), + folder + ) + + # Get missing episodes from provider + missing_episodes, site = self.__get_missing_episodes_and_season( + key, mp4_files + ) + + # Update progress + self._safe_call_event( + self.events.on_progress, + { + "operation_id": operation_id, + "phase": "IN_PROGRESS", + "current": 1, + "total": 1, + "percentage": 100.0, + "message": f"Scanned: {folder}", + "details": f"Found {sum(len(eps) for eps in missing_episodes.values())} missing episodes" + } + ) + + # Create or update Serie in keyDict + if key in self.keyDict: + # Update existing serie + self.keyDict[key].episodeDict = missing_episodes + logger.debug( + "Updated existing series %s with %d missing episodes", + key, + sum(len(eps) for eps in missing_episodes.values()) + ) + else: + # Create new serie entry + serie = Serie( + key=key, + name="", # Will be populated by caller if needed + site=site, + folder=folder, + episodeDict=missing_episodes + ) + self.keyDict[key] = serie + logger.debug( + "Created new series entry for %s with %d missing episodes", + key, + sum(len(eps) for eps in missing_episodes.values()) + ) + + # Notify completion + self._safe_call_event( + self.events.on_completion, + { + "operation_id": operation_id, + "success": True, + "message": f"Scan completed for {folder}", + "statistics": { + "missing_episodes": sum( + len(eps) for eps in missing_episodes.values() + ), + "seasons_with_missing": len(missing_episodes) + } + } + ) + + logger.info( + "Targeted scan completed for %s: %d missing episodes across %d seasons", + key, + sum(len(eps) for eps in missing_episodes.values()), + len(missing_episodes) + ) + + return missing_episodes + + except Exception as e: + error_msg = f"Failed to scan series {key}: {e}" + logger.error(error_msg, exc_info=True) + + # Notify error + self._safe_call_event( + self.events.on_error, + { + "operation_id": operation_id, + "error": e, + "message": error_msg, + "recoverable": True, + "metadata": {"key": key, "folder": folder} + } + ) + # Notify completion with failure + self._safe_call_event( + self.events.on_completion, + { + "operation_id": operation_id, + "success": False, + "message": error_msg + } + ) + # Return empty dict on error (scan failed but not critical) + return {} + diff --git a/src/core/SeriesApp.py 
b/src/core/SeriesApp.py index 0b5df93..3f80c68 100644 --- a/src/core/SeriesApp.py +++ b/src/core/SeriesApp.py @@ -4,10 +4,15 @@ SeriesApp - Core application logic for anime series management. This module provides the main application interface for searching, downloading, and managing anime series with support for async callbacks, progress reporting, and error handling. + +Note: + This module is pure domain logic with no database dependencies. + Database operations are handled by the service layer (AnimeService). """ import asyncio import logging +from concurrent.futures import ThreadPoolExecutor from typing import Any, Dict, List, Optional from events import Events @@ -119,6 +124,10 @@ class SeriesApp: - Managing series lists Supports async callbacks for progress reporting. + + Note: + This class is now pure domain logic with no database dependencies. + Database operations are handled by the service layer (AnimeService). Events: download_status: Raised when download status changes. @@ -140,15 +149,19 @@ class SeriesApp: self.directory_to_search = directory_to_search + # Initialize thread pool executor + self.executor = ThreadPoolExecutor(max_workers=3) + # Initialize events self._events = Events() - self._events.download_status = None - self._events.scan_status = None self.loaders = Loaders() self.loader = self.loaders.GetLoader(key="aniworld.to") - self.serie_scanner = SerieScanner(directory_to_search, self.loader) + self.serie_scanner = SerieScanner( + directory_to_search, self.loader + ) self.list = SerieList(self.directory_to_search) + self.series_list: List[Any] = [] # Synchronous init used during constructor to avoid awaiting # in __init__ self._init_list_sync() @@ -187,6 +200,26 @@ class SeriesApp: def scan_status(self, value): """Set scan_status event handler.""" self._events.scan_status = value + + def load_series_from_list(self, series: list) -> None: + """ + Load series into the in-memory list. + + This method is called by the service layer after loading + series from the database. + + Args: + series: List of Serie objects to load + """ + self.list.keyDict.clear() + for serie in series: + self.list.keyDict[serie.key] = serie + self.series_list = self.list.GetMissingEpisode() + logger.debug( + "Loaded %d series with %d having missing episodes", + len(series), + len(self.series_list) + ) def _init_list_sync(self) -> None: """Synchronous initialization helper for constructor.""" @@ -198,7 +231,9 @@ class SeriesApp: async def _init_list(self) -> None: """Initialize the series list with missing episodes (async).""" - self.series_list = await asyncio.to_thread( + loop = asyncio.get_running_loop() + self.series_list = await loop.run_in_executor( + self.executor, self.list.GetMissingEpisode ) logger.debug( @@ -220,7 +255,12 @@ class SeriesApp: RuntimeError: If search fails """ logger.info("Searching for: %s", words) - results = await asyncio.to_thread(self.loader.search, words) + loop = asyncio.get_running_loop() + results = await loop.run_in_executor( + self.executor, + self.loader.search, + words + ) logger.info("Found %d results", len(results)) return results @@ -255,6 +295,7 @@ class SeriesApp: lookups. The 'serie_folder' parameter is only used for filesystem operations. 
""" + logger.info( "Starting download: %s (key: %s) S%02dE%02d", serie_folder, @@ -277,9 +318,10 @@ class SeriesApp: ) try: - def download_callback(progress_info): + def download_progress_handler(progress_info): + """Handle download progress events from loader.""" logger.debug( - "wrapped_callback called with: %s", progress_info + "download_progress_handler called with: %s", progress_info ) downloaded = progress_info.get('downloaded_bytes', 0) @@ -309,17 +351,28 @@ class SeriesApp: item_id=item_id, ) ) - # Perform download in thread to avoid blocking event loop - download_success = await asyncio.to_thread( - self.loader.download, - self.directory_to_search, - serie_folder, - season, - episode, - key, - language, - download_callback - ) + + # Subscribe to loader's download progress events + self.loader.subscribe_download_progress(download_progress_handler) + + try: + # Perform download in thread to avoid blocking event loop + loop = asyncio.get_running_loop() + download_success = await loop.run_in_executor( + self.executor, + self.loader.download, + self.directory_to_search, + serie_folder, + season, + episode, + key, + language + ) + finally: + # Always unsubscribe after download completes or fails + self.loader.unsubscribe_download_progress( + download_progress_handler + ) if download_success: logger.info( @@ -367,7 +420,30 @@ class SeriesApp: return download_success - except Exception as e: + except InterruptedError: + # Download was cancelled - propagate the cancellation + logger.info( + "Download cancelled: %s (key: %s) S%02dE%02d", + serie_folder, + key, + season, + episode, + ) + # Fire download cancelled event + self._events.download_status( + DownloadStatusEventArgs( + serie_folder=serie_folder, + key=key, + season=season, + episode=episode, + status="cancelled", + message="Download cancelled by user", + item_id=item_id, + ) + ) + raise # Re-raise to propagate cancellation + + except Exception as e: # pylint: disable=broad-except logger.error( "Download error: %s (key: %s) S%02dE%02d - %s", serie_folder, @@ -394,23 +470,40 @@ class SeriesApp: return False - async def rescan(self) -> int: + async def rescan(self) -> list: """ Rescan directory for missing episodes (async). + + This method performs a file-based scan and returns the results. + Database persistence is handled by the service layer (AnimeService). Returns: - Number of series with missing episodes after rescan. + List of Serie objects found during scan with their + missing episodes. + + Note: + This method no longer saves to database directly. The returned + list should be persisted by the caller (AnimeService). 
""" logger.info("Starting directory rescan") + total_to_scan = 0 + try: # Get total items to scan - total_to_scan = await asyncio.to_thread( + logger.info("Getting total items to scan...") + loop = asyncio.get_running_loop() + total_to_scan = await loop.run_in_executor( + self.executor, self.serie_scanner.get_total_to_scan ) logger.info("Total folders to scan: %d", total_to_scan) # Fire scan started event + logger.info( + "Firing scan_status 'started' event, handler=%s", + self._events.scan_status + ) self._events.scan_status( ScanStatusEventArgs( current=0, @@ -423,37 +516,60 @@ class SeriesApp: ) # Reinitialize scanner - await asyncio.to_thread(self.serie_scanner.reinit) - - def scan_callback(folder: str, current: int): - # Calculate progress - if total_to_scan > 0: - progress = current / total_to_scan - else: - progress = 0.0 + await loop.run_in_executor( + self.executor, + self.serie_scanner.reinit + ) + def scan_progress_handler(progress_data): + """Handle scan progress events from scanner.""" # Fire scan progress event + message = progress_data.get('message', '') + folder = message.replace('Scanning: ', '') self._events.scan_status( ScanStatusEventArgs( - current=current, - total=total_to_scan, + current=progress_data.get('current', 0), + total=progress_data.get('total', total_to_scan), folder=folder, status="progress", - progress=progress, - message=f"Scanning: {folder}", + progress=( + progress_data.get('percentage', 0.0) / 100.0 + ), + message=message, ) ) - # Perform scan - await asyncio.to_thread(self.serie_scanner.scan, scan_callback) + # Subscribe to scanner's progress events + self.serie_scanner.subscribe_on_progress(scan_progress_handler) + + try: + # Perform scan (file-based, returns results in scanner.keyDict) + await loop.run_in_executor( + self.executor, + self.serie_scanner.scan + ) + finally: + # Always unsubscribe after scan completes or fails + self.serie_scanner.unsubscribe_on_progress( + scan_progress_handler + ) + + # Get scanned series from scanner + scanned_series = list(self.serie_scanner.keyDict.values()) - # Reinitialize list - self.list = SerieList(self.directory_to_search) - await self._init_list() + # Update in-memory list with scan results + self.list.keyDict.clear() + for serie in scanned_series: + self.list.keyDict[serie.key] = serie + self.series_list = self.list.GetMissingEpisode() logger.info("Directory rescan completed successfully") # Fire scan completed event + logger.info( + "Firing scan_status 'completed' event, handler=%s", + self._events.scan_status + ) self._events.scan_status( ScanStatusEventArgs( current=total_to_scan, @@ -468,7 +584,7 @@ class SeriesApp: ) ) - return len(self.series_list) + return scanned_series except InterruptedError: logger.warning("Scan cancelled by user") @@ -477,7 +593,7 @@ class SeriesApp: self._events.scan_status( ScanStatusEventArgs( current=0, - total=total_to_scan if 'total_to_scan' in locals() else 0, + total=total_to_scan, folder="", status="cancelled", message="Scan cancelled by user", @@ -492,7 +608,7 @@ class SeriesApp: self._events.scan_status( ScanStatusEventArgs( current=0, - total=total_to_scan if 'total_to_scan' in locals() else 0, + total=total_to_scan, folder="", status="failed", error=e, @@ -536,3 +652,66 @@ class SeriesApp: looks up series by their unique key, not by folder name. """ return self.list.get_by_key(key) + + def get_all_series_from_data_files(self) -> List[Serie]: + """ + Get all series from data files in the anime directory. 
+ + Scans the directory_to_search for all 'data' files and loads + the Serie metadata from each file. This method is synchronous + and can be wrapped with asyncio.to_thread if needed for async + contexts. + + Returns: + List of Serie objects found in data files. Returns an empty + list if no data files are found or if the directory doesn't + exist. + + Example: + series_app = SeriesApp("/path/to/anime") + all_series = series_app.get_all_series_from_data_files() + for serie in all_series: + print(f"Found: {serie.name} (key={serie.key})") + """ + logger.info( + "Scanning for data files in directory: %s", + self.directory_to_search + ) + + # Create a fresh SerieList instance for file-based loading + # This ensures we get all series from data files without + # interfering with the main instance's state + try: + temp_list = SerieList( + self.directory_to_search, + skip_load=False # Allow automatic loading + ) + except (OSError, ValueError) as e: + logger.error( + "Failed to scan directory for data files: %s", + str(e), + exc_info=True + ) + return [] + + # Get all series from the temporary list + all_series = temp_list.get_all() + + logger.info( + "Found %d series from data files in %s", + len(all_series), + self.directory_to_search + ) + + return all_series + + def shutdown(self) -> None: + """ + Shutdown the thread pool executor. + + Should be called when the SeriesApp instance is no longer needed + to properly clean up resources. + """ + if hasattr(self, 'executor'): + self.executor.shutdown(wait=True) + logger.info("ThreadPoolExecutor shut down successfully") diff --git a/src/core/entities/SerieList.py b/src/core/entities/SerieList.py index bb82b10..ef232cc 100644 --- a/src/core/entities/SerieList.py +++ b/src/core/entities/SerieList.py @@ -1,4 +1,14 @@ -"""Utilities for loading and managing stored anime series metadata.""" +"""Utilities for loading and managing stored anime series metadata. + +This module provides the SerieList class for managing collections of anime +series metadata. It uses file-based storage only. + +Note: + This module is part of the core domain layer and has no database + dependencies. All database operations are handled by the service layer. +""" + +from __future__ import annotations import logging import os @@ -8,6 +18,8 @@ from typing import Dict, Iterable, List, Optional from src.core.entities.series import Serie +logger = logging.getLogger(__name__) + class SerieList: """ @@ -15,34 +27,84 @@ class SerieList: Series are identified by their unique 'key' (provider identifier). The 'folder' is metadata only and not used for lookups. + + This class manages in-memory series data loaded from filesystem. + It has no database dependencies - all persistence is handled by + the service layer. + + Example: + # File-based mode + serie_list = SerieList("/path/to/anime") + series = serie_list.get_all() + + Attributes: + directory: Path to the anime directory + keyDict: Internal dictionary mapping serie.key to Serie objects """ - def __init__(self, base_path: str) -> None: + def __init__( + self, + base_path: str, + skip_load: bool = False + ) -> None: + """Initialize the SerieList. + + Args: + base_path: Path to the anime directory + skip_load: If True, skip automatic loading of series from files. + Useful when planning to load from database instead. 
+ """ self.directory: str = base_path # Internal storage using serie.key as the dictionary key self.keyDict: Dict[str, Serie] = {} - self.load_series() - - def add(self, serie: Serie) -> None: - """ - Persist a new series if it is not already present. - Uses serie.key for identification. The serie.folder is used for - filesystem operations only. + # Only auto-load from files if not skipping + if not skip_load: + self.load_series() + + def add(self, serie: Serie, use_sanitized_folder: bool = True) -> str: + """ + Persist a new series if it is not already present (file-based mode). + + Uses serie.key for identification. Creates the filesystem folder + using either the sanitized display name (default) or the existing + folder property. Args: serie: The Serie instance to add + use_sanitized_folder: If True (default), use serie.sanitized_folder + for the filesystem folder name based on display name. + If False, use serie.folder as-is for backward compatibility. + + Returns: + str: The folder path that was created/used + + Note: + This method creates data files on disk. For database storage, + use add_to_db() instead. """ if self.contains(serie.key): - return + # Return existing folder path + existing = self.keyDict[serie.key] + return os.path.join(self.directory, existing.folder) - data_path = os.path.join(self.directory, serie.folder, "data") - anime_path = os.path.join(self.directory, serie.folder) + # Determine folder name to use + if use_sanitized_folder: + folder_name = serie.sanitized_folder + # Update the serie's folder property to match what we create + serie.folder = folder_name + else: + folder_name = serie.folder + + data_path = os.path.join(self.directory, folder_name, "data") + anime_path = os.path.join(self.directory, folder_name) os.makedirs(anime_path, exist_ok=True) if not os.path.isfile(data_path): serie.save_to_file(data_path) # Store by key, not folder self.keyDict[serie.key] = serie + + return anime_path def contains(self, key: str) -> bool: """ diff --git a/src/core/entities/series.py b/src/core/entities/series.py index 410c84c..1d8ad7c 100644 --- a/src/core/entities/series.py +++ b/src/core/entities/series.py @@ -1,4 +1,7 @@ import json +import warnings + +from src.server.utils.filesystem import sanitize_folder_name class Serie: @@ -126,6 +129,35 @@ class Serie: def episodeDict(self, value: dict[int, list[int]]): self._episodeDict = value + @property + def sanitized_folder(self) -> str: + """ + Get a filesystem-safe folder name derived from the display name. + + This property returns a sanitized version of the series name + suitable for use as a filesystem folder name. It removes/replaces + characters that are invalid for filesystems while preserving + Unicode characters. + + Use this property when creating folders for the series on disk. + The `folder` property stores the actual folder name used. + + Returns: + str: Filesystem-safe folder name based on display name + + Example: + >>> serie = Serie("attack-on-titan", "Attack on Titan: Final", ...) 
+ >>> serie.sanitized_folder + 'Attack on Titan Final' + """ + # Use name if available, fall back to folder, then key + name_to_sanitize = self._name or self._folder or self._key + try: + return sanitize_folder_name(name_to_sanitize) + except ValueError: + # Fallback to key if name cannot be sanitized + return sanitize_folder_name(self._key) + def to_dict(self): """Convert Serie object to dictionary for JSON serialization.""" return { @@ -154,13 +186,46 @@ class Serie: ) def save_to_file(self, filename: str): - """Save Serie object to JSON file.""" + """Save Serie object to JSON file. + + .. deprecated:: + File-based storage is deprecated. Use database storage via + `AnimeSeriesService.create()` instead. This method will be + removed in v3.0.0. + + Args: + filename: Path to save the JSON file + """ + warnings.warn( + "save_to_file() is deprecated and will be removed in v3.0.0. " + "Use database storage via AnimeSeriesService.create() instead.", + DeprecationWarning, + stacklevel=2 + ) with open(filename, "w", encoding="utf-8") as file: json.dump(self.to_dict(), file, indent=4) @classmethod def load_from_file(cls, filename: str) -> "Serie": - """Load Serie object from JSON file.""" + """Load Serie object from JSON file. + + .. deprecated:: + File-based storage is deprecated. Use database storage via + `AnimeSeriesService.get_by_key()` instead. This method will be + removed in v3.0.0. + + Args: + filename: Path to load the JSON file from + + Returns: + Serie: The loaded Serie object + """ + warnings.warn( + "load_from_file() is deprecated and will be removed in v3.0.0. " + "Use database storage via AnimeSeriesService instead.", + DeprecationWarning, + stacklevel=2 + ) with open(filename, "r", encoding="utf-8") as file: data = json.load(file) return cls.from_dict(data) diff --git a/src/core/providers/aniworld_provider.py b/src/core/providers/aniworld_provider.py index a3f2e15..3789c63 100644 --- a/src/core/providers/aniworld_provider.py +++ b/src/core/providers/aniworld_provider.py @@ -1,18 +1,22 @@ + import html import json import logging import os import re import shutil +import threading from pathlib import Path from urllib.parse import quote import requests from bs4 import BeautifulSoup +from events import Events from fake_useragent import UserAgent from requests.adapters import HTTPAdapter from urllib3.util.retry import Retry from yt_dlp import YoutubeDL +from yt_dlp.utils import DownloadCancelled from ..interfaces.providers import Providers from .base_provider import Loader @@ -71,6 +75,9 @@ class AniworldLoader(Loader): self.ANIWORLD_TO = "https://aniworld.to" self.session = requests.Session() + # Cancellation flag for graceful shutdown + self._cancel_flag = threading.Event() + # Configure retries with backoff retries = Retry( total=5, # Number of retries @@ -91,6 +98,23 @@ class AniworldLoader(Loader): self._EpisodeHTMLDict = {} self.Providers = Providers() + # Events: download_progress is triggered with progress dict + self.events = Events() + + def subscribe_download_progress(self, handler): + """Subscribe a handler to the download_progress event. + Args: + handler: Callable to be called with progress dict. + """ + self.events.download_progress += handler + + def unsubscribe_download_progress(self, handler): + """Unsubscribe a handler from the download_progress event. + Args: + handler: Callable previously subscribed. 
+ """ + self.events.download_progress -= handler + def clear_cache(self): """Clear the cached HTML data.""" logging.debug("Clearing HTML cache") @@ -196,7 +220,7 @@ class AniworldLoader(Loader): is_available = language_code in languages logging.debug(f"Available languages for S{season:02}E{episode:03}: {languages}, requested: {language_code}, available: {is_available}") - return is_available + return is_available def download( self, @@ -205,8 +229,7 @@ class AniworldLoader(Loader): season: int, episode: int, key: str, - language: str = "German Dub", - progress_callback=None + language: str = "German Dub" ) -> bool: """Download episode to specified directory. @@ -219,8 +242,6 @@ class AniworldLoader(Loader): key: Series unique identifier from provider (used for identification and API calls) language: Audio language preference (default: German Dub) - progress_callback: Optional callback for download progress - Returns: bool: True if download succeeded, False otherwise """ @@ -266,6 +287,16 @@ class AniworldLoader(Loader): season, episode, key, language ) logging.debug("Direct link obtained from provider") + + cancel_flag = self._cancel_flag + + def events_progress_hook(d): + if cancel_flag.is_set(): + logging.info("Cancellation detected in progress hook") + raise DownloadCancelled("Download cancelled by user") + # Fire the event for progress + self.events.download_progress(d) + ydl_opts = { 'fragment_retries': float('inf'), 'outtmpl': temp_path, @@ -273,30 +304,18 @@ class AniworldLoader(Loader): 'no_warnings': True, 'progress_with_newline': False, 'nocheckcertificate': True, + 'progress_hooks': [events_progress_hook], } if header: ydl_opts['http_headers'] = header logging.debug("Using custom headers for download") - if progress_callback: - # Wrap the callback to add logging - def logged_progress_callback(d): - logging.debug( - f"YT-DLP progress: status={d.get('status')}, " - f"downloaded={d.get('downloaded_bytes')}, " - f"total={d.get('total_bytes')}, " - f"speed={d.get('speed')}" - ) - progress_callback(d) - - ydl_opts['progress_hooks'] = [logged_progress_callback] - logging.debug("Progress callback registered with YT-DLP") try: logging.debug("Starting YoutubeDL download") logging.debug(f"Download link: {link[:100]}...") logging.debug(f"YDL options: {ydl_opts}") - + with YoutubeDL(ydl_opts) as ydl: info = ydl.extract_info(link, download=True) logging.debug( @@ -325,17 +344,15 @@ class AniworldLoader(Loader): f"Broken pipe error with provider {provider}: {e}. " f"This usually means the stream connection was closed." ) - # Try next provider if available continue except Exception as e: logging.error( f"YoutubeDL download failed with provider {provider}: " f"{type(e).__name__}: {e}" ) - # Try next provider if available continue break - + # If we get here, all providers failed logging.error("All download providers failed") self.clear_cache() diff --git a/src/core/providers/base_provider.py b/src/core/providers/base_provider.py index 2058aef..5ecd51b 100644 --- a/src/core/providers/base_provider.py +++ b/src/core/providers/base_provider.py @@ -1,9 +1,21 @@ from abc import ABC, abstractmethod -from typing import Any, Callable, Dict, List, Optional +from typing import Any, Dict, List class Loader(ABC): """Abstract base class for anime data loaders/providers.""" + @abstractmethod + def subscribe_download_progress(self, handler): + """Subscribe a handler to the download_progress event. + Args: + handler: Callable to be called with progress dict. 
+ """ + @abstractmethod + def unsubscribe_download_progress(self, handler): + """Unsubscribe a handler from the download_progress event. + Args: + handler: Callable previously subscribed. + """ @abstractmethod def search(self, word: str) -> List[Dict[str, Any]]: @@ -44,8 +56,7 @@ class Loader(ABC): season: int, episode: int, key: str, - language: str = "German Dub", - progress_callback: Optional[Callable[[str, Dict], None]] = None, + language: str = "German Dub" ) -> bool: """Download episode to specified directory. @@ -56,8 +67,6 @@ class Loader(ABC): episode: Episode number within season key: Unique series identifier/key language: Language version to download (default: German Dub) - progress_callback: Optional callback for progress updates - called with (event_type: str, data: Dict) Returns: True if download successful, False otherwise diff --git a/src/infrastructure/security/database_integrity.py b/src/infrastructure/security/database_integrity.py index acecfe6..66dee66 100644 --- a/src/infrastructure/security/database_integrity.py +++ b/src/infrastructure/security/database_integrity.py @@ -229,37 +229,6 @@ class DatabaseIntegrityChecker: logger.warning(msg) issues_found += count - # Check for invalid progress percentages - stmt = select(DownloadQueueItem).where( - (DownloadQueueItem.progress < 0) | - (DownloadQueueItem.progress > 100) - ) - invalid_progress = self.session.execute(stmt).scalars().all() - - if invalid_progress: - count = len(invalid_progress) - msg = ( - f"Found {count} queue items with invalid progress " - f"percentages" - ) - self.issues.append(msg) - logger.warning(msg) - issues_found += count - - # Check for queue items with invalid status - valid_statuses = {'pending', 'downloading', 'completed', 'failed'} - stmt = select(DownloadQueueItem).where( - ~DownloadQueueItem.status.in_(valid_statuses) - ) - invalid_status = self.session.execute(stmt).scalars().all() - - if invalid_status: - count = len(invalid_status) - msg = f"Found {count} queue items with invalid status" - self.issues.append(msg) - logger.warning(msg) - issues_found += count - if issues_found == 0: logger.info("No data consistency issues found") diff --git a/src/server/api/anime.py b/src/server/api/anime.py index 04beb19..55c3944 100644 --- a/src/server/api/anime.py +++ b/src/server/api/anime.py @@ -1,17 +1,28 @@ import logging +import os import warnings from typing import Any, List, Optional from fastapi import APIRouter, Depends, HTTPException, status from pydantic import BaseModel, Field +from sqlalchemy.ext.asyncio import AsyncSession from src.core.entities.series import Serie +from src.server.database.service import AnimeSeriesService +from src.server.exceptions import ( + BadRequestError, + NotFoundError, + ServerError, + ValidationError, +) from src.server.services.anime_service import AnimeService, AnimeServiceError from src.server.utils.dependencies import ( get_anime_service, + get_optional_database_session, get_series_app, require_auth, ) +from src.server.utils.filesystem import sanitize_folder_name logger = logging.getLogger(__name__) @@ -52,9 +63,8 @@ async def get_anime_status( "series_count": series_count } except Exception as exc: - raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail=f"Failed to get status: {str(exc)}", + raise ServerError( + message=f"Failed to get status: {str(exc)}" ) from exc @@ -73,6 +83,7 @@ class AnimeSummary(BaseModel): site: Provider site URL folder: Filesystem folder name (metadata only) missing_episodes: Episode dictionary 
mapping seasons to episode numbers + has_missing: Boolean flag indicating if series has missing episodes link: Optional link to the series page (used when adding new series) """ key: str = Field( @@ -95,6 +106,10 @@ class AnimeSummary(BaseModel): ..., description="Episode dictionary: {season: [episode_numbers]}" ) + has_missing: bool = Field( + default=False, + description="Whether the series has any missing episodes" + ) link: Optional[str] = Field( default="", description="Link to the series page (for adding new series)" @@ -109,6 +124,7 @@ class AnimeSummary(BaseModel): "site": "aniworld.to", "folder": "beheneko the elf girls cat (2025)", "missing_episodes": {"1": [1, 2, 3, 4]}, + "has_missing": True, "link": "https://aniworld.to/anime/stream/beheneko" } } @@ -173,11 +189,14 @@ async def list_anime( _auth: dict = Depends(require_auth), series_app: Any = Depends(get_series_app), ) -> List[AnimeSummary]: - """List library series that still have missing episodes. + """List all library series with their missing episodes status. Returns AnimeSummary objects where `key` is the primary identifier used for all operations. The `folder` field is metadata only and should not be used for lookups. + + All series are returned, with `has_missing` flag indicating whether + a series has any missing episodes. Args: page: Page number for pagination (must be positive) @@ -196,6 +215,7 @@ async def list_anime( - site: Provider site - folder: Filesystem folder name (metadata only) - missing_episodes: Dict mapping seasons to episode numbers + - has_missing: Whether the series has any missing episodes Raises: HTTPException: When the underlying lookup fails or params invalid. @@ -205,35 +225,30 @@ async def list_anime( try: page_num = int(page) if page_num < 1: - raise HTTPException( - status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, - detail="Page number must be positive" + raise ValidationError( + message="Page number must be positive" ) page = page_num except (ValueError, TypeError): - raise HTTPException( - status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, - detail="Page must be a valid number" + raise ValidationError( + message="Page must be a valid number" ) if per_page is not None: try: per_page_num = int(per_page) if per_page_num < 1: - raise HTTPException( - status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, - detail="Per page must be positive" + raise ValidationError( + message="Per page must be positive" ) if per_page_num > 1000: - raise HTTPException( - status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, - detail="Per page cannot exceed 1000" + raise ValidationError( + message="Per page cannot exceed 1000" ) per_page = per_page_num except (ValueError, TypeError): - raise HTTPException( - status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, - detail="Per page must be a valid number" + raise ValidationError( + message="Per page must be a valid number" ) # Validate sort_by parameter to prevent ORM injection @@ -242,9 +257,8 @@ async def list_anime( allowed_sort_fields = ["title", "id", "missing_episodes", "name"] if sort_by not in allowed_sort_fields: allowed = ", ".join(allowed_sort_fields) - raise HTTPException( - status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, - detail=f"Invalid sort_by parameter. Allowed: {allowed}" + raise ValidationError( + message=f"Invalid sort_by parameter. 
Allowed: {allowed}" ) # Validate filter parameter @@ -257,17 +271,16 @@ async def list_anime( lower_filter = filter.lower() for pattern in dangerous_patterns: if pattern in lower_filter: - raise HTTPException( - status_code=status.HTTP_422_UNPROCESSABLE_ENTITY, - detail="Invalid filter parameter" + raise ValidationError( + message="Invalid filter parameter" ) try: - # Get missing episodes from series app + # Get all series from series app if not hasattr(series_app, "list"): return [] - series = series_app.list.GetMissingEpisode() + series = series_app.list.GetList() summaries: List[AnimeSummary] = [] for serie in series: # Get all properties from the serie object @@ -280,6 +293,9 @@ async def list_anime( # Convert episode dict keys to strings for JSON serialization missing_episodes = {str(k): v for k, v in episode_dict.items()} + # Determine if series has missing episodes + has_missing = bool(episode_dict) + summaries.append( AnimeSummary( key=key, @@ -287,6 +303,7 @@ async def list_anime( site=site, folder=folder, missing_episodes=missing_episodes, + has_missing=has_missing, ) ) @@ -307,12 +324,11 @@ async def list_anime( ) return summaries - except HTTPException: + except (ValidationError, BadRequestError, NotFoundError, ServerError): raise except Exception as exc: - raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail="Failed to retrieve anime list", + raise ServerError( + message="Failed to retrieve anime list" ) from exc @@ -343,17 +359,40 @@ async def trigger_rescan( "message": "Rescan started successfully", } except AnimeServiceError as e: - raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail=f"Rescan failed: {str(e)}", + raise ServerError( + message=f"Rescan failed: {str(e)}" ) from e except Exception as exc: - raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail="Failed to start rescan", + raise ServerError( + message="Failed to start rescan" ) from exc +@router.get("/scan/status") +async def get_scan_status( + _auth: dict = Depends(require_auth), + anime_service: AnimeService = Depends(get_anime_service), +) -> dict: + """Get the current scan status. + + Returns the current state of any ongoing library scan, + useful for restoring UI state after page reload. + + Args: + _auth: Ensures the caller is authenticated (value unused) + anime_service: AnimeService instance provided via dependency. + + Returns: + Dict[str, Any]: Current scan status including: + - is_scanning: Whether a scan is in progress + - total_items: Total items to scan + - directories_scanned: Items scanned so far + - current_directory: Current item being scanned + - directory: Root scan directory + """ + return anime_service.get_scan_status() + + class AddSeriesRequest(BaseModel): """Request model for adding a new series.""" @@ -582,13 +621,21 @@ async def add_series( request: AddSeriesRequest, _auth: dict = Depends(require_auth), series_app: Any = Depends(get_series_app), + db: Optional[AsyncSession] = Depends(get_optional_database_session), + anime_service: AnimeService = Depends(get_anime_service), ) -> dict: - """Add a new series to the library. + """Add a new series to the library with full initialization. - Extracts the series `key` from the provided link URL. + This endpoint performs the complete series addition flow: + 1. Validates inputs and extracts the series key from the link URL + 2. Creates a sanitized folder name from the display name + 3. Saves the series to the database (if available) + 4. 
Creates the folder on disk with the sanitized name + 5. Triggers a targeted scan for missing episodes (only this series) + The `key` is the URL-safe identifier used for all lookups. - The `name` is stored as display metadata along with a - filesystem-friendly `folder` name derived from the name. + The `name` is stored as display metadata and used to derive + the filesystem folder name (sanitized for filesystem safety). Args: request: Request containing the series link and name. @@ -596,15 +643,24 @@ async def add_series( - name: Display name for the series _auth: Ensures the caller is authenticated (value unused) series_app: Core `SeriesApp` instance provided via dependency + db: Optional database session for async operations + anime_service: AnimeService for scanning operations Returns: - Dict[str, Any]: Status payload with success message and key + Dict[str, Any]: Status payload with: + - status: "success" or "exists" + - message: Human-readable status message + - key: Series unique identifier + - folder: Created folder path + - db_id: Database ID (if saved to DB) + - missing_episodes: Dict of missing episodes by season + - total_missing: Total count of missing episodes Raises: HTTPException: If adding the series fails or link is invalid """ try: - # Validate inputs + # Step A: Validate inputs if not request.link or not request.link.strip(): raise HTTPException( status_code=status.HTTP_400_BAD_REQUEST, @@ -617,13 +673,6 @@ async def add_series( detail="Series name cannot be empty", ) - # Check if series_app has the list attribute - if not hasattr(series_app, "list"): - raise HTTPException( - status_code=status.HTTP_501_NOT_IMPLEMENTED, - detail="Series list functionality not available", - ) - # Extract key from link URL # Expected format: https://aniworld.to/anime/stream/{key} link = request.link.strip() @@ -644,38 +693,150 @@ async def add_series( detail="Could not extract series key from link", ) - # Create folder from name (filesystem-friendly) - folder = request.name.strip() + # Step B: Create sanitized folder name from display name + name = request.name.strip() + try: + folder = sanitize_folder_name(name) + except ValueError as e: + raise HTTPException( + status_code=status.HTTP_400_BAD_REQUEST, + detail=f"Invalid series name for folder: {str(e)}", + ) - # Create a new Serie object - # key: unique identifier extracted from link - # name: display name from request - # folder: filesystem folder name (derived from name) - # episodeDict: empty for new series - serie = Serie( - key=key, - name=request.name.strip(), - site="aniworld.to", - folder=folder, - episodeDict={} - ) + db_id = None + missing_episodes: dict = {} + scan_error: Optional[str] = None - # Add the series to the list - series_app.list.add(serie) + # Step C: Save to database if available + if db is not None: + # Check if series already exists in database + existing = await AnimeSeriesService.get_by_key(db, key) + if existing: + return { + "status": "exists", + "message": f"Series already exists: {name}", + "key": key, + "folder": existing.folder, + "db_id": existing.id, + "missing_episodes": {}, + "total_missing": 0 + } + + # Save to database using AnimeSeriesService + anime_series = await AnimeSeriesService.create( + db=db, + key=key, + name=name, + site="aniworld.to", + folder=folder, + ) + db_id = anime_series.id + + logger.info( + "Added series to database: %s (key=%s, db_id=%d)", + name, + key, + db_id + ) - # Refresh the series list to update the cache - if hasattr(series_app, "refresh_series_list"): - 
series_app.refresh_series_list() + # Step D: Create folder on disk and add to SerieList + folder_path = None + if series_app and hasattr(series_app, "list"): + serie = Serie( + key=key, + name=name, + site="aniworld.to", + folder=folder, + episodeDict={} + ) + + # Add to SerieList - this creates the folder with sanitized name + if hasattr(series_app.list, 'add'): + with warnings.catch_warnings(): + warnings.simplefilter("ignore", DeprecationWarning) + folder_path = series_app.list.add(serie, use_sanitized_folder=True) + # Update folder to reflect what was actually created + folder = serie.folder + elif hasattr(series_app.list, 'keyDict'): + # Manual folder creation and cache update + if hasattr(series_app.list, 'directory'): + folder_path = os.path.join(series_app.list.directory, folder) + os.makedirs(folder_path, exist_ok=True) + series_app.list.keyDict[key] = serie + + logger.info( + "Created folder for series: %s at %s", + name, + folder_path or folder + ) - return { - "status": "success", - "message": f"Successfully added series: {request.name}", - "key": key, - "folder": folder + # Step E: Trigger targeted scan for missing episodes + try: + if series_app and hasattr(series_app, "scanner"): + missing_episodes = series_app.scanner.scan_single_series( + key=key, + folder=folder + ) + logger.info( + "Targeted scan completed for %s: found %d missing episodes", + key, + sum(len(eps) for eps in missing_episodes.values()) + ) + + # Update the serie in keyDict with the missing episodes + if hasattr(series_app, "list") and hasattr(series_app.list, "keyDict"): + if key in series_app.list.keyDict: + series_app.list.keyDict[key].episodeDict = missing_episodes + elif anime_service: + # Fallback to anime_service if scanner not directly available + # Note: This is a lightweight scan, not a full rescan + logger.info( + "Scanner not directly available, " + "skipping targeted scan for %s", + key + ) + except Exception as e: + # Scan failure is not critical - series was still added + scan_error = str(e) + logger.warning( + "Targeted scan failed for %s: %s (series still added)", + key, + e + ) + + # Convert missing episodes keys to strings for JSON serialization + missing_episodes_serializable = { + str(season): episodes + for season, episodes in missing_episodes.items() } + + # Calculate total missing + total_missing = sum(len(eps) for eps in missing_episodes.values()) + + # Step F: Return response + response = { + "status": "success", + "message": f"Successfully added series: {name}", + "key": key, + "folder": folder_path or folder, + "db_id": db_id, + "missing_episodes": missing_episodes_serializable, + "total_missing": total_missing + } + + if scan_error: + response["scan_warning"] = f"Scan partially failed: {scan_error}" + + return response + except HTTPException: raise except Exception as exc: + logger.error("Failed to add series: %s", exc, exc_info=True) + + # Attempt to rollback database entry if folder creation failed + # (This is a best-effort cleanup) + raise HTTPException( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, detail=f"Failed to add series: {str(exc)}", diff --git a/src/server/api/auth.py b/src/server/api/auth.py index 199fd41..45fd11d 100644 --- a/src/server/api/auth.py +++ b/src/server/api/auth.py @@ -26,7 +26,7 @@ optional_bearer = HTTPBearer(auto_error=False) @router.post("/setup", status_code=http_status.HTTP_201_CREATED) -def setup_auth(req: SetupRequest): +async def setup_auth(req: SetupRequest): """Initial setup endpoint to configure the master password. 
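From a client's perspective, the enriched `add_series` response makes the whole add flow a single round trip. A sketch with `requests` — the route path is an assumption (the decorator is not visible in this hunk), while the field names match the response payload built above:

```python
import requests

def add_series(token: str, link: str, name: str) -> dict:
    resp = requests.post(
        "http://127.0.0.1:8000/api/anime/add",  # path assumed
        json={"link": link, "name": name},
        headers={"Authorization": f"Bearer {token}"},
        timeout=120,  # the targeted scan can take a while
    )
    resp.raise_for_status()
    body = resp.json()
    if body["status"] == "exists":
        print(f"Already in library: {body['key']}")
    elif "scan_warning" in body:
        print(f"Added, but scan incomplete: {body['scan_warning']}")
    else:
        print(f"Added {body['key']}: {body['total_missing']} missing")
    return body
```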
This endpoint also initializes the configuration with default values @@ -57,17 +57,44 @@ def setup_auth(req: SetupRequest): config.other['master_password_hash'] = password_hash # Store anime directory in config's other field if provided + anime_directory = None if hasattr(req, 'anime_directory') and req.anime_directory: - config.other['anime_directory'] = req.anime_directory + anime_directory = req.anime_directory.strip() + if anime_directory: + config.other['anime_directory'] = anime_directory # Save the config with the password hash and anime directory config_service.save_config(config, create_backup=False) + # Sync series from data files to database if anime directory is set + if anime_directory: + try: + import structlog + + from src.server.services.anime_service import ( + sync_series_from_data_files, + ) + logger = structlog.get_logger(__name__) + sync_count = await sync_series_from_data_files( + anime_directory, logger + ) + logger.info( + "Setup complete: synced series from data files", + count=sync_count + ) + except Exception as e: + # Log but don't fail setup if sync fails + import structlog + structlog.get_logger(__name__).warning( + "Failed to sync series after setup", + error=str(e) + ) + + return {"status": "ok"} + except ValueError as e: raise HTTPException(status_code=400, detail=str(e)) from e - return {"status": "ok"} - @router.post("/login", response_model=LoginResponse) def login(req: LoginRequest): diff --git a/src/server/api/config.py b/src/server/api/config.py index 595bbaf..8db9ce7 100644 --- a/src/server/api/config.py +++ b/src/server/api/config.py @@ -1,4 +1,4 @@ -from typing import Dict, List, Optional +from typing import Any, Dict, List, Optional from fastapi import APIRouter, Depends, HTTPException, status @@ -210,10 +210,10 @@ def update_advanced_config( ) from e -@router.post("/directory", response_model=Dict[str, str]) -def update_directory( +@router.post("/directory", response_model=Dict[str, Any]) +async def update_directory( directory_config: Dict[str, str], auth: dict = Depends(require_auth) -) -> Dict[str, str]: +) -> Dict[str, Any]: """Update anime directory configuration. 
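On the wire, first-time setup now does double duty: it stores the password hash and, when a directory is supplied, synchronizes series from the data files into the database before returning. A sketch — both the route path and the password field name are assumptions, since `SetupRequest` is not shown in this diff:

```python
import requests

def first_time_setup(password: str, anime_directory: str) -> None:
    resp = requests.post(
        "http://127.0.0.1:8000/api/auth/setup",  # path assumed
        json={
            "password": password,           # field name assumed
            "anime_directory": anime_directory,
        },
        timeout=60,
    )
    resp.raise_for_status()  # 201 on success; the sync runs server-side
```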
Args: @@ -235,13 +235,37 @@ def update_directory( app_config = config_service.load_config() # Store directory in other section - if "anime_directory" not in app_config.other: - app_config.other["anime_directory"] = directory - else: - app_config.other["anime_directory"] = directory + app_config.other["anime_directory"] = directory config_service.save_config(app_config) - return {"message": "Anime directory updated successfully"} + + # Sync series from data files to database + sync_count = 0 + try: + import structlog + + from src.server.services.anime_service import sync_series_from_data_files + logger = structlog.get_logger(__name__) + sync_count = await sync_series_from_data_files(directory, logger) + logger.info( + "Directory updated: synced series from data files", + directory=directory, + count=sync_count + ) + except Exception as e: + # Log but don't fail the directory update if sync fails + import structlog + structlog.get_logger(__name__).warning( + "Failed to sync series after directory update", + error=str(e) + ) + + response: Dict[str, Any] = { + "message": "Anime directory updated successfully", + "synced_series": sync_count + } + + return response except ConfigServiceError as e: raise HTTPException( status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, diff --git a/src/server/api/download.py b/src/server/api/download.py index a796b23..691f4ce 100644 --- a/src/server/api/download.py +++ b/src/server/api/download.py @@ -4,9 +4,10 @@ This module provides REST API endpoints for managing the anime download queue, including adding episodes, removing items, controlling queue processing, and retrieving queue status and statistics. """ -from fastapi import APIRouter, Depends, HTTPException, Path, status +from fastapi import APIRouter, Depends, Path, status from fastapi.responses import JSONResponse +from src.server.exceptions import BadRequestError, NotFoundError, ServerError from src.server.models.download import ( DownloadRequest, QueueOperationRequest, @@ -52,9 +53,8 @@ async def get_queue_status( return response except Exception as e: - raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail=f"Failed to retrieve queue status: {str(e)}", + raise ServerError( + message=f"Failed to retrieve queue status: {str(e)}" ) @@ -91,9 +91,8 @@ async def add_to_queue( try: # Validate request if not request.episodes: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail="At least one episode must be specified", + raise BadRequestError( + message="At least one episode must be specified" ) # Add to queue @@ -122,16 +121,12 @@ async def add_to_queue( ) except DownloadServiceError as e: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail=str(e), - ) - except HTTPException: + raise BadRequestError(message=str(e)) + except (BadRequestError, NotFoundError, ServerError): raise except Exception as e: - raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail=f"Failed to add episodes to queue: {str(e)}", + raise ServerError( + message=f"Failed to add episodes to queue: {str(e)}" ) @@ -163,9 +158,8 @@ async def clear_completed( } except Exception as e: - raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail=f"Failed to clear completed items: {str(e)}", + raise ServerError( + message=f"Failed to clear completed items: {str(e)}" ) @@ -197,9 +191,8 @@ async def clear_failed( } except Exception as e: - raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - 
detail=f"Failed to clear failed items: {str(e)}", + raise ServerError( + message=f"Failed to clear failed items: {str(e)}" ) @@ -231,9 +224,8 @@ async def clear_pending( } except Exception as e: - raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail=f"Failed to clear pending items: {str(e)}", + raise ServerError( + message=f"Failed to clear pending items: {str(e)}" ) @@ -262,22 +254,19 @@ async def remove_from_queue( removed_ids = await download_service.remove_from_queue([item_id]) if not removed_ids: - raise HTTPException( - status_code=status.HTTP_404_NOT_FOUND, - detail=f"Download item {item_id} not found in queue", + raise NotFoundError( + message=f"Download item {item_id} not found in queue", + resource_type="download_item", + resource_id=item_id ) except DownloadServiceError as e: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail=str(e), - ) - except HTTPException: + raise BadRequestError(message=str(e)) + except (BadRequestError, NotFoundError, ServerError): raise except Exception as e: - raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail=f"Failed to remove item from queue: {str(e)}", + raise ServerError( + message=f"Failed to remove item from queue: {str(e)}" ) @@ -307,22 +296,18 @@ async def remove_multiple_from_queue( ) if not removed_ids: - raise HTTPException( - status_code=status.HTTP_404_NOT_FOUND, - detail="No matching items found in queue", + raise NotFoundError( + message="No matching items found in queue", + resource_type="download_items" ) except DownloadServiceError as e: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail=str(e), - ) - except HTTPException: + raise BadRequestError(message=str(e)) + except (BadRequestError, NotFoundError, ServerError): raise except Exception as e: - raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail=f"Failed to remove items from queue: {str(e)}", + raise ServerError( + message=f"Failed to remove items from queue: {str(e)}" ) @@ -354,9 +339,8 @@ async def start_queue( result = await download_service.start_queue_processing() if result is None: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail="No pending downloads in queue", + raise BadRequestError( + message="No pending downloads in queue" ) return { @@ -365,16 +349,12 @@ async def start_queue( } except DownloadServiceError as e: - raise HTTPException( - status_code=status.HTTP_400_BAD_REQUEST, - detail=str(e), - ) - except HTTPException: + raise BadRequestError(message=str(e)) + except (BadRequestError, NotFoundError, ServerError): raise except Exception as e: - raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail=f"Failed to start queue processing: {str(e)}", + raise ServerError( + message=f"Failed to start queue processing: {str(e)}" ) @@ -408,9 +388,8 @@ async def stop_queue( } except Exception as e: - raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail=f"Failed to stop queue processing: {str(e)}", + raise ServerError( + message=f"Failed to stop queue processing: {str(e)}" ) @@ -442,9 +421,8 @@ async def pause_queue( } except Exception as e: - raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail=f"Failed to pause queue processing: {str(e)}", + raise ServerError( + message=f"Failed to pause queue processing: {str(e)}" ) @@ -480,9 +458,8 @@ async def reorder_queue( } except Exception as e: - raise HTTPException( - 
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail=f"Failed to reorder queue: {str(e)}", + raise ServerError( + message=f"Failed to reorder queue: {str(e)}" ) @@ -522,7 +499,6 @@ async def retry_failed( } except Exception as e: - raise HTTPException( - status_code=status.HTTP_500_INTERNAL_SERVER_ERROR, - detail=f"Failed to retry downloads: {str(e)}", + raise ServerError( + message=f"Failed to retry downloads: {str(e)}" ) diff --git a/src/server/api/health.py b/src/server/api/health.py index 9d9a81c..da01f0e 100644 --- a/src/server/api/health.py +++ b/src/server/api/health.py @@ -23,6 +23,9 @@ class HealthStatus(BaseModel): status: str timestamp: str version: str = "1.0.0" + service: str = "aniworld-api" + series_app_initialized: bool = False + anime_directory_configured: bool = False class DatabaseHealth(BaseModel): @@ -170,14 +173,24 @@ def get_system_metrics() -> SystemMetrics: @router.get("", response_model=HealthStatus) async def basic_health_check() -> HealthStatus: """Basic health check endpoint. + + This endpoint does not depend on anime_directory configuration + and should always return 200 OK for basic health monitoring. + Includes service information for identification. Returns: - HealthStatus: Simple health status with timestamp. + HealthStatus: Simple health status with timestamp and service info. """ + from src.config.settings import settings + from src.server.utils.dependencies import _series_app + logger.debug("Basic health check requested") return HealthStatus( status="healthy", timestamp=datetime.now().isoformat(), + service="aniworld-api", + series_app_initialized=_series_app is not None, + anime_directory_configured=bool(settings.anime_directory), ) diff --git a/src/server/api/websocket.py b/src/server/api/websocket.py index aa550d6..7277169 100644 --- a/src/server/api/websocket.py +++ b/src/server/api/websocket.py @@ -13,8 +13,9 @@ in their data payload. The `folder` field is optional for display purposes. """ from __future__ import annotations +import time import uuid -from typing import Optional +from typing import Dict, Optional, Set import structlog from fastapi import APIRouter, Depends, WebSocket, WebSocketDisconnect, status @@ -34,6 +35,73 @@ logger = structlog.get_logger(__name__) router = APIRouter(prefix="/ws", tags=["websocket"]) +# Valid room names - explicit allow-list for security +VALID_ROOMS: Set[str] = { + "downloads", # Download progress updates + "queue", # Queue status changes + "scan", # Scan progress updates + "system", # System notifications + "errors", # Error notifications +} + +# Rate limiting configuration for WebSocket messages +WS_RATE_LIMIT_MESSAGES_PER_MINUTE = 60 +WS_RATE_LIMIT_WINDOW_SECONDS = 60 + +# In-memory rate limiting for WebSocket connections +# WARNING: This resets on process restart. For production, consider Redis. +_ws_rate_limits: Dict[str, Dict[str, float]] = {} + + +def _check_ws_rate_limit(connection_id: str) -> bool: + """Check if a WebSocket connection has exceeded its rate limit. 
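With the extra fields on `HealthStatus`, monitoring scripts can tell "process up" apart from "fully configured" without authenticating. A sketch against the basic endpoint (route prefix assumed to match the other API routes):

```python
import requests

def check_health(base: str = "http://127.0.0.1:8000") -> bool:
    resp = requests.get(f"{base}/api/health", timeout=5)
    if resp.status_code != 200:
        return False
    body = resp.json()
    # "healthy but unconfigured" is a useful distinction for alerting
    return body["status"] == "healthy" and body["anime_directory_configured"]
```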
+ + Args: + connection_id: Unique identifier for the WebSocket connection + + Returns: + bool: True if within rate limit, False if exceeded + """ + now = time.time() + + if connection_id not in _ws_rate_limits: + _ws_rate_limits[connection_id] = { + "count": 0, + "window_start": now, + } + + record = _ws_rate_limits[connection_id] + + # Reset window if expired + if now - record["window_start"] > WS_RATE_LIMIT_WINDOW_SECONDS: + record["window_start"] = now + record["count"] = 0 + + record["count"] += 1 + + return record["count"] <= WS_RATE_LIMIT_MESSAGES_PER_MINUTE + + +def _cleanup_ws_rate_limits(connection_id: str) -> None: + """Remove rate limit record for a disconnected connection. + + Args: + connection_id: Unique identifier for the WebSocket connection + """ + _ws_rate_limits.pop(connection_id, None) + + +def _validate_room_name(room: str) -> bool: + """Validate that a room name is in the allowed set. + + Args: + room: Room name to validate + + Returns: + bool: True if room is valid, False otherwise + """ + return room in VALID_ROOMS + @router.websocket("/connect") async def websocket_endpoint( @@ -130,6 +198,19 @@ async def websocket_endpoint( # Receive message from client data = await websocket.receive_json() + # Check rate limit + if not _check_ws_rate_limit(connection_id): + logger.warning( + "WebSocket rate limit exceeded", + connection_id=connection_id, + ) + await ws_service.send_error( + connection_id, + "Rate limit exceeded. Please slow down.", + "RATE_LIMIT_EXCEEDED", + ) + continue + # Parse client message try: client_msg = ClientMessage(**data) @@ -149,9 +230,26 @@ async def websocket_endpoint( # Handle room subscription requests if client_msg.action in ["join", "leave"]: try: + room_name = client_msg.data.get("room", "") + + # Validate room name against allow-list + if not _validate_room_name(room_name): + logger.warning( + "Invalid room name requested", + connection_id=connection_id, + room=room_name, + ) + await ws_service.send_error( + connection_id, + f"Invalid room name: {room_name}. 
" + f"Valid rooms: {', '.join(sorted(VALID_ROOMS))}", + "INVALID_ROOM", + ) + continue + room_req = RoomSubscriptionRequest( action=client_msg.action, - room=client_msg.data.get("room", ""), + room=room_name, ) if room_req.action == "join": @@ -241,7 +339,8 @@ async def websocket_endpoint( error=str(e), ) finally: - # Cleanup connection + # Cleanup connection and rate limit record + _cleanup_ws_rate_limits(connection_id) await ws_service.disconnect(connection_id) logger.info("WebSocket connection closed", connection_id=connection_id) @@ -263,5 +362,6 @@ async def websocket_status( "status": "operational", "active_connections": connection_count, "supported_message_types": [t.value for t in WebSocketMessageType], + "valid_rooms": sorted(VALID_ROOMS), }, ) diff --git a/src/server/config/development.py b/src/server/config/development.py index 761a0d0..98cbab2 100644 --- a/src/server/config/development.py +++ b/src/server/config/development.py @@ -8,7 +8,7 @@ Environment Variables: JWT_SECRET_KEY: Secret key for JWT token signing (default: dev-secret) PASSWORD_SALT: Salt for password hashing (default: dev-salt) DATABASE_URL: Development database connection string (default: SQLite) - LOG_LEVEL: Logging level (default: DEBUG) + LOG_LEVEL: Logging level (default: INFO) CORS_ORIGINS: Comma-separated list of allowed CORS origins API_RATE_LIMIT: API rate limit per minute (default: 1000) """ @@ -91,8 +91,8 @@ class DevelopmentSettings(BaseSettings): # Logging Settings # ============================================================================ - log_level: str = Field(default="DEBUG", env="LOG_LEVEL") - """Logging level (DEBUG for detailed output).""" + log_level: str = Field(default="INFO", env="LOG_LEVEL") + """Logging level (INFO for standard output).""" log_file: str = Field(default="logs/development.log", env="LOG_FILE") """Path to development log file.""" diff --git a/src/server/config/logging_config.py b/src/server/config/logging_config.py index 5642667..ff899eb 100644 --- a/src/server/config/logging_config.py +++ b/src/server/config/logging_config.py @@ -60,7 +60,7 @@ def setup_logging() -> Dict[str, logging.Logger]: # File handler for general server logs server_file_handler = logging.FileHandler(server_log_file, mode='a', encoding='utf-8') - server_file_handler.setLevel(logging.DEBUG) + server_file_handler.setLevel(logging.INFO) server_file_handler.setFormatter(detailed_format) root_logger.addHandler(server_file_handler) diff --git a/src/server/controllers/health_controller.py b/src/server/controllers/health_controller.py deleted file mode 100644 index d2cf4ab..0000000 --- a/src/server/controllers/health_controller.py +++ /dev/null @@ -1,27 +0,0 @@ -""" -Health check controller for monitoring and status endpoints. - -This module provides health check endpoints for application monitoring. -""" -from fastapi import APIRouter - -from src.config.settings import settings -from src.server.utils.dependencies import _series_app - -router = APIRouter(prefix="/health", tags=["health"]) - - -@router.get("") -async def health_check(): - """Health check endpoint for monitoring. - - This endpoint does not depend on anime_directory configuration - and should always return 200 OK for basic health monitoring. 
- """ - return { - "status": "healthy", - "service": "aniworld-api", - "version": "1.0.0", - "series_app_initialized": _series_app is not None, - "anime_directory_configured": bool(settings.anime_directory) - } diff --git a/src/server/database/README.md b/src/server/database/README.md index 63a8d19..12c0309 100644 --- a/src/server/database/README.md +++ b/src/server/database/README.md @@ -13,7 +13,7 @@ This package provides persistent storage for anime series, episodes, download qu Install required dependencies: ```bash -pip install sqlalchemy alembic aiosqlite +pip install sqlalchemy aiosqlite ``` Or use the project requirements: @@ -163,24 +163,6 @@ from src.config.settings import settings settings.database_url = "sqlite:///./data/aniworld.db" ``` -## Migrations (Future) - -Alembic is installed for database migrations: - -```bash -# Initialize Alembic -alembic init alembic - -# Generate migration -alembic revision --autogenerate -m "Description" - -# Apply migrations -alembic upgrade head - -# Rollback -alembic downgrade -1 -``` - ## Testing Run database tests: @@ -196,8 +178,7 @@ The test suite uses an in-memory SQLite database for isolation and speed. - **base.py**: Base declarative class and mixins - **models.py**: SQLAlchemy ORM models (4 models) - **connection.py**: Engine, session factory, dependency injection -- **migrations.py**: Alembic migration placeholder -- ****init**.py**: Package exports +- \***\*init**.py\*\*: Package exports - **service.py**: Service layer with CRUD operations ## Service Layer @@ -432,5 +413,4 @@ Solution: Ensure referenced records exist before creating relationships. ## Further Reading - [SQLAlchemy 2.0 Documentation](https://docs.sqlalchemy.org/en/20/) -- [Alembic Tutorial](https://alembic.sqlalchemy.org/en/latest/tutorial.html) - [FastAPI with Databases](https://fastapi.tiangolo.com/tutorial/sql-databases/) diff --git a/src/server/database/__init__.py b/src/server/database/__init__.py index 5d993b0..afbdccc 100644 --- a/src/server/database/__init__.py +++ b/src/server/database/__init__.py @@ -30,7 +30,6 @@ from src.server.database.init import ( create_database_backup, create_database_schema, get_database_info, - get_migration_guide, get_schema_version, initialize_database, seed_initial_data, @@ -64,7 +63,6 @@ __all__ = [ "check_database_health", "create_database_backup", "get_database_info", - "get_migration_guide", "CURRENT_SCHEMA_VERSION", "EXPECTED_TABLES", # Models diff --git a/src/server/database/connection.py b/src/server/database/connection.py index 3aca2aa..511b182 100644 --- a/src/server/database/connection.py +++ b/src/server/database/connection.py @@ -7,7 +7,11 @@ Functions: - init_db: Initialize database engine and create tables - close_db: Close database connections and cleanup - get_db_session: FastAPI dependency for database sessions + - get_transactional_session: Session without auto-commit for transactions - get_engine: Get database engine instance + +Classes: + - TransactionManager: Helper class for manual transaction control """ from __future__ import annotations @@ -86,19 +90,24 @@ async def init_db() -> None: db_url = _get_database_url() logger.info(f"Initializing database: {db_url}") + # Build engine kwargs based on database type + is_sqlite = "sqlite" in db_url + engine_kwargs = { + "echo": settings.log_level == "DEBUG", + "poolclass": pool.StaticPool if is_sqlite else pool.QueuePool, + "pool_pre_ping": True, + } + + # Only add pool_size and max_overflow for non-SQLite databases + if not is_sqlite: + engine_kwargs["pool_size"] = 
5 + engine_kwargs["max_overflow"] = 10 + # Create async engine - _engine = create_async_engine( - db_url, - echo=settings.log_level == "DEBUG", - poolclass=pool.StaticPool if "sqlite" in db_url else pool.QueuePool, - pool_size=5 if "sqlite" not in db_url else None, - max_overflow=10 if "sqlite" not in db_url else None, - pool_pre_ping=True, - future=True, - ) + _engine = create_async_engine(db_url, **engine_kwargs) # Configure SQLite if needed - if "sqlite" in db_url: + if is_sqlite: _configure_sqlite_engine(_engine) # Create async session factory @@ -112,12 +121,13 @@ async def init_db() -> None: # Create sync engine for initial setup sync_url = settings.database_url - _sync_engine = create_engine( - sync_url, - echo=settings.log_level == "DEBUG", - poolclass=pool.StaticPool if "sqlite" in sync_url else pool.QueuePool, - pool_pre_ping=True, - ) + is_sqlite_sync = "sqlite" in sync_url + sync_engine_kwargs = { + "echo": settings.log_level == "DEBUG", + "poolclass": pool.StaticPool if is_sqlite_sync else pool.QueuePool, + "pool_pre_ping": True, + } + _sync_engine = create_engine(sync_url, **sync_engine_kwargs) # Create sync session factory _sync_session_factory = sessionmaker( @@ -140,11 +150,29 @@ async def init_db() -> None: async def close_db() -> None: """Close database connections and cleanup resources. + Performs a WAL checkpoint for SQLite databases to ensure all + pending writes are flushed to the main database file before + closing connections. This prevents database corruption during + shutdown. + Should be called during application shutdown. """ global _engine, _sync_engine, _session_factory, _sync_session_factory try: + # For SQLite: checkpoint WAL to ensure all writes are flushed + if _sync_engine and "sqlite" in str(_sync_engine.url): + logger.info("Running SQLite WAL checkpoint before shutdown...") + try: + from sqlalchemy import text + with _sync_engine.connect() as conn: + # TRUNCATE mode: checkpoint and truncate WAL file + conn.execute(text("PRAGMA wal_checkpoint(TRUNCATE)")) + conn.commit() + logger.info("SQLite WAL checkpoint completed") + except Exception as e: + logger.warning(f"WAL checkpoint failed (non-critical): {e}") + if _engine: logger.info("Closing async database engine...") await _engine.dispose() @@ -258,3 +286,307 @@ def get_sync_session() -> Session: ) return _sync_session_factory() + + +def get_async_session_factory() -> AsyncSession: + """Get a new async database session (factory function). + + Creates a new session instance for use in repository patterns. + The caller is responsible for committing/rolling back and closing. + + Returns: + AsyncSession: New database session for async operations + + Raises: + RuntimeError: If database is not initialized + + Example: + session = get_async_session_factory() + try: + result = await session.execute(select(AnimeSeries)) + await session.commit() + return result.scalars().all() + except Exception: + await session.rollback() + raise + finally: + await session.close() + """ + if _session_factory is None: + raise RuntimeError( + "Database not initialized. Call init_db() first." + ) + + return _session_factory() + + +@asynccontextmanager +async def get_transactional_session() -> AsyncGenerator[AsyncSession, None]: + """Get a database session without auto-commit for explicit transaction control. + + Unlike get_db_session(), this does NOT auto-commit on success. + Use this when you need explicit transaction control with the + @transactional decorator or atomic() context manager. 
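The shutdown checkpoint is easiest to understand against a bare SQLite file — this is the standard-library equivalent of the PRAGMA that `close_db()` now runs through SQLAlchemy:

```python
import sqlite3

def checkpoint_wal(db_path: str) -> None:
    with sqlite3.connect(db_path) as conn:
        # Returns (busy, wal_frames, checkpointed_frames);
        # busy == 0 means every WAL frame reached the main file
        # and the -wal file was truncated to zero length.
        busy, wal_frames, moved = conn.execute(
            "PRAGMA wal_checkpoint(TRUNCATE)"
        ).fetchone()
        print(f"busy={busy} wal={wal_frames} checkpointed={moved}")
```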
+ + Yields: + AsyncSession: Database session for async operations + + Raises: + RuntimeError: If database is not initialized + + Example: + async with get_transactional_session() as session: + async with atomic(session) as tx: + # Multiple operations in transaction + await operation1(session) + await operation2(session) + # Committed when exiting atomic() context + """ + if _session_factory is None: + raise RuntimeError( + "Database not initialized. Call init_db() first." + ) + + session = _session_factory() + try: + yield session + except Exception: + await session.rollback() + raise + finally: + await session.close() + + +class TransactionManager: + """Helper class for manual transaction control. + + Provides a cleaner interface for managing transactions across + multiple service calls within a single request. + + Attributes: + _session_factory: Factory for creating new sessions + _session: Current active session + _in_transaction: Whether currently in a transaction + + Example: + async with TransactionManager() as tm: + session = await tm.get_session() + await tm.begin() + try: + await service1.operation(session) + await service2.operation(session) + await tm.commit() + except Exception: + await tm.rollback() + raise + """ + + def __init__( + self, + session_factory: Optional[async_sessionmaker] = None + ) -> None: + """Initialize transaction manager. + + Args: + session_factory: Optional custom session factory. + Uses global factory if not provided. + """ + self._session_factory = session_factory or _session_factory + self._session: Optional[AsyncSession] = None + self._in_transaction = False + + if self._session_factory is None: + raise RuntimeError( + "Database not initialized. Call init_db() first." + ) + + async def __aenter__(self) -> "TransactionManager": + """Enter context manager and create session.""" + self._session = self._session_factory() + logger.debug("TransactionManager: Created new session") + return self + + async def __aexit__( + self, + exc_type: Optional[type], + exc_val: Optional[BaseException], + exc_tb: Optional[object], + ) -> bool: + """Exit context manager and cleanup session. + + Automatically rolls back if an exception occurred and + transaction wasn't explicitly committed. + """ + if self._session: + if exc_type is not None and self._in_transaction: + logger.warning( + "TransactionManager: Rolling back due to exception: %s", + exc_val, + ) + await self._session.rollback() + + await self._session.close() + self._session = None + self._in_transaction = False + logger.debug("TransactionManager: Session closed") + + return False + + async def get_session(self) -> AsyncSession: + """Get the current session. + + Returns: + Current AsyncSession instance + + Raises: + RuntimeError: If not within context manager + """ + if self._session is None: + raise RuntimeError( + "TransactionManager must be used as async context manager" + ) + return self._session + + async def begin(self) -> None: + """Begin a new transaction. + + Raises: + RuntimeError: If already in a transaction or no session + """ + if self._session is None: + raise RuntimeError("No active session") + + if self._in_transaction: + raise RuntimeError("Already in a transaction") + + await self._session.begin() + self._in_transaction = True + logger.debug("TransactionManager: Transaction started") + + async def commit(self) -> None: + """Commit the current transaction. 
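Typical use of the no-auto-commit session, sketched with a throwaway statement (the table and column names are illustrative, though they match the queue model referenced elsewhere in this diff):

```python
from sqlalchemy import text

from src.server.database.connection import get_transactional_session

async def reset_failed_retries() -> None:
    async with get_transactional_session() as session:
        await session.execute(
            text("UPDATE download_queue SET retry_count = 0 "
                 "WHERE status = 'failed'")
        )
        await session.commit()  # nothing is persisted until this line
```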
+ + Raises: + RuntimeError: If not in a transaction + """ + if not self._in_transaction or self._session is None: + raise RuntimeError("Not in a transaction") + + await self._session.commit() + self._in_transaction = False + logger.debug("TransactionManager: Transaction committed") + + async def rollback(self) -> None: + """Rollback the current transaction. + + Raises: + RuntimeError: If not in a transaction + """ + if self._session is None: + raise RuntimeError("No active session") + + await self._session.rollback() + self._in_transaction = False + logger.debug("TransactionManager: Transaction rolled back") + + async def savepoint(self, name: Optional[str] = None) -> "SavepointHandle": + """Create a savepoint within the current transaction. + + Args: + name: Optional savepoint name + + Returns: + SavepointHandle for controlling the savepoint + + Raises: + RuntimeError: If not in a transaction + """ + if not self._in_transaction or self._session is None: + raise RuntimeError("Must be in a transaction to create savepoint") + + nested = await self._session.begin_nested() + return SavepointHandle(nested, name or "unnamed") + + def is_in_transaction(self) -> bool: + """Check if currently in a transaction. + + Returns: + True if in an active transaction + """ + return self._in_transaction + + def get_transaction_depth(self) -> int: + """Get current transaction nesting depth. + + Returns: + 0 if not in transaction, 1+ for nested transactions + """ + if not self._in_transaction: + return 0 + return 1 # Basic implementation - could be extended + + +class SavepointHandle: + """Handle for controlling a database savepoint. + + Attributes: + _nested: SQLAlchemy nested transaction + _name: Savepoint name for logging + _released: Whether savepoint has been released + """ + + def __init__(self, nested: object, name: str) -> None: + """Initialize savepoint handle. + + Args: + nested: SQLAlchemy nested transaction object + name: Savepoint name + """ + self._nested = nested + self._name = name + self._released = False + logger.debug("Created savepoint: %s", name) + + async def rollback(self) -> None: + """Rollback to this savepoint.""" + if not self._released: + await self._nested.rollback() + self._released = True + logger.debug("Rolled back savepoint: %s", self._name) + + async def release(self) -> None: + """Release (commit) this savepoint.""" + if not self._released: + # Nested transactions commit automatically in SQLAlchemy + self._released = True + logger.debug("Released savepoint: %s", self._name) + + +def is_session_in_transaction(session: AsyncSession | Session) -> bool: + """Check if a session is currently in a transaction. + + Args: + session: SQLAlchemy session (sync or async) + + Returns: + True if session is in an active transaction + """ + return session.in_transaction() + + +def get_session_transaction_depth(session: AsyncSession | Session) -> int: + """Get the transaction nesting depth of a session. + + Args: + session: SQLAlchemy session (sync or async) + + Returns: + Number of nested transactions (0 if not in transaction) + """ + if not session.in_transaction(): + return 0 + + # Check for nested transaction state + # Note: SQLAlchemy doesn't directly expose nesting depth + return 1 + diff --git a/src/server/database/examples.py b/src/server/database/examples.py deleted file mode 100644 index d4f01b0..0000000 --- a/src/server/database/examples.py +++ /dev/null @@ -1,479 +0,0 @@ -"""Example integration of database service with existing services. 
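Savepoints make per-item isolation inside one batch transaction straightforward. A sketch assuming `init_db()` has already run; the INSERT statement is illustrative, not the project's actual schema contract:

```python
from sqlalchemy import text

from src.server.database.connection import TransactionManager

async def import_batch(rows: list[dict]) -> None:
    async with TransactionManager() as tm:
        session = await tm.get_session()
        await tm.begin()
        for row in rows:
            sp = await tm.savepoint(row.get("key", "row"))
            try:
                await session.execute(
                    text("INSERT INTO anime_series (key, name) "
                         "VALUES (:key, :name)"),
                    {"key": row["key"], "name": row["name"]},
                )
                await sp.release()
            except Exception:
                # A bad row rolls back to its own savepoint only;
                # the rest of the batch still commits below.
                await sp.rollback()
        await tm.commit()
```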
- -This file demonstrates how to integrate the database service layer with -existing application services like AnimeService and DownloadService. - -These examples show patterns for: -- Persisting scan results to database -- Loading queue from database on startup -- Syncing download progress to database -- Maintaining consistency between in-memory state and database -""" -from __future__ import annotations - -import logging -from typing import List, Optional - -from sqlalchemy.ext.asyncio import AsyncSession - -from src.core.entities.series import Serie -from src.server.database.models import DownloadPriority, DownloadStatus -from src.server.database.service import ( - AnimeSeriesService, - DownloadQueueService, - EpisodeService, -) - -logger = logging.getLogger(__name__) - - -# ============================================================================ -# Example 1: Persist Scan Results -# ============================================================================ - - -async def persist_scan_results( - db: AsyncSession, - series_list: List[Serie], -) -> None: - """Persist scan results to database. - - Updates or creates anime series and their episodes based on - scan results from SerieScanner. - - Args: - db: Database session - series_list: List of Serie objects from scan - """ - logger.info(f"Persisting {len(series_list)} series to database") - - for serie in series_list: - # Check if series exists - existing = await AnimeSeriesService.get_by_key(db, serie.key) - - if existing: - # Update existing series - await AnimeSeriesService.update( - db, - existing.id, - name=serie.name, - site=serie.site, - folder=serie.folder, - episode_dict=serie.episode_dict, - ) - series_id = existing.id - else: - # Create new series - new_series = await AnimeSeriesService.create( - db, - key=serie.key, - name=serie.name, - site=serie.site, - folder=serie.folder, - episode_dict=serie.episode_dict, - ) - series_id = new_series.id - - # Update episodes for this series - await _update_episodes(db, series_id, serie) - - await db.commit() - logger.info("Scan results persisted successfully") - - -async def _update_episodes( - db: AsyncSession, - series_id: int, - serie: Serie, -) -> None: - """Update episodes for a series. - - Args: - db: Database session - series_id: Series ID in database - serie: Serie object with episode information - """ - # Get existing episodes - existing_episodes = await EpisodeService.get_by_series(db, series_id) - existing_map = { - (ep.season, ep.episode_number): ep - for ep in existing_episodes - } - - # Iterate through episode_dict to create/update episodes - for season, episodes in serie.episode_dict.items(): - for ep_num in episodes: - key = (int(season), int(ep_num)) - - if key in existing_map: - # Episode exists, check if downloaded - episode = existing_map[key] - # Update if needed (e.g., file path changed) - if not episode.is_downloaded: - # Check if file exists locally - # This would be done by checking serie.local_episodes - pass - else: - # Create new episode - await EpisodeService.create( - db, - series_id=series_id, - season=int(season), - episode_number=int(ep_num), - is_downloaded=False, - ) - - -# ============================================================================ -# Example 2: Load Queue from Database -# ============================================================================ - - -async def load_queue_from_database( - db: AsyncSession, -) -> List[dict]: - """Load download queue from database. 
- - Retrieves pending and active download items from database and - converts them to format suitable for DownloadService. - - Args: - db: Database session - - Returns: - List of download items as dictionaries - """ - logger.info("Loading download queue from database") - - # Get pending and active items - pending = await DownloadQueueService.get_pending(db) - active = await DownloadQueueService.get_active(db) - - all_items = pending + active - - # Convert to dictionary format for DownloadService - queue_items = [] - for item in all_items: - queue_items.append({ - "id": item.id, - "series_id": item.series_id, - "season": item.season, - "episode_number": item.episode_number, - "status": item.status.value, - "priority": item.priority.value, - "progress_percent": item.progress_percent, - "downloaded_bytes": item.downloaded_bytes, - "total_bytes": item.total_bytes, - "download_speed": item.download_speed, - "error_message": item.error_message, - "retry_count": item.retry_count, - }) - - logger.info(f"Loaded {len(queue_items)} items from database") - return queue_items - - -# ============================================================================ -# Example 3: Sync Download Progress to Database -# ============================================================================ - - -async def sync_download_progress( - db: AsyncSession, - item_id: int, - progress_percent: float, - downloaded_bytes: int, - total_bytes: Optional[int] = None, - download_speed: Optional[float] = None, -) -> None: - """Sync download progress to database. - - Updates download queue item progress in database. This would be called - from the download progress callback. - - Args: - db: Database session - item_id: Download queue item ID - progress_percent: Progress percentage (0-100) - downloaded_bytes: Bytes downloaded - total_bytes: Optional total file size - download_speed: Optional current speed (bytes/sec) - """ - await DownloadQueueService.update_progress( - db, - item_id, - progress_percent, - downloaded_bytes, - total_bytes, - download_speed, - ) - await db.commit() - - -async def mark_download_complete( - db: AsyncSession, - item_id: int, - file_path: str, - file_size: int, -) -> None: - """Mark download as complete in database. - - Updates download queue item status and marks episode as downloaded. - - Args: - db: Database session - item_id: Download queue item ID - file_path: Path to downloaded file - file_size: File size in bytes - """ - # Get download item - item = await DownloadQueueService.get_by_id(db, item_id) - if not item: - logger.error(f"Download item {item_id} not found") - return - - # Update download status - await DownloadQueueService.update_status( - db, - item_id, - DownloadStatus.COMPLETED, - ) - - # Find or create episode and mark as downloaded - episode = await EpisodeService.get_by_episode( - db, - item.series_id, - item.season, - item.episode_number, - ) - - if episode: - await EpisodeService.mark_downloaded( - db, - episode.id, - file_path, - file_size, - ) - else: - # Create episode - episode = await EpisodeService.create( - db, - series_id=item.series_id, - season=item.season, - episode_number=item.episode_number, - file_path=file_path, - file_size=file_size, - is_downloaded=True, - ) - - await db.commit() - logger.info( - f"Marked download complete: S{item.season:02d}E{item.episode_number:02d}" - ) - - -async def mark_download_failed( - db: AsyncSession, - item_id: int, - error_message: str, -) -> None: - """Mark download as failed in database. 
- - Args: - db: Database session - item_id: Download queue item ID - error_message: Error description - """ - await DownloadQueueService.update_status( - db, - item_id, - DownloadStatus.FAILED, - error_message=error_message, - ) - await db.commit() - - -# ============================================================================ -# Example 4: Add Episodes to Download Queue -# ============================================================================ - - -async def add_episodes_to_queue( - db: AsyncSession, - series_key: str, - episodes: List[tuple[int, int]], # List of (season, episode) tuples - priority: DownloadPriority = DownloadPriority.NORMAL, -) -> int: - """Add multiple episodes to download queue. - - Args: - db: Database session - series_key: Series provider key - episodes: List of (season, episode_number) tuples - priority: Download priority - - Returns: - Number of episodes added to queue - """ - # Get series - series = await AnimeSeriesService.get_by_key(db, series_key) - if not series: - logger.error(f"Series not found: {series_key}") - return 0 - - added_count = 0 - for season, episode_number in episodes: - # Check if already in queue - existing_items = await DownloadQueueService.get_all(db) - already_queued = any( - item.series_id == series.id - and item.season == season - and item.episode_number == episode_number - and item.status in (DownloadStatus.PENDING, DownloadStatus.DOWNLOADING) - for item in existing_items - ) - - if not already_queued: - await DownloadQueueService.create( - db, - series_id=series.id, - season=season, - episode_number=episode_number, - priority=priority, - ) - added_count += 1 - - await db.commit() - logger.info(f"Added {added_count} episodes to download queue") - return added_count - - -# ============================================================================ -# Example 5: Integration with AnimeService -# ============================================================================ - - -class EnhancedAnimeService: - """Enhanced AnimeService with database persistence. - - This is an example of how to wrap the existing AnimeService with - database persistence capabilities. - """ - - def __init__(self, db_session_factory): - """Initialize enhanced anime service. - - Args: - db_session_factory: Async session factory for database access - """ - self.db_session_factory = db_session_factory - - async def rescan_with_persistence(self, directory: str) -> dict: - """Rescan directory and persist results. - - Args: - directory: Directory to scan - - Returns: - Scan results dictionary - """ - # Import here to avoid circular dependencies - from src.core.SeriesApp import SeriesApp - - # Perform scan - app = SeriesApp(directory) - series_list = app.ReScan() - - # Persist to database - async with self.db_session_factory() as db: - await persist_scan_results(db, series_list) - - return { - "total_series": len(series_list), - "message": "Scan completed and persisted to database", - } - - async def get_series_with_missing_episodes(self) -> List[dict]: - """Get series with missing episodes from database. 
- - Returns: - List of series with missing episodes - """ - async with self.db_session_factory() as db: - # Get all series - all_series = await AnimeSeriesService.get_all( - db, - with_episodes=True, - ) - - # Filter series with missing episodes - series_with_missing = [] - for series in all_series: - if series.episode_dict: - total_episodes = sum( - len(eps) for eps in series.episode_dict.values() - ) - downloaded_episodes = sum( - 1 for ep in series.episodes if ep.is_downloaded - ) - - if downloaded_episodes < total_episodes: - series_with_missing.append({ - "id": series.id, - "key": series.key, - "name": series.name, - "total_episodes": total_episodes, - "downloaded_episodes": downloaded_episodes, - "missing_episodes": total_episodes - downloaded_episodes, - }) - - return series_with_missing - - -# ============================================================================ -# Usage Example -# ============================================================================ - - -async def example_usage(): - """Example usage of database service integration.""" - from src.server.database import get_db_session - - # Get database session - async with get_db_session() as db: - # Example 1: Add episodes to queue - added = await add_episodes_to_queue( - db, - series_key="attack-on-titan", - episodes=[(1, 1), (1, 2), (1, 3)], - priority=DownloadPriority.HIGH, - ) - print(f"Added {added} episodes to queue") - - # Example 2: Load queue - queue_items = await load_queue_from_database(db) - print(f"Queue has {len(queue_items)} items") - - # Example 3: Update progress - if queue_items: - await sync_download_progress( - db, - item_id=queue_items[0]["id"], - progress_percent=50.0, - downloaded_bytes=500000, - total_bytes=1000000, - ) - - # Example 4: Mark complete - if queue_items: - await mark_download_complete( - db, - item_id=queue_items[0]["id"], - file_path="/path/to/file.mp4", - file_size=1000000, - ) - - -if __name__ == "__main__": - import asyncio - asyncio.run(example_usage()) diff --git a/src/server/database/init.py b/src/server/database/init.py index e3cdf5f..b330de9 100644 --- a/src/server/database/init.py +++ b/src/server/database/init.py @@ -2,12 +2,9 @@ This module provides comprehensive database initialization functionality: - Schema creation and validation -- Initial data migration - Database health checks - Schema versioning support -- Migration utilities -For production deployments, consider using Alembic for managed migrations. """ from __future__ import annotations @@ -47,7 +44,7 @@ EXPECTED_INDEXES = { "episodes": ["ix_episodes_series_id"], "download_queue": [ "ix_download_queue_series_id", - "ix_download_queue_status", + "ix_download_queue_episode_id", ], "user_sessions": [ "ix_user_sessions_session_id", @@ -316,7 +313,6 @@ async def get_schema_version(engine: Optional[AsyncEngine] = None) -> str: """Get current database schema version. Returns version string based on existing tables and structure. - For production, consider using Alembic versioning. Args: engine: Optional database engine (uses default if not provided) @@ -354,8 +350,6 @@ async def create_schema_version_table( ) -> None: """Create schema version tracking table. - Future enhancement for tracking schema migrations with Alembic. - Args: engine: Optional database engine (uses default if not provided) """ @@ -587,60 +581,6 @@ def get_database_info() -> Dict[str, Any]: } -def get_migration_guide() -> str: - """Get migration guide for production deployments. 
- - Returns: - Migration guide text - """ - return """ -Database Migration Guide -======================== - -Current Setup: SQLAlchemy create_all() -- Automatically creates tables on startup -- Suitable for development and single-instance deployments -- Schema changes require manual handling - -For Production with Alembic: -============================ - -1. Initialize Alembic (already installed): - alembic init alembic - -2. Configure alembic/env.py: - from src.server.database.base import Base - target_metadata = Base.metadata - -3. Configure alembic.ini: - sqlalchemy.url = - -4. Generate initial migration: - alembic revision --autogenerate -m "Initial schema v1.0.0" - -5. Review migration in alembic/versions/ - -6. Apply migration: - alembic upgrade head - -7. For future schema changes: - - Modify models in src/server/database/models.py - - Generate migration: alembic revision --autogenerate -m "Description" - - Review generated migration - - Test in staging environment - - Apply: alembic upgrade head - - For rollback: alembic downgrade -1 - -Best Practices: -============== -- Always backup database before migrations -- Test migrations in staging first -- Review auto-generated migrations carefully -- Keep migrations in version control -- Document breaking changes -""" - - # ============================================================================= # Public API # ============================================================================= @@ -656,7 +596,6 @@ __all__ = [ "check_database_health", "create_database_backup", "get_database_info", - "get_migration_guide", "CURRENT_SCHEMA_VERSION", "EXPECTED_TABLES", ] diff --git a/src/server/database/migrations.py b/src/server/database/migrations.py deleted file mode 100644 index 23f7183..0000000 --- a/src/server/database/migrations.py +++ /dev/null @@ -1,167 +0,0 @@ -"""Database migration utilities. - -This module provides utilities for database migrations and schema versioning. -Alembic integration can be added when needed for production environments. - -For now, we use SQLAlchemy's create_all for automatic schema creation. -""" -from __future__ import annotations - -import logging -from typing import Optional - -from sqlalchemy import text -from sqlalchemy.ext.asyncio import AsyncEngine - -from src.server.database.base import Base -from src.server.database.connection import get_engine, get_sync_engine - -logger = logging.getLogger(__name__) - - -async def initialize_schema(engine: Optional[AsyncEngine] = None) -> None: - """Initialize database schema. - - Creates all tables defined in Base metadata if they don't exist. - This is a simple migration strategy suitable for single-instance deployments. - - For production with multiple instances, consider using Alembic: - - alembic init alembic - - alembic revision --autogenerate -m "Initial schema" - - alembic upgrade head - - Args: - engine: Optional database engine (uses default if not provided) - - Raises: - RuntimeError: If database is not initialized - """ - if engine is None: - engine = get_engine() - - logger.info("Initializing database schema...") - - # Create all tables - async with engine.begin() as conn: - await conn.run_sync(Base.metadata.create_all) - - logger.info("Database schema initialized successfully") - - -async def check_schema_version(engine: Optional[AsyncEngine] = None) -> str: - """Check current database schema version. - - Returns a simple version identifier based on existing tables. - For production, consider using Alembic for proper versioning. 
- - Args: - engine: Optional database engine (uses default if not provided) - - Returns: - Schema version string - - Raises: - RuntimeError: If database is not initialized - """ - if engine is None: - engine = get_engine() - - async with engine.connect() as conn: - # Check which tables exist - result = await conn.execute( - text( - "SELECT name FROM sqlite_master " - "WHERE type='table' AND name NOT LIKE 'sqlite_%'" - ) - ) - tables = [row[0] for row in result] - - if not tables: - return "empty" - elif len(tables) == 4 and all( - t in tables for t in [ - "anime_series", - "episodes", - "download_queue", - "user_sessions", - ] - ): - return "v1.0" - else: - return "custom" - - -def get_migration_info() -> str: - """Get information about database migration setup. - - Returns: - Migration setup information - """ - return """ -Database Migration Information -============================== - -Current Strategy: SQLAlchemy create_all() -- Automatically creates tables on startup -- Suitable for development and single-instance deployments -- Schema changes require manual handling - -For Production Migrations (Alembic): -==================================== - -1. Initialize Alembic: - alembic init alembic - -2. Configure alembic/env.py: - - Import Base from src.server.database.base - - Set target_metadata = Base.metadata - -3. Configure alembic.ini: - - Set sqlalchemy.url to your database URL - -4. Generate initial migration: - alembic revision --autogenerate -m "Initial schema" - -5. Apply migrations: - alembic upgrade head - -6. For future changes: - - Modify models in src/server/database/models.py - - Generate migration: alembic revision --autogenerate -m "Description" - - Review generated migration in alembic/versions/ - - Apply: alembic upgrade head - -Benefits of Alembic: -- Version control for database schema -- Automatic migration generation from model changes -- Rollback support with downgrade scripts -- Multi-instance deployment support -- Safe schema changes in production -""" - - -# ============================================================================= -# Future Alembic Integration -# ============================================================================= -# -# When ready to use Alembic, follow these steps: -# -# 1. Install Alembic (already in requirements.txt): -# pip install alembic -# -# 2. Initialize Alembic from project root: -# alembic init alembic -# -# 3. Update alembic/env.py to use our Base: -# from src.server.database.base import Base -# target_metadata = Base.metadata -# -# 4. Configure alembic.ini with DATABASE_URL from settings -# -# 5. Generate initial migration: -# alembic revision --autogenerate -m "Initial schema" -# -# 6. Review generated migration and apply: -# alembic upgrade head -# -# ============================================================================= diff --git a/src/server/database/migrations/20250124_001_initial_schema.py b/src/server/database/migrations/20250124_001_initial_schema.py deleted file mode 100644 index f3ffcbc..0000000 --- a/src/server/database/migrations/20250124_001_initial_schema.py +++ /dev/null @@ -1,236 +0,0 @@ -""" -Initial database schema migration. - -This migration creates the base tables for the Aniworld application, -including users, anime, downloads, and configuration tables. 
- -Version: 20250124_001 -Created: 2025-01-24 -""" - -import logging - -from sqlalchemy import text -from sqlalchemy.ext.asyncio import AsyncSession - -from ..migrations.base import Migration, MigrationError - -logger = logging.getLogger(__name__) - - -class InitialSchemaMigration(Migration): - """ - Creates initial database schema. - - This migration sets up all core tables needed for the application: - - users: User accounts and authentication - - anime: Anime series metadata - - episodes: Episode information - - downloads: Download queue and history - - config: Application configuration - """ - - def __init__(self): - """Initialize the initial schema migration.""" - super().__init__( - version="20250124_001", - description="Create initial database schema", - ) - - async def upgrade(self, session: AsyncSession) -> None: - """ - Create all initial tables. - - Args: - session: Database session - - Raises: - MigrationError: If table creation fails - """ - try: - # Create users table - await session.execute( - text( - """ - CREATE TABLE IF NOT EXISTS users ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - username TEXT NOT NULL UNIQUE, - email TEXT, - password_hash TEXT NOT NULL, - is_active BOOLEAN DEFAULT 1, - is_admin BOOLEAN DEFAULT 0, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP - ) - """ - ) - ) - - # Create anime table - await session.execute( - text( - """ - CREATE TABLE IF NOT EXISTS anime ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - title TEXT NOT NULL, - original_title TEXT, - description TEXT, - genres TEXT, - release_year INTEGER, - status TEXT, - total_episodes INTEGER, - cover_image_url TEXT, - aniworld_url TEXT, - mal_id INTEGER, - anilist_id INTEGER, - added_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP - ) - """ - ) - ) - - # Create episodes table - await session.execute( - text( - """ - CREATE TABLE IF NOT EXISTS episodes ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - anime_id INTEGER NOT NULL, - episode_number INTEGER NOT NULL, - season_number INTEGER DEFAULT 1, - title TEXT, - description TEXT, - duration_minutes INTEGER, - air_date DATE, - stream_url TEXT, - download_url TEXT, - file_path TEXT, - file_size_bytes INTEGER, - is_downloaded BOOLEAN DEFAULT 0, - download_progress REAL DEFAULT 0.0, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - FOREIGN KEY (anime_id) REFERENCES anime(id) - ON DELETE CASCADE, - UNIQUE (anime_id, season_number, episode_number) - ) - """ - ) - ) - - # Create downloads table - await session.execute( - text( - """ - CREATE TABLE IF NOT EXISTS downloads ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - episode_id INTEGER NOT NULL, - user_id INTEGER, - status TEXT NOT NULL DEFAULT 'pending', - priority INTEGER DEFAULT 5, - progress REAL DEFAULT 0.0, - download_speed_mbps REAL, - eta_seconds INTEGER, - started_at TIMESTAMP, - completed_at TIMESTAMP, - failed_at TIMESTAMP, - error_message TEXT, - retry_count INTEGER DEFAULT 0, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - FOREIGN KEY (episode_id) REFERENCES episodes(id) - ON DELETE CASCADE, - FOREIGN KEY (user_id) REFERENCES users(id) - ON DELETE SET NULL - ) - """ - ) - ) - - # Create config table - await session.execute( - text( - """ - CREATE TABLE IF NOT EXISTS config ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - key TEXT NOT NULL UNIQUE, - value TEXT NOT NULL, - category TEXT DEFAULT 'general', - description 
TEXT, - is_secret BOOLEAN DEFAULT 0, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP - ) - """ - ) - ) - - # Create indexes for better performance - await session.execute( - text( - "CREATE INDEX IF NOT EXISTS idx_anime_title " - "ON anime(title)" - ) - ) - - await session.execute( - text( - "CREATE INDEX IF NOT EXISTS idx_episodes_anime_id " - "ON episodes(anime_id)" - ) - ) - - await session.execute( - text( - "CREATE INDEX IF NOT EXISTS idx_downloads_status " - "ON downloads(status)" - ) - ) - - await session.execute( - text( - "CREATE INDEX IF NOT EXISTS " - "idx_downloads_episode_id ON downloads(episode_id)" - ) - ) - - logger.info("Initial schema created successfully") - - except Exception as e: - logger.error(f"Failed to create initial schema: {e}") - raise MigrationError( - f"Initial schema creation failed: {e}" - ) from e - - async def downgrade(self, session: AsyncSession) -> None: - """ - Drop all initial tables. - - Args: - session: Database session - - Raises: - MigrationError: If table dropping fails - """ - try: - # Drop tables in reverse order to respect foreign keys - tables = [ - "downloads", - "episodes", - "anime", - "users", - "config", - ] - - for table in tables: - await session.execute(text(f"DROP TABLE IF EXISTS {table}")) - logger.debug(f"Dropped table: {table}") - - logger.info("Initial schema rolled back successfully") - - except Exception as e: - logger.error(f"Failed to rollback initial schema: {e}") - raise MigrationError( - f"Initial schema rollback failed: {e}" - ) from e diff --git a/src/server/database/migrations/__init__.py b/src/server/database/migrations/__init__.py deleted file mode 100644 index af4c9b0..0000000 --- a/src/server/database/migrations/__init__.py +++ /dev/null @@ -1,17 +0,0 @@ -""" -Database migration system for Aniworld application. - -This package provides tools for managing database schema changes, -including migration creation, execution, and rollback capabilities. -""" - -from .base import Migration, MigrationError -from .runner import MigrationRunner -from .validator import MigrationValidator - -__all__ = [ - "Migration", - "MigrationError", - "MigrationRunner", - "MigrationValidator", -] diff --git a/src/server/database/migrations/base.py b/src/server/database/migrations/base.py deleted file mode 100644 index 34c7df8..0000000 --- a/src/server/database/migrations/base.py +++ /dev/null @@ -1,128 +0,0 @@ -""" -Base migration classes and utilities. - -This module provides the foundation for database migrations, -including the abstract Migration class and error handling. -""" - -from abc import ABC, abstractmethod -from datetime import datetime -from typing import Optional - -from sqlalchemy.ext.asyncio import AsyncSession - - -class MigrationError(Exception): - """Base exception for migration-related errors.""" - - pass - - -class Migration(ABC): - """ - Abstract base class for database migrations. - - Each migration should inherit from this class and implement - the upgrade and downgrade methods. - - Attributes: - version: Unique version identifier (e.g., "20250124_001") - description: Human-readable description of the migration - created_at: Timestamp when migration was created - """ - - def __init__( - self, - version: str, - description: str, - created_at: Optional[datetime] = None, - ): - """ - Initialize migration. 
- - Args: - version: Unique version identifier - description: Human-readable description - created_at: Creation timestamp (defaults to now) - """ - self.version = version - self.description = description - self.created_at = created_at or datetime.now() - - @abstractmethod - async def upgrade(self, session: AsyncSession) -> None: - """ - Apply the migration. - - Args: - session: Database session for executing changes - - Raises: - MigrationError: If migration fails - """ - pass - - @abstractmethod - async def downgrade(self, session: AsyncSession) -> None: - """ - Revert the migration. - - Args: - session: Database session for reverting changes - - Raises: - MigrationError: If rollback fails - """ - pass - - def __repr__(self) -> str: - """Return string representation of migration.""" - return f"Migration({self.version}: {self.description})" - - def __eq__(self, other: object) -> bool: - """Check equality based on version.""" - if not isinstance(other, Migration): - return False - return self.version == other.version - - def __hash__(self) -> int: - """Return hash based on version.""" - return hash(self.version) - - -class MigrationHistory: - """ - Tracks applied migrations in the database. - - This model stores information about which migrations have been - applied, when they were applied, and their execution status. - """ - - __tablename__ = "migration_history" - - def __init__( - self, - version: str, - description: str, - applied_at: datetime, - execution_time_ms: int, - success: bool = True, - error_message: Optional[str] = None, - ): - """ - Initialize migration history record. - - Args: - version: Migration version identifier - description: Migration description - applied_at: Timestamp when migration was applied - execution_time_ms: Time taken to execute in milliseconds - success: Whether migration succeeded - error_message: Error message if migration failed - """ - self.version = version - self.description = description - self.applied_at = applied_at - self.execution_time_ms = execution_time_ms - self.success = success - self.error_message = error_message diff --git a/src/server/database/migrations/runner.py b/src/server/database/migrations/runner.py deleted file mode 100644 index 5bd74da..0000000 --- a/src/server/database/migrations/runner.py +++ /dev/null @@ -1,323 +0,0 @@ -""" -Migration runner for executing database migrations. - -This module handles the execution of migrations in the correct order, -tracks migration history, and provides rollback capabilities. -""" - -import importlib.util -import logging -import time -from datetime import datetime -from pathlib import Path -from typing import List, Optional - -from sqlalchemy import text -from sqlalchemy.ext.asyncio import AsyncSession - -from .base import Migration, MigrationError, MigrationHistory - -logger = logging.getLogger(__name__) - - -class MigrationRunner: - """ - Manages database migration execution and tracking. - - This class handles loading migrations, executing them in order, - tracking their status, and rolling back when needed. - """ - - def __init__(self, migrations_dir: Path, session: AsyncSession): - """ - Initialize migration runner. - - Args: - migrations_dir: Directory containing migration files - session: Database session for executing migrations - """ - self.migrations_dir = migrations_dir - self.session = session - self._migrations: List[Migration] = [] - - async def initialize(self) -> None: - """ - Initialize migration system by creating tracking table if needed. 
- - Raises: - MigrationError: If initialization fails - """ - try: - # Create migration_history table if it doesn't exist - create_table_sql = """ - CREATE TABLE IF NOT EXISTS migration_history ( - id INTEGER PRIMARY KEY AUTOINCREMENT, - version TEXT NOT NULL UNIQUE, - description TEXT NOT NULL, - applied_at TIMESTAMP NOT NULL, - execution_time_ms INTEGER NOT NULL, - success BOOLEAN NOT NULL DEFAULT 1, - error_message TEXT - ) - """ - await self.session.execute(text(create_table_sql)) - await self.session.commit() - logger.info("Migration system initialized") - except Exception as e: - logger.error(f"Failed to initialize migration system: {e}") - raise MigrationError(f"Initialization failed: {e}") from e - - def load_migrations(self) -> None: - """ - Load all migration files from the migrations directory. - - Migration files should be named in format: {version}_{description}.py - and contain a Migration class that inherits from base.Migration. - - Raises: - MigrationError: If loading migrations fails - """ - try: - self._migrations.clear() - - if not self.migrations_dir.exists(): - logger.warning(f"Migrations directory does not exist: {self.migrations_dir}") - return - - # Find all Python files in migrations directory - migration_files = sorted(self.migrations_dir.glob("*.py")) - migration_files = [f for f in migration_files if f.name != "__init__.py"] - - for file_path in migration_files: - try: - # Import the migration module dynamically - spec = importlib.util.spec_from_file_location( - f"migration.{file_path.stem}", file_path - ) - if spec and spec.loader: - module = importlib.util.module_from_spec(spec) - spec.loader.exec_module(module) - - # Find Migration subclass in module - for attr_name in dir(module): - attr = getattr(module, attr_name) - if ( - isinstance(attr, type) - and issubclass(attr, Migration) - and attr != Migration - ): - migration_instance = attr() - self._migrations.append(migration_instance) - logger.debug(f"Loaded migration: {migration_instance.version}") - break - - except Exception as e: - logger.error(f"Failed to load migration {file_path.name}: {e}") - raise MigrationError(f"Failed to load {file_path.name}: {e}") from e - - # Sort migrations by version - self._migrations.sort(key=lambda m: m.version) - logger.info(f"Loaded {len(self._migrations)} migrations") - - except Exception as e: - logger.error(f"Failed to load migrations: {e}") - raise MigrationError(f"Loading migrations failed: {e}") from e - - async def get_applied_migrations(self) -> List[str]: - """ - Get list of already applied migration versions. - - Returns: - List of migration versions that have been applied - - Raises: - MigrationError: If query fails - """ - try: - result = await self.session.execute( - text("SELECT version FROM migration_history WHERE success = 1 ORDER BY version") - ) - versions = [row[0] for row in result.fetchall()] - return versions - except Exception as e: - logger.error(f"Failed to get applied migrations: {e}") - raise MigrationError(f"Query failed: {e}") from e - - async def get_pending_migrations(self) -> List[Migration]: - """ - Get list of migrations that haven't been applied yet. - - Returns: - List of pending Migration objects - - Raises: - MigrationError: If check fails - """ - applied = await self.get_applied_migrations() - pending = [m for m in self._migrations if m.version not in applied] - return pending - - async def apply_migration(self, migration: Migration) -> None: - """ - Apply a single migration. 
- - Args: - migration: Migration to apply - - Raises: - MigrationError: If migration fails - """ - start_time = time.time() - success = False - error_message = None - - try: - logger.info(f"Applying migration: {migration.version} - {migration.description}") - - # Execute the migration - await migration.upgrade(self.session) - await self.session.commit() - - success = True - execution_time_ms = int((time.time() - start_time) * 1000) - - logger.info( - f"Migration {migration.version} applied successfully in {execution_time_ms}ms" - ) - - except Exception as e: - error_message = str(e) - execution_time_ms = int((time.time() - start_time) * 1000) - logger.error(f"Migration {migration.version} failed: {e}") - await self.session.rollback() - raise MigrationError(f"Migration {migration.version} failed: {e}") from e - - finally: - # Record migration in history - try: - history_record = MigrationHistory( - version=migration.version, - description=migration.description, - applied_at=datetime.now(), - execution_time_ms=execution_time_ms, - success=success, - error_message=error_message, - ) - - insert_sql = """ - INSERT INTO migration_history - (version, description, applied_at, execution_time_ms, success, error_message) - VALUES (:version, :description, :applied_at, :execution_time_ms, :success, :error_message) - """ - - await self.session.execute( - text(insert_sql), - { - "version": history_record.version, - "description": history_record.description, - "applied_at": history_record.applied_at, - "execution_time_ms": history_record.execution_time_ms, - "success": history_record.success, - "error_message": history_record.error_message, - }, - ) - await self.session.commit() - - except Exception as e: - logger.error(f"Failed to record migration history: {e}") - - async def run_migrations(self, target_version: Optional[str] = None) -> int: - """ - Run all pending migrations up to target version. - - Args: - target_version: Stop at this version (None = run all) - - Returns: - Number of migrations applied - - Raises: - MigrationError: If migrations fail - """ - pending = await self.get_pending_migrations() - - if target_version: - pending = [m for m in pending if m.version <= target_version] - - if not pending: - logger.info("No pending migrations to apply") - return 0 - - logger.info(f"Applying {len(pending)} pending migrations") - - for migration in pending: - await self.apply_migration(migration) - - return len(pending) - - async def rollback_migration(self, migration: Migration) -> None: - """ - Rollback a single migration. - - Args: - migration: Migration to rollback - - Raises: - MigrationError: If rollback fails - """ - start_time = time.time() - - try: - logger.info(f"Rolling back migration: {migration.version}") - - # Execute the downgrade - await migration.downgrade(self.session) - await self.session.commit() - - execution_time_ms = int((time.time() - start_time) * 1000) - - # Remove from history - delete_sql = "DELETE FROM migration_history WHERE version = :version" - await self.session.execute(text(delete_sql), {"version": migration.version}) - await self.session.commit() - - logger.info( - f"Migration {migration.version} rolled back successfully in {execution_time_ms}ms" - ) - - except Exception as e: - logger.error(f"Rollback of {migration.version} failed: {e}") - await self.session.rollback() - raise MigrationError(f"Rollback of {migration.version} failed: {e}") from e - - async def rollback(self, steps: int = 1) -> int: - """ - Rollback the last N migrations. 
- - Args: - steps: Number of migrations to rollback - - Returns: - Number of migrations rolled back - - Raises: - MigrationError: If rollback fails - """ - applied = await self.get_applied_migrations() - - if not applied: - logger.info("No migrations to rollback") - return 0 - - # Get migrations to rollback (in reverse order) - to_rollback = applied[-steps:] - to_rollback.reverse() - - migrations_to_rollback = [m for m in self._migrations if m.version in to_rollback] - - logger.info(f"Rolling back {len(migrations_to_rollback)} migrations") - - for migration in migrations_to_rollback: - await self.rollback_migration(migration) - - return len(migrations_to_rollback) diff --git a/src/server/database/migrations/validator.py b/src/server/database/migrations/validator.py deleted file mode 100644 index c91c55c..0000000 --- a/src/server/database/migrations/validator.py +++ /dev/null @@ -1,222 +0,0 @@ -""" -Migration validator for ensuring migration safety and integrity. - -This module provides validation utilities to check migrations -before they are executed, ensuring they meet quality standards. -""" - -import logging -from typing import List, Optional, Set - -from .base import Migration, MigrationError - -logger = logging.getLogger(__name__) - - -class MigrationValidator: - """ - Validates migrations before execution. - - Performs various checks to ensure migrations are safe to run, - including version uniqueness, naming conventions, and - dependency resolution. - """ - - def __init__(self): - """Initialize migration validator.""" - self.errors: List[str] = [] - self.warnings: List[str] = [] - - def reset(self) -> None: - """Clear validation results.""" - self.errors.clear() - self.warnings.clear() - - def validate_migration(self, migration: Migration) -> bool: - """ - Validate a single migration. - - Args: - migration: Migration to validate - - Returns: - True if migration is valid, False otherwise - """ - self.reset() - - # Check version format - if not self._validate_version_format(migration.version): - self.errors.append( - f"Invalid version format: {migration.version}. " - "Expected format: YYYYMMDD_NNN" - ) - - # Check description - if not migration.description or len(migration.description) < 5: - self.errors.append( - f"Migration {migration.version} has invalid " - f"description: '{migration.description}'" - ) - - # Check for implementation - if not hasattr(migration, "upgrade") or not callable( - getattr(migration, "upgrade") - ): - self.errors.append( - f"Migration {migration.version} missing upgrade method" - ) - - if not hasattr(migration, "downgrade") or not callable( - getattr(migration, "downgrade") - ): - self.errors.append( - f"Migration {migration.version} missing downgrade method" - ) - - return len(self.errors) == 0 - - def validate_migrations(self, migrations: List[Migration]) -> bool: - """ - Validate a list of migrations. 
- - Args: - migrations: List of migrations to validate - - Returns: - True if all migrations are valid, False otherwise - """ - self.reset() - - if not migrations: - self.warnings.append("No migrations to validate") - return True - - # Check for duplicate versions - versions: Set[str] = set() - for migration in migrations: - if migration.version in versions: - self.errors.append( - f"Duplicate migration version: {migration.version}" - ) - versions.add(migration.version) - - # Return early if duplicates found - if self.errors: - return False - - # Validate each migration - for migration in migrations: - if not self.validate_migration(migration): - logger.error( - f"Migration {migration.version} " - f"validation failed: {self.errors}" - ) - return False - - # Check version ordering - sorted_versions = sorted([m.version for m in migrations]) - actual_versions = [m.version for m in migrations] - if sorted_versions != actual_versions: - self.warnings.append( - "Migrations are not in chronological order" - ) - - return len(self.errors) == 0 - - def _validate_version_format(self, version: str) -> bool: - """ - Validate version string format. - - Args: - version: Version string to validate - - Returns: - True if format is valid - """ - # Expected format: YYYYMMDD_NNN or YYYYMMDD_NNN_description - if not version: - return False - - parts = version.split("_") - if len(parts) < 2: - return False - - # Check date part (YYYYMMDD) - date_part = parts[0] - if len(date_part) != 8 or not date_part.isdigit(): - return False - - # Check sequence part (NNN) - seq_part = parts[1] - if not seq_part.isdigit(): - return False - - return True - - def check_migration_conflicts( - self, - pending: List[Migration], - applied: List[str], - ) -> Optional[str]: - """ - Check for conflicts between pending and applied migrations. - - Args: - pending: List of pending migrations - applied: List of applied migration versions - - Returns: - Error message if conflicts found, None otherwise - """ - # Check if any pending migration has version lower than applied - if not applied: - return None - - latest_applied = max(applied) - - for migration in pending: - if migration.version < latest_applied: - return ( - f"Migration {migration.version} is older than " - f"latest applied migration {latest_applied}. " - "This may indicate a merge conflict." - ) - - return None - - def get_validation_report(self) -> str: - """ - Get formatted validation report. - - Returns: - Formatted report string - """ - report = [] - - if self.errors: - report.append("Validation Errors:") - for error in self.errors: - report.append(f" - {error}") - - if self.warnings: - report.append("Validation Warnings:") - for warning in self.warnings: - report.append(f" - {warning}") - - if not self.errors and not self.warnings: - report.append("All validations passed") - - return "\n".join(report) - - def raise_if_invalid(self) -> None: - """ - Raise exception if validation failed. 
- - Raises: - MigrationError: If validation errors exist - """ - if self.errors: - error_msg = "\n".join(self.errors) - raise MigrationError( - f"Migration validation failed:\n{error_msg}" - ) diff --git a/src/server/database/models.py b/src/server/database/models.py index 077a3a0..6d58ceb 100644 --- a/src/server/database/models.py +++ b/src/server/database/models.py @@ -15,18 +15,7 @@ from datetime import datetime, timezone from enum import Enum from typing import List, Optional -from sqlalchemy import ( - JSON, - Boolean, - DateTime, - Float, - ForeignKey, - Integer, - String, - Text, - func, -) -from sqlalchemy import Enum as SQLEnum +from sqlalchemy import Boolean, DateTime, ForeignKey, Integer, String, Text, func from sqlalchemy.orm import Mapped, mapped_column, relationship, validates from src.server.database.base import Base, TimestampMixin @@ -51,10 +40,6 @@ class AnimeSeries(Base, TimestampMixin): name: Display name of the series site: Provider site URL folder: Filesystem folder name (metadata only, not for lookups) - description: Optional series description - status: Current status (ongoing, completed, etc.) - total_episodes: Total number of episodes - cover_url: URL to series cover image episodes: Relationship to Episode models (via id foreign key) download_items: Relationship to DownloadQueueItem models (via id foreign key) created_at: Creation timestamp (from TimestampMixin) @@ -89,30 +74,6 @@ class AnimeSeries(Base, TimestampMixin): doc="Filesystem folder name - METADATA ONLY, not for lookups" ) - # Metadata - description: Mapped[Optional[str]] = mapped_column( - Text, nullable=True, - doc="Series description" - ) - status: Mapped[Optional[str]] = mapped_column( - String(50), nullable=True, - doc="Series status (ongoing, completed, etc.)" - ) - total_episodes: Mapped[Optional[int]] = mapped_column( - Integer, nullable=True, - doc="Total number of episodes" - ) - cover_url: Mapped[Optional[str]] = mapped_column( - String(1000), nullable=True, - doc="URL to cover image" - ) - - # JSON field for episode dictionary (season -> [episodes]) - episode_dict: Mapped[Optional[dict]] = mapped_column( - JSON, nullable=True, - doc="Episode dictionary {season: [episodes]}" - ) - # Relationships episodes: Mapped[List["Episode"]] = relationship( "Episode", @@ -161,22 +122,6 @@ class AnimeSeries(Base, TimestampMixin): raise ValueError("Folder path must be 1000 characters or less") return value.strip() - @validates('cover_url') - def validate_cover_url(self, key: str, value: Optional[str]) -> Optional[str]: - """Validate cover URL length.""" - if value is not None and len(value) > 1000: - raise ValueError("Cover URL must be 1000 characters or less") - return value - - @validates('total_episodes') - def validate_total_episodes(self, key: str, value: Optional[int]) -> Optional[int]: - """Validate total episodes is positive.""" - if value is not None and value < 0: - raise ValueError("Total episodes must be non-negative") - if value is not None and value > 10000: - raise ValueError("Total episodes must be 10000 or less") - return value - def __repr__(self) -> str: return f"" @@ -194,9 +139,7 @@ class Episode(Base, TimestampMixin): episode_number: Episode number within season title: Episode title file_path: Local file path if downloaded - file_size: File size in bytes is_downloaded: Whether episode is downloaded - download_date: When episode was downloaded series: Relationship to AnimeSeries created_at: Creation timestamp (from TimestampMixin) updated_at: Last update timestamp (from TimestampMixin) 
@@ -234,18 +177,10 @@ class Episode(Base, TimestampMixin): String(1000), nullable=True, doc="Local file path" ) - file_size: Mapped[Optional[int]] = mapped_column( - Integer, nullable=True, - doc="File size in bytes" - ) is_downloaded: Mapped[bool] = mapped_column( Boolean, default=False, nullable=False, doc="Whether episode is downloaded" ) - download_date: Mapped[Optional[datetime]] = mapped_column( - DateTime(timezone=True), nullable=True, - doc="When episode was downloaded" - ) # Relationship series: Mapped["AnimeSeries"] = relationship( @@ -287,13 +222,6 @@ class Episode(Base, TimestampMixin): raise ValueError("File path must be 1000 characters or less") return value - @validates('file_size') - def validate_file_size(self, key: str, value: Optional[int]) -> Optional[int]: - """Validate file size is non-negative.""" - if value is not None and value < 0: - raise ValueError("File size must be non-negative") - return value - def __repr__(self) -> str: return ( f" int: - """Validate season number is positive.""" - if value < 0: - raise ValueError("Season number must be non-negative") - if value > 1000: - raise ValueError("Season number must be 1000 or less") - return value - - @validates('episode_number') - def validate_episode_number(self, key: str, value: int) -> int: - """Validate episode number is positive.""" - if value < 0: - raise ValueError("Episode number must be non-negative") - if value > 10000: - raise ValueError("Episode number must be 10000 or less") - return value - - @validates('progress_percent') - def validate_progress_percent(self, key: str, value: float) -> float: - """Validate progress is between 0 and 100.""" - if value < 0.0: - raise ValueError("Progress percent must be non-negative") - if value > 100.0: - raise ValueError("Progress percent cannot exceed 100") - return value - - @validates('downloaded_bytes') - def validate_downloaded_bytes(self, key: str, value: int) -> int: - """Validate downloaded bytes is non-negative.""" - if value < 0: - raise ValueError("Downloaded bytes must be non-negative") - return value - - @validates('total_bytes') - def validate_total_bytes( - self, key: str, value: Optional[int] - ) -> Optional[int]: - """Validate total bytes is non-negative.""" - if value is not None and value < 0: - raise ValueError("Total bytes must be non-negative") - return value - - @validates('download_speed') - def validate_download_speed( - self, key: str, value: Optional[float] - ) -> Optional[float]: - """Validate download speed is non-negative.""" - if value is not None and value < 0.0: - raise ValueError("Download speed must be non-negative") - return value - - @validates('retry_count') - def validate_retry_count(self, key: str, value: int) -> int: - """Validate retry count is non-negative.""" - if value < 0: - raise ValueError("Retry count must be non-negative") - if value > 100: - raise ValueError("Retry count cannot exceed 100") - return value + episode: Mapped["Episode"] = relationship( + "Episode" + ) @validates('download_url') def validate_download_url( @@ -523,8 +346,7 @@ class DownloadQueueItem(Base, TimestampMixin): return ( f"" + f"episode_id={self.episode_id})>" ) diff --git a/src/server/database/service.py b/src/server/database/service.py index 5cd5dde..5b13f9c 100644 --- a/src/server/database/service.py +++ b/src/server/database/service.py @@ -9,13 +9,22 @@ Services: - DownloadQueueService: CRUD operations for download queue - UserSessionService: CRUD operations for user sessions +Transaction Support: + All services are designed to work within 
transaction boundaries. + Individual operations use flush() instead of commit() to allow + the caller to control transaction boundaries. + + For compound operations spanning multiple services, use the + @transactional decorator or atomic() context manager from + src.server.database.transaction. + All services support both async and sync operations for flexibility. """ from __future__ import annotations import logging from datetime import datetime, timedelta, timezone -from typing import Dict, List, Optional +from typing import List, Optional from sqlalchemy import delete, select, update from sqlalchemy.ext.asyncio import AsyncSession @@ -23,9 +32,7 @@ from sqlalchemy.orm import Session, selectinload from src.server.database.models import ( AnimeSeries, - DownloadPriority, DownloadQueueItem, - DownloadStatus, Episode, UserSession, ) @@ -57,11 +64,6 @@ class AnimeSeriesService: name: str, site: str, folder: str, - description: Optional[str] = None, - status: Optional[str] = None, - total_episodes: Optional[int] = None, - cover_url: Optional[str] = None, - episode_dict: Optional[Dict] = None, ) -> AnimeSeries: """Create a new anime series. @@ -71,11 +73,6 @@ class AnimeSeriesService: name: Series name site: Provider site URL folder: Local filesystem path - description: Optional series description - status: Optional series status - total_episodes: Optional total episode count - cover_url: Optional cover image URL - episode_dict: Optional episode dictionary Returns: Created AnimeSeries instance @@ -88,11 +85,6 @@ class AnimeSeriesService: name=name, site=site, folder=folder, - description=description, - status=status, - total_episodes=total_episodes, - cover_url=cover_url, - episode_dict=episode_dict, ) db.add(series) await db.flush() @@ -262,7 +254,6 @@ class EpisodeService: episode_number: int, title: Optional[str] = None, file_path: Optional[str] = None, - file_size: Optional[int] = None, is_downloaded: bool = False, ) -> Episode: """Create a new episode. @@ -274,7 +265,6 @@ class EpisodeService: episode_number: Episode number within season title: Optional episode title file_path: Optional local file path - file_size: Optional file size in bytes is_downloaded: Whether episode is downloaded Returns: @@ -286,9 +276,7 @@ class EpisodeService: episode_number=episode_number, title=title, file_path=file_path, - file_size=file_size, is_downloaded=is_downloaded, - download_date=datetime.now(timezone.utc) if is_downloaded else None, ) db.add(episode) await db.flush() @@ -372,7 +360,6 @@ class EpisodeService: db: AsyncSession, episode_id: int, file_path: str, - file_size: int, ) -> Optional[Episode]: """Mark episode as downloaded. @@ -380,7 +367,6 @@ class EpisodeService: db: Database session episode_id: Episode primary key file_path: Local file path - file_size: File size in bytes Returns: Updated Episode instance or None if not found @@ -391,8 +377,6 @@ class EpisodeService: episode.is_downloaded = True episode.file_path = file_path - episode.file_size = file_size - episode.download_date = datetime.now(timezone.utc) await db.flush() await db.refresh(episode) @@ -418,6 +402,96 @@ class EpisodeService: ) return result.rowcount > 0 + @staticmethod + async def delete_by_series_and_episode( + db: AsyncSession, + series_key: str, + season: int, + episode_number: int, + ) -> bool: + """Delete episode by series key, season, and episode number. + + Used to remove episodes from the missing list when they are + downloaded successfully. 
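+
+        Example (illustrative sketch; the series key is hypothetical):
+            await EpisodeService.delete_by_series_and_episode(
+                db, "attack-on-titan", season=1, episode_number=3
+            )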
+ + Args: + db: Database session + series_key: Unique provider key for the series + season: Season number + episode_number: Episode number within season + + Returns: + True if deleted, False if not found + """ + # First get the series by key + series = await AnimeSeriesService.get_by_key(db, series_key) + if not series: + logger.warning( + f"Series not found for key: {series_key}" + ) + return False + + # Then delete the episode + result = await db.execute( + delete(Episode).where( + Episode.series_id == series.id, + Episode.season == season, + Episode.episode_number == episode_number, + ) + ) + deleted = result.rowcount > 0 + if deleted: + logger.info( + f"Removed episode from missing list: " + f"{series_key} S{season:02d}E{episode_number:02d}" + ) + return deleted + + @staticmethod + async def bulk_mark_downloaded( + db: AsyncSession, + episode_ids: List[int], + file_paths: Optional[List[str]] = None, + ) -> int: + """Mark multiple episodes as downloaded atomically. + + This operation should be wrapped in a transaction for atomicity. + All episodes will be updated or none if an error occurs. + + Args: + db: Database session + episode_ids: List of episode primary keys to update + file_paths: Optional list of file paths (parallel to episode_ids) + + Returns: + Number of episodes updated + + Note: + Use within @transactional or atomic() for guaranteed atomicity: + + async with atomic(db) as tx: + count = await EpisodeService.bulk_mark_downloaded( + db, episode_ids, file_paths + ) + """ + if not episode_ids: + return 0 + + updated_count = 0 + + for i, episode_id in enumerate(episode_ids): + episode = await EpisodeService.get_by_id(db, episode_id) + if episode: + episode.is_downloaded = True + if file_paths and i < len(file_paths): + episode.file_path = file_paths[i] + updated_count += 1 + + await db.flush() + logger.info(f"Bulk marked {updated_count} episodes as downloaded") + + return updated_count + # ============================================================================ # Download Queue Service @@ -427,17 +501,18 @@ class EpisodeService: class DownloadQueueService: """Service for download queue CRUD operations. - Provides methods for managing the download queue with status tracking, - priority management, and progress updates. + Provides methods for managing the download queue. + + Transaction Support: + All operations use flush() for transaction-safe operation. + For bulk operations, use @transactional or atomic() context. 
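+
+    Example (illustrative sketch, assuming an initialized AsyncSession
+    and already-persisted series/episode rows):
+        async with atomic(db) as tx:
+            item = await DownloadQueueService.create(
+                db, series_id=series.id, episode_id=episode.id
+            )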
""" @staticmethod async def create( db: AsyncSession, series_id: int, - season: int, - episode_number: int, - priority: DownloadPriority = DownloadPriority.NORMAL, + episode_id: int, download_url: Optional[str] = None, file_destination: Optional[str] = None, ) -> DownloadQueueItem: @@ -446,9 +521,7 @@ class DownloadQueueService: Args: db: Database session series_id: Foreign key to AnimeSeries - season: Season number - episode_number: Episode number - priority: Download priority + episode_id: Foreign key to Episode download_url: Optional provider download URL file_destination: Optional target file path @@ -457,10 +530,7 @@ class DownloadQueueService: """ item = DownloadQueueItem( series_id=series_id, - season=season, - episode_number=episode_number, - status=DownloadStatus.PENDING, - priority=priority, + episode_id=episode_id, download_url=download_url, file_destination=file_destination, ) @@ -468,8 +538,8 @@ class DownloadQueueService: await db.flush() await db.refresh(item) logger.info( - f"Added to download queue: S{season:02d}E{episode_number:02d} " - f"for series_id={series_id} with priority={priority}" + f"Added to download queue: episode_id={episode_id} " + f"for series_id={series_id}" ) return item @@ -493,68 +563,25 @@ class DownloadQueueService: return result.scalar_one_or_none() @staticmethod - async def get_by_status( + async def get_by_episode( db: AsyncSession, - status: DownloadStatus, - limit: Optional[int] = None, - ) -> List[DownloadQueueItem]: - """Get download queue items by status. + episode_id: int, + ) -> Optional[DownloadQueueItem]: + """Get download queue item by episode ID. Args: db: Database session - status: Download status filter - limit: Optional limit for results + episode_id: Foreign key to Episode Returns: - List of DownloadQueueItem instances + DownloadQueueItem instance or None if not found """ - query = select(DownloadQueueItem).where( - DownloadQueueItem.status == status - ) - - # Order by priority (HIGH first) then creation time - query = query.order_by( - DownloadQueueItem.priority.desc(), - DownloadQueueItem.created_at.asc(), - ) - - if limit: - query = query.limit(limit) - - result = await db.execute(query) - return list(result.scalars().all()) - - @staticmethod - async def get_pending( - db: AsyncSession, - limit: Optional[int] = None, - ) -> List[DownloadQueueItem]: - """Get pending download queue items. - - Args: - db: Database session - limit: Optional limit for results - - Returns: - List of pending DownloadQueueItem instances ordered by priority - """ - return await DownloadQueueService.get_by_status( - db, DownloadStatus.PENDING, limit - ) - - @staticmethod - async def get_active(db: AsyncSession) -> List[DownloadQueueItem]: - """Get active download queue items. 
- - Args: - db: Database session - - Returns: - List of downloading DownloadQueueItem instances - """ - return await DownloadQueueService.get_by_status( - db, DownloadStatus.DOWNLOADING + result = await db.execute( + select(DownloadQueueItem).where( + DownloadQueueItem.episode_id == episode_id + ) ) + return result.scalar_one_or_none() @staticmethod async def get_all( @@ -576,7 +603,6 @@ class DownloadQueueService: query = query.options(selectinload(DownloadQueueItem.series)) query = query.order_by( - DownloadQueueItem.priority.desc(), DownloadQueueItem.created_at.asc(), ) @@ -584,19 +610,17 @@ class DownloadQueueService: return list(result.scalars().all()) @staticmethod - async def update_status( + async def set_error( db: AsyncSession, item_id: int, - status: DownloadStatus, - error_message: Optional[str] = None, + error_message: str, ) -> Optional[DownloadQueueItem]: - """Update download queue item status. + """Set error message on download queue item. Args: db: Database session item_id: Item primary key - status: New download status - error_message: Optional error message for failed status + error_message: Error description Returns: Updated DownloadQueueItem instance or None if not found @@ -605,61 +629,11 @@ class DownloadQueueService: if not item: return None - item.status = status - - # Update timestamps based on status - if status == DownloadStatus.DOWNLOADING and not item.started_at: - item.started_at = datetime.now(timezone.utc) - elif status in (DownloadStatus.COMPLETED, DownloadStatus.FAILED): - item.completed_at = datetime.now(timezone.utc) - - # Set error message for failed downloads - if status == DownloadStatus.FAILED and error_message: - item.error_message = error_message - item.retry_count += 1 - - await db.flush() - await db.refresh(item) - logger.debug(f"Updated download queue item {item_id} status to {status}") - return item - - @staticmethod - async def update_progress( - db: AsyncSession, - item_id: int, - progress_percent: float, - downloaded_bytes: int, - total_bytes: Optional[int] = None, - download_speed: Optional[float] = None, - ) -> Optional[DownloadQueueItem]: - """Update download progress. - - Args: - db: Database session - item_id: Item primary key - progress_percent: Progress percentage (0-100) - downloaded_bytes: Bytes downloaded - total_bytes: Optional total file size - download_speed: Optional current speed (bytes/sec) - - Returns: - Updated DownloadQueueItem instance or None if not found - """ - item = await DownloadQueueService.get_by_id(db, item_id) - if not item: - return None - - item.progress_percent = progress_percent - item.downloaded_bytes = downloaded_bytes - - if total_bytes is not None: - item.total_bytes = total_bytes - - if download_speed is not None: - item.download_speed = download_speed + item.error_message = error_message await db.flush() await db.refresh(item) + logger.debug(f"Set error on download queue item {item_id}") return item @staticmethod @@ -682,57 +656,87 @@ class DownloadQueueService: return deleted @staticmethod - async def clear_completed(db: AsyncSession) -> int: - """Clear completed downloads from queue. + async def delete_by_episode( + db: AsyncSession, + episode_id: int, + ) -> bool: + """Delete download queue item by episode ID. 
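+
+        Used, for example, when an episode leaves the queue after a
+        successful download.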
Args: db: Database session + episode_id: Foreign key to Episode Returns: - Number of items cleared + True if deleted, False if not found """ result = await db.execute( delete(DownloadQueueItem).where( - DownloadQueueItem.status == DownloadStatus.COMPLETED + DownloadQueueItem.episode_id == episode_id ) ) - count = result.rowcount - logger.info(f"Cleared {count} completed downloads from queue") - return count - + deleted = result.rowcount > 0 + if deleted: + logger.info( + f"Deleted download queue item with episode_id={episode_id}" + ) + return deleted + @staticmethod - async def retry_failed( + async def bulk_delete( db: AsyncSession, - max_retries: int = 3, - ) -> List[DownloadQueueItem]: - """Retry failed downloads that haven't exceeded max retries. + item_ids: List[int], + ) -> int: + """Delete multiple download queue items atomically. + + This operation should be wrapped in a transaction for atomicity. + All items will be deleted or none if an error occurs. Args: db: Database session - max_retries: Maximum number of retry attempts + item_ids: List of item primary keys to delete Returns: - List of items marked for retry + Number of items deleted + + Note: + Use within @transactional or atomic() for guaranteed atomicity: + + async with atomic(db) as tx: + count = await DownloadQueueService.bulk_delete(db, item_ids) """ + if not item_ids: + return 0 + result = await db.execute( - select(DownloadQueueItem).where( - DownloadQueueItem.status == DownloadStatus.FAILED, - DownloadQueueItem.retry_count < max_retries, + delete(DownloadQueueItem).where( + DownloadQueueItem.id.in_(item_ids) ) ) - items = list(result.scalars().all()) - for item in items: - item.status = DownloadStatus.PENDING - item.error_message = None - item.progress_percent = 0.0 - item.downloaded_bytes = 0 - item.started_at = None - item.completed_at = None + count = result.rowcount + logger.info(f"Bulk deleted {count} download queue items") - await db.flush() - logger.info(f"Marked {len(items)} failed downloads for retry") - return items + return count + + @staticmethod + async def clear_all( + db: AsyncSession, + ) -> int: + """Clear all download queue items. + + Deletes all items from the download queue. This operation + should be wrapped in a transaction. + + Args: + db: Database session + + Returns: + Number of items deleted + """ + result = await db.execute(delete(DownloadQueueItem)) + count = result.rowcount + logger.info(f"Cleared all {count} download queue items") + return count # ============================================================================ @@ -744,6 +748,10 @@ class UserSessionService: """Service for user session CRUD operations. Provides methods for managing user authentication sessions with JWT tokens. + + Transaction Support: + Session rotation and cleanup operations should use transactions + for atomicity when multiple sessions are involved. """ @staticmethod @@ -875,6 +883,9 @@ class UserSessionService: async def cleanup_expired(db: AsyncSession) -> int: """Clean up expired sessions. + This is a bulk delete operation that should be wrapped in + a transaction for atomicity when multiple sessions are deleted. 
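+
+        Example (illustrative):
+            async with atomic(db) as tx:
+                removed = await UserSessionService.cleanup_expired(db)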
+ Args: db: Database session @@ -889,3 +900,66 @@ class UserSessionService: count = result.rowcount logger.info(f"Cleaned up {count} expired sessions") return count + + @staticmethod + async def rotate_session( + db: AsyncSession, + old_session_id: str, + new_session_id: str, + new_token_hash: str, + new_expires_at: datetime, + user_id: Optional[str] = None, + ip_address: Optional[str] = None, + user_agent: Optional[str] = None, + ) -> Optional[UserSession]: + """Rotate a session by revoking old and creating new atomically. + + This compound operation revokes the old session and creates a new + one. Should be wrapped in a transaction for atomicity. + + Args: + db: Database session + old_session_id: Session ID to revoke + new_session_id: New session ID + new_token_hash: New token hash + new_expires_at: New expiration time + user_id: Optional user identifier + ip_address: Optional client IP + user_agent: Optional user agent + + Returns: + New UserSession instance, or None if old session not found + + Note: + Use within @transactional or atomic() for atomicity: + + async with atomic(db) as tx: + new_session = await UserSessionService.rotate_session( + db, old_id, new_id, hash, expires + ) + """ + # Revoke old session + old_revoked = await UserSessionService.revoke(db, old_session_id) + if not old_revoked: + logger.warning( + f"Could not rotate: old session {old_session_id} not found" + ) + return None + + # Create new session + new_session = await UserSessionService.create( + db=db, + session_id=new_session_id, + token_hash=new_token_hash, + expires_at=new_expires_at, + user_id=user_id, + ip_address=ip_address, + user_agent=user_agent, + ) + + logger.info( + f"Rotated session: {old_session_id} -> {new_session_id}" + ) + + return new_session + diff --git a/src/server/database/transaction.py b/src/server/database/transaction.py new file mode 100644 index 0000000..f587c8d --- /dev/null +++ b/src/server/database/transaction.py @@ -0,0 +1,715 @@ +"""Transaction management utilities for SQLAlchemy. + +This module provides transaction management utilities including decorators, +context managers, and helper functions for ensuring data consistency +across database operations. + +Components: + - @transactional decorator: Wraps functions in transaction boundaries + - TransactionContext: Sync context manager for explicit transaction control + - atomic(): Async context manager for async operations + - TransactionPropagation: Enum for transaction propagation modes + +Usage: + @transactional + async def compound_operation(session: AsyncSession, data: Model) -> Result: + # Multiple write operations here + # All succeed or all fail + pass + + async with atomic(session) as tx: + # Operations here + async with tx.savepoint() as sp: + # Nested operations with partial rollback capability + pass +""" +from __future__ import annotations + +import functools +import logging +from contextlib import asynccontextmanager, contextmanager +from enum import Enum +from typing import ( + Any, + AsyncGenerator, + Callable, + Generator, + Optional, + ParamSpec, + TypeVar, +) + +from sqlalchemy.ext.asyncio import AsyncSession +from sqlalchemy.orm import Session + +logger = logging.getLogger(__name__) + +# Type variables for generic typing +T = TypeVar("T") +P = ParamSpec("P") + + +class TransactionPropagation(Enum): + """Transaction propagation behavior options. + + Defines how transactions should behave when called within + an existing transaction context. 
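+
+    The names mirror the propagation semantics used by common
+    transaction frameworks (e.g. Spring). A hypothetical sketch
+    (whether ``transactional`` accepts a ``propagation`` keyword is an
+    assumption, not shown in this module):
+
+        @transactional(propagation=TransactionPropagation.NESTED)
+        async def adjust_queue(session: AsyncSession) -> None:
+            ...  # runs inside a savepoint of the caller's transaction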
+ + Values: + REQUIRED: Use existing transaction or create new one (default) + REQUIRES_NEW: Always create a new transaction (suspend existing) + NESTED: Create a savepoint within existing transaction + """ + + REQUIRED = "required" + REQUIRES_NEW = "requires_new" + NESTED = "nested" + + +class TransactionError(Exception): + """Exception raised for transaction-related errors.""" + + +class TransactionContext: + """Synchronous context manager for explicit transaction control. + + Provides a clean interface for managing database transactions with + automatic commit/rollback semantics and savepoint support. + + Attributes: + session: SQLAlchemy Session instance + _savepoint_count: Counter for nested savepoints + + Example: + with TransactionContext(session) as tx: + # Database operations here + with tx.savepoint() as sp: + # Nested operations with partial rollback + pass + """ + + def __init__(self, session: Session) -> None: + """Initialize transaction context. + + Args: + session: SQLAlchemy sync session + """ + self.session = session + self._savepoint_count = 0 + self._committed = False + + def __enter__(self) -> "TransactionContext": + """Enter transaction context. + + Begins a new transaction if not already in one. + + Returns: + Self for context manager protocol + """ + logger.debug("Entering transaction context") + + # Check if session is already in a transaction + if not self.session.in_transaction(): + self.session.begin() + logger.debug("Started new transaction") + + return self + + def __exit__( + self, + exc_type: Optional[type], + exc_val: Optional[BaseException], + exc_tb: Optional[Any], + ) -> bool: + """Exit transaction context. + + Commits on success, rolls back on exception. + + Args: + exc_type: Exception type if raised + exc_val: Exception value if raised + exc_tb: Exception traceback if raised + + Returns: + False to propagate exceptions + """ + if exc_type is not None: + logger.warning( + "Transaction rollback due to exception: %s: %s", + exc_type.__name__, + exc_val, + ) + self.session.rollback() + return False + + if not self._committed: + self.session.commit() + logger.debug("Transaction committed") + self._committed = True + + return False + + @contextmanager + def savepoint(self, name: Optional[str] = None) -> Generator["SavepointContext", None, None]: + """Create a savepoint for partial rollback capability. + + Savepoints allow nested transactions where inner operations + can be rolled back without affecting outer operations. + + Args: + name: Optional savepoint name (auto-generated if not provided) + + Yields: + SavepointContext for nested transaction control + + Example: + with tx.savepoint() as sp: + # Operations here can be rolled back independently + if error_condition: + sp.rollback() + """ + self._savepoint_count += 1 + savepoint_name = name or f"sp_{self._savepoint_count}" + + logger.debug("Creating savepoint: %s", savepoint_name) + nested = self.session.begin_nested() + + sp_context = SavepointContext(nested, savepoint_name) + + try: + yield sp_context + + if not sp_context._rolled_back: + # Commit the savepoint (release it) + logger.debug("Releasing savepoint: %s", savepoint_name) + + except Exception as e: + if not sp_context._rolled_back: + logger.warning( + "Rolling back savepoint %s due to exception: %s", + savepoint_name, + e, + ) + nested.rollback() + raise + + def commit(self) -> None: + """Explicitly commit the transaction. + + Use this for early commit within the context. 
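A usage sketch for the sync context manager defined above, assuming a plain SQLAlchemy `Session` and a hypothetical ORM model `Item`; each bad row is rolled back via its savepoint while the rest of the batch commits:

```python
from src.server.database.transaction import TransactionContext


def import_rows(session, rows: list[dict]) -> None:
    with TransactionContext(session) as tx:
        for row in rows:
            with tx.savepoint() as sp:
                session.add(Item(**row))  # Item: hypothetical ORM model
                if not row.get("valid", True):
                    sp.rollback()  # undo only this row, keep the rest
        # the outer transaction commits here unless an exception escaped
```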
+ """ + if not self._committed: + self.session.commit() + self._committed = True + logger.debug("Transaction explicitly committed") + + def rollback(self) -> None: + """Explicitly rollback the transaction. + + Use this for early rollback within the context. + """ + self.session.rollback() + self._committed = True # Prevent double commit + logger.debug("Transaction explicitly rolled back") + + +class SavepointContext: + """Context for managing a database savepoint. + + Provides explicit control over savepoint commit/rollback. + + Attributes: + _nested: SQLAlchemy nested transaction object + _name: Savepoint name for logging + _rolled_back: Whether rollback has been called + """ + + def __init__(self, nested: Any, name: str) -> None: + """Initialize savepoint context. + + Args: + nested: SQLAlchemy nested transaction + name: Savepoint name for logging + """ + self._nested = nested + self._name = name + self._rolled_back = False + + def rollback(self) -> None: + """Rollback to this savepoint. + + Undoes all changes since the savepoint was created. + """ + if not self._rolled_back: + self._nested.rollback() + self._rolled_back = True + logger.debug("Savepoint %s rolled back", self._name) + + def commit(self) -> None: + """Commit (release) this savepoint. + + Makes changes since the savepoint permanent within + the parent transaction. + """ + if not self._rolled_back: + # SQLAlchemy commits nested transactions automatically + # when exiting the context without rollback + logger.debug("Savepoint %s committed", self._name) + + +class AsyncTransactionContext: + """Asynchronous context manager for explicit transaction control. + + Provides async interface for managing database transactions with + automatic commit/rollback semantics and savepoint support. + + Attributes: + session: SQLAlchemy AsyncSession instance + _savepoint_count: Counter for nested savepoints + + Example: + async with AsyncTransactionContext(session) as tx: + # Database operations here + async with tx.savepoint() as sp: + # Nested operations with partial rollback + pass + """ + + def __init__(self, session: AsyncSession) -> None: + """Initialize async transaction context. + + Args: + session: SQLAlchemy async session + """ + self.session = session + self._savepoint_count = 0 + self._committed = False + + async def __aenter__(self) -> "AsyncTransactionContext": + """Enter async transaction context. + + Begins a new transaction if not already in one. + + Returns: + Self for context manager protocol + """ + logger.debug("Entering async transaction context") + + # Check if session is already in a transaction + if not self.session.in_transaction(): + await self.session.begin() + logger.debug("Started new async transaction") + + return self + + async def __aexit__( + self, + exc_type: Optional[type], + exc_val: Optional[BaseException], + exc_tb: Optional[Any], + ) -> bool: + """Exit async transaction context. + + Commits on success, rolls back on exception. 
+ + Args: + exc_type: Exception type if raised + exc_val: Exception value if raised + exc_tb: Exception traceback if raised + + Returns: + False to propagate exceptions + """ + if exc_type is not None: + logger.warning( + "Async transaction rollback due to exception: %s: %s", + exc_type.__name__, + exc_val, + ) + await self.session.rollback() + return False + + if not self._committed: + await self.session.commit() + logger.debug("Async transaction committed") + self._committed = True + + return False + + @asynccontextmanager + async def savepoint( + self, name: Optional[str] = None + ) -> AsyncGenerator["AsyncSavepointContext", None]: + """Create an async savepoint for partial rollback capability. + + Args: + name: Optional savepoint name (auto-generated if not provided) + + Yields: + AsyncSavepointContext for nested transaction control + """ + self._savepoint_count += 1 + savepoint_name = name or f"sp_{self._savepoint_count}" + + logger.debug("Creating async savepoint: %s", savepoint_name) + nested = await self.session.begin_nested() + + sp_context = AsyncSavepointContext(nested, savepoint_name, self.session) + + try: + yield sp_context + + if not sp_context._rolled_back: + logger.debug("Releasing async savepoint: %s", savepoint_name) + + except Exception as e: + if not sp_context._rolled_back: + logger.warning( + "Rolling back async savepoint %s due to exception: %s", + savepoint_name, + e, + ) + await nested.rollback() + raise + + async def commit(self) -> None: + """Explicitly commit the async transaction.""" + if not self._committed: + await self.session.commit() + self._committed = True + logger.debug("Async transaction explicitly committed") + + async def rollback(self) -> None: + """Explicitly rollback the async transaction.""" + await self.session.rollback() + self._committed = True # Prevent double commit + logger.debug("Async transaction explicitly rolled back") + + +class AsyncSavepointContext: + """Async context for managing a database savepoint. + + Attributes: + _nested: SQLAlchemy nested transaction object + _name: Savepoint name for logging + _session: Parent session for async operations + _rolled_back: Whether rollback has been called + """ + + def __init__( + self, nested: Any, name: str, session: AsyncSession + ) -> None: + """Initialize async savepoint context. + + Args: + nested: SQLAlchemy nested transaction + name: Savepoint name for logging + session: Parent async session + """ + self._nested = nested + self._name = name + self._session = session + self._rolled_back = False + + async def rollback(self) -> None: + """Rollback to this savepoint asynchronously.""" + if not self._rolled_back: + await self._nested.rollback() + self._rolled_back = True + logger.debug("Async savepoint %s rolled back", self._name) + + async def commit(self) -> None: + """Commit (release) this savepoint asynchronously.""" + if not self._rolled_back: + logger.debug("Async savepoint %s committed", self._name) + + +@asynccontextmanager +async def atomic( + session: AsyncSession, + propagation: TransactionPropagation = TransactionPropagation.REQUIRED, +) -> AsyncGenerator[AsyncTransactionContext, None]: + """Async context manager for atomic database operations. + + Provides a clean interface for wrapping database operations in + a transaction boundary with automatic commit/rollback. 
+ + Args: + session: SQLAlchemy async session + propagation: Transaction propagation behavior + + Yields: + AsyncTransactionContext for transaction control + + Example: + async with atomic(session) as tx: + await some_operation(session) + await another_operation(session) + # All operations committed together or rolled back + + async with atomic(session) as tx: + await outer_operation(session) + async with tx.savepoint() as sp: + await risky_operation(session) + if error: + await sp.rollback() # Only rollback nested ops + """ + logger.debug( + "Starting atomic block with propagation: %s", + propagation.value, + ) + + if propagation == TransactionPropagation.NESTED: + # Use savepoint for nested propagation + if session.in_transaction(): + nested = await session.begin_nested() + sp_context = AsyncSavepointContext(nested, "atomic_nested", session) + + try: + # Create a wrapper context for consistency + wrapper = AsyncTransactionContext(session) + wrapper._committed = True # Parent manages commit + yield wrapper + + if not sp_context._rolled_back: + logger.debug("Releasing nested atomic savepoint") + + except Exception as e: + if not sp_context._rolled_back: + logger.warning( + "Rolling back nested atomic savepoint due to: %s", e + ) + await nested.rollback() + raise + else: + # No existing transaction, start new one + async with AsyncTransactionContext(session) as tx: + yield tx + else: + # REQUIRED or REQUIRES_NEW + async with AsyncTransactionContext(session) as tx: + yield tx + + +@contextmanager +def atomic_sync( + session: Session, + propagation: TransactionPropagation = TransactionPropagation.REQUIRED, +) -> Generator[TransactionContext, None, None]: + """Sync context manager for atomic database operations. + + Args: + session: SQLAlchemy sync session + propagation: Transaction propagation behavior + + Yields: + TransactionContext for transaction control + """ + logger.debug( + "Starting sync atomic block with propagation: %s", + propagation.value, + ) + + if propagation == TransactionPropagation.NESTED: + if session.in_transaction(): + nested = session.begin_nested() + sp_context = SavepointContext(nested, "atomic_nested") + + try: + wrapper = TransactionContext(session) + wrapper._committed = True + yield wrapper + + if not sp_context._rolled_back: + logger.debug("Releasing nested sync atomic savepoint") + + except Exception as e: + if not sp_context._rolled_back: + logger.warning( + "Rolling back nested sync savepoint due to: %s", e + ) + nested.rollback() + raise + else: + with TransactionContext(session) as tx: + yield tx + else: + with TransactionContext(session) as tx: + yield tx + + +def transactional( + propagation: TransactionPropagation = TransactionPropagation.REQUIRED, + session_param: str = "db", +) -> Callable[[Callable[P, T]], Callable[P, T]]: + """Decorator to wrap a function in a transaction boundary. + + Automatically handles commit on success and rollback on exception. + Works with both sync and async functions. 
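To make the propagation modes concrete, a self-contained sketch (the two write helpers are placeholders): with `NESTED`, the inner `atomic()` becomes a savepoint because a transaction is already open, so its failure can be contained without losing the outer work.

```python
from src.server.database.transaction import TransactionPropagation, atomic


async def stable_write(db) -> None: ...   # placeholder for a real write


async def risky_write(db) -> None:        # placeholder that may fail
    raise RuntimeError("simulated failure")


async def compound(db) -> None:
    async with atomic(db):  # REQUIRED: join or start a transaction
        await stable_write(db)
        try:
            # NESTED: a savepoint, because a transaction is already open
            async with atomic(db, TransactionPropagation.NESTED):
                await risky_write(db)
        except RuntimeError:
            pass  # savepoint rolled back; stable_write's changes remain
```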
+ + Args: + propagation: Transaction propagation behavior + session_param: Name of the session parameter in the function signature + + Returns: + Decorated function wrapped in transaction + + Example: + @transactional() + async def create_user_with_profile(db: AsyncSession, data: dict): + user = await create_user(db, data['user']) + profile = await create_profile(db, user.id, data['profile']) + return user, profile + + @transactional(propagation=TransactionPropagation.NESTED) + async def risky_sub_operation(db: AsyncSession, data: dict): + # This can be rolled back without affecting parent transaction + pass + """ + def decorator(func: Callable[P, T]) -> Callable[P, T]: + import asyncio + + if asyncio.iscoroutinefunction(func): + @functools.wraps(func) + async def async_wrapper(*args: P.args, **kwargs: P.kwargs) -> T: + # Get session from kwargs or args + session = _extract_session(func, args, kwargs, session_param) + + if session is None: + raise TransactionError( + f"Could not find session parameter '{session_param}' " + f"in function {func.__name__}" + ) + + logger.debug( + "Starting transaction for %s with propagation %s", + func.__name__, + propagation.value, + ) + + async with atomic(session, propagation): + result = await func(*args, **kwargs) + + logger.debug( + "Transaction completed for %s", + func.__name__, + ) + + return result + + return async_wrapper # type: ignore + else: + @functools.wraps(func) + def sync_wrapper(*args: P.args, **kwargs: P.kwargs) -> T: + # Get session from kwargs or args + session = _extract_session(func, args, kwargs, session_param) + + if session is None: + raise TransactionError( + f"Could not find session parameter '{session_param}' " + f"in function {func.__name__}" + ) + + logger.debug( + "Starting sync transaction for %s with propagation %s", + func.__name__, + propagation.value, + ) + + with atomic_sync(session, propagation): + result = func(*args, **kwargs) + + logger.debug( + "Sync transaction completed for %s", + func.__name__, + ) + + return result + + return sync_wrapper # type: ignore + + return decorator + + +def _extract_session( + func: Callable, + args: tuple, + kwargs: dict, + session_param: str, +) -> Optional[AsyncSession | Session]: + """Extract session from function arguments. + + Args: + func: The function being called + args: Positional arguments + kwargs: Keyword arguments + session_param: Name of the session parameter + + Returns: + Session instance or None if not found + """ + import inspect + + # Check kwargs first + if session_param in kwargs: + return kwargs[session_param] + + # Get function signature to find positional index + sig = inspect.signature(func) + params = list(sig.parameters.keys()) + + if session_param in params: + idx = params.index(session_param) + # Account for 'self' parameter in methods + if len(args) > idx: + return args[idx] + + return None + + +def is_in_transaction(session: AsyncSession | Session) -> bool: + """Check if session is currently in a transaction. + + Args: + session: SQLAlchemy session (sync or async) + + Returns: + True if session is in an active transaction + """ + return session.in_transaction() + + +def get_transaction_depth(session: AsyncSession | Session) -> int: + """Get the current transaction nesting depth. 
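A sketch of the decorator with a non-default parameter name; only the `transactional` import is real here, the function body is elided:

```python
from sqlalchemy.ext.asyncio import AsyncSession

from src.server.database.transaction import transactional


@transactional(session_param="session")
async def archive_series(session: AsyncSession, series_id: int) -> None:
    # body elided; commits on success, rolls back on any exception
    ...
```

Because `_extract_session` checks kwargs first and then falls back to the positional slot from the signature, both `archive_series(session=s, series_id=1)` and `archive_series(s, 1)` resolve the session.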
+ + Args: + session: SQLAlchemy session (sync or async) + + Returns: + Number of nested transactions (0 if not in transaction) + """ + # SQLAlchemy doesn't expose nesting depth directly, + # but we can check transaction state + if not session.in_transaction(): + return 0 + + # Check for nested transaction + if hasattr(session, '_nested_transaction') and session._nested_transaction: + return 2 # At least one savepoint + + return 1 + + +__all__ = [ + "TransactionPropagation", + "TransactionError", + "TransactionContext", + "AsyncTransactionContext", + "SavepointContext", + "AsyncSavepointContext", + "atomic", + "atomic_sync", + "transactional", + "is_in_transaction", + "get_transaction_depth", +] diff --git a/src/server/exceptions/__init__.py b/src/server/exceptions/__init__.py index 9ff3a5d..77f89ad 100644 --- a/src/server/exceptions/__init__.py +++ b/src/server/exceptions/__init__.py @@ -144,6 +144,23 @@ class ConflictError(AniWorldAPIException): ) +class BadRequestError(AniWorldAPIException): + """Exception raised for bad request (400) errors.""" + + def __init__( + self, + message: str = "Bad request", + details: Optional[Dict[str, Any]] = None, + ): + """Initialize bad request error.""" + super().__init__( + message=message, + status_code=400, + error_code="BAD_REQUEST", + details=details, + ) + + class RateLimitError(AniWorldAPIException): """Exception raised when rate limit is exceeded.""" diff --git a/src/server/fastapi_app.py b/src/server/fastapi_app.py index 36267ad..4648865 100644 --- a/src/server/fastapi_app.py +++ b/src/server/fastapi_app.py @@ -5,6 +5,7 @@ This module provides the main FastAPI application with proper CORS configuration, middleware setup, static file serving, and Jinja2 template integration. """ +import asyncio from contextlib import asynccontextmanager from pathlib import Path @@ -21,6 +22,7 @@ from src.server.api.anime import router as anime_router from src.server.api.auth import router as auth_router from src.server.api.config import router as config_router from src.server.api.download import router as download_router +from src.server.api.health import router as health_router from src.server.api.scheduler import router as scheduler_router from src.server.api.websocket import router as websocket_router from src.server.controllers.error_controller import ( @@ -29,11 +31,11 @@ from src.server.controllers.error_controller import ( ) # Import controllers -from src.server.controllers.health_controller import router as health_router from src.server.controllers.page_controller import router as page_router from src.server.middleware.auth import AuthMiddleware from src.server.middleware.error_handler import register_exception_handlers from src.server.middleware.setup_redirect import SetupRedirectMiddleware +from src.server.services.anime_service import sync_series_from_data_files from src.server.services.progress_service import get_progress_service from src.server.services.websocket_service import get_websocket_service @@ -42,29 +44,54 @@ from src.server.services.websocket_service import get_websocket_service @asynccontextmanager -async def lifespan(app: FastAPI): - """Manage application lifespan (startup and shutdown).""" - # Setup logging first with DEBUG level - logger = setup_logging(log_level="DEBUG") +async def lifespan(_application: FastAPI): + """Manage application lifespan (startup and shutdown). + + Args: + _application: The FastAPI application instance (unused but required + by the lifespan protocol). 
+ """ + # Setup logging first with INFO level + logger = setup_logging(log_level="INFO") # Startup try: logger.info("Starting FastAPI application...") + # Initialize database first (required for other services) + try: + from src.server.database.connection import init_db + await init_db() + logger.info("Database initialized successfully") + except Exception as e: + logger.error("Failed to initialize database: %s", e, exc_info=True) + raise # Database is required, fail startup if it fails + # Load configuration from config.json and sync with settings try: from src.server.services.config_service import get_config_service config_service = get_config_service() config = config_service.load_config() + logger.debug( + "Config loaded: other=%s", config.other + ) + # Sync anime_directory from config.json to settings - if config.other and config.other.get("anime_directory"): - settings.anime_directory = str(config.other["anime_directory"]) + # config.other is Dict[str, object] - pylint doesn't infer this + other_settings = dict(config.other) if config.other else {} + if other_settings.get("anime_directory"): + anime_dir = other_settings["anime_directory"] + settings.anime_directory = str(anime_dir) logger.info( "Loaded anime_directory from config: %s", settings.anime_directory ) - except Exception as e: + else: + logger.debug( + "anime_directory not found in config.other" + ) + except (OSError, ValueError, KeyError) as e: logger.warning("Failed to load config from config.json: %s", e) # Initialize progress service with event subscription @@ -86,6 +113,37 @@ async def lifespan(app: FastAPI): # Subscribe to progress events progress_service.subscribe("progress_updated", progress_event_handler) + # Initialize download service and restore queue from database + # Only if anime directory is configured + try: + from src.server.utils.dependencies import get_download_service + + logger.info( + "Checking anime_directory setting: '%s'", + settings.anime_directory + ) + + if settings.anime_directory: + download_service = get_download_service() + await download_service.initialize() + logger.info("Download service initialized and queue restored") + + # Sync series from data files to database + sync_count = await sync_series_from_data_files( + settings.anime_directory + ) + logger.info( + "Data file sync complete. Added %d series.", sync_count + ) + else: + logger.info( + "Download service initialization skipped - " + "anime directory not configured" + ) + except (OSError, RuntimeError, ValueError) as e: + logger.warning("Failed to initialize download service: %s", e) + # Continue startup - download service can be initialized later + logger.info("FastAPI application started successfully") logger.info("Server running on http://127.0.0.1:8000") logger.info( @@ -98,20 +156,88 @@ async def lifespan(app: FastAPI): # Yield control to the application yield - # Shutdown - logger.info("FastAPI application shutting down") + # Shutdown - execute in proper order with timeout protection + logger.info("FastAPI application shutting down (graceful shutdown initiated)") - # Shutdown download service and its thread pool + # Define shutdown timeout (total time allowed for all shutdown operations) + SHUTDOWN_TIMEOUT = 30.0 + + import time + shutdown_start = time.monotonic() + + def remaining_time() -> float: + """Calculate remaining shutdown time.""" + elapsed = time.monotonic() - shutdown_start + return max(0.0, SHUTDOWN_TIMEOUT - elapsed) + + # 1. 
Broadcast shutdown notification via WebSocket
    try:
-        from src.server.services.download_service import _download_service_instance
+        ws_service = get_websocket_service()
+        logger.info("Broadcasting shutdown notification to WebSocket clients...")
+        await asyncio.wait_for(
+            ws_service.shutdown(timeout=min(5.0, remaining_time())),
+            timeout=min(5.0, remaining_time())
+        )
+        logger.info("WebSocket shutdown complete")
+    except asyncio.TimeoutError:
+        logger.warning("WebSocket shutdown timed out")
+    except Exception as e:  # pylint: disable=broad-exception-caught
+        logger.error("Error during WebSocket shutdown: %s", e, exc_info=True)
+
+    # 2. Shutdown download service and persist active downloads
+    try:
+        from src.server.services.download_service import (  # noqa: E501
+            _download_service_instance,
+        )
         if _download_service_instance is not None:
             logger.info("Stopping download service...")
-            await _download_service_instance.stop()
+            await asyncio.wait_for(
+                _download_service_instance.stop(),
+                timeout=min(10.0, remaining_time())
+            )
             logger.info("Download service stopped successfully")
-    except Exception as e:
+    except asyncio.TimeoutError:
+        logger.warning("Download service shutdown timed out")
+    except Exception as e:  # pylint: disable=broad-exception-caught
         logger.error("Error stopping download service: %s", e, exc_info=True)

-    logger.info("FastAPI application shutdown complete")
+    # 3. Shutdown SeriesApp and cleanup thread pool
+    try:
+        from src.server.utils.dependencies import _series_app
+        if _series_app is not None:
+            logger.info("Shutting down SeriesApp thread pool...")
+            _series_app.shutdown()
+            logger.info("SeriesApp shutdown complete")
+    except Exception as e:  # pylint: disable=broad-exception-caught
+        logger.error("Error during SeriesApp shutdown: %s", e, exc_info=True)
+
+    # 4. Cleanup progress service
+    try:
+        progress_service = get_progress_service()
+        logger.info("Cleaning up progress service...")
+        # Clear any active progress tracking and subscribers
+        progress_service._active_progress.clear()
+        logger.info("Progress service cleanup complete")
+    except Exception as e:  # pylint: disable=broad-exception-caught
+        logger.error(
+            "Error cleaning up progress service: %s", e, exc_info=True
+        )
+
+    # 5. Close database connections with WAL checkpoint
+    try:
+        from src.server.database.connection import close_db
+        logger.info("Closing database connections...")
+        await asyncio.wait_for(
+            close_db(),
+            timeout=min(10.0, remaining_time())
+        )
+        logger.info("Database connections closed")
+    except asyncio.TimeoutError:
+        logger.warning("Database shutdown timed out")
+    except Exception as e:  # pylint: disable=broad-exception-caught
+        logger.error("Error closing database: %s", e, exc_info=True)
+
+    elapsed_total = time.monotonic() - shutdown_start
+    logger.info(
+        "FastAPI application shutdown complete (took %.2fs)",
+        elapsed_total
+    )


 # Initialize FastAPI app with lifespan
@@ -180,5 +306,5 @@ if __name__ == "__main__":
         host="127.0.0.1",
         port=8000,
         reload=True,
-        log_level="debug"
+        log_level="info"
     )
diff --git a/src/server/middleware/auth.py b/src/server/middleware/auth.py
index ba3f201..9c6699c 100644
--- a/src/server/middleware/auth.py
+++ b/src/server/middleware/auth.py
@@ -8,6 +8,17 @@ Responsibilities:
 This middleware is intentionally lightweight and synchronous. For production
 use consider a distributed rate limiter (Redis) and a proper token
 revocation store.
+
+WARNING - SINGLE PROCESS LIMITATION:
+    Rate limiting state is stored in memory dictionaries which RESET when
+    the process restarts.
This means: + - Attackers can bypass rate limits by triggering a process restart + - Rate limits are not shared across multiple workers/processes + + For production deployments, consider: + - Using Redis-backed rate limiting (e.g., slowapi with Redis) + - Running behind a reverse proxy with rate limiting (nginx, HAProxy) + - Using a dedicated rate limiting service """ from __future__ import annotations diff --git a/src/server/middleware/error_handler.py b/src/server/middleware/error_handler.py index 1f0b885..dcda117 100644 --- a/src/server/middleware/error_handler.py +++ b/src/server/middleware/error_handler.py @@ -15,6 +15,7 @@ from src.server.exceptions import ( AniWorldAPIException, AuthenticationError, AuthorizationError, + BadRequestError, ConflictError, NotFoundError, RateLimitError, @@ -127,6 +128,26 @@ def register_exception_handlers(app: FastAPI) -> None: ), ) + @app.exception_handler(BadRequestError) + async def bad_request_error_handler( + request: Request, exc: BadRequestError + ) -> JSONResponse: + """Handle bad request errors (400).""" + logger.info( + f"Bad request error: {exc.message}", + extra={"details": exc.details, "path": str(request.url.path)}, + ) + return JSONResponse( + status_code=exc.status_code, + content=create_error_response( + status_code=exc.status_code, + error=exc.error_code, + message=exc.message, + details=exc.details, + request_id=getattr(request.state, "request_id", None), + ), + ) + @app.exception_handler(NotFoundError) async def not_found_error_handler( request: Request, exc: NotFoundError diff --git a/src/server/middleware/setup_redirect.py b/src/server/middleware/setup_redirect.py index 1e92ec6..5670396 100644 --- a/src/server/middleware/setup_redirect.py +++ b/src/server/middleware/setup_redirect.py @@ -11,7 +11,7 @@ from typing import Callable from fastapi import Request from starlette.middleware.base import BaseHTTPMiddleware -from starlette.responses import RedirectResponse +from starlette.responses import RedirectResponse, Response from starlette.types import ASGIApp from src.server.services.auth_service import auth_service @@ -91,11 +91,11 @@ class SetupRedirectMiddleware(BaseHTTPMiddleware): config = config_service.load_config() # Validate the loaded config - validation = config.validate() + validation = config.validate_config() if not validation.valid: return True - except Exception: + except (FileNotFoundError, ValueError, OSError, AttributeError): # If we can't load or validate config, setup is needed return True @@ -103,7 +103,7 @@ class SetupRedirectMiddleware(BaseHTTPMiddleware): async def dispatch( self, request: Request, call_next: Callable - ) -> RedirectResponse: + ) -> Response: """Process the request and redirect to setup if needed. 
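A sketch of how the new exception surfaces from a route once `register_exception_handlers` has run; the router and path are illustrative, not endpoints from this diff:

```python
from fastapi import APIRouter

from src.server.exceptions import BadRequestError

router = APIRouter()


@router.post("/api/example")  # illustrative route, not one from this diff
async def create_example(payload: dict):
    if not payload:
        raise BadRequestError(
            message="Request body must not be empty",
            details={"hint": "supply at least one field"},
        )
    return {"ok": True}
```

The registered handler logs at info level and answers with the standard envelope from `create_error_response`, carrying status 400 and error code `BAD_REQUEST`.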
Args: diff --git a/src/server/models/anime.py b/src/server/models/anime.py index 5a71747..5c693ae 100644 --- a/src/server/models/anime.py +++ b/src/server/models/anime.py @@ -70,8 +70,6 @@ class AnimeSeriesResponse(BaseModel): ) ) alt_titles: List[str] = Field(default_factory=list, description="Alternative titles") - description: Optional[str] = Field(None, description="Short series description") - total_episodes: Optional[int] = Field(None, ge=0, description="Declared total episode count if known") episodes: List[EpisodeInfo] = Field(default_factory=list, description="Known episodes information") missing_episodes: List[MissingEpisodeInfo] = Field(default_factory=list, description="Detected missing episode ranges") thumbnail: Optional[HttpUrl] = Field(None, description="Optional thumbnail image URL") diff --git a/src/server/models/config.py b/src/server/models/config.py index 8ab5365..17e3ba1 100644 --- a/src/server/models/config.py +++ b/src/server/models/config.py @@ -58,8 +58,9 @@ class ValidationResult(BaseModel): """Result of a configuration validation attempt.""" valid: bool = Field(..., description="Whether the configuration is valid") - errors: Optional[List[str]] = Field( - default_factory=list, description="List of validation error messages" + errors: List[str] = Field( + default_factory=lambda: [], + description="List of validation error messages" ) @@ -71,14 +72,16 @@ class AppConfig(BaseModel): name: str = Field(default="Aniworld", description="Application name") data_dir: str = Field(default="data", description="Base data directory") - scheduler: SchedulerConfig = Field(default_factory=SchedulerConfig) + scheduler: SchedulerConfig = Field( + default_factory=SchedulerConfig + ) logging: LoggingConfig = Field(default_factory=LoggingConfig) backup: BackupConfig = Field(default_factory=BackupConfig) other: Dict[str, object] = Field( default_factory=dict, description="Arbitrary other settings" ) - def validate(self) -> ValidationResult: + def validate_config(self) -> ValidationResult: """Perform light-weight validation and return a ValidationResult. 
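Calling the renamed validator looks like this; a hedged sketch assuming `BackupConfig` accepts `enabled` and `path` keyword arguments, which is how `validate_config` reads them:

```python
from src.server.models.config import AppConfig, BackupConfig

config = AppConfig(backup=BackupConfig(enabled=True, path=""))
result = config.validate_config()
if not result.valid:
    # e.g. ["backup.path must be set when backups.enabled is true"]
    raise ValueError("; ".join(result.errors))
```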
This method intentionally avoids performing IO (no filesystem checks) @@ -98,7 +101,8 @@ class AppConfig(BaseModel): errors.append(msg) # backup.path must be set when backups are enabled - if self.backup.enabled and (not self.backup.path): + backup_data = self.model_dump().get("backup", {}) + if backup_data.get("enabled") and not backup_data.get("path"): errors.append( "backup.path must be set when backups.enabled is true" ) diff --git a/src/server/services/anime_service.py b/src/server/services/anime_service.py index 94c8917..51d1cc5 100644 --- a/src/server/services/anime_service.py +++ b/src/server/services/anime_service.py @@ -1,6 +1,7 @@ from __future__ import annotations import asyncio +import time from functools import lru_cache from typing import Optional @@ -12,6 +13,10 @@ from src.server.services.progress_service import ( ProgressType, get_progress_service, ) +from src.server.services.websocket_service import ( + WebSocketService, + get_websocket_service, +) logger = structlog.get_logger(__name__) @@ -37,21 +42,37 @@ class AnimeService: self, series_app: SeriesApp, progress_service: Optional[ProgressService] = None, + websocket_service: Optional[WebSocketService] = None, ): self._app = series_app self._directory = series_app.directory_to_search self._progress_service = progress_service or get_progress_service() + self._websocket_service = websocket_service or get_websocket_service() self._event_loop: Optional[asyncio.AbstractEventLoop] = None + # Track scan progress for WebSocket updates + self._scan_start_time: Optional[float] = None + self._scan_directories_count: int = 0 + self._scan_files_count: int = 0 + self._scan_total_items: int = 0 + self._is_scanning: bool = False + self._scan_current_directory: str = "" + # Lock to prevent concurrent rescans + self._scan_lock = asyncio.Lock() # Subscribe to SeriesApp events # Note: Events library uses assignment (=), not += operator try: self._app.download_status = self._on_download_status self._app.scan_status = self._on_scan_status - logger.debug("Successfully subscribed to SeriesApp events") + logger.info( + "Subscribed to SeriesApp events", + scan_status_handler=str(self._app.scan_status), + series_app_id=id(self._app), + ) except Exception as e: logger.exception("Failed to subscribe to SeriesApp events") raise AnimeServiceError("Initialization failed") from e + def _on_download_status(self, args) -> None: """Handle download status events from SeriesApp. @@ -142,7 +163,7 @@ class AnimeService: ), loop ) - except Exception as exc: + except Exception as exc: # pylint: disable=broad-except logger.error( "Error handling download status event", error=str(exc) @@ -152,7 +173,8 @@ class AnimeService: """Handle scan status events from SeriesApp. Events include both 'key' (primary identifier) and 'folder' - (metadata for display purposes). + (metadata for display purposes). Also broadcasts via WebSocket + for real-time UI updates. 
Args: args: ScanStatusEventArgs from SeriesApp containing key, @@ -161,23 +183,50 @@ class AnimeService: try: scan_id = "library_scan" + logger.info( + "Scan status event received", + status=args.status, + current=args.current, + total=args.total, + folder=args.folder, + ) + # Get event loop - try running loop first, then stored loop loop = None try: loop = asyncio.get_running_loop() + logger.debug("Using running event loop for scan status") except RuntimeError: # No running loop in this thread - use stored loop loop = self._event_loop + logger.debug( + "Using stored event loop for scan status", + has_loop=loop is not None + ) if not loop: - logger.debug( + logger.warning( "No event loop available for scan status event", status=args.status ) return + + logger.info( + "Processing scan status event", + status=args.status, + loop_id=id(loop), + ) # Map SeriesApp scan events to progress service if args.status == "started": + # Track scan start time and reset counters + self._scan_start_time = time.time() + self._scan_directories_count = 0 + self._scan_files_count = 0 + self._scan_total_items = args.total + self._is_scanning = True + self._scan_current_directory = "" + asyncio.run_coroutine_threadsafe( self._progress_service.start_progress( progress_id=scan_id, @@ -187,7 +236,18 @@ class AnimeService: ), loop ) + # Broadcast scan started via WebSocket with total items + asyncio.run_coroutine_threadsafe( + self._broadcast_scan_started_safe(total_items=args.total), + loop + ) elif args.status == "progress": + # Update scan counters + self._scan_directories_count = args.current + self._scan_current_directory = args.folder or "" + # Estimate files found (use current as proxy since detailed + # file count isn't available from SerieScanner) + asyncio.run_coroutine_threadsafe( self._progress_service.update_progress( progress_id=scan_id, @@ -197,7 +257,25 @@ class AnimeService: ), loop ) + # Broadcast scan progress via WebSocket + asyncio.run_coroutine_threadsafe( + self._broadcast_scan_progress_safe( + directories_scanned=args.current, + files_found=args.current, # Use folder count as proxy + current_directory=args.folder or "", + total_items=args.total, + ), + loop + ) elif args.status == "completed": + # Calculate elapsed time + elapsed = 0.0 + if self._scan_start_time: + elapsed = time.time() - self._scan_start_time + + # Mark scan as complete + self._is_scanning = False + asyncio.run_coroutine_threadsafe( self._progress_service.complete_progress( progress_id=scan_id, @@ -205,7 +283,17 @@ class AnimeService: ), loop ) + # Broadcast scan completed via WebSocket + asyncio.run_coroutine_threadsafe( + self._broadcast_scan_completed_safe( + total_directories=args.total, + total_files=args.total, # Use folder count as proxy + elapsed_seconds=elapsed, + ), + loop + ) elif args.status == "failed": + self._is_scanning = False asyncio.run_coroutine_threadsafe( self._progress_service.fail_progress( progress_id=scan_id, @@ -214,6 +302,7 @@ class AnimeService: loop ) elif args.status == "cancelled": + self._is_scanning = False asyncio.run_coroutine_threadsafe( self._progress_service.fail_progress( progress_id=scan_id, @@ -221,8 +310,119 @@ class AnimeService: ), loop ) + except Exception as exc: # pylint: disable=broad-except + logger.error("Error handling scan status event: %s", exc) + + async def _broadcast_scan_started_safe(self, total_items: int = 0) -> None: + """Safely broadcast scan started event via WebSocket. 
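The handlers above bridge a worker thread into asyncio with `run_coroutine_threadsafe`; the same idiom in miniature, with illustrative names:

```python
import asyncio


def make_threadsafe_emitter(loop: asyncio.AbstractEventLoop, publish):
    """Wrap an async publish coroutine so worker threads can call it."""
    def emit(payload: dict) -> None:
        # Schedules publish(payload) on the loop and returns immediately;
        # this is why the handler stores the loop at rescan time -- the
        # scanner thread has no running loop of its own.
        asyncio.run_coroutine_threadsafe(publish(payload), loop)
    return emit
```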
+ + Wraps the WebSocket broadcast in try/except to ensure scan + continues even if WebSocket fails. + + Args: + total_items: Total number of items to scan + """ + try: + logger.info( + "Broadcasting scan_started via WebSocket", + directory=self._directory, + total_items=total_items, + ) + await self._websocket_service.broadcast_scan_started( + directory=self._directory, + total_items=total_items, + ) + logger.info("scan_started broadcast sent successfully") except Exception as exc: - logger.error("Error handling scan status event", error=str(exc)) + logger.warning( + "Failed to broadcast scan_started via WebSocket", + error=str(exc) + ) + + async def _broadcast_scan_progress_safe( + self, + directories_scanned: int, + files_found: int, + current_directory: str, + total_items: int = 0, + ) -> None: + """Safely broadcast scan progress event via WebSocket. + + Wraps the WebSocket broadcast in try/except to ensure scan + continues even if WebSocket fails. + + Args: + directories_scanned: Number of directories scanned so far + files_found: Number of files found so far + current_directory: Current directory being scanned + total_items: Total number of items to scan + """ + try: + await self._websocket_service.broadcast_scan_progress( + directories_scanned=directories_scanned, + files_found=files_found, + current_directory=current_directory, + total_items=total_items, + ) + except Exception as exc: + logger.warning( + "Failed to broadcast scan_progress via WebSocket", + error=str(exc) + ) + + async def _broadcast_scan_completed_safe( + self, + total_directories: int, + total_files: int, + elapsed_seconds: float, + ) -> None: + """Safely broadcast scan completed event via WebSocket. + + Wraps the WebSocket broadcast in try/except to ensure scan + cleanup continues even if WebSocket fails. + + Args: + total_directories: Total directories scanned + total_files: Total files found + elapsed_seconds: Time taken for the scan + """ + try: + await self._websocket_service.broadcast_scan_completed( + total_directories=total_directories, + total_files=total_files, + elapsed_seconds=elapsed_seconds, + ) + except Exception as exc: + logger.warning( + "Failed to broadcast scan_completed via WebSocket", + error=str(exc) + ) + + def get_scan_status(self) -> dict: + """Get the current scan status. + + Returns: + Dictionary with scan status information including: + - is_scanning: Whether a scan is currently in progress + - total_items: Total number of items to scan + - directories_scanned: Number of directories scanned so far + - current_directory: Current directory being scanned + - directory: Root directory being scanned + """ + status = { + "is_scanning": self._is_scanning, + "total_items": self._scan_total_items, + "directories_scanned": self._scan_directories_count, + "current_directory": self._scan_current_directory, + "directory": self._directory, + } + logger.debug( + "Scan status requested", + is_scanning=self._is_scanning, + total_items=self._scan_total_items, + directories_scanned=self._scan_directories_count, + ) + return status @lru_cache(maxsize=128) def _cached_list_missing(self) -> list[dict]: @@ -288,25 +488,322 @@ class AnimeService: The SeriesApp handles progress tracking via events which are forwarded to the ProgressService through event handlers. + After scanning, results are persisted to the database. + All series are identified by their 'key' (provider identifier), with 'folder' stored as metadata. + + Note: + Only one scan can run at a time. 
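The guard sketched below is the idiom `rescan` uses: `locked()` is a cheap fast-path rejection, and the `async with` still serializes any caller that slips past the check (it would wait and then run a second scan back to back rather than concurrently):

```python
import asyncio

_scan_lock = asyncio.Lock()


async def rescan_once(do_scan) -> bool:
    if _scan_lock.locked():
        return False  # a scan is already running; drop this request
    async with _scan_lock:
        await do_scan()
    return True
```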
If a scan is already in + progress, this method returns immediately without starting + a new scan. """ - try: - # Store event loop for event handlers - self._event_loop = asyncio.get_running_loop() - - # SeriesApp.rescan is now async and handles events internally - await self._app.rescan() - - # invalidate cache + # Check if a scan is already running (non-blocking) + if self._scan_lock.locked(): + logger.info("Rescan already in progress, ignoring request") + return + + async with self._scan_lock: try: - self._cached_list_missing.cache_clear() - except Exception: - pass + # Store event loop for event handlers + self._event_loop = asyncio.get_running_loop() + logger.info( + "Rescan started, event loop stored", + loop_id=id(self._event_loop), + series_app_id=id(self._app), + scan_handler=str(self._app.scan_status), + ) + + # SeriesApp.rescan returns scanned series list + scanned_series = await self._app.rescan() + + # Persist scan results to database + if scanned_series: + await self._save_scan_results_to_db(scanned_series) + + # Reload series from database to ensure consistency + await self._load_series_from_db() - except Exception as exc: - logger.exception("rescan failed") - raise AnimeServiceError("Rescan failed") from exc + # invalidate cache + try: + self._cached_list_missing.cache_clear() + except Exception: # pylint: disable=broad-except + pass + + except Exception as exc: # pylint: disable=broad-except + logger.exception("rescan failed") + raise AnimeServiceError("Rescan failed") from exc + + async def _save_scan_results_to_db(self, series_list: list) -> int: + """ + Save scan results to the database. + + Creates or updates series records in the database based on + scan results. + + Args: + series_list: List of Serie objects from scan + + Returns: + Number of series saved/updated + """ + from src.server.database.connection import get_db_session + from src.server.database.service import AnimeSeriesService + + saved_count = 0 + + async with get_db_session() as db: + for serie in series_list: + try: + # Check if series already exists + existing = await AnimeSeriesService.get_by_key( + db, serie.key + ) + + if existing: + # Update existing series + await self._update_series_in_db( + serie, existing, db + ) + else: + # Create new series + await self._create_series_in_db(serie, db) + + saved_count += 1 + except Exception as e: # pylint: disable=broad-except + logger.warning( + "Failed to save series to database: %s (key=%s) - %s", + serie.name, + serie.key, + str(e) + ) + + logger.info( + "Saved %d series to database from scan results", + saved_count + ) + return saved_count + + async def _create_series_in_db(self, serie, db) -> None: + """Create a new series in the database.""" + from src.server.database.service import AnimeSeriesService, EpisodeService + + anime_series = await AnimeSeriesService.create( + db=db, + key=serie.key, + name=serie.name, + site=serie.site, + folder=serie.folder, + ) + + # Create Episode records + if serie.episodeDict: + for season, episode_numbers in serie.episodeDict.items(): + for ep_num in episode_numbers: + await EpisodeService.create( + db=db, + series_id=anime_series.id, + season=season, + episode_number=ep_num, + ) + + logger.debug( + "Created series in database: %s (key=%s)", + serie.name, + serie.key + ) + + async def _update_series_in_db(self, serie, existing, db) -> None: + """Update an existing series in the database. + + Syncs the database episodes with the current missing episodes from scan. 
+ - Adds new missing episodes that are not in the database + - Removes episodes from database that are no longer missing + (i.e., the file has been added to the filesystem) + """ + from src.server.database.service import AnimeSeriesService, EpisodeService + + # Get existing episodes from database + existing_episodes = await EpisodeService.get_by_series(db, existing.id) + + # Build dict of existing episodes: {season: {ep_num: episode_id}} + existing_dict: dict[int, dict[int, int]] = {} + for ep in existing_episodes: + if ep.season not in existing_dict: + existing_dict[ep.season] = {} + existing_dict[ep.season][ep.episode_number] = ep.id + + # Get new missing episodes from scan + new_dict = serie.episodeDict or {} + + # Build set of new missing episodes for quick lookup + new_missing_set: set[tuple[int, int]] = set() + for season, episode_numbers in new_dict.items(): + for ep_num in episode_numbers: + new_missing_set.add((season, ep_num)) + + # Add new missing episodes that are not in the database + for season, episode_numbers in new_dict.items(): + existing_season_eps = existing_dict.get(season, {}) + for ep_num in episode_numbers: + if ep_num not in existing_season_eps: + await EpisodeService.create( + db=db, + series_id=existing.id, + season=season, + episode_number=ep_num, + ) + logger.debug( + "Added missing episode to database: %s S%02dE%02d", + serie.key, + season, + ep_num + ) + + # Remove episodes from database that are no longer missing + # (i.e., the episode file now exists on the filesystem) + for season, eps_dict in existing_dict.items(): + for ep_num, episode_id in eps_dict.items(): + if (season, ep_num) not in new_missing_set: + await EpisodeService.delete(db, episode_id) + logger.info( + "Removed episode from database (no longer missing): " + "%s S%02dE%02d", + serie.key, + season, + ep_num + ) + + # Update folder if changed + if existing.folder != serie.folder: + await AnimeSeriesService.update( + db, + existing.id, + folder=serie.folder + ) + + logger.debug( + "Updated series in database: %s (key=%s)", + serie.name, + serie.key + ) + + async def _load_series_from_db(self) -> None: + """ + Load series from the database into SeriesApp. + + This method is called during initialization and after rescans + to ensure the in-memory series list is in sync with the database. + """ + from src.core.entities.series import Serie + from src.server.database.connection import get_db_session + from src.server.database.service import AnimeSeriesService + + async with get_db_session() as db: + anime_series_list = await AnimeSeriesService.get_all( + db, with_episodes=True + ) + + # Convert to Serie objects + series_list = [] + for anime_series in anime_series_list: + # Build episode_dict from episodes relationship + episode_dict: dict[int, list[int]] = {} + if anime_series.episodes: + for episode in anime_series.episodes: + season = episode.season + if season not in episode_dict: + episode_dict[season] = [] + episode_dict[season].append(episode.episode_number) + # Sort episode numbers + for season in episode_dict: + episode_dict[season].sort() + + serie = Serie( + key=anime_series.key, + name=anime_series.name, + site=anime_series.site, + folder=anime_series.folder, + episodeDict=episode_dict + ) + series_list.append(serie) + + # Load into SeriesApp + self._app.load_series_from_list(series_list) + + async def add_series_to_db( + self, + serie, + db + ): + """ + Add a series to the database if it doesn't already exist. + + Uses serie.key for identification. 
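Stripped of the ORM calls, the episode sync above is a plain set reconciliation, which may be easier to see in isolation:

```python
def diff_episodes(
    in_db: set[tuple[int, int]],          # (season, episode) rows stored
    still_missing: set[tuple[int, int]],  # (season, episode) from the scan
) -> tuple[set[tuple[int, int]], set[tuple[int, int]]]:
    to_add = still_missing - in_db     # newly detected gaps
    to_remove = in_db - still_missing  # files that appeared on disk
    return to_add, to_remove
```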
Creates a new AnimeSeries + record in the database if it doesn't already exist. + + Args: + serie: The Serie instance to add + db: Database session for async operations + + Returns: + Created AnimeSeries instance, or None if already exists + """ + from src.server.database.service import AnimeSeriesService, EpisodeService + + # Check if series already exists in DB + existing = await AnimeSeriesService.get_by_key(db, serie.key) + if existing: + logger.debug( + "Series already exists in database: %s (key=%s)", + serie.name, + serie.key + ) + return None + + # Create new series in database + anime_series = await AnimeSeriesService.create( + db=db, + key=serie.key, + name=serie.name, + site=serie.site, + folder=serie.folder, + ) + + # Create Episode records for each episode in episodeDict + if serie.episodeDict: + for season, episode_numbers in serie.episodeDict.items(): + for episode_number in episode_numbers: + await EpisodeService.create( + db=db, + series_id=anime_series.id, + season=season, + episode_number=episode_number, + ) + + logger.info( + "Added series to database: %s (key=%s)", + serie.name, + serie.key + ) + + return anime_series + + async def contains_in_db(self, key: str, db) -> bool: + """ + Check if a series with the given key exists in the database. + + Args: + key: The unique provider identifier for the series + db: Database session for async operations + + Returns: + True if the series exists in the database + """ + from src.server.database.service import AnimeSeriesService + + existing = await AnimeSeriesService.get_by_key(db, key) + return existing is not None async def download( self, @@ -335,6 +832,7 @@ class AnimeService: Raises: AnimeServiceError: If download fails + InterruptedError: If download was cancelled Note: The 'key' parameter is the primary identifier used for all @@ -353,6 +851,10 @@ class AnimeService: key=key, item_id=item_id, ) + except InterruptedError: + # Download was cancelled - re-raise for proper handling + logger.info("Download cancelled, propagating cancellation") + raise except Exception as exc: logger.exception("download failed") raise AnimeServiceError("Download failed") from exc @@ -361,3 +863,135 @@ class AnimeService: def get_anime_service(series_app: SeriesApp) -> AnimeService: """Factory used for creating AnimeService with a SeriesApp instance.""" return AnimeService(series_app) + + +async def sync_series_from_data_files( + anime_directory: str, + log_instance=None # pylint: disable=unused-argument +) -> int: + """ + Sync series from data files to the database. + + Scans the anime directory for data files and adds any new series + to the database. Existing series are skipped (no duplicates). + + This function is typically called during application startup to ensure + series metadata stored in filesystem data files is available in the + database. + + Args: + anime_directory: Path to the anime directory with data files + log_instance: Optional logger instance (unused, kept for API + compatibility). This function always uses structlog internally. 
+ + Returns: + Number of new series added to the database + """ + # Always use structlog for structured logging with keyword arguments + log = structlog.get_logger(__name__) + + try: + from src.server.database.connection import get_db_session + from src.server.database.service import AnimeSeriesService, EpisodeService + + log.info( + "Starting data file to database sync", + directory=anime_directory + ) + + # Get all series from data files using SeriesApp + series_app = SeriesApp(anime_directory) + all_series = await asyncio.to_thread( + series_app.get_all_series_from_data_files + ) + + if not all_series: + log.info("No series found in data files to sync") + return 0 + + log.info( + "Found series in data files, syncing to database", + count=len(all_series) + ) + + async with get_db_session() as db: + added_count = 0 + skipped_count = 0 + for serie in all_series: + # Handle series with empty name - use folder as fallback + if not serie.name or not serie.name.strip(): + if serie.folder and serie.folder.strip(): + serie.name = serie.folder.strip() + log.debug( + "Using folder as name fallback", + key=serie.key, + folder=serie.folder + ) + else: + log.warning( + "Skipping series with empty name and folder", + key=serie.key + ) + skipped_count += 1 + continue + + try: + # Check if series already exists in DB + existing = await AnimeSeriesService.get_by_key(db, serie.key) + if existing: + log.debug( + "Series already exists in database", + name=serie.name, + key=serie.key + ) + continue + + # Create new series in database + anime_series = await AnimeSeriesService.create( + db=db, + key=serie.key, + name=serie.name, + site=serie.site, + folder=serie.folder, + ) + + # Create Episode records for each episode in episodeDict + if serie.episodeDict: + for season, episode_numbers in serie.episodeDict.items(): + for episode_number in episode_numbers: + await EpisodeService.create( + db=db, + series_id=anime_series.id, + season=season, + episode_number=episode_number, + ) + + added_count += 1 + log.debug( + "Added series to database", + name=serie.name, + key=serie.key + ) + except Exception as e: # pylint: disable=broad-except + log.warning( + "Failed to add series to database", + key=serie.key, + name=serie.name, + error=str(e) + ) + skipped_count += 1 + + log.info( + "Data file sync complete", + added=added_count, + skipped=len(all_series) - added_count + ) + return added_count + + except Exception as e: # pylint: disable=broad-except + log.warning( + "Failed to sync series to database", + error=str(e), + exc_info=True + ) + return 0 diff --git a/src/server/services/auth_service.py b/src/server/services/auth_service.py index bc47edf..d021c35 100644 --- a/src/server/services/auth_service.py +++ b/src/server/services/auth_service.py @@ -42,6 +42,17 @@ class AuthService: config persistence should be used (not implemented here). - Lockout policy is kept in-memory and will reset when the process restarts. This is acceptable for single-process deployments. + + WARNING - SINGLE PROCESS LIMITATION: + Failed login attempts are stored in memory dictionaries which RESET + when the process restarts. 
This means: + - Attackers can bypass lockouts by triggering a process restart + - Lockout state is not shared across multiple workers/processes + + For production deployments, consider: + - Storing failed attempts in database with TTL-based expiration + - Using Redis for distributed lockout state + - Implementing account-based (not just IP-based) lockout tracking """ def __init__(self) -> None: diff --git a/src/server/services/config_service.py b/src/server/services/config_service.py index 61d2d75..da38fb1 100644 --- a/src/server/services/config_service.py +++ b/src/server/services/config_service.py @@ -4,7 +4,7 @@ This service handles: - Loading and saving configuration to JSON files - Configuration validation - Backup and restore functionality -- Configuration migration for version updates +- Configuration version management """ import json @@ -35,8 +35,8 @@ class ConfigBackupError(ConfigServiceError): class ConfigService: """Service for managing application configuration persistence. - Handles loading, saving, validation, backup, and migration of - configuration files. Uses JSON format for human-readable and + Handles loading, saving, validation, backup, and version management + of configuration files. Uses JSON format for human-readable and version-control friendly storage. """ @@ -84,18 +84,13 @@ class ConfigService: with open(self.config_path, "r", encoding="utf-8") as f: data = json.load(f) - # Check if migration is needed - file_version = data.get("version", "1.0.0") - if file_version != self.CONFIG_VERSION: - data = self._migrate_config(data, file_version) - # Remove version key before constructing AppConfig data.pop("version", None) config = AppConfig(**data) # Validate configuration - validation = config.validate() + validation = config.validate_config() if not validation.valid: errors = ', '.join(validation.errors or []) raise ConfigValidationError( @@ -128,7 +123,7 @@ class ConfigService: ConfigValidationError: If config validation fails """ # Validate before saving - validation = config.validate() + validation = config.validate_config() if not validation.valid: errors = ', '.join(validation.errors or []) raise ConfigValidationError( @@ -185,7 +180,7 @@ class ConfigService: Returns: ValidationResult: Validation result with errors if any """ - return config.validate() + return config.validate_config() def create_backup(self, name: Optional[str] = None) -> Path: """Create backup of current configuration. @@ -328,26 +323,6 @@ class ConfigService: except (OSError, IOError): # Ignore errors during cleanup continue - - def _migrate_config( - self, data: Dict, from_version: str # noqa: ARG002 - ) -> Dict: - """Migrate configuration from old version to current. - - Args: - data: Configuration data to migrate - from_version: Version to migrate from (reserved for future use) - - Returns: - Dict: Migrated configuration data - """ - # Currently only one version exists - # Future migrations would go here - # Example: - # if from_version == "1.0.0" and self.CONFIG_VERSION == "2.0.0": - # data = self._migrate_1_0_to_2_0(data) - - return data # Singleton instance diff --git a/src/server/services/download_service.py b/src/server/services/download_service.py index d822e9c..ddd7b48 100644 --- a/src/server/services/download_service.py +++ b/src/server/services/download_service.py @@ -2,18 +2,19 @@ This module provides a simplified queue management system for handling anime episode downloads with manual start/stop controls, progress tracking, -persistence, and retry functionality. 
+database persistence, and retry functionality. + +The service uses SQLite database for persistent storage via QueueRepository +while maintaining an in-memory cache for performance. """ from __future__ import annotations import asyncio -import json import uuid from collections import deque from concurrent.futures import ThreadPoolExecutor from datetime import datetime, timezone -from pathlib import Path -from typing import Dict, List, Optional +from typing import TYPE_CHECKING, Dict, List, Optional import structlog @@ -28,6 +29,9 @@ from src.server.models.download import ( from src.server.services.anime_service import AnimeService, AnimeServiceError from src.server.services.progress_service import ProgressService, get_progress_service +if TYPE_CHECKING: + from src.server.services.queue_repository import QueueRepository + logger = structlog.get_logger(__name__) @@ -42,7 +46,7 @@ class DownloadService: - Manual download start/stop - FIFO queue processing - Real-time progress tracking - - Queue persistence and recovery + - Database persistence via QueueRepository - Automatic retry logic - WebSocket broadcast support """ @@ -50,24 +54,28 @@ class DownloadService: def __init__( self, anime_service: AnimeService, + queue_repository: Optional["QueueRepository"] = None, max_retries: int = 3, - persistence_path: str = "./data/download_queue.json", progress_service: Optional[ProgressService] = None, ): """Initialize the download service. Args: anime_service: Service for anime operations + queue_repository: Optional repository for database persistence. + If not provided, will use default singleton. max_retries: Maximum retry attempts for failed downloads - persistence_path: Path to persist queue state progress_service: Optional progress service for tracking """ self._anime_service = anime_service self._max_retries = max_retries - self._persistence_path = Path(persistence_path) self._progress_service = progress_service or get_progress_service() + + # Database repository for persistence + self._queue_repository = queue_repository + self._db_initialized = False - # Queue storage by status + # In-memory cache for performance (synced with database) self._pending_queue: deque[DownloadItem] = deque() # Helper dict for O(1) lookup of pending items by ID self._pending_items_by_id: Dict[str, DownloadItem] = {} @@ -92,14 +100,159 @@ class DownloadService: # Track if queue progress has been initialized self._queue_progress_initialized: bool = False - # Load persisted queue - self._load_queue() - logger.info( "DownloadService initialized", max_retries=max_retries, ) + def _get_repository(self) -> "QueueRepository": + """Get the queue repository, initializing if needed. + + Returns: + QueueRepository instance + """ + if self._queue_repository is None: + from src.server.services.queue_repository import get_queue_repository + self._queue_repository = get_queue_repository() + return self._queue_repository + + async def initialize(self) -> None: + """Initialize the service by loading queue state from database. + + Should be called after database is initialized during app startup. + Note: With the simplified model, status/priority/progress are now + managed in-memory only. The database stores the queue items + for persistence across restarts. 
+ """ + if self._db_initialized: + return + + try: + repository = self._get_repository() + + # Load all items from database - they all start as PENDING + # since status is now managed in-memory only + all_items = await repository.get_all_items() + for item in all_items: + # All items from database are treated as pending + item.status = DownloadStatus.PENDING + self._add_to_pending_queue(item) + + self._db_initialized = True + + logger.info( + "Queue restored from database: pending_count=%d", + len(self._pending_queue), + ) + except Exception as e: + logger.error("Failed to load queue from database: %s", e, exc_info=True) + # Continue without persistence - queue will work in memory only + self._db_initialized = True + + async def _save_to_database(self, item: DownloadItem) -> DownloadItem: + """Save or update an item in the database. + + Args: + item: Download item to save + + Returns: + Saved item with database ID + """ + try: + repository = self._get_repository() + return await repository.save_item(item) + except Exception as e: + logger.error("Failed to save item to database: %s", e) + return item + + async def _set_error_in_database( + self, + item_id: str, + error: str, + ) -> bool: + """Set error message on an item in the database. + + Args: + item_id: Download item ID + error: Error message + + Returns: + True if update succeeded + """ + try: + repository = self._get_repository() + return await repository.set_error(item_id, error) + except Exception as e: + logger.error("Failed to set error in database: %s", e) + return False + + async def _delete_from_database(self, item_id: str) -> bool: + """Delete an item from the database. + + Args: + item_id: Download item ID + + Returns: + True if delete succeeded + """ + try: + repository = self._get_repository() + return await repository.delete_item(item_id) + except Exception as e: + logger.error("Failed to delete from database: %s", e) + return False + + async def _remove_episode_from_missing_list( + self, + series_key: str, + season: int, + episode: int, + ) -> bool: + """Remove a downloaded episode from the missing episodes list. + + Called when a download completes successfully to update the + database so the episode no longer appears as missing. + + Args: + series_key: Unique provider key for the series + season: Season number + episode: Episode number within season + + Returns: + True if episode was removed, False otherwise + """ + try: + from src.server.database.connection import get_db_session + from src.server.database.service import EpisodeService + + async with get_db_session() as db: + deleted = await EpisodeService.delete_by_series_and_episode( + db=db, + series_key=series_key, + season=season, + episode_number=episode, + ) + if deleted: + logger.info( + "Removed episode from missing list: " + "%s S%02dE%02d", + series_key, + season, + episode, + ) + # Clear the anime service cache so list_missing + # returns updated data + try: + self._anime_service._cached_list_missing.cache_clear() + except Exception: + pass + return deleted + except Exception as e: + logger.error( + "Failed to remove episode from missing list: %s", e + ) + return False + async def _init_queue_progress(self) -> None: """Initialize the download queue progress tracking. 
@@ -119,7 +272,7 @@ class DownloadService: ) self._queue_progress_initialized = True except Exception as e: - logger.error("Failed to initialize queue progress", error=str(e)) + logger.error("Failed to initialize queue progress: %s", e) def _add_to_pending_queue( self, item: DownloadItem, front: bool = False @@ -165,69 +318,6 @@ class DownloadService: """Generate unique identifier for download items.""" return str(uuid.uuid4()) - def _load_queue(self) -> None: - """Load persisted queue from disk.""" - try: - if self._persistence_path.exists(): - with open(self._persistence_path, "r", encoding="utf-8") as f: - data = json.load(f) - - # Restore pending items - for item_dict in data.get("pending", []): - item = DownloadItem(**item_dict) - # Reset status if was downloading when saved - if item.status == DownloadStatus.DOWNLOADING: - item.status = DownloadStatus.PENDING - self._add_to_pending_queue(item) - - # Restore failed items that can be retried - for item_dict in data.get("failed", []): - item = DownloadItem(**item_dict) - if item.retry_count < self._max_retries: - item.status = DownloadStatus.PENDING - self._add_to_pending_queue(item) - else: - self._failed_items.append(item) - - logger.info( - "Queue restored from disk", - pending_count=len(self._pending_queue), - failed_count=len(self._failed_items), - ) - except Exception as e: - logger.error("Failed to load persisted queue", error=str(e)) - - def _save_queue(self) -> None: - """Persist current queue state to disk.""" - try: - self._persistence_path.parent.mkdir(parents=True, exist_ok=True) - - active_items = ( - [self._active_download] if self._active_download else [] - ) - - data = { - "pending": [ - item.model_dump(mode="json") - for item in self._pending_queue - ], - "active": [ - item.model_dump(mode="json") for item in active_items - ], - "failed": [ - item.model_dump(mode="json") - for item in self._failed_items - ], - "timestamp": datetime.now(timezone.utc).isoformat(), - } - - with open(self._persistence_path, "w", encoding="utf-8") as f: - json.dump(data, f, indent=2) - - logger.debug("Queue persisted to disk") - except Exception as e: - logger.error("Failed to persist queue", error=str(e)) - async def add_to_queue( self, serie_id: str, @@ -274,22 +364,23 @@ class DownloadService: added_at=datetime.now(timezone.utc), ) - # Always append to end (FIFO order) - self._add_to_pending_queue(item, front=False) + # Save to database first to get persistent ID + saved_item = await self._save_to_database(item) - created_ids.append(item.id) + # Add to in-memory cache + self._add_to_pending_queue(saved_item, front=False) + + created_ids.append(saved_item.id) logger.info( "Item added to queue", - item_id=item.id, + item_id=saved_item.id, serie_key=serie_id, serie_name=serie_name, season=episode.season, episode=episode.episode, ) - self._save_queue() - # Notify via progress service queue_status = await self.get_queue_status() await self._progress_service.update_progress( @@ -306,7 +397,7 @@ class DownloadService: return created_ids except Exception as e: - logger.error("Failed to add items to queue", error=str(e)) + logger.error("Failed to add items to queue: %s", e) raise DownloadServiceError(f"Failed to add items: {str(e)}") from e async def remove_from_queue(self, item_ids: List[str]) -> List[str]: @@ -333,8 +424,10 @@ class DownloadService: item.completed_at = datetime.now(timezone.utc) self._failed_items.append(item) self._active_download = None + # Delete cancelled item from database + await self._delete_from_database(item_id) 
removed_ids.append(item_id) - logger.info("Cancelled active download", item_id=item_id) + logger.info("Cancelled active download: item_id=%s", item_id) continue # Check pending queue - O(1) lookup using helper dict @@ -342,13 +435,14 @@ class DownloadService: item = self._pending_items_by_id[item_id] self._pending_queue.remove(item) del self._pending_items_by_id[item_id] + # Delete from database + await self._delete_from_database(item_id) removed_ids.append(item_id) logger.info( "Removed from pending queue", item_id=item_id ) if removed_ids: - self._save_queue() # Notify via progress service queue_status = await self.get_queue_status() await self._progress_service.update_progress( @@ -365,7 +459,7 @@ class DownloadService: return removed_ids except Exception as e: - logger.error("Failed to remove items", error=str(e)) + logger.error("Failed to remove items: %s", e) raise DownloadServiceError( f"Failed to remove items: {str(e)}" ) from e @@ -379,6 +473,10 @@ class DownloadService: Raises: DownloadServiceError: If reordering fails + + Note: + Reordering is done in-memory only. Database priority is not + updated since the in-memory queue defines the actual order. """ try: # Build new queue based on specified order @@ -399,9 +497,6 @@ class DownloadService: # Replace queue self._pending_queue = new_queue - # Save updated queue - self._save_queue() - # Notify via progress service queue_status = await self.get_queue_status() await self._progress_service.update_progress( @@ -418,7 +513,7 @@ class DownloadService: logger.info("Queue reordered", reordered_count=len(item_ids)) except Exception as e: - logger.error("Failed to reorder queue", error=str(e)) + logger.error("Failed to reorder queue: %s", e) raise DownloadServiceError( f"Failed to reorder queue: {str(e)}" ) from e @@ -462,7 +557,7 @@ class DownloadService: return "queue_started" except Exception as e: - logger.error("Failed to start queue processing", error=str(e)) + logger.error("Failed to start queue processing: %s", e) raise DownloadServiceError( f"Failed to start queue processing: {str(e)}" ) from e @@ -692,13 +787,15 @@ class DownloadService: Number of items cleared """ count = len(self._pending_queue) + + # Delete all pending items from database + for item_id in list(self._pending_items_by_id.keys()): + await self._delete_from_database(item_id) + self._pending_queue.clear() self._pending_items_by_id.clear() logger.info("Cleared pending items", count=count) - # Save queue state - self._save_queue() - # Notify via progress service if count > 0: queue_status = await self.get_queue_status() @@ -749,14 +846,15 @@ class DownloadService: self._add_to_pending_queue(item) retried_ids.append(item.id) + # Status is now managed in-memory only + logger.info( - "Retrying failed item", - item_id=item.id, - retry_count=item.retry_count + "Retrying failed item: item_id=%s, retry_count=%d", + item.id, + item.retry_count, ) if retried_ids: - self._save_queue() # Notify via progress service queue_status = await self.get_queue_status() await self._progress_service.update_progress( @@ -773,7 +871,7 @@ class DownloadService: return retried_ids except Exception as e: - logger.error("Failed to retry items", error=str(e)) + logger.error("Failed to retry items: %s", e) raise DownloadServiceError( f"Failed to retry: {str(e)}" ) from e @@ -790,18 +888,17 @@ class DownloadService: logger.info("Skipping download due to shutdown") return - # Update status + # Update status in memory (status is now in-memory only) item.status = DownloadStatus.DOWNLOADING 
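The pending queue above pairs a deque with an id-keyed dict, which is what makes the "O(1) lookup" removals possible. Reduced to its essentials (illustrative, not repository code):

```python
from collections import deque
from dataclasses import dataclass
from typing import Deque, Dict

@dataclass
class Item:
    id: str

pending: Deque[Item] = deque()   # preserves FIFO order
by_id: Dict[str, Item] = {}      # O(1) membership and lookup by id

def enqueue(item: Item) -> None:
    pending.append(item)
    by_id[item.id] = item

def remove(item_id: str) -> bool:
    item = by_id.pop(item_id, None)
    if item is None:
        return False
    pending.remove(item)         # deque.remove itself is O(n); the lookup was O(1)
    return True
```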
item.started_at = datetime.now(timezone.utc) self._active_download = item logger.info( - "Starting download", - item_id=item.id, - serie_key=item.serie_id, - serie_name=item.serie_name, - season=item.episode.season, - episode=item.episode.episode, + "Starting download: item_id=%s, serie_key=%s, S%02dE%02d", + item.id, + item.serie_id, + item.episode.season, + item.episode.episode, ) # Execute download via anime service @@ -809,7 +906,8 @@ class DownloadService: # - download started/progress/completed/failed events # - All updates forwarded to ProgressService # - ProgressService broadcasts to WebSocket clients - # Use serie_folder for filesystem operations and serie_id (key) for identification + # Use serie_folder for filesystem operations + # and serie_id (key) for identification if not item.serie_folder: raise DownloadServiceError( f"Missing serie_folder for download item {item.id}. " @@ -835,8 +933,18 @@ class DownloadService: self._completed_items.append(item) + # Delete completed item from database (status is in-memory) + await self._delete_from_database(item.id) + + # Remove episode from missing episodes list in database + await self._remove_episode_from_missing_list( + series_key=item.serie_id, + season=item.episode.season, + episode=item.episode.episode, + ) + logger.info( - "Download completed successfully", item_id=item.id + "Download completed successfully: item_id=%s", item.id ) else: raise AnimeServiceError("Download returned False") @@ -844,15 +952,36 @@ class DownloadService: except asyncio.CancelledError: # Handle task cancellation during shutdown logger.info( - "Download cancelled during shutdown", - item_id=item.id, + "Download task cancelled: item_id=%s", + item.id, ) item.status = DownloadStatus.CANCELLED item.completed_at = datetime.now(timezone.utc) + # Delete cancelled item from database + await self._delete_from_database(item.id) # Return item to pending queue if not shutting down if not self._is_shutting_down: self._add_to_pending_queue(item, front=True) + # Re-save to database as pending + await self._save_to_database(item) raise # Re-raise to properly cancel the task + + except InterruptedError: + # Handle download cancellation from provider + logger.info( + "Download interrupted/cancelled: item_id=%s", + item.id, + ) + item.status = DownloadStatus.CANCELLED + item.completed_at = datetime.now(timezone.utc) + # Delete cancelled item from database + await self._delete_from_database(item.id) + # Return item to pending queue if not shutting down + if not self._is_shutting_down: + self._add_to_pending_queue(item, front=True) + # Re-save to database as pending + await self._save_to_database(item) + # Don't re-raise - this is handled gracefully except Exception as e: # Handle failure @@ -861,11 +990,14 @@ class DownloadService: item.error = str(e) self._failed_items.append(item) + # Set error in database + await self._set_error_in_database(item.id, str(e)) + logger.error( - "Download failed", - item_id=item.id, - error=str(e), - retry_count=item.retry_count, + "Download failed: item_id=%s, error=%s, retry_count=%d", + item.id, + str(e), + item.retry_count, ) # Note: Failure is already broadcast by AnimeService # via ProgressService when SeriesApp fires failed event @@ -874,44 +1006,8 @@ class DownloadService: # Remove from active downloads if self._active_download and self._active_download.id == item.id: self._active_download = None - - self._save_queue() - async def start(self) -> None: - """Initialize the download queue service (compatibility method). 
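The two cancellation paths above behave differently on purpose: `asyncio.CancelledError` must propagate so the wrapping task actually cancels during shutdown, while `InterruptedError` is a provider-level cancel that is absorbed so the queue loop keeps running. A condensed sketch with placeholder helpers:

```python
import asyncio

async def run_one(item) -> None:
    try:
        await perform_download(item)   # placeholder download coroutine
    except asyncio.CancelledError:
        requeue(item)                  # shutdown path: put the item back...
        raise                          # ...then let the task cancellation finish
    except InterruptedError:
        requeue(item)                  # provider cancel: handled in place, no re-raise
```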
- - Note: Downloads are started manually via start_next_download(). - """ - logger.info("Download queue service initialized") - async def stop(self) -> None: - """Stop the download queue service and cancel active downloads. - - Cancels any active download and shuts down the thread pool immediately. - """ - logger.info("Stopping download queue service...") - - # Set shutdown flag - self._is_shutting_down = True - self._is_stopped = True - - # Cancel active download task if running - if self._active_download_task and not self._active_download_task.done(): - logger.info("Cancelling active download task...") - self._active_download_task.cancel() - try: - await self._active_download_task - except asyncio.CancelledError: - logger.info("Active download task cancelled") - - # Save final state - self._save_queue() - - # Shutdown executor immediately, don't wait for tasks - logger.info("Shutting down thread pool executor...") - self._executor.shutdown(wait=False, cancel_futures=True) - - logger.info("Download queue service stopped") # Singleton instance diff --git a/src/server/services/progress_service.py b/src/server/services/progress_service.py index 9b5dc82..06d1beb 100644 --- a/src/server/services/progress_service.py +++ b/src/server/services/progress_service.py @@ -133,6 +133,30 @@ class ProgressServiceError(Exception): """Service-level exception for progress operations.""" +# Mapping from ProgressType to WebSocket room names +# This ensures compatibility with the valid rooms defined in the WebSocket API: +# "downloads", "queue", "scan", "system", "errors" +_PROGRESS_TYPE_TO_ROOM: Dict[ProgressType, str] = { + ProgressType.DOWNLOAD: "downloads", + ProgressType.SCAN: "scan", + ProgressType.QUEUE: "queue", + ProgressType.SYSTEM: "system", + ProgressType.ERROR: "errors", +} + + +def _get_room_for_progress_type(progress_type: ProgressType) -> str: + """Get the WebSocket room name for a progress type. + + Args: + progress_type: The type of progress update + + Returns: + The WebSocket room name to broadcast to + """ + return _PROGRESS_TYPE_TO_ROOM.get(progress_type, "system") + + class ProgressService: """Manages real-time progress updates and broadcasting. 
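Before this change, rooms were derived as `f"{progress_type.value}_progress"` (e.g. `download_progress`), which never matched the WebSocket API's valid room names; the lookup table pins each type to a real room. A quick illustration, assuming the names from `progress_service.py` above (not a repo test):

```python
assert _get_room_for_progress_type(ProgressType.DOWNLOAD) == "downloads"
assert _get_room_for_progress_type(ProgressType.ERROR) == "errors"
# Types missing from the table fall back to the broad "system" room.
assert _get_room_for_progress_type(ProgressType.SYSTEM) == "system"
```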
@@ -293,7 +317,7 @@ class ProgressService: ) # Emit event to subscribers - room = f"{progress_type.value}_progress" + room = _get_room_for_progress_type(progress_type) event = ProgressEvent( event_type=f"{progress_type.value}_progress", progress_id=progress_id, @@ -370,7 +394,7 @@ class ProgressService: should_broadcast = force_broadcast or percent_change >= 1.0 if should_broadcast: - room = f"{update.type.value}_progress" + room = _get_room_for_progress_type(update.type) event = ProgressEvent( event_type=f"{update.type.value}_progress", progress_id=progress_id, @@ -427,7 +451,7 @@ class ProgressService: ) # Emit completion event - room = f"{update.type.value}_progress" + room = _get_room_for_progress_type(update.type) event = ProgressEvent( event_type=f"{update.type.value}_progress", progress_id=progress_id, @@ -483,7 +507,7 @@ class ProgressService: ) # Emit failure event - room = f"{update.type.value}_progress" + room = _get_room_for_progress_type(update.type) event = ProgressEvent( event_type=f"{update.type.value}_progress", progress_id=progress_id, @@ -533,7 +557,7 @@ class ProgressService: ) # Emit cancellation event - room = f"{update.type.value}_progress" + room = _get_room_for_progress_type(update.type) event = ProgressEvent( event_type=f"{update.type.value}_progress", progress_id=progress_id, diff --git a/src/server/services/queue_repository.py b/src/server/services/queue_repository.py new file mode 100644 index 0000000..d017ec6 --- /dev/null +++ b/src/server/services/queue_repository.py @@ -0,0 +1,471 @@ +"""Queue repository adapter for database-backed download queue operations. + +This module provides a repository adapter that wraps the DownloadQueueService +and provides the interface needed by DownloadService for queue persistence. + +The repository pattern abstracts the database operations from the business +logic, allowing the DownloadService to work with domain models (DownloadItem) +while the repository handles conversion to/from database models. + +Transaction Support: + Compound operations (save_item, clear_all) are wrapped in atomic() + context managers to ensure all-or-nothing behavior. If any part of + a compound operation fails, all changes are rolled back. +""" +from __future__ import annotations + +import logging +from datetime import datetime, timezone +from typing import Callable, List, Optional + +from sqlalchemy.ext.asyncio import AsyncSession + +from src.server.database.models import DownloadQueueItem as DBDownloadQueueItem +from src.server.database.service import ( + AnimeSeriesService, + DownloadQueueService, + EpisodeService, +) +from src.server.database.transaction import atomic +from src.server.models.download import ( + DownloadItem, + DownloadPriority, + DownloadStatus, + EpisodeIdentifier, +) + +logger = logging.getLogger(__name__) + + +class QueueRepositoryError(Exception): + """Repository-level exception for queue operations.""" + + +class QueueRepository: + """Repository adapter for database-backed download queue operations. + + Provides clean interface for queue operations while handling + model conversion between Pydantic (DownloadItem) and SQLAlchemy + (DownloadQueueItem) models. + + Note: The database model (DownloadQueueItem) is simplified and only + stores episode_id as a foreign key. Status, priority, progress, and + retry_count are managed in-memory by the DownloadService. + + Transaction Support: + All compound operations are wrapped in atomic() transactions. + This ensures data consistency even if operations fail mid-way. 
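The `atomic()` wrapping described in the repository docstring below has this shape; `atomic()` is the project's own context manager from `src.server.database.transaction`, and the two steps are placeholders:

```python
from src.server.database.transaction import atomic

async def compound_write(session) -> None:
    async with atomic(session):
        await create_parent(session)   # placeholder step 1
        await create_child(session)    # placeholder step 2
    # Reaching here means both steps committed together;
    # any exception inside the block rolls both back.
```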
+ + Attributes: + _db_session_factory: Factory function to create database sessions + """ + + def __init__( + self, + db_session_factory: Callable[[], AsyncSession], + ) -> None: + """Initialize the queue repository. + + Args: + db_session_factory: Factory function that returns AsyncSession + """ + self._db_session_factory = db_session_factory + logger.info("QueueRepository initialized") + + # ========================================================================= + # Model Conversion Methods + # ========================================================================= + + def _from_db_model( + self, + db_item: DBDownloadQueueItem, + item_id: Optional[str] = None, + ) -> DownloadItem: + """Convert database model to DownloadItem. + + Note: Since the database model is simplified, status, priority, + progress, and retry_count default to initial values. + + Args: + db_item: SQLAlchemy download queue item + item_id: Optional override for item ID + + Returns: + Pydantic download item with default status/priority + """ + # Get episode info from the related Episode object + episode = db_item.episode + series = db_item.series + + episode_identifier = EpisodeIdentifier( + season=episode.season if episode else 1, + episode=episode.episode_number if episode else 1, + title=episode.title if episode else None, + ) + + return DownloadItem( + id=item_id or str(db_item.id), + serie_id=series.key if series else "", + serie_folder=series.folder if series else "", + serie_name=series.name if series else "", + episode=episode_identifier, + status=DownloadStatus.PENDING, # Default - managed in-memory + priority=DownloadPriority.NORMAL, # Default - managed in-memory + added_at=db_item.created_at or datetime.now(timezone.utc), + started_at=db_item.started_at, + completed_at=db_item.completed_at, + progress=None, # Managed in-memory + error=db_item.error_message, + retry_count=0, # Managed in-memory + source_url=db_item.download_url, + ) + + # ========================================================================= + # CRUD Operations + # ========================================================================= + + async def save_item( + self, + item: DownloadItem, + db: Optional[AsyncSession] = None, + ) -> DownloadItem: + """Save a download item to the database atomically. + + Creates a new record if the item doesn't exist in the database. + This compound operation (series lookup/create, episode lookup/create, + queue item create) is wrapped in a transaction for atomicity. + + Note: Status, priority, progress, and retry_count are NOT persisted. 
+ + Args: + item: Download item to save + db: Optional existing database session + + Returns: + Saved download item with database ID + + Raises: + QueueRepositoryError: If save operation fails + """ + session = db or self._db_session_factory() + manage_session = db is None + + try: + async with atomic(session): + # Find series by key + series = await AnimeSeriesService.get_by_key(session, item.serie_id) + + if not series: + # Create series if it doesn't exist + # Use a placeholder site URL - will be updated later when actual URL is known + site_url = getattr(item, 'serie_site', None) or f"https://aniworld.to/anime/{item.serie_id}" + series = await AnimeSeriesService.create( + db=session, + key=item.serie_id, + name=item.serie_name, + site=site_url, + folder=item.serie_folder, + ) + logger.info( + "Created new series for queue item: key=%s, name=%s", + item.serie_id, + item.serie_name, + ) + + # Find or create episode + episode = await EpisodeService.get_by_episode( + session, + series.id, + item.episode.season, + item.episode.episode, + ) + + if not episode: + # Create episode if it doesn't exist + episode = await EpisodeService.create( + db=session, + series_id=series.id, + season=item.episode.season, + episode_number=item.episode.episode, + title=item.episode.title, + ) + logger.info( + "Created new episode for queue item: S%02dE%02d", + item.episode.season, + item.episode.episode, + ) + + # Create queue item + db_item = await DownloadQueueService.create( + db=session, + series_id=series.id, + episode_id=episode.id, + download_url=str(item.source_url) if item.source_url else None, + ) + + # Update the item ID with the database ID + item.id = str(db_item.id) + + # Transaction committed by atomic() context manager + + logger.debug( + "Saved queue item to database: item_id=%s, serie_key=%s", + item.id, + item.serie_id, + ) + + return item + + except Exception as e: + # Rollback handled by atomic() context manager + logger.error("Failed to save queue item: %s", e) + raise QueueRepositoryError(f"Failed to save item: {e}") from e + finally: + if manage_session: + await session.close() + + async def get_item( + self, + item_id: str, + db: Optional[AsyncSession] = None, + ) -> Optional[DownloadItem]: + """Get a download item by ID. + + Args: + item_id: Download item ID (database ID as string) + db: Optional existing database session + + Returns: + Download item or None if not found + + Raises: + QueueRepositoryError: If query fails + """ + session = db or self._db_session_factory() + manage_session = db is None + + try: + db_item = await DownloadQueueService.get_by_id( + session, int(item_id) + ) + + if not db_item: + return None + + return self._from_db_model(db_item, item_id) + + except ValueError: + # Invalid ID format + return None + except Exception as e: + logger.error("Failed to get queue item: %s", e) + raise QueueRepositoryError(f"Failed to get item: {e}") from e + finally: + if manage_session: + await session.close() + + async def get_all_items( + self, + db: Optional[AsyncSession] = None, + ) -> List[DownloadItem]: + """Get all download items regardless of status. + + Note: All items are returned with default status (PENDING) since + status is now managed in-memory by the DownloadService. 
+ + Args: + db: Optional existing database session + + Returns: + List of all download items + + Raises: + QueueRepositoryError: If query fails + """ + session = db or self._db_session_factory() + manage_session = db is None + + try: + db_items = await DownloadQueueService.get_all( + session, with_series=True + ) + return [self._from_db_model(item) for item in db_items] + + except Exception as e: + logger.error("Failed to get all items: %s", e) + raise QueueRepositoryError(f"Failed to get all items: {e}") from e + finally: + if manage_session: + await session.close() + + async def set_error( + self, + item_id: str, + error: str, + db: Optional[AsyncSession] = None, + ) -> bool: + """Set error message on a download item. + + Args: + item_id: Download item ID + error: Error message + db: Optional existing database session + + Returns: + True if update succeeded, False if item not found + + Raises: + QueueRepositoryError: If update fails + """ + session = db or self._db_session_factory() + manage_session = db is None + + try: + result = await DownloadQueueService.set_error( + session, + int(item_id), + error, + ) + + if manage_session: + await session.commit() + + success = result is not None + + if success: + logger.debug( + "Set error on queue item: item_id=%s", + item_id, + ) + + return success + + except ValueError: + return False + except Exception as e: + if manage_session: + await session.rollback() + logger.error("Failed to set error: %s", e) + raise QueueRepositoryError(f"Failed to set error: {e}") from e + finally: + if manage_session: + await session.close() + + async def delete_item( + self, + item_id: str, + db: Optional[AsyncSession] = None, + ) -> bool: + """Delete a download item from the database. + + Args: + item_id: Download item ID + db: Optional existing database session + + Returns: + True if item was deleted, False if not found + + Raises: + QueueRepositoryError: If delete fails + """ + session = db or self._db_session_factory() + manage_session = db is None + + try: + result = await DownloadQueueService.delete(session, int(item_id)) + + if manage_session: + await session.commit() + + if result: + logger.debug("Deleted queue item: item_id=%s", item_id) + + return result + + except ValueError: + return False + except Exception as e: + if manage_session: + await session.rollback() + logger.error("Failed to delete item: %s", e) + raise QueueRepositoryError(f"Failed to delete item: {e}") from e + finally: + if manage_session: + await session.close() + + async def clear_all( + self, + db: Optional[AsyncSession] = None, + ) -> int: + """Clear all download items from the queue atomically. + + This bulk delete operation is wrapped in a transaction. + Either all items are deleted or none are. 
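A minimal end-to-end sketch of the repository above; the `DownloadItem` construction is abbreviated and its field defaults are assumed, not verified against the model:

```python
from src.server.models.download import DownloadItem, EpisodeIdentifier
from src.server.services.queue_repository import get_queue_repository

async def demo() -> None:
    repo = get_queue_repository()      # default session factory assumed
    item = DownloadItem(
        id="temp",                     # replaced by the database ID on save
        serie_id="attack-on-titan",
        serie_folder="Attack on Titan",
        serie_name="Attack on Titan",
        episode=EpisodeIdentifier(season=1, episode=1),
    )
    saved = await repo.save_item(item)
    restored = await repo.get_all_items()   # rows all come back as PENDING
    await repo.delete_item(saved.id)
```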
+ + Args: + db: Optional existing database session + + Returns: + Number of items cleared + + Raises: + QueueRepositoryError: If operation fails + """ + session = db or self._db_session_factory() + manage_session = db is None + + try: + async with atomic(session): + # Use the bulk clear operation for efficiency and atomicity + count = await DownloadQueueService.clear_all(session) + + # Transaction committed by atomic() context manager + + logger.info("Cleared all items from queue: count=%d", count) + return count + + except Exception as e: + # Rollback handled by atomic() context manager + logger.error("Failed to clear queue: %s", e) + raise QueueRepositoryError(f"Failed to clear queue: {e}") from e + finally: + if manage_session: + await session.close() + + +# Singleton instance +_queue_repository_instance: Optional[QueueRepository] = None + + +def get_queue_repository( + db_session_factory: Optional[Callable[[], AsyncSession]] = None, +) -> QueueRepository: + """Get or create the QueueRepository singleton. + + Args: + db_session_factory: Optional factory function for database sessions. + If not provided, uses default from connection module. + + Returns: + QueueRepository singleton instance + """ + global _queue_repository_instance + + if _queue_repository_instance is None: + if db_session_factory is None: + # Use default session factory + from src.server.database.connection import get_async_session_factory + db_session_factory = get_async_session_factory + + _queue_repository_instance = QueueRepository(db_session_factory) + + return _queue_repository_instance + + +def reset_queue_repository() -> None: + """Reset the QueueRepository singleton. + + Used for testing to ensure fresh state between tests. + """ + global _queue_repository_instance + _queue_repository_instance = None diff --git a/src/server/services/scan_service.py b/src/server/services/scan_service.py index bcedba1..f68eab2 100644 --- a/src/server/services/scan_service.py +++ b/src/server/services/scan_service.py @@ -13,20 +13,8 @@ from typing import Any, Callable, Dict, List, Optional import structlog -from src.core.interfaces.callbacks import ( - CallbackManager, - CompletionCallback, - CompletionContext, - ErrorCallback, - ErrorContext, - OperationType, - ProgressCallback, - ProgressContext, - ProgressPhase, -) from src.server.services.progress_service import ( ProgressService, - ProgressStatus, ProgressType, get_progress_service, ) @@ -104,173 +92,6 @@ class ScanProgress: return result -class ScanServiceProgressCallback(ProgressCallback): - """Callback implementation for forwarding scan progress to ScanService. - - This callback receives progress events from SerieScanner and forwards - them to the ScanService for processing and broadcasting. - """ - - def __init__( - self, - service: "ScanService", - scan_progress: ScanProgress, - ): - """Initialize the callback. - - Args: - service: Parent ScanService instance - scan_progress: ScanProgress to update - """ - self._service = service - self._scan_progress = scan_progress - - def on_progress(self, context: ProgressContext) -> None: - """Handle progress update from SerieScanner. 
- - Args: - context: Progress context with key and folder information - """ - self._scan_progress.current = context.current - self._scan_progress.total = context.total - self._scan_progress.percentage = context.percentage - self._scan_progress.message = context.message - self._scan_progress.key = context.key - self._scan_progress.folder = context.folder - self._scan_progress.updated_at = datetime.now(timezone.utc) - - if context.phase == ProgressPhase.STARTING: - self._scan_progress.status = "started" - elif context.phase == ProgressPhase.IN_PROGRESS: - self._scan_progress.status = "in_progress" - elif context.phase == ProgressPhase.COMPLETED: - self._scan_progress.status = "completed" - elif context.phase == ProgressPhase.FAILED: - self._scan_progress.status = "failed" - - # Forward to service for broadcasting - # Use run_coroutine_threadsafe if event loop is available - try: - loop = asyncio.get_running_loop() - asyncio.run_coroutine_threadsafe( - self._service._handle_progress_update(self._scan_progress), - loop - ) - except RuntimeError: - # No running event loop - likely in test or sync context - pass - - -class ScanServiceErrorCallback(ErrorCallback): - """Callback implementation for handling scan errors. - - This callback receives error events from SerieScanner and forwards - them to the ScanService for processing and broadcasting. - """ - - def __init__( - self, - service: "ScanService", - scan_progress: ScanProgress, - ): - """Initialize the callback. - - Args: - service: Parent ScanService instance - scan_progress: ScanProgress to update - """ - self._service = service - self._scan_progress = scan_progress - - def on_error(self, context: ErrorContext) -> None: - """Handle error from SerieScanner. - - Args: - context: Error context with key and folder information - """ - error_msg = context.message - if context.folder: - error_msg = f"[{context.folder}] {error_msg}" - - self._scan_progress.errors.append(error_msg) - self._scan_progress.updated_at = datetime.now(timezone.utc) - - logger.warning( - "Scan error", - key=context.key, - folder=context.folder, - error=str(context.error), - recoverable=context.recoverable, - ) - - # Forward to service for broadcasting - # Use run_coroutine_threadsafe if event loop is available - try: - loop = asyncio.get_running_loop() - asyncio.run_coroutine_threadsafe( - self._service._handle_scan_error( - self._scan_progress, - context, - ), - loop - ) - except RuntimeError: - # No running event loop - likely in test or sync context - pass - - -class ScanServiceCompletionCallback(CompletionCallback): - """Callback implementation for handling scan completion. - - This callback receives completion events from SerieScanner and forwards - them to the ScanService for processing and broadcasting. - """ - - def __init__( - self, - service: "ScanService", - scan_progress: ScanProgress, - ): - """Initialize the callback. - - Args: - service: Parent ScanService instance - scan_progress: ScanProgress to update - """ - self._service = service - self._scan_progress = scan_progress - - def on_completion(self, context: CompletionContext) -> None: - """Handle completion from SerieScanner. 
- - Args: - context: Completion context with statistics - """ - self._scan_progress.status = "completed" if context.success else "failed" - self._scan_progress.message = context.message - self._scan_progress.updated_at = datetime.now(timezone.utc) - - if context.statistics: - self._scan_progress.series_found = context.statistics.get( - "series_found", 0 - ) - - # Forward to service for broadcasting - # Use run_coroutine_threadsafe if event loop is available - try: - loop = asyncio.get_running_loop() - asyncio.run_coroutine_threadsafe( - self._service._handle_scan_completion( - self._scan_progress, - context, - ), - loop - ) - except RuntimeError: - # No running event loop - likely in test or sync context - pass - - class ScanService: """Manages anime library scan operations. @@ -376,13 +197,13 @@ class ScanService: async def start_scan( self, - scanner_factory: Callable[..., Any], + scanner: Any, # SerieScanner instance ) -> str: """Start a new library scan. Args: - scanner_factory: Factory function that creates a SerieScanner. - The factory should accept a callback_manager parameter. + scanner: SerieScanner instance to use for scanning. + The service will subscribe to its events. Returns: Scan ID for tracking @@ -415,7 +236,7 @@ class ScanService: message="Initializing scan...", ) except Exception as e: - logger.error("Failed to start progress tracking", error=str(e)) + logger.error("Failed to start progress tracking: %s", e) # Emit scan started event await self._emit_scan_event({ @@ -423,42 +244,82 @@ class ScanService: "scan_id": scan_id, "message": "Library scan started", }) + + # Create event handlers for the scanner + def on_progress_handler(progress_data: Dict[str, Any]) -> None: + """Handle progress events from scanner.""" + scan_progress.current = progress_data.get('current', 0) + scan_progress.total = progress_data.get('total', 0) + scan_progress.percentage = progress_data.get('percentage', 0.0) + scan_progress.message = progress_data.get('message', '') + scan_progress.updated_at = datetime.now(timezone.utc) + + phase = progress_data.get('phase', '') + if phase == 'STARTING': + scan_progress.status = "started" + elif phase == 'IN_PROGRESS': + scan_progress.status = "in_progress" + + # Schedule the progress update on the event loop + try: + loop = asyncio.get_running_loop() + asyncio.run_coroutine_threadsafe( + self._handle_progress_update(scan_progress), + loop + ) + except RuntimeError: + pass + + def on_error_handler(error_data: Dict[str, Any]) -> None: + """Handle error events from scanner.""" + error_msg = error_data.get('message', 'Unknown error') + scan_progress.errors.append(error_msg) + scan_progress.updated_at = datetime.now(timezone.utc) + + logger.warning( + "Scan error", + error=str(error_data.get('error')), + recoverable=error_data.get('recoverable', True), + ) + + # Schedule the error handling on the event loop + try: + loop = asyncio.get_running_loop() + asyncio.run_coroutine_threadsafe( + self._handle_scan_error(scan_progress, error_data), + loop + ) + except RuntimeError: + pass + + def on_completion_handler(completion_data: Dict[str, Any]) -> None: + """Handle completion events from scanner.""" + success = completion_data.get('success', False) + scan_progress.status = "completed" if success else "failed" + scan_progress.message = completion_data.get('message', '') + scan_progress.updated_at = datetime.now(timezone.utc) + + if 'statistics' in completion_data: + stats = completion_data['statistics'] + scan_progress.series_found = stats.get('series_found', 0) + + # 
Schedule the completion handling on the event loop + try: + loop = asyncio.get_running_loop() + asyncio.run_coroutine_threadsafe( + self._handle_scan_completion(scan_progress, completion_data), + loop + ) + except RuntimeError: + pass + + # Subscribe to scanner events + scanner.subscribe_on_progress(on_progress_handler) + scanner.subscribe_on_error(on_error_handler) + scanner.subscribe_on_completion(on_completion_handler) return scan_id - def create_callback_manager( - self, - scan_progress: Optional[ScanProgress] = None, - ) -> CallbackManager: - """Create a callback manager for scan operations. - - Args: - scan_progress: Optional scan progress to use. If None, - uses current scan progress. - - Returns: - CallbackManager configured with scan callbacks - """ - progress = scan_progress or self._current_scan - if not progress: - progress = ScanProgress(str(uuid.uuid4())) - self._current_scan = progress - - callback_manager = CallbackManager() - - # Register callbacks - callback_manager.register_progress_callback( - ScanServiceProgressCallback(self, progress) - ) - callback_manager.register_error_callback( - ScanServiceErrorCallback(self, progress) - ) - callback_manager.register_completion_callback( - ScanServiceCompletionCallback(self, progress) - ) - - return callback_manager - async def _handle_progress_update( self, scan_progress: ScanProgress, @@ -475,11 +336,9 @@ class ScanService: current=scan_progress.current, total=scan_progress.total, message=scan_progress.message, - key=scan_progress.key, - folder=scan_progress.folder, ) except Exception as e: - logger.debug("Progress update skipped", error=str(e)) + logger.debug("Progress update skipped: %s", e) # Emit progress event with key as primary identifier await self._emit_scan_event({ @@ -490,36 +349,38 @@ class ScanService: async def _handle_scan_error( self, scan_progress: ScanProgress, - error_context: ErrorContext, + error_data: Dict[str, Any], ) -> None: """Handle a scan error. Args: scan_progress: Current scan progress - error_context: Error context with key and folder metadata + error_data: Error data dictionary with error info """ # Emit error event with key as primary identifier await self._emit_scan_event({ "type": "scan_error", "scan_id": scan_progress.scan_id, - "key": error_context.key, - "folder": error_context.folder, - "error": str(error_context.error), - "message": error_context.message, - "recoverable": error_context.recoverable, + "error": str(error_data.get('error')), + "message": error_data.get('message', 'Unknown error'), + "recoverable": error_data.get('recoverable', True), }) async def _handle_scan_completion( self, scan_progress: ScanProgress, - completion_context: CompletionContext, + completion_data: Dict[str, Any], ) -> None: """Handle scan completion. 
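`start_scan()` now relies on a plain subscribe/notify contract instead of the old `CallbackManager`. A toy stub showing the assumed shape of that contract (the real `SerieScanner` wiring may differ):

```python
from typing import Any, Callable, Dict, List

Handler = Callable[[Dict[str, Any]], None]

class ScannerStub:
    def __init__(self) -> None:
        self._progress: List[Handler] = []
        self._errors: List[Handler] = []
        self._completed: List[Handler] = []

    def subscribe_on_progress(self, handler: Handler) -> None:
        self._progress.append(handler)

    def subscribe_on_error(self, handler: Handler) -> None:
        self._errors.append(handler)

    def subscribe_on_completion(self, handler: Handler) -> None:
        self._completed.append(handler)

    def scan(self) -> None:
        # Emit dict payloads with the keys the ScanService handlers read.
        for cb in self._progress:
            cb({"current": 1, "total": 10, "percentage": 10.0,
                "message": "scanning", "phase": "IN_PROGRESS"})
        for cb in self._completed:
            cb({"success": True, "message": "done",
                "statistics": {"series_found": 10}})
```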
Args: scan_progress: Final scan progress - completion_context: Completion context with statistics + completion_data: Completion data dictionary with statistics """ + success = completion_data.get('success', False) + message = completion_data.get('message', '') + statistics = completion_data.get('statistics', {}) + async with self._lock: self._is_scanning = False @@ -530,33 +391,33 @@ class ScanService: # Complete progress tracking try: - if completion_context.success: + if success: await self._progress_service.complete_progress( progress_id=f"scan_{scan_progress.scan_id}", - message=completion_context.message, + message=message, ) else: await self._progress_service.fail_progress( progress_id=f"scan_{scan_progress.scan_id}", - error_message=completion_context.message, + error_message=message, ) except Exception as e: - logger.debug("Progress completion skipped", error=str(e)) + logger.debug("Progress completion skipped: %s", e) # Emit completion event await self._emit_scan_event({ - "type": "scan_completed" if completion_context.success else "scan_failed", + "type": "scan_completed" if success else "scan_failed", "scan_id": scan_progress.scan_id, - "success": completion_context.success, - "message": completion_context.message, - "statistics": completion_context.statistics, + "success": success, + "message": message, + "statistics": statistics, "data": scan_progress.to_dict(), }) logger.info( "Scan completed", scan_id=scan_progress.scan_id, - success=completion_context.success, + success=success, series_found=scan_progress.series_found, errors_count=len(scan_progress.errors), ) @@ -598,7 +459,7 @@ class ScanService: error_message="Scan cancelled by user", ) except Exception as e: - logger.debug("Progress cancellation skipped", error=str(e)) + logger.debug("Progress cancellation skipped: %s", e) logger.info("Scan cancelled") return True diff --git a/src/server/services/websocket_service.py b/src/server/services/websocket_service.py index 5a78130..39ef2f3 100644 --- a/src/server/services/websocket_service.py +++ b/src/server/services/websocket_service.py @@ -322,6 +322,85 @@ class ConnectionManager: connection_id=connection_id, ) + async def shutdown(self, timeout: float = 5.0) -> None: + """Gracefully shutdown all WebSocket connections. + + Broadcasts a shutdown notification to all clients, then closes + each connection with proper close codes. 
+ + Args: + timeout: Maximum time (seconds) to wait for all closes to complete + """ + logger.info( + "Initiating WebSocket shutdown, connections=%d", + len(self._active_connections) + ) + + # Broadcast shutdown notification to all clients + shutdown_message = { + "type": "server_shutdown", + "timestamp": datetime.now(timezone.utc).isoformat(), + "data": { + "message": "Server is shutting down", + "reason": "graceful_shutdown", + }, + } + + try: + await self.broadcast(shutdown_message) + except Exception as e: + logger.warning("Failed to broadcast shutdown message: %s", e) + + # Close all connections gracefully + async with self._lock: + connection_ids = list(self._active_connections.keys()) + + close_tasks = [] + for connection_id in connection_ids: + websocket = self._active_connections.get(connection_id) + if websocket: + close_tasks.append( + self._close_connection_gracefully(connection_id, websocket) + ) + + if close_tasks: + # Wait for all closes with timeout + try: + await asyncio.wait_for( + asyncio.gather(*close_tasks, return_exceptions=True), + timeout=timeout + ) + except asyncio.TimeoutError: + logger.warning( + "WebSocket shutdown timed out after %.1f seconds", timeout + ) + + # Clear all data structures + async with self._lock: + self._active_connections.clear() + self._rooms.clear() + self._connection_metadata.clear() + + logger.info("WebSocket shutdown complete") + + async def _close_connection_gracefully( + self, connection_id: str, websocket: WebSocket + ) -> None: + """Close a single WebSocket connection gracefully. + + Args: + connection_id: The connection identifier + websocket: The WebSocket connection to close + """ + try: + # Code 1001 = Going Away (server shutdown) + await websocket.close(code=1001, reason="Server shutdown") + logger.debug("Closed WebSocket connection: %s", connection_id) + except Exception as e: + logger.debug( + "Error closing WebSocket %s: %s", connection_id, str(e) + ) + class WebSocketService: """High-level WebSocket service for application-wide messaging. @@ -498,6 +577,99 @@ class WebSocketService: } await self._manager.send_personal_message(message, connection_id) + async def broadcast_scan_started( + self, directory: str, total_items: int = 0 + ) -> None: + """Broadcast that a library scan has started. + + Args: + directory: The root directory path being scanned + total_items: Total number of items to scan (for progress display) + """ + message = { + "type": "scan_started", + "timestamp": datetime.now(timezone.utc).isoformat(), + "data": { + "directory": directory, + "total_items": total_items, + }, + } + await self._manager.broadcast(message) + logger.info( + "Broadcast scan_started", + directory=directory, + total_items=total_items, + ) + + async def broadcast_scan_progress( + self, + directories_scanned: int, + files_found: int, + current_directory: str, + total_items: int = 0, + ) -> None: + """Broadcast scan progress update to all clients. 
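On the client side, the shutdown protocol above amounts to: expect a `server_shutdown` message, then a close with code 1001 (Going Away). A sketch using the third-party `websockets` package (an assumption, not a stated dependency of this project):

```python
import json

import websockets

async def listen(url: str) -> None:
    try:
        async with websockets.connect(url) as ws:
            async for raw in ws:
                msg = json.loads(raw)
                if msg.get("type") == "server_shutdown":
                    break   # server will follow up with close code 1001
    except websockets.ConnectionClosedOK:
        pass                # normal closure during graceful shutdown
```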
+ + Args: + directories_scanned: Number of directories scanned so far + files_found: Number of MP4 files found so far + current_directory: Current directory being scanned + total_items: Total number of items to scan (for progress display) + """ + message = { + "type": "scan_progress", + "timestamp": datetime.now(timezone.utc).isoformat(), + "data": { + "directories_scanned": directories_scanned, + "files_found": files_found, + "current_directory": current_directory, + "total_items": total_items, + }, + } + await self._manager.broadcast(message) + + async def broadcast_scan_completed( + self, + total_directories: int, + total_files: int, + elapsed_seconds: float, + ) -> None: + """Broadcast scan completion to all clients. + + Args: + total_directories: Total number of directories scanned + total_files: Total number of MP4 files found + elapsed_seconds: Time taken for the scan in seconds + """ + message = { + "type": "scan_completed", + "timestamp": datetime.now(timezone.utc).isoformat(), + "data": { + "total_directories": total_directories, + "total_files": total_files, + "elapsed_seconds": round(elapsed_seconds, 2), + }, + } + await self._manager.broadcast(message) + logger.info( + "Broadcast scan_completed", + total_directories=total_directories, + total_files=total_files, + elapsed_seconds=round(elapsed_seconds, 2), + ) + + async def shutdown(self, timeout: float = 5.0) -> None: + """Gracefully shutdown the WebSocket service. + + Broadcasts shutdown notification and closes all connections. + + Args: + timeout: Maximum time (seconds) to wait for shutdown + """ + logger.info("Shutting down WebSocket service...") + await self._manager.shutdown(timeout=timeout) + logger.info("WebSocket service shutdown complete") + # Singleton instance for application-wide access _websocket_service: Optional[WebSocketService] = None diff --git a/src/server/utils/dependencies.py b/src/server/utils/dependencies.py index 086e774..496dbad 100644 --- a/src/server/utils/dependencies.py +++ b/src/server/utils/dependencies.py @@ -65,6 +65,10 @@ def get_series_app() -> SeriesApp: Raises: HTTPException: If SeriesApp is not initialized or anime directory is not configured + + Note: + This creates a SeriesApp without database support. For database- + backed storage, use get_series_app_with_db() instead. """ global _series_app @@ -103,7 +107,6 @@ def reset_series_app() -> None: _series_app = None - async def get_database_session() -> AsyncGenerator: """ Dependency to get database session. @@ -134,6 +137,38 @@ async def get_database_session() -> AsyncGenerator: ) +async def get_optional_database_session() -> AsyncGenerator: + """ + Dependency to get optional database session. + + Unlike get_database_session(), this returns None if the database + is not available, allowing endpoints to fall back to other storage. + + Yields: + AsyncSession or None: Database session if available, None otherwise + + Example: + @app.post("/anime/add") + async def add_anime( + db: Optional[AsyncSession] = Depends(get_optional_database_session) + ): + if db: + # Use database + await AnimeSeriesService.create(db, ...) 
+ else: + # Fall back to file-based storage + series_app.list.add(serie) + """ + try: + from src.server.database import get_db_session + + async with get_db_session() as session: + yield session + except (ImportError, RuntimeError): + # Database not available - yield None + yield None + + def get_current_user( credentials: Optional[HTTPAuthorizationCredentials] = Depends( http_bearer_security diff --git a/src/server/utils/filesystem.py b/src/server/utils/filesystem.py new file mode 100644 index 0000000..e3efb8a --- /dev/null +++ b/src/server/utils/filesystem.py @@ -0,0 +1,180 @@ +"""Filesystem utilities for safe file and folder operations. + +This module provides utility functions for safely handling filesystem +operations, including sanitizing folder names and path validation. + +Security: + - All functions sanitize inputs to prevent path traversal attacks + - Invalid filesystem characters are removed or replaced + - Unicode characters are preserved for international titles +""" + +import os +import re +import unicodedata +from typing import Optional + +# Characters that are invalid in filesystem paths across platforms +# Windows: < > : " / \ | ? * +# Linux/Mac: / and null byte +INVALID_PATH_CHARS = '<>:"/\\|?*\x00' + +# Additional characters to remove for cleaner folder names +EXTRA_CLEANUP_CHARS = '\r\n\t' + +# Maximum folder name length (conservative for cross-platform compatibility) +MAX_FOLDER_NAME_LENGTH = 200 + + +def sanitize_folder_name( + name: str, + replacement: str = "", + max_length: Optional[int] = None, +) -> str: + """Sanitize a string for use as a filesystem folder name. + + Removes or replaces characters that are invalid for filesystems while + preserving Unicode characters (for Japanese/Chinese titles, etc.). + + Args: + name: The string to sanitize (e.g., anime display name) + replacement: Character to replace invalid chars with (default: "") + max_length: Maximum length for the result (default: MAX_FOLDER_NAME_LENGTH) + + Returns: + str: A filesystem-safe folder name + + Raises: + ValueError: If name is None, empty, or results in empty string + + Examples: + >>> sanitize_folder_name("Attack on Titan: Final Season") + 'Attack on Titan Final Season' + >>> sanitize_folder_name("What If...?") + 'What If...' + >>> sanitize_folder_name("Re:Zero") + 'ReZero' + >>> sanitize_folder_name("日本語タイトル") + '日本語タイトル' + """ + if name is None: + raise ValueError("Folder name cannot be None") + + # Strip leading/trailing whitespace + name = name.strip() + + if not name: + raise ValueError("Folder name cannot be empty") + + max_len = max_length or MAX_FOLDER_NAME_LENGTH + + # Normalize Unicode characters (NFC form for consistency) + name = unicodedata.normalize('NFC', name) + + # Remove invalid filesystem characters + for char in INVALID_PATH_CHARS: + name = name.replace(char, replacement) + + # Remove extra cleanup characters + for char in EXTRA_CLEANUP_CHARS: + name = name.replace(char, replacement) + + # Remove control characters but preserve Unicode + name = ''.join( + char for char in name + if not unicodedata.category(char).startswith('C') + or char == ' ' # Preserve spaces + ) + + # Collapse multiple consecutive spaces + name = re.sub(r' +', ' ', name) + + # Remove leading/trailing dots and whitespace + # (dots at start can make folders hidden on Unix) + name = name.strip('. 
') + + # Handle edge case: all characters were invalid + if not name: + raise ValueError( + "Folder name contains only invalid characters" + ) + + # Truncate to max length while avoiding breaking in middle of word + if len(name) > max_len: + # Try to truncate at a word boundary + truncated = name[:max_len] + last_space = truncated.rfind(' ') + if last_space > max_len // 2: # Only if we don't lose too much + truncated = truncated[:last_space] + name = truncated.rstrip() + + return name + + +def is_safe_path(base_path: str, target_path: str) -> bool: + """Check if target_path is safely within base_path. + + Prevents path traversal attacks by ensuring the target path + is actually within the base path after resolution. + + Args: + base_path: The base directory that should contain the target + target_path: The path to validate + + Returns: + bool: True if target_path is safely within base_path + + Example: + >>> is_safe_path("/anime", "/anime/Attack on Titan") + True + >>> is_safe_path("/anime", "/anime/../etc/passwd") + False + """ + # Resolve to absolute paths + base_resolved = os.path.abspath(base_path) + target_resolved = os.path.abspath(target_path) + + # Check that target starts with base (with trailing separator) + base_with_sep = base_resolved + os.sep + return ( + target_resolved == base_resolved or + target_resolved.startswith(base_with_sep) + ) + + +def create_safe_folder( + base_path: str, + folder_name: str, + exist_ok: bool = True, +) -> str: + """Create a folder with a sanitized name safely within base_path. + + Args: + base_path: Base directory to create folder within + folder_name: Unsanitized folder name + exist_ok: If True, don't raise error if folder exists + + Returns: + str: Full path to the created folder + + Raises: + ValueError: If resulting path would be outside base_path + OSError: If folder creation fails + """ + # Sanitize the folder name + safe_name = sanitize_folder_name(folder_name) + + # Construct full path + full_path = os.path.join(base_path, safe_name) + + # Validate path safety + if not is_safe_path(base_path, full_path): + raise ValueError( + f"Folder name '{folder_name}' would create path outside " + f"base directory" + ) + + # Create the folder + os.makedirs(full_path, exist_ok=exist_ok) + + return full_path diff --git a/src/server/web/static/css/base/reset.css b/src/server/web/static/css/base/reset.css new file mode 100644 index 0000000..10855ba --- /dev/null +++ b/src/server/web/static/css/base/reset.css @@ -0,0 +1,33 @@ +/** + * AniWorld - CSS Reset + * + * Normalize and reset default browser styles + * for consistent cross-browser rendering. + */ + +* { + box-sizing: border-box; +} + +html { + font-size: 100%; +} + +body { + margin: 0; + padding: 0; + font-family: var(--font-family); + font-size: var(--font-size-body); + line-height: 1.5; + color: var(--color-text-primary); + background-color: var(--color-bg-primary); + transition: background-color var(--transition-duration) var(--transition-easing), + color var(--transition-duration) var(--transition-easing); +} + +/* App container */ +.app-container { + min-height: 100vh; + display: flex; + flex-direction: column; +} diff --git a/src/server/web/static/css/base/typography.css b/src/server/web/static/css/base/typography.css new file mode 100644 index 0000000..351151b --- /dev/null +++ b/src/server/web/static/css/base/typography.css @@ -0,0 +1,51 @@ +/** + * AniWorld - Typography Styles + * + * Font styles, headings, and text utilities. 
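The filesystem helpers above combine as follows; paths are illustrative, and note that `create_safe_folder` really touches disk via `os.makedirs`:

```python
from src.server.utils.filesystem import (
    create_safe_folder,
    is_safe_path,
    sanitize_folder_name,
)

safe = sanitize_folder_name("Re:Zero - Starting Life in Another World")
# -> "ReZero - Starting Life in Another World"   (":" is invalid on Windows)

path = create_safe_folder("/srv/anime", "One Piece: Wano Arc")
# creates and returns "/srv/anime/One Piece Wano Arc"

assert is_safe_path("/srv/anime", path)          # traversal guard holds
```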
+ */ + +h1, h2, h3, h4, h5, h6 { + margin: 0; + font-weight: 600; + color: var(--color-text-primary); +} + +h1 { + font-size: var(--font-size-large-title); +} + +h2 { + font-size: var(--font-size-title); +} + +h3 { + font-size: var(--font-size-subtitle); +} + +h4 { + font-size: var(--font-size-body); +} + +p { + margin: 0; + color: var(--color-text-secondary); +} + +a { + color: var(--color-accent); + text-decoration: none; +} + +a:hover { + text-decoration: underline; +} + +small { + font-size: var(--font-size-caption); + color: var(--color-text-tertiary); +} + +.error-message { + color: var(--color-error); + font-weight: 500; +} diff --git a/src/server/web/static/css/base/variables.css b/src/server/web/static/css/base/variables.css new file mode 100644 index 0000000..246fd5f --- /dev/null +++ b/src/server/web/static/css/base/variables.css @@ -0,0 +1,114 @@ +/** + * AniWorld - CSS Variables + * + * Fluent UI Design System custom properties for colors, typography, + * spacing, borders, shadows, and transitions. + * Includes both light and dark theme definitions. + */ + +:root { + /* Light theme colors */ + --color-bg-primary: #ffffff; + --color-bg-secondary: #faf9f8; + --color-bg-tertiary: #f3f2f1; + --color-surface: #ffffff; + --color-surface-hover: #f3f2f1; + --color-surface-pressed: #edebe9; + --color-text-primary: #323130; + --color-text-secondary: #605e5c; + --color-text-tertiary: #a19f9d; + --color-accent: #0078d4; + --color-accent-hover: #106ebe; + --color-accent-pressed: #005a9e; + --color-success: #107c10; + --color-warning: #ff8c00; + --color-error: #d13438; + --color-border: #e1dfdd; + --color-divider: #c8c6c4; + + /* Dark theme colors (stored as variables for theme switching) */ + --color-bg-primary-dark: #202020; + --color-bg-secondary-dark: #2d2d30; + --color-bg-tertiary-dark: #3e3e42; + --color-surface-dark: #292929; + --color-surface-hover-dark: #3e3e42; + --color-surface-pressed-dark: #484848; + --color-text-primary-dark: #ffffff; + --color-text-secondary-dark: #cccccc; + --color-text-tertiary-dark: #969696; + --color-accent-dark: #60cdff; + --color-accent-hover-dark: #4db8e8; + --color-accent-pressed-dark: #3aa0d1; + --color-border-dark: #484644; + --color-divider-dark: #605e5c; + + /* Typography */ + --font-family: 'Segoe UI', 'Segoe UI Web (West European)', -apple-system, BlinkMacSystemFont, Roboto, 'Helvetica Neue', sans-serif; + --font-size-caption: 12px; + --font-size-body: 14px; + --font-size-subtitle: 16px; + --font-size-title: 20px; + --font-size-large-title: 32px; + + /* Spacing */ + --spacing-xs: 4px; + --spacing-sm: 8px; + --spacing-md: 12px; + --spacing-lg: 16px; + --spacing-xl: 20px; + --spacing-xxl: 24px; + + /* Border radius */ + --border-radius-sm: 2px; + --border-radius-md: 4px; + --border-radius-lg: 6px; + --border-radius-xl: 8px; + --border-radius: var(--border-radius-md); + + /* Shadows */ + --shadow-card: 0 1.6px 3.6px 0 rgba(0, 0, 0, 0.132), 0 0.3px 0.9px 0 rgba(0, 0, 0, 0.108); + --shadow-elevated: 0 6.4px 14.4px 0 rgba(0, 0, 0, 0.132), 0 1.2px 3.6px 0 rgba(0, 0, 0, 0.108); + + /* Transitions */ + --transition-duration: 0.15s; + --transition-easing: cubic-bezier(0.1, 0.9, 0.2, 1); + --animation-duration-fast: 0.1s; + --animation-duration-normal: 0.15s; + --animation-easing-standard: cubic-bezier(0.1, 0.9, 0.2, 1); + + /* Additional color aliases */ + --color-primary: var(--color-accent); + --color-primary-light: #e6f2fb; + --color-primary-dark: #005a9e; + --color-text: var(--color-text-primary); + --color-text-disabled: #a19f9d; + 
--color-background: var(--color-bg-primary); + --color-background-secondary: var(--color-bg-secondary); + --color-background-tertiary: var(--color-bg-tertiary); + --color-background-subtle: var(--color-bg-secondary); + + /* RGB triplets and legacy aliases referenced by component styles */ + --color-primary-rgb: 0, 120, 212; + --color-accent-rgb: 0, 120, 212; + --color-error-rgb: 209, 52, 56; + --font-size-small: var(--font-size-caption); + --border-color: var(--color-border); + --card-bg: var(--color-surface); + --text-color: var(--color-text-primary); + --muted-text: var(--color-text-secondary); +} + +/* Dark theme */ +[data-theme="dark"] { + --color-bg-primary: var(--color-bg-primary-dark); + --color-bg-secondary: var(--color-bg-secondary-dark); + --color-bg-tertiary: var(--color-bg-tertiary-dark); + --color-surface: var(--color-surface-dark); + --color-surface-hover: var(--color-surface-hover-dark); + --color-surface-pressed: var(--color-surface-pressed-dark); + --color-text-primary: var(--color-text-primary-dark); + --color-text-secondary: var(--color-text-secondary-dark); + --color-text-tertiary: var(--color-text-tertiary-dark); + --color-accent: var(--color-accent-dark); + --color-accent-hover: var(--color-accent-hover-dark); + --color-accent-pressed: var(--color-accent-pressed-dark); + --color-border: var(--color-border-dark); + --color-divider: var(--color-divider-dark); + --color-text: var(--color-text-primary-dark); + --color-text-disabled: #969696; + --color-background: var(--color-bg-primary-dark); + --color-background-secondary: var(--color-bg-secondary-dark); + --color-background-tertiary: var(--color-bg-tertiary-dark); + --color-background-subtle: var(--color-bg-tertiary-dark); + --color-primary-light: #1a3a5c; + --color-primary-rgb: 96, 205, 255; + --color-accent-rgb: 96, 205, 255; +} diff --git a/src/server/web/static/css/components/buttons.css b/src/server/web/static/css/components/buttons.css new file mode 100644 index 0000000..c0c28a0 --- /dev/null +++ b/src/server/web/static/css/components/buttons.css @@ -0,0 +1,123 @@ +/** + * AniWorld - Button Styles + * + * All button-related styles including variants, + * states, and sizes. + */ + +.btn { + display: inline-flex; + align-items: center; + gap: var(--spacing-xs); + padding: var(--spacing-sm) var(--spacing-md); + border: 1px solid transparent; + border-radius: var(--border-radius-md); + font-size: var(--font-size-body); + font-weight: 500; + text-decoration: none; + cursor: pointer; + transition: all var(--transition-duration) var(--transition-easing); + background-color: transparent; + color: var(--color-text-primary); +} + +.btn:disabled { + opacity: 0.6; + cursor: not-allowed; +} + +/* Primary button */ +.btn-primary { + background-color: var(--color-accent); + color: white; +} + +.btn-primary:hover:not(:disabled) { + background-color: var(--color-accent-hover); +} + +.btn-primary:active { + background-color: var(--color-accent-pressed); +} + +/* Secondary button */ +.btn-secondary { + background-color: var(--color-surface); + border-color: var(--color-border); + color: var(--color-text-primary); +} + +.btn-secondary:hover:not(:disabled) { + background-color: var(--color-surface-hover); +} + +/* Success button */ +.btn-success { + background-color: var(--color-success); + color: white; +} + +.btn-success:hover:not(:disabled) { + background-color: #0e6b0e; +} + +/* Warning button */ +.btn-warning { + background-color: var(--color-warning); + color: white; +} + +.btn-warning:hover:not(:disabled) { + background-color: #e67e00; +} + +/* Danger/Error button */ +.btn-danger { + background-color: var(--color-error); + color: white; +} + +.btn-danger:hover:not(:disabled) { + background-color: #b52d30; +} + +/* Icon button */ +.btn-icon { + padding: var(--spacing-sm); + min-width: auto; +} + +/* Small button */ +.btn-small { + padding: var(--spacing-xs) var(--spacing-sm); + font-size: var(--font-size-caption); +} + +/* Extra small button */ +.btn-xs { + padding: 2px 6px; +
font-size: 0.75em; +} + +/* Filter button active state */ +.series-filters .btn { + transition: all 0.2s ease; +} + +.series-filters .btn[data-active="true"] { + background-color: var(--color-primary); + color: white; + border-color: var(--color-primary); + transform: scale(1.02); + box-shadow: 0 2px 8px rgba(0, 120, 212, 0.3); +} + +.series-filters .btn[data-active="true"]:hover { + background-color: var(--color-primary-dark); +} + +/* Dark theme adjustments */ +[data-theme="dark"] .series-filters .btn[data-active="true"] { + background-color: var(--color-primary); + color: white; +} diff --git a/src/server/web/static/css/components/cards.css b/src/server/web/static/css/components/cards.css new file mode 100644 index 0000000..5a6762e --- /dev/null +++ b/src/server/web/static/css/components/cards.css @@ -0,0 +1,271 @@ +/** + * AniWorld - Card Styles + * + * Card and panel component styles including + * series cards and stat cards. + */ + +/* Series Card */ +.series-card { + background-color: var(--color-surface); + border: 1px solid var(--color-border); + border-radius: var(--border-radius-lg); + padding: var(--spacing-lg); + box-shadow: var(--shadow-card); + transition: all var(--transition-duration) var(--transition-easing); + position: relative; + display: flex; + flex-direction: column; + min-height: 120px; +} + +.series-card:hover { + box-shadow: var(--shadow-elevated); + transform: translateY(-1px); +} + +.series-card.selected { + border-color: var(--color-accent); + background-color: var(--color-surface-hover); +} + +.series-card-header { + display: flex; + justify-content: space-between; + align-items: flex-start; + margin-bottom: var(--spacing-md); + position: relative; +} + +.series-checkbox { + width: 18px; + height: 18px; + accent-color: var(--color-accent); +} + +.series-info h3 { + margin: 0 0 var(--spacing-xs) 0; + font-size: var(--font-size-subtitle); + color: var(--color-text-primary); + line-height: 1.3; +} + +.series-folder { + font-size: var(--font-size-caption); + color: var(--color-text-tertiary); + margin-bottom: var(--spacing-sm); +} + +.series-stats { + display: flex; + align-items: center; + gap: var(--spacing-md); + margin-top: auto; +} + +.series-site { + font-size: var(--font-size-caption); + color: var(--color-text-tertiary); +} + +/* Series Card Status Indicators */ +.series-status { + position: absolute; + top: var(--spacing-sm); + right: var(--spacing-sm); + display: flex; + align-items: center; +} + +.status-missing { + color: var(--color-warning); + font-size: 1.2em; +} + +.status-complete { + color: var(--color-success); + font-size: 1.2em; +} + +/* Series Card States */ +.series-card.has-missing { + border-left: 4px solid var(--color-warning); +} + +.series-card.complete { + border-left: 4px solid var(--color-success); + opacity: 0.8; +} + +.series-card.complete .series-checkbox { + opacity: 0.5; + cursor: not-allowed; +} + +.series-card.complete:not(.selected) { + background-color: var(--color-background-secondary); +} + +/* Dark theme adjustments */ +[data-theme="dark"] .series-card.complete:not(.selected) { + background-color: var(--color-background-tertiary); +} + +/* Stat Card */ +.stat-card { + background: var(--color-surface); + border: 1px solid var(--color-border); + border-radius: var(--border-radius-lg); + padding: var(--spacing-lg); + display: flex; + align-items: center; + gap: var(--spacing-lg); + transition: all var(--transition-duration) var(--transition-easing); +} + +.stat-card:hover { + background: var(--color-surface-hover); + 
transform: translateY(-2px); + box-shadow: var(--shadow-elevated); +} + +.stat-icon { + font-size: 2rem; + width: 48px; + height: 48px; + display: flex; + align-items: center; + justify-content: center; + border-radius: 50%; + background: rgba(var(--color-primary-rgb), 0.1); +} + +.stat-value { + font-size: var(--font-size-title); + font-weight: 600; + color: var(--color-text-primary); + line-height: 1; +} + +.stat-label { + font-size: var(--font-size-caption); + color: var(--color-text-secondary); + text-transform: uppercase; + letter-spacing: 0.5px; +} + +/* Download Card */ +.download-card { + background: var(--color-surface); + border: 1px solid var(--color-border); + border-radius: var(--border-radius-lg); + padding: var(--spacing-lg); + margin-bottom: var(--spacing-md); + transition: all var(--transition-duration) var(--transition-easing); +} + +.download-card:hover { + background: var(--color-surface-hover); + transform: translateX(4px); +} + +.download-card.active { + border-left: 4px solid var(--color-primary); +} + +.download-card.completed { + border-left: 4px solid var(--color-success); + opacity: 0.8; +} + +.download-card.failed { + border-left: 4px solid var(--color-error); +} + +.download-card.pending { + border-left: 4px solid var(--color-warning); + position: relative; +} + +.download-card.pending.high-priority { + border-left-color: var(--color-accent); + background: linear-gradient(90deg, rgba(var(--color-accent-rgb), 0.05) 0%, transparent 10%); +} + +.download-header { + display: flex; + justify-content: space-between; + align-items: flex-start; +} + +.download-info h4 { + margin: 0 0 var(--spacing-xs) 0; + font-size: var(--font-size-subtitle); + color: var(--color-text-primary); +} + +.download-info p { + margin: 0 0 var(--spacing-xs) 0; + color: var(--color-text-secondary); + font-size: var(--font-size-body); +} + +.download-info small { + color: var(--color-text-tertiary); + font-size: var(--font-size-caption); +} + +.download-actions { + display: flex; + gap: var(--spacing-xs); + align-items: center; +} + +.priority-indicator { + color: var(--color-accent); + margin-right: var(--spacing-sm); +} + +/* Queue Position */ +.queue-position { + position: absolute; + top: var(--spacing-sm); + left: 48px; + background: var(--color-warning); + color: white; + width: 28px; + height: 28px; + border-radius: 50%; + display: flex; + align-items: center; + justify-content: center; + font-size: var(--font-size-caption); + font-weight: 600; +} + +.download-card.pending .download-info { + margin-left: 80px; +} + +.download-card.pending .download-header { + padding-left: 0; +} + +/* Dark Theme Adjustments for Cards */ +[data-theme="dark"] .stat-card { + background: var(--color-surface-dark); + border-color: var(--color-border-dark); +} + +[data-theme="dark"] .stat-card:hover { + background: var(--color-surface-hover-dark); +} + +[data-theme="dark"] .download-card { + background: var(--color-surface-dark); + border-color: var(--color-border-dark); +} + +[data-theme="dark"] .download-card:hover { + background: var(--color-surface-hover-dark); +} diff --git a/src/server/web/static/css/components/forms.css b/src/server/web/static/css/components/forms.css new file mode 100644 index 0000000..cf302bb --- /dev/null +++ b/src/server/web/static/css/components/forms.css @@ -0,0 +1,224 @@ +/** + * AniWorld - Form Styles + * + * Form inputs, labels, validation states, + * and form group layouts. 
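+ * + * The custom checkbox styling assumes markup shaped like this sketch + * (hypothetical, inferred from the sibling selectors below): + * + *   <label class="checkbox-label"> + *     <input type="checkbox"> + *     <span class="checkbox-custom"></span> + *     Enable option + *   </label> + * + * The real input is hidden and the adjacent .checkbox-custom span is + * drawn in its place, so the span must directly follow the input.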
+ */ + +/* Input fields */ +.input-field { + width: 120px; + padding: var(--spacing-xs) var(--spacing-sm); + border: 1px solid var(--color-border); + border-radius: var(--border-radius); + background: var(--color-background); + color: var(--color-text-primary); + font-size: var(--font-size-body); + transition: border-color var(--animation-duration-fast) var(--animation-easing-standard); +} + +.input-field:focus { + outline: none; + border-color: var(--color-accent); +} + +/* Input groups */ +.input-group { + display: flex; + align-items: center; + gap: var(--spacing-xs); +} + +.input-group .input-field { + flex: 1; + width: auto; +} + +.input-group .btn { + flex-shrink: 0; +} + +/* Search input */ +.search-input { + flex: 1; + padding: var(--spacing-md); + border: 1px solid var(--color-border); + border-radius: var(--border-radius-md); + font-size: var(--font-size-body); + background-color: var(--color-surface); + color: var(--color-text-primary); + transition: all var(--transition-duration) var(--transition-easing); +} + +.search-input:focus { + outline: none; + border-color: var(--color-accent); + box-shadow: 0 0 0 1px var(--color-accent); +} + +.search-input-group { + display: flex; + gap: var(--spacing-sm); + max-width: 600px; +} + +/* Checkbox custom styling */ +.checkbox-label { + display: flex; + align-items: center; + gap: var(--spacing-sm); + cursor: pointer; + user-select: none; +} + +.checkbox-label input[type="checkbox"] { + display: none; +} + +.checkbox-custom { + display: inline-block; + width: 18px; + height: 18px; + min-width: 18px; + min-height: 18px; + flex-shrink: 0; + border: 2px solid var(--color-border); + border-radius: 4px; + background: var(--color-background); + position: relative; + transition: all var(--animation-duration-fast) var(--animation-easing-standard); +} + +.checkbox-label input[type="checkbox"]:checked+.checkbox-custom { + background: var(--color-accent); + border-color: var(--color-accent); +} + +.checkbox-label input[type="checkbox"]:checked+.checkbox-custom::after { + content: ''; + position: absolute; + left: 4px; + top: 1px; + width: 6px; + height: 10px; + border: solid white; + border-width: 0 2px 2px 0; + transform: rotate(45deg); +} + +.checkbox-label:hover .checkbox-custom { + border-color: var(--color-accent); +} + +/* Form groups */ +.form-group { + display: flex; + flex-direction: column; + gap: 0.5rem; +} + +.form-label { + font-weight: 500; + color: var(--color-text); + font-size: 0.9rem; +} + +/* Config item styling */ +.config-item { + margin-bottom: var(--spacing-lg); +} + +.config-item:last-child { + margin-bottom: 0; +} + +.config-item label { + display: block; + font-weight: 500; + color: var(--color-text-primary); + margin-bottom: var(--spacing-xs); +} + +.config-value { + padding: var(--spacing-sm); + background-color: var(--color-bg-secondary); + border: 1px solid var(--color-border); + border-radius: var(--border-radius-md); + font-family: monospace; + font-size: var(--font-size-caption); + color: var(--color-text-secondary); + word-break: break-all; +} + +.config-value input[readonly] { + background-color: var(--color-bg-secondary); + cursor: not-allowed; +} + +[data-theme="dark"] .config-value input[readonly] { + background-color: var(--color-bg-secondary-dark); +} + +/* Config description */ +.config-description { + font-size: 0.9em; + color: var(--muted-text); + margin: 4px 0 8px 0; + line-height: 1.4; +} + +/* Config actions */ +.config-actions { + display: flex; + gap: var(--spacing-sm); + margin-top: var(--spacing-md); 
+ flex-wrap: wrap; +} + +.config-actions .btn { + flex: 1; + min-width: 140px; +} + +/* Validation styles */ +.validation-results { + margin: 12px 0; + padding: 12px; + border-radius: 6px; + border: 1px solid var(--border-color); + background: var(--card-bg); +} + +.validation-results.hidden { + display: none; +} + +.validation-error { + color: var(--color-error); + margin: 4px 0; + font-size: 0.9em; +} + +.validation-warning { + color: var(--color-warning); + margin: 4px 0; + font-size: 0.9em; +} + +.validation-success { + color: var(--color-success); + margin: 4px 0; + font-size: 0.9em; +} + +/* Responsive form adjustments */ +@media (max-width: 768px) { + .config-actions { + flex-direction: column; + } + + .config-actions .btn { + flex: none; + width: 100%; + } +} diff --git a/src/server/web/static/css/components/modals.css b/src/server/web/static/css/components/modals.css new file mode 100644 index 0000000..0f678ad --- /dev/null +++ b/src/server/web/static/css/components/modals.css @@ -0,0 +1,264 @@ +/** + * AniWorld - Modal Styles + * + * Modal and overlay styles including + * config modal and confirmation dialogs. + */ + +.modal { + position: fixed; + top: 0; + left: 0; + width: 100%; + height: 100%; + z-index: 2000; + display: flex; + justify-content: center; + align-items: center; +} + +.modal-overlay { + position: absolute; + top: 0; + left: 0; + width: 100%; + height: 100%; + background-color: rgba(0, 0, 0, 0.5); +} + +.modal-content { + position: relative; + background-color: var(--color-surface); + border: 1px solid var(--color-border); + border-radius: var(--border-radius-lg); + box-shadow: var(--shadow-elevated); + max-width: 500px; + width: 90%; + max-height: 80vh; + overflow: hidden; +} + +.modal-header { + display: flex; + justify-content: space-between; + align-items: center; + padding: var(--spacing-lg); + border-bottom: 1px solid var(--color-border); +} + +.modal-header h3 { + margin: 0; + font-size: var(--font-size-subtitle); + color: var(--color-text-primary); +} + +.modal-body { + padding: var(--spacing-lg); + overflow-y: auto; +} + +/* Config Section within modals */ +.config-section { + border-top: 1px solid var(--color-divider); + margin-top: var(--spacing-lg); + padding-top: var(--spacing-lg); +} + +.config-section h4 { + margin: 0 0 var(--spacing-md) 0; + font-size: var(--font-size-subtitle); + font-weight: 600; + color: var(--color-text-primary); +} + +/* Scheduler info box */ +.scheduler-info { + background: var(--color-background-subtle); + border-radius: var(--border-radius); + padding: var(--spacing-md); + margin: var(--spacing-sm) 0; +} + +.info-row { + display: flex; + justify-content: space-between; + align-items: center; + margin-bottom: var(--spacing-xs); +} + +.info-row:last-child { + margin-bottom: 0; +} + +.info-value { + font-weight: 500; + color: var(--color-text-secondary); +} + +/* Status badge */ +.status-badge { + padding: 2px 8px; + border-radius: 12px; + font-size: var(--font-size-caption); + font-weight: 600; +} + +.status-badge.running { + background: var(--color-accent); + color: white; +} + +.status-badge.stopped { + background: var(--color-text-disabled); + color: white; +} + +/* Rescan time config */ +#rescan-time-config { + margin-left: var(--spacing-lg); + opacity: 0.6; + transition: opacity var(--animation-duration-normal) var(--animation-easing-standard); +} + +#rescan-time-config.enabled { + opacity: 1; +} + +/* Loading overlay */ +.loading-overlay { + position: fixed; + top: 0; + left: 0; + width: 100%; + height: 100%; + 
background-color: rgba(0, 0, 0, 0.5); + display: flex; + justify-content: center; + align-items: center; + z-index: 2000; +} + +.loading-spinner { + text-align: center; + color: white; +} + +.loading-spinner i { + font-size: 48px; + margin-bottom: var(--spacing-md); +} + +.loading-spinner p { + margin: 0; + font-size: var(--font-size-subtitle); +} + +/* Backup list */ +.backup-list { + max-height: 200px; + overflow-y: auto; + border: 1px solid var(--border-color); + border-radius: 6px; + margin: 8px 0; +} + +.backup-item { + display: flex; + justify-content: space-between; + align-items: center; + padding: 8px 12px; + border-bottom: 1px solid var(--border-color); + font-size: 0.9em; +} + +.backup-item:last-child { + border-bottom: none; +} + +.backup-info { + flex: 1; +} + +.backup-name { + font-weight: 500; + color: var(--text-color); +} + +.backup-details { + font-size: 0.8em; + color: var(--muted-text); + margin-top: 2px; +} + +.backup-actions { + display: flex; + gap: 4px; +} + +.backup-actions .btn { + padding: 4px 8px; + font-size: 0.8em; +} + +/* Log files container */ +.log-files-container { + max-height: 200px; + overflow-y: auto; + border: 1px solid var(--border-color); + border-radius: 6px; + padding: 8px; + margin-top: 8px; +} + +.log-file-item { + display: flex; + justify-content: space-between; + align-items: center; + padding: 8px; + border-bottom: 1px solid var(--border-color); + font-size: 0.9em; +} + +.log-file-item:last-child { + border-bottom: none; +} + +.log-file-info { + flex: 1; +} + +.log-file-name { + font-weight: 500; + color: var(--text-color); +} + +.log-file-details { + font-size: 0.8em; + color: var(--muted-text); + margin-top: 2px; +} + +.log-file-actions { + display: flex; + gap: 4px; +} + +.log-file-actions .btn { + padding: 4px 8px; + font-size: 0.8em; + min-width: auto; +} + +.log-file-actions .btn-xs { + padding: 2px 6px; + font-size: 0.75em; +} + +/* Responsive adjustments */ +@media (max-width: 768px) { + .info-row { + flex-direction: column; + align-items: flex-start; + gap: 4px; + } +} diff --git a/src/server/web/static/css/components/navigation.css b/src/server/web/static/css/components/navigation.css new file mode 100644 index 0000000..c3a46ea --- /dev/null +++ b/src/server/web/static/css/components/navigation.css @@ -0,0 +1,218 @@ +/** + * AniWorld - Navigation Styles + * + * Header, nav, and navigation link styles. 
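+ * + * Structural sketch (hypothetical markup, inferred from the selectors + * below) that the header styles assume: + * + *   <header class="header"> + *     <div class="header-content"> + *       <div class="header-title"><i></i><h1>AniWorld</h1></div> + *       <div class="header-actions"><!-- buttons, status --></div> + *     </div> + *   </header>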
+ */ + +/* Header */ +.header { + background-color: var(--color-surface); + border-bottom: 1px solid var(--color-border); + padding: var(--spacing-lg) var(--spacing-xl); + box-shadow: var(--shadow-card); + transition: background-color var(--transition-duration) var(--transition-easing); +} + +.header-content { + display: flex; + justify-content: space-between; + align-items: center; + max-width: 1200px; + margin: 0 auto; + min-height: 60px; + position: relative; + width: 100%; + box-sizing: border-box; +} + +.header-title { + display: flex; + align-items: center; + gap: var(--spacing-md); + flex-shrink: 1; + min-width: 150px; +} + +.header-title i { + font-size: var(--font-size-title); + color: var(--color-accent); +} + +.header-title h1 { + margin: 0; + font-size: var(--font-size-title); + font-weight: 600; + color: var(--color-text-primary); +} + +.header-actions { + display: flex; + align-items: center; + gap: var(--spacing-lg); + flex-shrink: 0; + flex-wrap: nowrap; + justify-content: flex-end; +} + +/* Main content */ +.main-content { + flex: 1; + padding: var(--spacing-xl); + max-width: 1200px; + margin: 0 auto; + width: 100%; +} + +/* Section headers */ +.section-header { + display: flex; + justify-content: space-between; + align-items: center; + margin-bottom: var(--spacing-lg); + padding-bottom: var(--spacing-md); + border-bottom: 1px solid var(--color-border); +} + +.section-header h2 { + display: flex; + align-items: center; + gap: var(--spacing-sm); + margin: 0; + font-size: var(--font-size-title); + color: var(--color-text-primary); +} + +.section-actions { + display: flex; + gap: var(--spacing-sm); +} + +/* Series section */ +.series-section { + margin-bottom: var(--spacing-xxl); +} + +.series-header { + display: flex; + flex-direction: column; + gap: var(--spacing-lg); + margin-bottom: var(--spacing-xl); +} + +.series-header h2 { + margin: 0; + font-size: var(--font-size-title); + color: var(--color-text-primary); +} + +.series-filters { + display: flex; + gap: var(--spacing-md); + margin-bottom: var(--spacing-lg); +} + +.series-actions { + display: flex; + gap: var(--spacing-md); +} + +.series-grid { + display: grid; + grid-template-columns: repeat(auto-fill, minmax(300px, 1fr)); + gap: var(--spacing-lg); +} + +/* Search section */ +.search-section { + margin-bottom: var(--spacing-xxl); +} + +.search-container { + margin-bottom: var(--spacing-lg); +} + +/* Dark theme adjustments */ +[data-theme="dark"] .section-header { + border-bottom-color: var(--color-border-dark); +} + +/* Responsive design */ +@media (min-width: 768px) { + .series-header { + flex-direction: row; + align-items: center; + justify-content: space-between; + } + + .series-filters { + margin-bottom: 0; + } +} + +@media (max-width: 1024px) { + .header-title { + min-width: 120px; + } + + .header-title h1 { + font-size: 1.4rem; + } + + .header-actions { + gap: var(--spacing-sm); + } +} + +@media (max-width: 768px) { + .header-content { + flex-direction: column; + gap: var(--spacing-md); + min-height: auto; + } + + .header-title { + text-align: center; + min-width: auto; + justify-content: center; + } + + .header-actions { + justify-content: center; + flex-wrap: wrap; + width: 100%; + gap: var(--spacing-sm); + } + + .main-content { + padding: var(--spacing-md); + } + + .series-header { + flex-direction: column; + gap: var(--spacing-md); + align-items: stretch; + } + + .series-actions { + justify-content: center; + } + + .series-grid { + grid-template-columns: 1fr; + } + + .section-header { + flex-direction: 
column; + align-items: stretch; + gap: var(--spacing-md); + } + + .download-header { + flex-direction: column; + gap: var(--spacing-md); + } + + .download-actions { + justify-content: flex-end; + } +} diff --git a/src/server/web/static/css/components/notifications.css b/src/server/web/static/css/components/notifications.css new file mode 100644 index 0000000..dbd4e6d --- /dev/null +++ b/src/server/web/static/css/components/notifications.css @@ -0,0 +1,148 @@ +/** + * AniWorld - Notification Styles + * + * Toast notifications, alerts, and messages. + */ + +/* Toast container */ +.toast-container { + position: fixed; + top: var(--spacing-xl); + right: var(--spacing-xl); + z-index: 1100; + display: flex; + flex-direction: column; + gap: var(--spacing-sm); +} + +/* Toast base */ +.toast { + background-color: var(--color-surface); + border: 1px solid var(--color-border); + border-radius: var(--border-radius-lg); + padding: var(--spacing-md) var(--spacing-lg); + box-shadow: var(--shadow-elevated); + min-width: 300px; + animation: slideIn var(--transition-duration) var(--transition-easing); +} + +/* Toast variants */ +.toast.success { + border-left: 4px solid var(--color-success); +} + +.toast.error { + border-left: 4px solid var(--color-error); +} + +.toast.warning { + border-left: 4px solid var(--color-warning); +} + +.toast.info { + border-left: 4px solid var(--color-accent); +} + +/* Status panel */ +.status-panel { + position: fixed; + bottom: var(--spacing-xl); + right: var(--spacing-xl); + width: 400px; + background-color: var(--color-surface); + border: 1px solid var(--color-border); + border-radius: var(--border-radius-lg); + box-shadow: var(--shadow-elevated); + z-index: 1000; + transition: all var(--transition-duration) var(--transition-easing); +} + +.status-header { + display: flex; + justify-content: space-between; + align-items: center; + padding: var(--spacing-md) var(--spacing-lg); + border-bottom: 1px solid var(--color-border); +} + +.status-header h3 { + margin: 0; + font-size: var(--font-size-subtitle); + color: var(--color-text-primary); +} + +.status-content { + padding: var(--spacing-lg); +} + +.status-message { + margin-bottom: var(--spacing-md); + color: var(--color-text-secondary); +} + +/* Status indicator */ +.status-indicator { + display: inline-block; + width: 8px; + height: 8px; + border-radius: 50%; + background-color: var(--color-error); + margin-right: var(--spacing-xs); +} + +.status-indicator.connected { + background-color: var(--color-success); +} + +/* Download controls */ +.download-controls { + display: flex; + gap: var(--spacing-sm); + margin-top: var(--spacing-md); + justify-content: center; +} + +/* Empty state */ +.empty-state { + text-align: center; + padding: var(--spacing-xxl); + color: var(--color-text-tertiary); +} + +.empty-state i { + font-size: 3rem; + margin-bottom: var(--spacing-md); + opacity: 0.5; +} + +.empty-state p { + margin: 0; + font-size: var(--font-size-subtitle); +} + +.empty-state small { + display: block; + margin-top: var(--spacing-sm); + font-size: var(--font-size-small); + opacity: 0.7; +} + +/* Responsive adjustments */ +@media (max-width: 768px) { + .status-panel { + bottom: var(--spacing-md); + right: var(--spacing-md); + left: var(--spacing-md); + width: auto; + } + + .toast-container { + top: var(--spacing-md); + right: var(--spacing-md); + left: var(--spacing-md); + } + + .toast { + min-width: auto; + } +} diff --git a/src/server/web/static/css/components/progress.css b/src/server/web/static/css/components/progress.css 
new file mode 100644 index 0000000..6761ded --- /dev/null +++ b/src/server/web/static/css/components/progress.css @@ -0,0 +1,196 @@ +/** + * AniWorld - Progress Styles + * + * Progress bars, loading indicators, + * and download progress displays. + */ + +/* Progress bar base */ +.progress-bar { + width: 100%; + height: 8px; + background-color: var(--color-bg-tertiary); + border-radius: var(--border-radius-sm); + overflow: hidden; +} + +.progress-fill { + height: 100%; + background-color: var(--color-accent); + border-radius: var(--border-radius-sm); + transition: width var(--transition-duration) var(--transition-easing); + width: 0%; +} + +.progress-text { + margin-top: var(--spacing-xs); + text-align: center; + font-size: var(--font-size-caption); + color: var(--color-text-secondary); +} + +/* Progress container */ +.progress-container { + margin-top: var(--spacing-md); +} + +/* Mini progress bar */ +.progress-bar-mini { + width: 80px; + height: 4px; + background-color: var(--color-bg-tertiary); + border-radius: var(--border-radius-sm); + overflow: hidden; +} + +.progress-fill-mini { + height: 100%; + background-color: var(--color-accent); + border-radius: var(--border-radius-sm); + transition: width var(--transition-duration) var(--transition-easing); + width: 0%; +} + +.progress-text-mini { + font-size: var(--font-size-caption); + color: var(--color-text-secondary); + font-weight: 500; + min-width: 35px; +} + +/* Download progress */ +.download-progress { + display: flex; + align-items: center; + gap: var(--spacing-sm); + min-width: 120px; + margin-top: var(--spacing-lg); +} + +/* Progress bar gradient style */ +.download-progress .progress-bar { + width: 100%; + height: 8px; + background: var(--color-border); + border-radius: 4px; + overflow: hidden; + margin-bottom: var(--spacing-sm); +} + +.download-progress .progress-fill { + height: 100%; + background: linear-gradient(90deg, var(--color-primary), var(--color-accent)); + border-radius: 4px; + transition: width 0.3s ease; +} + +.progress-info { + display: flex; + justify-content: space-between; + align-items: center; + font-size: var(--font-size-caption); + color: var(--color-text-secondary); +} + +.download-speed { + color: var(--color-primary); + font-weight: 500; +} + +/* Missing episodes status */ +.missing-episodes { + display: flex; + align-items: center; + gap: var(--spacing-xs); + color: var(--color-text-secondary); + font-size: var(--font-size-caption); +} + +.missing-episodes i { + color: var(--color-warning); +} + +.missing-episodes.has-missing { + color: var(--color-warning); + font-weight: 500; +} + +.missing-episodes.complete { + color: var(--color-success); + font-weight: 500; +} + +.missing-episodes.has-missing i { + color: var(--color-warning); +} + +.missing-episodes.complete i { + color: var(--color-success); +} + +/* Speed and ETA section */ +.speed-eta-section { + display: flex; + justify-content: space-between; + align-items: center; + background: var(--color-surface); + border: 1px solid var(--color-border); + border-radius: var(--border-radius-lg); + padding: var(--spacing-lg); +} + +.speed-info { + display: flex; + gap: var(--spacing-xl); +} + +.speed-current, +.speed-average, +.eta-info { + display: flex; + flex-direction: column; + gap: var(--spacing-xs); +} + +.speed-info .label, +.eta-info .label { + font-size: var(--font-size-caption); + color: var(--color-text-secondary); + text-transform: uppercase; +} + +.speed-info .value, +.eta-info .value { + font-size: var(--font-size-subtitle); + font-weight: 
500; + color: var(--color-text-primary); +} + +/* Dark theme adjustments */ +[data-theme="dark"] .speed-eta-section { + background: var(--color-surface-dark); + border-color: var(--color-border-dark); +} + +/* Responsive adjustments */ +@media (max-width: 768px) { + .current-download-item { + flex-direction: column; + align-items: stretch; + gap: var(--spacing-sm); + } + + .download-progress { + justify-content: space-between; + } + + .speed-eta-section { + flex-direction: column; + gap: var(--spacing-lg); + text-align: center; + } + + .speed-info { + justify-content: center; + } +} diff --git a/src/server/web/static/css/components/status.css b/src/server/web/static/css/components/status.css new file mode 100644 index 0000000..08b00a9 --- /dev/null +++ b/src/server/web/static/css/components/status.css @@ -0,0 +1,128 @@ +/** + * AniWorld - Process Status Styles + * + * Process status indicators for scan and download operations. + */ + +/* Process Status Indicators */ +.process-status { + display: flex; + gap: var(--spacing-sm); + align-items: center; +} + +.status-indicator { + display: flex; + align-items: center; + gap: var(--spacing-sm); + padding: var(--spacing-sm) var(--spacing-md); + background: transparent; + border-radius: var(--border-radius); + border: none; + font-size: var(--font-size-caption); + color: var(--color-text-secondary); + transition: all var(--animation-duration-normal) var(--animation-easing-standard); + min-width: 0; + flex-shrink: 0; +} + +.status-indicator:hover { + background: transparent; + color: var(--color-text-primary); +} + +.status-indicator i { + font-size: 24px; + transition: all var(--animation-duration-normal) var(--animation-easing-standard); +} + +/* Status dots */ +.status-dot { + width: 8px; + height: 8px; + border-radius: 50%; + transition: all var(--animation-duration-normal) var(--animation-easing-standard); +} + +.status-dot.idle { + background-color: var(--color-text-disabled); +} + +.status-dot.running { + background-color: var(--color-accent); + animation: pulse 2s infinite; +} + +.status-dot.error { + background-color: #e74c3c; +} + +/* Rescan icon specific styling */ +#rescan-status { + cursor: pointer; +} + +#rescan-status i { + color: var(--color-text-disabled); +} + +#rescan-status.running i { + color: #22c55e; + animation: iconPulse 2s infinite; +} + +#rescan-status.running { + cursor: pointer; +} + +/* Animations */ +@keyframes pulse { + 0%, + 100% { + opacity: 1; + transform: scale(1); + } + + 50% { + opacity: 0.5; + transform: scale(1.2); + } +} + +@keyframes iconPulse { + 0%, + 100% { + opacity: 1; + transform: scale(1) rotate(0deg); + } + + 50% { + opacity: 0.7; + transform: scale(1.1) rotate(180deg); + } +} + +/* Mobile view */ +@media (max-width: 1024px) { + .process-status { + gap: 4px; + } +} + +@media (max-width: 768px) { + .process-status { + order: -1; + margin-right: 0; + margin-bottom: var(--spacing-sm); + } + + .status-indicator { + font-size: 11px; + padding: 6px 8px; + gap: 4px; + } + + .status-indicator i { + font-size: 20px; + } +} diff --git a/src/server/web/static/css/components/tables.css b/src/server/web/static/css/components/tables.css new file mode 100644 index 0000000..9ce4ecc --- /dev/null +++ b/src/server/web/static/css/components/tables.css @@ -0,0 +1,255 @@ +/** + * AniWorld - Table Styles + * + * Table, list, and queue item styles. 
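+ * + * Queue rows are flat flex items; a sketch of the assumed markup + * (hypothetical, inferred from the selectors below): + * + *   <div class="queue-list"> + *     <div class="queue-item"> + *       <span class="queue-item-index">1</span> + *       <span class="queue-item-name">Some Serie - S01E01</span> + *       <span class="queue-item-status">pending</span> + *     </div> + *   </div>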
+ */ + +/* Search results */ +.search-results { + background-color: var(--color-surface); + border: 1px solid var(--color-border); + border-radius: var(--border-radius-lg); + padding: var(--spacing-lg); + box-shadow: var(--shadow-card); + margin-top: var(--spacing-lg); +} + +.search-results h3 { + margin: 0 0 var(--spacing-md) 0; + font-size: var(--font-size-subtitle); + color: var(--color-text-primary); +} + +.search-results-list { + display: flex; + flex-direction: column; + gap: var(--spacing-sm); +} + +.search-result-item { + display: flex; + justify-content: space-between; + align-items: center; + padding: var(--spacing-md); + background-color: var(--color-bg-secondary); + border-radius: var(--border-radius-md); + transition: background-color var(--transition-duration) var(--transition-easing); +} + +.search-result-item:hover { + background-color: var(--color-surface-hover); +} + +.search-result-name { + font-weight: 500; + color: var(--color-text-primary); +} + +/* Download Queue Section */ +.download-queue-section { + margin-bottom: var(--spacing-xxl); + background-color: var(--color-surface); + border: 1px solid var(--color-border); + border-radius: var(--border-radius-lg); + box-shadow: var(--shadow-card); + overflow: hidden; +} + +.queue-header { + display: flex; + justify-content: space-between; + align-items: center; + padding: var(--spacing-lg); + background-color: var(--color-bg-secondary); + border-bottom: 1px solid var(--color-border); +} + +.queue-header h2 { + margin: 0; + font-size: var(--font-size-subtitle); + color: var(--color-text-primary); + display: flex; + align-items: center; + gap: var(--spacing-sm); +} + +.queue-header i { + color: var(--color-accent); +} + +.queue-progress { + font-size: var(--font-size-caption); + color: var(--color-text-secondary); + font-weight: 500; +} + +/* Current download */ +.current-download { + padding: var(--spacing-lg); + border-bottom: 1px solid var(--color-border); + background-color: var(--color-surface); +} + +.current-download-header { + margin-bottom: var(--spacing-md); +} + +.current-download-header h3 { + margin: 0; + font-size: var(--font-size-body); + color: var(--color-text-primary); + font-weight: 600; +} + +.current-download-item { + display: flex; + align-items: center; + gap: var(--spacing-lg); + padding: var(--spacing-md); + background-color: var(--color-bg-secondary); + border-radius: var(--border-radius-md); + border-left: 4px solid var(--color-accent); +} + +.download-info { + flex: 1; +} + +.serie-name { + font-weight: 600; + color: var(--color-text-primary); + margin-bottom: var(--spacing-xs); +} + +.episode-info { + font-size: var(--font-size-caption); + color: var(--color-text-secondary); +} + +/* Queue list */ +.queue-list-container { + padding: var(--spacing-lg); +} + +.queue-list-container h3 { + margin: 0 0 var(--spacing-md) 0; + font-size: var(--font-size-body); + color: var(--color-text-primary); + font-weight: 600; +} + +.queue-list { + display: flex; + flex-direction: column; + gap: var(--spacing-sm); +} + +.queue-item { + display: flex; + align-items: center; + gap: var(--spacing-md); + padding: var(--spacing-sm) var(--spacing-md); + background-color: var(--color-bg-secondary); + border-radius: var(--border-radius-md); + border-left: 3px solid var(--color-divider); +} + +.queue-item-index { + font-size: var(--font-size-caption); + color: var(--color-text-tertiary); + font-weight: 600; + min-width: 20px; +} + +.queue-item-name { + flex: 1; + color: var(--color-text-secondary); +} + +.queue-item-status 
{ + font-size: var(--font-size-caption); + color: var(--color-text-tertiary); +} + +.queue-empty { + text-align: center; + padding: var(--spacing-xl); + color: var(--color-text-tertiary); + font-style: italic; +} + +/* Stats grid */ +.queue-stats-section { + margin-bottom: var(--spacing-xl); +} + +.stats-grid { + display: grid; + grid-template-columns: repeat(auto-fit, minmax(200px, 1fr)); + gap: var(--spacing-lg); + margin-bottom: var(--spacing-lg); +} + +/* Drag and Drop Styles */ +.draggable-item { + cursor: move; + user-select: none; +} + +.draggable-item.dragging { + opacity: 0.5; + transform: scale(0.98); + cursor: grabbing; +} + +.draggable-item.drag-over { + border-top: 3px solid var(--color-primary); + margin-top: 8px; +} + +.drag-handle { + position: absolute; + left: 8px; + top: 50%; + transform: translateY(-50%); + color: var(--color-text-tertiary); + cursor: grab; + font-size: 1.2rem; + padding: var(--spacing-xs); + transition: color var(--transition-duration); +} + +.drag-handle:hover { + color: var(--color-primary); +} + +.drag-handle:active { + cursor: grabbing; +} + +.sortable-list { + position: relative; + min-height: 100px; +} + +.pending-queue-list { + position: relative; +} + +/* Responsive adjustments */ +@media (max-width: 768px) { + .queue-item { + flex-direction: column; + align-items: stretch; + gap: var(--spacing-xs); + } + + .queue-item-index { + min-width: auto; + } + + .stats-grid { + grid-template-columns: repeat(2, 1fr); + gap: var(--spacing-md); + } +} diff --git a/src/server/web/static/css/pages/index.css b/src/server/web/static/css/pages/index.css new file mode 100644 index 0000000..ab3a40e --- /dev/null +++ b/src/server/web/static/css/pages/index.css @@ -0,0 +1,230 @@ +/** + * AniWorld - Index Page Styles + * + * Index/library page specific styles including + * series grid, search, and scan overlay. 
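+ * + * The overlay is shown and hidden by toggling classes, e.g. this + * hypothetical running state: + * + *   <div class="scan-progress-overlay visible"> + *     <div class="scan-progress-container"> + *       <span class="scan-progress-spinner"></span> + *       <!-- progress bar, stats, current directory --> + *     </div> + *   </div> + * + * Adding "completed" to .scan-progress-container hides the spinner and + * switches the bar and counters to the success color.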
+ */ + +/* Scan Progress Overlay */ +.scan-progress-overlay { + position: fixed; + top: 0; + left: 0; + width: 100%; + height: 100%; + background-color: rgba(0, 0, 0, 0.6); + display: flex; + justify-content: center; + align-items: center; + z-index: 3000; + opacity: 0; + visibility: hidden; + transition: opacity 0.3s ease, visibility 0.3s ease; +} + +.scan-progress-overlay.visible { + opacity: 1; + visibility: visible; +} + +.scan-progress-container { + background-color: var(--color-surface); + border: 1px solid var(--color-border); + border-radius: var(--border-radius-lg); + box-shadow: var(--shadow-elevated); + padding: var(--spacing-xxl); + max-width: 450px; + width: 90%; + text-align: center; + animation: scanProgressSlideIn 0.3s ease; +} + +@keyframes scanProgressSlideIn { + from { + transform: translateY(-20px); + opacity: 0; + } + to { + transform: translateY(0); + opacity: 1; + } +} + +.scan-progress-header { + margin-bottom: var(--spacing-lg); +} + +.scan-progress-header h3 { + margin: 0; + font-size: var(--font-size-title); + color: var(--color-text-primary); + display: flex; + align-items: center; + justify-content: center; + gap: var(--spacing-sm); +} + +.scan-progress-spinner { + display: inline-block; + width: 24px; + height: 24px; + border: 3px solid var(--color-bg-tertiary); + border-top-color: var(--color-accent); + border-radius: 50%; + animation: scanSpinner 1s linear infinite; +} + +@keyframes scanSpinner { + to { + transform: rotate(360deg); + } +} + +/* Progress bar for scan */ +.scan-progress-bar-container { + width: 100%; + height: 8px; + background-color: var(--color-bg-tertiary); + border-radius: 4px; + overflow: hidden; + margin-bottom: var(--spacing-sm); +} + +.scan-progress-bar { + height: 100%; + background: linear-gradient(90deg, var(--color-accent), var(--color-accent-hover, var(--color-accent))); + border-radius: 4px; + transition: width 0.3s ease; +} + +.scan-progress-container.completed .scan-progress-bar { + background: var(--color-success); +} + +.scan-progress-text { + font-size: var(--font-size-body); + color: var(--color-text-secondary); + margin-bottom: var(--spacing-md); +} + +.scan-progress-text #scan-current-count { + font-weight: 600; + color: var(--color-accent); +} + +.scan-progress-text #scan-total-count { + font-weight: 600; + color: var(--color-text-primary); +} + +.scan-progress-container.completed .scan-progress-text #scan-current-count { + color: var(--color-success); +} + +.scan-progress-stats { + display: flex; + justify-content: space-around; + margin: var(--spacing-lg) 0; + padding: var(--spacing-md) 0; + border-top: 1px solid var(--color-border); + border-bottom: 1px solid var(--color-border); +} + +.scan-stat { + display: flex; + flex-direction: column; + align-items: center; + gap: var(--spacing-xs); +} + +.scan-stat-value { + font-size: var(--font-size-large-title); + font-weight: 600; + color: var(--color-accent); + line-height: 1; +} + +.scan-stat-label { + font-size: var(--font-size-caption); + color: var(--color-text-secondary); + text-transform: uppercase; + letter-spacing: 0.5px; +} + +.scan-current-directory { + margin-top: var(--spacing-md); + padding: var(--spacing-sm) var(--spacing-md); + background-color: var(--color-bg-secondary); + border-radius: var(--border-radius-md); + font-size: var(--font-size-caption); + color: var(--color-text-secondary); + white-space: nowrap; + overflow: hidden; + text-overflow: ellipsis; + max-width: 100%; +} + +.scan-current-directory-label {
+ font-weight: 500; + color: var(--color-text-tertiary); + margin-right: var(--spacing-xs); +} + +/* Scan completed state */ +.scan-progress-container.completed .scan-progress-spinner { + display: none; +} + +.scan-progress-container.completed .scan-progress-header h3 { + color: var(--color-success); +} + +.scan-completed-icon { + display: none; + width: 24px; + height: 24px; + color: var(--color-success); +} + +.scan-progress-container.completed .scan-completed-icon { + display: inline-block; +} + +.scan-progress-container.completed .scan-stat-value { + color: var(--color-success); +} + +.scan-elapsed-time { + margin-top: var(--spacing-md); + font-size: var(--font-size-body); + color: var(--color-text-secondary); +} + +.scan-elapsed-time i { + margin-right: var(--spacing-xs); + color: var(--color-text-tertiary); +} + +/* Responsive adjustments for scan overlay */ +@media (max-width: 768px) { + .scan-progress-container { + padding: var(--spacing-lg); + max-width: 95%; + } + + .scan-progress-stats { + flex-direction: column; + gap: var(--spacing-md); + } + + .scan-stat { + flex-direction: row; + justify-content: space-between; + width: 100%; + padding: 0 var(--spacing-md); + } + + .scan-stat-value { + font-size: var(--font-size-title); + } +} diff --git a/src/server/web/static/css/pages/login.css b/src/server/web/static/css/pages/login.css new file mode 100644 index 0000000..28ad9a9 --- /dev/null +++ b/src/server/web/static/css/pages/login.css @@ -0,0 +1,168 @@ +/** + * AniWorld - Login Page Styles + * + * Login page specific styles including login card, + * form elements, and branding. + */ + +.login-container { + min-height: 100vh; + display: flex; + align-items: center; + justify-content: center; + background: linear-gradient(135deg, var(--color-primary-light) 0%, var(--color-primary) 100%); + padding: 1rem; +} + +.login-card { + background: var(--color-surface); + border-radius: 16px; + padding: 2rem; + box-shadow: 0 8px 32px rgba(0, 0, 0, 0.1); + width: 100%; + max-width: 400px; + border: 1px solid var(--color-border); +} + +.login-header { + text-align: center; + margin-bottom: 2rem; +} + +.login-header .logo { + font-size: 3rem; + color: var(--color-primary); + margin-bottom: 0.5rem; +} + +.login-header h1 { + margin: 0; + color: var(--color-text); + font-size: 1.5rem; + font-weight: 600; +} + +.login-header p { + margin: 0.5rem 0 0 0; + color: var(--color-text-secondary); + font-size: 0.9rem; +} + +.login-form { + display: flex; + flex-direction: column; + gap: 1.5rem; +} + +/* Password input group */ +.password-input-group { + position: relative; +} + +.password-input { + width: 100%; + padding: 0.75rem 3rem 0.75rem 1rem; + border: 2px solid var(--color-border); + border-radius: 8px; + font-size: 1rem; + background: var(--color-background); + color: var(--color-text); + transition: all 0.2s ease; + box-sizing: border-box; +} + +.password-input:focus { + outline: none; + border-color: var(--color-primary); + box-shadow: 0 0 0 3px rgba(var(--color-primary-rgb), 0.1); +} + +.password-toggle { + position: absolute; + right: 0.75rem; + top: 50%; + transform: translateY(-50%); + background: none; + border: none; + color: var(--color-text-secondary); + cursor: pointer; + padding: 0.25rem; + display: flex; + align-items: center; + justify-content: center; +} + +.password-toggle:hover { + color: var(--color-text-primary); +} + +/* Login button */ +.login-btn { + width: 100%; + padding: 0.875rem; + background: var(--color-primary); + color: white; + border: none; + border-radius: 8px; + 
font-size: 1rem; + font-weight: 600; + cursor: pointer; + transition: all 0.2s ease; + display: flex; + align-items: center; + justify-content: center; + gap: 0.5rem; +} + +.login-btn:hover:not(:disabled) { + background: var(--color-accent-hover); + transform: translateY(-1px); +} + +.login-btn:disabled { + opacity: 0.6; + cursor: not-allowed; +} + +/* Error message */ +.login-error { + background: rgba(var(--color-error-rgb, 209, 52, 56), 0.1); + border: 1px solid var(--color-error); + border-radius: 8px; + padding: 0.75rem; + color: var(--color-error); + font-size: 0.875rem; + display: flex; + align-items: center; + gap: 0.5rem; +} + +/* Remember me checkbox */ +.remember-me { + display: flex; + align-items: center; + gap: 0.5rem; + font-size: 0.875rem; + color: var(--color-text-secondary); +} + +.remember-me input { + accent-color: var(--color-primary); +} + +/* Footer links */ +.login-footer { + margin-top: 1.5rem; + text-align: center; + font-size: 0.875rem; + color: var(--color-text-secondary); +} + +.login-footer a { + color: var(--color-primary); + text-decoration: none; +} + +.login-footer a:hover { + text-decoration: underline; +} diff --git a/src/server/web/static/css/pages/queue.css b/src/server/web/static/css/pages/queue.css new file mode 100644 index 0000000..035b8f5 --- /dev/null +++ b/src/server/web/static/css/pages/queue.css @@ -0,0 +1,46 @@ +/** + * AniWorld - Queue Page Styles + * + * Queue page specific styles for download management. + */ + +/* Active downloads section */ +.active-downloads-section { + margin-bottom: var(--spacing-xl); +} + +.active-downloads-list { + min-height: 100px; +} + +/* Pending queue section */ +.pending-queue-section { + margin-bottom: var(--spacing-xl); +} + +/* Completed downloads section */ +.completed-downloads-section { + margin-bottom: var(--spacing-xl); +} + +/* Failed downloads section */ +.failed-downloads-section { + margin-bottom: var(--spacing-xl); +} + +/* Queue page text color utilities */ +.text-primary { + color: var(--color-primary); +} + +.text-success { + color: var(--color-success); +} + +.text-warning { + color: var(--color-warning); +} + +.text-error { + color: var(--color-error); +} diff --git a/src/server/web/static/css/styles.css b/src/server/web/static/css/styles.css index 0630cae..5dc062b 100644 --- a/src/server/web/static/css/styles.css +++ b/src/server/web/static/css/styles.css @@ -1,1901 +1,32 @@ -/* Fluent UI Design System Variables */ -:root { - /* Light theme colors */ - --color-bg-primary: #ffffff; - --color-bg-secondary: #faf9f8; - --color-bg-tertiary: #f3f2f1; - --color-surface: #ffffff; - --color-surface-hover: #f3f2f1; - --color-surface-pressed: #edebe9; - --color-text-primary: #323130; - --color-text-secondary: #605e5c; - --color-text-tertiary: #a19f9d; - --color-accent: #0078d4; - --color-accent-hover: #106ebe; - --color-accent-pressed: #005a9e; - --color-success: #107c10; - --color-warning: #ff8c00; - --color-error: #d13438; - --color-border: #e1dfdd; - --color-divider: #c8c6c4; - - /* Dark theme colors */ - --color-bg-primary-dark: #202020; - --color-bg-secondary-dark: #2d2d30; - --color-bg-tertiary-dark: #3e3e42; - --color-surface-dark: #292929; - --color-surface-hover-dark: #3e3e42; - --color-surface-pressed-dark: #484848; - --color-text-primary-dark: #ffffff; - --color-text-secondary-dark: #cccccc; - --color-text-tertiary-dark: #969696; - --color-accent-dark: #60cdff; - --color-accent-hover-dark: #4db8e8; - --color-accent-pressed-dark: #3aa0d1; - --color-border-dark: #484644; - 
--color-divider-dark: #605e5c; - - /* Typography */ - --font-family: 'Segoe UI', 'Segoe UI Web (West European)', -apple-system, BlinkMacSystemFont, Roboto, 'Helvetica Neue', sans-serif; - --font-size-caption: 12px; - --font-size-body: 14px; - --font-size-subtitle: 16px; - --font-size-title: 20px; - --font-size-large-title: 32px; - - /* Spacing */ - --spacing-xs: 4px; - --spacing-sm: 8px; - --spacing-md: 12px; - --spacing-lg: 16px; - --spacing-xl: 20px; - --spacing-xxl: 24px; - - /* Border radius */ - --border-radius-sm: 2px; - --border-radius-md: 4px; - --border-radius-lg: 6px; - --border-radius-xl: 8px; - - /* Shadows */ - --shadow-card: 0 1.6px 3.6px 0 rgba(0, 0, 0, 0.132), 0 0.3px 0.9px 0 rgba(0, 0, 0, 0.108); - --shadow-elevated: 0 6.4px 14.4px 0 rgba(0, 0, 0, 0.132), 0 1.2px 3.6px 0 rgba(0, 0, 0, 0.108); - - /* Transitions */ - --transition-duration: 0.15s; - --transition-easing: cubic-bezier(0.1, 0.9, 0.2, 1); -} - -/* Dark theme */ -[data-theme="dark"] { - --color-bg-primary: var(--color-bg-primary-dark); - --color-bg-secondary: var(--color-bg-secondary-dark); - --color-bg-tertiary: var(--color-bg-tertiary-dark); - --color-surface: var(--color-surface-dark); - --color-surface-hover: var(--color-surface-hover-dark); - --color-surface-pressed: var(--color-surface-pressed-dark); - --color-text-primary: var(--color-text-primary-dark); - --color-text-secondary: var(--color-text-secondary-dark); - --color-text-tertiary: var(--color-text-tertiary-dark); - --color-accent: var(--color-accent-dark); - --color-accent-hover: var(--color-accent-hover-dark); - --color-accent-pressed: var(--color-accent-pressed-dark); - --color-border: var(--color-border-dark); - --color-divider: var(--color-divider-dark); -} - -/* Base styles */ -* { - box-sizing: border-box; -} - -html { - font-size: 100%; -} - -body { - margin: 0; - padding: 0; - font-family: var(--font-family); - font-size: var(--font-size-body); - line-height: 1.5; - color: var(--color-text-primary); - background-color: var(--color-bg-primary); - transition: background-color var(--transition-duration) var(--transition-easing), - color var(--transition-duration) var(--transition-easing); -} - -/* App container */ -.app-container { - min-height: 100vh; - display: flex; - flex-direction: column; -} - -/* Header */ -.header { - background-color: var(--color-surface); - border-bottom: 1px solid var(--color-border); - padding: var(--spacing-lg) var(--spacing-xl); - box-shadow: var(--shadow-card); - transition: background-color var(--transition-duration) var(--transition-easing); -} - -.header-content { - display: flex; - justify-content: space-between; - align-items: center; - max-width: 1200px; - margin: 0 auto; - min-height: 60px; - position: relative; - width: 100%; - box-sizing: border-box; -} - -.header-title { - display: flex; - align-items: center; - gap: var(--spacing-md); - flex-shrink: 1; - min-width: 150px; -} - -.header-title i { - font-size: var(--font-size-title); - color: var(--color-accent); -} - -.header-title h1 { - margin: 0; - font-size: var(--font-size-title); - font-weight: 600; - color: var(--color-text-primary); -} - -.header-actions { - display: flex; - align-items: center; - gap: var(--spacing-lg); - flex-shrink: 0; - flex-wrap: nowrap; - justify-content: flex-end; -} - -/* Main content */ -.main-content { - flex: 1; - padding: var(--spacing-xl); - max-width: 1200px; - margin: 0 auto; - width: 100%; -} - -/* Search section */ -.search-section { - margin-bottom: var(--spacing-xxl); -} - -.search-container { - margin-bottom: 
var(--spacing-lg); -} - -.search-input-group { - display: flex; - gap: var(--spacing-sm); - max-width: 600px; -} - -.search-input { - flex: 1; - padding: var(--spacing-md); - border: 1px solid var(--color-border); - border-radius: var(--border-radius-md); - font-size: var(--font-size-body); - background-color: var(--color-surface); - color: var(--color-text-primary); - transition: all var(--transition-duration) var(--transition-easing); -} - -.search-input:focus { - outline: none; - border-color: var(--color-accent); - box-shadow: 0 0 0 1px var(--color-accent); -} - -.search-results { - background-color: var(--color-surface); - border: 1px solid var(--color-border); - border-radius: var(--border-radius-lg); - padding: var(--spacing-lg); - box-shadow: var(--shadow-card); - margin-top: var(--spacing-lg); -} - -.search-results h3 { - margin: 0 0 var(--spacing-md) 0; - font-size: var(--font-size-subtitle); - color: var(--color-text-primary); -} - -.search-results-list { - display: flex; - flex-direction: column; - gap: var(--spacing-sm); -} - -.search-result-item { - display: flex; - justify-content: space-between; - align-items: center; - padding: var(--spacing-md); - background-color: var(--color-bg-secondary); - border-radius: var(--border-radius-md); - transition: background-color var(--transition-duration) var(--transition-easing); -} - -.search-result-item:hover { - background-color: var(--color-surface-hover); -} - -.search-result-name { - font-weight: 500; - color: var(--color-text-primary); -} - -/* Download Queue Section */ -.download-queue-section { - margin-bottom: var(--spacing-xxl); - background-color: var(--color-surface); - border: 1px solid var(--color-border); - border-radius: var(--border-radius-lg); - box-shadow: var(--shadow-card); - overflow: hidden; -} - -.queue-header { - display: flex; - justify-content: space-between; - align-items: center; - padding: var(--spacing-lg); - background-color: var(--color-bg-secondary); - border-bottom: 1px solid var(--color-border); -} - -.queue-header h2 { - margin: 0; - font-size: var(--font-size-subtitle); - color: var(--color-text-primary); - display: flex; - align-items: center; - gap: var(--spacing-sm); -} - -.queue-header i { - color: var(--color-accent); -} - -.queue-progress { - font-size: var(--font-size-caption); - color: var(--color-text-secondary); - font-weight: 500; -} - -.current-download { - padding: var(--spacing-lg); - border-bottom: 1px solid var(--color-border); - background-color: var(--color-surface); -} - -.current-download-header { - margin-bottom: var(--spacing-md); -} - -.current-download-header h3 { - margin: 0; - font-size: var(--font-size-body); - color: var(--color-text-primary); - font-weight: 600; -} - -.current-download-item { - display: flex; - align-items: center; - gap: var(--spacing-lg); - padding: var(--spacing-md); - background-color: var(--color-bg-secondary); - border-radius: var(--border-radius-md); - border-left: 4px solid var(--color-accent); -} - -.download-info { - flex: 1; -} - -.serie-name { - font-weight: 600; - color: var(--color-text-primary); - margin-bottom: var(--spacing-xs); -} - -.episode-info { - font-size: var(--font-size-caption); - color: var(--color-text-secondary); -} - -.download-progress { - display: flex; - align-items: center; - gap: var(--spacing-sm); - min-width: 120px; -} - -.progress-bar-mini { - width: 80px; - height: 4px; - background-color: var(--color-bg-tertiary); - border-radius: var(--border-radius-sm); - overflow: hidden; -} - -.progress-fill-mini { - height: 
100%; - background-color: var(--color-accent); - border-radius: var(--border-radius-sm); - transition: width var(--transition-duration) var(--transition-easing); - width: 0%; -} - -.progress-text-mini { - font-size: var(--font-size-caption); - color: var(--color-text-secondary); - font-weight: 500; - min-width: 35px; -} - -.queue-list-container { - padding: var(--spacing-lg); -} - -.queue-list-container h3 { - margin: 0 0 var(--spacing-md) 0; - font-size: var(--font-size-body); - color: var(--color-text-primary); - font-weight: 600; -} - -.queue-list { - display: flex; - flex-direction: column; - gap: var(--spacing-sm); -} - -.queue-item { - display: flex; - align-items: center; - gap: var(--spacing-md); - padding: var(--spacing-sm) var(--spacing-md); - background-color: var(--color-bg-secondary); - border-radius: var(--border-radius-md); - border-left: 3px solid var(--color-divider); -} - -.queue-item-index { - font-size: var(--font-size-caption); - color: var(--color-text-tertiary); - font-weight: 600; - min-width: 20px; -} - -.queue-item-name { - flex: 1; - color: var(--color-text-secondary); -} - -.queue-item-status { - font-size: var(--font-size-caption); - color: var(--color-text-tertiary); -} - -.queue-empty { - text-align: center; - padding: var(--spacing-xl); - color: var(--color-text-tertiary); - font-style: italic; -} - -/* Series section */ -.series-section { - margin-bottom: var(--spacing-xxl); -} - -.series-header { - display: flex; - justify-content: space-between; - align-items: center; - margin-bottom: var(--spacing-lg); -} - -.series-header h2 { - margin: 0; - font-size: var(--font-size-title); - color: var(--color-text-primary); -} - -.series-actions { - display: flex; - gap: var(--spacing-md); -} - -.series-grid { - display: grid; - grid-template-columns: repeat(auto-fill, minmax(300px, 1fr)); - gap: var(--spacing-lg); -} - -.series-card { - background-color: var(--color-surface); - border: 1px solid var(--color-border); - border-radius: var(--border-radius-lg); - padding: var(--spacing-lg); - box-shadow: var(--shadow-card); - transition: all var(--transition-duration) var(--transition-easing); - position: relative; - display: flex; - flex-direction: column; - min-height: 120px; -} - -.series-card:hover { - box-shadow: var(--shadow-elevated); - transform: translateY(-1px); -} - -.series-card.selected { - border-color: var(--color-accent); - background-color: var(--color-surface-hover); -} - -.series-card-header { - display: flex; - justify-content: space-between; - align-items: flex-start; - margin-bottom: var(--spacing-md); -} - -.series-checkbox { - width: 18px; - height: 18px; - accent-color: var(--color-accent); -} - -.series-info h3 { - margin: 0 0 var(--spacing-xs) 0; - font-size: var(--font-size-subtitle); - color: var(--color-text-primary); - line-height: 1.3; -} - -.series-folder { - font-size: var(--font-size-caption); - color: var(--color-text-tertiary); - margin-bottom: var(--spacing-sm); -} - -.series-stats { - display: flex; - align-items: center; - gap: var(--spacing-md); - margin-top: auto; -} - -.missing-episodes { - display: flex; - align-items: center; - gap: var(--spacing-xs); - color: var(--color-text-secondary); - font-size: var(--font-size-caption); -} - -.missing-episodes i { - color: var(--color-warning); -} - -/* Button styles */ -.btn { - display: inline-flex; - align-items: center; - gap: var(--spacing-xs); - padding: var(--spacing-sm) var(--spacing-md); - border: 1px solid transparent; - border-radius: var(--border-radius-md); - font-size: 
var(--font-size-body); - font-weight: 500; - text-decoration: none; - cursor: pointer; - transition: all var(--transition-duration) var(--transition-easing); - background-color: transparent; - color: var(--color-text-primary); -} - -.btn:disabled { - opacity: 0.6; - cursor: not-allowed; -} - -.btn-primary { - background-color: var(--color-accent); - color: white; -} - -.btn-primary:hover:not(:disabled) { - background-color: var(--color-accent-hover); -} - -.btn-primary:active { - background-color: var(--color-accent-pressed); -} - -.btn-secondary { - background-color: var(--color-surface); - border-color: var(--color-border); - color: var(--color-text-primary); -} - -.btn-secondary:hover:not(:disabled) { - background-color: var(--color-surface-hover); -} - -.btn-success { - background-color: var(--color-success); - color: white; -} - -.btn-success:hover:not(:disabled) { - background-color: #0e6b0e; -} - -.btn-icon { - padding: var(--spacing-sm); - min-width: auto; -} - -.btn-small { - padding: var(--spacing-xs) var(--spacing-sm); - font-size: var(--font-size-caption); -} - -/* Status panel */ -.status-panel { - position: fixed; - bottom: var(--spacing-xl); - right: var(--spacing-xl); - width: 400px; - background-color: var(--color-surface); - border: 1px solid var(--color-border); - border-radius: var(--border-radius-lg); - box-shadow: var(--shadow-elevated); - z-index: 1000; - transition: all var(--transition-duration) var(--transition-easing); -} - -.status-header { - display: flex; - justify-content: space-between; - align-items: center; - padding: var(--spacing-md) var(--spacing-lg); - border-bottom: 1px solid var(--color-border); -} - -.status-header h3 { - margin: 0; - font-size: var(--font-size-subtitle); - color: var(--color-text-primary); -} - -.status-content { - padding: var(--spacing-lg); -} - -.status-message { - margin-bottom: var(--spacing-md); - color: var(--color-text-secondary); -} - -.progress-container { - margin-top: var(--spacing-md); -} - -.progress-bar { - width: 100%; - height: 8px; - background-color: var(--color-bg-tertiary); - border-radius: var(--border-radius-sm); - overflow: hidden; -} - -.progress-fill { - height: 100%; - background-color: var(--color-accent); - border-radius: var(--border-radius-sm); - transition: width var(--transition-duration) var(--transition-easing); - width: 0%; -} - -.progress-text { - margin-top: var(--spacing-xs); - text-align: center; - font-size: var(--font-size-caption); - color: var(--color-text-secondary); -} - -/* Toast notifications */ -.toast-container { - position: fixed; - top: var(--spacing-xl); - right: var(--spacing-xl); - z-index: 1100; - display: flex; - flex-direction: column; - gap: var(--spacing-sm); -} - -.toast { - background-color: var(--color-surface); - border: 1px solid var(--color-border); - border-radius: var(--border-radius-lg); - padding: var(--spacing-md) var(--spacing-lg); - box-shadow: var(--shadow-elevated); - min-width: 300px; - animation: slideIn var(--transition-duration) var(--transition-easing); -} - -.toast.success { - border-left: 4px solid var(--color-success); -} - -.toast.error { - border-left: 4px solid var(--color-error); -} - -.toast.warning { - border-left: 4px solid var(--color-warning); -} - -/* Loading overlay */ -.loading-overlay { - position: fixed; - top: 0; - left: 0; - width: 100%; - height: 100%; - background-color: rgba(0, 0, 0, 0.5); - display: flex; - justify-content: center; - align-items: center; - z-index: 2000; -} - -.loading-spinner { - text-align: center; - color: 
white; -} - -.loading-spinner i { - font-size: 48px; - margin-bottom: var(--spacing-md); -} - -.loading-spinner p { - margin: 0; - font-size: var(--font-size-subtitle); -} - -/* Modal styles */ -.modal { - position: fixed; - top: 0; - left: 0; - width: 100%; - height: 100%; - z-index: 2000; - display: flex; - justify-content: center; - align-items: center; -} - -.modal-overlay { - position: absolute; - top: 0; - left: 0; - width: 100%; - height: 100%; - background-color: rgba(0, 0, 0, 0.5); -} - -.modal-content { - position: relative; - background-color: var(--color-surface); - border: 1px solid var(--color-border); - border-radius: var(--border-radius-lg); - box-shadow: var(--shadow-elevated); - max-width: 500px; - width: 90%; - max-height: 80vh; - overflow: hidden; -} - -.modal-header { - display: flex; - justify-content: space-between; - align-items: center; - padding: var(--spacing-lg); - border-bottom: 1px solid var(--color-border); -} - -.modal-header h3 { - margin: 0; - font-size: var(--font-size-subtitle); - color: var(--color-text-primary); -} - -.modal-body { - padding: var(--spacing-lg); - overflow-y: auto; -} - -.config-item { - margin-bottom: var(--spacing-lg); -} - -.config-item:last-child { - margin-bottom: 0; -} - -.config-item label { - display: block; - font-weight: 500; - color: var(--color-text-primary); - margin-bottom: var(--spacing-xs); -} - -.config-value { - padding: var(--spacing-sm); - background-color: var(--color-bg-secondary); - border: 1px solid var(--color-border); - border-radius: var(--border-radius-md); - font-family: monospace; - font-size: var(--font-size-caption); - color: var(--color-text-secondary); - word-break: break-all; -} - -.status-indicator { - display: inline-block; - width: 8px; - height: 8px; - border-radius: 50%; - background-color: var(--color-error); - margin-right: var(--spacing-xs); -} - -.status-indicator.connected { - background-color: var(--color-success); -} - -.download-controls { - display: flex; - gap: var(--spacing-sm); - margin-top: var(--spacing-md); - justify-content: center; -} - -/* Utility classes */ -.hidden { - display: none !important; -} - -.text-center { - text-align: center; -} - -.mb-0 { - margin-bottom: 0 !important; -} - -.mb-1 { - margin-bottom: var(--spacing-xs) !important; -} - -.mb-2 { - margin-bottom: var(--spacing-sm) !important; -} - -.mb-3 { - margin-bottom: var(--spacing-md) !important; -} - -.mb-4 { - margin-bottom: var(--spacing-lg) !important; -} - -/* Animations */ -@keyframes slideIn { - from { - transform: translateX(100%); - opacity: 0; - } - - to { - transform: translateX(0); - opacity: 1; - } -} - -@keyframes fadeIn { - from { - opacity: 0; - } - - to { - opacity: 1; - } -} - -@keyframes fadeOut { - from { - opacity: 1; - } - - to { - opacity: 0; - } -} - -/* Responsive design */ -@media (max-width: 1024px) { - .header-title { - min-width: 120px; - } - - .header-title h1 { - font-size: 1.4rem; - } - - .header-actions { - gap: var(--spacing-sm); - } - - .process-status { - gap: 4px; - } -} - -@media (max-width: 768px) { - .header-content { - flex-direction: column; - gap: var(--spacing-md); - min-height: auto; - } - - .header-title { - text-align: center; - min-width: auto; - justify-content: center; - } - - .header-actions { - justify-content: center; - flex-wrap: wrap; - width: 100%; - gap: var(--spacing-sm); - } - - .main-content { - padding: var(--spacing-md); - } - - .series-header { - flex-direction: column; - gap: var(--spacing-md); - align-items: stretch; - } - - .series-actions { - 
justify-content: center; - } - - .series-grid { - grid-template-columns: 1fr; - } - - .status-panel { - bottom: var(--spacing-md); - right: var(--spacing-md); - left: var(--spacing-md); - width: auto; - } - - .toast-container { - top: var(--spacing-md); - right: var(--spacing-md); - left: var(--spacing-md); - } - - .toast { - min-width: auto; - } - - .current-download-item { - flex-direction: column; - align-items: stretch; - gap: var(--spacing-sm); - } - - .download-progress { - justify-content: space-between; - } - - .queue-item { - flex-direction: column; - align-items: stretch; - gap: var(--spacing-xs); - } - - .queue-item-index { - min-width: auto; - } -} - -/* Enhanced Anime Display Styles */ -.series-filters { - display: flex; - gap: var(--spacing-md); - margin-bottom: var(--spacing-lg); -} - -.series-filters .btn[data-active="true"] { - background-color: var(--color-primary); - color: white; - border-color: var(--color-primary); -} - -.series-filters .btn[data-active="true"]:hover { - background-color: var(--color-primary-dark); -} - -/* Series Card Status Indicators */ -.series-card-header { - position: relative; -} - -.series-status { - position: absolute; - top: var(--spacing-sm); - right: var(--spacing-sm); - display: flex; - align-items: center; -} - -.status-missing { - color: var(--color-warning); - font-size: 1.2em; -} - -.status-complete { - color: var(--color-success); - font-size: 1.2em; -} - -/* Series Card States */ -.series-card.has-missing { - border-left: 4px solid var(--color-warning); -} - -.series-card.complete { - border-left: 4px solid var(--color-success); - opacity: 0.8; -} - -.series-card.complete .series-checkbox { - opacity: 0.5; - cursor: not-allowed; -} - -.series-card.complete:not(.selected) { - background-color: var(--color-background-secondary); -} - -/* Missing Episodes Status */ -.missing-episodes.has-missing { - color: var(--color-warning); - font-weight: 500; -} - -.missing-episodes.complete { - color: var(--color-success); - font-weight: 500; -} - -.missing-episodes.has-missing i { - color: var(--color-warning); -} - -.missing-episodes.complete i { - color: var(--color-success); -} - -/* Dark theme adjustments */ -[data-theme="dark"] .series-card.complete:not(.selected) { - background-color: var(--color-background-tertiary); -} - -[data-theme="dark"] .series-filters .btn[data-active="true"] { - background-color: var(--color-primary); - color: white; -} - -/* Filter button active state animation */ -.series-filters .btn { - transition: all 0.2s ease; -} - -.series-filters .btn[data-active="true"] { - transform: scale(1.02); - box-shadow: 0 2px 8px rgba(0, 120, 212, 0.3); -} - -/* Enhanced series header layout */ -.series-header { - display: flex; - flex-direction: column; - gap: var(--spacing-lg); - margin-bottom: var(--spacing-xl); -} - -@media (min-width: 768px) { - .series-header { - flex-direction: row; - align-items: center; - justify-content: space-between; - } - - .series-filters { - margin-bottom: 0; - } -} - -/* Download Queue Management Styles */ -.queue-stats-section { - margin-bottom: var(--spacing-xl); -} - -.stats-grid { - display: grid; - grid-template-columns: repeat(auto-fit, minmax(200px, 1fr)); - gap: var(--spacing-lg); - margin-bottom: var(--spacing-lg); -} - -.stat-card { - background: var(--color-surface); - border: 1px solid var(--color-border); - border-radius: var(--border-radius-lg); - padding: var(--spacing-lg); - display: flex; - align-items: center; - gap: var(--spacing-lg); - transition: all var(--transition-duration) 
var(--transition-easing); -} - -.stat-card:hover { - background: var(--color-surface-hover); - transform: translateY(-2px); - box-shadow: var(--shadow-elevated); -} - -.stat-icon { - font-size: 2rem; - width: 48px; - height: 48px; - display: flex; - align-items: center; - justify-content: center; - border-radius: 50%; - background: rgba(var(--color-primary-rgb), 0.1); -} - -.stat-value { - font-size: var(--font-size-title); - font-weight: 600; - color: var(--color-text-primary); - line-height: 1; -} - -.stat-label { - font-size: var(--font-size-caption); - color: var(--color-text-secondary); - text-transform: uppercase; - letter-spacing: 0.5px; -} - -.speed-eta-section { - display: flex; - justify-content: space-between; - align-items: center; - background: var(--color-surface); - border: 1px solid var(--color-border); - border-radius: var(--border-radius-lg); - padding: var(--spacing-lg); -} - -.speed-info { - display: flex; - gap: var(--spacing-xl); -} - -.speed-current, -.speed-average, -.eta-info { - display: flex; - flex-direction: column; - gap: var(--spacing-xs); -} - -.speed-info .label, -.eta-info .label { - font-size: var(--font-size-caption); - color: var(--color-text-secondary); - text-transform: uppercase; -} - -.speed-info .value, -.eta-info .value { - font-size: var(--font-size-subtitle); - font-weight: 500; - color: var(--color-text-primary); -} - -/* Section Headers */ -.section-header { - display: flex; - justify-content: space-between; - align-items: center; - margin-bottom: var(--spacing-lg); - padding-bottom: var(--spacing-md); - border-bottom: 1px solid var(--color-border); -} - -.section-header h2 { - display: flex; - align-items: center; - gap: var(--spacing-sm); - margin: 0; - font-size: var(--font-size-title); - color: var(--color-text-primary); -} - -.section-actions { - display: flex; - gap: var(--spacing-sm); -} - -/* Download Cards */ -.download-card { - background: var(--color-surface); - border: 1px solid var(--color-border); - border-radius: var(--border-radius-lg); - padding: var(--spacing-lg); - margin-bottom: var(--spacing-md); - transition: all var(--transition-duration) var(--transition-easing); -} - -.download-card:hover { - background: var(--color-surface-hover); - transform: translateX(4px); -} - -.download-card.active { - border-left: 4px solid var(--color-primary); -} - -.download-card.completed { - border-left: 4px solid var(--color-success); - opacity: 0.8; -} - -.download-card.failed { - border-left: 4px solid var(--color-error); -} - -.download-card.pending { - border-left: 4px solid var(--color-warning); - position: relative; -} - -.download-card.pending.high-priority { - border-left-color: var(--color-accent); - background: linear-gradient(90deg, rgba(var(--color-accent-rgb), 0.05) 0%, transparent 10%); -} - -/* Drag and Drop Styles */ -.draggable-item { - cursor: move; - user-select: none; -} - -.draggable-item.dragging { - opacity: 0.5; - transform: scale(0.98); - cursor: grabbing; -} - -.draggable-item.drag-over { - border-top: 3px solid var(--color-primary); - margin-top: 8px; -} - -.drag-handle { - position: absolute; - left: 8px; - top: 50%; - transform: translateY(-50%); - color: var(--color-text-tertiary); - cursor: grab; - font-size: 1.2rem; - padding: var(--spacing-xs); - transition: color var(--transition-duration); -} - -.drag-handle:hover { - color: var(--color-primary); -} - -.drag-handle:active { - cursor: grabbing; -} - -.sortable-list { - position: relative; - min-height: 100px; -} - -.pending-queue-list { - position: 
relative; -} - -.download-header { - display: flex; - justify-content: space-between; - align-items: flex-start; -} - -.download-info h4 { - margin: 0 0 var(--spacing-xs) 0; - font-size: var(--font-size-subtitle); - color: var(--color-text-primary); -} - -.download-info p { - margin: 0 0 var(--spacing-xs) 0; - color: var(--color-text-secondary); - font-size: var(--font-size-body); -} - -.download-info small { - color: var(--color-text-tertiary); - font-size: var(--font-size-caption); -} - -.error-message { - color: var(--color-error); - font-weight: 500; -} - -.download-actions { - display: flex; - gap: var(--spacing-xs); - align-items: center; -} - -.priority-indicator { - color: var(--color-accent); - margin-right: var(--spacing-sm); -} - -/* Queue Position */ -.queue-position { - position: absolute; - top: var(--spacing-sm); - left: 48px; - background: var(--color-warning); - color: white; - width: 28px; - height: 28px; - border-radius: 50%; - display: flex; - align-items: center; - justify-content: center; - font-size: var(--font-size-caption); - font-weight: 600; -} - -.download-card.pending .download-info { - margin-left: 80px; -} - -.download-card.pending .download-header { - padding-left: 0; -} - -.empty-state small { - display: block; - margin-top: var(--spacing-sm); - font-size: var(--font-size-small); - opacity: 0.7; -} - -/* Progress Bars */ -.download-progress { - margin-top: var(--spacing-lg); -} - -.progress-bar { - width: 100%; - height: 8px; - background: var(--color-border); - border-radius: 4px; - overflow: hidden; - margin-bottom: var(--spacing-sm); -} - -.progress-fill { - height: 100%; - background: linear-gradient(90deg, var(--color-primary), var(--color-accent)); - border-radius: 4px; - transition: width 0.3s ease; -} - -.progress-info { - display: flex; - justify-content: space-between; - align-items: center; - font-size: var(--font-size-caption); - color: var(--color-text-secondary); -} - -.download-speed { - color: var(--color-primary); - font-weight: 500; -} - -/* Empty States */ -.empty-state { - text-align: center; - padding: var(--spacing-xxl); - color: var(--color-text-tertiary); -} - -.empty-state i { - font-size: 3rem; - margin-bottom: var(--spacing-md); - opacity: 0.5; -} - -.empty-state p { - margin: 0; - font-size: var(--font-size-subtitle); -} - -/* Text Color Utilities */ -.text-primary { - color: var(--color-primary); -} - -.text-success { - color: var(--color-success); -} - -.text-warning { - color: var(--color-warning); -} - -.text-error { - color: var(--color-error); -} - -/* Dark Theme Adjustments for Queue */ -[data-theme="dark"] .stat-card { - background: var(--color-surface-dark); - border-color: var(--color-border-dark); -} - -[data-theme="dark"] .stat-card:hover { - background: var(--color-surface-hover-dark); -} - -[data-theme="dark"] .speed-eta-section { - background: var(--color-surface-dark); - border-color: var(--color-border-dark); -} - -[data-theme="dark"] .download-card { - background: var(--color-surface-dark); - border-color: var(--color-border-dark); -} - -[data-theme="dark"] .download-card:hover { - background: var(--color-surface-hover-dark); -} - -[data-theme="dark"] .section-header { - border-bottom-color: var(--color-border-dark); -} - -/* Responsive Design for Queue */ -@media (max-width: 768px) { - .stats-grid { - grid-template-columns: repeat(2, 1fr); - gap: var(--spacing-md); - } - - .speed-eta-section { - flex-direction: column; - gap: var(--spacing-lg); - text-align: center; - } - - .speed-info { - justify-content: 
center; - } - - .section-header { - flex-direction: column; - align-items: stretch; - gap: var(--spacing-md); - } - - .download-header { - flex-direction: column; - gap: var(--spacing-md); - } - - .download-actions { - justify-content: flex-end; - } -} - -/* Process Status Indicators */ -.process-status { - display: flex; - gap: var(--spacing-sm); - align-items: center; -} - -.status-indicator { - display: flex; - align-items: center; - gap: var(--spacing-sm); - padding: var(--spacing-sm) var(--spacing-md); - background: var(--color-background-subtle); - border-radius: var(--border-radius); - border: 1px solid var(--color-border); - font-size: var(--font-size-caption); - color: var(--color-text-secondary); - transition: all var(--animation-duration-normal) var(--animation-easing-standard); - min-width: 0; - flex-shrink: 0; -} - -.status-indicator:hover { - background: var(--color-background-hover); - border-color: var(--color-accent); - color: var(--color-text-primary); -} - -.status-indicator i { - font-size: 24px; - /* 2x bigger: 12px -> 24px */ - transition: all var(--animation-duration-normal) var(--animation-easing-standard); -} - -/* Rescan icon specific styling */ -#rescan-status i { - color: var(--color-text-disabled); - /* Gray when idle */ -} - -#rescan-status.running i { - color: #22c55e; - /* Green when running */ - animation: iconPulse 2s infinite; -} - -/* Status text removed - using tooltips only */ - -.status-dot { - width: 8px; - height: 8px; - border-radius: 50%; - transition: all var(--animation-duration-normal) var(--animation-easing-standard); -} - -.status-dot.idle { - background-color: var(--color-text-disabled); -} - -.status-dot.running { - background-color: var(--color-accent); - animation: pulse 2s infinite; -} - -.status-dot.error { - background-color: #e74c3c; -} - -@keyframes pulse { - - 0%, - 100% { - opacity: 1; - transform: scale(1); - } - - 50% { - opacity: 0.5; - transform: scale(1.2); - } -} - -@keyframes iconPulse { - - 0%, - 100% { - opacity: 1; - transform: scale(1) rotate(0deg); - } - - 50% { - opacity: 0.7; - transform: scale(1.1) rotate(180deg); - } -} - -/* Process status in mobile view */ -@media (max-width: 768px) { - .process-status { - order: -1; - margin-right: 0; - margin-bottom: var(--spacing-sm); - } - - .status-indicator { - font-size: 11px; - padding: 6px 8px; - gap: 4px; - } - - .status-indicator i { - font-size: 20px; - /* Maintain 2x scale for mobile: was 14px -> 20px */ - } -} - -/* Scheduler Configuration */ -.config-section { - border-top: 1px solid var(--color-divider); - margin-top: var(--spacing-lg); - padding-top: var(--spacing-lg); -} - -.config-section h4 { - margin: 0 0 var(--spacing-md) 0; - font-size: var(--font-size-subtitle); - font-weight: 600; - color: var(--color-text-primary); -} - -.checkbox-label { - display: flex; - align-items: center; - gap: var(--spacing-sm); - cursor: pointer; - user-select: none; -} - -.checkbox-label input[type="checkbox"] { - display: none; -} - -.checkbox-custom { - display: inline-block; - width: 18px; - height: 18px; - min-width: 18px; - min-height: 18px; - flex-shrink: 0; - border: 2px solid var(--color-border); - border-radius: 4px; - background: var(--color-background); - position: relative; - transition: all var(--animation-duration-fast) var(--animation-easing-standard); -} - -.checkbox-label input[type="checkbox"]:checked+.checkbox-custom { - background: var(--color-accent); - border-color: var(--color-accent); -} - -.checkbox-label 
input[type="checkbox"]:checked+.checkbox-custom::after { - content: ''; - position: absolute; - left: 4px; - top: 1px; - width: 6px; - height: 10px; - border: solid white; - border-width: 0 2px 2px 0; - transform: rotate(45deg); -} - -.checkbox-label:hover .checkbox-custom { - border-color: var(--color-accent); -} - -.input-field { - width: 120px; - padding: var(--spacing-xs) var(--spacing-sm); - border: 1px solid var(--color-border); - border-radius: var(--border-radius); - background: var(--color-background); - color: var(--color-text-primary); - font-size: var(--font-size-body); - transition: border-color var(--animation-duration-fast) var(--animation-easing-standard); -} - -.input-field:focus { - outline: none; - border-color: var(--color-accent); -} - -.input-group { - display: flex; - align-items: center; - gap: var(--spacing-xs); -} - -.input-group .input-field { - flex: 1; - width: auto; -} - -.input-group .btn { - flex-shrink: 0; -} - -.config-value input[readonly] { - background-color: var(--color-bg-secondary); - cursor: not-allowed; -} - -[data-theme="dark"] .config-value input[readonly] { - background-color: var(--color-bg-secondary-dark); -} - -.scheduler-info { - background: var(--color-background-subtle); - border-radius: var(--border-radius); - padding: var(--spacing-md); - margin: var(--spacing-sm) 0; -} - -.info-row { - display: flex; - justify-content: space-between; - align-items: center; - margin-bottom: var(--spacing-xs); -} - -.info-row:last-child { - margin-bottom: 0; -} - -.info-value { - font-weight: 500; - color: var(--color-text-secondary); -} - -.status-badge { - padding: 2px 8px; - border-radius: 12px; - font-size: var(--font-size-caption); - font-weight: 600; -} - -.status-badge.running { - background: var(--color-accent); - color: white; -} - -.status-badge.stopped { - background: var(--color-text-disabled); - color: white; -} - -.config-actions { - display: flex; - gap: var(--spacing-sm); - margin-top: var(--spacing-md); - flex-wrap: wrap; -} - -.config-actions .btn { - flex: 1; - min-width: 140px; -} - -#rescan-time-config { - margin-left: var(--spacing-lg); - opacity: 0.6; - transition: opacity var(--animation-duration-normal) var(--animation-easing-standard); -} - -#rescan-time-config.enabled { - opacity: 1; -} - -/* Mobile adjustments for scheduler config */ -@media (max-width: 768px) { - .config-actions { - flex-direction: column; - } - - .config-actions .btn { - flex: none; - width: 100%; - } - - .info-row { - flex-direction: column; - align-items: flex-start; - gap: 4px; - } -} - -/* Logging configuration styles */ -.log-files-container { - max-height: 200px; - overflow-y: auto; - border: 1px solid var(--border-color); - border-radius: 6px; - padding: 8px; - margin-top: 8px; -} - -.log-file-item { - display: flex; - justify-content: space-between; - align-items: center; - padding: 8px; - border-bottom: 1px solid var(--border-color); - font-size: 0.9em; -} - -.log-file-item:last-child { - border-bottom: none; -} - -.log-file-info { - flex: 1; -} - -.log-file-name { - font-weight: 500; - color: var(--text-color); -} - -.log-file-details { - font-size: 0.8em; - color: var(--muted-text); - margin-top: 2px; -} - -.log-file-actions { - display: flex; - gap: 4px; -} - -.log-file-actions .btn { - padding: 4px 8px; - font-size: 0.8em; - min-width: auto; -} - -.log-file-actions .btn-xs { - padding: 2px 6px; - font-size: 0.75em; -} - -/* Configuration management styles */ -.config-description { - font-size: 0.9em; - color: var(--muted-text); - margin: 4px 0 
8px 0; - line-height: 1.4; -} - -.validation-results { - margin: 12px 0; - padding: 12px; - border-radius: 6px; - border: 1px solid var(--border-color); - background: var(--card-bg); -} - -.validation-results.hidden { - display: none; -} - -.validation-error { - color: var(--error-color); - margin: 4px 0; - font-size: 0.9em; -} - -.validation-warning { - color: var(--warning-color); - margin: 4px 0; - font-size: 0.9em; -} - -.validation-success { - color: var(--success-color); - margin: 4px 0; - font-size: 0.9em; -} - -.backup-list { - max-height: 200px; - overflow-y: auto; - border: 1px solid var(--border-color); - border-radius: 6px; - margin: 8px 0; -} - -.backup-item { - display: flex; - justify-content: space-between; - align-items: center; - padding: 8px 12px; - border-bottom: 1px solid var(--border-color); - font-size: 0.9em; -} - -.backup-item:last-child { - border-bottom: none; -} - -.backup-info { - flex: 1; -} - -.backup-name { - font-weight: 500; - color: var(--text-color); -} - -.backup-details { - font-size: 0.8em; - color: var(--muted-text); - margin-top: 2px; -} - -.backup-actions { - display: flex; - gap: 4px; -} - -.backup-actions .btn { - padding: 4px 8px; - font-size: 0.8em; -} \ No newline at end of file +/** + * AniWorld - Main Stylesheet + * + * Entry point for all CSS modules. + * Uses @import to load modular CSS files. + */ + +/* Base */ +@import "base/variables.css"; +@import "base/reset.css"; +@import "base/typography.css"; + +/* Components */ +@import "components/buttons.css"; +@import "components/cards.css"; +@import "components/forms.css"; +@import "components/modals.css"; +@import "components/navigation.css"; +@import "components/progress.css"; +@import "components/notifications.css"; +@import "components/tables.css"; +@import "components/status.css"; + +/* Pages */ +@import "pages/login.css"; +@import "pages/index.css"; +@import "pages/queue.css"; + +/* Utilities (load last to allow overrides) */ +@import "utilities/animations.css"; +@import "utilities/responsive.css"; +@import "utilities/helpers.css"; diff --git a/src/server/web/static/css/utilities/animations.css b/src/server/web/static/css/utilities/animations.css new file mode 100644 index 0000000..1976b5d --- /dev/null +++ b/src/server/web/static/css/utilities/animations.css @@ -0,0 +1,160 @@ +/** + * AniWorld - Animation Styles + * + * Keyframes and animation utility classes. 
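+ *
+ * Usage sketch (hypothetical markup): apply a utility class directly to an
+ * element, e.g. <div class="toast animate-slide-in">Saved!</div>.
+ * Durations and easing come from --transition-duration and
+ * --transition-easing, assumed to be defined in base/variables.css.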
+ */ + +/* Slide in animation */ +@keyframes slideIn { + from { + transform: translateX(100%); + opacity: 0; + } + + to { + transform: translateX(0); + opacity: 1; + } +} + +/* Fade in animation */ +@keyframes fadeIn { + from { + opacity: 0; + } + + to { + opacity: 1; + } +} + +/* Fade out animation */ +@keyframes fadeOut { + from { + opacity: 1; + } + + to { + opacity: 0; + } +} + +/* Slide up animation */ +@keyframes slideUp { + from { + transform: translateY(20px); + opacity: 0; + } + + to { + transform: translateY(0); + opacity: 1; + } +} + +/* Slide down animation */ +@keyframes slideDown { + from { + transform: translateY(-20px); + opacity: 0; + } + + to { + transform: translateY(0); + opacity: 1; + } +} + +/* Scale in animation */ +@keyframes scaleIn { + from { + transform: scale(0.9); + opacity: 0; + } + + to { + transform: scale(1); + opacity: 1; + } +} + +/* Spin animation for loading */ +@keyframes spin { + from { + transform: rotate(0deg); + } + + to { + transform: rotate(360deg); + } +} + +/* Bounce animation */ +@keyframes bounce { + 0%, 20%, 50%, 80%, 100% { + transform: translateY(0); + } + + 40% { + transform: translateY(-10px); + } + + 60% { + transform: translateY(-5px); + } +} + +/* Pulse animation */ +@keyframes pulsate { + 0% { + transform: scale(1); + opacity: 1; + } + + 50% { + transform: scale(1.05); + opacity: 0.8; + } + + 100% { + transform: scale(1); + opacity: 1; + } +} + +/* Animation utility classes */ +.animate-slide-in { + animation: slideIn var(--transition-duration) var(--transition-easing); +} + +.animate-fade-in { + animation: fadeIn var(--transition-duration) var(--transition-easing); +} + +.animate-fade-out { + animation: fadeOut var(--transition-duration) var(--transition-easing); +} + +.animate-slide-up { + animation: slideUp var(--transition-duration) var(--transition-easing); +} + +.animate-slide-down { + animation: slideDown var(--transition-duration) var(--transition-easing); +} + +.animate-scale-in { + animation: scaleIn var(--transition-duration) var(--transition-easing); +} + +.animate-spin { + animation: spin 1s linear infinite; +} + +.animate-bounce { + animation: bounce 1s ease; +} + +.animate-pulse { + animation: pulsate 2s ease-in-out infinite; +} diff --git a/src/server/web/static/css/utilities/helpers.css b/src/server/web/static/css/utilities/helpers.css new file mode 100644 index 0000000..8f8c21c --- /dev/null +++ b/src/server/web/static/css/utilities/helpers.css @@ -0,0 +1,368 @@ +/** + * AniWorld - Helper Utilities + * + * Utility classes for visibility, spacing, flexbox, and text. 
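+ *
+ * Usage sketch (hypothetical markup): helpers compose directly in class
+ * attributes, e.g. <div class="flex justify-between align-center mb-3">.
+ * Every rule uses !important so helpers win over component styles; this
+ * file is loaded last (see the @import order in the main stylesheet).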
+ */ + +/* Display utilities */ +.hidden { + display: none !important; +} + +.visible { + visibility: visible !important; +} + +.invisible { + visibility: hidden !important; +} + +.block { + display: block !important; +} + +.inline-block { + display: inline-block !important; +} + +.inline { + display: inline !important; +} + +.flex { + display: flex !important; +} + +.inline-flex { + display: inline-flex !important; +} + +.grid { + display: grid !important; +} + +/* Flexbox utilities */ +.flex-row { + flex-direction: row !important; +} + +.flex-column { + flex-direction: column !important; +} + +.flex-wrap { + flex-wrap: wrap !important; +} + +.flex-nowrap { + flex-wrap: nowrap !important; +} + +.justify-start { + justify-content: flex-start !important; +} + +.justify-end { + justify-content: flex-end !important; +} + +.justify-center { + justify-content: center !important; +} + +.justify-between { + justify-content: space-between !important; +} + +.justify-around { + justify-content: space-around !important; +} + +.align-start { + align-items: flex-start !important; +} + +.align-end { + align-items: flex-end !important; +} + +.align-center { + align-items: center !important; +} + +.align-stretch { + align-items: stretch !important; +} + +.flex-1 { + flex: 1 !important; +} + +.flex-auto { + flex: auto !important; +} + +.flex-none { + flex: none !important; +} + +/* Text alignment */ +.text-left { + text-align: left !important; +} + +.text-center { + text-align: center !important; +} + +.text-right { + text-align: right !important; +} + +/* Text transformation */ +.text-uppercase { + text-transform: uppercase !important; +} + +.text-lowercase { + text-transform: lowercase !important; +} + +.text-capitalize { + text-transform: capitalize !important; +} + +/* Font weight */ +.font-normal { + font-weight: 400 !important; +} + +.font-medium { + font-weight: 500 !important; +} + +.font-semibold { + font-weight: 600 !important; +} + +.font-bold { + font-weight: 700 !important; +} + +/* Margins */ +.m-0 { + margin: 0 !important; +} + +.mt-0 { + margin-top: 0 !important; +} + +.mb-0 { + margin-bottom: 0 !important; +} + +.ml-0 { + margin-left: 0 !important; +} + +.mr-0 { + margin-right: 0 !important; +} + +.mb-1 { + margin-bottom: var(--spacing-xs) !important; +} + +.mb-2 { + margin-bottom: var(--spacing-sm) !important; +} + +.mb-3 { + margin-bottom: var(--spacing-md) !important; +} + +.mb-4 { + margin-bottom: var(--spacing-lg) !important; +} + +.mt-1 { + margin-top: var(--spacing-xs) !important; +} + +.mt-2 { + margin-top: var(--spacing-sm) !important; +} + +.mt-3 { + margin-top: var(--spacing-md) !important; +} + +.mt-4 { + margin-top: var(--spacing-lg) !important; +} + +.mx-auto { + margin-left: auto !important; + margin-right: auto !important; +} + +/* Padding */ +.p-0 { + padding: 0 !important; +} + +.p-1 { + padding: var(--spacing-xs) !important; +} + +.p-2 { + padding: var(--spacing-sm) !important; +} + +.p-3 { + padding: var(--spacing-md) !important; +} + +.p-4 { + padding: var(--spacing-lg) !important; +} + +/* Width utilities */ +.w-full { + width: 100% !important; +} + +.w-auto { + width: auto !important; +} + +/* Height utilities */ +.h-full { + height: 100% !important; +} + +.h-auto { + height: auto !important; +} + +/* Overflow */ +.overflow-hidden { + overflow: hidden !important; +} + +.overflow-auto { + overflow: auto !important; +} + +.overflow-scroll { + overflow: scroll !important; +} + +/* Position */ +.relative { + position: relative !important; +} + +.absolute { + position: 
absolute !important; +} + +.fixed { + position: fixed !important; +} + +.sticky { + position: sticky !important; +} + +/* Cursor */ +.cursor-pointer { + cursor: pointer !important; +} + +.cursor-not-allowed { + cursor: not-allowed !important; +} + +/* User select */ +.select-none { + user-select: none !important; +} + +.select-text { + user-select: text !important; +} + +.select-all { + user-select: all !important; +} + +/* Border radius */ +.rounded { + border-radius: var(--border-radius-md) !important; +} + +.rounded-lg { + border-radius: var(--border-radius-lg) !important; +} + +.rounded-full { + border-radius: 9999px !important; +} + +.rounded-none { + border-radius: 0 !important; +} + +/* Shadow */ +.shadow { + box-shadow: var(--shadow-card) !important; +} + +.shadow-lg { + box-shadow: var(--shadow-elevated) !important; +} + +.shadow-none { + box-shadow: none !important; +} + +/* Opacity */ +.opacity-0 { + opacity: 0 !important; +} + +.opacity-50 { + opacity: 0.5 !important; +} + +.opacity-100 { + opacity: 1 !important; +} + +/* Transition */ +.transition { + transition: all var(--transition-duration) var(--transition-easing) !important; +} + +.transition-none { + transition: none !important; +} + +/* Z-index */ +.z-0 { + z-index: 0 !important; +} + +.z-10 { + z-index: 10 !important; +} + +.z-50 { + z-index: 50 !important; +} + +.z-100 { + z-index: 100 !important; +} diff --git a/src/server/web/static/css/utilities/responsive.css b/src/server/web/static/css/utilities/responsive.css new file mode 100644 index 0000000..52d2faa --- /dev/null +++ b/src/server/web/static/css/utilities/responsive.css @@ -0,0 +1,117 @@ +/** + * AniWorld - Responsive Styles + * + * Media queries and breakpoint-specific styles. + * Note: Component-specific responsive styles are in their respective files. + * This file contains global responsive utilities and overrides. 
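+ *
+ * Usage sketch (hypothetical markup): pair the visibility helpers to swap
+ * content per breakpoint, e.g.
+ *   <span class="hide-md-down">Desktop label</span>
+ *   <span class="hide-md-up">Mobile label</span>
+ * .hide-md-down hides below 768px; .hide-md-up hides at 768px and above.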
+ */ + +/* Small devices (landscape phones, 576px and up) */ +@media (min-width: 576px) { + .container-sm { + max-width: 540px; + } +} + +/* Medium devices (tablets, 768px and up) */ +@media (min-width: 768px) { + .container-md { + max-width: 720px; + } + + .hide-md-up { + display: none !important; + } +} + +/* Large devices (desktops, 992px and up) */ +@media (min-width: 992px) { + .container-lg { + max-width: 960px; + } + + .hide-lg-up { + display: none !important; + } +} + +/* Extra large devices (large desktops, 1200px and up) */ +@media (min-width: 1200px) { + .container-xl { + max-width: 1140px; + } + + .hide-xl-up { + display: none !important; + } +} + +/* Hide on small screens */ +@media (max-width: 575.98px) { + .hide-sm-down { + display: none !important; + } +} + +/* Hide on medium screens and below */ +@media (max-width: 767.98px) { + .hide-md-down { + display: none !important; + } +} + +/* Hide on large screens and below */ +@media (max-width: 991.98px) { + .hide-lg-down { + display: none !important; + } +} + +/* Print styles */ +@media print { + .no-print { + display: none !important; + } + + .print-only { + display: block !important; + } + + body { + background: white; + color: black; + } + + .header, + .toast-container, + .status-panel { + display: none !important; + } +} + +/* Reduced motion preference */ +@media (prefers-reduced-motion: reduce) { + *, + *::before, + *::after { + animation-duration: 0.01ms !important; + animation-iteration-count: 1 !important; + transition-duration: 0.01ms !important; + scroll-behavior: auto !important; + } +} + +/* High contrast mode */ +@media (prefers-contrast: high) { + :root { + --color-border: #000000; + --color-text-primary: #000000; + --color-text-secondary: #333333; + } + + [data-theme="dark"] { + --color-border: #ffffff; + --color-text-primary: #ffffff; + --color-text-secondary: #cccccc; + } +} diff --git a/src/server/web/static/js/app.js b/src/server/web/static/js/app.js index bcb856a..0397515 100644 --- a/src/server/web/static/js/app.js +++ b/src/server/web/static/js/app.js @@ -26,6 +26,8 @@ class AniWorldApp { this.loadSeries(); this.initTheme(); this.updateConnectionStatus(); + // Check scan status on page load (in case socket connect event is delayed) + this.checkActiveScanStatus(); } async checkAuthentication() { @@ -186,12 +188,16 @@ class AniWorldApp { console.log('Connected to server'); // Subscribe to rooms for targeted updates - this.socket.join('scan_progress'); - this.socket.join('download_progress'); + // Valid rooms: downloads, queue, scan, system, errors + this.socket.join('scan'); this.socket.join('downloads'); + this.socket.join('queue'); this.showToast(this.localization.getText('connected-server'), 'success'); this.updateConnectionStatus(); + + // Check if a scan is currently in progress (e.g., after page reload) + this.checkActiveScanStatus(); }); this.socket.on('disconnect', () => { @@ -201,19 +207,22 @@ class AniWorldApp { this.updateConnectionStatus(); }); - // Scan events - this.socket.on('scan_started', () => { - this.showStatus('Scanning series...', true); + // Scan events - handle new detailed scan progress overlay + this.socket.on('scan_started', (data) => { + console.log('Scan started:', data); + this.showScanProgressOverlay(data); this.updateProcessStatus('rescan', true); }); this.socket.on('scan_progress', (data) => { - this.updateStatus(`Scanning: ${data.folder} (${data.counter})`); + console.log('Scan progress:', data); + this.updateScanProgressOverlay(data); }); // Handle both 'scan_completed' 
(legacy) and 'scan_complete' (new backend) - const handleScanComplete = () => { - this.hideStatus(); + const handleScanComplete = (data) => { + console.log('Scan completed:', data); + this.hideScanProgressOverlay(data); this.showToast('Scan completed successfully', 'success'); this.updateProcessStatus('rescan', false); this.loadSeries(); @@ -410,6 +419,16 @@ class AniWorldApp { this.rescanSeries(); }); + // Click on rescan status indicator to reopen scan overlay + const rescanStatus = document.getElementById('rescan-status'); + if (rescanStatus) { + rescanStatus.addEventListener('click', (e) => { + e.stopPropagation(); + console.log('Rescan status clicked'); + this.reopenScanOverlay(); + }); + } + // Configuration modal document.getElementById('config-btn').addEventListener('click', () => { this.showConfigModal(); @@ -564,7 +583,8 @@ class AniWorldApp { site: anime.site, folder: anime.folder, episodeDict: episodeDict, - missing_episodes: totalMissing + missing_episodes: totalMissing, + has_missing: anime.has_missing || totalMissing > 0 }; }); } else if (data.status === 'success') { @@ -1008,33 +1028,39 @@ class AniWorldApp { async rescanSeries() { try { - this.showToast('Scanning directory...', 'info'); + // Show the overlay immediately before making the API call + this.showScanProgressOverlay({ + directory: 'Starting scan...', + total_items: 0 + }); + this.updateProcessStatus('rescan', true); const response = await this.makeAuthenticatedRequest('/api/anime/rescan', { method: 'POST' }); - if (!response) return; + if (!response) { + this.removeScanProgressOverlay(); + this.updateProcessStatus('rescan', false); + return; + } const data = await response.json(); // Debug logging console.log('Rescan response:', data); console.log('Success value:', data.success, 'Type:', typeof data.success); - if (data.success === true) { - const seriesCount = data.series_count || 0; - this.showToast( - `Rescan complete! Found ${seriesCount} series with missing episodes.`, - 'success' - ); - - // Reload the series list to show the updated data - await this.loadSeries(); - } else { + // Note: The scan progress will be updated via WebSocket events + // The overlay will be closed when scan_completed is received + if (data.success !== true) { + this.removeScanProgressOverlay(); + this.updateProcessStatus('rescan', false); this.showToast(`Rescan error: ${data.message}`, 'error'); } } catch (error) { console.error('Rescan error:', error); + this.removeScanProgressOverlay(); + this.updateProcessStatus('rescan', false); this.showToast('Failed to start rescan', 'error'); } } @@ -1072,6 +1098,314 @@ class AniWorldApp { document.getElementById('status-panel').classList.add('hidden'); } + /** + * Show the scan progress overlay with spinner and initial state + * @param {Object} data - Scan started event data + */ + showScanProgressOverlay(data) { + // Remove existing overlay if present + this.removeScanProgressOverlay(); + + // Store total items for progress calculation + this.scanTotalItems = data?.total_items || 0; + + // Store last scan data for reopening + this._lastScanData = data; + + // Create overlay element + const overlay = document.createElement('div'); + overlay.id = 'scan-progress-overlay'; + overlay.className = 'scan-progress-overlay'; + + const totalDisplay = this.scanTotalItems > 0 ? this.scanTotalItems : '...'; + + overlay.innerHTML = ` +
+            <div class="scan-progress-container">
+                <div class="scan-progress-header">
+                    <i class="fas fa-spinner fa-spin"></i>
+                    <span class="scan-title-text">Scanning Library</span>
+                </div>
+                <div class="scan-progress-bar-track">
+                    <div class="scan-progress-bar" id="scan-progress-bar"></div>
+                </div>
+                <div class="scan-progress-count">
+                    <span id="scan-current-count">0</span> / <span id="scan-total-count">${totalDisplay}</span> directories
+                </div>
+                <div class="scan-stats">
+                    <div class="scan-stat">
+                        <span class="scan-stat-value" id="scan-directories-count">0</span>
+                        <span class="scan-stat-label">Scanned</span>
+                    </div>
+                    <div class="scan-stat">
+                        <span class="scan-stat-value" id="scan-files-count">0</span>
+                        <span class="scan-stat-label">Series Found</span>
+                    </div>
+                </div>
+                <div class="scan-current-directory">
+                    <span class="scan-current-label">Current:</span>
+                    <span id="scan-current-path">${this.escapeHtml(data?.directory || 'Initializing...')}</span>
+                </div>
+                <div id="scan-elapsed-time" class="scan-elapsed hidden">
+                    Elapsed: <span id="scan-elapsed-value"></span>
+                </div>
+            </div>
+ `; + + document.body.appendChild(overlay); + + // Add click-outside-to-close handler + overlay.addEventListener('click', (e) => { + // Only close if clicking the overlay background, not the container + if (e.target === overlay) { + this.removeScanProgressOverlay(); + } + }); + + // Trigger animation by adding visible class after a brief delay + requestAnimationFrame(() => { + overlay.classList.add('visible'); + }); + } + + /** + * Update the scan progress overlay with current progress + * @param {Object} data - Scan progress event data + */ + updateScanProgressOverlay(data) { + const overlay = document.getElementById('scan-progress-overlay'); + if (!overlay) return; + + // Update total items if provided (in case it wasn't available at start) + if (data.total_items && data.total_items > 0) { + this.scanTotalItems = data.total_items; + const totalCount = document.getElementById('scan-total-count'); + if (totalCount) { + totalCount.textContent = this.scanTotalItems; + } + } + + // Update progress bar + const progressBar = document.getElementById('scan-progress-bar'); + if (progressBar && this.scanTotalItems > 0 && data.directories_scanned !== undefined) { + const percentage = Math.min(100, (data.directories_scanned / this.scanTotalItems) * 100); + progressBar.style.width = `${percentage}%`; + } + + // Update current/total count display + const currentCount = document.getElementById('scan-current-count'); + if (currentCount && data.directories_scanned !== undefined) { + currentCount.textContent = data.directories_scanned; + } + + // Update directories count + const dirCount = document.getElementById('scan-directories-count'); + if (dirCount && data.directories_scanned !== undefined) { + dirCount.textContent = data.directories_scanned; + } + + // Update files/series count + const filesCount = document.getElementById('scan-files-count'); + if (filesCount && data.files_found !== undefined) { + filesCount.textContent = data.files_found; + } + + // Update current directory (truncate if too long) + const currentPath = document.getElementById('scan-current-path'); + if (currentPath && data.current_directory) { + const maxLength = 50; + let displayPath = data.current_directory; + if (displayPath.length > maxLength) { + displayPath = '...' 
+ displayPath.slice(-maxLength + 3); + } + currentPath.textContent = displayPath; + currentPath.title = data.current_directory; // Full path on hover + } + } + + /** + * Hide the scan progress overlay with completion summary + * @param {Object} data - Scan completed event data + */ + hideScanProgressOverlay(data) { + const overlay = document.getElementById('scan-progress-overlay'); + if (!overlay) return; + + const container = overlay.querySelector('.scan-progress-container'); + if (container) { + container.classList.add('completed'); + } + + // Update title + const titleText = overlay.querySelector('.scan-title-text'); + if (titleText) { + titleText.textContent = 'Scan Complete'; + } + + // Complete the progress bar + const progressBar = document.getElementById('scan-progress-bar'); + if (progressBar) { + progressBar.style.width = '100%'; + } + + // Update final stats + if (data) { + const dirCount = document.getElementById('scan-directories-count'); + if (dirCount && data.total_directories !== undefined) { + dirCount.textContent = data.total_directories; + } + + const filesCount = document.getElementById('scan-files-count'); + if (filesCount && data.total_files !== undefined) { + filesCount.textContent = data.total_files; + } + + // Update progress text to show final count + const currentCount = document.getElementById('scan-current-count'); + const totalCount = document.getElementById('scan-total-count'); + if (currentCount && data.total_directories !== undefined) { + currentCount.textContent = data.total_directories; + } + if (totalCount && data.total_directories !== undefined) { + totalCount.textContent = data.total_directories; + } + + // Show elapsed time + const elapsedTimeEl = document.getElementById('scan-elapsed-time'); + const elapsedValueEl = document.getElementById('scan-elapsed-value'); + if (elapsedTimeEl && elapsedValueEl && data.elapsed_seconds !== undefined) { + elapsedValueEl.textContent = `${data.elapsed_seconds.toFixed(1)}s`; + elapsedTimeEl.classList.remove('hidden'); + } + + // Update current directory to show completion message + const currentPath = document.getElementById('scan-current-path'); + if (currentPath) { + currentPath.textContent = 'Scan finished successfully'; + } + } + + // Auto-dismiss after 3 seconds + setTimeout(() => { + this.removeScanProgressOverlay(); + }, 3000); + } + + /** + * Remove the scan progress overlay from the DOM + */ + removeScanProgressOverlay() { + const overlay = document.getElementById('scan-progress-overlay'); + if (overlay) { + overlay.classList.remove('visible'); + // Wait for fade out animation before removing + setTimeout(() => { + if (overlay.parentElement) { + overlay.remove(); + } + }, 300); + } + } + + /** + * Reopen the scan progress overlay if a scan is in progress + * Called when user clicks on the rescan status indicator + */ + async reopenScanOverlay() { + // Check if overlay already exists + const existingOverlay = document.getElementById('scan-progress-overlay'); + if (existingOverlay) { + // Overlay is already open, do nothing + return; + } + + // Check if scan is running via API + try { + const response = await this.makeAuthenticatedRequest('/api/anime/scan/status'); + if (!response || !response.ok) { + console.log('Could not fetch scan status'); + return; + } + + const data = await response.json(); + console.log('Scan status for reopen:', data); + + if (data.is_scanning) { + // A scan is in progress, show the overlay + this.showScanProgressOverlay({ + directory: data.directory, + total_items: data.total_items + 
}); + + // Update with current progress + this.updateScanProgressOverlay({ + directories_scanned: data.directories_scanned, + files_found: data.directories_scanned, + current_directory: data.current_directory, + total_items: data.total_items + }); + } + } catch (error) { + console.error('Error checking scan status for reopen:', error); + } + } + + /** + * Check if a scan is currently in progress (useful after page reload) + * and show the progress overlay if so + */ + async checkActiveScanStatus() { + try { + const response = await this.makeAuthenticatedRequest('/api/anime/scan/status'); + if (!response || !response.ok) { + console.log('Could not fetch scan status, response:', response?.status); + return; + } + + const data = await response.json(); + console.log('Scan status check result:', data); + + if (data.is_scanning) { + console.log('Scan is active, updating UI indicators'); + + // Update the process status indicator FIRST before showing overlay + // This ensures the header icon shows the running state immediately + this.updateProcessStatus('rescan', true); + + // A scan is in progress, show the overlay + this.showScanProgressOverlay({ + directory: data.directory, + total_items: data.total_items + }); + + // Update with current progress + this.updateScanProgressOverlay({ + directories_scanned: data.directories_scanned, + files_found: data.directories_scanned, + current_directory: data.current_directory, + total_items: data.total_items + }); + + // Double-check the status indicator was updated + const statusElement = document.getElementById('rescan-status'); + if (statusElement) { + console.log('Rescan status element classes:', statusElement.className); + } else { + console.warn('Rescan status element not found in DOM'); + } + } else { + console.log('No active scan detected'); + // Ensure indicator shows idle state + this.updateProcessStatus('rescan', false); + } + } catch (error) { + console.error('Error checking scan status:', error); + } + } + showLoading() { document.getElementById('loading-overlay').classList.remove('hidden'); } @@ -1146,10 +1480,16 @@ class AniWorldApp { updateProcessStatus(processName, isRunning, hasError = false) { const statusElement = document.getElementById(`${processName}-status`); - if (!statusElement) return; + if (!statusElement) { + console.warn(`Process status element not found: ${processName}-status`); + return; + } const statusDot = statusElement.querySelector('.status-dot'); - if (!statusDot) return; + if (!statusDot) { + console.warn(`Status dot not found in ${processName}-status element`); + return; + } // Remove all status classes from both dot and element statusDot.classList.remove('idle', 'running', 'error'); @@ -1171,6 +1511,8 @@ class AniWorldApp { statusElement.classList.add('idle'); statusElement.title = `${displayName} is idle`; } + + console.log(`Process status updated: ${processName} = ${isRunning ? 'running' : (hasError ? 'error' : 'idle')}`); } async showConfigModal() { diff --git a/src/server/web/static/js/index/advanced-config.js b/src/server/web/static/js/index/advanced-config.js new file mode 100644 index 0000000..56d6264 --- /dev/null +++ b/src/server/web/static/js/index/advanced-config.js @@ -0,0 +1,74 @@ +/** + * AniWorld - Advanced Config Module + * + * Handles advanced configuration settings like concurrent downloads, + * timeouts, and debug mode. 
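+ *
+ * Usage sketch (assumed wiring): the config modal is expected to contain
+ * inputs with ids max-concurrent-downloads, provider-timeout and
+ * enable-debug-mode; call AniWorld.AdvancedConfig.load() when the modal
+ * opens and AniWorld.AdvancedConfig.save() when its save button is clicked.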
+ * + * Dependencies: constants.js, api-client.js, ui-utils.js + */ + +var AniWorld = window.AniWorld || {}; + +AniWorld.AdvancedConfig = (function() { + 'use strict'; + + const API = AniWorld.Constants.API; + + /** + * Load advanced configuration + */ + async function load() { + try { + const response = await AniWorld.ApiClient.get(API.CONFIG_SECTION + '/advanced'); + if (!response) return; + + const data = await response.json(); + + if (data.success) { + const config = data.config; + document.getElementById('max-concurrent-downloads').value = config.max_concurrent_downloads || 3; + document.getElementById('provider-timeout').value = config.provider_timeout || 30; + document.getElementById('enable-debug-mode').checked = config.enable_debug_mode === true; + } + } catch (error) { + console.error('Error loading advanced config:', error); + } + } + + /** + * Save advanced configuration + */ + async function save() { + try { + const config = { + max_concurrent_downloads: parseInt(document.getElementById('max-concurrent-downloads').value), + provider_timeout: parseInt(document.getElementById('provider-timeout').value), + enable_debug_mode: document.getElementById('enable-debug-mode').checked + }; + + const response = await AniWorld.ApiClient.request(API.CONFIG_SECTION + '/advanced', { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(config) + }); + + if (!response) return; + const data = await response.json(); + + if (data.success) { + AniWorld.UI.showToast('Advanced configuration saved successfully', 'success'); + } else { + AniWorld.UI.showToast('Failed to save config: ' + data.error, 'error'); + } + } catch (error) { + console.error('Error saving advanced config:', error); + AniWorld.UI.showToast('Failed to save advanced configuration', 'error'); + } + } + + // Public API + return { + load: load, + save: save + }; +})(); diff --git a/src/server/web/static/js/index/app-init.js b/src/server/web/static/js/index/app-init.js new file mode 100644 index 0000000..6a979fa --- /dev/null +++ b/src/server/web/static/js/index/app-init.js @@ -0,0 +1,103 @@ +/** + * AniWorld - Index Page Application Initializer + * + * Main entry point for the index page. Initializes all modules. 
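+ * Initialization order: auth check, theme, WebSocket, socket handlers, page modules, then the initial series load.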
+ * + * Dependencies: All shared and index modules + */ + +var AniWorld = window.AniWorld || {}; + +AniWorld.IndexApp = (function() { + 'use strict'; + + let localization = null; + + /** + * Initialize the index page application + */ + async function init() { + console.log('AniWorld Index App initializing...'); + + // Initialize localization if available + if (typeof Localization !== 'undefined') { + localization = new Localization(); + } + + // Check authentication first + const isAuthenticated = await AniWorld.Auth.checkAuth(); + if (!isAuthenticated) { + return; // Auth module handles redirect + } + + // Initialize theme + AniWorld.Theme.init(); + + // Initialize WebSocket connection + AniWorld.WebSocketClient.init(); + + // Initialize socket event handlers for this page + AniWorld.IndexSocketHandler.init(localization); + + // Initialize page modules + AniWorld.SeriesManager.init(); + AniWorld.SelectionManager.init(); + AniWorld.Search.init(); + AniWorld.ScanManager.init(); + AniWorld.ConfigManager.init(); + + // Bind global events + bindGlobalEvents(); + + // Load initial data + await AniWorld.SeriesManager.loadSeries(); + + console.log('AniWorld Index App initialized successfully'); + } + + /** + * Bind global event handlers + */ + function bindGlobalEvents() { + // Theme toggle + const themeToggle = document.getElementById('theme-toggle'); + if (themeToggle) { + themeToggle.addEventListener('click', function() { + AniWorld.Theme.toggle(); + }); + } + + // Logout button + const logoutBtn = document.getElementById('logout-btn'); + if (logoutBtn) { + logoutBtn.addEventListener('click', function() { + AniWorld.Auth.logout(AniWorld.UI.showToast); + }); + } + } + + /** + * Get localization instance + */ + function getLocalization() { + return localization; + } + + // Public API + return { + init: init, + getLocalization: getLocalization + }; +})(); + +// Initialize the application when DOM is loaded +document.addEventListener('DOMContentLoaded', function() { + AniWorld.IndexApp.init(); +}); + +// Expose app globally for inline event handlers (backwards compatibility) +window.app = { + addSeries: function(link, name) { + return AniWorld.Search.addSeries(link, name); + } +}; diff --git a/src/server/web/static/js/index/config-manager.js b/src/server/web/static/js/index/config-manager.js new file mode 100644 index 0000000..d7b89c1 --- /dev/null +++ b/src/server/web/static/js/index/config-manager.js @@ -0,0 +1,229 @@ +/** + * AniWorld - Config Manager Module + * + * Orchestrates configuration modal and delegates to specialized config modules. 
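+ * Opening the modal loads the current anime status plus the scheduler, logging, and advanced sections before it is shown.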
+ * + * Dependencies: constants.js, api-client.js, ui-utils.js, + * scheduler-config.js, logging-config.js, advanced-config.js, main-config.js + */ + +var AniWorld = window.AniWorld || {}; + +AniWorld.ConfigManager = (function() { + 'use strict'; + + const API = AniWorld.Constants.API; + + /** + * Initialize the config manager + */ + function init() { + bindEvents(); + } + + /** + * Bind UI events + */ + function bindEvents() { + // Config modal + const configBtn = document.getElementById('config-btn'); + if (configBtn) { + configBtn.addEventListener('click', showConfigModal); + } + + const closeConfig = document.getElementById('close-config'); + if (closeConfig) { + closeConfig.addEventListener('click', hideConfigModal); + } + + const configModal = document.querySelector('#config-modal .modal-overlay'); + if (configModal) { + configModal.addEventListener('click', hideConfigModal); + } + + // Scheduler configuration + bindSchedulerEvents(); + + // Logging configuration + bindLoggingEvents(); + + // Advanced configuration + bindAdvancedEvents(); + + // Main configuration + bindMainEvents(); + + // Status panel + const closeStatus = document.getElementById('close-status'); + if (closeStatus) { + closeStatus.addEventListener('click', hideStatus); + } + } + + /** + * Bind scheduler-related events + */ + function bindSchedulerEvents() { + const schedulerEnabled = document.getElementById('scheduled-rescan-enabled'); + if (schedulerEnabled) { + schedulerEnabled.addEventListener('change', AniWorld.SchedulerConfig.toggleTimeInput); + } + + const saveScheduler = document.getElementById('save-scheduler-config'); + if (saveScheduler) { + saveScheduler.addEventListener('click', AniWorld.SchedulerConfig.save); + } + + const testScheduler = document.getElementById('test-scheduled-rescan'); + if (testScheduler) { + testScheduler.addEventListener('click', AniWorld.SchedulerConfig.testRescan); + } + } + + /** + * Bind logging-related events + */ + function bindLoggingEvents() { + const saveLogging = document.getElementById('save-logging-config'); + if (saveLogging) { + saveLogging.addEventListener('click', AniWorld.LoggingConfig.save); + } + + const testLogging = document.getElementById('test-logging'); + if (testLogging) { + testLogging.addEventListener('click', AniWorld.LoggingConfig.testLogging); + } + + const refreshLogs = document.getElementById('refresh-log-files'); + if (refreshLogs) { + refreshLogs.addEventListener('click', AniWorld.LoggingConfig.loadLogFiles); + } + + const cleanupLogs = document.getElementById('cleanup-logs'); + if (cleanupLogs) { + cleanupLogs.addEventListener('click', AniWorld.LoggingConfig.cleanupLogs); + } + } + + /** + * Bind advanced config events + */ + function bindAdvancedEvents() { + const saveAdvanced = document.getElementById('save-advanced-config'); + if (saveAdvanced) { + saveAdvanced.addEventListener('click', AniWorld.AdvancedConfig.save); + } + } + + /** + * Bind main configuration events + */ + function bindMainEvents() { + const createBackup = document.getElementById('create-config-backup'); + if (createBackup) { + createBackup.addEventListener('click', AniWorld.MainConfig.createBackup); + } + + const viewBackups = document.getElementById('view-config-backups'); + if (viewBackups) { + viewBackups.addEventListener('click', AniWorld.MainConfig.viewBackups); + } + + const exportConfig = document.getElementById('export-config'); + if (exportConfig) { + exportConfig.addEventListener('click', AniWorld.MainConfig.exportConfig); + } + + const validateConfig = 
document.getElementById('validate-config'); + if (validateConfig) { + validateConfig.addEventListener('click', AniWorld.MainConfig.validateConfig); + } + + const resetConfig = document.getElementById('reset-config'); + if (resetConfig) { + resetConfig.addEventListener('click', handleResetConfig); + } + + const saveMain = document.getElementById('save-main-config'); + if (saveMain) { + saveMain.addEventListener('click', AniWorld.MainConfig.save); + } + + const resetMain = document.getElementById('reset-main-config'); + if (resetMain) { + resetMain.addEventListener('click', AniWorld.MainConfig.reset); + } + + const testConnection = document.getElementById('test-connection'); + if (testConnection) { + testConnection.addEventListener('click', AniWorld.MainConfig.testConnection); + } + + const browseDirectory = document.getElementById('browse-directory'); + if (browseDirectory) { + browseDirectory.addEventListener('click', AniWorld.MainConfig.browseDirectory); + } + } + + /** + * Handle reset config with modal refresh + */ + async function handleResetConfig() { + const success = await AniWorld.MainConfig.resetAllConfig(); + if (success) { + setTimeout(function() { + hideConfigModal(); + showConfigModal(); + }, 1000); + } + } + + /** + * Show the configuration modal + */ + async function showConfigModal() { + const modal = document.getElementById('config-modal'); + + try { + // Load current status + const response = await AniWorld.ApiClient.get(API.ANIME_STATUS); + if (!response) return; + const data = await response.json(); + + document.getElementById('anime-directory-input').value = data.directory || ''; + document.getElementById('series-count-input').value = data.series_count || '0'; + + // Load all configuration sections + await AniWorld.SchedulerConfig.load(); + await AniWorld.LoggingConfig.load(); + await AniWorld.AdvancedConfig.load(); + + modal.classList.remove('hidden'); + } catch (error) { + console.error('Error loading configuration:', error); + AniWorld.UI.showToast('Failed to load configuration', 'error'); + } + } + + /** + * Hide the configuration modal + */ + function hideConfigModal() { + document.getElementById('config-modal').classList.add('hidden'); + } + + /** + * Hide status panel + */ + function hideStatus() { + document.getElementById('status-panel').classList.add('hidden'); + } + + // Public API + return { + init: init, + showConfigModal: showConfigModal, + hideConfigModal: hideConfigModal, + hideStatus: hideStatus + }; +})(); diff --git a/src/server/web/static/js/index/logging-config.js b/src/server/web/static/js/index/logging-config.js new file mode 100644 index 0000000..085d1f0 --- /dev/null +++ b/src/server/web/static/js/index/logging-config.js @@ -0,0 +1,278 @@ +/** + * AniWorld - Logging Config Module + * + * Handles logging configuration, log file management, and log viewing. 
+ * + * Dependencies: constants.js, api-client.js, ui-utils.js + */ + +var AniWorld = window.AniWorld || {}; + +AniWorld.LoggingConfig = (function() { + 'use strict'; + + const API = AniWorld.Constants.API; + + /** + * Load logging configuration + */ + async function load() { + try { + const response = await AniWorld.ApiClient.get(API.LOGGING_CONFIG); + if (!response) return; + + const data = await response.json(); + + if (data.success) { + const config = data.config; + + // Set form values + document.getElementById('log-level').value = config.log_level || 'INFO'; + document.getElementById('enable-console-logging').checked = config.enable_console_logging !== false; + document.getElementById('enable-console-progress').checked = config.enable_console_progress === true; + document.getElementById('enable-fail2ban-logging').checked = config.enable_fail2ban_logging !== false; + + // Load log files + await loadLogFiles(); + } + } catch (error) { + console.error('Error loading logging config:', error); + AniWorld.UI.showToast('Failed to load logging configuration', 'error'); + } + } + + /** + * Load log files list + */ + async function loadLogFiles() { + try { + const response = await AniWorld.ApiClient.get(API.LOGGING_FILES); + if (!response) return; + + const data = await response.json(); + + if (data.success) { + const container = document.getElementById('log-files-list'); + container.innerHTML = ''; + + if (data.files.length === 0) { + container.innerHTML = '
<p>No log files found</p>
'; + return; + } + + data.files.forEach(function(file) { + const item = document.createElement('div'); + item.className = 'log-file-item'; + + const info = document.createElement('div'); + info.className = 'log-file-info'; + + const name = document.createElement('div'); + name.className = 'log-file-name'; + name.textContent = file.name; + + const details = document.createElement('div'); + details.className = 'log-file-details'; + details.textContent = 'Size: ' + file.size_mb + ' MB • Modified: ' + new Date(file.modified).toLocaleDateString(); + + info.appendChild(name); + info.appendChild(details); + + const actions = document.createElement('div'); + actions.className = 'log-file-actions'; + + const downloadBtn = document.createElement('button'); + downloadBtn.className = 'btn btn-xs btn-secondary'; + downloadBtn.innerHTML = ''; + downloadBtn.title = 'Download'; + downloadBtn.onclick = function() { downloadLogFile(file.name); }; + + const viewBtn = document.createElement('button'); + viewBtn.className = 'btn btn-xs btn-secondary'; + viewBtn.innerHTML = ''; + viewBtn.title = 'View Last 100 Lines'; + viewBtn.onclick = function() { viewLogFile(file.name); }; + + actions.appendChild(downloadBtn); + actions.appendChild(viewBtn); + + item.appendChild(info); + item.appendChild(actions); + + container.appendChild(item); + }); + } + } catch (error) { + console.error('Error loading log files:', error); + AniWorld.UI.showToast('Failed to load log files', 'error'); + } + } + + /** + * Save logging configuration + */ + async function save() { + try { + const config = { + log_level: document.getElementById('log-level').value, + enable_console_logging: document.getElementById('enable-console-logging').checked, + enable_console_progress: document.getElementById('enable-console-progress').checked, + enable_fail2ban_logging: document.getElementById('enable-fail2ban-logging').checked + }; + + const response = await AniWorld.ApiClient.request(API.LOGGING_CONFIG, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify(config) + }); + + if (!response) return; + const data = await response.json(); + + if (data.success) { + AniWorld.UI.showToast('Logging configuration saved successfully', 'success'); + await load(); + } else { + AniWorld.UI.showToast('Failed to save logging config: ' + data.error, 'error'); + } + } catch (error) { + console.error('Error saving logging config:', error); + AniWorld.UI.showToast('Failed to save logging configuration', 'error'); + } + } + + /** + * Test logging functionality + */ + async function testLogging() { + try { + const response = await AniWorld.ApiClient.post(API.LOGGING_TEST, {}); + + if (!response) return; + const data = await response.json(); + + if (data.success) { + AniWorld.UI.showToast('Test messages logged successfully', 'success'); + setTimeout(loadLogFiles, 1000); + } else { + AniWorld.UI.showToast('Failed to test logging: ' + data.error, 'error'); + } + } catch (error) { + console.error('Error testing logging:', error); + AniWorld.UI.showToast('Failed to test logging', 'error'); + } + } + + /** + * Cleanup old log files + */ + async function cleanupLogs() { + const days = prompt('Delete log files older than how many days?', '30'); + if (!days || isNaN(days) || days < 1) { + AniWorld.UI.showToast('Invalid number of days', 'error'); + return; + } + + try { + const response = await AniWorld.ApiClient.request(API.LOGGING_CLEANUP, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ days: 
parseInt(days) }) + }); + + if (!response) return; + const data = await response.json(); + + if (data.success) { + AniWorld.UI.showToast(data.message, 'success'); + await loadLogFiles(); + } else { + AniWorld.UI.showToast('Failed to cleanup logs: ' + data.error, 'error'); + } + } catch (error) { + console.error('Error cleaning up logs:', error); + AniWorld.UI.showToast('Failed to cleanup logs', 'error'); + } + } + + /** + * Download a log file + */ + function downloadLogFile(filename) { + const link = document.createElement('a'); + link.href = '/api/logging/files/' + encodeURIComponent(filename) + '/download'; + link.download = filename; + document.body.appendChild(link); + link.click(); + document.body.removeChild(link); + } + + /** + * View a log file's last lines + */ + async function viewLogFile(filename) { + try { + const response = await AniWorld.ApiClient.get('/api/logging/files/' + encodeURIComponent(filename) + '/tail?lines=100'); + if (!response) return; + + const data = await response.json(); + + if (data.success) { + // Create modal to show log content + const modal = document.createElement('div'); + modal.className = 'modal'; + modal.style.display = 'block'; + + const modalContent = document.createElement('div'); + modalContent.className = 'modal-content'; + modalContent.style.maxWidth = '80%'; + modalContent.style.maxHeight = '80%'; + + const header = document.createElement('div'); + header.innerHTML = '
<h4>Log File: ' + filename + '</h4>' +
+ '<p>Showing last ' + data.showing_lines + ' of ' + data.total_lines + ' lines</p>
'; + + const content = document.createElement('pre'); + content.style.maxHeight = '60vh'; + content.style.overflow = 'auto'; + content.style.backgroundColor = '#f5f5f5'; + content.style.padding = '10px'; + content.style.fontSize = '12px'; + content.textContent = data.lines.join('\n'); + + const closeBtn = document.createElement('button'); + closeBtn.textContent = 'Close'; + closeBtn.className = 'btn btn-secondary'; + closeBtn.onclick = function() { document.body.removeChild(modal); }; + + modalContent.appendChild(header); + modalContent.appendChild(content); + modalContent.appendChild(closeBtn); + modal.appendChild(modalContent); + document.body.appendChild(modal); + + // Close on background click + modal.onclick = function(e) { + if (e.target === modal) { + document.body.removeChild(modal); + } + }; + } else { + AniWorld.UI.showToast('Failed to view log file: ' + data.error, 'error'); + } + } catch (error) { + console.error('Error viewing log file:', error); + AniWorld.UI.showToast('Failed to view log file', 'error'); + } + } + + // Public API + return { + load: load, + loadLogFiles: loadLogFiles, + save: save, + testLogging: testLogging, + cleanupLogs: cleanupLogs, + downloadLogFile: downloadLogFile, + viewLogFile: viewLogFile + }; +})(); diff --git a/src/server/web/static/js/index/main-config.js b/src/server/web/static/js/index/main-config.js new file mode 100644 index 0000000..b8c1a60 --- /dev/null +++ b/src/server/web/static/js/index/main-config.js @@ -0,0 +1,294 @@ +/** + * AniWorld - Main Config Module + * + * Handles main configuration (directory, connection) and config management + * (backup, export, validate, reset). + * + * Dependencies: constants.js, api-client.js, ui-utils.js + */ + +var AniWorld = window.AniWorld || {}; + +AniWorld.MainConfig = (function() { + 'use strict'; + + const API = AniWorld.Constants.API; + + /** + * Save main configuration + */ + async function save() { + try { + const animeDirectory = document.getElementById('anime-directory-input').value.trim(); + + if (!animeDirectory) { + AniWorld.UI.showToast('Please enter an anime directory path', 'error'); + return; + } + + const response = await AniWorld.ApiClient.post(API.CONFIG_DIRECTORY, { + directory: animeDirectory + }); + + if (!response) return; + const data = await response.json(); + + if (data.success) { + AniWorld.UI.showToast('Main configuration saved successfully', 'success'); + await refreshStatus(); + } else { + AniWorld.UI.showToast('Failed to save configuration: ' + data.error, 'error'); + } + } catch (error) { + console.error('Error saving main config:', error); + AniWorld.UI.showToast('Failed to save main configuration', 'error'); + } + } + + /** + * Reset main configuration + */ + function reset() { + if (confirm('Are you sure you want to reset the main configuration? 
This will clear the anime directory.')) { + document.getElementById('anime-directory-input').value = ''; + document.getElementById('series-count-input').value = '0'; + AniWorld.UI.showToast('Main configuration reset', 'info'); + } + } + + /** + * Test network connection + */ + async function testConnection() { + try { + AniWorld.UI.showToast('Testing connection...', 'info'); + + const response = await AniWorld.ApiClient.get(API.DIAGNOSTICS_NETWORK); + if (!response) return; + + const data = await response.json(); + + if (data.status === 'success') { + const networkStatus = data.data; + const connectionDiv = document.getElementById('connection-status-display'); + const statusIndicator = connectionDiv.querySelector('.status-indicator'); + const statusText = connectionDiv.querySelector('.status-text'); + + if (networkStatus.aniworld_reachable) { + statusIndicator.className = 'status-indicator connected'; + statusText.textContent = 'Connected'; + AniWorld.UI.showToast('Connection test successful', 'success'); + } else { + statusIndicator.className = 'status-indicator disconnected'; + statusText.textContent = 'Disconnected'; + AniWorld.UI.showToast('Connection test failed', 'error'); + } + } else { + AniWorld.UI.showToast('Connection test failed', 'error'); + } + } catch (error) { + console.error('Error testing connection:', error); + AniWorld.UI.showToast('Connection test failed', 'error'); + } + } + + /** + * Browse for directory + */ + function browseDirectory() { + const currentPath = document.getElementById('anime-directory-input').value; + const newPath = prompt('Enter the anime directory path:', currentPath); + + if (newPath !== null && newPath.trim() !== '') { + document.getElementById('anime-directory-input').value = newPath.trim(); + } + } + + /** + * Refresh status display + */ + async function refreshStatus() { + try { + const response = await AniWorld.ApiClient.get(API.ANIME_STATUS); + if (!response) return; + const data = await response.json(); + + document.getElementById('anime-directory-input').value = data.directory || ''; + document.getElementById('series-count-input').value = data.series_count || '0'; + } catch (error) { + console.error('Error refreshing status:', error); + } + } + + /** + * Create configuration backup + */ + async function createBackup() { + const backupName = prompt('Enter backup name (optional):'); + + try { + const response = await AniWorld.ApiClient.request(API.CONFIG_BACKUP, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ name: backupName || '' }) + }); + + if (!response) return; + const data = await response.json(); + + if (data.success) { + AniWorld.UI.showToast('Backup created: ' + data.filename, 'success'); + } else { + AniWorld.UI.showToast('Failed to create backup: ' + data.error, 'error'); + } + } catch (error) { + console.error('Error creating backup:', error); + AniWorld.UI.showToast('Failed to create backup', 'error'); + } + } + + /** + * View configuration backups + */ + async function viewBackups() { + try { + const response = await AniWorld.ApiClient.get(API.CONFIG_BACKUPS); + if (!response) return; + + const data = await response.json(); + + if (data.success) { + showBackupsModal(data.backups); + } else { + AniWorld.UI.showToast('Failed to load backups: ' + data.error, 'error'); + } + } catch (error) { + console.error('Error loading backups:', error); + AniWorld.UI.showToast('Failed to load backups', 'error'); + } + } + + /** + * Show backups modal + */ + function showBackupsModal(backups) { 
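+ // Placeholder: backups are only summarized in a toast for now; a full restore/delete UI is not implemented yet.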
+ // Implementation for showing backups modal + console.log('Backups:', backups); + AniWorld.UI.showToast('Found ' + backups.length + ' backup(s)', 'info'); + } + + /** + * Export configuration + */ + function exportConfig() { + AniWorld.UI.showToast('Export configuration feature coming soon', 'info'); + } + + /** + * Validate configuration + */ + async function validateConfig() { + try { + const response = await AniWorld.ApiClient.request(API.CONFIG_VALIDATE, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({}) + }); + + if (!response) return; + const data = await response.json(); + + if (data.success) { + showValidationResults(data.validation); + } else { + AniWorld.UI.showToast('Validation failed: ' + data.error, 'error'); + } + } catch (error) { + console.error('Error validating config:', error); + AniWorld.UI.showToast('Failed to validate configuration', 'error'); + } + } + + /** + * Show validation results + */ + function showValidationResults(validation) { + const container = document.getElementById('validation-results'); + container.innerHTML = ''; + container.classList.remove('hidden'); + + if (validation.valid) { + const success = document.createElement('div'); + success.className = 'validation-success'; + success.innerHTML = ' Configuration is valid!'; + container.appendChild(success); + } else { + const header = document.createElement('div'); + header.innerHTML = 'Validation Issues Found:'; + container.appendChild(header); + } + + // Show errors + validation.errors.forEach(function(error) { + const errorDiv = document.createElement('div'); + errorDiv.className = 'validation-error'; + errorDiv.innerHTML = ' Error: ' + error; + container.appendChild(errorDiv); + }); + + // Show warnings + validation.warnings.forEach(function(warning) { + const warningDiv = document.createElement('div'); + warningDiv.className = 'validation-warning'; + warningDiv.innerHTML = ' Warning: ' + warning; + container.appendChild(warningDiv); + }); + } + + /** + * Reset all configuration to defaults + */ + async function resetAllConfig() { + if (!confirm('Are you sure you want to reset all configuration to defaults? 
This cannot be undone (except by restoring a backup).')) { + return; + } + + try { + const response = await AniWorld.ApiClient.request(API.CONFIG_RESET, { + method: 'POST', + headers: { 'Content-Type': 'application/json' }, + body: JSON.stringify({ preserve_security: true }) + }); + + if (!response) return; + const data = await response.json(); + + if (data.success) { + AniWorld.UI.showToast('Configuration reset to defaults', 'success'); + // Notify caller to reload config modal + return true; + } else { + AniWorld.UI.showToast('Failed to reset config: ' + data.error, 'error'); + return false; + } + } catch (error) { + console.error('Error resetting config:', error); + AniWorld.UI.showToast('Failed to reset configuration', 'error'); + return false; + } + } + + // Public API + return { + save: save, + reset: reset, + testConnection: testConnection, + browseDirectory: browseDirectory, + refreshStatus: refreshStatus, + createBackup: createBackup, + viewBackups: viewBackups, + exportConfig: exportConfig, + validateConfig: validateConfig, + resetAllConfig: resetAllConfig + }; +})(); diff --git a/src/server/web/static/js/index/scan-manager.js b/src/server/web/static/js/index/scan-manager.js new file mode 100644 index 0000000..b06fd80 --- /dev/null +++ b/src/server/web/static/js/index/scan-manager.js @@ -0,0 +1,439 @@ +/** + * AniWorld - Scan Manager Module + * + * Handles library scanning and progress overlay. + * + * Dependencies: constants.js, api-client.js, ui-utils.js + */ + +var AniWorld = window.AniWorld || {}; + +AniWorld.ScanManager = (function() { + 'use strict'; + + const API = AniWorld.Constants.API; + const DEFAULTS = AniWorld.Constants.DEFAULTS; + + // State + let scanTotalItems = 0; + let lastScanData = null; + + /** + * Initialize the scan manager + */ + function init() { + bindEvents(); + // Check scan status on page load + checkActiveScanStatus(); + } + + /** + * Bind UI events + */ + function bindEvents() { + const rescanBtn = document.getElementById('rescan-btn'); + if (rescanBtn) { + rescanBtn.addEventListener('click', rescanSeries); + } + + // Click on rescan status indicator to reopen scan overlay + const rescanStatus = document.getElementById('rescan-status'); + if (rescanStatus) { + rescanStatus.addEventListener('click', function(e) { + e.stopPropagation(); + console.log('Rescan status clicked'); + reopenScanOverlay(); + }); + } + } + + /** + * Start a rescan of the series directory + */ + async function rescanSeries() { + try { + // Show the overlay immediately before making the API call + showScanProgressOverlay({ + directory: 'Starting scan...', + total_items: 0 + }); + updateProcessStatus('rescan', true); + + const response = await AniWorld.ApiClient.post(API.ANIME_RESCAN, {}); + + if (!response) { + removeScanProgressOverlay(); + updateProcessStatus('rescan', false); + return; + } + const data = await response.json(); + + // Debug logging + console.log('Rescan response:', data); + + // Note: The scan progress will be updated via WebSocket events + // The overlay will be closed when scan_completed is received + if (data.success !== true) { + removeScanProgressOverlay(); + updateProcessStatus('rescan', false); + AniWorld.UI.showToast('Rescan error: ' + data.message, 'error'); + } + } catch (error) { + console.error('Rescan error:', error); + removeScanProgressOverlay(); + updateProcessStatus('rescan', false); + AniWorld.UI.showToast('Failed to start rescan', 'error'); + } + } + + /** + * Show the scan progress overlay + * @param {Object} data - Scan started event data 
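+ * Expected fields: directory and total_items; total_items may be 0 until the scanner has counted entries, in which case '...' is displayed as the total.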
+ */ + function showScanProgressOverlay(data) { + // Remove existing overlay if present + removeScanProgressOverlay(); + + // Store total items for progress calculation + scanTotalItems = data?.total_items || 0; + + // Store last scan data for reopening + lastScanData = data; + + // Create overlay element + const overlay = document.createElement('div'); + overlay.id = 'scan-progress-overlay'; + overlay.className = 'scan-progress-overlay'; + + const totalDisplay = scanTotalItems > 0 ? scanTotalItems : '...'; + + overlay.innerHTML = + '
<div class="scan-progress-container">' +
+ '<div class="scan-progress-header">' +
+ '<span class="scan-title">' +
+ '<i class="fas fa-sync fa-spin"></i>' +
+ '<span class="scan-title-text">Scanning Library</span>' +
+ '</span>' +
+ '</div>' +
+ '<div class="scan-progress-track">' +
+ '<div id="scan-progress-bar" class="scan-progress-bar"></div>' +
+ '</div>' +
+ '<div class="scan-progress-text">' +
+ '<span id="scan-current-count">0</span> / <span id="scan-total-count">' + totalDisplay + '</span> directories' +
+ '</div>' +
+ '<div class="scan-stats">' +
+ '<div class="scan-stat">' +
+ '<span id="scan-directories-count" class="scan-stat-value">0</span>' +
+ '<span class="scan-stat-label">Scanned</span>' +
+ '</div>' +
+ '<div class="scan-stat">' +
+ '<span id="scan-files-count" class="scan-stat-value">0</span>' +
+ '<span class="scan-stat-label">Series Found</span>' +
+ '</div>' +
+ '</div>' +
+ '<div class="scan-current-directory">' +
+ '<span class="scan-current-label">Current:</span>' +
+ '<span id="scan-current-path">' + AniWorld.UI.escapeHtml(data?.directory || 'Initializing...') + '</span>' +
+ '</div>' +
+ '<div id="scan-elapsed-time" class="scan-elapsed hidden">' +
+ 'Elapsed: <span id="scan-elapsed-value"></span>' +
+ '</div>' +
+ '</div>
'; + + document.body.appendChild(overlay); + + // Add click-outside-to-close handler + overlay.addEventListener('click', function(e) { + // Only close if clicking the overlay background, not the container + if (e.target === overlay) { + removeScanProgressOverlay(); + } + }); + + // Trigger animation by adding visible class after a brief delay + requestAnimationFrame(function() { + overlay.classList.add('visible'); + }); + } + + /** + * Update the scan progress overlay + * @param {Object} data - Scan progress event data + */ + function updateScanProgressOverlay(data) { + const overlay = document.getElementById('scan-progress-overlay'); + if (!overlay) return; + + // Update total items if provided + if (data.total_items && data.total_items > 0) { + scanTotalItems = data.total_items; + const totalCount = document.getElementById('scan-total-count'); + if (totalCount) { + totalCount.textContent = scanTotalItems; + } + } + + // Update progress bar + const progressBar = document.getElementById('scan-progress-bar'); + if (progressBar && scanTotalItems > 0 && data.directories_scanned !== undefined) { + const percentage = Math.min(100, (data.directories_scanned / scanTotalItems) * 100); + progressBar.style.width = percentage + '%'; + } + + // Update current/total count display + const currentCount = document.getElementById('scan-current-count'); + if (currentCount && data.directories_scanned !== undefined) { + currentCount.textContent = data.directories_scanned; + } + + // Update directories count + const dirCount = document.getElementById('scan-directories-count'); + if (dirCount && data.directories_scanned !== undefined) { + dirCount.textContent = data.directories_scanned; + } + + // Update files/series count + const filesCount = document.getElementById('scan-files-count'); + if (filesCount && data.files_found !== undefined) { + filesCount.textContent = data.files_found; + } + + // Update current directory (truncate if too long) + const currentPath = document.getElementById('scan-current-path'); + if (currentPath && data.current_directory) { + const maxLength = 50; + let displayPath = data.current_directory; + if (displayPath.length > maxLength) { + displayPath = '...' 
+ displayPath.slice(-maxLength + 3); + } + currentPath.textContent = displayPath; + currentPath.title = data.current_directory; + } + } + + /** + * Hide the scan progress overlay with completion summary + * @param {Object} data - Scan completed event data + */ + function hideScanProgressOverlay(data) { + const overlay = document.getElementById('scan-progress-overlay'); + if (!overlay) return; + + const container = overlay.querySelector('.scan-progress-container'); + if (container) { + container.classList.add('completed'); + } + + // Update title + const titleText = overlay.querySelector('.scan-title-text'); + if (titleText) { + titleText.textContent = 'Scan Complete'; + } + + // Complete the progress bar + const progressBar = document.getElementById('scan-progress-bar'); + if (progressBar) { + progressBar.style.width = '100%'; + } + + // Update final stats + if (data) { + const dirCount = document.getElementById('scan-directories-count'); + if (dirCount && data.total_directories !== undefined) { + dirCount.textContent = data.total_directories; + } + + const filesCount = document.getElementById('scan-files-count'); + if (filesCount && data.total_files !== undefined) { + filesCount.textContent = data.total_files; + } + + // Update progress text to show final count + const currentCount = document.getElementById('scan-current-count'); + const totalCount = document.getElementById('scan-total-count'); + if (currentCount && data.total_directories !== undefined) { + currentCount.textContent = data.total_directories; + } + if (totalCount && data.total_directories !== undefined) { + totalCount.textContent = data.total_directories; + } + + // Show elapsed time + const elapsedTimeEl = document.getElementById('scan-elapsed-time'); + const elapsedValueEl = document.getElementById('scan-elapsed-value'); + if (elapsedTimeEl && elapsedValueEl && data.elapsed_seconds !== undefined) { + elapsedValueEl.textContent = data.elapsed_seconds.toFixed(1) + 's'; + elapsedTimeEl.classList.remove('hidden'); + } + + // Update current directory to show completion message + const currentPath = document.getElementById('scan-current-path'); + if (currentPath) { + currentPath.textContent = 'Scan finished successfully'; + } + } + + // Auto-dismiss after 3 seconds + setTimeout(function() { + removeScanProgressOverlay(); + }, DEFAULTS.SCAN_AUTO_DISMISS); + } + + /** + * Remove the scan progress overlay from the DOM + */ + function removeScanProgressOverlay() { + const overlay = document.getElementById('scan-progress-overlay'); + if (overlay) { + overlay.classList.remove('visible'); + // Wait for fade out animation before removing + setTimeout(function() { + if (overlay.parentElement) { + overlay.remove(); + } + }, 300); + } + } + + /** + * Reopen the scan progress overlay if a scan is in progress + */ + async function reopenScanOverlay() { + // Check if overlay already exists + const existingOverlay = document.getElementById('scan-progress-overlay'); + if (existingOverlay) { + return; + } + + // Check if scan is running via API + try { + const response = await AniWorld.ApiClient.get(API.ANIME_SCAN_STATUS); + if (!response || !response.ok) { + console.log('Could not fetch scan status'); + return; + } + + const data = await response.json(); + console.log('Scan status for reopen:', data); + + if (data.is_scanning) { + // A scan is in progress, show the overlay + showScanProgressOverlay({ + directory: data.directory, + total_items: data.total_items + }); + + // Update with current progress + updateScanProgressOverlay({ + 
directories_scanned: data.directories_scanned, + files_found: data.directories_scanned, + current_directory: data.current_directory, + total_items: data.total_items + }); + } + } catch (error) { + console.error('Error checking scan status for reopen:', error); + } + } + + /** + * Check if a scan is currently in progress + */ + async function checkActiveScanStatus() { + try { + const response = await AniWorld.ApiClient.get(API.ANIME_SCAN_STATUS); + if (!response || !response.ok) { + console.log('Could not fetch scan status, response:', response?.status); + return; + } + + const data = await response.json(); + console.log('Scan status check result:', data); + + if (data.is_scanning) { + console.log('Scan is active, updating UI indicators'); + + // Update the process status indicator + updateProcessStatus('rescan', true); + + // Show the overlay + showScanProgressOverlay({ + directory: data.directory, + total_items: data.total_items + }); + + // Update with current progress + updateScanProgressOverlay({ + directories_scanned: data.directories_scanned, + files_found: data.directories_scanned, + current_directory: data.current_directory, + total_items: data.total_items + }); + } else { + console.log('No active scan detected'); + updateProcessStatus('rescan', false); + } + } catch (error) { + console.error('Error checking scan status:', error); + } + } + + /** + * Update process status indicator + * @param {string} processName - Process name (e.g., 'rescan', 'download') + * @param {boolean} isRunning - Whether the process is running + * @param {boolean} hasError - Whether there's an error + */ + function updateProcessStatus(processName, isRunning, hasError) { + hasError = hasError || false; + const statusElement = document.getElementById(processName + '-status'); + if (!statusElement) { + console.warn('Process status element not found: ' + processName + '-status'); + return; + } + + const statusDot = statusElement.querySelector('.status-dot'); + if (!statusDot) { + console.warn('Status dot not found in ' + processName + '-status element'); + return; + } + + // Remove all status classes + statusDot.classList.remove('idle', 'running', 'error'); + statusElement.classList.remove('running', 'error', 'idle'); + + // Capitalize process name for display + const displayName = processName.charAt(0).toUpperCase() + processName.slice(1); + + if (hasError) { + statusDot.classList.add('error'); + statusElement.classList.add('error'); + statusElement.title = displayName + ' error - click for details'; + } else if (isRunning) { + statusDot.classList.add('running'); + statusElement.classList.add('running'); + statusElement.title = displayName + ' is running...'; + } else { + statusDot.classList.add('idle'); + statusElement.classList.add('idle'); + statusElement.title = displayName + ' is idle'; + } + + console.log('Process status updated: ' + processName + ' = ' + (isRunning ? 'running' : (hasError ? 
'error' : 'idle'))); + } + + // Public API + return { + init: init, + rescanSeries: rescanSeries, + showScanProgressOverlay: showScanProgressOverlay, + updateScanProgressOverlay: updateScanProgressOverlay, + hideScanProgressOverlay: hideScanProgressOverlay, + removeScanProgressOverlay: removeScanProgressOverlay, + reopenScanOverlay: reopenScanOverlay, + checkActiveScanStatus: checkActiveScanStatus, + updateProcessStatus: updateProcessStatus + }; +})(); diff --git a/src/server/web/static/js/index/scheduler-config.js b/src/server/web/static/js/index/scheduler-config.js new file mode 100644 index 0000000..baf4da0 --- /dev/null +++ b/src/server/web/static/js/index/scheduler-config.js @@ -0,0 +1,124 @@ +/** + * AniWorld - Scheduler Config Module + * + * Handles scheduler configuration and scheduled rescan settings. + * + * Dependencies: constants.js, api-client.js, ui-utils.js + */ + +var AniWorld = window.AniWorld || {}; + +AniWorld.SchedulerConfig = (function() { + 'use strict'; + + const API = AniWorld.Constants.API; + + /** + * Load scheduler configuration + */ + async function load() { + try { + const response = await AniWorld.ApiClient.get(API.SCHEDULER_CONFIG); + if (!response) return; + const data = await response.json(); + + if (data.success) { + const config = data.config; + + // Update UI elements + document.getElementById('scheduled-rescan-enabled').checked = config.enabled; + document.getElementById('scheduled-rescan-time').value = config.time || '03:00'; + document.getElementById('auto-download-after-rescan').checked = config.auto_download_after_rescan; + + // Update status display + document.getElementById('next-rescan-time').textContent = + config.next_run ? new Date(config.next_run).toLocaleString() : 'Not scheduled'; + document.getElementById('last-rescan-time').textContent = + config.last_run ? new Date(config.last_run).toLocaleString() : 'Never'; + + const statusBadge = document.getElementById('scheduler-running-status'); + statusBadge.textContent = config.is_running ? 'Running' : 'Stopped'; + statusBadge.className = 'info-value status-badge ' + (config.is_running ? 
'running' : 'stopped'); + + // Enable/disable time input based on checkbox + toggleTimeInput(); + } + } catch (error) { + console.error('Error loading scheduler config:', error); + AniWorld.UI.showToast('Failed to load scheduler configuration', 'error'); + } + } + + /** + * Save scheduler configuration + */ + async function save() { + try { + const enabled = document.getElementById('scheduled-rescan-enabled').checked; + const time = document.getElementById('scheduled-rescan-time').value; + const autoDownload = document.getElementById('auto-download-after-rescan').checked; + + const response = await AniWorld.ApiClient.post(API.SCHEDULER_CONFIG, { + enabled: enabled, + time: time, + auto_download_after_rescan: autoDownload + }); + + if (!response) return; + const data = await response.json(); + + if (data.success) { + AniWorld.UI.showToast('Scheduler configuration saved successfully', 'success'); + await load(); + } else { + AniWorld.UI.showToast('Failed to save config: ' + data.error, 'error'); + } + } catch (error) { + console.error('Error saving scheduler config:', error); + AniWorld.UI.showToast('Failed to save scheduler configuration', 'error'); + } + } + + /** + * Test scheduled rescan + */ + async function testRescan() { + try { + const response = await AniWorld.ApiClient.post(API.SCHEDULER_TRIGGER, {}); + + if (!response) return; + const data = await response.json(); + + if (data.success) { + AniWorld.UI.showToast('Test rescan triggered successfully', 'success'); + } else { + AniWorld.UI.showToast('Failed to trigger test rescan: ' + data.error, 'error'); + } + } catch (error) { + console.error('Error triggering test rescan:', error); + AniWorld.UI.showToast('Failed to trigger test rescan', 'error'); + } + } + + /** + * Toggle scheduler time input visibility + */ + function toggleTimeInput() { + const enabled = document.getElementById('scheduled-rescan-enabled').checked; + const timeConfig = document.getElementById('rescan-time-config'); + + if (enabled) { + timeConfig.classList.add('enabled'); + } else { + timeConfig.classList.remove('enabled'); + } + } + + // Public API + return { + load: load, + save: save, + testRescan: testRescan, + toggleTimeInput: toggleTimeInput + }; +})(); diff --git a/src/server/web/static/js/index/search.js b/src/server/web/static/js/index/search.js new file mode 100644 index 0000000..ce316da --- /dev/null +++ b/src/server/web/static/js/index/search.js @@ -0,0 +1,156 @@ +/** + * AniWorld - Search Module + * + * Handles anime search functionality and result display. 
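+ * Accepts both the current response format (a plain array of results) and the legacy {status, results} wrapper.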
+ * + * Dependencies: constants.js, api-client.js, ui-utils.js, series-manager.js + */ + +var AniWorld = window.AniWorld || {}; + +AniWorld.Search = (function() { + 'use strict'; + + const API = AniWorld.Constants.API; + + /** + * Initialize the search module + */ + function init() { + bindEvents(); + } + + /** + * Bind UI events + */ + function bindEvents() { + const searchInput = document.getElementById('search-input'); + const searchBtn = document.getElementById('search-btn'); + const clearSearch = document.getElementById('clear-search'); + + if (searchBtn) { + searchBtn.addEventListener('click', performSearch); + } + + if (searchInput) { + searchInput.addEventListener('keypress', function(e) { + if (e.key === 'Enter') { + performSearch(); + } + }); + } + + if (clearSearch) { + clearSearch.addEventListener('click', function() { + if (searchInput) searchInput.value = ''; + hideSearchResults(); + }); + } + } + + /** + * Perform anime search + */ + async function performSearch() { + const searchInput = document.getElementById('search-input'); + const query = searchInput ? searchInput.value.trim() : ''; + + if (!query) { + AniWorld.UI.showToast('Please enter a search term', 'warning'); + return; + } + + try { + AniWorld.UI.showLoading(); + + const response = await AniWorld.ApiClient.post(API.ANIME_SEARCH, { query: query }); + + if (!response) return; + const data = await response.json(); + + // Check if response is a direct array (new format) or wrapped object (legacy) + if (Array.isArray(data)) { + displaySearchResults(data); + } else if (data.status === 'success') { + displaySearchResults(data.results); + } else { + AniWorld.UI.showToast('Search error: ' + (data.message || 'Unknown error'), 'error'); + } + } catch (error) { + console.error('Search error:', error); + AniWorld.UI.showToast('Search failed', 'error'); + } finally { + AniWorld.UI.hideLoading(); + } + } + + /** + * Display search results + * @param {Array} results - Search results array + */ + function displaySearchResults(results) { + const resultsContainer = document.getElementById('search-results'); + const resultsList = document.getElementById('search-results-list'); + + if (results.length === 0) { + resultsContainer.classList.add('hidden'); + AniWorld.UI.showToast('No search results found', 'warning'); + return; + } + + resultsList.innerHTML = results.map(function(result) { + const displayName = AniWorld.UI.getDisplayName(result); + return '
<div class="search-result-item">' +
+ '<span class="search-result-name">' + AniWorld.UI.escapeHtml(displayName) + '</span>' +
+ '<button class="btn btn-sm btn-primary" title="Add series" ' +
+ 'onclick="app.addSeries(\'' + result.link + '\', \'' + AniWorld.UI.escapeHtml(displayName) + '\')">' +
+ '<i class="fas fa-plus"></i>' +
+ '</button>' +
+ '</div>
'; + }).join(''); + + resultsContainer.classList.remove('hidden'); + } + + /** + * Hide search results + */ + function hideSearchResults() { + document.getElementById('search-results').classList.add('hidden'); + } + + /** + * Add a series from search results + * @param {string} link - Series link + * @param {string} name - Series name + */ + async function addSeries(link, name) { + try { + const response = await AniWorld.ApiClient.post(API.ANIME_ADD, { link: link, name: name }); + + if (!response) return; + const data = await response.json(); + + if (data.status === 'success') { + AniWorld.UI.showToast(data.message, 'success'); + AniWorld.SeriesManager.loadSeries(); + hideSearchResults(); + document.getElementById('search-input').value = ''; + } else { + AniWorld.UI.showToast('Error adding series: ' + data.message, 'error'); + } + } catch (error) { + console.error('Error adding series:', error); + AniWorld.UI.showToast('Failed to add series', 'error'); + } + } + + // Public API + return { + init: init, + performSearch: performSearch, + hideSearchResults: hideSearchResults, + addSeries: addSeries + }; +})(); diff --git a/src/server/web/static/js/index/selection-manager.js b/src/server/web/static/js/index/selection-manager.js new file mode 100644 index 0000000..fba2760 --- /dev/null +++ b/src/server/web/static/js/index/selection-manager.js @@ -0,0 +1,296 @@ +/** + * AniWorld - Selection Manager Module + * + * Handles series selection for downloads. + * + * Dependencies: constants.js, api-client.js, ui-utils.js, series-manager.js + */ + +var AniWorld = window.AniWorld || {}; + +AniWorld.SelectionManager = (function() { + 'use strict'; + + const API = AniWorld.Constants.API; + + // State + let selectedSeries = new Set(); + + /** + * Initialize the selection manager + */ + function init() { + bindEvents(); + } + + /** + * Bind UI events + */ + function bindEvents() { + const selectAllBtn = document.getElementById('select-all'); + if (selectAllBtn) { + selectAllBtn.addEventListener('click', toggleSelectAll); + } + + const downloadBtn = document.getElementById('download-selected'); + if (downloadBtn) { + downloadBtn.addEventListener('click', downloadSelected); + } + } + + /** + * Toggle series selection + * @param {string} key - Series key + * @param {boolean} selected - Whether to select or deselect + */ + function toggleSerieSelection(key, selected) { + // Only allow selection of series with missing episodes + const serie = AniWorld.SeriesManager.findByKey(key); + if (!serie || serie.missing_episodes === 0) { + // Uncheck the checkbox if it was checked for a complete series + const checkbox = document.querySelector('input[data-key="' + key + '"]'); + if (checkbox) checkbox.checked = false; + return; + } + + if (selected) { + selectedSeries.add(key); + } else { + selectedSeries.delete(key); + } + + updateSelectionUI(); + } + + /** + * Check if a series is selected + * @param {string} key - Series key + * @returns {boolean} + */ + function isSelected(key) { + return selectedSeries.has(key); + } + + /** + * Update selection UI (buttons and card styles) + */ + function updateSelectionUI() { + const downloadBtn = document.getElementById('download-selected'); + const selectAllBtn = document.getElementById('select-all'); + + // Get series that can be selected (have missing episodes) + const selectableSeriesData = AniWorld.SeriesManager.getFilteredSeriesData().length > 0 ? 
+ AniWorld.SeriesManager.getFilteredSeriesData() : AniWorld.SeriesManager.getSeriesData(); + const selectableSeries = selectableSeriesData.filter(function(serie) { + return serie.missing_episodes > 0; + }); + const selectableKeys = selectableSeries.map(function(serie) { + return serie.key; + }); + + downloadBtn.disabled = selectedSeries.size === 0; + + const allSelectableSelected = selectableKeys.every(function(key) { + return selectedSeries.has(key); + }); + + if (selectedSeries.size === 0) { + selectAllBtn.innerHTML = 'Select All'; + } else if (allSelectableSelected && selectableKeys.length > 0) { + selectAllBtn.innerHTML = 'Deselect All'; + } else { + selectAllBtn.innerHTML = 'Select All'; + } + + // Update card appearances + document.querySelectorAll('.series-card').forEach(function(card) { + const key = card.dataset.key; + const isSelectedCard = selectedSeries.has(key); + card.classList.toggle('selected', isSelectedCard); + }); + } + + /** + * Toggle select all / deselect all + */ + function toggleSelectAll() { + // Get series that can be selected (have missing episodes) + const selectableSeriesData = AniWorld.SeriesManager.getFilteredSeriesData().length > 0 ? + AniWorld.SeriesManager.getFilteredSeriesData() : AniWorld.SeriesManager.getSeriesData(); + const selectableSeries = selectableSeriesData.filter(function(serie) { + return serie.missing_episodes > 0; + }); + const selectableKeys = selectableSeries.map(function(serie) { + return serie.key; + }); + + const allSelectableSelected = selectableKeys.every(function(key) { + return selectedSeries.has(key); + }); + + if (allSelectableSelected && selectedSeries.size > 0) { + // Deselect all selectable series + selectableKeys.forEach(function(key) { + selectedSeries.delete(key); + }); + document.querySelectorAll('.series-checkbox:not([disabled])').forEach(function(cb) { + cb.checked = false; + }); + } else { + // Select all selectable series + selectableKeys.forEach(function(key) { + selectedSeries.add(key); + }); + document.querySelectorAll('.series-checkbox:not([disabled])').forEach(function(cb) { + cb.checked = true; + }); + } + + updateSelectionUI(); + } + + /** + * Clear all selections + */ + function clearSelection() { + selectedSeries.clear(); + document.querySelectorAll('.series-checkbox').forEach(function(cb) { + cb.checked = false; + }); + updateSelectionUI(); + } + + /** + * Download selected series + */ + async function downloadSelected() { + console.log('=== downloadSelected - Using key as primary identifier ==='); + if (selectedSeries.size === 0) { + AniWorld.UI.showToast('No series selected', 'warning'); + return; + } + + try { + const selectedKeys = Array.from(selectedSeries); + console.log('=== Starting download for selected series ==='); + console.log('Selected keys:', selectedKeys); + + let totalEpisodesAdded = 0; + let failedSeries = []; + + // For each selected series, get its missing episodes and add to queue + for (var i = 0; i < selectedKeys.length; i++) { + const key = selectedKeys[i]; + const serie = AniWorld.SeriesManager.findByKey(key); + if (!serie || !serie.episodeDict) { + console.error('Serie not found or has no episodeDict for key:', key, serie); + failedSeries.push(key); + continue; + } + + // Validate required fields + if (!serie.key) { + console.error('Serie missing key:', serie); + failedSeries.push(key); + continue; + } + + // Convert episodeDict format {season: [episodes]} to episode identifiers + const episodes = []; + Object.entries(serie.episodeDict).forEach(function(entry) { + const season = 
entry[0]; + const episodeNumbers = entry[1]; + if (Array.isArray(episodeNumbers)) { + episodeNumbers.forEach(function(episode) { + episodes.push({ + season: parseInt(season), + episode: episode + }); + }); + } + }); + + if (episodes.length === 0) { + console.log('No episodes to add for serie:', serie.name); + continue; + } + + // Use folder name as fallback if serie name is empty + const serieName = serie.name && serie.name.trim() ? serie.name : serie.folder; + + // Add episodes to download queue + const requestBody = { + serie_id: serie.key, + serie_folder: serie.folder, + serie_name: serieName, + episodes: episodes, + priority: 'NORMAL' + }; + console.log('Sending queue add request:', requestBody); + + const response = await AniWorld.ApiClient.post(API.QUEUE_ADD, requestBody); + + if (!response) { + failedSeries.push(key); + continue; + } + + const data = await response.json(); + console.log('Queue add response:', response.status, data); + + // Log validation errors in detail + if (data.detail && Array.isArray(data.detail)) { + console.error('Validation errors:', JSON.stringify(data.detail, null, 2)); + } + + if (response.ok && data.status === 'success') { + totalEpisodesAdded += episodes.length; + } else { + console.error('Failed to add to queue:', data); + failedSeries.push(key); + } + } + + // Show result message + console.log('=== Download request complete ==='); + console.log('Total episodes added:', totalEpisodesAdded); + console.log('Failed series (keys):', failedSeries); + + if (totalEpisodesAdded > 0) { + const message = failedSeries.length > 0 + ? 'Added ' + totalEpisodesAdded + ' episode(s) to queue (' + failedSeries.length + ' series failed)' + : 'Added ' + totalEpisodesAdded + ' episode(s) to download queue'; + AniWorld.UI.showToast(message, 'success'); + } else { + const errorDetails = failedSeries.length > 0 + ? 'Failed series (keys): ' + failedSeries.join(', ') + : 'No episodes were added. Check browser console for details.'; + console.error('Failed to add episodes. Details:', errorDetails); + AniWorld.UI.showToast('Failed to add episodes to queue. Check console for details.', 'error'); + } + } catch (error) { + console.error('Download error:', error); + AniWorld.UI.showToast('Failed to start download', 'error'); + } + } + + /** + * Get selected series count + * @returns {number} + */ + function getSelectionCount() { + return selectedSeries.size; + } + + // Public API + return { + init: init, + toggleSerieSelection: toggleSerieSelection, + isSelected: isSelected, + updateSelectionUI: updateSelectionUI, + toggleSelectAll: toggleSelectAll, + clearSelection: clearSelection, + downloadSelected: downloadSelected, + getSelectionCount: getSelectionCount + }; +})(); diff --git a/src/server/web/static/js/index/series-manager.js b/src/server/web/static/js/index/series-manager.js new file mode 100644 index 0000000..bfd6e8a --- /dev/null +++ b/src/server/web/static/js/index/series-manager.js @@ -0,0 +1,302 @@ +/** + * AniWorld - Series Manager Module + * + * Manages series data, filtering, sorting, and rendering. 
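+ * Normalizes both API formats: an array of AnimeSummary objects (key, name, site, folder, missing_episodes per season) and the legacy {status, series} wrapper.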
+ * + * Dependencies: constants.js, api-client.js, ui-utils.js + */ + +var AniWorld = window.AniWorld || {}; + +AniWorld.SeriesManager = (function() { + 'use strict'; + + const API = AniWorld.Constants.API; + + // State + let seriesData = []; + let filteredSeriesData = []; + let showMissingOnly = false; + let sortAlphabetical = false; + + /** + * Initialize the series manager + */ + function init() { + bindEvents(); + } + + /** + * Bind UI events for filtering and sorting + */ + function bindEvents() { + const missingOnlyBtn = document.getElementById('show-missing-only'); + if (missingOnlyBtn) { + missingOnlyBtn.addEventListener('click', toggleMissingOnlyFilter); + } + + const sortBtn = document.getElementById('sort-alphabetical'); + if (sortBtn) { + sortBtn.addEventListener('click', toggleAlphabeticalSort); + } + } + + /** + * Load series from API + * @returns {Promise} Array of series data + */ + async function loadSeries() { + try { + AniWorld.UI.showLoading(); + + const response = await AniWorld.ApiClient.get(API.ANIME_LIST); + + if (!response) { + return []; + } + + const data = await response.json(); + + // Check if response has the expected format + if (Array.isArray(data)) { + // API returns array of AnimeSummary objects + seriesData = data.map(function(anime) { + // Count total missing episodes from the episode dictionary + const episodeDict = anime.missing_episodes || {}; + const totalMissing = Object.values(episodeDict).reduce( + function(sum, episodes) { + return sum + (Array.isArray(episodes) ? episodes.length : 0); + }, + 0 + ); + + return { + key: anime.key, + name: anime.name, + site: anime.site, + folder: anime.folder, + episodeDict: episodeDict, + missing_episodes: totalMissing, + has_missing: anime.has_missing || totalMissing > 0 + }; + }); + } else if (data.status === 'success') { + // Legacy format support + seriesData = data.series; + } else { + AniWorld.UI.showToast('Error loading series: ' + (data.message || 'Unknown error'), 'error'); + return []; + } + + applyFiltersAndSort(); + renderSeries(); + return seriesData; + } catch (error) { + console.error('Error loading series:', error); + AniWorld.UI.showToast('Failed to load series', 'error'); + return []; + } finally { + AniWorld.UI.hideLoading(); + } + } + + /** + * Toggle missing episodes only filter + */ + function toggleMissingOnlyFilter() { + showMissingOnly = !showMissingOnly; + const button = document.getElementById('show-missing-only'); + + button.setAttribute('data-active', showMissingOnly); + button.classList.toggle('active', showMissingOnly); + + const icon = button.querySelector('i'); + const text = button.querySelector('span'); + + if (showMissingOnly) { + icon.className = 'fas fa-filter-circle-xmark'; + text.textContent = 'Show All Series'; + } else { + icon.className = 'fas fa-filter'; + text.textContent = 'Missing Episodes Only'; + } + + applyFiltersAndSort(); + renderSeries(); + + // Clear selection when filter changes + if (AniWorld.SelectionManager) { + AniWorld.SelectionManager.clearSelection(); + } + } + + /** + * Toggle alphabetical sorting + */ + function toggleAlphabeticalSort() { + sortAlphabetical = !sortAlphabetical; + const button = document.getElementById('sort-alphabetical'); + + button.setAttribute('data-active', sortAlphabetical); + button.classList.toggle('active', sortAlphabetical); + + const icon = button.querySelector('i'); + const text = button.querySelector('span'); + + if (sortAlphabetical) { + icon.className = 'fas fa-sort-alpha-up'; + text.textContent = 'Default Sort'; + } 
else { + icon.className = 'fas fa-sort-alpha-down'; + text.textContent = 'A-Z Sort'; + } + + applyFiltersAndSort(); + renderSeries(); + } + + /** + * Apply current filters and sorting to series data + */ + function applyFiltersAndSort() { + let filtered = seriesData.slice(); + + // Sort based on the current sorting mode + filtered.sort(function(a, b) { + if (sortAlphabetical) { + // Pure alphabetical sorting + return AniWorld.UI.getDisplayName(a).localeCompare(AniWorld.UI.getDisplayName(b)); + } else { + // Default sorting: missing episodes first (descending), then by name + if (a.missing_episodes > 0 && b.missing_episodes === 0) return -1; + if (a.missing_episodes === 0 && b.missing_episodes > 0) return 1; + + // If both have missing episodes, sort by count (descending) + if (a.missing_episodes > 0 && b.missing_episodes > 0) { + if (a.missing_episodes !== b.missing_episodes) { + return b.missing_episodes - a.missing_episodes; + } + } + + // For series with same missing episode status, maintain stable order + return 0; + } + }); + + // Apply missing episodes filter + if (showMissingOnly) { + filtered = filtered.filter(function(serie) { + return serie.missing_episodes > 0; + }); + } + + filteredSeriesData = filtered; + } + + /** + * Render series cards in the grid + */ + function renderSeries() { + const grid = document.getElementById('series-grid'); + const dataToRender = filteredSeriesData.length > 0 ? filteredSeriesData : + (seriesData.length > 0 ? seriesData : []); + + if (dataToRender.length === 0) { + const message = showMissingOnly ? + 'No series with missing episodes found.' : + 'No series found. Try searching for anime or rescanning your directory.'; + + grid.innerHTML = + '
<div class="empty-state">' +
                '<p>' + message + '</p>' +
            '</div>
'; + return; + } + + grid.innerHTML = dataToRender.map(function(serie) { + return createSerieCard(serie); + }).join(''); + + // Bind checkbox events + grid.querySelectorAll('.series-checkbox').forEach(function(checkbox) { + checkbox.addEventListener('change', function(e) { + if (AniWorld.SelectionManager) { + AniWorld.SelectionManager.toggleSerieSelection(e.target.dataset.key, e.target.checked); + } + }); + }); + } + + /** + * Create HTML for a series card + * @param {Object} serie - Series data object + * @returns {string} HTML string + */ + function createSerieCard(serie) { + const isSelected = AniWorld.SelectionManager ? AniWorld.SelectionManager.isSelected(serie.key) : false; + const hasMissingEpisodes = serie.missing_episodes > 0; + const canBeSelected = hasMissingEpisodes; + + return '
<div class="series-card' + (isSelected ? ' selected' : '') + '" data-key="' + serie.key + '">' +
            '<input type="checkbox" class="series-checkbox" data-key="' + serie.key + '"' +
                (isSelected ? ' checked' : '') + (canBeSelected ? '' : ' disabled') + '>' +
            '<div class="series-info">' +
                '<h3>' + AniWorld.UI.escapeHtml(AniWorld.UI.getDisplayName(serie)) + '</h3>' +
                '<div class="series-folder">' + AniWorld.UI.escapeHtml(serie.folder) + '</div>' +
            '</div>' +
            '<div class="series-status">' +
                '<span>' + (hasMissingEpisodes ? serie.missing_episodes + ' missing episodes' : 'Complete') + '</span>' +
                '<span class="series-site">' + serie.site + '</span>' +
            '</div>' +
        '</div>
'; + } + + /** + * Get all series data + * @returns {Array} Series data array + */ + function getSeriesData() { + return seriesData; + } + + /** + * Get filtered series data + * @returns {Array} Filtered series data array + */ + function getFilteredSeriesData() { + return filteredSeriesData; + } + + /** + * Find a series by key + * @param {string} key - Series key + * @returns {Object|undefined} Series object or undefined + */ + function findByKey(key) { + return seriesData.find(function(s) { + return s.key === key; + }); + } + + // Public API + return { + init: init, + loadSeries: loadSeries, + renderSeries: renderSeries, + applyFiltersAndSort: applyFiltersAndSort, + getSeriesData: getSeriesData, + getFilteredSeriesData: getFilteredSeriesData, + findByKey: findByKey + }; +})(); diff --git a/src/server/web/static/js/index/socket-handler.js b/src/server/web/static/js/index/socket-handler.js new file mode 100644 index 0000000..d32d902 --- /dev/null +++ b/src/server/web/static/js/index/socket-handler.js @@ -0,0 +1,421 @@ +/** + * AniWorld - Socket Handler Module for Index Page + * + * Handles WebSocket events specific to the index page. + * + * Dependencies: constants.js, websocket-client.js, ui-utils.js, scan-manager.js, series-manager.js + */ + +var AniWorld = window.AniWorld || {}; + +AniWorld.IndexSocketHandler = (function() { + 'use strict'; + + const WS_EVENTS = AniWorld.Constants.WS_EVENTS; + + // State + let isDownloading = false; + let isPaused = false; + let localization = null; + + /** + * Initialize socket handler + * @param {Object} localizationObj - Localization object + */ + function init(localizationObj) { + localization = localizationObj; + setupSocketHandlers(); + } + + /** + * Get localized text + */ + function getText(key) { + if (localization && localization.getText) { + return localization.getText(key); + } + // Fallback text + const fallbacks = { + 'connected-server': 'Connected to server', + 'disconnected-server': 'Disconnected from server', + 'download-completed': 'Download completed', + 'download-failed': 'Download failed', + 'paused': 'Paused', + 'downloading': 'Downloading...', + 'connected': 'Connected', + 'disconnected': 'Disconnected' + }; + return fallbacks[key] || key; + } + + /** + * Set up WebSocket event handlers + */ + function setupSocketHandlers() { + const socket = AniWorld.WebSocketClient.getSocket(); + if (!socket) { + console.warn('Socket not available for handler setup'); + return; + } + + // Connection events + socket.on('connect', function() { + AniWorld.UI.showToast(getText('connected-server'), 'success'); + updateConnectionStatus(true); + AniWorld.ScanManager.checkActiveScanStatus(); + }); + + socket.on('disconnect', function() { + AniWorld.UI.showToast(getText('disconnected-server'), 'warning'); + updateConnectionStatus(false); + }); + + // Scan events + socket.on(WS_EVENTS.SCAN_STARTED, function(data) { + console.log('Scan started:', data); + AniWorld.ScanManager.showScanProgressOverlay(data); + AniWorld.ScanManager.updateProcessStatus('rescan', true); + }); + + socket.on(WS_EVENTS.SCAN_PROGRESS, function(data) { + console.log('Scan progress:', data); + AniWorld.ScanManager.updateScanProgressOverlay(data); + }); + + // Handle both legacy and new scan complete events + const handleScanComplete = function(data) { + console.log('Scan completed:', data); + AniWorld.ScanManager.hideScanProgressOverlay(data); + AniWorld.UI.showToast('Scan completed successfully', 'success'); + AniWorld.ScanManager.updateProcessStatus('rescan', false); + 
AniWorld.SeriesManager.loadSeries(); + }; + socket.on(WS_EVENTS.SCAN_COMPLETED, handleScanComplete); + socket.on(WS_EVENTS.SCAN_COMPLETE, handleScanComplete); + + // Handle scan errors + const handleScanError = function(data) { + AniWorld.ConfigManager.hideStatus(); + AniWorld.UI.showToast('Scan error: ' + (data.message || data.error), 'error'); + AniWorld.ScanManager.updateProcessStatus('rescan', false, true); + }; + socket.on(WS_EVENTS.SCAN_ERROR, handleScanError); + socket.on(WS_EVENTS.SCAN_FAILED, handleScanError); + + // Scheduled scan events + socket.on(WS_EVENTS.SCHEDULED_RESCAN_STARTED, function() { + AniWorld.UI.showToast('Scheduled rescan started', 'info'); + AniWorld.ScanManager.updateProcessStatus('rescan', true); + }); + + socket.on(WS_EVENTS.SCHEDULED_RESCAN_COMPLETED, function(data) { + AniWorld.UI.showToast('Scheduled rescan completed successfully', 'success'); + AniWorld.ScanManager.updateProcessStatus('rescan', false); + AniWorld.SeriesManager.loadSeries(); + }); + + socket.on(WS_EVENTS.SCHEDULED_RESCAN_ERROR, function(data) { + AniWorld.UI.showToast('Scheduled rescan error: ' + data.error, 'error'); + AniWorld.ScanManager.updateProcessStatus('rescan', false, true); + }); + + socket.on(WS_EVENTS.SCHEDULED_RESCAN_SKIPPED, function(data) { + AniWorld.UI.showToast('Scheduled rescan skipped: ' + data.reason, 'warning'); + }); + + socket.on(WS_EVENTS.AUTO_DOWNLOAD_STARTED, function(data) { + AniWorld.UI.showToast('Auto-download started after scheduled rescan', 'info'); + AniWorld.ScanManager.updateProcessStatus('download', true); + }); + + socket.on(WS_EVENTS.AUTO_DOWNLOAD_ERROR, function(data) { + AniWorld.UI.showToast('Auto-download error: ' + data.error, 'error'); + AniWorld.ScanManager.updateProcessStatus('download', false, true); + }); + + // Download events + socket.on(WS_EVENTS.DOWNLOAD_STARTED, function(data) { + isDownloading = true; + isPaused = false; + AniWorld.ScanManager.updateProcessStatus('download', true); + showDownloadQueue(data); + showStatus('Starting download of ' + data.total_series + ' series...', true, true); + }); + + socket.on(WS_EVENTS.DOWNLOAD_PROGRESS, function(data) { + let status = ''; + let percent = 0; + + if (data.progress !== undefined) { + percent = data.progress; + status = 'Downloading: ' + percent.toFixed(1) + '%'; + + if (data.speed_mbps && data.speed_mbps > 0) { + status += ' (' + data.speed_mbps.toFixed(1) + ' Mbps)'; + } + + if (data.eta_seconds && data.eta_seconds > 0) { + const eta = AniWorld.UI.formatETA(data.eta_seconds); + status += ' - ETA: ' + eta; + } + } else if (data.total_bytes) { + percent = ((data.downloaded_bytes || 0) / data.total_bytes * 100); + status = 'Downloading: ' + percent.toFixed(1) + '%'; + } else if (data.downloaded_mb !== undefined) { + status = 'Downloaded: ' + data.downloaded_mb.toFixed(1) + ' MB'; + } else { + status = 'Downloading: ' + (data.percent || '0%'); + } + + if (percent > 0) { + updateProgress(percent, status); + } else { + updateStatus(status); + } + }); + + socket.on(WS_EVENTS.DOWNLOAD_COMPLETED, function(data) { + isDownloading = false; + isPaused = false; + hideDownloadQueue(); + AniWorld.ConfigManager.hideStatus(); + AniWorld.UI.showToast(getText('download-completed'), 'success'); + AniWorld.SeriesManager.loadSeries(); + AniWorld.SelectionManager.clearSelection(); + }); + + socket.on(WS_EVENTS.DOWNLOAD_ERROR, function(data) { + isDownloading = false; + isPaused = false; + hideDownloadQueue(); + AniWorld.ConfigManager.hideStatus(); + AniWorld.UI.showToast(getText('download-failed') + ': ' + 
data.message, 'error'); + }); + + // Download queue events + socket.on(WS_EVENTS.DOWNLOAD_QUEUE_COMPLETED, function() { + AniWorld.ScanManager.updateProcessStatus('download', false); + AniWorld.UI.showToast('All downloads completed!', 'success'); + }); + + socket.on(WS_EVENTS.DOWNLOAD_STOP_REQUESTED, function() { + AniWorld.UI.showToast('Stopping downloads...', 'info'); + }); + + socket.on(WS_EVENTS.DOWNLOAD_STOPPED, function() { + AniWorld.ScanManager.updateProcessStatus('download', false); + AniWorld.UI.showToast('Downloads stopped', 'success'); + }); + + socket.on(WS_EVENTS.DOWNLOAD_QUEUE_UPDATE, function(data) { + updateDownloadQueue(data); + }); + + socket.on(WS_EVENTS.DOWNLOAD_EPISODE_UPDATE, function(data) { + updateCurrentEpisode(data); + }); + + socket.on(WS_EVENTS.DOWNLOAD_SERIES_COMPLETED, function(data) { + updateDownloadProgress(data); + }); + + // Download control events + socket.on(WS_EVENTS.DOWNLOAD_PAUSED, function() { + isPaused = true; + updateStatus(getText('paused')); + }); + + socket.on(WS_EVENTS.DOWNLOAD_RESUMED, function() { + isPaused = false; + updateStatus(getText('downloading')); + }); + + socket.on(WS_EVENTS.DOWNLOAD_CANCELLED, function() { + isDownloading = false; + isPaused = false; + hideDownloadQueue(); + AniWorld.ConfigManager.hideStatus(); + AniWorld.UI.showToast('Download cancelled', 'warning'); + }); + } + + /** + * Update connection status display + */ + function updateConnectionStatus(connected) { + const indicator = document.getElementById('connection-status-display'); + if (indicator) { + const statusIndicator = indicator.querySelector('.status-indicator'); + const statusText = indicator.querySelector('.status-text'); + + if (connected) { + statusIndicator.classList.add('connected'); + statusText.textContent = getText('connected'); + } else { + statusIndicator.classList.remove('connected'); + statusText.textContent = getText('disconnected'); + } + } + } + + /** + * Show status panel + */ + function showStatus(message, showProgress, showControls) { + showProgress = showProgress || false; + showControls = showControls || false; + + const panel = document.getElementById('status-panel'); + const messageEl = document.getElementById('status-message'); + const progressContainer = document.getElementById('progress-container'); + const controlsContainer = document.getElementById('download-controls'); + + messageEl.textContent = message; + progressContainer.classList.toggle('hidden', !showProgress); + controlsContainer.classList.toggle('hidden', !showControls); + + if (showProgress) { + updateProgress(0); + } + + panel.classList.remove('hidden'); + } + + /** + * Update status message + */ + function updateStatus(message) { + document.getElementById('status-message').textContent = message; + } + + /** + * Update progress bar + */ + function updateProgress(percent, message) { + const fill = document.getElementById('progress-fill'); + const text = document.getElementById('progress-text'); + + fill.style.width = percent + '%'; + text.textContent = message || percent + '%'; + } + + /** + * Show download queue + */ + function showDownloadQueue(data) { + const queueSection = document.getElementById('download-queue-section'); + const queueProgress = document.getElementById('queue-progress'); + + queueProgress.textContent = '0/' + data.total_series + ' series'; + updateDownloadQueue({ + queue: data.queue || [], + current_downloading: null, + stats: { + completed_series: 0, + total_series: data.total_series + } + }); + + queueSection.classList.remove('hidden'); + } + + 
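+ 
+    // Payload sketch for the DOWNLOAD_PROGRESS handler above (field names are
+    // the ones that handler reads; the values are illustrative only):
+    //   { progress: 42.5, speed_mbps: 3.2, eta_seconds: 95 }
+    // would render as:
+    //   updateProgress(42.5, 'Downloading: 42.5% (3.2 Mbps) - ETA: ' + AniWorld.UI.formatETA(95))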
/** + * Hide download queue + */ + function hideDownloadQueue() { + const queueSection = document.getElementById('download-queue-section'); + const currentDownload = document.getElementById('current-download'); + + queueSection.classList.add('hidden'); + currentDownload.classList.add('hidden'); + } + + /** + * Update download queue display + */ + function updateDownloadQueue(data) { + const queueList = document.getElementById('queue-list'); + const currentDownload = document.getElementById('current-download'); + const queueProgress = document.getElementById('queue-progress'); + + // Update overall progress + if (data.stats) { + queueProgress.textContent = data.stats.completed_series + '/' + data.stats.total_series + ' series'; + } + + // Update current downloading + if (data.current_downloading) { + currentDownload.classList.remove('hidden'); + document.getElementById('current-serie-name').textContent = AniWorld.UI.getDisplayName(data.current_downloading); + document.getElementById('current-episode').textContent = data.current_downloading.missing_episodes + ' episodes remaining'; + } else { + currentDownload.classList.add('hidden'); + } + + // Update queue list + if (data.queue && data.queue.length > 0) { + queueList.innerHTML = data.queue.map(function(serie, index) { + return '
<div class="queue-item">' +
                    '<span class="queue-position">' + (index + 1) + '</span>' +
                    '<span class="queue-name">' + AniWorld.UI.escapeHtml(AniWorld.UI.getDisplayName(serie)) + '</span>' +
                    '<span class="queue-status">Waiting</span>' +
                '</div>
'; + }).join(''); + } else { + queueList.innerHTML = '
<div class="queue-empty">No series in queue</div>
'; + } + } + + /** + * Update current episode display + */ + function updateCurrentEpisode(data) { + const currentEpisode = document.getElementById('current-episode'); + const progressFill = document.getElementById('current-progress-fill'); + const progressText = document.getElementById('current-progress-text'); + + if (currentEpisode && data.episode) { + currentEpisode.textContent = data.episode + ' (' + data.episode_progress + ')'; + } + + if (data.overall_progress && progressFill && progressText) { + const parts = data.overall_progress.split('/'); + const current = parseInt(parts[0]); + const total = parseInt(parts[1]); + const percent = total > 0 ? (current / total * 100).toFixed(1) : 0; + + progressFill.style.width = percent + '%'; + progressText.textContent = percent + '%'; + } + } + + /** + * Update download progress display + */ + function updateDownloadProgress(data) { + const queueProgress = document.getElementById('queue-progress'); + + if (queueProgress && data.completed_series && data.total_series) { + queueProgress.textContent = data.completed_series + '/' + data.total_series + ' series'; + } + + AniWorld.UI.showToast('Completed: ' + data.serie, 'success'); + } + + /** + * Get download state + */ + function getDownloadState() { + return { + isDownloading: isDownloading, + isPaused: isPaused + }; + } + + // Public API + return { + init: init, + updateConnectionStatus: updateConnectionStatus, + getDownloadState: getDownloadState + }; +})(); diff --git a/src/server/web/static/js/queue.js b/src/server/web/static/js/queue.js index 08a81df..bd0c386 100644 --- a/src/server/web/static/js/queue.js +++ b/src/server/web/static/js/queue.js @@ -32,8 +32,9 @@ class QueueManager { console.log('Connected to server'); // Subscribe to rooms for targeted updates + // Valid rooms: downloads, queue, scan, system, errors this.socket.join('downloads'); - this.socket.join('download_progress'); + this.socket.join('queue'); this.showToast('Connected to server', 'success'); }); diff --git a/src/server/web/static/js/queue/progress-handler.js b/src/server/web/static/js/queue/progress-handler.js new file mode 100644 index 0000000..ba7aaf2 --- /dev/null +++ b/src/server/web/static/js/queue/progress-handler.js @@ -0,0 +1,189 @@ +/** + * AniWorld - Progress Handler Module + * + * Handles real-time download progress updates. 
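+ * Updates that arrive before their card is rendered are parked in a Map and
+ * replayed once the queue re-renders.
+ *
+ * @example
+ * // Sketch: feed a progress event; on a miss, reload the queue so the
+ * // pending update can be replayed (QueueApp.loadQueueData does this).
+ * var handled = AniWorld.ProgressHandler.updateDownloadProgress(event);
+ * if (!handled) { AniWorld.QueueApp.loadQueueData(); }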
+ * + * Dependencies: constants.js, ui-utils.js + */ + +var AniWorld = window.AniWorld || {}; + +AniWorld.ProgressHandler = (function() { + 'use strict'; + + // Store progress updates waiting for cards + let pendingProgressUpdates = new Map(); + + /** + * Update download progress in real-time + * @param {Object} data - Progress data from WebSocket + */ + function updateDownloadProgress(data) { + console.log('updateDownloadProgress called with:', JSON.stringify(data, null, 2)); + + // Extract download ID - prioritize metadata.item_id (actual item ID) + let downloadId = null; + + // First try metadata.item_id (this is the actual download item ID) + if (data.metadata && data.metadata.item_id) { + downloadId = data.metadata.item_id; + } + + // Fallback to other ID fields + if (!downloadId) { + downloadId = data.item_id || data.download_id; + } + + // If ID starts with "download_", extract the actual ID + if (!downloadId && data.id) { + if (data.id.startsWith('download_')) { + downloadId = data.id.substring(9); + } else { + downloadId = data.id; + } + } + + // Check if data is wrapped in another 'data' property + if (!downloadId && data.data) { + if (data.data.metadata && data.data.metadata.item_id) { + downloadId = data.data.metadata.item_id; + } else if (data.data.item_id) { + downloadId = data.data.item_id; + } else if (data.data.id && data.data.id.startsWith('download_')) { + downloadId = data.data.id.substring(9); + } else { + downloadId = data.data.id || data.data.download_id; + } + data = data.data; + } + + if (!downloadId) { + console.warn('No download ID in progress data'); + console.warn('Data structure:', data); + console.warn('Available keys:', Object.keys(data)); + return; + } + + console.log('Looking for download card with ID: ' + downloadId); + + // Find the download card in active downloads + const card = document.querySelector('[data-download-id="' + downloadId + '"]'); + if (!card) { + console.warn('Download card not found for ID: ' + downloadId); + + // Debug: Log all existing download cards + const allCards = document.querySelectorAll('[data-download-id]'); + console.log('Found ' + allCards.length + ' download cards:'); + allCards.forEach(function(c) { + console.log(' - ' + c.getAttribute('data-download-id')); + }); + + // Store this progress update to retry after queue loads + console.log('Storing progress update for ' + downloadId + ' to retry after reload'); + pendingProgressUpdates.set(downloadId, data); + + return false; + } + + console.log('Found download card for ID: ' + downloadId + ', updating progress'); + + // Extract progress information + const progress = data.progress || data; + const percent = progress.percent || 0; + const metadata = progress.metadata || data.metadata || {}; + + // Check data format + let speed; + + if (progress.downloaded_mb !== undefined && progress.total_mb !== undefined) { + // yt-dlp detailed format + speed = progress.speed_mbps ? progress.speed_mbps.toFixed(1) : '0.0'; + } else if (progress.current !== undefined && progress.total !== undefined) { + // ProgressService basic format + speed = metadata.speed_mbps ? metadata.speed_mbps.toFixed(1) : '0.0'; + } else { + // Fallback + speed = metadata.speed_mbps ? 
metadata.speed_mbps.toFixed(1) : '0.0'; + } + + // Update progress bar + const progressFill = card.querySelector('.progress-fill'); + if (progressFill) { + progressFill.style.width = percent + '%'; + } + + // Update progress text + const progressInfo = card.querySelector('.progress-info'); + if (progressInfo) { + const percentSpan = progressInfo.querySelector('span:first-child'); + const speedSpan = progressInfo.querySelector('.download-speed'); + + if (percentSpan) { + percentSpan.textContent = percent > 0 ? percent.toFixed(1) + '%' : 'Starting...'; + } + if (speedSpan) { + speedSpan.textContent = speed + ' MB/s'; + } + } + + console.log('Updated progress for ' + downloadId + ': ' + percent.toFixed(1) + '%'); + return true; + } + + /** + * Process pending progress updates + */ + function processPendingProgressUpdates() { + if (pendingProgressUpdates.size === 0) { + return; + } + + console.log('Processing ' + pendingProgressUpdates.size + ' pending progress updates...'); + + // Process each pending update + const processed = []; + pendingProgressUpdates.forEach(function(data, downloadId) { + // Check if card now exists + const card = document.querySelector('[data-download-id="' + downloadId + '"]'); + if (card) { + console.log('Retrying progress update for ' + downloadId); + updateDownloadProgress(data); + processed.push(downloadId); + } else { + console.log('Card still not found for ' + downloadId + ', will retry on next reload'); + } + }); + + // Remove processed updates + processed.forEach(function(id) { + pendingProgressUpdates.delete(id); + }); + + if (processed.length > 0) { + console.log('Successfully processed ' + processed.length + ' pending updates'); + } + } + + /** + * Clear all pending progress updates + */ + function clearPendingUpdates() { + pendingProgressUpdates.clear(); + } + + /** + * Clear pending update for specific download + * @param {string} downloadId - Download ID + */ + function clearPendingUpdate(downloadId) { + pendingProgressUpdates.delete(downloadId); + } + + // Public API + return { + updateDownloadProgress: updateDownloadProgress, + processPendingProgressUpdates: processPendingProgressUpdates, + clearPendingUpdates: clearPendingUpdates, + clearPendingUpdate: clearPendingUpdate + }; +})(); diff --git a/src/server/web/static/js/queue/queue-api.js b/src/server/web/static/js/queue/queue-api.js new file mode 100644 index 0000000..b196a3d --- /dev/null +++ b/src/server/web/static/js/queue/queue-api.js @@ -0,0 +1,159 @@ +/** + * AniWorld - Queue API Module + * + * Handles API requests for the download queue. 
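+ *
+ * @example
+ * // Sketch: load the queue and hand it to the renderer (both modules
+ * // are part of this page's bundle).
+ * const data = await AniWorld.QueueAPI.loadQueueData();
+ * if (data) { AniWorld.QueueRenderer.updateQueueDisplay(data); }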
+ * + * Dependencies: constants.js, api-client.js + */ + +var AniWorld = window.AniWorld || {}; + +AniWorld.QueueAPI = (function() { + 'use strict'; + + const API = AniWorld.Constants.API; + + /** + * Load queue data from server + * @returns {Promise} Queue data + */ + async function loadQueueData() { + try { + const response = await AniWorld.ApiClient.get(API.QUEUE_STATUS); + if (!response) { + return null; + } + + const data = await response.json(); + + // API returns nested structure with 'status' and 'statistics' + // Transform it to the expected flat structure + return { + ...data.status, + statistics: data.statistics + }; + } catch (error) { + console.error('Error loading queue data:', error); + return null; + } + } + + /** + * Start queue processing + * @returns {Promise} Response data + */ + async function startQueue() { + try { + const response = await AniWorld.ApiClient.post(API.QUEUE_START, {}); + if (!response) return null; + return await response.json(); + } catch (error) { + console.error('Error starting queue:', error); + throw error; + } + } + + /** + * Stop queue processing + * @returns {Promise} Response data + */ + async function stopQueue() { + try { + const response = await AniWorld.ApiClient.post(API.QUEUE_STOP, {}); + if (!response) return null; + return await response.json(); + } catch (error) { + console.error('Error stopping queue:', error); + throw error; + } + } + + /** + * Remove item from queue + * @param {string} downloadId - Download item ID + * @returns {Promise} Success status + */ + async function removeFromQueue(downloadId) { + try { + const response = await AniWorld.ApiClient.delete(API.QUEUE_REMOVE + '/' + downloadId); + if (!response) return false; + return response.status === 204; + } catch (error) { + console.error('Error removing from queue:', error); + throw error; + } + } + + /** + * Retry failed downloads + * @param {Array} itemIds - Array of download item IDs + * @returns {Promise} Response data + */ + async function retryDownloads(itemIds) { + try { + const response = await AniWorld.ApiClient.post(API.QUEUE_RETRY, { item_ids: itemIds }); + if (!response) return null; + return await response.json(); + } catch (error) { + console.error('Error retrying downloads:', error); + throw error; + } + } + + /** + * Clear completed downloads + * @returns {Promise} Response data + */ + async function clearCompleted() { + try { + const response = await AniWorld.ApiClient.delete(API.QUEUE_COMPLETED); + if (!response) return null; + return await response.json(); + } catch (error) { + console.error('Error clearing completed:', error); + throw error; + } + } + + /** + * Clear failed downloads + * @returns {Promise} Response data + */ + async function clearFailed() { + try { + const response = await AniWorld.ApiClient.delete(API.QUEUE_FAILED); + if (!response) return null; + return await response.json(); + } catch (error) { + console.error('Error clearing failed:', error); + throw error; + } + } + + /** + * Clear pending downloads + * @returns {Promise} Response data + */ + async function clearPending() { + try { + const response = await AniWorld.ApiClient.delete(API.QUEUE_PENDING); + if (!response) return null; + return await response.json(); + } catch (error) { + console.error('Error clearing pending:', error); + throw error; + } + } + + // Public API + return { + loadQueueData: loadQueueData, + startQueue: startQueue, + stopQueue: stopQueue, + removeFromQueue: removeFromQueue, + retryDownloads: retryDownloads, + clearCompleted: clearCompleted, + clearFailed: 
clearFailed, + clearPending: clearPending + }; +})(); diff --git a/src/server/web/static/js/queue/queue-init.js b/src/server/web/static/js/queue/queue-init.js new file mode 100644 index 0000000..7b23029 --- /dev/null +++ b/src/server/web/static/js/queue/queue-init.js @@ -0,0 +1,313 @@ +/** + * AniWorld - Queue Page Application Initializer + * + * Main entry point for the queue page. Initializes all modules. + * + * Dependencies: All shared and queue modules + */ + +var AniWorld = window.AniWorld || {}; + +AniWorld.QueueApp = (function() { + 'use strict'; + + /** + * Initialize the queue page application + */ + async function init() { + console.log('AniWorld Queue App initializing...'); + + // Check authentication first + const isAuthenticated = await AniWorld.Auth.checkAuth(); + if (!isAuthenticated) { + return; // Auth module handles redirect + } + + // Initialize theme + AniWorld.Theme.init(); + + // Initialize WebSocket connection + AniWorld.WebSocketClient.init(); + + // Initialize socket event handlers for this page + AniWorld.QueueSocketHandler.init(AniWorld.QueueApp); + + // Bind UI events + bindEvents(); + + // Load initial data + await loadQueueData(); + + console.log('AniWorld Queue App initialized successfully'); + } + + /** + * Bind UI event handlers + */ + function bindEvents() { + // Theme toggle + const themeToggle = document.getElementById('theme-toggle'); + if (themeToggle) { + themeToggle.addEventListener('click', function() { + AniWorld.Theme.toggle(); + }); + } + + // Queue management actions + const clearCompletedBtn = document.getElementById('clear-completed-btn'); + if (clearCompletedBtn) { + clearCompletedBtn.addEventListener('click', function() { + clearQueue('completed'); + }); + } + + const clearFailedBtn = document.getElementById('clear-failed-btn'); + if (clearFailedBtn) { + clearFailedBtn.addEventListener('click', function() { + clearQueue('failed'); + }); + } + + const clearPendingBtn = document.getElementById('clear-pending-btn'); + if (clearPendingBtn) { + clearPendingBtn.addEventListener('click', function() { + clearQueue('pending'); + }); + } + + const retryAllBtn = document.getElementById('retry-all-btn'); + if (retryAllBtn) { + retryAllBtn.addEventListener('click', retryAllFailed); + } + + // Download controls + const startQueueBtn = document.getElementById('start-queue-btn'); + if (startQueueBtn) { + startQueueBtn.addEventListener('click', startDownload); + } + + const stopQueueBtn = document.getElementById('stop-queue-btn'); + if (stopQueueBtn) { + stopQueueBtn.addEventListener('click', stopDownloads); + } + + // Modal events + const closeConfirm = document.getElementById('close-confirm'); + if (closeConfirm) { + closeConfirm.addEventListener('click', AniWorld.UI.hideConfirmModal); + } + + const confirmCancel = document.getElementById('confirm-cancel'); + if (confirmCancel) { + confirmCancel.addEventListener('click', AniWorld.UI.hideConfirmModal); + } + + const modalOverlay = document.querySelector('#confirm-modal .modal-overlay'); + if (modalOverlay) { + modalOverlay.addEventListener('click', AniWorld.UI.hideConfirmModal); + } + + // Logout functionality + const logoutBtn = document.getElementById('logout-btn'); + if (logoutBtn) { + logoutBtn.addEventListener('click', function() { + AniWorld.Auth.logout(AniWorld.UI.showToast); + }); + } + } + + /** + * Load queue data and update display + */ + async function loadQueueData() { + const data = await AniWorld.QueueAPI.loadQueueData(); + if (data) { + AniWorld.QueueRenderer.updateQueueDisplay(data); + 
AniWorld.ProgressHandler.processPendingProgressUpdates(); + } + } + + /** + * Clear queue by type + * @param {string} type - 'completed', 'failed', or 'pending' + */ + async function clearQueue(type) { + const titles = { + completed: 'Clear Completed Downloads', + failed: 'Clear Failed Downloads', + pending: 'Remove All Pending Downloads' + }; + + const messages = { + completed: 'Are you sure you want to clear all completed downloads?', + failed: 'Are you sure you want to clear all failed downloads?', + pending: 'Are you sure you want to remove all pending downloads from the queue?' + }; + + const confirmed = await AniWorld.UI.showConfirmModal(titles[type], messages[type]); + if (!confirmed) return; + + try { + let data; + if (type === 'completed') { + data = await AniWorld.QueueAPI.clearCompleted(); + AniWorld.UI.showToast('Cleared ' + (data?.count || 0) + ' completed downloads', 'success'); + } else if (type === 'failed') { + data = await AniWorld.QueueAPI.clearFailed(); + AniWorld.UI.showToast('Cleared ' + (data?.count || 0) + ' failed downloads', 'success'); + } else if (type === 'pending') { + data = await AniWorld.QueueAPI.clearPending(); + AniWorld.UI.showToast('Removed ' + (data?.count || 0) + ' pending downloads', 'success'); + } + await loadQueueData(); + } catch (error) { + console.error('Error clearing queue:', error); + AniWorld.UI.showToast('Failed to clear queue', 'error'); + } + } + + /** + * Retry a failed download + * @param {string} downloadId - Download item ID + */ + async function retryDownload(downloadId) { + try { + const data = await AniWorld.QueueAPI.retryDownloads([downloadId]); + AniWorld.UI.showToast('Retried ' + (data?.retried_count || 1) + ' download(s)', 'success'); + await loadQueueData(); + } catch (error) { + console.error('Error retrying download:', error); + AniWorld.UI.showToast('Failed to retry download', 'error'); + } + } + + /** + * Retry all failed downloads + */ + async function retryAllFailed() { + const confirmed = await AniWorld.UI.showConfirmModal( + 'Retry All Failed Downloads', + 'Are you sure you want to retry all failed downloads?' 
+ ); + if (!confirmed) return; + + try { + // Get all failed download IDs + const failedCards = document.querySelectorAll('#failed-downloads .download-card.failed'); + const itemIds = Array.from(failedCards).map(function(card) { + return card.dataset.id; + }).filter(function(id) { + return id; + }); + + if (itemIds.length === 0) { + AniWorld.UI.showToast('No failed downloads to retry', 'info'); + return; + } + + const data = await AniWorld.QueueAPI.retryDownloads(itemIds); + AniWorld.UI.showToast('Retried ' + (data?.retried_count || itemIds.length) + ' download(s)', 'success'); + await loadQueueData(); + } catch (error) { + console.error('Error retrying failed downloads:', error); + AniWorld.UI.showToast('Failed to retry downloads', 'error'); + } + } + + /** + * Remove item from queue + * @param {string} downloadId - Download item ID + */ + async function removeFromQueue(downloadId) { + try { + const success = await AniWorld.QueueAPI.removeFromQueue(downloadId); + if (success) { + AniWorld.UI.showToast('Download removed from queue', 'success'); + await loadQueueData(); + } else { + AniWorld.UI.showToast('Failed to remove from queue', 'error'); + } + } catch (error) { + console.error('Error removing from queue:', error); + AniWorld.UI.showToast('Failed to remove from queue', 'error'); + } + } + + /** + * Start queue processing + */ + async function startDownload() { + try { + const data = await AniWorld.QueueAPI.startQueue(); + + if (data && data.status === 'success') { + AniWorld.UI.showToast('Queue processing started - all items will download automatically', 'success'); + + // Update UI + document.getElementById('start-queue-btn').style.display = 'none'; + document.getElementById('stop-queue-btn').style.display = 'inline-flex'; + document.getElementById('stop-queue-btn').disabled = false; + + await loadQueueData(); + } else { + AniWorld.UI.showToast('Failed to start queue: ' + (data?.message || 'Unknown error'), 'error'); + } + } catch (error) { + console.error('Error starting queue:', error); + AniWorld.UI.showToast('Failed to start queue processing', 'error'); + } + } + + /** + * Stop queue processing + */ + async function stopDownloads() { + try { + const data = await AniWorld.QueueAPI.stopQueue(); + + if (data && data.status === 'success') { + AniWorld.UI.showToast('Queue processing stopped', 'success'); + + // Update UI + document.getElementById('stop-queue-btn').style.display = 'none'; + document.getElementById('start-queue-btn').style.display = 'inline-flex'; + document.getElementById('start-queue-btn').disabled = false; + + await loadQueueData(); + } else { + AniWorld.UI.showToast('Failed to stop queue: ' + (data?.message || 'Unknown error'), 'error'); + } + } catch (error) { + console.error('Error stopping queue:', error); + AniWorld.UI.showToast('Failed to stop queue', 'error'); + } + } + + // Public API + return { + init: init, + loadQueueData: loadQueueData, + retryDownload: retryDownload, + removeFromQueue: removeFromQueue, + startDownload: startDownload, + stopDownloads: stopDownloads + }; +})(); + +// Initialize the application when DOM is loaded +document.addEventListener('DOMContentLoaded', function() { + AniWorld.QueueApp.init(); +}); + +// Expose for inline event handlers (backwards compatibility) +window.queueManager = { + retryDownload: function(id) { + return AniWorld.QueueApp.retryDownload(id); + }, + removeFromQueue: function(id) { + return AniWorld.QueueApp.removeFromQueue(id); + }, + removeFailedDownload: function(id) { + return 
AniWorld.QueueApp.removeFromQueue(id); + } +}; diff --git a/src/server/web/static/js/queue/queue-renderer.js b/src/server/web/static/js/queue/queue-renderer.js new file mode 100644 index 0000000..1fb8e22 --- /dev/null +++ b/src/server/web/static/js/queue/queue-renderer.js @@ -0,0 +1,335 @@ +/** + * AniWorld - Queue Renderer Module + * + * Handles rendering of queue items and statistics. + * + * Dependencies: constants.js, ui-utils.js + */ + +var AniWorld = window.AniWorld || {}; + +AniWorld.QueueRenderer = (function() { + 'use strict'; + + /** + * Update full queue display + * @param {Object} data - Queue data + */ + function updateQueueDisplay(data) { + // Update statistics + updateStatistics(data.statistics, data); + + // Update active downloads + renderActiveDownloads(data.active_downloads || []); + + // Update pending queue + renderPendingQueue(data.pending_queue || []); + + // Update completed downloads + renderCompletedDownloads(data.completed_downloads || []); + + // Update failed downloads + renderFailedDownloads(data.failed_downloads || []); + + // Update button states + updateButtonStates(data); + } + + /** + * Update statistics display + * @param {Object} stats - Statistics object + * @param {Object} data - Full queue data + */ + function updateStatistics(stats, data) { + const statistics = stats || {}; + + document.getElementById('total-items').textContent = statistics.total_items || 0; + document.getElementById('pending-items').textContent = (data.pending_queue || []).length; + document.getElementById('completed-items').textContent = statistics.completed_items || 0; + document.getElementById('failed-items').textContent = statistics.failed_items || 0; + + // Update section counts + document.getElementById('queue-count').textContent = (data.pending_queue || []).length; + document.getElementById('completed-count').textContent = statistics.completed_items || 0; + document.getElementById('failed-count').textContent = statistics.failed_items || 0; + + document.getElementById('current-speed').textContent = statistics.current_speed || '0 MB/s'; + document.getElementById('average-speed').textContent = statistics.average_speed || '0 MB/s'; + + // Format ETA + const etaElement = document.getElementById('eta-time'); + if (statistics.eta) { + const eta = new Date(statistics.eta); + const now = new Date(); + const diffMs = eta - now; + + if (diffMs > 0) { + const hours = Math.floor(diffMs / (1000 * 60 * 60)); + const minutes = Math.floor((diffMs % (1000 * 60 * 60)) / (1000 * 60)); + etaElement.textContent = hours + 'h ' + minutes + 'm'; + } else { + etaElement.textContent = 'Calculating...'; + } + } else { + etaElement.textContent = '--:--'; + } + } + + /** + * Render active downloads + * @param {Array} downloads - Active downloads array + */ + function renderActiveDownloads(downloads) { + const container = document.getElementById('active-downloads'); + + if (downloads.length === 0) { + container.innerHTML = + '
<div class="empty-state">' +
                '<p>No active downloads</p>' +
            '</div>
'; + return; + } + + container.innerHTML = downloads.map(function(download) { + return createActiveDownloadCard(download); + }).join(''); + } + + /** + * Create active download card HTML + * @param {Object} download - Download item + * @returns {string} HTML string + */ + function createActiveDownloadCard(download) { + const progress = download.progress || {}; + const progressPercent = progress.percent || 0; + const speed = progress.speed_mbps ? progress.speed_mbps.toFixed(1) + ' MB/s' : '0 MB/s'; + + const episodeNum = String(download.episode.episode).padStart(2, '0'); + const episodeTitle = download.episode.title || 'Episode ' + download.episode.episode; + + return '
<div class="download-card active" data-download-id="' + download.id + '">' +
            '<div class="download-info">' +
                '<h4>' + AniWorld.UI.escapeHtml(download.serie_name) + '</h4>' +
                '<div class="download-episode">' + AniWorld.UI.escapeHtml(download.episode.season) + 'x' + episodeNum + ' - ' +
                    AniWorld.UI.escapeHtml(episodeTitle) + '</div>' +
            '</div>' +
            '<div class="progress-bar">' +
                '<div class="progress-fill" style="width: ' + progressPercent + '%"></div>' +
            '</div>' +
            '<div class="progress-info">' +
                '<span>' + (progressPercent > 0 ? progressPercent.toFixed(1) + '%' : 'Starting...') + '</span>' +
                '<span class="download-speed">' + speed + '</span>' +
            '</div>' +
        '</div>
'; + } + + /** + * Render pending queue + * @param {Array} queue - Pending queue array + */ + function renderPendingQueue(queue) { + const container = document.getElementById('pending-queue'); + + if (queue.length === 0) { + container.innerHTML = + '
<div class="empty-state">' +
                '<p>No items in queue</p>' +
                '<span>Add episodes from the main page to start downloading</span>' +
            '</div>
'; + return; + } + + container.innerHTML = queue.map(function(item, index) { + return createPendingQueueCard(item, index); + }).join(''); + } + + /** + * Create pending queue card HTML + * @param {Object} download - Download item + * @param {number} index - Queue position + * @returns {string} HTML string + */ + function createPendingQueueCard(download, index) { + const addedAt = new Date(download.added_at).toLocaleString(); + const episodeNum = String(download.episode.episode).padStart(2, '0'); + const episodeTitle = download.episode.title || 'Episode ' + download.episode.episode; + + return '
<div class="download-card pending" data-download-id="' + download.id + '">' +
            '<span class="queue-position">' + (index + 1) + '</span>' +
            '<div class="download-info">' +
                '<h4>' + AniWorld.UI.escapeHtml(download.serie_name) + '</h4>' +
                '<div class="download-episode">' + AniWorld.UI.escapeHtml(download.episode.season) + 'x' + episodeNum + ' - ' +
                    AniWorld.UI.escapeHtml(episodeTitle) + '</div>' +
                '<span class="download-added">Added: ' + addedAt + '</span>' +
            '</div>' +
            '<button class="btn-remove" onclick="queueManager.removeFromQueue(\'' + download.id + '\')">Remove</button>' +
        '</div>
'; + } + + /** + * Render completed downloads + * @param {Array} downloads - Completed downloads array + */ + function renderCompletedDownloads(downloads) { + const container = document.getElementById('completed-downloads'); + + if (downloads.length === 0) { + container.innerHTML = + '
<div class="empty-state">' +
                '<p>No completed downloads</p>' +
            '</div>
'; + return; + } + + container.innerHTML = downloads.map(function(download) { + return createCompletedDownloadCard(download); + }).join(''); + } + + /** + * Create completed download card HTML + * @param {Object} download - Download item + * @returns {string} HTML string + */ + function createCompletedDownloadCard(download) { + const completedAt = new Date(download.completed_at).toLocaleString(); + const duration = AniWorld.UI.calculateDuration(download.started_at, download.completed_at); + const episodeNum = String(download.episode.episode).padStart(2, '0'); + const episodeTitle = download.episode.title || 'Episode ' + download.episode.episode; + + return '
<div class="download-card completed" data-download-id="' + download.id + '">' +
            '<div class="download-info">' +
                '<h4>' + AniWorld.UI.escapeHtml(download.serie_name) + '</h4>' +
                '<div class="download-episode">' + AniWorld.UI.escapeHtml(download.episode.season) + 'x' + episodeNum + ' - ' +
                    AniWorld.UI.escapeHtml(episodeTitle) + '</div>' +
                '<span class="download-completed-at">Completed: ' + completedAt + ' (' + duration + ')</span>' +
            '</div>' +
        '</div>
'; + } + + /** + * Render failed downloads + * @param {Array} downloads - Failed downloads array + */ + function renderFailedDownloads(downloads) { + const container = document.getElementById('failed-downloads'); + + if (downloads.length === 0) { + container.innerHTML = + '
<div class="empty-state">' +
                '<p>No failed downloads</p>' +
            '</div>
'; + return; + } + + container.innerHTML = downloads.map(function(download) { + return createFailedDownloadCard(download); + }).join(''); + } + + /** + * Create failed download card HTML + * @param {Object} download - Download item + * @returns {string} HTML string + */ + function createFailedDownloadCard(download) { + const failedAt = new Date(download.completed_at).toLocaleString(); + const retryCount = download.retry_count || 0; + const episodeNum = String(download.episode.episode).padStart(2, '0'); + const episodeTitle = download.episode.title || 'Episode ' + download.episode.episode; + + return '
<div class="download-card failed" data-id="' + download.id + '">' +
            '<div class="download-info">' +
                '<h4>' + AniWorld.UI.escapeHtml(download.serie_name) + '</h4>' +
                '<div class="download-episode">' + AniWorld.UI.escapeHtml(download.episode.season) + 'x' + episodeNum + ' - ' +
                    AniWorld.UI.escapeHtml(episodeTitle) + '</div>' +
                '<span class="download-failed-at">Failed: ' + failedAt + (retryCount > 0 ? ' (Retry ' + retryCount + ')' : '') + '</span>' +
                (download.error ? '<span class="download-error">' + AniWorld.UI.escapeHtml(download.error) + '</span>' : '') +
            '</div>' +
            '<button class="btn-retry" onclick="queueManager.retryDownload(\'' + download.id + '\')">Retry</button>' +
            '<button class="btn-remove" onclick="queueManager.removeFailedDownload(\'' + download.id + '\')">Remove</button>' +
        '</div>
'; + } + + /** + * Update button states based on queue data + * @param {Object} data - Queue data + */ + function updateButtonStates(data) { + const hasActive = (data.active_downloads || []).length > 0; + const hasPending = (data.pending_queue || []).length > 0; + const hasFailed = (data.failed_downloads || []).length > 0; + const hasCompleted = (data.completed_downloads || []).length > 0; + + console.log('Button states update:', { + hasPending: hasPending, + pendingCount: (data.pending_queue || []).length, + hasActive: hasActive, + hasFailed: hasFailed, + hasCompleted: hasCompleted + }); + + // Enable start button only if there are pending items and no active downloads + document.getElementById('start-queue-btn').disabled = !hasPending || hasActive; + + // Show/hide start/stop buttons based on whether downloads are active + if (hasActive) { + document.getElementById('start-queue-btn').style.display = 'none'; + document.getElementById('stop-queue-btn').style.display = 'inline-flex'; + document.getElementById('stop-queue-btn').disabled = false; + } else { + document.getElementById('stop-queue-btn').style.display = 'none'; + document.getElementById('start-queue-btn').style.display = 'inline-flex'; + } + + document.getElementById('retry-all-btn').disabled = !hasFailed; + document.getElementById('clear-completed-btn').disabled = !hasCompleted; + document.getElementById('clear-failed-btn').disabled = !hasFailed; + + // Update clear pending button if it exists + const clearPendingBtn = document.getElementById('clear-pending-btn'); + if (clearPendingBtn) { + clearPendingBtn.disabled = !hasPending; + } + } + + // Public API + return { + updateQueueDisplay: updateQueueDisplay, + updateStatistics: updateStatistics, + renderActiveDownloads: renderActiveDownloads, + renderPendingQueue: renderPendingQueue, + renderCompletedDownloads: renderCompletedDownloads, + renderFailedDownloads: renderFailedDownloads, + updateButtonStates: updateButtonStates + }; +})(); diff --git a/src/server/web/static/js/queue/queue-socket-handler.js b/src/server/web/static/js/queue/queue-socket-handler.js new file mode 100644 index 0000000..994c88c --- /dev/null +++ b/src/server/web/static/js/queue/queue-socket-handler.js @@ -0,0 +1,161 @@ +/** + * AniWorld - Queue Socket Handler Module + * + * Handles WebSocket events specific to the queue page. 
+ * + * Dependencies: constants.js, websocket-client.js, ui-utils.js, queue-renderer.js, progress-handler.js + */ + +var AniWorld = window.AniWorld || {}; + +AniWorld.QueueSocketHandler = (function() { + 'use strict'; + + const WS_EVENTS = AniWorld.Constants.WS_EVENTS; + + // Reference to queue app for data reloading + let queueApp = null; + + /** + * Initialize socket handler + * @param {Object} app - Reference to queue app + */ + function init(app) { + queueApp = app; + setupSocketHandlers(); + } + + /** + * Set up WebSocket event handlers + */ + function setupSocketHandlers() { + const socket = AniWorld.WebSocketClient.getSocket(); + if (!socket) { + console.warn('Socket not available for handler setup'); + return; + } + + // Connection events + socket.on('connect', function() { + AniWorld.UI.showToast('Connected to server', 'success'); + }); + + socket.on('disconnect', function() { + AniWorld.UI.showToast('Disconnected from server', 'warning'); + }); + + // Queue update events - handle both old and new message types + socket.on('queue_updated', function(data) { + AniWorld.QueueRenderer.updateQueueDisplay(data); + }); + + socket.on('queue_status', function(data) { + // New backend sends queue_status messages with nested structure + if (data.status && data.statistics) { + const queueData = { + ...data.status, + statistics: data.statistics + }; + AniWorld.QueueRenderer.updateQueueDisplay(queueData); + } else if (data.queue_status) { + AniWorld.QueueRenderer.updateQueueDisplay(data.queue_status); + } else { + AniWorld.QueueRenderer.updateQueueDisplay(data); + } + }); + + // Download started events + socket.on('download_started', function() { + AniWorld.UI.showToast('Download queue started', 'success'); + if (queueApp) queueApp.loadQueueData(); + }); + + socket.on('queue_started', function() { + AniWorld.UI.showToast('Download queue started', 'success'); + if (queueApp) queueApp.loadQueueData(); + }); + + // Download progress + socket.on('download_progress', function(data) { + console.log('Received download progress:', data); + const success = AniWorld.ProgressHandler.updateDownloadProgress(data); + if (!success && queueApp) { + // Card not found, reload queue + queueApp.loadQueueData(); + } + }); + + // Download complete events + const handleDownloadComplete = function(data) { + const serieName = data.serie_name || data.serie || 'Unknown'; + const episode = data.episode || ''; + AniWorld.UI.showToast('Completed: ' + serieName + (episode ? 
' - Episode ' + episode : ''), 'success'); + + // Clear pending progress updates + const downloadId = data.item_id || data.download_id || data.id; + if (downloadId) { + AniWorld.ProgressHandler.clearPendingUpdate(downloadId); + } + + if (queueApp) queueApp.loadQueueData(); + }; + socket.on(WS_EVENTS.DOWNLOAD_COMPLETED, handleDownloadComplete); + socket.on(WS_EVENTS.DOWNLOAD_COMPLETE, handleDownloadComplete); + + // Download error events + const handleDownloadError = function(data) { + const message = data.error || data.message || 'Unknown error'; + AniWorld.UI.showToast('Download failed: ' + message, 'error'); + + // Clear pending progress updates + const downloadId = data.item_id || data.download_id || data.id; + if (downloadId) { + AniWorld.ProgressHandler.clearPendingUpdate(downloadId); + } + + if (queueApp) queueApp.loadQueueData(); + }; + socket.on(WS_EVENTS.DOWNLOAD_ERROR, handleDownloadError); + socket.on(WS_EVENTS.DOWNLOAD_FAILED, handleDownloadError); + + // Queue completed events + socket.on(WS_EVENTS.DOWNLOAD_QUEUE_COMPLETED, function() { + AniWorld.UI.showToast('All downloads completed!', 'success'); + if (queueApp) queueApp.loadQueueData(); + }); + + socket.on(WS_EVENTS.QUEUE_COMPLETED, function() { + AniWorld.UI.showToast('All downloads completed!', 'success'); + if (queueApp) queueApp.loadQueueData(); + }); + + // Download stop requested + socket.on(WS_EVENTS.DOWNLOAD_STOP_REQUESTED, function() { + AniWorld.UI.showToast('Stopping downloads...', 'info'); + }); + + // Queue stopped events + const handleQueueStopped = function() { + AniWorld.UI.showToast('Download queue stopped', 'success'); + if (queueApp) queueApp.loadQueueData(); + }; + socket.on(WS_EVENTS.DOWNLOAD_STOPPED, handleQueueStopped); + socket.on(WS_EVENTS.QUEUE_STOPPED, handleQueueStopped); + + // Queue paused/resumed + socket.on(WS_EVENTS.QUEUE_PAUSED, function() { + AniWorld.UI.showToast('Queue paused', 'info'); + if (queueApp) queueApp.loadQueueData(); + }); + + socket.on(WS_EVENTS.QUEUE_RESUMED, function() { + AniWorld.UI.showToast('Queue resumed', 'success'); + if (queueApp) queueApp.loadQueueData(); + }); + } + + // Public API + return { + init: init + }; +})(); diff --git a/src/server/web/static/js/shared/api-client.js b/src/server/web/static/js/shared/api-client.js new file mode 100644 index 0000000..84d2838 --- /dev/null +++ b/src/server/web/static/js/shared/api-client.js @@ -0,0 +1,120 @@ +/** + * AniWorld - API Client Module + * + * HTTP request wrapper with automatic authentication + * and error handling. + * + * Dependencies: constants.js, auth.js + */ + +var AniWorld = window.AniWorld || {}; + +AniWorld.ApiClient = (function() { + 'use strict'; + + /** + * Make an authenticated HTTP request + * Automatically includes Authorization header and handles 401 responses + * + * @param {string} url - The API endpoint URL + * @param {Object} options - Fetch options (method, headers, body, etc.) 
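+ * @example
+ * // Sketch: an authenticated GET; a null response means the caller
+ * // was redirected to /login and should bail out.
+ * const res = await request('/api/queue/status', { method: 'GET' });
+ * if (res && res.ok) { const body = await res.json(); }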
+ * @returns {Promise} The fetch response or null if auth failed + */ + async function request(url, options) { + options = options || {}; + + // Get JWT token from localStorage + const token = AniWorld.Auth.getToken(); + + // Check if token exists + if (!token) { + window.location.href = '/login'; + return null; + } + + // Build request options with auth header + const requestOptions = { + credentials: 'same-origin', + ...options, + headers: { + 'Authorization': 'Bearer ' + token, + ...options.headers + } + }; + + // Add Content-Type for JSON body if not already set + if (options.body && typeof options.body === 'string' && !requestOptions.headers['Content-Type']) { + requestOptions.headers['Content-Type'] = 'application/json'; + } + + const response = await fetch(url, requestOptions); + + if (response.status === 401) { + // Token is invalid or expired, clear it and redirect to login + AniWorld.Auth.removeToken(); + window.location.href = '/login'; + return null; + } + + return response; + } + + /** + * Make a GET request + * @param {string} url - The API endpoint URL + * @param {Object} headers - Additional headers + * @returns {Promise} + */ + async function get(url, headers) { + return request(url, { method: 'GET', headers: headers }); + } + + /** + * Make a POST request with JSON body + * @param {string} url - The API endpoint URL + * @param {Object} data - The data to send + * @param {Object} headers - Additional headers + * @returns {Promise} + */ + async function post(url, data, headers) { + return request(url, { + method: 'POST', + headers: { 'Content-Type': 'application/json', ...headers }, + body: JSON.stringify(data) + }); + } + + /** + * Make a DELETE request + * @param {string} url - The API endpoint URL + * @param {Object} headers - Additional headers + * @returns {Promise} + */ + async function del(url, headers) { + return request(url, { method: 'DELETE', headers: headers }); + } + + /** + * Make a PUT request with JSON body + * @param {string} url - The API endpoint URL + * @param {Object} data - The data to send + * @param {Object} headers - Additional headers + * @returns {Promise} + */ + async function put(url, data, headers) { + return request(url, { + method: 'PUT', + headers: { 'Content-Type': 'application/json', ...headers }, + body: JSON.stringify(data) + }); + } + + // Public API + return { + request: request, + get: get, + post: post, + delete: del, + put: put + }; +})(); diff --git a/src/server/web/static/js/shared/auth.js b/src/server/web/static/js/shared/auth.js new file mode 100644 index 0000000..8e92ed2 --- /dev/null +++ b/src/server/web/static/js/shared/auth.js @@ -0,0 +1,173 @@ +/** + * AniWorld - Authentication Module + * + * Handles user authentication, token management, + * and session validation. 
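+ *
+ * @example
+ * // Sketch: gate page startup on a valid session; checkAuth() handles
+ * // the redirect to /login or /setup itself. startApp is hypothetical.
+ * AniWorld.Auth.checkAuth().then(function(ok) {
+ *     if (ok) { startApp(); }
+ * });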
+ * + * Dependencies: constants.js + */ + +var AniWorld = window.AniWorld || {}; + +AniWorld.Auth = (function() { + 'use strict'; + + const STORAGE = AniWorld.Constants.STORAGE_KEYS; + const API = AniWorld.Constants.API; + + /** + * Get the stored access token + * @returns {string|null} The access token or null if not found + */ + function getToken() { + return localStorage.getItem(STORAGE.ACCESS_TOKEN); + } + + /** + * Store the access token + * @param {string} token - The access token to store + */ + function setToken(token) { + localStorage.setItem(STORAGE.ACCESS_TOKEN, token); + } + + /** + * Remove the stored access token + */ + function removeToken() { + localStorage.removeItem(STORAGE.ACCESS_TOKEN); + localStorage.removeItem(STORAGE.TOKEN_EXPIRES_AT); + } + + /** + * Get authorization headers for API requests + * @returns {Object} Headers object with Authorization if token exists + */ + function getAuthHeaders() { + const token = getToken(); + return token ? { 'Authorization': 'Bearer ' + token } : {}; + } + + /** + * Check if user is authenticated + * Redirects to login page if not authenticated + * @returns {Promise} True if authenticated, false otherwise + */ + async function checkAuth() { + const currentPath = window.location.pathname; + + // Don't check authentication if already on login or setup pages + if (currentPath === '/login' || currentPath === '/setup') { + return false; + } + + try { + const token = getToken(); + console.log('checkAuthentication: token exists =', !!token); + + if (!token) { + console.log('checkAuthentication: No token found, redirecting to /login'); + window.location.href = '/login'; + return false; + } + + const headers = { + 'Authorization': 'Bearer ' + token + }; + + const response = await fetch(API.AUTH_STATUS, { headers }); + console.log('checkAuthentication: response status =', response.status); + + if (!response.ok) { + console.log('checkAuthentication: Response not OK, status =', response.status); + throw new Error('HTTP ' + response.status); + } + + const data = await response.json(); + console.log('checkAuthentication: data =', data); + + if (!data.configured) { + console.log('checkAuthentication: Not configured, redirecting to /setup'); + window.location.href = '/setup'; + return false; + } + + if (!data.authenticated) { + console.log('checkAuthentication: Not authenticated, redirecting to /login'); + removeToken(); + window.location.href = '/login'; + return false; + } + + console.log('checkAuthentication: Authenticated successfully'); + + // Show logout button if it exists + const logoutBtn = document.getElementById('logout-btn'); + if (logoutBtn) { + logoutBtn.style.display = 'block'; + } + + return true; + } catch (error) { + console.error('Authentication check failed:', error); + removeToken(); + window.location.href = '/login'; + return false; + } + } + + /** + * Log out the current user + * @param {Function} showToast - Optional function to show toast messages + */ + async function logout(showToast) { + try { + const response = await AniWorld.ApiClient.request(API.AUTH_LOGOUT, { method: 'POST' }); + + removeToken(); + + if (response && response.ok) { + const data = await response.json(); + if (showToast) { + showToast(data.status === 'ok' ? 
'Logged out successfully' : 'Logged out', 'success'); + } + } else { + if (showToast) { + showToast('Logged out', 'success'); + } + } + + setTimeout(function() { + window.location.href = '/login'; + }, 1000); + } catch (error) { + console.error('Logout error:', error); + removeToken(); + if (showToast) { + showToast('Logged out', 'success'); + } + setTimeout(function() { + window.location.href = '/login'; + }, 1000); + } + } + + /** + * Check if user has a valid token stored + * @returns {boolean} True if token exists + */ + function hasToken() { + return !!getToken(); + } + + // Public API + return { + getToken: getToken, + setToken: setToken, + removeToken: removeToken, + getAuthHeaders: getAuthHeaders, + checkAuth: checkAuth, + logout: logout, + hasToken: hasToken + }; +})(); diff --git a/src/server/web/static/js/shared/constants.js b/src/server/web/static/js/shared/constants.js new file mode 100644 index 0000000..e68861d --- /dev/null +++ b/src/server/web/static/js/shared/constants.js @@ -0,0 +1,147 @@ +/** + * AniWorld - Constants Module + * + * Shared constants, API endpoints, and configuration values + * used across all JavaScript modules. + * + * Dependencies: None (must be loaded first) + */ + +var AniWorld = window.AniWorld || {}; + +AniWorld.Constants = (function() { + 'use strict'; + + // API Endpoints + const API = { + // Auth endpoints + AUTH_STATUS: '/api/auth/status', + AUTH_LOGIN: '/api/auth/login', + AUTH_LOGOUT: '/api/auth/logout', + + // Anime endpoints + ANIME_LIST: '/api/anime', + ANIME_SEARCH: '/api/anime/search', + ANIME_ADD: '/api/anime/add', + ANIME_RESCAN: '/api/anime/rescan', + ANIME_STATUS: '/api/anime/status', + ANIME_SCAN_STATUS: '/api/anime/scan/status', + + // Queue endpoints + QUEUE_STATUS: '/api/queue/status', + QUEUE_ADD: '/api/queue/add', + QUEUE_START: '/api/queue/start', + QUEUE_STOP: '/api/queue/stop', + QUEUE_RETRY: '/api/queue/retry', + QUEUE_REMOVE: '/api/queue', // + /{id} + QUEUE_COMPLETED: '/api/queue/completed', + QUEUE_FAILED: '/api/queue/failed', + QUEUE_PENDING: '/api/queue/pending', + + // Config endpoints + CONFIG_DIRECTORY: '/api/config/directory', + CONFIG_SECTION: '/api/config/section', // + /{section} + CONFIG_BACKUP: '/api/config/backup', + CONFIG_BACKUPS: '/api/config/backups', + CONFIG_VALIDATE: '/api/config/validate', + CONFIG_RESET: '/api/config/reset', + + // Scheduler endpoints + SCHEDULER_CONFIG: '/api/scheduler/config', + SCHEDULER_TRIGGER: '/api/scheduler/trigger-rescan', + + // Logging endpoints + LOGGING_CONFIG: '/api/logging/config', + LOGGING_FILES: '/api/logging/files', + LOGGING_CLEANUP: '/api/logging/cleanup', + LOGGING_TEST: '/api/logging/test', + + // Diagnostics + DIAGNOSTICS_NETWORK: '/api/diagnostics/network' + }; + + // Local Storage Keys + const STORAGE_KEYS = { + ACCESS_TOKEN: 'access_token', + TOKEN_EXPIRES_AT: 'token_expires_at', + THEME: 'theme' + }; + + // Default Values + const DEFAULTS = { + THEME: 'light', + TOAST_DURATION: 5000, + SCAN_AUTO_DISMISS: 3000, + REFRESH_INTERVAL: 2000 + }; + + // WebSocket Rooms + const WS_ROOMS = { + DOWNLOADS: 'downloads', + QUEUE: 'queue', + SCAN: 'scan', + SYSTEM: 'system', + ERRORS: 'errors' + }; + + // WebSocket Events + const WS_EVENTS = { + // Connection + CONNECTED: 'connected', + CONNECT: 'connect', + DISCONNECT: 'disconnect', + + // Scan events + SCAN_STARTED: 'scan_started', + SCAN_PROGRESS: 'scan_progress', + SCAN_COMPLETED: 'scan_completed', + SCAN_COMPLETE: 'scan_complete', + SCAN_ERROR: 'scan_error', + SCAN_FAILED: 'scan_failed', + + // Scheduled scan events 
+ SCHEDULED_RESCAN_STARTED: 'scheduled_rescan_started', + SCHEDULED_RESCAN_COMPLETED: 'scheduled_rescan_completed', + SCHEDULED_RESCAN_ERROR: 'scheduled_rescan_error', + SCHEDULED_RESCAN_SKIPPED: 'scheduled_rescan_skipped', + + // Download events + DOWNLOAD_STARTED: 'download_started', + DOWNLOAD_PROGRESS: 'download_progress', + DOWNLOAD_COMPLETED: 'download_completed', + DOWNLOAD_COMPLETE: 'download_complete', + DOWNLOAD_ERROR: 'download_error', + DOWNLOAD_FAILED: 'download_failed', + DOWNLOAD_PAUSED: 'download_paused', + DOWNLOAD_RESUMED: 'download_resumed', + DOWNLOAD_CANCELLED: 'download_cancelled', + DOWNLOAD_STOPPED: 'download_stopped', + DOWNLOAD_STOP_REQUESTED: 'download_stop_requested', + + // Queue events + QUEUE_UPDATED: 'queue_updated', + QUEUE_STATUS: 'queue_status', + QUEUE_STARTED: 'queue_started', + QUEUE_STOPPED: 'queue_stopped', + QUEUE_PAUSED: 'queue_paused', + QUEUE_RESUMED: 'queue_resumed', + QUEUE_COMPLETED: 'queue_completed', + DOWNLOAD_QUEUE_COMPLETED: 'download_queue_completed', + DOWNLOAD_QUEUE_UPDATE: 'download_queue_update', + DOWNLOAD_EPISODE_UPDATE: 'download_episode_update', + DOWNLOAD_SERIES_COMPLETED: 'download_series_completed', + + // Auto download + AUTO_DOWNLOAD_STARTED: 'auto_download_started', + AUTO_DOWNLOAD_ERROR: 'auto_download_error' + }; + + // Public API + return { + API: API, + STORAGE_KEYS: STORAGE_KEYS, + DEFAULTS: DEFAULTS, + WS_ROOMS: WS_ROOMS, + WS_EVENTS: WS_EVENTS + }; +})(); diff --git a/src/server/web/static/js/shared/theme.js b/src/server/web/static/js/shared/theme.js new file mode 100644 index 0000000..56b7b7b --- /dev/null +++ b/src/server/web/static/js/shared/theme.js @@ -0,0 +1,73 @@ +/** + * AniWorld - Theme Module + * + * Dark/light mode management and persistence. + * + * Dependencies: constants.js + */ + +var AniWorld = window.AniWorld || {}; + +AniWorld.Theme = (function() { + 'use strict'; + + const STORAGE = AniWorld.Constants.STORAGE_KEYS; + const DEFAULTS = AniWorld.Constants.DEFAULTS; + + /** + * Initialize theme from saved preference + */ + function init() { + const savedTheme = localStorage.getItem(STORAGE.THEME) || DEFAULTS.THEME; + setTheme(savedTheme); + } + + /** + * Set the application theme + * @param {string} theme - 'light' or 'dark' + */ + function setTheme(theme) { + document.documentElement.setAttribute('data-theme', theme); + localStorage.setItem(STORAGE.THEME, theme); + + // Update theme toggle icon if it exists + const themeIcon = document.querySelector('#theme-toggle i'); + if (themeIcon) { + themeIcon.className = theme === 'light' ? 'fas fa-moon' : 'fas fa-sun'; + } + } + + /** + * Toggle between light and dark themes + */ + function toggle() { + const currentTheme = document.documentElement.getAttribute('data-theme') || DEFAULTS.THEME; + const newTheme = currentTheme === 'light' ? 
'dark' : 'light'; + setTheme(newTheme); + } + + /** + * Get the current theme + * @returns {string} 'light' or 'dark' + */ + function getCurrentTheme() { + return document.documentElement.getAttribute('data-theme') || DEFAULTS.THEME; + } + + /** + * Check if dark mode is active + * @returns {boolean} + */ + function isDarkMode() { + return getCurrentTheme() === 'dark'; + } + + // Public API + return { + init: init, + setTheme: setTheme, + toggle: toggle, + getCurrentTheme: getCurrentTheme, + isDarkMode: isDarkMode + }; +})(); diff --git a/src/server/web/static/js/shared/ui-utils.js b/src/server/web/static/js/shared/ui-utils.js new file mode 100644 index 0000000..26b91cd --- /dev/null +++ b/src/server/web/static/js/shared/ui-utils.js @@ -0,0 +1,245 @@ +/** + * AniWorld - UI Utilities Module + * + * Toast notifications, loading overlays, and + * common UI helper functions. + * + * Dependencies: constants.js + */ + +var AniWorld = window.AniWorld || {}; + +AniWorld.UI = (function() { + 'use strict'; + + const DEFAULTS = AniWorld.Constants.DEFAULTS; + + /** + * Show a toast notification + * @param {string} message - The message to display + * @param {string} type - 'info', 'success', 'warning', or 'error' + * @param {number} duration - Duration in milliseconds (optional) + */ + function showToast(message, type, duration) { + type = type || 'info'; + duration = duration || DEFAULTS.TOAST_DURATION; + + const container = document.getElementById('toast-container'); + if (!container) { + console.warn('Toast container not found'); + return; + } + + const toast = document.createElement('div'); + toast.className = 'toast ' + type; + toast.innerHTML = + '
<div class="toast-content">' + + '<span class="toast-message">' + escapeHtml(message) + '</span>' + + '<button class="toast-close" onclick="this.parentElement.parentElement.remove()">&times;</button>' + + '</div>
'; + + container.appendChild(toast); + + // Auto-remove after duration + setTimeout(function() { + if (toast.parentElement) { + toast.remove(); + } + }, duration); + } + + /** + * Show loading overlay + */ + function showLoading() { + const overlay = document.getElementById('loading-overlay'); + if (overlay) { + overlay.classList.remove('hidden'); + } + } + + /** + * Hide loading overlay + */ + function hideLoading() { + const overlay = document.getElementById('loading-overlay'); + if (overlay) { + overlay.classList.add('hidden'); + } + } + + /** + * Escape HTML to prevent XSS + * @param {string} text - The text to escape + * @returns {string} Escaped HTML + */ + function escapeHtml(text) { + if (text === null || text === undefined) return ''; + const div = document.createElement('div'); + div.textContent = text; + return div.innerHTML; + } + + /** + * Format bytes to human readable string + * @param {number} bytes - Number of bytes + * @param {number} decimals - Decimal places (default 2) + * @returns {string} Formatted string like "1.5 MB" + */ + function formatBytes(bytes, decimals) { + decimals = decimals || 2; + if (bytes === 0) return '0 Bytes'; + + const k = 1024; + const sizes = ['Bytes', 'KB', 'MB', 'GB', 'TB']; + const i = Math.floor(Math.log(bytes) / Math.log(k)); + + return parseFloat((bytes / Math.pow(k, i)).toFixed(decimals)) + ' ' + sizes[i]; + } + + /** + * Format duration in seconds to human readable string + * @param {number} seconds - Duration in seconds + * @returns {string} Formatted string like "1h 30m" + */ + function formatDuration(seconds) { + if (!seconds || seconds <= 0) return '---'; + + if (seconds < 60) { + return Math.round(seconds) + 's'; + } else if (seconds < 3600) { + const minutes = Math.round(seconds / 60); + return minutes + 'm'; + } else if (seconds < 86400) { + const hours = Math.floor(seconds / 3600); + const minutes = Math.round((seconds % 3600) / 60); + return hours + 'h ' + minutes + 'm'; + } else { + const days = Math.floor(seconds / 86400); + const hours = Math.round((seconds % 86400) / 3600); + return days + 'd ' + hours + 'h'; + } + } + + /** + * Format ETA (alias for formatDuration) + * @param {number} seconds - ETA in seconds + * @returns {string} Formatted ETA string + */ + function formatETA(seconds) { + return formatDuration(seconds); + } + + /** + * Format date to locale string + * @param {string|Date} date - Date to format + * @returns {string} Formatted date string + */ + function formatDate(date) { + if (!date) return ''; + const d = new Date(date); + return d.toLocaleString(); + } + + /** + * Get display name for anime/series object + * Returns name if available, otherwise key or folder + * @param {Object} anime - Anime/series object + * @returns {string} Display name + */ + function getDisplayName(anime) { + if (!anime) return ''; + const name = anime.name || ''; + const trimmedName = name.trim(); + if (trimmedName) { + return trimmedName; + } + return anime.key || anime.folder || ''; + } + + /** + * Calculate duration between two timestamps + * @param {string} startTime - Start timestamp + * @param {string} endTime - End timestamp + * @returns {string} Formatted duration + */ + function calculateDuration(startTime, endTime) { + const start = new Date(startTime); + const end = new Date(endTime); + const diffMs = end - start; + + const minutes = Math.floor(diffMs / (1000 * 60)); + const seconds = Math.floor((diffMs % (1000 * 60)) / 1000); + + return minutes + 'm ' + seconds + 's'; + } + + /** + * Show a confirmation modal + * @param 
{string} title - Modal title + * @param {string} message - Modal message + * @returns {Promise} Resolves to true if confirmed, false if cancelled + */ + function showConfirmModal(title, message) { + return new Promise(function(resolve) { + const modal = document.getElementById('confirm-modal'); + if (!modal) { + resolve(window.confirm(message)); + return; + } + + document.getElementById('confirm-title').textContent = title; + document.getElementById('confirm-message').textContent = message; + modal.classList.remove('hidden'); + + function handleConfirm() { + cleanup(); + resolve(true); + } + + function handleCancel() { + cleanup(); + resolve(false); + } + + function cleanup() { + document.getElementById('confirm-ok').removeEventListener('click', handleConfirm); + document.getElementById('confirm-cancel').removeEventListener('click', handleCancel); + modal.classList.add('hidden'); + } + + document.getElementById('confirm-ok').addEventListener('click', handleConfirm); + document.getElementById('confirm-cancel').addEventListener('click', handleCancel); + }); + } + + /** + * Hide the confirmation modal + */ + function hideConfirmModal() { + const modal = document.getElementById('confirm-modal'); + if (modal) { + modal.classList.add('hidden'); + } + } + + // Public API + return { + showToast: showToast, + showLoading: showLoading, + hideLoading: hideLoading, + escapeHtml: escapeHtml, + formatBytes: formatBytes, + formatDuration: formatDuration, + formatETA: formatETA, + formatDate: formatDate, + getDisplayName: getDisplayName, + calculateDuration: calculateDuration, + showConfirmModal: showConfirmModal, + hideConfirmModal: hideConfirmModal + }; +})(); diff --git a/src/server/web/static/js/shared/websocket-client.js b/src/server/web/static/js/shared/websocket-client.js new file mode 100644 index 0000000..aecebfb --- /dev/null +++ b/src/server/web/static/js/shared/websocket-client.js @@ -0,0 +1,300 @@ +/** + * AniWorld - WebSocket Client Module + * + * Native WebSocket connection management with Socket.IO-style interface. + * Uses FastAPI native WebSocket backend with room-based messaging. + * + * Dependencies: constants.js + */ + +var AniWorld = window.AniWorld || {}; + +AniWorld.WebSocketClient = (function() { + 'use strict'; + + const WS_EVENTS = AniWorld.Constants.WS_EVENTS; + + let ws = null; + let isConnected = false; + let eventHandlers = {}; + let rooms = new Set(); + let messageQueue = []; + let reconnectAttempts = 0; + const maxReconnectAttempts = 5; + const reconnectDelay = 1000; + let autoReconnect = true; + + /** + * Get WebSocket URL based on current page URL + */ + function getWebSocketUrl() { + const protocol = window.location.protocol === 'https:' ? 
'wss:' : 'ws:'; + const host = window.location.host; + return protocol + '//' + host + '/ws/connect'; + } + + /** + * Initialize WebSocket connection + * @param {Object} handlers - Object mapping event names to handler functions + */ + function init(handlers) { + handlers = handlers || {}; + eventHandlers = handlers; + + connect(); + } + + /** + * Connect to WebSocket server + */ + function connect() { + if (ws && ws.readyState === WebSocket.OPEN) { + console.log('WebSocket already connected'); + return; + } + + try { + const url = getWebSocketUrl(); + console.log('Connecting to WebSocket:', url); + ws = new WebSocket(url); + + ws.onopen = function() { + console.log('WebSocket connected'); + isConnected = true; + reconnectAttempts = 0; + + // Emit connect event to handlers + emitToHandlers('connect'); + + // Join default rooms for this application + joinRoom('scan'); + joinRoom('downloads'); + joinRoom('queue'); + + // Rejoin any previously joined rooms + rejoinRooms(); + + // Process queued messages + processMessageQueue(); + + // Call custom connect handler if provided + if (eventHandlers.onConnect) { + eventHandlers.onConnect(); + } + }; + + ws.onmessage = function(event) { + handleMessage(event.data); + }; + + ws.onerror = function(error) { + console.error('WebSocket error:', error); + emitToHandlers('error', { error: 'WebSocket connection error' }); + }; + + ws.onclose = function(event) { + console.log('WebSocket disconnected', event.code, event.reason); + isConnected = false; + emitToHandlers('disconnect', { code: event.code, reason: event.reason }); + + // Call custom disconnect handler if provided + if (eventHandlers.onDisconnect) { + eventHandlers.onDisconnect(); + } + + // Attempt reconnection + if (autoReconnect && reconnectAttempts < maxReconnectAttempts) { + reconnectAttempts++; + var delay = reconnectDelay * reconnectAttempts; + console.log('Attempting reconnection in ' + delay + 'ms (attempt ' + reconnectAttempts + ')'); + setTimeout(connect, delay); + } + }; + } catch (error) { + console.error('Failed to create WebSocket connection:', error); + emitToHandlers('error', { error: 'Failed to connect' }); + } + } + + /** + * Handle incoming WebSocket message + * @param {string} data - Raw message data + */ + function handleMessage(data) { + try { + var message = JSON.parse(data); + var type = message.type; + var payload = message.data; + + console.log('WebSocket message: type=' + type, payload); + + // Emit event to registered handlers + if (type) { + emitToHandlers(type, payload || {}); + } + } catch (error) { + console.error('Failed to parse WebSocket message:', error, data); + } + } + + /** + * Emit event to registered handlers (internal) + * @param {string} event - Event name + * @param {*} data - Event data + */ + function emitToHandlers(event, data) { + if (eventHandlers[event]) { + try { + if (data !== undefined) { + eventHandlers[event](data); + } else { + eventHandlers[event](); + } + } catch (error) { + console.error('Error in event handler for ' + event + ':', error); + } + } + } + + /** + * Send message to server + * @param {string} action - Action type + * @param {Object} data - Data payload + */ + function send(action, data) { + var message = JSON.stringify({ + action: action, + data: data || {} + }); + + if (isConnected && ws && ws.readyState === WebSocket.OPEN) { + ws.send(message); + } else { + console.warn('WebSocket not connected, queueing message'); + messageQueue.push(message); + } + } + + /** + * Join a room (subscribe to topic) + * @param {string} room - 
Room name + */ + function joinRoom(room) { + rooms.add(room); + if (isConnected) { + send('join', { room: room }); + console.log('Joined room:', room); + } + } + + /** + * Leave a room (unsubscribe from topic) + * @param {string} room - Room name + */ + function leaveRoom(room) { + rooms.delete(room); + if (isConnected) { + send('leave', { room: room }); + console.log('Left room:', room); + } + } + + /** + * Rejoin all rooms after reconnection + */ + function rejoinRooms() { + rooms.forEach(function(room) { + send('join', { room: room }); + }); + } + + /** + * Process queued messages after connection + */ + function processMessageQueue() { + while (messageQueue.length > 0 && isConnected) { + var message = messageQueue.shift(); + ws.send(message); + } + } + + /** + * Register an event handler + * @param {string} eventName - The event name + * @param {Function} handler - The handler function + */ + function on(eventName, handler) { + eventHandlers[eventName] = handler; + } + + /** + * Remove an event handler + * @param {string} eventName - The event name + */ + function off(eventName) { + delete eventHandlers[eventName]; + } + + /** + * Emit an event to the server (Socket.IO compatibility) + * @param {string} eventName - The event name + * @param {*} data - The data to send + */ + function emit(eventName, data) { + if (!isConnected) { + console.warn('WebSocket not connected'); + return; + } + send(eventName, data); + } + + /** + * Get connection status + * @returns {boolean} True if connected + */ + function getConnectionStatus() { + return isConnected; + } + + /** + * Get the WebSocket instance (for compatibility) + * @returns {Object} The WebSocket instance wrapped with event methods + */ + function getSocket() { + // Return a wrapper object that provides Socket.IO-like interface + return { + on: on, + off: off, + emit: emit, + join: joinRoom, + leave: leaveRoom, + connected: isConnected + }; + } + + /** + * Disconnect from server + */ + function disconnect() { + autoReconnect = false; + if (ws) { + ws.close(1000, 'Client disconnected'); + ws = null; + isConnected = false; + } + } + + // Public API + return { + init: init, + on: on, + off: off, + emit: emit, + send: send, + join: joinRoom, + leave: leaveRoom, + isConnected: getConnectionStatus, + getSocket: getSocket, + disconnect: disconnect + }; +})(); diff --git a/src/server/web/templates/index.html b/src/server/web/templates/index.html index 8288f77..2d1d655 100644 --- a/src/server/web/templates/index.html +++ b/src/server/web/templates/index.html @@ -440,15 +440,31 @@ - - - + + + + + + + - + + - - + + + + + + + + + + + + + \ No newline at end of file diff --git a/src/server/web/templates/queue.html b/src/server/web/templates/queue.html index 6b90710..0d7f896 100644 --- a/src/server/web/templates/queue.html +++ b/src/server/web/templates/queue.html @@ -233,9 +233,20 @@ - - - + + + + + + + + + + + + + + \ No newline at end of file diff --git a/stop_server.sh b/stop_server.sh index d722e59..7e89368 100644 --- a/stop_server.sh +++ b/stop_server.sh @@ -1,22 +1,93 @@ #!/bin/bash -# Stop Aniworld FastAPI Server +# Stop Aniworld FastAPI Server (Graceful Shutdown) +# +# This script performs a graceful shutdown by sending SIGTERM first, +# allowing the application to clean up resources properly before +# falling back to SIGKILL if needed. -echo "Stopping Aniworld server..." 
+GRACEFUL_TIMEOUT=30 # seconds to wait for graceful shutdown -# Method 1: Kill uvicorn processes -pkill -f "uvicorn.*fastapi_app:app" && echo "✓ Stopped uvicorn processes" +echo "Stopping Aniworld server (graceful shutdown)..." -# Method 2: Kill any process using port 8000 +# Function to wait for a process to terminate +wait_for_process() { + local pid=$1 + local timeout=$2 + local count=0 + + while [ $count -lt $timeout ]; do + if ! kill -0 "$pid" 2>/dev/null; then + [ $count -gt 0 ] && echo "" # end the progress line before returning + return 0 # Process terminated + fi + sleep 1 + count=$((count + 1)) + echo -ne "\r Waiting for graceful shutdown... ${count}/${timeout}s" + done + echo "" + return 1 # Timeout +} + +# Method 1: Gracefully stop uvicorn processes +UVICORN_PIDS=$(pgrep -f "uvicorn.*fastapi_app:app") +if [ -n "$UVICORN_PIDS" ]; then + echo "Sending SIGTERM to uvicorn processes..." + for pid in $UVICORN_PIDS; do + kill -TERM "$pid" 2>/dev/null + done + + # Wait for graceful shutdown + all_terminated=true + for pid in $UVICORN_PIDS; do + if ! wait_for_process "$pid" "$GRACEFUL_TIMEOUT"; then + all_terminated=false + echo " Process $pid did not terminate gracefully, forcing..." + kill -9 "$pid" 2>/dev/null + fi + done + + if $all_terminated; then + echo "✓ Uvicorn processes stopped gracefully" + else + echo "✓ Uvicorn processes stopped (forced)" + fi +else + echo "✓ No uvicorn processes running" +fi + +# Method 2: Gracefully stop any process using port 8000 PORT_PID=$(lsof -ti:8000) if [ -n "$PORT_PID" ]; then - kill -9 $PORT_PID - echo "✓ Killed process on port 8000 (PID: $PORT_PID)" + echo "Found process on port 8000 (PID: $PORT_PID)" + + # Send SIGTERM first + kill -TERM "$PORT_PID" 2>/dev/null + + if wait_for_process "$PORT_PID" "$GRACEFUL_TIMEOUT"; then + echo "✓ Process on port 8000 stopped gracefully" + else + echo " Graceful shutdown timed out, forcing..." + kill -9 "$PORT_PID" 2>/dev/null + echo "✓ Process on port 8000 stopped (forced)" + fi else echo "✓ Port 8000 is already free" fi -# Method 3: Kill any python processes running the server -pkill -f "run_server.py" && echo "✓ Stopped run_server.py processes" +# Method 3: Gracefully stop run_server.py processes +SERVER_PIDS=$(pgrep -f "run_server.py") +if [ -n "$SERVER_PIDS" ]; then + echo "Sending SIGTERM to run_server.py processes..." + for pid in $SERVER_PIDS; do + kill -TERM "$pid" 2>/dev/null + done + + for pid in $SERVER_PIDS; do + if ! wait_for_process "$pid" 10; then + kill -9 "$pid" 2>/dev/null + fi + done + echo "✓ Stopped run_server.py processes" +fi echo "" echo "Server stopped successfully!"
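+
+# Example session (illustrative; actual output depends on what is running):
+#   $ ./stop_server.sh
+#   Stopping Aniworld server (graceful shutdown)...
+#   Sending SIGTERM to uvicorn processes...
+#   ✓ Uvicorn processes stopped gracefully
+#   ✓ Port 8000 is already free
+#
+#   Server stopped successfully!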
diff --git a/tests/api/test_anime_endpoints.py b/tests/api/test_anime_endpoints.py index 123f394..12da56a 100644 --- a/tests/api/test_anime_endpoints.py +++ b/tests/api/test_anime_endpoints.py @@ -42,11 +42,17 @@ class FakeSeriesApp: def __init__(self): """Initialize fake series app.""" self.list = self # Changed from self.List to self.list + self.scanner = FakeScanner() # Add fake scanner + self.directory = "/tmp/fake_anime" + self.keyDict = {} # Add keyDict for direct access self._items = [ # Using realistic key values (URL-safe, lowercase, hyphenated) FakeSerie("test-show-key", "Test Show", "Test Show (2023)", {1: [1, 2]}), FakeSerie("complete-show-key", "Complete Show", "Complete Show (2022)", {}), ] + # Populate keyDict + for item in self._items: + self.keyDict[item.key] = item def GetMissingEpisode(self): """Return series with missing episodes.""" @@ -60,11 +66,21 @@ class FakeSeriesApp: """Trigger rescan with callback.""" callback() - def add(self, serie): - """Add a serie to the list.""" + def add(self, serie, use_sanitized_folder=True): + """Add a serie to the list. + + Args: + serie: The Serie instance to add + use_sanitized_folder: Whether to use sanitized folder name + + Returns: + str: The folder path (fake path for testing) + """ # Check if already exists if not any(s.key == serie.key for s in self._items): self._items.append(serie) + self.keyDict[serie.key] = serie + return f"/tmp/fake_anime/{serie.folder}" async def search(self, query): """Search for series (async).""" @@ -85,6 +101,14 @@ class FakeSeriesApp: pass +class FakeScanner: + """Mock SerieScanner for testing.""" + + def scan_single_series(self, key, folder): + """Mock scan that returns some fake missing episodes.""" + return {1: [1, 2, 3], 2: [1, 2]} + + @pytest.fixture(autouse=True) def reset_auth_state(): """Reset auth service state before each test.""" @@ -273,3 +297,122 @@ async def test_add_series_endpoint_empty_link(authenticated_client): assert response.status_code == 400 data = response.json() assert "link" in data["detail"].lower() + + +@pytest.mark.asyncio +async def test_add_series_extracts_key_from_full_url(authenticated_client): + """Test that add_series extracts key from full URL.""" + response = await authenticated_client.post( + "/api/anime/add", + json={ + "link": "https://aniworld.to/anime/stream/attack-on-titan", + "name": "Attack on Titan" + } + ) + + assert response.status_code == 200 + data = response.json() + assert data["key"] == "attack-on-titan" + + +@pytest.mark.asyncio +async def test_add_series_sanitizes_folder_name(authenticated_client): + """Test that add_series creates sanitized folder name.""" + response = await authenticated_client.post( + "/api/anime/add", + json={ + "link": "https://aniworld.to/anime/stream/rezero", + "name": "Re:Zero - Starting Life in Another World?" + } + ) + + assert response.status_code == 200 + data = response.json() + + # Folder should not contain invalid characters + folder = data["folder"] + assert ":" not in folder + assert "?" 
not in folder + + +@pytest.mark.asyncio +async def test_add_series_returns_missing_episodes(authenticated_client): + """Test that add_series returns missing episodes info.""" + response = await authenticated_client.post( + "/api/anime/add", + json={ + "link": "https://aniworld.to/anime/stream/test-anime", + "name": "Test Anime" + } + ) + + assert response.status_code == 200 + data = response.json() + + # Response should contain missing episodes fields + assert "missing_episodes" in data + assert "total_missing" in data + assert isinstance(data["missing_episodes"], dict) + assert isinstance(data["total_missing"], int) + + +@pytest.mark.asyncio +async def test_add_series_response_structure(authenticated_client): + """Test the full response structure of add_series.""" + response = await authenticated_client.post( + "/api/anime/add", + json={ + "link": "https://aniworld.to/anime/stream/new-anime", + "name": "New Anime Series" + } + ) + + assert response.status_code == 200 + data = response.json() + + # Verify all expected fields are present + assert "status" in data + assert "message" in data + assert "key" in data + assert "folder" in data + assert "missing_episodes" in data + assert "total_missing" in data + + # Status should be success or exists + assert data["status"] in ("success", "exists") + + +@pytest.mark.asyncio +async def test_add_series_special_characters_in_name(authenticated_client): + """Test adding series with various special characters in name.""" + test_cases = [ + ("86: Eighty-Six", "86-eighty-six"), + ("Fate/Stay Night", "fate-stay-night"), + ("What If...?", "what-if"), + ("Steins;Gate", "steins-gate"), + ] + + for name, key in test_cases: + response = await authenticated_client.post( + "/api/anime/add", + json={ + "link": f"https://aniworld.to/anime/stream/{key}", + "name": name + } + ) + + assert response.status_code == 200 + data = response.json() + + # Get just the folder name (last part of path) + folder_path = data["folder"] + # Handle both full paths and just folder names + if "/" in folder_path: + folder_name = folder_path.rstrip("/").split("/")[-1] + else: + folder_name = folder_path + + # Folder name should not contain invalid filesystem characters + invalid_chars = [':', '\\', '?', '*', '<', '>', '|', '"'] + for char in invalid_chars: + assert char not in folder_name, f"Found '{char}' in folder name for {name}" diff --git a/tests/api/test_download_endpoints.py b/tests/api/test_download_endpoints.py index 6c8603e..04815dd 100644 --- a/tests/api/test_download_endpoints.py +++ b/tests/api/test_download_endpoints.py @@ -236,7 +236,7 @@ async def test_add_to_queue_service_error( ) assert response.status_code == 400 - assert "Queue full" in response.json()["detail"] + assert "Queue full" in response.json()["message"] @pytest.mark.asyncio @@ -294,8 +294,8 @@ async def test_start_download_empty_queue( assert response.status_code == 400 data = response.json() - detail = data["detail"].lower() - assert "empty" in detail or "no pending" in detail + message = data["message"].lower() + assert "empty" in message or "no pending" in message @pytest.mark.asyncio @@ -311,8 +311,8 @@ async def test_start_download_already_active( assert response.status_code == 400 data = response.json() - detail_lower = data["detail"].lower() - assert "already" in detail_lower or "progress" in detail_lower + message_lower = data["message"].lower() + assert "already" in message_lower or "progress" in message_lower @pytest.mark.asyncio diff --git a/tests/frontend/test_existing_ui_integration.py 
b/tests/frontend/test_existing_ui_integration.py index 6c83b6b..1b8a39b 100644 --- a/tests/frontend/test_existing_ui_integration.py +++ b/tests/frontend/test_existing_ui_integration.py @@ -162,6 +162,7 @@ class TestFrontendAuthentication: mock_app = AsyncMock() mock_list = AsyncMock() mock_list.GetMissingEpisode = AsyncMock(return_value=[]) + mock_list.GetList = AsyncMock(return_value=[]) mock_app.List = mock_list mock_get_app.return_value = mock_app @@ -201,7 +202,7 @@ class TestFrontendAnimeAPI: async def test_rescan_anime(self, authenticated_client): """Test POST /api/anime/rescan triggers rescan with events.""" - from unittest.mock import MagicMock + from unittest.mock import MagicMock, patch from src.server.services.progress_service import ProgressService from src.server.utils.dependencies import get_anime_service @@ -210,7 +211,7 @@ class TestFrontendAnimeAPI: mock_series_app = MagicMock() mock_series_app.directory_to_search = "/tmp/test" mock_series_app.series_list = [] - mock_series_app.rescan = AsyncMock() + mock_series_app.rescan = AsyncMock(return_value=[]) mock_series_app.download_status = None mock_series_app.scan_status = None @@ -232,7 +233,16 @@ class TestFrontendAnimeAPI: app.dependency_overrides[get_anime_service] = lambda: anime_service try: - response = await authenticated_client.post("/api/anime/rescan") + # Mock database operations called during rescan + with patch.object( + anime_service, '_save_scan_results_to_db', new_callable=AsyncMock + ): + with patch.object( + anime_service, '_load_series_from_db', new_callable=AsyncMock + ): + response = await authenticated_client.post( + "/api/anime/rescan" + ) assert response.status_code == 200 data = response.json() @@ -448,7 +458,7 @@ class TestFrontendJavaScriptIntegration: assert response.status_code in [200, 400] if response.status_code == 400: # Verify error message indicates empty queue - assert "No pending downloads" in response.json()["detail"] + assert "No pending downloads" in response.json()["message"] # Test pause - always succeeds even if nothing is processing response = await authenticated_client.post("/api/queue/pause") diff --git a/tests/integration/test_data_file_db_sync.py b/tests/integration/test_data_file_db_sync.py new file mode 100644 index 0000000..5f14fe7 --- /dev/null +++ b/tests/integration/test_data_file_db_sync.py @@ -0,0 +1,274 @@ +"""Integration tests for data file to database synchronization. + +This module verifies that the data file to database sync functionality +works correctly, including: +- Loading series from data files +- Adding series to the database +- Preventing duplicate entries +- Handling corrupt or missing files gracefully +- End-to-end startup sync behavior + +The sync functionality allows existing series metadata stored in +data files to be automatically imported into the database during +application startup. 
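+
+Each series folder holds a single JSON "data" file, for example:
+
+    <anime_dir>/Anime Test 1/data
+    <anime_dir>/Anime Test 2/data
+
+(created in these tests by the _create_test_data_file helper at the bottom
+of this module).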
+""" +import json +import os +import tempfile +from unittest.mock import AsyncMock, Mock, patch + +import pytest + +from src.core.entities.series import Serie +from src.core.SeriesApp import SeriesApp + + +class TestGetAllSeriesFromDataFiles: + """Test SeriesApp.get_all_series_from_data_files() method.""" + + def test_returns_empty_list_for_empty_directory(self): + """Test that empty directory returns empty list.""" + with tempfile.TemporaryDirectory() as tmp_dir: + with patch('src.core.SeriesApp.Loaders'), \ + patch('src.core.SeriesApp.SerieScanner'): + app = SeriesApp(tmp_dir) + result = app.get_all_series_from_data_files() + + assert isinstance(result, list) + assert len(result) == 0 + + def test_returns_series_from_data_files(self): + """Test that valid data files are loaded correctly.""" + with tempfile.TemporaryDirectory() as tmp_dir: + # Create test data files + _create_test_data_file( + tmp_dir, + folder="Anime Test 1", + key="anime-test-1", + name="Anime Test 1", + episodes={1: [1, 2, 3]} + ) + _create_test_data_file( + tmp_dir, + folder="Anime Test 2", + key="anime-test-2", + name="Anime Test 2", + episodes={1: [1]} + ) + + with patch('src.core.SeriesApp.Loaders'), \ + patch('src.core.SeriesApp.SerieScanner'): + app = SeriesApp(tmp_dir) + result = app.get_all_series_from_data_files() + + assert isinstance(result, list) + assert len(result) == 2 + keys = {s.key for s in result} + assert "anime-test-1" in keys + assert "anime-test-2" in keys + + def test_handles_corrupt_data_files_gracefully(self): + """Test that corrupt data files don't crash the sync.""" + with tempfile.TemporaryDirectory() as tmp_dir: + # Create a valid data file + _create_test_data_file( + tmp_dir, + folder="Valid Anime", + key="valid-anime", + name="Valid Anime", + episodes={1: [1]} + ) + + # Create a corrupt data file (invalid JSON) + corrupt_dir = os.path.join(tmp_dir, "Corrupt Anime") + os.makedirs(corrupt_dir, exist_ok=True) + with open(os.path.join(corrupt_dir, "data"), "w") as f: + f.write("this is not valid json {{{") + + with patch('src.core.SeriesApp.Loaders'), \ + patch('src.core.SeriesApp.SerieScanner'): + app = SeriesApp(tmp_dir) + result = app.get_all_series_from_data_files() + + # Should still return the valid series + assert isinstance(result, list) + assert len(result) >= 1 + # The valid anime should be loaded + keys = {s.key for s in result} + assert "valid-anime" in keys + + def test_handles_missing_directory_gracefully(self): + """Test that non-existent directory returns empty list.""" + non_existent_dir = "/non/existent/directory/path" + + with patch('src.core.SeriesApp.Loaders'), \ + patch('src.core.SeriesApp.SerieScanner'): + app = SeriesApp(non_existent_dir) + result = app.get_all_series_from_data_files() + + assert isinstance(result, list) + assert len(result) == 0 + + +class TestSyncSeriesToDatabase: + """Test sync_series_from_data_files function from anime_service.""" + + @pytest.mark.asyncio + async def test_sync_with_empty_directory(self): + """Test sync with empty anime directory.""" + from src.server.services.anime_service import sync_series_from_data_files + + with tempfile.TemporaryDirectory() as tmp_dir: + mock_logger = Mock() + + with patch('src.core.SeriesApp.Loaders'), \ + patch('src.core.SeriesApp.SerieScanner'): + count = await sync_series_from_data_files(tmp_dir, mock_logger) + + assert count == 0 + # Should log that no series were found + mock_logger.info.assert_called() + + @pytest.mark.asyncio + async def test_sync_adds_new_series_to_database(self): + """Test that sync 
adds new series to database. + + This is a more realistic test that verifies series data is loaded + from files and the sync function attempts to add them to the DB. + The actual DB interaction is tested in test_add_to_db_creates_record. + """ + from src.server.services.anime_service import sync_series_from_data_files + + with tempfile.TemporaryDirectory() as tmp_dir: + # Create test data files + _create_test_data_file( + tmp_dir, + folder="Sync Test Anime", + key="sync-test-anime", + name="Sync Test Anime", + episodes={1: [1, 2]} + ) + + mock_logger = Mock() + + # First verify that we can load the series from files + with patch('src.core.SeriesApp.Loaders'), \ + patch('src.core.SeriesApp.SerieScanner'): + app = SeriesApp(tmp_dir) + series = app.get_all_series_from_data_files() + assert len(series) == 1 + assert series[0].key == "sync-test-anime" + + # Now test that the sync function loads series and handles DB + # gracefully (even if DB operations fail, it should not crash) + with patch('src.core.SeriesApp.Loaders'), \ + patch('src.core.SeriesApp.SerieScanner'): + # The function should return 0 because DB isn't available + # but should not crash + count = await sync_series_from_data_files(tmp_dir, mock_logger) + + # Since no real DB, it will fail gracefully + assert isinstance(count, int) + # Should have logged something + assert mock_logger.info.called or mock_logger.warning.called + + @pytest.mark.asyncio + async def test_sync_handles_exceptions_gracefully(self): + """Test that sync handles exceptions without crashing.""" + from src.server.services.anime_service import sync_series_from_data_files + + mock_logger = Mock() + + # Make SeriesApp raise an exception during initialization + with patch('src.core.SeriesApp.Loaders'), \ + patch('src.core.SeriesApp.SerieScanner'), \ + patch( + 'src.core.SeriesApp.SerieList', + side_effect=Exception("Test error") + ): + count = await sync_series_from_data_files( + "/fake/path", mock_logger + ) + + assert count == 0 + # Should log the warning + mock_logger.warning.assert_called() + + +class TestEndToEndSync: + """End-to-end tests for the sync functionality.""" + + @pytest.mark.asyncio + async def test_startup_sync_integration(self): + """Test end-to-end startup sync behavior.""" + # This test verifies the integration of all components + with tempfile.TemporaryDirectory() as tmp_dir: + # Create test data + _create_test_data_file( + tmp_dir, + folder="E2E Test Anime 1", + key="e2e-test-anime-1", + name="E2E Test Anime 1", + episodes={1: [1, 2, 3]} + ) + _create_test_data_file( + tmp_dir, + folder="E2E Test Anime 2", + key="e2e-test-anime-2", + name="E2E Test Anime 2", + episodes={1: [1], 2: [1, 2]} + ) + + # Use SeriesApp to load series from files + with patch('src.core.SeriesApp.Loaders'), \ + patch('src.core.SeriesApp.SerieScanner'): + app = SeriesApp(tmp_dir) + all_series = app.get_all_series_from_data_files() + + # Verify all series were loaded + assert len(all_series) == 2 + + # Verify series data is correct + series_by_key = {s.key: s for s in all_series} + assert "e2e-test-anime-1" in series_by_key + assert "e2e-test-anime-2" in series_by_key + + # Verify episode data + anime1 = series_by_key["e2e-test-anime-1"] + assert anime1.episodeDict == {1: [1, 2, 3]} + + anime2 = series_by_key["e2e-test-anime-2"] + assert anime2.episodeDict == {1: [1], 2: [1, 2]} + + +def _create_test_data_file( + base_dir: str, + folder: str, + key: str, + name: str, + episodes: dict +) -> None: + """ + Create a test data file in the anime directory. 
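+
+    For folder="Anime Test 1", key="anime-test-1", name="Anime Test 1" and
+    episodes={1: [1, 2]}, the file written below contains (formatting
+    condensed):
+
+        {"key": "anime-test-1", "name": "Anime Test 1",
+         "site": "https://aniworld.to", "folder": "Anime Test 1",
+         "episodeDict": {"1": [1, 2]}}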
+ + Args: + base_dir: Base directory for anime folders + folder: Folder name for the anime + key: Unique key for the series + name: Display name of the series + episodes: Dictionary mapping season to list of episode numbers + """ + anime_dir = os.path.join(base_dir, folder) + os.makedirs(anime_dir, exist_ok=True) + + data = { + "key": key, + "name": name, + "site": "https://aniworld.to", + "folder": folder, + "episodeDict": {str(k): v for k, v in episodes.items()} + } + + data_file = os.path.join(anime_dir, "data") + with open(data_file, "w", encoding="utf-8") as f: + json.dump(data, f, indent=2) diff --git a/tests/integration/test_db_transactions.py b/tests/integration/test_db_transactions.py new file mode 100644 index 0000000..535ada5 --- /dev/null +++ b/tests/integration/test_db_transactions.py @@ -0,0 +1,546 @@ +"""Integration tests for database transaction behavior. + +Tests real database transaction handling including: +- Transaction isolation +- Concurrent transaction handling +- Real commit/rollback behavior +""" +import asyncio +from datetime import datetime, timedelta, timezone +from typing import List + +import pytest +from sqlalchemy import text +from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine + +from src.server.database.base import Base +from src.server.database.connection import ( + TransactionManager, + get_session_transaction_depth, + is_session_in_transaction, +) +from src.server.database.models import AnimeSeries, DownloadQueueItem, Episode +from src.server.database.service import ( + AnimeSeriesService, + DownloadQueueService, + EpisodeService, +) +from src.server.database.transaction import ( + TransactionPropagation, + atomic, + transactional, +) + +# ============================================================================ +# Fixtures +# ============================================================================ + + +@pytest.fixture +async def db_engine(): + """Create in-memory database engine for testing.""" + engine = create_async_engine( + "sqlite+aiosqlite:///:memory:", + echo=False, + ) + + async with engine.begin() as conn: + await conn.run_sync(Base.metadata.create_all) + + yield engine + + await engine.dispose() + + +@pytest.fixture +async def session_factory(db_engine): + """Create session factory for testing.""" + from sqlalchemy.ext.asyncio import async_sessionmaker + + return async_sessionmaker( + db_engine, + class_=AsyncSession, + expire_on_commit=False, + autoflush=False, + autocommit=False, + ) + + +@pytest.fixture +async def db_session(session_factory): + """Create database session for testing.""" + async with session_factory() as session: + yield session + await session.rollback() + + +# ============================================================================ +# Real Database Transaction Tests +# ============================================================================ + + +class TestRealDatabaseTransactions: + """Tests using real in-memory database.""" + + @pytest.mark.asyncio + async def test_commit_persists_data(self, db_session): + """Test that committed data is actually persisted.""" + async with atomic(db_session): + series = await AnimeSeriesService.create( + db_session, + key="commit-test", + name="Commit Test Series", + site="https://test.com", + folder="/test/folder", + ) + + # Data should be retrievable after commit + retrieved = await AnimeSeriesService.get_by_key( + db_session, "commit-test" + ) + assert retrieved is not None + assert retrieved.name == "Commit Test Series" + + @pytest.mark.asyncio + async 
def test_rollback_discards_data(self, db_session): + """Test that rolled back data is discarded.""" + try: + async with atomic(db_session): + series = await AnimeSeriesService.create( + db_session, + key="rollback-test", + name="Rollback Test Series", + site="https://test.com", + folder="/test/folder", + ) + await db_session.flush() + + raise ValueError("Force rollback") + except ValueError: + pass + + # Data should NOT be retrievable after rollback + retrieved = await AnimeSeriesService.get_by_key( + db_session, "rollback-test" + ) + assert retrieved is None + + @pytest.mark.asyncio + async def test_multiple_operations_atomic(self, db_session): + """Test multiple operations are committed together.""" + async with atomic(db_session): + # Create series + series = await AnimeSeriesService.create( + db_session, + key="atomic-multi-test", + name="Atomic Multi Test", + site="https://test.com", + folder="/test/folder", + ) + + # Create episode + episode = await EpisodeService.create( + db_session, + series_id=series.id, + season=1, + episode_number=1, + title="Episode 1", + ) + + # Create queue item + item = await DownloadQueueService.create( + db_session, + series_id=series.id, + episode_id=episode.id, + ) + + # All should be persisted + retrieved_series = await AnimeSeriesService.get_by_key( + db_session, "atomic-multi-test" + ) + assert retrieved_series is not None + + episodes = await EpisodeService.get_by_series( + db_session, retrieved_series.id + ) + assert len(episodes) == 1 + + queue_items = await DownloadQueueService.get_all(db_session) + assert len(queue_items) >= 1 + + @pytest.mark.asyncio + async def test_multiple_operations_rollback_all(self, db_session): + """Test multiple operations are all rolled back on failure.""" + try: + async with atomic(db_session): + # Create series + series = await AnimeSeriesService.create( + db_session, + key="rollback-multi-test", + name="Rollback Multi Test", + site="https://test.com", + folder="/test/folder", + ) + + # Create episode + episode = await EpisodeService.create( + db_session, + series_id=series.id, + season=1, + episode_number=1, + ) + + # Create queue item + item = await DownloadQueueService.create( + db_session, + series_id=series.id, + episode_id=episode.id, + ) + + await db_session.flush() + + raise RuntimeError("Force complete rollback") + except RuntimeError: + pass + + # None should be persisted + retrieved_series = await AnimeSeriesService.get_by_key( + db_session, "rollback-multi-test" + ) + assert retrieved_series is None + + +# ============================================================================ +# Transaction Manager Tests +# ============================================================================ + + +class TestTransactionManager: + """Tests for TransactionManager class.""" + + @pytest.mark.asyncio + async def test_transaction_manager_basic_flow(self, session_factory): + """Test basic transaction manager usage.""" + async with TransactionManager(session_factory) as tm: + session = await tm.get_session() + await tm.begin() + + series = AnimeSeries( + key="tm-test", + name="TM Test", + site="https://test.com", + folder="/test", + ) + session.add(series) + + await tm.commit() + + # Verify data persisted + async with session_factory() as verify_session: + from sqlalchemy import select + result = await verify_session.execute( + select(AnimeSeries).where(AnimeSeries.key == "tm-test") + ) + series = result.scalar_one_or_none() + assert series is not None + + @pytest.mark.asyncio + async def 
test_transaction_manager_rollback(self, session_factory): + """Test transaction manager rollback.""" + async with TransactionManager(session_factory) as tm: + session = await tm.get_session() + await tm.begin() + + series = AnimeSeries( + key="tm-rollback-test", + name="TM Rollback Test", + site="https://test.com", + folder="/test", + ) + session.add(series) + await session.flush() + + await tm.rollback() + + # Verify data NOT persisted + async with session_factory() as verify_session: + from sqlalchemy import select + result = await verify_session.execute( + select(AnimeSeries).where(AnimeSeries.key == "tm-rollback-test") + ) + series = result.scalar_one_or_none() + assert series is None + + @pytest.mark.asyncio + async def test_transaction_manager_auto_rollback_on_exception( + self, session_factory + ): + """Test transaction manager auto-rolls back on exception.""" + with pytest.raises(ValueError): + async with TransactionManager(session_factory) as tm: + session = await tm.get_session() + await tm.begin() + + series = AnimeSeries( + key="tm-auto-rollback", + name="TM Auto Rollback", + site="https://test.com", + folder="/test", + ) + session.add(series) + await session.flush() + + raise ValueError("Force exception") + + # Verify data NOT persisted + async with session_factory() as verify_session: + from sqlalchemy import select + result = await verify_session.execute( + select(AnimeSeries).where(AnimeSeries.key == "tm-auto-rollback") + ) + series = result.scalar_one_or_none() + assert series is None + + @pytest.mark.asyncio + async def test_transaction_manager_state_tracking(self, session_factory): + """Test transaction manager tracks state correctly.""" + async with TransactionManager(session_factory) as tm: + assert tm.is_in_transaction() is False + + await tm.begin() + assert tm.is_in_transaction() is True + + await tm.commit() + assert tm.is_in_transaction() is False + + +# ============================================================================ +# Helper Function Tests +# ============================================================================ + + +class TestConnectionHelpers: + """Tests for connection module helper functions.""" + + @pytest.mark.asyncio + async def test_is_session_in_transaction(self, db_session): + """Test is_session_in_transaction helper.""" + # Initially not in transaction + assert is_session_in_transaction(db_session) is False + + async with atomic(db_session): + # Now in transaction + assert is_session_in_transaction(db_session) is True + + # After exit, depends on session state + # SQLite behavior may vary + + @pytest.mark.asyncio + async def test_get_session_transaction_depth(self, db_session): + """Test get_session_transaction_depth helper.""" + depth = get_session_transaction_depth(db_session) + assert depth >= 0 + + +# ============================================================================ +# @transactional Decorator Integration Tests +# ============================================================================ + + +class TestTransactionalDecoratorIntegration: + """Integration tests for @transactional decorator.""" + + @pytest.mark.asyncio + async def test_decorated_function_commits(self, db_session): + """Test decorated function commits on success.""" + @transactional() + async def create_series_decorated(db: AsyncSession): + return await AnimeSeriesService.create( + db, + key="decorated-test", + name="Decorated Test", + site="https://test.com", + folder="/test", + ) + + series = await create_series_decorated(db=db_session) + + # Verify 
committed + retrieved = await AnimeSeriesService.get_by_key( + db_session, "decorated-test" + ) + assert retrieved is not None + + @pytest.mark.asyncio + async def test_decorated_function_rollback(self, db_session): + """Test decorated function rolls back on error.""" + @transactional() + async def create_then_fail(db: AsyncSession): + await AnimeSeriesService.create( + db, + key="decorated-rollback", + name="Decorated Rollback", + site="https://test.com", + folder="/test", + ) + raise ValueError("Force failure") + + with pytest.raises(ValueError): + await create_then_fail(db=db_session) + + # Verify NOT committed + retrieved = await AnimeSeriesService.get_by_key( + db_session, "decorated-rollback" + ) + assert retrieved is None + + @pytest.mark.asyncio + async def test_nested_decorated_functions(self, db_session): + """Test nested decorated functions work correctly.""" + @transactional(propagation=TransactionPropagation.NESTED) + async def inner_operation(db: AsyncSession, series_id: int): + return await EpisodeService.create( + db, + series_id=series_id, + season=1, + episode_number=1, + ) + + @transactional() + async def outer_operation(db: AsyncSession): + series = await AnimeSeriesService.create( + db, + key="nested-decorated", + name="Nested Decorated", + site="https://test.com", + folder="/test", + ) + episode = await inner_operation(db=db, series_id=series.id) + return series, episode + + series, episode = await outer_operation(db=db_session) + + # Both should be committed + assert series is not None + assert episode is not None + + +# ============================================================================ +# Concurrent Transaction Tests +# ============================================================================ + + +class TestConcurrentTransactions: + """Tests for concurrent transaction handling.""" + + @pytest.mark.asyncio + async def test_concurrent_writes_different_keys(self, session_factory): + """Test concurrent writes to different records.""" + async def create_series(key: str): + async with session_factory() as session: + async with atomic(session): + await AnimeSeriesService.create( + session, + key=key, + name=f"Series {key}", + site="https://test.com", + folder=f"/test/{key}", + ) + + # Run concurrent creates + await asyncio.gather( + create_series("concurrent-1"), + create_series("concurrent-2"), + create_series("concurrent-3"), + ) + + # Verify all created + async with session_factory() as verify_session: + for i in range(1, 4): + series = await AnimeSeriesService.get_by_key( + verify_session, f"concurrent-{i}" + ) + assert series is not None + + +# ============================================================================ +# Queue Repository Transaction Tests +# ============================================================================ + + +class TestQueueRepositoryTransactions: + """Integration tests for QueueRepository transaction handling.""" + + @pytest.mark.asyncio + async def test_save_item_atomic(self, session_factory): + """Test save_item creates series, episode, and queue item atomically.""" + from src.server.models.download import ( + DownloadItem, + DownloadStatus, + EpisodeIdentifier, + ) + from src.server.services.queue_repository import QueueRepository + + repo = QueueRepository(session_factory) + + item = DownloadItem( + id="temp-id", + serie_id="repo-atomic-test", + serie_folder="/test/folder", + serie_name="Repo Atomic Test", + episode=EpisodeIdentifier(season=1, episode=1), + status=DownloadStatus.PENDING, + ) + + saved_item = await 
repo.save_item(item) + + assert saved_item.id != "temp-id" # Should have DB ID + + # Verify all entities created + async with session_factory() as verify_session: + series = await AnimeSeriesService.get_by_key( + verify_session, "repo-atomic-test" + ) + assert series is not None + + episodes = await EpisodeService.get_by_series( + verify_session, series.id + ) + assert len(episodes) == 1 + + queue_items = await DownloadQueueService.get_all(verify_session) + assert len(queue_items) >= 1 + + @pytest.mark.asyncio + async def test_clear_all_atomic(self, session_factory): + """Test clear_all removes all items atomically.""" + from src.server.models.download import ( + DownloadItem, + DownloadStatus, + EpisodeIdentifier, + ) + from src.server.services.queue_repository import QueueRepository + + repo = QueueRepository(session_factory) + + # Add some items + for i in range(3): + item = DownloadItem( + id=f"clear-{i}", + serie_id=f"clear-series-{i}", + serie_folder=f"/test/folder/{i}", + serie_name=f"Clear Series {i}", + episode=EpisodeIdentifier(season=1, episode=1), + status=DownloadStatus.PENDING, + ) + await repo.save_item(item) + + # Clear all + count = await repo.clear_all() + + assert count == 3 + + # Verify all cleared + async with session_factory() as verify_session: + queue_items = await DownloadQueueService.get_all(verify_session) + assert len(queue_items) == 0 diff --git a/tests/integration/test_download_flow.py b/tests/integration/test_download_flow.py index f951b6e..80986c1 100644 --- a/tests/integration/test_download_flow.py +++ b/tests/integration/test_download_flow.py @@ -220,7 +220,8 @@ class TestDownloadFlowEndToEnd: assert response.status_code == 400 data = response.json() - assert "detail" in data + # API returns 'message' for error responses + assert "message" in data async def test_validation_error_for_invalid_priority(self, authenticated_client): """Test validation error for invalid priority level.""" diff --git a/tests/integration/test_download_progress_integration.py b/tests/integration/test_download_progress_integration.py index cc631e2..da1f72c 100644 --- a/tests/integration/test_download_progress_integration.py +++ b/tests/integration/test_download_progress_integration.py @@ -72,11 +72,14 @@ async def anime_service(mock_series_app, progress_service): @pytest.fixture async def download_service(anime_service, progress_service): - """Create a DownloadService.""" + """Create a DownloadService with mock queue repository.""" + from tests.unit.test_download_service import MockQueueRepository + + mock_repo = MockQueueRepository() service = DownloadService( anime_service=anime_service, progress_service=progress_service, - persistence_path="/tmp/test_integration_progress_queue.json", + queue_repository=mock_repo, ) yield service await service.stop() @@ -177,9 +180,9 @@ class TestDownloadProgressIntegration: connection_id = "test_client_1" await websocket_service.connect(mock_ws, connection_id) - # Join the queue_progress room to receive queue updates + # Join the queue room to receive queue updates await websocket_service.manager.join_room( - connection_id, "queue_progress" + connection_id, "queue" ) # Subscribe to progress events and forward to WebSocket @@ -251,12 +254,12 @@ class TestDownloadProgressIntegration: await websocket_service.connect(client1, "client1") await websocket_service.connect(client2, "client2") - # Join both clients to the queue_progress room + # Join both clients to the queue room await websocket_service.manager.join_room( - "client1", "queue_progress" + 
"client1", "queue" ) await websocket_service.manager.join_room( - "client2", "queue_progress" + "client2", "queue" ) # Subscribe to progress events and forward to WebSocket diff --git a/tests/integration/test_identifier_consistency.py b/tests/integration/test_identifier_consistency.py index 0ebb384..e93b85a 100644 --- a/tests/integration/test_identifier_consistency.py +++ b/tests/integration/test_identifier_consistency.py @@ -88,9 +88,10 @@ def progress_service(): @pytest.fixture async def download_service(mock_series_app, progress_service, tmp_path): - """Create a DownloadService with dependencies.""" - import uuid - persistence_path = tmp_path / f"test_queue_{uuid.uuid4()}.json" + """Create a DownloadService with mock repository for testing.""" + from tests.unit.test_download_service import MockQueueRepository + + mock_repo = MockQueueRepository() anime_service = AnimeService( series_app=mock_series_app, @@ -101,7 +102,7 @@ async def download_service(mock_series_app, progress_service, tmp_path): service = DownloadService( anime_service=anime_service, progress_service=progress_service, - persistence_path=str(persistence_path), + queue_repository=mock_repo, ) yield service await service.stop() @@ -319,8 +320,6 @@ class TestServiceIdentifierConsistency: - Persisted data contains serie_id (key) - Data can be restored with correct identifiers """ - import json - # Add item to queue await download_service.add_to_queue( serie_id="jujutsu-kaisen", @@ -330,18 +329,13 @@ class TestServiceIdentifierConsistency: priority=DownloadPriority.NORMAL, ) - # Read persisted data - persistence_path = download_service._persistence_path - with open(persistence_path, "r") as f: - data = json.load(f) + # Verify item is in pending queue (in-memory cache synced with DB) + pending_items = list(download_service._pending_queue) + assert len(pending_items) == 1 - # Verify persisted data structure - assert "pending" in data - assert len(data["pending"]) == 1 - - persisted_item = data["pending"][0] - assert persisted_item["serie_id"] == "jujutsu-kaisen" - assert persisted_item["serie_folder"] == "Jujutsu Kaisen (2020)" + persisted_item = pending_items[0] + assert persisted_item.serie_id == "jujutsu-kaisen" + assert persisted_item.serie_folder == "Jujutsu Kaisen (2020)" class TestWebSocketIdentifierConsistency: diff --git a/tests/integration/test_websocket_integration.py b/tests/integration/test_websocket_integration.py index b01ad9a..dc212b1 100644 --- a/tests/integration/test_websocket_integration.py +++ b/tests/integration/test_websocket_integration.py @@ -6,7 +6,7 @@ real-time updates are properly broadcasted to connected clients. """ import asyncio from typing import Any, Dict, List -from unittest.mock import Mock, patch +from unittest.mock import AsyncMock, Mock, patch import pytest @@ -64,21 +64,25 @@ async def anime_service(mock_series_app, progress_service): series_app=mock_series_app, progress_service=progress_service, ) + # Mock database operations that are called during rescan + service._save_scan_results_to_db = AsyncMock(return_value=0) + service._load_series_from_db = AsyncMock(return_value=None) yield service @pytest.fixture async def download_service(anime_service, progress_service, tmp_path): - """Create a DownloadService with dependencies. + """Create a DownloadService with mock repository for testing. - Uses tmp_path to ensure each test has isolated queue storage. + Uses mock repository to ensure each test has isolated queue storage. 
""" - import uuid - persistence_path = tmp_path / f"test_queue_{uuid.uuid4()}.json" + from tests.unit.test_download_service import MockQueueRepository + + mock_repo = MockQueueRepository() service = DownloadService( anime_service=anime_service, progress_service=progress_service, - persistence_path=str(persistence_path), + queue_repository=mock_repo, ) yield service, progress_service await service.stop() @@ -321,8 +325,9 @@ class TestWebSocketScanIntegration: assert len(broadcasts) >= 2 # At least start and complete # Check for scan progress broadcasts + # Room name is 'scan' for SCAN type progress scan_broadcasts = [ - b for b in broadcasts if b["room"] == "scan_progress" + b for b in broadcasts if b["room"] == "scan" ] assert len(scan_broadcasts) >= 2 @@ -375,8 +380,9 @@ class TestWebSocketScanIntegration: await anime_service.rescan() # Verify failure broadcast + # Room name is 'scan' for SCAN type progress scan_broadcasts = [ - b for b in broadcasts if b["room"] == "scan_progress" + b for b in broadcasts if b["room"] == "scan" ] assert len(scan_broadcasts) >= 2 # Start and fail @@ -434,7 +440,7 @@ class TestWebSocketProgressIntegration: start_broadcast = broadcasts[0] assert start_broadcast["data"]["status"] == "started" - assert start_broadcast["room"] == "download_progress" + assert start_broadcast["room"] == "downloads" # Room name for DOWNLOAD type update_broadcast = broadcasts[1] assert update_broadcast["data"]["status"] == "in_progress" diff --git a/tests/performance/test_api_load.py b/tests/performance/test_api_load.py index 685a2cb..f3be66c 100644 --- a/tests/performance/test_api_load.py +++ b/tests/performance/test_api_load.py @@ -29,6 +29,7 @@ class TestAPILoadTesting: mock_app = MagicMock() mock_app.list = MagicMock() mock_app.list.GetMissingEpisode = MagicMock(return_value=[]) + mock_app.list.GetList = MagicMock(return_value=[]) mock_app.search = AsyncMock(return_value=[]) app.dependency_overrides[get_series_app] = lambda: mock_app diff --git a/tests/performance/test_download_stress.py b/tests/performance/test_download_stress.py index 1e28063..aeee44c 100644 --- a/tests/performance/test_download_stress.py +++ b/tests/performance/test_download_stress.py @@ -28,12 +28,13 @@ class TestDownloadQueueStress: @pytest.fixture def download_service(self, mock_anime_service, tmp_path): - """Create download service with mock.""" - persistence_path = str(tmp_path / "test_queue.json") + """Create download service with mock repository.""" + from tests.unit.test_download_service import MockQueueRepository + mock_repo = MockQueueRepository() service = DownloadService( anime_service=mock_anime_service, max_retries=3, - persistence_path=persistence_path, + queue_repository=mock_repo, ) return service @@ -176,12 +177,13 @@ class TestDownloadMemoryUsage: @pytest.fixture def download_service(self, mock_anime_service, tmp_path): - """Create download service with mock.""" - persistence_path = str(tmp_path / "test_queue.json") + """Create download service with mock repository.""" + from tests.unit.test_download_service import MockQueueRepository + mock_repo = MockQueueRepository() service = DownloadService( anime_service=mock_anime_service, max_retries=3, - persistence_path=persistence_path, + queue_repository=mock_repo, ) return service @@ -232,12 +234,13 @@ class TestDownloadConcurrency: @pytest.fixture def download_service(self, mock_anime_service, tmp_path): - """Create download service with mock.""" - persistence_path = str(tmp_path / "test_queue.json") + """Create download service with mock 
repository.""" + from tests.unit.test_download_service import MockQueueRepository + mock_repo = MockQueueRepository() service = DownloadService( anime_service=mock_anime_service, max_retries=3, - persistence_path=persistence_path, + queue_repository=mock_repo, ) return service @@ -321,11 +324,12 @@ class TestDownloadErrorHandling: self, mock_failing_anime_service, tmp_path ): """Create download service with failing mock.""" - persistence_path = str(tmp_path / "test_queue.json") + from tests.unit.test_download_service import MockQueueRepository + mock_repo = MockQueueRepository() service = DownloadService( anime_service=mock_failing_anime_service, max_retries=3, - persistence_path=persistence_path, + queue_repository=mock_repo, ) return service @@ -338,12 +342,13 @@ class TestDownloadErrorHandling: @pytest.fixture def download_service(self, mock_anime_service, tmp_path): - """Create download service with mock.""" - persistence_path = str(tmp_path / "test_queue.json") + """Create download service with mock repository.""" + from tests.unit.test_download_service import MockQueueRepository + mock_repo = MockQueueRepository() service = DownloadService( anime_service=mock_anime_service, max_retries=3, - persistence_path=persistence_path, + queue_repository=mock_repo, ) return service diff --git a/tests/security/test_sql_injection.py b/tests/security/test_sql_injection.py index 565587d..96483eb 100644 --- a/tests/security/test_sql_injection.py +++ b/tests/security/test_sql_injection.py @@ -27,6 +27,7 @@ class TestSQLInjection: mock_app = MagicMock() mock_app.list = MagicMock() mock_app.list.GetMissingEpisode = MagicMock(return_value=[]) + mock_app.list.GetList = MagicMock(return_value=[]) mock_app.search = AsyncMock(return_value=[]) # Override dependency @@ -287,6 +288,7 @@ class TestDatabaseSecurity: mock_app = MagicMock() mock_app.list = MagicMock() mock_app.list.GetMissingEpisode = MagicMock(return_value=[]) + mock_app.list.GetList = MagicMock(return_value=[]) mock_app.search = AsyncMock(return_value=[]) # Override dependency diff --git a/tests/unit/test_anime_models.py b/tests/unit/test_anime_models.py index 71d098f..1dab0bf 100644 --- a/tests/unit/test_anime_models.py +++ b/tests/unit/test_anime_models.py @@ -65,7 +65,6 @@ class TestAnimeSeriesResponse: title="Attack on Titan", folder="Attack on Titan (2013)", episodes=[ep], - total_episodes=12, ) assert series.key == "attack-on-titan" diff --git a/tests/unit/test_anime_service.py b/tests/unit/test_anime_service.py index e7af89a..f1d6386 100644 --- a/tests/unit/test_anime_service.py +++ b/tests/unit/test_anime_service.py @@ -6,7 +6,7 @@ error handling, and progress reporting integration. 
from __future__ import annotations import asyncio -from unittest.mock import AsyncMock, MagicMock +from unittest.mock import AsyncMock, MagicMock, patch import pytest @@ -183,7 +183,17 @@ class TestRescan: self, anime_service, mock_series_app, mock_progress_service ): """Test successful rescan operation.""" - await anime_service.rescan() + # Mock rescan to return empty list (no DB save needed) + mock_series_app.rescan.return_value = [] + + # Mock the database operations + with patch.object( + anime_service, '_save_scan_results_to_db', new_callable=AsyncMock + ): + with patch.object( + anime_service, '_load_series_from_db', new_callable=AsyncMock + ): + await anime_service.rescan() # Verify SeriesApp.rescan was called (lowercase, not ReScan) mock_series_app.rescan.assert_called_once() @@ -193,7 +203,15 @@ class TestRescan: """Test rescan operation (callback parameter removed).""" # Rescan no longer accepts callback parameter # Progress is tracked via event handlers automatically - await anime_service.rescan() + mock_series_app.rescan.return_value = [] + + with patch.object( + anime_service, '_save_scan_results_to_db', new_callable=AsyncMock + ): + with patch.object( + anime_service, '_load_series_from_db', new_callable=AsyncMock + ): + await anime_service.rescan() # Verify rescan was called mock_series_app.rescan.assert_called_once() @@ -207,9 +225,17 @@ class TestRescan: # Update series list mock_series_app.series_list = [{"name": "Test"}, {"name": "New"}] + mock_series_app.rescan.return_value = [] - # Rescan should clear cache - await anime_service.rescan() + # Mock the database operations + with patch.object( + anime_service, '_save_scan_results_to_db', new_callable=AsyncMock + ): + with patch.object( + anime_service, '_load_series_from_db', new_callable=AsyncMock + ): + # Rescan should clear cache + await anime_service.rescan() # Next list_missing should return updated data result = await anime_service.list_missing() diff --git a/tests/unit/test_config_models.py b/tests/unit/test_config_models.py index 579a1fd..589e8f5 100644 --- a/tests/unit/test_config_models.py +++ b/tests/unit/test_config_models.py @@ -44,12 +44,12 @@ def test_appconfig_and_config_update_apply_to(): def test_backup_and_validation(): cfg = AppConfig() # default backups disabled -> valid - res: ValidationResult = cfg.validate() + res: ValidationResult = cfg.validate_config() assert res.valid is True # enable backups but leave path empty -> invalid cfg.backup.enabled = True cfg.backup.path = "" - res2 = cfg.validate() + res2 = cfg.validate_config() assert res2.valid is False assert any("backup.path" in e for e in res2.errors) diff --git a/tests/unit/test_config_service.py b/tests/unit/test_config_service.py index 43375f5..c24b80c 100644 --- a/tests/unit/test_config_service.py +++ b/tests/unit/test_config_service.py @@ -318,25 +318,6 @@ class TestConfigServiceBackups: assert len(backups) == 3 # Should only keep max_backups -class TestConfigServiceMigration: - """Test configuration migration.""" - - def test_migration_preserves_data(self, config_service, sample_config): - """Test that migration preserves configuration data.""" - # Manually save config with old version - data = sample_config.model_dump() - data["version"] = "0.9.0" # Old version - - with open(config_service.config_path, "w", encoding="utf-8") as f: - json.dump(data, f) - - # Load should migrate automatically - loaded = config_service.load_config() - - assert loaded.name == sample_config.name - assert loaded.data_dir == sample_config.data_dir - - class 
TestConfigServiceSingleton: """Test singleton instance management.""" diff --git a/tests/unit/test_database_init.py b/tests/unit/test_database_init.py index a7dfa45..463daa4 100644 --- a/tests/unit/test_database_init.py +++ b/tests/unit/test_database_init.py @@ -25,7 +25,6 @@ from src.server.database.init import ( create_database_backup, create_database_schema, get_database_info, - get_migration_guide, get_schema_version, initialize_database, seed_initial_data, @@ -372,16 +371,6 @@ def test_get_database_info(): assert set(info["expected_tables"]) == EXPECTED_TABLES -def test_get_migration_guide(): - """Test getting migration guide.""" - guide = get_migration_guide() - - assert isinstance(guide, str) - assert "Alembic" in guide - assert "alembic init" in guide - assert "alembic upgrade head" in guide - - # ============================================================================= # Integration Tests # ============================================================================= diff --git a/tests/unit/test_database_models.py b/tests/unit/test_database_models.py index 2587ab6..473c343 100644 --- a/tests/unit/test_database_models.py +++ b/tests/unit/test_database_models.py @@ -14,9 +14,7 @@ from sqlalchemy.orm import Session, sessionmaker from src.server.database.base import Base, SoftDeleteMixin, TimestampMixin from src.server.database.models import ( AnimeSeries, - DownloadPriority, DownloadQueueItem, - DownloadStatus, Episode, UserSession, ) @@ -49,11 +47,6 @@ class TestAnimeSeries: name="Attack on Titan", site="https://aniworld.to", folder="/anime/attack-on-titan", - description="Epic anime about titans", - status="completed", - total_episodes=75, - cover_url="https://example.com/cover.jpg", - episode_dict={1: [1, 2, 3], 2: [1, 2, 3, 4]}, ) db_session.add(series) @@ -172,9 +165,7 @@ class TestEpisode: episode_number=5, title="The Fifth Episode", file_path="/anime/test/S01E05.mp4", - file_size=524288000, # 500 MB is_downloaded=True, - download_date=datetime.now(timezone.utc), ) db_session.add(episode) @@ -225,17 +216,17 @@ class TestDownloadQueueItem: db_session.add(series) db_session.commit() - item = DownloadQueueItem( + episode = Episode( series_id=series.id, season=1, episode_number=3, - status=DownloadStatus.DOWNLOADING, - priority=DownloadPriority.HIGH, - progress_percent=45.5, - downloaded_bytes=250000000, - total_bytes=550000000, - download_speed=2500000.0, - retry_count=0, + ) + db_session.add(episode) + db_session.commit() + + item = DownloadQueueItem( + series_id=series.id, + episode_id=episode.id, download_url="https://example.com/download/ep3", file_destination="/anime/download/S01E03.mp4", ) @@ -245,37 +236,38 @@ class TestDownloadQueueItem: # Verify saved assert item.id is not None - assert item.status == DownloadStatus.DOWNLOADING - assert item.priority == DownloadPriority.HIGH - assert item.progress_percent == 45.5 - assert item.retry_count == 0 + assert item.episode_id == episode.id + assert item.series_id == series.id - def test_download_item_status_enum(self, db_session: Session): - """Test download status enum values.""" + def test_download_item_episode_relationship(self, db_session: Session): + """Test download item episode relationship.""" series = AnimeSeries( - key="status-test", - name="Status Test", + key="relationship-test", + name="Relationship Test", site="https://example.com", - folder="/anime/status", + folder="/anime/relationship", ) db_session.add(series) db_session.commit() - item = DownloadQueueItem( + episode = Episode( series_id=series.id, season=1, 
episode_number=1, - status=DownloadStatus.PENDING, + ) + db_session.add(episode) + db_session.commit() + + item = DownloadQueueItem( + series_id=series.id, + episode_id=episode.id, ) db_session.add(item) db_session.commit() - # Update status - item.status = DownloadStatus.COMPLETED - db_session.commit() - - # Verify status change - assert item.status == DownloadStatus.COMPLETED + # Verify relationship + assert item.episode.id == episode.id + assert item.series.id == series.id def test_download_item_error_handling(self, db_session: Session): """Test download item with error information.""" @@ -288,21 +280,24 @@ class TestDownloadQueueItem: db_session.add(series) db_session.commit() - item = DownloadQueueItem( + episode = Episode( series_id=series.id, season=1, episode_number=1, - status=DownloadStatus.FAILED, + ) + db_session.add(episode) + db_session.commit() + + item = DownloadQueueItem( + series_id=series.id, + episode_id=episode.id, error_message="Network timeout after 30 seconds", - retry_count=2, ) db_session.add(item) db_session.commit() # Verify error info - assert item.status == DownloadStatus.FAILED assert item.error_message == "Network timeout after 30 seconds" - assert item.retry_count == 2 class TestUserSession: @@ -502,32 +497,31 @@ class TestDatabaseQueries: db_session.add(series) db_session.commit() - # Create items with different statuses - for i, status in enumerate([ - DownloadStatus.PENDING, - DownloadStatus.DOWNLOADING, - DownloadStatus.COMPLETED, - ]): - item = DownloadQueueItem( + # Create episodes and items + for i in range(3): + episode = Episode( series_id=series.id, season=1, episode_number=i + 1, - status=status, + ) + db_session.add(episode) + db_session.commit() + + item = DownloadQueueItem( + series_id=series.id, + episode_id=episode.id, ) db_session.add(item) db_session.commit() - # Query pending items + # Query all items result = db_session.execute( - select(DownloadQueueItem).where( - DownloadQueueItem.status == DownloadStatus.PENDING - ) + select(DownloadQueueItem) ) - pending = result.scalars().all() + items = result.scalars().all() # Verify query - assert len(pending) == 1 - assert pending[0].episode_number == 1 + assert len(items) == 3 def test_query_active_sessions(self, db_session: Session): """Test querying active user sessions.""" diff --git a/tests/unit/test_database_service.py b/tests/unit/test_database_service.py index 786aa18..79e1955 100644 --- a/tests/unit/test_database_service.py +++ b/tests/unit/test_database_service.py @@ -10,7 +10,6 @@ from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine from sqlalchemy.orm import sessionmaker from src.server.database.base import Base -from src.server.database.models import DownloadPriority, DownloadStatus from src.server.database.service import ( AnimeSeriesService, DownloadQueueService, @@ -65,17 +64,11 @@ async def test_create_anime_series(db_session): name="Test Anime", site="https://example.com", folder="/path/to/anime", - description="A test anime", - status="ongoing", - total_episodes=12, - cover_url="https://example.com/cover.jpg", ) assert series.id is not None assert series.key == "test-anime-1" assert series.name == "Test Anime" - assert series.description == "A test anime" - assert series.total_episodes == 12 @pytest.mark.asyncio @@ -160,13 +153,11 @@ async def test_update_anime_series(db_session): db_session, series.id, name="Updated Name", - total_episodes=24, ) await db_session.commit() assert updated is not None assert updated.name == "Updated Name" - assert 
updated.total_episodes == 24 @pytest.mark.asyncio @@ -308,14 +299,12 @@ async def test_mark_episode_downloaded(db_session): db_session, episode.id, file_path="/path/to/file.mp4", - file_size=1024000, ) await db_session.commit() assert updated is not None assert updated.is_downloaded is True assert updated.file_path == "/path/to/file.mp4" - assert updated.download_date is not None # ============================================================================ @@ -336,23 +325,30 @@ async def test_create_download_queue_item(db_session): ) await db_session.commit() - # Add to queue - item = await DownloadQueueService.create( + # Create episode + episode = await EpisodeService.create( db_session, series_id=series.id, season=1, episode_number=1, - priority=DownloadPriority.HIGH, + ) + await db_session.commit() + + # Add to queue + item = await DownloadQueueService.create( + db_session, + series_id=series.id, + episode_id=episode.id, ) assert item.id is not None - assert item.status == DownloadStatus.PENDING - assert item.priority == DownloadPriority.HIGH + assert item.episode_id == episode.id + assert item.series_id == series.id @pytest.mark.asyncio -async def test_get_pending_downloads(db_session): - """Test retrieving pending downloads.""" +async def test_get_download_queue_item_by_episode(db_session): + """Test retrieving download queue item by episode.""" # Create series series = await AnimeSeriesService.create( db_session, @@ -362,29 +358,32 @@ async def test_get_pending_downloads(db_session): folder="/path/test5", ) - # Add pending items - await DownloadQueueService.create( + # Create episode + episode = await EpisodeService.create( db_session, series_id=series.id, season=1, episode_number=1, ) + await db_session.commit() + + # Add to queue await DownloadQueueService.create( db_session, series_id=series.id, - season=1, - episode_number=2, + episode_id=episode.id, ) await db_session.commit() - # Retrieve pending - pending = await DownloadQueueService.get_pending(db_session) - assert len(pending) == 2 + # Retrieve by episode + item = await DownloadQueueService.get_by_episode(db_session, episode.id) + assert item is not None + assert item.episode_id == episode.id @pytest.mark.asyncio -async def test_update_download_status(db_session): - """Test updating download status.""" +async def test_set_download_error(db_session): + """Test setting error on download queue item.""" # Create series and queue item series = await AnimeSeriesService.create( db_session, @@ -393,30 +392,34 @@ async def test_update_download_status(db_session): site="https://example.com", folder="/path/test6", ) - item = await DownloadQueueService.create( + episode = await EpisodeService.create( db_session, series_id=series.id, season=1, episode_number=1, ) + item = await DownloadQueueService.create( + db_session, + series_id=series.id, + episode_id=episode.id, + ) await db_session.commit() - # Update status - updated = await DownloadQueueService.update_status( + # Set error + updated = await DownloadQueueService.set_error( db_session, item.id, - DownloadStatus.DOWNLOADING, + "Network error", ) await db_session.commit() assert updated is not None - assert updated.status == DownloadStatus.DOWNLOADING - assert updated.started_at is not None + assert updated.error_message == "Network error" @pytest.mark.asyncio -async def test_update_download_progress(db_session): - """Test updating download progress.""" +async def test_delete_download_queue_item_by_episode(db_session): + """Test deleting download queue item by episode.""" # Create 
series and queue item series = await AnimeSeriesService.create( db_session, @@ -425,109 +428,31 @@ async def test_update_download_progress(db_session): site="https://example.com", folder="/path/test7", ) - item = await DownloadQueueService.create( + episode = await EpisodeService.create( db_session, series_id=series.id, season=1, episode_number=1, ) - await db_session.commit() - - # Update progress - updated = await DownloadQueueService.update_progress( - db_session, - item.id, - progress_percent=50.0, - downloaded_bytes=500000, - total_bytes=1000000, - download_speed=50000.0, - ) - await db_session.commit() - - assert updated is not None - assert updated.progress_percent == 50.0 - assert updated.downloaded_bytes == 500000 - assert updated.total_bytes == 1000000 - - -@pytest.mark.asyncio -async def test_clear_completed_downloads(db_session): - """Test clearing completed downloads.""" - # Create series and completed items - series = await AnimeSeriesService.create( - db_session, - key="test-series-8", - name="Test Series 8", - site="https://example.com", - folder="/path/test8", - ) - item1 = await DownloadQueueService.create( + await DownloadQueueService.create( db_session, series_id=series.id, - season=1, - episode_number=1, - ) - item2 = await DownloadQueueService.create( - db_session, - series_id=series.id, - season=1, - episode_number=2, - ) - - # Mark items as completed - await DownloadQueueService.update_status( - db_session, - item1.id, - DownloadStatus.COMPLETED, - ) - await DownloadQueueService.update_status( - db_session, - item2.id, - DownloadStatus.COMPLETED, + episode_id=episode.id, ) await db_session.commit() - # Clear completed - count = await DownloadQueueService.clear_completed(db_session) - await db_session.commit() - - assert count == 2 - - -@pytest.mark.asyncio -async def test_retry_failed_downloads(db_session): - """Test retrying failed downloads.""" - # Create series and failed item - series = await AnimeSeriesService.create( + # Delete by episode + deleted = await DownloadQueueService.delete_by_episode( db_session, - key="test-series-9", - name="Test Series 9", - site="https://example.com", - folder="/path/test9", - ) - item = await DownloadQueueService.create( - db_session, - series_id=series.id, - season=1, - episode_number=1, - ) - - # Mark as failed - await DownloadQueueService.update_status( - db_session, - item.id, - DownloadStatus.FAILED, - error_message="Network error", + episode.id, ) await db_session.commit() - # Retry - retried = await DownloadQueueService.retry_failed(db_session) - await db_session.commit() + assert deleted is True - assert len(retried) == 1 - assert retried[0].status == DownloadStatus.PENDING - assert retried[0].error_message is None + # Verify deleted + item = await DownloadQueueService.get_by_episode(db_session, episode.id) + assert item is None # ============================================================================ diff --git a/tests/unit/test_download_progress_websocket.py b/tests/unit/test_download_progress_websocket.py index ac99ca7..a53f3c2 100644 --- a/tests/unit/test_download_progress_websocket.py +++ b/tests/unit/test_download_progress_websocket.py @@ -102,27 +102,20 @@ async def anime_service(mock_series_app, progress_service): @pytest.fixture async def download_service(anime_service, progress_service): - """Create a DownloadService with dependencies.""" - import os - persistence_path = "/tmp/test_download_progress_queue.json" + """Create a DownloadService with mock repository for testing.""" + from 
tests.unit.test_download_service import MockQueueRepository - # Remove any existing queue file - if os.path.exists(persistence_path): - os.remove(persistence_path) + mock_repo = MockQueueRepository() service = DownloadService( anime_service=anime_service, progress_service=progress_service, - persistence_path=persistence_path, + queue_repository=mock_repo, ) yield service, progress_service await service.stop() - - # Clean up after test - if os.path.exists(persistence_path): - os.remove(persistence_path) class TestDownloadProgressWebSocket: diff --git a/tests/unit/test_download_service.py b/tests/unit/test_download_service.py index 2134fea..271a93b 100644 --- a/tests/unit/test_download_service.py +++ b/tests/unit/test_download_service.py @@ -1,14 +1,13 @@ """Unit tests for the download queue service. -Tests cover queue management, manual download control, persistence, +Tests cover queue management, manual download control, database persistence, and error scenarios for the simplified download service. """ from __future__ import annotations import asyncio -import json from datetime import datetime, timezone -from pathlib import Path +from typing import Dict, List, Optional from unittest.mock import AsyncMock, MagicMock import pytest @@ -23,6 +22,58 @@ from src.server.services.anime_service import AnimeService from src.server.services.download_service import DownloadService, DownloadServiceError +class MockQueueRepository: + """Mock implementation of QueueRepository for testing. + + This provides an in-memory storage that mimics the simplified database + repository behavior without requiring actual database connections. + + Note: The repository is simplified - status, priority, progress are + now managed in-memory by DownloadService, not stored in database. 
+ """ + + def __init__(self): + """Initialize mock repository with in-memory storage.""" + self._items: Dict[str, DownloadItem] = {} + + async def save_item(self, item: DownloadItem) -> DownloadItem: + """Save item to in-memory storage.""" + self._items[item.id] = item + return item + + async def get_item(self, item_id: str) -> Optional[DownloadItem]: + """Get item by ID from in-memory storage.""" + return self._items.get(item_id) + + async def get_all_items(self) -> List[DownloadItem]: + """Get all items in storage.""" + return list(self._items.values()) + + async def set_error( + self, + item_id: str, + error: str, + ) -> bool: + """Set error message on an item.""" + if item_id not in self._items: + return False + self._items[item_id].error = error + return True + + async def delete_item(self, item_id: str) -> bool: + """Delete item from storage.""" + if item_id in self._items: + del self._items[item_id] + return True + return False + + async def clear_all(self) -> int: + """Clear all items.""" + count = len(self._items) + self._items.clear() + return count + + @pytest.fixture def mock_anime_service(): """Create a mock AnimeService.""" @@ -32,18 +83,18 @@ def mock_anime_service(): @pytest.fixture -def temp_persistence_path(tmp_path): - """Create a temporary persistence path.""" - return str(tmp_path / "test_queue.json") +def mock_queue_repository(): + """Create a mock QueueRepository for testing.""" + return MockQueueRepository() @pytest.fixture -def download_service(mock_anime_service, temp_persistence_path): +def download_service(mock_anime_service, mock_queue_repository): """Create a DownloadService instance for testing.""" return DownloadService( anime_service=mock_anime_service, + queue_repository=mock_queue_repository, max_retries=3, - persistence_path=temp_persistence_path, ) @@ -51,12 +102,12 @@ class TestDownloadServiceInitialization: """Test download service initialization.""" def test_initialization_creates_queues( - self, mock_anime_service, temp_persistence_path + self, mock_anime_service, mock_queue_repository ): """Test that initialization creates empty queues.""" service = DownloadService( anime_service=mock_anime_service, - persistence_path=temp_persistence_path, + queue_repository=mock_queue_repository, ) assert len(service._pending_queue) == 0 @@ -65,45 +116,30 @@ class TestDownloadServiceInitialization: assert len(service._failed_items) == 0 assert service._is_stopped is True - def test_initialization_loads_persisted_queue( - self, mock_anime_service, temp_persistence_path + @pytest.mark.asyncio + async def test_initialization_loads_persisted_queue( + self, mock_anime_service, mock_queue_repository ): - """Test that initialization loads persisted queue state.""" - # Create a persisted queue file - persistence_file = Path(temp_persistence_path) - persistence_file.parent.mkdir(parents=True, exist_ok=True) - - test_data = { - "pending": [ - { - "id": "test-id-1", - "serie_id": "series-1", - "serie_folder": "test-series", # Added missing field - "serie_name": "Test Series", - "episode": {"season": 1, "episode": 1, "title": None}, - "status": "pending", - "priority": "NORMAL", # Must be uppercase - "added_at": datetime.now(timezone.utc).isoformat(), - "started_at": None, - "completed_at": None, - "progress": None, - "error": None, - "retry_count": 0, - "source_url": None, - } - ], - "active": [], - "failed": [], - "timestamp": datetime.now(timezone.utc).isoformat(), - } - - with open(persistence_file, "w", encoding="utf-8") as f: - json.dump(test_data, f) + """Test that 
initialization loads persisted queue from database."""
+        # Pre-populate the mock repository with a pending item
+        test_item = DownloadItem(
+            id="test-id-1",
+            serie_id="series-1",
+            serie_folder="test-series",
+            serie_name="Test Series",
+            episode=EpisodeIdentifier(season=1, episode=1),
+            status=DownloadStatus.PENDING,
+            priority=DownloadPriority.NORMAL,
+            added_at=datetime.now(timezone.utc),
+        )
+        await mock_queue_repository.save_item(test_item)
+
+        # Create service and initialize from database
         service = DownloadService(
             anime_service=mock_anime_service,
-            persistence_path=temp_persistence_path,
+            queue_repository=mock_queue_repository,
         )
+        await service.initialize()
 
         assert len(service._pending_queue) == 1
         assert service._pending_queue[0].id == "test-id-1"
@@ -391,11 +427,13 @@ class TestQueueControl:
 
 
 class TestPersistence:
-    """Test queue persistence functionality."""
+    """Test queue persistence functionality with database backend."""
 
     @pytest.mark.asyncio
-    async def test_queue_persistence(self, download_service):
-        """Test that queue state is persisted to disk."""
+    async def test_queue_persistence(
+        self, download_service, mock_queue_repository
+    ):
+        """Test that queue state is persisted to database."""
         await download_service.add_to_queue(
             serie_id="series-1",
             serie_folder="series",
@@ -403,26 +441,20 @@
             episodes=[EpisodeIdentifier(season=1, episode=1)],
         )
 
-        # Persistence file should exist
-        persistence_path = Path(download_service._persistence_path)
-        assert persistence_path.exists()
-
-        # Check file contents
-        with open(persistence_path, "r") as f:
-            data = json.load(f)
-
-        assert len(data["pending"]) == 1
-        assert data["pending"][0]["serie_id"] == "series-1"
+        # Item should be saved in mock repository
+        all_items = await mock_queue_repository.get_all_items()
+        assert len(all_items) == 1
+        assert all_items[0].serie_id == "series-1"
 
     @pytest.mark.asyncio
     async def test_queue_recovery_after_restart(
-        self, mock_anime_service, temp_persistence_path
+        self, mock_anime_service, mock_queue_repository
    ):
         """Test that queue is recovered after service restart."""
         # Create and populate first service
         service1 = DownloadService(
             anime_service=mock_anime_service,
-            persistence_path=temp_persistence_path,
+            queue_repository=mock_queue_repository,
         )
 
         await service1.add_to_queue(
@@ -435,11 +467,13 @@
             ],
         )
 
-        # Create new service with same persistence path
+        # Create new service with same repository (simulating restart)
         service2 = DownloadService(
             anime_service=mock_anime_service,
-            persistence_path=temp_persistence_path,
+            queue_repository=mock_queue_repository,
         )
+        # Initialize to load from database to recover state
+        await service2.initialize()
 
         # Should recover pending items
         assert len(service2._pending_queue) == 2
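The recovery tests above imply that `initialize()` repopulates the in-memory queues from whatever the repository returns. A hypothetical sketch of that partitioning step; `recover_queues` is an illustrative name rather than project API, and the `DownloadStatus.FAILED` branch is assumed from the service's failed-items handling:

```python
# Hypothetical reading of what service.initialize() does for these tests:
# unfinished items are re-queued, failed ones land in the failed list.
from typing import List, Tuple

from src.server.models.download import DownloadItem, DownloadStatus


async def recover_queues(repo) -> Tuple[List[DownloadItem], List[DownloadItem]]:
    """Partition persisted items into (pending, failed) after a restart."""
    pending: List[DownloadItem] = []
    failed: List[DownloadItem] = []
    for item in await repo.get_all_items():
        if item.status == DownloadStatus.FAILED:
            failed.append(item)  # assumed enum member, mirrors _failed_items
        else:
            pending.append(item)
    return pending, failed
```

diff --git a/tests/unit/test_filesystem_utils.py b/tests/unit/test_filesystem_utils.py
new file mode 100644
index 0000000..71a571e
--- /dev/null
+++ b/tests/unit/test_filesystem_utils.py
@@ -0,0 +1,295 @@
+"""
+Unit tests for filesystem utilities.
+
+Tests the sanitize_folder_name function and related filesystem utilities.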
+""" + +import os +import tempfile + +import pytest + +from src.server.utils.filesystem import ( + MAX_FOLDER_NAME_LENGTH, + create_safe_folder, + is_safe_path, + sanitize_folder_name, +) + + +class TestSanitizeFolderName: + """Test sanitize_folder_name function.""" + + def test_simple_name(self): + """Test sanitizing a simple name with no special characters.""" + assert sanitize_folder_name("Attack on Titan") == "Attack on Titan" + + def test_name_with_colon(self): + """Test sanitizing name with colon.""" + result = sanitize_folder_name("Attack on Titan: Final Season") + assert ":" not in result + assert result == "Attack on Titan Final Season" + + def test_name_with_question_mark(self): + """Test sanitizing name with question mark.""" + result = sanitize_folder_name("What If...?") + assert "?" not in result + # Trailing dots are stripped + assert result == "What If" + + def test_name_with_multiple_special_chars(self): + """Test sanitizing name with multiple special characters.""" + result = sanitize_folder_name('Test: "Episode" <1> | Part?') + # All invalid chars should be removed + assert ":" not in result + assert '"' not in result + assert "<" not in result + assert ">" not in result + assert "|" not in result + assert "?" not in result + + def test_name_with_forward_slash(self): + """Test sanitizing name with forward slash.""" + result = sanitize_folder_name("Attack/Titan") + assert "/" not in result + + def test_name_with_backslash(self): + """Test sanitizing name with backslash.""" + result = sanitize_folder_name("Attack\\Titan") + assert "\\" not in result + + def test_unicode_characters_preserved(self): + """Test that Unicode characters are preserved.""" + # Japanese title + result = sanitize_folder_name("進撃の巨人") + assert result == "進撃の巨人" + + def test_mixed_unicode_and_special(self): + """Test mixed Unicode and special characters.""" + result = sanitize_folder_name("Re:ゼロ") + assert ":" not in result + assert "ゼロ" in result + + def test_leading_dots_removed(self): + """Test that leading dots are removed.""" + result = sanitize_folder_name("...Hidden Folder") + assert not result.startswith(".") + + def test_trailing_dots_removed(self): + """Test that trailing dots are removed.""" + result = sanitize_folder_name("Folder Name...") + assert not result.endswith(".") + + def test_leading_spaces_removed(self): + """Test that leading spaces are removed.""" + result = sanitize_folder_name(" Attack on Titan") + assert result == "Attack on Titan" + + def test_trailing_spaces_removed(self): + """Test that trailing spaces are removed.""" + result = sanitize_folder_name("Attack on Titan ") + assert result == "Attack on Titan" + + def test_multiple_spaces_collapsed(self): + """Test that multiple consecutive spaces are collapsed.""" + result = sanitize_folder_name("Attack on Titan") + assert result == "Attack on Titan" + + def test_null_byte_removed(self): + """Test that null byte is removed.""" + result = sanitize_folder_name("Attack\x00Titan") + assert "\x00" not in result + + def test_newline_removed(self): + """Test that newline is removed.""" + result = sanitize_folder_name("Attack\nTitan") + assert "\n" not in result + + def test_tab_removed(self): + """Test that tab is removed.""" + result = sanitize_folder_name("Attack\tTitan") + assert "\t" not in result + + def test_none_raises_error(self): + """Test that None raises ValueError.""" + with pytest.raises(ValueError, match="cannot be None"): + sanitize_folder_name(None) + + def test_empty_string_raises_error(self): + """Test that empty 
string raises ValueError.""" + with pytest.raises(ValueError, match="cannot be empty"): + sanitize_folder_name("") + + def test_whitespace_only_raises_error(self): + """Test that whitespace-only string raises ValueError.""" + with pytest.raises(ValueError, match="cannot be empty"): + sanitize_folder_name(" ") + + def test_only_invalid_chars_raises_error(self): + """Test that string with only invalid characters raises ValueError.""" + with pytest.raises(ValueError, match="only invalid characters"): + sanitize_folder_name("???:::***") + + def test_max_length_truncation(self): + """Test that long names are truncated.""" + long_name = "A" * 300 + result = sanitize_folder_name(long_name) + assert len(result) <= MAX_FOLDER_NAME_LENGTH + + def test_max_length_custom(self): + """Test custom max length.""" + result = sanitize_folder_name("Attack on Titan", max_length=10) + assert len(result) <= 10 + + def test_truncation_at_word_boundary(self): + """Test that truncation happens at word boundary when possible.""" + result = sanitize_folder_name( + "The Very Long Anime Title That Needs Truncation", + max_length=25 + ) + # Should truncate at word boundary + assert len(result) <= 25 + assert not result.endswith(" ") + + def test_custom_replacement_character(self): + """Test custom replacement character.""" + result = sanitize_folder_name("Test:Name", replacement="_") + assert ":" not in result + assert "Test_Name" == result + + def test_asterisk_removed(self): + """Test that asterisk is removed.""" + result = sanitize_folder_name("Attack*Titan") + assert "*" not in result + + def test_pipe_removed(self): + """Test that pipe is removed.""" + result = sanitize_folder_name("Attack|Titan") + assert "|" not in result + + def test_real_anime_titles(self): + """Test real anime titles with special characters.""" + # Test that invalid filesystem characters are removed + # Note: semicolon is NOT an invalid filesystem character + test_cases = [ + ("Re:Zero", ":"), # colon should be removed + ("86: Eighty-Six", ":"), # colon should be removed + ("Fate/Stay Night", "/"), # slash should be removed + ("Sword Art Online: Alicization", ":"), # colon should be removed + ("What If...?", "?"), # question mark should be removed + ] + for input_name, forbidden_char in test_cases: + result = sanitize_folder_name(input_name) + assert forbidden_char not in result, f"'{forbidden_char}' should be removed from '{input_name}'" + + +class TestIsSafePath: + """Test is_safe_path function.""" + + def test_valid_subpath(self): + """Test that valid subpath returns True.""" + assert is_safe_path("/anime", "/anime/Attack on Titan") + + def test_exact_match(self): + """Test that exact match returns True.""" + assert is_safe_path("/anime", "/anime") + + def test_path_traversal_rejected(self): + """Test that path traversal is rejected.""" + assert not is_safe_path("/anime", "/anime/../etc/passwd") + + def test_parent_directory_rejected(self): + """Test that parent directory is rejected.""" + assert not is_safe_path("/anime/series", "/anime") + + def test_sibling_directory_rejected(self): + """Test that sibling directory is rejected.""" + assert not is_safe_path("/anime", "/movies/film") + + def test_nested_subpath(self): + """Test deeply nested valid subpath.""" + assert is_safe_path( + "/anime", + "/anime/Attack on Titan/Season 1/Episode 1" + ) + + +class TestCreateSafeFolder: + """Test create_safe_folder function.""" + + def test_creates_folder_with_sanitized_name(self): + """Test that folder is created with sanitized name.""" + with 
tempfile.TemporaryDirectory() as tmpdir: + path = create_safe_folder(tmpdir, "Attack: Titan?") + assert os.path.isdir(path) + assert ":" not in os.path.basename(path) + assert "?" not in os.path.basename(path) + + def test_returns_full_path(self): + """Test that full path is returned.""" + with tempfile.TemporaryDirectory() as tmpdir: + path = create_safe_folder(tmpdir, "Test Folder") + assert path.startswith(tmpdir) + assert "Test Folder" in path + + def test_exist_ok_true(self): + """Test that existing folder doesn't raise with exist_ok=True.""" + with tempfile.TemporaryDirectory() as tmpdir: + # Create first time + path1 = create_safe_folder(tmpdir, "Test Folder") + # Create second time - should not raise + path2 = create_safe_folder(tmpdir, "Test Folder", exist_ok=True) + assert path1 == path2 + + def test_rejects_path_traversal(self): + """Test that path traversal is rejected after sanitization.""" + with tempfile.TemporaryDirectory() as tmpdir: + # After sanitization, "../../../etc" becomes "etc" (dots removed) + # So this test verifies the folder is created safely + # The sanitization removes the path traversal attempt + path = create_safe_folder(tmpdir, "../../../etc") + # The folder should be created within tmpdir, not escape it + assert is_safe_path(tmpdir, path) + # Folder name should be "etc" after sanitization (dots stripped) + assert os.path.basename(path) == "etc" + + +class TestSanitizeFolderNameEdgeCases: + """Test edge cases for sanitize_folder_name.""" + + def test_control_characters_removed(self): + """Test that control characters are removed.""" + # ASCII control characters + result = sanitize_folder_name("Test\x01\x02\x03Name") + assert "\x01" not in result + assert "\x02" not in result + assert "\x03" not in result + + def test_carriage_return_removed(self): + """Test that carriage return is removed.""" + result = sanitize_folder_name("Test\rName") + assert "\r" not in result + + def test_unicode_normalization(self): + """Test that Unicode is normalized.""" + # Composed vs decomposed forms + result = sanitize_folder_name("café") + # Should be normalized to NFC form + assert result == "café" + + def test_emoji_handling(self): + """Test handling of emoji characters.""" + result = sanitize_folder_name("Anime 🎬 Title") + # Emoji should be preserved (valid Unicode) + assert "🎬" in result or "Anime" in result + + def test_single_character_name(self): + """Test single character name.""" + result = sanitize_folder_name("A") + assert result == "A" + + def test_numbers_preserved(self): + """Test that numbers are preserved.""" + result = sanitize_folder_name("86: Eighty-Six (2021)") + assert "86" in result + assert "2021" in result diff --git a/tests/unit/test_health.py b/tests/unit/test_health.py index 9876db0..604bf9c 100644 --- a/tests/unit/test_health.py +++ b/tests/unit/test_health.py @@ -18,12 +18,18 @@ from src.server.api.health import ( @pytest.mark.asyncio async def test_basic_health_check(): """Test basic health check endpoint.""" - result = await basic_health_check() + with patch("src.config.settings.settings") as mock_settings, \ + patch("src.server.utils.dependencies._series_app", None): + mock_settings.anime_directory = "" + result = await basic_health_check() - assert isinstance(result, HealthStatus) - assert result.status == "healthy" - assert result.version == "1.0.0" - assert result.timestamp is not None + assert isinstance(result, HealthStatus) + assert result.status == "healthy" + assert result.version == "1.0.0" + assert result.service == "aniworld-api" + 
assert result.timestamp is not None + assert result.series_app_initialized is False + assert result.anime_directory_configured is False @pytest.mark.asyncio diff --git a/tests/unit/test_migrations.py b/tests/unit/test_migrations.py deleted file mode 100644 index 1f9e36f..0000000 --- a/tests/unit/test_migrations.py +++ /dev/null @@ -1,419 +0,0 @@ -""" -Tests for database migration system. - -This module tests the migration runner, validator, and base classes. -""" - -from datetime import datetime -from pathlib import Path -from unittest.mock import AsyncMock, Mock, patch - -import pytest - -from src.server.database.migrations.base import ( - Migration, - MigrationError, - MigrationHistory, -) -from src.server.database.migrations.runner import MigrationRunner -from src.server.database.migrations.validator import MigrationValidator - - -class TestMigration: - """Tests for base Migration class.""" - - def test_migration_initialization(self): - """Test migration can be initialized with basic attributes.""" - - class TestMig(Migration): - async def upgrade(self, session): - return None - - async def downgrade(self, session): - return None - - mig = TestMig( - version="20250124_001", description="Test migration" - ) - - assert mig.version == "20250124_001" - assert mig.description == "Test migration" - assert isinstance(mig.created_at, datetime) - - def test_migration_equality(self): - """Test migrations are equal based on version.""" - - class TestMig1(Migration): - async def upgrade(self, session): - return None - - async def downgrade(self, session): - return None - - class TestMig2(Migration): - async def upgrade(self, session): - return None - - async def downgrade(self, session): - return None - - mig1 = TestMig1(version="20250124_001", description="Test 1") - mig2 = TestMig2(version="20250124_001", description="Test 2") - mig3 = TestMig1(version="20250124_002", description="Test 3") - - assert mig1 == mig2 - assert mig1 != mig3 - assert hash(mig1) == hash(mig2) - assert hash(mig1) != hash(mig3) - - def test_migration_repr(self): - """Test migration string representation.""" - - class TestMig(Migration): - async def upgrade(self, session): - return None - - async def downgrade(self, session): - return None - - mig = TestMig( - version="20250124_001", description="Test migration" - ) - - assert "20250124_001" in repr(mig) - assert "Test migration" in repr(mig) - - -class TestMigrationHistory: - """Tests for MigrationHistory class.""" - - def test_history_initialization(self): - """Test migration history record can be created.""" - history = MigrationHistory( - version="20250124_001", - description="Test migration", - applied_at=datetime.now(), - execution_time_ms=1500, - success=True, - ) - - assert history.version == "20250124_001" - assert history.description == "Test migration" - assert history.execution_time_ms == 1500 - assert history.success is True - assert history.error_message is None - - def test_history_with_error(self): - """Test migration history with error message.""" - history = MigrationHistory( - version="20250124_001", - description="Failed migration", - applied_at=datetime.now(), - execution_time_ms=500, - success=False, - error_message="Test error", - ) - - assert history.success is False - assert history.error_message == "Test error" - - -class TestMigrationValidator: - """Tests for MigrationValidator class.""" - - def test_validator_initialization(self): - """Test validator can be initialized.""" - validator = MigrationValidator() - assert isinstance(validator.errors, 
list) - assert isinstance(validator.warnings, list) - assert len(validator.errors) == 0 - - def test_validate_version_format_valid(self): - """Test validation of valid version formats.""" - validator = MigrationValidator() - - assert validator._validate_version_format("20250124_001") - assert validator._validate_version_format("20231201_099") - assert validator._validate_version_format("20250124_001_description") - - def test_validate_version_format_invalid(self): - """Test validation of invalid version formats.""" - validator = MigrationValidator() - - assert not validator._validate_version_format("") - assert not validator._validate_version_format("20250124") - assert not validator._validate_version_format("invalid_001") - assert not validator._validate_version_format("202501_001") - - def test_validate_migration_valid(self): - """Test validation of valid migration.""" - - class TestMig(Migration): - async def upgrade(self, session): - return None - - async def downgrade(self, session): - return None - - mig = TestMig( - version="20250124_001", - description="Valid test migration", - ) - - validator = MigrationValidator() - assert validator.validate_migration(mig) is True - assert len(validator.errors) == 0 - - def test_validate_migration_invalid_version(self): - """Test validation fails for invalid version.""" - - class TestMig(Migration): - async def upgrade(self, session): - return None - - async def downgrade(self, session): - return None - - mig = TestMig( - version="invalid", - description="Valid description", - ) - - validator = MigrationValidator() - assert validator.validate_migration(mig) is False - assert len(validator.errors) > 0 - - def test_validate_migration_missing_description(self): - """Test validation fails for missing description.""" - - class TestMig(Migration): - async def upgrade(self, session): - return None - - async def downgrade(self, session): - return None - - mig = TestMig(version="20250124_001", description="") - - validator = MigrationValidator() - assert validator.validate_migration(mig) is False - assert any("description" in e.lower() for e in validator.errors) - - def test_validate_migrations_duplicate_version(self): - """Test validation detects duplicate versions.""" - - class TestMig1(Migration): - async def upgrade(self, session): - return None - - async def downgrade(self, session): - return None - - class TestMig2(Migration): - async def upgrade(self, session): - return None - - async def downgrade(self, session): - return None - - mig1 = TestMig1(version="20250124_001", description="First") - mig2 = TestMig2(version="20250124_001", description="Duplicate") - - validator = MigrationValidator() - assert validator.validate_migrations([mig1, mig2]) is False - assert any("duplicate" in e.lower() for e in validator.errors) - - def test_check_migration_conflicts(self): - """Test detection of migration conflicts.""" - - class TestMig(Migration): - async def upgrade(self, session): - return None - - async def downgrade(self, session): - return None - - old_mig = TestMig(version="20250101_001", description="Old") - new_mig = TestMig(version="20250124_001", description="New") - - validator = MigrationValidator() - - # No conflict when pending is newer - conflict = validator.check_migration_conflicts( - [new_mig], ["20250101_001"] - ) - assert conflict is None - - # Conflict when pending is older - conflict = validator.check_migration_conflicts( - [old_mig], ["20250124_001"] - ) - assert conflict is not None - assert "older" in conflict.lower() - - def 
test_get_validation_report(self): - """Test validation report generation.""" - validator = MigrationValidator() - - validator.errors.append("Test error") - validator.warnings.append("Test warning") - - report = validator.get_validation_report() - - assert "Test error" in report - assert "Test warning" in report - assert "Validation Errors:" in report - assert "Validation Warnings:" in report - - def test_raise_if_invalid(self): - """Test exception raising on validation failure.""" - validator = MigrationValidator() - validator.errors.append("Test error") - - with pytest.raises(MigrationError): - validator.raise_if_invalid() - - -@pytest.mark.asyncio -class TestMigrationRunner: - """Tests for MigrationRunner class.""" - - @pytest.fixture - def mock_session(self): - """Create mock database session.""" - session = AsyncMock() - session.execute = AsyncMock() - session.commit = AsyncMock() - session.rollback = AsyncMock() - return session - - @pytest.fixture - def migrations_dir(self, tmp_path): - """Create temporary migrations directory.""" - return tmp_path / "migrations" - - async def test_runner_initialization( - self, migrations_dir, mock_session - ): - """Test migration runner can be initialized.""" - runner = MigrationRunner(migrations_dir, mock_session) - - assert runner.migrations_dir == migrations_dir - assert runner.session == mock_session - assert isinstance(runner._migrations, list) - - async def test_initialize_creates_table( - self, migrations_dir, mock_session - ): - """Test initialization creates migration_history table.""" - runner = MigrationRunner(migrations_dir, mock_session) - - await runner.initialize() - - mock_session.execute.assert_called() - mock_session.commit.assert_called() - - async def test_load_migrations_empty_dir( - self, migrations_dir, mock_session - ): - """Test loading migrations from empty directory.""" - runner = MigrationRunner(migrations_dir, mock_session) - - runner.load_migrations() - - assert len(runner._migrations) == 0 - - async def test_get_applied_migrations( - self, migrations_dir, mock_session - ): - """Test retrieving list of applied migrations.""" - # Mock database response - mock_result = Mock() - mock_result.fetchall.return_value = [ - ("20250124_001",), - ("20250124_002",), - ] - mock_session.execute.return_value = mock_result - - runner = MigrationRunner(migrations_dir, mock_session) - applied = await runner.get_applied_migrations() - - assert len(applied) == 2 - assert "20250124_001" in applied - assert "20250124_002" in applied - - async def test_apply_migration_success( - self, migrations_dir, mock_session - ): - """Test successful migration application.""" - - class TestMig(Migration): - async def upgrade(self, session): - return None - - async def downgrade(self, session): - return None - - mig = TestMig(version="20250124_001", description="Test") - - runner = MigrationRunner(migrations_dir, mock_session) - - await runner.apply_migration(mig) - - mock_session.commit.assert_called() - - async def test_apply_migration_failure( - self, migrations_dir, mock_session - ): - """Test migration application handles failures.""" - - class FailingMig(Migration): - async def upgrade(self, session): - raise Exception("Test failure") - - async def downgrade(self, session): - return None - - mig = FailingMig(version="20250124_001", description="Failing") - - runner = MigrationRunner(migrations_dir, mock_session) - - with pytest.raises(MigrationError): - await runner.apply_migration(mig) - - mock_session.rollback.assert_called() - - async def 
test_get_pending_migrations( - self, migrations_dir, mock_session - ): - """Test retrieving pending migrations.""" - - class TestMig1(Migration): - async def upgrade(self, session): - return None - - async def downgrade(self, session): - return None - - class TestMig2(Migration): - async def upgrade(self, session): - return None - - async def downgrade(self, session): - return None - - mig1 = TestMig1(version="20250124_001", description="Applied") - mig2 = TestMig2(version="20250124_002", description="Pending") - - runner = MigrationRunner(migrations_dir, mock_session) - runner._migrations = [mig1, mig2] - - # Mock only mig1 as applied - mock_result = Mock() - mock_result.fetchall.return_value = [("20250124_001",)] - mock_session.execute.return_value = mock_result - - pending = await runner.get_pending_migrations() - - assert len(pending) == 1 - assert pending[0].version == "20250124_002" diff --git a/tests/unit/test_progress_service.py b/tests/unit/test_progress_service.py index f46e904..23d721f 100644 --- a/tests/unit/test_progress_service.py +++ b/tests/unit/test_progress_service.py @@ -352,7 +352,7 @@ class TestProgressService: # First positional arg is ProgressEvent call_args = mock_broadcast.call_args[0][0] assert call_args.event_type == "download_progress" - assert call_args.room == "download_progress" + assert call_args.room == "downloads" # Room name for DOWNLOAD type assert call_args.progress_id == "test-1" assert call_args.progress.id == "test-1" diff --git a/tests/unit/test_queue_progress_broadcast.py b/tests/unit/test_queue_progress_broadcast.py new file mode 100644 index 0000000..c2d53ae --- /dev/null +++ b/tests/unit/test_queue_progress_broadcast.py @@ -0,0 +1,445 @@ +"""Unit tests for queue progress broadcast to correct WebSocket rooms. + +This module tests that download progress events are broadcast to the +correct WebSocket rooms ('downloads' for DOWNLOAD type progress). +These tests verify the fix for progress not transmitting to clients. + +No real downloads are started - all tests use mocks to verify the +event flow from ProgressService through WebSocket broadcasting. 
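+
+Room-to-type mapping exercised below (asserted in TestRoomNameMapping):
+    DOWNLOAD -> "downloads", SCAN -> "scan", QUEUE -> "queue",
+    SYSTEM -> "system", ERROR -> "errors".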
+""" +import asyncio +from typing import Any, Dict, List +from unittest.mock import AsyncMock + +import pytest + +from src.server.services.progress_service import ( + ProgressEvent, + ProgressService, + ProgressStatus, + ProgressType, + _get_room_for_progress_type, +) +from src.server.services.websocket_service import WebSocketService + + +class TestRoomNameMapping: + """Tests for progress type to room name mapping.""" + + def test_download_progress_maps_to_downloads_room(self): + """Test that DOWNLOAD type maps to 'downloads' room.""" + room = _get_room_for_progress_type(ProgressType.DOWNLOAD) + assert room == "downloads" + + def test_scan_progress_maps_to_scan_room(self): + """Test that SCAN type maps to 'scan' room.""" + room = _get_room_for_progress_type(ProgressType.SCAN) + assert room == "scan" + + def test_queue_progress_maps_to_queue_room(self): + """Test that QUEUE type maps to 'queue' room.""" + room = _get_room_for_progress_type(ProgressType.QUEUE) + assert room == "queue" + + def test_system_progress_maps_to_system_room(self): + """Test that SYSTEM type maps to 'system' room.""" + room = _get_room_for_progress_type(ProgressType.SYSTEM) + assert room == "system" + + def test_error_progress_maps_to_errors_room(self): + """Test that ERROR type maps to 'errors' room.""" + room = _get_room_for_progress_type(ProgressType.ERROR) + assert room == "errors" + + +class TestProgressServiceBroadcastRoom: + """Tests for ProgressService broadcasting to correct rooms.""" + + @pytest.fixture + def progress_service(self): + """Create a fresh ProgressService for each test.""" + return ProgressService() + + @pytest.fixture + def mock_handler(self): + """Create a mock event handler to capture broadcasts.""" + return AsyncMock() + + @pytest.mark.asyncio + async def test_start_download_progress_broadcasts_to_downloads_room( + self, progress_service, mock_handler + ): + """Test start_progress with DOWNLOAD type uses 'downloads' room.""" + # Subscribe to progress events + progress_service.subscribe("progress_updated", mock_handler) + + # Start a download progress + await progress_service.start_progress( + progress_id="test-download-1", + progress_type=ProgressType.DOWNLOAD, + title="Test Download", + message="Downloading episode", + ) + + # Verify handler was called with correct room + mock_handler.assert_called_once() + event: ProgressEvent = mock_handler.call_args[0][0] + + assert event.room == "downloads", ( + f"Expected room 'downloads' but got '{event.room}'" + ) + assert event.event_type == "download_progress" + assert event.progress.status == ProgressStatus.STARTED + + @pytest.mark.asyncio + async def test_update_download_progress_broadcasts_to_downloads_room( + self, progress_service, mock_handler + ): + """Test update_progress with DOWNLOAD type uses 'downloads' room.""" + # Start progress first + await progress_service.start_progress( + progress_id="test-download-2", + progress_type=ProgressType.DOWNLOAD, + title="Test Download", + total=100, + ) + + # Subscribe after start to only capture update event + progress_service.subscribe("progress_updated", mock_handler) + + # Update progress with force_broadcast + await progress_service.update_progress( + progress_id="test-download-2", + current=50, + message="50% complete", + force_broadcast=True, + ) + + # Verify handler was called with correct room + mock_handler.assert_called_once() + event: ProgressEvent = mock_handler.call_args[0][0] + + assert event.room == "downloads", ( + f"Expected room 'downloads' but got '{event.room}'" + ) + assert 
event.event_type == "download_progress" + assert event.progress.status == ProgressStatus.IN_PROGRESS + assert event.progress.percent == 50.0 + + @pytest.mark.asyncio + async def test_complete_download_progress_broadcasts_to_downloads_room( + self, progress_service, mock_handler + ): + """Test complete_progress with DOWNLOAD uses 'downloads' room.""" + # Start progress first + await progress_service.start_progress( + progress_id="test-download-3", + progress_type=ProgressType.DOWNLOAD, + title="Test Download", + ) + + # Subscribe after start to only capture complete event + progress_service.subscribe("progress_updated", mock_handler) + + # Complete progress + await progress_service.complete_progress( + progress_id="test-download-3", + message="Download completed", + ) + + # Verify handler was called with correct room + mock_handler.assert_called_once() + event: ProgressEvent = mock_handler.call_args[0][0] + + assert event.room == "downloads", ( + f"Expected room 'downloads' but got '{event.room}'" + ) + assert event.event_type == "download_progress" + assert event.progress.status == ProgressStatus.COMPLETED + + @pytest.mark.asyncio + async def test_fail_download_progress_broadcasts_to_downloads_room( + self, progress_service, mock_handler + ): + """Test that fail_progress with DOWNLOAD type uses 'downloads' room.""" + # Start progress first + await progress_service.start_progress( + progress_id="test-download-4", + progress_type=ProgressType.DOWNLOAD, + title="Test Download", + ) + + # Subscribe after start to only capture fail event + progress_service.subscribe("progress_updated", mock_handler) + + # Fail progress + await progress_service.fail_progress( + progress_id="test-download-4", + error_message="Connection lost", + ) + + # Verify handler was called with correct room + mock_handler.assert_called_once() + event: ProgressEvent = mock_handler.call_args[0][0] + + assert event.room == "downloads", ( + f"Expected room 'downloads' but got '{event.room}'" + ) + assert event.event_type == "download_progress" + assert event.progress.status == ProgressStatus.FAILED + + @pytest.mark.asyncio + async def test_queue_progress_broadcasts_to_queue_room( + self, progress_service, mock_handler + ): + """Test that QUEUE type progress uses 'queue' room.""" + progress_service.subscribe("progress_updated", mock_handler) + + await progress_service.start_progress( + progress_id="test-queue-1", + progress_type=ProgressType.QUEUE, + title="Queue Status", + ) + + mock_handler.assert_called_once() + event: ProgressEvent = mock_handler.call_args[0][0] + + assert event.room == "queue", ( + f"Expected room 'queue' but got '{event.room}'" + ) + assert event.event_type == "queue_progress" + + +class TestEndToEndProgressBroadcast: + """End-to-end tests for progress broadcast via WebSocket.""" + + @pytest.fixture + def websocket_service(self): + """Create a WebSocketService.""" + return WebSocketService() + + @pytest.fixture + def progress_service(self): + """Create a ProgressService.""" + return ProgressService() + + @pytest.mark.asyncio + async def test_progress_broadcast_reaches_downloads_room_clients( + self, websocket_service, progress_service + ): + """Test that download progress reaches clients in 'downloads' room. + + This is the key test verifying the fix: progress updates should + be broadcast to the 'downloads' room, not 'download_progress'. 
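+
+        Flow under test: ProgressService event -> subscribed handler
+        (mirroring the wiring in fastapi_app.py), which calls
+        websocket_service.manager.broadcast_to_room(message, event.room).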
+ """ + # Track messages received by mock client + received_messages: List[Dict[str, Any]] = [] + + # Create mock WebSocket + class MockWebSocket: + async def accept(self): + pass + + async def send_json(self, data): + received_messages.append(data) + + async def receive_json(self): + await asyncio.sleep(10) + + # Connect client to WebSocket service + mock_ws = MockWebSocket() + connection_id = "test_client" + await websocket_service.connect(mock_ws, connection_id) + + # Join the 'downloads' room (this is what the JS client does) + await websocket_service.manager.join_room(connection_id, "downloads") + + # Set up the progress event handler (mimics fastapi_app.py) + async def progress_event_handler(event: ProgressEvent) -> None: + """Handle progress events and broadcast via WebSocket.""" + message = { + "type": event.event_type, + "data": event.progress.to_dict(), + } + await websocket_service.manager.broadcast_to_room( + message, event.room + ) + + progress_service.subscribe("progress_updated", progress_event_handler) + + # Simulate download progress lifecycle + # 1. Start download + await progress_service.start_progress( + progress_id="real-download-test", + progress_type=ProgressType.DOWNLOAD, + title="Downloading Anime Episode", + total=100, + metadata={"item_id": "item-123"}, + ) + + # 2. Update progress multiple times + for percent in [25, 50, 75]: + await progress_service.update_progress( + progress_id="real-download-test", + current=percent, + message=f"{percent}% complete", + metadata={"speed_mbps": 2.5}, + force_broadcast=True, + ) + + # 3. Complete download + await progress_service.complete_progress( + progress_id="real-download-test", + message="Download completed successfully", + ) + + # Verify client received all messages + # Filter for download_progress type messages + download_messages = [ + m for m in received_messages + if m.get("type") == "download_progress" + ] + + # Should have: start + 3 updates + complete = 5 messages + assert len(download_messages) >= 4, ( + f"Expected at least 4 download_progress messages, " + f"got {len(download_messages)}: {download_messages}" + ) + + # Verify first message is start + assert download_messages[0]["data"]["status"] == "started" + + # Verify last message is completed + assert download_messages[-1]["data"]["status"] == "completed" + assert download_messages[-1]["data"]["percent"] == 100.0 + + # Cleanup + await websocket_service.disconnect(connection_id) + + @pytest.mark.asyncio + async def test_clients_not_in_downloads_room_dont_receive_progress( + self, websocket_service, progress_service + ): + """Test that clients not in 'downloads' room don't receive progress.""" + downloads_messages: List[Dict] = [] + other_messages: List[Dict] = [] + + class MockWebSocket: + def __init__(self, message_list): + self.messages = message_list + + async def accept(self): + pass + + async def send_json(self, data): + self.messages.append(data) + + async def receive_json(self): + await asyncio.sleep(10) + + # Client in 'downloads' room + ws_downloads = MockWebSocket(downloads_messages) + await websocket_service.connect(ws_downloads, "client_downloads") + await websocket_service.manager.join_room( + "client_downloads", "downloads" + ) + + # Client in 'system' room (different room) + ws_other = MockWebSocket(other_messages) + await websocket_service.connect(ws_other, "client_other") + await websocket_service.manager.join_room("client_other", "system") + + # Set up progress handler + async def progress_event_handler(event: ProgressEvent) -> None: + 
message = { + "type": event.event_type, + "data": event.progress.to_dict(), + } + await websocket_service.manager.broadcast_to_room( + message, event.room + ) + + progress_service.subscribe("progress_updated", progress_event_handler) + + # Emit download progress + await progress_service.start_progress( + progress_id="isolation-test", + progress_type=ProgressType.DOWNLOAD, + title="Test Download", + ) + + # Only 'downloads' room client should receive the message + download_progress_in_downloads = [ + m for m in downloads_messages + if m.get("type") == "download_progress" + ] + download_progress_in_other = [ + m for m in other_messages + if m.get("type") == "download_progress" + ] + + assert len(download_progress_in_downloads) == 1, ( + "Client in 'downloads' room should receive download_progress" + ) + assert len(download_progress_in_other) == 0, ( + "Client in 'system' room should NOT receive download_progress" + ) + + # Cleanup + await websocket_service.disconnect("client_downloads") + await websocket_service.disconnect("client_other") + + @pytest.mark.asyncio + async def test_progress_update_includes_item_id_in_metadata( + self, websocket_service, progress_service + ): + """Test progress updates include item_id for JS client matching.""" + received_messages: List[Dict] = [] + + class MockWebSocket: + async def accept(self): + pass + + async def send_json(self, data): + received_messages.append(data) + + async def receive_json(self): + await asyncio.sleep(10) + + mock_ws = MockWebSocket() + await websocket_service.connect(mock_ws, "test_client") + await websocket_service.manager.join_room("test_client", "downloads") + + async def progress_event_handler(event: ProgressEvent) -> None: + message = { + "type": event.event_type, + "data": event.progress.to_dict(), + } + await websocket_service.manager.broadcast_to_room( + message, event.room + ) + + progress_service.subscribe("progress_updated", progress_event_handler) + + # Start progress with item_id in metadata + item_id = "uuid-12345-67890" + await progress_service.start_progress( + progress_id=f"download_{item_id}", + progress_type=ProgressType.DOWNLOAD, + title="Test Download", + metadata={"item_id": item_id}, + ) + + # Verify item_id is present in broadcast + download_messages = [ + m for m in received_messages + if m.get("type") == "download_progress" + ] + + assert len(download_messages) == 1 + metadata = download_messages[0]["data"].get("metadata", {}) + assert metadata.get("item_id") == item_id, ( + f"Expected item_id '{item_id}' in metadata, got: {metadata}" + ) + + await websocket_service.disconnect("test_client") diff --git a/tests/unit/test_scan_service.py b/tests/unit/test_scan_service.py index 759de67..ce02409 100644 --- a/tests/unit/test_scan_service.py +++ b/tests/unit/test_scan_service.py @@ -1,29 +1,17 @@ """Unit tests for ScanService. This module contains comprehensive tests for the scan service, -including scan lifecycle, progress callbacks, event handling, -and key-based identification. +including scan lifecycle, progress events, and key-based identification. 
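+
+Note: the callback classes formerly imported from scan_service
+(ScanServiceProgressCallback, ScanServiceCompletionCallback,
+ScanServiceErrorCallback) are no longer imported; these tests observe
+progress through events instead.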
""" from datetime import datetime -from unittest.mock import AsyncMock, MagicMock +from unittest.mock import AsyncMock, MagicMock, Mock import pytest -from src.core.interfaces.callbacks import ( - CallbackManager, - CompletionContext, - ErrorContext, - OperationType, - ProgressContext, - ProgressPhase, -) from src.server.services.scan_service import ( ScanProgress, ScanService, - ScanServiceCompletionCallback, ScanServiceError, - ScanServiceErrorCallback, - ScanServiceProgressCallback, get_scan_service, reset_scan_service, ) diff --git a/tests/unit/test_serie_class.py b/tests/unit/test_serie_class.py index 5f86cc1..38a0ae0 100644 --- a/tests/unit/test_serie_class.py +++ b/tests/unit/test_serie_class.py @@ -173,6 +173,8 @@ class TestSerieProperties: def test_serie_save_and_load_from_file(self): """Test saving and loading Serie from file.""" + import warnings + serie = Serie( key="test-key", name="Test Series", @@ -190,11 +192,15 @@ class TestSerieProperties: temp_filename = f.name try: - # Save to file - serie.save_to_file(temp_filename) - - # Load from file - loaded_serie = Serie.load_from_file(temp_filename) + # Suppress deprecation warnings for this test + with warnings.catch_warnings(): + warnings.simplefilter("ignore", DeprecationWarning) + + # Save to file + serie.save_to_file(temp_filename) + + # Load from file + loaded_serie = Serie.load_from_file(temp_filename) # Verify all properties match assert loaded_serie.key == serie.key @@ -242,3 +248,168 @@ class TestSerieDocumentation: assert Serie.folder.fget.__doc__ is not None assert "metadata" in Serie.folder.fget.__doc__.lower() assert "not used for lookups" in Serie.folder.fget.__doc__.lower() + + +class TestSerieDeprecationWarnings: + """Test deprecation warnings for file-based methods.""" + + def test_save_to_file_raises_deprecation_warning(self): + """Test save_to_file() raises deprecation warning.""" + import warnings + + serie = Serie( + key="test-key", + name="Test Series", + site="https://example.com", + folder="Test Folder", + episodeDict={1: [1, 2, 3]} + ) + + with tempfile.NamedTemporaryFile( + mode='w', suffix='.json', delete=False + ) as temp_file: + temp_filename = temp_file.name + + try: + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("always") + serie.save_to_file(temp_filename) + + # Check deprecation warning was raised + assert len(w) == 1 + assert issubclass(w[0].category, DeprecationWarning) + assert "deprecated" in str(w[0].message).lower() + assert "save_to_file" in str(w[0].message) + finally: + if os.path.exists(temp_filename): + os.remove(temp_filename) + + def test_load_from_file_raises_deprecation_warning(self): + """Test load_from_file() raises deprecation warning.""" + import warnings + + serie = Serie( + key="test-key", + name="Test Series", + site="https://example.com", + folder="Test Folder", + episodeDict={1: [1, 2, 3]} + ) + + with tempfile.NamedTemporaryFile( + mode='w', suffix='.json', delete=False + ) as temp_file: + temp_filename = temp_file.name + + try: + # Save first (suppress warning for this) + with warnings.catch_warnings(): + warnings.simplefilter("ignore") + serie.save_to_file(temp_filename) + + # Now test loading + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("always") + Serie.load_from_file(temp_filename) + + # Check deprecation warning was raised + assert len(w) == 1 + assert issubclass(w[0].category, DeprecationWarning) + assert "deprecated" in str(w[0].message).lower() + assert "load_from_file" in str(w[0].message) + finally: + if 
os.path.exists(temp_filename): + os.remove(temp_filename) + + +class TestSerieSanitizedFolder: + """Test Serie.sanitized_folder property.""" + + def test_sanitized_folder_from_name(self): + """Test that sanitized_folder uses the name property.""" + serie = Serie( + key="attack-on-titan", + name="Attack on Titan: Final Season", + site="aniworld.to", + folder="old-folder", + episodeDict={} + ) + + result = serie.sanitized_folder + assert ":" not in result + assert "Attack on Titan" in result + + def test_sanitized_folder_removes_special_chars(self): + """Test that special characters are removed.""" + serie = Serie( + key="re-zero", + name="Re:Zero - Starting Life in Another World?", + site="aniworld.to", + folder="old-folder", + episodeDict={} + ) + + result = serie.sanitized_folder + assert ":" not in result + assert "?" not in result + + def test_sanitized_folder_fallback_to_folder(self): + """Test fallback to folder when name is empty.""" + serie = Serie( + key="test-key", + name="", + site="aniworld.to", + folder="Valid Folder Name", + episodeDict={} + ) + + result = serie.sanitized_folder + assert result == "Valid Folder Name" + + def test_sanitized_folder_fallback_to_key(self): + """Test fallback to key when name and folder can't be sanitized.""" + serie = Serie( + key="valid-key", + name="", + site="aniworld.to", + folder="", + episodeDict={} + ) + + result = serie.sanitized_folder + assert result == "valid-key" + + def test_sanitized_folder_preserves_unicode(self): + """Test that Unicode characters are preserved.""" + serie = Serie( + key="japanese-anime", + name="進撃の巨人", + site="aniworld.to", + folder="old-folder", + episodeDict={} + ) + + result = serie.sanitized_folder + assert "進撃の巨人" in result + + def test_sanitized_folder_with_various_anime_titles(self): + """Test sanitized_folder with real anime titles.""" + test_cases = [ + ("fate-stay-night", "Fate/Stay Night: UBW"), + ("86-eighty-six", "86: Eighty-Six"), + ("steins-gate", "Steins;Gate"), + ] + + for key, name in test_cases: + serie = Serie( + key=key, + name=name, + site="aniworld.to", + folder="old-folder", + episodeDict={} + ) + result = serie.sanitized_folder + # Verify invalid filesystem characters are removed + # Note: semicolon is valid on Linux but we test common invalid chars + assert ":" not in result + assert "/" not in result diff --git a/tests/unit/test_serie_list.py b/tests/unit/test_serie_list.py index 30e0f07..99d858a 100644 --- a/tests/unit/test_serie_list.py +++ b/tests/unit/test_serie_list.py @@ -1,7 +1,9 @@ """Tests for SerieList class - identifier standardization.""" +# pylint: disable=redefined-outer-name import os import tempfile +import warnings import pytest @@ -40,7 +42,9 @@ class TestSerieListKeyBasedStorage: def test_add_stores_by_key(self, temp_directory, sample_serie): """Test add() stores series by key.""" serie_list = SerieList(temp_directory) - serie_list.add(sample_serie) + with warnings.catch_warnings(): + warnings.simplefilter("ignore", DeprecationWarning) + serie_list.add(sample_serie) # Verify stored by key, not folder assert sample_serie.key in serie_list.keyDict @@ -49,7 +53,9 @@ class TestSerieListKeyBasedStorage: def test_contains_checks_by_key(self, temp_directory, sample_serie): """Test contains() checks by key.""" serie_list = SerieList(temp_directory) - serie_list.add(sample_serie) + with warnings.catch_warnings(): + warnings.simplefilter("ignore", DeprecationWarning) + serie_list.add(sample_serie) assert serie_list.contains(sample_serie.key) assert not 
serie_list.contains("nonexistent-key") @@ -60,11 +66,13 @@ class TestSerieListKeyBasedStorage: """Test add() prevents duplicates based on key.""" serie_list = SerieList(temp_directory) - # Add same serie twice - serie_list.add(sample_serie) - initial_count = len(serie_list.keyDict) - - serie_list.add(sample_serie) + with warnings.catch_warnings(): + warnings.simplefilter("ignore", DeprecationWarning) + # Add same serie twice + serie_list.add(sample_serie) + initial_count = len(serie_list.keyDict) + + serie_list.add(sample_serie) # Should still have only one entry assert len(serie_list.keyDict) == initial_count @@ -75,7 +83,9 @@ class TestSerieListKeyBasedStorage: ): """Test get_by_key() retrieves series correctly.""" serie_list = SerieList(temp_directory) - serie_list.add(sample_serie) + with warnings.catch_warnings(): + warnings.simplefilter("ignore", DeprecationWarning) + serie_list.add(sample_serie) result = serie_list.get_by_key(sample_serie.key) assert result is not None @@ -94,9 +104,11 @@ class TestSerieListKeyBasedStorage: ): """Test get_by_folder() provides backward compatibility.""" serie_list = SerieList(temp_directory) - serie_list.add(sample_serie) + with warnings.catch_warnings(): + warnings.simplefilter("ignore", DeprecationWarning) + serie_list.add(sample_serie) + result = serie_list.get_by_folder(sample_serie.folder) - result = serie_list.get_by_folder(sample_serie.folder) assert result is not None assert result.key == sample_serie.key assert result.folder == sample_serie.folder @@ -105,13 +117,14 @@ class TestSerieListKeyBasedStorage: """Test get_by_folder() returns None for nonexistent folder.""" serie_list = SerieList(temp_directory) - result = serie_list.get_by_folder("Nonexistent Folder") + with warnings.catch_warnings(): + warnings.simplefilter("ignore", DeprecationWarning) + result = serie_list.get_by_folder("Nonexistent Folder") assert result is None def test_get_all_returns_all_series(self, temp_directory, sample_serie): """Test get_all() returns all series from keyDict.""" serie_list = SerieList(temp_directory) - serie_list.add(sample_serie) serie2 = Serie( key="naruto", @@ -120,7 +133,11 @@ class TestSerieListKeyBasedStorage: folder="Naruto (2002)", episodeDict={1: [1, 2]} ) - serie_list.add(serie2) + + with warnings.catch_warnings(): + warnings.simplefilter("ignore", DeprecationWarning) + serie_list.add(sample_serie) + serie_list.add(serie2) all_series = serie_list.get_all() assert len(all_series) == 2 @@ -151,8 +168,10 @@ class TestSerieListKeyBasedStorage: episodeDict={} ) - serie_list.add(serie_with_episodes) - serie_list.add(serie_without_episodes) + with warnings.catch_warnings(): + warnings.simplefilter("ignore", DeprecationWarning) + serie_list.add(serie_with_episodes) + serie_list.add(serie_without_episodes) missing = serie_list.get_missing_episodes() assert len(missing) == 1 @@ -184,8 +203,10 @@ class TestSerieListPublicAPI: """Test that all public methods work correctly after refactoring.""" serie_list = SerieList(temp_directory) - # Test add - serie_list.add(sample_serie) + # Test add (suppress deprecation warning for test) + with warnings.catch_warnings(): + warnings.simplefilter("ignore", DeprecationWarning) + serie_list.add(sample_serie) # Test contains assert serie_list.contains(sample_serie.key) @@ -200,4 +221,76 @@ class TestSerieListPublicAPI: # Test new helper methods assert serie_list.get_by_key(sample_serie.key) is not None - assert serie_list.get_by_folder(sample_serie.folder) is not None + with warnings.catch_warnings(): + 
warnings.simplefilter("ignore", DeprecationWarning) + assert serie_list.get_by_folder(sample_serie.folder) is not None + + +class TestSerieListSkipLoad: + """Test SerieList initialization options.""" + + def test_init_with_skip_load(self, temp_directory): + """Test initialization with skip_load=True skips loading.""" + serie_list = SerieList(temp_directory, skip_load=True) + assert len(serie_list.keyDict) == 0 + + +class TestSerieListDeprecationWarnings: + """Test deprecation warnings are raised for file-based methods.""" + + def test_get_by_folder_raises_deprecation_warning( + self, temp_directory, sample_serie + ): + """Test get_by_folder() raises deprecation warning.""" + serie_list = SerieList(temp_directory, skip_load=True) + serie_list.keyDict[sample_serie.key] = sample_serie + + with warnings.catch_warnings(record=True) as w: + warnings.simplefilter("always") + serie_list.get_by_folder(sample_serie.folder) + + # Check deprecation warning was raised + assert len(w) == 1 + assert issubclass(w[0].category, DeprecationWarning) + assert "get_by_key()" in str(w[0].message) + + +class TestSerieListBackwardCompatibility: + """Test backward compatibility of file-based operations.""" + + def test_file_based_mode_still_works( + self, temp_directory, sample_serie + ): + """Test file-based mode still works without db_session.""" + serie_list = SerieList(temp_directory) + + # Add should still work (with deprecation warning) + with warnings.catch_warnings(): + warnings.simplefilter("ignore", DeprecationWarning) + serie_list.add(sample_serie) + + # File should be created + data_path = os.path.join( + temp_directory, sample_serie.folder, "data" + ) + assert os.path.isfile(data_path) + + # Series should be in memory + assert serie_list.contains(sample_serie.key) + + def test_load_from_file_still_works( + self, temp_directory, sample_serie + ): + """Test loading from files still works.""" + # Create directory and save file + folder_path = os.path.join(temp_directory, sample_serie.folder) + os.makedirs(folder_path, exist_ok=True) + data_path = os.path.join(folder_path, "data") + sample_serie.save_to_file(data_path) + + # New SerieList should load it + serie_list = SerieList(temp_directory) + + assert serie_list.contains(sample_serie.key) + loaded = serie_list.get_by_key(sample_serie.key) + assert loaded.name == sample_serie.name diff --git a/tests/unit/test_serie_scanner.py b/tests/unit/test_serie_scanner.py new file mode 100644 index 0000000..1ef7f5e --- /dev/null +++ b/tests/unit/test_serie_scanner.py @@ -0,0 +1,319 @@ +"""Tests for SerieScanner class - file-based operations.""" + +import os +import tempfile +from unittest.mock import MagicMock, patch + +import pytest + +from src.core.entities.series import Serie +from src.core.SerieScanner import SerieScanner + + +@pytest.fixture +def temp_directory(): + """Create a temporary directory with subdirectories for testing.""" + with tempfile.TemporaryDirectory() as tmpdir: + # Create an anime folder with an mp4 file + anime_folder = os.path.join(tmpdir, "Attack on Titan (2013)") + os.makedirs(anime_folder, exist_ok=True) + + # Create a dummy mp4 file + mp4_path = os.path.join( + anime_folder, "Attack on Titan - S01E001 - (German Dub).mp4" + ) + with open(mp4_path, "w") as f: + f.write("dummy mp4") + + yield tmpdir + + +@pytest.fixture +def mock_loader(): + """Create a mock Loader instance.""" + loader = MagicMock() + loader.get_season_episode_count = MagicMock(return_value={1: 25}) + loader.is_language = MagicMock(return_value=True) + return loader + + 
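+# NOTE: episodeDict maps season -> list of missing episode numbers,
+# mirroring what SerieScanner records for a series after a scan.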
+@pytest.fixture +def sample_serie(): + """Create a sample Serie for testing.""" + return Serie( + key="attack-on-titan", + name="Attack on Titan", + site="aniworld.to", + folder="Attack on Titan (2013)", + episodeDict={1: [2, 3, 4]} + ) + + +class TestSerieScannerInitialization: + """Test SerieScanner initialization.""" + + def test_init_success(self, temp_directory, mock_loader): + """Test successful initialization.""" + scanner = SerieScanner(temp_directory, mock_loader) + + assert scanner.directory == os.path.abspath(temp_directory) + assert scanner.loader == mock_loader + assert scanner.keyDict == {} + + def test_init_empty_path_raises_error(self, mock_loader): + """Test initialization with empty path raises ValueError.""" + with pytest.raises(ValueError, match="empty"): + SerieScanner("", mock_loader) + + def test_init_nonexistent_path_raises_error(self, mock_loader): + """Test initialization with non-existent path raises ValueError.""" + with pytest.raises(ValueError, match="does not exist"): + SerieScanner("/nonexistent/path", mock_loader) + + +class TestSerieScannerScan: + """Test file-based scan operations.""" + + def test_file_based_scan_works( + self, temp_directory, mock_loader, sample_serie + ): + """Test file-based scan works properly.""" + scanner = SerieScanner(temp_directory, mock_loader) + + with patch.object(scanner, 'get_total_to_scan', return_value=1): + with patch.object( + scanner, + '_SerieScanner__find_mp4_files', + return_value=iter([ + ("Attack on Titan (2013)", ["S01E001.mp4"]) + ]) + ): + with patch.object( + scanner, + '_SerieScanner__read_data_from_file', + return_value=sample_serie + ): + with patch.object( + scanner, + '_SerieScanner__get_missing_episodes_and_season', + return_value=({1: [2, 3]}, "aniworld.to") + ): + with patch.object( + sample_serie, 'save_to_file' + ) as mock_save: + scanner.scan() + + # Verify file was saved + mock_save.assert_called_once() + + def test_keydict_populated_after_scan( + self, temp_directory, mock_loader, sample_serie + ): + """Test keyDict is populated after scan.""" + scanner = SerieScanner(temp_directory, mock_loader) + + with patch.object(scanner, 'get_total_to_scan', return_value=1): + with patch.object( + scanner, + '_SerieScanner__find_mp4_files', + return_value=iter([ + ("Attack on Titan (2013)", ["S01E001.mp4"]) + ]) + ): + with patch.object( + scanner, + '_SerieScanner__read_data_from_file', + return_value=sample_serie + ): + with patch.object( + scanner, + '_SerieScanner__get_missing_episodes_and_season', + return_value=({1: [2, 3]}, "aniworld.to") + ): + with patch.object(sample_serie, 'save_to_file'): + scanner.scan() + + assert sample_serie.key in scanner.keyDict + + +class TestSerieScannerSingleSeries: + """Test scan_single_series method for targeted scanning.""" + + def test_scan_single_series_basic( + self, temp_directory, mock_loader + ): + """Test basic scan_single_series functionality.""" + scanner = SerieScanner(temp_directory, mock_loader) + + # Mock the missing episodes calculation + with patch.object( + scanner, + '_SerieScanner__get_missing_episodes_and_season', + return_value=({1: [5, 6, 7], 2: [1, 2]}, "aniworld.to") + ): + result = scanner.scan_single_series( + key="attack-on-titan", + folder="Attack on Titan (2013)" + ) + + # Verify result structure + assert isinstance(result, dict) + assert 1 in result + assert 2 in result + assert result[1] == [5, 6, 7] + assert result[2] == [1, 2] + + def test_scan_single_series_updates_keydict( + self, temp_directory, mock_loader + ): + """Test that 
scan_single_series updates keyDict.""" + scanner = SerieScanner(temp_directory, mock_loader) + + with patch.object( + scanner, + '_SerieScanner__get_missing_episodes_and_season', + return_value=({1: [1, 2, 3]}, "aniworld.to") + ): + scanner.scan_single_series( + key="test-anime", + folder="Test Anime" + ) + + # Verify keyDict was updated + assert "test-anime" in scanner.keyDict + assert scanner.keyDict["test-anime"].episodeDict == {1: [1, 2, 3]} + + def test_scan_single_series_existing_entry( + self, temp_directory, mock_loader, sample_serie + ): + """Test scan_single_series updates existing entry in keyDict.""" + scanner = SerieScanner(temp_directory, mock_loader) + + # Pre-populate keyDict + scanner.keyDict[sample_serie.key] = sample_serie + old_episode_dict = sample_serie.episodeDict.copy() + + with patch.object( + scanner, + '_SerieScanner__get_missing_episodes_and_season', + return_value=({1: [10, 11, 12]}, "aniworld.to") + ): + scanner.scan_single_series( + key=sample_serie.key, + folder=sample_serie.folder + ) + + # Verify existing entry was updated + assert scanner.keyDict[sample_serie.key].episodeDict != old_episode_dict + assert scanner.keyDict[sample_serie.key].episodeDict == {1: [10, 11, 12]} + + def test_scan_single_series_empty_key_raises_error( + self, temp_directory, mock_loader + ): + """Test that empty key raises ValueError.""" + scanner = SerieScanner(temp_directory, mock_loader) + + with pytest.raises(ValueError, match="key cannot be empty"): + scanner.scan_single_series(key="", folder="Test Folder") + + def test_scan_single_series_empty_folder_raises_error( + self, temp_directory, mock_loader + ): + """Test that empty folder raises ValueError.""" + scanner = SerieScanner(temp_directory, mock_loader) + + with pytest.raises(ValueError, match="folder cannot be empty"): + scanner.scan_single_series(key="test-key", folder="") + + def test_scan_single_series_nonexistent_folder( + self, temp_directory, mock_loader + ): + """Test scanning a series with non-existent folder.""" + scanner = SerieScanner(temp_directory, mock_loader) + + # Mock to return some episodes (as if from provider) + with patch.object( + scanner, + '_SerieScanner__get_missing_episodes_and_season', + return_value=({1: [1, 2, 3, 4, 5]}, "aniworld.to") + ): + result = scanner.scan_single_series( + key="new-anime", + folder="NonExistent Folder" + ) + + # Should still return missing episodes from provider + assert result == {1: [1, 2, 3, 4, 5]} + + def test_scan_single_series_error_handling( + self, temp_directory, mock_loader + ): + """Test that errors during scan return empty dict.""" + scanner = SerieScanner(temp_directory, mock_loader) + + with patch.object( + scanner, + '_SerieScanner__get_missing_episodes_and_season', + side_effect=Exception("Provider error") + ): + result = scanner.scan_single_series( + key="test-anime", + folder="Test Folder" + ) + + # Should return empty dict on error + assert result == {} + + def test_scan_single_series_no_missing_episodes( + self, temp_directory, mock_loader + ): + """Test scan when no episodes are missing.""" + scanner = SerieScanner(temp_directory, mock_loader) + + with patch.object( + scanner, + '_SerieScanner__get_missing_episodes_and_season', + return_value=({}, "aniworld.to") + ): + result = scanner.scan_single_series( + key="complete-anime", + folder="Complete Anime" + ) + + assert result == {} + assert "complete-anime" in scanner.keyDict + assert scanner.keyDict["complete-anime"].episodeDict == {} + + def test_scan_single_series_with_existing_files( + self, 
temp_directory, mock_loader + ): + """Test scan with existing MP4 files in folder.""" + # Create folder with some files + anime_folder = os.path.join(temp_directory, "Test Anime") + os.makedirs(anime_folder, exist_ok=True) + season_folder = os.path.join(anime_folder, "Season 1") + os.makedirs(season_folder, exist_ok=True) + + # Create dummy MP4 files + for ep in [1, 2, 3]: + mp4_path = os.path.join( + season_folder, f"Test Anime - S01E{ep:03d} - (German Dub).mp4" + ) + with open(mp4_path, "w") as f: + f.write("dummy") + + scanner = SerieScanner(temp_directory, mock_loader) + + # Mock to return missing episodes (4, 5, 6) + with patch.object( + scanner, + '_SerieScanner__get_missing_episodes_and_season', + return_value=({1: [4, 5, 6]}, "aniworld.to") + ): + result = scanner.scan_single_series( + key="test-anime", + folder="Test Anime" + ) + + # Should only show missing episodes + assert result == {1: [4, 5, 6]} diff --git a/tests/unit/test_series_app.py b/tests/unit/test_series_app.py index 22a7a73..10e7b19 100644 --- a/tests/unit/test_series_app.py +++ b/tests/unit/test_series_app.py @@ -188,16 +188,17 @@ class TestSeriesAppDownload: app.loader.download = Mock(side_effect=mock_download_cancelled) - # Perform download - should catch InterruptedError - result = await app.download( - "anime_folder", - season=1, - episode=1, - key="anime_key" - ) + # Perform download - should re-raise InterruptedError + with pytest.raises(InterruptedError): + await app.download( + "anime_folder", + season=1, + episode=1, + key="anime_key" + ) - # Verify cancellation was handled (returns False on error) - assert result is False + # Verify cancellation event was fired + assert app._events.download_status.called @pytest.mark.asyncio @patch('src.core.SeriesApp.Loaders') @@ -240,7 +241,7 @@ class TestSeriesAppReScan: async def test_rescan_success( self, mock_serie_list, mock_scanner, mock_loaders ): - """Test successful directory rescan.""" + """Test successful directory rescan (file-based mode).""" test_dir = "/test/anime" app = SeriesApp(test_dir) @@ -251,6 +252,7 @@ class TestSeriesAppReScan: app.serie_scanner.get_total_to_scan = Mock(return_value=5) app.serie_scanner.reinit = Mock() app.serie_scanner.scan = Mock() + app.serie_scanner.keyDict = {} # Perform rescan await app.rescan() @@ -263,10 +265,10 @@ class TestSeriesAppReScan: @patch('src.core.SeriesApp.Loaders') @patch('src.core.SeriesApp.SerieScanner') @patch('src.core.SeriesApp.SerieList') - async def test_rescan_with_callback( + async def test_rescan_with_events( self, mock_serie_list, mock_scanner, mock_loaders ): - """Test rescan with progress callbacks.""" + """Test rescan with event progress notifications.""" test_dir = "/test/anime" app = SeriesApp(test_dir) @@ -276,19 +278,20 @@ class TestSeriesAppReScan: # Mock scanner app.serie_scanner.get_total_to_scan = Mock(return_value=3) app.serie_scanner.reinit = Mock() - - def mock_scan(callback): - callback("folder1", 1) - callback("folder2", 2) - callback("folder3", 3) - - app.serie_scanner.scan = Mock(side_effect=mock_scan) + app.serie_scanner.keyDict = {} + app.serie_scanner.scan = Mock() # Scan no longer takes callback + app.serie_scanner.subscribe_on_progress = Mock() + app.serie_scanner.unsubscribe_on_progress = Mock() # Perform rescan await app.rescan() - # Verify rescan completed + # Verify scanner methods were called correctly + app.serie_scanner.reinit.assert_called_once() app.serie_scanner.scan.assert_called_once() + # Verify event subscription/unsubscription happened + 
app.serie_scanner.subscribe_on_progress.assert_called_once() + app.serie_scanner.unsubscribe_on_progress.assert_called_once() @pytest.mark.asyncio @patch('src.core.SeriesApp.Loaders') @@ -385,3 +388,264 @@ class TestSeriesAppGetters: pass +class TestSeriesAppDatabaseInit: + """Test SeriesApp initialization (no database support in core).""" + + @patch('src.core.SeriesApp.Loaders') + @patch('src.core.SeriesApp.SerieScanner') + @patch('src.core.SeriesApp.SerieList') + def test_init_creates_components( + self, mock_serie_list, mock_scanner, mock_loaders + ): + """Test SeriesApp initializes all components.""" + test_dir = "/test/anime" + + # Create app + app = SeriesApp(test_dir) + + # Verify SerieList was called + mock_serie_list.assert_called_once() + + # Verify SerieScanner was called + mock_scanner.assert_called_once() + + +class TestSeriesAppLoadSeriesFromList: + """Test SeriesApp load_series_from_list method.""" + + @patch('src.core.SeriesApp.Loaders') + @patch('src.core.SeriesApp.SerieScanner') + @patch('src.core.SeriesApp.SerieList') + def test_load_series_from_list_populates_keydict( + self, mock_serie_list, mock_scanner, mock_loaders + ): + """Test load_series_from_list populates the list correctly.""" + from src.core.entities.series import Serie + + test_dir = "/test/anime" + mock_list = Mock() + mock_list.GetMissingEpisode.return_value = [] + mock_list.keyDict = {} + mock_serie_list.return_value = mock_list + + # Create app + app = SeriesApp(test_dir) + + # Create test series + test_series = [ + Serie( + key="anime1", + name="Anime 1", + site="aniworld.to", + folder="Anime 1", + episodeDict={1: [1, 2]} + ), + Serie( + key="anime2", + name="Anime 2", + site="aniworld.to", + folder="Anime 2", + episodeDict={1: [1]} + ), + ] + + # Load series + app.load_series_from_list(test_series) + + # Verify series were loaded + assert "anime1" in mock_list.keyDict + assert "anime2" in mock_list.keyDict + + +class TestSeriesAppGetAllSeriesFromDataFiles: + """Test get_all_series_from_data_files() functionality.""" + + @patch('src.core.SeriesApp.Loaders') + @patch('src.core.SeriesApp.SerieScanner') + @patch('src.core.SeriesApp.SerieList') + def test_returns_list_of_series( + self, mock_serie_list_class, mock_scanner, mock_loaders + ): + """Test that get_all_series_from_data_files returns a list of Serie.""" + from src.core.entities.series import Serie + + test_dir = "/test/anime" + + # Mock series to return + mock_series = [ + Serie( + key="anime1", + name="Anime 1", + site="https://aniworld.to", + folder="Anime 1 (2020)", + episodeDict={1: [1, 2, 3]} + ), + Serie( + key="anime2", + name="Anime 2", + site="https://aniworld.to", + folder="Anime 2 (2021)", + episodeDict={1: [1]} + ), + ] + + # Setup mock for the main SerieList instance (constructor call) + mock_main_list = Mock() + mock_main_list.GetMissingEpisode.return_value = [] + + # Setup mock for temporary SerieList in get_all_series_from_data_files + mock_temp_list = Mock() + mock_temp_list.get_all.return_value = mock_series + + # Return different mocks for the two calls + mock_serie_list_class.side_effect = [mock_main_list, mock_temp_list] + + # Create app + app = SeriesApp(test_dir) + + # Call the method + result = app.get_all_series_from_data_files() + + # Verify result is a list of Serie + assert isinstance(result, list) + assert len(result) == 2 + assert all(isinstance(s, Serie) for s in result) + assert result[0].key == "anime1" + assert result[1].key == "anime2" + + @patch('src.core.SeriesApp.Loaders') + 
@patch('src.core.SeriesApp.SerieScanner') + @patch('src.core.SeriesApp.SerieList') + def test_returns_empty_list_when_no_data_files( + self, mock_serie_list_class, mock_scanner, mock_loaders + ): + """Test that empty list is returned when no data files exist.""" + test_dir = "/test/anime" + + # Setup mock for the main SerieList instance + mock_main_list = Mock() + mock_main_list.GetMissingEpisode.return_value = [] + + # Setup mock for the temporary SerieList (empty directory) + mock_temp_list = Mock() + mock_temp_list.get_all.return_value = [] + + mock_serie_list_class.side_effect = [mock_main_list, mock_temp_list] + + # Create app + app = SeriesApp(test_dir) + + # Call the method + result = app.get_all_series_from_data_files() + + # Verify empty list is returned + assert isinstance(result, list) + assert len(result) == 0 + + @patch('src.core.SeriesApp.Loaders') + @patch('src.core.SeriesApp.SerieScanner') + @patch('src.core.SeriesApp.SerieList') + def test_handles_exception_gracefully( + self, mock_serie_list_class, mock_scanner, mock_loaders + ): + """Test exceptions are handled gracefully and empty list returned.""" + test_dir = "/test/anime" + + # Setup mock for the main SerieList instance + mock_main_list = Mock() + mock_main_list.GetMissingEpisode.return_value = [] + + # Make the second SerieList constructor raise an exception + mock_serie_list_class.side_effect = [ + mock_main_list, + OSError("Directory not found") + ] + + # Create app + app = SeriesApp(test_dir) + + # Call the method - should not raise + result = app.get_all_series_from_data_files() + + # Verify empty list is returned on error + assert isinstance(result, list) + assert len(result) == 0 + + @patch('src.core.SeriesApp.Loaders') + @patch('src.core.SeriesApp.SerieScanner') + @patch('src.core.SeriesApp.SerieList') + def test_uses_file_based_loading( + self, mock_serie_list_class, mock_scanner, mock_loaders + ): + """Test that method uses file-based loading (no db_session).""" + test_dir = "/test/anime" + + # Setup mock for the main SerieList instance + mock_main_list = Mock() + mock_main_list.GetMissingEpisode.return_value = [] + + # Setup mock for the temporary SerieList + mock_temp_list = Mock() + mock_temp_list.get_all.return_value = [] + + mock_serie_list_class.side_effect = [mock_main_list, mock_temp_list] + + # Create app + app = SeriesApp(test_dir) + + # Call the method + app.get_all_series_from_data_files() + + # Verify the second SerieList was created with correct params + # (file-based loading: db_session=None, skip_load=False) + calls = mock_serie_list_class.call_args_list + assert len(calls) == 2 + + # Check the second call (for get_all_series_from_data_files) + second_call = calls[1] + assert second_call.kwargs.get('db_session') is None + assert second_call.kwargs.get('skip_load') is False + + @patch('src.core.SeriesApp.Loaders') + @patch('src.core.SeriesApp.SerieScanner') + @patch('src.core.SeriesApp.SerieList') + def test_does_not_modify_main_list( + self, mock_serie_list_class, mock_scanner, mock_loaders + ): + """Test that method does not modify the main SerieList instance.""" + from src.core.entities.series import Serie + + test_dir = "/test/anime" + + # Setup mock for the main SerieList instance + mock_main_list = Mock() + mock_main_list.GetMissingEpisode.return_value = [] + mock_main_list.get_all.return_value = [] + + # Setup mock for the temporary SerieList + mock_temp_list = Mock() + mock_temp_list.get_all.return_value = [ + Serie( + key="anime1", + name="Anime 1", + site="https://aniworld.to", + 
folder="Anime 1", + episodeDict={} + ) + ] + + mock_serie_list_class.side_effect = [mock_main_list, mock_temp_list] + + # Create app + app = SeriesApp(test_dir) + + # Store reference to original list + original_list = app.list + + # Call the method + app.get_all_series_from_data_files() + + # Verify main list is unchanged + assert app.list is original_list + # Verify the main list's get_all was not called + mock_main_list.get_all.assert_not_called() diff --git a/tests/unit/test_service_transactions.py b/tests/unit/test_service_transactions.py new file mode 100644 index 0000000..bbd2df2 --- /dev/null +++ b/tests/unit/test_service_transactions.py @@ -0,0 +1,546 @@ +"""Unit tests for service layer transaction behavior. + +Tests that service operations correctly handle transactions, +especially compound operations that require atomicity. +""" +from datetime import datetime, timedelta, timezone +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine +from sqlalchemy.orm import sessionmaker + +from src.server.database.base import Base +from src.server.database.models import ( + AnimeSeries, + DownloadQueueItem, + Episode, + UserSession, +) +from src.server.database.service import ( + AnimeSeriesService, + DownloadQueueService, + EpisodeService, + UserSessionService, +) +from src.server.database.transaction import atomic + +# ============================================================================ +# Fixtures +# ============================================================================ + + +@pytest.fixture +async def db_engine(): + """Create in-memory database engine for testing.""" + engine = create_async_engine( + "sqlite+aiosqlite:///:memory:", + echo=False, + ) + + async with engine.begin() as conn: + await conn.run_sync(Base.metadata.create_all) + + yield engine + + await engine.dispose() + + +@pytest.fixture +async def db_session(db_engine): + """Create database session for testing.""" + from sqlalchemy.ext.asyncio import async_sessionmaker + + async_session = async_sessionmaker( + db_engine, + class_=AsyncSession, + expire_on_commit=False, + ) + + async with async_session() as session: + yield session + await session.rollback() + + +# ============================================================================ +# AnimeSeriesService Transaction Tests +# ============================================================================ + + +class TestAnimeSeriesServiceTransactions: + """Tests for AnimeSeriesService transaction behavior.""" + + @pytest.mark.asyncio + async def test_create_uses_flush_not_commit(self, db_session): + """Test create uses flush for transaction compatibility.""" + series = await AnimeSeriesService.create( + db_session, + key="test-key", + name="Test Series", + site="https://test.com", + folder="/test/folder", + ) + + # Series should exist in session + assert series.id is not None + + # But not committed yet (we're in an uncommitted transaction) + # We can verify by checking the session's uncommitted state + assert series in db_session + + @pytest.mark.asyncio + async def test_update_uses_flush_not_commit(self, db_session): + """Test update uses flush for transaction compatibility.""" + # Create series + series = await AnimeSeriesService.create( + db_session, + key="update-test", + name="Original Name", + site="https://test.com", + folder="/test/folder", + ) + + # Update series + updated = await AnimeSeriesService.update( + db_session, + series.id, + name="Updated Name", + ) + + 
assert updated.name == "Updated Name" + assert updated in db_session + + +# ============================================================================ +# EpisodeService Transaction Tests +# ============================================================================ + + +class TestEpisodeServiceTransactions: + """Tests for EpisodeService transaction behavior.""" + + @pytest.mark.asyncio + async def test_bulk_mark_downloaded_atomicity(self, db_session): + """Test bulk_mark_downloaded updates all or none.""" + # Create series and episodes + series = await AnimeSeriesService.create( + db_session, + key="bulk-test-series", + name="Bulk Test", + site="https://test.com", + folder="/test/folder", + ) + + episodes = [] + for i in range(1, 4): + ep = await EpisodeService.create( + db_session, + series_id=series.id, + season=1, + episode_number=i, + title=f"Episode {i}", + ) + episodes.append(ep) + + episode_ids = [ep.id for ep in episodes] + file_paths = [f"/path/ep{i}.mp4" for i in range(1, 4)] + + # Bulk update within atomic context + async with atomic(db_session): + count = await EpisodeService.bulk_mark_downloaded( + db_session, + episode_ids, + file_paths, + ) + + assert count == 3 + + # Verify all episodes were marked + for i, ep_id in enumerate(episode_ids): + episode = await EpisodeService.get_by_id(db_session, ep_id) + assert episode.is_downloaded is True + assert episode.file_path == file_paths[i] + + @pytest.mark.asyncio + async def test_bulk_mark_downloaded_empty_list(self, db_session): + """Test bulk_mark_downloaded handles empty list.""" + count = await EpisodeService.bulk_mark_downloaded( + db_session, + episode_ids=[], + ) + + assert count == 0 + + @pytest.mark.asyncio + async def test_delete_by_series_and_episode_transaction(self, db_session): + """Test delete_by_series_and_episode in transaction.""" + # Create series and episode + series = await AnimeSeriesService.create( + db_session, + key="delete-test-series", + name="Delete Test", + site="https://test.com", + folder="/test/folder", + ) + + await EpisodeService.create( + db_session, + series_id=series.id, + season=1, + episode_number=1, + title="Episode 1", + ) + await db_session.commit() + + # Delete episode within transaction + async with atomic(db_session): + deleted = await EpisodeService.delete_by_series_and_episode( + db_session, + series_key="delete-test-series", + season=1, + episode_number=1, + ) + + assert deleted is True + + # Verify episode is gone + episode = await EpisodeService.get_by_episode( + db_session, + series.id, + season=1, + episode_number=1, + ) + assert episode is None + + +# ============================================================================ +# DownloadQueueService Transaction Tests +# ============================================================================ + + +class TestDownloadQueueServiceTransactions: + """Tests for DownloadQueueService transaction behavior.""" + + @pytest.mark.asyncio + async def test_bulk_delete_atomicity(self, db_session): + """Test bulk_delete removes all or none.""" + # Create series and episodes + series = await AnimeSeriesService.create( + db_session, + key="queue-bulk-test", + name="Queue Bulk Test", + site="https://test.com", + folder="/test/folder", + ) + + item_ids = [] + for i in range(1, 4): + episode = await EpisodeService.create( + db_session, + series_id=series.id, + season=1, + episode_number=i, + ) + item = await DownloadQueueService.create( + db_session, + series_id=series.id, + episode_id=episode.id, + ) + item_ids.append(item.id) + + # Bulk 
delete within atomic context + async with atomic(db_session): + count = await DownloadQueueService.bulk_delete( + db_session, + item_ids, + ) + + assert count == 3 + + # Verify all items deleted + all_items = await DownloadQueueService.get_all(db_session) + assert len(all_items) == 0 + + @pytest.mark.asyncio + async def test_bulk_delete_empty_list(self, db_session): + """Test bulk_delete handles empty list.""" + count = await DownloadQueueService.bulk_delete( + db_session, + item_ids=[], + ) + + assert count == 0 + + @pytest.mark.asyncio + async def test_clear_all_atomicity(self, db_session): + """Test clear_all removes all items atomically.""" + # Create series and queue items + series = await AnimeSeriesService.create( + db_session, + key="clear-all-test", + name="Clear All Test", + site="https://test.com", + folder="/test/folder", + ) + + for i in range(1, 4): + episode = await EpisodeService.create( + db_session, + series_id=series.id, + season=1, + episode_number=i, + ) + await DownloadQueueService.create( + db_session, + series_id=series.id, + episode_id=episode.id, + ) + + # Clear all within atomic context + async with atomic(db_session): + count = await DownloadQueueService.clear_all(db_session) + + assert count == 3 + + # Verify all items cleared + all_items = await DownloadQueueService.get_all(db_session) + assert len(all_items) == 0 + + +# ============================================================================ +# UserSessionService Transaction Tests +# ============================================================================ + + +class TestUserSessionServiceTransactions: + """Tests for UserSessionService transaction behavior.""" + + @pytest.mark.asyncio + async def test_rotate_session_atomicity(self, db_session): + """Test rotate_session is atomic (revoke + create).""" + # Create old session + old_session = await UserSessionService.create( + db_session, + session_id="old-session-123", + token_hash="old_hash", + expires_at=datetime.now(timezone.utc) + timedelta(hours=1), + ) + await db_session.commit() + + # Rotate session within atomic context + async with atomic(db_session): + new_session = await UserSessionService.rotate_session( + db_session, + old_session_id="old-session-123", + new_session_id="new-session-456", + new_token_hash="new_hash", + new_expires_at=datetime.now(timezone.utc) + timedelta(hours=2), + ) + + assert new_session is not None + assert new_session.session_id == "new-session-456" + + # Verify old session is revoked + old = await UserSessionService.get_by_session_id( + db_session, "old-session-123" + ) + assert old.is_active is False + + @pytest.mark.asyncio + async def test_rotate_session_old_not_found(self, db_session): + """Test rotate_session returns None if old session not found.""" + result = await UserSessionService.rotate_session( + db_session, + old_session_id="nonexistent-session", + new_session_id="new-session", + new_token_hash="hash", + new_expires_at=datetime.now(timezone.utc) + timedelta(hours=1), + ) + + assert result is None + + @pytest.mark.asyncio + async def test_cleanup_expired_bulk_delete(self, db_session): + """Test cleanup_expired removes all expired sessions.""" + # Create expired sessions + for i in range(3): + await UserSessionService.create( + db_session, + session_id=f"expired-{i}", + token_hash=f"hash-{i}", + expires_at=datetime.now(timezone.utc) - timedelta(hours=1), + ) + + # Create active session + await UserSessionService.create( + db_session, + session_id="active-session", + token_hash="active_hash", + 
expires_at=datetime.now(timezone.utc) + timedelta(hours=1), + ) + await db_session.commit() + + # Cleanup expired within atomic context + async with atomic(db_session): + count = await UserSessionService.cleanup_expired(db_session) + + assert count == 3 + + # Verify active session still exists + active = await UserSessionService.get_by_session_id( + db_session, "active-session" + ) + assert active is not None + + +# ============================================================================ +# Compound Operation Rollback Tests +# ============================================================================ + + +class TestCompoundOperationRollback: + """Tests for rollback behavior in compound operations.""" + + @pytest.mark.asyncio + async def test_rollback_on_partial_failure(self, db_session): + """Test rollback when compound operation fails mid-way.""" + # Create initial series + series = await AnimeSeriesService.create( + db_session, + key="rollback-test-series", + name="Rollback Test", + site="https://test.com", + folder="/test/folder", + ) + await db_session.commit() + + # Store the id before starting the transaction to avoid expired state access + series_id = series.id + + try: + async with atomic(db_session): + # Create episode + episode = await EpisodeService.create( + db_session, + series_id=series_id, + season=1, + episode_number=1, + ) + + # Force flush to persist episode in transaction + await db_session.flush() + + # Simulate failure mid-operation + raise ValueError("Simulated failure") + + except ValueError: + pass + + # Verify episode was NOT persisted + episode = await EpisodeService.get_by_episode( + db_session, + series_id, + season=1, + episode_number=1, + ) + assert episode is None + + @pytest.mark.asyncio + async def test_no_orphan_data_on_failure(self, db_session): + """Test no orphaned data when multi-service operation fails.""" + try: + async with atomic(db_session): + # Create series + series = await AnimeSeriesService.create( + db_session, + key="orphan-test-series", + name="Orphan Test", + site="https://test.com", + folder="/test/folder", + ) + + # Create episode + episode = await EpisodeService.create( + db_session, + series_id=series.id, + season=1, + episode_number=1, + ) + + # Create queue item + item = await DownloadQueueService.create( + db_session, + series_id=series.id, + episode_id=episode.id, + ) + + await db_session.flush() + + # Fail after all creates + raise RuntimeError("Critical failure") + + except RuntimeError: + pass + + # Verify nothing was persisted + all_series = await AnimeSeriesService.get_all(db_session) + series_keys = [s.key for s in all_series] + assert "orphan-test-series" not in series_keys + + +# ============================================================================ +# Nested Transaction Tests +# ============================================================================ + + +class TestNestedTransactions: + """Tests for nested transaction (savepoint) behavior.""" + + @pytest.mark.asyncio + async def test_savepoint_partial_rollback(self, db_session): + """Test savepoint allows partial rollback.""" + # Create series + series = await AnimeSeriesService.create( + db_session, + key="savepoint-test", + name="Savepoint Test", + site="https://test.com", + folder="/test/folder", + ) + + async with atomic(db_session) as tx: + # Create first episode (should persist) + await EpisodeService.create( + db_session, + series_id=series.id, + season=1, + episode_number=1, + ) + + # Nested transaction for second episode + async with tx.savepoint() 
as sp: + await EpisodeService.create( + db_session, + series_id=series.id, + season=1, + episode_number=2, + ) + + # Rollback only the savepoint + await sp.rollback() + + # Create third episode (should persist) + await EpisodeService.create( + db_session, + series_id=series.id, + season=1, + episode_number=3, + ) + + # Verify first and third episodes exist, second doesn't + episodes = await EpisodeService.get_by_series(db_session, series.id) + episode_numbers = [ep.episode_number for ep in episodes] + + assert 1 in episode_numbers + assert 2 not in episode_numbers # Rolled back + assert 3 in episode_numbers diff --git a/tests/unit/test_static_files.py b/tests/unit/test_static_files.py index f43fa37..fb0c209 100644 --- a/tests/unit/test_static_files.py +++ b/tests/unit/test_static_files.py @@ -41,8 +41,9 @@ class TestCSSFileServing: @pytest.mark.asyncio async def test_css_contains_expected_variables(self, client): - """Test that styles.css contains expected CSS variables.""" - response = await client.get("/static/css/styles.css") + """Test that CSS variables are defined in base/variables.css.""" + # Variables are now in a separate module file + response = await client.get("/static/css/base/variables.css") assert response.status_code == 200 content = response.text @@ -56,22 +57,21 @@ class TestCSSFileServing: @pytest.mark.asyncio async def test_css_contains_dark_theme_support(self, client): - """Test that styles.css contains dark theme support.""" - response = await client.get("/static/css/styles.css") + """Test that dark theme support is in base/variables.css.""" + # Dark theme variables are now in a separate module file + response = await client.get("/static/css/base/variables.css") assert response.status_code == 200 content = response.text # Check for dark theme variables assert '[data-theme="dark"]' in content - assert "--color-bg-primary-dark:" in content - assert "--color-text-primary-dark:" in content @pytest.mark.asyncio async def test_css_contains_responsive_design(self, client): """Test that CSS files contain responsive design media queries.""" - # Test styles.css - response = await client.get("/static/css/styles.css") + # Responsive styles are now in utilities/responsive.css + response = await client.get("/static/css/utilities/responsive.css") assert response.status_code == 200 assert "@media" in response.text @@ -195,18 +195,29 @@ class TestCSSContentIntegrity: @pytest.mark.asyncio async def test_styles_css_structure(self, client): - """Test that styles.css has proper structure.""" + """Test that styles.css is a modular entry point with @import statements.""" response = await client.get("/static/css/styles.css") assert response.status_code == 200 content = response.text + # styles.css is now an entry point with @import statements + assert "@import" in content + + # At minimum, the base partials must be imported by the entry point + assert "base/" in content + + @pytest.mark.asyncio + async def test_css_variables_file_structure(self, client): + """Test that base/variables.css has proper structure.""" + response = await client.get("/static/css/base/variables.css") + assert response.status_code == 200 + + content = response.text + # Should have CSS variable definitions assert ":root" in content - # Should have base element styles - assert "body" in content or "html" in content - # Should not have syntax errors (basic check) # Count braces - should be balanced open_braces = content.count("{") @@ -229,12 +240,17 @@ class TestCSSContentIntegrity: 
@pytest.mark.asyncio async def test_css_file_sizes_reasonable(self, client): """Test that CSS files are not empty and have reasonable sizes.""" - # Test styles.css + # Test styles.css (now just @imports, so smaller) response = await client.get("/static/css/styles.css") assert response.status_code == 200 - assert len(response.text) > 1000, "styles.css seems too small" + assert len(response.text) > 100, "styles.css seems too small" assert len(response.text) < 500000, "styles.css seems unusually large" + # Test variables.css (has actual content) + response = await client.get("/static/css/base/variables.css") + assert response.status_code == 200 + assert len(response.text) > 500, "variables.css seems too small" + # Test ux_features.css response = await client.get("/static/css/ux_features.css") assert response.status_code == 200 diff --git a/tests/unit/test_template_integration.py b/tests/unit/test_template_integration.py index 7511386..309786a 100644 --- a/tests/unit/test_template_integration.py +++ b/tests/unit/test_template_integration.py @@ -110,13 +110,18 @@ class TestTemplateIntegration: assert b"" in content async def test_templates_load_required_javascript(self, client): - """Test that index template loads all required JavaScript files.""" + """Test that index template loads all required JavaScript modules.""" response = await client.get("/") assert response.status_code == 200 content = response.content - # Check for main app.js - assert b"/static/js/app.js" in content + # Check for modular JS structure (shared modules) + assert b"/static/js/shared/constants.js" in content + assert b"/static/js/shared/auth.js" in content + assert b"/static/js/shared/api-client.js" in content + + # Check for index-specific modules + assert b"/static/js/index/app-init.js" in content # Check for localization.js assert b"/static/js/localization.js" in content @@ -131,8 +136,8 @@ class TestTemplateIntegration: """Test that queue template includes WebSocket support.""" response = await client.get("/queue") assert response.status_code == 200 - # Check for websocket_client.js implementation - assert b"websocket_client.js" in response.content + # Check for modular websocket client + assert b"/static/js/shared/websocket-client.js" in response.content async def test_index_includes_search_functionality(self, client): """Test that index page includes search functionality.""" diff --git a/tests/unit/test_transactions.py b/tests/unit/test_transactions.py new file mode 100644 index 0000000..3f158ff --- /dev/null +++ b/tests/unit/test_transactions.py @@ -0,0 +1,668 @@ +"""Unit tests for database transaction utilities. + +Tests the transaction management utilities including decorators, +context managers, and helper functions. 
+""" +import asyncio +from datetime import datetime, timezone +from unittest.mock import AsyncMock, MagicMock, patch + +import pytest +from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine +from sqlalchemy.orm import Session, sessionmaker + +from src.server.database.base import Base +from src.server.database.transaction import ( + AsyncTransactionContext, + TransactionContext, + TransactionError, + TransactionPropagation, + atomic, + atomic_sync, + is_in_transaction, + transactional, +) + +# ============================================================================ +# Fixtures +# ============================================================================ + + +@pytest.fixture +async def async_engine(): + """Create in-memory async database engine for testing.""" + engine = create_async_engine( + "sqlite+aiosqlite:///:memory:", + echo=False, + ) + + async with engine.begin() as conn: + await conn.run_sync(Base.metadata.create_all) + + yield engine + + await engine.dispose() + + +@pytest.fixture +async def async_session(async_engine): + """Create async database session for testing.""" + from sqlalchemy.ext.asyncio import async_sessionmaker + + async_session_factory = async_sessionmaker( + async_engine, + class_=AsyncSession, + expire_on_commit=False, + ) + + async with async_session_factory() as session: + yield session + await session.rollback() + + +# ============================================================================ +# TransactionContext Tests (Sync) +# ============================================================================ + + +class TestTransactionContext: + """Tests for synchronous TransactionContext.""" + + def test_context_manager_protocol(self): + """Test context manager enters and exits properly.""" + mock_session = MagicMock(spec=Session) + mock_session.in_transaction.return_value = False + + with TransactionContext(mock_session) as ctx: + assert ctx.session == mock_session + mock_session.begin.assert_called_once() + + mock_session.commit.assert_called_once() + + def test_rollback_on_exception(self): + """Test rollback is called when exception occurs.""" + mock_session = MagicMock(spec=Session) + mock_session.in_transaction.return_value = False + + with pytest.raises(ValueError): + with TransactionContext(mock_session): + raise ValueError("Test error") + + mock_session.rollback.assert_called_once() + mock_session.commit.assert_not_called() + + def test_no_begin_if_already_in_transaction(self): + """Test no new transaction started if already in one.""" + mock_session = MagicMock(spec=Session) + mock_session.in_transaction.return_value = True + + with TransactionContext(mock_session): + pass + + mock_session.begin.assert_not_called() + + def test_explicit_commit(self): + """Test explicit commit within context.""" + mock_session = MagicMock(spec=Session) + mock_session.in_transaction.return_value = False + + with TransactionContext(mock_session) as ctx: + ctx.commit() + mock_session.commit.assert_called_once() + + # Should not commit again on exit + assert mock_session.commit.call_count == 1 + + def test_explicit_rollback(self): + """Test explicit rollback within context.""" + mock_session = MagicMock(spec=Session) + mock_session.in_transaction.return_value = False + + with TransactionContext(mock_session) as ctx: + ctx.rollback() + mock_session.rollback.assert_called_once() + + # Should not commit after explicit rollback + mock_session.commit.assert_not_called() + + +# ============================================================================ +# 
AsyncTransactionContext Tests +# ============================================================================ + + +class TestAsyncTransactionContext: + """Tests for asynchronous AsyncTransactionContext.""" + + @pytest.mark.asyncio + async def test_async_context_manager_protocol(self): + """Test async context manager enters and exits properly.""" + mock_session = AsyncMock(spec=AsyncSession) + mock_session.in_transaction.return_value = False + mock_session.begin = AsyncMock() + mock_session.commit = AsyncMock() + mock_session.rollback = AsyncMock() + + async with AsyncTransactionContext(mock_session) as ctx: + assert ctx.session == mock_session + mock_session.begin.assert_called_once() + + mock_session.commit.assert_called_once() + + @pytest.mark.asyncio + async def test_async_rollback_on_exception(self): + """Test async rollback is called when exception occurs.""" + mock_session = AsyncMock(spec=AsyncSession) + mock_session.in_transaction.return_value = False + mock_session.begin = AsyncMock() + mock_session.commit = AsyncMock() + mock_session.rollback = AsyncMock() + + with pytest.raises(ValueError): + async with AsyncTransactionContext(mock_session): + raise ValueError("Test error") + + mock_session.rollback.assert_called_once() + mock_session.commit.assert_not_called() + + @pytest.mark.asyncio + async def test_async_explicit_commit(self): + """Test async explicit commit within context.""" + mock_session = AsyncMock(spec=AsyncSession) + mock_session.in_transaction.return_value = False + mock_session.begin = AsyncMock() + mock_session.commit = AsyncMock() + + async with AsyncTransactionContext(mock_session) as ctx: + await ctx.commit() + mock_session.commit.assert_called_once() + + # Should not commit again on exit + assert mock_session.commit.call_count == 1 + + @pytest.mark.asyncio + async def test_async_explicit_rollback(self): + """Test async explicit rollback within context.""" + mock_session = AsyncMock(spec=AsyncSession) + mock_session.in_transaction.return_value = False + mock_session.begin = AsyncMock() + mock_session.commit = AsyncMock() + mock_session.rollback = AsyncMock() + + async with AsyncTransactionContext(mock_session) as ctx: + await ctx.rollback() + mock_session.rollback.assert_called_once() + + # Should not commit after explicit rollback + mock_session.commit.assert_not_called() + + +# ============================================================================ +# atomic() Context Manager Tests +# ============================================================================ + + +class TestAtomicContextManager: + """Tests for atomic() async context manager.""" + + @pytest.mark.asyncio + async def test_atomic_commits_on_success(self): + """Test atomic commits transaction on success.""" + mock_session = AsyncMock(spec=AsyncSession) + mock_session.in_transaction.return_value = False + mock_session.begin = AsyncMock() + mock_session.commit = AsyncMock() + mock_session.rollback = AsyncMock() + + async with atomic(mock_session) as tx: + pass + + mock_session.commit.assert_called_once() + + @pytest.mark.asyncio + async def test_atomic_rollback_on_failure(self): + """Test atomic rolls back transaction on failure.""" + mock_session = AsyncMock(spec=AsyncSession) + mock_session.in_transaction.return_value = False + mock_session.begin = AsyncMock() + mock_session.commit = AsyncMock() + mock_session.rollback = AsyncMock() + + with pytest.raises(RuntimeError): + async with atomic(mock_session): + raise RuntimeError("Operation failed") + + mock_session.rollback.assert_called_once() + 
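# ----------------------------------------------------------------------------
# For orientation: the tests above drive atomic() entirely through mocks, so
# this diff never shows the helper itself. Below is a minimal sketch of the
# contract they assert — begin if no transaction is active, commit on success,
# roll back and re-raise on error. The name atomic_sketch and everything
# beyond the begin/commit/rollback/in_transaction calls are assumptions; the
# real atomic() in src/server/database/transaction.py additionally handles
# propagation modes and savepoints.
# ----------------------------------------------------------------------------
from contextlib import asynccontextmanager

from sqlalchemy.ext.asyncio import AsyncSession


@asynccontextmanager
async def atomic_sketch(session: AsyncSession):
    """Begin a transaction if none is active; commit on success, roll back on error."""
    owns_transaction = not session.in_transaction()
    if owns_transaction:
        # AsyncSessionTransaction is awaitable, so this starts the transaction.
        await session.begin()
    try:
        yield session
        if owns_transaction:
            await session.commit()
    except Exception:
        if owns_transaction:
            await session.rollback()
        raise
# ----------------------------------------------------------------------------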
+ @pytest.mark.asyncio + async def test_atomic_nested_propagation(self): + """Test atomic with NESTED propagation creates savepoint.""" + mock_session = AsyncMock(spec=AsyncSession) + mock_session.in_transaction.return_value = True + mock_nested = AsyncMock() + mock_session.begin_nested = AsyncMock(return_value=mock_nested) + + async with atomic( + mock_session, propagation=TransactionPropagation.NESTED + ): + pass + + mock_session.begin_nested.assert_called_once() + + @pytest.mark.asyncio + async def test_atomic_required_propagation_default(self): + """Test atomic uses REQUIRED propagation by default.""" + mock_session = AsyncMock(spec=AsyncSession) + mock_session.in_transaction.return_value = False + mock_session.begin = AsyncMock() + mock_session.commit = AsyncMock() + + async with atomic(mock_session) as tx: + # Should start new transaction + mock_session.begin.assert_called_once() + + +# ============================================================================ +# @transactional Decorator Tests +# ============================================================================ + + +class TestTransactionalDecorator: + """Tests for @transactional decorator.""" + + @pytest.mark.asyncio + async def test_async_function_wrapped(self): + """Test async function is wrapped in transaction.""" + mock_session = AsyncMock(spec=AsyncSession) + mock_session.in_transaction.return_value = False + mock_session.begin = AsyncMock() + mock_session.commit = AsyncMock() + mock_session.rollback = AsyncMock() + + @transactional() + async def sample_operation(db: AsyncSession): + return "result" + + result = await sample_operation(db=mock_session) + + assert result == "result" + mock_session.commit.assert_called_once() + + @pytest.mark.asyncio + async def test_async_rollback_on_error(self): + """Test async function rollback on error.""" + mock_session = AsyncMock(spec=AsyncSession) + mock_session.in_transaction.return_value = False + mock_session.begin = AsyncMock() + mock_session.commit = AsyncMock() + mock_session.rollback = AsyncMock() + + @transactional() + async def failing_operation(db: AsyncSession): + raise ValueError("Operation failed") + + with pytest.raises(ValueError): + await failing_operation(db=mock_session) + + mock_session.rollback.assert_called_once() + + @pytest.mark.asyncio + async def test_custom_session_param_name(self): + """Test decorator with custom session parameter name.""" + mock_session = AsyncMock(spec=AsyncSession) + mock_session.in_transaction.return_value = False + mock_session.begin = AsyncMock() + mock_session.commit = AsyncMock() + + @transactional(session_param="session") + async def operation_with_session(session: AsyncSession): + return "done" + + result = await operation_with_session(session=mock_session) + + assert result == "done" + mock_session.commit.assert_called_once() + + @pytest.mark.asyncio + async def test_missing_session_raises_error(self): + """Test error raised when session parameter not found.""" + @transactional() + async def operation_no_session(data: dict): + return data + + with pytest.raises(TransactionError): + await operation_no_session(data={"key": "value"}) + + @pytest.mark.asyncio + async def test_propagation_passed_to_atomic(self): + """Test propagation mode is passed to atomic.""" + mock_session = AsyncMock(spec=AsyncSession) + mock_session.in_transaction.return_value = True + mock_nested = AsyncMock() + mock_session.begin_nested = AsyncMock(return_value=mock_nested) + + @transactional(propagation=TransactionPropagation.NESTED) + async def 
nested_operation(db: AsyncSession): + return "nested" + + result = await nested_operation(db=mock_session) + + assert result == "nested" + mock_session.begin_nested.assert_called_once() + + +# ============================================================================ +# Sync transactional decorator Tests +# ============================================================================ + + +class TestSyncTransactionalDecorator: + """Tests for @transactional decorator with sync functions.""" + + def test_sync_function_wrapped(self): + """Test sync function is wrapped in transaction.""" + mock_session = MagicMock(spec=Session) + mock_session.in_transaction.return_value = False + + @transactional() + def sample_sync_operation(db: Session): + return "sync_result" + + result = sample_sync_operation(db=mock_session) + + assert result == "sync_result" + mock_session.commit.assert_called_once() + + def test_sync_rollback_on_error(self): + """Test sync function rollback on error.""" + mock_session = MagicMock(spec=Session) + mock_session.in_transaction.return_value = False + + @transactional() + def failing_sync_operation(db: Session): + raise ValueError("Sync operation failed") + + with pytest.raises(ValueError): + failing_sync_operation(db=mock_session) + + mock_session.rollback.assert_called_once() + + +# ============================================================================ +# Helper Function Tests +# ============================================================================ + + +class TestHelperFunctions: + """Tests for transaction helper functions.""" + + def test_is_in_transaction_true(self): + """Test is_in_transaction returns True when in transaction.""" + mock_session = MagicMock() + mock_session.in_transaction.return_value = True + + assert is_in_transaction(mock_session) is True + + def test_is_in_transaction_false(self): + """Test is_in_transaction returns False when not in transaction.""" + mock_session = MagicMock() + mock_session.in_transaction.return_value = False + + assert is_in_transaction(mock_session) is False + + +# ============================================================================ +# Integration Tests with Real Database +# ============================================================================ + + +class TestTransactionIntegration: + """Integration tests using real in-memory database.""" + + @pytest.mark.asyncio + async def test_real_transaction_commit(self, async_session): + """Test actual transaction commit with real session.""" + from src.server.database.models import AnimeSeries + + async with atomic(async_session): + series = AnimeSeries( + key="test-series", + name="Test Series", + site="https://test.com", + folder="/test/folder", + ) + async_session.add(series) + + # Verify data persisted + from sqlalchemy import select + result = await async_session.execute( + select(AnimeSeries).where(AnimeSeries.key == "test-series") + ) + saved_series = result.scalar_one_or_none() + + assert saved_series is not None + assert saved_series.name == "Test Series" + + @pytest.mark.asyncio + async def test_real_transaction_rollback(self, async_session): + """Test actual transaction rollback with real session.""" + from src.server.database.models import AnimeSeries + + try: + async with atomic(async_session): + series = AnimeSeries( + key="rollback-series", + name="Rollback Series", + site="https://test.com", + folder="/test/folder", + ) + async_session.add(series) + await async_session.flush() + + # Force rollback + raise ValueError("Simulated error") + except 
ValueError: + pass + + # Verify data was NOT persisted + from sqlalchemy import select + result = await async_session.execute( + select(AnimeSeries).where(AnimeSeries.key == "rollback-series") + ) + saved_series = result.scalar_one_or_none() + + assert saved_series is None + + +# ============================================================================ +# TransactionPropagation Tests +# ============================================================================ + + +class TestTransactionPropagation: + """Tests for transaction propagation modes.""" + + def test_propagation_enum_values(self): + """Test propagation enum has correct values.""" + assert TransactionPropagation.REQUIRED.value == "required" + assert TransactionPropagation.REQUIRES_NEW.value == "requires_new" + assert TransactionPropagation.NESTED.value == "nested" + + +# ============================================================================ +# Additional Coverage Tests +# ============================================================================ + + +class TestSyncSavepointCoverage: + """Additional tests for sync savepoint coverage.""" + + def test_savepoint_exception_rolls_back(self): + """Test savepoint rollback when exception occurs within savepoint.""" + mock_session = MagicMock(spec=Session) + mock_session.in_transaction.return_value = False + mock_nested = MagicMock() + mock_session.begin_nested.return_value = mock_nested + + with TransactionContext(mock_session) as ctx: + with pytest.raises(ValueError): + with ctx.savepoint() as sp: + raise ValueError("Error in savepoint") + + # Nested transaction should have been rolled back + mock_nested.rollback.assert_called_once() + + def test_savepoint_commit_explicit(self): + """Test explicit commit on savepoint.""" + mock_session = MagicMock(spec=Session) + mock_session.in_transaction.return_value = False + mock_nested = MagicMock() + mock_session.begin_nested.return_value = mock_nested + + with TransactionContext(mock_session) as ctx: + with ctx.savepoint() as sp: + sp.commit() + # Commit should just log, SQLAlchemy handles actual commit + + +class TestAsyncSavepointCoverage: + """Additional tests for async savepoint coverage.""" + + @pytest.mark.asyncio + async def test_async_savepoint_exception_rolls_back(self): + """Test async savepoint rollback when exception occurs.""" + mock_session = AsyncMock(spec=AsyncSession) + mock_session.in_transaction.return_value = False + mock_session.begin = AsyncMock() + mock_session.commit = AsyncMock() + mock_session.rollback = AsyncMock() + mock_nested = AsyncMock() + mock_nested.rollback = AsyncMock() + mock_session.begin_nested = AsyncMock(return_value=mock_nested) + + async with AsyncTransactionContext(mock_session) as ctx: + with pytest.raises(ValueError): + async with ctx.savepoint() as sp: + raise ValueError("Error in async savepoint") + + # Nested transaction should have been rolled back + mock_nested.rollback.assert_called_once() + + @pytest.mark.asyncio + async def test_async_savepoint_commit_explicit(self): + """Test explicit commit on async savepoint.""" + mock_session = AsyncMock(spec=AsyncSession) + mock_session.in_transaction.return_value = False + mock_session.begin = AsyncMock() + mock_session.commit = AsyncMock() + mock_nested = AsyncMock() + mock_session.begin_nested = AsyncMock(return_value=mock_nested) + + async with AsyncTransactionContext(mock_session) as ctx: + async with ctx.savepoint() as sp: + await sp.commit() + # Commit should just log, SQLAlchemy handles actual commit + + +class 
TestAtomicNestedPropagationNoTransaction: + """Tests for NESTED propagation when not in transaction.""" + + @pytest.mark.asyncio + async def test_async_nested_starts_new_when_not_in_transaction(self): + """Test NESTED propagation starts new transaction when none exists.""" + mock_session = AsyncMock(spec=AsyncSession) + mock_session.in_transaction.return_value = False + mock_session.begin = AsyncMock() + mock_session.commit = AsyncMock() + mock_session.rollback = AsyncMock() + + async with atomic(mock_session, TransactionPropagation.NESTED) as tx: + # Should start new transaction since none exists + pass + + mock_session.begin.assert_called_once() + mock_session.commit.assert_called_once() + + def test_sync_nested_starts_new_when_not_in_transaction(self): + """Test sync NESTED propagation starts new transaction when none exists.""" + mock_session = MagicMock(spec=Session) + mock_session.in_transaction.return_value = False + + with atomic_sync(mock_session, TransactionPropagation.NESTED) as tx: + pass + + mock_session.begin.assert_called_once() + mock_session.commit.assert_called_once() + + +class TestGetTransactionDepth: + """Tests for get_transaction_depth helper.""" + + def test_depth_zero_when_not_in_transaction(self): + """Test depth is 0 when not in transaction.""" + from src.server.database.transaction import get_transaction_depth + + mock_session = MagicMock(spec=Session) + mock_session.in_transaction.return_value = False + + depth = get_transaction_depth(mock_session) + assert depth == 0 + + def test_depth_one_in_transaction(self): + """Test depth is 1 in basic transaction.""" + from src.server.database.transaction import get_transaction_depth + + mock_session = MagicMock(spec=Session) + mock_session.in_transaction.return_value = True + mock_session._nested_transaction = None + + depth = get_transaction_depth(mock_session) + assert depth == 1 + + def test_depth_two_with_nested_transaction(self): + """Test depth is 2 with nested transaction.""" + from src.server.database.transaction import get_transaction_depth + + mock_session = MagicMock(spec=Session) + mock_session.in_transaction.return_value = True + mock_session._nested_transaction = MagicMock() # Has nested + + depth = get_transaction_depth(mock_session) + assert depth == 2 + + +class TestTransactionalDecoratorPositionalArgs: + """Tests for transactional decorator with positional arguments.""" + + @pytest.mark.asyncio + async def test_session_from_positional_arg(self): + """Test decorator extracts session from positional argument.""" + mock_session = AsyncMock(spec=AsyncSession) + mock_session.in_transaction.return_value = False + mock_session.begin = AsyncMock() + mock_session.commit = AsyncMock() + mock_session.rollback = AsyncMock() + + @transactional() + async def operation(db: AsyncSession, data: str): + return f"processed: {data}" + + # Pass session as positional argument + result = await operation(mock_session, "test") + + assert result == "processed: test" + mock_session.commit.assert_called_once() + + def test_sync_session_from_positional_arg(self): + """Test sync decorator extracts session from positional argument.""" + mock_session = MagicMock(spec=Session) + mock_session.in_transaction.return_value = False + + @transactional() + def operation(db: Session, data: str): + return f"processed: {data}" + + result = operation(mock_session, "test") + + assert result == "processed: test" + mock_session.commit.assert_called_once() diff --git a/tests/unit/test_websocket_service.py b/tests/unit/test_websocket_service.py index 
45347a1..6a48529 100644 --- a/tests/unit/test_websocket_service.py +++ b/tests/unit/test_websocket_service.py @@ -433,6 +433,105 @@ class TestWebSocketService: assert call_args["data"]["code"] == error_code assert call_args["data"]["message"] == error_message + @pytest.mark.asyncio + async def test_broadcast_scan_started(self, service, mock_websocket): + """Test broadcasting scan started event.""" + connection_id = "test-conn" + directory = "/home/user/anime" + total_items = 42 + + await service.connect(mock_websocket, connection_id) + await service.broadcast_scan_started(directory, total_items) + + assert mock_websocket.send_json.called + call_args = mock_websocket.send_json.call_args[0][0] + assert call_args["type"] == "scan_started" + assert call_args["data"]["directory"] == directory + assert call_args["data"]["total_items"] == total_items + assert "timestamp" in call_args + + @pytest.mark.asyncio + async def test_broadcast_scan_started_default_total(self, service, mock_websocket): + """Test broadcasting scan started event with default total_items.""" + connection_id = "test-conn" + directory = "/home/user/anime" + + await service.connect(mock_websocket, connection_id) + await service.broadcast_scan_started(directory) + + assert mock_websocket.send_json.called + call_args = mock_websocket.send_json.call_args[0][0] + assert call_args["type"] == "scan_started" + assert call_args["data"]["directory"] == directory + assert call_args["data"]["total_items"] == 0 + assert "timestamp" in call_args + + @pytest.mark.asyncio + async def test_broadcast_scan_progress(self, service, mock_websocket): + """Test broadcasting scan progress event.""" + connection_id = "test-conn" + directories_scanned = 25 + files_found = 150 + current_directory = "/home/user/anime/Attack on Titan" + total_items = 100 + + await service.connect(mock_websocket, connection_id) + await service.broadcast_scan_progress( + directories_scanned, files_found, current_directory, total_items + ) + + assert mock_websocket.send_json.called + call_args = mock_websocket.send_json.call_args[0][0] + assert call_args["type"] == "scan_progress" + assert call_args["data"]["directories_scanned"] == directories_scanned + assert call_args["data"]["files_found"] == files_found + assert call_args["data"]["current_directory"] == current_directory + assert call_args["data"]["total_items"] == total_items + assert "timestamp" in call_args + + @pytest.mark.asyncio + async def test_broadcast_scan_progress_default_total(self, service, mock_websocket): + """Test broadcasting scan progress event with default total_items.""" + connection_id = "test-conn" + directories_scanned = 25 + files_found = 150 + current_directory = "/home/user/anime/Attack on Titan" + + await service.connect(mock_websocket, connection_id) + await service.broadcast_scan_progress( + directories_scanned, files_found, current_directory + ) + + assert mock_websocket.send_json.called + call_args = mock_websocket.send_json.call_args[0][0] + assert call_args["type"] == "scan_progress" + assert call_args["data"]["directories_scanned"] == directories_scanned + assert call_args["data"]["files_found"] == files_found + assert call_args["data"]["current_directory"] == current_directory + assert call_args["data"]["total_items"] == 0 + assert "timestamp" in call_args + + @pytest.mark.asyncio + async def test_broadcast_scan_completed(self, service, mock_websocket): + """Test broadcasting scan completed event.""" + connection_id = "test-conn" + total_directories = 100 + total_files = 500 + 
elapsed_seconds = 12.5 + + await service.connect(mock_websocket, connection_id) + await service.broadcast_scan_completed( + total_directories, total_files, elapsed_seconds + ) + + assert mock_websocket.send_json.called + call_args = mock_websocket.send_json.call_args[0][0] + assert call_args["type"] == "scan_completed" + assert call_args["data"]["total_directories"] == total_directories + assert call_args["data"]["total_files"] == total_files + assert call_args["data"]["elapsed_seconds"] == elapsed_seconds + assert "timestamp" in call_args + class TestGetWebSocketService: """Test cases for get_websocket_service factory function."""
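# ----------------------------------------------------------------------------
# For orientation: the scan-event tests above pin down the wire format — a
# top-level "type" and "timestamp", with the payload nested under "data". A
# minimal sketch of a broadcast helper that satisfies those assertions is
# shown below; the function name and the iterable-of-websockets parameter are
# assumptions for illustration, and the real WebSocketService is
# authoritative for connection tracking and error handling.
# ----------------------------------------------------------------------------
from datetime import datetime, timezone
from typing import Iterable


async def broadcast_scan_completed_sketch(
    websockets: Iterable,  # objects exposing an async send_json(), e.g. Starlette WebSockets
    total_directories: int,
    total_files: int,
    elapsed_seconds: float,
) -> None:
    """Send a scan_completed event to every connected client."""
    message = {
        "type": "scan_completed",
        "data": {
            "total_directories": total_directories,
            "total_files": total_files,
            "elapsed_seconds": elapsed_seconds,
        },
        "timestamp": datetime.now(timezone.utc).isoformat(),
    }
    for ws in websockets:
        await ws.send_json(message)
# ----------------------------------------------------------------------------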