Compare commits

70 Commits: 338e3feb4a ... 489c37357e

| SHA1 |
| ---------- |
| 489c37357e |
| 4f2d652a69 |
| bd655cb0f0 |
| 60070395e9 |
| f39a08d985 |
| 055bbf4de6 |
| ab7d78261e |
| b1726968e5 |
| ff9dea0488 |
| 803f35ef39 |
| 4780f68a23 |
| 08f816a954 |
| 778d16b21a |
| a67a16d6bf |
| 2e5731b5d6 |
| 94cf36bff3 |
| dfdac68ecc |
| 3d3b97bdc2 |
| 1b7ca7b4da |
| f28dc756c5 |
| d70d70e193 |
| 1ba67357dc |
| b2728a7cf4 |
| f7ee9a40da |
| 9f4ea84b47 |
| 9e393adb00 |
| 458ca1d776 |
| b6d44ca7d8 |
| 19cb8c11a0 |
| 72ac201153 |
| a24f07a36e |
| 9b071fe370 |
| 32dc893434 |
| 700f491ef9 |
| 4c9bf6b982 |
| bf332f27e0 |
| 596476f9ac |
| 27108aacda |
| 54790a7ebb |
| 1652f2f6af |
| 3cb644add4 |
| 63742bb369 |
| 8373da8547 |
| 38e0ba0484 |
| 5f6ac8e507 |
| 684337fd0c |
| 86eaa8a680 |
| ee317b29f1 |
| 842f9c88eb |
| 99f79e4c29 |
| 798461a1ea |
| 942f14f746 |
| 7c56c8bef2 |
| 3b516c0e24 |
| b0f3b643c7 |
| 48daeba012 |
| 4347057c06 |
| e0a7c6baa9 |
| ae77a11782 |
| 396b243d59 |
| 73283dea64 |
| cb014cf547 |
| 246782292f |
| 46ca4c9aac |
| 795f83ada5 |
| 646385b975 |
| 148e6c1b58 |
| de58161014 |
| 7e2d3dd5ab |
| 0222262f8f |

README.md (new file, 140 lines)

@@ -0,0 +1,140 @@

# Aniworld Download Manager

A web-based anime download manager with a REST API, WebSocket real-time updates, and a modern web interface.

## Features

- Web interface for managing the anime library
- REST API for programmatic access
- Real-time progress updates over WebSocket
- Download queue with priority management
- Automatic library scanning for missing episodes
- JWT-based authentication
- SQLite database for persistence

## Quick Start

### Prerequisites

- Python 3.10+
- Conda (recommended) or virtualenv

### Installation

1. Clone the repository:

```bash
git clone https://github.com/your-repo/aniworld.git
cd aniworld
```

2. Create and activate the conda environment:

```bash
conda create -n AniWorld python=3.10
conda activate AniWorld
```

3. Install dependencies:

```bash
pip install -r requirements.txt
```

4. Start the server:

```bash
python -m uvicorn src.server.fastapi_app:app --host 127.0.0.1 --port 8000
```

5. Open http://127.0.0.1:8000 in your browser.

### First-Time Setup

1. Navigate to http://127.0.0.1:8000/setup
2. Set a master password (minimum 8 characters, mixed case, a number, and a special character)
3. Configure your anime directory path
4. Log in with your master password

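For scripted installs, the same first-time setup can be driven through the REST API. The snippet below is a minimal sketch using `requests`; the exact field names (`password` in the request, `access_token` in the response) are assumptions here, so check [docs/API.md](docs/API.md) for the authoritative schemas.

```python
# Hypothetical scripted setup/login; field names are assumptions,
# see docs/API.md for the real request and response schemas.
import requests

BASE = "http://127.0.0.1:8000"
MASTER_PASSWORD = "Sup3r-Secret!"  # must meet the password rules above

# One-time setup: register the master password.
requests.post(f"{BASE}/api/auth/setup",
              json={"password": MASTER_PASSWORD}).raise_for_status()

# Log in and keep the JWT for authenticated requests.
resp = requests.post(f"{BASE}/api/auth/login",
                     json={"password": MASTER_PASSWORD})
resp.raise_for_status()
headers = {"Authorization": f"Bearer {resp.json()['access_token']}"}

print(requests.get(f"{BASE}/api/queue/status", headers=headers).json())
```
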
## Documentation

| Document | Description |
| ---------------------------------------------- | -------------------------------- |
| [docs/API.md](docs/API.md) | REST API and WebSocket reference |
| [docs/ARCHITECTURE.md](docs/ARCHITECTURE.md) | System architecture and design |
| [docs/CONFIGURATION.md](docs/CONFIGURATION.md) | Configuration options |
| [docs/DATABASE.md](docs/DATABASE.md) | Database schema |
| [docs/DEVELOPMENT.md](docs/DEVELOPMENT.md) | Developer setup guide |
| [docs/TESTING.md](docs/TESTING.md) | Testing guidelines |

## Project Structure

```
src/
+-- cli/          # CLI interface (legacy)
+-- config/       # Application settings
+-- core/         # Domain logic
|   +-- SeriesApp.py     # Main application facade
|   +-- SerieScanner.py  # Directory scanning
|   +-- entities/        # Domain entities
|   +-- providers/       # External provider adapters
+-- server/       # FastAPI web server
    +-- api/         # REST API endpoints
    +-- services/    # Business logic
    +-- models/      # Pydantic models
    +-- database/    # SQLAlchemy ORM
    +-- middleware/  # Auth, rate limiting
```

## API Endpoints

| Endpoint | Description |
| ------------------------------ | -------------------------------- |
| `POST /api/auth/login` | Authenticate and get JWT token |
| `GET /api/anime` | List anime with missing episodes |
| `GET /api/anime/search?query=` | Search for anime |
| `POST /api/queue/add` | Add episodes to download queue |
| `POST /api/queue/start` | Start queue processing |
| `GET /api/queue/status` | Get queue status |
| `WS /ws/connect` | WebSocket for real-time updates |

See [docs/API.md](docs/API.md) for the complete API reference.

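As a sketch of how these endpoints compose into a download workflow (reusing the `headers` from the setup example above; the body of the add request is an assumption, not the documented schema):

```python
# Hypothetical queue workflow; the add-request body shape is an assumption.
import requests

BASE = "http://127.0.0.1:8000"
headers = {"Authorization": "Bearer <token-from-login>"}

# Queue season 1, episode 1 of a series by its provider key, then start.
requests.post(f"{BASE}/api/queue/add", headers=headers, json={
    "serie_id": "attack-on-titan",              # provider-assigned key
    "episodes": [{"season": 1, "episode": 1}],
    "priority": "NORMAL",                       # or "HIGH" / "LOW"
}).raise_for_status()
requests.post(f"{BASE}/api/queue/start", headers=headers).raise_for_status()

print(requests.get(f"{BASE}/api/queue/status", headers=headers).json())
```
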
## Configuration

Environment variables (via a `.env` file):

| Variable | Default | Description |
| ----------------- | ------------------------------ | ---------------------- |
| `JWT_SECRET_KEY` | (random) | Secret for JWT signing |
| `DATABASE_URL` | `sqlite:///./data/aniworld.db` | Database connection |
| `ANIME_DIRECTORY` | (empty) | Path to anime library |
| `LOG_LEVEL` | `INFO` | Logging level |

See [docs/CONFIGURATION.md](docs/CONFIGURATION.md) for all options.

## Running Tests

```bash
# Run all tests
conda run -n AniWorld python -m pytest tests/ -v

# Run unit tests only
conda run -n AniWorld python -m pytest tests/unit/ -v

# Run integration tests
conda run -n AniWorld python -m pytest tests/integration/ -v
```

## Technology Stack

- **Web Framework**: FastAPI 0.104.1
- **Database**: SQLite + SQLAlchemy 2.0
- **Auth**: JWT (python-jose) + passlib
- **Validation**: Pydantic 2.5
- **Logging**: structlog
- **Testing**: pytest + pytest-asyncio

## License

MIT License

@@ -1,215 +0,0 @@

# Server Management Commands

Quick reference for starting, stopping, and managing the Aniworld server.

## Start Server

### Using the start script (Recommended)

```bash
./start_server.sh
```

### Using conda directly

```bash
conda run -n AniWorld python run_server.py
```

### Using uvicorn directly

```bash
conda run -n AniWorld python -m uvicorn src.server.fastapi_app:app --host 127.0.0.1 --port 8000 --reload
```

## Stop Server

### Using the stop script (Recommended)

```bash
./stop_server.sh
```

### Manual commands

**Kill uvicorn processes:**

```bash
pkill -f "uvicorn.*fastapi_app:app"
```

**Kill the process on port 8000:**

```bash
lsof -ti:8000 | xargs kill -9
```

**Kill run_server.py processes:**

```bash
pkill -f "run_server.py"
```

## Check Server Status

**Check if port 8000 is in use:**

```bash
lsof -i:8000
```

**Check for running uvicorn processes:**

```bash
ps aux | grep uvicorn
```

**Check that the server is responding:**

```bash
curl http://127.0.0.1:8000/api/health
```

## Restart Server

```bash
./stop_server.sh && ./start_server.sh
```

## Common Issues

### "Address already in use" Error

**Problem:** Port 8000 is already occupied

**Solution:**

```bash
./stop_server.sh
# or
lsof -ti:8000 | xargs kill -9
```

### Server not responding

**Check logs:**

```bash
tail -f logs/app.log
```

**Check if the process is running:**

```bash
ps aux | grep uvicorn
```

### Cannot connect to server

**Verify the server is running:**

```bash
curl http://127.0.0.1:8000/api/health
```

**Check the firewall:**

```bash
sudo ufw status
```

## Development Mode

**Run with auto-reload:**

```bash
./start_server.sh  # Already includes --reload
```

**Run with a custom port:**

```bash
conda run -n AniWorld python -m uvicorn src.server.fastapi_app:app --host 127.0.0.1 --port 8080 --reload
```

**Run with debug logging:**

```bash
export LOG_LEVEL=DEBUG
./start_server.sh
```

## Production Mode

**Run without auto-reload:**

```bash
conda run -n AniWorld python -m uvicorn src.server.fastapi_app:app --host 0.0.0.0 --port 8000 --workers 4
```

**Run with systemd (Linux):**

```bash
sudo systemctl start aniworld
sudo systemctl stop aniworld
sudo systemctl restart aniworld
sudo systemctl status aniworld
```

## URLs

- **Web Interface:** http://127.0.0.1:8000
- **API Documentation:** http://127.0.0.1:8000/api/docs
- **Login Page:** http://127.0.0.1:8000/login
- **Queue Management:** http://127.0.0.1:8000/queue
- **Health Check:** http://127.0.0.1:8000/api/health

## Default Credentials

- **Password:** `Hallo123!`

## Log Files

- **Application logs:** `logs/app.log`
- **Download logs:** `logs/downloads/`
- **Error logs:** Check console output or the systemd journal

## Quick Troubleshooting

| Symptom | Solution |
| ------------------------ | ------------------------------------ |
| Port already in use | `./stop_server.sh` |
| Server won't start | Check `logs/app.log` |
| 404 errors | Verify the URL and check routing |
| WebSocket not connecting | Check that the server is running and the firewall settings |
| Slow responses | Check system resources (`htop`) |
| Database errors | Check `data/` directory permissions |

## Environment Variables

```bash
# Set log level (DEBUG, INFO, WARNING, or ERROR)
export LOG_LEVEL=DEBUG

# Set server port
export PORT=8000

# Set host
export HOST=127.0.0.1

# Set workers (production)
export WORKERS=4
```

## Related Scripts

- `start_server.sh` - Start the server
- `stop_server.sh` - Stop the server
- `run_server.py` - Python server runner
- `scripts/setup.py` - Initial setup

## More Information

- [User Guide](docs/user_guide.md)
- [API Reference](docs/api_reference.md)
- [Deployment Guide](docs/deployment.md)

Binary file not shown.

BIN data/aniworld.db-shm (new file)
Binary file not shown.

BIN data/aniworld.db-wal (new file)
Binary file not shown.

@@ -17,7 +17,7 @@
     "keep_days": 30
   },
   "other": {
-    "master_password_hash": "$pbkdf2-sha256$29000$854zxnhvzXmPsVbqvXduTQ$G0HVRAt3kyO5eFwvo.ILkpX9JdmyXYJ9MNPTS/UxAGk",
+    "master_password_hash": "$pbkdf2-sha256$29000$o/R.b.0dYwzhfG/t/R9DSA$kQAcjHoByVaftRAT1OaZg5rILdhMSDNS6uIz67jwdOo",
     "anime_directory": "/mnt/server/serien/Serien/"
   },
   "version": "1.0.0"

@@ -1,24 +0,0 @@
{
  "name": "Aniworld",
  "data_dir": "data",
  "scheduler": {
    "enabled": true,
    "interval_minutes": 60
  },
  "logging": {
    "level": "INFO",
    "file": null,
    "max_bytes": null,
    "backup_count": 3
  },
  "backup": {
    "enabled": false,
    "path": "data/backups",
    "keep_days": 30
  },
  "other": {
    "master_password_hash": "$pbkdf2-sha256$29000$VCqllLL2vldKyTmHkJIyZg$jNllpzlpENdgCslmS.tG.PGxRZ9pUnrqFEQFveDEcYk",
    "anime_directory": "/mnt/server/serien/Serien/"
  },
  "version": "1.0.0"
}

@@ -1,24 +0,0 @@
{
  "name": "Aniworld",
  "data_dir": "data",
  "scheduler": {
    "enabled": true,
    "interval_minutes": 60
  },
  "logging": {
    "level": "INFO",
    "file": null,
    "max_bytes": null,
    "backup_count": 3
  },
  "backup": {
    "enabled": false,
    "path": "data/backups",
    "keep_days": 30
  },
  "other": {
    "master_password_hash": "$pbkdf2-sha256$29000$3/t/7733PkdoTckZQyildA$Nz9SdX2ZgqBwyzhQ9FGNcnzG1X.TW9oce3sDxJbVSdY",
    "anime_directory": "/mnt/server/serien/Serien/"
  },
  "version": "1.0.0"
}

@@ -1,327 +0,0 @@
{
  "pending": [
    {
      "id": "ae6424dc-558b-4946-9f07-20db1a09bf33",
      "serie_id": "test-series-2",
      "serie_folder": "Another Series (2024)",
      "serie_name": "Another Series",
      "episode": { "season": 1, "episode": 1, "title": null },
      "status": "pending",
      "priority": "HIGH",
      "added_at": "2025-11-28T17:54:38.593236Z",
      "started_at": null,
      "completed_at": null,
      "progress": null,
      "error": null,
      "retry_count": 0,
      "source_url": null
    },
    {
      "id": "011c2038-9fe3-41cb-844f-ce50c40e415f",
      "serie_id": "series-high",
      "serie_folder": "Series High (2024)",
      "serie_name": "Series High",
      "episode": { "season": 1, "episode": 1, "title": null },
      "status": "pending",
      "priority": "HIGH",
      "added_at": "2025-11-28T17:54:38.632289Z",
      "started_at": null,
      "completed_at": null,
      "progress": null,
      "error": null,
      "retry_count": 0,
      "source_url": null
    },
    {
      "id": "0eee56e0-414d-4cd7-8da7-b5a139abd8b5",
      "serie_id": "series-normal",
      "serie_folder": "Series Normal (2024)",
      "serie_name": "Series Normal",
      "episode": { "season": 1, "episode": 1, "title": null },
      "status": "pending",
      "priority": "NORMAL",
      "added_at": "2025-11-28T17:54:38.635082Z",
      "started_at": null,
      "completed_at": null,
      "progress": null,
      "error": null,
      "retry_count": 0,
      "source_url": null
    },
    {
      "id": "eea9f4f3-98e5-4041-9fc6-92e3d4c6fee6",
      "serie_id": "series-low",
      "serie_folder": "Series Low (2024)",
      "serie_name": "Series Low",
      "episode": { "season": 1, "episode": 1, "title": null },
      "status": "pending",
      "priority": "LOW",
      "added_at": "2025-11-28T17:54:38.637038Z",
      "started_at": null,
      "completed_at": null,
      "progress": null,
      "error": null,
      "retry_count": 0,
      "source_url": null
    },
    {
      "id": "b6f84ea9-86c8-4cc9-90e5-c7c6ce10c593",
      "serie_id": "test-series",
      "serie_folder": "Test Series (2024)",
      "serie_name": "Test Series",
      "episode": { "season": 1, "episode": 1, "title": null },
      "status": "pending",
      "priority": "NORMAL",
      "added_at": "2025-11-28T17:54:38.801266Z",
      "started_at": null,
      "completed_at": null,
      "progress": null,
      "error": null,
      "retry_count": 0,
      "source_url": null
    },
    {
      "id": "412aa28d-9763-41ef-913d-3d63919f9346",
      "serie_id": "test-series",
      "serie_folder": "Test Series (2024)",
      "serie_name": "Test Series",
      "episode": { "season": 1, "episode": 1, "title": null },
      "status": "pending",
      "priority": "NORMAL",
      "added_at": "2025-11-28T17:54:38.867939Z",
      "started_at": null,
      "completed_at": null,
      "progress": null,
      "error": null,
      "retry_count": 0,
      "source_url": null
    },
    {
      "id": "3a036824-2d14-41dd-81b8-094dd322a137",
      "serie_id": "invalid-series",
      "serie_folder": "Invalid Series (2024)",
      "serie_name": "Invalid Series",
      "episode": { "season": 99, "episode": 99, "title": null },
      "status": "pending",
      "priority": "NORMAL",
      "added_at": "2025-11-28T17:54:38.935125Z",
      "started_at": null,
      "completed_at": null,
      "progress": null,
      "error": null,
      "retry_count": 0,
      "source_url": null
    },
    {
      "id": "1f4108ed-5488-4f46-ad5b-fe27e3b04790",
      "serie_id": "test-series",
      "serie_folder": "Test Series (2024)",
      "serie_name": "Test Series",
      "episode": { "season": 1, "episode": 1, "title": null },
      "status": "pending",
      "priority": "NORMAL",
      "added_at": "2025-11-28T17:54:38.968296Z",
      "started_at": null,
      "completed_at": null,
      "progress": null,
      "error": null,
      "retry_count": 0,
      "source_url": null
    },
    {
      "id": "5e880954-1a9f-450a-8008-5b9d6ac07d66",
      "serie_id": "series-2",
      "serie_folder": "Series 2 (2024)",
      "serie_name": "Series 2",
      "episode": { "season": 1, "episode": 1, "title": null },
      "status": "pending",
      "priority": "NORMAL",
      "added_at": "2025-11-28T17:54:39.055885Z",
      "started_at": null,
      "completed_at": null,
      "progress": null,
      "error": null,
      "retry_count": 0,
      "source_url": null
    },
    {
      "id": "2415ac21-509b-4d71-b5b9-b824116d6785",
      "serie_id": "series-0",
      "serie_folder": "Series 0 (2024)",
      "serie_name": "Series 0",
      "episode": { "season": 1, "episode": 1, "title": null },
      "status": "pending",
      "priority": "NORMAL",
      "added_at": "2025-11-28T17:54:39.056795Z",
      "started_at": null,
      "completed_at": null,
      "progress": null,
      "error": null,
      "retry_count": 0,
      "source_url": null
    },
    {
      "id": "716f9823-d59a-4b04-863b-c75fd54bc464",
      "serie_id": "series-1",
      "serie_folder": "Series 1 (2024)",
      "serie_name": "Series 1",
      "episode": { "season": 1, "episode": 1, "title": null },
      "status": "pending",
      "priority": "NORMAL",
      "added_at": "2025-11-28T17:54:39.057486Z",
      "started_at": null,
      "completed_at": null,
      "progress": null,
      "error": null,
      "retry_count": 0,
      "source_url": null
    },
    {
      "id": "36ad4323-daa9-49c4-97e8-a0aec0cca7a1",
      "serie_id": "series-4",
      "serie_folder": "Series 4 (2024)",
      "serie_name": "Series 4",
      "episode": { "season": 1, "episode": 1, "title": null },
      "status": "pending",
      "priority": "NORMAL",
      "added_at": "2025-11-28T17:54:39.058179Z",
      "started_at": null,
      "completed_at": null,
      "progress": null,
      "error": null,
      "retry_count": 0,
      "source_url": null
    },
    {
      "id": "695ee7a9-42bb-4953-9a8a-10bd7f533369",
      "serie_id": "series-3",
      "serie_folder": "Series 3 (2024)",
      "serie_name": "Series 3",
      "episode": { "season": 1, "episode": 1, "title": null },
      "status": "pending",
      "priority": "NORMAL",
      "added_at": "2025-11-28T17:54:39.058816Z",
      "started_at": null,
      "completed_at": null,
      "progress": null,
      "error": null,
      "retry_count": 0,
      "source_url": null
    },
    {
      "id": "aa948908-c410-42ec-85d6-a0298d7d95a5",
      "serie_id": "persistent-series",
      "serie_folder": "Persistent Series (2024)",
      "serie_name": "Persistent Series",
      "episode": { "season": 1, "episode": 1, "title": null },
      "status": "pending",
      "priority": "NORMAL",
      "added_at": "2025-11-28T17:54:39.152427Z",
      "started_at": null,
      "completed_at": null,
      "progress": null,
      "error": null,
      "retry_count": 0,
      "source_url": null
    },
    {
      "id": "2537f20e-f394-4c68-81d5-48be3c0c402a",
      "serie_id": "ws-series",
      "serie_folder": "WebSocket Series (2024)",
      "serie_name": "WebSocket Series",
      "episode": { "season": 1, "episode": 1, "title": null },
      "status": "pending",
      "priority": "NORMAL",
      "added_at": "2025-11-28T17:54:39.219061Z",
      "started_at": null,
      "completed_at": null,
      "progress": null,
      "error": null,
      "retry_count": 0,
      "source_url": null
    },
    {
      "id": "aaaf3b05-cce8-47d5-b350-59c5d72533ad",
      "serie_id": "workflow-series",
      "serie_folder": "Workflow Test Series (2024)",
      "serie_name": "Workflow Test Series",
      "episode": { "season": 1, "episode": 1, "title": null },
      "status": "pending",
      "priority": "HIGH",
      "added_at": "2025-11-28T17:54:39.254462Z",
      "started_at": null,
      "completed_at": null,
      "progress": null,
      "error": null,
      "retry_count": 0,
      "source_url": null
    }
  ],
  "active": [],
  "failed": [],
  "timestamp": "2025-11-28T17:54:39.259761+00:00"
}

diagrams/README.md (new file, 23 lines)

@@ -0,0 +1,23 @@

# Architecture Diagrams

This directory contains architecture diagram source files for the Aniworld documentation.

## Diagrams

### System Architecture (Mermaid)

See [system-architecture.mmd](system-architecture.mmd) for the system overview diagram.

### Rendering

Diagrams can be rendered using:

- Mermaid Live Editor: https://mermaid.live/
- VS Code Mermaid extension
- GitHub/GitLab native Mermaid support

## Formats

- `.mmd` - Mermaid diagram source files
- `.svg` - Exported vector graphics (add when needed)
- `.png` - Exported raster graphics (add when needed)

diagrams/download-flow.mmd (new file, 44 lines)

@@ -0,0 +1,44 @@

%%{init: {'theme': 'base'}}%%
sequenceDiagram
    participant Client
    participant FastAPI
    participant AuthMiddleware
    participant DownloadService
    participant ProgressService
    participant WebSocketService
    participant SeriesApp
    participant Database

    Note over Client,Database: Download Flow

    %% Add to queue
    Client->>FastAPI: POST /api/queue/add
    FastAPI->>AuthMiddleware: Validate JWT
    AuthMiddleware-->>FastAPI: OK
    FastAPI->>DownloadService: add_to_queue()
    DownloadService->>Database: save_item()
    Database-->>DownloadService: item_id
    DownloadService-->>FastAPI: [item_ids]
    FastAPI-->>Client: 201 Created

    %% Start queue
    Client->>FastAPI: POST /api/queue/start
    FastAPI->>AuthMiddleware: Validate JWT
    AuthMiddleware-->>FastAPI: OK
    FastAPI->>DownloadService: start_queue_processing()

    loop For each pending item
        DownloadService->>SeriesApp: download_episode()

        loop Progress updates
            SeriesApp->>ProgressService: emit("progress_updated")
            ProgressService->>WebSocketService: broadcast_to_room()
            WebSocketService-->>Client: WebSocket message
        end

        SeriesApp-->>DownloadService: completed
        DownloadService->>Database: update_status()
    end

    DownloadService-->>FastAPI: OK
    FastAPI-->>Client: 200 OK

diagrams/system-architecture.mmd (new file, 82 lines)

@@ -0,0 +1,82 @@

%%{init: {'theme': 'base', 'themeVariables': { 'primaryColor': '#4a90d9'}}}%%
flowchart TB
    subgraph Clients["Client Layer"]
        Browser["Web Browser<br/>(HTML/CSS/JS)"]
        CLI["CLI Client<br/>(Main.py)"]
    end

    subgraph Server["Server Layer (FastAPI)"]
        direction TB
        Middleware["Middleware<br/>Auth, Rate Limit, Error Handler"]

        subgraph API["API Routers"]
            AuthAPI["/api/auth"]
            AnimeAPI["/api/anime"]
            QueueAPI["/api/queue"]
            ConfigAPI["/api/config"]
            SchedulerAPI["/api/scheduler"]
            HealthAPI["/health"]
            WebSocketAPI["/ws"]
        end

        subgraph Services["Services"]
            AuthService["AuthService"]
            AnimeService["AnimeService"]
            DownloadService["DownloadService"]
            ConfigService["ConfigService"]
            ProgressService["ProgressService"]
            WebSocketService["WebSocketService"]
        end
    end

    subgraph Core["Core Layer"]
        SeriesApp["SeriesApp"]
        SerieScanner["SerieScanner"]
        SerieList["SerieList"]
    end

    subgraph Data["Data Layer"]
        SQLite[(SQLite<br/>aniworld.db)]
        ConfigJSON[(config.json)]
        FileSystem[(File System<br/>Anime Directory)]
    end

    subgraph External["External"]
        Provider["Anime Provider<br/>(aniworld.to)"]
    end

    %% Client connections
    Browser -->|HTTP/WebSocket| Middleware
    CLI -->|Direct| SeriesApp

    %% Middleware to API
    Middleware --> API

    %% API to Services
    AuthAPI --> AuthService
    AnimeAPI --> AnimeService
    QueueAPI --> DownloadService
    ConfigAPI --> ConfigService
    SchedulerAPI --> AnimeService
    WebSocketAPI --> WebSocketService

    %% Services to Core
    AnimeService --> SeriesApp
    DownloadService --> SeriesApp

    %% Services to Data
    AuthService --> ConfigJSON
    ConfigService --> ConfigJSON
    DownloadService --> SQLite
    AnimeService --> SQLite

    %% Core to Data
    SeriesApp --> SerieScanner
    SeriesApp --> SerieList
    SerieScanner --> FileSystem
    SerieScanner --> Provider

    %% Event flow
    ProgressService -.->|Events| WebSocketService
    DownloadService -.->|Progress| ProgressService
    WebSocketService -.->|Broadcast| Browser

docs/API.md (new file, 1194 lines): file diff suppressed because it is too large.

docs/ARCHITECTURE.md (new file, 625 lines)

@@ -0,0 +1,625 @@

# Architecture Documentation

## Document Purpose

This document describes the system architecture of the Aniworld anime download manager.

---

## 1. System Overview

Aniworld is a web-based anime download manager built with Python, FastAPI, and SQLite. It provides a REST API and WebSocket interface for managing anime libraries, downloading episodes, and tracking progress.

### High-Level Architecture

```
+------------------+     +------------------+     +------------------+
|   Web Browser    |     |   CLI Client     |     |    External      |
|   (Frontend)     |     |   (Main.py)      |     |    Providers     |
+--------+---------+     +--------+---------+     +--------+---------+
         |                        |                        |
         | HTTP/WebSocket         | Direct                 | HTTP
         |                        |                        |
+--------v---------+     +--------v---------+     +--------v---------+
|                  |     |                  |     |                  |
|     FastAPI      |<--->|    Core Layer    |<--->|     Provider     |
|   Server Layer   |     |   (SeriesApp)    |     |     Adapters     |
|                  |     |                  |     |                  |
+--------+---------+     +--------+---------+     +------------------+
         |                        |
         |                        |
+--------v---------+     +--------v---------+
|                  |     |                  |
|    SQLite DB     |     |   File System    |
|  (aniworld.db)   |     |  (data/*.json)   |
+------------------+     +------------------+
```

Source: [src/server/fastapi_app.py](../src/server/fastapi_app.py#L1-L252)

---

## 2. Architectural Layers

### 2.1 CLI Layer (`src/cli/`)

Legacy command-line interface for direct interaction with the core layer.

| Component | File | Purpose |
| --------- | ----------------------------- | --------------- |
| Main | [Main.py](../src/cli/Main.py) | CLI entry point |

### 2.2 Server Layer (`src/server/`)

FastAPI-based REST API and WebSocket server.

```
src/server/
+-- fastapi_app.py            # Application entry point, lifespan management
+-- api/                      # API route handlers
|   +-- anime.py              # /api/anime/* endpoints
|   +-- auth.py               # /api/auth/* endpoints
|   +-- config.py             # /api/config/* endpoints
|   +-- download.py           # /api/queue/* endpoints
|   +-- scheduler.py          # /api/scheduler/* endpoints
|   +-- websocket.py          # /ws/* WebSocket handlers
|   +-- health.py             # /health/* endpoints
+-- controllers/              # Page controllers for HTML rendering
|   +-- page_controller.py    # UI page routes
|   +-- health_controller.py  # Health check route
|   +-- error_controller.py   # Error pages (404, 500)
+-- services/                 # Business logic
|   +-- anime_service.py      # Anime operations
|   +-- auth_service.py       # Authentication
|   +-- config_service.py     # Configuration management
|   +-- download_service.py   # Download queue management
|   +-- progress_service.py   # Progress tracking
|   +-- websocket_service.py  # WebSocket broadcasting
|   +-- queue_repository.py   # Database persistence
+-- models/                   # Pydantic models
|   +-- auth.py               # Auth request/response models
|   +-- config.py             # Configuration models
|   +-- download.py           # Download queue models
|   +-- websocket.py          # WebSocket message models
+-- middleware/               # Request processing
|   +-- auth.py               # JWT validation, rate limiting
|   +-- error_handler.py      # Exception handlers
|   +-- setup_redirect.py     # Setup flow redirect
+-- database/                 # SQLAlchemy ORM
|   +-- connection.py         # Database connection
|   +-- models.py             # ORM models
|   +-- service.py            # Database service
+-- utils/                    # Utility modules
|   +-- filesystem.py         # Folder sanitization, path safety
|   +-- validators.py         # Input validation utilities
|   +-- dependencies.py       # FastAPI dependency injection
+-- web/                      # Static files and templates
    +-- static/               # CSS, JS, images
    +-- templates/            # Jinja2 templates
```

Source: [src/server/](../src/server/)

### 2.2.1 Frontend Architecture (`src/server/web/static/`)

The frontend uses a modular architecture with no build step required. CSS and JavaScript files are organized by responsibility.

#### CSS Structure

```
src/server/web/static/css/
+-- styles.css              # Entry point with @import statements
+-- base/
|   +-- variables.css       # CSS custom properties (colors, fonts, spacing)
|   +-- reset.css           # CSS reset and normalize styles
|   +-- typography.css      # Font styles, headings, text utilities
+-- components/
|   +-- buttons.css         # All button styles
|   +-- cards.css           # Card and panel components
|   +-- forms.css           # Form inputs, labels, validation styles
|   +-- modals.css          # Modal and overlay styles
|   +-- navigation.css      # Header, nav, sidebar styles
|   +-- progress.css        # Progress bars, loading indicators
|   +-- notifications.css   # Toast, alerts, messages
|   +-- tables.css          # Table and list styles
|   +-- status.css          # Status badges and indicators
+-- pages/
|   +-- login.css           # Login page specific styles
|   +-- index.css           # Index/library page specific styles
|   +-- queue.css           # Queue page specific styles
+-- utilities/
    +-- animations.css      # Keyframes and animation classes
    +-- responsive.css      # Media queries and breakpoints
    +-- helpers.css         # Utility classes (hidden, flex, spacing)
```

#### JavaScript Structure

JavaScript uses the IIFE pattern with a shared `AniWorld` namespace for browser compatibility without build tools.

```
src/server/web/static/js/
+-- shared/                      # Shared utilities used by all pages
|   +-- constants.js             # API endpoints, localStorage keys, defaults
|   +-- auth.js                  # Token management (getToken, setToken, checkAuth)
|   +-- api-client.js            # Fetch wrapper with auto-auth headers
|   +-- theme.js                 # Dark/light theme toggle
|   +-- ui-utils.js              # Toast notifications, format helpers
|   +-- websocket-client.js      # Socket.IO wrapper
+-- index/                       # Index page modules
|   +-- series-manager.js        # Series list rendering and filtering
|   +-- selection-manager.js     # Multi-select and bulk download
|   +-- search.js                # Series search functionality
|   +-- scan-manager.js          # Library rescan operations
|   +-- scheduler-config.js      # Scheduler configuration
|   +-- logging-config.js        # Logging configuration
|   +-- advanced-config.js       # Advanced settings
|   +-- main-config.js           # Main configuration and backup
|   +-- config-manager.js        # Config modal orchestrator
|   +-- socket-handler.js        # WebSocket event handlers
|   +-- app-init.js              # Application initialization
+-- queue/                       # Queue page modules
    +-- queue-api.js             # Queue API interactions
    +-- queue-renderer.js        # Queue list rendering
    +-- progress-handler.js      # Download progress updates
    +-- queue-socket-handler.js  # WebSocket events for queue
    +-- queue-init.js            # Queue page initialization
```

#### Module Pattern

All JavaScript modules follow the IIFE pattern with a namespace:

```javascript
var AniWorld = window.AniWorld || {};

AniWorld.ModuleName = (function () {
  "use strict";

  // Private variables and functions

  // Public API
  return {
    init: init,
    publicMethod: publicMethod,
  };
})();
```

Source: [src/server/web/static/](../src/server/web/static/)

### 2.3 Core Layer (`src/core/`)

Domain logic for anime series management.

```
src/core/
+-- SeriesApp.py             # Main application facade
+-- SerieScanner.py          # Directory scanning, targeted single-series scan
+-- entities/                # Domain entities
|   +-- series.py            # Serie class with sanitized_folder property
|   +-- SerieList.py         # SerieList collection with sanitized folder support
+-- providers/               # External provider adapters
|   +-- base_provider.py     # Loader interface
|   +-- provider_factory.py  # Provider registry
+-- interfaces/              # Abstract interfaces
|   +-- callbacks.py         # Progress callback system
+-- exceptions/              # Domain exceptions
    +-- Exceptions.py        # Custom exceptions
```

**Key Components:**

| Component | Purpose |
| -------------- | -------------------------------------------------------------------------- |
| `SeriesApp` | Main application facade for anime operations |
| `SerieScanner` | Scans directories for anime; `scan_single_series()` for targeted scans |
| `Serie` | Domain entity with `sanitized_folder` property for filesystem-safe names |
| `SerieList` | Collection management with automatic folder creation using sanitized names |

Source: [src/core/](../src/core/)

### 2.4 Infrastructure Layer (`src/infrastructure/`)

Cross-cutting concerns.

```
src/infrastructure/
+-- logging/   # Structured logging setup
+-- security/  # Security utilities
```

### 2.5 Configuration Layer (`src/config/`)

Application settings management.

| Component | File | Purpose |
| --------- | ---------------------------------------- | ------------------------------- |
| Settings | [settings.py](../src/config/settings.py) | Environment-based configuration |

Source: [src/config/settings.py](../src/config/settings.py#L1-L96)

---

## 11. Graceful Shutdown

The application implements a comprehensive graceful shutdown mechanism that ensures data integrity and proper cleanup when the server is stopped via Ctrl+C (SIGINT) or SIGTERM.

### 11.1 Shutdown Sequence

```
1. SIGINT/SIGTERM received
   +-- Uvicorn catches signal
   +-- Stops accepting new requests

2. FastAPI lifespan shutdown triggered
   +-- 30 second total timeout

3. WebSocket shutdown (5s timeout)
   +-- Broadcast {"type": "server_shutdown"} to all clients
   +-- Close each connection with code 1001 (Going Away)
   +-- Clear connection tracking data

4. Download service stop (10s timeout)
   +-- Set shutdown flag
   +-- Persist active download as "pending" in database
   +-- Cancel active download task
   +-- Shutdown ThreadPoolExecutor with wait

5. Progress service cleanup
   +-- Clear event subscribers
   +-- Clear active progress tracking

6. Database cleanup (10s timeout)
   +-- SQLite: Run PRAGMA wal_checkpoint(TRUNCATE)
   +-- Dispose async engine
   +-- Dispose sync engine

7. Process exits cleanly
```

Source: [src/server/fastapi_app.py](../src/server/fastapi_app.py#L142-L210)

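A minimal sketch of how such a sequence can be wired into a FastAPI lifespan handler is shown below. This is illustrative only: the service classes are stand-ins for the components named in section 11.2, and the real implementation lives in [src/server/fastapi_app.py](../src/server/fastapi_app.py).

```python
# Illustrative sketch of the shutdown ordering described above; the real
# implementation lives in src/server/fastapi_app.py. The service classes
# here are stand-ins with the shutdown methods named in section 11.2.
import asyncio
from contextlib import asynccontextmanager

from fastapi import FastAPI


class WebSocketService:
    async def shutdown(self) -> None:
        ...  # broadcast server_shutdown, close connections with code 1001


class DownloadService:
    async def stop(self) -> None:
        ...  # persist active download as "pending", cancel task, stop executor


async def close_db() -> None:
    ...  # wal_checkpoint(TRUNCATE), dispose engines


ws_service = WebSocketService()
download_service = DownloadService()


@asynccontextmanager
async def lifespan(app: FastAPI):
    yield  # startup above, shutdown below
    # Each step gets its own timeout so one hung component cannot
    # consume the whole 30-second graceful-shutdown budget.
    for step, timeout in ((ws_service.shutdown(), 5.0),
                          (download_service.stop(), 10.0),
                          (close_db(), 10.0)):
        try:
            await asyncio.wait_for(step, timeout)
        except asyncio.TimeoutError:
            pass  # log and continue; the process must still exit


app = FastAPI(lifespan=lifespan)
```
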
### 11.2 Key Components

| Component | File | Shutdown Method |
| ------------------- | ------------------------------------------------------------------- | ------------------------------ |
| WebSocket Service | [websocket_service.py](../src/server/services/websocket_service.py) | `shutdown(timeout=5.0)` |
| Download Service | [download_service.py](../src/server/services/download_service.py) | `stop(timeout=10.0)` |
| Database Connection | [connection.py](../src/server/database/connection.py) | `close_db()` |
| Uvicorn Config | [run_server.py](../run_server.py) | `timeout_graceful_shutdown=30` |
| Stop Script | [stop_server.sh](../stop_server.sh) | SIGTERM with fallback |

### 11.3 Data Integrity Guarantees

1. **Active downloads preserved**: In-progress downloads are saved as "pending" and can resume on restart.

2. **Database WAL flushed**: A SQLite WAL checkpoint ensures all writes are in the main database file.

3. **WebSocket clients notified**: Clients receive a shutdown message before the connection closes.

4. **Thread pool cleanup**: Background threads complete or are gracefully cancelled.

### 11.4 Manual Stop

```bash
# Graceful stop via script (sends SIGTERM, waits up to 30s)
./stop_server.sh

# Or press Ctrl+C in the terminal running the server
```

Source: [stop_server.sh](../stop_server.sh#L1-L80)

---

## 3. Component Interactions

### 3.1 Request Flow (REST API)

```
1. Client sends HTTP request
2. AuthMiddleware validates JWT token (if required)
3. Rate limiter checks request frequency
4. FastAPI router dispatches to endpoint handler
5. Endpoint calls service layer
6. Service layer uses core layer or database
7. Response returned as JSON
```

Source: [src/server/middleware/auth.py](../src/server/middleware/auth.py#L1-L209)

### 3.2 Download Flow

```
1. POST /api/queue/add
   +-- DownloadService.add_to_queue()
   +-- QueueRepository.save_item() -> SQLite

2. POST /api/queue/start
   +-- DownloadService.start_queue_processing()
   +-- Process pending items sequentially
   +-- ProgressService emits events
   +-- WebSocketService broadcasts to clients

3. During download:
   +-- ProgressService.emit("progress_updated")
   +-- WebSocketService.broadcast_to_room()
   +-- Client receives WebSocket message
```

Source: [src/server/services/download_service.py](../src/server/services/download_service.py#L1-L150)

### 3.3 WebSocket Event Flow

```
1. Client connects to /ws/connect
2. Server sends "connected" message
3. Client joins room: {"action": "join", "data": {"room": "downloads"}}
4. ProgressService emits events
5. WebSocketService broadcasts to room subscribers
6. Client receives real-time updates
```

Source: [src/server/api/websocket.py](../src/server/api/websocket.py#L1-L260)

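A minimal client-side sketch of this flow, assuming the third-party `websockets` package and the join-message shape shown in step 3 (any token handshake the server requires is omitted here):

```python
# Minimal sketch of the client side of the flow above, using the
# third-party "websockets" package. The join payload mirrors step 3;
# any authentication handshake required by the server is omitted.
import asyncio
import json

import websockets


async def watch_downloads() -> None:
    async with websockets.connect("ws://127.0.0.1:8000/ws/connect") as ws:
        print(await ws.recv())  # step 2: "connected" message

        # Step 3: subscribe to the downloads room.
        await ws.send(json.dumps({"action": "join",
                                  "data": {"room": "downloads"}}))

        # Steps 4-6: receive broadcast progress events.
        async for message in ws:
            print(json.loads(message))


asyncio.run(watch_downloads())
```
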
---

## 4. Design Patterns

### 4.1 Repository Pattern

Database access is abstracted through repository classes.

```python
# QueueRepository provides CRUD for download items
class QueueRepository:
    async def save_item(self, item: DownloadItem) -> None: ...
    async def get_all_items(self) -> List[DownloadItem]: ...
    async def delete_item(self, item_id: str) -> bool: ...
```

Source: [src/server/services/queue_repository.py](../src/server/services/queue_repository.py)

### 4.2 Dependency Injection

FastAPI's `Depends()` provides constructor injection.

```python
@router.get("/status")
async def get_status(
    download_service: DownloadService = Depends(get_download_service),
):
    ...
```

Source: [src/server/utils/dependencies.py](../src/server/utils/dependencies.py)

### 4.3 Event-Driven Architecture

Progress updates use an event subscription model.

```python
# ProgressService publishes events
progress_service.emit("progress_updated", event)

# WebSocketService subscribes
progress_service.subscribe("progress_updated", ws_handler)
```

Source: [src/server/fastapi_app.py](../src/server/fastapi_app.py#L98-L108)

### 4.4 Singleton Pattern

Services use module-level singletons for shared state.

```python
# In download_service.py
_download_service_instance: Optional[DownloadService] = None

def get_download_service() -> DownloadService:
    global _download_service_instance
    if _download_service_instance is None:
        _download_service_instance = DownloadService(...)
    return _download_service_instance
```

Source: [src/server/services/download_service.py](../src/server/services/download_service.py)

---

## 5. Data Flow

### 5.1 Series Identifier Convention

The system uses two identifier fields:

| Field | Type | Purpose | Example |
| -------- | -------- | -------------------------------------- | -------------------------- |
| `key` | Primary | Provider-assigned, URL-safe identifier | `"attack-on-titan"` |
| `folder` | Metadata | Filesystem folder name (display only) | `"Attack on Titan (2013)"` |

All API operations use `key`. The `folder` is for filesystem operations only.

Source: [src/server/database/models.py](../src/server/database/models.py#L26-L50)

### 5.2 Database Schema

```
+----------------+        +----------------+        +--------------------+
| anime_series   |        | episodes       |        | download_queue_item|
+----------------+        +----------------+        +--------------------+
| id (PK)        |<--+    | id (PK)        |    +-->| id (PK)            |
| key (unique)   |   |    | series_id (FK) |----+   | series_id (FK)     |
| name           |   +----| season         |        | status             |
| site           |        | episode_number |        | priority           |
| folder         |        | title          |        | progress_percent   |
| created_at     |        | is_downloaded  |        | added_at           |
| updated_at     |        | file_path      |        | started_at         |
+----------------+        +----------------+        +--------------------+
```

Source: [src/server/database/models.py](../src/server/database/models.py#L1-L200)

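A condensed sketch of how these tables might look as SQLAlchemy 2.0 ORM models (column subset only, and purely illustrative; the authoritative definitions are in [src/server/database/models.py](../src/server/database/models.py)):

```python
# Condensed, illustrative subset of the schema above; see
# src/server/database/models.py for the real definitions.
from datetime import datetime

from sqlalchemy import ForeignKey, String
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


class Base(DeclarativeBase):
    pass


class AnimeSeries(Base):
    __tablename__ = "anime_series"

    id: Mapped[int] = mapped_column(primary_key=True)
    key: Mapped[str] = mapped_column(String, unique=True)  # provider key
    name: Mapped[str]
    folder: Mapped[str]  # display-only filesystem folder name


class Episode(Base):
    __tablename__ = "episodes"

    id: Mapped[int] = mapped_column(primary_key=True)
    series_id: Mapped[int] = mapped_column(ForeignKey("anime_series.id"))
    season: Mapped[int]
    episode_number: Mapped[int]
    is_downloaded: Mapped[bool] = mapped_column(default=False)


class DownloadQueueItem(Base):
    __tablename__ = "download_queue_item"

    id: Mapped[str] = mapped_column(primary_key=True)  # UUID string
    series_id: Mapped[int] = mapped_column(ForeignKey("anime_series.id"))
    status: Mapped[str] = mapped_column(default="pending")
    priority: Mapped[str] = mapped_column(default="NORMAL")
    added_at: Mapped[datetime] = mapped_column(default=datetime.utcnow)
```
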
### 5.3 Configuration Storage

Configuration is stored in `data/config.json`:

```json
{
  "name": "Aniworld",
  "data_dir": "data",
  "scheduler": { "enabled": true, "interval_minutes": 60 },
  "logging": { "level": "INFO" },
  "backup": { "enabled": false, "path": "data/backups" },
  "other": {
    "master_password_hash": "$pbkdf2-sha256$...",
    "anime_directory": "/path/to/anime"
  }
}
```

Source: [data/config.json](../data/config.json)

---

## 6. Technology Stack

| Layer | Technology | Version | Purpose |
| ------------- | ------------------- | ------- | ------------------------ |
| Web Framework | FastAPI | 0.104.1 | REST API, WebSocket |
| ASGI Server | Uvicorn | 0.24.0 | HTTP server |
| Database | SQLite + SQLAlchemy | 2.0.35 | Persistence |
| Auth | python-jose | 3.3.0 | JWT tokens |
| Password | passlib | 1.7.4 | pbkdf2_sha256 hashing |
| Validation | Pydantic | 2.5.0 | Data models |
| Templates | Jinja2 | 3.1.2 | HTML rendering |
| Logging | structlog | 24.1.0 | Structured logging |
| Testing | pytest | 7.4.3 | Unit/integration tests |

Source: [requirements.txt](../requirements.txt)

---

## 7. Scalability Considerations

### Current Limitations

1. **Single-process deployment**: In-memory rate limiting and session state are not shared across processes.

2. **SQLite database**: Not suitable for high concurrency. Consider PostgreSQL for production.

3. **Sequential downloads**: Only one download is processed at a time, by design.

### Recommended Improvements for Scale

| Concern | Current | Recommended |
| -------------- | --------------- | ----------------- |
| Rate limiting | In-memory dict | Redis |
| Session store | In-memory | Redis or database |
| Database | SQLite | PostgreSQL |
| Task queue | In-memory deque | Celery + Redis |
| Load balancing | None | Nginx/HAProxy |

---

## 8. Integration Points

### 8.1 External Providers

The system integrates with anime streaming providers via the Loader interface.

```python
class Loader(ABC):
    @abstractmethod
    def search(self, query: str) -> List[Serie]: ...

    @abstractmethod
    def get_episodes(self, serie: Serie) -> Dict[int, List[int]]: ...
```

Source: [src/core/providers/base_provider.py](../src/core/providers/base_provider.py)

### 8.2 Filesystem Integration

The scanner reads anime directories to detect downloaded episodes.

```python
SerieScanner(
    basePath="/path/to/anime",  # Anime library directory
    loader=provider,            # Provider for metadata
    db_session=session          # Optional database session
)
```

Source: [src/core/SerieScanner.py](../src/core/SerieScanner.py#L59-L96)

---

## 9. Security Architecture

### 9.1 Authentication Flow

```
1. User sets master password via POST /api/auth/setup
2. Password hashed with pbkdf2_sha256 (via passlib)
3. Hash stored in config.json
4. Login validates password, returns JWT token
5. JWT contains: session_id, user, created_at, expires_at
6. Subsequent requests include: Authorization: Bearer <token>
```

Source: [src/server/services/auth_service.py](../src/server/services/auth_service.py#L1-L150)

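In code, steps 2, 4, and 5 boil down to a passlib hash plus a signed JWT. The sketch below is a simplified illustration using passlib and python-jose (as listed in section 6), not the actual AuthService; the `user` claim value and the 24-hour lifetime are assumptions.

```python
# Simplified illustration of steps 2, 4, and 5; not the actual AuthService.
# Uses passlib (pbkdf2_sha256) and python-jose as listed in section 6.
import uuid
from datetime import datetime, timedelta, timezone

from jose import jwt
from passlib.hash import pbkdf2_sha256

JWT_SECRET_KEY = "change-me"  # from the JWT_SECRET_KEY environment variable

# Step 2: hash the master password for storage in config.json.
password_hash = pbkdf2_sha256.hash("Sup3r-Secret!")

# Step 4: verify a login attempt against the stored hash.
assert pbkdf2_sha256.verify("Sup3r-Secret!", password_hash)

# Step 5: issue a JWT with the claims described above.
now = datetime.now(timezone.utc)
token = jwt.encode(
    {
        "session_id": str(uuid.uuid4()),
        "user": "master",  # claim value is an assumption
        "created_at": now.isoformat(),
        "expires_at": (now + timedelta(hours=24)).isoformat(),  # illustrative
    },
    JWT_SECRET_KEY,
    algorithm="HS256",
)
```
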
### 9.2 Password Requirements

- Minimum 8 characters
- Mixed case (upper and lower)
- At least one number
- At least one special character

Source: [src/server/services/auth_service.py](../src/server/services/auth_service.py#L97-L125)

### 9.3 Rate Limiting

| Endpoint | Limit | Window |
| ----------------- | ----------- | ---------- |
| `/api/auth/login` | 5 requests | 60 seconds |
| `/api/auth/setup` | 5 requests | 60 seconds |
| All origins | 60 requests | 60 seconds |

Source: [src/server/middleware/auth.py](../src/server/middleware/auth.py#L54-L68)

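Since the limiter is an in-memory structure (see the scalability notes in section 7), a minimal sliding-window sketch conveys the idea; this is illustrative only, not the project's actual middleware.

```python
# Minimal in-memory sliding-window rate limiter sketch; illustrative only,
# not the project's actual middleware (src/server/middleware/auth.py).
import time
from collections import defaultdict, deque


class SlidingWindowLimiter:
    def __init__(self, limit: int, window_seconds: float) -> None:
        self.limit = limit
        self.window = window_seconds
        self.hits: dict[str, deque[float]] = defaultdict(deque)

    def allow(self, client_key: str) -> bool:
        now = time.monotonic()
        window = self.hits[client_key]
        # Drop timestamps that have fallen out of the window.
        while window and now - window[0] > self.window:
            window.popleft()
        if len(window) >= self.limit:
            return False
        window.append(now)
        return True


login_limiter = SlidingWindowLimiter(limit=5, window_seconds=60)
print(all(login_limiter.allow("10.0.0.1") for _ in range(5)))  # True
print(login_limiter.allow("10.0.0.1"))                         # False (6th hit)
```
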
---

## 10. Deployment Modes

### 10.1 Development

```bash
# Run with hot reload
python -m uvicorn src.server.fastapi_app:app --reload
```

### 10.2 Production

```bash
# Via conda environment
conda run -n AniWorld python -m uvicorn src.server.fastapi_app:app \
  --host 127.0.0.1 --port 8000
```

### 10.3 Configuration

Environment variables (via `.env` or shell):

| Variable | Default | Description |
| ----------------- | ------------------------------ | ---------------------- |
| `JWT_SECRET_KEY` | Random | Secret for JWT signing |
| `DATABASE_URL` | `sqlite:///./data/aniworld.db` | Database connection |
| `ANIME_DIRECTORY` | (empty) | Path to anime library |
| `LOG_LEVEL` | `INFO` | Logging level |
| `CORS_ORIGINS` | `localhost:3000,8000` | Allowed CORS origins |

Source: [src/config/settings.py](../src/config/settings.py#L1-L96)

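For illustration, environment-based settings of this kind are often wired up with a small settings object. The sketch below is hypothetical and only mirrors the defaults in the table above; the actual implementation is in [src/config/settings.py](../src/config/settings.py).

```python
# Illustrative environment-based settings loader; the project's actual
# implementation lives in src/config/settings.py and may differ.
import os
import secrets
from dataclasses import dataclass, field


@dataclass(frozen=True)
class Settings:
    jwt_secret_key: str = field(
        default_factory=lambda: os.getenv("JWT_SECRET_KEY",
                                          secrets.token_hex(32)))
    database_url: str = field(
        default_factory=lambda: os.getenv("DATABASE_URL",
                                          "sqlite:///./data/aniworld.db"))
    anime_directory: str = field(
        default_factory=lambda: os.getenv("ANIME_DIRECTORY", ""))
    log_level: str = field(
        default_factory=lambda: os.getenv("LOG_LEVEL", "INFO"))


settings = Settings()
```
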
105
docs/CHANGELOG.md
Normal file
@ -0,0 +1,105 @@

# Changelog

## Document Purpose

This document tracks all notable changes to the Aniworld project.

### What This Document Contains

- **Version History**: All released versions with dates
- **Added Features**: New functionality in each release
- **Changed Features**: Modifications to existing features
- **Deprecated Features**: Features marked for removal
- **Removed Features**: Features removed from the codebase
- **Fixed Bugs**: Bug fixes with issue references
- **Security Fixes**: Security-related changes
- **Breaking Changes**: Changes requiring user action

### What This Document Does NOT Contain

- Internal refactoring details (unless user-facing)
- Commit-level changes
- Work-in-progress features
- Roadmap or planned features

### Target Audience

- All users and stakeholders
- Operators planning upgrades
- Developers tracking changes
- Support personnel

---

## Format

This changelog follows [Keep a Changelog](https://keepachangelog.com/) principles and adheres to [Semantic Versioning](https://semver.org/).

## Sections for Each Release

```markdown
## [Version] - YYYY-MM-DD

### Added

- New features

### Changed

- Changes to existing functionality

### Deprecated

- Features that will be removed in future versions

### Removed

- Features removed in this release

### Fixed

- Bug fixes

### Security

- Security-related fixes
```

---

## Unreleased

_Changes that are in development but not yet released._

### Added

- **Enhanced Anime Add Flow**: Automatic database persistence, targeted episode scanning, and folder creation with sanitized names
- Filesystem utility module (`src/server/utils/filesystem.py`) with `sanitize_folder_name()`, `is_safe_path()`, and `create_safe_folder()` functions
- `Serie.sanitized_folder` property for generating filesystem-safe folder names from display names
- `SerieScanner.scan_single_series()` method for targeted scanning of individual anime without a full library rescan
- Add series API response now includes a `missing_episodes` list and `total_missing` count
- Database transaction support with the `@transactional` decorator and `atomic()` context manager
- Transaction propagation modes (REQUIRED, REQUIRES_NEW, NESTED) for fine-grained control
- Savepoint support for nested transactions with partial rollback capability
- `TransactionManager` helper class for manual transaction control
- Bulk operations: `bulk_mark_downloaded`, `bulk_delete`, `clear_all` for batch processing
- `rotate_session` atomic operation for secure session rotation
- Transaction utilities: `is_session_in_transaction`, `get_session_transaction_depth`
- `get_transactional_session` for sessions without auto-commit

### Changed

- `QueueRepository.save_item()` now uses atomic transactions for data consistency
- `QueueRepository.clear_all()` now uses atomic transactions for all-or-nothing behavior
- Service layer documentation updated to reflect the transaction-aware design

### Fixed

- Scan status indicator now correctly shows the running state after a page reload during an active scan
- Improved reliability of process status updates in the UI header

---

## Version History

_To be documented as versions are released._

298
docs/CONFIGURATION.md
Normal file
@ -0,0 +1,298 @@

# Configuration Reference

## Document Purpose

This document provides a comprehensive reference for all configuration options in the Aniworld application.

---

## 1. Configuration Overview

### Configuration Sources

Aniworld uses a layered configuration system:

1. **Environment Variables** (highest priority)
2. **`.env` file** in project root
3. **`data/config.json`** file
4. **Default values** (lowest priority)

### Loading Mechanism

Configuration is loaded at application startup via Pydantic Settings.

```python
# src/config/settings.py
class Settings(BaseSettings):
    model_config = SettingsConfigDict(env_file=".env", extra="ignore")
```

Source: [src/config/settings.py](../src/config/settings.py#L1-L96)

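A minimal sketch of how such a settings object is consumed, assuming the pydantic-settings package; the field names below are illustrative, and the real definitions live in the source file linked above.

```python
from pydantic_settings import BaseSettings, SettingsConfigDict


class Settings(BaseSettings):
    model_config = SettingsConfigDict(env_file=".env", extra="ignore")

    log_level: str = "INFO"    # overridden by the LOG_LEVEL env var
    anime_directory: str = ""  # overridden by the ANIME_DIRECTORY env var


settings = Settings()  # reads the environment and .env at instantiation
print(settings.log_level)
```
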
---

## 2. Environment Variables

### Authentication Settings

| Variable                | Type   | Default          | Description                                                         |
| ----------------------- | ------ | ---------------- | ------------------------------------------------------------------- |
| `JWT_SECRET_KEY`        | string | (random)         | Secret key for JWT token signing. Auto-generated if not set.        |
| `PASSWORD_SALT`         | string | `"default-salt"` | Salt for password hashing.                                          |
| `MASTER_PASSWORD_HASH`  | string | (none)           | Pre-hashed master password. Loaded from config.json if not set.     |
| `MASTER_PASSWORD`       | string | (none)           | **DEVELOPMENT ONLY** - Plaintext password. Never use in production. |
| `SESSION_TIMEOUT_HOURS` | int    | `24`             | JWT token expiry time in hours.                                     |

Source: [src/config/settings.py](../src/config/settings.py#L13-L42)

### Server Settings

| Variable          | Type   | Default                          | Description                                                           |
| ----------------- | ------ | -------------------------------- | ---------------------------------------------------------------------- |
| `ANIME_DIRECTORY` | string | `""`                             | Path to anime library directory.                                       |
| `LOG_LEVEL`       | string | `"INFO"`                         | Logging level: DEBUG, INFO, WARNING, ERROR, CRITICAL.                   |
| `DATABASE_URL`    | string | `"sqlite:///./data/aniworld.db"` | Database connection string.                                             |
| `CORS_ORIGINS`    | string | `"http://localhost:3000"`        | Comma-separated allowed CORS origins. Use `*` for localhost defaults.   |
| `API_RATE_LIMIT`  | int    | `100`                            | Maximum API requests per minute.                                        |

Source: [src/config/settings.py](../src/config/settings.py#L43-L68)

### Provider Settings

| Variable           | Type   | Default         | Description                                   |
| ------------------ | ------ | --------------- | --------------------------------------------- |
| `DEFAULT_PROVIDER` | string | `"aniworld.to"` | Default anime provider.                       |
| `PROVIDER_TIMEOUT` | int    | `30`            | HTTP timeout for provider requests (seconds). |
| `RETRY_ATTEMPTS`   | int    | `3`             | Number of retry attempts for failed requests. |

Source: [src/config/settings.py](../src/config/settings.py#L69-L79)

---

## 3. Configuration File (config.json)

Location: `data/config.json`

### File Structure

```json
{
  "name": "Aniworld",
  "data_dir": "data",
  "scheduler": {
    "enabled": true,
    "interval_minutes": 60
  },
  "logging": {
    "level": "INFO",
    "file": null,
    "max_bytes": null,
    "backup_count": 3
  },
  "backup": {
    "enabled": false,
    "path": "data/backups",
    "keep_days": 30
  },
  "other": {
    "master_password_hash": "$pbkdf2-sha256$...",
    "anime_directory": "/path/to/anime"
  },
  "version": "1.0.0"
}
```

Source: [data/config.json](../data/config.json)

---

## 4. Configuration Sections

### 4.1 General Settings

| Field      | Type   | Default      | Description                    |
| ---------- | ------ | ------------ | ------------------------------ |
| `name`     | string | `"Aniworld"` | Application name.              |
| `data_dir` | string | `"data"`     | Base directory for data files. |

Source: [src/server/models/config.py](../src/server/models/config.py#L62-L66)

### 4.2 Scheduler Settings

Controls automatic library rescanning.

| Field                        | Type | Default | Description                                  |
| ---------------------------- | ---- | ------- | -------------------------------------------- |
| `scheduler.enabled`          | bool | `true`  | Enable/disable automatic scans.              |
| `scheduler.interval_minutes` | int  | `60`    | Minutes between automatic scans. Minimum: 1. |

Source: [src/server/models/config.py](../src/server/models/config.py#L5-L12)

### 4.3 Logging Settings

| Field                  | Type   | Default  | Description                                       |
| ---------------------- | ------ | -------- | ------------------------------------------------- |
| `logging.level`        | string | `"INFO"` | Log level: DEBUG, INFO, WARNING, ERROR, CRITICAL. |
| `logging.file`         | string | `null`   | Optional log file path.                           |
| `logging.max_bytes`    | int    | `null`   | Maximum log file size for rotation.               |
| `logging.backup_count` | int    | `3`      | Number of rotated log files to keep.              |

Source: [src/server/models/config.py](../src/server/models/config.py#L27-L46)

### 4.4 Backup Settings

| Field              | Type   | Default          | Description                      |
| ------------------ | ------ | ---------------- | -------------------------------- |
| `backup.enabled`   | bool   | `false`          | Enable automatic config backups. |
| `backup.path`      | string | `"data/backups"` | Directory for backup files.      |
| `backup.keep_days` | int    | `30`             | Days to retain backups.          |

Source: [src/server/models/config.py](../src/server/models/config.py#L15-L24)

### 4.5 Other Settings (Dynamic)

The `other` field stores arbitrary settings.

| Key                    | Type   | Description                             |
| ---------------------- | ------ | --------------------------------------- |
| `master_password_hash` | string | Hashed master password (pbkdf2-sha256). |
| `anime_directory`      | string | Path to anime library.                  |
| `advanced`             | object | Advanced configuration options.         |

---

## 5. Configuration Precedence

Settings are resolved in this order (first match wins); see the example after this list:

1. Environment variable (e.g., `ANIME_DIRECTORY`)
2. `.env` file in project root
3. `data/config.json` (for dynamic settings)
4. Code defaults in `Settings` class

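For example, with `LOG_LEVEL=INFO` in `.env`, a shell-level variable still wins:

```bash
# The shell value (DEBUG) takes precedence over .env and config.json
LOG_LEVEL=DEBUG python -m uvicorn src.server.fastapi_app:app
```
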
---

## 6. Validation Rules

### Password Requirements

The master password must meet all of the following criteria:

- Minimum 8 characters
- At least one uppercase letter
- At least one lowercase letter
- At least one digit
- At least one special character

Source: [src/server/services/auth_service.py](../src/server/services/auth_service.py#L97-L125)

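For illustration, the documented rules can be expressed as a small predicate. This is a sketch, not the project's validator; the real check lives in the auth_service.py source linked above.

```python
import re


def meets_password_rules(password: str) -> bool:
    """Check the documented master-password criteria."""
    return (
        len(password) >= 8
        and re.search(r"[A-Z]", password) is not None
        and re.search(r"[a-z]", password) is not None
        and re.search(r"\d", password) is not None
        and re.search(r"[^A-Za-z0-9]", password) is not None
    )
```
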
### Logging Level Validation

Must be one of: `DEBUG`, `INFO`, `WARNING`, `ERROR`, `CRITICAL`

Source: [src/server/models/config.py](../src/server/models/config.py#L43-L47)

### Backup Path Validation

If `backup.enabled` is `true`, `backup.path` must be set.

Source: [src/server/models/config.py](../src/server/models/config.py#L87-L91)

---

## 7. Example Configurations

### Minimal Development Setup

**.env file:**

```
LOG_LEVEL=DEBUG
ANIME_DIRECTORY=/home/user/anime
```

### Production Setup

**.env file:**

```
JWT_SECRET_KEY=your-secure-random-key-here
DATABASE_URL=postgresql+asyncpg://user:pass@localhost/aniworld
LOG_LEVEL=WARNING
CORS_ORIGINS=https://your-domain.com
API_RATE_LIMIT=60
```

### Docker Setup

```yaml
# docker-compose.yml
environment:
  - JWT_SECRET_KEY=${JWT_SECRET_KEY}
  - DATABASE_URL=sqlite:///./data/aniworld.db
  - ANIME_DIRECTORY=/media/anime
  - LOG_LEVEL=INFO
volumes:
  - ./data:/app/data
  - /media/anime:/media/anime:ro
```

---

## 8. Configuration Backup Management

### Automatic Backups

Backups are created automatically before config changes when `backup.enabled` is `true`.

Location: `data/config_backups/`

Naming: `config_backup_YYYYMMDD_HHMMSS.json`

### Manual Backup via API

```bash
# Create backup
curl -X POST http://localhost:8000/api/config/backups \
  -H "Authorization: Bearer $TOKEN"

# List backups
curl http://localhost:8000/api/config/backups \
  -H "Authorization: Bearer $TOKEN"

# Restore backup
curl -X POST http://localhost:8000/api/config/backups/config_backup_20251213.json/restore \
  -H "Authorization: Bearer $TOKEN"
```

Source: [src/server/api/config.py](../src/server/api/config.py#L67-L142)

---

## 9. Troubleshooting

### Configuration Not Loading

1. Check file permissions on `data/config.json`
2. Verify JSON syntax with a validator (see the one-liner below)
3. Check logs for Pydantic validation errors

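For step 2, Python's standard library is enough to validate the syntax:

```bash
# Prints a syntax error with line/column on failure
python -m json.tool data/config.json > /dev/null && echo "config.json is valid"
```
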
### Environment Variable Not Working

1. Ensure the variable name matches exactly (case-sensitive)
2. Check the `.env` file location (project root)
3. Restart the application after changes

### Master Password Issues

1. The password hash is stored in `config.json` under `other.master_password_hash`
2. Delete this field to reset (requires re-setup)
3. Check that the hash format starts with `$pbkdf2-sha256$`

---

## 10. Related Documentation

- [API.md](API.md) - Configuration API endpoints
- [DEVELOPMENT.md](DEVELOPMENT.md) - Development environment setup
- [ARCHITECTURE.md](ARCHITECTURE.md) - Configuration service architecture

421
docs/DATABASE.md
Normal file
@ -0,0 +1,421 @@

# Database Documentation

## Document Purpose

This document describes the database schema, models, and data layer of the Aniworld application.

---

## 1. Database Overview

### Technology

- **Database Engine**: SQLite 3 (default), PostgreSQL supported
- **ORM**: SQLAlchemy 2.0 with async support (aiosqlite)
- **Location**: `data/aniworld.db` (configurable via `DATABASE_URL`)

Source: [src/config/settings.py](../src/config/settings.py#L53-L55)

### Connection Configuration

```python
# Default connection string
DATABASE_URL = "sqlite+aiosqlite:///./data/aniworld.db"

# PostgreSQL alternative
DATABASE_URL = "postgresql+asyncpg://user:pass@localhost/aniworld"
```

Source: [src/server/database/connection.py](../src/server/database/connection.py)

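For reference, the async engine and session factory for such a URL are typically built like this with SQLAlchemy 2.0; this is a sketch of the standard pattern, not a copy of the project's connection module.

```python
from sqlalchemy.ext.asyncio import async_sessionmaker, create_async_engine

engine = create_async_engine("sqlite+aiosqlite:///./data/aniworld.db")
SessionLocal = async_sessionmaker(engine, expire_on_commit=False)

async def example() -> None:
    async with SessionLocal() as session:
        ...  # issue queries via the AsyncSession
```
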
---

## 2. Entity Relationship Diagram

```
+-------------------+        +-------------------+        +------------------------+
| anime_series      |        | episodes          |        | download_queue_item    |
+-------------------+        +-------------------+        +------------------------+
| id (PK)           |<--+    | id (PK)           |    +-->| id (PK, VARCHAR)       |
| key (UNIQUE)      |   |    | series_id (FK)----+----+   | series_id (FK)---------+
| name              |   +----|                   |        | status                 |
| site              |        | season            |        | priority               |
| folder            |        | episode_number    |        | season                 |
| created_at        |        | title             |        | episode                |
| updated_at        |        | file_path         |        | progress_percent       |
+-------------------+        | is_downloaded     |        | error_message          |
                             | created_at        |        | retry_count            |
                             | updated_at        |        | added_at               |
                             +-------------------+        | started_at             |
                                                          | completed_at           |
                                                          | created_at             |
                                                          | updated_at             |
                                                          +------------------------+
```

---

## 3. Table Schemas

### 3.1 anime_series

Stores anime series metadata.

| Column       | Type          | Constraints                | Description                                             |
| ------------ | ------------- | -------------------------- | ------------------------------------------------------- |
| `id`         | INTEGER       | PRIMARY KEY, AUTOINCREMENT | Internal database ID                                    |
| `key`        | VARCHAR(255)  | UNIQUE, NOT NULL, INDEX    | **Primary identifier** - provider-assigned URL-safe key |
| `name`       | VARCHAR(500)  | NOT NULL, INDEX            | Display name of the series                              |
| `site`       | VARCHAR(500)  | NOT NULL                   | Provider site URL                                       |
| `folder`     | VARCHAR(1000) | NOT NULL                   | Filesystem folder name (metadata only)                  |
| `created_at` | DATETIME      | NOT NULL, DEFAULT NOW      | Record creation timestamp                               |
| `updated_at` | DATETIME      | NOT NULL, ON UPDATE NOW    | Last update timestamp                                   |

**Identifier Convention:**

- `key` is the **primary identifier** for all operations (e.g., `"attack-on-titan"`)
- `folder` is **metadata only** for filesystem operations (e.g., `"Attack on Titan (2013)"`)
- `id` is used only for database relationships

Source: [src/server/database/models.py](../src/server/database/models.py#L23-L87)

### 3.2 episodes

Stores individual episode information.

| Column           | Type          | Constraints                  | Description                   |
| ---------------- | ------------- | ---------------------------- | ----------------------------- |
| `id`             | INTEGER       | PRIMARY KEY, AUTOINCREMENT   | Internal database ID          |
| `series_id`      | INTEGER       | FOREIGN KEY, NOT NULL, INDEX | Reference to anime_series.id  |
| `season`         | INTEGER       | NOT NULL                     | Season number (1-based)       |
| `episode_number` | INTEGER       | NOT NULL                     | Episode number within season  |
| `title`          | VARCHAR(500)  | NULLABLE                     | Episode title if known        |
| `file_path`      | VARCHAR(1000) | NULLABLE                     | Local file path if downloaded |
| `is_downloaded`  | BOOLEAN       | NOT NULL, DEFAULT FALSE      | Download status flag          |
| `created_at`     | DATETIME      | NOT NULL, DEFAULT NOW        | Record creation timestamp     |
| `updated_at`     | DATETIME      | NOT NULL, ON UPDATE NOW      | Last update timestamp         |

**Foreign Key:**

- `series_id` -> `anime_series.id` (ON DELETE CASCADE)

Source: [src/server/database/models.py](../src/server/database/models.py#L122-L181)

### 3.3 download_queue_item

Stores download queue items with status tracking.

| Column             | Type          | Constraints                 | Description                    |
| ------------------ | ------------- | --------------------------- | ------------------------------ |
| `id`               | VARCHAR(36)   | PRIMARY KEY                 | UUID identifier                |
| `series_id`        | INTEGER       | FOREIGN KEY, NOT NULL       | Reference to anime_series.id   |
| `season`           | INTEGER       | NOT NULL                    | Season number                  |
| `episode`          | INTEGER       | NOT NULL                    | Episode number                 |
| `status`           | VARCHAR(20)   | NOT NULL, DEFAULT 'pending' | Download status                |
| `priority`         | VARCHAR(10)   | NOT NULL, DEFAULT 'NORMAL'  | Queue priority                 |
| `progress_percent` | FLOAT         | NULLABLE                    | Download progress (0-100)      |
| `error_message`    | TEXT          | NULLABLE                    | Error description if failed    |
| `retry_count`      | INTEGER       | NOT NULL, DEFAULT 0         | Number of retry attempts       |
| `source_url`       | VARCHAR(2000) | NULLABLE                    | Download source URL            |
| `added_at`         | DATETIME      | NOT NULL, DEFAULT NOW       | When added to queue            |
| `started_at`       | DATETIME      | NULLABLE                    | When download started          |
| `completed_at`     | DATETIME      | NULLABLE                    | When download completed/failed |
| `created_at`       | DATETIME      | NOT NULL, DEFAULT NOW       | Record creation timestamp      |
| `updated_at`       | DATETIME      | NOT NULL, ON UPDATE NOW     | Last update timestamp          |

**Status Values:** `pending`, `downloading`, `paused`, `completed`, `failed`, `cancelled`

**Priority Values:** `LOW`, `NORMAL`, `HIGH`

**Foreign Key:**

- `series_id` -> `anime_series.id` (ON DELETE CASCADE)

Source: [src/server/database/models.py](../src/server/database/models.py#L200-L300)

---

## 4. Indexes

| Table                 | Index Name              | Columns     | Purpose                           |
| --------------------- | ----------------------- | ----------- | --------------------------------- |
| `anime_series`        | `ix_anime_series_key`   | `key`       | Fast lookup by primary identifier |
| `anime_series`        | `ix_anime_series_name`  | `name`      | Search by name                    |
| `episodes`            | `ix_episodes_series_id` | `series_id` | Join with series                  |
| `download_queue_item` | `ix_download_series_id` | `series_id` | Filter by series                  |
| `download_queue_item` | `ix_download_status`    | `status`    | Filter by status                  |

---

## 5. Model Layer

### 5.1 SQLAlchemy ORM Models

```python
# src/server/database/models.py

class AnimeSeries(Base, TimestampMixin):
    __tablename__ = "anime_series"

    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    key: Mapped[str] = mapped_column(String(255), unique=True, index=True)
    name: Mapped[str] = mapped_column(String(500), index=True)
    site: Mapped[str] = mapped_column(String(500))
    folder: Mapped[str] = mapped_column(String(1000))

    episodes: Mapped[List["Episode"]] = relationship(
        "Episode", back_populates="series", cascade="all, delete-orphan"
    )
```

Source: [src/server/database/models.py](../src/server/database/models.py#L23-L87)

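With this model, a lookup by the primary identifier follows the usual SQLAlchemy 2.0 async pattern; a sketch for illustration:

```python
from sqlalchemy import select

async def find_series(db, key: str):
    result = await db.execute(
        select(AnimeSeries).where(AnimeSeries.key == key)
    )
    return result.scalar_one_or_none()  # None if the key is unknown
```
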
### 5.2 Pydantic API Models

```python
# src/server/models/download.py

class DownloadItem(BaseModel):
    id: str
    serie_id: str      # Maps to anime_series.key
    serie_folder: str  # Metadata only
    serie_name: str
    episode: EpisodeIdentifier
    status: DownloadStatus
    priority: DownloadPriority
```

Source: [src/server/models/download.py](../src/server/models/download.py#L63-L118)

### 5.3 Model Mapping

| API Field      | Database Column       | Notes              |
| -------------- | --------------------- | ------------------ |
| `serie_id`     | `anime_series.key`    | Primary identifier |
| `serie_folder` | `anime_series.folder` | Metadata only      |
| `serie_name`   | `anime_series.name`   | Display name       |

---

## 6. Transaction Support

### 6.1 Overview

The database layer provides comprehensive transaction support to ensure data consistency across compound operations. All write operations can be wrapped in explicit transactions.

Source: [src/server/database/transaction.py](../src/server/database/transaction.py)

### 6.2 Transaction Utilities

| Component                 | Type              | Description                              |
| ------------------------- | ----------------- | ---------------------------------------- |
| `@transactional`          | Decorator         | Wraps function in transaction boundary   |
| `atomic()`                | Async context mgr | Provides atomic operation block          |
| `atomic_sync()`           | Sync context mgr  | Sync version of atomic()                 |
| `TransactionContext`      | Class             | Explicit sync transaction control        |
| `AsyncTransactionContext` | Class             | Explicit async transaction control       |
| `TransactionManager`      | Class             | Helper for manual transaction management |

### 6.3 Transaction Propagation Modes

| Mode           | Behavior                                         |
| -------------- | ------------------------------------------------ |
| `REQUIRED`     | Use existing transaction or create new (default) |
| `REQUIRES_NEW` | Always create new transaction                    |
| `NESTED`       | Create savepoint within existing transaction     |

### 6.4 Usage Examples

**Using @transactional decorator:**

```python
from src.server.database.transaction import transactional

@transactional()
async def compound_operation(db: AsyncSession, data: dict):
    # All operations commit together or rollback on error
    series = await AnimeSeriesService.create(db, ...)
    episode = await EpisodeService.create(db, series_id=series.id, ...)
    return series, episode
```

**Using atomic() context manager:**

```python
from src.server.database.transaction import atomic

async def some_function(db: AsyncSession):
    async with atomic(db) as tx:
        await operation1(db)
        await operation2(db)
        # Auto-commits on success, rolls back on exception
```

**Using savepoints for partial rollback:**

```python
async with atomic(db) as tx:
    await outer_operation(db)

    async with tx.savepoint() as sp:
        await risky_operation(db)
        if error_condition:
            await sp.rollback()  # Only rollback nested ops

    await final_operation(db)  # Still executes
```

Source: [src/server/database/transaction.py](../src/server/database/transaction.py)

### 6.5 Connection Module Additions

| Function                        | Description                                  |
| ------------------------------- | -------------------------------------------- |
| `get_transactional_session`     | Session without auto-commit for transactions |
| `TransactionManager`            | Helper class for manual transaction control  |
| `is_session_in_transaction`     | Check if session is in active transaction    |
| `get_session_transaction_depth` | Get nesting depth of transactions            |

Source: [src/server/database/connection.py](../src/server/database/connection.py)

---

## 7. Repository Pattern

The `QueueRepository` class provides data access abstraction.

```python
class QueueRepository:
    async def save_item(self, item: DownloadItem) -> None:
        """Save or update a download item (atomic operation)."""

    async def get_all_items(self) -> List[DownloadItem]:
        """Get all items from database."""

    async def delete_item(self, item_id: str) -> bool:
        """Delete item by ID."""

    async def clear_all(self) -> int:
        """Clear all items (atomic operation)."""
```

Note: Compound operations (`save_item`, `clear_all`) are wrapped in `atomic()` transactions.

Source: [src/server/services/queue_repository.py](../src/server/services/queue_repository.py)

---

## 8. Database Service

The `AnimeSeriesService` provides async CRUD operations.

```python
class AnimeSeriesService:
    @staticmethod
    async def create(
        db: AsyncSession,
        key: str,
        name: str,
        site: str,
        folder: str
    ) -> AnimeSeries:
        """Create a new anime series."""

    @staticmethod
    async def get_by_key(
        db: AsyncSession,
        key: str
    ) -> Optional[AnimeSeries]:
        """Get series by primary key identifier."""
```

### Bulk Operations

Services provide bulk operations for transaction-safe batch processing:

| Service                | Method                 | Description                    |
| ---------------------- | ---------------------- | ------------------------------ |
| `EpisodeService`       | `bulk_mark_downloaded` | Mark multiple episodes at once |
| `DownloadQueueService` | `bulk_delete`          | Delete multiple queue items    |
| `DownloadQueueService` | `clear_all`            | Clear entire queue             |
| `UserSessionService`   | `rotate_session`       | Revoke old + create new atomic |
| `UserSessionService`   | `cleanup_expired`      | Bulk delete expired sessions   |

Source: [src/server/database/service.py](../src/server/database/service.py)

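Combined with `atomic()`, these helpers keep batch work all-or-nothing. The call shapes below are illustrative only (parameter names are assumptions); the exact signatures live in `src/server/database/service.py`.

```python
async with atomic(db):
    # Either both bulk operations commit together, or neither does
    await EpisodeService.bulk_mark_downloaded(db, episode_ids=[101, 102, 103])
    await DownloadQueueService.bulk_delete(db, item_ids=["a1", "b2"])
```
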
---

## 9. Data Integrity Rules

### Validation Constraints

| Field                     | Rule                     | Error Message                         |
| ------------------------- | ------------------------ | ------------------------------------- |
| `anime_series.key`        | Non-empty, max 255 chars | "Series key cannot be empty"          |
| `anime_series.name`       | Non-empty, max 500 chars | "Series name cannot be empty"         |
| `episodes.season`         | 0-1000                   | "Season number must be non-negative"  |
| `episodes.episode_number` | 0-10000                  | "Episode number must be non-negative" |

Source: [src/server/database/models.py](../src/server/database/models.py#L89-L119)

### Cascade Rules

- Deleting `anime_series` deletes all related `episodes` and `download_queue_item`

---

## 10. Migration Strategy

Currently, SQLAlchemy's `create_all()` is used for schema creation.

```python
# src/server/database/connection.py
async def init_db():
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)
```

For production migrations, Alembic is recommended but not yet implemented.

Source: [src/server/database/connection.py](../src/server/database/connection.py)

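If Alembic is adopted later, the usual bootstrap follows the standard Alembic workflow (shown here as a sketch; none of this tooling exists in the project today):

```bash
pip install alembic
alembic init migrations                             # create the migration environment
alembic revision --autogenerate -m "initial schema"
alembic upgrade head                                # apply migrations to the database
```
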
---

## 11. Common Query Patterns

### Get all series with missing episodes

```python
series = await db.execute(
    select(AnimeSeries).options(selectinload(AnimeSeries.episodes))
)
for serie in series.scalars():
    missing = [e for e in serie.episodes if not e.is_downloaded]
```

### Get pending downloads ordered by priority

```python
items = await db.execute(
    select(DownloadQueueItem)
    .where(DownloadQueueItem.status == "pending")
    .order_by(
        case(
            (DownloadQueueItem.priority == "HIGH", 1),
            (DownloadQueueItem.priority == "NORMAL", 2),
            (DownloadQueueItem.priority == "LOW", 3),
        ),
        DownloadQueueItem.added_at
    )
)
```

---

## 12. Database Location

| Environment | Default Location                                  |
| ----------- | ------------------------------------------------- |
| Development | `./data/aniworld.db`                              |
| Production  | Via `DATABASE_URL` environment variable           |
| Testing     | In-memory SQLite (`sqlite+aiosqlite:///:memory:`) |

64
docs/DEVELOPMENT.md
Normal file
@ -0,0 +1,64 @@

# Development Guide

## Document Purpose

This document provides guidance for developers working on the Aniworld project.

### What This Document Contains

- **Prerequisites**: Required software and tools
- **Environment Setup**: Step-by-step local development setup
- **Project Structure**: Source code organization explanation
- **Development Workflow**: Branch strategy, commit conventions
- **Coding Standards**: Style guide, linting, formatting
- **Running the Application**: Development server, CLI usage
- **Debugging Tips**: Common debugging approaches
- **IDE Configuration**: VS Code settings, recommended extensions
- **Contributing Guidelines**: How to submit changes
- **Code Review Process**: Review checklist and expectations

### What This Document Does NOT Contain

- Production deployment (see [DEPLOYMENT.md](DEPLOYMENT.md))
- API reference (see [API.md](API.md))
- Architecture decisions (see [ARCHITECTURE.md](ARCHITECTURE.md))
- Test writing guides (see [TESTING.md](TESTING.md))
- Security guidelines (see [SECURITY.md](SECURITY.md))

### Target Audience

- New developers joining the project
- Contributors (internal and external)
- Anyone setting up a development environment

---

## Sections to Document

1. Prerequisites
   - Python version
   - Conda environment
   - Node.js (if applicable)
   - Git
2. Getting Started
   - Clone repository
   - Setup conda environment
   - Install dependencies
   - Configuration setup
3. Project Structure Overview
4. Development Server
   - Starting FastAPI server
   - Hot reload configuration
   - Debug mode
5. CLI Development
6. Code Style
   - PEP 8 compliance
   - Type hints requirements
   - Docstring format
   - Import organization
7. Git Workflow
   - Branch naming
   - Commit message format
   - Pull request process
8. Common Development Tasks
9. Troubleshooting Development Issues

39
docs/README.md
Normal file
@ -0,0 +1,39 @@

# Aniworld Documentation

## Overview

This directory contains all documentation for the Aniworld anime download manager project.

## Documentation Structure

| Document                                 | Purpose                                        | Target Audience                    |
| ---------------------------------------- | ---------------------------------------------- | ---------------------------------- |
| [ARCHITECTURE.md](ARCHITECTURE.md)       | System architecture and design decisions       | Architects, Senior Developers      |
| [API.md](API.md)                         | REST API reference and WebSocket documentation | Frontend Developers, API Consumers |
| [DEVELOPMENT.md](DEVELOPMENT.md)         | Developer setup and contribution guide         | All Developers                     |
| [DEPLOYMENT.md](DEPLOYMENT.md)           | Deployment and operations guide                | DevOps, System Administrators      |
| [DATABASE.md](DATABASE.md)               | Database schema and data models                | Backend Developers                 |
| [TESTING.md](TESTING.md)                 | Testing strategy and guidelines                | QA Engineers, Developers           |
| [SECURITY.md](SECURITY.md)               | Security considerations and guidelines         | Security Engineers, All Developers |
| [CONFIGURATION.md](CONFIGURATION.md)     | Configuration options reference                | Operators, Developers              |
| [CHANGELOG.md](CHANGELOG.md)             | Version history and changes                    | All Stakeholders                   |
| [TROUBLESHOOTING.md](TROUBLESHOOTING.md) | Common issues and solutions                    | Support, Operators                 |
| [features.md](features.md)               | Feature list and capabilities                  | Product Owners, Users              |
| [instructions.md](instructions.md)       | AI agent development instructions              | AI Agents, Developers              |

## Documentation Standards

- All documentation uses Markdown format
- Keep documentation up-to-date with code changes
- Include code examples where applicable
- Use clear, concise language
- Include diagrams for complex concepts (use Mermaid syntax)

## Contributing to Documentation

When adding or updating documentation:

1. Follow the established format in each document
2. Update the README.md if adding new documents
3. Ensure cross-references are valid
4. Review for spelling and grammar

71
docs/TESTING.md
Normal file
@ -0,0 +1,71 @@

# Testing Documentation

## Document Purpose

This document describes the testing strategy, guidelines, and practices for the Aniworld project.

### What This Document Contains

- **Testing Strategy**: Overall approach to quality assurance
- **Test Categories**: Unit, integration, API, performance, security tests
- **Test Structure**: Organization of test files and directories
- **Writing Tests**: Guidelines for writing effective tests
- **Fixtures and Mocking**: Shared test utilities and mock patterns
- **Running Tests**: Commands and configurations
- **Coverage Requirements**: Minimum coverage thresholds
- **CI/CD Integration**: How tests run in automation
- **Test Data Management**: Managing test fixtures and data
- **Best Practices**: Do's and don'ts for testing

### What This Document Does NOT Contain

- Production deployment (see [DEPLOYMENT.md](DEPLOYMENT.md))
- Security audit procedures (see [SECURITY.md](SECURITY.md))
- Bug tracking and issue management
- Performance benchmarking results

### Target Audience

- Developers writing tests
- QA Engineers
- CI/CD Engineers
- Code reviewers

---

## Sections to Document

1. Testing Philosophy
   - Test pyramid approach
   - Quality gates
2. Test Categories
   - Unit Tests (`tests/unit/`)
   - Integration Tests (`tests/integration/`)
   - API Tests (`tests/api/`)
   - Frontend Tests (`tests/frontend/`)
   - Performance Tests (`tests/performance/`)
   - Security Tests (`tests/security/`)
3. Test Structure and Naming
   - File naming conventions
   - Test function naming
   - Test class organization
4. Running Tests
   - pytest commands
   - Running specific tests
   - Verbose output
   - Coverage reports
5. Fixtures and Conftest
   - Shared fixtures
   - Database fixtures
   - Mock services
6. Mocking Guidelines
   - What to mock
   - Mock patterns
   - External service mocks
7. Coverage Requirements
8. CI/CD Integration
9. Writing Good Tests
   - Arrange-Act-Assert pattern
   - Test isolation
   - Edge cases
10. Common Pitfalls to Avoid

@ -1,426 +0,0 @@

# Series Identifier Standardization - Validation Instructions

## Overview

This document provides comprehensive instructions for AI agents to validate the **Series Identifier Standardization** change across the Aniworld codebase. The change standardizes `key` as the primary identifier for series and relegates `folder` to metadata-only status.

## Summary of the Change

| Field    | Purpose                                                                        | Usage                                                           |
| -------- | ------------------------------------------------------------------------------ | --------------------------------------------------------------- |
| `key`    | **Primary Identifier** - Provider-assigned, URL-safe (e.g., `attack-on-titan`) | All lookups, API operations, database queries, WebSocket events |
| `folder` | **Metadata Only** - Filesystem folder name (e.g., `Attack on Titan (2013)`)    | Display purposes, filesystem operations only                    |
| `id`     | **Database Primary Key** - Internal auto-increment integer                     | Database relationships only                                     |

---

## Validation Checklist

### Phase 2: Application Layer Services

**Files to validate:**

1. **`src/server/services/anime_service.py`**

   - [ ] Class docstring explains `key` vs `folder` convention
   - [ ] All public methods accept `key` parameter for series identification
   - [ ] No methods accept `folder` as an identifier parameter
   - [ ] Event handler methods document key/folder convention
   - [ ] Progress tracking uses `key` in progress IDs where possible

2. **`src/server/services/download_service.py`**

   - [ ] `DownloadItem` uses `serie_id` (which should be the `key`)
   - [ ] `serie_folder` is documented as metadata only
   - [ ] Queue operations look up series by `key` not `folder`
   - [ ] Persistence format includes `serie_id` as the key identifier

3. **`src/server/services/websocket_service.py`**

   - [ ] Module docstring explains key/folder convention
   - [ ] Broadcast methods include `key` in message payloads
   - [ ] `folder` is documented as optional/display only
   - [ ] Event broadcasts use `key` as primary identifier

4. **`src/server/services/scan_service.py`**

   - [ ] Scan operations use `key` for identification
   - [ ] Progress events include `key` field

5. **`src/server/services/progress_service.py`**

   - [ ] Progress tracking includes `key` in metadata where applicable

**Validation Commands:**

```bash
# Check service layer for folder-based lookups
grep -rn "by_folder\|folder.*=.*identifier\|folder.*lookup" src/server/services/ --include="*.py"

# Verify key is used in services
grep -rn "serie_id\|series_key\|key.*identifier" src/server/services/ --include="*.py"
```

---

### Phase 3: API Endpoints and Responses

**Files to validate:**

1. **`src/server/api/anime.py`**

   - [ ] `AnimeSummary` model has `key` field with proper description
   - [ ] `AnimeDetail` model has `key` field with proper description
   - [ ] API docstrings explain `key` is the primary identifier
   - [ ] `folder` field descriptions state "metadata only"
   - [ ] Endpoint paths use `key` parameter (e.g., `/api/anime/{key}`)
   - [ ] No endpoints use `folder` as path parameter for lookups

2. **`src/server/api/download.py`**

   - [ ] Download endpoints use `serie_id` (key) for operations
   - [ ] Request models document key/folder convention
   - [ ] Response models include `key` as primary identifier

3. **`src/server/models/anime.py`**

   - [ ] Module docstring explains identifier convention
   - [ ] `AnimeSeriesResponse` has `key` field properly documented
   - [ ] `SearchResult` has `key` field properly documented
   - [ ] Field validators normalize `key` to lowercase
   - [ ] `folder` fields document metadata-only purpose

4. **`src/server/models/download.py`**

   - [ ] `DownloadItem` has `serie_id` documented as the key
   - [ ] `serie_folder` documented as metadata only
   - [ ] Field descriptions are clear about primary vs metadata

5. **`src/server/models/websocket.py`**

   - [ ] Module docstring explains key/folder convention
   - [ ] Message models document `key` as primary identifier
   - [ ] `folder` documented as optional display metadata

**Validation Commands:**

```bash
# Check API endpoints for folder-based paths
grep -rn "folder.*Path\|/{folder}" src/server/api/ --include="*.py"

# Verify key is used in endpoints
grep -rn "/{key}\|series_key\|serie_id" src/server/api/ --include="*.py"

# Check model field descriptions
grep -rn "Field.*description.*identifier\|Field.*description.*key\|Field.*description.*folder" src/server/models/ --include="*.py"
```

---

### Phase 4: Frontend Integration

**Files to validate:**

1. **`src/server/web/static/js/app.js`**

   - [ ] `selectedSeries` Set uses `key` values, not `folder`
   - [ ] `seriesData` array comments indicate `key` as primary identifier
   - [ ] Selection operations use `key` property
   - [ ] API calls pass `key` for series identification
   - [ ] WebSocket message handlers extract `key` from data
   - [ ] No code uses `folder` for series lookups

2. **`src/server/web/static/js/queue.js`**

   - [ ] Queue items reference series by `key` or `serie_id`
   - [ ] WebSocket handlers extract `key` from messages
   - [ ] UI operations use `key` for identification
   - [ ] `serie_folder` used only for display

3. **`src/server/web/static/js/websocket_client.js`**

   - [ ] Message handling preserves `key` field
   - [ ] No transformation that loses `key` information

4. **HTML Templates** (`src/server/web/templates/`)

   - [ ] Data attributes use `key` for identification (e.g., `data-key`)
   - [ ] No `data-folder` used for identification purposes
   - [ ] Display uses `folder` or `name` appropriately

**Validation Commands:**

```bash
# Check JavaScript for folder-based lookups
grep -rn "\.folder\s*==\|folder.*identifier\|getByFolder" src/server/web/static/js/ --include="*.js"

# Check data attributes in templates
grep -rn "data-key\|data-folder\|data-series" src/server/web/templates/ --include="*.html"
```

---

### Phase 5: Database Operations

**Files to validate:**

1. **`src/server/database/models.py`**

   - [ ] `AnimeSeries` model has `key` column with unique constraint
   - [ ] `key` column is indexed
   - [ ] Model docstring explains identifier convention
   - [ ] `folder` column docstring states "metadata only"
   - [ ] Validators check `key` is not empty
   - [ ] No `folder` uniqueness constraint (unless intentional)

2. **`src/server/database/service.py`**

   - [ ] `AnimeSeriesService` has `get_by_key()` method
   - [ ] Class docstring explains lookup convention
   - [ ] No `get_by_folder()` without deprecation
   - [ ] All CRUD operations use `key` for identification
   - [ ] Logging uses `key` in messages

3. **`src/server/database/migrations/`**

   - [ ] Migration files maintain `key` as unique, indexed column
   - [ ] No migrations that use `folder` as identifier

**Validation Commands:**

```bash
# Check database models
grep -rn "unique=True\|index=True" src/server/database/models.py

# Check service lookups
grep -rn "get_by_key\|get_by_folder\|filter.*key\|filter.*folder" src/server/database/service.py
```

---

### Phase 6: WebSocket Events

**Files to validate:**

1. **All WebSocket broadcast calls** should include `key` in payload:

   - `download_progress` → includes `key`
   - `download_complete` → includes `key`
   - `download_failed` → includes `key`
   - `scan_progress` → includes `key` (where applicable)
   - `queue_status` → items include `key`

2. **Message format validation:**

   ```json
   {
     "type": "download_progress",
     "data": {
       "key": "attack-on-titan",            // PRIMARY - always present
       "folder": "Attack on Titan (2013)",  // OPTIONAL - display only
       "progress": 45.5,
       ...
     }
   }
   ```

**Validation Commands:**

```bash
# Check WebSocket broadcast calls
grep -rn "broadcast.*key\|send_json.*key" src/server/services/ --include="*.py"

# Check message construction
grep -rn '"key":\|"folder":' src/server/services/ --include="*.py"
```

---

### Phase 7: Test Coverage

**Test files to validate:**

1. **`tests/unit/test_serie_class.py`**

   - [ ] Tests for key validation (empty, whitespace, None)
   - [ ] Tests for key as primary identifier
   - [ ] Tests for folder as metadata only

2. **`tests/unit/test_anime_service.py`**

   - [ ] Service tests use `key` for operations
   - [ ] Mock objects have proper `key` attributes

3. **`tests/unit/test_database_models.py`**

   - [ ] Tests for `key` uniqueness constraint
   - [ ] Tests for `key` validation

4. **`tests/unit/test_database_service.py`**

   - [ ] Tests for `get_by_key()` method
   - [ ] No tests for deprecated folder lookups

5. **`tests/api/test_anime_endpoints.py`**

   - [ ] API tests use `key` in requests
   - [ ] Mock `FakeSerie` has proper `key` attribute
   - [ ] Comments explain key/folder convention

6. **`tests/unit/test_websocket_service.py`**

   - [ ] WebSocket tests verify `key` in messages
   - [ ] Broadcast tests include `key` in payload

**Validation Commands:**

```bash
# Run all tests
conda run -n AniWorld python -m pytest tests/ -v --tb=short

# Run specific test files
conda run -n AniWorld python -m pytest tests/unit/test_serie_class.py -v
conda run -n AniWorld python -m pytest tests/unit/test_database_models.py -v
conda run -n AniWorld python -m pytest tests/api/test_anime_endpoints.py -v

# Search tests for identifier usage
grep -rn "key.*identifier\|folder.*metadata" tests/ --include="*.py"
```

---

## Common Issues to Check

### 1. Inconsistent Naming

Look for inconsistent parameter names:

- `serie_key` vs `series_key` vs `key`
- `serie_id` should refer to `key`, not database `id`
- `serie_folder` vs `folder`

### 2. Missing Documentation

Check that ALL models, services, and APIs document:

- What `key` is and how to use it
- That `folder` is metadata only

### 3. Legacy Code Patterns

Search for deprecated patterns:

```python
# Bad - using folder for lookup
series = get_by_folder(folder_name)

# Good - using key for lookup
series = get_by_key(series_key)
```

### 4. API Response Consistency

Verify all API responses include:

- `key` field (primary identifier)
- `folder` field (optional, for display)

### 5. Frontend Data Flow

Verify the frontend:

- Stores `key` in selection sets
- Passes `key` to API calls
- Uses `folder` only for display

---

## Deprecation Warnings

The following should have deprecation warnings (for removal in v3.0.0):

1. Any `get_by_folder()` or `GetByFolder()` methods
2. Any API endpoints that accept `folder` as a lookup parameter
3. Any frontend code that uses `folder` for identification

**Example deprecation:**

```python
import warnings

def get_by_folder(self, folder: str):
    """DEPRECATED: Use get_by_key() instead."""
    warnings.warn(
        "get_by_folder() is deprecated, use get_by_key(). "
        "Will be removed in v3.0.0",
        DeprecationWarning,
        stacklevel=2
    )
    # ... implementation
```

---

## Automated Validation Script

Run this script to perform automated checks:

```bash
#!/bin/bash
# identifier_validation.sh

echo "=== Series Identifier Standardization Validation ==="
echo ""

echo "1. Checking core entities..."
grep -rn "PRIMARY IDENTIFIER\|metadata only" src/core/entities/ --include="*.py" | head -20

echo ""
echo "2. Checking for deprecated folder lookups..."
grep -rn "get_by_folder\|GetByFolder" src/ --include="*.py"

echo ""
echo "3. Checking API models for key field..."
grep -rn 'key.*Field\|Field.*key' src/server/models/ --include="*.py" | head -20

echo ""
echo "4. Checking database models..."
grep -rn "key.*unique\|key.*index" src/server/database/models.py

echo ""
echo "5. Checking frontend key usage..."
grep -rn "selectedSeries\|\.key\|data-key" src/server/web/static/js/ --include="*.js" | head -20

echo ""
echo "6. Running tests..."
conda run -n AniWorld python -m pytest tests/unit/test_serie_class.py -v --tb=short

echo ""
echo "=== Validation Complete ==="
```

---
|
|
||||||
|
|
||||||
## Expected Results

After validation, you should confirm:

1. ✅ All core entities use `key` as primary identifier
2. ✅ All services look up series by `key`
3. ✅ All API endpoints use `key` for operations
4. ✅ All database queries use `key` for lookups
5. ✅ Frontend uses `key` for selection and API calls
6. ✅ WebSocket events include `key` in payload
7. ✅ All tests pass
8. ✅ Documentation clearly explains the convention
9. ✅ Deprecation warnings exist for legacy patterns

---
## Sign-off

Once validation is complete, update this section:

- [x] Phase 1: Core Entities - Validated by: **AI Agent** Date: **28 Nov 2025**
- [x] Phase 2: Services - Validated by: **AI Agent** Date: **28 Nov 2025**
- [ ] Phase 3: API - Validated by: **\_\_\_** Date: **\_\_\_**
- [ ] Phase 4: Frontend - Validated by: **\_\_\_** Date: **\_\_\_**
- [ ] Phase 5: Database - Validated by: **\_\_\_** Date: **\_\_\_**
- [ ] Phase 6: WebSocket - Validated by: **\_\_\_** Date: **\_\_\_**
- [ ] Phase 7: Tests - Validated by: **\_\_\_** Date: **\_\_\_**

**Final Approval:** **\_\_\_\_\_\_\_\_** Date: **\_\_\_\_\_\_**
@@ -1,337 +0,0 @@
# Aniworld Web Application Infrastructure

```bash
conda activate AniWorld
```
## Project Structure

```
src/
├── core/                  # Core application logic
│   ├── SeriesApp.py       # Main application class
│   ├── SerieScanner.py    # Directory scanner
│   ├── entities/          # Domain entities (series.py, SerieList.py)
│   ├── interfaces/        # Abstract interfaces (providers.py, callbacks.py)
│   ├── providers/         # Content providers (aniworld, streaming)
│   └── exceptions/        # Custom exceptions
├── server/                # FastAPI web application
│   ├── fastapi_app.py     # Main FastAPI application
│   ├── controllers/       # Route controllers (health, page, error)
│   ├── api/               # API routes (auth, config, anime, download, websocket)
│   ├── models/            # Pydantic models
│   ├── services/          # Business logic services
│   ├── database/          # SQLAlchemy ORM layer
│   ├── utils/             # Utilities (dependencies, templates, security)
│   └── web/               # Frontend (templates, static assets)
├── cli/                   # CLI application
data/                      # Config, database, queue state
logs/                      # Application logs
tests/                     # Test suites
```
## Technology Stack

| Layer     | Technology                                     |
| --------- | ---------------------------------------------- |
| Backend   | FastAPI, Uvicorn, SQLAlchemy, SQLite, Pydantic |
| Frontend  | HTML5, CSS3, Vanilla JS, Bootstrap 5, HTMX     |
| Security  | JWT (python-jose), bcrypt (passlib)            |
| Real-time | Native WebSocket                               |
## Series Identifier Convention

Throughout the codebase, three identifiers are used for anime series:

| Identifier | Type            | Purpose                                                     | Example                    |
| ---------- | --------------- | ----------------------------------------------------------- | -------------------------- |
| `key`      | Unique, Indexed | **PRIMARY** - All lookups, API operations, WebSocket events | `"attack-on-titan"`        |
| `folder`   | String          | Display/filesystem metadata only (never for lookups)        | `"Attack on Titan (2013)"` |
| `id`       | Primary Key     | Internal database key for relationships                     | `1`, `42`                  |
### Key Format Requirements

- **Lowercase only**: No uppercase letters allowed
- **URL-safe**: Only alphanumeric characters and hyphens
- **Hyphen-separated**: Words separated by single hyphens
- **No leading/trailing hyphens**: Must start and end with alphanumeric
- **No consecutive hyphens**: `attack--titan` is invalid

**Valid examples**: `"attack-on-titan"`, `"one-piece"`, `"86-eighty-six"`, `"re-zero"`
**Invalid examples**: `"Attack On Titan"`, `"attack_on_titan"`, `"attack on titan"`
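A minimal sketch of a validator enforcing these rules (the regex is derived from the requirements above; the function name is illustrative, not necessarily the project's implementation):

```python
import re

# Lowercase alphanumeric groups separated by single hyphens: this enforces
# lowercase, URL-safe characters, no leading/trailing hyphens, and no
# consecutive hyphens in one pattern.
_KEY_PATTERN = re.compile(r"^[a-z0-9]+(?:-[a-z0-9]+)*$")


def is_valid_series_key(key: str) -> bool:
    """Return True if `key` satisfies the key format requirements."""
    return bool(_KEY_PATTERN.match(key))


assert is_valid_series_key("attack-on-titan")
assert is_valid_series_key("86-eighty-six")
assert not is_valid_series_key("Attack On Titan")   # uppercase and spaces
assert not is_valid_series_key("attack--titan")     # consecutive hyphens
```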
### Migration Notes

- **Backward Compatibility**: API endpoints accepting `anime_id` will check `key` first, then fall back to `folder` lookup
- **Deprecation**: Folder-based lookups are deprecated and will be removed in a future version
- **New Code**: Always use `key` for identification; `folder` is metadata only
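The backward-compatible resolution described above might look roughly like this (a sketch only: `get_by_key()` is documented later in this page, while `get_all()` and the folder scan are hypothetical helpers introduced purely for illustration):

```python
from typing import Optional


def resolve_series(service, anime_id: str) -> Optional[object]:
    """Resolve `anime_id` by key first, then fall back to folder metadata."""
    series = service.get_by_key(anime_id)
    if series is not None:
        return series
    # Deprecated fallback: linear match on folder metadata, shown as a scan
    # because no get_by_folder() lookup exists (get_all() is hypothetical).
    for candidate in service.get_all():
        if candidate.folder == anime_id:
            return candidate
    return None
```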
## API Endpoints

### Authentication (`/api/auth`)

- `POST /login` - Master password authentication (returns JWT)
- `POST /logout` - Invalidate session
- `GET /status` - Check authentication status
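A typical login flow could look like this (the JSON field names `password` and `access_token` are assumptions, not a verified contract):

```python
import requests

BASE_URL = "http://127.0.0.1:8000"

# Authenticate with the master password; the JWT is assumed to be
# returned in an "access_token" field.
resp = requests.post(
    f"{BASE_URL}/api/auth/login",
    json={"password": "your-master-password"},
)
resp.raise_for_status()
token = resp.json()["access_token"]

# Reuse the token on subsequent requests.
headers = {"Authorization": f"Bearer {token}"}
print(requests.get(f"{BASE_URL}/api/auth/status", headers=headers).json())
```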
### Configuration (`/api/config`)

- `GET /` - Get configuration
- `PUT /` - Update configuration
- `POST /validate` - Validate without applying
- `GET /backups` - List backups
- `POST /backups/{name}/restore` - Restore backup
### Anime (`/api/anime`)

- `GET /` - List anime with missing episodes (returns `key` as identifier)
- `GET /{anime_id}` - Get anime details (accepts `key` or `folder` for backward compatibility)
- `POST /search` - Search for anime (returns `key` as identifier)
- `POST /add` - Add new series (extracts `key` from link URL)
- `POST /rescan` - Trigger library rescan

**Response Models:**

- `AnimeSummary`: `key` (primary identifier), `name`, `site`, `folder` (metadata), `missing_episodes`, `link`
- `AnimeDetail`: `key` (primary identifier), `title`, `folder` (metadata), `episodes`, `description`
### Download Queue (`/api/queue`)

- `GET /status` - Queue status and statistics
- `POST /add` - Add episodes to queue
- `DELETE /{item_id}` - Remove item
- `POST /start` | `/stop` | `/pause` | `/resume` - Queue control
- `POST /retry` - Retry failed downloads
- `DELETE /completed` - Clear completed items

**Request Models:**

- `DownloadRequest`: `serie_id` (key, primary identifier), `serie_folder` (filesystem path), `serie_name` (display), `episodes`, `priority`

**Response Models:**

- `DownloadItem`: `id`, `serie_id` (key), `serie_folder` (metadata), `serie_name`, `episode`, `status`, `progress`
- `QueueStatus`: `is_running`, `is_paused`, `active_downloads`, `pending_queue`, `completed_downloads`, `failed_downloads`
### WebSocket (`/ws/connect`)

Real-time updates for downloads, scans, and queue operations.

**Rooms**: `downloads`, `download_progress`, `scan_progress`

**Message Types**: `download_progress`, `download_complete`, `download_failed`, `queue_status`, `scan_progress`, `scan_complete`, `scan_failed`

**Series Identifier in Messages:**
All series-related WebSocket events include `key` as the primary identifier in their data payload:

```json
{
  "type": "download_progress",
  "timestamp": "2025-10-17T10:30:00.000Z",
  "data": {
    "download_id": "abc123",
    "key": "attack-on-titan",
    "folder": "Attack on Titan (2013)",
    "percent": 45.2,
    "speed_mbps": 2.5,
    "eta_seconds": 180
  }
}
```
## Database Models

| Model             | Purpose                                  |
| ----------------- | ---------------------------------------- |
| AnimeSeries       | Series metadata (key, name, folder, etc) |
| Episode           | Episodes linked to series                |
| DownloadQueueItem | Queue items with status and progress     |
| UserSession       | JWT sessions with expiry                 |

**Mixins**: `TimestampMixin` (created_at, updated_at), `SoftDeleteMixin`
### AnimeSeries Identifier Fields

| Field    | Type            | Purpose                                           |
| -------- | --------------- | ------------------------------------------------- |
| `id`     | Primary Key     | Internal database key for relationships           |
| `key`    | Unique, Indexed | **PRIMARY IDENTIFIER** for all lookups            |
| `folder` | String          | Filesystem metadata only (not for identification) |

**Database Service Methods:**

- `AnimeSeriesService.get_by_key(key)` - **Primary lookup method**
- `AnimeSeriesService.get_by_id(id)` - Internal lookup by database ID
- No `get_by_folder()` method exists - folder is never used for lookups
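A minimal SQLAlchemy 2.0-style sketch of these identifier fields (column roles follow the table above; the table name and everything else are illustrative assumptions, not the project's actual model):

```python
from sqlalchemy import Integer, String
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


class Base(DeclarativeBase):
    pass


class AnimeSeries(Base):
    __tablename__ = "anime_series"  # assumed table name

    # Internal database key, used only for relationships.
    id: Mapped[int] = mapped_column(Integer, primary_key=True)
    # PRIMARY IDENTIFIER: unique and indexed so key lookups stay fast.
    key: Mapped[str] = mapped_column(String, unique=True, index=True, nullable=False)
    # Filesystem metadata only; deliberately neither unique nor indexed.
    folder: Mapped[str | None] = mapped_column(String, nullable=True)
```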
## Core Services

### SeriesApp (`src/core/SeriesApp.py`)

Main engine for anime series management with async support, progress callbacks, and cancellation.

### Callback System (`src/core/interfaces/callbacks.py`)

- `ProgressCallback`, `ErrorCallback`, `CompletionCallback`
- Context classes include `key` + optional `folder` fields
- Thread-safe `CallbackManager` for multiple callback registration
### Services (`src/server/services/`)

| Service          | Purpose                                   |
| ---------------- | ----------------------------------------- |
| AnimeService     | Series management, scans (uses SeriesApp) |
| DownloadService  | Queue management, download execution      |
| ScanService      | Library scan operations with callbacks    |
| ProgressService  | Centralized progress tracking + WebSocket |
| WebSocketService | Real-time connection management           |
| AuthService      | JWT authentication, rate limiting         |
| ConfigService    | Configuration persistence with backups    |
## Validation Utilities (`src/server/utils/validators.py`)

Provides data validation functions for ensuring data integrity across the application.

### Series Key Validation

- **`validate_series_key(key)`**: Validates key format (URL-safe, lowercase, hyphens only)
  - Valid: `"attack-on-titan"`, `"one-piece"`, `"86-eighty-six"`
  - Invalid: `"Attack On Titan"`, `"attack_on_titan"`, `"attack on titan"`
- **`validate_series_key_or_folder(identifier, allow_folder=True)`**: Backward-compatible validation
  - Returns tuple `(identifier, is_key)` where `is_key` indicates if it's a valid key format
  - Set `allow_folder=False` to require strict key format
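Illustrative usage of the tuple return (based on the signature documented above; the strict-mode failure behavior is an assumption):

```python
from src.server.utils.validators import validate_series_key_or_folder

# A valid key passes through and is flagged as a key.
identifier, is_key = validate_series_key_or_folder("attack-on-titan")
assert is_key

# A folder-style identifier is tolerated by default but flagged as non-key.
identifier, is_key = validate_series_key_or_folder("Attack on Titan (2013)")
assert not is_key

# Strict mode requires key format; anything else is expected to be rejected.
validate_series_key_or_folder("Attack on Titan (2013)", allow_folder=False)
```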
### Other Validators

| Function                    | Purpose                                     |
| --------------------------- | ------------------------------------------- |
| `validate_series_name`      | Series display name validation              |
| `validate_episode_range`    | Episode range validation (1-1000)           |
| `validate_download_quality` | Quality setting (360p-1080p, best, worst)   |
| `validate_language`         | Language codes (ger-sub, ger-dub, etc.)     |
| `validate_anime_url`        | Aniworld.to/s.to URL validation             |
| `validate_backup_name`      | Backup filename validation                  |
| `validate_config_data`      | Configuration data structure validation     |
| `sanitize_filename`         | Sanitize filenames for safe filesystem use  |
## Template Helpers (`src/server/utils/template_helpers.py`)

Provides utilities for template rendering and series data preparation.

### Core Functions

| Function                   | Purpose                           |
| -------------------------- | --------------------------------- |
| `get_base_context`         | Base context for all templates    |
| `render_template`          | Render template with context      |
| `validate_template_exists` | Check if template file exists     |
| `list_available_templates` | List all available template files |

### Series Context Helpers

All series helpers use `key` as the primary identifier:

| Function                            | Purpose                                        |
| ----------------------------------- | ---------------------------------------------- |
| `prepare_series_context`            | Prepare series data for templates (uses `key`) |
| `get_series_by_key`                 | Find series by `key` (not `folder`)            |
| `filter_series_by_missing_episodes` | Filter series with missing episodes            |

**Example Usage:**

```python
from src.server.utils.template_helpers import prepare_series_context

series_data = [
    {"key": "attack-on-titan", "name": "Attack on Titan", "folder": "Attack on Titan (2013)"},
    {"key": "one-piece", "name": "One Piece", "folder": "One Piece (1999)"}
]
prepared = prepare_series_context(series_data, sort_by="name")
# Returns sorted list using 'key' as identifier
```
## Frontend

### Static Files

- CSS: `styles.css` (Fluent UI design), `ux_features.css` (accessibility)
- JS: `app.js`, `queue.js`, `websocket_client.js`, accessibility modules

### WebSocket Client

Native WebSocket wrapper with a Socket.IO-compatible API:

```javascript
const socket = io();
socket.join("download_progress");
socket.on("download_progress", (data) => {
  /* ... */
});
```

### Authentication

JWT tokens are stored in localStorage and included as `Authorization: Bearer <token>`.
## Testing

```bash
# All tests
conda run -n AniWorld python -m pytest tests/ -v

# Unit tests only
conda run -n AniWorld python -m pytest tests/unit/ -v

# API tests
conda run -n AniWorld python -m pytest tests/api/ -v
```
## Production Notes

### Current (Single-Process)

- SQLite with WAL mode
- In-memory WebSocket connections
- File-based config and queue persistence

### Multi-Process Deployment

- Switch to PostgreSQL/MySQL
- Move WebSocket registry to Redis
- Use distributed locking for queue operations
- Consider Redis for session/cache storage
## Code Examples

### API Usage with Key Identifier

```python
import requests

BASE_URL = "http://127.0.0.1:8000"
headers = {"Authorization": f"Bearer {token}"}  # token from /api/auth/login

# Fetching anime list - response includes 'key' as identifier
response = requests.get(f"{BASE_URL}/api/anime", headers=headers)
anime_list = response.json()
# Each item has: key="attack-on-titan", folder="Attack on Titan (2013)", ...

# Fetching a specific anime by key (preferred)
response = requests.get(f"{BASE_URL}/api/anime/attack-on-titan", headers=headers)

# Adding to the download queue using key
download_request = {
    "serie_id": "attack-on-titan",  # Use key, not folder
    "serie_folder": "Attack on Titan (2013)",  # Metadata for filesystem
    "serie_name": "Attack on Titan",
    "episodes": ["S01E01", "S01E02"],
    "priority": 1
}
response = requests.post(f"{BASE_URL}/api/queue/add", json=download_request, headers=headers)
```
### WebSocket Event Handling

```javascript
// WebSocket events always include 'key' as identifier
socket.on("download_progress", (data) => {
  const key = data.key; // Primary identifier: "attack-on-titan"
  const folder = data.folder; // Metadata: "Attack on Titan (2013)"
  updateProgressBar(key, data.percent);
});
```
@@ -75,7 +75,7 @@ conda run -n AniWorld python -m uvicorn src.server.fastapi_app:app --host 127.0.
 
 ---
 
-## Final Implementation Notes
+## Implementation Notes
 
 1. **Incremental Development**: Implement features incrementally, testing each component thoroughly before moving to the next
 2. **Code Review**: Review all generated code for adherence to project standards
@@ -100,23 +100,10 @@ For each task completed:
 - [ ] Performance validated
 - [ ] Code reviewed
 - [ ] Task marked as complete in instructions.md
-- [ ] Infrastructure.md updated
+- [ ] Infrastructure.md updated and other docs
 - [ ] Changes committed to git; keep your messages in git short and clear
 - [ ] Take the next task
 
 ---
 
-### Prerequisites
+## TODO List:
 
-1. Server is running: `conda run -n AniWorld python -m uvicorn src.server.fastapi_app:app --host 127.0.0.1 --port 8000 --reload`
-2. Password: `Hallo123!`
-3. Login via browser at `http://127.0.0.1:8000/login`
-
-### Notes
-
-- This is a simplification that removes complexity while maintaining core functionality
-- Improves user experience with explicit manual control
-- Easier to understand, test, and maintain
-- Good foundation for future enhancements if needed
-
----
@@ -14,5 +14,4 @@ pytest==7.4.3
 pytest-asyncio==0.21.1
 httpx==0.25.2
 sqlalchemy>=2.0.35
-alembic==1.13.0
 aiosqlite>=0.19.0
@@ -2,7 +2,8 @@
 """
 Startup script for the Aniworld FastAPI application.
 
-This script starts the application with proper logging configuration.
+This script starts the application with proper logging configuration
+and graceful shutdown support via Ctrl+C (SIGINT) or SIGTERM.
 """
 import uvicorn
 
@@ -15,6 +16,11 @@ if __name__ == "__main__":
     # Run the application with logging.
     # Only watch .py files in src/, explicitly exclude __pycache__.
     # This prevents reload loops from .pyc compilation.
+    #
+    # Graceful shutdown:
+    # - Ctrl+C (SIGINT) or SIGTERM triggers graceful shutdown
+    # - timeout_graceful_shutdown ensures shutdown completes within 30s
+    # - The FastAPI lifespan handler orchestrates cleanup in proper order
     uvicorn.run(
         "src.server.fastapi_app:app",
         host="127.0.0.1",
@@ -24,4 +30,5 @@ if __name__ == "__main__":
         reload_includes=["*.py"],
         reload_excludes=["*/__pycache__/*", "*.pyc"],
         log_config=log_config,
+        timeout_graceful_shutdown=30,  # Allow 30s for graceful shutdown
     )
@@ -7,7 +7,7 @@
 # installs dependencies, sets up the database, and starts the application.
 #
 # Usage:
-#   ./start.sh [development|production] [--no-install] [--no-migrate]
+#   ./start.sh [development|production] [--no-install]
 #
 # Environment Variables:
 #   ENVIRONMENT: 'development' or 'production' (default: development)
@ -28,7 +28,6 @@ PROJECT_ROOT="$(dirname "$SCRIPT_DIR")"
|
|||||||
CONDA_ENV="${CONDA_ENV:-AniWorld}"
|
CONDA_ENV="${CONDA_ENV:-AniWorld}"
|
||||||
ENVIRONMENT="${1:-development}"
|
ENVIRONMENT="${1:-development}"
|
||||||
INSTALL_DEPS="${INSTALL_DEPS:-true}"
|
INSTALL_DEPS="${INSTALL_DEPS:-true}"
|
||||||
RUN_MIGRATIONS="${RUN_MIGRATIONS:-true}"
|
|
||||||
PORT="${PORT:-8000}"
|
PORT="${PORT:-8000}"
|
||||||
HOST="${HOST:-127.0.0.1}"
|
HOST="${HOST:-127.0.0.1}"
|
||||||
|
|
||||||
@@ -104,20 +103,6 @@ install_dependencies() {
     log_success "Dependencies installed."
 }
 
-# Run database migrations
-run_migrations() {
-    if [[ "$RUN_MIGRATIONS" != "true" ]]; then
-        log_warning "Skipping database migrations."
-        return
-    fi
-
-    log_info "Running database migrations..."
-    cd "$PROJECT_ROOT"
-    conda run -n "$CONDA_ENV" \
-        python -m alembic upgrade head 2>/dev/null || log_warning "No migrations to run."
-    log_success "Database migrations completed."
-}
-
 # Initialize database
 init_database() {
     log_info "Initializing database..."
@ -220,10 +205,6 @@ main() {
|
|||||||
INSTALL_DEPS="false"
|
INSTALL_DEPS="false"
|
||||||
shift
|
shift
|
||||||
;;
|
;;
|
||||||
--no-migrate)
|
|
||||||
RUN_MIGRATIONS="false"
|
|
||||||
shift
|
|
||||||
;;
|
|
||||||
*)
|
*)
|
||||||
ENVIRONMENT="$1"
|
ENVIRONMENT="$1"
|
||||||
shift
|
shift
|
||||||
@@ -237,7 +218,6 @@ main() {
     create_env_file
     install_dependencies
     init_database
-    run_migrations
    start_application
 }
 
Binary file not shown.
Binary file not shown.
Binary file not shown.
316 src/cli/Main.py
@@ -1,316 +0,0 @@
"""Command-line interface for the Aniworld anime download manager."""
|
|
||||||
|
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
from typing import Optional, Sequence
|
|
||||||
|
|
||||||
from rich.progress import Progress
|
|
||||||
|
|
||||||
from src.core.entities.series import Serie
|
|
||||||
from src.core.SeriesApp import SeriesApp as CoreSeriesApp
|
|
||||||
|
|
||||||
LOG_FORMAT = "%(asctime)s - %(levelname)s - %(name)s - %(message)s"
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class SeriesCLI:
|
|
||||||
"""Thin wrapper around :class:`SeriesApp` providing an interactive CLI."""
|
|
||||||
|
|
||||||
def __init__(self, directory_to_search: str) -> None:
|
|
||||||
print("Please wait while initializing...")
|
|
||||||
self.directory_to_search = directory_to_search
|
|
||||||
self.series_app = CoreSeriesApp(directory_to_search)
|
|
||||||
|
|
||||||
self._progress: Optional[Progress] = None
|
|
||||||
self._overall_task_id: Optional[int] = None
|
|
||||||
self._series_task_id: Optional[int] = None
|
|
||||||
self._episode_task_id: Optional[int] = None
|
|
||||||
self._scan_task_id: Optional[int] = None
|
|
||||||
|
|
||||||
# ------------------------------------------------------------------
|
|
||||||
# Utility helpers
|
|
||||||
# ------------------------------------------------------------------
|
|
||||||
def _get_series_list(self) -> Sequence[Serie]:
|
|
||||||
"""Return the currently cached series with missing episodes."""
|
|
||||||
return self.series_app.get_series_list()
|
|
||||||
|
|
||||||
# ------------------------------------------------------------------
|
|
||||||
# Display & selection
|
|
||||||
# ------------------------------------------------------------------
|
|
||||||
def display_series(self) -> None:
|
|
||||||
"""Print all series with assigned numbers."""
|
|
||||||
series = self._get_series_list()
|
|
||||||
if not series:
|
|
||||||
print("\nNo series with missing episodes were found.")
|
|
||||||
return
|
|
||||||
|
|
||||||
print("\nCurrent result:")
|
|
||||||
for index, serie in enumerate(series, start=1):
|
|
||||||
name = (serie.name or "").strip()
|
|
||||||
label = name if name else serie.folder
|
|
||||||
print(f"{index}. {label}")
|
|
||||||
|
|
||||||
def get_user_selection(self) -> Optional[Sequence[Serie]]:
|
|
||||||
"""Prompt the user to select one or more series for download."""
|
|
||||||
series = list(self._get_series_list())
|
|
||||||
if not series:
|
|
||||||
print("No series available for download.")
|
|
||||||
return None
|
|
||||||
|
|
||||||
self.display_series()
|
|
||||||
prompt = (
|
|
||||||
"\nSelect series by number (e.g. '1', '1,2' or 'all') "
|
|
||||||
"or type 'exit' to return: "
|
|
||||||
)
|
|
||||||
selection = input(prompt).strip().lower()
|
|
||||||
|
|
||||||
if selection in {"exit", ""}:
|
|
||||||
return None
|
|
||||||
|
|
||||||
if selection == "all":
|
|
||||||
return series
|
|
||||||
|
|
||||||
try:
|
|
||||||
indexes = [
|
|
||||||
int(value.strip()) - 1
|
|
||||||
for value in selection.split(",")
|
|
||||||
]
|
|
||||||
except ValueError:
|
|
||||||
print("Invalid selection. Returning to main menu.")
|
|
||||||
return None
|
|
||||||
|
|
||||||
chosen = [
|
|
||||||
series[i]
|
|
||||||
for i in indexes
|
|
||||||
if 0 <= i < len(series)
|
|
||||||
]
|
|
||||||
|
|
||||||
if not chosen:
|
|
||||||
print("No valid series selected.")
|
|
||||||
return None
|
|
||||||
|
|
||||||
return chosen
|
|
||||||
|
|
||||||
# ------------------------------------------------------------------
|
|
||||||
# Download logic
|
|
||||||
# ------------------------------------------------------------------
|
|
||||||
def download_series(self, series: Sequence[Serie]) -> None:
|
|
||||||
"""Download all missing episodes for the provided series list."""
|
|
||||||
total_episodes = sum(
|
|
||||||
len(episodes)
|
|
||||||
for serie in series
|
|
||||||
for episodes in serie.episodeDict.values()
|
|
||||||
)
|
|
||||||
|
|
||||||
if total_episodes == 0:
|
|
||||||
print("Selected series do not contain missing episodes.")
|
|
||||||
return
|
|
||||||
|
|
||||||
self._progress = Progress()
|
|
||||||
with self._progress:
|
|
||||||
self._overall_task_id = self._progress.add_task(
|
|
||||||
"[red]Processing...", total=total_episodes
|
|
||||||
)
|
|
||||||
self._series_task_id = self._progress.add_task(
|
|
||||||
"[green]Current series", total=1
|
|
||||||
)
|
|
||||||
self._episode_task_id = self._progress.add_task(
|
|
||||||
"[gray]Download", total=100
|
|
||||||
)
|
|
||||||
|
|
||||||
for serie in series:
|
|
||||||
serie_total = sum(len(eps) for eps in serie.episodeDict.values())
|
|
||||||
self._progress.update(
|
|
||||||
self._series_task_id,
|
|
||||||
total=max(serie_total, 1),
|
|
||||||
completed=0,
|
|
||||||
description=f"[green]{serie.folder}",
|
|
||||||
)
|
|
||||||
|
|
||||||
for season, episodes in serie.episodeDict.items():
|
|
||||||
for episode in episodes:
|
|
||||||
if not self.series_app.loader.is_language(
|
|
||||||
season, episode, serie.key
|
|
||||||
):
|
|
||||||
logger.info(
|
|
||||||
"Skipping %s S%02dE%02d because the desired language is unavailable",
|
|
||||||
serie.folder,
|
|
||||||
season,
|
|
||||||
episode,
|
|
||||||
)
|
|
||||||
continue
|
|
||||||
|
|
||||||
result = self.series_app.download(
|
|
||||||
serieFolder=serie.folder,
|
|
||||||
season=season,
|
|
||||||
episode=episode,
|
|
||||||
key=serie.key,
|
|
||||||
callback=self._update_download_progress,
|
|
||||||
)
|
|
||||||
|
|
||||||
if not result.success:
|
|
||||||
logger.error("Download failed: %s", result.message)
|
|
||||||
|
|
||||||
self._progress.advance(self._overall_task_id)
|
|
||||||
self._progress.advance(self._series_task_id)
|
|
||||||
self._progress.update(
|
|
||||||
self._episode_task_id,
|
|
||||||
completed=0,
|
|
||||||
description="[gray]Waiting...",
|
|
||||||
)
|
|
||||||
|
|
||||||
self._progress = None
|
|
||||||
self.series_app.refresh_series_list()
|
|
||||||
|
|
||||||
def _update_download_progress(self, percent: float) -> None:
|
|
||||||
"""Update the episode progress bar based on download progress."""
|
|
||||||
if not self._progress or self._episode_task_id is None:
|
|
||||||
return
|
|
||||||
|
|
||||||
description = f"[gray]Download: {percent:.1f}%"
|
|
||||||
self._progress.update(
|
|
||||||
self._episode_task_id,
|
|
||||||
completed=percent,
|
|
||||||
description=description,
|
|
||||||
)
|
|
||||||
|
|
||||||
# ------------------------------------------------------------------
|
|
||||||
# Rescan logic
|
|
||||||
# ------------------------------------------------------------------
|
|
||||||
def rescan(self) -> None:
|
|
||||||
"""Trigger a rescan of the anime directory using the core app."""
|
|
||||||
total_to_scan = self.series_app.SerieScanner.get_total_to_scan()
|
|
||||||
total_to_scan = max(total_to_scan, 1)
|
|
||||||
|
|
||||||
self._progress = Progress()
|
|
||||||
with self._progress:
|
|
||||||
self._scan_task_id = self._progress.add_task(
|
|
||||||
"[red]Scanning folders...",
|
|
||||||
total=total_to_scan,
|
|
||||||
)
|
|
||||||
|
|
||||||
result = self.series_app.ReScan(
|
|
||||||
callback=self._wrap_scan_callback(total_to_scan)
|
|
||||||
)
|
|
||||||
|
|
||||||
self._progress = None
|
|
||||||
self._scan_task_id = None
|
|
||||||
|
|
||||||
if result.success:
|
|
||||||
print(result.message)
|
|
||||||
else:
|
|
||||||
print(f"Scan failed: {result.message}")
|
|
||||||
|
|
||||||
def _wrap_scan_callback(self, total: int):
|
|
||||||
"""Create a callback that updates the scan progress bar."""
|
|
||||||
|
|
||||||
def _callback(folder: str, current: int) -> None:
|
|
||||||
if not self._progress or self._scan_task_id is None:
|
|
||||||
return
|
|
||||||
|
|
||||||
self._progress.update(
|
|
||||||
self._scan_task_id,
|
|
||||||
completed=min(current, total),
|
|
||||||
description=f"[green]{folder}",
|
|
||||||
)
|
|
||||||
|
|
||||||
return _callback
|
|
||||||
|
|
||||||
# ------------------------------------------------------------------
|
|
||||||
# Search & add logic
|
|
||||||
# ------------------------------------------------------------------
|
|
||||||
def search_mode(self) -> None:
|
|
||||||
"""Search for a series and add it to the local list if chosen."""
|
|
||||||
query = input("Enter search string: ").strip()
|
|
||||||
if not query:
|
|
||||||
return
|
|
||||||
|
|
||||||
results = self.series_app.search(query)
|
|
||||||
if not results:
|
|
||||||
print("No results found. Returning to main menu.")
|
|
||||||
return
|
|
||||||
|
|
||||||
print("\nSearch results:")
|
|
||||||
for index, result in enumerate(results, start=1):
|
|
||||||
print(f"{index}. {result.get('name', 'Unknown')}")
|
|
||||||
|
|
||||||
selection = input(
|
|
||||||
"\nSelect an option by number or press <enter> to cancel: "
|
|
||||||
).strip()
|
|
||||||
|
|
||||||
if selection == "":
|
|
||||||
return
|
|
||||||
|
|
||||||
try:
|
|
||||||
chosen_index = int(selection) - 1
|
|
||||||
except ValueError:
|
|
||||||
print("Invalid input. Returning to main menu.")
|
|
||||||
return
|
|
||||||
|
|
||||||
if not (0 <= chosen_index < len(results)):
|
|
||||||
print("Invalid selection. Returning to main menu.")
|
|
||||||
return
|
|
||||||
|
|
||||||
chosen = results[chosen_index]
|
|
||||||
serie = Serie(
|
|
||||||
chosen.get("link", ""),
|
|
||||||
chosen.get("name", "Unknown"),
|
|
||||||
"aniworld.to",
|
|
||||||
chosen.get("link", ""),
|
|
||||||
{},
|
|
||||||
)
|
|
||||||
self.series_app.List.add(serie)
|
|
||||||
self.series_app.refresh_series_list()
|
|
||||||
print(f"Added '{serie.name}' to the local catalogue.")
|
|
||||||
|
|
||||||
# ------------------------------------------------------------------
|
|
||||||
# Main loop
|
|
||||||
# ------------------------------------------------------------------
|
|
||||||
def run(self) -> None:
|
|
||||||
"""Run the interactive CLI loop."""
|
|
||||||
while True:
|
|
||||||
action = input(
|
|
||||||
"\nChoose action ('s' for search, 'i' for rescan, 'd' for download, 'q' to quit): "
|
|
||||||
).strip().lower()
|
|
||||||
|
|
||||||
if action == "s":
|
|
||||||
self.search_mode()
|
|
||||||
elif action == "i":
|
|
||||||
print("\nRescanning series...\n")
|
|
||||||
self.rescan()
|
|
||||||
elif action == "d":
|
|
||||||
selected_series = self.get_user_selection()
|
|
||||||
if selected_series:
|
|
||||||
self.download_series(selected_series)
|
|
||||||
elif action in {"q", "quit", "exit"}:
|
|
||||||
print("Goodbye!")
|
|
||||||
break
|
|
||||||
else:
|
|
||||||
print("Unknown command. Please choose 's', 'i', 'd', or 'q'.")
|
|
||||||
|
|
||||||
|
|
||||||
def configure_logging() -> None:
|
|
||||||
"""Set up a basic logging configuration for the CLI."""
|
|
||||||
logging.basicConfig(level=logging.INFO, format=LOG_FORMAT)
|
|
||||||
logging.getLogger("urllib3.connectionpool").setLevel(logging.ERROR)
|
|
||||||
logging.getLogger("charset_normalizer").setLevel(logging.ERROR)
|
|
||||||
|
|
||||||
|
|
||||||
def main() -> None:
|
|
||||||
"""Entry point for the CLI application."""
|
|
||||||
configure_logging()
|
|
||||||
|
|
||||||
default_dir = os.getenv("ANIME_DIRECTORY")
|
|
||||||
if not default_dir:
|
|
||||||
print(
|
|
||||||
"Environment variable ANIME_DIRECTORY is not set. Please configure it to the base anime directory."
|
|
||||||
)
|
|
||||||
return
|
|
||||||
|
|
||||||
app = SeriesCLI(default_dir)
|
|
||||||
app.run()
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
main()
|
|
||||||
@@ -3,25 +3,24 @@ SerieScanner - Scans directories for anime series and missing episodes.
 
 This module provides functionality to scan anime directories, identify
 missing episodes, and report progress through callback interfaces.
 
+Note:
+    This module is pure domain logic. Database operations are handled
+    by the service layer (AnimeService).
 """
+from __future__ import annotations
 
 import logging
 import os
 import re
 import traceback
 import uuid
-from typing import Callable, Iterable, Iterator, Optional
+from typing import Iterable, Iterator, Optional
 
+from events import Events
 
 from src.core.entities.series import Serie
 from src.core.exceptions.Exceptions import MatchNotFoundError, NoKeyFoundException
-from src.core.interfaces.callbacks import (
-    CallbackManager,
-    CompletionContext,
-    ErrorContext,
-    OperationType,
-    ProgressContext,
-    ProgressPhase,
-)
 from src.core.providers.base_provider import Loader
 
 logger = logging.getLogger(__name__)
@@ -34,13 +33,22 @@ class SerieScanner:
     Scans directories for anime series and identifies missing episodes.
 
     Supports progress callbacks for real-time scanning updates.
 
+    Note:
+        This class is pure domain logic. Database operations are handled
+        by the service layer (AnimeService). Scan results are stored
+        in keyDict and can be retrieved after scanning.
+
+    Example:
+        scanner = SerieScanner("/path/to/anime", loader)
+        scanner.scan()
+        # Results are in scanner.keyDict
     """
 
     def __init__(
         self,
         basePath: str,
         loader: Loader,
-        callback_manager: Optional[CallbackManager] = None
     ) -> None:
         """
         Initialize the SerieScanner.
@@ -67,18 +75,84 @@ class SerieScanner:
         self.directory: str = abs_path
         self.keyDict: dict[str, Serie] = {}
         self.loader: Loader = loader
-        self._callback_manager: CallbackManager = (
-            callback_manager or CallbackManager()
-        )
         self._current_operation_id: Optional[str] = None
+        self.events = Events()
+
+        self.events.on_progress = []
+        self.events.on_error = []
+        self.events.on_completion = []
+
         logger.info("Initialized SerieScanner with base path: %s", abs_path)
 
+    def _safe_call_event(self, event_handler, data: dict) -> None:
+        """Safely call an event handler if it exists.
+
+        Args:
+            event_handler: Event handler attribute (e.g., self.events.on_progress)
+            data: Data dictionary to pass to the event handler
+        """
+        if event_handler:
+            try:
+                # Event handlers are stored as lists, iterate over them
+                for handler in event_handler:
+                    handler(data)
+            except Exception as e:
+                logger.error("Error calling event handler: %s", e, exc_info=True)
+
-    @property
-    def callback_manager(self) -> CallbackManager:
-        """Get the callback manager instance."""
-        return self._callback_manager
+    def subscribe_on_progress(self, handler):
+        """
+        Subscribe a handler to an event.
+
+        Args:
+            handler: Callable to handle the event
+        """
+        if handler not in self.events.on_progress:
+            self.events.on_progress.append(handler)
+
+    def unsubscribe_on_progress(self, handler):
+        """
+        Unsubscribe a handler from an event.
+
+        Args:
+            handler: Callable to remove
+        """
+        if handler in self.events.on_progress:
+            self.events.on_progress.remove(handler)
+
+    def subscribe_on_error(self, handler):
+        """
+        Subscribe a handler to an event.
+
+        Args:
+            handler: Callable to handle the event
+        """
+        if handler not in self.events.on_error:
+            self.events.on_error.append(handler)
+
+    def unsubscribe_on_error(self, handler):
+        """
+        Unsubscribe a handler from an event.
+
+        Args:
+            handler: Callable to remove
+        """
+        if handler in self.events.on_error:
+            self.events.on_error.remove(handler)
+
+    def subscribe_on_completion(self, handler):
+        """
+        Subscribe a handler to an event.
+
+        Args:
+            handler: Callable to handle the event
+        """
+        if handler not in self.events.on_completion:
+            self.events.on_completion.append(handler)
+
+    def unsubscribe_on_completion(self, handler):
+        """
+        Unsubscribe a handler from an event.
+
+        Args:
+            handler: Callable to remove
+        """
+        if handler in self.events.on_completion:
+            self.events.on_completion.remove(handler)
+
     def reinit(self) -> None:
         """Reinitialize the series dictionary (keyed by serie.key)."""
         self.keyDict: dict[str, Serie] = {}
@@ -92,15 +166,12 @@ class SerieScanner:
         result = self.__find_mp4_files()
         return sum(1 for _ in result)
 
-    def scan(
-        self,
-        callback: Optional[Callable[[str, int], None]] = None
-    ) -> None:
+    def scan(self) -> None:
         """
         Scan directories for anime series and missing episodes.
 
-        Args:
-            callback: Optional legacy callback function (folder, count)
+        Results are stored in self.keyDict and can be retrieved after
+        scanning. Data files are also saved to disk for persistence.
 
         Raises:
             Exception: If scan fails critically
@@ -111,16 +182,16 @@
         logger.info("Starting scan for missing episodes")
 
         # Notify scan starting
-        self._callback_manager.notify_progress(
-            ProgressContext(
-                operation_type=OperationType.SCAN,
-                operation_id=self._current_operation_id,
-                phase=ProgressPhase.STARTING,
-                current=0,
-                total=0,
-                percentage=0.0,
-                message="Initializing scan"
-            )
+        self._safe_call_event(
+            self.events.on_progress,
+            {
+                "operation_id": self._current_operation_id,
+                "phase": "STARTING",
+                "current": 0,
+                "total": 0,
+                "percentage": 0.0,
+                "message": "Initializing scan"
+            }
         )
 
         try:
@@ -144,27 +215,20 @@
                 else:
                     percentage = 0.0
 
-                # Progress is surfaced both through the callback manager
-                # (for the web/UI layer) and, for compatibility, through a
-                # legacy callback that updates CLI progress bars.
                 # Notify progress
-                self._callback_manager.notify_progress(
-                    ProgressContext(
-                        operation_type=OperationType.SCAN,
-                        operation_id=self._current_operation_id,
-                        phase=ProgressPhase.IN_PROGRESS,
-                        current=counter,
-                        total=total_to_scan,
-                        percentage=percentage,
-                        message=f"Scanning: {folder}",
-                        details=f"Found {len(mp4_files)} episodes"
-                    )
+                self._safe_call_event(
+                    self.events.on_progress,
+                    {
+                        "operation_id": self._current_operation_id,
+                        "phase": "IN_PROGRESS",
+                        "current": counter,
+                        "total": total_to_scan,
+                        "percentage": percentage,
+                        "message": f"Scanning: {folder}",
+                        "details": f"Found {len(mp4_files)} episodes"
+                    }
                 )
 
-                # Call legacy callback if provided
-                if callback:
-                    callback(folder, counter)
-
                 serie = self.__read_data_from_file(folder)
                 if (
                     serie is not None
@@ -211,15 +275,15 @@
                     error_msg = f"Error processing folder '{folder}': {nkfe}"
                     logger.error(error_msg)
 
-                    self._callback_manager.notify_error(
-                        ErrorContext(
-                            operation_type=OperationType.SCAN,
-                            operation_id=self._current_operation_id,
-                            error=nkfe,
-                            message=error_msg,
-                            recoverable=True,
-                            metadata={"folder": folder, "key": None}
-                        )
+                    self._safe_call_event(
+                        self.events.on_error,
+                        {
+                            "operation_id": self._current_operation_id,
+                            "error": nkfe,
+                            "message": error_msg,
+                            "recoverable": True,
+                            "metadata": {"folder": folder, "key": None}
+                        }
                     )
                 except Exception as e:
                     # Log error and notify via callback
@@ -233,30 +297,30 @@
                         traceback.format_exc()
                     )
 
-                    self._callback_manager.notify_error(
-                        ErrorContext(
-                            operation_type=OperationType.SCAN,
-                            operation_id=self._current_operation_id,
-                            error=e,
-                            message=error_msg,
-                            recoverable=True,
-                            metadata={"folder": folder, "key": None}
-                        )
+                    self._safe_call_event(
+                        self.events.on_error,
+                        {
+                            "operation_id": self._current_operation_id,
+                            "error": e,
+                            "message": error_msg,
+                            "recoverable": True,
+                            "metadata": {"folder": folder, "key": None}
+                        }
                     )
                     continue
 
             # Notify scan completion
-            self._callback_manager.notify_completion(
-                CompletionContext(
-                    operation_type=OperationType.SCAN,
-                    operation_id=self._current_operation_id,
-                    success=True,
-                    message=f"Scan completed. Processed {counter} folders.",
-                    statistics={
+            self._safe_call_event(
+                self.events.on_completion,
+                {
+                    "operation_id": self._current_operation_id,
+                    "success": True,
+                    "message": f"Scan completed. Processed {counter} folders.",
+                    "statistics": {
                         "total_folders": counter,
                         "series_found": len(self.keyDict)
                     }
-                )
+                }
             )
 
             logger.info(
@@ -270,23 +334,23 @@
             error_msg = f"Critical scan error: {e}"
             logger.error("%s\n%s", error_msg, traceback.format_exc())
 
-            self._callback_manager.notify_error(
-                ErrorContext(
-                    operation_type=OperationType.SCAN,
-                    operation_id=self._current_operation_id,
-                    error=e,
-                    message=error_msg,
-                    recoverable=False
-                )
+            self._safe_call_event(
+                self.events.on_error,
+                {
+                    "operation_id": self._current_operation_id,
+                    "error": e,
+                    "message": error_msg,
+                    "recoverable": False
+                }
            )
 
-            self._callback_manager.notify_completion(
-                CompletionContext(
-                    operation_type=OperationType.SCAN,
-                    operation_id=self._current_operation_id,
-                    success=False,
-                    message=error_msg
-                )
+            self._safe_call_event(
+                self.events.on_completion,
+                {
+                    "operation_id": self._current_operation_id,
+                    "success": False,
+                    "message": error_msg
+                }
             )
 
             raise
@@ -306,16 +370,6 @@
                 has_files = True
         yield anime_name, mp4_files if has_files else []
 
-    def __remove_year(self, input_string: str) -> str:
-        """Remove year information from input string."""
-        cleaned_string = re.sub(r'\(\d{4}\)', '', input_string).strip()
-        logger.debug(
-            "Removed year from '%s' -> '%s'",
-            input_string,
-            cleaned_string
-        )
-        return cleaned_string
-
     def __read_data_from_file(self, folder_name: str) -> Optional[Serie]:
         """Read serie data from file or key file.
@@ -442,3 +496,185 @@
             episodes_dict[season] = missing_episodes
 
         return episodes_dict, "aniworld.to"
+
+    def scan_single_series(
+        self,
+        key: str,
+        folder: str,
+    ) -> dict[int, list[int]]:
+        """
+        Scan a single series for missing episodes.
+
+        This method performs a targeted scan for only the specified series,
+        without triggering a full library rescan. It fetches available
+        episodes from the provider and compares with local files.
+
+        Args:
+            key: The unique provider key for the series
+            folder: The filesystem folder name where the series is stored
+
+        Returns:
+            dict[int, list[int]]: Dictionary mapping season numbers to lists
+                of missing episode numbers. Empty dict if no missing episodes.
+
+        Raises:
+            ValueError: If key or folder is empty
+
+        Example:
+            >>> scanner = SerieScanner("/path/to/anime", loader)
+            >>> missing = scanner.scan_single_series(
+            ...     "attack-on-titan",
+            ...     "Attack on Titan"
+            ... )
+            >>> print(missing)
+            {1: [5, 6, 7], 2: [1, 2]}
+        """
+        if not key or not key.strip():
+            raise ValueError("Series key cannot be empty")
+        if not folder or not folder.strip():
+            raise ValueError("Series folder cannot be empty")
+
+        logger.info(
+            "Starting targeted scan for series: %s (folder: %s)",
+            key,
+            folder
+        )
+
+        # Generate unique operation ID for this targeted scan
+        operation_id = str(uuid.uuid4())
+        # Notify scan starting
+        self._safe_call_event(
+            self.events.on_progress,
+            {
+                "operation_id": operation_id,
+                "phase": "STARTING",
+                "current": 0,
+                "total": 1,
+                "percentage": 0.0,
+                "message": f"Scanning series: {folder}",
+                "details": f"Key: {key}"
+            }
+        )
+
+        try:
+            # Get the folder path
+            folder_path = os.path.join(self.directory, folder)
+
+            # Check if folder exists
+            if not os.path.isdir(folder_path):
+                logger.info(
+                    "Series folder does not exist yet: %s - "
+                    "will scan for available episodes from provider",
+                    folder_path
+                )
+                mp4_files: list[str] = []
+            else:
+                # Find existing MP4 files in the folder
+                mp4_files = []
+                for root, _, files in os.walk(folder_path):
+                    for file in files:
+                        if file.endswith(".mp4"):
+                            mp4_files.append(os.path.join(root, file))
+
+                logger.debug(
+                    "Found %d existing MP4 files in folder %s",
+                    len(mp4_files),
+                    folder
+                )
+
+            # Get missing episodes from provider
+            missing_episodes, site = self.__get_missing_episodes_and_season(
+                key, mp4_files
+            )
+
+            # Update progress
+            self._safe_call_event(
+                self.events.on_progress,
+                {
+                    "operation_id": operation_id,
+                    "phase": "IN_PROGRESS",
+                    "current": 1,
+                    "total": 1,
+                    "percentage": 100.0,
+                    "message": f"Scanned: {folder}",
+                    "details": f"Found {sum(len(eps) for eps in missing_episodes.values())} missing episodes"
+                }
+            )
+
+            # Create or update Serie in keyDict
+            if key in self.keyDict:
+                # Update existing serie
+                self.keyDict[key].episodeDict = missing_episodes
+                logger.debug(
+                    "Updated existing series %s with %d missing episodes",
+                    key,
+                    sum(len(eps) for eps in missing_episodes.values())
+                )
+            else:
+                # Create new serie entry
+                serie = Serie(
+                    key=key,
+                    name="",  # Will be populated by caller if needed
+                    site=site,
+                    folder=folder,
+                    episodeDict=missing_episodes
+                )
+                self.keyDict[key] = serie
+                logger.debug(
+                    "Created new series entry for %s with %d missing episodes",
+                    key,
+                    sum(len(eps) for eps in missing_episodes.values())
+                )
+
+            # Notify completion
+            self._safe_call_event(
+                self.events.on_completion,
+                {
+                    "operation_id": operation_id,
+                    "success": True,
+                    "message": f"Scan completed for {folder}",
+                    "statistics": {
+                        "missing_episodes": sum(
+                            len(eps) for eps in missing_episodes.values()
+                        ),
+                        "seasons_with_missing": len(missing_episodes)
+                    }
+                }
+            )
+
+            logger.info(
+                "Targeted scan completed for %s: %d missing episodes across %d seasons",
+                key,
+                sum(len(eps) for eps in missing_episodes.values()),
+                len(missing_episodes)
+            )
+
+            return missing_episodes
+
+        except Exception as e:
+            error_msg = f"Failed to scan series {key}: {e}"
+            logger.error(error_msg, exc_info=True)
+
+            # Notify error
+            self._safe_call_event(
+                self.events.on_error,
+                {
+                    "operation_id": operation_id,
+                    "error": e,
+                    "message": error_msg,
+                    "recoverable": True,
+                    "metadata": {"key": key, "folder": folder}
+                }
+            )
+            # Notify completion with failure
+            self._safe_call_event(
+                self.events.on_completion,
+                {
+                    "operation_id": operation_id,
+                    "success": False,
+                    "message": error_msg
+                }
+            )
+            # Return empty dict on error (scan failed but not critical)
+            return {}
@@ -4,10 +4,15 @@ SeriesApp - Core application logic for anime series management.
 This module provides the main application interface for searching,
 downloading, and managing anime series with support for async callbacks,
 progress reporting, and error handling.
+
+Note:
+    This module is pure domain logic with no database dependencies.
+    Database operations are handled by the service layer (AnimeService).
 """
 
 import asyncio
 import logging
+from concurrent.futures import ThreadPoolExecutor
 from typing import Any, Dict, List, Optional
 
 from events import Events
@ -119,6 +124,10 @@ class SeriesApp:
|
|||||||
- Managing series lists
|
- Managing series lists
|
||||||
|
|
||||||
Supports async callbacks for progress reporting.
|
Supports async callbacks for progress reporting.
|
||||||
|
|
||||||
|
Note:
|
||||||
|
This class is now pure domain logic with no database dependencies.
|
||||||
|
Database operations are handled by the service layer (AnimeService).
|
||||||
|
|
||||||
Events:
|
Events:
|
||||||
download_status: Raised when download status changes.
|
download_status: Raised when download status changes.
|
||||||
@ -140,15 +149,19 @@ class SeriesApp:
|
|||||||
|
|
||||||
self.directory_to_search = directory_to_search
|
self.directory_to_search = directory_to_search
|
||||||
|
|
||||||
|
# Initialize thread pool executor
|
||||||
|
self.executor = ThreadPoolExecutor(max_workers=3)
|
||||||
|
|
||||||
# Initialize events
|
# Initialize events
|
||||||
self._events = Events()
|
self._events = Events()
|
||||||
self._events.download_status = None
|
|
||||||
self._events.scan_status = None
|
|
||||||
|
|
||||||
self.loaders = Loaders()
|
self.loaders = Loaders()
|
||||||
self.loader = self.loaders.GetLoader(key="aniworld.to")
|
self.loader = self.loaders.GetLoader(key="aniworld.to")
|
||||||
self.serie_scanner = SerieScanner(directory_to_search, self.loader)
|
self.serie_scanner = SerieScanner(
|
||||||
|
directory_to_search, self.loader
|
||||||
|
)
|
||||||
self.list = SerieList(self.directory_to_search)
|
self.list = SerieList(self.directory_to_search)
|
||||||
|
self.series_list: List[Any] = []
|
||||||
# Synchronous init used during constructor to avoid awaiting
|
# Synchronous init used during constructor to avoid awaiting
|
||||||
# in __init__
|
# in __init__
|
||||||
self._init_list_sync()
|
self._init_list_sync()
|
||||||
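The constructor now owns a bounded `ThreadPoolExecutor(max_workers=3)`, and the async methods below switch from `asyncio.to_thread` to `loop.run_in_executor` on that pool, so at most three blocking scan/search/download calls run at once instead of competing on the interpreter's shared default pool. A self-contained sketch of the pattern (names here are illustrative, not the app's):

```python
import asyncio
import time
from concurrent.futures import ThreadPoolExecutor

executor = ThreadPoolExecutor(max_workers=3)  # bounded, app-owned pool

def blocking_work(n: int) -> int:
    time.sleep(0.1)  # stands in for filesystem or network I/O
    return n * n

async def main() -> None:
    loop = asyncio.get_running_loop()
    # At most three of these run at once; the event loop stays responsive.
    results = await asyncio.gather(
        *(loop.run_in_executor(executor, blocking_work, n) for n in range(6))
    )
    print(results)
    executor.shutdown(wait=True)  # mirrors SeriesApp.shutdown() below

asyncio.run(main())
```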
```diff
@@ -187,6 +200,26 @@ class SeriesApp:
     def scan_status(self, value):
         """Set scan_status event handler."""
         self._events.scan_status = value
 
+    def load_series_from_list(self, series: list) -> None:
+        """
+        Load series into the in-memory list.
+
+        This method is called by the service layer after loading
+        series from the database.
+
+        Args:
+            series: List of Serie objects to load
+        """
+        self.list.keyDict.clear()
+        for serie in series:
+            self.list.keyDict[serie.key] = serie
+        self.series_list = self.list.GetMissingEpisode()
+        logger.debug(
+            "Loaded %d series with %d having missing episodes",
+            len(series),
+            len(self.series_list)
+        )
+
     def _init_list_sync(self) -> None:
         """Synchronous initialization helper for constructor."""
@@ -198,7 +231,9 @@ class SeriesApp:
 
     async def _init_list(self) -> None:
         """Initialize the series list with missing episodes (async)."""
-        self.series_list = await asyncio.to_thread(
+        loop = asyncio.get_running_loop()
+        self.series_list = await loop.run_in_executor(
+            self.executor,
            self.list.GetMissingEpisode
         )
         logger.debug(
@@ -220,7 +255,12 @@ class SeriesApp:
             RuntimeError: If search fails
         """
         logger.info("Searching for: %s", words)
-        results = await asyncio.to_thread(self.loader.search, words)
+        loop = asyncio.get_running_loop()
+        results = await loop.run_in_executor(
+            self.executor,
+            self.loader.search,
+            words
+        )
         logger.info("Found %d results", len(results))
         return results
 
```
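`load_series_from_list` is the new seam between the service layer and this pure-domain class. A sketch of the database-to-domain handoff; `rows` and its field names are assumptions for illustration, while the `Serie` constructor shape is taken from this changeset:

```python
from src.core.entities.series import Serie  # path shown in this changeset

def hydrate_series_app(series_app, rows) -> None:
    """Convert DB rows to Serie objects and push them into SeriesApp."""
    series = [
        Serie(
            key=row["key"],        # hypothetical row fields
            name=row["name"],
            site=row["site"],
            folder=row["folder"],
            episodeDict={},        # missing episodes are recomputed on scan
        )
        for row in rows
    ]
    series_app.load_series_from_list(series)
```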
```diff
@@ -255,6 +295,7 @@ class SeriesApp:
             lookups. The 'serie_folder' parameter is only used for
             filesystem operations.
         """
+
         logger.info(
             "Starting download: %s (key: %s) S%02dE%02d",
             serie_folder,
@@ -277,9 +318,10 @@ class SeriesApp:
         )
 
         try:
-            def download_callback(progress_info):
+            def download_progress_handler(progress_info):
+                """Handle download progress events from loader."""
                 logger.debug(
-                    "wrapped_callback called with: %s", progress_info
+                    "download_progress_handler called with: %s", progress_info
                 )
 
                 downloaded = progress_info.get('downloaded_bytes', 0)
@@ -309,17 +351,28 @@ class SeriesApp:
                     item_id=item_id,
                 )
             )
-            # Perform download in thread to avoid blocking event loop
-            download_success = await asyncio.to_thread(
-                self.loader.download,
-                self.directory_to_search,
-                serie_folder,
-                season,
-                episode,
-                key,
-                language,
-                download_callback
-            )
+
+            # Subscribe to loader's download progress events
+            self.loader.subscribe_download_progress(download_progress_handler)
+
+            try:
+                # Perform download in thread to avoid blocking event loop
+                loop = asyncio.get_running_loop()
+                download_success = await loop.run_in_executor(
+                    self.executor,
+                    self.loader.download,
+                    self.directory_to_search,
+                    serie_folder,
+                    season,
+                    episode,
+                    key,
+                    language
+                )
+            finally:
+                # Always unsubscribe after download completes or fails
+                self.loader.unsubscribe_download_progress(
+                    download_progress_handler
+                )
 
             if download_success:
                 logger.info(
@@ -367,7 +420,30 @@ class SeriesApp:
 
             return download_success
 
-        except Exception as e:
+        except InterruptedError:
+            # Download was cancelled - propagate the cancellation
+            logger.info(
+                "Download cancelled: %s (key: %s) S%02dE%02d",
+                serie_folder,
+                key,
+                season,
+                episode,
+            )
+            # Fire download cancelled event
+            self._events.download_status(
+                DownloadStatusEventArgs(
+                    serie_folder=serie_folder,
+                    key=key,
+                    season=season,
+                    episode=episode,
+                    status="cancelled",
+                    message="Download cancelled by user",
+                    item_id=item_id,
+                )
+            )
+            raise  # Re-raise to propagate cancellation
+
+        except Exception as e:  # pylint: disable=broad-except
             logger.error(
                 "Download error: %s (key: %s) S%02dE%02d - %s",
                 serie_folder,
```
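The download path now subscribes a handler, runs the blocking call in the executor, and unsubscribes in a `finally`. The same shape reappears in `rescan` below; if it spreads further, a small context manager would centralize the guarantee. This is a refactoring sketch, not code from the changeset:

```python
from contextlib import contextmanager

@contextmanager
def subscribed(subscribe, unsubscribe, handler):
    """Attach a handler for the duration of a block, detaching on any exit."""
    subscribe(handler)
    try:
        yield
    finally:
        unsubscribe(handler)

# Usage mirroring the download path above:
# with subscribed(loader.subscribe_download_progress,
#                 loader.unsubscribe_download_progress,
#                 download_progress_handler):
#     download_success = await loop.run_in_executor(...)
```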
```diff
@@ -394,23 +470,40 @@ class SeriesApp:
 
             return False
 
-    async def rescan(self) -> int:
+    async def rescan(self) -> list:
         """
         Rescan directory for missing episodes (async).
 
+        This method performs a file-based scan and returns the results.
+        Database persistence is handled by the service layer (AnimeService).
+
         Returns:
-            Number of series with missing episodes after rescan.
+            List of Serie objects found during scan with their
+            missing episodes.
+
+        Note:
+            This method no longer saves to database directly. The returned
+            list should be persisted by the caller (AnimeService).
         """
         logger.info("Starting directory rescan")
 
+        total_to_scan = 0
+
         try:
             # Get total items to scan
-            total_to_scan = await asyncio.to_thread(
+            logger.info("Getting total items to scan...")
+            loop = asyncio.get_running_loop()
+            total_to_scan = await loop.run_in_executor(
+                self.executor,
                 self.serie_scanner.get_total_to_scan
             )
             logger.info("Total folders to scan: %d", total_to_scan)
 
             # Fire scan started event
+            logger.info(
+                "Firing scan_status 'started' event, handler=%s",
+                self._events.scan_status
+            )
             self._events.scan_status(
                 ScanStatusEventArgs(
                     current=0,
@@ -423,37 +516,60 @@ class SeriesApp:
                 )
             )
 
             # Reinitialize scanner
-            await asyncio.to_thread(self.serie_scanner.reinit)
-
-            def scan_callback(folder: str, current: int):
-                # Calculate progress
-                if total_to_scan > 0:
-                    progress = current / total_to_scan
-                else:
-                    progress = 0.0
+            await loop.run_in_executor(
+                self.executor,
+                self.serie_scanner.reinit
+            )
 
+            def scan_progress_handler(progress_data):
+                """Handle scan progress events from scanner."""
                 # Fire scan progress event
+                message = progress_data.get('message', '')
+                folder = message.replace('Scanning: ', '')
                 self._events.scan_status(
                     ScanStatusEventArgs(
-                        current=current,
-                        total=total_to_scan,
+                        current=progress_data.get('current', 0),
+                        total=progress_data.get('total', total_to_scan),
                         folder=folder,
                         status="progress",
-                        progress=progress,
-                        message=f"Scanning: {folder}",
+                        progress=(
+                            progress_data.get('percentage', 0.0) / 100.0
+                        ),
+                        message=message,
                     )
                 )
 
-            # Perform scan
-            await asyncio.to_thread(self.serie_scanner.scan, scan_callback)
+            # Subscribe to scanner's progress events
+            self.serie_scanner.subscribe_on_progress(scan_progress_handler)
+
+            try:
+                # Perform scan (file-based, returns results in scanner.keyDict)
+                await loop.run_in_executor(
+                    self.executor,
+                    self.serie_scanner.scan
+                )
+            finally:
+                # Always unsubscribe after scan completes or fails
+                self.serie_scanner.unsubscribe_on_progress(
+                    scan_progress_handler
+                )
+
+            # Get scanned series from scanner
+            scanned_series = list(self.serie_scanner.keyDict.values())
 
-            # Reinitialize list
-            self.list = SerieList(self.directory_to_search)
-            await self._init_list()
+            # Update in-memory list with scan results
+            self.list.keyDict.clear()
+            for serie in scanned_series:
+                self.list.keyDict[serie.key] = serie
+            self.series_list = self.list.GetMissingEpisode()
 
             logger.info("Directory rescan completed successfully")
 
             # Fire scan completed event
+            logger.info(
+                "Firing scan_status 'completed' event, handler=%s",
+                self._events.scan_status
+            )
             self._events.scan_status(
                 ScanStatusEventArgs(
                     current=total_to_scan,
@@ -468,7 +584,7 @@ class SeriesApp:
                 )
             )
 
-            return len(self.series_list)
+            return scanned_series
 
         except InterruptedError:
             logger.warning("Scan cancelled by user")
@@ -477,7 +593,7 @@ class SeriesApp:
             self._events.scan_status(
                 ScanStatusEventArgs(
                     current=0,
-                    total=total_to_scan if 'total_to_scan' in locals() else 0,
+                    total=total_to_scan,
                     folder="",
                     status="cancelled",
                     message="Scan cancelled by user",
@@ -492,7 +608,7 @@ class SeriesApp:
             self._events.scan_status(
                 ScanStatusEventArgs(
                     current=0,
-                    total=total_to_scan if 'total_to_scan' in locals() else 0,
+                    total=total_to_scan,
                     folder="",
                     status="failed",
                     error=e,
```
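`rescan` now hands its `Serie` list back instead of persisting, per the new docstring. A sketch of the caller side; `save_series` is a hypothetical method name standing in for whatever persistence call `AnimeService` actually exposes:

```python
async def rescan_and_persist(series_app, anime_service) -> int:
    """Run the file-based rescan, then persist results via the service."""
    scanned_series = await series_app.rescan()
    for serie in scanned_series:
        # Hypothetical persistence call; the changeset only states that
        # the caller (AnimeService) is responsible for saving.
        await anime_service.save_series(serie)
    return len(scanned_series)
```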
```diff
@@ -536,3 +652,66 @@ class SeriesApp:
         looks up series by their unique key, not by folder name.
         """
         return self.list.get_by_key(key)
+
+    def get_all_series_from_data_files(self) -> List[Serie]:
+        """
+        Get all series from data files in the anime directory.
+
+        Scans the directory_to_search for all 'data' files and loads
+        the Serie metadata from each file. This method is synchronous
+        and can be wrapped with asyncio.to_thread if needed for async
+        contexts.
+
+        Returns:
+            List of Serie objects found in data files. Returns an empty
+            list if no data files are found or if the directory doesn't
+            exist.
+
+        Example:
+            series_app = SeriesApp("/path/to/anime")
+            all_series = series_app.get_all_series_from_data_files()
+            for serie in all_series:
+                print(f"Found: {serie.name} (key={serie.key})")
+        """
+        logger.info(
+            "Scanning for data files in directory: %s",
+            self.directory_to_search
+        )
+
+        # Create a fresh SerieList instance for file-based loading
+        # This ensures we get all series from data files without
+        # interfering with the main instance's state
+        try:
+            temp_list = SerieList(
+                self.directory_to_search,
+                skip_load=False  # Allow automatic loading
+            )
+        except (OSError, ValueError) as e:
+            logger.error(
+                "Failed to scan directory for data files: %s",
+                str(e),
+                exc_info=True
+            )
+            return []
+
+        # Get all series from the temporary list
+        all_series = temp_list.get_all()
+
+        logger.info(
+            "Found %d series from data files in %s",
+            len(all_series),
+            self.directory_to_search
+        )
+
+        return all_series
+
+    def shutdown(self) -> None:
+        """
+        Shutdown the thread pool executor.
+
+        Should be called when the SeriesApp instance is no longer needed
+        to properly clean up resources.
+        """
+        if hasattr(self, 'executor'):
+            self.executor.shutdown(wait=True)
+            logger.info("ThreadPoolExecutor shut down successfully")
```
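As the docstring notes, `get_all_series_from_data_files` is synchronous and can be wrapped with `asyncio.to_thread` in async contexts. A short sketch of that wrapping plus the matching `shutdown` call:

```python
import asyncio

async def load_and_close(series_app) -> None:
    # Wrap the synchronous directory walk so the event loop is not blocked.
    all_series = await asyncio.to_thread(
        series_app.get_all_series_from_data_files
    )
    for serie in all_series:
        print(f"Found: {serie.name} (key={serie.key})")
    series_app.shutdown()  # release the ThreadPoolExecutor
```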
```diff
@@ -1,4 +1,14 @@
-"""Utilities for loading and managing stored anime series metadata."""
+"""Utilities for loading and managing stored anime series metadata.
+
+This module provides the SerieList class for managing collections of anime
+series metadata. It uses file-based storage only.
+
+Note:
+    This module is part of the core domain layer and has no database
+    dependencies. All database operations are handled by the service layer.
+"""
+
+from __future__ import annotations
 
 import logging
 import os
@@ -8,6 +18,8 @@ from typing import Dict, Iterable, List, Optional
 
 from src.core.entities.series import Serie
 
+logger = logging.getLogger(__name__)
+
 
 class SerieList:
     """
@@ -15,34 +27,84 @@ class SerieList:
 
     Series are identified by their unique 'key' (provider identifier).
     The 'folder' is metadata only and not used for lookups.
+
+    This class manages in-memory series data loaded from filesystem.
+    It has no database dependencies - all persistence is handled by
+    the service layer.
+
+    Example:
+        # File-based mode
+        serie_list = SerieList("/path/to/anime")
+        series = serie_list.get_all()
+
+    Attributes:
+        directory: Path to the anime directory
+        keyDict: Internal dictionary mapping serie.key to Serie objects
     """
 
-    def __init__(self, base_path: str) -> None:
+    def __init__(
+        self,
+        base_path: str,
+        skip_load: bool = False
+    ) -> None:
+        """Initialize the SerieList.
+
+        Args:
+            base_path: Path to the anime directory
+            skip_load: If True, skip automatic loading of series from files.
+                Useful when planning to load from database instead.
+        """
         self.directory: str = base_path
         # Internal storage using serie.key as the dictionary key
         self.keyDict: Dict[str, Serie] = {}
-        self.load_series()
-
-    def add(self, serie: Serie) -> None:
-        """
-        Persist a new series if it is not already present.
-
-        Uses serie.key for identification. The serie.folder is used for
-        filesystem operations only.
+
+        # Only auto-load from files if not skipping
+        if not skip_load:
+            self.load_series()
+
+    def add(self, serie: Serie, use_sanitized_folder: bool = True) -> str:
+        """
+        Persist a new series if it is not already present (file-based mode).
+
+        Uses serie.key for identification. Creates the filesystem folder
+        using either the sanitized display name (default) or the existing
+        folder property.
 
         Args:
             serie: The Serie instance to add
+            use_sanitized_folder: If True (default), use serie.sanitized_folder
+                for the filesystem folder name based on display name.
+                If False, use serie.folder as-is for backward compatibility.
+
+        Returns:
+            str: The folder path that was created/used
+
+        Note:
+            This method creates data files on disk. For database storage,
+            use add_to_db() instead.
         """
         if self.contains(serie.key):
-            return
+            # Return existing folder path
+            existing = self.keyDict[serie.key]
+            return os.path.join(self.directory, existing.folder)
 
-        data_path = os.path.join(self.directory, serie.folder, "data")
-        anime_path = os.path.join(self.directory, serie.folder)
+        # Determine folder name to use
+        if use_sanitized_folder:
+            folder_name = serie.sanitized_folder
+            # Update the serie's folder property to match what we create
+            serie.folder = folder_name
+        else:
+            folder_name = serie.folder
+
+        data_path = os.path.join(self.directory, folder_name, "data")
+        anime_path = os.path.join(self.directory, folder_name)
         os.makedirs(anime_path, exist_ok=True)
         if not os.path.isfile(data_path):
             serie.save_to_file(data_path)
         # Store by key, not folder
         self.keyDict[serie.key] = serie
+
+        return anime_path
 
     def contains(self, key: str) -> bool:
         """
```
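A usage sketch of the reworked `SerieList` surface: `skip_load=True` for database-driven startup, and `add()` now returning the folder path it created. The `SerieList` import path is an assumption; the `Serie` constructor shape is taken from this changeset:

```python
from src.core.entities.series import Serie   # path shown in this changeset
from src.core.serie_list import SerieList    # assumed module path

# Database-driven startup: skip the automatic file scan.
serie_list = SerieList("/path/to/anime", skip_load=True)

serie = Serie(key="attack-on-titan", name="Attack on Titan: Final",
              site="aniworld.to", folder="", episodeDict={})
# With use_sanitized_folder=True the folder name is derived from the
# display name and the created path is returned.
anime_path = serie_list.add(serie, use_sanitized_folder=True)
print(anime_path)
```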
```diff
@@ -1,4 +1,7 @@
 import json
+import warnings
+
+from src.server.utils.filesystem import sanitize_folder_name
 
 
 class Serie:
@@ -126,6 +129,35 @@ class Serie:
     def episodeDict(self, value: dict[int, list[int]]):
         self._episodeDict = value
 
+    @property
+    def sanitized_folder(self) -> str:
+        """
+        Get a filesystem-safe folder name derived from the display name.
+
+        This property returns a sanitized version of the series name
+        suitable for use as a filesystem folder name. It removes/replaces
+        characters that are invalid for filesystems while preserving
+        Unicode characters.
+
+        Use this property when creating folders for the series on disk.
+        The `folder` property stores the actual folder name used.
+
+        Returns:
+            str: Filesystem-safe folder name based on display name
+
+        Example:
+            >>> serie = Serie("attack-on-titan", "Attack on Titan: Final", ...)
+            >>> serie.sanitized_folder
+            'Attack on Titan Final'
+        """
+        # Use name if available, fall back to folder, then key
+        name_to_sanitize = self._name or self._folder or self._key
+        try:
+            return sanitize_folder_name(name_to_sanitize)
+        except ValueError:
+            # Fallback to key if name cannot be sanitized
+            return sanitize_folder_name(self._key)
+
     def to_dict(self):
         """Convert Serie object to dictionary for JSON serialization."""
         return {
@@ -154,13 +186,46 @@ class Serie:
         )
 
     def save_to_file(self, filename: str):
-        """Save Serie object to JSON file."""
+        """Save Serie object to JSON file.
+
+        .. deprecated::
+            File-based storage is deprecated. Use database storage via
+            `AnimeSeriesService.create()` instead. This method will be
+            removed in v3.0.0.
+
+        Args:
+            filename: Path to save the JSON file
+        """
+        warnings.warn(
+            "save_to_file() is deprecated and will be removed in v3.0.0. "
+            "Use database storage via AnimeSeriesService.create() instead.",
+            DeprecationWarning,
+            stacklevel=2
+        )
         with open(filename, "w", encoding="utf-8") as file:
             json.dump(self.to_dict(), file, indent=4)
 
     @classmethod
     def load_from_file(cls, filename: str) -> "Serie":
-        """Load Serie object from JSON file."""
+        """Load Serie object from JSON file.
+
+        .. deprecated::
+            File-based storage is deprecated. Use database storage via
+            `AnimeSeriesService.get_by_key()` instead. This method will be
+            removed in v3.0.0.
+
+        Args:
+            filename: Path to load the JSON file from
+
+        Returns:
+            Serie: The loaded Serie object
+        """
+        warnings.warn(
+            "load_from_file() is deprecated and will be removed in v3.0.0. "
+            "Use database storage via AnimeSeriesService instead.",
+            DeprecationWarning,
+            stacklevel=2
+        )
         with open(filename, "r", encoding="utf-8") as file:
             data = json.load(file)
         return cls.from_dict(data)
```
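Callers that must keep using the deprecated file methods during the migration (as `series_app.list.add` does later in this changeset) can acknowledge the `DeprecationWarning` locally rather than silencing it process-wide. A minimal sketch:

```python
import warnings

def save_quietly(serie, data_path: str) -> None:
    # Acknowledge the deprecation without polluting logs; remove once
    # storage has moved to AnimeSeriesService per the docstrings above.
    with warnings.catch_warnings():
        warnings.simplefilter("ignore", DeprecationWarning)
        serie.save_to_file(data_path)
```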
```diff
@@ -1,18 +1,22 @@
 
 import html
 import json
 import logging
 import os
 import re
 import shutil
+import threading
 from pathlib import Path
 from urllib.parse import quote
 
 import requests
 from bs4 import BeautifulSoup
+from events import Events
 from fake_useragent import UserAgent
 from requests.adapters import HTTPAdapter
 from urllib3.util.retry import Retry
 from yt_dlp import YoutubeDL
+from yt_dlp.utils import DownloadCancelled
 
 from ..interfaces.providers import Providers
 from .base_provider import Loader
@@ -71,6 +75,9 @@ class AniworldLoader(Loader):
         self.ANIWORLD_TO = "https://aniworld.to"
         self.session = requests.Session()
 
+        # Cancellation flag for graceful shutdown
+        self._cancel_flag = threading.Event()
+
         # Configure retries with backoff
         retries = Retry(
             total=5,  # Number of retries
@@ -91,6 +98,23 @@ class AniworldLoader(Loader):
         self._EpisodeHTMLDict = {}
         self.Providers = Providers()
 
+        # Events: download_progress is triggered with progress dict
+        self.events = Events()
+
+    def subscribe_download_progress(self, handler):
+        """Subscribe a handler to the download_progress event.
+
+        Args:
+            handler: Callable to be called with progress dict.
+        """
+        self.events.download_progress += handler
+
+    def unsubscribe_download_progress(self, handler):
+        """Unsubscribe a handler from the download_progress event.
+
+        Args:
+            handler: Callable previously subscribed.
+        """
+        self.events.download_progress -= handler
+
     def clear_cache(self):
         """Clear the cached HTML data."""
         logging.debug("Clearing HTML cache")
@@ -196,7 +220,7 @@ class AniworldLoader(Loader):
 
         is_available = language_code in languages
         logging.debug(f"Available languages for S{season:02}E{episode:03}: {languages}, requested: {language_code}, available: {is_available}")
         return is_available
 
     def download(
         self,
@@ -205,8 +229,7 @@ class AniworldLoader(Loader):
         season: int,
         episode: int,
         key: str,
-        language: str = "German Dub",
-        progress_callback=None
+        language: str = "German Dub"
     ) -> bool:
         """Download episode to specified directory.
 
@@ -219,8 +242,6 @@ class AniworldLoader(Loader):
             key: Series unique identifier from provider (used for
                 identification and API calls)
             language: Audio language preference (default: German Dub)
-            progress_callback: Optional callback for download progress
-
 
         Returns:
             bool: True if download succeeded, False otherwise
         """
@@ -266,6 +287,16 @@ class AniworldLoader(Loader):
                 season, episode, key, language
             )
             logging.debug("Direct link obtained from provider")
+
+            cancel_flag = self._cancel_flag
+
+            def events_progress_hook(d):
+                if cancel_flag.is_set():
+                    logging.info("Cancellation detected in progress hook")
+                    raise DownloadCancelled("Download cancelled by user")
+                # Fire the event for progress
+                self.events.download_progress(d)
+
             ydl_opts = {
                 'fragment_retries': float('inf'),
                 'outtmpl': temp_path,
@@ -273,30 +304,18 @@ class AniworldLoader(Loader):
                 'no_warnings': True,
                 'progress_with_newline': False,
                 'nocheckcertificate': True,
+                'progress_hooks': [events_progress_hook],
             }
 
             if header:
                 ydl_opts['http_headers'] = header
                 logging.debug("Using custom headers for download")
-            if progress_callback:
-                # Wrap the callback to add logging
-                def logged_progress_callback(d):
-                    logging.debug(
-                        f"YT-DLP progress: status={d.get('status')}, "
-                        f"downloaded={d.get('downloaded_bytes')}, "
-                        f"total={d.get('total_bytes')}, "
-                        f"speed={d.get('speed')}"
-                    )
-                    progress_callback(d)
-
-                ydl_opts['progress_hooks'] = [logged_progress_callback]
-                logging.debug("Progress callback registered with YT-DLP")
 
             try:
                 logging.debug("Starting YoutubeDL download")
                 logging.debug(f"Download link: {link[:100]}...")
                 logging.debug(f"YDL options: {ydl_opts}")
 
                 with YoutubeDL(ydl_opts) as ydl:
                     info = ydl.extract_info(link, download=True)
                     logging.debug(
@@ -325,17 +344,15 @@ class AniworldLoader(Loader):
                     f"Broken pipe error with provider {provider}: {e}. "
                     f"This usually means the stream connection was closed."
                 )
-                # Try next provider if available
                 continue
             except Exception as e:
                 logging.error(
                     f"YoutubeDL download failed with provider {provider}: "
                     f"{type(e).__name__}: {e}"
                 )
-                # Try next provider if available
                 continue
             break
 
         # If we get here, all providers failed
         logging.error("All download providers failed")
         self.clear_cache()
```
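The loader's new hook combines cancellation (raise `DownloadCancelled` when a `threading.Event` is set) with fan-out through an `events.Events` instance. The same pattern in isolation, with a placeholder URL and the actual download left commented out:

```python
import threading
from events import Events
from yt_dlp import YoutubeDL
from yt_dlp.utils import DownloadCancelled

cancel_flag = threading.Event()
events = Events()
events.download_progress += lambda d: print(d.get("status"),
                                            d.get("downloaded_bytes"))

def progress_hook(d):
    if cancel_flag.is_set():
        raise DownloadCancelled("Download cancelled by user")
    events.download_progress(d)  # fan out to all subscribers

ydl_opts = {"outtmpl": "episode.%(ext)s", "progress_hooks": [progress_hook]}
# with YoutubeDL(ydl_opts) as ydl:
#     ydl.extract_info("https://example.com/stream", download=True)
```

Setting `cancel_flag.set()` from another thread aborts the transfer on the next progress callback, which is what lets the app's `InterruptedError`/cancelled-event path in `SeriesApp.download` fire.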
```diff
@@ -1,9 +1,21 @@
 from abc import ABC, abstractmethod
-from typing import Any, Callable, Dict, List, Optional
+from typing import Any, Dict, List
 
 
 class Loader(ABC):
     """Abstract base class for anime data loaders/providers."""
 
+    @abstractmethod
+    def subscribe_download_progress(self, handler):
+        """Subscribe a handler to the download_progress event.
+
+        Args:
+            handler: Callable to be called with progress dict.
+        """
+
+    @abstractmethod
+    def unsubscribe_download_progress(self, handler):
+        """Unsubscribe a handler from the download_progress event.
+
+        Args:
+            handler: Callable previously subscribed.
+        """
+
     @abstractmethod
     def search(self, word: str) -> List[Dict[str, Any]]:
@@ -44,8 +56,7 @@ class Loader(ABC):
         season: int,
         episode: int,
         key: str,
-        language: str = "German Dub",
-        progress_callback: Optional[Callable[[str, Dict], None]] = None,
+        language: str = "German Dub"
     ) -> bool:
         """Download episode to specified directory.
 
@@ -56,8 +67,6 @@ class Loader(ABC):
         episode: Episode number within season
         key: Unique series identifier/key
         language: Language version to download (default: German Dub)
-        progress_callback: Optional callback for progress updates
-            called with (event_type: str, data: Dict)
 
         Returns:
             True if download successful, False otherwise
```
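Concrete providers now have to implement the two event hooks as well. A minimal conforming stub; the absolute import path is assumed, and if the real ABC declares further abstract members not visible in this diff they would need stubs too:

```python
from typing import Any, Dict, List

from events import Events
from src.core.providers.base_provider import Loader  # module path assumed

class DummyLoader(Loader):
    """Stub provider; only members visible in this diff are implemented."""

    def __init__(self) -> None:
        self.events = Events()

    def subscribe_download_progress(self, handler):
        self.events.download_progress += handler

    def unsubscribe_download_progress(self, handler):
        self.events.download_progress -= handler

    def search(self, word: str) -> List[Dict[str, Any]]:
        return []  # no results in the stub

    def download(self, directory, serie_folder, season, episode,
                 key, language="German Dub") -> bool:
        # Fire a single synthetic progress event, then report success.
        self.events.download_progress({"status": "finished"})
        return True
```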
```diff
@@ -229,37 +229,6 @@ class DatabaseIntegrityChecker:
             logger.warning(msg)
             issues_found += count
 
-        # Check for invalid progress percentages
-        stmt = select(DownloadQueueItem).where(
-            (DownloadQueueItem.progress < 0) |
-            (DownloadQueueItem.progress > 100)
-        )
-        invalid_progress = self.session.execute(stmt).scalars().all()
-
-        if invalid_progress:
-            count = len(invalid_progress)
-            msg = (
-                f"Found {count} queue items with invalid progress "
-                f"percentages"
-            )
-            self.issues.append(msg)
-            logger.warning(msg)
-            issues_found += count
-
-        # Check for queue items with invalid status
-        valid_statuses = {'pending', 'downloading', 'completed', 'failed'}
-        stmt = select(DownloadQueueItem).where(
-            ~DownloadQueueItem.status.in_(valid_statuses)
-        )
-        invalid_status = self.session.execute(stmt).scalars().all()
-
-        if invalid_status:
-            count = len(invalid_status)
-            msg = f"Found {count} queue items with invalid status"
-            self.issues.append(msg)
-            logger.warning(msg)
-            issues_found += count
-
         if issues_found == 0:
             logger.info("No data consistency issues found")
```
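The removed checks (progress bounded to 0-100, status limited to four values) could instead be enforced by the schema itself. This changeset does not show where, or whether, they were reintroduced, so treat the following SQLAlchemy sketch as one possible replacement rather than what the repository actually does:

```python
from sqlalchemy import CheckConstraint, Column, Float, Integer, String
from sqlalchemy.orm import declarative_base

Base = declarative_base()

class DownloadQueueItem(Base):
    __tablename__ = "download_queue_items"
    id = Column(Integer, primary_key=True)
    progress = Column(Float, nullable=False, default=0.0)
    status = Column(String, nullable=False, default="pending")
    __table_args__ = (
        # Same bounds the integrity checker used to verify at runtime.
        CheckConstraint("progress >= 0 AND progress <= 100",
                        name="ck_queue_progress_range"),
        CheckConstraint(
            "status IN ('pending', 'downloading', 'completed', 'failed')",
            name="ck_queue_status_valid",
        ),
    )
```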
```diff
@@ -1,17 +1,28 @@
 import logging
+import os
 import warnings
 from typing import Any, List, Optional
 
 from fastapi import APIRouter, Depends, HTTPException, status
 from pydantic import BaseModel, Field
+from sqlalchemy.ext.asyncio import AsyncSession
 
 from src.core.entities.series import Serie
+from src.server.database.service import AnimeSeriesService
+from src.server.exceptions import (
+    BadRequestError,
+    NotFoundError,
+    ServerError,
+    ValidationError,
+)
 from src.server.services.anime_service import AnimeService, AnimeServiceError
 from src.server.utils.dependencies import (
     get_anime_service,
+    get_optional_database_session,
     get_series_app,
     require_auth,
 )
+from src.server.utils.filesystem import sanitize_folder_name
 
 logger = logging.getLogger(__name__)
@@ -52,9 +63,8 @@ async def get_anime_status(
             "series_count": series_count
         }
     except Exception as exc:
-        raise HTTPException(
-            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
-            detail=f"Failed to get status: {str(exc)}",
+        raise ServerError(
+            message=f"Failed to get status: {str(exc)}"
         ) from exc
```
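The handlers below now raise `ServerError`/`ValidationError` with a single `message` argument instead of building `HTTPException`s inline. `src.server.exceptions` is not shown in this diff; subclassing `HTTPException` so FastAPI still renders them is one plausible shape:

```python
from fastapi import HTTPException, status

class ServerError(HTTPException):
    """500-level error with a simplified constructor (sketch)."""
    def __init__(self, message: str) -> None:
        super().__init__(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=message,
        )

class ValidationError(HTTPException):
    """422-level error matching the old inline HTTPException usage."""
    def __init__(self, message: str) -> None:
        super().__init__(
            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
            detail=message,
        )
```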
```diff
@@ -73,6 +83,7 @@ class AnimeSummary(BaseModel):
         site: Provider site URL
         folder: Filesystem folder name (metadata only)
         missing_episodes: Episode dictionary mapping seasons to episode numbers
+        has_missing: Boolean flag indicating if series has missing episodes
         link: Optional link to the series page (used when adding new series)
     """
     key: str = Field(
@@ -95,6 +106,10 @@ class AnimeSummary(BaseModel):
         ...,
         description="Episode dictionary: {season: [episode_numbers]}"
     )
+    has_missing: bool = Field(
+        default=False,
+        description="Whether the series has any missing episodes"
+    )
     link: Optional[str] = Field(
         default="",
         description="Link to the series page (for adding new series)"
@@ -109,6 +124,7 @@ class AnimeSummary(BaseModel):
                 "site": "aniworld.to",
                 "folder": "beheneko the elf girls cat (2025)",
                 "missing_episodes": {"1": [1, 2, 3, 4]},
+                "has_missing": True,
                 "link": "https://aniworld.to/anime/stream/beheneko"
             }
         }
```
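Putting the new field together, a summary would validate like this; values mirror the schema example above, and the `name` field is inferred from the surrounding model rather than shown in this hunk:

```python
from src.server.api.routes.anime import AnimeSummary  # module path assumed

summary = AnimeSummary(
    key="beheneko",
    name="Beheneko: The Elf Girl's Cat",  # field assumed from docstring
    site="aniworld.to",
    folder="beheneko the elf girls cat (2025)",
    missing_episodes={"1": [1, 2, 3, 4]},
    has_missing=True,
    link="https://aniworld.to/anime/stream/beheneko",
)
print(summary)  # has_missing defaults to False when omitted
```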
```diff
@@ -173,11 +189,14 @@ async def list_anime(
     _auth: dict = Depends(require_auth),
     series_app: Any = Depends(get_series_app),
 ) -> List[AnimeSummary]:
-    """List library series that still have missing episodes.
+    """List all library series with their missing episodes status.
 
     Returns AnimeSummary objects where `key` is the primary identifier
     used for all operations. The `folder` field is metadata only and
     should not be used for lookups.
 
+    All series are returned, with `has_missing` flag indicating whether
+    a series has any missing episodes.
+
     Args:
         page: Page number for pagination (must be positive)
@@ -196,6 +215,7 @@ async def list_anime(
         - site: Provider site
         - folder: Filesystem folder name (metadata only)
         - missing_episodes: Dict mapping seasons to episode numbers
+        - has_missing: Whether the series has any missing episodes
 
     Raises:
         HTTPException: When the underlying lookup fails or params invalid.
@@ -205,35 +225,30 @@ async def list_anime(
     try:
         page_num = int(page)
         if page_num < 1:
-            raise HTTPException(
-                status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-                detail="Page number must be positive"
+            raise ValidationError(
+                message="Page number must be positive"
             )
         page = page_num
     except (ValueError, TypeError):
-        raise HTTPException(
-            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-            detail="Page must be a valid number"
+        raise ValidationError(
+            message="Page must be a valid number"
         )
 
     if per_page is not None:
         try:
             per_page_num = int(per_page)
             if per_page_num < 1:
-                raise HTTPException(
-                    status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-                    detail="Per page must be positive"
+                raise ValidationError(
+                    message="Per page must be positive"
                 )
             if per_page_num > 1000:
-                raise HTTPException(
-                    status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-                    detail="Per page cannot exceed 1000"
+                raise ValidationError(
+                    message="Per page cannot exceed 1000"
                 )
             per_page = per_page_num
         except (ValueError, TypeError):
-            raise HTTPException(
-                status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-                detail="Per page must be a valid number"
+            raise ValidationError(
+                message="Per page must be a valid number"
             )
 
     # Validate sort_by parameter to prevent ORM injection
@@ -242,9 +257,8 @@ async def list_anime(
     allowed_sort_fields = ["title", "id", "missing_episodes", "name"]
     if sort_by not in allowed_sort_fields:
         allowed = ", ".join(allowed_sort_fields)
-        raise HTTPException(
-            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-            detail=f"Invalid sort_by parameter. Allowed: {allowed}"
+        raise ValidationError(
+            message=f"Invalid sort_by parameter. Allowed: {allowed}"
         )
 
     # Validate filter parameter
@@ -257,17 +271,16 @@ async def list_anime(
         lower_filter = filter.lower()
         for pattern in dangerous_patterns:
             if pattern in lower_filter:
-                raise HTTPException(
-                    status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-                    detail="Invalid filter parameter"
+                raise ValidationError(
+                    message="Invalid filter parameter"
                 )
 
     try:
-        # Get missing episodes from series app
+        # Get all series from series app
         if not hasattr(series_app, "list"):
             return []
 
-        series = series_app.list.GetMissingEpisode()
+        series = series_app.list.GetList()
         summaries: List[AnimeSummary] = []
         for serie in series:
             # Get all properties from the serie object
@@ -280,6 +293,9 @@ async def list_anime(
             # Convert episode dict keys to strings for JSON serialization
             missing_episodes = {str(k): v for k, v in episode_dict.items()}
 
+            # Determine if series has missing episodes
+            has_missing = bool(episode_dict)
+
             summaries.append(
                 AnimeSummary(
                     key=key,
@@ -287,6 +303,7 @@ async def list_anime(
                     site=site,
                     folder=folder,
                     missing_episodes=missing_episodes,
+                    has_missing=has_missing,
                 )
             )
 
@@ -307,12 +324,11 @@ async def list_anime(
         )
 
         return summaries
-    except HTTPException:
+    except (ValidationError, BadRequestError, NotFoundError, ServerError):
         raise
     except Exception as exc:
-        raise HTTPException(
-            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
-            detail="Failed to retrieve anime list",
+        raise ServerError(
+            message="Failed to retrieve anime list"
         ) from exc
 
 
```
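A client-side sketch of the listing endpoint with the validated query parameters; the `/api/anime` prefix and the token handling are assumptions, since the router prefix is not visible in this diff:

```python
import requests

BASE = "http://127.0.0.1:8000"
TOKEN = "..."  # JWT obtained from the login endpoint

resp = requests.get(
    f"{BASE}/api/anime",                      # assumed router prefix
    params={"page": 1, "per_page": 50, "sort_by": "name"},
    headers={"Authorization": f"Bearer {TOKEN}"},
    timeout=10,
)
resp.raise_for_status()
for item in resp.json():
    flag = "missing" if item["has_missing"] else "complete"
    print(f'{item["key"]}: {flag}')
```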
```diff
@@ -343,17 +359,40 @@ async def trigger_rescan(
             "message": "Rescan started successfully",
         }
     except AnimeServiceError as e:
-        raise HTTPException(
-            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
-            detail=f"Rescan failed: {str(e)}",
+        raise ServerError(
+            message=f"Rescan failed: {str(e)}"
         ) from e
     except Exception as exc:
-        raise HTTPException(
-            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
-            detail="Failed to start rescan",
+        raise ServerError(
+            message="Failed to start rescan"
         ) from exc
 
 
+@router.get("/scan/status")
+async def get_scan_status(
+    _auth: dict = Depends(require_auth),
+    anime_service: AnimeService = Depends(get_anime_service),
+) -> dict:
+    """Get the current scan status.
+
+    Returns the current state of any ongoing library scan,
+    useful for restoring UI state after page reload.
+
+    Args:
+        _auth: Ensures the caller is authenticated (value unused)
+        anime_service: AnimeService instance provided via dependency.
+
+    Returns:
+        Dict[str, Any]: Current scan status including:
+        - is_scanning: Whether a scan is in progress
+        - total_items: Total items to scan
+        - directories_scanned: Items scanned so far
+        - current_directory: Current item being scanned
+        - directory: Root scan directory
+    """
+    return anime_service.get_scan_status()
+
+
 class AddSeriesRequest(BaseModel):
     """Request model for adding a new series."""
 
```
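The new `GET /scan/status` route exists so a reloaded page can re-attach to a running scan. A polling sketch, with the same prefix and auth assumptions as above:

```python
import time

import requests

def wait_for_scan(base: str, headers: dict, interval: float = 2.0) -> None:
    """Poll scan status until the scanner reports it is idle."""
    while True:
        status = requests.get(f"{base}/api/anime/scan/status",  # path assumed
                              headers=headers, timeout=10).json()
        if not status.get("is_scanning"):
            break
        print(f'{status.get("directories_scanned")}/'
              f'{status.get("total_items")}: '
              f'{status.get("current_directory")}')
        time.sleep(interval)
```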
@ -582,13 +621,21 @@ async def add_series(
|
|||||||
request: AddSeriesRequest,
|
request: AddSeriesRequest,
|
||||||
_auth: dict = Depends(require_auth),
|
_auth: dict = Depends(require_auth),
|
||||||
series_app: Any = Depends(get_series_app),
|
series_app: Any = Depends(get_series_app),
|
||||||
|
db: Optional[AsyncSession] = Depends(get_optional_database_session),
|
||||||
|
anime_service: AnimeService = Depends(get_anime_service),
|
||||||
) -> dict:
|
) -> dict:
|
||||||
"""Add a new series to the library.
|
"""Add a new series to the library with full initialization.
|
||||||
|
|
||||||
Extracts the series `key` from the provided link URL.
|
This endpoint performs the complete series addition flow:
|
||||||
|
1. Validates inputs and extracts the series key from the link URL
|
||||||
|
2. Creates a sanitized folder name from the display name
|
||||||
|
3. Saves the series to the database (if available)
|
||||||
|
4. Creates the folder on disk with the sanitized name
|
||||||
|
5. Triggers a targeted scan for missing episodes (only this series)
|
||||||
|
|
||||||
The `key` is the URL-safe identifier used for all lookups.
|
The `key` is the URL-safe identifier used for all lookups.
|
||||||
The `name` is stored as display metadata along with a
|
The `name` is stored as display metadata and used to derive
|
||||||
filesystem-friendly `folder` name derived from the name.
|
the filesystem folder name (sanitized for filesystem safety).
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
request: Request containing the series link and name.
|
request: Request containing the series link and name.
|
||||||
@ -596,15 +643,24 @@ async def add_series(
|
|||||||
- name: Display name for the series
|
- name: Display name for the series
|
||||||
_auth: Ensures the caller is authenticated (value unused)
|
_auth: Ensures the caller is authenticated (value unused)
|
||||||
series_app: Core `SeriesApp` instance provided via dependency
|
series_app: Core `SeriesApp` instance provided via dependency
|
||||||
|
db: Optional database session for async operations
|
||||||
|
anime_service: AnimeService for scanning operations
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
Dict[str, Any]: Status payload with success message and key
|
Dict[str, Any]: Status payload with:
|
||||||
|
- status: "success" or "exists"
|
||||||
|
- message: Human-readable status message
|
||||||
|
- key: Series unique identifier
|
||||||
|
- folder: Created folder path
|
||||||
|
- db_id: Database ID (if saved to DB)
|
||||||
|
- missing_episodes: Dict of missing episodes by season
|
||||||
|
- total_missing: Total count of missing episodes
|
||||||
|
|
||||||
Raises:
|
Raises:
|
||||||
HTTPException: If adding the series fails or link is invalid
|
HTTPException: If adding the series fails or link is invalid
|
||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
# Validate inputs
|
# Step A: Validate inputs
|
||||||
if not request.link or not request.link.strip():
|
if not request.link or not request.link.strip():
|
||||||
raise HTTPException(
|
raise HTTPException(
|
||||||
status_code=status.HTTP_400_BAD_REQUEST,
|
status_code=status.HTTP_400_BAD_REQUEST,
|
||||||
@ -617,13 +673,6 @@ async def add_series(
|
|||||||
detail="Series name cannot be empty",
|
detail="Series name cannot be empty",
|
||||||
)
|
)
|
||||||
|
|
||||||
# Check if series_app has the list attribute
|
|
||||||
if not hasattr(series_app, "list"):
|
|
||||||
raise HTTPException(
|
|
||||||
```diff
             status_code=status.HTTP_501_NOT_IMPLEMENTED,
             detail="Series list functionality not available",
         )

     # Extract key from link URL
     # Expected format: https://aniworld.to/anime/stream/{key}
     link = request.link.strip()
@@ -644,38 +693,150 @@ async def add_series(
                 detail="Could not extract series key from link",
             )
 
-        # Create folder from name (filesystem-friendly)
-        folder = request.name.strip()
+        # Step B: Create sanitized folder name from display name
+        name = request.name.strip()
+        try:
+            folder = sanitize_folder_name(name)
+        except ValueError as e:
+            raise HTTPException(
+                status_code=status.HTTP_400_BAD_REQUEST,
+                detail=f"Invalid series name for folder: {str(e)}",
+            )
 
-        # Create a new Serie object
-        # key: unique identifier extracted from link
-        # name: display name from request
-        # folder: filesystem folder name (derived from name)
-        # episodeDict: empty for new series
-        serie = Serie(
-            key=key,
-            name=request.name.strip(),
-            site="aniworld.to",
-            folder=folder,
-            episodeDict={}
-        )
+        db_id = None
+        missing_episodes: dict = {}
+        scan_error: Optional[str] = None
 
-        # Add the series to the list
-        series_app.list.add(serie)
+        # Step C: Save to database if available
+        if db is not None:
+            # Check if series already exists in database
+            existing = await AnimeSeriesService.get_by_key(db, key)
+            if existing:
+                return {
+                    "status": "exists",
+                    "message": f"Series already exists: {name}",
+                    "key": key,
+                    "folder": existing.folder,
+                    "db_id": existing.id,
+                    "missing_episodes": {},
+                    "total_missing": 0
+                }
+
+            # Save to database using AnimeSeriesService
+            anime_series = await AnimeSeriesService.create(
+                db=db,
+                key=key,
+                name=name,
+                site="aniworld.to",
+                folder=folder,
+            )
+            db_id = anime_series.id
+
+            logger.info(
+                "Added series to database: %s (key=%s, db_id=%d)",
+                name,
+                key,
+                db_id
+            )
 
-        # Refresh the series list to update the cache
-        if hasattr(series_app, "refresh_series_list"):
-            series_app.refresh_series_list()
+        # Step D: Create folder on disk and add to SerieList
+        folder_path = None
+        if series_app and hasattr(series_app, "list"):
+            serie = Serie(
+                key=key,
+                name=name,
+                site="aniworld.to",
+                folder=folder,
+                episodeDict={}
+            )
+
+            # Add to SerieList - this creates the folder with sanitized name
+            if hasattr(series_app.list, 'add'):
+                with warnings.catch_warnings():
+                    warnings.simplefilter("ignore", DeprecationWarning)
+                    folder_path = series_app.list.add(serie, use_sanitized_folder=True)
+                # Update folder to reflect what was actually created
+                folder = serie.folder
+            elif hasattr(series_app.list, 'keyDict'):
+                # Manual folder creation and cache update
+                if hasattr(series_app.list, 'directory'):
+                    folder_path = os.path.join(series_app.list.directory, folder)
+                    os.makedirs(folder_path, exist_ok=True)
+                series_app.list.keyDict[key] = serie
+
+            logger.info(
+                "Created folder for series: %s at %s",
+                name,
+                folder_path or folder
+            )
 
-        return {
-            "status": "success",
-            "message": f"Successfully added series: {request.name}",
-            "key": key,
-            "folder": folder
+        # Step E: Trigger targeted scan for missing episodes
+        try:
+            if series_app and hasattr(series_app, "scanner"):
+                missing_episodes = series_app.scanner.scan_single_series(
+                    key=key,
+                    folder=folder
+                )
+                logger.info(
+                    "Targeted scan completed for %s: found %d missing episodes",
+                    key,
+                    sum(len(eps) for eps in missing_episodes.values())
+                )
+
+                # Update the serie in keyDict with the missing episodes
+                if hasattr(series_app, "list") and hasattr(series_app.list, "keyDict"):
+                    if key in series_app.list.keyDict:
+                        series_app.list.keyDict[key].episodeDict = missing_episodes
+            elif anime_service:
+                # Fallback to anime_service if scanner not directly available
+                # Note: This is a lightweight scan, not a full rescan
+                logger.info(
+                    "Scanner not directly available, "
+                    "skipping targeted scan for %s",
+                    key
+                )
+        except Exception as e:
+            # Scan failure is not critical - series was still added
+            scan_error = str(e)
+            logger.warning(
+                "Targeted scan failed for %s: %s (series still added)",
+                key,
+                e
+            )
+
+        # Convert missing episodes keys to strings for JSON serialization
+        missing_episodes_serializable = {
+            str(season): episodes
+            for season, episodes in missing_episodes.items()
         }
+
+        # Calculate total missing
+        total_missing = sum(len(eps) for eps in missing_episodes.values())
+
+        # Step F: Return response
+        response = {
+            "status": "success",
+            "message": f"Successfully added series: {name}",
+            "key": key,
+            "folder": folder_path or folder,
+            "db_id": db_id,
+            "missing_episodes": missing_episodes_serializable,
+            "total_missing": total_missing
+        }
+
+        if scan_error:
+            response["scan_warning"] = f"Scan partially failed: {scan_error}"
+
+        return response
 
     except HTTPException:
         raise
     except Exception as exc:
+        logger.error("Failed to add series: %s", exc, exc_info=True)
+
+        # Attempt to rollback database entry if folder creation failed
+        # (This is a best-effort cleanup)
+
         raise HTTPException(
             status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
             detail=f"Failed to add series: {str(exc)}",
```
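The rewritten endpoint leans on a `sanitize_folder_name` helper that this compare view never shows. A minimal sketch of the contract the hunk assumes (hypothetical implementation, not the repository's actual helper):

```python
import re


def sanitize_folder_name(name: str) -> str:
    """Hypothetical sketch: reduce a display name to a filesystem-safe folder.

    The endpoint above relies on two behaviors only: a cleaned string comes
    back, and a ValueError is raised when nothing usable remains (which the
    route converts into a 400 response).
    """
    cleaned = re.sub(r'[<>:"/\\|?*]', "", name).strip(" .")
    if not cleaned:
        raise ValueError("name reduces to an empty folder name")
    return cleaned
```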
```diff
@@ -26,7 +26,7 @@ optional_bearer = HTTPBearer(auto_error=False)
 
 
 @router.post("/setup", status_code=http_status.HTTP_201_CREATED)
-def setup_auth(req: SetupRequest):
+async def setup_auth(req: SetupRequest):
     """Initial setup endpoint to configure the master password.
 
     This endpoint also initializes the configuration with default values
```
```diff
@@ -57,17 +57,44 @@ def setup_auth(req: SetupRequest):
         config.other['master_password_hash'] = password_hash
 
         # Store anime directory in config's other field if provided
+        anime_directory = None
         if hasattr(req, 'anime_directory') and req.anime_directory:
-            config.other['anime_directory'] = req.anime_directory
+            anime_directory = req.anime_directory.strip()
+            if anime_directory:
+                config.other['anime_directory'] = anime_directory
 
         # Save the config with the password hash and anime directory
         config_service.save_config(config, create_backup=False)
 
+        # Sync series from data files to database if anime directory is set
+        if anime_directory:
+            try:
+                import structlog
+
+                from src.server.services.anime_service import (
+                    sync_series_from_data_files,
+                )
+                logger = structlog.get_logger(__name__)
+                sync_count = await sync_series_from_data_files(
+                    anime_directory, logger
+                )
+                logger.info(
+                    "Setup complete: synced series from data files",
+                    count=sync_count
+                )
+            except Exception as e:
+                # Log but don't fail setup if sync fails
+                import structlog
+                structlog.get_logger(__name__).warning(
+                    "Failed to sync series after setup",
+                    error=str(e)
+                )
+
+        return {"status": "ok"}
+
     except ValueError as e:
         raise HTTPException(status_code=400, detail=str(e)) from e
 
-    return {"status": "ok"}
-
 
 @router.post("/login", response_model=LoginResponse)
 def login(req: LoginRequest):
```
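With `setup_auth` now async, the route can await the sync step directly. A first-time setup call might look like the following sketch, assuming the router is mounted under `/api/auth` and these request field names (neither is shown in this diff):

```python
import requests

resp = requests.post(
    "http://127.0.0.1:8000/api/auth/setup",  # assumed mount point
    json={
        "password": "S3cure!Passw0rd",      # assumed field name
        "anime_directory": "/data/anime",   # optional; triggers the new sync
    },
    timeout=60,
)
print(resp.status_code)  # 201 on success, 400 on validation failure
```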
```diff
@@ -1,4 +1,4 @@
-from typing import Dict, List, Optional
+from typing import Any, Dict, List, Optional
 
 from fastapi import APIRouter, Depends, HTTPException, status
 
```
```diff
@@ -210,10 +210,10 @@ def update_advanced_config(
     ) from e
 
 
-@router.post("/directory", response_model=Dict[str, str])
-def update_directory(
+@router.post("/directory", response_model=Dict[str, Any])
+async def update_directory(
     directory_config: Dict[str, str], auth: dict = Depends(require_auth)
-) -> Dict[str, str]:
+) -> Dict[str, Any]:
     """Update anime directory configuration.
 
     Args:
```
```diff
@@ -235,13 +235,37 @@ def update_directory(
         app_config = config_service.load_config()
 
         # Store directory in other section
-        if "anime_directory" not in app_config.other:
-            app_config.other["anime_directory"] = directory
-        else:
-            app_config.other["anime_directory"] = directory
+        app_config.other["anime_directory"] = directory
 
         config_service.save_config(app_config)
-        return {"message": "Anime directory updated successfully"}
+
+        # Sync series from data files to database
+        sync_count = 0
+        try:
+            import structlog
+
+            from src.server.services.anime_service import sync_series_from_data_files
+            logger = structlog.get_logger(__name__)
+            sync_count = await sync_series_from_data_files(directory, logger)
+            logger.info(
+                "Directory updated: synced series from data files",
+                directory=directory,
+                count=sync_count
+            )
+        except Exception as e:
+            # Log but don't fail the directory update if sync fails
+            import structlog
+            structlog.get_logger(__name__).warning(
+                "Failed to sync series after directory update",
+                error=str(e)
+            )
+
+        response: Dict[str, Any] = {
+            "message": "Anime directory updated successfully",
+            "synced_series": sync_count
+        }
+
+        return response
     except ConfigServiceError as e:
         raise HTTPException(
             status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
```
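The directory update now reports how many series the sync picked up. A client-call sketch, with the mount point, payload key, and auth header all assumed rather than taken from this diff:

```python
import requests

resp = requests.post(
    "http://127.0.0.1:8000/api/config/directory",  # assumed mount point
    json={"directory": "/data/anime"},             # assumed payload key
    headers={"Authorization": "Bearer <token>"},
    timeout=120,
)
print(resp.json())
# e.g. {"message": "Anime directory updated successfully", "synced_series": 12}
```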
```diff
@@ -4,9 +4,10 @@ This module provides REST API endpoints for managing the anime download queue,
 including adding episodes, removing items, controlling queue processing, and
 retrieving queue status and statistics.
 """
-from fastapi import APIRouter, Depends, HTTPException, Path, status
+from fastapi import APIRouter, Depends, Path, status
 from fastapi.responses import JSONResponse
 
+from src.server.exceptions import BadRequestError, NotFoundError, ServerError
 from src.server.models.download import (
     DownloadRequest,
     QueueOperationRequest,
@@ -52,9 +53,8 @@ async def get_queue_status(
         return response
 
     except Exception as e:
-        raise HTTPException(
-            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
-            detail=f"Failed to retrieve queue status: {str(e)}",
+        raise ServerError(
+            message=f"Failed to retrieve queue status: {str(e)}"
         )
 
 
@@ -91,9 +91,8 @@ async def add_to_queue(
     try:
         # Validate request
         if not request.episodes:
-            raise HTTPException(
-                status_code=status.HTTP_400_BAD_REQUEST,
-                detail="At least one episode must be specified",
+            raise BadRequestError(
+                message="At least one episode must be specified"
             )
 
         # Add to queue
@@ -122,16 +121,12 @@ async def add_to_queue(
         )
 
     except DownloadServiceError as e:
-        raise HTTPException(
-            status_code=status.HTTP_400_BAD_REQUEST,
-            detail=str(e),
-        )
-    except HTTPException:
+        raise BadRequestError(message=str(e))
+    except (BadRequestError, NotFoundError, ServerError):
         raise
     except Exception as e:
-        raise HTTPException(
-            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
-            detail=f"Failed to add episodes to queue: {str(e)}",
+        raise ServerError(
+            message=f"Failed to add episodes to queue: {str(e)}"
         )
 
 
@@ -163,9 +158,8 @@ async def clear_completed(
         }
 
     except Exception as e:
-        raise HTTPException(
-            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
-            detail=f"Failed to clear completed items: {str(e)}",
+        raise ServerError(
+            message=f"Failed to clear completed items: {str(e)}"
         )
 
 
@@ -197,9 +191,8 @@ async def clear_failed(
         }
 
     except Exception as e:
-        raise HTTPException(
-            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
-            detail=f"Failed to clear failed items: {str(e)}",
+        raise ServerError(
+            message=f"Failed to clear failed items: {str(e)}"
         )
 
 
@@ -231,9 +224,8 @@ async def clear_pending(
         }
 
     except Exception as e:
-        raise HTTPException(
-            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
-            detail=f"Failed to clear pending items: {str(e)}",
+        raise ServerError(
+            message=f"Failed to clear pending items: {str(e)}"
         )
 
 
@@ -262,22 +254,19 @@ async def remove_from_queue(
         removed_ids = await download_service.remove_from_queue([item_id])
 
         if not removed_ids:
-            raise HTTPException(
-                status_code=status.HTTP_404_NOT_FOUND,
-                detail=f"Download item {item_id} not found in queue",
+            raise NotFoundError(
+                message=f"Download item {item_id} not found in queue",
+                resource_type="download_item",
+                resource_id=item_id
             )
 
     except DownloadServiceError as e:
-        raise HTTPException(
-            status_code=status.HTTP_400_BAD_REQUEST,
-            detail=str(e),
-        )
-    except HTTPException:
+        raise BadRequestError(message=str(e))
+    except (BadRequestError, NotFoundError, ServerError):
         raise
     except Exception as e:
-        raise HTTPException(
-            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
-            detail=f"Failed to remove item from queue: {str(e)}",
+        raise ServerError(
+            message=f"Failed to remove item from queue: {str(e)}"
         )
 
 
@@ -307,22 +296,18 @@ async def remove_multiple_from_queue(
         )
 
         if not removed_ids:
-            raise HTTPException(
-                status_code=status.HTTP_404_NOT_FOUND,
-                detail="No matching items found in queue",
+            raise NotFoundError(
+                message="No matching items found in queue",
+                resource_type="download_items"
             )
 
     except DownloadServiceError as e:
-        raise HTTPException(
-            status_code=status.HTTP_400_BAD_REQUEST,
-            detail=str(e),
-        )
-    except HTTPException:
+        raise BadRequestError(message=str(e))
+    except (BadRequestError, NotFoundError, ServerError):
         raise
     except Exception as e:
-        raise HTTPException(
-            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
-            detail=f"Failed to remove items from queue: {str(e)}",
+        raise ServerError(
+            message=f"Failed to remove items from queue: {str(e)}"
         )
 
 
@@ -354,9 +339,8 @@ async def start_queue(
         result = await download_service.start_queue_processing()
 
         if result is None:
-            raise HTTPException(
-                status_code=status.HTTP_400_BAD_REQUEST,
-                detail="No pending downloads in queue",
+            raise BadRequestError(
+                message="No pending downloads in queue"
            )
 
         return {
@@ -365,16 +349,12 @@ async def start_queue(
         }
 
     except DownloadServiceError as e:
-        raise HTTPException(
-            status_code=status.HTTP_400_BAD_REQUEST,
-            detail=str(e),
-        )
-    except HTTPException:
+        raise BadRequestError(message=str(e))
+    except (BadRequestError, NotFoundError, ServerError):
         raise
     except Exception as e:
-        raise HTTPException(
-            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
-            detail=f"Failed to start queue processing: {str(e)}",
+        raise ServerError(
+            message=f"Failed to start queue processing: {str(e)}"
         )
 
 
@@ -408,9 +388,8 @@ async def stop_queue(
         }
 
     except Exception as e:
-        raise HTTPException(
-            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
-            detail=f"Failed to stop queue processing: {str(e)}",
+        raise ServerError(
+            message=f"Failed to stop queue processing: {str(e)}"
         )
 
 
@@ -442,9 +421,8 @@ async def pause_queue(
         }
 
     except Exception as e:
-        raise HTTPException(
-            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
-            detail=f"Failed to pause queue processing: {str(e)}",
+        raise ServerError(
+            message=f"Failed to pause queue processing: {str(e)}"
         )
 
 
@@ -480,9 +458,8 @@ async def reorder_queue(
         }
 
     except Exception as e:
-        raise HTTPException(
-            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
-            detail=f"Failed to reorder queue: {str(e)}",
+        raise ServerError(
+            message=f"Failed to reorder queue: {str(e)}"
         )
 
 
@@ -522,7 +499,6 @@ async def retry_failed(
         }
 
     except Exception as e:
-        raise HTTPException(
-            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
-            detail=f"Failed to retry downloads: {str(e)}",
+        raise ServerError(
+            message=f"Failed to retry downloads: {str(e)}"
         )
```
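The queue routes now raise `BadRequestError`, `NotFoundError`, and `ServerError` from `src.server.exceptions`, which this compare view does not include. A plausible minimal shape, assuming they subclass FastAPI's `HTTPException` so the framework still renders them as HTTP responses:

```python
from fastapi import HTTPException, status


class BadRequestError(HTTPException):
    """Sketch only; the real classes live in src/server/exceptions.py."""

    def __init__(self, message: str) -> None:
        super().__init__(status.HTTP_400_BAD_REQUEST, detail=message)


class NotFoundError(HTTPException):
    def __init__(
        self,
        message: str,
        resource_type: str = "",
        resource_id: object = None,
    ) -> None:
        super().__init__(status.HTTP_404_NOT_FOUND, detail=message)
        self.resource_type = resource_type
        self.resource_id = resource_id


class ServerError(HTTPException):
    def __init__(self, message: str) -> None:
        super().__init__(status.HTTP_500_INTERNAL_SERVER_ERROR, detail=message)
```

The blanket `except (BadRequestError, NotFoundError, ServerError): raise` clauses above keep these from being swallowed by each handler's final `except Exception` branch.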
```diff
@@ -23,6 +23,9 @@ class HealthStatus(BaseModel):
     status: str
     timestamp: str
     version: str = "1.0.0"
+    service: str = "aniworld-api"
+    series_app_initialized: bool = False
+    anime_directory_configured: bool = False
 
 
 class DatabaseHealth(BaseModel):
@@ -170,14 +173,24 @@ def get_system_metrics() -> SystemMetrics:
 @router.get("", response_model=HealthStatus)
 async def basic_health_check() -> HealthStatus:
     """Basic health check endpoint.
 
+    This endpoint does not depend on anime_directory configuration
+    and should always return 200 OK for basic health monitoring.
+    Includes service information for identification.
+
     Returns:
-        HealthStatus: Simple health status with timestamp.
+        HealthStatus: Simple health status with timestamp and service info.
     """
+    from src.config.settings import settings
+    from src.server.utils.dependencies import _series_app
+
     logger.debug("Basic health check requested")
     return HealthStatus(
         status="healthy",
         timestamp=datetime.now().isoformat(),
+        service="aniworld-api",
+        series_app_initialized=_series_app is not None,
+        anime_directory_configured=bool(settings.anime_directory),
     )
```
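With the three new fields, a health probe response distinguishes "process up" from "fully configured". Assuming the router is mounted at `/health` (the prefix is not part of this hunk):

```python
import requests

resp = requests.get("http://127.0.0.1:8000/health", timeout=10)
print(resp.json())
# {"status": "healthy", "timestamp": "2024-...", "version": "1.0.0",
#  "service": "aniworld-api", "series_app_initialized": true,
#  "anime_directory_configured": true}
```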
```diff
@@ -13,8 +13,9 @@ in their data payload. The `folder` field is optional for display purposes.
 """
 from __future__ import annotations
 
+import time
 import uuid
-from typing import Optional
+from typing import Dict, Optional, Set
 
 import structlog
 from fastapi import APIRouter, Depends, WebSocket, WebSocketDisconnect, status
@@ -34,6 +35,73 @@ logger = structlog.get_logger(__name__)
 
 router = APIRouter(prefix="/ws", tags=["websocket"])
 
+# Valid room names - explicit allow-list for security
+VALID_ROOMS: Set[str] = {
+    "downloads",  # Download progress updates
+    "queue",  # Queue status changes
+    "scan",  # Scan progress updates
+    "system",  # System notifications
+    "errors",  # Error notifications
+}
+
+# Rate limiting configuration for WebSocket messages
+WS_RATE_LIMIT_MESSAGES_PER_MINUTE = 60
+WS_RATE_LIMIT_WINDOW_SECONDS = 60
+
+# In-memory rate limiting for WebSocket connections
+# WARNING: This resets on process restart. For production, consider Redis.
+_ws_rate_limits: Dict[str, Dict[str, float]] = {}
+
+
+def _check_ws_rate_limit(connection_id: str) -> bool:
+    """Check if a WebSocket connection has exceeded its rate limit.
+
+    Args:
+        connection_id: Unique identifier for the WebSocket connection
+
+    Returns:
+        bool: True if within rate limit, False if exceeded
+    """
+    now = time.time()
+
+    if connection_id not in _ws_rate_limits:
+        _ws_rate_limits[connection_id] = {
+            "count": 0,
+            "window_start": now,
+        }
+
+    record = _ws_rate_limits[connection_id]
+
+    # Reset window if expired
+    if now - record["window_start"] > WS_RATE_LIMIT_WINDOW_SECONDS:
+        record["window_start"] = now
+        record["count"] = 0
+
+    record["count"] += 1
+
+    return record["count"] <= WS_RATE_LIMIT_MESSAGES_PER_MINUTE
+
+
+def _cleanup_ws_rate_limits(connection_id: str) -> None:
+    """Remove rate limit record for a disconnected connection.
+
+    Args:
+        connection_id: Unique identifier for the WebSocket connection
+    """
+    _ws_rate_limits.pop(connection_id, None)
+
+
+def _validate_room_name(room: str) -> bool:
+    """Validate that a room name is in the allowed set.
+
+    Args:
+        room: Room name to validate
+
+    Returns:
+        bool: True if room is valid, False otherwise
+    """
+    return room in VALID_ROOMS
+
+
 @router.websocket("/connect")
 async def websocket_endpoint(
@@ -130,6 +198,19 @@ async def websocket_endpoint(
             # Receive message from client
             data = await websocket.receive_json()
 
+            # Check rate limit
+            if not _check_ws_rate_limit(connection_id):
+                logger.warning(
+                    "WebSocket rate limit exceeded",
+                    connection_id=connection_id,
+                )
+                await ws_service.send_error(
+                    connection_id,
+                    "Rate limit exceeded. Please slow down.",
+                    "RATE_LIMIT_EXCEEDED",
+                )
+                continue
+
             # Parse client message
             try:
                 client_msg = ClientMessage(**data)
@@ -149,9 +230,26 @@ async def websocket_endpoint(
             # Handle room subscription requests
             if client_msg.action in ["join", "leave"]:
                 try:
+                    room_name = client_msg.data.get("room", "")
+
+                    # Validate room name against allow-list
+                    if not _validate_room_name(room_name):
+                        logger.warning(
+                            "Invalid room name requested",
+                            connection_id=connection_id,
+                            room=room_name,
+                        )
+                        await ws_service.send_error(
+                            connection_id,
+                            f"Invalid room name: {room_name}. "
+                            f"Valid rooms: {', '.join(sorted(VALID_ROOMS))}",
+                            "INVALID_ROOM",
+                        )
+                        continue
+
                     room_req = RoomSubscriptionRequest(
                         action=client_msg.action,
-                        room=client_msg.data.get("room", ""),
+                        room=room_name,
                     )
 
                     if room_req.action == "join":
@@ -241,7 +339,8 @@ async def websocket_endpoint(
                 error=str(e),
             )
     finally:
-        # Cleanup connection
+        # Cleanup connection and rate limit record
+        _cleanup_ws_rate_limits(connection_id)
         await ws_service.disconnect(connection_id)
         logger.info("WebSocket connection closed", connection_id=connection_id)
 
@@ -263,5 +362,6 @@ async def websocket_status(
             "status": "operational",
             "active_connections": connection_count,
             "supported_message_types": [t.value for t in WebSocketMessageType],
+            "valid_rooms": sorted(VALID_ROOMS),
         },
     )
```
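The limiter above is a fixed window: the first 60 messages in any 60-second window pass, the rest are rejected until the window resets. The logic is small enough to check standalone:

```python
import time

WS_RATE_LIMIT_MESSAGES_PER_MINUTE = 60
WS_RATE_LIMIT_WINDOW_SECONDS = 60
_ws_rate_limits: dict = {}


def _check_ws_rate_limit(connection_id: str) -> bool:
    # Same fixed-window logic as the hunk above, inlined for a quick test.
    now = time.time()
    record = _ws_rate_limits.setdefault(
        connection_id, {"count": 0, "window_start": now}
    )
    if now - record["window_start"] > WS_RATE_LIMIT_WINDOW_SECONDS:
        record["window_start"] = now
        record["count"] = 0
    record["count"] += 1
    return record["count"] <= WS_RATE_LIMIT_MESSAGES_PER_MINUTE


results = [_check_ws_rate_limit("conn-1") for _ in range(61)]
print(results.count(True), results[-1])  # 60 False -- the 61st message is dropped
```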
```diff
@@ -8,7 +8,7 @@ Environment Variables:
     JWT_SECRET_KEY: Secret key for JWT token signing (default: dev-secret)
     PASSWORD_SALT: Salt for password hashing (default: dev-salt)
     DATABASE_URL: Development database connection string (default: SQLite)
-    LOG_LEVEL: Logging level (default: DEBUG)
+    LOG_LEVEL: Logging level (default: INFO)
     CORS_ORIGINS: Comma-separated list of allowed CORS origins
     API_RATE_LIMIT: API rate limit per minute (default: 1000)
 """
@@ -91,8 +91,8 @@ class DevelopmentSettings(BaseSettings):
     # Logging Settings
     # ============================================================================
 
-    log_level: str = Field(default="DEBUG", env="LOG_LEVEL")
-    """Logging level (DEBUG for detailed output)."""
+    log_level: str = Field(default="INFO", env="LOG_LEVEL")
+    """Logging level (INFO for standard output)."""
 
     log_file: str = Field(default="logs/development.log", env="LOG_FILE")
     """Path to development log file."""
```
```diff
@@ -60,7 +60,7 @@ def setup_logging() -> Dict[str, logging.Logger]:
 
     # File handler for general server logs
     server_file_handler = logging.FileHandler(server_log_file, mode='a', encoding='utf-8')
-    server_file_handler.setLevel(logging.DEBUG)
+    server_file_handler.setLevel(logging.INFO)
     server_file_handler.setFormatter(detailed_format)
     root_logger.addHandler(server_file_handler)
 
```
```diff
@@ -1,27 +0,0 @@
-"""
-Health check controller for monitoring and status endpoints.
-
-This module provides health check endpoints for application monitoring.
-"""
-from fastapi import APIRouter
-
-from src.config.settings import settings
-from src.server.utils.dependencies import _series_app
-
-router = APIRouter(prefix="/health", tags=["health"])
-
-
-@router.get("")
-async def health_check():
-    """Health check endpoint for monitoring.
-
-    This endpoint does not depend on anime_directory configuration
-    and should always return 200 OK for basic health monitoring.
-    """
-    return {
-        "status": "healthy",
-        "service": "aniworld-api",
-        "version": "1.0.0",
-        "series_app_initialized": _series_app is not None,
-        "anime_directory_configured": bool(settings.anime_directory)
-    }
```
````diff
@@ -13,7 +13,7 @@ This package provides persistent storage for anime series, episodes, download qu
 Install required dependencies:
 
 ```bash
-pip install sqlalchemy alembic aiosqlite
+pip install sqlalchemy aiosqlite
 ```
 
 Or use the project requirements:
@@ -163,24 +163,6 @@ from src.config.settings import settings
 settings.database_url = "sqlite:///./data/aniworld.db"
 ```
 
-## Migrations (Future)
-
-Alembic is installed for database migrations:
-
-```bash
-# Initialize Alembic
-alembic init alembic
-
-# Generate migration
-alembic revision --autogenerate -m "Description"
-
-# Apply migrations
-alembic upgrade head
-
-# Rollback
-alembic downgrade -1
-```
-
 ## Testing
 
 Run database tests:
@@ -196,8 +178,7 @@ The test suite uses an in-memory SQLite database for isolation and speed.
 - **base.py**: Base declarative class and mixins
 - **models.py**: SQLAlchemy ORM models (4 models)
 - **connection.py**: Engine, session factory, dependency injection
-- **migrations.py**: Alembic migration placeholder
 - **__init__.py**: Package exports
 - **service.py**: Service layer with CRUD operations
 
 ## Service Layer
@@ -432,5 +413,4 @@ Solution: Ensure referenced records exist before creating relationships.
 ## Further Reading
 
 - [SQLAlchemy 2.0 Documentation](https://docs.sqlalchemy.org/en/20/)
-- [Alembic Tutorial](https://alembic.sqlalchemy.org/en/latest/tutorial.html)
 - [FastAPI with Databases](https://fastapi.tiangolo.com/tutorial/sql-databases/)
````
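With Alembic dropped, schema creation presumably falls to the ORM metadata itself. A minimal sketch under that assumption; `Base` is taken from the `base.py` module this README describes, and the import path may differ in the actual code:

```python
import asyncio

from sqlalchemy.ext.asyncio import create_async_engine

from src.server.database.base import Base  # assumed import path


async def create_schema() -> None:
    engine = create_async_engine("sqlite+aiosqlite:///./data/aniworld.db")
    async with engine.begin() as conn:
        # create_all only creates tables that are missing, so this is
        # safe to run on every startup.
        await conn.run_sync(Base.metadata.create_all)
    await engine.dispose()


asyncio.run(create_schema())
```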
```diff
@@ -30,7 +30,6 @@ from src.server.database.init import (
     create_database_backup,
     create_database_schema,
     get_database_info,
-    get_migration_guide,
     get_schema_version,
     initialize_database,
     seed_initial_data,
@@ -64,7 +63,6 @@ __all__ = [
     "check_database_health",
     "create_database_backup",
     "get_database_info",
-    "get_migration_guide",
     "CURRENT_SCHEMA_VERSION",
     "EXPECTED_TABLES",
     # Models
```
```diff
@@ -7,7 +7,11 @@ Functions:
 - init_db: Initialize database engine and create tables
 - close_db: Close database connections and cleanup
 - get_db_session: FastAPI dependency for database sessions
+- get_transactional_session: Session without auto-commit for transactions
 - get_engine: Get database engine instance
+
+Classes:
+- TransactionManager: Helper class for manual transaction control
 """
 from __future__ import annotations
 
```
```diff
@@ -86,19 +90,24 @@ async def init_db() -> None:
     db_url = _get_database_url()
     logger.info(f"Initializing database: {db_url}")
 
+    # Build engine kwargs based on database type
+    is_sqlite = "sqlite" in db_url
+    engine_kwargs = {
+        "echo": settings.log_level == "DEBUG",
+        "poolclass": pool.StaticPool if is_sqlite else pool.QueuePool,
+        "pool_pre_ping": True,
+    }
+
+    # Only add pool_size and max_overflow for non-SQLite databases
+    if not is_sqlite:
+        engine_kwargs["pool_size"] = 5
+        engine_kwargs["max_overflow"] = 10
+
     # Create async engine
-    _engine = create_async_engine(
-        db_url,
-        echo=settings.log_level == "DEBUG",
-        poolclass=pool.StaticPool if "sqlite" in db_url else pool.QueuePool,
-        pool_size=5 if "sqlite" not in db_url else None,
-        max_overflow=10 if "sqlite" not in db_url else None,
-        pool_pre_ping=True,
-        future=True,
-    )
+    _engine = create_async_engine(db_url, **engine_kwargs)
 
     # Configure SQLite if needed
-    if "sqlite" in db_url:
+    if is_sqlite:
         _configure_sqlite_engine(_engine)
 
     # Create async session factory
```
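Background on this refactor: `pool_size` and `max_overflow` are `QueuePool` parameters; `StaticPool` (used for SQLite) does not accept them, and passing `None` as the old code did is equally invalid, so the kwargs are now built conditionally. A quick standalone check:

```python
from sqlalchemy import create_engine, pool

# StaticPool keeps a single shared connection, which suits in-process
# SQLite; adding pool_size/max_overflow here would raise a TypeError.
engine = create_engine(
    "sqlite:///:memory:",
    poolclass=pool.StaticPool,
    pool_pre_ping=True,
)
print(type(engine.pool).__name__)  # StaticPool
```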
```diff
@@ -112,12 +121,13 @@ async def init_db() -> None:
 
     # Create sync engine for initial setup
     sync_url = settings.database_url
-    _sync_engine = create_engine(
-        sync_url,
-        echo=settings.log_level == "DEBUG",
-        poolclass=pool.StaticPool if "sqlite" in sync_url else pool.QueuePool,
-        pool_pre_ping=True,
-    )
+    is_sqlite_sync = "sqlite" in sync_url
+    sync_engine_kwargs = {
+        "echo": settings.log_level == "DEBUG",
+        "poolclass": pool.StaticPool if is_sqlite_sync else pool.QueuePool,
+        "pool_pre_ping": True,
+    }
+    _sync_engine = create_engine(sync_url, **sync_engine_kwargs)
 
     # Create sync session factory
     _sync_session_factory = sessionmaker(
```
```diff
@@ -140,11 +150,29 @@ async def init_db() -> None:
 async def close_db() -> None:
     """Close database connections and cleanup resources.
 
+    Performs a WAL checkpoint for SQLite databases to ensure all
+    pending writes are flushed to the main database file before
+    closing connections. This prevents database corruption during
+    shutdown.
+
     Should be called during application shutdown.
     """
     global _engine, _sync_engine, _session_factory, _sync_session_factory
 
     try:
+        # For SQLite: checkpoint WAL to ensure all writes are flushed
+        if _sync_engine and "sqlite" in str(_sync_engine.url):
+            logger.info("Running SQLite WAL checkpoint before shutdown...")
+            try:
+                from sqlalchemy import text
+                with _sync_engine.connect() as conn:
+                    # TRUNCATE mode: checkpoint and truncate WAL file
+                    conn.execute(text("PRAGMA wal_checkpoint(TRUNCATE)"))
+                    conn.commit()
+                logger.info("SQLite WAL checkpoint completed")
+            except Exception as e:
+                logger.warning(f"WAL checkpoint failed (non-critical): {e}")
+
         if _engine:
             logger.info("Closing async database engine...")
             await _engine.dispose()
```
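For reference, the same checkpoint the shutdown hook issues can be run against any SQLite file: `PRAGMA wal_checkpoint(TRUNCATE)` flushes the `-wal` sidecar into the main database file and truncates it. A standalone illustration using the stdlib driver (the path is a placeholder):

```python
import sqlite3

conn = sqlite3.connect("data/aniworld.db")  # placeholder path
conn.execute("PRAGMA journal_mode=WAL")
# Returns (busy, wal_pages, checkpointed_pages); busy == 0 means the
# checkpoint ran to completion.
busy, wal_pages, checkpointed = conn.execute(
    "PRAGMA wal_checkpoint(TRUNCATE)"
).fetchone()
print(busy, wal_pages, checkpointed)
conn.close()
```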
```diff
@@ -258,3 +286,307 @@ def get_sync_session() -> Session:
     )
 
     return _sync_session_factory()
+
+
+def get_async_session_factory() -> AsyncSession:
+    """Get a new async database session (factory function).
+
+    Creates a new session instance for use in repository patterns.
+    The caller is responsible for committing/rolling back and closing.
+
+    Returns:
+        AsyncSession: New database session for async operations
+
+    Raises:
+        RuntimeError: If database is not initialized
+
+    Example:
+        session = get_async_session_factory()
+        try:
+            result = await session.execute(select(AnimeSeries))
+            await session.commit()
+            return result.scalars().all()
+        except Exception:
+            await session.rollback()
+            raise
+        finally:
+            await session.close()
+    """
+    if _session_factory is None:
+        raise RuntimeError(
+            "Database not initialized. Call init_db() first."
+        )
+
+    return _session_factory()
+
+
+@asynccontextmanager
+async def get_transactional_session() -> AsyncGenerator[AsyncSession, None]:
+    """Get a database session without auto-commit for explicit transaction control.
+
+    Unlike get_db_session(), this does NOT auto-commit on success.
+    Use this when you need explicit transaction control with the
+    @transactional decorator or atomic() context manager.
+
+    Yields:
+        AsyncSession: Database session for async operations
+
+    Raises:
+        RuntimeError: If database is not initialized
+
+    Example:
+        async with get_transactional_session() as session:
+            async with atomic(session) as tx:
+                # Multiple operations in transaction
+                await operation1(session)
+                await operation2(session)
+            # Committed when exiting atomic() context
+    """
+    if _session_factory is None:
+        raise RuntimeError(
+            "Database not initialized. Call init_db() first."
+        )
+
+    session = _session_factory()
+    try:
+        yield session
+    except Exception:
+        await session.rollback()
+        raise
+    finally:
+        await session.close()
+
+
+class TransactionManager:
+    """Helper class for manual transaction control.
+
+    Provides a cleaner interface for managing transactions across
+    multiple service calls within a single request.
+
+    Attributes:
+        _session_factory: Factory for creating new sessions
+        _session: Current active session
+        _in_transaction: Whether currently in a transaction
+
+    Example:
+        async with TransactionManager() as tm:
+            session = await tm.get_session()
+            await tm.begin()
+            try:
+                await service1.operation(session)
+                await service2.operation(session)
+                await tm.commit()
+            except Exception:
+                await tm.rollback()
+                raise
+    """
+
+    def __init__(
+        self,
+        session_factory: Optional[async_sessionmaker] = None
+    ) -> None:
+        """Initialize transaction manager.
+
+        Args:
+            session_factory: Optional custom session factory.
+                Uses global factory if not provided.
+        """
+        self._session_factory = session_factory or _session_factory
+        self._session: Optional[AsyncSession] = None
+        self._in_transaction = False
+
+        if self._session_factory is None:
+            raise RuntimeError(
+                "Database not initialized. Call init_db() first."
+            )
+
+    async def __aenter__(self) -> "TransactionManager":
+        """Enter context manager and create session."""
+        self._session = self._session_factory()
+        logger.debug("TransactionManager: Created new session")
+        return self
+
+    async def __aexit__(
+        self,
+        exc_type: Optional[type],
+        exc_val: Optional[BaseException],
+        exc_tb: Optional[object],
+    ) -> bool:
+        """Exit context manager and cleanup session.
+
+        Automatically rolls back if an exception occurred and
+        transaction wasn't explicitly committed.
+        """
+        if self._session:
+            if exc_type is not None and self._in_transaction:
+                logger.warning(
+                    "TransactionManager: Rolling back due to exception: %s",
+                    exc_val,
+                )
+                await self._session.rollback()
+
+            await self._session.close()
+            self._session = None
+            self._in_transaction = False
+            logger.debug("TransactionManager: Session closed")
+
+        return False
+
+    async def get_session(self) -> AsyncSession:
+        """Get the current session.
+
+        Returns:
+            Current AsyncSession instance
+
+        Raises:
+            RuntimeError: If not within context manager
+        """
+        if self._session is None:
+            raise RuntimeError(
+                "TransactionManager must be used as async context manager"
+            )
+        return self._session
+
+    async def begin(self) -> None:
+        """Begin a new transaction.
+
+        Raises:
+            RuntimeError: If already in a transaction or no session
+        """
+        if self._session is None:
+            raise RuntimeError("No active session")
+
+        if self._in_transaction:
+            raise RuntimeError("Already in a transaction")
+
+        await self._session.begin()
+        self._in_transaction = True
+        logger.debug("TransactionManager: Transaction started")
+
+    async def commit(self) -> None:
+        """Commit the current transaction.
+
+        Raises:
+            RuntimeError: If not in a transaction
+        """
+        if not self._in_transaction or self._session is None:
+            raise RuntimeError("Not in a transaction")
+
+        await self._session.commit()
+        self._in_transaction = False
+        logger.debug("TransactionManager: Transaction committed")
+
+    async def rollback(self) -> None:
+        """Rollback the current transaction.
+
+        Raises:
+            RuntimeError: If not in a transaction
+        """
+        if self._session is None:
+            raise RuntimeError("No active session")
+
+        await self._session.rollback()
+        self._in_transaction = False
+        logger.debug("TransactionManager: Transaction rolled back")
+
+    async def savepoint(self, name: Optional[str] = None) -> "SavepointHandle":
+        """Create a savepoint within the current transaction.
+
+        Args:
+            name: Optional savepoint name
+
+        Returns:
+            SavepointHandle for controlling the savepoint
+
+        Raises:
+            RuntimeError: If not in a transaction
+        """
+        if not self._in_transaction or self._session is None:
+            raise RuntimeError("Must be in a transaction to create savepoint")
+
+        nested = await self._session.begin_nested()
+        return SavepointHandle(nested, name or "unnamed")
+
+    def is_in_transaction(self) -> bool:
+        """Check if currently in a transaction.
+
+        Returns:
+            True if in an active transaction
+        """
+        return self._in_transaction
+
+    def get_transaction_depth(self) -> int:
+        """Get current transaction nesting depth.
+
+        Returns:
+            0 if not in transaction, 1+ for nested transactions
+        """
+        if not self._in_transaction:
+            return 0
+        return 1  # Basic implementation - could be extended
+
+
+class SavepointHandle:
+    """Handle for controlling a database savepoint.
+
+    Attributes:
+        _nested: SQLAlchemy nested transaction
+        _name: Savepoint name for logging
+        _released: Whether savepoint has been released
+    """
+
+    def __init__(self, nested: object, name: str) -> None:
+        """Initialize savepoint handle.
+
+        Args:
+            nested: SQLAlchemy nested transaction object
+            name: Savepoint name
+        """
+        self._nested = nested
+        self._name = name
+        self._released = False
+        logger.debug("Created savepoint: %s", name)
+
+    async def rollback(self) -> None:
+        """Rollback to this savepoint."""
+        if not self._released:
+            await self._nested.rollback()
+            self._released = True
+            logger.debug("Rolled back savepoint: %s", self._name)
+
+    async def release(self) -> None:
+        """Release (commit) this savepoint."""
+        if not self._released:
+            # Nested transactions commit automatically in SQLAlchemy
+            self._released = True
+            logger.debug("Released savepoint: %s", self._name)
+
+
+def is_session_in_transaction(session: AsyncSession | Session) -> bool:
+    """Check if a session is currently in a transaction.
+
+    Args:
+        session: SQLAlchemy session (sync or async)
+
+    Returns:
+        True if session is in an active transaction
+    """
+    return session.in_transaction()
+
+
+def get_session_transaction_depth(session: AsyncSession | Session) -> int:
+    """Get the transaction nesting depth of a session.
+
+    Args:
+        session: SQLAlchemy session (sync or async)
+
+    Returns:
+        Number of nested transactions (0 if not in transaction)
+    """
+    if not session.in_transaction():
+        return 0
+
+    # Check for nested transaction state
+    # Note: SQLAlchemy doesn't directly expose nesting depth
+    return 1
```

The compare view closes by deleting the standalone integration-examples module in full:

```diff
@@ -1,479 +0,0 @@
-"""Example integration of database service with existing services.
-
-This file demonstrates how to integrate the database service layer with
-existing application services like AnimeService and DownloadService.
-
-These examples show patterns for:
-- Persisting scan results to database
-- Loading queue from database on startup
-- Syncing download progress to database
-- Maintaining consistency between in-memory state and database
-"""
-from __future__ import annotations
-
-import logging
-from typing import List, Optional
-
-from sqlalchemy.ext.asyncio import AsyncSession
-
-from src.core.entities.series import Serie
-from src.server.database.models import DownloadPriority, DownloadStatus
-from src.server.database.service import (
-    AnimeSeriesService,
-    DownloadQueueService,
-    EpisodeService,
-)
-
-logger = logging.getLogger(__name__)
-
-
-# ============================================================================
-# Example 1: Persist Scan Results
-# ============================================================================
-
-
-async def persist_scan_results(
-    db: AsyncSession,
-    series_list: List[Serie],
-) -> None:
-    """Persist scan results to database.
-
-    Updates or creates anime series and their episodes based on
-    scan results from SerieScanner.
-
-    Args:
-        db: Database session
-        series_list: List of Serie objects from scan
-    """
-    logger.info(f"Persisting {len(series_list)} series to database")
-
-    for serie in series_list:
-        # Check if series exists
-        existing = await AnimeSeriesService.get_by_key(db, serie.key)
-
-        if existing:
-            # Update existing series
-            await AnimeSeriesService.update(
-                db,
-                existing.id,
-                name=serie.name,
-                site=serie.site,
-                folder=serie.folder,
-                episode_dict=serie.episode_dict,
-            )
-            series_id = existing.id
-        else:
-            # Create new series
-            new_series = await AnimeSeriesService.create(
-                db,
-                key=serie.key,
-                name=serie.name,
-                site=serie.site,
-                folder=serie.folder,
-                episode_dict=serie.episode_dict,
-            )
-            series_id = new_series.id
-
-        # Update episodes for this series
-        await _update_episodes(db, series_id, serie)
-
-    await db.commit()
-    logger.info("Scan results persisted successfully")
-
-
-async def _update_episodes(
-    db: AsyncSession,
-    series_id: int,
-    serie: Serie,
-) -> None:
-    """Update episodes for a series.
-
-    Args:
-        db: Database session
-        series_id: Series ID in database
-        serie: Serie object with episode information
-    """
-    # Get existing episodes
-    existing_episodes = await EpisodeService.get_by_series(db, series_id)
-    existing_map = {
-        (ep.season, ep.episode_number): ep
-        for ep in existing_episodes
-    }
-
-    # Iterate through episode_dict to create/update episodes
-    for season, episodes in serie.episode_dict.items():
-        for ep_num in episodes:
-            key = (int(season), int(ep_num))
-
-            if key in existing_map:
-                # Episode exists, check if downloaded
-                episode = existing_map[key]
-                # Update if needed (e.g., file path changed)
-                if not episode.is_downloaded:
-                    # Check if file exists locally
-                    # This would be done by checking serie.local_episodes
-                    pass
-            else:
-                # Create new episode
-                await EpisodeService.create(
-                    db,
-                    series_id=series_id,
-                    season=int(season),
-                    episode_number=int(ep_num),
-                    is_downloaded=False,
-                )
-
-
-# ============================================================================
-# Example 2: Load Queue from Database
-# ============================================================================
-
-
-async def load_queue_from_database(
-    db: AsyncSession,
-) -> List[dict]:
-    """Load download queue from database.
-
-    Retrieves pending and active download items from database and
-    converts them to format suitable for DownloadService.
-
-    Args:
-        db: Database session
-
-    Returns:
-        List of download items as dictionaries
-    """
-    logger.info("Loading download queue from database")
-
-    # Get pending and active items
-    pending = await DownloadQueueService.get_pending(db)
-    active = await DownloadQueueService.get_active(db)
-
-    all_items = pending + active
-
-    # Convert to dictionary format for DownloadService
-    queue_items = []
-    for item in all_items:
-        queue_items.append({
-            "id": item.id,
-            "series_id": item.series_id,
-            "season": item.season,
-            "episode_number": item.episode_number,
-            "status": item.status.value,
-            "priority": item.priority.value,
-            "progress_percent": item.progress_percent,
-            "downloaded_bytes": item.downloaded_bytes,
-            "total_bytes": item.total_bytes,
-            "download_speed": item.download_speed,
-            "error_message": item.error_message,
-            "retry_count": item.retry_count,
-        })
-
-    logger.info(f"Loaded {len(queue_items)} items from database")
-    return queue_items
-
-
-# ============================================================================
-# Example 3: Sync Download Progress to Database
-# ============================================================================
-
-
-async def sync_download_progress(
-    db: AsyncSession,
-    item_id: int,
-    progress_percent: float,
-    downloaded_bytes: int,
-    total_bytes: Optional[int] = None,
-    download_speed: Optional[float] = None,
-) -> None:
-    """Sync download progress to database.
-
-    Updates download queue item progress in database. This would be called
-    from the download progress callback.
-
-    Args:
-        db: Database session
-        item_id: Download queue item ID
-        progress_percent: Progress percentage (0-100)
-        downloaded_bytes: Bytes downloaded
-        total_bytes: Optional total file size
-        download_speed: Optional current speed (bytes/sec)
-    """
-    await DownloadQueueService.update_progress(
-        db,
-        item_id,
-        progress_percent,
-        downloaded_bytes,
-        total_bytes,
-        download_speed,
-    )
-    await db.commit()
-
-
-async def mark_download_complete(
-    db: AsyncSession,
-    item_id: int,
-    file_path: str,
-    file_size: int,
-) -> None:
-    """Mark download as complete in database.
-
-    Updates download queue item status and marks episode as downloaded.
-
-    Args:
-        db: Database session
-        item_id: Download queue item ID
-        file_path: Path to downloaded file
-        file_size: File size in bytes
-    """
-    # Get download item
-    item = await DownloadQueueService.get_by_id(db, item_id)
-    if not item:
-        logger.error(f"Download item {item_id} not found")
-        return
-
-    # Update download status
-    await DownloadQueueService.update_status(
-        db,
-        item_id,
-        DownloadStatus.COMPLETED,
-    )
-
-    # Find or create episode and mark as downloaded
-    episode = await EpisodeService.get_by_episode(
-        db,
-        item.series_id,
-        item.season,
-        item.episode_number,
-    )
-
-    if episode:
-        await EpisodeService.mark_downloaded(
-            db,
-            episode.id,
-            file_path,
-            file_size,
-        )
-    else:
-        # Create episode
-        episode = await EpisodeService.create(
-            db,
-            series_id=item.series_id,
-            season=item.season,
-            episode_number=item.episode_number,
-            file_path=file_path,
-            file_size=file_size,
-            is_downloaded=True,
-        )
-
-    await db.commit()
-    logger.info(
-        f"Marked download complete: S{item.season:02d}E{item.episode_number:02d}"
-    )
-
-
-async def mark_download_failed(
-    db: AsyncSession,
-    item_id: int,
-    error_message: str,
-) -> None:
-    """Mark download as failed in database.
-
-    Args:
-        db: Database session
-        item_id: Download queue item ID
-        error_message: Error description
-    """
-    await DownloadQueueService.update_status(
-        db,
-        item_id,
-        DownloadStatus.FAILED,
-        error_message=error_message,
-    )
-    await db.commit()
-
-
-# ============================================================================
-# Example 4: Add Episodes to Download Queue
-# ============================================================================
-
-
-async def add_episodes_to_queue(
-    db: AsyncSession,
-    series_key: str,
-    episodes: List[tuple[int, int]],  # List of (season, episode) tuples
-    priority: DownloadPriority = DownloadPriority.NORMAL,
-) -> int:
-    """Add multiple episodes to download queue.
-
-    Args:
-        db: Database session
-        series_key: Series provider key
-        episodes: List of (season, episode_number) tuples
-        priority: Download priority
-
-    Returns:
-        Number of episodes added to queue
-    """
-    # Get series
-    series = await AnimeSeriesService.get_by_key(db, series_key)
-    if not series:
-        logger.error(f"Series not found: {series_key}")
-        return 0
-
-    added_count = 0
-    for season, episode_number in episodes:
-        # Check if already in queue
-        existing_items = await DownloadQueueService.get_all(db)
-        already_queued = any(
-            item.series_id == series.id
-            and item.season == season
-            and item.episode_number == episode_number
-            and item.status in (DownloadStatus.PENDING, DownloadStatus.DOWNLOADING)
-            for item in existing_items
-        )
-
-        if not already_queued:
-            await DownloadQueueService.create(
-                db,
-                series_id=series.id,
-                season=season,
-                episode_number=episode_number,
-                priority=priority,
-            )
-            added_count += 1
-
-    await db.commit()
-    logger.info(f"Added {added_count} episodes to download queue")
-    return added_count
-
-
-# ============================================================================
```
# Example 5: Integration with AnimeService
|
|
||||||
# ============================================================================
|
|
||||||
|
|
||||||
|
|
||||||
class EnhancedAnimeService:
|
|
||||||
"""Enhanced AnimeService with database persistence.
|
|
||||||
|
|
||||||
This is an example of how to wrap the existing AnimeService with
|
|
||||||
database persistence capabilities.
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, db_session_factory):
|
|
||||||
"""Initialize enhanced anime service.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
db_session_factory: Async session factory for database access
|
|
||||||
"""
|
|
||||||
self.db_session_factory = db_session_factory
|
|
||||||
|
|
||||||
async def rescan_with_persistence(self, directory: str) -> dict:
|
|
||||||
"""Rescan directory and persist results.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
directory: Directory to scan
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Scan results dictionary
|
|
||||||
"""
|
|
||||||
# Import here to avoid circular dependencies
|
|
||||||
from src.core.SeriesApp import SeriesApp
|
|
||||||
|
|
||||||
# Perform scan
|
|
||||||
app = SeriesApp(directory)
|
|
||||||
series_list = app.ReScan()
|
|
||||||
|
|
||||||
# Persist to database
|
|
||||||
async with self.db_session_factory() as db:
|
|
||||||
await persist_scan_results(db, series_list)
|
|
||||||
|
|
||||||
return {
|
|
||||||
"total_series": len(series_list),
|
|
||||||
"message": "Scan completed and persisted to database",
|
|
||||||
}
|
|
||||||
|
|
||||||
async def get_series_with_missing_episodes(self) -> List[dict]:
|
|
||||||
"""Get series with missing episodes from database.
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
List of series with missing episodes
|
|
||||||
"""
|
|
||||||
async with self.db_session_factory() as db:
|
|
||||||
# Get all series
|
|
||||||
all_series = await AnimeSeriesService.get_all(
|
|
||||||
db,
|
|
||||||
with_episodes=True,
|
|
||||||
)
|
|
||||||
|
|
||||||
# Filter series with missing episodes
|
|
||||||
series_with_missing = []
|
|
||||||
for series in all_series:
|
|
||||||
if series.episode_dict:
|
|
||||||
total_episodes = sum(
|
|
||||||
len(eps) for eps in series.episode_dict.values()
|
|
||||||
)
|
|
||||||
downloaded_episodes = sum(
|
|
||||||
1 for ep in series.episodes if ep.is_downloaded
|
|
||||||
)
|
|
||||||
|
|
||||||
if downloaded_episodes < total_episodes:
|
|
||||||
series_with_missing.append({
|
|
||||||
"id": series.id,
|
|
||||||
"key": series.key,
|
|
||||||
"name": series.name,
|
|
||||||
"total_episodes": total_episodes,
|
|
||||||
"downloaded_episodes": downloaded_episodes,
|
|
||||||
"missing_episodes": total_episodes - downloaded_episodes,
|
|
||||||
})
|
|
||||||
|
|
||||||
return series_with_missing
|
|
||||||
|
|
||||||
|
|
||||||
# ============================================================================
|
|
||||||
# Usage Example
|
|
||||||
# ============================================================================
|
|
||||||
|
|
||||||
|
|
||||||
async def example_usage():
|
|
||||||
"""Example usage of database service integration."""
|
|
||||||
from src.server.database import get_db_session
|
|
||||||
|
|
||||||
# Get database session
|
|
||||||
async with get_db_session() as db:
|
|
||||||
# Example 1: Add episodes to queue
|
|
||||||
added = await add_episodes_to_queue(
|
|
||||||
db,
|
|
||||||
series_key="attack-on-titan",
|
|
||||||
episodes=[(1, 1), (1, 2), (1, 3)],
|
|
||||||
priority=DownloadPriority.HIGH,
|
|
||||||
)
|
|
||||||
print(f"Added {added} episodes to queue")
|
|
||||||
|
|
||||||
# Example 2: Load queue
|
|
||||||
queue_items = await load_queue_from_database(db)
|
|
||||||
print(f"Queue has {len(queue_items)} items")
|
|
||||||
|
|
||||||
# Example 3: Update progress
|
|
||||||
if queue_items:
|
|
||||||
await sync_download_progress(
|
|
||||||
db,
|
|
||||||
item_id=queue_items[0]["id"],
|
|
||||||
progress_percent=50.0,
|
|
||||||
downloaded_bytes=500000,
|
|
||||||
total_bytes=1000000,
|
|
||||||
)
|
|
||||||
|
|
||||||
# Example 4: Mark complete
|
|
||||||
if queue_items:
|
|
||||||
await mark_download_complete(
|
|
||||||
db,
|
|
||||||
item_id=queue_items[0]["id"],
|
|
||||||
file_path="/path/to/file.mp4",
|
|
||||||
file_size=1000000,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
import asyncio
|
|
||||||
asyncio.run(example_usage())
|
|
||||||
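# ----------------------------------------------------------------------------
# A minimal sketch, not part of the file above: `sync_download_progress` is
# documented as being called from the download progress callback, so one way
# to bridge the two is a small adapter. The callback signature below is an
# illustrative assumption, not the DownloadService API.
# ----------------------------------------------------------------------------


def make_progress_callback(db_session_factory, item_id: int):
    """Build an async progress callback that persists updates for one item."""

    async def on_progress(
        percent: float,
        downloaded: int,
        total: Optional[int] = None,
        speed: Optional[float] = None,
    ) -> None:
        # One short-lived session per update; sync_download_progress commits.
        async with db_session_factory() as db:
            await sync_download_progress(db, item_id, percent, downloaded, total, speed)

    return on_progress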
@@ -2,12 +2,9 @@
 
 This module provides comprehensive database initialization functionality:
 - Schema creation and validation
-- Initial data migration
 - Database health checks
 - Schema versioning support
-- Migration utilities
 
-For production deployments, consider using Alembic for managed migrations.
 """
 from __future__ import annotations
 
@@ -47,7 +44,7 @@ EXPECTED_INDEXES = {
     "episodes": ["ix_episodes_series_id"],
     "download_queue": [
         "ix_download_queue_series_id",
-        "ix_download_queue_status",
+        "ix_download_queue_episode_id",
     ],
     "user_sessions": [
         "ix_user_sessions_session_id",
@@ -316,7 +313,6 @@ async def get_schema_version(engine: Optional[AsyncEngine] = None) -> str:
     """Get current database schema version.
 
     Returns version string based on existing tables and structure.
-    For production, consider using Alembic versioning.
 
     Args:
         engine: Optional database engine (uses default if not provided)
@@ -354,8 +350,6 @@ async def create_schema_version_table(
 ) -> None:
     """Create schema version tracking table.
 
-    Future enhancement for tracking schema migrations with Alembic.
-
     Args:
         engine: Optional database engine (uses default if not provided)
     """
@@ -587,60 +581,6 @@ def get_database_info() -> Dict[str, Any]:
     }
 
 
-def get_migration_guide() -> str:
-    """Get migration guide for production deployments.
-
-    Returns:
-        Migration guide text
-    """
-    return """
-Database Migration Guide
-========================
-
-Current Setup: SQLAlchemy create_all()
-- Automatically creates tables on startup
-- Suitable for development and single-instance deployments
-- Schema changes require manual handling
-
-For Production with Alembic:
-============================
-
-1. Initialize Alembic (already installed):
-   alembic init alembic
-
-2. Configure alembic/env.py:
-   from src.server.database.base import Base
-   target_metadata = Base.metadata
-
-3. Configure alembic.ini:
-   sqlalchemy.url = <your-database-url>
-
-4. Generate initial migration:
-   alembic revision --autogenerate -m "Initial schema v1.0.0"
-
-5. Review migration in alembic/versions/
-
-6. Apply migration:
-   alembic upgrade head
-
-7. For future schema changes:
-   - Modify models in src/server/database/models.py
-   - Generate migration: alembic revision --autogenerate -m "Description"
-   - Review generated migration
-   - Test in staging environment
-   - Apply: alembic upgrade head
-   - For rollback: alembic downgrade -1
-
-Best Practices:
-==============
-- Always backup database before migrations
-- Test migrations in staging first
-- Review auto-generated migrations carefully
-- Keep migrations in version control
-- Document breaking changes
-"""
-
-
 # =============================================================================
 # Public API
 # =============================================================================
@@ -656,7 +596,6 @@ __all__ = [
     "check_database_health",
     "create_database_backup",
     "get_database_info",
-    "get_migration_guide",
    "CURRENT_SCHEMA_VERSION",
    "EXPECTED_TABLES",
]
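# A minimal sketch of the alembic/env.py wiring that the removed
# get_migration_guide() describes (step 2): point Alembic's autogenerate at
# the project's declarative metadata. The rest of env.py stays as
# `alembic init alembic` generated it.
from src.server.database.base import Base

# Alembic diffs this metadata against the live database when running
# `alembic revision --autogenerate -m "..."`.
target_metadata = Base.metadata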
@@ -1,167 +0,0 @@
"""Database migration utilities.

This module provides utilities for database migrations and schema versioning.
Alembic integration can be added when needed for production environments.

For now, we use SQLAlchemy's create_all for automatic schema creation.
"""
from __future__ import annotations

import logging
from typing import Optional

from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncEngine

from src.server.database.base import Base
from src.server.database.connection import get_engine, get_sync_engine

logger = logging.getLogger(__name__)


async def initialize_schema(engine: Optional[AsyncEngine] = None) -> None:
    """Initialize database schema.

    Creates all tables defined in Base metadata if they don't exist.
    This is a simple migration strategy suitable for single-instance deployments.

    For production with multiple instances, consider using Alembic:
    - alembic init alembic
    - alembic revision --autogenerate -m "Initial schema"
    - alembic upgrade head

    Args:
        engine: Optional database engine (uses default if not provided)

    Raises:
        RuntimeError: If database is not initialized
    """
    if engine is None:
        engine = get_engine()

    logger.info("Initializing database schema...")

    # Create all tables
    async with engine.begin() as conn:
        await conn.run_sync(Base.metadata.create_all)

    logger.info("Database schema initialized successfully")


async def check_schema_version(engine: Optional[AsyncEngine] = None) -> str:
    """Check current database schema version.

    Returns a simple version identifier based on existing tables.
    For production, consider using Alembic for proper versioning.

    Args:
        engine: Optional database engine (uses default if not provided)

    Returns:
        Schema version string

    Raises:
        RuntimeError: If database is not initialized
    """
    if engine is None:
        engine = get_engine()

    async with engine.connect() as conn:
        # Check which tables exist
        result = await conn.execute(
            text(
                "SELECT name FROM sqlite_master "
                "WHERE type='table' AND name NOT LIKE 'sqlite_%'"
            )
        )
        tables = [row[0] for row in result]

    if not tables:
        return "empty"
    elif len(tables) == 4 and all(
        t in tables for t in [
            "anime_series",
            "episodes",
            "download_queue",
            "user_sessions",
        ]
    ):
        return "v1.0"
    else:
        return "custom"


def get_migration_info() -> str:
    """Get information about database migration setup.

    Returns:
        Migration setup information
    """
    return """
Database Migration Information
==============================

Current Strategy: SQLAlchemy create_all()
- Automatically creates tables on startup
- Suitable for development and single-instance deployments
- Schema changes require manual handling

For Production Migrations (Alembic):
====================================

1. Initialize Alembic:
   alembic init alembic

2. Configure alembic/env.py:
   - Import Base from src.server.database.base
   - Set target_metadata = Base.metadata

3. Configure alembic.ini:
   - Set sqlalchemy.url to your database URL

4. Generate initial migration:
   alembic revision --autogenerate -m "Initial schema"

5. Apply migrations:
   alembic upgrade head

6. For future changes:
   - Modify models in src/server/database/models.py
   - Generate migration: alembic revision --autogenerate -m "Description"
   - Review generated migration in alembic/versions/
   - Apply: alembic upgrade head

Benefits of Alembic:
- Version control for database schema
- Automatic migration generation from model changes
- Rollback support with downgrade scripts
- Multi-instance deployment support
- Safe schema changes in production
"""


# =============================================================================
# Future Alembic Integration
# =============================================================================
#
# When ready to use Alembic, follow these steps:
#
# 1. Install Alembic (already in requirements.txt):
#    pip install alembic
#
# 2. Initialize Alembic from project root:
#    alembic init alembic
#
# 3. Update alembic/env.py to use our Base:
#    from src.server.database.base import Base
#    target_metadata = Base.metadata
#
# 4. Configure alembic.ini with DATABASE_URL from settings
#
# 5. Generate initial migration:
#    alembic revision --autogenerate -m "Initial schema"
#
# 6. Review generated migration and apply:
#    alembic upgrade head
#
# =============================================================================
@@ -1,236 +0,0 @@
"""
Initial database schema migration.

This migration creates the base tables for the Aniworld application,
including users, anime, downloads, and configuration tables.

Version: 20250124_001
Created: 2025-01-24
"""

import logging

from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession

from ..migrations.base import Migration, MigrationError

logger = logging.getLogger(__name__)


class InitialSchemaMigration(Migration):
    """
    Creates initial database schema.

    This migration sets up all core tables needed for the application:
    - users: User accounts and authentication
    - anime: Anime series metadata
    - episodes: Episode information
    - downloads: Download queue and history
    - config: Application configuration
    """

    def __init__(self):
        """Initialize the initial schema migration."""
        super().__init__(
            version="20250124_001",
            description="Create initial database schema",
        )

    async def upgrade(self, session: AsyncSession) -> None:
        """
        Create all initial tables.

        Args:
            session: Database session

        Raises:
            MigrationError: If table creation fails
        """
        try:
            # Create users table
            await session.execute(
                text(
                    """
                    CREATE TABLE IF NOT EXISTS users (
                        id INTEGER PRIMARY KEY AUTOINCREMENT,
                        username TEXT NOT NULL UNIQUE,
                        email TEXT,
                        password_hash TEXT NOT NULL,
                        is_active BOOLEAN DEFAULT 1,
                        is_admin BOOLEAN DEFAULT 0,
                        created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                        updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
                    )
                    """
                )
            )

            # Create anime table
            await session.execute(
                text(
                    """
                    CREATE TABLE IF NOT EXISTS anime (
                        id INTEGER PRIMARY KEY AUTOINCREMENT,
                        title TEXT NOT NULL,
                        original_title TEXT,
                        description TEXT,
                        genres TEXT,
                        release_year INTEGER,
                        status TEXT,
                        total_episodes INTEGER,
                        cover_image_url TEXT,
                        aniworld_url TEXT,
                        mal_id INTEGER,
                        anilist_id INTEGER,
                        added_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                        updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
                    )
                    """
                )
            )

            # Create episodes table
            await session.execute(
                text(
                    """
                    CREATE TABLE IF NOT EXISTS episodes (
                        id INTEGER PRIMARY KEY AUTOINCREMENT,
                        anime_id INTEGER NOT NULL,
                        episode_number INTEGER NOT NULL,
                        season_number INTEGER DEFAULT 1,
                        title TEXT,
                        description TEXT,
                        duration_minutes INTEGER,
                        air_date DATE,
                        stream_url TEXT,
                        download_url TEXT,
                        file_path TEXT,
                        file_size_bytes INTEGER,
                        is_downloaded BOOLEAN DEFAULT 0,
                        download_progress REAL DEFAULT 0.0,
                        created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                        updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                        FOREIGN KEY (anime_id) REFERENCES anime(id)
                            ON DELETE CASCADE,
                        UNIQUE (anime_id, season_number, episode_number)
                    )
                    """
                )
            )

            # Create downloads table
            await session.execute(
                text(
                    """
                    CREATE TABLE IF NOT EXISTS downloads (
                        id INTEGER PRIMARY KEY AUTOINCREMENT,
                        episode_id INTEGER NOT NULL,
                        user_id INTEGER,
                        status TEXT NOT NULL DEFAULT 'pending',
                        priority INTEGER DEFAULT 5,
                        progress REAL DEFAULT 0.0,
                        download_speed_mbps REAL,
                        eta_seconds INTEGER,
                        started_at TIMESTAMP,
                        completed_at TIMESTAMP,
                        failed_at TIMESTAMP,
                        error_message TEXT,
                        retry_count INTEGER DEFAULT 0,
                        created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                        updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                        FOREIGN KEY (episode_id) REFERENCES episodes(id)
                            ON DELETE CASCADE,
                        FOREIGN KEY (user_id) REFERENCES users(id)
                            ON DELETE SET NULL
                    )
                    """
                )
            )

            # Create config table
            await session.execute(
                text(
                    """
                    CREATE TABLE IF NOT EXISTS config (
                        id INTEGER PRIMARY KEY AUTOINCREMENT,
                        key TEXT NOT NULL UNIQUE,
                        value TEXT NOT NULL,
                        category TEXT DEFAULT 'general',
                        description TEXT,
                        is_secret BOOLEAN DEFAULT 0,
                        created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                        updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
                    )
                    """
                )
            )

            # Create indexes for better performance
            await session.execute(
                text(
                    "CREATE INDEX IF NOT EXISTS idx_anime_title "
                    "ON anime(title)"
                )
            )

            await session.execute(
                text(
                    "CREATE INDEX IF NOT EXISTS idx_episodes_anime_id "
                    "ON episodes(anime_id)"
                )
            )

            await session.execute(
                text(
                    "CREATE INDEX IF NOT EXISTS idx_downloads_status "
                    "ON downloads(status)"
                )
            )

            await session.execute(
                text(
                    "CREATE INDEX IF NOT EXISTS "
                    "idx_downloads_episode_id ON downloads(episode_id)"
                )
            )

            logger.info("Initial schema created successfully")

        except Exception as e:
            logger.error(f"Failed to create initial schema: {e}")
            raise MigrationError(
                f"Initial schema creation failed: {e}"
            ) from e

    async def downgrade(self, session: AsyncSession) -> None:
        """
        Drop all initial tables.

        Args:
            session: Database session

        Raises:
            MigrationError: If table dropping fails
        """
        try:
            # Drop tables in reverse order to respect foreign keys
            tables = [
                "downloads",
                "episodes",
                "anime",
                "users",
                "config",
            ]

            for table in tables:
                await session.execute(text(f"DROP TABLE IF EXISTS {table}"))
                logger.debug(f"Dropped table: {table}")

            logger.info("Initial schema rolled back successfully")

        except Exception as e:
            logger.error(f"Failed to rollback initial schema: {e}")
            raise MigrationError(
                f"Initial schema rollback failed: {e}"
            ) from e
@@ -1,17 +0,0 @@
"""
Database migration system for Aniworld application.

This package provides tools for managing database schema changes,
including migration creation, execution, and rollback capabilities.
"""

from .base import Migration, MigrationError
from .runner import MigrationRunner
from .validator import MigrationValidator

__all__ = [
    "Migration",
    "MigrationError",
    "MigrationRunner",
    "MigrationValidator",
]
@@ -1,128 +0,0 @@
"""
Base migration classes and utilities.

This module provides the foundation for database migrations,
including the abstract Migration class and error handling.
"""

from abc import ABC, abstractmethod
from datetime import datetime
from typing import Optional

from sqlalchemy.ext.asyncio import AsyncSession


class MigrationError(Exception):
    """Base exception for migration-related errors."""

    pass


class Migration(ABC):
    """
    Abstract base class for database migrations.

    Each migration should inherit from this class and implement
    the upgrade and downgrade methods.

    Attributes:
        version: Unique version identifier (e.g., "20250124_001")
        description: Human-readable description of the migration
        created_at: Timestamp when migration was created
    """

    def __init__(
        self,
        version: str,
        description: str,
        created_at: Optional[datetime] = None,
    ):
        """
        Initialize migration.

        Args:
            version: Unique version identifier
            description: Human-readable description
            created_at: Creation timestamp (defaults to now)
        """
        self.version = version
        self.description = description
        self.created_at = created_at or datetime.now()

    @abstractmethod
    async def upgrade(self, session: AsyncSession) -> None:
        """
        Apply the migration.

        Args:
            session: Database session for executing changes

        Raises:
            MigrationError: If migration fails
        """
        pass

    @abstractmethod
    async def downgrade(self, session: AsyncSession) -> None:
        """
        Revert the migration.

        Args:
            session: Database session for reverting changes

        Raises:
            MigrationError: If rollback fails
        """
        pass

    def __repr__(self) -> str:
        """Return string representation of migration."""
        return f"Migration({self.version}: {self.description})"

    def __eq__(self, other: object) -> bool:
        """Check equality based on version."""
        if not isinstance(other, Migration):
            return False
        return self.version == other.version

    def __hash__(self) -> int:
        """Return hash based on version."""
        return hash(self.version)


class MigrationHistory:
    """
    Tracks applied migrations in the database.

    This model stores information about which migrations have been
    applied, when they were applied, and their execution status.
    """

    __tablename__ = "migration_history"

    def __init__(
        self,
        version: str,
        description: str,
        applied_at: datetime,
        execution_time_ms: int,
        success: bool = True,
        error_message: Optional[str] = None,
    ):
        """
        Initialize migration history record.

        Args:
            version: Migration version identifier
            description: Migration description
            applied_at: Timestamp when migration was applied
            execution_time_ms: Time taken to execute in milliseconds
            success: Whether migration succeeded
            error_message: Error message if migration failed
        """
        self.version = version
        self.description = description
        self.applied_at = applied_at
        self.execution_time_ms = execution_time_ms
        self.success = success
        self.error_message = error_message
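# A hedged sketch, not part of the removed module above: the subclassing
# pattern the Migration ABC expects. The table and column names are
# illustrative assumptions.
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession


class AddEpisodeNotesMigration(Migration):
    """Example migration adding a nullable column, with a matching rollback."""

    def __init__(self):
        super().__init__(
            version="20250125_001",
            description="Add notes column to episodes",
        )

    async def upgrade(self, session: AsyncSession) -> None:
        # Nullable column, so no backfill is needed after ADD COLUMN.
        await session.execute(text("ALTER TABLE episodes ADD COLUMN notes TEXT"))

    async def downgrade(self, session: AsyncSession) -> None:
        # DROP COLUMN requires SQLite 3.35+; older versions need a table rebuild.
        await session.execute(text("ALTER TABLE episodes DROP COLUMN notes"))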
@@ -1,323 +0,0 @@
"""
Migration runner for executing database migrations.

This module handles the execution of migrations in the correct order,
tracks migration history, and provides rollback capabilities.
"""

import importlib.util
import logging
import time
from datetime import datetime
from pathlib import Path
from typing import List, Optional

from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession

from .base import Migration, MigrationError, MigrationHistory

logger = logging.getLogger(__name__)


class MigrationRunner:
    """
    Manages database migration execution and tracking.

    This class handles loading migrations, executing them in order,
    tracking their status, and rolling back when needed.
    """

    def __init__(self, migrations_dir: Path, session: AsyncSession):
        """
        Initialize migration runner.

        Args:
            migrations_dir: Directory containing migration files
            session: Database session for executing migrations
        """
        self.migrations_dir = migrations_dir
        self.session = session
        self._migrations: List[Migration] = []

    async def initialize(self) -> None:
        """
        Initialize migration system by creating tracking table if needed.

        Raises:
            MigrationError: If initialization fails
        """
        try:
            # Create migration_history table if it doesn't exist
            create_table_sql = """
                CREATE TABLE IF NOT EXISTS migration_history (
                    id INTEGER PRIMARY KEY AUTOINCREMENT,
                    version TEXT NOT NULL UNIQUE,
                    description TEXT NOT NULL,
                    applied_at TIMESTAMP NOT NULL,
                    execution_time_ms INTEGER NOT NULL,
                    success BOOLEAN NOT NULL DEFAULT 1,
                    error_message TEXT
                )
            """
            await self.session.execute(text(create_table_sql))
            await self.session.commit()
            logger.info("Migration system initialized")
        except Exception as e:
            logger.error(f"Failed to initialize migration system: {e}")
            raise MigrationError(f"Initialization failed: {e}") from e

    def load_migrations(self) -> None:
        """
        Load all migration files from the migrations directory.

        Migration files should be named in format: {version}_{description}.py
        and contain a Migration class that inherits from base.Migration.

        Raises:
            MigrationError: If loading migrations fails
        """
        try:
            self._migrations.clear()

            if not self.migrations_dir.exists():
                logger.warning(f"Migrations directory does not exist: {self.migrations_dir}")
                return

            # Find all Python files in migrations directory
            migration_files = sorted(self.migrations_dir.glob("*.py"))
            migration_files = [f for f in migration_files if f.name != "__init__.py"]

            for file_path in migration_files:
                try:
                    # Import the migration module dynamically
                    spec = importlib.util.spec_from_file_location(
                        f"migration.{file_path.stem}", file_path
                    )
                    if spec and spec.loader:
                        module = importlib.util.module_from_spec(spec)
                        spec.loader.exec_module(module)

                        # Find Migration subclass in module
                        for attr_name in dir(module):
                            attr = getattr(module, attr_name)
                            if (
                                isinstance(attr, type)
                                and issubclass(attr, Migration)
                                and attr != Migration
                            ):
                                migration_instance = attr()
                                self._migrations.append(migration_instance)
                                logger.debug(f"Loaded migration: {migration_instance.version}")
                                break

                except Exception as e:
                    logger.error(f"Failed to load migration {file_path.name}: {e}")
                    raise MigrationError(f"Failed to load {file_path.name}: {e}") from e

            # Sort migrations by version
            self._migrations.sort(key=lambda m: m.version)
            logger.info(f"Loaded {len(self._migrations)} migrations")

        except Exception as e:
            logger.error(f"Failed to load migrations: {e}")
            raise MigrationError(f"Loading migrations failed: {e}") from e

    async def get_applied_migrations(self) -> List[str]:
        """
        Get list of already applied migration versions.

        Returns:
            List of migration versions that have been applied

        Raises:
            MigrationError: If query fails
        """
        try:
            result = await self.session.execute(
                text("SELECT version FROM migration_history WHERE success = 1 ORDER BY version")
            )
            versions = [row[0] for row in result.fetchall()]
            return versions
        except Exception as e:
            logger.error(f"Failed to get applied migrations: {e}")
            raise MigrationError(f"Query failed: {e}") from e

    async def get_pending_migrations(self) -> List[Migration]:
        """
        Get list of migrations that haven't been applied yet.

        Returns:
            List of pending Migration objects

        Raises:
            MigrationError: If check fails
        """
        applied = await self.get_applied_migrations()
        pending = [m for m in self._migrations if m.version not in applied]
        return pending

    async def apply_migration(self, migration: Migration) -> None:
        """
        Apply a single migration.

        Args:
            migration: Migration to apply

        Raises:
            MigrationError: If migration fails
        """
        start_time = time.time()
        success = False
        error_message = None

        try:
            logger.info(f"Applying migration: {migration.version} - {migration.description}")

            # Execute the migration
            await migration.upgrade(self.session)
            await self.session.commit()

            success = True
            execution_time_ms = int((time.time() - start_time) * 1000)

            logger.info(
                f"Migration {migration.version} applied successfully in {execution_time_ms}ms"
            )

        except Exception as e:
            error_message = str(e)
            execution_time_ms = int((time.time() - start_time) * 1000)
            logger.error(f"Migration {migration.version} failed: {e}")
            await self.session.rollback()
            raise MigrationError(f"Migration {migration.version} failed: {e}") from e

        finally:
            # Record migration in history
            try:
                history_record = MigrationHistory(
                    version=migration.version,
                    description=migration.description,
                    applied_at=datetime.now(),
                    execution_time_ms=execution_time_ms,
                    success=success,
                    error_message=error_message,
                )

                insert_sql = """
                    INSERT INTO migration_history
                    (version, description, applied_at, execution_time_ms, success, error_message)
                    VALUES (:version, :description, :applied_at, :execution_time_ms, :success, :error_message)
                """

                await self.session.execute(
                    text(insert_sql),
                    {
                        "version": history_record.version,
                        "description": history_record.description,
                        "applied_at": history_record.applied_at,
                        "execution_time_ms": history_record.execution_time_ms,
                        "success": history_record.success,
                        "error_message": history_record.error_message,
                    },
                )
                await self.session.commit()

            except Exception as e:
                logger.error(f"Failed to record migration history: {e}")

    async def run_migrations(self, target_version: Optional[str] = None) -> int:
        """
        Run all pending migrations up to target version.

        Args:
            target_version: Stop at this version (None = run all)

        Returns:
            Number of migrations applied

        Raises:
            MigrationError: If migrations fail
        """
        pending = await self.get_pending_migrations()

        if target_version:
            pending = [m for m in pending if m.version <= target_version]

        if not pending:
            logger.info("No pending migrations to apply")
            return 0

        logger.info(f"Applying {len(pending)} pending migrations")

        for migration in pending:
            await self.apply_migration(migration)

        return len(pending)

    async def rollback_migration(self, migration: Migration) -> None:
        """
        Rollback a single migration.

        Args:
            migration: Migration to rollback

        Raises:
            MigrationError: If rollback fails
        """
        start_time = time.time()

        try:
            logger.info(f"Rolling back migration: {migration.version}")

            # Execute the downgrade
            await migration.downgrade(self.session)
            await self.session.commit()

            execution_time_ms = int((time.time() - start_time) * 1000)

            # Remove from history
            delete_sql = "DELETE FROM migration_history WHERE version = :version"
            await self.session.execute(text(delete_sql), {"version": migration.version})
            await self.session.commit()

            logger.info(
                f"Migration {migration.version} rolled back successfully in {execution_time_ms}ms"
            )

        except Exception as e:
            logger.error(f"Rollback of {migration.version} failed: {e}")
            await self.session.rollback()
            raise MigrationError(f"Rollback of {migration.version} failed: {e}") from e

    async def rollback(self, steps: int = 1) -> int:
        """
        Rollback the last N migrations.

        Args:
            steps: Number of migrations to rollback

        Returns:
            Number of migrations rolled back

        Raises:
            MigrationError: If rollback fails
        """
        applied = await self.get_applied_migrations()

        if not applied:
            logger.info("No migrations to rollback")
            return 0

        # Get migrations to rollback (in reverse order)
        to_rollback = applied[-steps:]
        to_rollback.reverse()

        migrations_to_rollback = [m for m in self._migrations if m.version in to_rollback]

        logger.info(f"Rolling back {len(migrations_to_rollback)} migrations")

        for migration in migrations_to_rollback:
            await self.rollback_migration(migration)

        return len(migrations_to_rollback)
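# A minimal usage sketch for the MigrationRunner above, assuming a
# migrations/ directory of {version}_{description}.py files as
# load_migrations documents. get_db_session is the session factory used
# elsewhere in this changeset.
import asyncio
from pathlib import Path

from src.server.database import get_db_session


async def migrate_to_latest() -> None:
    async with get_db_session() as session:
        runner = MigrationRunner(Path("migrations"), session)
        await runner.initialize()   # create migration_history if missing
        runner.load_migrations()    # discover and sort migration files
        applied = await runner.run_migrations()  # apply everything pending
        print(f"Applied {applied} migrations")


if __name__ == "__main__":
    asyncio.run(migrate_to_latest())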
@@ -1,222 +0,0 @@
"""
Migration validator for ensuring migration safety and integrity.

This module provides validation utilities to check migrations
before they are executed, ensuring they meet quality standards.
"""

import logging
from typing import List, Optional, Set

from .base import Migration, MigrationError

logger = logging.getLogger(__name__)


class MigrationValidator:
    """
    Validates migrations before execution.

    Performs various checks to ensure migrations are safe to run,
    including version uniqueness, naming conventions, and
    dependency resolution.
    """

    def __init__(self):
        """Initialize migration validator."""
        self.errors: List[str] = []
        self.warnings: List[str] = []

    def reset(self) -> None:
        """Clear validation results."""
        self.errors.clear()
        self.warnings.clear()

    def validate_migration(self, migration: Migration) -> bool:
        """
        Validate a single migration.

        Args:
            migration: Migration to validate

        Returns:
            True if migration is valid, False otherwise
        """
        self.reset()

        # Check version format
        if not self._validate_version_format(migration.version):
            self.errors.append(
                f"Invalid version format: {migration.version}. "
                "Expected format: YYYYMMDD_NNN"
            )

        # Check description
        if not migration.description or len(migration.description) < 5:
            self.errors.append(
                f"Migration {migration.version} has invalid "
                f"description: '{migration.description}'"
            )

        # Check for implementation
        if not hasattr(migration, "upgrade") or not callable(
            getattr(migration, "upgrade")
        ):
            self.errors.append(
                f"Migration {migration.version} missing upgrade method"
            )

        if not hasattr(migration, "downgrade") or not callable(
            getattr(migration, "downgrade")
        ):
            self.errors.append(
                f"Migration {migration.version} missing downgrade method"
            )

        return len(self.errors) == 0

    def validate_migrations(self, migrations: List[Migration]) -> bool:
        """
        Validate a list of migrations.

        Args:
            migrations: List of migrations to validate

        Returns:
            True if all migrations are valid, False otherwise
        """
        self.reset()

        if not migrations:
            self.warnings.append("No migrations to validate")
            return True

        # Check for duplicate versions
        versions: Set[str] = set()
        for migration in migrations:
            if migration.version in versions:
                self.errors.append(
                    f"Duplicate migration version: {migration.version}"
                )
            versions.add(migration.version)

        # Return early if duplicates found
        if self.errors:
            return False

        # Validate each migration
        for migration in migrations:
            if not self.validate_migration(migration):
                logger.error(
                    f"Migration {migration.version} "
                    f"validation failed: {self.errors}"
                )
                return False

        # Check version ordering
        sorted_versions = sorted([m.version for m in migrations])
        actual_versions = [m.version for m in migrations]
        if sorted_versions != actual_versions:
            self.warnings.append(
                "Migrations are not in chronological order"
            )

        return len(self.errors) == 0

    def _validate_version_format(self, version: str) -> bool:
        """
        Validate version string format.

        Args:
            version: Version string to validate

        Returns:
            True if format is valid
        """
        # Expected format: YYYYMMDD_NNN or YYYYMMDD_NNN_description
        if not version:
            return False

        parts = version.split("_")
        if len(parts) < 2:
            return False

        # Check date part (YYYYMMDD)
        date_part = parts[0]
        if len(date_part) != 8 or not date_part.isdigit():
            return False

        # Check sequence part (NNN)
        seq_part = parts[1]
        if not seq_part.isdigit():
            return False

        return True

    def check_migration_conflicts(
        self,
        pending: List[Migration],
        applied: List[str],
    ) -> Optional[str]:
        """
        Check for conflicts between pending and applied migrations.

        Args:
            pending: List of pending migrations
            applied: List of applied migration versions

        Returns:
            Error message if conflicts found, None otherwise
        """
        # Check if any pending migration has version lower than applied
        if not applied:
            return None

        latest_applied = max(applied)

        for migration in pending:
            if migration.version < latest_applied:
                return (
                    f"Migration {migration.version} is older than "
                    f"latest applied migration {latest_applied}. "
                    "This may indicate a merge conflict."
                )

        return None

    def get_validation_report(self) -> str:
        """
        Get formatted validation report.

        Returns:
            Formatted report string
        """
        report = []

        if self.errors:
            report.append("Validation Errors:")
            for error in self.errors:
                report.append(f"  - {error}")

        if self.warnings:
            report.append("Validation Warnings:")
            for warning in self.warnings:
                report.append(f"  - {warning}")

        if not self.errors and not self.warnings:
            report.append("All validations passed")

        return "\n".join(report)

    def raise_if_invalid(self) -> None:
        """
        Raise exception if validation failed.

        Raises:
            MigrationError: If validation errors exist
        """
        if self.errors:
            error_msg = "\n".join(self.errors)
            raise MigrationError(
                f"Migration validation failed:\n{error_msg}"
            )
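# A short sketch of how the validator composes with the runner: validate the
# loaded set before applying anything. Every name here comes from the two
# removed modules above.
async def validate_and_run(runner: "MigrationRunner") -> int:
    validator = MigrationValidator()
    pending = await runner.get_pending_migrations()
    applied = await runner.get_applied_migrations()

    if not validator.validate_migrations(pending):
        print(validator.get_validation_report())
        validator.raise_if_invalid()  # raises MigrationError with the details

    conflict = validator.check_migration_conflicts(pending, applied)
    if conflict:
        raise MigrationError(conflict)

    return await runner.run_migrations()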
@@ -15,18 +15,7 @@ from datetime import datetime, timezone
 from enum import Enum
 from typing import List, Optional
 
-from sqlalchemy import (
-    JSON,
-    Boolean,
-    DateTime,
-    Float,
-    ForeignKey,
-    Integer,
-    String,
-    Text,
-    func,
-)
-from sqlalchemy import Enum as SQLEnum
+from sqlalchemy import Boolean, DateTime, ForeignKey, Integer, String, Text, func
 from sqlalchemy.orm import Mapped, mapped_column, relationship, validates
 
 from src.server.database.base import Base, TimestampMixin
@@ -51,10 +40,6 @@ class AnimeSeries(Base, TimestampMixin):
         name: Display name of the series
         site: Provider site URL
         folder: Filesystem folder name (metadata only, not for lookups)
-        description: Optional series description
-        status: Current status (ongoing, completed, etc.)
-        total_episodes: Total number of episodes
-        cover_url: URL to series cover image
         episodes: Relationship to Episode models (via id foreign key)
         download_items: Relationship to DownloadQueueItem models (via id foreign key)
         created_at: Creation timestamp (from TimestampMixin)
@@ -89,30 +74,6 @@ class AnimeSeries(Base, TimestampMixin):
         doc="Filesystem folder name - METADATA ONLY, not for lookups"
     )
 
-    # Metadata
-    description: Mapped[Optional[str]] = mapped_column(
-        Text, nullable=True,
-        doc="Series description"
-    )
-    status: Mapped[Optional[str]] = mapped_column(
-        String(50), nullable=True,
-        doc="Series status (ongoing, completed, etc.)"
-    )
-    total_episodes: Mapped[Optional[int]] = mapped_column(
-        Integer, nullable=True,
-        doc="Total number of episodes"
-    )
-    cover_url: Mapped[Optional[str]] = mapped_column(
-        String(1000), nullable=True,
-        doc="URL to cover image"
-    )
-
-    # JSON field for episode dictionary (season -> [episodes])
-    episode_dict: Mapped[Optional[dict]] = mapped_column(
-        JSON, nullable=True,
-        doc="Episode dictionary {season: [episodes]}"
-    )
-
     # Relationships
     episodes: Mapped[List["Episode"]] = relationship(
         "Episode",
@@ -161,22 +122,6 @@ class AnimeSeries(Base, TimestampMixin):
             raise ValueError("Folder path must be 1000 characters or less")
         return value.strip()
 
-    @validates('cover_url')
-    def validate_cover_url(self, key: str, value: Optional[str]) -> Optional[str]:
-        """Validate cover URL length."""
-        if value is not None and len(value) > 1000:
-            raise ValueError("Cover URL must be 1000 characters or less")
-        return value
-
-    @validates('total_episodes')
-    def validate_total_episodes(self, key: str, value: Optional[int]) -> Optional[int]:
-        """Validate total episodes is positive."""
-        if value is not None and value < 0:
-            raise ValueError("Total episodes must be non-negative")
-        if value is not None and value > 10000:
-            raise ValueError("Total episodes must be 10000 or less")
-        return value
-
     def __repr__(self) -> str:
         return f"<AnimeSeries(id={self.id}, key='{self.key}', name='{self.name}')>"
 
@@ -194,9 +139,7 @@ class Episode(Base, TimestampMixin):
         episode_number: Episode number within season
         title: Episode title
         file_path: Local file path if downloaded
-        file_size: File size in bytes
         is_downloaded: Whether episode is downloaded
-        download_date: When episode was downloaded
         series: Relationship to AnimeSeries
         created_at: Creation timestamp (from TimestampMixin)
         updated_at: Last update timestamp (from TimestampMixin)
@@ -234,18 +177,10 @@ class Episode(Base, TimestampMixin):
         String(1000), nullable=True,
         doc="Local file path"
     )
-    file_size: Mapped[Optional[int]] = mapped_column(
-        Integer, nullable=True,
-        doc="File size in bytes"
-    )
     is_downloaded: Mapped[bool] = mapped_column(
         Boolean, default=False, nullable=False,
         doc="Whether episode is downloaded"
     )
-    download_date: Mapped[Optional[datetime]] = mapped_column(
-        DateTime(timezone=True), nullable=True,
-        doc="When episode was downloaded"
-    )
 
     # Relationship
     series: Mapped["AnimeSeries"] = relationship(
@@ -287,13 +222,6 @@ class Episode(Base, TimestampMixin):
             raise ValueError("File path must be 1000 characters or less")
         return value
 
-    @validates('file_size')
-    def validate_file_size(self, key: str, value: Optional[int]) -> Optional[int]:
-        """Validate file size is non-negative."""
-        if value is not None and value < 0:
-            raise ValueError("File size must be non-negative")
-        return value
-
     def __repr__(self) -> str:
         return (
             f"<Episode(id={self.id}, series_id={self.series_id}, "
@@ -321,27 +249,20 @@ class DownloadPriority(str, Enum):
 class DownloadQueueItem(Base, TimestampMixin):
     """SQLAlchemy model for download queue items.
 
-    Tracks download queue with status, progress, and error information.
+    Tracks download queue with error information.
     Provides persistence for the DownloadService queue state.
 
     Attributes:
         id: Primary key
         series_id: Foreign key to AnimeSeries
-        season: Season number
-        episode_number: Episode number
-        status: Current download status
-        priority: Download priority
-        progress_percent: Download progress (0-100)
-        downloaded_bytes: Bytes downloaded
-        total_bytes: Total file size
-        download_speed: Current speed in bytes/sec
+        episode_id: Foreign key to Episode
         error_message: Error description if failed
-        retry_count: Number of retry attempts
         download_url: Provider download URL
         file_destination: Target file path
         started_at: When download started
         completed_at: When download completed
         series: Relationship to AnimeSeries
+        episode: Relationship to Episode
         created_at: Creation timestamp (from TimestampMixin)
         updated_at: Last update timestamp (from TimestampMixin)
     """
@@ -359,47 +280,11 @@ class DownloadQueueItem(Base, TimestampMixin):
         index=True
     )
 
-    # Episode identification
-    season: Mapped[int] = mapped_column(
-        Integer, nullable=False,
-        doc="Season number"
-    )
-    episode_number: Mapped[int] = mapped_column(
-        Integer, nullable=False,
-        doc="Episode number"
-    )
-
-    # Queue management
-    status: Mapped[str] = mapped_column(
-        SQLEnum(DownloadStatus),
-        default=DownloadStatus.PENDING,
+    # Foreign key to episode
+    episode_id: Mapped[int] = mapped_column(
+        ForeignKey("episodes.id", ondelete="CASCADE"),
         nullable=False,
-        index=True,
-        doc="Current download status"
-    )
-    priority: Mapped[str] = mapped_column(
-        SQLEnum(DownloadPriority),
-        default=DownloadPriority.NORMAL,
-        nullable=False,
-        doc="Download priority"
+        index=True
|
|
||||||
)
|
|
||||||
|
|
||||||
# Progress tracking
|
|
||||||
progress_percent: Mapped[float] = mapped_column(
|
|
||||||
Float, default=0.0, nullable=False,
|
|
||||||
doc="Progress percentage (0-100)"
|
|
||||||
)
|
|
||||||
downloaded_bytes: Mapped[int] = mapped_column(
|
|
||||||
Integer, default=0, nullable=False,
|
|
||||||
doc="Bytes downloaded"
|
|
||||||
)
|
|
||||||
total_bytes: Mapped[Optional[int]] = mapped_column(
|
|
||||||
Integer, nullable=True,
|
|
||||||
doc="Total file size"
|
|
||||||
)
|
|
||||||
download_speed: Mapped[Optional[float]] = mapped_column(
|
|
||||||
Float, nullable=True,
|
|
||||||
doc="Current download speed (bytes/sec)"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
# Error handling
|
# Error handling
|
||||||
@ -407,10 +292,6 @@ class DownloadQueueItem(Base, TimestampMixin):
|
|||||||
Text, nullable=True,
|
Text, nullable=True,
|
||||||
doc="Error description"
|
doc="Error description"
|
||||||
)
|
)
|
||||||
retry_count: Mapped[int] = mapped_column(
|
|
||||||
Integer, default=0, nullable=False,
|
|
||||||
doc="Number of retry attempts"
|
|
||||||
)
|
|
||||||
|
|
||||||
# Download details
|
# Download details
|
||||||
download_url: Mapped[Optional[str]] = mapped_column(
|
download_url: Mapped[Optional[str]] = mapped_column(
|
||||||
@ -437,67 +318,9 @@ class DownloadQueueItem(Base, TimestampMixin):
|
|||||||
"AnimeSeries",
|
"AnimeSeries",
|
||||||
back_populates="download_items"
|
back_populates="download_items"
|
||||||
)
|
)
|
||||||
|
episode: Mapped["Episode"] = relationship(
|
||||||
@validates('season')
|
"Episode"
|
||||||
def validate_season(self, key: str, value: int) -> int:
|
)
|
||||||
"""Validate season number is positive."""
|
|
||||||
if value < 0:
|
|
||||||
raise ValueError("Season number must be non-negative")
|
|
||||||
if value > 1000:
|
|
||||||
raise ValueError("Season number must be 1000 or less")
|
|
||||||
return value
|
|
||||||
|
|
||||||
@validates('episode_number')
|
|
||||||
def validate_episode_number(self, key: str, value: int) -> int:
|
|
||||||
"""Validate episode number is positive."""
|
|
||||||
if value < 0:
|
|
||||||
raise ValueError("Episode number must be non-negative")
|
|
||||||
if value > 10000:
|
|
||||||
raise ValueError("Episode number must be 10000 or less")
|
|
||||||
return value
|
|
||||||
|
|
||||||
@validates('progress_percent')
|
|
||||||
def validate_progress_percent(self, key: str, value: float) -> float:
|
|
||||||
"""Validate progress is between 0 and 100."""
|
|
||||||
if value < 0.0:
|
|
||||||
raise ValueError("Progress percent must be non-negative")
|
|
||||||
if value > 100.0:
|
|
||||||
raise ValueError("Progress percent cannot exceed 100")
|
|
||||||
return value
|
|
||||||
|
|
||||||
@validates('downloaded_bytes')
|
|
||||||
def validate_downloaded_bytes(self, key: str, value: int) -> int:
|
|
||||||
"""Validate downloaded bytes is non-negative."""
|
|
||||||
if value < 0:
|
|
||||||
raise ValueError("Downloaded bytes must be non-negative")
|
|
||||||
return value
|
|
||||||
|
|
||||||
@validates('total_bytes')
|
|
||||||
def validate_total_bytes(
|
|
||||||
self, key: str, value: Optional[int]
|
|
||||||
) -> Optional[int]:
|
|
||||||
"""Validate total bytes is non-negative."""
|
|
||||||
if value is not None and value < 0:
|
|
||||||
raise ValueError("Total bytes must be non-negative")
|
|
||||||
return value
|
|
||||||
|
|
||||||
@validates('download_speed')
|
|
||||||
def validate_download_speed(
|
|
||||||
self, key: str, value: Optional[float]
|
|
||||||
) -> Optional[float]:
|
|
||||||
"""Validate download speed is non-negative."""
|
|
||||||
if value is not None and value < 0.0:
|
|
||||||
raise ValueError("Download speed must be non-negative")
|
|
||||||
return value
|
|
||||||
|
|
||||||
@validates('retry_count')
|
|
||||||
def validate_retry_count(self, key: str, value: int) -> int:
|
|
||||||
"""Validate retry count is non-negative."""
|
|
||||||
if value < 0:
|
|
||||||
raise ValueError("Retry count must be non-negative")
|
|
||||||
if value > 100:
|
|
||||||
raise ValueError("Retry count cannot exceed 100")
|
|
||||||
return value
|
|
||||||
|
|
||||||
@validates('download_url')
|
@validates('download_url')
|
||||||
def validate_download_url(
|
def validate_download_url(
|
||||||
@ -523,8 +346,7 @@ class DownloadQueueItem(Base, TimestampMixin):
|
|||||||
return (
|
return (
|
||||||
f"<DownloadQueueItem(id={self.id}, "
|
f"<DownloadQueueItem(id={self.id}, "
|
||||||
f"series_id={self.series_id}, "
|
f"series_id={self.series_id}, "
|
||||||
f"S{self.season:02d}E{self.episode_number:02d}, "
|
f"episode_id={self.episode_id})>"
|
||||||
f"status={self.status})>"
|
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
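Because `DownloadQueueItem` no longer carries season, episode number, status, or progress fields, anything that used to read them off the queue row now has to go through the new `episode` relationship. A minimal sketch of that lookup, assuming only the models in the diff above (the helper name and session handling are illustrative):

```python
from sqlalchemy import select
from sqlalchemy.ext.asyncio import AsyncSession
from sqlalchemy.orm import selectinload

from src.server.database.models import DownloadQueueItem

async def queue_item_label(db: AsyncSession, item_id: int) -> str | None:
    # The season/episode pair now lives on the related Episode row.
    result = await db.execute(
        select(DownloadQueueItem)
        .options(selectinload(DownloadQueueItem.episode))
        .where(DownloadQueueItem.id == item_id)
    )
    item = result.scalar_one_or_none()
    if item is None:
        return None
    ep = item.episode
    return f"S{ep.season:02d}E{ep.episode_number:02d}"
```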
@@ -9,13 +9,22 @@ Services:
 - DownloadQueueService: CRUD operations for download queue
 - UserSessionService: CRUD operations for user sessions

+Transaction Support:
+    All services are designed to work within transaction boundaries.
+    Individual operations use flush() instead of commit() to allow
+    the caller to control transaction boundaries.
+
+    For compound operations spanning multiple services, use the
+    @transactional decorator or atomic() context manager from
+    src.server.database.transaction.
+
 All services support both async and sync operations for flexibility.
 """
 from __future__ import annotations

 import logging
 from datetime import datetime, timedelta, timezone
-from typing import Dict, List, Optional
+from typing import List, Optional

 from sqlalchemy import delete, select, update
 from sqlalchemy.ext.asyncio import AsyncSession

@@ -23,9 +32,7 @@ from sqlalchemy.orm import Session, selectinload

 from src.server.database.models import (
     AnimeSeries,
-    DownloadPriority,
     DownloadQueueItem,
-    DownloadStatus,
     Episode,
     UserSession,
 )
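The new module docstring changes the contract: service methods only `flush()`, so the caller decides when to commit. A minimal sketch of that pattern, assuming the `atomic()` manager introduced later in this compare (the wrapper function is illustrative, and the service import is omitted since the services module path is not shown here):

```python
from sqlalchemy.ext.asyncio import AsyncSession

from src.server.database.transaction import atomic

async def purge_sessions(db: AsyncSession) -> int:
    # cleanup_expired() only flush()es; atomic() commits on exit,
    # or rolls everything back if the block raises.
    async with atomic(db):
        return await UserSessionService.cleanup_expired(db)
```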
@@ -57,11 +64,6 @@ class AnimeSeriesService:
         name: str,
         site: str,
         folder: str,
-        description: Optional[str] = None,
-        status: Optional[str] = None,
-        total_episodes: Optional[int] = None,
-        cover_url: Optional[str] = None,
-        episode_dict: Optional[Dict] = None,
     ) -> AnimeSeries:
         """Create a new anime series.

@@ -71,11 +73,6 @@ class AnimeSeriesService:
             name: Series name
             site: Provider site URL
             folder: Local filesystem path
-            description: Optional series description
-            status: Optional series status
-            total_episodes: Optional total episode count
-            cover_url: Optional cover image URL
-            episode_dict: Optional episode dictionary

         Returns:
             Created AnimeSeries instance

@@ -88,11 +85,6 @@ class AnimeSeriesService:
             name=name,
             site=site,
             folder=folder,
-            description=description,
-            status=status,
-            total_episodes=total_episodes,
-            cover_url=cover_url,
-            episode_dict=episode_dict,
         )
         db.add(series)
         await db.flush()

@@ -262,7 +254,6 @@ class EpisodeService:
         episode_number: int,
         title: Optional[str] = None,
         file_path: Optional[str] = None,
-        file_size: Optional[int] = None,
         is_downloaded: bool = False,
     ) -> Episode:
         """Create a new episode.

@@ -274,7 +265,6 @@ class EpisodeService:
             episode_number: Episode number within season
             title: Optional episode title
             file_path: Optional local file path
-            file_size: Optional file size in bytes
             is_downloaded: Whether episode is downloaded

         Returns:

@@ -286,9 +276,7 @@ class EpisodeService:
             episode_number=episode_number,
             title=title,
             file_path=file_path,
-            file_size=file_size,
             is_downloaded=is_downloaded,
-            download_date=datetime.now(timezone.utc) if is_downloaded else None,
         )
         db.add(episode)
         await db.flush()

@@ -372,7 +360,6 @@ class EpisodeService:
         db: AsyncSession,
         episode_id: int,
         file_path: str,
-        file_size: int,
     ) -> Optional[Episode]:
         """Mark episode as downloaded.

@@ -380,7 +367,6 @@ class EpisodeService:
             db: Database session
             episode_id: Episode primary key
             file_path: Local file path
-            file_size: File size in bytes

         Returns:
             Updated Episode instance or None if not found

@@ -391,8 +377,6 @@ class EpisodeService:

         episode.is_downloaded = True
         episode.file_path = file_path
-        episode.file_size = file_size
-        episode.download_date = datetime.now(timezone.utc)

         await db.flush()
         await db.refresh(episode)

@@ -418,6 +402,96 @@ class EpisodeService:
         )
         return result.rowcount > 0
+    @staticmethod
+    async def delete_by_series_and_episode(
+        db: AsyncSession,
+        series_key: str,
+        season: int,
+        episode_number: int,
+    ) -> bool:
+        """Delete episode by series key, season, and episode number.
+
+        Used to remove episodes from the missing list when they are
+        downloaded successfully.
+
+        Args:
+            db: Database session
+            series_key: Unique provider key for the series
+            season: Season number
+            episode_number: Episode number within season
+
+        Returns:
+            True if deleted, False if not found
+        """
+        # First get the series by key
+        series = await AnimeSeriesService.get_by_key(db, series_key)
+        if not series:
+            logger.warning(
+                f"Series not found for key: {series_key}"
+            )
+            return False
+
+        # Then delete the episode
+        result = await db.execute(
+            delete(Episode).where(
+                Episode.series_id == series.id,
+                Episode.season == season,
+                Episode.episode_number == episode_number,
+            )
+        )
+        deleted = result.rowcount > 0
+        if deleted:
+            logger.info(
+                f"Removed episode from missing list: "
+                f"{series_key} S{season:02d}E{episode_number:02d}"
+            )
+        return deleted
+
+    @staticmethod
+    async def bulk_mark_downloaded(
+        db: AsyncSession,
+        episode_ids: List[int],
+        file_paths: Optional[List[str]] = None,
+    ) -> int:
+        """Mark multiple episodes as downloaded atomically.
+
+        This operation should be wrapped in a transaction for atomicity.
+        All episodes will be updated or none if an error occurs.
+
+        Args:
+            db: Database session
+            episode_ids: List of episode primary keys to update
+            file_paths: Optional list of file paths (parallel to episode_ids)
+
+        Returns:
+            Number of episodes updated
+
+        Note:
+            Use within @transactional or atomic() for guaranteed atomicity:
+
+            async with atomic(db) as tx:
+                count = await EpisodeService.bulk_mark_downloaded(
+                    db, episode_ids, file_paths
+                )
+        """
+        if not episode_ids:
+            return 0
+
+        updated_count = 0
+
+        for i, episode_id in enumerate(episode_ids):
+            episode = await EpisodeService.get_by_id(db, episode_id)
+            if episode:
+                episode.is_downloaded = True
+                if file_paths and i < len(file_paths):
+                    episode.file_path = file_paths[i]
+                updated_count += 1
+
+        await db.flush()
+        logger.info(f"Bulk marked {updated_count} episodes as downloaded")
+
+        return updated_count
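A sketch of the success path a download worker might take with the new helper, with the commit owned by an `atomic()` block as the docstring suggests (the function name is illustrative; the service import is omitted since the services module path is not shown in this compare):

```python
from sqlalchemy.ext.asyncio import AsyncSession

from src.server.database.transaction import atomic

async def on_episode_downloaded(
    db: AsyncSession, series_key: str, season: int, episode_number: int
) -> bool:
    # Drop the episode from the missing list; atomic() owns the commit.
    async with atomic(db):
        return await EpisodeService.delete_by_series_and_episode(
            db, series_key, season, episode_number
        )
```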
 # ============================================================================
 # Download Queue Service

@@ -427,17 +501,18 @@ class EpisodeService:
 class DownloadQueueService:
     """Service for download queue CRUD operations.

-    Provides methods for managing the download queue with status tracking,
-    priority management, and progress updates.
+    Provides methods for managing the download queue.
+
+    Transaction Support:
+        All operations use flush() for transaction-safe operation.
+        For bulk operations, use @transactional or atomic() context.
     """

     @staticmethod
     async def create(
         db: AsyncSession,
         series_id: int,
-        season: int,
-        episode_number: int,
-        priority: DownloadPriority = DownloadPriority.NORMAL,
+        episode_id: int,
         download_url: Optional[str] = None,
         file_destination: Optional[str] = None,
     ) -> DownloadQueueItem:

@@ -446,9 +521,7 @@ class DownloadQueueService:
         Args:
             db: Database session
             series_id: Foreign key to AnimeSeries
-            season: Season number
-            episode_number: Episode number
-            priority: Download priority
+            episode_id: Foreign key to Episode
             download_url: Optional provider download URL
             file_destination: Optional target file path

@@ -457,10 +530,7 @@ class DownloadQueueService:
         """
         item = DownloadQueueItem(
             series_id=series_id,
-            season=season,
-            episode_number=episode_number,
-            status=DownloadStatus.PENDING,
-            priority=priority,
+            episode_id=episode_id,
             download_url=download_url,
             file_destination=file_destination,
         )

@@ -468,8 +538,8 @@ class DownloadQueueService:
         await db.flush()
         await db.refresh(item)
         logger.info(
-            f"Added to download queue: S{season:02d}E{episode_number:02d} "
-            f"for series_id={series_id} with priority={priority}"
+            f"Added to download queue: episode_id={episode_id} "
+            f"for series_id={series_id}"
         )
         return item
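Under the new signature the caller passes the Episode primary key instead of season/episode numbers; a minimal sketch of enqueueing (the wrapper and the deferred URL/destination handling are assumptions, not part of the diff; the service import is omitted since the services module path is not shown here):

```python
from sqlalchemy.ext.asyncio import AsyncSession

from src.server.database.models import DownloadQueueItem

async def enqueue_episode(
    db: AsyncSession, series_id: int, episode_id: int
) -> DownloadQueueItem:
    return await DownloadQueueService.create(
        db,
        series_id=series_id,
        episode_id=episode_id,
        download_url=None,      # assumed to be resolved later by the provider layer
        file_destination=None,  # assumed to be filled in once the target path is known
    )
```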
@@ -493,68 +563,25 @@ class DownloadQueueService:
         return result.scalar_one_or_none()

     @staticmethod
-    async def get_by_status(
-        db: AsyncSession,
-        status: DownloadStatus,
-        limit: Optional[int] = None,
-    ) -> List[DownloadQueueItem]:
-        """Get download queue items by status.
-
-        Args:
-            db: Database session
-            status: Download status filter
-            limit: Optional limit for results
-
-        Returns:
-            List of DownloadQueueItem instances
-        """
-        query = select(DownloadQueueItem).where(
-            DownloadQueueItem.status == status
-        )
-
-        # Order by priority (HIGH first) then creation time
-        query = query.order_by(
-            DownloadQueueItem.priority.desc(),
-            DownloadQueueItem.created_at.asc(),
-        )
-
-        if limit:
-            query = query.limit(limit)
-
-        result = await db.execute(query)
-        return list(result.scalars().all())
-
-    @staticmethod
-    async def get_pending(
-        db: AsyncSession,
-        limit: Optional[int] = None,
-    ) -> List[DownloadQueueItem]:
-        """Get pending download queue items.
-
-        Args:
-            db: Database session
-            limit: Optional limit for results
-
-        Returns:
-            List of pending DownloadQueueItem instances ordered by priority
-        """
-        return await DownloadQueueService.get_by_status(
-            db, DownloadStatus.PENDING, limit
-        )
-
-    @staticmethod
-    async def get_active(db: AsyncSession) -> List[DownloadQueueItem]:
-        """Get active download queue items.
-
-        Args:
-            db: Database session
-
-        Returns:
-            List of downloading DownloadQueueItem instances
-        """
-        return await DownloadQueueService.get_by_status(
-            db, DownloadStatus.DOWNLOADING
-        )
+    async def get_by_episode(
+        db: AsyncSession,
+        episode_id: int,
+    ) -> Optional[DownloadQueueItem]:
+        """Get download queue item by episode ID.
+
+        Args:
+            db: Database session
+            episode_id: Foreign key to Episode
+
+        Returns:
+            DownloadQueueItem instance or None if not found
+        """
+        result = await db.execute(
+            select(DownloadQueueItem).where(
+                DownloadQueueItem.episode_id == episode_id
+            )
+        )
+        return result.scalar_one_or_none()

     @staticmethod
     async def get_all(

@@ -576,7 +603,6 @@ class DownloadQueueService:
         query = query.options(selectinload(DownloadQueueItem.series))

         query = query.order_by(
-            DownloadQueueItem.priority.desc(),
             DownloadQueueItem.created_at.asc(),
         )

@@ -584,19 +610,17 @@ class DownloadQueueService:
         return list(result.scalars().all())

     @staticmethod
-    async def update_status(
+    async def set_error(
         db: AsyncSession,
         item_id: int,
-        status: DownloadStatus,
-        error_message: Optional[str] = None,
+        error_message: str,
     ) -> Optional[DownloadQueueItem]:
-        """Update download queue item status.
+        """Set error message on download queue item.

         Args:
             db: Database session
             item_id: Item primary key
-            status: New download status
-            error_message: Optional error message for failed status
+            error_message: Error description

         Returns:
             Updated DownloadQueueItem instance or None if not found

@@ -605,61 +629,11 @@ class DownloadQueueService:
         if not item:
             return None

-        item.status = status
-
-        # Update timestamps based on status
-        if status == DownloadStatus.DOWNLOADING and not item.started_at:
-            item.started_at = datetime.now(timezone.utc)
-        elif status in (DownloadStatus.COMPLETED, DownloadStatus.FAILED):
-            item.completed_at = datetime.now(timezone.utc)
-
-        # Set error message for failed downloads
-        if status == DownloadStatus.FAILED and error_message:
-            item.error_message = error_message
-            item.retry_count += 1
-
-        await db.flush()
-        await db.refresh(item)
-        logger.debug(f"Updated download queue item {item_id} status to {status}")
-        return item
-
-    @staticmethod
-    async def update_progress(
-        db: AsyncSession,
-        item_id: int,
-        progress_percent: float,
-        downloaded_bytes: int,
-        total_bytes: Optional[int] = None,
-        download_speed: Optional[float] = None,
-    ) -> Optional[DownloadQueueItem]:
-        """Update download progress.
-
-        Args:
-            db: Database session
-            item_id: Item primary key
-            progress_percent: Progress percentage (0-100)
-            downloaded_bytes: Bytes downloaded
-            total_bytes: Optional total file size
-            download_speed: Optional current speed (bytes/sec)
-
-        Returns:
-            Updated DownloadQueueItem instance or None if not found
-        """
-        item = await DownloadQueueService.get_by_id(db, item_id)
-        if not item:
-            return None
-
-        item.progress_percent = progress_percent
-        item.downloaded_bytes = downloaded_bytes
-
-        if total_bytes is not None:
-            item.total_bytes = total_bytes
-
-        if download_speed is not None:
-            item.download_speed = download_speed
+        item.error_message = error_message

         await db.flush()
         await db.refresh(item)
+        logger.debug(f"Set error on download queue item {item_id}")
         return item

     @staticmethod
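With `update_status`/`update_progress` gone, the remaining error path pairs the two episode-keyed helpers; a sketch of how a worker might record a failure (names are illustrative; the service import is omitted since the services module path is not shown in this compare):

```python
from sqlalchemy.ext.asyncio import AsyncSession

async def record_failure(db: AsyncSession, episode_id: int, reason: str) -> None:
    # Look the queue row up by its Episode key, then attach the error text.
    item = await DownloadQueueService.get_by_episode(db, episode_id)
    if item is not None:
        await DownloadQueueService.set_error(db, item.id, reason)
```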
@@ -682,57 +656,87 @@ class DownloadQueueService:
         return deleted

     @staticmethod
-    async def clear_completed(db: AsyncSession) -> int:
-        """Clear completed downloads from queue.
+    async def delete_by_episode(
+        db: AsyncSession,
+        episode_id: int,
+    ) -> bool:
+        """Delete download queue item by episode ID.

         Args:
             db: Database session
+            episode_id: Foreign key to Episode

         Returns:
-            Number of items cleared
+            True if deleted, False if not found
         """
         result = await db.execute(
             delete(DownloadQueueItem).where(
-                DownloadQueueItem.status == DownloadStatus.COMPLETED
+                DownloadQueueItem.episode_id == episode_id
             )
         )
-        count = result.rowcount
-        logger.info(f"Cleared {count} completed downloads from queue")
-        return count
+        deleted = result.rowcount > 0
+        if deleted:
+            logger.info(
+                f"Deleted download queue item with episode_id={episode_id}"
+            )
+        return deleted

     @staticmethod
-    async def retry_failed(
+    async def bulk_delete(
         db: AsyncSession,
-        max_retries: int = 3,
-    ) -> List[DownloadQueueItem]:
-        """Retry failed downloads that haven't exceeded max retries.
+        item_ids: List[int],
+    ) -> int:
+        """Delete multiple download queue items atomically.
+
+        This operation should be wrapped in a transaction for atomicity.
+        All items will be deleted or none if an error occurs.

         Args:
             db: Database session
-            max_retries: Maximum number of retry attempts
+            item_ids: List of item primary keys to delete

         Returns:
-            List of items marked for retry
+            Number of items deleted
+
+        Note:
+            Use within @transactional or atomic() for guaranteed atomicity:
+
+            async with atomic(db) as tx:
+                count = await DownloadQueueService.bulk_delete(db, item_ids)
         """
+        if not item_ids:
+            return 0
+
         result = await db.execute(
-            select(DownloadQueueItem).where(
-                DownloadQueueItem.status == DownloadStatus.FAILED,
-                DownloadQueueItem.retry_count < max_retries,
+            delete(DownloadQueueItem).where(
+                DownloadQueueItem.id.in_(item_ids)
             )
         )
-        items = list(result.scalars().all())
-
-        for item in items:
-            item.status = DownloadStatus.PENDING
-            item.error_message = None
-            item.progress_percent = 0.0
-            item.downloaded_bytes = 0
-            item.started_at = None
-            item.completed_at = None
-
-        await db.flush()
-        logger.info(f"Marked {len(items)} failed downloads for retry")
-        return items
+        count = result.rowcount
+        logger.info(f"Bulk deleted {count} download queue items")
+
+        return count
+
+    @staticmethod
+    async def clear_all(
+        db: AsyncSession,
+    ) -> int:
+        """Clear all download queue items.
+
+        Deletes all items from the download queue. This operation
+        should be wrapped in a transaction.
+
+        Args:
+            db: Database session
+
+        Returns:
+            Number of items deleted
+        """
+        result = await db.execute(delete(DownloadQueueItem))
+        count = result.rowcount
+        logger.info(f"Cleared all {count} download queue items")
+        return count
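Putting the new bulk helpers together, a compound operation spanning both services can be made all-or-nothing exactly as the docstrings recommend; a sketch (the function name and looping strategy are illustrative; service imports are omitted since the services module path is not shown in this compare):

```python
from typing import List, Optional

from sqlalchemy.ext.asyncio import AsyncSession

from src.server.database.transaction import atomic

async def finalize_downloads(
    db: AsyncSession,
    episode_ids: List[int],
    file_paths: Optional[List[str]] = None,
) -> None:
    # Either the episodes are marked downloaded AND their queue rows are
    # gone, or nothing is committed.
    async with atomic(db):
        await EpisodeService.bulk_mark_downloaded(db, episode_ids, file_paths)
        for episode_id in episode_ids:
            await DownloadQueueService.delete_by_episode(db, episode_id)
```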
 # ============================================================================

@@ -744,6 +748,10 @@ class UserSessionService:
     """Service for user session CRUD operations.

     Provides methods for managing user authentication sessions with JWT tokens.
+
+    Transaction Support:
+        Session rotation and cleanup operations should use transactions
+        for atomicity when multiple sessions are involved.
     """

     @staticmethod
@@ -875,6 +883,9 @@ class UserSessionService:
     async def cleanup_expired(db: AsyncSession) -> int:
         """Clean up expired sessions.

+        This is a bulk delete operation that should be wrapped in
+        a transaction for atomicity when multiple sessions are deleted.
+
         Args:
             db: Database session

@@ -889,3 +900,66 @@ class UserSessionService:
         count = result.rowcount
         logger.info(f"Cleaned up {count} expired sessions")
         return count
+
+    @staticmethod
+    async def rotate_session(
+        db: AsyncSession,
+        old_session_id: str,
+        new_session_id: str,
+        new_token_hash: str,
+        new_expires_at: datetime,
+        user_id: Optional[str] = None,
+        ip_address: Optional[str] = None,
+        user_agent: Optional[str] = None,
+    ) -> Optional[UserSession]:
+        """Rotate a session by revoking old and creating new atomically.
+
+        This compound operation revokes the old session and creates a new
+        one. Should be wrapped in a transaction for atomicity.
+
+        Args:
+            db: Database session
+            old_session_id: Session ID to revoke
+            new_session_id: New session ID
+            new_token_hash: New token hash
+            new_expires_at: New expiration time
+            user_id: Optional user identifier
+            ip_address: Optional client IP
+            user_agent: Optional user agent
+
+        Returns:
+            New UserSession instance, or None if old session not found
+
+        Note:
+            Use within @transactional or atomic() for atomicity:
+
+            async with atomic(db) as tx:
+                new_session = await UserSessionService.rotate_session(
+                    db, old_id, new_id, hash, expires
+                )
+        """
+        # Revoke old session
+        old_revoked = await UserSessionService.revoke(db, old_session_id)
+        if not old_revoked:
+            logger.warning(
+                f"Could not rotate: old session {old_session_id} not found"
+            )
+            return None
+
+        # Create new session
+        new_session = await UserSessionService.create(
+            db=db,
+            session_id=new_session_id,
+            token_hash=new_token_hash,
+            expires_at=new_expires_at,
+            user_id=user_id,
+            ip_address=ip_address,
+            user_agent=user_agent,
+        )
+
+        logger.info(
+            f"Rotated session: {old_session_id} -> {new_session_id}"
+        )
+
+        return new_session
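A sketch of a token-refresh call built on `rotate_session`, using the `@transactional` decorator from the new transaction module so the revoke and the create share one transaction (the function name and the one-hour expiry window are placeholders; the service import is omitted since the services module path is not shown in this compare):

```python
from datetime import datetime, timedelta, timezone
from typing import Optional

from sqlalchemy.ext.asyncio import AsyncSession

from src.server.database.models import UserSession
from src.server.database.transaction import transactional

@transactional()
async def refresh_session(
    db: AsyncSession,
    old_session_id: str,
    new_session_id: str,
    new_token_hash: str,
) -> Optional[UserSession]:
    # Revoke-and-create runs in one transaction: if creating the new
    # session fails, the rollback also restores the old one.
    return await UserSessionService.rotate_session(
        db,
        old_session_id=old_session_id,
        new_session_id=new_session_id,
        new_token_hash=new_token_hash,
        new_expires_at=datetime.now(timezone.utc) + timedelta(hours=1),
    )
```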
|
|||||||
715
src/server/database/transaction.py
Normal file
715
src/server/database/transaction.py
Normal file
@ -0,0 +1,715 @@
|
|||||||
|
"""Transaction management utilities for SQLAlchemy.
|
||||||
|
|
||||||
|
This module provides transaction management utilities including decorators,
|
||||||
|
context managers, and helper functions for ensuring data consistency
|
||||||
|
across database operations.
|
||||||
|
|
||||||
|
Components:
|
||||||
|
- @transactional decorator: Wraps functions in transaction boundaries
|
||||||
|
- TransactionContext: Sync context manager for explicit transaction control
|
||||||
|
- atomic(): Async context manager for async operations
|
||||||
|
- TransactionPropagation: Enum for transaction propagation modes
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
@transactional
|
||||||
|
async def compound_operation(session: AsyncSession, data: Model) -> Result:
|
||||||
|
# Multiple write operations here
|
||||||
|
# All succeed or all fail
|
||||||
|
pass
|
||||||
|
|
||||||
|
async with atomic(session) as tx:
|
||||||
|
# Operations here
|
||||||
|
async with tx.savepoint() as sp:
|
||||||
|
# Nested operations with partial rollback capability
|
||||||
|
pass
|
||||||
|
"""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import functools
|
||||||
|
import logging
|
||||||
|
from contextlib import asynccontextmanager, contextmanager
|
||||||
|
from enum import Enum
|
||||||
|
from typing import (
|
||||||
|
Any,
|
||||||
|
AsyncGenerator,
|
||||||
|
Callable,
|
||||||
|
Generator,
|
||||||
|
Optional,
|
||||||
|
ParamSpec,
|
||||||
|
TypeVar,
|
||||||
|
)
|
||||||
|
|
||||||
|
from sqlalchemy.ext.asyncio import AsyncSession
|
||||||
|
from sqlalchemy.orm import Session
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
# Type variables for generic typing
|
||||||
|
T = TypeVar("T")
|
||||||
|
P = ParamSpec("P")
|
||||||
|
|
||||||
|
|
||||||
|
class TransactionPropagation(Enum):
|
||||||
|
"""Transaction propagation behavior options.
|
||||||
|
|
||||||
|
Defines how transactions should behave when called within
|
||||||
|
an existing transaction context.
|
||||||
|
|
||||||
|
Values:
|
||||||
|
REQUIRED: Use existing transaction or create new one (default)
|
||||||
|
REQUIRES_NEW: Always create a new transaction (suspend existing)
|
||||||
|
NESTED: Create a savepoint within existing transaction
|
||||||
|
"""
|
||||||
|
|
||||||
|
REQUIRED = "required"
|
||||||
|
REQUIRES_NEW = "requires_new"
|
||||||
|
NESTED = "nested"
|
||||||
|
|
||||||
|
|
||||||
|
class TransactionError(Exception):
|
||||||
|
"""Exception raised for transaction-related errors."""
|
||||||
|
|
||||||
|
|
||||||
|
class TransactionContext:
|
||||||
|
"""Synchronous context manager for explicit transaction control.
|
||||||
|
|
||||||
|
Provides a clean interface for managing database transactions with
|
||||||
|
automatic commit/rollback semantics and savepoint support.
|
||||||
|
|
||||||
|
Attributes:
|
||||||
|
session: SQLAlchemy Session instance
|
||||||
|
_savepoint_count: Counter for nested savepoints
|
||||||
|
|
||||||
|
Example:
|
||||||
|
with TransactionContext(session) as tx:
|
||||||
|
# Database operations here
|
||||||
|
with tx.savepoint() as sp:
|
||||||
|
# Nested operations with partial rollback
|
||||||
|
pass
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, session: Session) -> None:
|
||||||
|
"""Initialize transaction context.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
session: SQLAlchemy sync session
|
||||||
|
"""
|
||||||
|
self.session = session
|
||||||
|
self._savepoint_count = 0
|
||||||
|
self._committed = False
|
||||||
|
|
||||||
|
def __enter__(self) -> "TransactionContext":
|
||||||
|
"""Enter transaction context.
|
||||||
|
|
||||||
|
Begins a new transaction if not already in one.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Self for context manager protocol
|
||||||
|
"""
|
||||||
|
logger.debug("Entering transaction context")
|
||||||
|
|
||||||
|
# Check if session is already in a transaction
|
||||||
|
if not self.session.in_transaction():
|
||||||
|
self.session.begin()
|
||||||
|
logger.debug("Started new transaction")
|
||||||
|
|
||||||
|
return self
|
||||||
|
|
||||||
|
def __exit__(
|
||||||
|
self,
|
||||||
|
exc_type: Optional[type],
|
||||||
|
exc_val: Optional[BaseException],
|
||||||
|
exc_tb: Optional[Any],
|
||||||
|
) -> bool:
|
||||||
|
"""Exit transaction context.
|
||||||
|
|
||||||
|
Commits on success, rolls back on exception.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
exc_type: Exception type if raised
|
||||||
|
exc_val: Exception value if raised
|
||||||
|
exc_tb: Exception traceback if raised
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
False to propagate exceptions
|
||||||
|
"""
|
||||||
|
if exc_type is not None:
|
||||||
|
logger.warning(
|
||||||
|
"Transaction rollback due to exception: %s: %s",
|
||||||
|
exc_type.__name__,
|
||||||
|
exc_val,
|
||||||
|
)
|
||||||
|
self.session.rollback()
|
||||||
|
return False
|
||||||
|
|
||||||
|
if not self._committed:
|
||||||
|
self.session.commit()
|
||||||
|
logger.debug("Transaction committed")
|
||||||
|
self._committed = True
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
@contextmanager
|
||||||
|
def savepoint(self, name: Optional[str] = None) -> Generator["SavepointContext", None, None]:
|
||||||
|
"""Create a savepoint for partial rollback capability.
|
||||||
|
|
||||||
|
Savepoints allow nested transactions where inner operations
|
||||||
|
can be rolled back without affecting outer operations.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
name: Optional savepoint name (auto-generated if not provided)
|
||||||
|
|
||||||
|
Yields:
|
||||||
|
SavepointContext for nested transaction control
|
||||||
|
|
||||||
|
Example:
|
||||||
|
with tx.savepoint() as sp:
|
||||||
|
# Operations here can be rolled back independently
|
||||||
|
if error_condition:
|
||||||
|
sp.rollback()
|
||||||
|
"""
|
||||||
|
self._savepoint_count += 1
|
||||||
|
savepoint_name = name or f"sp_{self._savepoint_count}"
|
||||||
|
|
||||||
|
logger.debug("Creating savepoint: %s", savepoint_name)
|
||||||
|
nested = self.session.begin_nested()
|
||||||
|
|
||||||
|
sp_context = SavepointContext(nested, savepoint_name)
|
||||||
|
|
||||||
|
try:
|
||||||
|
yield sp_context
|
||||||
|
|
||||||
|
if not sp_context._rolled_back:
|
||||||
|
# Commit the savepoint (release it)
|
||||||
|
logger.debug("Releasing savepoint: %s", savepoint_name)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
if not sp_context._rolled_back:
|
||||||
|
logger.warning(
|
||||||
|
"Rolling back savepoint %s due to exception: %s",
|
||||||
|
savepoint_name,
|
||||||
|
e,
|
||||||
|
)
|
||||||
|
nested.rollback()
|
||||||
|
raise
|
||||||
|
|
||||||
|
def commit(self) -> None:
|
||||||
|
"""Explicitly commit the transaction.
|
||||||
|
|
||||||
|
Use this for early commit within the context.
|
||||||
|
"""
|
||||||
|
if not self._committed:
|
||||||
|
self.session.commit()
|
||||||
|
self._committed = True
|
||||||
|
logger.debug("Transaction explicitly committed")
|
||||||
|
|
||||||
|
def rollback(self) -> None:
|
||||||
|
"""Explicitly rollback the transaction.
|
||||||
|
|
||||||
|
Use this for early rollback within the context.
|
||||||
|
"""
|
||||||
|
self.session.rollback()
|
||||||
|
self._committed = True # Prevent double commit
|
||||||
|
logger.debug("Transaction explicitly rolled back")
|
||||||
|
|
||||||
|
|
||||||
|
class SavepointContext:
|
||||||
|
"""Context for managing a database savepoint.
|
||||||
|
|
||||||
|
Provides explicit control over savepoint commit/rollback.
|
||||||
|
|
||||||
|
Attributes:
|
||||||
|
_nested: SQLAlchemy nested transaction object
|
||||||
|
_name: Savepoint name for logging
|
||||||
|
_rolled_back: Whether rollback has been called
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, nested: Any, name: str) -> None:
|
||||||
|
"""Initialize savepoint context.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
nested: SQLAlchemy nested transaction
|
||||||
|
name: Savepoint name for logging
|
||||||
|
"""
|
||||||
|
self._nested = nested
|
||||||
|
self._name = name
|
||||||
|
self._rolled_back = False
|
||||||
|
|
||||||
|
def rollback(self) -> None:
|
||||||
|
"""Rollback to this savepoint.
|
||||||
|
|
||||||
|
Undoes all changes since the savepoint was created.
|
||||||
|
"""
|
||||||
|
if not self._rolled_back:
|
||||||
|
self._nested.rollback()
|
||||||
|
self._rolled_back = True
|
||||||
|
logger.debug("Savepoint %s rolled back", self._name)
|
||||||
|
|
||||||
|
def commit(self) -> None:
|
||||||
|
"""Commit (release) this savepoint.
|
||||||
|
|
||||||
|
Makes changes since the savepoint permanent within
|
||||||
|
the parent transaction.
|
||||||
|
"""
|
||||||
|
if not self._rolled_back:
|
||||||
|
# SQLAlchemy commits nested transactions automatically
|
||||||
|
# when exiting the context without rollback
|
||||||
|
logger.debug("Savepoint %s committed", self._name)
|
||||||
|
|
||||||
|
|
||||||
|
class AsyncTransactionContext:
|
||||||
|
"""Asynchronous context manager for explicit transaction control.
|
||||||
|
|
||||||
|
Provides async interface for managing database transactions with
|
||||||
|
automatic commit/rollback semantics and savepoint support.
|
||||||
|
|
||||||
|
Attributes:
|
||||||
|
session: SQLAlchemy AsyncSession instance
|
||||||
|
_savepoint_count: Counter for nested savepoints
|
||||||
|
|
||||||
|
Example:
|
||||||
|
async with AsyncTransactionContext(session) as tx:
|
||||||
|
# Database operations here
|
||||||
|
async with tx.savepoint() as sp:
|
||||||
|
# Nested operations with partial rollback
|
||||||
|
pass
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, session: AsyncSession) -> None:
|
||||||
|
"""Initialize async transaction context.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
session: SQLAlchemy async session
|
||||||
|
"""
|
||||||
|
self.session = session
|
||||||
|
self._savepoint_count = 0
|
||||||
|
self._committed = False
|
||||||
|
|
||||||
|
async def __aenter__(self) -> "AsyncTransactionContext":
|
||||||
|
"""Enter async transaction context.
|
||||||
|
|
||||||
|
Begins a new transaction if not already in one.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Self for context manager protocol
|
||||||
|
"""
|
||||||
|
logger.debug("Entering async transaction context")
|
||||||
|
|
||||||
|
# Check if session is already in a transaction
|
||||||
|
if not self.session.in_transaction():
|
||||||
|
await self.session.begin()
|
||||||
|
logger.debug("Started new async transaction")
|
||||||
|
|
||||||
|
return self
|
||||||
|
|
||||||
|
async def __aexit__(
|
||||||
|
self,
|
||||||
|
exc_type: Optional[type],
|
||||||
|
exc_val: Optional[BaseException],
|
||||||
|
exc_tb: Optional[Any],
|
||||||
|
) -> bool:
|
||||||
|
"""Exit async transaction context.
|
||||||
|
|
||||||
|
Commits on success, rolls back on exception.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
exc_type: Exception type if raised
|
||||||
|
exc_val: Exception value if raised
|
||||||
|
exc_tb: Exception traceback if raised
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
False to propagate exceptions
|
||||||
|
"""
|
||||||
|
if exc_type is not None:
|
||||||
|
logger.warning(
|
||||||
|
"Async transaction rollback due to exception: %s: %s",
|
||||||
|
exc_type.__name__,
|
||||||
|
exc_val,
|
||||||
|
)
|
||||||
|
await self.session.rollback()
|
||||||
|
return False
|
||||||
|
|
||||||
|
if not self._committed:
|
||||||
|
await self.session.commit()
|
||||||
|
logger.debug("Async transaction committed")
|
||||||
|
self._committed = True
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
@asynccontextmanager
|
||||||
|
async def savepoint(
|
||||||
|
self, name: Optional[str] = None
|
||||||
|
) -> AsyncGenerator["AsyncSavepointContext", None]:
|
||||||
|
"""Create an async savepoint for partial rollback capability.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
name: Optional savepoint name (auto-generated if not provided)
|
||||||
|
|
||||||
|
Yields:
|
||||||
|
AsyncSavepointContext for nested transaction control
|
||||||
|
"""
|
||||||
|
self._savepoint_count += 1
|
||||||
|
savepoint_name = name or f"sp_{self._savepoint_count}"
|
||||||
|
|
||||||
|
logger.debug("Creating async savepoint: %s", savepoint_name)
|
||||||
|
nested = await self.session.begin_nested()
|
||||||
|
|
||||||
|
sp_context = AsyncSavepointContext(nested, savepoint_name, self.session)
|
||||||
|
|
||||||
|
try:
|
||||||
|
yield sp_context
|
||||||
|
|
||||||
|
if not sp_context._rolled_back:
|
||||||
|
logger.debug("Releasing async savepoint: %s", savepoint_name)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
if not sp_context._rolled_back:
|
||||||
|
logger.warning(
|
||||||
|
"Rolling back async savepoint %s due to exception: %s",
|
||||||
|
savepoint_name,
|
||||||
|
e,
|
||||||
|
)
|
||||||
|
await nested.rollback()
|
||||||
|
raise
|
||||||
|
|
||||||
|
async def commit(self) -> None:
|
||||||
|
"""Explicitly commit the async transaction."""
|
||||||
|
if not self._committed:
|
||||||
|
await self.session.commit()
|
||||||
|
self._committed = True
|
||||||
|
logger.debug("Async transaction explicitly committed")
|
||||||
|
|
||||||
|
async def rollback(self) -> None:
|
||||||
|
"""Explicitly rollback the async transaction."""
|
||||||
|
await self.session.rollback()
|
||||||
|
self._committed = True # Prevent double commit
|
||||||
|
logger.debug("Async transaction explicitly rolled back")
|
||||||
|
|
||||||
|
|
||||||
|
class AsyncSavepointContext:
|
||||||
|
"""Async context for managing a database savepoint.
|
||||||
|
|
||||||
|
Attributes:
|
||||||
|
_nested: SQLAlchemy nested transaction object
|
||||||
|
_name: Savepoint name for logging
|
||||||
|
_session: Parent session for async operations
|
||||||
|
_rolled_back: Whether rollback has been called
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self, nested: Any, name: str, session: AsyncSession
|
||||||
|
) -> None:
|
||||||
|
"""Initialize async savepoint context.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
nested: SQLAlchemy nested transaction
|
||||||
|
name: Savepoint name for logging
|
||||||
|
session: Parent async session
|
||||||
|
"""
|
||||||
|
self._nested = nested
|
||||||
|
self._name = name
|
||||||
|
self._session = session
|
||||||
|
self._rolled_back = False
|
||||||
|
|
||||||
|
async def rollback(self) -> None:
|
||||||
|
"""Rollback to this savepoint asynchronously."""
|
||||||
|
if not self._rolled_back:
|
||||||
|
await self._nested.rollback()
|
||||||
|
self._rolled_back = True
|
||||||
|
logger.debug("Async savepoint %s rolled back", self._name)
|
||||||
|
|
||||||
|
async def commit(self) -> None:
|
||||||
|
"""Commit (release) this savepoint asynchronously."""
|
||||||
|
if not self._rolled_back:
|
||||||
|
logger.debug("Async savepoint %s committed", self._name)
|
||||||
|
|
||||||
|
|
||||||
|
@asynccontextmanager
|
||||||
|
async def atomic(
|
||||||
|
session: AsyncSession,
|
||||||
|
propagation: TransactionPropagation = TransactionPropagation.REQUIRED,
|
||||||
|
) -> AsyncGenerator[AsyncTransactionContext, None]:
|
||||||
|
"""Async context manager for atomic database operations.
|
||||||
|
|
||||||
|
Provides a clean interface for wrapping database operations in
|
||||||
|
a transaction boundary with automatic commit/rollback.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
session: SQLAlchemy async session
|
||||||
|
propagation: Transaction propagation behavior
|
||||||
|
|
||||||
|
Yields:
|
||||||
|
AsyncTransactionContext for transaction control
|
||||||
|
|
||||||
|
Example:
|
||||||
|
async with atomic(session) as tx:
|
||||||
|
await some_operation(session)
|
||||||
|
await another_operation(session)
|
||||||
|
# All operations committed together or rolled back
|
||||||
|
|
||||||
|
async with atomic(session) as tx:
|
||||||
|
await outer_operation(session)
|
||||||
|
async with tx.savepoint() as sp:
|
||||||
|
await risky_operation(session)
|
||||||
|
if error:
|
||||||
|
await sp.rollback() # Only rollback nested ops
|
||||||
|
"""
|
||||||
|
logger.debug(
|
||||||
|
"Starting atomic block with propagation: %s",
|
||||||
|
propagation.value,
|
||||||
|
)
|
||||||
|
|
||||||
|
if propagation == TransactionPropagation.NESTED:
|
||||||
|
# Use savepoint for nested propagation
|
||||||
|
if session.in_transaction():
|
||||||
|
nested = await session.begin_nested()
|
||||||
|
sp_context = AsyncSavepointContext(nested, "atomic_nested", session)
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Create a wrapper context for consistency
|
||||||
|
wrapper = AsyncTransactionContext(session)
|
||||||
|
wrapper._committed = True # Parent manages commit
|
||||||
|
yield wrapper
|
||||||
|
|
||||||
|
if not sp_context._rolled_back:
|
||||||
|
logger.debug("Releasing nested atomic savepoint")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
if not sp_context._rolled_back:
|
||||||
|
logger.warning(
|
||||||
|
"Rolling back nested atomic savepoint due to: %s", e
|
||||||
|
)
|
||||||
|
await nested.rollback()
|
||||||
|
raise
|
||||||
|
else:
|
||||||
|
# No existing transaction, start new one
|
||||||
|
async with AsyncTransactionContext(session) as tx:
|
||||||
|
yield tx
|
||||||
|
else:
|
||||||
|
# REQUIRED or REQUIRES_NEW
|
||||||
|
async with AsyncTransactionContext(session) as tx:
|
||||||
|
yield tx
|
||||||
|
|
||||||
|
|
||||||
|
@contextmanager
|
||||||
|
def atomic_sync(
|
||||||
|
session: Session,
|
||||||
|
propagation: TransactionPropagation = TransactionPropagation.REQUIRED,
|
||||||
|
) -> Generator[TransactionContext, None, None]:
|
||||||
|
"""Sync context manager for atomic database operations.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
session: SQLAlchemy sync session
|
||||||
|
propagation: Transaction propagation behavior
|
||||||
|
|
||||||
|
Yields:
|
||||||
|
TransactionContext for transaction control
|
||||||
|
"""
|
||||||
|
logger.debug(
|
||||||
|
"Starting sync atomic block with propagation: %s",
|
||||||
|
propagation.value,
|
||||||
|
)
|
||||||
|
|
||||||
|
if propagation == TransactionPropagation.NESTED:
|
||||||
|
if session.in_transaction():
|
||||||
|
nested = session.begin_nested()
|
||||||
|
sp_context = SavepointContext(nested, "atomic_nested")
|
||||||
|
|
||||||
|
try:
|
||||||
|
wrapper = TransactionContext(session)
|
||||||
|
wrapper._committed = True
|
||||||
|
yield wrapper
|
||||||
|
|
||||||
|
if not sp_context._rolled_back:
|
||||||
|
logger.debug("Releasing nested sync atomic savepoint")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
if not sp_context._rolled_back:
|
||||||
|
logger.warning(
|
                    "Rolling back nested sync savepoint due to: %s", e
                )
                nested.rollback()
                raise
        else:
            with TransactionContext(session) as tx:
                yield tx
    else:
        with TransactionContext(session) as tx:
            yield tx


def transactional(
    propagation: TransactionPropagation = TransactionPropagation.REQUIRED,
    session_param: str = "db",
) -> Callable[[Callable[P, T]], Callable[P, T]]:
    """Decorator to wrap a function in a transaction boundary.

    Automatically handles commit on success and rollback on exception.
    Works with both sync and async functions.

    Args:
        propagation: Transaction propagation behavior
        session_param: Name of the session parameter in the function signature

    Returns:
        Decorated function wrapped in transaction

    Example:
        @transactional()
        async def create_user_with_profile(db: AsyncSession, data: dict):
            user = await create_user(db, data['user'])
            profile = await create_profile(db, user.id, data['profile'])
            return user, profile

        @transactional(propagation=TransactionPropagation.NESTED)
        async def risky_sub_operation(db: AsyncSession, data: dict):
            # This can be rolled back without affecting parent transaction
            pass
    """
    def decorator(func: Callable[P, T]) -> Callable[P, T]:
        import asyncio

        if asyncio.iscoroutinefunction(func):
            @functools.wraps(func)
            async def async_wrapper(*args: P.args, **kwargs: P.kwargs) -> T:
                # Get session from kwargs or args
                session = _extract_session(func, args, kwargs, session_param)

                if session is None:
                    raise TransactionError(
                        f"Could not find session parameter '{session_param}' "
                        f"in function {func.__name__}"
                    )

                logger.debug(
                    "Starting transaction for %s with propagation %s",
                    func.__name__,
                    propagation.value,
                )

                async with atomic(session, propagation):
                    result = await func(*args, **kwargs)

                    logger.debug(
                        "Transaction completed for %s",
                        func.__name__,
                    )

                    return result

            return async_wrapper  # type: ignore
        else:
            @functools.wraps(func)
            def sync_wrapper(*args: P.args, **kwargs: P.kwargs) -> T:
                # Get session from kwargs or args
                session = _extract_session(func, args, kwargs, session_param)

                if session is None:
                    raise TransactionError(
                        f"Could not find session parameter '{session_param}' "
                        f"in function {func.__name__}"
                    )

                logger.debug(
                    "Starting sync transaction for %s with propagation %s",
                    func.__name__,
                    propagation.value,
                )

                with atomic_sync(session, propagation):
                    result = func(*args, **kwargs)

                    logger.debug(
                        "Sync transaction completed for %s",
                        func.__name__,
                    )

                    return result

            return sync_wrapper  # type: ignore

    return decorator


def _extract_session(
    func: Callable,
    args: tuple,
    kwargs: dict,
    session_param: str,
) -> Optional[AsyncSession | Session]:
    """Extract session from function arguments.

    Args:
        func: The function being called
        args: Positional arguments
        kwargs: Keyword arguments
        session_param: Name of the session parameter

    Returns:
        Session instance or None if not found
    """
    import inspect

    # Check kwargs first
    if session_param in kwargs:
        return kwargs[session_param]

    # Get function signature to find positional index
    sig = inspect.signature(func)
    params = list(sig.parameters.keys())

    if session_param in params:
        idx = params.index(session_param)
        # Account for 'self' parameter in methods
        if len(args) > idx:
            return args[idx]

    return None


def is_in_transaction(session: AsyncSession | Session) -> bool:
    """Check if session is currently in a transaction.

    Args:
        session: SQLAlchemy session (sync or async)

    Returns:
        True if session is in an active transaction
    """
    return session.in_transaction()


def get_transaction_depth(session: AsyncSession | Session) -> int:
    """Get the current transaction nesting depth.

    Args:
        session: SQLAlchemy session (sync or async)

    Returns:
        Number of nested transactions (0 if not in transaction)
    """
    # SQLAlchemy doesn't expose nesting depth directly,
    # but we can check transaction state
    if not session.in_transaction():
        return 0

    # Check for nested transaction
    if hasattr(session, '_nested_transaction') and session._nested_transaction:
        return 2  # At least one savepoint

    return 1


__all__ = [
    "TransactionPropagation",
    "TransactionError",
    "TransactionContext",
    "AsyncTransactionContext",
    "SavepointContext",
    "AsyncSavepointContext",
    "atomic",
    "atomic_sync",
    "transactional",
    "is_in_transaction",
    "get_transaction_depth",
]
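The decorator resolves the session by name through `_extract_session`, so it works for both keyword and positional calls. A minimal usage sketch (the import path and the table name are assumptions for illustration, not taken from this diff):

```python
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession

# Assumed location of the module above; adjust to the real path.
from src.server.database.transactions import (
    TransactionPropagation,
    is_in_transaction,
    transactional,
)


@transactional(propagation=TransactionPropagation.REQUIRED, session_param="db")
async def rename_series(db: AsyncSession, series_id: int, new_name: str) -> None:
    # The wrapper has already opened a transaction around this body.
    assert is_in_transaction(db)
    await db.execute(
        text("UPDATE anime_series SET name = :name WHERE id = :id"),
        {"name": new_name, "id": series_id},  # table name is hypothetical
    )
    # Commit on return and rollback on exception are handled by the wrapper.
```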
@@ -144,6 +144,23 @@ class ConflictError(AniWorldAPIException):
         )


+class BadRequestError(AniWorldAPIException):
+    """Exception raised for bad request (400) errors."""
+
+    def __init__(
+        self,
+        message: str = "Bad request",
+        details: Optional[Dict[str, Any]] = None,
+    ):
+        """Initialize bad request error."""
+        super().__init__(
+            message=message,
+            status_code=400,
+            error_code="BAD_REQUEST",
+            details=details,
+        )
+
+
 class RateLimitError(AniWorldAPIException):
     """Exception raised when rate limit is exceeded."""
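A sketch of how the new exception is meant to be raised from a route; the endpoint path and payload shape here are hypothetical:

```python
from fastapi import APIRouter

from src.server.exceptions import BadRequestError

router = APIRouter()


@router.post("/api/example")  # hypothetical endpoint for illustration
async def create_item(payload: dict):
    if "key" not in payload:
        # Surfaces as HTTP 400 with error_code "BAD_REQUEST" once the
        # handler added in error_handler.py (further below) is registered.
        raise BadRequestError(
            message="Missing required field 'key'",
            details={"field": "key"},
        )
    return {"status": "ok"}
```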
@@ -5,6 +5,7 @@ This module provides the main FastAPI application with proper CORS
 configuration, middleware setup, static file serving, and Jinja2 template
 integration.
 """
+import asyncio
 from contextlib import asynccontextmanager
 from pathlib import Path
@@ -21,6 +22,7 @@ from src.server.api.anime import router as anime_router
 from src.server.api.auth import router as auth_router
 from src.server.api.config import router as config_router
 from src.server.api.download import router as download_router
+from src.server.api.health import router as health_router
 from src.server.api.scheduler import router as scheduler_router
 from src.server.api.websocket import router as websocket_router
 from src.server.controllers.error_controller import (
@@ -29,11 +31,11 @@ from src.server.controllers.error_controller import (
 )

 # Import controllers
-from src.server.controllers.health_controller import router as health_router
 from src.server.controllers.page_controller import router as page_router
 from src.server.middleware.auth import AuthMiddleware
 from src.server.middleware.error_handler import register_exception_handlers
 from src.server.middleware.setup_redirect import SetupRedirectMiddleware
+from src.server.services.anime_service import sync_series_from_data_files
 from src.server.services.progress_service import get_progress_service
 from src.server.services.websocket_service import get_websocket_service
@@ -42,29 +44,54 @@ from src.server.services.websocket_service import get_websocket_service


 @asynccontextmanager
-async def lifespan(app: FastAPI):
-    """Manage application lifespan (startup and shutdown)."""
-    # Setup logging first with DEBUG level
-    logger = setup_logging(log_level="DEBUG")
+async def lifespan(_application: FastAPI):
+    """Manage application lifespan (startup and shutdown).
+
+    Args:
+        _application: The FastAPI application instance (unused but required
+            by the lifespan protocol).
+    """
+    # Setup logging first with INFO level
+    logger = setup_logging(log_level="INFO")

     # Startup
     try:
         logger.info("Starting FastAPI application...")

+        # Initialize database first (required for other services)
+        try:
+            from src.server.database.connection import init_db
+            await init_db()
+            logger.info("Database initialized successfully")
+        except Exception as e:
+            logger.error("Failed to initialize database: %s", e, exc_info=True)
+            raise  # Database is required, fail startup if it fails
+
         # Load configuration from config.json and sync with settings
         try:
             from src.server.services.config_service import get_config_service
             config_service = get_config_service()
             config = config_service.load_config()

+            logger.debug(
+                "Config loaded: other=%s", config.other
+            )
+
             # Sync anime_directory from config.json to settings
-            if config.other and config.other.get("anime_directory"):
-                settings.anime_directory = str(config.other["anime_directory"])
+            # config.other is Dict[str, object] - pylint doesn't infer this
+            other_settings = dict(config.other) if config.other else {}
+            if other_settings.get("anime_directory"):
+                anime_dir = other_settings["anime_directory"]
+                settings.anime_directory = str(anime_dir)
                 logger.info(
                     "Loaded anime_directory from config: %s",
                     settings.anime_directory
                 )
-        except Exception as e:
+            else:
+                logger.debug(
+                    "anime_directory not found in config.other"
+                )
+        except (OSError, ValueError, KeyError) as e:
             logger.warning("Failed to load config from config.json: %s", e)

         # Initialize progress service with event subscription
@@ -86,6 +113,37 @@ async def lifespan(app: FastAPI):
         # Subscribe to progress events
         progress_service.subscribe("progress_updated", progress_event_handler)

+        # Initialize download service and restore queue from database
+        # Only if anime directory is configured
+        try:
+            from src.server.utils.dependencies import get_download_service
+
+            logger.info(
+                "Checking anime_directory setting: '%s'",
+                settings.anime_directory
+            )
+
+            if settings.anime_directory:
+                download_service = get_download_service()
+                await download_service.initialize()
+                logger.info("Download service initialized and queue restored")
+
+                # Sync series from data files to database
+                sync_count = await sync_series_from_data_files(
+                    settings.anime_directory
+                )
+                logger.info(
+                    "Data file sync complete. Added %d series.", sync_count
+                )
+            else:
+                logger.info(
+                    "Download service initialization skipped - "
+                    "anime directory not configured"
+                )
+        except (OSError, RuntimeError, ValueError) as e:
+            logger.warning("Failed to initialize download service: %s", e)
+            # Continue startup - download service can be initialized later
+
         logger.info("FastAPI application started successfully")
         logger.info("Server running on http://127.0.0.1:8000")
         logger.info(
@@ -98,20 +156,88 @@ async def lifespan(app: FastAPI):
     # Yield control to the application
     yield

-    # Shutdown
-    logger.info("FastAPI application shutting down")
+    # Shutdown - execute in proper order with timeout protection
+    logger.info("FastAPI application shutting down (graceful shutdown initiated)")

-    # Shutdown download service and its thread pool
+    # Define shutdown timeout (total time allowed for all shutdown operations)
+    SHUTDOWN_TIMEOUT = 30.0
+
+    import time
+    shutdown_start = time.monotonic()
+
+    def remaining_time() -> float:
+        """Calculate remaining shutdown time."""
+        elapsed = time.monotonic() - shutdown_start
+        return max(0.0, SHUTDOWN_TIMEOUT - elapsed)
+
+    # 1. Broadcast shutdown notification via WebSocket
     try:
-        from src.server.services.download_service import _download_service_instance
+        ws_service = get_websocket_service()
+        logger.info("Broadcasting shutdown notification to WebSocket clients...")
+        await asyncio.wait_for(
+            ws_service.shutdown(timeout=min(5.0, remaining_time())),
+            timeout=min(5.0, remaining_time())
+        )
+        logger.info("WebSocket shutdown complete")
+    except asyncio.TimeoutError:
+        logger.warning("WebSocket shutdown timed out")
+    except Exception as e:  # pylint: disable=broad-exception-caught
+        logger.error("Error during WebSocket shutdown: %s", e, exc_info=True)
+
+    # 2. Shutdown download service and persist active downloads
+    try:
+        from src.server.services.download_service import (  # noqa: E501
+            _download_service_instance,
+        )
         if _download_service_instance is not None:
             logger.info("Stopping download service...")
-            await _download_service_instance.stop()
             logger.info("Download service stopped successfully")
-    except Exception as e:
+    except asyncio.TimeoutError:
+        logger.warning("Download service shutdown timed out")
+    except Exception as e:  # pylint: disable=broad-exception-caught
         logger.error("Error stopping download service: %s", e, exc_info=True)

-    logger.info("FastAPI application shutdown complete")
+    # 3. Shutdown SeriesApp and cleanup thread pool
+    try:
+        from src.server.utils.dependencies import _series_app
+        if _series_app is not None:
+            logger.info("Shutting down SeriesApp thread pool...")
+            _series_app.shutdown()
+            logger.info("SeriesApp shutdown complete")
+    except Exception as e:  # pylint: disable=broad-exception-caught
+        logger.error("Error during SeriesApp shutdown: %s", e, exc_info=True)
+
+    # 4. Cleanup progress service
+    try:
+        progress_service = get_progress_service()
+        logger.info("Cleaning up progress service...")
+        # Clear any active progress tracking and subscribers
+        progress_service._active_progress.clear()
+        logger.info("Progress service cleanup complete")
+    except Exception as e:  # pylint: disable=broad-exception-caught
+        logger.error(
+            "Error cleaning up progress service: %s", e, exc_info=True
+        )
+
+    # 5. Close database connections with WAL checkpoint
+    try:
+        from src.server.database.connection import close_db
+        logger.info("Closing database connections...")
+        await asyncio.wait_for(
+            close_db(),
+            timeout=min(10.0, remaining_time())
+        )
+        logger.info("Database connections closed")
+    except asyncio.TimeoutError:
+        logger.warning("Database shutdown timed out")
+    except Exception as e:  # pylint: disable=broad-exception-caught
+        logger.error("Error closing database: %s", e, exc_info=True)
+
+    elapsed_total = time.monotonic() - shutdown_start
+    logger.info(
+        "FastAPI application shutdown complete (took %.2fs)",
+        elapsed_total
+    )


 # Initialize FastAPI app with lifespan
@ -180,5 +306,5 @@ if __name__ == "__main__":
|
|||||||
host="127.0.0.1",
|
host="127.0.0.1",
|
||||||
port=8000,
|
port=8000,
|
||||||
reload=True,
|
reload=True,
|
||||||
log_level="debug"
|
log_level="info"
|
||||||
)
|
)
|
||||||
|
|||||||
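The shutdown sequence above shares a single 30-second budget across all steps through `remaining_time()`, so a slow step cannot starve the ones after it. The same pattern in isolation, runnable as-is:

```python
import asyncio
import time

SHUTDOWN_TIMEOUT = 30.0


async def main() -> None:
    start = time.monotonic()

    def remaining() -> float:
        # Whatever is left of the global budget, never negative.
        return max(0.0, SHUTDOWN_TIMEOUT - (time.monotonic() - start))

    async def step(name: str, coro, cap: float) -> None:
        # Each step is capped at min(per-step cap, remaining global budget),
        # mirroring the lifespan shutdown above.
        try:
            await asyncio.wait_for(coro, timeout=min(cap, remaining()))
        except asyncio.TimeoutError:
            print(f"{name} timed out")

    await step("websocket", asyncio.sleep(0.1), 5.0)
    await step("database", asyncio.sleep(0.1), 10.0)


asyncio.run(main())
```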
@@ -8,6 +8,17 @@ Responsibilities:
 This middleware is intentionally lightweight and synchronous.
 For production use consider a distributed rate limiter (Redis) and
 a proper token revocation store.
+
+WARNING - SINGLE PROCESS LIMITATION:
+Rate limiting state is stored in memory dictionaries which RESET when
+the process restarts. This means:
+- Attackers can bypass rate limits by triggering a process restart
+- Rate limits are not shared across multiple workers/processes
+
+For production deployments, consider:
+- Using Redis-backed rate limiting (e.g., slowapi with Redis)
+- Running behind a reverse proxy with rate limiting (nginx, HAProxy)
+- Using a dedicated rate limiting service
 """
 from __future__ import annotations
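One concrete shape of the Redis-backed option named in the warning, using slowapi. This is an external sketch, not code from this repository; it assumes slowapi is installed and a Redis instance is reachable at the given URI:

```python
from fastapi import FastAPI, Request
from slowapi import Limiter, _rate_limit_exceeded_handler
from slowapi.errors import RateLimitExceeded
from slowapi.util import get_remote_address

# Counters live in Redis, so they survive restarts and are shared
# across workers - unlike the in-memory dictionaries described above.
limiter = Limiter(
    key_func=get_remote_address,
    storage_uri="redis://localhost:6379",
)
app = FastAPI()
app.state.limiter = limiter
app.add_exception_handler(RateLimitExceeded, _rate_limit_exceeded_handler)


@app.get("/api/ping")  # hypothetical route for illustration
@limiter.limit("60/minute")
async def ping(request: Request):
    return {"ok": True}
```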
@@ -15,6 +15,7 @@ from src.server.exceptions import (
     AniWorldAPIException,
     AuthenticationError,
     AuthorizationError,
+    BadRequestError,
     ConflictError,
     NotFoundError,
     RateLimitError,
@@ -127,6 +128,26 @@ def register_exception_handlers(app: FastAPI) -> None:
         ),
     )

+    @app.exception_handler(BadRequestError)
+    async def bad_request_error_handler(
+        request: Request, exc: BadRequestError
+    ) -> JSONResponse:
+        """Handle bad request errors (400)."""
+        logger.info(
+            f"Bad request error: {exc.message}",
+            extra={"details": exc.details, "path": str(request.url.path)},
+        )
+        return JSONResponse(
+            status_code=exc.status_code,
+            content=create_error_response(
+                status_code=exc.status_code,
+                error=exc.error_code,
+                message=exc.message,
+                details=exc.details,
+                request_id=getattr(request.state, "request_id", None),
+            ),
+        )
+
     @app.exception_handler(NotFoundError)
     async def not_found_error_handler(
         request: Request, exc: NotFoundError
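For reference, the body the new handler produces mirrors the arguments passed to `create_error_response`; the exact envelope depends on that helper, so this shape is an approximation:

```python
# Approximate 400 response body for the BadRequestError example earlier.
example_body = {
    "status_code": 400,
    "error": "BAD_REQUEST",
    "message": "Missing required field 'key'",
    "details": {"field": "key"},
    "request_id": None,  # filled in when request-ID middleware sets it
}
```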
@@ -11,7 +11,7 @@ from typing import Callable

 from fastapi import Request
 from starlette.middleware.base import BaseHTTPMiddleware
-from starlette.responses import RedirectResponse
+from starlette.responses import RedirectResponse, Response
 from starlette.types import ASGIApp

 from src.server.services.auth_service import auth_service
@@ -91,11 +91,11 @@ class SetupRedirectMiddleware(BaseHTTPMiddleware):
             config = config_service.load_config()

             # Validate the loaded config
-            validation = config.validate()
+            validation = config.validate_config()
             if not validation.valid:
                 return True

-        except Exception:
+        except (FileNotFoundError, ValueError, OSError, AttributeError):
             # If we can't load or validate config, setup is needed
             return True
@@ -103,7 +103,7 @@ class SetupRedirectMiddleware(BaseHTTPMiddleware):

     async def dispatch(
         self, request: Request, call_next: Callable
-    ) -> RedirectResponse:
+    ) -> Response:
         """Process the request and redirect to setup if needed.

         Args:
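The widened annotation is correct because `dispatch` returns either a redirect or whatever `call_next` produced; `Response` is their common supertype. Reduced to its essentials (the predicate is a stand-in for the config/auth checks above):

```python
from starlette.responses import RedirectResponse, Response


def needs_setup() -> bool:
    return False  # stand-in for the real config/auth checks


async def dispatch_sketch(request, call_next) -> Response:
    if needs_setup():
        return RedirectResponse(url="/setup")
    # call_next yields a plain Response, not a RedirectResponse,
    # hence the broader return annotation.
    return await call_next(request)
```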
@@ -70,8 +70,6 @@ class AnimeSeriesResponse(BaseModel):
         )
     )
     alt_titles: List[str] = Field(default_factory=list, description="Alternative titles")
-    description: Optional[str] = Field(None, description="Short series description")
-    total_episodes: Optional[int] = Field(None, ge=0, description="Declared total episode count if known")
     episodes: List[EpisodeInfo] = Field(default_factory=list, description="Known episodes information")
     missing_episodes: List[MissingEpisodeInfo] = Field(default_factory=list, description="Detected missing episode ranges")
     thumbnail: Optional[HttpUrl] = Field(None, description="Optional thumbnail image URL")
@@ -58,8 +58,9 @@ class ValidationResult(BaseModel):
     """Result of a configuration validation attempt."""

     valid: bool = Field(..., description="Whether the configuration is valid")
-    errors: Optional[List[str]] = Field(
-        default_factory=list, description="List of validation error messages"
+    errors: List[str] = Field(
+        default_factory=lambda: [],
+        description="List of validation error messages"
     )
@@ -71,14 +72,16 @@ class AppConfig(BaseModel):

     name: str = Field(default="Aniworld", description="Application name")
     data_dir: str = Field(default="data", description="Base data directory")
-    scheduler: SchedulerConfig = Field(default_factory=SchedulerConfig)
+    scheduler: SchedulerConfig = Field(
+        default_factory=SchedulerConfig
+    )
     logging: LoggingConfig = Field(default_factory=LoggingConfig)
     backup: BackupConfig = Field(default_factory=BackupConfig)
     other: Dict[str, object] = Field(
         default_factory=dict, description="Arbitrary other settings"
     )

-    def validate(self) -> ValidationResult:
+    def validate_config(self) -> ValidationResult:
         """Perform light-weight validation and return a ValidationResult.

         This method intentionally avoids performing IO (no filesystem checks)
@@ -98,7 +101,8 @@ class AppConfig(BaseModel):
             errors.append(msg)

         # backup.path must be set when backups are enabled
-        if self.backup.enabled and (not self.backup.path):
+        backup_data = self.model_dump().get("backup", {})
+        if backup_data.get("enabled") and not backup_data.get("path"):
             errors.append(
                 "backup.path must be set when backups.enabled is true"
             )
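A sketch of the renamed validator in use; the `BackupConfig` field names follow the check above, while the import path is an assumption:

```python
# Assumed import path for the config models shown above.
from src.server.models.config import AppConfig, BackupConfig

config = AppConfig(backup=BackupConfig(enabled=True, path=""))
result = config.validate_config()

assert result.valid is False
assert "backup.path must be set when backups.enabled is true" in result.errors
```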
@@ -1,6 +1,7 @@
 from __future__ import annotations

 import asyncio
+import time
 from functools import lru_cache
 from typing import Optional
@@ -12,6 +13,10 @@ from src.server.services.progress_service import (
     ProgressType,
     get_progress_service,
 )
+from src.server.services.websocket_service import (
+    WebSocketService,
+    get_websocket_service,
+)

 logger = structlog.get_logger(__name__)
@@ -37,21 +42,37 @@ class AnimeService:
         self,
         series_app: SeriesApp,
         progress_service: Optional[ProgressService] = None,
+        websocket_service: Optional[WebSocketService] = None,
     ):
         self._app = series_app
         self._directory = series_app.directory_to_search
         self._progress_service = progress_service or get_progress_service()
+        self._websocket_service = websocket_service or get_websocket_service()
         self._event_loop: Optional[asyncio.AbstractEventLoop] = None
+        # Track scan progress for WebSocket updates
+        self._scan_start_time: Optional[float] = None
+        self._scan_directories_count: int = 0
+        self._scan_files_count: int = 0
+        self._scan_total_items: int = 0
+        self._is_scanning: bool = False
+        self._scan_current_directory: str = ""
+        # Lock to prevent concurrent rescans
+        self._scan_lock = asyncio.Lock()
         # Subscribe to SeriesApp events
         # Note: Events library uses assignment (=), not += operator
         try:
             self._app.download_status = self._on_download_status
             self._app.scan_status = self._on_scan_status
-            logger.debug("Successfully subscribed to SeriesApp events")
+            logger.info(
+                "Subscribed to SeriesApp events",
+                scan_status_handler=str(self._app.scan_status),
+                series_app_id=id(self._app),
+            )
         except Exception as e:
             logger.exception("Failed to subscribe to SeriesApp events")
             raise AnimeServiceError("Initialization failed") from e

     def _on_download_status(self, args) -> None:
         """Handle download status events from SeriesApp.
@@ -142,7 +163,7 @@ class AnimeService:
                 ),
                 loop
             )
-        except Exception as exc:
+        except Exception as exc:  # pylint: disable=broad-except
             logger.error(
                 "Error handling download status event",
                 error=str(exc)
@@ -152,7 +173,8 @@ class AnimeService:
         """Handle scan status events from SeriesApp.

         Events include both 'key' (primary identifier) and 'folder'
-        (metadata for display purposes).
+        (metadata for display purposes). Also broadcasts via WebSocket
+        for real-time UI updates.

         Args:
             args: ScanStatusEventArgs from SeriesApp containing key,
@@ -161,23 +183,50 @@ class AnimeService:
         try:
             scan_id = "library_scan"

+            logger.info(
+                "Scan status event received",
+                status=args.status,
+                current=args.current,
+                total=args.total,
+                folder=args.folder,
+            )
+
             # Get event loop - try running loop first, then stored loop
             loop = None
             try:
                 loop = asyncio.get_running_loop()
+                logger.debug("Using running event loop for scan status")
             except RuntimeError:
                 # No running loop in this thread - use stored loop
                 loop = self._event_loop
+                logger.debug(
+                    "Using stored event loop for scan status",
+                    has_loop=loop is not None
+                )

             if not loop:
-                logger.debug(
+                logger.warning(
                     "No event loop available for scan status event",
                     status=args.status
                 )
                 return

+            logger.info(
+                "Processing scan status event",
+                status=args.status,
+                loop_id=id(loop),
+            )
+
             # Map SeriesApp scan events to progress service
             if args.status == "started":
+                # Track scan start time and reset counters
+                self._scan_start_time = time.time()
+                self._scan_directories_count = 0
+                self._scan_files_count = 0
+                self._scan_total_items = args.total
+                self._is_scanning = True
+                self._scan_current_directory = ""
+
                 asyncio.run_coroutine_threadsafe(
                     self._progress_service.start_progress(
                         progress_id=scan_id,
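The loop handling above exists because SeriesApp fires these events from a worker thread, where `asyncio.get_running_loop()` raises `RuntimeError`; scheduling must then go through the loop stored during `rescan`. The pattern in miniature, runnable as-is:

```python
import asyncio
import threading


async def report(status: str) -> None:
    print("scan status:", status)


def scanner_thread(loop: asyncio.AbstractEventLoop) -> None:
    # No running loop in this thread - hand the coroutine to the
    # server loop instead, exactly as _on_scan_status does.
    asyncio.run_coroutine_threadsafe(report("progress"), loop)


async def main() -> None:
    loop = asyncio.get_running_loop()  # the "stored" loop
    t = threading.Thread(target=scanner_thread, args=(loop,))
    t.start()
    t.join()
    await asyncio.sleep(0.1)  # give the scheduled coroutine time to run


asyncio.run(main())
```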
@@ -187,7 +236,18 @@ class AnimeService:
                     ),
                     loop
                 )
+                # Broadcast scan started via WebSocket with total items
+                asyncio.run_coroutine_threadsafe(
+                    self._broadcast_scan_started_safe(total_items=args.total),
+                    loop
+                )
             elif args.status == "progress":
+                # Update scan counters
+                self._scan_directories_count = args.current
+                self._scan_current_directory = args.folder or ""
+                # Estimate files found (use current as proxy since detailed
+                # file count isn't available from SerieScanner)
+
                 asyncio.run_coroutine_threadsafe(
                     self._progress_service.update_progress(
                         progress_id=scan_id,
@@ -197,7 +257,25 @@ class AnimeService:
                     ),
                     loop
                 )
+                # Broadcast scan progress via WebSocket
+                asyncio.run_coroutine_threadsafe(
+                    self._broadcast_scan_progress_safe(
+                        directories_scanned=args.current,
+                        files_found=args.current,  # Use folder count as proxy
+                        current_directory=args.folder or "",
+                        total_items=args.total,
+                    ),
+                    loop
+                )
             elif args.status == "completed":
+                # Calculate elapsed time
+                elapsed = 0.0
+                if self._scan_start_time:
+                    elapsed = time.time() - self._scan_start_time
+
+                # Mark scan as complete
+                self._is_scanning = False
+
                 asyncio.run_coroutine_threadsafe(
                     self._progress_service.complete_progress(
                         progress_id=scan_id,
@@ -205,7 +283,17 @@ class AnimeService:
                     ),
                     loop
                 )
+                # Broadcast scan completed via WebSocket
+                asyncio.run_coroutine_threadsafe(
+                    self._broadcast_scan_completed_safe(
+                        total_directories=args.total,
+                        total_files=args.total,  # Use folder count as proxy
+                        elapsed_seconds=elapsed,
+                    ),
+                    loop
+                )
             elif args.status == "failed":
+                self._is_scanning = False
                 asyncio.run_coroutine_threadsafe(
                     self._progress_service.fail_progress(
                         progress_id=scan_id,
@@ -214,6 +302,7 @@ class AnimeService:
                     loop
                 )
             elif args.status == "cancelled":
+                self._is_scanning = False
                 asyncio.run_coroutine_threadsafe(
                     self._progress_service.fail_progress(
                         progress_id=scan_id,
@@ -221,8 +310,119 @@ class AnimeService:
                     ),
                     loop
                 )
+        except Exception as exc:  # pylint: disable=broad-except
+            logger.error("Error handling scan status event: %s", exc)
+
+    async def _broadcast_scan_started_safe(self, total_items: int = 0) -> None:
+        """Safely broadcast scan started event via WebSocket.
+
+        Wraps the WebSocket broadcast in try/except to ensure scan
+        continues even if WebSocket fails.
+
+        Args:
+            total_items: Total number of items to scan
+        """
+        try:
+            logger.info(
+                "Broadcasting scan_started via WebSocket",
+                directory=self._directory,
+                total_items=total_items,
+            )
+            await self._websocket_service.broadcast_scan_started(
+                directory=self._directory,
+                total_items=total_items,
+            )
+            logger.info("scan_started broadcast sent successfully")
         except Exception as exc:
-            logger.error("Error handling scan status event", error=str(exc))
+            logger.warning(
+                "Failed to broadcast scan_started via WebSocket",
+                error=str(exc)
+            )
+
+    async def _broadcast_scan_progress_safe(
+        self,
+        directories_scanned: int,
+        files_found: int,
+        current_directory: str,
+        total_items: int = 0,
+    ) -> None:
+        """Safely broadcast scan progress event via WebSocket.
+
+        Wraps the WebSocket broadcast in try/except to ensure scan
+        continues even if WebSocket fails.
+
+        Args:
+            directories_scanned: Number of directories scanned so far
+            files_found: Number of files found so far
+            current_directory: Current directory being scanned
+            total_items: Total number of items to scan
+        """
+        try:
+            await self._websocket_service.broadcast_scan_progress(
+                directories_scanned=directories_scanned,
+                files_found=files_found,
+                current_directory=current_directory,
+                total_items=total_items,
+            )
+        except Exception as exc:
+            logger.warning(
+                "Failed to broadcast scan_progress via WebSocket",
+                error=str(exc)
+            )
+
+    async def _broadcast_scan_completed_safe(
+        self,
+        total_directories: int,
+        total_files: int,
+        elapsed_seconds: float,
+    ) -> None:
+        """Safely broadcast scan completed event via WebSocket.
+
+        Wraps the WebSocket broadcast in try/except to ensure scan
+        cleanup continues even if WebSocket fails.
+
+        Args:
+            total_directories: Total directories scanned
+            total_files: Total files found
+            elapsed_seconds: Time taken for the scan
+        """
+        try:
+            await self._websocket_service.broadcast_scan_completed(
+                total_directories=total_directories,
+                total_files=total_files,
+                elapsed_seconds=elapsed_seconds,
+            )
+        except Exception as exc:
+            logger.warning(
+                "Failed to broadcast scan_completed via WebSocket",
+                error=str(exc)
+            )
+
+    def get_scan_status(self) -> dict:
+        """Get the current scan status.
+
+        Returns:
+            Dictionary with scan status information including:
+            - is_scanning: Whether a scan is currently in progress
+            - total_items: Total number of items to scan
+            - directories_scanned: Number of directories scanned so far
+            - current_directory: Current directory being scanned
+            - directory: Root directory being scanned
+        """
+        status = {
+            "is_scanning": self._is_scanning,
+            "total_items": self._scan_total_items,
+            "directories_scanned": self._scan_directories_count,
+            "current_directory": self._scan_current_directory,
+            "directory": self._directory,
+        }
+        logger.debug(
+            "Scan status requested",
+            is_scanning=self._is_scanning,
+            total_items=self._scan_total_items,
+            directories_scanned=self._scan_directories_count,
+        )
+        return status

     @lru_cache(maxsize=128)
     def _cached_list_missing(self) -> list[dict]:
@@ -288,25 +488,322 @@ class AnimeService:
         The SeriesApp handles progress tracking via events which are
         forwarded to the ProgressService through event handlers.

+        After scanning, results are persisted to the database.
+
         All series are identified by their 'key' (provider identifier),
         with 'folder' stored as metadata.
+
+        Note:
+            Only one scan can run at a time. If a scan is already in
+            progress, this method returns immediately without starting
+            a new scan.
         """
-        try:
-            # Store event loop for event handlers
-            self._event_loop = asyncio.get_running_loop()
-
-            # SeriesApp.rescan is now async and handles events internally
-            await self._app.rescan()
-
-            # invalidate cache
-            try:
-                self._cached_list_missing.cache_clear()
-            except Exception:
-                pass
-
-        except Exception as exc:
-            logger.exception("rescan failed")
-            raise AnimeServiceError("Rescan failed") from exc
+        # Check if a scan is already running (non-blocking)
+        if self._scan_lock.locked():
+            logger.info("Rescan already in progress, ignoring request")
+            return
+
+        async with self._scan_lock:
+            try:
+                # Store event loop for event handlers
+                self._event_loop = asyncio.get_running_loop()
+                logger.info(
+                    "Rescan started, event loop stored",
+                    loop_id=id(self._event_loop),
+                    series_app_id=id(self._app),
+                    scan_handler=str(self._app.scan_status),
+                )
+
+                # SeriesApp.rescan returns scanned series list
+                scanned_series = await self._app.rescan()
+
+                # Persist scan results to database
+                if scanned_series:
+                    await self._save_scan_results_to_db(scanned_series)
+
+                # Reload series from database to ensure consistency
+                await self._load_series_from_db()
+
+                # invalidate cache
+                try:
+                    self._cached_list_missing.cache_clear()
+                except Exception:  # pylint: disable=broad-except
+                    pass
+
+            except Exception as exc:  # pylint: disable=broad-except
+                logger.exception("rescan failed")
+                raise AnimeServiceError("Rescan failed") from exc
+
+    async def _save_scan_results_to_db(self, series_list: list) -> int:
+        """
+        Save scan results to the database.
+
+        Creates or updates series records in the database based on
+        scan results.
+
+        Args:
+            series_list: List of Serie objects from scan
+
+        Returns:
+            Number of series saved/updated
+        """
+        from src.server.database.connection import get_db_session
+        from src.server.database.service import AnimeSeriesService
+
+        saved_count = 0
+
+        async with get_db_session() as db:
+            for serie in series_list:
+                try:
+                    # Check if series already exists
+                    existing = await AnimeSeriesService.get_by_key(
+                        db, serie.key
+                    )
+
+                    if existing:
+                        # Update existing series
+                        await self._update_series_in_db(
+                            serie, existing, db
+                        )
+                    else:
+                        # Create new series
+                        await self._create_series_in_db(serie, db)
+
+                    saved_count += 1
+                except Exception as e:  # pylint: disable=broad-except
+                    logger.warning(
+                        "Failed to save series to database: %s (key=%s) - %s",
+                        serie.name,
+                        serie.key,
+                        str(e)
+                    )
+
+        logger.info(
+            "Saved %d series to database from scan results",
+            saved_count
+        )
+        return saved_count
+
+    async def _create_series_in_db(self, serie, db) -> None:
+        """Create a new series in the database."""
+        from src.server.database.service import AnimeSeriesService, EpisodeService
+
+        anime_series = await AnimeSeriesService.create(
+            db=db,
+            key=serie.key,
+            name=serie.name,
+            site=serie.site,
+            folder=serie.folder,
+        )
+
+        # Create Episode records
+        if serie.episodeDict:
+            for season, episode_numbers in serie.episodeDict.items():
+                for ep_num in episode_numbers:
+                    await EpisodeService.create(
+                        db=db,
+                        series_id=anime_series.id,
+                        season=season,
+                        episode_number=ep_num,
+                    )
+
+        logger.debug(
+            "Created series in database: %s (key=%s)",
+            serie.name,
+            serie.key
+        )
+
+    async def _update_series_in_db(self, serie, existing, db) -> None:
+        """Update an existing series in the database.
+
+        Syncs the database episodes with the current missing episodes from scan.
+        - Adds new missing episodes that are not in the database
+        - Removes episodes from database that are no longer missing
+          (i.e., the file has been added to the filesystem)
+        """
+        from src.server.database.service import AnimeSeriesService, EpisodeService
+
+        # Get existing episodes from database
+        existing_episodes = await EpisodeService.get_by_series(db, existing.id)
+
+        # Build dict of existing episodes: {season: {ep_num: episode_id}}
+        existing_dict: dict[int, dict[int, int]] = {}
+        for ep in existing_episodes:
+            if ep.season not in existing_dict:
+                existing_dict[ep.season] = {}
+            existing_dict[ep.season][ep.episode_number] = ep.id
+
+        # Get new missing episodes from scan
+        new_dict = serie.episodeDict or {}
+
+        # Build set of new missing episodes for quick lookup
+        new_missing_set: set[tuple[int, int]] = set()
+        for season, episode_numbers in new_dict.items():
+            for ep_num in episode_numbers:
+                new_missing_set.add((season, ep_num))
+
+        # Add new missing episodes that are not in the database
+        for season, episode_numbers in new_dict.items():
+            existing_season_eps = existing_dict.get(season, {})
+            for ep_num in episode_numbers:
+                if ep_num not in existing_season_eps:
+                    await EpisodeService.create(
+                        db=db,
+                        series_id=existing.id,
+                        season=season,
+                        episode_number=ep_num,
+                    )
+                    logger.debug(
+                        "Added missing episode to database: %s S%02dE%02d",
+                        serie.key,
+                        season,
+                        ep_num
+                    )
+
+        # Remove episodes from database that are no longer missing
+        # (i.e., the episode file now exists on the filesystem)
+        for season, eps_dict in existing_dict.items():
+            for ep_num, episode_id in eps_dict.items():
+                if (season, ep_num) not in new_missing_set:
+                    await EpisodeService.delete(db, episode_id)
+                    logger.info(
+                        "Removed episode from database (no longer missing): "
+                        "%s S%02dE%02d",
+                        serie.key,
+                        season,
+                        ep_num
+                    )
+
+        # Update folder if changed
+        if existing.folder != serie.folder:
+            await AnimeSeriesService.update(
+                db,
+                existing.id,
+                folder=serie.folder
+            )
+
+        logger.debug(
+            "Updated series in database: %s (key=%s)",
+            serie.name,
+            serie.key
+        )
+
+    async def _load_series_from_db(self) -> None:
+        """
+        Load series from the database into SeriesApp.
+
+        This method is called during initialization and after rescans
+        to ensure the in-memory series list is in sync with the database.
+        """
+        from src.core.entities.series import Serie
+        from src.server.database.connection import get_db_session
+        from src.server.database.service import AnimeSeriesService
+
+        async with get_db_session() as db:
+            anime_series_list = await AnimeSeriesService.get_all(
+                db, with_episodes=True
+            )
+
+            # Convert to Serie objects
+            series_list = []
+            for anime_series in anime_series_list:
+                # Build episode_dict from episodes relationship
+                episode_dict: dict[int, list[int]] = {}
+                if anime_series.episodes:
+                    for episode in anime_series.episodes:
+                        season = episode.season
+                        if season not in episode_dict:
+                            episode_dict[season] = []
+                        episode_dict[season].append(episode.episode_number)
+                    # Sort episode numbers
+                    for season in episode_dict:
+                        episode_dict[season].sort()
+
+                serie = Serie(
+                    key=anime_series.key,
+                    name=anime_series.name,
+                    site=anime_series.site,
+                    folder=anime_series.folder,
+                    episodeDict=episode_dict
+                )
+                series_list.append(serie)
+
+            # Load into SeriesApp
+            self._app.load_series_from_list(series_list)
+
+    async def add_series_to_db(
+        self,
+        serie,
+        db
+    ):
+        """
+        Add a series to the database if it doesn't already exist.
+
+        Uses serie.key for identification. Creates a new AnimeSeries
+        record in the database if it doesn't already exist.
+
+        Args:
+            serie: The Serie instance to add
+            db: Database session for async operations
+
+        Returns:
+            Created AnimeSeries instance, or None if already exists
+        """
+        from src.server.database.service import AnimeSeriesService, EpisodeService
+
+        # Check if series already exists in DB
+        existing = await AnimeSeriesService.get_by_key(db, serie.key)
+        if existing:
+            logger.debug(
+                "Series already exists in database: %s (key=%s)",
+                serie.name,
+                serie.key
+            )
+            return None
+
+        # Create new series in database
+        anime_series = await AnimeSeriesService.create(
+            db=db,
+            key=serie.key,
+            name=serie.name,
+            site=serie.site,
+            folder=serie.folder,
+        )
+
+        # Create Episode records for each episode in episodeDict
+        if serie.episodeDict:
+            for season, episode_numbers in serie.episodeDict.items():
+                for episode_number in episode_numbers:
+                    await EpisodeService.create(
+                        db=db,
+                        series_id=anime_series.id,
+                        season=season,
+                        episode_number=episode_number,
+                    )
+
+        logger.info(
+            "Added series to database: %s (key=%s)",
+            serie.name,
+            serie.key
+        )
+
+        return anime_series
+
+    async def contains_in_db(self, key: str, db) -> bool:
+        """
+        Check if a series with the given key exists in the database.
+
+        Args:
+            key: The unique provider identifier for the series
+            db: Database session for async operations
+
+        Returns:
+            True if the series exists in the database
+        """
+        from src.server.database.service import AnimeSeriesService
+
+        existing = await AnimeSeriesService.get_by_key(db, key)
+        return existing is not None
+
     async def download(
         self,
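The episode sync in `_update_series_in_db` is, at its core, a set difference in both directions; a self-contained restatement of the same logic:

```python
# (season, episode) pairs: rows currently in the DB vs. the latest scan.
db_eps = {(1, 1), (1, 2), (2, 5)}
scan_missing = {(1, 2), (2, 5), (2, 6)}

to_insert = scan_missing - db_eps  # newly missing -> EpisodeService.create
to_delete = db_eps - scan_missing  # no longer missing -> EpisodeService.delete

assert to_insert == {(2, 6)}
assert to_delete == {(1, 1)}
```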
@@ -335,6 +832,7 @@ class AnimeService:

         Raises:
             AnimeServiceError: If download fails
+            InterruptedError: If download was cancelled

         Note:
             The 'key' parameter is the primary identifier used for all
@@ -353,6 +851,10 @@ class AnimeService:
                 key=key,
                 item_id=item_id,
             )
+        except InterruptedError:
+            # Download was cancelled - re-raise for proper handling
+            logger.info("Download cancelled, propagating cancellation")
+            raise
         except Exception as exc:
             logger.exception("download failed")
             raise AnimeServiceError("Download failed") from exc
@@ -361,3 +863,135 @@ class AnimeService:
 def get_anime_service(series_app: SeriesApp) -> AnimeService:
     """Factory used for creating AnimeService with a SeriesApp instance."""
     return AnimeService(series_app)
+
+
+async def sync_series_from_data_files(
+    anime_directory: str,
+    log_instance=None  # pylint: disable=unused-argument
+) -> int:
+    """
+    Sync series from data files to the database.
+
+    Scans the anime directory for data files and adds any new series
+    to the database. Existing series are skipped (no duplicates).
+
+    This function is typically called during application startup to ensure
+    series metadata stored in filesystem data files is available in the
+    database.
+
+    Args:
+        anime_directory: Path to the anime directory with data files
+        log_instance: Optional logger instance (unused, kept for API
+            compatibility). This function always uses structlog internally.
+
+    Returns:
+        Number of new series added to the database
+    """
+    # Always use structlog for structured logging with keyword arguments
+    log = structlog.get_logger(__name__)
+
+    try:
+        from src.server.database.connection import get_db_session
+        from src.server.database.service import AnimeSeriesService, EpisodeService
+
+        log.info(
+            "Starting data file to database sync",
+            directory=anime_directory
+        )
+
+        # Get all series from data files using SeriesApp
+        series_app = SeriesApp(anime_directory)
+        all_series = await asyncio.to_thread(
+            series_app.get_all_series_from_data_files
+        )
+
+        if not all_series:
+            log.info("No series found in data files to sync")
+            return 0
+
+        log.info(
+            "Found series in data files, syncing to database",
+            count=len(all_series)
+        )
+
+        async with get_db_session() as db:
+            added_count = 0
+            skipped_count = 0
+            for serie in all_series:
+                # Handle series with empty name - use folder as fallback
+                if not serie.name or not serie.name.strip():
+                    if serie.folder and serie.folder.strip():
+                        serie.name = serie.folder.strip()
+                        log.debug(
+                            "Using folder as name fallback",
+                            key=serie.key,
+                            folder=serie.folder
+                        )
+                    else:
+                        log.warning(
+                            "Skipping series with empty name and folder",
+                            key=serie.key
+                        )
+                        skipped_count += 1
+                        continue
+
+                try:
+                    # Check if series already exists in DB
+                    existing = await AnimeSeriesService.get_by_key(db, serie.key)
+                    if existing:
+                        log.debug(
+                            "Series already exists in database",
+                            name=serie.name,
+                            key=serie.key
+                        )
+                        continue
+
+                    # Create new series in database
+                    anime_series = await AnimeSeriesService.create(
+                        db=db,
+                        key=serie.key,
+                        name=serie.name,
+                        site=serie.site,
+                        folder=serie.folder,
+                    )
+
+                    # Create Episode records for each episode in episodeDict
+                    if serie.episodeDict:
+                        for season, episode_numbers in serie.episodeDict.items():
+                            for episode_number in episode_numbers:
+                                await EpisodeService.create(
+                                    db=db,
+                                    series_id=anime_series.id,
+                                    season=season,
+                                    episode_number=episode_number,
+                                )
+
+                    added_count += 1
+                    log.debug(
+                        "Added series to database",
+                        name=serie.name,
+                        key=serie.key
+                    )
+                except Exception as e:  # pylint: disable=broad-except
+                    log.warning(
+                        "Failed to add series to database",
+                        key=serie.key,
+                        name=serie.name,
+                        error=str(e)
+                    )
+                    skipped_count += 1
+
+        log.info(
+            "Data file sync complete",
+            added=added_count,
+            skipped=len(all_series) - added_count
+        )
+        return added_count
+
+    except Exception as e:  # pylint: disable=broad-except
+        log.warning(
+            "Failed to sync series to database",
+            error=str(e),
+            exc_info=True
+        )
+        return 0
```diff
@@ -42,6 +42,17 @@ class AuthService:
       config persistence should be used (not implemented here).
     - Lockout policy is kept in-memory and will reset when the process
       restarts. This is acceptable for single-process deployments.
+
+    WARNING - SINGLE PROCESS LIMITATION:
+    Failed login attempts are stored in memory dictionaries which RESET
+    when the process restarts. This means:
+    - Attackers can bypass lockouts by triggering a process restart
+    - Lockout state is not shared across multiple workers/processes
+
+    For production deployments, consider:
+    - Storing failed attempts in database with TTL-based expiration
+    - Using Redis for distributed lockout state
+    - Implementing account-based (not just IP-based) lockout tracking
     """

     def __init__(self) -> None:
```
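The new docstring stops at a recommendation. As one concrete direction, here is a minimal sketch of distributed lockout state on Redis, assuming redis-py's asyncio client; the key prefix, threshold, and window are illustrative values, not ones taken from this codebase:

```python
import redis.asyncio as redis

LOCKOUT_THRESHOLD = 5        # assumed failed-attempt limit
LOCKOUT_TTL_SECONDS = 900    # assumed 15-minute lockout window

client = redis.Redis.from_url("redis://localhost:6379/0")


async def register_failed_attempt(identity: str) -> None:
    """Count a failed login; the counter expires with the lockout window."""
    key = f"lockout:{identity}"
    attempts = await client.incr(key)
    if attempts == 1:
        # First failure starts the window; later failures reuse it.
        await client.expire(key, LOCKOUT_TTL_SECONDS)


async def is_locked_out(identity: str) -> bool:
    """True once the identity has crossed the threshold inside the window."""
    value = await client.get(f"lockout:{identity}")
    return value is not None and int(value) >= LOCKOUT_THRESHOLD
```

Because the counter lives in Redis rather than process memory, it survives restarts and is shared across workers, which addresses both caveats raised in the docstring.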
```diff
@@ -4,7 +4,7 @@ This service handles:
 - Loading and saving configuration to JSON files
 - Configuration validation
 - Backup and restore functionality
-- Configuration migration for version updates
+- Configuration version management
 """

 import json
@@ -35,8 +35,8 @@ class ConfigBackupError(ConfigServiceError):
 class ConfigService:
     """Service for managing application configuration persistence.

-    Handles loading, saving, validation, backup, and migration of
-    configuration files. Uses JSON format for human-readable and
+    Handles loading, saving, validation, backup, and version management
+    of configuration files. Uses JSON format for human-readable and
     version-control friendly storage.
     """

@@ -84,18 +84,13 @@ class ConfigService:
         with open(self.config_path, "r", encoding="utf-8") as f:
             data = json.load(f)

-        # Check if migration is needed
-        file_version = data.get("version", "1.0.0")
-        if file_version != self.CONFIG_VERSION:
-            data = self._migrate_config(data, file_version)
-
         # Remove version key before constructing AppConfig
         data.pop("version", None)

         config = AppConfig(**data)

         # Validate configuration
-        validation = config.validate()
+        validation = config.validate_config()
         if not validation.valid:
             errors = ', '.join(validation.errors or [])
             raise ConfigValidationError(
@@ -128,7 +123,7 @@ class ConfigService:
             ConfigValidationError: If config validation fails
         """
         # Validate before saving
-        validation = config.validate()
+        validation = config.validate_config()
         if not validation.valid:
             errors = ', '.join(validation.errors or [])
             raise ConfigValidationError(
@@ -185,7 +180,7 @@ class ConfigService:
         Returns:
             ValidationResult: Validation result with errors if any
         """
-        return config.validate()
+        return config.validate_config()

     def create_backup(self, name: Optional[str] = None) -> Path:
         """Create backup of current configuration.
@@ -328,26 +323,6 @@ class ConfigService:
             except (OSError, IOError):
                 # Ignore errors during cleanup
                 continue

-    def _migrate_config(
-        self, data: Dict, from_version: str  # noqa: ARG002
-    ) -> Dict:
-        """Migrate configuration from old version to current.
-
-        Args:
-            data: Configuration data to migrate
-            from_version: Version to migrate from (reserved for future use)
-
-        Returns:
-            Dict: Migrated configuration data
-        """
-        # Currently only one version exists
-        # Future migrations would go here
-        # Example:
-        # if from_version == "1.0.0" and self.CONFIG_VERSION == "2.0.0":
-        #     data = self._migrate_1_0_to_2_0(data)
-
-        return data
-

 # Singleton instance
```
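All three call-site hunks above are the same mechanical rename, `validate()` to `validate_config()`, presumably because `AppConfig` is a Pydantic model (note the `AppConfig(**data)` construction) and `validate` collides with Pydantic's own reserved method. A minimal sketch of the contract the call sites rely on — a `.valid` flag plus optional `.errors` — with the caveat that the real `AppConfig` and `ValidationResult` have more to them than this stand-in:

```python
from dataclasses import dataclass
from typing import List, Optional

from pydantic import BaseModel


@dataclass
class ValidationResult:
    valid: bool
    errors: Optional[List[str]] = None


class AppConfig(BaseModel):
    anime_directory: str = ""  # illustrative field only

    def validate_config(self) -> ValidationResult:
        """Collect all errors instead of raising on the first one."""
        errors: List[str] = []
        if not self.anime_directory:
            errors.append("anime_directory must be set")
        return ValidationResult(valid=not errors, errors=errors or None)


result = AppConfig().validate_config()
if not result.valid:
    print(", ".join(result.errors or []))
```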
```diff
@@ -2,18 +2,19 @@

 This module provides a simplified queue management system for handling
 anime episode downloads with manual start/stop controls, progress tracking,
-persistence, and retry functionality.
+database persistence, and retry functionality.
+
+The service uses SQLite database for persistent storage via QueueRepository
+while maintaining an in-memory cache for performance.
 """
 from __future__ import annotations

 import asyncio
-import json
 import uuid
 from collections import deque
 from concurrent.futures import ThreadPoolExecutor
 from datetime import datetime, timezone
-from pathlib import Path
-from typing import Dict, List, Optional
+from typing import TYPE_CHECKING, Dict, List, Optional

 import structlog

@@ -28,6 +29,9 @@ from src.server.models.download import (
 from src.server.services.anime_service import AnimeService, AnimeServiceError
 from src.server.services.progress_service import ProgressService, get_progress_service

+if TYPE_CHECKING:
+    from src.server.services.queue_repository import QueueRepository
+
 logger = structlog.get_logger(__name__)

```
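The `if TYPE_CHECKING:` block added here is the standard idiom for a type-only import: annotations can name `QueueRepository` (as a string) without importing the module at runtime, avoiding an import-time dependency between the two services. A minimal, self-contained sketch of the pattern as this changeset uses it (class name here is illustrative):

```python
from typing import TYPE_CHECKING, Optional

if TYPE_CHECKING:
    # Seen only by type checkers; never executed, so no runtime cycle.
    from src.server.services.queue_repository import QueueRepository


class Service:
    def __init__(self, repo: Optional["QueueRepository"] = None) -> None:
        self._repo = repo  # the string annotation resolves lazily

    def repo(self) -> "QueueRepository":
        if self._repo is None:
            # Runtime import deferred to first use, mirroring _get_repository() below.
            from src.server.services.queue_repository import get_queue_repository
            self._repo = get_queue_repository()
        return self._repo
```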
```diff
@@ -42,7 +46,7 @@ class DownloadService:
     - Manual download start/stop
     - FIFO queue processing
     - Real-time progress tracking
-    - Queue persistence and recovery
+    - Database persistence via QueueRepository
     - Automatic retry logic
     - WebSocket broadcast support
     """
@@ -50,24 +54,28 @@ class DownloadService:
     def __init__(
         self,
         anime_service: AnimeService,
+        queue_repository: Optional["QueueRepository"] = None,
         max_retries: int = 3,
-        persistence_path: str = "./data/download_queue.json",
         progress_service: Optional[ProgressService] = None,
     ):
         """Initialize the download service.

         Args:
             anime_service: Service for anime operations
+            queue_repository: Optional repository for database persistence.
+                If not provided, will use default singleton.
             max_retries: Maximum retry attempts for failed downloads
-            persistence_path: Path to persist queue state
             progress_service: Optional progress service for tracking
         """
         self._anime_service = anime_service
         self._max_retries = max_retries
-        self._persistence_path = Path(persistence_path)
         self._progress_service = progress_service or get_progress_service()

-        # Queue storage by status
+        # Database repository for persistence
+        self._queue_repository = queue_repository
+        self._db_initialized = False
+
+        # In-memory cache for performance (synced with database)
         self._pending_queue: deque[DownloadItem] = deque()
         # Helper dict for O(1) lookup of pending items by ID
         self._pending_items_by_id: Dict[str, DownloadItem] = {}
```
```diff
@@ -92,14 +100,159 @@ class DownloadService:
         # Track if queue progress has been initialized
         self._queue_progress_initialized: bool = False

-        # Load persisted queue
-        self._load_queue()
-
         logger.info(
             "DownloadService initialized",
             max_retries=max_retries,
         )

+    def _get_repository(self) -> "QueueRepository":
+        """Get the queue repository, initializing if needed.
+
+        Returns:
+            QueueRepository instance
+        """
+        if self._queue_repository is None:
+            from src.server.services.queue_repository import get_queue_repository
+            self._queue_repository = get_queue_repository()
+        return self._queue_repository
+
+    async def initialize(self) -> None:
+        """Initialize the service by loading queue state from database.
+
+        Should be called after database is initialized during app startup.
+        Note: With the simplified model, status/priority/progress are now
+        managed in-memory only. The database stores the queue items
+        for persistence across restarts.
+        """
+        if self._db_initialized:
+            return
+
+        try:
+            repository = self._get_repository()
+
+            # Load all items from database - they all start as PENDING
+            # since status is now managed in-memory only
+            all_items = await repository.get_all_items()
+            for item in all_items:
+                # All items from database are treated as pending
+                item.status = DownloadStatus.PENDING
+                self._add_to_pending_queue(item)
+
+            self._db_initialized = True
+
+            logger.info(
+                "Queue restored from database: pending_count=%d",
+                len(self._pending_queue),
+            )
+        except Exception as e:
+            logger.error("Failed to load queue from database: %s", e, exc_info=True)
+            # Continue without persistence - queue will work in memory only
+            self._db_initialized = True
+
+    async def _save_to_database(self, item: DownloadItem) -> DownloadItem:
+        """Save or update an item in the database.
+
+        Args:
+            item: Download item to save
+
+        Returns:
+            Saved item with database ID
+        """
+        try:
+            repository = self._get_repository()
+            return await repository.save_item(item)
+        except Exception as e:
+            logger.error("Failed to save item to database: %s", e)
+            return item
+
+    async def _set_error_in_database(
+        self,
+        item_id: str,
+        error: str,
+    ) -> bool:
+        """Set error message on an item in the database.
+
+        Args:
+            item_id: Download item ID
+            error: Error message
+
+        Returns:
+            True if update succeeded
+        """
+        try:
+            repository = self._get_repository()
+            return await repository.set_error(item_id, error)
+        except Exception as e:
+            logger.error("Failed to set error in database: %s", e)
+            return False
+
+    async def _delete_from_database(self, item_id: str) -> bool:
+        """Delete an item from the database.
+
+        Args:
+            item_id: Download item ID
+
+        Returns:
+            True if delete succeeded
+        """
+        try:
+            repository = self._get_repository()
+            return await repository.delete_item(item_id)
+        except Exception as e:
+            logger.error("Failed to delete from database: %s", e)
+            return False
+
+    async def _remove_episode_from_missing_list(
+        self,
+        series_key: str,
+        season: int,
+        episode: int,
+    ) -> bool:
+        """Remove a downloaded episode from the missing episodes list.
+
+        Called when a download completes successfully to update the
+        database so the episode no longer appears as missing.
+
+        Args:
+            series_key: Unique provider key for the series
+            season: Season number
+            episode: Episode number within season
+
+        Returns:
+            True if episode was removed, False otherwise
+        """
+        try:
+            from src.server.database.connection import get_db_session
+            from src.server.database.service import EpisodeService
+
+            async with get_db_session() as db:
+                deleted = await EpisodeService.delete_by_series_and_episode(
+                    db=db,
+                    series_key=series_key,
+                    season=season,
+                    episode_number=episode,
+                )
+                if deleted:
+                    logger.info(
+                        "Removed episode from missing list: "
+                        "%s S%02dE%02d",
+                        series_key,
+                        season,
+                        episode,
+                    )
+                    # Clear the anime service cache so list_missing
+                    # returns updated data
+                    try:
+                        self._anime_service._cached_list_missing.cache_clear()
+                    except Exception:
+                        pass
+                return deleted
+        except Exception as e:
+            logger.error(
+                "Failed to remove episode from missing list: %s", e
+            )
+            return False
+
     async def _init_queue_progress(self) -> None:
         """Initialize the download queue progress tracking.

@@ -119,7 +272,7 @@ class DownloadService:
             )
             self._queue_progress_initialized = True
         except Exception as e:
-            logger.error("Failed to initialize queue progress", error=str(e))
+            logger.error("Failed to initialize queue progress: %s", e)

     def _add_to_pending_queue(
         self, item: DownloadItem, front: bool = False
```
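The new `initialize()` is explicitly meant to run after the database is ready. A minimal sketch of wiring it into a FastAPI lifespan hook; `init_database()` and `get_download_service()` are assumed helper names, not confirmed APIs of this codebase:

```python
from contextlib import asynccontextmanager

from fastapi import FastAPI

# Assumed helpers - the real startup hooks in this project may differ.
from src.server.database.connection import init_database  # hypothetical name
from src.server.services.download_service import get_download_service  # hypothetical name


@asynccontextmanager
async def lifespan(app: FastAPI):
    await init_database()                      # schema must exist first
    await get_download_service().initialize()  # restores persisted items as PENDING
    yield


app = FastAPI(lifespan=lifespan)
```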
```diff
@@ -165,69 +318,6 @@ class DownloadService:
         """Generate unique identifier for download items."""
         return str(uuid.uuid4())

-    def _load_queue(self) -> None:
-        """Load persisted queue from disk."""
-        try:
-            if self._persistence_path.exists():
-                with open(self._persistence_path, "r", encoding="utf-8") as f:
-                    data = json.load(f)
-
-                # Restore pending items
-                for item_dict in data.get("pending", []):
-                    item = DownloadItem(**item_dict)
-                    # Reset status if was downloading when saved
-                    if item.status == DownloadStatus.DOWNLOADING:
-                        item.status = DownloadStatus.PENDING
-                    self._add_to_pending_queue(item)
-
-                # Restore failed items that can be retried
-                for item_dict in data.get("failed", []):
-                    item = DownloadItem(**item_dict)
-                    if item.retry_count < self._max_retries:
-                        item.status = DownloadStatus.PENDING
-                        self._add_to_pending_queue(item)
-                    else:
-                        self._failed_items.append(item)
-
-                logger.info(
-                    "Queue restored from disk",
-                    pending_count=len(self._pending_queue),
-                    failed_count=len(self._failed_items),
-                )
-        except Exception as e:
-            logger.error("Failed to load persisted queue", error=str(e))
-
-    def _save_queue(self) -> None:
-        """Persist current queue state to disk."""
-        try:
-            self._persistence_path.parent.mkdir(parents=True, exist_ok=True)
-
-            active_items = (
-                [self._active_download] if self._active_download else []
-            )
-
-            data = {
-                "pending": [
-                    item.model_dump(mode="json")
-                    for item in self._pending_queue
-                ],
-                "active": [
-                    item.model_dump(mode="json") for item in active_items
-                ],
-                "failed": [
-                    item.model_dump(mode="json")
-                    for item in self._failed_items
-                ],
-                "timestamp": datetime.now(timezone.utc).isoformat(),
-            }
-
-            with open(self._persistence_path, "w", encoding="utf-8") as f:
-                json.dump(data, f, indent=2)
-
-            logger.debug("Queue persisted to disk")
-        except Exception as e:
-            logger.error("Failed to persist queue", error=str(e))
-
     async def add_to_queue(
         self,
         serie_id: str,
```
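Dropping the JSON persistence strands any existing `./data/download_queue.json` on disk. A one-off migration sketch (not part of this changeset) that replays legacy items into the new database-backed queue; it reuses the old file's `pending`/`failed` keys and the repository introduced later in this diff:

```python
import json
from pathlib import Path

from src.server.models.download import DownloadItem
from src.server.services.queue_repository import get_queue_repository


async def migrate_legacy_queue(path: str = "./data/download_queue.json") -> int:
    """Replay items from the removed JSON store into the database queue."""
    legacy = Path(path)
    if not legacy.exists():
        return 0

    data = json.loads(legacy.read_text(encoding="utf-8"))
    repo = get_queue_repository()
    migrated = 0
    # The old format kept "pending", "active" and "failed" item lists.
    for item_dict in data.get("pending", []) + data.get("failed", []):
        await repo.save_item(DownloadItem(**item_dict))
        migrated += 1
    return migrated
```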
```diff
@@ -274,22 +364,23 @@ class DownloadService:
                     added_at=datetime.now(timezone.utc),
                 )

-                # Always append to end (FIFO order)
-                self._add_to_pending_queue(item, front=False)
+                # Save to database first to get persistent ID
+                saved_item = await self._save_to_database(item)

-                created_ids.append(item.id)
+                # Add to in-memory cache
+                self._add_to_pending_queue(saved_item, front=False)
+
+                created_ids.append(saved_item.id)

                 logger.info(
                     "Item added to queue",
-                    item_id=item.id,
+                    item_id=saved_item.id,
                     serie_key=serie_id,
                     serie_name=serie_name,
                     season=episode.season,
                     episode=episode.episode,
                 )

-            self._save_queue()
-
             # Notify via progress service
             queue_status = await self.get_queue_status()
             await self._progress_service.update_progress(
@@ -306,7 +397,7 @@ class DownloadService:
             return created_ids

         except Exception as e:
-            logger.error("Failed to add items to queue", error=str(e))
+            logger.error("Failed to add items to queue: %s", e)
             raise DownloadServiceError(f"Failed to add items: {str(e)}") from e

     async def remove_from_queue(self, item_ids: List[str]) -> List[str]:
@@ -333,8 +424,10 @@ class DownloadService:
                     item.completed_at = datetime.now(timezone.utc)
                     self._failed_items.append(item)
                     self._active_download = None
+                    # Delete cancelled item from database
+                    await self._delete_from_database(item_id)
                     removed_ids.append(item_id)
-                    logger.info("Cancelled active download", item_id=item_id)
+                    logger.info("Cancelled active download: item_id=%s", item_id)
                     continue

                 # Check pending queue - O(1) lookup using helper dict
@@ -342,13 +435,14 @@ class DownloadService:
                     item = self._pending_items_by_id[item_id]
                     self._pending_queue.remove(item)
                     del self._pending_items_by_id[item_id]
+                    # Delete from database
+                    await self._delete_from_database(item_id)
                     removed_ids.append(item_id)
                     logger.info(
                         "Removed from pending queue", item_id=item_id
                     )

             if removed_ids:
-                self._save_queue()
                 # Notify via progress service
                 queue_status = await self.get_queue_status()
                 await self._progress_service.update_progress(
@@ -365,7 +459,7 @@ class DownloadService:
             return removed_ids

         except Exception as e:
-            logger.error("Failed to remove items", error=str(e))
+            logger.error("Failed to remove items: %s", e)
             raise DownloadServiceError(
                 f"Failed to remove items: {str(e)}"
             ) from e
```
```diff
@@ -379,6 +473,10 @@ class DownloadService:

         Raises:
             DownloadServiceError: If reordering fails
+
+        Note:
+            Reordering is done in-memory only. Database priority is not
+            updated since the in-memory queue defines the actual order.
         """
         try:
             # Build new queue based on specified order
@@ -399,9 +497,6 @@ class DownloadService:
             # Replace queue
             self._pending_queue = new_queue

-            # Save updated queue
-            self._save_queue()
-
             # Notify via progress service
             queue_status = await self.get_queue_status()
             await self._progress_service.update_progress(
@@ -418,7 +513,7 @@ class DownloadService:
             logger.info("Queue reordered", reordered_count=len(item_ids))

         except Exception as e:
-            logger.error("Failed to reorder queue", error=str(e))
+            logger.error("Failed to reorder queue: %s", e)
             raise DownloadServiceError(
                 f"Failed to reorder queue: {str(e)}"
             ) from e
@@ -462,7 +557,7 @@ class DownloadService:
             return "queue_started"

         except Exception as e:
-            logger.error("Failed to start queue processing", error=str(e))
+            logger.error("Failed to start queue processing: %s", e)
             raise DownloadServiceError(
                 f"Failed to start queue processing: {str(e)}"
             ) from e
@@ -692,13 +787,15 @@ class DownloadService:
             Number of items cleared
         """
         count = len(self._pending_queue)

+        # Delete all pending items from database
+        for item_id in list(self._pending_items_by_id.keys()):
+            await self._delete_from_database(item_id)
+
         self._pending_queue.clear()
         self._pending_items_by_id.clear()
         logger.info("Cleared pending items", count=count)

-        # Save queue state
-        self._save_queue()
-
         # Notify via progress service
         if count > 0:
             queue_status = await self.get_queue_status()
@@ -749,14 +846,15 @@ class DownloadService:
                     self._add_to_pending_queue(item)
                     retried_ids.append(item.id)

+                    # Status is now managed in-memory only
+
                     logger.info(
-                        "Retrying failed item",
-                        item_id=item.id,
-                        retry_count=item.retry_count
+                        "Retrying failed item: item_id=%s, retry_count=%d",
+                        item.id,
+                        item.retry_count,
                     )

             if retried_ids:
-                self._save_queue()
                 # Notify via progress service
                 queue_status = await self.get_queue_status()
                 await self._progress_service.update_progress(
@@ -773,7 +871,7 @@ class DownloadService:
             return retried_ids

         except Exception as e:
-            logger.error("Failed to retry items", error=str(e))
+            logger.error("Failed to retry items: %s", e)
             raise DownloadServiceError(
                 f"Failed to retry: {str(e)}"
             ) from e
```
```diff
@@ -790,18 +888,17 @@ class DownloadService:
                 logger.info("Skipping download due to shutdown")
                 return

-            # Update status
+            # Update status in memory (status is now in-memory only)
             item.status = DownloadStatus.DOWNLOADING
             item.started_at = datetime.now(timezone.utc)
             self._active_download = item

             logger.info(
-                "Starting download",
-                item_id=item.id,
-                serie_key=item.serie_id,
-                serie_name=item.serie_name,
-                season=item.episode.season,
-                episode=item.episode.episode,
+                "Starting download: item_id=%s, serie_key=%s, S%02dE%02d",
+                item.id,
+                item.serie_id,
+                item.episode.season,
+                item.episode.episode,
             )

             # Execute download via anime service
@@ -809,7 +906,8 @@ class DownloadService:
             # - download started/progress/completed/failed events
             # - All updates forwarded to ProgressService
             # - ProgressService broadcasts to WebSocket clients
-            # Use serie_folder for filesystem operations and serie_id (key) for identification
+            # Use serie_folder for filesystem operations
+            # and serie_id (key) for identification
             if not item.serie_folder:
                 raise DownloadServiceError(
                     f"Missing serie_folder for download item {item.id}. "
@@ -835,8 +933,18 @@ class DownloadService:

                 self._completed_items.append(item)

+                # Delete completed item from database (status is in-memory)
+                await self._delete_from_database(item.id)
+
+                # Remove episode from missing episodes list in database
+                await self._remove_episode_from_missing_list(
+                    series_key=item.serie_id,
+                    season=item.episode.season,
+                    episode=item.episode.episode,
+                )
+
                 logger.info(
-                    "Download completed successfully", item_id=item.id
+                    "Download completed successfully: item_id=%s", item.id
                 )
             else:
                 raise AnimeServiceError("Download returned False")
@@ -844,15 +952,36 @@ class DownloadService:
         except asyncio.CancelledError:
             # Handle task cancellation during shutdown
             logger.info(
-                "Download cancelled during shutdown",
-                item_id=item.id,
+                "Download task cancelled: item_id=%s",
+                item.id,
             )
             item.status = DownloadStatus.CANCELLED
             item.completed_at = datetime.now(timezone.utc)
+            # Delete cancelled item from database
+            await self._delete_from_database(item.id)
             # Return item to pending queue if not shutting down
             if not self._is_shutting_down:
                 self._add_to_pending_queue(item, front=True)
+                # Re-save to database as pending
+                await self._save_to_database(item)
             raise  # Re-raise to properly cancel the task

+        except InterruptedError:
+            # Handle download cancellation from provider
+            logger.info(
+                "Download interrupted/cancelled: item_id=%s",
+                item.id,
+            )
+            item.status = DownloadStatus.CANCELLED
+            item.completed_at = datetime.now(timezone.utc)
+            # Delete cancelled item from database
+            await self._delete_from_database(item.id)
+            # Return item to pending queue if not shutting down
+            if not self._is_shutting_down:
+                self._add_to_pending_queue(item, front=True)
+                # Re-save to database as pending
+                await self._save_to_database(item)
+            # Don't re-raise - this is handled gracefully
+
         except Exception as e:
             # Handle failure
@@ -861,11 +990,14 @@ class DownloadService:
             item.error = str(e)
             self._failed_items.append(item)

+            # Set error in database
+            await self._set_error_in_database(item.id, str(e))
+
             logger.error(
-                "Download failed",
-                item_id=item.id,
-                error=str(e),
-                retry_count=item.retry_count,
+                "Download failed: item_id=%s, error=%s, retry_count=%d",
+                item.id,
+                str(e),
+                item.retry_count,
             )
             # Note: Failure is already broadcast by AnimeService
             # via ProgressService when SeriesApp fires failed event
```
```diff
@@ -874,44 +1006,8 @@ class DownloadService:
             # Remove from active downloads
             if self._active_download and self._active_download.id == item.id:
                 self._active_download = None

-            self._save_queue()
-
-    async def start(self) -> None:
-        """Initialize the download queue service (compatibility method).
-
-        Note: Downloads are started manually via start_next_download().
-        """
-        logger.info("Download queue service initialized")
-
-    async def stop(self) -> None:
-        """Stop the download queue service and cancel active downloads.
-
-        Cancels any active download and shuts down the thread pool immediately.
-        """
-        logger.info("Stopping download queue service...")
-
-        # Set shutdown flag
-        self._is_shutting_down = True
-        self._is_stopped = True
-
-        # Cancel active download task if running
-        if self._active_download_task and not self._active_download_task.done():
-            logger.info("Cancelling active download task...")
-            self._active_download_task.cancel()
-            try:
-                await self._active_download_task
-            except asyncio.CancelledError:
-                logger.info("Active download task cancelled")
-
-        # Save final state
-        self._save_queue()
-
-        # Shutdown executor immediately, don't wait for tasks
-        logger.info("Shutting down thread pool executor...")
-        self._executor.shutdown(wait=False, cancel_futures=True)
-
-        logger.info("Download queue service stopped")
-

 # Singleton instance
```
```diff
@@ -133,6 +133,30 @@ class ProgressServiceError(Exception):
     """Service-level exception for progress operations."""


+# Mapping from ProgressType to WebSocket room names
+# This ensures compatibility with the valid rooms defined in the WebSocket API:
+# "downloads", "queue", "scan", "system", "errors"
+_PROGRESS_TYPE_TO_ROOM: Dict[ProgressType, str] = {
+    ProgressType.DOWNLOAD: "downloads",
+    ProgressType.SCAN: "scan",
+    ProgressType.QUEUE: "queue",
+    ProgressType.SYSTEM: "system",
+    ProgressType.ERROR: "errors",
+}
+
+
+def _get_room_for_progress_type(progress_type: ProgressType) -> str:
+    """Get the WebSocket room name for a progress type.
+
+    Args:
+        progress_type: The type of progress update
+
+    Returns:
+        The WebSocket room name to broadcast to
+    """
+    return _PROGRESS_TYPE_TO_ROOM.get(progress_type, "system")
+
+
 class ProgressService:
     """Manages real-time progress updates and broadcasting.

```
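A quick illustration of the bug this mapping fixes, assuming `ProgressType.DOWNLOAD.value == "download"` (suggested by the `event_type` f-strings in the hunks below); the old f-string produced room names the WebSocket layer never registered:

```python
VALID_ROOMS = {"downloads", "queue", "scan", "system", "errors"}

old_room = "download" + "_progress"  # old: f"{progress_type.value}_progress"
new_room = "downloads"               # new: _get_room_for_progress_type(...)

assert old_room not in VALID_ROOMS   # broadcast went to an unregistered room
assert new_room in VALID_ROOMS       # broadcast now reaches subscribers
```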
```diff
@@ -293,7 +317,7 @@ class ProgressService:
             )

             # Emit event to subscribers
-            room = f"{progress_type.value}_progress"
+            room = _get_room_for_progress_type(progress_type)
             event = ProgressEvent(
                 event_type=f"{progress_type.value}_progress",
                 progress_id=progress_id,
@@ -370,7 +394,7 @@ class ProgressService:
             should_broadcast = force_broadcast or percent_change >= 1.0

             if should_broadcast:
-                room = f"{update.type.value}_progress"
+                room = _get_room_for_progress_type(update.type)
                 event = ProgressEvent(
                     event_type=f"{update.type.value}_progress",
                     progress_id=progress_id,
@@ -427,7 +451,7 @@ class ProgressService:
             )

             # Emit completion event
-            room = f"{update.type.value}_progress"
+            room = _get_room_for_progress_type(update.type)
             event = ProgressEvent(
                 event_type=f"{update.type.value}_progress",
                 progress_id=progress_id,
@@ -483,7 +507,7 @@ class ProgressService:
             )

             # Emit failure event
-            room = f"{update.type.value}_progress"
+            room = _get_room_for_progress_type(update.type)
             event = ProgressEvent(
                 event_type=f"{update.type.value}_progress",
                 progress_id=progress_id,
@@ -533,7 +557,7 @@ class ProgressService:
             )

             # Emit cancellation event
-            room = f"{update.type.value}_progress"
+            room = _get_room_for_progress_type(update.type)
             event = ProgressEvent(
                 event_type=f"{update.type.value}_progress",
                 progress_id=progress_id,
```
**src/server/services/queue_repository.py** (new file, 471 lines):

```python
"""Queue repository adapter for database-backed download queue operations.

This module provides a repository adapter that wraps the DownloadQueueService
and provides the interface needed by DownloadService for queue persistence.

The repository pattern abstracts the database operations from the business
logic, allowing the DownloadService to work with domain models (DownloadItem)
while the repository handles conversion to/from database models.

Transaction Support:
    Compound operations (save_item, clear_all) are wrapped in atomic()
    context managers to ensure all-or-nothing behavior. If any part of
    a compound operation fails, all changes are rolled back.
"""
from __future__ import annotations

import logging
from datetime import datetime, timezone
from typing import Callable, List, Optional

from sqlalchemy.ext.asyncio import AsyncSession

from src.server.database.models import DownloadQueueItem as DBDownloadQueueItem
from src.server.database.service import (
    AnimeSeriesService,
    DownloadQueueService,
    EpisodeService,
)
from src.server.database.transaction import atomic
from src.server.models.download import (
    DownloadItem,
    DownloadPriority,
    DownloadStatus,
    EpisodeIdentifier,
)

logger = logging.getLogger(__name__)


class QueueRepositoryError(Exception):
    """Repository-level exception for queue operations."""


class QueueRepository:
    """Repository adapter for database-backed download queue operations.

    Provides clean interface for queue operations while handling
    model conversion between Pydantic (DownloadItem) and SQLAlchemy
    (DownloadQueueItem) models.

    Note: The database model (DownloadQueueItem) is simplified and only
    stores episode_id as a foreign key. Status, priority, progress, and
    retry_count are managed in-memory by the DownloadService.

    Transaction Support:
        All compound operations are wrapped in atomic() transactions.
        This ensures data consistency even if operations fail mid-way.

    Attributes:
        _db_session_factory: Factory function to create database sessions
    """

    def __init__(
        self,
        db_session_factory: Callable[[], AsyncSession],
    ) -> None:
        """Initialize the queue repository.

        Args:
            db_session_factory: Factory function that returns AsyncSession
        """
        self._db_session_factory = db_session_factory
        logger.info("QueueRepository initialized")

    # =========================================================================
    # Model Conversion Methods
    # =========================================================================

    def _from_db_model(
        self,
        db_item: DBDownloadQueueItem,
        item_id: Optional[str] = None,
    ) -> DownloadItem:
        """Convert database model to DownloadItem.

        Note: Since the database model is simplified, status, priority,
        progress, and retry_count default to initial values.

        Args:
            db_item: SQLAlchemy download queue item
            item_id: Optional override for item ID

        Returns:
            Pydantic download item with default status/priority
        """
        # Get episode info from the related Episode object
        episode = db_item.episode
        series = db_item.series

        episode_identifier = EpisodeIdentifier(
            season=episode.season if episode else 1,
            episode=episode.episode_number if episode else 1,
            title=episode.title if episode else None,
        )

        return DownloadItem(
            id=item_id or str(db_item.id),
            serie_id=series.key if series else "",
            serie_folder=series.folder if series else "",
            serie_name=series.name if series else "",
            episode=episode_identifier,
            status=DownloadStatus.PENDING,  # Default - managed in-memory
            priority=DownloadPriority.NORMAL,  # Default - managed in-memory
            added_at=db_item.created_at or datetime.now(timezone.utc),
            started_at=db_item.started_at,
            completed_at=db_item.completed_at,
            progress=None,  # Managed in-memory
            error=db_item.error_message,
            retry_count=0,  # Managed in-memory
            source_url=db_item.download_url,
        )

    # =========================================================================
    # CRUD Operations
    # =========================================================================

    async def save_item(
        self,
        item: DownloadItem,
        db: Optional[AsyncSession] = None,
    ) -> DownloadItem:
        """Save a download item to the database atomically.

        Creates a new record if the item doesn't exist in the database.
        This compound operation (series lookup/create, episode lookup/create,
        queue item create) is wrapped in a transaction for atomicity.

        Note: Status, priority, progress, and retry_count are NOT persisted.

        Args:
            item: Download item to save
            db: Optional existing database session

        Returns:
            Saved download item with database ID

        Raises:
            QueueRepositoryError: If save operation fails
        """
        session = db or self._db_session_factory()
        manage_session = db is None

        try:
            async with atomic(session):
                # Find series by key
                series = await AnimeSeriesService.get_by_key(session, item.serie_id)

                if not series:
                    # Create series if it doesn't exist
                    # Use a placeholder site URL - will be updated later when actual URL is known
                    site_url = getattr(item, 'serie_site', None) or f"https://aniworld.to/anime/{item.serie_id}"
                    series = await AnimeSeriesService.create(
                        db=session,
                        key=item.serie_id,
                        name=item.serie_name,
                        site=site_url,
                        folder=item.serie_folder,
                    )
                    logger.info(
                        "Created new series for queue item: key=%s, name=%s",
                        item.serie_id,
                        item.serie_name,
                    )

                # Find or create episode
                episode = await EpisodeService.get_by_episode(
                    session,
                    series.id,
                    item.episode.season,
                    item.episode.episode,
                )

                if not episode:
                    # Create episode if it doesn't exist
                    episode = await EpisodeService.create(
                        db=session,
                        series_id=series.id,
                        season=item.episode.season,
                        episode_number=item.episode.episode,
                        title=item.episode.title,
                    )
                    logger.info(
                        "Created new episode for queue item: S%02dE%02d",
                        item.episode.season,
                        item.episode.episode,
                    )

                # Create queue item
                db_item = await DownloadQueueService.create(
                    db=session,
                    series_id=series.id,
                    episode_id=episode.id,
                    download_url=str(item.source_url) if item.source_url else None,
                )

                # Update the item ID with the database ID
                item.id = str(db_item.id)

                # Transaction committed by atomic() context manager

            logger.debug(
                "Saved queue item to database: item_id=%s, serie_key=%s",
                item.id,
                item.serie_id,
            )

            return item

        except Exception as e:
            # Rollback handled by atomic() context manager
            logger.error("Failed to save queue item: %s", e)
            raise QueueRepositoryError(f"Failed to save item: {e}") from e
        finally:
            if manage_session:
                await session.close()

    async def get_item(
        self,
        item_id: str,
        db: Optional[AsyncSession] = None,
    ) -> Optional[DownloadItem]:
        """Get a download item by ID.

        Args:
            item_id: Download item ID (database ID as string)
            db: Optional existing database session

        Returns:
            Download item or None if not found

        Raises:
            QueueRepositoryError: If query fails
        """
        session = db or self._db_session_factory()
        manage_session = db is None

        try:
            db_item = await DownloadQueueService.get_by_id(
                session, int(item_id)
            )

            if not db_item:
                return None

            return self._from_db_model(db_item, item_id)

        except ValueError:
            # Invalid ID format
            return None
        except Exception as e:
            logger.error("Failed to get queue item: %s", e)
            raise QueueRepositoryError(f"Failed to get item: {e}") from e
        finally:
            if manage_session:
                await session.close()

    async def get_all_items(
        self,
        db: Optional[AsyncSession] = None,
    ) -> List[DownloadItem]:
        """Get all download items regardless of status.

        Note: All items are returned with default status (PENDING) since
        status is now managed in-memory by the DownloadService.

        Args:
            db: Optional existing database session

        Returns:
            List of all download items

        Raises:
            QueueRepositoryError: If query fails
        """
        session = db or self._db_session_factory()
        manage_session = db is None

        try:
            db_items = await DownloadQueueService.get_all(
                session, with_series=True
            )
            return [self._from_db_model(item) for item in db_items]

        except Exception as e:
            logger.error("Failed to get all items: %s", e)
            raise QueueRepositoryError(f"Failed to get all items: {e}") from e
        finally:
            if manage_session:
                await session.close()

    async def set_error(
        self,
        item_id: str,
        error: str,
        db: Optional[AsyncSession] = None,
    ) -> bool:
        """Set error message on a download item.

        Args:
            item_id: Download item ID
            error: Error message
            db: Optional existing database session

        Returns:
            True if update succeeded, False if item not found

        Raises:
            QueueRepositoryError: If update fails
        """
        session = db or self._db_session_factory()
        manage_session = db is None

        try:
            result = await DownloadQueueService.set_error(
                session,
                int(item_id),
                error,
            )

            if manage_session:
                await session.commit()

            success = result is not None

            if success:
                logger.debug(
                    "Set error on queue item: item_id=%s",
                    item_id,
                )

            return success

        except ValueError:
            return False
        except Exception as e:
            if manage_session:
                await session.rollback()
            logger.error("Failed to set error: %s", e)
            raise QueueRepositoryError(f"Failed to set error: {e}") from e
        finally:
            if manage_session:
                await session.close()

    async def delete_item(
        self,
        item_id: str,
        db: Optional[AsyncSession] = None,
    ) -> bool:
        """Delete a download item from the database.

        Args:
            item_id: Download item ID
            db: Optional existing database session

        Returns:
            True if item was deleted, False if not found

        Raises:
            QueueRepositoryError: If delete fails
        """
        session = db or self._db_session_factory()
        manage_session = db is None

        try:
            result = await DownloadQueueService.delete(session, int(item_id))

            if manage_session:
                await session.commit()

            if result:
                logger.debug("Deleted queue item: item_id=%s", item_id)

            return result

        except ValueError:
            return False
        except Exception as e:
            if manage_session:
                await session.rollback()
            logger.error("Failed to delete item: %s", e)
            raise QueueRepositoryError(f"Failed to delete item: {e}") from e
        finally:
            if manage_session:
                await session.close()

    async def clear_all(
        self,
        db: Optional[AsyncSession] = None,
    ) -> int:
        """Clear all download items from the queue atomically.

        This bulk delete operation is wrapped in a transaction.
        Either all items are deleted or none are.

        Args:
            db: Optional existing database session

        Returns:
            Number of items cleared

        Raises:
            QueueRepositoryError: If operation fails
        """
        session = db or self._db_session_factory()
        manage_session = db is None

        try:
            async with atomic(session):
                # Use the bulk clear operation for efficiency and atomicity
                count = await DownloadQueueService.clear_all(session)

                # Transaction committed by atomic() context manager

            logger.info("Cleared all items from queue: count=%d", count)
            return count

        except Exception as e:
            # Rollback handled by atomic() context manager
            logger.error("Failed to clear queue: %s", e)
            raise QueueRepositoryError(f"Failed to clear queue: {e}") from e
        finally:
            if manage_session:
                await session.close()


# Singleton instance
_queue_repository_instance: Optional[QueueRepository] = None


def get_queue_repository(
    db_session_factory: Optional[Callable[[], AsyncSession]] = None,
) -> QueueRepository:
    """Get or create the QueueRepository singleton.

    Args:
        db_session_factory: Optional factory function for database sessions.
            If not provided, uses default from connection module.

    Returns:
        QueueRepository singleton instance
    """
    global _queue_repository_instance

    if _queue_repository_instance is None:
        if db_session_factory is None:
            # Use default session factory
            from src.server.database.connection import get_async_session_factory
            db_session_factory = get_async_session_factory

        _queue_repository_instance = QueueRepository(db_session_factory)

    return _queue_repository_instance


def reset_queue_repository() -> None:
    """Reset the QueueRepository singleton.

    Used for testing to ensure fresh state between tests.
    """
    global _queue_repository_instance
    _queue_repository_instance = None
```
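For callers outside DownloadService, the typical flow is the singleton accessor plus the coarse-grained methods above. A short usage sketch; the printed fields mirror the conversion code, everything else is illustrative:

```python
from src.server.services.queue_repository import get_queue_repository


async def dump_persisted_queue() -> None:
    repo = get_queue_repository()       # default session factory from the connection module
    items = await repo.get_all_items()  # every row comes back as PENDING
    for item in items:
        print(item.serie_name, item.episode.season, item.episode.episode)
```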
```diff
@@ -13,20 +13,8 @@ from typing import Any, Callable, Dict, List, Optional

 import structlog

-from src.core.interfaces.callbacks import (
-    CallbackManager,
-    CompletionCallback,
-    CompletionContext,
-    ErrorCallback,
-    ErrorContext,
-    OperationType,
-    ProgressCallback,
-    ProgressContext,
-    ProgressPhase,
-)
 from src.server.services.progress_service import (
     ProgressService,
-    ProgressStatus,
     ProgressType,
     get_progress_service,
 )
```
@@ -104,173 +92,6 @@ class ScanProgress:
         return result


-class ScanServiceProgressCallback(ProgressCallback):
-    """Callback implementation for forwarding scan progress to ScanService.
-
-    This callback receives progress events from SerieScanner and forwards
-    them to the ScanService for processing and broadcasting.
-    """
-
-    def __init__(
-        self,
-        service: "ScanService",
-        scan_progress: ScanProgress,
-    ):
-        """Initialize the callback.
-
-        Args:
-            service: Parent ScanService instance
-            scan_progress: ScanProgress to update
-        """
-        self._service = service
-        self._scan_progress = scan_progress
-
-    def on_progress(self, context: ProgressContext) -> None:
-        """Handle progress update from SerieScanner.
-
-        Args:
-            context: Progress context with key and folder information
-        """
-        self._scan_progress.current = context.current
-        self._scan_progress.total = context.total
-        self._scan_progress.percentage = context.percentage
-        self._scan_progress.message = context.message
-        self._scan_progress.key = context.key
-        self._scan_progress.folder = context.folder
-        self._scan_progress.updated_at = datetime.now(timezone.utc)
-
-        if context.phase == ProgressPhase.STARTING:
-            self._scan_progress.status = "started"
-        elif context.phase == ProgressPhase.IN_PROGRESS:
-            self._scan_progress.status = "in_progress"
-        elif context.phase == ProgressPhase.COMPLETED:
-            self._scan_progress.status = "completed"
-        elif context.phase == ProgressPhase.FAILED:
-            self._scan_progress.status = "failed"
-
-        # Forward to service for broadcasting
-        # Use run_coroutine_threadsafe if event loop is available
-        try:
-            loop = asyncio.get_running_loop()
-            asyncio.run_coroutine_threadsafe(
-                self._service._handle_progress_update(self._scan_progress),
-                loop
-            )
-        except RuntimeError:
-            # No running event loop - likely in test or sync context
-            pass
-
-
-class ScanServiceErrorCallback(ErrorCallback):
-    """Callback implementation for handling scan errors.
-
-    This callback receives error events from SerieScanner and forwards
-    them to the ScanService for processing and broadcasting.
-    """
-
-    def __init__(
-        self,
-        service: "ScanService",
-        scan_progress: ScanProgress,
-    ):
-        """Initialize the callback.
-
-        Args:
-            service: Parent ScanService instance
-            scan_progress: ScanProgress to update
-        """
-        self._service = service
-        self._scan_progress = scan_progress
-
-    def on_error(self, context: ErrorContext) -> None:
-        """Handle error from SerieScanner.
-
-        Args:
-            context: Error context with key and folder information
-        """
-        error_msg = context.message
-        if context.folder:
-            error_msg = f"[{context.folder}] {error_msg}"
-
-        self._scan_progress.errors.append(error_msg)
-        self._scan_progress.updated_at = datetime.now(timezone.utc)
-
-        logger.warning(
-            "Scan error",
-            key=context.key,
-            folder=context.folder,
-            error=str(context.error),
-            recoverable=context.recoverable,
-        )
-
-        # Forward to service for broadcasting
-        # Use run_coroutine_threadsafe if event loop is available
-        try:
-            loop = asyncio.get_running_loop()
-            asyncio.run_coroutine_threadsafe(
-                self._service._handle_scan_error(
-                    self._scan_progress,
-                    context,
-                ),
-                loop
-            )
-        except RuntimeError:
-            # No running event loop - likely in test or sync context
-            pass
-
-
-class ScanServiceCompletionCallback(CompletionCallback):
-    """Callback implementation for handling scan completion.
-
-    This callback receives completion events from SerieScanner and forwards
-    them to the ScanService for processing and broadcasting.
-    """
-
-    def __init__(
-        self,
-        service: "ScanService",
-        scan_progress: ScanProgress,
-    ):
-        """Initialize the callback.
-
-        Args:
-            service: Parent ScanService instance
-            scan_progress: ScanProgress to update
-        """
-        self._service = service
-        self._scan_progress = scan_progress
-
-    def on_completion(self, context: CompletionContext) -> None:
-        """Handle completion from SerieScanner.
-
-        Args:
-            context: Completion context with statistics
-        """
-        self._scan_progress.status = "completed" if context.success else "failed"
-        self._scan_progress.message = context.message
-        self._scan_progress.updated_at = datetime.now(timezone.utc)
-
-        if context.statistics:
-            self._scan_progress.series_found = context.statistics.get(
-                "series_found", 0
-            )
-
-        # Forward to service for broadcasting
-        # Use run_coroutine_threadsafe if event loop is available
-        try:
-            loop = asyncio.get_running_loop()
-            asyncio.run_coroutine_threadsafe(
-                self._service._handle_scan_completion(
-                    self._scan_progress,
-                    context,
-                ),
-                loop
-            )
-        except RuntimeError:
-            # No running event loop - likely in test or sync context
-            pass
-
-
 class ScanService:
     """Manages anime library scan operations.
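All three removed classes share one technique that survives in the replacement handlers: a synchronous callback checks for a running event loop and, if one exists, schedules an async handler with `asyncio.run_coroutine_threadsafe`, silently skipping otherwise. A self-contained sketch of that bridge:

```python
# Minimal sketch of the sync-callback-to-event-loop bridge used above.
import asyncio


async def handle_update(value: int) -> None:
    print(f"handled {value} on the event loop")


def sync_callback(value: int) -> None:
    # Same pattern as the diff: schedule the coroutine if a loop is
    # running in this thread, otherwise (tests, sync scripts) do nothing.
    try:
        loop = asyncio.get_running_loop()
        asyncio.run_coroutine_threadsafe(handle_update(value), loop)
    except RuntimeError:
        pass  # no running event loop


async def main() -> None:
    sync_callback(42)
    await asyncio.sleep(0.1)  # let the scheduled coroutine run


asyncio.run(main())
```

`run_coroutine_threadsafe` is primarily intended for calls from other threads, but calling it from the loop thread, as the try/except pattern permits, also works: it hands the coroutine off via the loop's thread-safe scheduling machinery.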
@@ -376,13 +197,13 @@ class ScanService:

     async def start_scan(
         self,
-        scanner_factory: Callable[..., Any],
+        scanner: Any,  # SerieScanner instance
     ) -> str:
         """Start a new library scan.

         Args:
-            scanner_factory: Factory function that creates a SerieScanner.
-                The factory should accept a callback_manager parameter.
+            scanner: SerieScanner instance to use for scanning.
+                The service will subscribe to its events.

         Returns:
             Scan ID for tracking
@@ -415,7 +236,7 @@ class ScanService:
                 message="Initializing scan...",
             )
         except Exception as e:
-            logger.error("Failed to start progress tracking", error=str(e))
+            logger.error("Failed to start progress tracking: %s", e)

         # Emit scan started event
         await self._emit_scan_event({
@@ -423,42 +244,82 @@ class ScanService:
             "scan_id": scan_id,
             "message": "Library scan started",
         })

+        # Create event handlers for the scanner
+        def on_progress_handler(progress_data: Dict[str, Any]) -> None:
+            """Handle progress events from scanner."""
+            scan_progress.current = progress_data.get('current', 0)
+            scan_progress.total = progress_data.get('total', 0)
+            scan_progress.percentage = progress_data.get('percentage', 0.0)
+            scan_progress.message = progress_data.get('message', '')
+            scan_progress.updated_at = datetime.now(timezone.utc)
+
+            phase = progress_data.get('phase', '')
+            if phase == 'STARTING':
+                scan_progress.status = "started"
+            elif phase == 'IN_PROGRESS':
+                scan_progress.status = "in_progress"
+
+            # Schedule the progress update on the event loop
+            try:
+                loop = asyncio.get_running_loop()
+                asyncio.run_coroutine_threadsafe(
+                    self._handle_progress_update(scan_progress),
+                    loop
+                )
+            except RuntimeError:
+                pass
+
+        def on_error_handler(error_data: Dict[str, Any]) -> None:
+            """Handle error events from scanner."""
+            error_msg = error_data.get('message', 'Unknown error')
+            scan_progress.errors.append(error_msg)
+            scan_progress.updated_at = datetime.now(timezone.utc)
+
+            logger.warning(
+                "Scan error",
+                error=str(error_data.get('error')),
+                recoverable=error_data.get('recoverable', True),
+            )
+
+            # Schedule the error handling on the event loop
+            try:
+                loop = asyncio.get_running_loop()
+                asyncio.run_coroutine_threadsafe(
+                    self._handle_scan_error(scan_progress, error_data),
+                    loop
+                )
+            except RuntimeError:
+                pass
+
+        def on_completion_handler(completion_data: Dict[str, Any]) -> None:
+            """Handle completion events from scanner."""
+            success = completion_data.get('success', False)
+            scan_progress.status = "completed" if success else "failed"
+            scan_progress.message = completion_data.get('message', '')
+            scan_progress.updated_at = datetime.now(timezone.utc)
+
+            if 'statistics' in completion_data:
+                stats = completion_data['statistics']
+                scan_progress.series_found = stats.get('series_found', 0)
+
+            # Schedule the completion handling on the event loop
+            try:
+                loop = asyncio.get_running_loop()
+                asyncio.run_coroutine_threadsafe(
+                    self._handle_scan_completion(scan_progress, completion_data),
+                    loop
+                )
+            except RuntimeError:
+                pass
+
+        # Subscribe to scanner events
+        scanner.subscribe_on_progress(on_progress_handler)
+        scanner.subscribe_on_error(on_error_handler)
+        scanner.subscribe_on_completion(on_completion_handler)
+
         return scan_id

-    def create_callback_manager(
-        self,
-        scan_progress: Optional[ScanProgress] = None,
-    ) -> CallbackManager:
-        """Create a callback manager for scan operations.
-
-        Args:
-            scan_progress: Optional scan progress to use. If None,
-                uses current scan progress.
-
-        Returns:
-            CallbackManager configured with scan callbacks
-        """
-        progress = scan_progress or self._current_scan
-        if not progress:
-            progress = ScanProgress(str(uuid.uuid4()))
-            self._current_scan = progress
-
-        callback_manager = CallbackManager()
-
-        # Register callbacks
-        callback_manager.register_progress_callback(
-            ScanServiceProgressCallback(self, progress)
-        )
-        callback_manager.register_error_callback(
-            ScanServiceErrorCallback(self, progress)
-        )
-        callback_manager.register_completion_callback(
-            ScanServiceCompletionCallback(self, progress)
-        )
-
-        return callback_manager
-
     async def _handle_progress_update(
         self,
         scan_progress: ScanProgress,
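With this change `start_scan` no longer builds callback objects itself; it only needs a scanner exposing the three `subscribe_on_*` methods. A stand-in scanner sketch showing that expected surface (the class itself is illustrative; the dict keys match the handlers above):

```python
# Illustrative stand-in for SerieScanner's subscription surface.
from typing import Any, Callable, Dict, List

Handler = Callable[[Dict[str, Any]], None]


class FakeScanner:
    def __init__(self) -> None:
        self._progress: List[Handler] = []
        self._errors: List[Handler] = []
        self._completions: List[Handler] = []

    def subscribe_on_progress(self, handler: Handler) -> None:
        self._progress.append(handler)

    def subscribe_on_error(self, handler: Handler) -> None:
        self._errors.append(handler)

    def subscribe_on_completion(self, handler: Handler) -> None:
        self._completions.append(handler)

    def scan(self) -> None:
        # Emits the same dict shapes the handlers above read.
        for h in self._progress:
            h({"current": 1, "total": 2, "percentage": 50.0,
               "message": "Scanning...", "phase": "IN_PROGRESS"})
        for h in self._completions:
            h({"success": True, "message": "Done",
               "statistics": {"series_found": 2}})


scanner = FakeScanner()
scanner.subscribe_on_progress(print)
scanner.subscribe_on_completion(print)
scanner.scan()
```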
@@ -475,11 +336,9 @@ class ScanService:
                 current=scan_progress.current,
                 total=scan_progress.total,
                 message=scan_progress.message,
-                key=scan_progress.key,
-                folder=scan_progress.folder,
             )
         except Exception as e:
-            logger.debug("Progress update skipped", error=str(e))
+            logger.debug("Progress update skipped: %s", e)

         # Emit progress event with key as primary identifier
         await self._emit_scan_event({
@@ -490,36 +349,38 @@ class ScanService:
     async def _handle_scan_error(
         self,
         scan_progress: ScanProgress,
-        error_context: ErrorContext,
+        error_data: Dict[str, Any],
     ) -> None:
         """Handle a scan error.

         Args:
             scan_progress: Current scan progress
-            error_context: Error context with key and folder metadata
+            error_data: Error data dictionary with error info
         """
         # Emit error event with key as primary identifier
         await self._emit_scan_event({
             "type": "scan_error",
             "scan_id": scan_progress.scan_id,
-            "key": error_context.key,
-            "folder": error_context.folder,
-            "error": str(error_context.error),
-            "message": error_context.message,
-            "recoverable": error_context.recoverable,
+            "error": str(error_data.get('error')),
+            "message": error_data.get('message', 'Unknown error'),
+            "recoverable": error_data.get('recoverable', True),
         })

     async def _handle_scan_completion(
         self,
         scan_progress: ScanProgress,
-        completion_context: CompletionContext,
+        completion_data: Dict[str, Any],
     ) -> None:
         """Handle scan completion.

         Args:
             scan_progress: Final scan progress
-            completion_context: Completion context with statistics
+            completion_data: Completion data dictionary with statistics
         """
+        success = completion_data.get('success', False)
+        message = completion_data.get('message', '')
+        statistics = completion_data.get('statistics', {})
+
         async with self._lock:
             self._is_scanning = False
@@ -530,33 +391,33 @@ class ScanService:

         # Complete progress tracking
         try:
-            if completion_context.success:
+            if success:
                 await self._progress_service.complete_progress(
                     progress_id=f"scan_{scan_progress.scan_id}",
-                    message=completion_context.message,
+                    message=message,
                 )
             else:
                 await self._progress_service.fail_progress(
                     progress_id=f"scan_{scan_progress.scan_id}",
-                    error_message=completion_context.message,
+                    error_message=message,
                 )
         except Exception as e:
-            logger.debug("Progress completion skipped", error=str(e))
+            logger.debug("Progress completion skipped: %s", e)

         # Emit completion event
         await self._emit_scan_event({
-            "type": "scan_completed" if completion_context.success else "scan_failed",
+            "type": "scan_completed" if success else "scan_failed",
             "scan_id": scan_progress.scan_id,
-            "success": completion_context.success,
+            "success": success,
-            "message": completion_context.message,
+            "message": message,
-            "statistics": completion_context.statistics,
+            "statistics": statistics,
             "data": scan_progress.to_dict(),
         })

         logger.info(
             "Scan completed",
             scan_id=scan_progress.scan_id,
-            success=completion_context.success,
+            success=success,
             series_found=scan_progress.series_found,
             errors_count=len(scan_progress.errors),
         )
@@ -598,7 +459,7 @@ class ScanService:
                 error_message="Scan cancelled by user",
             )
         except Exception as e:
-            logger.debug("Progress cancellation skipped", error=str(e))
+            logger.debug("Progress cancellation skipped: %s", e)

         logger.info("Scan cancelled")
         return True
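A pattern running through these hunks: structlog-style keyword fields (`error=str(e)`) are replaced with stdlib `%s` placeholders. A short standalone sketch of that %-style convention using plain `logging` (the project may still route these calls through structlog):

```python
# Sketch of the lazy %-style logging convention the diff moves to.
import logging

logging.basicConfig(level=logging.DEBUG)
logger = logging.getLogger("scan_service")

try:
    raise ValueError("disk offline")
except Exception as e:
    # %-style args are interpolated only if the record is actually
    # emitted, unlike f-strings which always build the message string.
    logger.error("Failed to start progress tracking: %s", e)
    logger.debug("Progress cancellation skipped: %s", e)
```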
@@ -322,6 +322,85 @@ class ConnectionManager:
             connection_id=connection_id,
         )

+    async def shutdown(self, timeout: float = 5.0) -> None:
+        """Gracefully shutdown all WebSocket connections.
+
+        Broadcasts a shutdown notification to all clients, then closes
+        each connection with proper close codes.
+
+        Args:
+            timeout: Maximum time (seconds) to wait for all closes to complete
+        """
+        logger.info(
+            "Initiating WebSocket shutdown, connections=%d",
+            len(self._active_connections)
+        )
+
+        # Broadcast shutdown notification to all clients
+        shutdown_message = {
+            "type": "server_shutdown",
+            "timestamp": datetime.now(timezone.utc).isoformat(),
+            "data": {
+                "message": "Server is shutting down",
+                "reason": "graceful_shutdown",
+            },
+        }
+
+        try:
+            await self.broadcast(shutdown_message)
+        except Exception as e:
+            logger.warning("Failed to broadcast shutdown message: %s", e)
+
+        # Close all connections gracefully
+        async with self._lock:
+            connection_ids = list(self._active_connections.keys())
+
+        close_tasks = []
+        for connection_id in connection_ids:
+            websocket = self._active_connections.get(connection_id)
+            if websocket:
+                close_tasks.append(
+                    self._close_connection_gracefully(connection_id, websocket)
+                )
+
+        if close_tasks:
+            # Wait for all closes with timeout
+            try:
+                await asyncio.wait_for(
+                    asyncio.gather(*close_tasks, return_exceptions=True),
+                    timeout=timeout
+                )
+            except asyncio.TimeoutError:
+                logger.warning(
+                    "WebSocket shutdown timed out after %.1f seconds", timeout
+                )
+
+        # Clear all data structures
+        async with self._lock:
+            self._active_connections.clear()
+            self._rooms.clear()
+            self._connection_metadata.clear()
+
+        logger.info("WebSocket shutdown complete")
+
+    async def _close_connection_gracefully(
+        self, connection_id: str, websocket: WebSocket
+    ) -> None:
+        """Close a single WebSocket connection gracefully.
+
+        Args:
+            connection_id: The connection identifier
+            websocket: The WebSocket connection to close
+        """
+        try:
+            # Code 1001 = Going Away (server shutdown)
+            await websocket.close(code=1001, reason="Server shutdown")
+            logger.debug("Closed WebSocket connection: %s", connection_id)
+        except Exception as e:
+            logger.debug(
+                "Error closing WebSocket %s: %s", connection_id, str(e)
+            )
+
+
 class WebSocketService:
     """High-level WebSocket service for application-wide messaging.
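Clients can tell this graceful close (code 1001, "Going Away") apart from a network failure. A hedged client-side sketch using the third-party `websockets` package; the `/ws` endpoint path is an assumption:

```python
# Hedged client sketch: observe the server's graceful close code.
# Requires `pip install websockets`; the /ws path is assumed.
import asyncio

import websockets


async def listen(url: str = "ws://127.0.0.1:8000/ws") -> None:
    async with websockets.connect(url) as ws:
        try:
            async for raw in ws:
                print("message:", raw)  # e.g. the server_shutdown JSON
        finally:
            # 1001 here indicates the server shut down gracefully
            print("closed with code:", ws.close_code)


asyncio.run(listen())
```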
@@ -498,6 +577,99 @@ class WebSocketService:
         }
         await self._manager.send_personal_message(message, connection_id)

+    async def broadcast_scan_started(
+        self, directory: str, total_items: int = 0
+    ) -> None:
+        """Broadcast that a library scan has started.
+
+        Args:
+            directory: The root directory path being scanned
+            total_items: Total number of items to scan (for progress display)
+        """
+        message = {
+            "type": "scan_started",
+            "timestamp": datetime.now(timezone.utc).isoformat(),
+            "data": {
+                "directory": directory,
+                "total_items": total_items,
+            },
+        }
+        await self._manager.broadcast(message)
+        logger.info(
+            "Broadcast scan_started",
+            directory=directory,
+            total_items=total_items,
+        )
+
+    async def broadcast_scan_progress(
+        self,
+        directories_scanned: int,
+        files_found: int,
+        current_directory: str,
+        total_items: int = 0,
+    ) -> None:
+        """Broadcast scan progress update to all clients.
+
+        Args:
+            directories_scanned: Number of directories scanned so far
+            files_found: Number of MP4 files found so far
+            current_directory: Current directory being scanned
+            total_items: Total number of items to scan (for progress display)
+        """
+        message = {
+            "type": "scan_progress",
+            "timestamp": datetime.now(timezone.utc).isoformat(),
+            "data": {
+                "directories_scanned": directories_scanned,
+                "files_found": files_found,
+                "current_directory": current_directory,
+                "total_items": total_items,
+            },
+        }
+        await self._manager.broadcast(message)
+
+    async def broadcast_scan_completed(
+        self,
+        total_directories: int,
+        total_files: int,
+        elapsed_seconds: float,
+    ) -> None:
+        """Broadcast scan completion to all clients.
+
+        Args:
+            total_directories: Total number of directories scanned
+            total_files: Total number of MP4 files found
+            elapsed_seconds: Time taken for the scan in seconds
+        """
+        message = {
+            "type": "scan_completed",
+            "timestamp": datetime.now(timezone.utc).isoformat(),
+            "data": {
+                "total_directories": total_directories,
+                "total_files": total_files,
+                "elapsed_seconds": round(elapsed_seconds, 2),
+            },
+        }
+        await self._manager.broadcast(message)
+        logger.info(
+            "Broadcast scan_completed",
+            total_directories=total_directories,
+            total_files=total_files,
+            elapsed_seconds=round(elapsed_seconds, 2),
+        )
+
+    async def shutdown(self, timeout: float = 5.0) -> None:
+        """Gracefully shutdown the WebSocket service.
+
+        Broadcasts shutdown notification and closes all connections.
+
+        Args:
+            timeout: Maximum time (seconds) to wait for shutdown
+        """
+        logger.info("Shutting down WebSocket service...")
+        await self._manager.shutdown(timeout=timeout)
+        logger.info("WebSocket service shutdown complete")
+
+
 # Singleton instance for application-wide access
 _websocket_service: Optional[WebSocketService] = None
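One plausible place to trigger this shutdown is the FastAPI lifespan hook, so sockets close before the process exits. A sketch under the assumption that a `get_websocket_service()` accessor wraps the singleton above; only `shutdown(timeout=...)` is taken from this diff:

```python
# Hedged wiring sketch: get_websocket_service() and the module path are
# assumed accessors around the _websocket_service singleton shown above.
from contextlib import asynccontextmanager

from fastapi import FastAPI

from src.server.services.websocket_service import get_websocket_service  # assumed


@asynccontextmanager
async def lifespan(app: FastAPI):
    yield  # application runs
    # On shutdown, notify clients and close connections within 5 seconds.
    await get_websocket_service().shutdown(timeout=5.0)


app = FastAPI(lifespan=lifespan)
```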
@@ -65,6 +65,10 @@ def get_series_app() -> SeriesApp:
     Raises:
         HTTPException: If SeriesApp is not initialized or anime directory
             is not configured
+
+    Note:
+        This creates a SeriesApp without database support. For database-
+        backed storage, use get_series_app_with_db() instead.
     """
     global _series_app
@@ -103,7 +107,6 @@ def reset_series_app() -> None:
     _series_app = None


-
 async def get_database_session() -> AsyncGenerator:
     """
     Dependency to get database session.
@@ -134,6 +137,38 @@ async def get_database_session() -> AsyncGenerator:
         )


+async def get_optional_database_session() -> AsyncGenerator:
+    """
+    Dependency to get optional database session.
+
+    Unlike get_database_session(), this returns None if the database
+    is not available, allowing endpoints to fall back to other storage.
+
+    Yields:
+        AsyncSession or None: Database session if available, None otherwise
+
+    Example:
+        @app.post("/anime/add")
+        async def add_anime(
+            db: Optional[AsyncSession] = Depends(get_optional_database_session)
+        ):
+            if db:
+                # Use database
+                await AnimeSeriesService.create(db, ...)
+            else:
+                # Fall back to file-based storage
+                series_app.list.add(serie)
+    """
+    try:
+        from src.server.database import get_db_session
+
+        async with get_db_session() as session:
+            yield session
+    except (ImportError, RuntimeError):
+        # Database not available - yield None
+        yield None
+
+
 def get_current_user(
     credentials: Optional[HTTPAuthorizationCredentials] = Depends(
         http_bearer_security
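Because the optional session degrades to `None`, the fallback path is easy to exercise in tests via FastAPI's standard `dependency_overrides` mechanism. An illustrative, self-contained sketch; the endpoint and names are hypothetical:

```python
# Illustrative test sketch: force the optional-session dependency to None
# so the endpoint exercises its file-based fallback path.
from typing import AsyncGenerator, Optional

from fastapi import Depends, FastAPI
from fastapi.testclient import TestClient

app = FastAPI()


async def get_optional_database_session() -> AsyncGenerator[Optional[object], None]:
    yield None  # stand-in for the real dependency shown above


@app.get("/anime/storage-mode")
async def storage_mode(
    db: Optional[object] = Depends(get_optional_database_session),
):
    return {"mode": "database" if db else "files"}


async def no_db() -> AsyncGenerator[Optional[object], None]:
    yield None


app.dependency_overrides[get_optional_database_session] = no_db

client = TestClient(app)
assert client.get("/anime/storage-mode").json() == {"mode": "files"}
```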
180 src/server/utils/filesystem.py Normal file
@@ -0,0 +1,180 @@
"""Filesystem utilities for safe file and folder operations.

This module provides utility functions for safely handling filesystem
operations, including sanitizing folder names and path validation.

Security:
- All functions sanitize inputs to prevent path traversal attacks
- Invalid filesystem characters are removed or replaced
- Unicode characters are preserved for international titles
"""

import os
import re
import unicodedata
from typing import Optional

# Characters that are invalid in filesystem paths across platforms
# Windows: < > : " / \ | ? *
# Linux/Mac: / and null byte
INVALID_PATH_CHARS = '<>:"/\\|?*\x00'

# Additional characters to remove for cleaner folder names
EXTRA_CLEANUP_CHARS = '\r\n\t'

# Maximum folder name length (conservative for cross-platform compatibility)
MAX_FOLDER_NAME_LENGTH = 200


def sanitize_folder_name(
    name: str,
    replacement: str = "",
    max_length: Optional[int] = None,
) -> str:
    """Sanitize a string for use as a filesystem folder name.

    Removes or replaces characters that are invalid for filesystems while
    preserving Unicode characters (for Japanese/Chinese titles, etc.).

    Args:
        name: The string to sanitize (e.g., anime display name)
        replacement: Character to replace invalid chars with (default: "")
        max_length: Maximum length for the result (default: MAX_FOLDER_NAME_LENGTH)

    Returns:
        str: A filesystem-safe folder name

    Raises:
        ValueError: If name is None, empty, or results in empty string

    Examples:
        >>> sanitize_folder_name("Attack on Titan: Final Season")
        'Attack on Titan Final Season'
        >>> sanitize_folder_name("What If...?")
        'What If...'
        >>> sanitize_folder_name("Re:Zero")
        'ReZero'
        >>> sanitize_folder_name("日本語タイトル")
        '日本語タイトル'
    """
    if name is None:
        raise ValueError("Folder name cannot be None")

    # Strip leading/trailing whitespace
    name = name.strip()

    if not name:
        raise ValueError("Folder name cannot be empty")

    max_len = max_length or MAX_FOLDER_NAME_LENGTH

    # Normalize Unicode characters (NFC form for consistency)
    name = unicodedata.normalize('NFC', name)

    # Remove invalid filesystem characters
    for char in INVALID_PATH_CHARS:
        name = name.replace(char, replacement)

    # Remove extra cleanup characters
    for char in EXTRA_CLEANUP_CHARS:
        name = name.replace(char, replacement)

    # Remove control characters but preserve Unicode
    name = ''.join(
        char for char in name
        if not unicodedata.category(char).startswith('C')
        or char == ' '  # Preserve spaces
    )

    # Collapse multiple consecutive spaces
    name = re.sub(r' +', ' ', name)

    # Remove leading/trailing dots and whitespace
    # (dots at start can make folders hidden on Unix)
    name = name.strip('. ')

    # Handle edge case: all characters were invalid
    if not name:
        raise ValueError(
            "Folder name contains only invalid characters"
        )

    # Truncate to max length while avoiding breaking in middle of word
    if len(name) > max_len:
        # Try to truncate at a word boundary
        truncated = name[:max_len]
        last_space = truncated.rfind(' ')
        if last_space > max_len // 2:  # Only if we don't lose too much
            truncated = truncated[:last_space]
        name = truncated.rstrip()

    return name


def is_safe_path(base_path: str, target_path: str) -> bool:
    """Check if target_path is safely within base_path.

    Prevents path traversal attacks by ensuring the target path
    is actually within the base path after resolution.

    Args:
        base_path: The base directory that should contain the target
        target_path: The path to validate

    Returns:
        bool: True if target_path is safely within base_path

    Example:
        >>> is_safe_path("/anime", "/anime/Attack on Titan")
        True
        >>> is_safe_path("/anime", "/anime/../etc/passwd")
        False
    """
    # Resolve to absolute paths
    base_resolved = os.path.abspath(base_path)
    target_resolved = os.path.abspath(target_path)

    # Check that target starts with base (with trailing separator)
    base_with_sep = base_resolved + os.sep
    return (
        target_resolved == base_resolved or
        target_resolved.startswith(base_with_sep)
    )


def create_safe_folder(
    base_path: str,
    folder_name: str,
    exist_ok: bool = True,
) -> str:
    """Create a folder with a sanitized name safely within base_path.

    Args:
        base_path: Base directory to create folder within
        folder_name: Unsanitized folder name
        exist_ok: If True, don't raise error if folder exists

    Returns:
        str: Full path to the created folder

    Raises:
        ValueError: If resulting path would be outside base_path
        OSError: If folder creation fails
    """
    # Sanitize the folder name
    safe_name = sanitize_folder_name(folder_name)

    # Construct full path
    full_path = os.path.join(base_path, safe_name)

    # Validate path safety
    if not is_safe_path(base_path, full_path):
        raise ValueError(
            f"Folder name '{folder_name}' would create path outside "
            f"base directory"
        )

    # Create the folder
    os.makedirs(full_path, exist_ok=exist_ok)

    return full_path
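A quick usage sketch for the three helpers, matching the doctests above; the import path follows the file header:

```python
# Usage sketch for the filesystem helpers above.
import tempfile

from src.server.utils.filesystem import (
    create_safe_folder,
    is_safe_path,
    sanitize_folder_name,
)

print(sanitize_folder_name("Re:Zero"))                 # 'ReZero'
print(is_safe_path("/anime", "/anime/../etc/passwd"))  # False

with tempfile.TemporaryDirectory() as base:
    path = create_safe_folder(base, 'Attack on Titan: "Final" Season?')
    print(path)  # created as '<base>/Attack on Titan Final Season'
```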
33 src/server/web/static/css/base/reset.css Normal file
@@ -0,0 +1,33 @@
/**
 * AniWorld - CSS Reset
 *
 * Normalize and reset default browser styles
 * for consistent cross-browser rendering.
 */

* {
    box-sizing: border-box;
}

html {
    font-size: 100%;
}

body {
    margin: 0;
    padding: 0;
    font-family: var(--font-family);
    font-size: var(--font-size-body);
    line-height: 1.5;
    color: var(--color-text-primary);
    background-color: var(--color-bg-primary);
    transition: background-color var(--transition-duration) var(--transition-easing),
        color var(--transition-duration) var(--transition-easing);
}

/* App container */
.app-container {
    min-height: 100vh;
    display: flex;
    flex-direction: column;
}
51 src/server/web/static/css/base/typography.css Normal file
@@ -0,0 +1,51 @@
/**
 * AniWorld - Typography Styles
 *
 * Font styles, headings, and text utilities.
 */

h1, h2, h3, h4, h5, h6 {
    margin: 0;
    font-weight: 600;
    color: var(--color-text-primary);
}

h1 {
    font-size: var(--font-size-large-title);
}

h2 {
    font-size: var(--font-size-title);
}

h3 {
    font-size: var(--font-size-subtitle);
}

h4 {
    font-size: var(--font-size-body);
}

p {
    margin: 0;
    color: var(--color-text-secondary);
}

a {
    color: var(--color-accent);
    text-decoration: none;
}

a:hover {
    text-decoration: underline;
}

small {
    font-size: var(--font-size-caption);
    color: var(--color-text-tertiary);
}

.error-message {
    color: var(--color-error);
    font-weight: 500;
}
114 src/server/web/static/css/base/variables.css Normal file
@@ -0,0 +1,114 @@
/**
 * AniWorld - CSS Variables
 *
 * Fluent UI Design System custom properties for colors, typography,
 * spacing, borders, shadows, and transitions.
 * Includes both light and dark theme definitions.
 */

:root {
    /* Light theme colors */
    --color-bg-primary: #ffffff;
    --color-bg-secondary: #faf9f8;
    --color-bg-tertiary: #f3f2f1;
    --color-surface: #ffffff;
    --color-surface-hover: #f3f2f1;
    --color-surface-pressed: #edebe9;
    --color-text-primary: #323130;
    --color-text-secondary: #605e5c;
    --color-text-tertiary: #a19f9d;
    --color-accent: #0078d4;
    --color-accent-hover: #106ebe;
    --color-accent-pressed: #005a9e;
    --color-success: #107c10;
    --color-warning: #ff8c00;
    --color-error: #d13438;
    --color-border: #e1dfdd;
    --color-divider: #c8c6c4;

    /* Dark theme colors (stored as variables for theme switching) */
    --color-bg-primary-dark: #202020;
    --color-bg-secondary-dark: #2d2d30;
    --color-bg-tertiary-dark: #3e3e42;
    --color-surface-dark: #292929;
    --color-surface-hover-dark: #3e3e42;
    --color-surface-pressed-dark: #484848;
    --color-text-primary-dark: #ffffff;
    --color-text-secondary-dark: #cccccc;
    --color-text-tertiary-dark: #969696;
    --color-accent-dark: #60cdff;
    --color-accent-hover-dark: #4db8e8;
    --color-accent-pressed-dark: #3aa0d1;
    --color-border-dark: #484644;
    --color-divider-dark: #605e5c;

    /* Typography */
    --font-family: 'Segoe UI', 'Segoe UI Web (West European)', -apple-system, BlinkMacSystemFont, Roboto, 'Helvetica Neue', sans-serif;
    --font-size-caption: 12px;
    --font-size-body: 14px;
    --font-size-subtitle: 16px;
    --font-size-title: 20px;
    --font-size-large-title: 32px;

    /* Spacing */
    --spacing-xs: 4px;
    --spacing-sm: 8px;
    --spacing-md: 12px;
    --spacing-lg: 16px;
    --spacing-xl: 20px;
    --spacing-xxl: 24px;

    /* Border radius */
    --border-radius-sm: 2px;
    --border-radius-md: 4px;
    --border-radius-lg: 6px;
    --border-radius-xl: 8px;
    --border-radius: var(--border-radius-md);

    /* Shadows */
    --shadow-card: 0 1.6px 3.6px 0 rgba(0, 0, 0, 0.132), 0 0.3px 0.9px 0 rgba(0, 0, 0, 0.108);
    --shadow-elevated: 0 6.4px 14.4px 0 rgba(0, 0, 0, 0.132), 0 1.2px 3.6px 0 rgba(0, 0, 0, 0.108);

    /* Transitions */
    --transition-duration: 0.15s;
    --transition-easing: cubic-bezier(0.1, 0.9, 0.2, 1);
    --animation-duration-fast: 0.1s;
    --animation-duration-normal: 0.15s;
    --animation-easing-standard: cubic-bezier(0.1, 0.9, 0.2, 1);

    /* Additional color aliases */
    --color-primary: var(--color-accent);
    --color-primary-light: #e6f2fb;
    --color-primary-dark: #005a9e;
    --color-text: var(--color-text-primary);
    --color-text-disabled: #a19f9d;
    --color-background: var(--color-bg-primary);
    --color-background-secondary: var(--color-bg-secondary);
    --color-background-tertiary: var(--color-bg-tertiary);
    --color-background-subtle: var(--color-bg-secondary);
}

/* Dark theme */
[data-theme="dark"] {
    --color-bg-primary: var(--color-bg-primary-dark);
    --color-bg-secondary: var(--color-bg-secondary-dark);
    --color-bg-tertiary: var(--color-bg-tertiary-dark);
    --color-surface: var(--color-surface-dark);
    --color-surface-hover: var(--color-surface-hover-dark);
    --color-surface-pressed: var(--color-surface-pressed-dark);
    --color-text-primary: var(--color-text-primary-dark);
    --color-text-secondary: var(--color-text-secondary-dark);
    --color-text-tertiary: var(--color-text-tertiary-dark);
    --color-accent: var(--color-accent-dark);
    --color-accent-hover: var(--color-accent-hover-dark);
    --color-accent-pressed: var(--color-accent-pressed-dark);
    --color-border: var(--color-border-dark);
    --color-divider: var(--color-divider-dark);
    --color-text: var(--color-text-primary-dark);
    --color-text-disabled: #969696;
    --color-background: var(--color-bg-primary-dark);
    --color-background-secondary: var(--color-bg-secondary-dark);
    --color-background-tertiary: var(--color-bg-tertiary-dark);
    --color-background-subtle: var(--color-bg-tertiary-dark);
    --color-primary-light: #1a3a5c;
}
123 src/server/web/static/css/components/buttons.css Normal file
@@ -0,0 +1,123 @@
/**
 * AniWorld - Button Styles
 *
 * All button-related styles including variants,
 * states, and sizes.
 */

.btn {
    display: inline-flex;
    align-items: center;
    gap: var(--spacing-xs);
    padding: var(--spacing-sm) var(--spacing-md);
    border: 1px solid transparent;
    border-radius: var(--border-radius-md);
    font-size: var(--font-size-body);
    font-weight: 500;
    text-decoration: none;
    cursor: pointer;
    transition: all var(--transition-duration) var(--transition-easing);
    background-color: transparent;
    color: var(--color-text-primary);
}

.btn:disabled {
    opacity: 0.6;
    cursor: not-allowed;
}

/* Primary button */
.btn-primary {
    background-color: var(--color-accent);
    color: white;
}

.btn-primary:hover:not(:disabled) {
    background-color: var(--color-accent-hover);
}

.btn-primary:active {
    background-color: var(--color-accent-pressed);
}

/* Secondary button */
.btn-secondary {
    background-color: var(--color-surface);
    border-color: var(--color-border);
    color: var(--color-text-primary);
}

.btn-secondary:hover:not(:disabled) {
    background-color: var(--color-surface-hover);
}

/* Success button */
.btn-success {
    background-color: var(--color-success);
    color: white;
}

.btn-success:hover:not(:disabled) {
    background-color: #0e6b0e;
}

/* Warning button */
.btn-warning {
    background-color: var(--color-warning);
    color: white;
}

.btn-warning:hover:not(:disabled) {
    background-color: #e67e00;
}

/* Danger/Error button */
.btn-danger {
    background-color: var(--color-error);
    color: white;
}

.btn-danger:hover:not(:disabled) {
    background-color: #b52d30;
}

/* Icon button */
.btn-icon {
    padding: var(--spacing-sm);
    min-width: auto;
}

/* Small button */
.btn-small {
    padding: var(--spacing-xs) var(--spacing-sm);
    font-size: var(--font-size-caption);
}

/* Extra small button */
.btn-xs {
    padding: 2px 6px;
    font-size: 0.75em;
}

/* Filter button active state */
.series-filters .btn {
    transition: all 0.2s ease;
}

.series-filters .btn[data-active="true"] {
    background-color: var(--color-primary);
    color: white;
    border-color: var(--color-primary);
    transform: scale(1.02);
    box-shadow: 0 2px 8px rgba(0, 120, 212, 0.3);
}

.series-filters .btn[data-active="true"]:hover {
    background-color: var(--color-primary-dark);
}

/* Dark theme adjustments */
[data-theme="dark"] .series-filters .btn[data-active="true"] {
    background-color: var(--color-primary);
    color: white;
}
271 src/server/web/static/css/components/cards.css Normal file
@@ -0,0 +1,271 @@
/**
 * AniWorld - Card Styles
 *
 * Card and panel component styles including
 * series cards and stat cards.
 */

/* Series Card */
.series-card {
    background-color: var(--color-surface);
    border: 1px solid var(--color-border);
    border-radius: var(--border-radius-lg);
    padding: var(--spacing-lg);
    box-shadow: var(--shadow-card);
    transition: all var(--transition-duration) var(--transition-easing);
    position: relative;
    display: flex;
    flex-direction: column;
    min-height: 120px;
}

.series-card:hover {
    box-shadow: var(--shadow-elevated);
    transform: translateY(-1px);
}

.series-card.selected {
    border-color: var(--color-accent);
    background-color: var(--color-surface-hover);
}

.series-card-header {
    display: flex;
    justify-content: space-between;
    align-items: flex-start;
    margin-bottom: var(--spacing-md);
    position: relative;
}

.series-checkbox {
    width: 18px;
    height: 18px;
    accent-color: var(--color-accent);
}

.series-info h3 {
    margin: 0 0 var(--spacing-xs) 0;
    font-size: var(--font-size-subtitle);
    color: var(--color-text-primary);
    line-height: 1.3;
}

.series-folder {
    font-size: var(--font-size-caption);
    color: var(--color-text-tertiary);
    margin-bottom: var(--spacing-sm);
}

.series-stats {
    display: flex;
    align-items: center;
    gap: var(--spacing-md);
    margin-top: auto;
}

.series-site {
    font-size: var(--font-size-caption);
    color: var(--color-text-tertiary);
}

/* Series Card Status Indicators */
.series-status {
    position: absolute;
    top: var(--spacing-sm);
    right: var(--spacing-sm);
    display: flex;
    align-items: center;
}

.status-missing {
    color: var(--color-warning);
    font-size: 1.2em;
}

.status-complete {
    color: var(--color-success);
    font-size: 1.2em;
}

/* Series Card States */
.series-card.has-missing {
    border-left: 4px solid var(--color-warning);
}

.series-card.complete {
    border-left: 4px solid var(--color-success);
    opacity: 0.8;
}

.series-card.complete .series-checkbox {
    opacity: 0.5;
    cursor: not-allowed;
}

.series-card.complete:not(.selected) {
    background-color: var(--color-background-secondary);
}

/* Dark theme adjustments */
[data-theme="dark"] .series-card.complete:not(.selected) {
    background-color: var(--color-background-tertiary);
}

/* Stat Card */
.stat-card {
    background: var(--color-surface);
    border: 1px solid var(--color-border);
    border-radius: var(--border-radius-lg);
    padding: var(--spacing-lg);
    display: flex;
    align-items: center;
    gap: var(--spacing-lg);
    transition: all var(--transition-duration) var(--transition-easing);
}

.stat-card:hover {
    background: var(--color-surface-hover);
    transform: translateY(-2px);
    box-shadow: var(--shadow-elevated);
}

.stat-icon {
    font-size: 2rem;
    width: 48px;
    height: 48px;
    display: flex;
    align-items: center;
    justify-content: center;
    border-radius: 50%;
    background: rgba(var(--color-primary-rgb), 0.1);
}

.stat-value {
    font-size: var(--font-size-title);
    font-weight: 600;
    color: var(--color-text-primary);
    line-height: 1;
}

.stat-label {
    font-size: var(--font-size-caption);
    color: var(--color-text-secondary);
    text-transform: uppercase;
    letter-spacing: 0.5px;
}

/* Download Card */
.download-card {
    background: var(--color-surface);
    border: 1px solid var(--color-border);
    border-radius: var(--border-radius-lg);
    padding: var(--spacing-lg);
    margin-bottom: var(--spacing-md);
    transition: all var(--transition-duration) var(--transition-easing);
}

.download-card:hover {
    background: var(--color-surface-hover);
    transform: translateX(4px);
}

.download-card.active {
    border-left: 4px solid var(--color-primary);
}

.download-card.completed {
    border-left: 4px solid var(--color-success);
    opacity: 0.8;
}

.download-card.failed {
    border-left: 4px solid var(--color-error);
}

.download-card.pending {
    border-left: 4px solid var(--color-warning);
    position: relative;
}

.download-card.pending.high-priority {
    border-left-color: var(--color-accent);
    background: linear-gradient(90deg, rgba(var(--color-accent-rgb), 0.05) 0%, transparent 10%);
}

.download-header {
    display: flex;
    justify-content: space-between;
    align-items: flex-start;
}

.download-info h4 {
    margin: 0 0 var(--spacing-xs) 0;
    font-size: var(--font-size-subtitle);
    color: var(--color-text-primary);
}

.download-info p {
    margin: 0 0 var(--spacing-xs) 0;
    color: var(--color-text-secondary);
    font-size: var(--font-size-body);
}

.download-info small {
    color: var(--color-text-tertiary);
    font-size: var(--font-size-caption);
}

.download-actions {
    display: flex;
    gap: var(--spacing-xs);
    align-items: center;
}

.priority-indicator {
    color: var(--color-accent);
    margin-right: var(--spacing-sm);
}

/* Queue Position */
.queue-position {
    position: absolute;
    top: var(--spacing-sm);
    left: 48px;
    background: var(--color-warning);
    color: white;
    width: 28px;
    height: 28px;
    border-radius: 50%;
    display: flex;
    align-items: center;
    justify-content: center;
    font-size: var(--font-size-caption);
    font-weight: 600;
}

.download-card.pending .download-info {
    margin-left: 80px;
}

.download-card.pending .download-header {
    padding-left: 0;
}

/* Dark Theme Adjustments for Cards */
[data-theme="dark"] .stat-card {
    background: var(--color-surface-dark);
    border-color: var(--color-border-dark);
}

[data-theme="dark"] .stat-card:hover {
    background: var(--color-surface-hover-dark);
}

[data-theme="dark"] .download-card {
    background: var(--color-surface-dark);
    border-color: var(--color-border-dark);
}

[data-theme="dark"] .download-card:hover {
    background: var(--color-surface-hover-dark);
}
224
src/server/web/static/css/components/forms.css
Normal file
224
src/server/web/static/css/components/forms.css
Normal file
@ -0,0 +1,224 @@
|
|||||||
|
/**
|
||||||
|
* AniWorld - Form Styles
|
||||||
|
*
|
||||||
|
* Form inputs, labels, validation states,
|
||||||
|
 * and form group layouts.
 */

/* Input fields */
.input-field {
    width: 120px;
    padding: var(--spacing-xs) var(--spacing-sm);
    border: 1px solid var(--color-border);
    border-radius: var(--border-radius);
    background: var(--color-background);
    color: var(--color-text-primary);
    font-size: var(--font-size-body);
    transition: border-color var(--animation-duration-fast) var(--animation-easing-standard);
}

.input-field:focus {
    outline: none;
    border-color: var(--color-accent);
}

/* Input groups */
.input-group {
    display: flex;
    align-items: center;
    gap: var(--spacing-xs);
}

.input-group .input-field {
    flex: 1;
    width: auto;
}

.input-group .btn {
    flex-shrink: 0;
}

/* Search input */
.search-input {
    flex: 1;
    padding: var(--spacing-md);
    border: 1px solid var(--color-border);
    border-radius: var(--border-radius-md);
    font-size: var(--font-size-body);
    background-color: var(--color-surface);
    color: var(--color-text-primary);
    transition: all var(--transition-duration) var(--transition-easing);
}

.search-input:focus {
    outline: none;
    border-color: var(--color-accent);
    box-shadow: 0 0 0 1px var(--color-accent);
}

.search-input-group {
    display: flex;
    gap: var(--spacing-sm);
    max-width: 600px;
}

/* Checkbox custom styling */
.checkbox-label {
    display: flex;
    align-items: center;
    gap: var(--spacing-sm);
    cursor: pointer;
    user-select: none;
}

.checkbox-label input[type="checkbox"] {
    display: none;
}

.checkbox-custom {
    display: inline-block;
    width: 18px;
    height: 18px;
    min-width: 18px;
    min-height: 18px;
    flex-shrink: 0;
    border: 2px solid var(--color-border);
    border-radius: 4px;
    background: var(--color-background);
    position: relative;
    transition: all var(--animation-duration-fast) var(--animation-easing-standard);
}

.checkbox-label input[type="checkbox"]:checked + .checkbox-custom {
    background: var(--color-accent);
    border-color: var(--color-accent);
}

.checkbox-label input[type="checkbox"]:checked + .checkbox-custom::after {
    content: '';
    position: absolute;
    left: 4px;
    top: 1px;
    width: 6px;
    height: 10px;
    border: solid white;
    border-width: 0 2px 2px 0;
    transform: rotate(45deg);
}

.checkbox-label:hover .checkbox-custom {
    border-color: var(--color-accent);
}

/* Form groups */
.form-group {
    display: flex;
    flex-direction: column;
    gap: 0.5rem;
}

.form-label {
    font-weight: 500;
    color: var(--color-text);
    font-size: 0.9rem;
}

/* Config item styling */
.config-item {
    margin-bottom: var(--spacing-lg);
}

.config-item:last-child {
    margin-bottom: 0;
}

.config-item label {
    display: block;
    font-weight: 500;
    color: var(--color-text-primary);
    margin-bottom: var(--spacing-xs);
}

.config-value {
    padding: var(--spacing-sm);
    background-color: var(--color-bg-secondary);
    border: 1px solid var(--color-border);
    border-radius: var(--border-radius-md);
    font-family: monospace;
    font-size: var(--font-size-caption);
    color: var(--color-text-secondary);
    word-break: break-all;
}

.config-value input[readonly] {
    background-color: var(--color-bg-secondary);
    cursor: not-allowed;
}

[data-theme="dark"] .config-value input[readonly] {
    background-color: var(--color-bg-secondary-dark);
}

/* Config description */
.config-description {
    font-size: 0.9em;
    color: var(--muted-text);
    margin: 4px 0 8px 0;
    line-height: 1.4;
}

/* Config actions */
.config-actions {
    display: flex;
    gap: var(--spacing-sm);
    margin-top: var(--spacing-md);
    flex-wrap: wrap;
}

.config-actions .btn {
    flex: 1;
    min-width: 140px;
}

/* Validation styles */
.validation-results {
    margin: 12px 0;
    padding: 12px;
    border-radius: 6px;
    border: 1px solid var(--border-color);
    background: var(--card-bg);
}

.validation-results.hidden {
    display: none;
}

.validation-error {
    color: var(--color-error);
    margin: 4px 0;
    font-size: 0.9em;
}

.validation-warning {
    color: var(--color-warning);
    margin: 4px 0;
    font-size: 0.9em;
}

.validation-success {
    color: var(--color-success);
    margin: 4px 0;
    font-size: 0.9em;
}

/* Responsive form adjustments */
@media (max-width: 768px) {
    .config-actions {
        flex-direction: column;
    }

    .config-actions .btn {
        flex: none;
        width: 100%;
    }
}
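The custom checkbox above hides the real `<input>` and draws the box and checkmark with `.checkbox-custom`, so it only works when the input and the span are adjacent siblings inside `.checkbox-label` (the checkmark is driven by the `:checked + .checkbox-custom` selector). A minimal sketch of the markup this implies, built via the DOM; the element structure is inferred from the selectors, not taken from the project's templates:

```ts
// Hypothetical helper: builds the markup assumed by .checkbox-label.
// The input must be immediately followed by the .checkbox-custom span,
// as required by the `:checked + .checkbox-custom` sibling selector.
function createCheckbox(text: string): HTMLLabelElement {
  const label = document.createElement("label");
  label.className = "checkbox-label";

  const input = document.createElement("input");
  input.type = "checkbox"; // hidden by the CSS; clicks on the label still toggle it

  const box = document.createElement("span");
  box.className = "checkbox-custom"; // styled box; ::after draws the checkmark when checked

  label.append(input, box, document.createTextNode(text));
  return label;
}
```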
264 src/server/web/static/css/components/modals.css Normal file
@@ -0,0 +1,264 @@

/**
 * AniWorld - Modal Styles
 *
 * Modal and overlay styles including
 * config modal and confirmation dialogs.
 */

.modal {
    position: fixed;
    top: 0;
    left: 0;
    width: 100%;
    height: 100%;
    z-index: 2000;
    display: flex;
    justify-content: center;
    align-items: center;
}

.modal-overlay {
    position: absolute;
    top: 0;
    left: 0;
    width: 100%;
    height: 100%;
    background-color: rgba(0, 0, 0, 0.5);
}

.modal-content {
    position: relative;
    background-color: var(--color-surface);
    border: 1px solid var(--color-border);
    border-radius: var(--border-radius-lg);
    box-shadow: var(--shadow-elevated);
    max-width: 500px;
    width: 90%;
    max-height: 80vh;
    overflow: hidden;
}

.modal-header {
    display: flex;
    justify-content: space-between;
    align-items: center;
    padding: var(--spacing-lg);
    border-bottom: 1px solid var(--color-border);
}

.modal-header h3 {
    margin: 0;
    font-size: var(--font-size-subtitle);
    color: var(--color-text-primary);
}

.modal-body {
    padding: var(--spacing-lg);
    overflow-y: auto;
}

/* Config Section within modals */
.config-section {
    border-top: 1px solid var(--color-divider);
    margin-top: var(--spacing-lg);
    padding-top: var(--spacing-lg);
}

.config-section h4 {
    margin: 0 0 var(--spacing-md) 0;
    font-size: var(--font-size-subtitle);
    font-weight: 600;
    color: var(--color-text-primary);
}

/* Scheduler info box */
.scheduler-info {
    background: var(--color-background-subtle);
    border-radius: var(--border-radius);
    padding: var(--spacing-md);
    margin: var(--spacing-sm) 0;
}

.info-row {
    display: flex;
    justify-content: space-between;
    align-items: center;
    margin-bottom: var(--spacing-xs);
}

.info-row:last-child {
    margin-bottom: 0;
}

.info-value {
    font-weight: 500;
    color: var(--color-text-secondary);
}

/* Status badge */
.status-badge {
    padding: 2px 8px;
    border-radius: 12px;
    font-size: var(--font-size-caption);
    font-weight: 600;
}

.status-badge.running {
    background: var(--color-accent);
    color: white;
}

.status-badge.stopped {
    background: var(--color-text-disabled);
    color: white;
}

/* Rescan time config */
#rescan-time-config {
    margin-left: var(--spacing-lg);
    opacity: 0.6;
    transition: opacity var(--animation-duration-normal) var(--animation-easing-standard);
}

#rescan-time-config.enabled {
    opacity: 1;
}

/* Loading overlay */
.loading-overlay {
    position: fixed;
    top: 0;
    left: 0;
    width: 100%;
    height: 100%;
    background-color: rgba(0, 0, 0, 0.5);
    display: flex;
    justify-content: center;
    align-items: center;
    z-index: 2000;
}

.loading-spinner {
    text-align: center;
    color: white;
}

.loading-spinner i {
    font-size: 48px;
    margin-bottom: var(--spacing-md);
}

.loading-spinner p {
    margin: 0;
    font-size: var(--font-size-subtitle);
}

/* Backup list */
.backup-list {
    max-height: 200px;
    overflow-y: auto;
    border: 1px solid var(--border-color);
    border-radius: 6px;
    margin: 8px 0;
}

.backup-item {
    display: flex;
    justify-content: space-between;
    align-items: center;
    padding: 8px 12px;
    border-bottom: 1px solid var(--border-color);
    font-size: 0.9em;
}

.backup-item:last-child {
    border-bottom: none;
}

.backup-info {
    flex: 1;
}

.backup-name {
    font-weight: 500;
    color: var(--text-color);
}

.backup-details {
    font-size: 0.8em;
    color: var(--muted-text);
    margin-top: 2px;
}

.backup-actions {
    display: flex;
    gap: 4px;
}

.backup-actions .btn {
    padding: 4px 8px;
    font-size: 0.8em;
}

/* Log files container */
.log-files-container {
    max-height: 200px;
    overflow-y: auto;
    border: 1px solid var(--border-color);
    border-radius: 6px;
    padding: 8px;
    margin-top: 8px;
}

.log-file-item {
    display: flex;
    justify-content: space-between;
    align-items: center;
    padding: 8px;
    border-bottom: 1px solid var(--border-color);
    font-size: 0.9em;
}

.log-file-item:last-child {
    border-bottom: none;
}

.log-file-info {
    flex: 1;
}

.log-file-name {
    font-weight: 500;
    color: var(--text-color);
}

.log-file-details {
    font-size: 0.8em;
    color: var(--muted-text);
    margin-top: 2px;
}

.log-file-actions {
    display: flex;
    gap: 4px;
}

.log-file-actions .btn {
    padding: 4px 8px;
    font-size: 0.8em;
    min-width: auto;
}

.log-file-actions .btn-xs {
    padding: 2px 6px;
    font-size: 0.75em;
}

/* Responsive adjustments */
@media (max-width: 768px) {
    .info-row {
        flex-direction: column;
        align-items: flex-start;
        gap: 4px;
    }
}
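Since `.modal` is a full-viewport flex container with the dimmed `.modal-overlay` layered behind `.modal-content`, showing and hiding reduces to a visibility toggle on the outer element. A minimal sketch, assuming a `#config-modal` element already exists in the page (the id is hypothetical) and using the `hidden` utility class defined in helpers.css further below:

```ts
// Minimal open/close sketch for the .modal structure above.
// Assumes #config-modal wraps a .modal-overlay and a .modal-content.
const modal = document.getElementById("config-modal")!;

function openModal(): void {
  modal.classList.remove("hidden");
}

function closeModal(): void {
  modal.classList.add("hidden");
}

// Clicking the dimmed overlay (but not the content box) closes the modal.
modal.querySelector(".modal-overlay")?.addEventListener("click", closeModal);
```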
218 src/server/web/static/css/components/navigation.css Normal file
@@ -0,0 +1,218 @@

/**
 * AniWorld - Navigation Styles
 *
 * Header, nav, and navigation link styles.
 */

/* Header */
.header {
    background-color: var(--color-surface);
    border-bottom: 1px solid var(--color-border);
    padding: var(--spacing-lg) var(--spacing-xl);
    box-shadow: var(--shadow-card);
    transition: background-color var(--transition-duration) var(--transition-easing);
}

.header-content {
    display: flex;
    justify-content: space-between;
    align-items: center;
    max-width: 1200px;
    margin: 0 auto;
    min-height: 60px;
    position: relative;
    width: 100%;
    box-sizing: border-box;
}

.header-title {
    display: flex;
    align-items: center;
    gap: var(--spacing-md);
    flex-shrink: 1;
    min-width: 150px;
}

.header-title i {
    font-size: var(--font-size-title);
    color: var(--color-accent);
}

.header-title h1 {
    margin: 0;
    font-size: var(--font-size-title);
    font-weight: 600;
    color: var(--color-text-primary);
}

.header-actions {
    display: flex;
    align-items: center;
    gap: var(--spacing-lg);
    flex-shrink: 0;
    flex-wrap: nowrap;
    justify-content: flex-end;
}

/* Main content */
.main-content {
    flex: 1;
    padding: var(--spacing-xl);
    max-width: 1200px;
    margin: 0 auto;
    width: 100%;
}

/* Section headers */
.section-header {
    display: flex;
    justify-content: space-between;
    align-items: center;
    margin-bottom: var(--spacing-lg);
    padding-bottom: var(--spacing-md);
    border-bottom: 1px solid var(--color-border);
}

.section-header h2 {
    display: flex;
    align-items: center;
    gap: var(--spacing-sm);
    margin: 0;
    font-size: var(--font-size-title);
    color: var(--color-text-primary);
}

.section-actions {
    display: flex;
    gap: var(--spacing-sm);
}

/* Series section */
.series-section {
    margin-bottom: var(--spacing-xxl);
}

.series-header {
    display: flex;
    flex-direction: column;
    gap: var(--spacing-lg);
    margin-bottom: var(--spacing-xl);
}

.series-header h2 {
    margin: 0;
    font-size: var(--font-size-title);
    color: var(--color-text-primary);
}

.series-filters {
    display: flex;
    gap: var(--spacing-md);
    margin-bottom: var(--spacing-lg);
}

.series-actions {
    display: flex;
    gap: var(--spacing-md);
}

.series-grid {
    display: grid;
    grid-template-columns: repeat(auto-fill, minmax(300px, 1fr));
    gap: var(--spacing-lg);
}

/* Search section */
.search-section {
    margin-bottom: var(--spacing-xxl);
}

.search-container {
    margin-bottom: var(--spacing-lg);
}

/* Dark theme adjustments */
[data-theme="dark"] .section-header {
    border-bottom-color: var(--color-border-dark);
}

/* Responsive design */
@media (min-width: 768px) {
    .series-header {
        flex-direction: row;
        align-items: center;
        justify-content: space-between;
    }

    .series-filters {
        margin-bottom: 0;
    }
}

@media (max-width: 1024px) {
    .header-title {
        min-width: 120px;
    }

    .header-title h1 {
        font-size: 1.4rem;
    }

    .header-actions {
        gap: var(--spacing-sm);
    }
}

@media (max-width: 768px) {
    .header-content {
        flex-direction: column;
        gap: var(--spacing-md);
        min-height: auto;
    }

    .header-title {
        text-align: center;
        min-width: auto;
        justify-content: center;
    }

    .header-actions {
        justify-content: center;
        flex-wrap: wrap;
        width: 100%;
        gap: var(--spacing-sm);
    }

    .main-content {
        padding: var(--spacing-md);
    }

    .series-header {
        flex-direction: column;
        gap: var(--spacing-md);
        align-items: stretch;
    }

    .series-actions {
        justify-content: center;
    }

    .series-grid {
        grid-template-columns: 1fr;
    }

    .section-header {
        flex-direction: column;
        align-items: stretch;
        gap: var(--spacing-md);
    }

    .download-header {
        flex-direction: column;
        gap: var(--spacing-md);
    }

    .download-actions {
        justify-content: flex-end;
    }
}
148 src/server/web/static/css/components/notifications.css Normal file
@@ -0,0 +1,148 @@

/**
 * AniWorld - Notification Styles
 *
 * Toast notifications, alerts, and messages.
 */

/* Toast container */
.toast-container {
    position: fixed;
    top: var(--spacing-xl);
    right: var(--spacing-xl);
    z-index: 1100;
    display: flex;
    flex-direction: column;
    gap: var(--spacing-sm);
}

/* Toast base */
.toast {
    background-color: var(--color-surface);
    border: 1px solid var(--color-border);
    border-radius: var(--border-radius-lg);
    padding: var(--spacing-md) var(--spacing-lg);
    box-shadow: var(--shadow-elevated);
    min-width: 300px;
    animation: slideIn var(--transition-duration) var(--transition-easing);
}

/* Toast variants */
.toast.success {
    border-left: 4px solid var(--color-success);
}

.toast.error {
    border-left: 4px solid var(--color-error);
}

.toast.warning {
    border-left: 4px solid var(--color-warning);
}

.toast.info {
    border-left: 4px solid var(--color-accent);
}

/* Status panel */
.status-panel {
    position: fixed;
    bottom: var(--spacing-xl);
    right: var(--spacing-xl);
    width: 400px;
    background-color: var(--color-surface);
    border: 1px solid var(--color-border);
    border-radius: var(--border-radius-lg);
    box-shadow: var(--shadow-elevated);
    z-index: 1000;
    transition: all var(--transition-duration) var(--transition-easing);
}

.status-header {
    display: flex;
    justify-content: space-between;
    align-items: center;
    padding: var(--spacing-md) var(--spacing-lg);
    border-bottom: 1px solid var(--color-border);
}

.status-header h3 {
    margin: 0;
    font-size: var(--font-size-subtitle);
    color: var(--color-text-primary);
}

.status-content {
    padding: var(--spacing-lg);
}

.status-message {
    margin-bottom: var(--spacing-md);
    color: var(--color-text-secondary);
}

/* Status indicator */
.status-indicator {
    display: inline-block;
    width: 8px;
    height: 8px;
    border-radius: 50%;
    background-color: var(--color-error);
    margin-right: var(--spacing-xs);
}

.status-indicator.connected {
    background-color: var(--color-success);
}

/* Download controls */
.download-controls {
    display: flex;
    gap: var(--spacing-sm);
    margin-top: var(--spacing-md);
    justify-content: center;
}

/* Empty state */
.empty-state {
    text-align: center;
    padding: var(--spacing-xxl);
    color: var(--color-text-tertiary);
}

.empty-state i {
    font-size: 3rem;
    margin-bottom: var(--spacing-md);
    opacity: 0.5;
}

.empty-state p {
    margin: 0;
    font-size: var(--font-size-subtitle);
}

.empty-state small {
    display: block;
    margin-top: var(--spacing-sm);
    font-size: var(--font-size-small);
    opacity: 0.7;
}

/* Responsive adjustments */
@media (max-width: 768px) {
    .status-panel {
        bottom: var(--spacing-md);
        right: var(--spacing-md);
        left: var(--spacing-md);
        width: auto;
    }

    .toast-container {
        top: var(--spacing-md);
        right: var(--spacing-md);
        left: var(--spacing-md);
    }

    .toast {
        min-width: auto;
    }
}
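The variant classes (`success`, `error`, `warning`, `info`) only change the left border color, so a toast is just a `.toast` element with one variant class appended to the fixed `.toast-container`; the `slideIn` entrance animation is applied automatically by the base rule. A minimal sketch of such a helper; the function name and auto-dismiss timeout are assumptions, not the project's actual API:

```ts
type ToastKind = "success" | "error" | "warning" | "info";

// Hypothetical toast helper: appends a .toast with one variant class
// to .toast-container and removes it after a timeout.
function showToast(message: string, kind: ToastKind = "info", ttlMs = 4000): void {
  const container = document.querySelector(".toast-container");
  if (!container) return;

  const toast = document.createElement("div");
  toast.classList.add("toast", kind); // base rule triggers the slideIn animation
  toast.textContent = message;

  container.appendChild(toast);
  setTimeout(() => toast.remove(), ttlMs);
}
```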
196 src/server/web/static/css/components/progress.css Normal file
@@ -0,0 +1,196 @@

/**
 * AniWorld - Progress Styles
 *
 * Progress bars, loading indicators,
 * and download progress displays.
 */

/* Progress bar base */
.progress-bar {
    width: 100%;
    height: 8px;
    background-color: var(--color-bg-tertiary);
    border-radius: var(--border-radius-sm);
    overflow: hidden;
}

.progress-fill {
    height: 100%;
    background-color: var(--color-accent);
    border-radius: var(--border-radius-sm);
    transition: width var(--transition-duration) var(--transition-easing);
    width: 0%;
}

.progress-text {
    margin-top: var(--spacing-xs);
    text-align: center;
    font-size: var(--font-size-caption);
    color: var(--color-text-secondary);
}

/* Progress container */
.progress-container {
    margin-top: var(--spacing-md);
}

/* Mini progress bar */
.progress-bar-mini {
    width: 80px;
    height: 4px;
    background-color: var(--color-bg-tertiary);
    border-radius: var(--border-radius-sm);
    overflow: hidden;
}

.progress-fill-mini {
    height: 100%;
    background-color: var(--color-accent);
    border-radius: var(--border-radius-sm);
    transition: width var(--transition-duration) var(--transition-easing);
    width: 0%;
}

.progress-text-mini {
    font-size: var(--font-size-caption);
    color: var(--color-text-secondary);
    font-weight: 500;
    min-width: 35px;
}

/* Download progress */
.download-progress {
    display: flex;
    align-items: center;
    gap: var(--spacing-sm);
    min-width: 120px;
    margin-top: var(--spacing-lg);
}

/* Progress bar gradient style */
.download-progress .progress-bar {
    width: 100%;
    height: 8px;
    background: var(--color-border);
    border-radius: 4px;
    overflow: hidden;
    margin-bottom: var(--spacing-sm);
}

.download-progress .progress-fill {
    height: 100%;
    background: linear-gradient(90deg, var(--color-primary), var(--color-accent));
    border-radius: 4px;
    transition: width 0.3s ease;
}

.progress-info {
    display: flex;
    justify-content: space-between;
    align-items: center;
    font-size: var(--font-size-caption);
    color: var(--color-text-secondary);
}

.download-speed {
    color: var(--color-primary);
    font-weight: 500;
}

/* Missing episodes status */
.missing-episodes {
    display: flex;
    align-items: center;
    gap: var(--spacing-xs);
    color: var(--color-text-secondary);
    font-size: var(--font-size-caption);
}

.missing-episodes i {
    color: var(--color-warning);
}

.missing-episodes.has-missing {
    color: var(--color-warning);
    font-weight: 500;
}

.missing-episodes.complete {
    color: var(--color-success);
    font-weight: 500;
}

.missing-episodes.has-missing i {
    color: var(--color-warning);
}

.missing-episodes.complete i {
    color: var(--color-success);
}

/* Speed and ETA section */
.speed-eta-section {
    display: flex;
    justify-content: space-between;
    align-items: center;
    background: var(--color-surface);
    border: 1px solid var(--color-border);
    border-radius: var(--border-radius-lg);
    padding: var(--spacing-lg);
}

.speed-info {
    display: flex;
    gap: var(--spacing-xl);
}

.speed-current,
.speed-average,
.eta-info {
    display: flex;
    flex-direction: column;
    gap: var(--spacing-xs);
}

.speed-info .label,
.eta-info .label {
    font-size: var(--font-size-caption);
    color: var(--color-text-secondary);
    text-transform: uppercase;
}

.speed-info .value,
.eta-info .value {
    font-size: var(--font-size-subtitle);
    font-weight: 500;
    color: var(--color-text-primary);
}

/* Dark theme adjustments */
[data-theme="dark"] .speed-eta-section {
    background: var(--color-surface-dark);
    border-color: var(--color-border-dark);
}

/* Responsive adjustments */
@media (max-width: 768px) {
    .current-download-item {
        flex-direction: column;
        align-items: stretch;
        gap: var(--spacing-sm);
    }

    .download-progress {
        justify-content: space-between;
    }

    .speed-eta-section {
        flex-direction: column;
        gap: var(--spacing-lg);
        text-align: center;
    }

    .speed-info {
        justify-content: center;
    }
}
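Both bar sizes animate via `transition: width`, so a progress update is just an inline width change on the fill element; the CSS does the easing. A minimal sketch of driving the bar from a progress event, for example one delivered over the WebSocket connection; the payload shape (`percent`, `speed`) is an assumption, not the server's actual schema:

```ts
// Minimal sketch: update a progress widget from a progress event.
function updateProgress(root: HTMLElement, percent: number, speed: string): void {
  const fill = root.querySelector<HTMLElement>(".progress-fill");
  const text = root.querySelector<HTMLElement>(".progress-text");
  const speedEl = root.querySelector<HTMLElement>(".download-speed");

  // The width transition declared in the CSS animates the change.
  if (fill) fill.style.width = `${Math.min(100, Math.max(0, percent))}%`;
  if (text) text.textContent = `${percent.toFixed(1)}%`;
  if (speedEl) speedEl.textContent = speed;
}
```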
128 src/server/web/static/css/components/status.css Normal file
@@ -0,0 +1,128 @@

/**
 * AniWorld - Process Status Styles
 *
 * Process status indicators for scan and download operations.
 */

/* Process Status Indicators */
.process-status {
    display: flex;
    gap: var(--spacing-sm);
    align-items: center;
}

.status-indicator {
    display: flex;
    align-items: center;
    gap: var(--spacing-sm);
    padding: var(--spacing-sm) var(--spacing-md);
    background: transparent;
    border-radius: var(--border-radius);
    border: none;
    font-size: var(--font-size-caption);
    color: var(--color-text-secondary);
    transition: all var(--animation-duration-normal) var(--animation-easing-standard);
    min-width: 0;
    flex-shrink: 0;
}

.status-indicator:hover {
    background: transparent;
    color: var(--color-text-primary);
}

.status-indicator i {
    font-size: 24px;
    transition: all var(--animation-duration-normal) var(--animation-easing-standard);
}

/* Status dots */
.status-dot {
    width: 8px;
    height: 8px;
    border-radius: 50%;
    transition: all var(--animation-duration-normal) var(--animation-easing-standard);
}

.status-dot.idle {
    background-color: var(--color-text-disabled);
}

.status-dot.running {
    background-color: var(--color-accent);
    animation: pulse 2s infinite;
}

.status-dot.error {
    background-color: #e74c3c;
}

/* Rescan icon specific styling */
#rescan-status {
    cursor: pointer;
}

#rescan-status i {
    color: var(--color-text-disabled);
}

#rescan-status.running i {
    color: #22c55e;
    animation: iconPulse 2s infinite;
}

#rescan-status.running {
    cursor: pointer;
}

/* Animations */
@keyframes pulse {
    0%,
    100% {
        opacity: 1;
        transform: scale(1);
    }

    50% {
        opacity: 0.5;
        transform: scale(1.2);
    }
}

@keyframes iconPulse {
    0%,
    100% {
        opacity: 1;
        transform: scale(1) rotate(0deg);
    }

    50% {
        opacity: 0.7;
        transform: scale(1.1) rotate(180deg);
    }
}

/* Mobile view */
@media (max-width: 1024px) {
    .process-status {
        gap: 4px;
    }
}

@media (max-width: 768px) {
    .process-status {
        order: -1;
        margin-right: 0;
        margin-bottom: var(--spacing-sm);
    }

    .status-indicator {
        font-size: 11px;
        padding: 6px 8px;
        gap: 4px;
    }

    .status-indicator i {
        font-size: 20px;
    }
}
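Process state is expressed purely through the `idle` / `running` / `error` modifier classes, so a state change in the UI is a class swap. A minimal sketch; the state names come from the class names above, and the usage target mirrors the `#rescan-status` selector:

```ts
type ProcessState = "idle" | "running" | "error";

// Swap the modifier class on a .status-dot (or on #rescan-status itself,
// whose icon pulses and spins while running).
function setProcessState(el: HTMLElement, state: ProcessState): void {
  el.classList.remove("idle", "running", "error");
  el.classList.add(state);
}

// Usage sketch:
// setProcessState(document.getElementById("rescan-status")!, "running");
```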
255 src/server/web/static/css/components/tables.css Normal file
@@ -0,0 +1,255 @@

/**
 * AniWorld - Table Styles
 *
 * Table, list, and queue item styles.
 */

/* Search results */
.search-results {
    background-color: var(--color-surface);
    border: 1px solid var(--color-border);
    border-radius: var(--border-radius-lg);
    padding: var(--spacing-lg);
    box-shadow: var(--shadow-card);
    margin-top: var(--spacing-lg);
}

.search-results h3 {
    margin: 0 0 var(--spacing-md) 0;
    font-size: var(--font-size-subtitle);
    color: var(--color-text-primary);
}

.search-results-list {
    display: flex;
    flex-direction: column;
    gap: var(--spacing-sm);
}

.search-result-item {
    display: flex;
    justify-content: space-between;
    align-items: center;
    padding: var(--spacing-md);
    background-color: var(--color-bg-secondary);
    border-radius: var(--border-radius-md);
    transition: background-color var(--transition-duration) var(--transition-easing);
}

.search-result-item:hover {
    background-color: var(--color-surface-hover);
}

.search-result-name {
    font-weight: 500;
    color: var(--color-text-primary);
}

/* Download Queue Section */
.download-queue-section {
    margin-bottom: var(--spacing-xxl);
    background-color: var(--color-surface);
    border: 1px solid var(--color-border);
    border-radius: var(--border-radius-lg);
    box-shadow: var(--shadow-card);
    overflow: hidden;
}

.queue-header {
    display: flex;
    justify-content: space-between;
    align-items: center;
    padding: var(--spacing-lg);
    background-color: var(--color-bg-secondary);
    border-bottom: 1px solid var(--color-border);
}

.queue-header h2 {
    margin: 0;
    font-size: var(--font-size-subtitle);
    color: var(--color-text-primary);
    display: flex;
    align-items: center;
    gap: var(--spacing-sm);
}

.queue-header i {
    color: var(--color-accent);
}

.queue-progress {
    font-size: var(--font-size-caption);
    color: var(--color-text-secondary);
    font-weight: 500;
}

/* Current download */
.current-download {
    padding: var(--spacing-lg);
    border-bottom: 1px solid var(--color-border);
    background-color: var(--color-surface);
}

.current-download-header {
    margin-bottom: var(--spacing-md);
}

.current-download-header h3 {
    margin: 0;
    font-size: var(--font-size-body);
    color: var(--color-text-primary);
    font-weight: 600;
}

.current-download-item {
    display: flex;
    align-items: center;
    gap: var(--spacing-lg);
    padding: var(--spacing-md);
    background-color: var(--color-bg-secondary);
    border-radius: var(--border-radius-md);
    border-left: 4px solid var(--color-accent);
}

.download-info {
    flex: 1;
}

.serie-name {
    font-weight: 600;
    color: var(--color-text-primary);
    margin-bottom: var(--spacing-xs);
}

.episode-info {
    font-size: var(--font-size-caption);
    color: var(--color-text-secondary);
}

/* Queue list */
.queue-list-container {
    padding: var(--spacing-lg);
}

.queue-list-container h3 {
    margin: 0 0 var(--spacing-md) 0;
    font-size: var(--font-size-body);
    color: var(--color-text-primary);
    font-weight: 600;
}

.queue-list {
    display: flex;
    flex-direction: column;
    gap: var(--spacing-sm);
}

.queue-item {
    display: flex;
    align-items: center;
    gap: var(--spacing-md);
    padding: var(--spacing-sm) var(--spacing-md);
    background-color: var(--color-bg-secondary);
    border-radius: var(--border-radius-md);
    border-left: 3px solid var(--color-divider);
}

.queue-item-index {
    font-size: var(--font-size-caption);
    color: var(--color-text-tertiary);
    font-weight: 600;
    min-width: 20px;
}

.queue-item-name {
    flex: 1;
    color: var(--color-text-secondary);
}

.queue-item-status {
    font-size: var(--font-size-caption);
    color: var(--color-text-tertiary);
}

.queue-empty {
    text-align: center;
    padding: var(--spacing-xl);
    color: var(--color-text-tertiary);
    font-style: italic;
}

/* Stats grid */
.queue-stats-section {
    margin-bottom: var(--spacing-xl);
}

.stats-grid {
    display: grid;
    grid-template-columns: repeat(auto-fit, minmax(200px, 1fr));
    gap: var(--spacing-lg);
    margin-bottom: var(--spacing-lg);
}

/* Drag and Drop Styles */
.draggable-item {
    cursor: move;
    user-select: none;
}

.draggable-item.dragging {
    opacity: 0.5;
    transform: scale(0.98);
    cursor: grabbing;
}

.draggable-item.drag-over {
    border-top: 3px solid var(--color-primary);
    margin-top: 8px;
}

.drag-handle {
    position: absolute;
    left: 8px;
    top: 50%;
    transform: translateY(-50%);
    color: var(--color-text-tertiary);
    cursor: grab;
    font-size: 1.2rem;
    padding: var(--spacing-xs);
    transition: color var(--transition-duration);
}

.drag-handle:hover {
    color: var(--color-primary);
}

.drag-handle:active {
    cursor: grabbing;
}

.sortable-list {
    position: relative;
    min-height: 100px;
}

.pending-queue-list {
    position: relative;
}

/* Responsive adjustments */
@media (max-width: 768px) {
    .queue-item {
        flex-direction: column;
        align-items: stretch;
        gap: var(--spacing-xs);
    }

    .queue-item-index {
        min-width: auto;
    }

    .stats-grid {
        grid-template-columns: repeat(2, 1fr);
        gap: var(--spacing-md);
    }
}
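The drag-and-drop classes are designed around the native HTML5 drag events: `dragging` marks the picked-up row and `drag-over` draws the insertion border above the hovered row. A minimal sketch of the event wiring these classes assume; the actual reorder logic (persisting the new priority order) is left out:

```ts
// Minimal sketch of the class choreography assumed by .draggable-item:
// `dragging` on the dragged row, `drag-over` on the row under the cursor.
function makeDraggable(item: HTMLElement): void {
  item.draggable = true;

  item.addEventListener("dragstart", () => item.classList.add("dragging"));
  item.addEventListener("dragend", () => item.classList.remove("dragging"));

  item.addEventListener("dragover", (e) => {
    e.preventDefault(); // required so the element accepts a drop
    item.classList.add("drag-over"); // shows the 3px insertion border
  });
  item.addEventListener("dragleave", () => item.classList.remove("drag-over"));
  item.addEventListener("drop", () => item.classList.remove("drag-over"));
}
```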
230 src/server/web/static/css/pages/index.css Normal file
@@ -0,0 +1,230 @@

/**
 * AniWorld - Index Page Styles
 *
 * Index/library page specific styles including
 * series grid, search, and scan overlay.
 */

/* Scan Progress Overlay */
.scan-progress-overlay {
    position: fixed;
    top: 0;
    left: 0;
    width: 100%;
    height: 100%;
    background-color: rgba(0, 0, 0, 0.6);
    display: flex;
    justify-content: center;
    align-items: center;
    z-index: 3000;
    opacity: 0;
    visibility: hidden;
    transition: opacity 0.3s ease, visibility 0.3s ease;
}

.scan-progress-overlay.visible {
    opacity: 1;
    visibility: visible;
}

.scan-progress-container {
    background-color: var(--color-surface);
    border: 1px solid var(--color-border);
    border-radius: var(--border-radius-lg);
    box-shadow: var(--shadow-elevated);
    padding: var(--spacing-xxl);
    max-width: 450px;
    width: 90%;
    text-align: center;
    animation: scanProgressSlideIn 0.3s ease;
}

@keyframes scanProgressSlideIn {
    from {
        transform: translateY(-20px);
        opacity: 0;
    }

    to {
        transform: translateY(0);
        opacity: 1;
    }
}

.scan-progress-header {
    margin-bottom: var(--spacing-lg);
}

.scan-progress-header h3 {
    margin: 0;
    font-size: var(--font-size-title);
    color: var(--color-text-primary);
    display: flex;
    align-items: center;
    justify-content: center;
    gap: var(--spacing-sm);
}

.scan-progress-spinner {
    display: inline-block;
    width: 24px;
    height: 24px;
    border: 3px solid var(--color-bg-tertiary);
    border-top-color: var(--color-accent);
    border-radius: 50%;
    animation: scanSpinner 1s linear infinite;
}

@keyframes scanSpinner {
    to {
        transform: rotate(360deg);
    }
}

/* Progress bar for scan */
.scan-progress-bar-container {
    width: 100%;
    height: 8px;
    background-color: var(--color-bg-tertiary);
    border-radius: 4px;
    overflow: hidden;
    margin-bottom: var(--spacing-sm);
}

.scan-progress-bar {
    height: 100%;
    background: linear-gradient(90deg, var(--color-accent), var(--color-accent-hover, var(--color-accent)));
    border-radius: 4px;
    transition: width 0.3s ease;
}

.scan-progress-container.completed .scan-progress-bar {
    background: linear-gradient(90deg, var(--color-success), var(--color-success));
}

.scan-progress-text {
    font-size: var(--font-size-body);
    color: var(--color-text-secondary);
    margin-bottom: var(--spacing-md);
}

.scan-progress-text #scan-current-count {
    font-weight: 600;
    color: var(--color-accent);
}

.scan-progress-text #scan-total-count {
    font-weight: 600;
    color: var(--color-text-primary);
}

.scan-progress-container.completed .scan-progress-text #scan-current-count {
    color: var(--color-success);
}

.scan-progress-stats {
    display: flex;
    justify-content: space-around;
    margin: var(--spacing-lg) 0;
    padding: var(--spacing-md) 0;
    border-top: 1px solid var(--color-border);
    border-bottom: 1px solid var(--color-border);
}

.scan-stat {
    display: flex;
    flex-direction: column;
    align-items: center;
    gap: var(--spacing-xs);
}

.scan-stat-value {
    font-size: var(--font-size-large-title);
    font-weight: 600;
    color: var(--color-accent);
    line-height: 1;
}

.scan-stat-label {
    font-size: var(--font-size-caption);
    color: var(--color-text-secondary);
    text-transform: uppercase;
    letter-spacing: 0.5px;
}

.scan-current-directory {
    margin-top: var(--spacing-md);
    padding: var(--spacing-sm) var(--spacing-md);
    background-color: var(--color-bg-secondary);
    border-radius: var(--border-radius-md);
    font-size: var(--font-size-caption);
    color: var(--color-text-secondary);
    white-space: nowrap;
    overflow: hidden;
    text-overflow: ellipsis;
    max-width: 100%;
}

.scan-current-directory-label {
    font-weight: 500;
    color: var(--color-text-tertiary);
    margin-right: var(--spacing-xs);
}

/* Scan completed state */
.scan-progress-container.completed .scan-progress-spinner {
    display: none;
}

.scan-progress-container.completed .scan-progress-header h3 {
    color: var(--color-success);
}

.scan-completed-icon {
    display: none;
    width: 24px;
    height: 24px;
    color: var(--color-success);
}

.scan-progress-container.completed .scan-completed-icon {
    display: inline-block;
}

.scan-progress-container.completed .scan-stat-value {
    color: var(--color-success);
}

.scan-elapsed-time {
    margin-top: var(--spacing-md);
    font-size: var(--font-size-body);
    color: var(--color-text-secondary);
}

.scan-elapsed-time i {
    margin-right: var(--spacing-xs);
    color: var(--color-text-tertiary);
}

/* Responsive adjustments for scan overlay */
@media (max-width: 768px) {
    .scan-progress-container {
        padding: var(--spacing-lg);
        max-width: 95%;
    }

    .scan-progress-stats {
        flex-direction: column;
        gap: var(--spacing-md);
    }

    .scan-stat {
        flex-direction: row;
        justify-content: space-between;
        width: 100%;
        padding: 0 var(--spacing-md);
    }

    .scan-stat-value {
        font-size: var(--font-size-title);
    }
}
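The scan lifecycle maps onto two class toggles: `visible` on the overlay fades it in, and `completed` on the container swaps the spinner for the checkmark icon and recolors the stats. A minimal sketch; the element lookups and the auto-hide delay are assumptions based on the selectors above:

```ts
// Minimal sketch of the scan overlay lifecycle implied by the CSS.
const overlay = document.querySelector<HTMLElement>(".scan-progress-overlay")!;
const container = overlay.querySelector<HTMLElement>(".scan-progress-container")!;

function startScan(): void {
  container.classList.remove("completed"); // reset to spinner state
  overlay.classList.add("visible");        // fades in via the CSS transition
}

function finishScan(): void {
  container.classList.add("completed");    // checkmark + success coloring
  setTimeout(() => overlay.classList.remove("visible"), 2000); // delay is an assumption
}
```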
168 src/server/web/static/css/pages/login.css Normal file
@@ -0,0 +1,168 @@

/**
 * AniWorld - Login Page Styles
 *
 * Login page specific styles including login card,
 * form elements, and branding.
 */

.login-container {
    min-height: 100vh;
    display: flex;
    align-items: center;
    justify-content: center;
    background: linear-gradient(135deg, var(--color-primary-light) 0%, var(--color-primary) 100%);
    padding: 1rem;
}

.login-card {
    background: var(--color-surface);
    border-radius: 16px;
    padding: 2rem;
    box-shadow: 0 8px 32px rgba(0, 0, 0, 0.1);
    width: 100%;
    max-width: 400px;
    border: 1px solid var(--color-border);
}

.login-header {
    text-align: center;
    margin-bottom: 2rem;
}

.login-header .logo {
    font-size: 3rem;
    color: var(--color-primary);
    margin-bottom: 0.5rem;
}

.login-header h1 {
    margin: 0;
    color: var(--color-text);
    font-size: 1.5rem;
    font-weight: 600;
}

.login-header p {
    margin: 0.5rem 0 0 0;
    color: var(--color-text-secondary);
    font-size: 0.9rem;
}

.login-form {
    display: flex;
    flex-direction: column;
    gap: 1.5rem;
}

/* Password input group */
.password-input-group {
    position: relative;
}

.password-input {
    width: 100%;
    padding: 0.75rem 3rem 0.75rem 1rem;
    border: 2px solid var(--color-border);
    border-radius: 8px;
    font-size: 1rem;
    background: var(--color-background);
    color: var(--color-text);
    transition: all 0.2s ease;
    box-sizing: border-box;
}

.password-input:focus {
    outline: none;
    border-color: var(--color-primary);
    box-shadow: 0 0 0 3px rgba(var(--color-primary-rgb), 0.1);
}

.password-toggle {
    position: absolute;
    right: 0.75rem;
    top: 50%;
    transform: translateY(-50%);
    background: none;
    border: none;
    color: var(--color-text-secondary);
    cursor: pointer;
    padding: 0.25rem;
    display: flex;
    align-items: center;
    justify-content: center;
}

.password-toggle:hover {
    color: var(--color-text-primary);
}

/* Login button */
.login-btn {
    width: 100%;
    padding: 0.875rem;
    background: var(--color-primary);
    color: white;
    border: none;
    border-radius: 8px;
    font-size: 1rem;
    font-weight: 600;
    cursor: pointer;
    transition: all 0.2s ease;
    display: flex;
    align-items: center;
    justify-content: center;
    gap: 0.5rem;
}

.login-btn:hover:not(:disabled) {
    background: var(--color-accent-hover);
    transform: translateY(-1px);
}

.login-btn:disabled {
    opacity: 0.6;
    cursor: not-allowed;
}

/* Error message */
.login-error {
    background: rgba(var(--color-error-rgb, 209, 52, 56), 0.1);
    border: 1px solid var(--color-error);
    border-radius: 8px;
    padding: 0.75rem;
    color: var(--color-error);
    font-size: 0.875rem;
    display: flex;
    align-items: center;
    gap: 0.5rem;
}

/* Remember me checkbox */
.remember-me {
    display: flex;
    align-items: center;
    gap: 0.5rem;
    font-size: 0.875rem;
    color: var(--color-text-secondary);
}

.remember-me input {
    accent-color: var(--color-primary);
}

/* Footer links */
.login-footer {
    margin-top: 1.5rem;
    text-align: center;
    font-size: 0.875rem;
    color: var(--color-text-secondary);
}

.login-footer a {
    color: var(--color-primary);
    text-decoration: none;
}

.login-footer a:hover {
    text-decoration: underline;
}
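`.password-toggle` is absolutely positioned inside the relative `.password-input-group`, sitting over the input's extra right padding (`0.75rem 3rem 0.75rem 1rem`). A minimal sketch of the show/hide behavior the button is styled for; the element ids are hypothetical:

```ts
// Minimal sketch of the visibility toggle assumed by .password-toggle.
const input = document.getElementById("password") as HTMLInputElement;
const toggle = document.getElementById("password-toggle") as HTMLButtonElement;

toggle.addEventListener("click", () => {
  // Flip between masked and plain-text display of the master password.
  input.type = input.type === "password" ? "text" : "password";
});
```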
46 src/server/web/static/css/pages/queue.css Normal file
@@ -0,0 +1,46 @@

/**
 * AniWorld - Queue Page Styles
 *
 * Queue page specific styles for download management.
 */

/* Active downloads section */
.active-downloads-section {
    margin-bottom: var(--spacing-xl);
}

.active-downloads-list {
    min-height: 100px;
}

/* Pending queue section */
.pending-queue-section {
    margin-bottom: var(--spacing-xl);
}

/* Completed downloads section */
.completed-downloads-section {
    margin-bottom: var(--spacing-xl);
}

/* Failed downloads section */
.failed-downloads-section {
    margin-bottom: var(--spacing-xl);
}

/* Queue page text color utilities */
.text-primary {
    color: var(--color-primary);
}

.text-success {
    color: var(--color-success);
}

.text-warning {
    color: var(--color-warning);
}

.text-error {
    color: var(--color-error);
}
File diff suppressed because it is too large
160 src/server/web/static/css/utilities/animations.css Normal file
@@ -0,0 +1,160 @@

/**
 * AniWorld - Animation Styles
 *
 * Keyframes and animation utility classes.
 */

/* Slide in animation */
@keyframes slideIn {
    from {
        transform: translateX(100%);
        opacity: 0;
    }

    to {
        transform: translateX(0);
        opacity: 1;
    }
}

/* Fade in animation */
@keyframes fadeIn {
    from {
        opacity: 0;
    }

    to {
        opacity: 1;
    }
}

/* Fade out animation */
@keyframes fadeOut {
    from {
        opacity: 1;
    }

    to {
        opacity: 0;
    }
}

/* Slide up animation */
@keyframes slideUp {
    from {
        transform: translateY(20px);
        opacity: 0;
    }

    to {
        transform: translateY(0);
        opacity: 1;
    }
}

/* Slide down animation */
@keyframes slideDown {
    from {
        transform: translateY(-20px);
        opacity: 0;
    }

    to {
        transform: translateY(0);
        opacity: 1;
    }
}

/* Scale in animation */
@keyframes scaleIn {
    from {
        transform: scale(0.9);
        opacity: 0;
    }

    to {
        transform: scale(1);
        opacity: 1;
    }
}

/* Spin animation for loading */
@keyframes spin {
    from {
        transform: rotate(0deg);
    }

    to {
        transform: rotate(360deg);
    }
}

/* Bounce animation */
@keyframes bounce {
    0%, 20%, 50%, 80%, 100% {
        transform: translateY(0);
    }

    40% {
        transform: translateY(-10px);
    }

    60% {
        transform: translateY(-5px);
    }
}

/* Pulse animation */
@keyframes pulsate {
    0% {
        transform: scale(1);
        opacity: 1;
    }

    50% {
        transform: scale(1.05);
        opacity: 0.8;
    }

    100% {
        transform: scale(1);
        opacity: 1;
    }
}

/* Animation utility classes */
.animate-slide-in {
    animation: slideIn var(--transition-duration) var(--transition-easing);
}

.animate-fade-in {
    animation: fadeIn var(--transition-duration) var(--transition-easing);
}

.animate-fade-out {
    animation: fadeOut var(--transition-duration) var(--transition-easing);
}

.animate-slide-up {
    animation: slideUp var(--transition-duration) var(--transition-easing);
}

.animate-slide-down {
    animation: slideDown var(--transition-duration) var(--transition-easing);
}

.animate-scale-in {
    animation: scaleIn var(--transition-duration) var(--transition-easing);
}

.animate-spin {
    animation: spin 1s linear infinite;
}

.animate-bounce {
    animation: bounce 1s ease;
}

.animate-pulse {
    animation: pulsate 2s ease-in-out infinite;
}
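Except for the infinite `spin` and `pulse` variants, the `.animate-*` classes run once, so re-triggering the same animation later requires stripping the class after it finishes. A minimal sketch of that pattern:

```ts
// One-shot animation helper: add an .animate-* class and remove it on
// animationend so the same element can be animated again later.
function animateOnce(el: HTMLElement, cls: string): void {
  el.classList.add(cls);
  el.addEventListener("animationend", () => el.classList.remove(cls), { once: true });
}

// Usage sketch:
// animateOnce(document.querySelector(".toast")!, "animate-slide-in");
```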
368
src/server/web/static/css/utilities/helpers.css
Normal file
368
src/server/web/static/css/utilities/helpers.css
Normal file
@ -0,0 +1,368 @@
```css
/**
 * AniWorld - Helper Utilities
 *
 * Utility classes for visibility, spacing, flexbox, and text.
 */

/* Display utilities */
.hidden {
    display: none !important;
}

.visible {
    visibility: visible !important;
}

.invisible {
    visibility: hidden !important;
}

.block {
    display: block !important;
}

.inline-block {
    display: inline-block !important;
}

.inline {
    display: inline !important;
}

.flex {
    display: flex !important;
}

.inline-flex {
    display: inline-flex !important;
}

.grid {
    display: grid !important;
}

/* Flexbox utilities */
.flex-row {
    flex-direction: row !important;
}

.flex-column {
    flex-direction: column !important;
}

.flex-wrap {
    flex-wrap: wrap !important;
}

.flex-nowrap {
    flex-wrap: nowrap !important;
}

.justify-start {
    justify-content: flex-start !important;
}

.justify-end {
    justify-content: flex-end !important;
}

.justify-center {
    justify-content: center !important;
}

.justify-between {
    justify-content: space-between !important;
}

.justify-around {
    justify-content: space-around !important;
}

.align-start {
    align-items: flex-start !important;
}

.align-end {
    align-items: flex-end !important;
}

.align-center {
    align-items: center !important;
}

.align-stretch {
    align-items: stretch !important;
}

.flex-1 {
    flex: 1 !important;
}

.flex-auto {
    flex: auto !important;
}

.flex-none {
    flex: none !important;
}

/* Text alignment */
.text-left {
    text-align: left !important;
}

.text-center {
    text-align: center !important;
}

.text-right {
    text-align: right !important;
}

/* Text transformation */
.text-uppercase {
    text-transform: uppercase !important;
}

.text-lowercase {
    text-transform: lowercase !important;
}

.text-capitalize {
    text-transform: capitalize !important;
}

/* Font weight */
.font-normal {
    font-weight: 400 !important;
}

.font-medium {
    font-weight: 500 !important;
}

.font-semibold {
    font-weight: 600 !important;
}

.font-bold {
    font-weight: 700 !important;
}

/* Margins */
.m-0 {
    margin: 0 !important;
}

.mt-0 {
    margin-top: 0 !important;
}

.mb-0 {
    margin-bottom: 0 !important;
}

.ml-0 {
    margin-left: 0 !important;
}

.mr-0 {
    margin-right: 0 !important;
}

.mb-1 {
    margin-bottom: var(--spacing-xs) !important;
}

.mb-2 {
    margin-bottom: var(--spacing-sm) !important;
}

.mb-3 {
    margin-bottom: var(--spacing-md) !important;
}

.mb-4 {
    margin-bottom: var(--spacing-lg) !important;
}

.mt-1 {
    margin-top: var(--spacing-xs) !important;
}

.mt-2 {
    margin-top: var(--spacing-sm) !important;
}

.mt-3 {
    margin-top: var(--spacing-md) !important;
}

.mt-4 {
    margin-top: var(--spacing-lg) !important;
}

.mx-auto {
    margin-left: auto !important;
    margin-right: auto !important;
}

/* Padding */
.p-0 {
    padding: 0 !important;
}

.p-1 {
    padding: var(--spacing-xs) !important;
}

.p-2 {
    padding: var(--spacing-sm) !important;
}

.p-3 {
    padding: var(--spacing-md) !important;
}

.p-4 {
    padding: var(--spacing-lg) !important;
}

/* Width utilities */
.w-full {
    width: 100% !important;
}

.w-auto {
    width: auto !important;
}

/* Height utilities */
.h-full {
    height: 100% !important;
}

.h-auto {
    height: auto !important;
}

/* Overflow */
.overflow-hidden {
    overflow: hidden !important;
}

.overflow-auto {
    overflow: auto !important;
}

.overflow-scroll {
    overflow: scroll !important;
}

/* Position */
.relative {
    position: relative !important;
}

.absolute {
    position: absolute !important;
}

.fixed {
    position: fixed !important;
}

.sticky {
    position: sticky !important;
}

/* Cursor */
.cursor-pointer {
    cursor: pointer !important;
}

.cursor-not-allowed {
    cursor: not-allowed !important;
}

/* User select */
.select-none {
    user-select: none !important;
}

.select-text {
    user-select: text !important;
}

.select-all {
    user-select: all !important;
}

/* Border radius */
.rounded {
    border-radius: var(--border-radius-md) !important;
}

.rounded-lg {
    border-radius: var(--border-radius-lg) !important;
}

.rounded-full {
    border-radius: 9999px !important;
}

.rounded-none {
    border-radius: 0 !important;
}

/* Shadow */
.shadow {
    box-shadow: var(--shadow-card) !important;
}

.shadow-lg {
    box-shadow: var(--shadow-elevated) !important;
}

.shadow-none {
    box-shadow: none !important;
}

/* Opacity */
.opacity-0 {
    opacity: 0 !important;
}

.opacity-50 {
    opacity: 0.5 !important;
}

.opacity-100 {
    opacity: 1 !important;
}

/* Transition */
.transition {
    transition: all var(--transition-duration) var(--transition-easing) !important;
}

.transition-none {
    transition: none !important;
}

/* Z-index */
.z-0 {
    z-index: 0 !important;
}

.z-10 {
    z-index: 10 !important;
}

.z-50 {
    z-index: 50 !important;
}

.z-100 {
    z-index: 100 !important;
}
```
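Several of these helpers are load-bearing for the application code later in this diff; visibility in particular is driven through the `.hidden` class rather than inline styles. A short sketch of the pattern (the element id matches the one used by `showLoading()` in the `AniWorldApp` class further down):

```js
// Toggle visibility via the .hidden helper instead of style.display.
// 'loading-overlay' is the id used by showLoading()/hideLoading().
const overlay = document.getElementById('loading-overlay');
overlay.classList.remove('hidden'); // show
overlay.classList.add('hidden');    // hide
```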
117 src/server/web/static/css/utilities/responsive.css Normal file
@@ -0,0 +1,117 @@
```css
/**
 * AniWorld - Responsive Styles
 *
 * Media queries and breakpoint-specific styles.
 * Note: Component-specific responsive styles are in their respective files.
 * This file contains global responsive utilities and overrides.
 */

/* Small devices (landscape phones, 576px and up) */
@media (min-width: 576px) {
    .container-sm {
        max-width: 540px;
    }
}

/* Medium devices (tablets, 768px and up) */
@media (min-width: 768px) {
    .container-md {
        max-width: 720px;
    }

    .hide-md-up {
        display: none !important;
    }
}

/* Large devices (desktops, 992px and up) */
@media (min-width: 992px) {
    .container-lg {
        max-width: 960px;
    }

    .hide-lg-up {
        display: none !important;
    }
}

/* Extra large devices (large desktops, 1200px and up) */
@media (min-width: 1200px) {
    .container-xl {
        max-width: 1140px;
    }

    .hide-xl-up {
        display: none !important;
    }
}

/* Hide on small screens */
@media (max-width: 575.98px) {
    .hide-sm-down {
        display: none !important;
    }
}

/* Hide on medium screens and below */
@media (max-width: 767.98px) {
    .hide-md-down {
        display: none !important;
    }
}

/* Hide on large screens and below */
@media (max-width: 991.98px) {
    .hide-lg-down {
        display: none !important;
    }
}

/* Print styles */
@media print {
    .no-print {
        display: none !important;
    }

    .print-only {
        display: block !important;
    }

    body {
        background: white;
        color: black;
    }

    .header,
    .toast-container,
    .status-panel {
        display: none !important;
    }
}

/* Reduced motion preference */
@media (prefers-reduced-motion: reduce) {
    *,
    *::before,
    *::after {
        animation-duration: 0.01ms !important;
        animation-iteration-count: 1 !important;
        transition-duration: 0.01ms !important;
        scroll-behavior: auto !important;
    }
}

/* High contrast mode */
@media (prefers-contrast: high) {
    :root {
        --color-border: #000000;
        --color-text-primary: #000000;
        --color-text-secondary: #333333;
    }

    [data-theme="dark"] {
        --color-border: #ffffff;
        --color-text-primary: #ffffff;
        --color-text-secondary: #cccccc;
    }
}
```
```diff
@@ -26,6 +26,8 @@ class AniWorldApp {
         this.loadSeries();
         this.initTheme();
         this.updateConnectionStatus();
+        // Check scan status on page load (in case socket connect event is delayed)
+        this.checkActiveScanStatus();
     }

     async checkAuthentication() {
```
```diff
@@ -186,12 +188,16 @@ class AniWorldApp {
             console.log('Connected to server');

             // Subscribe to rooms for targeted updates
-            this.socket.join('scan_progress');
-            this.socket.join('download_progress');
+            // Valid rooms: downloads, queue, scan, system, errors
+            this.socket.join('scan');
             this.socket.join('downloads');
+            this.socket.join('queue');

             this.showToast(this.localization.getText('connected-server'), 'success');
             this.updateConnectionStatus();
+
+            // Check if a scan is currently in progress (e.g., after page reload)
+            this.checkActiveScanStatus();
         });

         this.socket.on('disconnect', () => {
```
```diff
@@ -201,19 +207,22 @@ class AniWorldApp {
             this.updateConnectionStatus();
         });

-        // Scan events
-        this.socket.on('scan_started', () => {
-            this.showStatus('Scanning series...', true);
+        // Scan events - handle new detailed scan progress overlay
+        this.socket.on('scan_started', (data) => {
+            console.log('Scan started:', data);
+            this.showScanProgressOverlay(data);
             this.updateProcessStatus('rescan', true);
         });

         this.socket.on('scan_progress', (data) => {
-            this.updateStatus(`Scanning: ${data.folder} (${data.counter})`);
+            console.log('Scan progress:', data);
+            this.updateScanProgressOverlay(data);
         });

         // Handle both 'scan_completed' (legacy) and 'scan_complete' (new backend)
-        const handleScanComplete = () => {
-            this.hideStatus();
+        const handleScanComplete = (data) => {
+            console.log('Scan completed:', data);
+            this.hideScanProgressOverlay(data);
             this.showToast('Scan completed successfully', 'success');
             this.updateProcessStatus('rescan', false);
             this.loadSeries();
```
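For reference, the payload shapes these handlers expect can be read off the overlay methods added later in this diff; the server-side emitters are not part of the changeset, so treat the shapes below as inferred rather than authoritative:

```js
// Inferred from the client-side handlers; the emitting (server) side is
// not shown in this diff, so these example values are assumptions.
const scanStarted = { directory: '/media/anime', total_items: 120 };
const scanProgress = {
    directories_scanned: 42,
    files_found: 37,
    current_directory: '/media/anime/Example Series',
    total_items: 120
};
const scanComplete = {
    total_directories: 120,
    total_files: 98,
    elapsed_seconds: 12.4
};
```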
```diff
@@ -410,6 +419,16 @@ class AniWorldApp {
             this.rescanSeries();
         });

+        // Click on rescan status indicator to reopen scan overlay
+        const rescanStatus = document.getElementById('rescan-status');
+        if (rescanStatus) {
+            rescanStatus.addEventListener('click', (e) => {
+                e.stopPropagation();
+                console.log('Rescan status clicked');
+                this.reopenScanOverlay();
+            });
+        }
+
         // Configuration modal
         document.getElementById('config-btn').addEventListener('click', () => {
             this.showConfigModal();
```
```diff
@@ -564,7 +583,8 @@ class AniWorldApp {
                     site: anime.site,
                     folder: anime.folder,
                     episodeDict: episodeDict,
-                    missing_episodes: totalMissing
+                    missing_episodes: totalMissing,
+                    has_missing: anime.has_missing || totalMissing > 0
                 };
             });
         } else if (data.status === 'success') {
```
```diff
@@ -1008,33 +1028,39 @@ class AniWorldApp {

     async rescanSeries() {
         try {
-            this.showToast('Scanning directory...', 'info');
+            // Show the overlay immediately before making the API call
+            this.showScanProgressOverlay({
+                directory: 'Starting scan...',
+                total_items: 0
+            });
+            this.updateProcessStatus('rescan', true);
+
             const response = await this.makeAuthenticatedRequest('/api/anime/rescan', {
                 method: 'POST'
             });

-            if (!response) return;
+            if (!response) {
+                this.removeScanProgressOverlay();
+                this.updateProcessStatus('rescan', false);
+                return;
+            }
             const data = await response.json();

             // Debug logging
             console.log('Rescan response:', data);
             console.log('Success value:', data.success, 'Type:', typeof data.success);

-            if (data.success === true) {
-                const seriesCount = data.series_count || 0;
-                this.showToast(
-                    `Rescan complete! Found ${seriesCount} series with missing episodes.`,
-                    'success'
-                );
-
-                // Reload the series list to show the updated data
-                await this.loadSeries();
-            } else {
+            // Note: The scan progress will be updated via WebSocket events
+            // The overlay will be closed when scan_completed is received
+            if (data.success !== true) {
+                this.removeScanProgressOverlay();
+                this.updateProcessStatus('rescan', false);
                 this.showToast(`Rescan error: ${data.message}`, 'error');
             }
         } catch (error) {
             console.error('Rescan error:', error);
+            this.removeScanProgressOverlay();
+            this.updateProcessStatus('rescan', false);
             this.showToast('Failed to start rescan', 'error');
         }
     }
```
```diff
@@ -1072,6 +1098,314 @@ class AniWorldApp {
         document.getElementById('status-panel').classList.add('hidden');
     }

+    /**
+     * Show the scan progress overlay with spinner and initial state
+     * @param {Object} data - Scan started event data
+     */
+    showScanProgressOverlay(data) {
+        // Remove existing overlay if present
+        this.removeScanProgressOverlay();
+
+        // Store total items for progress calculation
+        this.scanTotalItems = data?.total_items || 0;
+
+        // Store last scan data for reopening
+        this._lastScanData = data;
+
+        // Create overlay element
+        const overlay = document.createElement('div');
+        overlay.id = 'scan-progress-overlay';
+        overlay.className = 'scan-progress-overlay';
+
+        const totalDisplay = this.scanTotalItems > 0 ? this.scanTotalItems : '...';
+
+        overlay.innerHTML = `
+            <div class="scan-progress-container">
+                <div class="scan-progress-header">
+                    <h3>
+                        <span class="scan-progress-spinner"></span>
+                        <i class="fas fa-check-circle scan-completed-icon"></i>
+                        <span class="scan-title-text">Scanning Library</span>
+                    </h3>
+                </div>
+                <div class="scan-progress-bar-container">
+                    <div class="scan-progress-bar" id="scan-progress-bar" style="width: 0%"></div>
+                </div>
+                <div class="scan-progress-text" id="scan-progress-text">
+                    <span id="scan-current-count">0</span> / <span id="scan-total-count">${totalDisplay}</span> directories
+                </div>
+                <div class="scan-progress-stats">
+                    <div class="scan-stat">
+                        <span class="scan-stat-value" id="scan-directories-count">0</span>
+                        <span class="scan-stat-label">Scanned</span>
+                    </div>
+                    <div class="scan-stat">
+                        <span class="scan-stat-value" id="scan-files-count">0</span>
+                        <span class="scan-stat-label">Series Found</span>
+                    </div>
+                </div>
+                <div class="scan-current-directory" id="scan-current-directory">
+                    <span class="scan-current-directory-label">Current:</span>
+                    <span id="scan-current-path">${this.escapeHtml(data?.directory || 'Initializing...')}</span>
+                </div>
+                <div class="scan-elapsed-time hidden" id="scan-elapsed-time">
+                    <i class="fas fa-clock"></i>
+                    <span id="scan-elapsed-value">0.0s</span>
+                </div>
+            </div>
+        `;
+
+        document.body.appendChild(overlay);
+
+        // Add click-outside-to-close handler
+        overlay.addEventListener('click', (e) => {
+            // Only close if clicking the overlay background, not the container
+            if (e.target === overlay) {
+                this.removeScanProgressOverlay();
+            }
+        });
+
+        // Trigger animation by adding visible class after a brief delay
+        requestAnimationFrame(() => {
+            overlay.classList.add('visible');
+        });
+    }
+
+    /**
+     * Update the scan progress overlay with current progress
+     * @param {Object} data - Scan progress event data
+     */
+    updateScanProgressOverlay(data) {
+        const overlay = document.getElementById('scan-progress-overlay');
+        if (!overlay) return;
+
+        // Update total items if provided (in case it wasn't available at start)
+        if (data.total_items && data.total_items > 0) {
+            this.scanTotalItems = data.total_items;
+            const totalCount = document.getElementById('scan-total-count');
+            if (totalCount) {
+                totalCount.textContent = this.scanTotalItems;
+            }
+        }
+
+        // Update progress bar
+        const progressBar = document.getElementById('scan-progress-bar');
+        if (progressBar && this.scanTotalItems > 0 && data.directories_scanned !== undefined) {
+            const percentage = Math.min(100, (data.directories_scanned / this.scanTotalItems) * 100);
+            progressBar.style.width = `${percentage}%`;
+        }
+
+        // Update current/total count display
+        const currentCount = document.getElementById('scan-current-count');
+        if (currentCount && data.directories_scanned !== undefined) {
+            currentCount.textContent = data.directories_scanned;
+        }
+
+        // Update directories count
+        const dirCount = document.getElementById('scan-directories-count');
+        if (dirCount && data.directories_scanned !== undefined) {
+            dirCount.textContent = data.directories_scanned;
+        }
+
+        // Update files/series count
+        const filesCount = document.getElementById('scan-files-count');
+        if (filesCount && data.files_found !== undefined) {
+            filesCount.textContent = data.files_found;
+        }
+
+        // Update current directory (truncate if too long)
+        const currentPath = document.getElementById('scan-current-path');
+        if (currentPath && data.current_directory) {
+            const maxLength = 50;
+            let displayPath = data.current_directory;
+            if (displayPath.length > maxLength) {
+                displayPath = '...' + displayPath.slice(-maxLength + 3);
+            }
+            currentPath.textContent = displayPath;
+            currentPath.title = data.current_directory; // Full path on hover
+        }
+    }
+
+    /**
+     * Hide the scan progress overlay with completion summary
+     * @param {Object} data - Scan completed event data
+     */
+    hideScanProgressOverlay(data) {
+        const overlay = document.getElementById('scan-progress-overlay');
+        if (!overlay) return;
+
+        const container = overlay.querySelector('.scan-progress-container');
+        if (container) {
+            container.classList.add('completed');
+        }
+
+        // Update title
+        const titleText = overlay.querySelector('.scan-title-text');
+        if (titleText) {
+            titleText.textContent = 'Scan Complete';
+        }
+
+        // Complete the progress bar
+        const progressBar = document.getElementById('scan-progress-bar');
+        if (progressBar) {
+            progressBar.style.width = '100%';
+        }
+
+        // Update final stats
+        if (data) {
+            const dirCount = document.getElementById('scan-directories-count');
+            if (dirCount && data.total_directories !== undefined) {
+                dirCount.textContent = data.total_directories;
+            }
+
+            const filesCount = document.getElementById('scan-files-count');
+            if (filesCount && data.total_files !== undefined) {
+                filesCount.textContent = data.total_files;
+            }
+
+            // Update progress text to show final count
+            const currentCount = document.getElementById('scan-current-count');
+            const totalCount = document.getElementById('scan-total-count');
+            if (currentCount && data.total_directories !== undefined) {
+                currentCount.textContent = data.total_directories;
+            }
+            if (totalCount && data.total_directories !== undefined) {
+                totalCount.textContent = data.total_directories;
+            }
+
+            // Show elapsed time
+            const elapsedTimeEl = document.getElementById('scan-elapsed-time');
+            const elapsedValueEl = document.getElementById('scan-elapsed-value');
+            if (elapsedTimeEl && elapsedValueEl && data.elapsed_seconds !== undefined) {
+                elapsedValueEl.textContent = `${data.elapsed_seconds.toFixed(1)}s`;
+                elapsedTimeEl.classList.remove('hidden');
+            }
+
+            // Update current directory to show completion message
+            const currentPath = document.getElementById('scan-current-path');
+            if (currentPath) {
+                currentPath.textContent = 'Scan finished successfully';
+            }
+        }
+
+        // Auto-dismiss after 3 seconds
+        setTimeout(() => {
+            this.removeScanProgressOverlay();
+        }, 3000);
+    }
+
+    /**
+     * Remove the scan progress overlay from the DOM
+     */
+    removeScanProgressOverlay() {
+        const overlay = document.getElementById('scan-progress-overlay');
+        if (overlay) {
+            overlay.classList.remove('visible');
+            // Wait for fade out animation before removing
+            setTimeout(() => {
+                if (overlay.parentElement) {
+                    overlay.remove();
+                }
+            }, 300);
+        }
+    }
+
+    /**
+     * Reopen the scan progress overlay if a scan is in progress
+     * Called when user clicks on the rescan status indicator
+     */
+    async reopenScanOverlay() {
+        // Check if overlay already exists
+        const existingOverlay = document.getElementById('scan-progress-overlay');
+        if (existingOverlay) {
+            // Overlay is already open, do nothing
+            return;
+        }
+
+        // Check if scan is running via API
+        try {
+            const response = await this.makeAuthenticatedRequest('/api/anime/scan/status');
+            if (!response || !response.ok) {
+                console.log('Could not fetch scan status');
+                return;
+            }
+
+            const data = await response.json();
+            console.log('Scan status for reopen:', data);
+
+            if (data.is_scanning) {
+                // A scan is in progress, show the overlay
+                this.showScanProgressOverlay({
+                    directory: data.directory,
+                    total_items: data.total_items
+                });
+
+                // Update with current progress
+                this.updateScanProgressOverlay({
+                    directories_scanned: data.directories_scanned,
+                    files_found: data.directories_scanned,
+                    current_directory: data.current_directory,
+                    total_items: data.total_items
+                });
+            }
+        } catch (error) {
+            console.error('Error checking scan status for reopen:', error);
+        }
+    }
+
+    /**
+     * Check if a scan is currently in progress (useful after page reload)
+     * and show the progress overlay if so
+     */
+    async checkActiveScanStatus() {
+        try {
+            const response = await this.makeAuthenticatedRequest('/api/anime/scan/status');
+            if (!response || !response.ok) {
+                console.log('Could not fetch scan status, response:', response?.status);
+                return;
+            }
+
+            const data = await response.json();
+            console.log('Scan status check result:', data);
+
+            if (data.is_scanning) {
+                console.log('Scan is active, updating UI indicators');
+
+                // Update the process status indicator FIRST before showing overlay
+                // This ensures the header icon shows the running state immediately
+                this.updateProcessStatus('rescan', true);
+
+                // A scan is in progress, show the overlay
+                this.showScanProgressOverlay({
+                    directory: data.directory,
+                    total_items: data.total_items
+                });
+
+                // Update with current progress
+                this.updateScanProgressOverlay({
+                    directories_scanned: data.directories_scanned,
+                    files_found: data.directories_scanned,
+                    current_directory: data.current_directory,
+                    total_items: data.total_items
+                });
+
+                // Double-check the status indicator was updated
+                const statusElement = document.getElementById('rescan-status');
+                if (statusElement) {
+                    console.log('Rescan status element classes:', statusElement.className);
+                } else {
+                    console.warn('Rescan status element not found in DOM');
+                }
+            } else {
+                console.log('No active scan detected');
+                // Ensure indicator shows idle state
+                this.updateProcessStatus('rescan', false);
+            }
+        } catch (error) {
+            console.error('Error checking scan status:', error);
+        }
+    }
+
     showLoading() {
         document.getElementById('loading-overlay').classList.remove('hidden');
     }
```
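Both `reopenScanOverlay()` and `checkActiveScanStatus()` poll `GET /api/anime/scan/status`; from the fields the client reads, the response presumably looks roughly like this (shape inferred from the client code, since the server handler is not in this diff):

```js
// Assumed response shape for GET /api/anime/scan/status, inferred from
// the fields accessed above (data.is_scanning, data.directory, ...).
const exampleStatus = {
    is_scanning: true,
    directory: '/media/anime',
    total_items: 120,
    directories_scanned: 42,
    current_directory: '/media/anime/Example Series'
};
```

Worth noting: both callers pass `directories_scanned` as `files_found` when rehydrating the overlay, so the "Series Found" counter is only an approximation until the next `scan_progress` event arrives.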
```diff
@@ -1146,10 +1480,16 @@ class AniWorldApp {

     updateProcessStatus(processName, isRunning, hasError = false) {
         const statusElement = document.getElementById(`${processName}-status`);
-        if (!statusElement) return;
+        if (!statusElement) {
+            console.warn(`Process status element not found: ${processName}-status`);
+            return;
+        }

         const statusDot = statusElement.querySelector('.status-dot');
-        if (!statusDot) return;
+        if (!statusDot) {
+            console.warn(`Status dot not found in ${processName}-status element`);
+            return;
+        }

         // Remove all status classes from both dot and element
         statusDot.classList.remove('idle', 'running', 'error');
```
```diff
@@ -1171,6 +1511,8 @@ class AniWorldApp {
             statusElement.classList.add('idle');
             statusElement.title = `${displayName} is idle`;
         }
+
+        console.log(`Process status updated: ${processName} = ${isRunning ? 'running' : (hasError ? 'error' : 'idle')}`);
     }

     async showConfigModal() {
```
74 src/server/web/static/js/index/advanced-config.js Normal file
@@ -0,0 +1,74 @@
```js
/**
 * AniWorld - Advanced Config Module
 *
 * Handles advanced configuration settings like concurrent downloads,
 * timeouts, and debug mode.
 *
 * Dependencies: constants.js, api-client.js, ui-utils.js
 */

var AniWorld = window.AniWorld || {};

AniWorld.AdvancedConfig = (function() {
    'use strict';

    const API = AniWorld.Constants.API;

    /**
     * Load advanced configuration
     */
    async function load() {
        try {
            const response = await AniWorld.ApiClient.get(API.CONFIG_SECTION + '/advanced');
            if (!response) return;

            const data = await response.json();

            if (data.success) {
                const config = data.config;
                document.getElementById('max-concurrent-downloads').value = config.max_concurrent_downloads || 3;
                document.getElementById('provider-timeout').value = config.provider_timeout || 30;
                document.getElementById('enable-debug-mode').checked = config.enable_debug_mode === true;
            }
        } catch (error) {
            console.error('Error loading advanced config:', error);
        }
    }

    /**
     * Save advanced configuration
     */
    async function save() {
        try {
            const config = {
                max_concurrent_downloads: parseInt(document.getElementById('max-concurrent-downloads').value),
                provider_timeout: parseInt(document.getElementById('provider-timeout').value),
                enable_debug_mode: document.getElementById('enable-debug-mode').checked
            };

            const response = await AniWorld.ApiClient.request(API.CONFIG_SECTION + '/advanced', {
                method: 'POST',
                headers: { 'Content-Type': 'application/json' },
                body: JSON.stringify(config)
            });

            if (!response) return;
            const data = await response.json();

            if (data.success) {
                AniWorld.UI.showToast('Advanced configuration saved successfully', 'success');
            } else {
                AniWorld.UI.showToast('Failed to save config: ' + data.error, 'error');
            }
        } catch (error) {
            console.error('Error saving advanced config:', error);
            AniWorld.UI.showToast('Failed to save advanced configuration', 'error');
        }
    }

    // Public API
    return {
        load: load,
        save: save
    };
})();
```
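The module exposes only `load` and `save`; the wiring to the config modal is not part of this diff, but would plausibly look like the following sketch (the `save-advanced-config` element id and `DOMContentLoaded` hook are assumptions):

```js
// Hypothetical wiring - not part of this diff. Assumes the config modal
// contains a button with id 'save-advanced-config'.
document.addEventListener('DOMContentLoaded', () => {
    AniWorld.AdvancedConfig.load();

    const saveBtn = document.getElementById('save-advanced-config');
    if (saveBtn) {
        saveBtn.addEventListener('click', () => AniWorld.AdvancedConfig.save());
    }
});
```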
Some files were not shown because too many files have changed in this diff.