Compare commits
33 Commits
95b7059576...2441730862
| SHA1 |
|---|
| 2441730862 |
| 5c4bd3d7e8 |
| 5c88572ac7 |
| a80bfba873 |
| 64e78bb9b8 |
| ec987eff80 |
| e414a1a358 |
| 8a49db2a10 |
| 2de3317aee |
| ca4bf72fde |
| d5f7b1598f |
| 57c30a0156 |
| 9fce617949 |
| 0b5faeffc9 |
| 18faf3fe91 |
| 4dba4db344 |
| b76ffbf656 |
| f0b9d50f85 |
| 6cdb2eb1e1 |
| 33aeac0141 |
| eaf6bb9957 |
| 3c6d82907d |
| 3be175522f |
| 6ebc2ed2ea |
| fadd4973da |
| 727486795c |
| dbb5701660 |
| 55781a8448 |
| fd76be02fd |
| 4649cf562d |
| 627f8b0cc4 |
| adfbdf56d0 |
| 02764f7e6f |
14 .vscode/launch.json (vendored)
@ -8,7 +8,7 @@
|
||||
"program": "${workspaceFolder}/src/server/fastapi_app.py",
|
||||
"console": "integratedTerminal",
|
||||
"justMyCode": true,
|
||||
"python": "C:\\Users\\lukas\\anaconda3\\envs\\AniWorld\\python.exe",
|
||||
"python": "/home/lukas/miniconda3/envs/AniWorld/bin/python",
|
||||
"env": {
|
||||
"PYTHONPATH": "${workspaceFolder}/src:${workspaceFolder}",
|
||||
"JWT_SECRET_KEY": "your-secret-key-here-debug",
|
||||
@ -30,7 +30,7 @@
|
||||
"type": "debugpy",
|
||||
"request": "launch",
|
||||
"module": "uvicorn",
|
||||
"python": "C:\\Users\\lukas\\anaconda3\\envs\\AniWorld\\python.exe",
|
||||
"python": "/home/lukas/miniconda3/envs/AniWorld/bin/python",
|
||||
"args": [
|
||||
"src.server.fastapi_app:app",
|
||||
"--host",
|
||||
@ -61,7 +61,7 @@
|
||||
"program": "${workspaceFolder}/src/cli/Main.py",
|
||||
"console": "integratedTerminal",
|
||||
"justMyCode": true,
|
||||
"python": "C:\\Users\\lukas\\anaconda3\\envs\\AniWorld\\python.exe",
|
||||
"python": "/home/lukas/miniconda3/envs/AniWorld/bin/python",
|
||||
"env": {
|
||||
"PYTHONPATH": "${workspaceFolder}/src:${workspaceFolder}",
|
||||
"LOG_LEVEL": "DEBUG",
|
||||
@ -79,7 +79,7 @@
|
||||
"type": "debugpy",
|
||||
"request": "launch",
|
||||
"module": "pytest",
|
||||
"python": "C:\\Users\\lukas\\anaconda3\\envs\\AniWorld\\python.exe",
|
||||
"python": "/home/lukas/miniconda3/envs/AniWorld/bin/python",
|
||||
"args": [
|
||||
"${workspaceFolder}/tests",
|
||||
"-v",
|
||||
@ -105,7 +105,7 @@
|
||||
"type": "debugpy",
|
||||
"request": "launch",
|
||||
"module": "pytest",
|
||||
"python": "C:\\Users\\lukas\\anaconda3\\envs\\AniWorld\\python.exe",
|
||||
"python": "/home/lukas/miniconda3/envs/AniWorld/bin/python",
|
||||
"args": [
|
||||
"${workspaceFolder}/tests/unit",
|
||||
"-v",
|
||||
@ -126,7 +126,7 @@
|
||||
"type": "debugpy",
|
||||
"request": "launch",
|
||||
"module": "pytest",
|
||||
"python": "C:\\Users\\lukas\\anaconda3\\envs\\AniWorld\\python.exe",
|
||||
"python": "/home/lukas/miniconda3/envs/AniWorld/bin/python",
|
||||
"args": [
|
||||
"${workspaceFolder}/tests/integration",
|
||||
"-v",
|
||||
@ -150,7 +150,7 @@
|
||||
"type": "debugpy",
|
||||
"request": "launch",
|
||||
"module": "uvicorn",
|
||||
"python": "C:\\Users\\lukas\\anaconda3\\envs\\AniWorld\\python.exe",
|
||||
"python": "/home/lukas/miniconda3/envs/AniWorld/bin/python",
|
||||
"args": [
|
||||
"src.server.fastapi_app:app",
|
||||
"--host",
|
||||
|
||||
215 SERVER_COMMANDS.md (new file)
@ -0,0 +1,215 @@
|
||||
# Server Management Commands
|
||||
|
||||
Quick reference for starting, stopping, and managing the Aniworld server.
|
||||
|
||||
## Start Server
|
||||
|
||||
### Using the start script (Recommended)
|
||||
|
||||
```bash
|
||||
./start_server.sh
|
||||
```
|
||||
|
||||
### Using conda directly
|
||||
|
||||
```bash
|
||||
conda run -n AniWorld python run_server.py
|
||||
```
|
||||
|
||||
### Using uvicorn directly
|
||||
|
||||
```bash
|
||||
conda run -n AniWorld python -m uvicorn src.server.fastapi_app:app --host 127.0.0.1 --port 8000 --reload
|
||||
```
|
||||
|
||||
## Stop Server
|
||||
|
||||
### Using the stop script (Recommended)
|
||||
|
||||
```bash
|
||||
./stop_server.sh
|
||||
```
|
||||
|
||||
### Manual commands
|
||||
|
||||
**Kill uvicorn processes:**
|
||||
|
||||
```bash
|
||||
pkill -f "uvicorn.*fastapi_app:app"
|
||||
```
|
||||
|
||||
**Kill process on port 8000:**
|
||||
|
||||
```bash
|
||||
lsof -ti:8000 | xargs kill -9
|
||||
```
|
||||
|
||||
**Kill run_server.py processes:**
|
||||
|
||||
```bash
|
||||
pkill -f "run_server.py"
|
||||
```
|
||||
|
||||
## Check Server Status
|
||||
|
||||
**Check if port 8000 is in use:**
|
||||
|
||||
```bash
|
||||
lsof -i:8000
|
||||
```
|
||||
|
||||
**Check for running uvicorn processes:**
|
||||
|
||||
```bash
|
||||
ps aux | grep uvicorn
|
||||
```
|
||||
|
||||
**Check server is responding:**
|
||||
|
||||
```bash
|
||||
curl http://127.0.0.1:8000/api/health
|
||||
```
|
||||
|
||||
## Restart Server
|
||||
|
||||
```bash
|
||||
./stop_server.sh && ./start_server.sh
|
||||
```
|
||||
|
||||
## Common Issues
|
||||
|
||||
### "Address already in use" Error
|
||||
|
||||
**Problem:** Port 8000 is already occupied
|
||||
|
||||
**Solution:**
|
||||
|
||||
```bash
|
||||
./stop_server.sh
|
||||
# or
|
||||
lsof -ti:8000 | xargs kill -9
|
||||
```
|
||||
|
||||
### Server not responding
|
||||
|
||||
**Check logs:**
|
||||
|
||||
```bash
|
||||
tail -f logs/app.log
|
||||
```
|
||||
|
||||
**Check if process is running:**
|
||||
|
||||
```bash
|
||||
ps aux | grep uvicorn
|
||||
```
|
||||
|
||||
### Cannot connect to server
|
||||
|
||||
**Verify server is running:**
|
||||
|
||||
```bash
|
||||
curl http://127.0.0.1:8000/api/health
|
||||
```
|
||||
|
||||
**Check firewall:**
|
||||
|
||||
```bash
|
||||
sudo ufw status
|
||||
```
|
||||
|
||||
## Development Mode
|
||||
|
||||
**Run with auto-reload:**
|
||||
|
||||
```bash
|
||||
./start_server.sh # Already includes --reload
|
||||
```
|
||||
|
||||
**Run with custom port:**
|
||||
|
||||
```bash
|
||||
conda run -n AniWorld python -m uvicorn src.server.fastapi_app:app --host 127.0.0.1 --port 8080 --reload
|
||||
```
|
||||
|
||||
**Run with debug logging:**
|
||||
|
||||
```bash
|
||||
export LOG_LEVEL=DEBUG
|
||||
./start_server.sh
|
||||
```
|
||||
|
||||
## Production Mode
|
||||
|
||||
**Run without auto-reload:**
|
||||
|
||||
```bash
|
||||
conda run -n AniWorld python -m uvicorn src.server.fastapi_app:app --host 0.0.0.0 --port 8000 --workers 4
|
||||
```
|
||||
|
||||
**Run with systemd (Linux):**
|
||||
|
||||
```bash
|
||||
sudo systemctl start aniworld
|
||||
sudo systemctl stop aniworld
|
||||
sudo systemctl restart aniworld
|
||||
sudo systemctl status aniworld
|
||||
```
|
||||
|
||||
## URLs
|
||||
|
||||
- **Web Interface:** http://127.0.0.1:8000
|
||||
- **API Documentation:** http://127.0.0.1:8000/api/docs
|
||||
- **Login Page:** http://127.0.0.1:8000/login
|
||||
- **Queue Management:** http://127.0.0.1:8000/queue
|
||||
- **Health Check:** http://127.0.0.1:8000/api/health
|
||||
|
||||
## Default Credentials
|
||||
|
||||
- **Password:** `Hallo123!`
|
||||
|
||||
## Log Files
|
||||
|
||||
- **Application logs:** `logs/app.log`
|
||||
- **Download logs:** `logs/downloads/`
|
||||
- **Error logs:** Check console output or systemd journal
|
||||
|
||||
## Quick Troubleshooting
|
||||
|
||||
| Symptom | Solution |
|
||||
| ------------------------ | ------------------------------------ |
|
||||
| Port already in use | `./stop_server.sh` |
|
||||
| Server won't start | Check `logs/app.log` |
|
||||
| 404 errors | Verify URL and check routing |
|
||||
| WebSocket not connecting | Check server is running and firewall |
|
||||
| Slow responses | Check system resources (`htop`) |
|
||||
| Database errors | Check `data/` directory permissions |
|
||||
|
||||
## Environment Variables
|
||||
|
||||
```bash
|
||||
# Set log level
|
||||
export LOG_LEVEL=DEBUG   # one of DEBUG, INFO, WARNING, ERROR
|
||||
|
||||
# Set server port
|
||||
export PORT=8000
|
||||
|
||||
# Set host
|
||||
export HOST=127.0.0.1
|
||||
|
||||
# Set workers (production)
|
||||
export WORKERS=4
|
||||
```
|
||||
|
||||
## Related Scripts
|
||||
|
||||
- `start_server.sh` - Start the server
|
||||
- `stop_server.sh` - Stop the server
|
||||
- `run_server.py` - Python server runner
|
||||
- `scripts/setup.py` - Initial setup
|
||||
|
||||
## More Information
|
||||
|
||||
- [User Guide](docs/user_guide.md)
|
||||
- [API Reference](docs/api_reference.md)
|
||||
- [Deployment Guide](docs/deployment.md)
|
||||
@ -1,16 +0,0 @@
|
||||
{
|
||||
"created_at": "2025-10-27T20:15:18.690820",
|
||||
"last_updated": "2025-10-27T20:15:18.690826",
|
||||
"download_stats": {
|
||||
"total_downloads": 0,
|
||||
"successful_downloads": 0,
|
||||
"failed_downloads": 0,
|
||||
"total_bytes_downloaded": 0,
|
||||
"average_speed_mbps": 0.0,
|
||||
"success_rate": 0.0,
|
||||
"average_duration_seconds": 0.0
|
||||
},
|
||||
"series_popularity": [],
|
||||
"storage_history": [],
|
||||
"performance_samples": []
|
||||
}
|
||||
@ -17,7 +17,7 @@
|
||||
"keep_days": 30
|
||||
},
|
||||
"other": {
|
||||
"master_password_hash": "$pbkdf2-sha256$29000$hjDm/H8vRehdCyEkRGitVQ$JJC2Bxw8XeNA0NoG/e4rhw6PjZaN588mJ2SDY3ZPFNY",
|
||||
"master_password_hash": "$pbkdf2-sha256$29000$8v4/p1RKyRnDWEspJSTEeA$u8rsOktLvjCgB2XeHrQvcSGj2vq.Gea0rQQt/e6Ygm0",
|
||||
"anime_directory": "/home/lukas/Volume/serien/"
|
||||
},
|
||||
"version": "1.0.0"
|
||||
|
||||
@ -1,24 +0,0 @@
|
||||
{
|
||||
"name": "Aniworld",
|
||||
"data_dir": "data",
|
||||
"scheduler": {
|
||||
"enabled": true,
|
||||
"interval_minutes": 60
|
||||
},
|
||||
"logging": {
|
||||
"level": "INFO",
|
||||
"file": null,
|
||||
"max_bytes": null,
|
||||
"backup_count": 3
|
||||
},
|
||||
"backup": {
|
||||
"enabled": false,
|
||||
"path": "data/backups",
|
||||
"keep_days": 30
|
||||
},
|
||||
"other": {
|
||||
"master_password_hash": "$pbkdf2-sha256$29000$qRWiNCaEEIKQkhKiFOLcWw$P1QqwKEJHzPszsU/nHmIzdxwbTMIV2iC4tbWUuhqZlo",
|
||||
"anime_directory": "/home/lukas/Volume/serien/"
|
||||
},
|
||||
"version": "1.0.0"
|
||||
}
|
||||
@ -1,150 +1,18 @@
|
||||
{
|
||||
"pending": [
|
||||
{
|
||||
"id": "47335663-456f-44b6-a176-aa2c2ab74451",
|
||||
"serie_id": "workflow-series",
|
||||
"serie_name": "Workflow Test Series",
|
||||
"episode": {
|
||||
"season": 1,
|
||||
"episode": 1,
|
||||
"title": null
|
||||
},
|
||||
"status": "pending",
|
||||
"priority": "high",
|
||||
"added_at": "2025-10-27T19:15:24.278322Z",
|
||||
"started_at": null,
|
||||
"completed_at": null,
|
||||
"progress": null,
|
||||
"error": null,
|
||||
"retry_count": 0,
|
||||
"source_url": null
|
||||
},
|
||||
{
|
||||
"id": "665e833d-b4b8-4fb2-810f-5a02ed1b3161",
|
||||
"serie_id": "series-2",
|
||||
"serie_name": "Series 2",
|
||||
"episode": {
|
||||
"season": 1,
|
||||
"episode": 1,
|
||||
"title": null
|
||||
},
|
||||
"status": "pending",
|
||||
"priority": "normal",
|
||||
"added_at": "2025-10-27T19:15:23.825647Z",
|
||||
"started_at": null,
|
||||
"completed_at": null,
|
||||
"progress": null,
|
||||
"error": null,
|
||||
"retry_count": 0,
|
||||
"source_url": null
|
||||
},
|
||||
{
|
||||
"id": "6d2d59b4-c4a7-4056-a386-d49f709f56ec",
|
||||
"serie_id": "series-1",
|
||||
"serie_name": "Series 1",
|
||||
"episode": {
|
||||
"season": 1,
|
||||
"episode": 1,
|
||||
"title": null
|
||||
},
|
||||
"status": "pending",
|
||||
"priority": "normal",
|
||||
"added_at": "2025-10-27T19:15:23.822544Z",
|
||||
"started_at": null,
|
||||
"completed_at": null,
|
||||
"progress": null,
|
||||
"error": null,
|
||||
"retry_count": 0,
|
||||
"source_url": null
|
||||
},
|
||||
{
|
||||
"id": "eb43e2ce-b782-473f-aa5e-b29e07531034",
|
||||
"serie_id": "series-0",
|
||||
"serie_name": "Series 0",
|
||||
"episode": {
|
||||
"season": 1,
|
||||
"episode": 1,
|
||||
"title": null
|
||||
},
|
||||
"status": "pending",
|
||||
"priority": "normal",
|
||||
"added_at": "2025-10-27T19:15:23.817448Z",
|
||||
"started_at": null,
|
||||
"completed_at": null,
|
||||
"progress": null,
|
||||
"error": null,
|
||||
"retry_count": 0,
|
||||
"source_url": null
|
||||
},
|
||||
{
|
||||
"id": "f942fc20-2eb3-44fc-b2e1-5634d3749856",
|
||||
"serie_id": "series-high",
|
||||
"serie_name": "Series High",
|
||||
"episode": {
|
||||
"season": 1,
|
||||
"episode": 1,
|
||||
"title": null
|
||||
},
|
||||
"status": "pending",
|
||||
"priority": "high",
|
||||
"added_at": "2025-10-27T19:15:23.494450Z",
|
||||
"started_at": null,
|
||||
"completed_at": null,
|
||||
"progress": null,
|
||||
"error": null,
|
||||
"retry_count": 0,
|
||||
"source_url": null
|
||||
},
|
||||
{
|
||||
"id": "d91b4625-af9f-4f84-a223-a3a68a743a6f",
|
||||
"serie_id": "test-series-2",
|
||||
"serie_name": "Another Series",
|
||||
"episode": {
|
||||
"season": 1,
|
||||
"episode": 1,
|
||||
"title": null
|
||||
},
|
||||
"status": "pending",
|
||||
"priority": "high",
|
||||
"added_at": "2025-10-27T19:15:23.458331Z",
|
||||
"started_at": null,
|
||||
"completed_at": null,
|
||||
"progress": null,
|
||||
"error": null,
|
||||
"retry_count": 0,
|
||||
"source_url": null
|
||||
},
|
||||
{
|
||||
"id": "020aa6c4-b969-4290-a9f3-3951a0ebf218",
|
||||
"serie_id": "test-series-1",
|
||||
"serie_name": "Test Anime Series",
|
||||
"episode": {
|
||||
"season": 1,
|
||||
"episode": 1,
|
||||
"title": "Episode 1"
|
||||
},
|
||||
"status": "pending",
|
||||
"priority": "normal",
|
||||
"added_at": "2025-10-27T19:15:23.424005Z",
|
||||
"started_at": null,
|
||||
"completed_at": null,
|
||||
"progress": null,
|
||||
"error": null,
|
||||
"retry_count": 0,
|
||||
"source_url": null
|
||||
},
|
||||
{
|
||||
"id": "67a98da0-544d-46c6-865c-0eea068ee47d",
|
||||
"serie_id": "test-series-1",
|
||||
"serie_name": "Test Anime Series",
|
||||
"id": "b8b02c5c-257c-400a-a8b1-2d2559acdaad",
|
||||
"serie_id": "beheneko-the-elf-girls-cat-is-secretly-an-s-ranked-monster",
|
||||
"serie_folder": "beheneko the elf girls cat is secretly an s ranked monster (2025) (2025)",
|
||||
"serie_name": "beheneko the elf girls cat is secretly an s ranked monster (2025) (2025)",
|
||||
"episode": {
|
||||
"season": 1,
|
||||
"episode": 2,
|
||||
"title": "Episode 2"
|
||||
"title": null
|
||||
},
|
||||
"status": "pending",
|
||||
"priority": "normal",
|
||||
"added_at": "2025-10-27T19:15:23.424103Z",
|
||||
"priority": "NORMAL",
|
||||
"added_at": "2025-11-02T14:41:55.086784Z",
|
||||
"started_at": null,
|
||||
"completed_at": null,
|
||||
"progress": null,
|
||||
@ -153,17 +21,18 @@
|
||||
"source_url": null
|
||||
},
|
||||
{
|
||||
"id": "bb811506-a40f-45e0-a517-9d12afa33759",
|
||||
"serie_id": "series-normal",
|
||||
"serie_name": "Series Normal",
|
||||
"id": "e2dfbb04-b538-4635-92c3-1a967f7eef34",
|
||||
"serie_id": "beheneko-the-elf-girls-cat-is-secretly-an-s-ranked-monster",
|
||||
"serie_folder": "beheneko the elf girls cat is secretly an s ranked monster (2025) (2025)",
|
||||
"serie_name": "beheneko the elf girls cat is secretly an s ranked monster (2025) (2025)",
|
||||
"episode": {
|
||||
"season": 1,
|
||||
"episode": 1,
|
||||
"episode": 3,
|
||||
"title": null
|
||||
},
|
||||
"status": "pending",
|
||||
"priority": "normal",
|
||||
"added_at": "2025-10-27T19:15:23.496680Z",
|
||||
"priority": "NORMAL",
|
||||
"added_at": "2025-11-02T14:41:55.086820Z",
|
||||
"started_at": null,
|
||||
"completed_at": null,
|
||||
"progress": null,
|
||||
@ -172,245 +41,18 @@
|
||||
"source_url": null
|
||||
},
|
||||
{
|
||||
"id": "2f8e6e85-7a1c-4d9b-aeaf-f4c9da6de8da",
|
||||
"serie_id": "series-low",
|
||||
"serie_name": "Series Low",
|
||||
"id": "8740a24e-7d49-4512-9e5f-328f5f4f61b1",
|
||||
"serie_id": "beheneko-the-elf-girls-cat-is-secretly-an-s-ranked-monster",
|
||||
"serie_folder": "beheneko the elf girls cat is secretly an s ranked monster (2025) (2025)",
|
||||
"serie_name": "beheneko the elf girls cat is secretly an s ranked monster (2025) (2025)",
|
||||
"episode": {
|
||||
"season": 1,
|
||||
"episode": 1,
|
||||
"episode": 4,
|
||||
"title": null
|
||||
},
|
||||
"status": "pending",
|
||||
"priority": "low",
|
||||
"added_at": "2025-10-27T19:15:23.498731Z",
|
||||
"started_at": null,
|
||||
"completed_at": null,
|
||||
"progress": null,
|
||||
"error": null,
|
||||
"retry_count": 0,
|
||||
"source_url": null
|
||||
},
|
||||
{
|
||||
"id": "885b8873-8a97-439d-b2f3-93d50828baad",
|
||||
"serie_id": "test-series",
|
||||
"serie_name": "Test Series",
|
||||
"episode": {
|
||||
"season": 1,
|
||||
"episode": 1,
|
||||
"title": null
|
||||
},
|
||||
"status": "pending",
|
||||
"priority": "normal",
|
||||
"added_at": "2025-10-27T19:15:23.746489Z",
|
||||
"started_at": null,
|
||||
"completed_at": null,
|
||||
"progress": null,
|
||||
"error": null,
|
||||
"retry_count": 0,
|
||||
"source_url": null
|
||||
},
|
||||
{
|
||||
"id": "15711557-66d2-4b7c-90f5-17600dfb0e40",
|
||||
"serie_id": "test-series",
|
||||
"serie_name": "Test Series",
|
||||
"episode": {
|
||||
"season": 1,
|
||||
"episode": 1,
|
||||
"title": null
|
||||
},
|
||||
"status": "pending",
|
||||
"priority": "normal",
|
||||
"added_at": "2025-10-27T19:15:23.860548Z",
|
||||
"started_at": null,
|
||||
"completed_at": null,
|
||||
"progress": null,
|
||||
"error": null,
|
||||
"retry_count": 0,
|
||||
"source_url": null
|
||||
},
|
||||
{
|
||||
"id": "e3b0ade0-b4bb-414e-a65d-9593dd3b27b9",
|
||||
"serie_id": "invalid-series",
|
||||
"serie_name": "Invalid Series",
|
||||
"episode": {
|
||||
"season": 99,
|
||||
"episode": 99,
|
||||
"title": null
|
||||
},
|
||||
"status": "pending",
|
||||
"priority": "normal",
|
||||
"added_at": "2025-10-27T19:15:23.938644Z",
|
||||
"started_at": null,
|
||||
"completed_at": null,
|
||||
"progress": null,
|
||||
"error": null,
|
||||
"retry_count": 0,
|
||||
"source_url": null
|
||||
},
|
||||
{
|
||||
"id": "41f5ce9e-f20c-4ad6-b074-ff06787463d5",
|
||||
"serie_id": "test-series",
|
||||
"serie_name": "Test Series",
|
||||
"episode": {
|
||||
"season": 1,
|
||||
"episode": 1,
|
||||
"title": null
|
||||
},
|
||||
"status": "pending",
|
||||
"priority": "normal",
|
||||
"added_at": "2025-10-27T19:15:23.973361Z",
|
||||
"started_at": null,
|
||||
"completed_at": null,
|
||||
"progress": null,
|
||||
"error": null,
|
||||
"retry_count": 0,
|
||||
"source_url": null
|
||||
},
|
||||
{
|
||||
"id": "3c84fcc6-3aa4-4531-bcc8-296c7eb36430",
|
||||
"serie_id": "series-4",
|
||||
"serie_name": "Series 4",
|
||||
"episode": {
|
||||
"season": 1,
|
||||
"episode": 1,
|
||||
"title": null
|
||||
},
|
||||
"status": "pending",
|
||||
"priority": "normal",
|
||||
"added_at": "2025-10-27T19:15:24.075622Z",
|
||||
"started_at": null,
|
||||
"completed_at": null,
|
||||
"progress": null,
|
||||
"error": null,
|
||||
"retry_count": 0,
|
||||
"source_url": null
|
||||
},
|
||||
{
|
||||
"id": "650324c2-7028-46fb-bceb-9ed756f514c8",
|
||||
"serie_id": "series-3",
|
||||
"serie_name": "Series 3",
|
||||
"episode": {
|
||||
"season": 1,
|
||||
"episode": 1,
|
||||
"title": null
|
||||
},
|
||||
"status": "pending",
|
||||
"priority": "normal",
|
||||
"added_at": "2025-10-27T19:15:24.076679Z",
|
||||
"started_at": null,
|
||||
"completed_at": null,
|
||||
"progress": null,
|
||||
"error": null,
|
||||
"retry_count": 0,
|
||||
"source_url": null
|
||||
},
|
||||
{
|
||||
"id": "8782d952-25c3-4907-85eb-205c216f0b35",
|
||||
"serie_id": "series-2",
|
||||
"serie_name": "Series 2",
|
||||
"episode": {
|
||||
"season": 1,
|
||||
"episode": 1,
|
||||
"title": null
|
||||
},
|
||||
"status": "pending",
|
||||
"priority": "normal",
|
||||
"added_at": "2025-10-27T19:15:24.077499Z",
|
||||
"started_at": null,
|
||||
"completed_at": null,
|
||||
"progress": null,
|
||||
"error": null,
|
||||
"retry_count": 0,
|
||||
"source_url": null
|
||||
},
|
||||
{
|
||||
"id": "ba2e0be5-3d11-47df-892b-7df465824419",
|
||||
"serie_id": "series-1",
|
||||
"serie_name": "Series 1",
|
||||
"episode": {
|
||||
"season": 1,
|
||||
"episode": 1,
|
||||
"title": null
|
||||
},
|
||||
"status": "pending",
|
||||
"priority": "normal",
|
||||
"added_at": "2025-10-27T19:15:24.078333Z",
|
||||
"started_at": null,
|
||||
"completed_at": null,
|
||||
"progress": null,
|
||||
"error": null,
|
||||
"retry_count": 0,
|
||||
"source_url": null
|
||||
},
|
||||
{
|
||||
"id": "7a64b375-aaad-494d-bcd1-1f2ae5c421f4",
|
||||
"serie_id": "series-0",
|
||||
"serie_name": "Series 0",
|
||||
"episode": {
|
||||
"season": 1,
|
||||
"episode": 1,
|
||||
"title": null
|
||||
},
|
||||
"status": "pending",
|
||||
"priority": "normal",
|
||||
"added_at": "2025-10-27T19:15:24.079175Z",
|
||||
"started_at": null,
|
||||
"completed_at": null,
|
||||
"progress": null,
|
||||
"error": null,
|
||||
"retry_count": 0,
|
||||
"source_url": null
|
||||
},
|
||||
{
|
||||
"id": "c532886f-6dc2-45fa-92dd-3d46ef62a692",
|
||||
"serie_id": "persistent-series",
|
||||
"serie_name": "Persistent Series",
|
||||
"episode": {
|
||||
"season": 1,
|
||||
"episode": 1,
|
||||
"title": null
|
||||
},
|
||||
"status": "pending",
|
||||
"priority": "normal",
|
||||
"added_at": "2025-10-27T19:15:24.173243Z",
|
||||
"started_at": null,
|
||||
"completed_at": null,
|
||||
"progress": null,
|
||||
"error": null,
|
||||
"retry_count": 0,
|
||||
"source_url": null
|
||||
},
|
||||
{
|
||||
"id": "0e6d4e1e-7714-4fb1-9ad1-3458c9c6d4e6",
|
||||
"serie_id": "ws-series",
|
||||
"serie_name": "WebSocket Series",
|
||||
"episode": {
|
||||
"season": 1,
|
||||
"episode": 1,
|
||||
"title": null
|
||||
},
|
||||
"status": "pending",
|
||||
"priority": "normal",
|
||||
"added_at": "2025-10-27T19:15:24.241585Z",
|
||||
"started_at": null,
|
||||
"completed_at": null,
|
||||
"progress": null,
|
||||
"error": null,
|
||||
"retry_count": 0,
|
||||
"source_url": null
|
||||
},
|
||||
{
|
||||
"id": "f10196c8-f093-4a15-a498-72c3bfe6f735",
|
||||
"serie_id": "pause-test",
|
||||
"serie_name": "Pause Test Series",
|
||||
"episode": {
|
||||
"season": 1,
|
||||
"episode": 1,
|
||||
"title": null
|
||||
},
|
||||
"status": "pending",
|
||||
"priority": "normal",
|
||||
"added_at": "2025-10-27T19:15:24.426637Z",
|
||||
"priority": "NORMAL",
|
||||
"added_at": "2025-11-02T14:41:55.086860Z",
|
||||
"started_at": null,
|
||||
"completed_at": null,
|
||||
"progress": null,
|
||||
@ -421,5 +63,5 @@
|
||||
],
|
||||
"active": [],
|
||||
"failed": [],
|
||||
"timestamp": "2025-10-27T19:15:24.426898+00:00"
|
||||
"timestamp": "2025-11-02T14:42:15.345939+00:00"
|
||||
}
|
||||
@ -35,22 +35,15 @@ Added the following endpoints to `/src/server/api/anime.py`:
|
||||
- Calls `SeriesApp.Download()` with folder list
|
||||
- Used when user selects multiple series and clicks download
|
||||
|
||||
#### `/api/v1/anime/process/locks` (GET)
|
||||
|
||||
- Returns current lock status for rescan and download processes
|
||||
- Response: `{success: boolean, locks: {rescan: {is_locked: boolean}, download: {is_locked: boolean}}}`
|
||||
- Used to update UI status indicators and disable buttons during operations
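
For illustration, a client-side poll of this endpoint might look like the sketch below. The base URL, timeout, and Bearer-token header are assumptions; the response shape follows the description above.

```python
import requests

BASE_URL = "http://127.0.0.1:8000"  # assumed local dev server
TOKEN = "..."  # JWT obtained from the login endpoint (assumed)

# Query the current rescan/download lock status.
resp = requests.get(
    f"{BASE_URL}/api/v1/anime/process/locks",
    headers={"Authorization": f"Bearer {TOKEN}"},
    timeout=10,
)
resp.raise_for_status()
data = resp.json()

if data.get("success"):
    locks = data["locks"]
    # Disable UI actions while either process holds its lock.
    rescan_busy = locks["rescan"]["is_locked"]
    download_busy = locks["download"]["is_locked"]
    print(f"rescan locked: {rescan_busy}, download locked: {download_busy}")
```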
|
||||
|
||||
### 2. Updated Frontend API Calls
|
||||
|
||||
Modified `/src/server/web/static/js/app.js` to use correct endpoint paths:
|
||||
|
||||
| Old Path | New Path | Purpose |
|
||||
| --------------------------- | ----------------------------- | ------------------------- |
|
||||
| ----------------- | ------------------------ | ------------------------- |
|
||||
| `/api/add_series` | `/api/v1/anime/add` | Add new series |
|
||||
| `/api/download` | `/api/v1/anime/download` | Download selected folders |
|
||||
| `/api/status` | `/api/v1/anime/status` | Get library status |
|
||||
| `/api/process/locks/status` | `/api/v1/anime/process/locks` | Check process locks |
|
||||
|
||||
### 3. Verified Existing Endpoints
|
||||
|
||||
|
||||
@ -1,169 +0,0 @@
|
||||
# Logging Implementation Summary
|
||||
|
||||
## What Was Implemented
|
||||
|
||||
### 1. Core Logging Infrastructure (`src/infrastructure/logging/`)
|
||||
|
||||
- **`logger.py`**: Main logging configuration module
|
||||
|
||||
- `setup_logging()`: Configures both console and file handlers
|
||||
- `get_logger()`: Retrieves logger instances for specific modules
|
||||
- Follows Python logging best practices with proper formatters
|
||||
|
||||
- **`uvicorn_config.py`**: Uvicorn-specific logging configuration
|
||||
|
||||
- Custom logging configuration dictionary for uvicorn
|
||||
- Ensures uvicorn logs are captured in both console and file
|
||||
- Configures multiple loggers (uvicorn, uvicorn.error, uvicorn.access, aniworld)
|
||||
|
||||
- **`__init__.py`**: Package initialization
|
||||
- Exports public API: `setup_logging`, `get_logger`, `get_uvicorn_log_config`
|
||||
|
||||
### 2. FastAPI Integration
|
||||
|
||||
Updated `src/server/fastapi_app.py` to:
|
||||
|
||||
- Import and use the logging infrastructure
|
||||
- Call `setup_logging()` during application startup (in `lifespan()`)
|
||||
- Replace all `print()` statements with proper logger calls
|
||||
- Use lazy formatting (`logger.info("Message: %s", value)`)
|
||||
|
||||
### 3. Startup Scripts
|
||||
|
||||
- **`run_server.py`**: Python startup script
|
||||
|
||||
- Uses the custom uvicorn logging configuration
|
||||
- Recommended way to start the server
|
||||
|
||||
- **`start_server.sh`**: Bash startup script
|
||||
- Wrapper around `run_server.py`
|
||||
- Made executable with proper shebang
|
||||
|
||||
### 4. Documentation
|
||||
|
||||
- **`docs/logging.md`**: Comprehensive logging guide
|
||||
- How to run the server
|
||||
- Log file locations
|
||||
- Log format examples
|
||||
- Troubleshooting guide
|
||||
- Programmatic usage examples
|
||||
|
||||
## Log Outputs
|
||||
|
||||
### Console Output
|
||||
|
||||
```
|
||||
INFO: Starting FastAPI application...
|
||||
INFO: Loaded anime_directory from config: /home/lukas/Volume/serien/
|
||||
INFO: Server running on http://127.0.0.1:8000
|
||||
INFO: API documentation available at http://127.0.0.1:8000/api/docs
|
||||
```
|
||||
|
||||
### File Output (`logs/fastapi_app.log`)
|
||||
|
||||
```
|
||||
2025-10-25 17:31:19 - aniworld - INFO - ============================================================
|
||||
2025-10-25 17:31:19 - aniworld - INFO - Logging configured successfully
|
||||
2025-10-25 17:31:19 - aniworld - INFO - Log level: INFO
|
||||
2025-10-25 17:31:19 - aniworld - INFO - Log file: /home/lukas/Volume/repo/Aniworld/logs/fastapi_app.log
|
||||
2025-10-25 17:31:19 - aniworld - INFO - ============================================================
|
||||
2025-10-25 17:31:19 - aniworld - INFO - Starting FastAPI application...
|
||||
2025-10-25 17:31:19 - aniworld - INFO - Loaded anime_directory from config: /home/lukas/Volume/serien/
|
||||
2025-10-25 17:31:19 - src.core.SeriesApp - INFO - Initializing SeriesApp...
|
||||
2025-10-25 17:31:19 - src.core.SerieScanner - INFO - Initialized SerieScanner...
|
||||
2025-10-25 17:31:19 - aniworld - INFO - SeriesApp initialized with directory: /home/lukas/Volume/serien/
|
||||
2025-10-25 17:31:19 - aniworld - INFO - FastAPI application started successfully
|
||||
2025-10-25 17:31:19 - aniworld - INFO - Server running on http://127.0.0.1:8000
|
||||
2025-10-25 17:31:19 - aniworld - INFO - API documentation available at http://127.0.0.1:8000/api/docs
|
||||
```
|
||||
|
||||
## How to Use
|
||||
|
||||
### Starting the Server
|
||||
|
||||
**Recommended:**
|
||||
|
||||
```bash
|
||||
conda run -n AniWorld python run_server.py
|
||||
```
|
||||
|
||||
**Alternative:**
|
||||
|
||||
```bash
|
||||
./start_server.sh
|
||||
```
|
||||
|
||||
**View logs in real-time:**
|
||||
|
||||
```bash
|
||||
tail -f logs/fastapi_app.log
|
||||
```
|
||||
|
||||
### In Code
|
||||
|
||||
```python
|
||||
from src.infrastructure.logging import get_logger
|
||||
|
||||
logger = get_logger(__name__)
|
||||
|
||||
logger.info("Message: %s", value)
|
||||
logger.warning("Warning: %s", warning_msg)
|
||||
logger.error("Error occurred", exc_info=True)
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
Set log level via environment variable or `.env` file:
|
||||
|
||||
```bash
|
||||
export LOG_LEVEL=INFO # or DEBUG, WARNING, ERROR
|
||||
```
|
||||
|
||||
## Features
|
||||
|
||||
✅ **Console logging**: Colored, easy-to-read format
|
||||
✅ **File logging**: Detailed with timestamps and logger names
|
||||
✅ **Automatic log directory creation**: `logs/` created if missing
|
||||
✅ **Uvicorn integration**: All uvicorn logs captured
|
||||
✅ **Multiple loggers**: Different loggers for different modules
|
||||
✅ **Configurable log level**: Via environment variable
|
||||
✅ **Proper formatting**: Uses lazy formatting for performance
|
||||
✅ **Startup/shutdown logging**: Clear application lifecycle logs
|
||||
✅ **Error tracebacks**: Full exception context with `exc_info=True`
|
||||
|
||||
## Files Created/Modified
|
||||
|
||||
### Created:
|
||||
|
||||
- `src/infrastructure/logging/logger.py`
|
||||
- `src/infrastructure/logging/uvicorn_config.py`
|
||||
- `src/infrastructure/logging/__init__.py`
|
||||
- `run_server.py`
|
||||
- `start_server.sh`
|
||||
- `docs/logging.md`
|
||||
- `docs/logging_implementation_summary.md` (this file)
|
||||
|
||||
### Modified:
|
||||
|
||||
- `src/server/fastapi_app.py`: Integrated logging throughout
|
||||
|
||||
## Testing
|
||||
|
||||
The implementation has been tested and verified:
|
||||
|
||||
- ✅ Log file created at `logs/fastapi_app.log`
|
||||
- ✅ Startup messages logged correctly
|
||||
- ✅ Application configuration loaded and logged
|
||||
- ✅ Uvicorn logs captured
|
||||
- ✅ File watching events logged
|
||||
- ✅ Shutdown messages logged
|
||||
|
||||
## Next Steps
|
||||
|
||||
Consider adding:
|
||||
|
||||
1. **Log rotation**: Use `RotatingFileHandler` to prevent log files from growing too large (a minimal sketch follows after this list)
|
||||
2. **Structured logging**: Use `structlog` for JSON-formatted logs
|
||||
3. **Log aggregation**: Send logs to a centralized logging service
|
||||
4. **Performance monitoring**: Add timing logs for slow operations
|
||||
5. **Request logging middleware**: Log all HTTP requests/responses
|
||||
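
As a starting point for the log-rotation item above, a minimal sketch using the standard library could look like this. The log file path and message format follow this document; the size limit, backup count, and helper name are illustrative assumptions.

```python
import logging
from logging.handlers import RotatingFileHandler
from pathlib import Path


def add_rotating_file_handler(logger_name: str = "aniworld") -> None:
    """Attach a size-based rotating handler (illustrative values)."""
    Path("logs").mkdir(exist_ok=True)  # the app already creates logs/ on startup
    handler = RotatingFileHandler(
        "logs/fastapi_app.log",
        maxBytes=10 * 1024 * 1024,  # 10 MB per file -- illustrative
        backupCount=5,              # keep five rotated files -- illustrative
    )
    handler.setFormatter(
        logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
    )
    logging.getLogger(logger_name).addHandler(handler)
```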
450 docs/progress_service_architecture.md (new file)
@ -0,0 +1,450 @@
|
||||
# Progress Service Architecture
|
||||
|
||||
## Overview
|
||||
|
||||
The ProgressService serves as the **single source of truth** for all real-time progress tracking in the Aniworld application. This architecture follows a clean, decoupled design where progress updates flow through a well-defined pipeline.
|
||||
|
||||
## Architecture Diagram
|
||||
|
||||
```
|
||||
┌─────────────┐
|
||||
│ SeriesApp │ ← Core download/scan logic
|
||||
└──────┬──────┘
|
||||
│ Events (download_status, scan_status)
|
||||
▼
|
||||
┌─────────────────┐
|
||||
│ AnimeService │ ← Subscribes to SeriesApp events
|
||||
└────────┬────────┘
|
||||
│ Forwards events
|
||||
▼
|
||||
┌──────────────────┐
|
||||
│ ProgressService │ ← Single source of truth for progress
|
||||
└────────┬─────────┘
|
||||
│ Emits events to subscribers
|
||||
▼
|
||||
┌──────────────────┐
|
||||
│ WebSocketService │ ← Subscribes to progress events
|
||||
└──────────────────┘
|
||||
│
|
||||
▼
|
||||
Connected clients receive real-time updates
|
||||
```
|
||||
|
||||
## Components
|
||||
|
||||
### 1. SeriesApp (Core Layer)
|
||||
|
||||
**Location**: `src/core/SeriesApp.py`
|
||||
|
||||
**Responsibilities**:
|
||||
|
||||
- Execute actual downloads and scans
|
||||
- Fire events with detailed progress information
|
||||
- Manage download state and error handling
|
||||
|
||||
**Events**:
|
||||
|
||||
- `download_status`: Fired during downloads
|
||||
|
||||
- `started`: Download begins
|
||||
- `progress`: Progress updates (percent, speed, ETA)
|
||||
- `completed`: Download finished successfully
|
||||
- `failed`: Download encountered an error
|
||||
|
||||
- `scan_status`: Fired during library scans
|
||||
- `started`: Scan begins
|
||||
- `progress`: Scan progress updates
|
||||
- `completed`: Scan finished
|
||||
- `failed`: Scan encountered an error
|
||||
- `cancelled`: Scan was cancelled
|
||||
|
||||
### 2. AnimeService (Service Layer)
|
||||
|
||||
**Location**: `src/server/services/anime_service.py`
|
||||
|
||||
**Responsibilities**:
|
||||
|
||||
- Subscribe to SeriesApp events
|
||||
- Translate SeriesApp events into ProgressService updates
|
||||
- Provide async interface for web layer
|
||||
|
||||
**Event Handlers**:
|
||||
|
||||
```python
|
||||
async def _on_download_status(self, args):
    """Translates download events to progress service."""
    if args.status == "started":
        await progress_service.start_progress(...)
    elif args.status == "progress":
        await progress_service.update_progress(...)
    elif args.status == "completed":
        await progress_service.complete_progress(...)
    elif args.status == "failed":
        await progress_service.fail_progress(...)

async def _on_scan_status(self, args):
    """Translates scan events to progress service."""
    # Similar pattern as download_status
```
|
||||
|
||||
### 3. ProgressService (Service Layer)
|
||||
|
||||
**Location**: `src/server/services/progress_service.py`
|
||||
|
||||
**Responsibilities**:
|
||||
|
||||
- Central progress tracking for all operations
|
||||
- Maintain active and historical progress records
|
||||
- Calculate percentages and rates
|
||||
- Emit events to subscribers (event-based architecture)
|
||||
|
||||
**Progress Types**:
|
||||
|
||||
- `DOWNLOAD`: Individual episode downloads
|
||||
- `SCAN`: Library scans for missing episodes
|
||||
- `QUEUE`: Download queue operations
|
||||
- `SYSTEM`: System-level operations
|
||||
- `ERROR`: Error notifications
|
||||
|
||||
**Event System**:
|
||||
|
||||
```python
|
||||
# Subscribe to progress events
|
||||
def subscribe(event_name: str, handler: Callable[[ProgressEvent], None])
|
||||
def unsubscribe(event_name: str, handler: Callable[[ProgressEvent], None])
|
||||
|
||||
# Internal event emission
|
||||
async def _emit_event(event: ProgressEvent)
|
||||
```
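
A minimal sketch of how these pieces could fit together is shown below. The `subscribe`/`unsubscribe`/`_emit_event` names, the `_event_handlers` registry, and the `ProgressEvent` dataclass come from this document (see Migration Notes); the concrete fields, the `"progress_updated"` dispatch key, and the error handling are illustrative assumptions, not the actual implementation.

```python
from dataclasses import dataclass
from typing import Any, Callable, Dict, List


@dataclass
class ProgressEvent:
    event_type: str         # e.g. "download_progress"
    progress: Any           # progress record exposing .to_dict()
    room: str = "progress"  # WebSocket room name -- illustrative default


class ProgressServiceSketch:
    """Illustrative subscribe/emit core; not the real implementation."""

    def __init__(self) -> None:
        self._event_handlers: Dict[str, List[Callable[[ProgressEvent], Any]]] = {}

    def subscribe(self, event_name: str, handler: Callable[[ProgressEvent], Any]) -> None:
        self._event_handlers.setdefault(event_name, []).append(handler)

    def unsubscribe(self, event_name: str, handler: Callable[[ProgressEvent], Any]) -> None:
        handlers = self._event_handlers.get(event_name, [])
        if handler in handlers:
            handlers.remove(handler)

    async def _emit_event(self, event: ProgressEvent) -> None:
        # Handlers in this document subscribe under "progress_updated".
        for handler in list(self._event_handlers.get("progress_updated", [])):
            try:
                result = handler(event)
                if hasattr(result, "__await__"):  # support async handlers too
                    await result
            except Exception:  # one failing subscriber must not block the rest
                continue
```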
|
||||
|
||||
**Key Methods**:
|
||||
|
||||
```python
|
||||
async def start_progress(progress_id, progress_type, title, ...):
|
||||
"""Start tracking a new operation."""
|
||||
|
||||
async def update_progress(progress_id, current, total, message, ...):
|
||||
"""Update progress for an ongoing operation."""
|
||||
|
||||
async def complete_progress(progress_id, message, ...):
|
||||
"""Mark operation as completed."""
|
||||
|
||||
async def fail_progress(progress_id, error_message, ...):
|
||||
"""Mark operation as failed."""
|
||||
```
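
From a producer's point of view, a typical call sequence might look like the following sketch. The keyword arguments follow the signatures above and the progress ID follows the pattern from the Progress IDs section; the import path, the `get_progress_service()` accessor, and the loop driving the updates are illustrative assumptions.

```python
from src.server.services.progress_service import ProgressType, get_progress_service


async def report_episode_download(serie_folder: str, season: int, episode: int) -> None:
    progress_service = get_progress_service()
    progress_id = f"download_{serie_folder}_{season}_{episode}"  # documented ID pattern

    await progress_service.start_progress(
        progress_id=progress_id,
        progress_type=ProgressType.DOWNLOAD,
        title=f"Downloading {serie_folder}",
    )
    try:
        for percent in (25, 50, 75, 100):  # stand-in for real download callbacks
            await progress_service.update_progress(
                progress_id=progress_id,
                current=percent,
                total=100,
                message=f"S{season:02d}E{episode:02d}",
            )
        await progress_service.complete_progress(progress_id=progress_id, message="Done")
    except Exception as exc:  # report the failure, then re-raise
        await progress_service.fail_progress(progress_id=progress_id, error_message=str(exc))
        raise
```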
|
||||
|
||||
### 4. DownloadService (Service Layer)
|
||||
|
||||
**Location**: `src/server/services/download_service.py`
|
||||
|
||||
**Responsibilities**:
|
||||
|
||||
- Manage download queue (FIFO processing)
|
||||
- Track queue state (pending, active, completed, failed)
|
||||
- Persist queue to disk
|
||||
- Use ProgressService for queue-related updates
|
||||
|
||||
**Progress Integration**:
|
||||
|
||||
```python
|
||||
# Queue operations notify via ProgressService
|
||||
await progress_service.update_progress(
|
||||
progress_id="download_queue",
|
||||
message="Added 3 items to queue",
|
||||
metadata={
|
||||
"action": "items_added",
|
||||
"queue_status": {...}
|
||||
},
|
||||
force_broadcast=True,
|
||||
)
|
||||
```
|
||||
|
||||
**Note**: DownloadService does NOT directly broadcast. Individual download progress flows through:
|
||||
`SeriesApp → AnimeService → ProgressService → WebSocket`
|
||||
|
||||
### 5. WebSocketService (Service Layer)
|
||||
|
||||
**Location**: `src/server/services/websocket_service.py`
|
||||
|
||||
**Responsibilities**:
|
||||
|
||||
- Manage WebSocket connections
|
||||
- Support room-based messaging
|
||||
- Broadcast progress updates to clients
|
||||
- Handle connection lifecycle
|
||||
|
||||
**Integration**:
|
||||
WebSocketService subscribes to ProgressService events:
|
||||
|
||||
```python
|
||||
async def lifespan(app: FastAPI):
|
||||
# Get services
|
||||
progress_service = get_progress_service()
|
||||
ws_service = get_websocket_service()
|
||||
|
||||
# Define event handler
|
||||
async def progress_event_handler(event) -> None:
|
||||
"""Handle progress events and broadcast via WebSocket."""
|
||||
message = {
|
||||
"type": event.event_type,
|
||||
"data": event.progress.to_dict(),
|
||||
}
|
||||
await ws_service.manager.broadcast_to_room(message, event.room)
|
||||
|
||||
# Subscribe to progress events
|
||||
progress_service.subscribe("progress_updated", progress_event_handler)
|
||||
```
|
||||
|
||||
## Data Flow Examples
|
||||
|
||||
### Example 1: Episode Download
|
||||
|
||||
1. **User triggers download** via API endpoint
|
||||
2. **DownloadService** queues the download
|
||||
3. **DownloadService** starts processing → calls `anime_service.download()`
|
||||
4. **AnimeService** calls `series_app.download()`
|
||||
5. **SeriesApp** fires `download_status` events:
|
||||
- `started` → AnimeService → ProgressService → WebSocket → Client
|
||||
- `progress` (multiple) → AnimeService → ProgressService → WebSocket → Client
|
||||
- `completed` → AnimeService → ProgressService → WebSocket → Client
|
||||
|
||||
### Example 2: Library Scan
|
||||
|
||||
1. **User triggers scan** via API endpoint
|
||||
2. **AnimeService** calls `series_app.rescan()`
|
||||
3. **SeriesApp** fires `scan_status` events:
|
||||
- `started` → AnimeService → ProgressService → WebSocket → Client
|
||||
- `progress` (multiple) → AnimeService → ProgressService → WebSocket → Client
|
||||
- `completed` → AnimeService → ProgressService → WebSocket → Client
|
||||
|
||||
### Example 3: Queue Management
|
||||
|
||||
1. **User adds items to queue** via API endpoint
|
||||
2. **DownloadService** adds items to internal queue
|
||||
3. **DownloadService** notifies via ProgressService:
|
||||
```python
|
||||
await progress_service.update_progress(
|
||||
progress_id="download_queue",
|
||||
message="Added 5 items to queue",
|
||||
metadata={"queue_status": {...}},
|
||||
force_broadcast=True,
|
||||
)
|
||||
```
|
||||
4. **ProgressService** → WebSocket → Client receives queue update
|
||||
|
||||
## Benefits of This Architecture
|
||||
|
||||
### 1. **Single Source of Truth**
|
||||
|
||||
- All progress tracking goes through ProgressService
|
||||
- Consistent progress reporting across the application
|
||||
- Easy to monitor and debug
|
||||
|
||||
### 2. **Decoupling**
|
||||
|
||||
- Core logic (SeriesApp) doesn't know about web layer
|
||||
- Services can be tested independently
|
||||
- Easy to add new progress consumers (e.g., CLI, GUI)
|
||||
|
||||
### 3. **Type Safety**
|
||||
|
||||
- Strongly typed progress updates
|
||||
- Enum-based progress types and statuses
|
||||
- Clear data contracts
|
||||
|
||||
### 4. **Flexibility**
|
||||
|
||||
- Multiple subscribers can listen to progress events
|
||||
- Room-based WebSocket messaging
|
||||
- Metadata support for custom data
|
||||
- Multiple concurrent progress operations
|
||||
|
||||
### 5. **Maintainability**
|
||||
|
||||
- Clear separation of concerns
|
||||
- Single place to modify progress logic
|
||||
- Easy to extend with new progress types or subscribers
|
||||
|
||||
### 6. **Scalability**
|
||||
|
||||
- Event-based architecture supports multiple consumers
|
||||
- Isolated error handling per subscriber
|
||||
- No single point of failure
|
||||
|
||||
## Progress IDs
|
||||
|
||||
Progress operations are identified by unique IDs:
|
||||
|
||||
- **Downloads**: `download_{serie_folder}_{season}_{episode}`
|
||||
- **Scans**: `library_scan`
|
||||
- **Queue**: `download_queue`
|
||||
|
||||
## WebSocket Messages
|
||||
|
||||
Clients receive progress updates in this format:
|
||||
|
||||
```json
|
||||
{
|
||||
"type": "download_progress",
|
||||
"data": {
|
||||
"id": "download_naruto_1_1",
|
||||
"type": "download",
|
||||
"status": "in_progress",
|
||||
"title": "Downloading Naruto",
|
||||
"message": "S01E01",
|
||||
"percent": 45.5,
|
||||
"current": 45,
|
||||
"total": 100,
|
||||
"metadata": {},
|
||||
"started_at": "2025-11-07T10:00:00Z",
|
||||
"updated_at": "2025-11-07T10:05:00Z"
|
||||
}
|
||||
}
|
||||
```
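
On the client side, consuming these messages could look like the sketch below. It assumes the third-party `websockets` package and the `WS /api/ws` endpoint documented elsewhere in this repository; authentication is omitted and assumed to be handled separately.

```python
import asyncio
import json

import websockets  # third-party package, assumed available


async def watch_progress(url: str = "ws://127.0.0.1:8000/api/ws") -> None:
    """Print download progress updates pushed by the server."""
    async with websockets.connect(url) as ws:
        async for raw in ws:
            message = json.loads(raw)
            if message.get("type") == "download_progress":
                data = message["data"]
                print(f"{data['title']}: {data['percent']:.1f}% ({data['message']})")


if __name__ == "__main__":
    asyncio.run(watch_progress())
```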
|
||||
|
||||
## Configuration
|
||||
|
||||
### Startup (fastapi_app.py)
|
||||
|
||||
```python
|
||||
@asynccontextmanager
|
||||
async def lifespan(app: FastAPI):
|
||||
# Initialize services
|
||||
progress_service = get_progress_service()
|
||||
ws_service = get_websocket_service()
|
||||
|
||||
# Define event handler
|
||||
async def progress_event_handler(event) -> None:
|
||||
"""Handle progress events and broadcast via WebSocket."""
|
||||
message = {
|
||||
"type": event.event_type,
|
||||
"data": event.progress.to_dict(),
|
||||
}
|
||||
await ws_service.manager.broadcast_to_room(message, event.room)
|
||||
|
||||
# Subscribe to progress events
|
||||
progress_service.subscribe("progress_updated", progress_event_handler)
|
||||
```
|
||||
|
||||
### Service Initialization
|
||||
|
||||
```python
|
||||
# AnimeService automatically subscribes to SeriesApp events
|
||||
anime_service = AnimeService(series_app)
|
||||
|
||||
# DownloadService uses ProgressService for queue updates
|
||||
download_service = DownloadService(anime_service)
|
||||
```
|
||||
|
||||
## Migration Notes
|
||||
|
||||
### What Changed
|
||||
|
||||
**Before (Callback-based)**:
|
||||
|
||||
- ProgressService had a single `set_broadcast_callback()` method
|
||||
- Only one consumer could receive updates
|
||||
- Direct coupling between ProgressService and WebSocketService
|
||||
|
||||
**After (Event-based)**:
|
||||
|
||||
- ProgressService uses `subscribe()` and `unsubscribe()` methods
|
||||
- Multiple consumers can subscribe to progress events
|
||||
- Loose coupling - ProgressService doesn't know about subscribers
|
||||
- Clean event flow: SeriesApp → AnimeService → ProgressService → Subscribers
|
||||
|
||||
### Removed
|
||||
|
||||
1. **ProgressService**:
|
||||
|
||||
- `set_broadcast_callback()` method
|
||||
- `_broadcast_callback` attribute
|
||||
- `_broadcast()` method
|
||||
|
||||
### Added
|
||||
|
||||
1. **ProgressService**:
|
||||
|
||||
- `ProgressEvent` dataclass to encapsulate event data
|
||||
- `subscribe()` method for event subscription
|
||||
- `unsubscribe()` method to remove handlers
|
||||
- `_emit_event()` method for broadcasting to all subscribers
|
||||
- `_event_handlers` dictionary to track subscribers
|
||||
|
||||
2. **fastapi_app.py**:
|
||||
- Event handler function `progress_event_handler`
|
||||
- Uses `subscribe()` instead of `set_broadcast_callback()`
|
||||
|
||||
### Benefits of Event-Based Design
|
||||
|
||||
1. **Multiple Subscribers**: Can now have multiple services listening to progress
|
||||
|
||||
```python
|
||||
# WebSocket for real-time updates
|
||||
progress_service.subscribe("progress_updated", websocket_handler)
|
||||
# Metrics for analytics
|
||||
progress_service.subscribe("progress_updated", metrics_handler)
|
||||
# Logging for debugging
|
||||
progress_service.subscribe("progress_updated", logging_handler)
|
||||
```
|
||||
|
||||
2. **Isolated Error Handling**: If one subscriber fails, others continue working
|
||||
|
||||
3. **Dynamic Subscription**: Handlers can subscribe/unsubscribe at runtime
|
||||
|
||||
4. **Extensibility**: Easy to add new features without modifying ProgressService
|
||||
|
||||
## Testing
|
||||
|
||||
### Unit Tests
|
||||
|
||||
- Test each service independently
|
||||
- Mock ProgressService for services that use it
|
||||
- Verify event handler logic
|
||||
|
||||
### Integration Tests
|
||||
|
||||
- Test full flow: SeriesApp → AnimeService → ProgressService → WebSocket
|
||||
- Verify progress updates reach clients
|
||||
- Test error handling
|
||||
|
||||
### Example Test
|
||||
|
||||
```python
|
||||
async def test_download_progress_flow():
|
||||
# Setup
|
||||
progress_service = ProgressService()
|
||||
events_received = []
|
||||
|
||||
async def mock_event_handler(event):
|
||||
events_received.append(event)
|
||||
|
||||
progress_service.subscribe("progress_updated", mock_event_handler)
|
||||
|
||||
# Execute
|
||||
await progress_service.start_progress(
|
||||
progress_id="test_download",
|
||||
progress_type=ProgressType.DOWNLOAD,
|
||||
title="Test"
|
||||
)
|
||||
|
||||
# Verify
|
||||
assert len(events_received) == 1
|
||||
assert events_received[0].event_type == "download_progress"
|
||||
assert events_received[0].progress.id == "test_download"
|
||||
```
|
||||
|
||||
## Future Enhancements
|
||||
|
||||
1. **Progress Persistence**: Save progress to database for recovery
|
||||
2. **Progress History**: Keep detailed history for analytics
|
||||
3. **Rate Limiting**: Throttle progress updates to prevent spam
|
||||
4. **Progress Aggregation**: Combine multiple progress operations
|
||||
5. **Custom Rooms**: Allow clients to subscribe to specific progress types
|
||||
|
||||
## Related Documentation
|
||||
|
||||
- [WebSocket API](./websocket_api.md)
|
||||
- [Download Service](./download_service.md)
|
||||
- [Error Handling](./error_handling_validation.md)
|
||||
- [API Implementation](./api_implementation_summary.md)
|
||||
39 features.md
@ -1,24 +1,53 @@
|
||||
# Aniworld Web Application Features
|
||||
|
||||
## Authentication & Security
|
||||
|
||||
- **Master Password Login**: Secure access to the application with a master password system
|
||||
- **JWT Token Sessions**: Stateless authentication with JSON Web Tokens
|
||||
- **Rate Limiting**: Built-in protection against brute force attacks
|
||||
|
||||
## Configuration Management
|
||||
|
||||
- **Setup Page**: Initial configuration interface for server setup and basic settings
|
||||
- **Config Page**: View and modify application configuration settings
|
||||
- **Scheduler Configuration**: Configure automated rescan schedules
|
||||
- **Backup Management**: Create, restore, and manage configuration backups
|
||||
|
||||
## User Interface
|
||||
|
||||
- **Dark Mode**: Toggle between light and dark themes for better user experience
|
||||
- **Responsive Design**: Mobile-friendly interface with touch support
|
||||
- **Real-time Updates**: WebSocket-based live notifications and progress tracking
|
||||
|
||||
## Anime Management
|
||||
|
||||
- **Anime Library Page**: Display list of anime series with missing episodes
|
||||
- **Series Selection**: Select individual anime series and add episodes to download queue
|
||||
- **Anime Search Page**: Search functionality to find and add new anime series to the library
|
||||
- **Anime Search**: Search for anime series using integrated providers
|
||||
- **Library Scanning**: Automated scanning for missing episodes
|
||||
|
||||
## Download Management
|
||||
- **Download Queue Page**: View and manage the current download queue
|
||||
- **Download Status Display**: Real-time status updates and progress of current downloads
|
||||
- **Queue Operations**: Add, remove, and prioritize items in the download queue
|
||||
|
||||
- **Download Queue Page**: View and manage the current download queue with organized sections
|
||||
- **Queue Organization**: Displays downloads organized by status (pending, active, completed, failed)
|
||||
- **Manual Start/Stop Control**: User manually starts downloads one at a time with Start/Stop buttons
|
||||
- **FIFO Queue Processing**: First-in, first-out queue order (no priority or reordering)
|
||||
- **Single Download Mode**: Only one download active at a time, new downloads must be manually started
|
||||
- **Download Status Display**: Real-time status updates and progress of current download
|
||||
- **Queue Operations**: Add and remove items from the pending queue
|
||||
- **Completed Downloads List**: Separate section for completed downloads with clear button
|
||||
- **Failed Downloads List**: Separate section for failed downloads with retry and clear options
|
||||
- **Retry Failed Downloads**: Automatically retry failed downloads with configurable limits
|
||||
- **Clear Completed**: Remove completed downloads from the queue
|
||||
- **Clear Failed**: Remove failed downloads from the queue
|
||||
- **Queue Statistics**: Real-time counters for pending, active, completed, and failed items
|
||||
|
||||
## Real-time Communication
|
||||
|
||||
- **WebSocket Support**: Real-time notifications for download progress and queue updates
|
||||
- **Progress Tracking**: Live progress updates for downloads and scans
|
||||
- **System Notifications**: Real-time system messages and alerts
|
||||
|
||||
## Core Functionality Overview
|
||||
The web application provides a complete interface for managing anime downloads with user-friendly pages for configuration, library management, search capabilities, and download monitoring.
|
||||
|
||||
The web application provides a complete interface for managing anime downloads with user-friendly pages for configuration, library management, search capabilities, and download monitoring. All operations are tracked in real-time with comprehensive progress reporting and error handling.
|
||||
|
||||
@ -41,9 +41,8 @@ conda activate AniWorld
|
||||
│ │ │ ├── config.py # Configuration endpoints
|
||||
│ │ │ ├── anime.py # Anime management endpoints
|
||||
│ │ │ ├── download.py # Download queue endpoints
|
||||
│ │ │ ├── providers.py # Provider health & config endpoints
|
||||
│ │ │ ├── websocket.py # WebSocket real-time endpoints
|
||||
│ │ │ └── search.py # Search endpoints
|
||||
│ │ │ ├── scheduler.py # Scheduler configuration endpoints
|
||||
│ │ │ └── websocket.py # WebSocket real-time endpoints
|
||||
│ │ ├── models/ # Pydantic models
|
||||
│ │ │ ├── __init__.py
|
||||
│ │ │ ├── auth.py
|
||||
@ -57,7 +56,10 @@ conda activate AniWorld
|
||||
│ │ │ ├── config_service.py
|
||||
│ │ │ ├── anime_service.py
|
||||
│ │ │ ├── download_service.py
|
||||
│ │ │ └── websocket_service.py # WebSocket connection management
|
||||
│ │ │ ├── websocket_service.py # WebSocket connection management
|
||||
│ │ │ ├── progress_service.py # Progress tracking
|
||||
│ │ │ ├── notification_service.py # Notification system
|
||||
│ │ │ └── cache_service.py # Caching layer
|
||||
│ │ ├── database/ # Database layer
|
||||
│ │ │ ├── __init__.py # Database package
|
||||
│ │ │ ├── base.py # Base models and mixins
|
||||
@ -214,21 +216,6 @@ conda activate AniWorld
|
||||
- `POST /api/scheduler/config` - Update scheduler configuration
|
||||
- `POST /api/scheduler/trigger-rescan` - Manually trigger rescan
|
||||
|
||||
### Logging
|
||||
|
||||
- `GET /api/logging/config` - Get logging configuration
|
||||
- `POST /api/logging/config` - Update logging configuration
|
||||
- `GET /api/logging/files` - List all log files
|
||||
- `GET /api/logging/files/{filename}/download` - Download log file
|
||||
- `GET /api/logging/files/{filename}/tail` - Get last N lines of log file
|
||||
- `POST /api/logging/test` - Test logging by writing messages at all levels
|
||||
- `POST /api/logging/cleanup` - Clean up old log files
|
||||
|
||||
### Diagnostics
|
||||
|
||||
- `GET /api/diagnostics/network` - Run network connectivity diagnostics
|
||||
- `GET /api/diagnostics/system` - Get basic system information
|
||||
|
||||
### Anime Management
|
||||
|
||||
- `GET /api/anime` - List anime with missing episodes
|
||||
@ -245,85 +232,42 @@ initialization.
|
||||
|
||||
- `GET /api/queue/status` - Get download queue status and statistics
|
||||
- `POST /api/queue/add` - Add episodes to download queue
|
||||
- `DELETE /api/queue/{id}` - Remove item from queue
|
||||
- `DELETE /api/queue/` - Remove multiple items from queue
|
||||
- `POST /api/queue/start` - Start download queue processing
|
||||
- `POST /api/queue/stop` - Stop download queue processing
|
||||
- `POST /api/queue/pause` - Pause queue processing
|
||||
- `POST /api/queue/resume` - Resume queue processing
|
||||
- `POST /api/queue/reorder` - Reorder pending queue items
|
||||
- `DELETE /api/queue/{id}` - Remove single item from pending queue
|
||||
- `POST /api/queue/start` - Manually start next download from queue (one at a time)
|
||||
- `POST /api/queue/stop` - Stop processing new downloads
|
||||
- `DELETE /api/queue/completed` - Clear completed downloads
|
||||
- `POST /api/queue/retry` - Retry failed downloads
|
||||
- `DELETE /api/queue/failed` - Clear failed downloads
|
||||
- `POST /api/queue/retry/{id}` - Retry a specific failed download
|
||||
- `POST /api/queue/retry` - Retry all failed downloads
|
||||
|
||||
### Provider Management (October 2025)
|
||||
**Manual Download Control:**
|
||||
|
||||
The provider system has been enhanced with comprehensive health monitoring,
|
||||
automatic failover, performance tracking, and dynamic configuration.
|
||||
- Queue processing is fully manual - no auto-start
|
||||
- User must click "Start" to begin downloading next item from queue
|
||||
- Only one download active at a time
|
||||
- "Stop" prevents new downloads but allows current to complete
|
||||
- FIFO queue order (first-in, first-out)
|
||||
|
||||
**Provider Health Monitoring:**
|
||||
**Queue Organization:**
|
||||
|
||||
- `GET /api/providers/health` - Get overall provider health summary
|
||||
- `GET /api/providers/health/{provider_name}` - Get specific provider health
|
||||
- `GET /api/providers/available` - List currently available providers
|
||||
- `GET /api/providers/best` - Get best performing provider
|
||||
- `POST /api/providers/health/{provider_name}/reset` - Reset provider metrics
|
||||
- **Pending Queue**: Items waiting to be downloaded, displayed in FIFO order
|
||||
- **Active Download**: Currently downloading item with progress bar (max 1)
|
||||
- **Completed Downloads**: Successfully downloaded items with completion timestamps
|
||||
- **Failed Downloads**: Failed items with error messages and retry options
|
||||
|
||||
**Provider Configuration:**
|
||||
**Queue Display Features:**
|
||||
|
||||
- `GET /api/providers/config` - Get all provider configurations
|
||||
- `GET /api/providers/config/{provider_name}` - Get specific provider config
|
||||
- `PUT /api/providers/config/{provider_name}` - Update provider settings
|
||||
- `POST /api/providers/config/{provider_name}/enable` - Enable provider
|
||||
- `POST /api/providers/config/{provider_name}/disable` - Disable provider
|
||||
- Real-time statistics counters (pending, active, completed, failed)
|
||||
- Empty state messages with helpful hints
|
||||
- Per-section action buttons (clear, retry all)
|
||||
- Start/Stop buttons for manual queue control
|
||||
|
||||
**Failover Management:**
|
||||
### WebSocket
|
||||
|
||||
- `GET /api/providers/failover` - Get failover statistics
|
||||
- `POST /api/providers/failover/{provider_name}/add` - Add to failover chain
|
||||
- `DELETE /api/providers/failover/{provider_name}` - Remove from failover
|
||||
|
||||
**Provider Enhancement Features:**
|
||||
|
||||
- **Health Monitoring**: Real-time tracking of provider availability, response
|
||||
times, success rates, and bandwidth usage. Automatic marking of providers as
|
||||
unavailable after consecutive failures.
|
||||
- **Automatic Failover**: Seamless switching between providers when primary
|
||||
fails. Configurable retry attempts and delays.
|
||||
- **Performance Tracking**: Wrapped provider interface that automatically
|
||||
records metrics for all operations (search, download, metadata retrieval).
|
||||
- **Dynamic Configuration**: Runtime updates to provider settings without
|
||||
application restart. Configurable timeouts, retries, bandwidth limits.
|
||||
- **Best Provider Selection**: Intelligent selection based on success rate,
|
||||
response time, and availability.
|
||||
|
||||
**Provider Metrics Tracked:**
|
||||
|
||||
- Total requests (successful/failed)
|
||||
- Average response time (milliseconds)
|
||||
- Success rate (percentage)
|
||||
- Consecutive failures count
|
||||
- Total bytes downloaded
|
||||
- Uptime percentage (last 60 minutes)
|
||||
- Last error message and timestamp
|
||||
|
||||
**Implementation:**
|
||||
|
||||
- `src/core/providers/health_monitor.py` - ProviderHealthMonitor class
|
||||
- `src/core/providers/failover.py` - ProviderFailover system
|
||||
- `src/core/providers/monitored_provider.py` - Performance tracking wrapper
|
||||
- `src/core/providers/config_manager.py` - Dynamic configuration manager
|
||||
- `src/server/api/providers.py` - Provider management API endpoints
|
||||
|
||||
**Testing:**
|
||||
|
||||
- 34 unit tests covering health monitoring, failover, and configuration
|
||||
- Tests for provider availability tracking and failover scenarios
|
||||
- Configuration persistence and validation tests
|
||||
|
||||
### Search

- `GET /api/search?q={query}` - Search for anime
- `POST /api/search/add` - Add anime to library

### WebSocket

- `WS /api/ws` - WebSocket connection for real-time updates
- Real-time download progress notifications
- Queue status updates
- System notifications

A minimal client sketch for the WebSocket endpoint follows below.

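A listener sketch for the `/api/ws` endpoint using the third-party `websockets` package; the host/port and the JSON message shape are assumptions.

```python
# Sketch only: connect to the WebSocket endpoint and print incoming events.
# The host/port and the message structure are assumptions.
import asyncio
import json

import websockets


async def listen() -> None:
    async with websockets.connect("ws://127.0.0.1:8000/api/ws") as ws:
        async for raw in ws:
            event = json.loads(raw)  # assumed to be JSON-encoded updates
            print(event)


asyncio.run(listen())
```
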
## Logging

@ -345,7 +289,7 @@ automatic failover, performance tracking, and dynamic configuration.

- Master password protection for application access
- Secure session management with JWT tokens
- Input validation and sanitization
- Rate limiting on API endpoints
- Built-in rate limiting in authentication middleware
- HTTPS enforcement in production
- Secure file path handling to prevent directory traversal

@ -827,8 +771,6 @@ The `SeriesApp` class (`src/core/SeriesApp.py`) is the main application engine f

- `search(words)`: Search for anime series
- `download()`: Download episodes with progress tracking
- `ReScan()`: Scan directory for missing episodes
- `async_download()`: Async version of download
- `async_rescan()`: Async version of rescan
- `cancel_operation()`: Cancel current operation
- `get_operation_status()`: Get current status
- `get_series_list()`: Get series with missing episodes

@ -105,9 +105,30 @@ For each task completed:

---

### Prerequisites

1. Server is running: `conda run -n AniWorld python -m uvicorn src.server.fastapi_app:app --host 127.0.0.1 --port 8000 --reload`
2. Password: `Hallo123!`
3. Login via browser at `http://127.0.0.1:8000/login`

**Deployment Steps:**

1. Commit all changes to the git repository
2. Create a deployment tag (e.g., `v1.0.0-queue-simplified`)
3. Deploy to the production environment
4. Monitor logs for any unexpected behavior
5. Verify production queue functionality

### Notes

- This is a simplification that removes complexity while maintaining core functionality
- Improves user experience with explicit manual control
- Easier to understand, test, and maintain
- Good foundation for future enhancements if needed
- No database schema changes required
- WebSocket infrastructure remains unchanged

# Tasks

## Setup

- [x] Redirect to setup if no config is present.
- [x] After setup is confirmed, redirect to login.
- [ ] Check that SeriesApp methods are used correctly. The SeriesApp interface changed; make sure classes that use SeriesApp target the latest interface.
- [ ] SeriesApp now exposes events; make sure services and the API use them.

@ -3,58 +3,93 @@ SeriesApp - Core application logic for anime series management.

This module provides the main application interface for searching,
downloading, and managing anime series with support for async callbacks,
progress reporting, error handling, and operation cancellation.
progress reporting, and error handling.
"""

import asyncio
import logging
import uuid
from dataclasses import dataclass
from enum import Enum
from typing import Any, Callable, Dict, List, Optional
from typing import Any, Dict, List, Optional

from events import Events

from src.core.entities.SerieList import SerieList
from src.core.interfaces.callbacks import (
    CallbackManager,
    CompletionContext,
    ErrorContext,
    OperationType,
    ProgressContext,
    ProgressPhase,
)
from src.core.providers.provider_factory import Loaders
from src.core.SerieScanner import SerieScanner

logger = logging.getLogger(__name__)

class OperationStatus(Enum):
|
||||
"""Status of an operation."""
|
||||
IDLE = "idle"
|
||||
RUNNING = "running"
|
||||
COMPLETED = "completed"
|
||||
CANCELLED = "cancelled"
|
||||
FAILED = "failed"
|
||||
class DownloadStatusEventArgs:
|
||||
"""Event arguments for download status events."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
serie_folder: str,
|
||||
season: int,
|
||||
episode: int,
|
||||
status: str,
|
||||
progress: float = 0.0,
|
||||
message: Optional[str] = None,
|
||||
error: Optional[Exception] = None,
|
||||
eta: Optional[int] = None,
|
||||
mbper_sec: Optional[float] = None,
|
||||
):
|
||||
"""
|
||||
Initialize download status event arguments.
|
||||
|
||||
@dataclass
|
||||
class ProgressInfo:
|
||||
"""Progress information for long-running operations."""
|
||||
current: int
|
||||
total: int
|
||||
message: str
|
||||
percentage: float
|
||||
status: OperationStatus
|
||||
Args:
|
||||
serie_folder: Serie folder name
|
||||
season: Season number
|
||||
episode: Episode number
|
||||
status: Status message (e.g., "started", "progress", "completed", "failed")
|
||||
progress: Download progress (0.0 to 1.0)
|
||||
message: Optional status message
|
||||
error: Optional error if status is "failed"
|
||||
eta: Estimated time remaining in seconds
|
||||
mbper_sec: Download speed in MB/s
|
||||
"""
|
||||
self.serie_folder = serie_folder
|
||||
self.season = season
|
||||
self.episode = episode
|
||||
self.status = status
|
||||
self.progress = progress
|
||||
self.message = message
|
||||
self.error = error
|
||||
self.eta = eta
|
||||
self.mbper_sec = mbper_sec
|
||||
|
||||
class ScanStatusEventArgs:
|
||||
"""Event arguments for scan status events."""
|
||||
|
||||
@dataclass
|
||||
class OperationResult:
|
||||
"""Result of an operation."""
|
||||
success: bool
|
||||
message: str
|
||||
data: Optional[Any] = None
|
||||
error: Optional[Exception] = None
|
||||
def __init__(
|
||||
self,
|
||||
current: int,
|
||||
total: int,
|
||||
folder: str,
|
||||
status: str,
|
||||
progress: float = 0.0,
|
||||
message: Optional[str] = None,
|
||||
error: Optional[Exception] = None,
|
||||
):
|
||||
"""
|
||||
Initialize scan status event arguments.
|
||||
|
||||
Args:
|
||||
current: Current item being scanned
|
||||
total: Total items to scan
|
||||
folder: Current folder being scanned
|
||||
status: Status message (e.g., "started", "progress", "completed", "failed", "cancelled")
|
||||
progress: Scan progress (0.0 to 1.0)
|
||||
message: Optional status message
|
||||
error: Optional error if status is "failed"
|
||||
"""
|
||||
self.current = current
|
||||
self.total = total
|
||||
self.folder = folder
|
||||
self.status = status
|
||||
self.progress = progress
|
||||
self.message = message
|
||||
self.error = error
|
||||
|
||||
class SeriesApp:
    """
@ -66,88 +101,86 @@ class SeriesApp:
    - Scanning directories for missing episodes
    - Managing series lists

    Supports async callbacks for progress reporting and cancellation.
    """
    Supports async callbacks for progress reporting.

    _initialization_count = 0
    Events:
        download_status: Raised when download status changes.
            Handler signature: def handler(args: DownloadStatusEventArgs)
        scan_status: Raised when scan status changes.
            Handler signature: def handler(args: ScanStatusEventArgs)
    """

def __init__(
|
||||
self,
|
||||
directory_to_search: str,
|
||||
progress_callback: Optional[Callable[[ProgressInfo], None]] = None,
|
||||
error_callback: Optional[Callable[[Exception], None]] = None,
|
||||
callback_manager: Optional[CallbackManager] = None
|
||||
):
|
||||
"""
|
||||
Initialize SeriesApp.
|
||||
|
||||
Args:
|
||||
directory_to_search: Base directory for anime series
|
||||
progress_callback: Optional legacy callback for progress updates
|
||||
error_callback: Optional legacy callback for error notifications
|
||||
callback_manager: Optional callback manager for new callback system
|
||||
"""
|
||||
SeriesApp._initialization_count += 1
|
||||
|
||||
# Only show initialization message for the first instance
|
||||
if SeriesApp._initialization_count <= 1:
|
||||
logger.info("Initializing SeriesApp...")
|
||||
|
||||
self.directory_to_search = directory_to_search
|
||||
self.progress_callback = progress_callback
|
||||
self.error_callback = error_callback
|
||||
|
||||
# Initialize new callback system
|
||||
self._callback_manager = callback_manager or CallbackManager()
|
||||
# Initialize events
|
||||
self._events = Events()
|
||||
self._events.download_status = None
|
||||
self._events.scan_status = None
|
||||
|
||||
# Cancellation support
|
||||
self._cancel_flag = False
|
||||
self._current_operation: Optional[str] = None
|
||||
self._current_operation_id: Optional[str] = None
|
||||
self._operation_status = OperationStatus.IDLE
|
||||
self.loaders = Loaders()
|
||||
self.loader = self.loaders.GetLoader(key="aniworld.to")
|
||||
self.serie_scanner = SerieScanner(directory_to_search, self.loader)
|
||||
self.list = SerieList(self.directory_to_search)
|
||||
# Synchronous init used during constructor to avoid awaiting in __init__
|
||||
self._init_list_sync()
|
||||
|
||||
# Initialize components
|
||||
try:
|
||||
self.Loaders = Loaders()
|
||||
self.loader = self.Loaders.GetLoader(key="aniworld.to")
|
||||
self.SerieScanner = SerieScanner(
|
||||
directory_to_search,
|
||||
self.loader,
|
||||
self._callback_manager
|
||||
)
|
||||
self.List = SerieList(self.directory_to_search)
|
||||
self.__InitList__()
|
||||
|
||||
logger.info(
|
||||
"SeriesApp initialized for directory: %s",
|
||||
directory_to_search
|
||||
)
|
||||
except (IOError, OSError, RuntimeError) as e:
|
||||
logger.error("Failed to initialize SeriesApp: %s", e)
|
||||
self._handle_error(e)
|
||||
raise
|
||||
logger.info("SeriesApp initialized for directory: %s", directory_to_search)
|
||||
|
||||
@property
|
||||
def callback_manager(self) -> CallbackManager:
|
||||
"""Get the callback manager instance."""
|
||||
return self._callback_manager
|
||||
|
||||
def __InitList__(self):
|
||||
"""Initialize the series list with missing episodes."""
|
||||
try:
|
||||
self.series_list = self.List.GetMissingEpisode()
|
||||
logger.debug(
|
||||
"Loaded %d series with missing episodes",
|
||||
len(self.series_list)
|
||||
)
|
||||
except (IOError, OSError, RuntimeError) as e:
|
||||
logger.error("Failed to initialize series list: %s", e)
|
||||
self._handle_error(e)
|
||||
raise
|
||||
|
||||
def search(self, words: str) -> List[Dict[str, Any]]:
|
||||
def download_status(self):
|
||||
"""
|
||||
Search for anime series.
|
||||
Event raised when download status changes.
|
||||
|
||||
Subscribe using:
|
||||
app.download_status += handler
|
||||
"""
|
||||
return self._events.download_status
|
||||
|
||||
@download_status.setter
|
||||
def download_status(self, value):
|
||||
"""Set download_status event handler."""
|
||||
self._events.download_status = value
|
||||
|
||||
@property
|
||||
def scan_status(self):
|
||||
"""
|
||||
Event raised when scan status changes.
|
||||
|
||||
Subscribe using:
|
||||
app.scan_status += handler
|
||||
"""
|
||||
return self._events.scan_status
|
||||
|
||||
@scan_status.setter
|
||||
def scan_status(self, value):
|
||||
"""Set scan_status event handler."""
|
||||
self._events.scan_status = value
|
||||
|
||||
def _init_list_sync(self) -> None:
|
||||
"""Synchronous initialization helper for constructor."""
|
||||
self.series_list = self.list.GetMissingEpisode()
|
||||
logger.debug("Loaded %d series with missing episodes", len(self.series_list))
|
||||
|
||||
async def _init_list(self) -> None:
|
||||
"""Initialize the series list with missing episodes (async)."""
|
||||
self.series_list = await asyncio.to_thread(self.list.GetMissingEpisode)
|
||||
logger.debug("Loaded %d series with missing episodes", len(self.series_list))
|
||||
|
||||
|
||||
async def search(self, words: str) -> List[Dict[str, Any]]:
|
||||
"""
|
||||
Search for anime series (async).
|
||||
|
||||
Args:
|
||||
words: Search query
|
||||
@ -158,443 +191,252 @@ class SeriesApp:
|
||||
Raises:
|
||||
RuntimeError: If search fails
|
||||
"""
|
||||
try:
|
||||
logger.info("Searching for: %s", words)
|
||||
results = self.loader.search(words)
|
||||
results = await asyncio.to_thread(self.loader.search, words)
|
||||
logger.info("Found %d results", len(results))
|
||||
return results
|
||||
except (IOError, OSError, RuntimeError) as e:
|
||||
logger.error("Search failed for '%s': %s", words, e)
|
||||
self._handle_error(e)
|
||||
raise
|
||||
|
||||
def download(
|
||||
async def download(
|
||||
self,
|
||||
serieFolder: str,
|
||||
serie_folder: str,
|
||||
season: int,
|
||||
episode: int,
|
||||
key: str,
|
||||
callback: Optional[Callable[[float], None]] = None,
|
||||
language: str = "German Dub"
|
||||
) -> OperationResult:
|
||||
language: str = "German Dub",
|
||||
) -> bool:
|
||||
"""
|
||||
Download an episode.
|
||||
Download an episode (async).
|
||||
|
||||
Args:
|
||||
serieFolder: Serie folder name
|
||||
serie_folder: Serie folder name
|
||||
season: Season number
|
||||
episode: Episode number
|
||||
key: Serie key
|
||||
callback: Optional legacy progress callback
|
||||
language: Language preference
|
||||
|
||||
Returns:
|
||||
OperationResult with download status
|
||||
True if download succeeded, False otherwise
|
||||
"""
|
||||
self._current_operation = f"download_S{season:02d}E{episode:02d}"
|
||||
self._current_operation_id = str(uuid.uuid4())
|
||||
self._operation_status = OperationStatus.RUNNING
|
||||
self._cancel_flag = False
|
||||
logger.info("Starting download: %s S%02dE%02d", serie_folder, season, episode)
|
||||
|
||||
# Fire download started event
|
||||
self._events.download_status(
|
||||
DownloadStatusEventArgs(
|
||||
serie_folder=serie_folder,
|
||||
season=season,
|
||||
episode=episode,
|
||||
status="started",
|
||||
message="Download started",
|
||||
)
|
||||
)
|
||||
|
||||
try:
|
||||
logger.info(
|
||||
"Starting download: %s S%02dE%02d",
|
||||
serieFolder, season, episode
|
||||
def download_callback(progress_info):
|
||||
logger.debug(f"wrapped_callback called with: {progress_info}")
|
||||
|
||||
downloaded = progress_info.get('downloaded_bytes', 0)
|
||||
total_bytes = (
|
||||
progress_info.get('total_bytes')
|
||||
or progress_info.get('total_bytes_estimate', 0)
|
||||
)
|
||||
|
||||
# Notify download starting
|
||||
start_msg = (
|
||||
f"Starting download: {serieFolder} "
|
||||
f"S{season:02d}E{episode:02d}"
|
||||
)
|
||||
self._callback_manager.notify_progress(
|
||||
ProgressContext(
|
||||
operation_type=OperationType.DOWNLOAD,
|
||||
operation_id=self._current_operation_id,
|
||||
phase=ProgressPhase.STARTING,
|
||||
current=0,
|
||||
total=100,
|
||||
percentage=0.0,
|
||||
message=start_msg,
|
||||
metadata={
|
||||
"series": serieFolder,
|
||||
"season": season,
|
||||
"episode": episode,
|
||||
"key": key,
|
||||
"language": language
|
||||
}
|
||||
)
|
||||
)
|
||||
speed = progress_info.get('speed', 0) # bytes/sec
|
||||
eta = progress_info.get('eta') # seconds
|
||||
mbper_sec = speed / (1024 * 1024) if speed else None
|
||||
|
||||
# Check for cancellation before starting
|
||||
if self._is_cancelled():
|
||||
self._callback_manager.notify_completion(
|
||||
CompletionContext(
|
||||
operation_type=OperationType.DOWNLOAD,
|
||||
operation_id=self._current_operation_id,
|
||||
success=False,
|
||||
message="Download cancelled before starting"
|
||||
self._events.download_status(
|
||||
DownloadStatusEventArgs(
|
||||
serie_folder=serie_folder,
|
||||
season=season,
|
||||
episode=episode,
|
||||
status="progress",
|
||||
message="Download progress",
|
||||
progress=(downloaded / total_bytes) * 100 if total_bytes else 0,
|
||||
eta=eta,
|
||||
mbper_sec=mbper_sec,
|
||||
)
|
||||
)
|
||||
return OperationResult(
|
||||
success=False,
|
||||
message="Download cancelled before starting"
|
||||
)
|
||||
|
||||
# Wrap callback to enforce cancellation checks and bridge the new
|
||||
# event-driven progress reporting with the legacy callback API that
|
||||
# the CLI still relies on.
|
||||
def wrapped_callback(progress: float):
|
||||
if self._is_cancelled():
|
||||
raise InterruptedError("Download cancelled by user")
|
||||
|
||||
# Notify progress via new callback system
|
||||
self._callback_manager.notify_progress(
|
||||
ProgressContext(
|
||||
operation_type=OperationType.DOWNLOAD,
|
||||
operation_id=self._current_operation_id,
|
||||
phase=ProgressPhase.IN_PROGRESS,
|
||||
current=int(progress),
|
||||
total=100,
|
||||
percentage=progress,
|
||||
message=f"Downloading: {progress:.1f}%",
|
||||
metadata={
|
||||
"series": serieFolder,
|
||||
"season": season,
|
||||
"episode": episode
|
||||
}
|
||||
)
|
||||
)
|
||||
|
||||
# Call legacy callback if provided
|
||||
if callback:
|
||||
callback(progress)
|
||||
|
||||
# Propagate progress into the legacy callback chain so existing
|
||||
# UI surfaces continue to receive updates without rewriting the
|
||||
# old interfaces.
|
||||
# Call legacy progress_callback if provided
|
||||
if self.progress_callback:
|
||||
self.progress_callback(ProgressInfo(
|
||||
current=int(progress),
|
||||
total=100,
|
||||
message=f"Downloading S{season:02d}E{episode:02d}",
|
||||
percentage=progress,
|
||||
status=OperationStatus.RUNNING
|
||||
))
|
||||
|
||||
# Perform download
|
||||
self.loader.download(
|
||||
# Perform download in thread to avoid blocking event loop
|
||||
download_success = await asyncio.to_thread(
|
||||
self.loader.download,
|
||||
self.directory_to_search,
|
||||
serieFolder,
|
||||
serie_folder,
|
||||
season,
|
||||
episode,
|
||||
key,
|
||||
language,
|
||||
wrapped_callback
|
||||
download_callback
|
||||
)
|
||||
|
||||
self._operation_status = OperationStatus.COMPLETED
|
||||
if download_success:
|
||||
logger.info(
|
||||
"Download completed: %s S%02dE%02d",
|
||||
serieFolder, season, episode
|
||||
"Download completed: %s S%02dE%02d", serie_folder, season, episode
|
||||
)
|
||||
|
||||
# Notify completion
|
||||
msg = f"Successfully downloaded S{season:02d}E{episode:02d}"
|
||||
self._callback_manager.notify_completion(
|
||||
CompletionContext(
|
||||
operation_type=OperationType.DOWNLOAD,
|
||||
operation_id=self._current_operation_id,
|
||||
success=True,
|
||||
message=msg,
|
||||
statistics={
|
||||
"series": serieFolder,
|
||||
"season": season,
|
||||
"episode": episode
|
||||
}
|
||||
# Fire download completed event
|
||||
self._events.download_status(
|
||||
DownloadStatusEventArgs(
|
||||
serie_folder=serie_folder,
|
||||
season=season,
|
||||
episode=episode,
|
||||
status="completed",
|
||||
progress=1.0,
|
||||
message="Download completed successfully",
|
||||
)
|
||||
)
|
||||
|
||||
return OperationResult(
|
||||
success=True,
|
||||
message=msg
|
||||
)
|
||||
|
||||
except InterruptedError as e:
|
||||
self._operation_status = OperationStatus.CANCELLED
|
||||
logger.warning("Download cancelled: %s", e)
|
||||
|
||||
# Notify cancellation
|
||||
self._callback_manager.notify_completion(
|
||||
CompletionContext(
|
||||
operation_type=OperationType.DOWNLOAD,
|
||||
operation_id=self._current_operation_id,
|
||||
success=False,
|
||||
message="Download cancelled"
|
||||
)
|
||||
)
|
||||
|
||||
return OperationResult(
|
||||
success=False,
|
||||
message="Download cancelled",
|
||||
error=e
|
||||
)
|
||||
except (IOError, OSError, RuntimeError) as e:
|
||||
self._operation_status = OperationStatus.FAILED
|
||||
logger.error("Download failed: %s", e)
|
||||
|
||||
# Notify error
|
||||
error_msg = f"Download failed: {str(e)}"
|
||||
self._callback_manager.notify_error(
|
||||
ErrorContext(
|
||||
operation_type=OperationType.DOWNLOAD,
|
||||
operation_id=self._current_operation_id,
|
||||
error=e,
|
||||
message=error_msg,
|
||||
recoverable=False,
|
||||
metadata={
|
||||
"series": serieFolder,
|
||||
"season": season,
|
||||
"episode": episode
|
||||
}
|
||||
)
|
||||
)
|
||||
|
||||
# Notify completion with failure
|
||||
self._callback_manager.notify_completion(
|
||||
CompletionContext(
|
||||
operation_type=OperationType.DOWNLOAD,
|
||||
operation_id=self._current_operation_id,
|
||||
success=False,
|
||||
message=error_msg
|
||||
)
|
||||
)
|
||||
|
||||
self._handle_error(e)
|
||||
return OperationResult(
|
||||
success=False,
|
||||
message=error_msg,
|
||||
error=e
|
||||
)
|
||||
finally:
|
||||
self._current_operation = None
|
||||
self._current_operation_id = None
|
||||
|
||||
def ReScan(
|
||||
self,
|
||||
callback: Optional[Callable[[str, int], None]] = None
|
||||
) -> OperationResult:
|
||||
"""
|
||||
Rescan directory for missing episodes.
|
||||
|
||||
Args:
|
||||
callback: Optional progress callback (folder, current_count)
|
||||
|
||||
Returns:
|
||||
OperationResult with scan status
|
||||
"""
|
||||
self._current_operation = "rescan"
|
||||
self._operation_status = OperationStatus.RUNNING
|
||||
self._cancel_flag = False
|
||||
|
||||
try:
|
||||
logger.info("Starting directory rescan")
|
||||
|
||||
# Get total items to scan
|
||||
total_to_scan = self.SerieScanner.get_total_to_scan()
|
||||
logger.info("Total folders to scan: %d", total_to_scan)
|
||||
|
||||
# Reinitialize scanner
|
||||
self.SerieScanner.reinit()
|
||||
|
||||
# Wrap the scanner callback so we can surface progress through the
|
||||
# new ProgressInfo pipeline while maintaining backwards
|
||||
# compatibility with the legacy tuple-based callback signature.
|
||||
def wrapped_callback(folder: str, current: int):
|
||||
if self._is_cancelled():
|
||||
raise InterruptedError("Scan cancelled by user")
|
||||
|
||||
# Calculate progress
|
||||
if total_to_scan > 0:
|
||||
percentage = (current / total_to_scan * 100)
|
||||
else:
|
||||
percentage = 0
|
||||
|
||||
# Report progress
|
||||
if self.progress_callback:
|
||||
progress_info = ProgressInfo(
|
||||
current=current,
|
||||
total=total_to_scan,
|
||||
message=f"Scanning: {folder}",
|
||||
percentage=percentage,
|
||||
status=OperationStatus.RUNNING
|
||||
)
|
||||
self.progress_callback(progress_info)
|
||||
|
||||
# Call original callback if provided
|
||||
if callback:
|
||||
callback(folder, current)
|
||||
|
||||
# Perform scan
|
||||
self.SerieScanner.scan(wrapped_callback)
|
||||
|
||||
# Reinitialize list
|
||||
self.List = SerieList(self.directory_to_search)
|
||||
self.__InitList__()
|
||||
|
||||
self._operation_status = OperationStatus.COMPLETED
|
||||
logger.info("Directory rescan completed successfully")
|
||||
|
||||
msg = (
|
||||
f"Scan completed. Found {len(self.series_list)} "
|
||||
f"series."
|
||||
)
|
||||
return OperationResult(
|
||||
success=True,
|
||||
message=msg,
|
||||
data={"series_count": len(self.series_list)}
|
||||
logger.warning(
|
||||
"Download failed: %s S%02dE%02d", serie_folder, season, episode
|
||||
)
|
||||
|
||||
except InterruptedError as e:
|
||||
self._operation_status = OperationStatus.CANCELLED
|
||||
logger.warning("Scan cancelled: %s", e)
|
||||
return OperationResult(
|
||||
success=False,
|
||||
message="Scan cancelled",
|
||||
error=e
|
||||
# Fire download failed event
|
||||
self._events.download_status(
|
||||
DownloadStatusEventArgs(
|
||||
serie_folder=serie_folder,
|
||||
season=season,
|
||||
episode=episode,
|
||||
status="failed",
|
||||
message="Download failed",
|
||||
)
|
||||
except (IOError, OSError, RuntimeError) as e:
|
||||
self._operation_status = OperationStatus.FAILED
|
||||
logger.error("Scan failed: %s", e)
|
||||
self._handle_error(e)
|
||||
return OperationResult(
|
||||
success=False,
|
||||
message=f"Scan failed: {str(e)}",
|
||||
error=e
|
||||
)
|
||||
finally:
|
||||
self._current_operation = None
|
||||
|
||||
async def async_download(
|
||||
self,
|
||||
serieFolder: str,
|
||||
season: int,
|
||||
episode: int,
|
||||
key: str,
|
||||
callback: Optional[Callable[[float], None]] = None,
|
||||
language: str = "German Dub"
|
||||
) -> OperationResult:
|
||||
"""
|
||||
Async version of download method.
|
||||
return download_success
|
||||
|
||||
Args:
|
||||
serieFolder: Serie folder name
|
||||
season: Season number
|
||||
episode: Episode number
|
||||
key: Serie key
|
||||
callback: Optional progress callback
|
||||
language: Language preference
|
||||
|
||||
Returns:
|
||||
OperationResult with download status
|
||||
"""
|
||||
loop = asyncio.get_event_loop()
|
||||
return await loop.run_in_executor(
|
||||
None,
|
||||
self.download,
|
||||
serieFolder,
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Download error: %s S%02dE%02d - %s",
|
||||
serie_folder,
|
||||
season,
|
||||
episode,
|
||||
key,
|
||||
callback,
|
||||
language
|
||||
str(e),
|
||||
exc_info=True,
|
||||
)
|
||||
|
||||
async def async_rescan(
|
||||
self,
|
||||
callback: Optional[Callable[[str, int], None]] = None
|
||||
) -> OperationResult:
|
||||
"""
|
||||
Async version of ReScan method.
|
||||
|
||||
Args:
|
||||
callback: Optional progress callback
|
||||
|
||||
Returns:
|
||||
OperationResult with scan status
|
||||
"""
|
||||
loop = asyncio.get_event_loop()
|
||||
return await loop.run_in_executor(
|
||||
None,
|
||||
self.ReScan,
|
||||
callback
|
||||
# Fire download error event
|
||||
self._events.download_status(
|
||||
DownloadStatusEventArgs(
|
||||
serie_folder=serie_folder,
|
||||
season=season,
|
||||
episode=episode,
|
||||
status="failed",
|
||||
error=e,
|
||||
message=f"Download error: {str(e)}",
|
||||
)
|
||||
)
|
||||
|
||||
def cancel_operation(self) -> bool:
|
||||
"""
|
||||
Cancel the current operation.
|
||||
|
||||
Returns:
|
||||
True if operation cancelled, False if no operation running
|
||||
"""
|
||||
if (self._current_operation and
|
||||
self._operation_status == OperationStatus.RUNNING):
|
||||
logger.info(
|
||||
"Cancelling operation: %s",
|
||||
self._current_operation
|
||||
)
|
||||
self._cancel_flag = True
|
||||
return True
|
||||
return False
|
||||
|
||||
def _is_cancelled(self) -> bool:
|
||||
"""Check if the current operation has been cancelled."""
|
||||
return self._cancel_flag
|
||||
|
||||
def _handle_error(self, error: Exception) -> None:
|
||||
async def rescan(self) -> int:
|
||||
"""
|
||||
Handle errors and notify via callback.
|
||||
Rescan directory for missing episodes (async).
|
||||
|
||||
Args:
|
||||
error: Exception that occurred
|
||||
Returns:
|
||||
Number of series with missing episodes after rescan.
|
||||
"""
|
||||
if self.error_callback:
|
||||
logger.info("Starting directory rescan")
|
||||
|
||||
try:
|
||||
self.error_callback(error)
|
||||
except (RuntimeError, ValueError) as callback_error:
|
||||
logger.error(
|
||||
"Error in error callback: %s",
|
||||
callback_error
|
||||
# Get total items to scan
|
||||
total_to_scan = await asyncio.to_thread(self.serie_scanner.get_total_to_scan)
|
||||
logger.info("Total folders to scan: %d", total_to_scan)
|
||||
|
||||
# Fire scan started event
|
||||
self._events.scan_status(
|
||||
ScanStatusEventArgs(
|
||||
current=0,
|
||||
total=total_to_scan,
|
||||
folder="",
|
||||
status="started",
|
||||
progress=0.0,
|
||||
message="Scan started",
|
||||
)
|
||||
)
|
||||
|
||||
def get_series_list(self) -> List[Any]:
|
||||
# Reinitialize scanner
|
||||
await asyncio.to_thread(self.serie_scanner.reinit)
|
||||
|
||||
def scan_callback(folder: str, current: int):
|
||||
# Calculate progress
|
||||
if total_to_scan > 0:
|
||||
progress = current / total_to_scan
|
||||
else:
|
||||
progress = 0.0
|
||||
|
||||
# Fire scan progress event
|
||||
self._events.scan_status(
|
||||
ScanStatusEventArgs(
|
||||
current=current,
|
||||
total=total_to_scan,
|
||||
folder=folder,
|
||||
status="progress",
|
||||
progress=progress,
|
||||
message=f"Scanning: {folder}",
|
||||
)
|
||||
)
|
||||
|
||||
# Perform scan
|
||||
await asyncio.to_thread(self.serie_scanner.scan, scan_callback)
|
||||
|
||||
# Reinitialize list
|
||||
self.list = SerieList(self.directory_to_search)
|
||||
await self._init_list()
|
||||
|
||||
logger.info("Directory rescan completed successfully")
|
||||
|
||||
# Fire scan completed event
|
||||
self._events.scan_status(
|
||||
ScanStatusEventArgs(
|
||||
current=total_to_scan,
|
||||
total=total_to_scan,
|
||||
folder="",
|
||||
status="completed",
|
||||
progress=1.0,
|
||||
message=f"Scan completed. Found {len(self.series_list)} series with missing episodes.",
|
||||
)
|
||||
)
|
||||
|
||||
return len(self.series_list)
|
||||
|
||||
except InterruptedError:
|
||||
logger.warning("Scan cancelled by user")
|
||||
|
||||
# Fire scan cancelled event
|
||||
self._events.scan_status(
|
||||
ScanStatusEventArgs(
|
||||
current=0,
|
||||
total=total_to_scan if 'total_to_scan' in locals() else 0,
|
||||
folder="",
|
||||
status="cancelled",
|
||||
message="Scan cancelled by user",
|
||||
)
|
||||
)
|
||||
raise
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Scan error: %s", str(e), exc_info=True)
|
||||
|
||||
# Fire scan failed event
|
||||
self._events.scan_status(
|
||||
ScanStatusEventArgs(
|
||||
current=0,
|
||||
total=total_to_scan if 'total_to_scan' in locals() else 0,
|
||||
folder="",
|
||||
status="failed",
|
||||
error=e,
|
||||
message=f"Scan error: {str(e)}",
|
||||
)
|
||||
)
|
||||
raise
|
||||
|
||||
async def get_series_list(self) -> List[Any]:
|
||||
"""
|
||||
Get the current series list.
|
||||
Get the current series list (async).
|
||||
|
||||
Returns:
|
||||
List of series with missing episodes
|
||||
"""
|
||||
return self.series_list
|
||||
|
||||
def refresh_series_list(self) -> None:
|
||||
"""Reload the cached series list from the underlying data store."""
|
||||
self.__InitList__()
|
||||
|
||||
def get_operation_status(self) -> OperationStatus:
|
||||
"""
|
||||
Get the current operation status.
|
||||
|
||||
Returns:
|
||||
Current operation status
|
||||
"""
|
||||
return self._operation_status
|
||||
|
||||
def get_current_operation(self) -> Optional[str]:
|
||||
"""
|
||||
Get the current operation name.
|
||||
|
||||
Returns:
|
||||
Name of current operation or None
|
||||
"""
|
||||
return self._current_operation
|
||||
async def refresh_series_list(self) -> None:
|
||||
"""Reload the cached series list from the underlying data store (async)."""
|
||||
await self._init_list()
|
||||
|
||||
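For reference, a small driver sketch for the event-based `SeriesApp` interface introduced above; the anime directory, series identifiers, and the single-argument constructor are placeholders/assumptions, and event subscription follows the docstring (`app.download_status += handler`).

```python
# Sketch only: subscribe to the new SeriesApp events and run a rescan plus
# one download. Paths and series identifiers are placeholders.
import asyncio

from src.core.SeriesApp import SeriesApp


def on_download(args) -> None:
    print(f"download {args.status}: {args.serie_folder} "
          f"S{args.season:02d}E{args.episode:02d} ({args.progress})")


def on_scan(args) -> None:
    print(f"scan {args.status}: {args.current}/{args.total} {args.folder}")


async def main() -> None:
    app = SeriesApp("/path/to/anime")  # placeholder directory
    app.download_status += on_download  # subscription per the class docstring
    app.scan_status += on_scan

    missing = await app.rescan()
    print(f"{missing} series with missing episodes")

    ok = await app.download("Some Serie (2025)", 1, 1, "some-serie")
    print("download succeeded" if ok else "download failed")


asyncio.run(main())
```
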
@ -93,12 +93,16 @@ class AniworldLoader(Loader):
|
||||
|
||||
def clear_cache(self):
|
||||
"""Clear the cached HTML data."""
|
||||
logging.debug("Clearing HTML cache")
|
||||
self._KeyHTMLDict = {}
|
||||
self._EpisodeHTMLDict = {}
|
||||
logging.debug("HTML cache cleared successfully")
|
||||
|
||||
def remove_from_cache(self):
|
||||
"""Remove episode HTML from cache."""
|
||||
logging.debug("Removing episode HTML from cache")
|
||||
self._EpisodeHTMLDict = {}
|
||||
logging.debug("Episode HTML cache cleared")
|
||||
|
||||
def search(self, word: str) -> list:
|
||||
"""Search for anime series.
|
||||
@ -109,23 +113,30 @@ class AniworldLoader(Loader):
|
||||
Returns:
|
||||
List of found series
|
||||
"""
|
||||
logging.info(f"Searching for anime with keyword: '{word}'")
|
||||
search_url = (
|
||||
f"{self.ANIWORLD_TO}/ajax/seriesSearch?keyword={quote(word)}"
|
||||
)
|
||||
logging.debug(f"Search URL: {search_url}")
|
||||
anime_list = self.fetch_anime_list(search_url)
|
||||
logging.info(f"Found {len(anime_list)} anime series for keyword '{word}'")
|
||||
|
||||
return anime_list
|
||||
|
||||
def fetch_anime_list(self, url: str) -> list:
|
||||
logging.debug(f"Fetching anime list from URL: {url}")
|
||||
response = self.session.get(url, timeout=self.DEFAULT_REQUEST_TIMEOUT)
|
||||
response.raise_for_status()
|
||||
logging.debug(f"Response status code: {response.status_code}")
|
||||
|
||||
clean_text = response.text.strip()
|
||||
|
||||
try:
|
||||
decoded_data = json.loads(html.unescape(clean_text))
|
||||
logging.debug(f"Successfully decoded JSON data on first attempt")
|
||||
return decoded_data if isinstance(decoded_data, list) else []
|
||||
except json.JSONDecodeError:
|
||||
logging.warning("Initial JSON decode failed, attempting cleanup")
|
||||
try:
|
||||
# Remove BOM and problematic characters
|
||||
clean_text = clean_text.encode('utf-8').decode('utf-8-sig')
|
||||
@ -133,8 +144,10 @@ class AniworldLoader(Loader):
|
||||
clean_text = re.sub(r'[\x00-\x1F\x7F-\x9F]', '', clean_text)
|
||||
# Parse the new text
|
||||
decoded_data = json.loads(clean_text)
|
||||
logging.debug("Successfully decoded JSON after cleanup")
|
||||
return decoded_data if isinstance(decoded_data, list) else []
|
||||
except (requests.RequestException, json.JSONDecodeError) as exc:
|
||||
logging.error(f"Failed to decode anime list from {url}: {exc}")
|
||||
raise ValueError("Could not get valid anime: ") from exc
|
||||
|
||||
def _get_language_key(self, language: str) -> int:
|
||||
@ -152,6 +165,7 @@ class AniworldLoader(Loader):
|
||||
language_code = 2
|
||||
if language == "German Sub":
|
||||
language_code = 3
|
||||
logging.debug(f"Converted language '{language}' to code {language_code}")
|
||||
return language_code
|
||||
|
||||
def is_language(
|
||||
@ -162,6 +176,7 @@ class AniworldLoader(Loader):
|
||||
language: str = "German Dub"
|
||||
) -> bool:
|
||||
"""Check if episode is available in specified language."""
|
||||
logging.debug(f"Checking if S{season:02}E{episode:03} ({key}) is available in {language}")
|
||||
language_code = self._get_language_key(language)
|
||||
|
||||
episode_soup = BeautifulSoup(
|
||||
@ -179,7 +194,9 @@ class AniworldLoader(Loader):
|
||||
if lang_key and lang_key.isdigit():
|
||||
languages.append(int(lang_key))
|
||||
|
||||
return language_code in languages
|
||||
is_available = language_code in languages
|
||||
logging.debug(f"Available languages for S{season:02}E{episode:03}: {languages}, requested: {language_code}, available: {is_available}")
|
||||
return is_available
|
||||
|
||||
def download(
|
||||
self,
|
||||
@ -192,10 +209,12 @@ class AniworldLoader(Loader):
|
||||
progress_callback=None
|
||||
) -> bool:
|
||||
"""Download episode to specified directory."""
|
||||
logging.info(f"Starting download for S{season:02}E{episode:03} ({key}) in {language}")
|
||||
sanitized_anime_title = ''.join(
|
||||
char for char in self.get_title(key)
|
||||
if char not in self.INVALID_PATH_CHARS
|
||||
)
|
||||
logging.debug(f"Sanitized anime title: {sanitized_anime_title}")
|
||||
|
||||
if season == 0:
|
||||
output_file = (
|
||||
@ -215,16 +234,20 @@ class AniworldLoader(Loader):
|
||||
f"Season {season}"
|
||||
)
|
||||
output_path = os.path.join(folder_path, output_file)
|
||||
logging.debug(f"Output path: {output_path}")
|
||||
os.makedirs(os.path.dirname(output_path), exist_ok=True)
|
||||
|
||||
temp_dir = "./Temp/"
|
||||
os.makedirs(os.path.dirname(temp_dir), exist_ok=True)
|
||||
temp_path = os.path.join(temp_dir, output_file)
|
||||
logging.debug(f"Temporary path: {temp_path}")
|
||||
|
||||
for provider in self.SUPPORTED_PROVIDERS:
|
||||
logging.debug(f"Attempting download with provider: {provider}")
|
||||
link, header = self._get_direct_link_from_provider(
|
||||
season, episode, key, language
|
||||
)
|
||||
logging.debug("Direct link obtained from provider")
|
||||
ydl_opts = {
|
||||
'fragment_retries': float('inf'),
|
||||
'outtmpl': temp_path,
|
||||
@ -236,18 +259,69 @@ class AniworldLoader(Loader):
|
||||
|
||||
if header:
|
||||
ydl_opts['http_headers'] = header
|
||||
logging.debug("Using custom headers for download")
|
||||
if progress_callback:
|
||||
ydl_opts['progress_hooks'] = [progress_callback]
|
||||
# Wrap the callback to add logging
|
||||
def logged_progress_callback(d):
|
||||
logging.debug(
|
||||
f"YT-DLP progress: status={d.get('status')}, "
|
||||
f"downloaded={d.get('downloaded_bytes')}, "
|
||||
f"total={d.get('total_bytes')}, "
|
||||
f"speed={d.get('speed')}"
|
||||
)
|
||||
progress_callback(d)
|
||||
|
||||
ydl_opts['progress_hooks'] = [logged_progress_callback]
|
||||
logging.debug("Progress callback registered with YT-DLP")
|
||||
|
||||
try:
|
||||
logging.debug("Starting YoutubeDL download")
|
||||
logging.debug(f"Download link: {link[:100]}...")
|
||||
logging.debug(f"YDL options: {ydl_opts}")
|
||||
|
||||
with YoutubeDL(ydl_opts) as ydl:
|
||||
ydl.download([link])
|
||||
info = ydl.extract_info(link, download=True)
|
||||
logging.debug(
|
||||
f"Download info: "
|
||||
f"title={info.get('title')}, "
|
||||
f"filesize={info.get('filesize')}"
|
||||
)
|
||||
|
||||
if os.path.exists(temp_path):
|
||||
logging.debug("Moving file from temp to final destination")
|
||||
shutil.copy(temp_path, output_path)
|
||||
os.remove(temp_path)
|
||||
break
|
||||
logging.info(
|
||||
f"Download completed successfully: {output_file}"
|
||||
)
|
||||
self.clear_cache()
|
||||
return True
|
||||
else:
|
||||
logging.error(
|
||||
f"Download failed: temp file not found at {temp_path}"
|
||||
)
|
||||
self.clear_cache()
|
||||
return False
|
||||
except BrokenPipeError as e:
|
||||
logging.error(
|
||||
f"Broken pipe error with provider {provider}: {e}. "
|
||||
f"This usually means the stream connection was closed."
|
||||
)
|
||||
# Try next provider if available
|
||||
continue
|
||||
except Exception as e:
|
||||
logging.error(
|
||||
f"YoutubeDL download failed with provider {provider}: "
|
||||
f"{type(e).__name__}: {e}"
|
||||
)
|
||||
# Try next provider if available
|
||||
continue
|
||||
break
|
||||
|
||||
# If we get here, all providers failed
|
||||
logging.error("All download providers failed")
|
||||
self.clear_cache()
|
||||
return False
|
||||
|
||||
def get_site_key(self) -> str:
|
||||
"""Get the site key for this provider."""
|
||||
@ -255,6 +329,7 @@ class AniworldLoader(Loader):
|
||||
|
||||
def get_title(self, key: str) -> str:
|
||||
"""Get anime title from series key."""
|
||||
logging.debug(f"Getting title for key: {key}")
|
||||
soup = BeautifulSoup(
|
||||
self._get_key_html(key).content,
|
||||
'html.parser'
|
||||
@ -262,8 +337,11 @@ class AniworldLoader(Loader):
|
||||
title_div = soup.find('div', class_='series-title')
|
||||
|
||||
if title_div:
|
||||
return title_div.find('h1').find('span').text
|
||||
title = title_div.find('h1').find('span').text
|
||||
logging.debug(f"Found title: {title}")
|
||||
return title
|
||||
|
||||
logging.warning(f"No title found for key: {key}")
|
||||
return ""
|
||||
|
||||
def _get_key_html(self, key: str):
|
||||
@ -276,14 +354,18 @@ class AniworldLoader(Loader):
|
||||
Cached or fetched HTML response
|
||||
"""
|
||||
if key in self._KeyHTMLDict:
|
||||
logging.debug(f"Using cached HTML for key: {key}")
|
||||
return self._KeyHTMLDict[key]
|
||||
|
||||
# Sanitize key parameter for URL
|
||||
safe_key = quote(key, safe='')
|
||||
url = f"{self.ANIWORLD_TO}/anime/stream/{safe_key}"
|
||||
logging.debug(f"Fetching HTML for key: {key} from {url}")
|
||||
self._KeyHTMLDict[key] = self.session.get(
|
||||
f"{self.ANIWORLD_TO}/anime/stream/{safe_key}",
|
||||
url,
|
||||
timeout=self.DEFAULT_REQUEST_TIMEOUT
|
||||
)
|
||||
logging.debug(f"Cached HTML for key: {key}")
|
||||
return self._KeyHTMLDict[key]
|
||||
|
||||
def _get_episode_html(self, season: int, episode: int, key: str):
|
||||
@ -302,11 +384,14 @@ class AniworldLoader(Loader):
|
||||
"""
|
||||
# Validate season and episode numbers
|
||||
if season < 1 or season > 999:
|
||||
logging.error(f"Invalid season number: {season}")
|
||||
raise ValueError(f"Invalid season number: {season}")
|
||||
if episode < 1 or episode > 9999:
|
||||
logging.error(f"Invalid episode number: {episode}")
|
||||
raise ValueError(f"Invalid episode number: {episode}")
|
||||
|
||||
if (key, season, episode) in self._EpisodeHTMLDict:
|
||||
logging.debug(f"Using cached HTML for S{season:02}E{episode:03} ({key})")
|
||||
return self._EpisodeHTMLDict[(key, season, episode)]
|
||||
|
||||
# Sanitize key parameter for URL
|
||||
@ -315,8 +400,10 @@ class AniworldLoader(Loader):
|
||||
f"{self.ANIWORLD_TO}/anime/stream/{safe_key}/"
|
||||
f"staffel-{season}/episode-{episode}"
|
||||
)
|
||||
logging.debug(f"Fetching episode HTML from: {link}")
|
||||
html = self.session.get(link, timeout=self.DEFAULT_REQUEST_TIMEOUT)
|
||||
self._EpisodeHTMLDict[(key, season, episode)] = html
|
||||
logging.debug(f"Cached episode HTML for S{season:02}E{episode:03} ({key})")
|
||||
return self._EpisodeHTMLDict[(key, season, episode)]
|
||||
|
||||
def _get_provider_from_html(
|
||||
@ -336,6 +423,7 @@ class AniworldLoader(Loader):
|
||||
2: 'https://aniworld.to/redirect/1766405'},
|
||||
}
|
||||
"""
|
||||
logging.debug(f"Extracting providers from HTML for S{season:02}E{episode:03} ({key})")
|
||||
soup = BeautifulSoup(
|
||||
self._get_episode_html(season, episode, key).content,
|
||||
'html.parser'
|
||||
@ -347,6 +435,7 @@ class AniworldLoader(Loader):
|
||||
)
|
||||
|
||||
if not episode_links:
|
||||
logging.warning(f"No episode links found for S{season:02}E{episode:03} ({key})")
|
||||
return providers
|
||||
|
||||
for link in episode_links:
|
||||
@ -374,7 +463,9 @@ class AniworldLoader(Loader):
|
||||
providers[provider_name][lang_key] = (
|
||||
f"{self.ANIWORLD_TO}{redirect_link}"
|
||||
)
|
||||
logging.debug(f"Found provider: {provider_name}, lang_key: {lang_key}")
|
||||
|
||||
logging.debug(f"Total providers found: {len(providers)}")
|
||||
return providers
|
||||
|
||||
def _get_redirect_link(
|
||||
@ -385,6 +476,7 @@ class AniworldLoader(Loader):
|
||||
language: str = "German Dub"
|
||||
):
|
||||
"""Get redirect link for episode in specified language."""
|
||||
logging.debug(f"Getting redirect link for S{season:02}E{episode:03} ({key}) in {language}")
|
||||
language_code = self._get_language_key(language)
|
||||
if self.is_language(season, episode, key, language):
|
||||
for (provider_name, lang_dict) in (
|
||||
@ -393,7 +485,9 @@ class AniworldLoader(Loader):
|
||||
).items()
|
||||
):
|
||||
if language_code in lang_dict:
|
||||
logging.debug(f"Found redirect link with provider: {provider_name}")
|
||||
return (lang_dict[language_code], provider_name)
|
||||
logging.warning(f"No redirect link found for S{season:02}E{episode:03} ({key}) in {language}")
|
||||
return None
|
||||
|
||||
def _get_embeded_link(
|
||||
@ -404,15 +498,18 @@ class AniworldLoader(Loader):
|
||||
language: str = "German Dub"
|
||||
):
|
||||
"""Get embedded link from redirect link."""
|
||||
logging.debug(f"Getting embedded link for S{season:02}E{episode:03} ({key}) in {language}")
|
||||
redirect_link, provider_name = (
|
||||
self._get_redirect_link(season, episode, key, language)
|
||||
)
|
||||
logging.debug(f"Redirect link: {redirect_link}, provider: {provider_name}")
|
||||
|
||||
embeded_link = self.session.get(
|
||||
redirect_link,
|
||||
timeout=self.DEFAULT_REQUEST_TIMEOUT,
|
||||
headers={'User-Agent': self.RANDOM_USER_AGENT}
|
||||
).url
|
||||
logging.debug(f"Embedded link: {embeded_link}")
|
||||
return embeded_link
|
||||
|
||||
def _get_direct_link_from_provider(
|
||||
@ -423,12 +520,15 @@ class AniworldLoader(Loader):
|
||||
language: str = "German Dub"
|
||||
):
|
||||
"""Get direct download link from streaming provider."""
|
||||
logging.debug(f"Getting direct link from provider for S{season:02}E{episode:03} ({key}) in {language}")
|
||||
embeded_link = self._get_embeded_link(
|
||||
season, episode, key, language
|
||||
)
|
||||
if embeded_link is None:
|
||||
logging.error(f"No embedded link found for S{season:02}E{episode:03} ({key})")
|
||||
return None
|
||||
|
||||
logging.debug(f"Using VOE provider to extract direct link")
|
||||
return self.Providers.GetProvider(
|
||||
"VOE"
|
||||
).get_link(embeded_link, self.DEFAULT_REQUEST_TIMEOUT)
|
||||
@ -442,19 +542,23 @@ class AniworldLoader(Loader):
|
||||
Returns:
|
||||
Dictionary mapping season numbers to episode counts
|
||||
"""
|
||||
logging.info(f"Getting season and episode count for slug: {slug}")
|
||||
# Sanitize slug parameter for URL
|
||||
safe_slug = quote(slug, safe='')
|
||||
base_url = f"{self.ANIWORLD_TO}/anime/stream/{safe_slug}/"
|
||||
logging.debug(f"Base URL: {base_url}")
|
||||
response = requests.get(base_url, timeout=self.DEFAULT_REQUEST_TIMEOUT)
|
||||
soup = BeautifulSoup(response.content, 'html.parser')
|
||||
|
||||
season_meta = soup.find('meta', itemprop='numberOfSeasons')
|
||||
number_of_seasons = int(season_meta['content']) if season_meta else 0
|
||||
logging.info(f"Found {number_of_seasons} seasons for '{slug}'")
|
||||
|
||||
episode_counts = {}
|
||||
|
||||
for season in range(1, number_of_seasons + 1):
|
||||
season_url = f"{base_url}staffel-{season}"
|
||||
logging.debug(f"Fetching episodes for season {season} from: {season_url}")
|
||||
response = requests.get(
|
||||
season_url,
|
||||
timeout=self.DEFAULT_REQUEST_TIMEOUT,
|
||||
@ -469,5 +573,7 @@ class AniworldLoader(Loader):
|
||||
)
|
||||
|
||||
episode_counts[season] = len(unique_links)
|
||||
logging.debug(f"Season {season} has {episode_counts[season]} episodes")
|
||||
|
||||
logging.info(f"Episode count retrieval complete for '{slug}': {episode_counts}")
|
||||
return episode_counts
|
||||
|
||||
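A standalone sketch of the yt-dlp progress-hook contract the download path above relies on (`status`, `downloaded_bytes`, `total_bytes`, `speed`, `eta`); the output template and URL are placeholders.

```python
# Sketch only: the same progress-hook dict keys consumed by download_callback
# and logged_progress_callback above. The URL is a placeholder.
from yt_dlp import YoutubeDL


def progress_hook(d: dict) -> None:
    if d.get("status") == "downloading":
        done = d.get("downloaded_bytes", 0)
        total = d.get("total_bytes") or d.get("total_bytes_estimate") or 0
        speed = d.get("speed") or 0
        print(f"{done}/{total} bytes at {speed / (1024 * 1024):.2f} MB/s, "
              f"eta {d.get('eta')}s")
    elif d.get("status") == "finished":
        print("download finished")


ydl_opts = {
    "outtmpl": "./Temp/%(title)s.%(ext)s",
    "progress_hooks": [progress_hook],
    "fragment_retries": 10,
}

with YoutubeDL(ydl_opts) as ydl:
    ydl.download(["https://example.com/video.m3u8"])  # placeholder link
```
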
@ -1,258 +0,0 @@
|
||||
"""Analytics API endpoints for accessing system analytics and reports.
|
||||
|
||||
Provides REST API endpoints for querying analytics data including download
|
||||
statistics, series popularity, storage analysis, and performance reports.
|
||||
"""
|
||||
|
||||
from typing import Optional
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from pydantic import BaseModel
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from src.server.database.connection import get_db_session
|
||||
from src.server.services.analytics_service import get_analytics_service
|
||||
|
||||
router = APIRouter(prefix="/api/analytics", tags=["analytics"])
|
||||
|
||||
|
||||
class DownloadStatsResponse(BaseModel):
|
||||
"""Download statistics response model."""
|
||||
|
||||
total_downloads: int
|
||||
successful_downloads: int
|
||||
failed_downloads: int
|
||||
total_bytes_downloaded: int
|
||||
average_speed_mbps: float
|
||||
success_rate: float
|
||||
average_duration_seconds: float
|
||||
|
||||
|
||||
class SeriesPopularityResponse(BaseModel):
|
||||
"""Series popularity response model."""
|
||||
|
||||
series_name: str
|
||||
download_count: int
|
||||
total_size_bytes: int
|
||||
last_download: Optional[str]
|
||||
success_rate: float
|
||||
|
||||
|
||||
class StorageAnalysisResponse(BaseModel):
|
||||
"""Storage analysis response model."""
|
||||
|
||||
total_storage_bytes: int
|
||||
used_storage_bytes: int
|
||||
free_storage_bytes: int
|
||||
storage_percent_used: float
|
||||
downloads_directory_size_bytes: int
|
||||
cache_directory_size_bytes: int
|
||||
logs_directory_size_bytes: int
|
||||
|
||||
|
||||
class PerformanceReportResponse(BaseModel):
|
||||
"""Performance report response model."""
|
||||
|
||||
period_start: str
|
||||
period_end: str
|
||||
downloads_per_hour: float
|
||||
average_queue_size: float
|
||||
peak_memory_usage_mb: float
|
||||
average_cpu_percent: float
|
||||
uptime_seconds: float
|
||||
error_rate: float
|
||||
|
||||
|
||||
class SummaryReportResponse(BaseModel):
|
||||
"""Comprehensive analytics summary response."""
|
||||
|
||||
timestamp: str
|
||||
download_stats: DownloadStatsResponse
|
||||
series_popularity: list[SeriesPopularityResponse]
|
||||
storage_analysis: StorageAnalysisResponse
|
||||
performance_report: PerformanceReportResponse
|
||||
|
||||
|
||||
@router.get("/downloads", response_model=DownloadStatsResponse)
|
||||
async def get_download_statistics(
|
||||
days: int = 30,
|
||||
db: AsyncSession = Depends(get_db_session),
|
||||
) -> DownloadStatsResponse:
|
||||
"""Get download statistics for specified period.
|
||||
|
||||
Args:
|
||||
days: Number of days to analyze (default: 30)
|
||||
db: Database session
|
||||
|
||||
Returns:
|
||||
Download statistics including success rates and speeds
|
||||
"""
|
||||
try:
|
||||
service = get_analytics_service()
|
||||
stats = await service.get_download_stats(db, days=days)
|
||||
|
||||
return DownloadStatsResponse(
|
||||
total_downloads=stats.total_downloads,
|
||||
successful_downloads=stats.successful_downloads,
|
||||
failed_downloads=stats.failed_downloads,
|
||||
total_bytes_downloaded=stats.total_bytes_downloaded,
|
||||
average_speed_mbps=stats.average_speed_mbps,
|
||||
success_rate=stats.success_rate,
|
||||
average_duration_seconds=stats.average_duration_seconds,
|
||||
)
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=500,
|
||||
detail=f"Failed to get download statistics: {str(e)}",
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/series-popularity",
|
||||
response_model=list[SeriesPopularityResponse]
|
||||
)
|
||||
async def get_series_popularity(
|
||||
limit: int = 10,
|
||||
db: AsyncSession = Depends(get_db_session),
|
||||
) -> list[SeriesPopularityResponse]:
|
||||
"""Get most popular series by download count.
|
||||
|
||||
Args:
|
||||
limit: Maximum number of series (default: 10)
|
||||
db: Database session
|
||||
|
||||
Returns:
|
||||
List of series sorted by popularity
|
||||
"""
|
||||
try:
|
||||
service = get_analytics_service()
|
||||
popularity = await service.get_series_popularity(db, limit=limit)
|
||||
|
||||
return [
|
||||
SeriesPopularityResponse(
|
||||
series_name=p.series_name,
|
||||
download_count=p.download_count,
|
||||
total_size_bytes=p.total_size_bytes,
|
||||
last_download=p.last_download,
|
||||
success_rate=p.success_rate,
|
||||
)
|
||||
for p in popularity
|
||||
]
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=500,
|
||||
detail=f"Failed to get series popularity: {str(e)}",
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/storage",
|
||||
response_model=StorageAnalysisResponse
|
||||
)
|
||||
async def get_storage_analysis() -> StorageAnalysisResponse:
|
||||
"""Get current storage usage analysis.
|
||||
|
||||
Returns:
|
||||
Storage breakdown including disk and directory usage
|
||||
"""
|
||||
try:
|
||||
service = get_analytics_service()
|
||||
analysis = service.get_storage_analysis()
|
||||
|
||||
return StorageAnalysisResponse(
|
||||
total_storage_bytes=analysis.total_storage_bytes,
|
||||
used_storage_bytes=analysis.used_storage_bytes,
|
||||
free_storage_bytes=analysis.free_storage_bytes,
|
||||
storage_percent_used=analysis.storage_percent_used,
|
||||
downloads_directory_size_bytes=(
|
||||
analysis.downloads_directory_size_bytes
|
||||
),
|
||||
cache_directory_size_bytes=(
|
||||
analysis.cache_directory_size_bytes
|
||||
),
|
||||
logs_directory_size_bytes=(
|
||||
analysis.logs_directory_size_bytes
|
||||
),
|
||||
)
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=500,
|
||||
detail=f"Failed to get storage analysis: {str(e)}",
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/performance",
|
||||
response_model=PerformanceReportResponse
|
||||
)
|
||||
async def get_performance_report(
|
||||
hours: int = 24,
|
||||
db: AsyncSession = Depends(get_db_session),
|
||||
) -> PerformanceReportResponse:
|
||||
"""Get performance metrics for specified period.
|
||||
|
||||
Args:
|
||||
hours: Number of hours to analyze (default: 24)
|
||||
db: Database session
|
||||
|
||||
Returns:
|
||||
Performance metrics including speeds and system usage
|
||||
"""
|
||||
try:
|
||||
service = get_analytics_service()
|
||||
report = await service.get_performance_report(db, hours=hours)
|
||||
|
||||
return PerformanceReportResponse(
|
||||
period_start=report.period_start,
|
||||
period_end=report.period_end,
|
||||
downloads_per_hour=report.downloads_per_hour,
|
||||
average_queue_size=report.average_queue_size,
|
||||
peak_memory_usage_mb=report.peak_memory_usage_mb,
|
||||
average_cpu_percent=report.average_cpu_percent,
|
||||
uptime_seconds=report.uptime_seconds,
|
||||
error_rate=report.error_rate,
|
||||
)
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=500,
|
||||
detail=f"Failed to get performance report: {str(e)}",
|
||||
)
|
||||
|
||||
|
||||
@router.get("/summary", response_model=SummaryReportResponse)
|
||||
async def get_summary_report(
|
||||
db: AsyncSession = Depends(get_db_session),
|
||||
) -> SummaryReportResponse:
|
||||
"""Get comprehensive analytics summary.
|
||||
|
||||
Args:
|
||||
db: Database session
|
||||
|
||||
Returns:
|
||||
Complete analytics report with all metrics
|
||||
"""
|
||||
try:
|
||||
service = get_analytics_service()
|
||||
summary = await service.generate_summary_report(db)
|
||||
|
||||
return SummaryReportResponse(
|
||||
timestamp=summary["timestamp"],
|
||||
download_stats=DownloadStatsResponse(
|
||||
**summary["download_stats"]
|
||||
),
|
||||
series_popularity=[
|
||||
SeriesPopularityResponse(**p)
|
||||
for p in summary["series_popularity"]
|
||||
],
|
||||
storage_analysis=StorageAnalysisResponse(
|
||||
**summary["storage_analysis"]
|
||||
),
|
||||
performance_report=PerformanceReportResponse(
|
||||
**summary["performance_report"]
|
||||
),
|
||||
)
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=500,
|
||||
detail=f"Failed to generate summary report: {str(e)}",
|
||||
)
|
||||
@ -1,13 +1,10 @@
|
||||
from typing import Any, List, Optional
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
from pydantic import BaseModel
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from src.server.utils.dependencies import (
|
||||
get_optional_series_app,
|
||||
get_series_app,
|
||||
require_auth,
|
||||
)
|
||||
from src.core.entities.series import Serie
|
||||
from src.server.utils.dependencies import get_series_app, require_auth
|
||||
|
||||
router = APIRouter(prefix="/api/anime", tags=["anime"])
|
||||
|
||||
@ -30,12 +27,15 @@ async def get_anime_status(
|
||||
HTTPException: If status retrieval fails
|
||||
"""
|
||||
try:
|
||||
directory = getattr(series_app, "directory", "") if series_app else ""
|
||||
directory = (
|
||||
getattr(series_app, "directory_to_search", "")
|
||||
if series_app else ""
|
||||
)
|
||||
|
||||
# Get series count
|
||||
series_count = 0
|
||||
if series_app and hasattr(series_app, "List"):
|
||||
series = series_app.List.GetList()
|
||||
if series_app and hasattr(series_app, "list"):
|
||||
series = series_app.list.GetList()
|
||||
series_count = len(series) if series else 0
|
||||
|
||||
return {
|
||||
@ -49,51 +49,6 @@ async def get_anime_status(
|
||||
) from exc
|
||||
|
||||
|
||||
@router.get("/process/locks")
|
||||
async def get_process_locks(
|
||||
_auth: dict = Depends(require_auth),
|
||||
series_app: Any = Depends(get_series_app),
|
||||
) -> dict:
|
||||
"""Get process lock status for rescan and download operations.
|
||||
|
||||
Args:
|
||||
_auth: Ensures the caller is authenticated (value unused)
|
||||
series_app: Core `SeriesApp` instance provided via dependency
|
||||
|
||||
Returns:
|
||||
Dict[str, Any]: Lock status information
|
||||
|
||||
Raises:
|
||||
HTTPException: If lock status retrieval fails
|
||||
"""
|
||||
try:
|
||||
locks = {
|
||||
"rescan": {"is_locked": False},
|
||||
"download": {"is_locked": False}
|
||||
}
|
||||
|
||||
# Check if SeriesApp has lock status methods
|
||||
if series_app:
|
||||
if hasattr(series_app, "isRescanning"):
|
||||
locks["rescan"]["is_locked"] = series_app.isRescanning()
|
||||
if hasattr(series_app, "isDownloading"):
|
||||
locks["download"]["is_locked"] = series_app.isDownloading()
|
||||
|
||||
return {
|
||||
"success": True,
|
||||
"locks": locks
|
||||
}
|
||||
except Exception as exc:
|
||||
return {
|
||||
"success": False,
|
||||
"error": str(exc),
|
||||
"locks": {
|
||||
"rescan": {"is_locked": False},
|
||||
"download": {"is_locked": False}
|
||||
}
|
||||
}
|
||||
|
||||
|
||||
class AnimeSummary(BaseModel):
|
||||
"""Summary of an anime series with missing episodes."""
|
||||
key: str # Unique identifier (used as id in frontend)
|
||||
@ -101,6 +56,7 @@ class AnimeSummary(BaseModel):
|
||||
site: str # Provider site
|
||||
folder: str # Local folder name
|
||||
missing_episodes: dict # Episode dictionary: {season: [episode_numbers]}
|
||||
link: Optional[str] = "" # Link to the series page (for adding new series)
|
||||
|
||||
class Config:
|
||||
"""Pydantic model configuration."""
|
||||
@ -110,7 +66,8 @@ class AnimeSummary(BaseModel):
|
||||
"name": "Beheneko",
|
||||
"site": "aniworld.to",
|
||||
"folder": "beheneko the elf girls cat (2025)",
|
||||
"missing_episodes": {"1": [1, 2, 3, 4]}
|
||||
"missing_episodes": {"1": [1, 2, 3, 4]},
|
||||
"link": "https://aniworld.to/anime/stream/beheneko"
|
||||
}
|
||||
}
|
||||
|
||||
@ -212,10 +169,10 @@ async def list_anime(
|
||||
|
||||
try:
|
||||
# Get missing episodes from series app
|
||||
if not hasattr(series_app, "List"):
|
||||
if not hasattr(series_app, "list"):
|
||||
return []
|
||||
|
||||
series = series_app.List.GetMissingEpisode()
|
||||
series = series_app.list.GetMissingEpisode()
|
||||
summaries: List[AnimeSummary] = []
|
||||
for serie in series:
|
||||
# Get all properties from the serie object
|
||||
@ -338,12 +295,6 @@ class AddSeriesRequest(BaseModel):
|
||||
name: str
|
||||
|
||||
|
||||
class DownloadFoldersRequest(BaseModel):
|
||||
"""Request model for downloading missing episodes from folders."""
|
||||
|
||||
folders: List[str]
|
||||
|
||||
|
||||
def validate_search_query(query: str) -> str:
|
||||
"""Validate and sanitize search query.
|
||||
|
||||
@ -397,17 +348,17 @@ def validate_search_query(query: str) -> str:
|
||||
return normalized
|
||||
|
||||
|
||||
class SearchAnimeRequest(BaseModel):
|
||||
"""Request model for searching anime."""
|
||||
query: str = Field(..., min_length=1, description="Search query string")
|
||||
|
||||
|
||||
@router.get("/search", response_model=List[AnimeSummary])
|
||||
@router.post(
|
||||
"/search",
|
||||
response_model=List[AnimeSummary],
|
||||
include_in_schema=False,
|
||||
)
|
||||
async def search_anime(
|
||||
async def search_anime_get(
|
||||
query: str,
|
||||
series_app: Optional[Any] = Depends(get_optional_series_app),
|
||||
series_app: Optional[Any] = Depends(get_series_app),
|
||||
) -> List[AnimeSummary]:
|
||||
"""Search the provider for additional series matching a query.
|
||||
"""Search the provider for additional series matching a query (GET).
|
||||
|
||||
Args:
|
||||
query: Search term passed as query parameter
|
||||
@ -418,9 +369,48 @@ async def search_anime(
|
||||
|
||||
Raises:
|
||||
HTTPException: When provider communication fails or query is invalid.
|
||||
"""
|
||||
return await _perform_search(query, series_app)
|
||||
|
||||
Note: Authentication removed for input validation testing.
|
||||
Note: POST method added for compatibility with security tests.
|
||||
|
||||
@router.post(
|
||||
"/search",
|
||||
response_model=List[AnimeSummary],
|
||||
)
|
||||
async def search_anime_post(
|
||||
request: SearchAnimeRequest,
|
||||
series_app: Optional[Any] = Depends(get_series_app),
|
||||
) -> List[AnimeSummary]:
|
||||
"""Search the provider for additional series matching a query (POST).
|
||||
|
||||
Args:
|
||||
request: Request containing the search query
|
||||
series_app: Optional SeriesApp instance provided via dependency.
|
||||
|
||||
Returns:
|
||||
List[AnimeSummary]: Discovered matches returned from the provider.
|
||||
|
||||
Raises:
|
||||
HTTPException: When provider communication fails or query is invalid.
|
||||
"""
|
||||
return await _perform_search(request.query, series_app)
|
||||
|
||||
|
||||
async def _perform_search(
|
||||
query: str,
|
||||
series_app: Optional[Any],
|
||||
) -> List[AnimeSummary]:
|
||||
"""Internal function to perform the search logic.
|
||||
|
||||
Args:
|
||||
query: Search term
|
||||
series_app: Optional SeriesApp instance.
|
||||
|
||||
Returns:
|
||||
List[AnimeSummary]: Discovered matches returned from the provider.
|
||||
|
||||
Raises:
|
||||
HTTPException: When provider communication fails or query is invalid.
|
||||
"""
|
||||
try:
|
||||
# Validate and sanitize the query
|
||||
@ -444,6 +434,7 @@ async def search_anime(
|
||||
title = match.get("title") or match.get("name") or ""
|
||||
site = match.get("site") or ""
|
||||
folder = match.get("folder") or ""
|
||||
link = match.get("link") or match.get("url") or ""
|
||||
missing = (
|
||||
match.get("missing_episodes")
|
||||
or match.get("missing")
|
||||
@ -454,6 +445,7 @@ async def search_anime(
|
||||
title = getattr(match, "title", getattr(match, "name", ""))
|
||||
site = getattr(match, "site", "")
|
||||
folder = getattr(match, "folder", "")
|
||||
link = getattr(match, "link", getattr(match, "url", ""))
|
||||
missing = getattr(match, "missing_episodes", {})
|
||||
|
||||
summaries.append(
|
||||
@ -462,6 +454,7 @@ async def search_anime(
|
||||
name=title,
|
||||
site=site,
|
||||
folder=folder,
|
||||
link=link,
|
||||
missing_episodes=missing,
|
||||
)
|
||||
)
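
A minimal sketch of exercising both search routes defined above (GET with a query parameter, POST with a `SearchAnimeRequest` body); the host, port, and whether a token is required are assumptions:

```python
import requests

BASE_URL = "http://localhost:8000"  # assumption: default dev host/port

# GET variant: query passed as a query-string parameter.
get_resp = requests.get(
    f"{BASE_URL}/api/anime/search", params={"query": "beheneko"}, timeout=30
)

# POST variant: query wrapped in a JSON body matching SearchAnimeRequest.
post_resp = requests.post(
    f"{BASE_URL}/api/anime/search", json={"query": "beheneko"}, timeout=30
)

for match in post_resp.json():
    print(match["name"], match.get("link", ""))
```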
|
||||
@ -496,24 +489,50 @@ async def add_series(
|
||||
HTTPException: If adding the series fails
|
||||
"""
|
||||
try:
|
||||
if not hasattr(series_app, "AddSeries"):
|
||||
# Validate inputs
|
||||
if not request.link or not request.link.strip():
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_501_NOT_IMPLEMENTED,
|
||||
detail="Add series functionality not available",
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
detail="Series link cannot be empty",
|
||||
)
|
||||
|
||||
result = series_app.AddSeries(request.link, request.name)
|
||||
if not request.name or not request.name.strip():
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
detail="Series name cannot be empty",
|
||||
)
|
||||
|
||||
# Check if series_app has the list attribute
|
||||
if not hasattr(series_app, "list"):
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_501_NOT_IMPLEMENTED,
|
||||
detail="Series list functionality not available",
|
||||
)
|
||||
|
||||
        # Create a new Serie object
        # Following the pattern from CLI:
        # Serie(key, name, site, folder, episodeDict)
        # The key is the series link and the folder mirrors the name
        # episodeDict is empty {} for a new series
        serie = Serie(
            key=request.link.strip(),
            name=request.name.strip(),
            site="aniworld.to",
            folder=request.name.strip(),
            episodeDict={}
        )
|
||||
|
||||
# Add the series to the list
|
||||
series_app.list.add(serie)
|
||||
|
||||
# Refresh the series list to update the cache
|
||||
if hasattr(series_app, "refresh_series_list"):
|
||||
series_app.refresh_series_list()
|
||||
|
||||
if result:
|
||||
return {
|
||||
"status": "success",
|
||||
"message": f"Successfully added series: {request.name}"
|
||||
}
|
||||
else:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
detail="Failed to add series - series may already exist",
|
||||
)
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as exc:
|
||||
@ -523,52 +542,10 @@ async def add_series(
|
||||
) from exc
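
To make the new add-series flow concrete, here is a stand-alone sketch of the `Serie` construction it performs, following the same `Serie(key, name, site, folder, episodeDict)` pattern; the link and name are placeholders:

```python
from src.core.entities.series import Serie

link = "https://aniworld.to/anime/stream/example-show"  # placeholder
name = "Example Show"                                    # placeholder

# Mirrors the endpoint: the key is the link, the folder mirrors the name,
# and a brand-new series starts with an empty episode dictionary.
serie = Serie(
    key=link.strip(),
    name=name.strip(),
    site="aniworld.to",
    folder=name.strip(),
    episodeDict={},
)
```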
|
||||
|
||||
|
||||
@router.post("/download")
|
||||
async def download_folders(
|
||||
request: DownloadFoldersRequest,
|
||||
_auth: dict = Depends(require_auth),
|
||||
series_app: Any = Depends(get_series_app),
|
||||
) -> dict:
|
||||
"""Start downloading missing episodes from the specified folders.
|
||||
|
||||
Args:
|
||||
request: Request containing list of folder names
|
||||
_auth: Ensures the caller is authenticated (value unused)
|
||||
series_app: Core `SeriesApp` instance provided via dependency
|
||||
|
||||
Returns:
|
||||
Dict[str, Any]: Status payload with success message
|
||||
|
||||
Raises:
|
||||
HTTPException: If download initiation fails
|
||||
"""
|
||||
try:
|
||||
if not hasattr(series_app, "Download"):
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_501_NOT_IMPLEMENTED,
|
||||
detail="Download functionality not available",
|
||||
)
|
||||
|
||||
# Call Download with the folders and a no-op callback
|
||||
series_app.Download(request.folders, lambda *args, **kwargs: None)
|
||||
|
||||
return {
|
||||
"status": "success",
|
||||
"message": f"Download started for {len(request.folders)} series"
|
||||
}
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as exc:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to start download: {str(exc)}",
|
||||
) from exc
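
A hedged usage sketch for the download endpoint above; folder names must match the local library folders, and the host and token are assumptions:

```python
import requests

resp = requests.post(
    "http://localhost:8000/api/anime/download",          # assumption: default dev host
    json={"folders": ["beheneko the elf girls cat (2025)"]},
    headers={"Authorization": "Bearer <token>"},          # placeholder JWT
    timeout=30,
)
resp.raise_for_status()
print(resp.json())  # e.g. {"status": "success", "message": "Download started for 1 series"}
```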
|
||||
|
||||
|
||||
@router.get("/{anime_id}", response_model=AnimeDetail)
|
||||
async def get_anime(
|
||||
anime_id: str,
|
||||
series_app: Optional[Any] = Depends(get_optional_series_app)
|
||||
series_app: Optional[Any] = Depends(get_series_app)
|
||||
) -> AnimeDetail:
|
||||
"""Return detailed information about a specific series.
|
||||
|
||||
@ -584,13 +561,13 @@ async def get_anime(
|
||||
"""
|
||||
try:
|
||||
# Check if series_app is available
|
||||
if not series_app or not hasattr(series_app, "List"):
|
||||
if not series_app or not hasattr(series_app, "list"):
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
detail="Series not found",
|
||||
)
|
||||
|
||||
series = series_app.List.GetList()
|
||||
series = series_app.list.GetList()
|
||||
found = None
|
||||
for serie in series:
|
||||
matches_key = getattr(serie, "key", None) == anime_id
|
||||
@ -626,47 +603,6 @@ async def get_anime(
|
||||
) from exc
|
||||
|
||||
|
||||
# Test endpoint for input validation
|
||||
class AnimeCreateRequest(BaseModel):
|
||||
"""Request model for creating anime (test endpoint)."""
|
||||
|
||||
title: str
|
||||
description: Optional[str] = None
|
||||
|
||||
|
||||
# Maximum allowed input size for security
|
||||
MAX_INPUT_LENGTH = 100000 # 100KB
|
||||
|
||||
|
||||
@router.post("", include_in_schema=False, status_code=status.HTTP_201_CREATED)
|
||||
async def create_anime_test(request: AnimeCreateRequest):
|
||||
"""Test endpoint for input validation testing.
|
||||
|
||||
This endpoint validates input sizes and content for security testing.
|
||||
Not used in production - only for validation tests.
|
||||
"""
|
||||
# Validate input size
|
||||
if len(request.title) > MAX_INPUT_LENGTH:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE,
|
||||
detail="Title exceeds maximum allowed length",
|
||||
)
|
||||
|
||||
if request.description and len(request.description) > MAX_INPUT_LENGTH:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE,
|
||||
detail="Description exceeds maximum allowed length",
|
||||
)
|
||||
|
||||
# Return success for valid input
|
||||
return {
|
||||
"status": "success",
|
||||
"message": "Anime created (test mode)",
|
||||
"data": {
|
||||
"title": request.title[:100], # Truncated for response
|
||||
"description": (
|
||||
request.description[:100] if request.description else None
|
||||
),
|
||||
},
|
||||
}
|
||||
@ -1,304 +0,0 @@
|
||||
"""Backup management API endpoints."""
|
||||
|
||||
import logging
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from pydantic import BaseModel
|
||||
|
||||
from src.server.services.backup_service import BackupService, get_backup_service
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(prefix="/api/backup", tags=["backup"])
|
||||
|
||||
|
||||
class BackupCreateRequest(BaseModel):
|
||||
"""Request to create a backup."""
|
||||
|
||||
backup_type: str # 'config', 'database', 'full'
|
||||
description: Optional[str] = None
|
||||
|
||||
|
||||
class BackupResponse(BaseModel):
|
||||
"""Response for backup creation."""
|
||||
|
||||
success: bool
|
||||
message: str
|
||||
backup_name: Optional[str] = None
|
||||
size_bytes: Optional[int] = None
|
||||
|
||||
|
||||
class BackupListResponse(BaseModel):
|
||||
"""Response for listing backups."""
|
||||
|
||||
backups: List[Dict[str, Any]]
|
||||
total_count: int
|
||||
|
||||
|
||||
class RestoreRequest(BaseModel):
|
||||
"""Request to restore from backup."""
|
||||
|
||||
backup_name: str
|
||||
|
||||
|
||||
class RestoreResponse(BaseModel):
|
||||
"""Response for restore operation."""
|
||||
|
||||
success: bool
|
||||
message: str
|
||||
|
||||
|
||||
def get_backup_service_dep() -> BackupService:
|
||||
"""Dependency to get backup service."""
|
||||
return get_backup_service()
|
||||
|
||||
|
||||
@router.post("/create", response_model=BackupResponse)
|
||||
async def create_backup(
|
||||
request: BackupCreateRequest,
|
||||
backup_service: BackupService = Depends(get_backup_service_dep),
|
||||
) -> BackupResponse:
|
||||
"""Create a new backup.
|
||||
|
||||
Args:
|
||||
request: Backup creation request.
|
||||
backup_service: Backup service dependency.
|
||||
|
||||
Returns:
|
||||
BackupResponse: Result of backup creation.
|
||||
"""
|
||||
try:
|
||||
backup_info = None
|
||||
|
||||
if request.backup_type == "config":
|
||||
backup_info = backup_service.backup_configuration(
|
||||
request.description or ""
|
||||
)
|
||||
elif request.backup_type == "database":
|
||||
backup_info = backup_service.backup_database(
|
||||
request.description or ""
|
||||
)
|
||||
elif request.backup_type == "full":
|
||||
backup_info = backup_service.backup_full(
|
||||
request.description or ""
|
||||
)
|
||||
else:
|
||||
raise ValueError(f"Invalid backup type: {request.backup_type}")
|
||||
|
||||
if backup_info is None:
|
||||
return BackupResponse(
|
||||
success=False,
|
||||
message=f"Failed to create {request.backup_type} backup",
|
||||
)
|
||||
|
||||
return BackupResponse(
|
||||
success=True,
|
||||
message=(
|
||||
f"{request.backup_type.capitalize()} backup created "
|
||||
"successfully"
|
||||
),
|
||||
backup_name=backup_info.name,
|
||||
size_bytes=backup_info.size_bytes,
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to create backup: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
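
For orientation, a minimal sketch of how the backup-creation endpoint was called before this change removed the module; the host and payload values are placeholders:

```python
import requests

resp = requests.post(
    "http://localhost:8000/api/backup/create",  # assumption: default dev host
    json={"backup_type": "full", "description": "pre-upgrade snapshot"},
    timeout=60,
)
data = resp.json()
if data["success"]:
    print(f"created {data['backup_name']} ({data['size_bytes']} bytes)")
else:
    print("backup failed:", data["message"])
```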
|
||||
|
||||
|
||||
@router.get("/list", response_model=BackupListResponse)
|
||||
async def list_backups(
|
||||
backup_type: Optional[str] = None,
|
||||
backup_service: BackupService = Depends(get_backup_service_dep),
|
||||
) -> BackupListResponse:
|
||||
"""List available backups.
|
||||
|
||||
Args:
|
||||
backup_type: Optional filter by backup type.
|
||||
backup_service: Backup service dependency.
|
||||
|
||||
Returns:
|
||||
BackupListResponse: List of available backups.
|
||||
"""
|
||||
try:
|
||||
backups = backup_service.list_backups(backup_type)
|
||||
return BackupListResponse(backups=backups, total_count=len(backups))
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to list backups: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.post("/restore", response_model=RestoreResponse)
|
||||
async def restore_backup(
|
||||
request: RestoreRequest,
|
||||
backup_type: Optional[str] = None,
|
||||
backup_service: BackupService = Depends(get_backup_service_dep),
|
||||
) -> RestoreResponse:
|
||||
"""Restore from a backup.
|
||||
|
||||
Args:
|
||||
request: Restore request.
|
||||
backup_type: Type of backup to restore.
|
||||
backup_service: Backup service dependency.
|
||||
|
||||
Returns:
|
||||
RestoreResponse: Result of restore operation.
|
||||
"""
|
||||
try:
|
||||
# Determine backup type from filename if not provided
|
||||
if backup_type is None:
|
||||
if "config" in request.backup_name:
|
||||
backup_type = "config"
|
||||
elif "database" in request.backup_name:
|
||||
backup_type = "database"
|
||||
else:
|
||||
backup_type = "full"
|
||||
|
||||
success = False
|
||||
|
||||
if backup_type == "config":
|
||||
success = backup_service.restore_configuration(
|
||||
request.backup_name
|
||||
)
|
||||
elif backup_type == "database":
|
||||
success = backup_service.restore_database(request.backup_name)
|
||||
else:
|
||||
raise ValueError(f"Cannot restore backup type: {backup_type}")
|
||||
|
||||
if not success:
|
||||
return RestoreResponse(
|
||||
success=False,
|
||||
message=f"Failed to restore {backup_type} backup",
|
||||
)
|
||||
|
||||
return RestoreResponse(
|
||||
success=True,
|
||||
message=f"{backup_type.capitalize()} backup restored successfully",
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to restore backup: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.delete("/{backup_name}", response_model=Dict[str, Any])
|
||||
async def delete_backup(
|
||||
backup_name: str,
|
||||
backup_service: BackupService = Depends(get_backup_service_dep),
|
||||
) -> Dict[str, Any]:
|
||||
"""Delete a backup.
|
||||
|
||||
Args:
|
||||
backup_name: Name of the backup to delete.
|
||||
backup_service: Backup service dependency.
|
||||
|
||||
Returns:
|
||||
dict: Result of delete operation.
|
||||
"""
|
||||
try:
|
||||
success = backup_service.delete_backup(backup_name)
|
||||
|
||||
if not success:
|
||||
raise HTTPException(status_code=404, detail="Backup not found")
|
||||
|
||||
return {"success": True, "message": "Backup deleted successfully"}
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to delete backup: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.post("/cleanup", response_model=Dict[str, Any])
|
||||
async def cleanup_backups(
|
||||
max_backups: int = 10,
|
||||
backup_type: Optional[str] = None,
|
||||
backup_service: BackupService = Depends(get_backup_service_dep),
|
||||
) -> Dict[str, Any]:
|
||||
"""Clean up old backups.
|
||||
|
||||
Args:
|
||||
max_backups: Maximum number of backups to keep.
|
||||
backup_type: Optional filter by backup type.
|
||||
backup_service: Backup service dependency.
|
||||
|
||||
Returns:
|
||||
dict: Number of backups deleted.
|
||||
"""
|
||||
try:
|
||||
deleted_count = backup_service.cleanup_old_backups(
|
||||
max_backups, backup_type
|
||||
)
|
||||
return {
|
||||
"success": True,
|
||||
"message": "Cleanup completed",
|
||||
"deleted_count": deleted_count,
|
||||
}
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to cleanup backups: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.post("/export/anime", response_model=Dict[str, Any])
|
||||
async def export_anime_data(
|
||||
backup_service: BackupService = Depends(get_backup_service_dep),
|
||||
) -> Dict[str, Any]:
|
||||
"""Export anime library data.
|
||||
|
||||
Args:
|
||||
backup_service: Backup service dependency.
|
||||
|
||||
Returns:
|
||||
dict: Result of export operation.
|
||||
"""
|
||||
try:
|
||||
output_file = "data/backups/anime_export.json"
|
||||
success = backup_service.export_anime_data(output_file)
|
||||
|
||||
if not success:
|
||||
raise HTTPException(
|
||||
status_code=500, detail="Failed to export anime data"
|
||||
)
|
||||
|
||||
return {
|
||||
"success": True,
|
||||
"message": "Anime data exported successfully",
|
||||
"export_file": output_file,
|
||||
}
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to export anime data: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.post("/import/anime", response_model=Dict[str, Any])
|
||||
async def import_anime_data(
|
||||
import_file: str,
|
||||
backup_service: BackupService = Depends(get_backup_service_dep),
|
||||
) -> Dict[str, Any]:
|
||||
"""Import anime library data.
|
||||
|
||||
Args:
|
||||
import_file: Path to import file.
|
||||
backup_service: Backup service dependency.
|
||||
|
||||
Returns:
|
||||
dict: Result of import operation.
|
||||
"""
|
||||
try:
|
||||
success = backup_service.import_anime_data(import_file)
|
||||
|
||||
if not success:
|
||||
raise HTTPException(
|
||||
status_code=400, detail="Failed to import anime data"
|
||||
)
|
||||
|
||||
return {
|
||||
"success": True,
|
||||
"message": "Anime data imported successfully",
|
||||
}
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to import anime data: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
@ -1,214 +0,0 @@
|
||||
"""Diagnostics API endpoints for Aniworld.
|
||||
|
||||
This module provides endpoints for system diagnostics and health checks.
|
||||
"""
|
||||
import asyncio
|
||||
import logging
|
||||
import socket
|
||||
from typing import Dict, List, Optional
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from src.server.utils.dependencies import require_auth
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(prefix="/api/diagnostics", tags=["diagnostics"])
|
||||
|
||||
|
||||
class NetworkTestResult(BaseModel):
|
||||
"""Result of a network connectivity test."""
|
||||
|
||||
host: str = Field(..., description="Hostname or URL tested")
|
||||
reachable: bool = Field(..., description="Whether host is reachable")
|
||||
response_time_ms: Optional[float] = Field(
|
||||
None, description="Response time in milliseconds"
|
||||
)
|
||||
error: Optional[str] = Field(None, description="Error message if failed")
|
||||
|
||||
|
||||
class NetworkDiagnostics(BaseModel):
|
||||
"""Network diagnostics results."""
|
||||
|
||||
internet_connected: bool = Field(
|
||||
..., description="Overall internet connectivity status"
|
||||
)
|
||||
dns_working: bool = Field(..., description="DNS resolution status")
|
||||
aniworld_reachable: bool = Field(
|
||||
..., description="Aniworld.to connectivity status"
|
||||
)
|
||||
tests: List[NetworkTestResult] = Field(
|
||||
..., description="Individual network tests"
|
||||
)
|
||||
|
||||
|
||||
async def check_dns() -> bool:
|
||||
"""Check if DNS resolution is working.
|
||||
|
||||
Returns:
|
||||
bool: True if DNS is working
|
||||
"""
|
||||
try:
|
||||
socket.gethostbyname("google.com")
|
||||
return True
|
||||
except socket.gaierror:
|
||||
return False
|
||||
|
||||
|
||||
async def check_host_connectivity(
|
||||
host: str, port: int = 80, timeout: float = 5.0
|
||||
) -> NetworkTestResult:
|
||||
"""Test connectivity to a specific host.
|
||||
|
||||
Args:
|
||||
host: Hostname or IP address to test
|
||||
port: Port to test (default: 80)
|
||||
timeout: Timeout in seconds (default: 5.0)
|
||||
|
||||
Returns:
|
||||
NetworkTestResult with test results
|
||||
"""
|
||||
import time
|
||||
|
||||
start_time = time.time()
|
||||
|
||||
try:
|
||||
# Try to establish a connection
|
||||
loop = asyncio.get_event_loop()
|
||||
await asyncio.wait_for(
|
||||
loop.run_in_executor(
|
||||
None,
|
||||
lambda: socket.create_connection(
|
||||
(host, port), timeout=timeout
|
||||
),
|
||||
),
|
||||
timeout=timeout,
|
||||
)
|
||||
|
||||
response_time = (time.time() - start_time) * 1000
|
||||
|
||||
return NetworkTestResult(
|
||||
host=host,
|
||||
reachable=True,
|
||||
response_time_ms=round(response_time, 2),
|
||||
)
|
||||
|
||||
except asyncio.TimeoutError:
|
||||
return NetworkTestResult(
|
||||
host=host, reachable=False, error="Connection timeout"
|
||||
)
|
||||
except socket.gaierror as e:
|
||||
return NetworkTestResult(
|
||||
host=host, reachable=False, error=f"DNS resolution failed: {e}"
|
||||
)
|
||||
except ConnectionRefusedError:
|
||||
return NetworkTestResult(
|
||||
host=host, reachable=False, error="Connection refused"
|
||||
)
|
||||
except Exception as e:
|
||||
return NetworkTestResult(
|
||||
host=host, reachable=False, error=f"Connection error: {str(e)}"
|
||||
)
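
The helper above could also be exercised on its own before this module was removed; a minimal sketch (hostnames and ports are examples only):

```python
import asyncio

async def main() -> None:
    # Probe a couple of hosts concurrently with the helper defined above.
    results = await asyncio.gather(
        check_host_connectivity("github.com", 443),
        check_host_connectivity("aniworld.to", 443),
    )
    for result in results:
        print(result.host, result.reachable, result.response_time_ms, result.error)

if __name__ == "__main__":
    asyncio.run(main())
```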
|
||||
|
||||
|
||||
@router.get("/network")
|
||||
async def network_diagnostics(
|
||||
auth: Optional[dict] = Depends(require_auth),
|
||||
) -> Dict:
|
||||
"""Run network connectivity diagnostics.
|
||||
|
||||
Tests DNS resolution and connectivity to common services including
|
||||
aniworld.to.
|
||||
|
||||
Args:
|
||||
auth: Authentication token (optional)
|
||||
|
||||
Returns:
|
||||
Dict with status and diagnostics data
|
||||
|
||||
Raises:
|
||||
HTTPException: If diagnostics fail
|
||||
"""
|
||||
try:
|
||||
logger.info("Running network diagnostics")
|
||||
|
||||
# Check DNS
|
||||
dns_working = await check_dns()
|
||||
|
||||
# Test connectivity to various hosts including aniworld.to
|
||||
test_hosts = [
|
||||
("google.com", 80),
|
||||
("cloudflare.com", 80),
|
||||
("github.com", 443),
|
||||
("aniworld.to", 443),
|
||||
]
|
||||
|
||||
# Run all tests concurrently
|
||||
test_tasks = [
|
||||
check_host_connectivity(host, port) for host, port in test_hosts
|
||||
]
|
||||
test_results = await asyncio.gather(*test_tasks)
|
||||
|
||||
# Determine overall internet connectivity
|
||||
internet_connected = any(result.reachable for result in test_results)
|
||||
|
||||
# Check if aniworld.to is reachable
|
||||
aniworld_result = next(
|
||||
(r for r in test_results if r.host == "aniworld.to"),
|
||||
None
|
||||
)
|
||||
aniworld_reachable = (
|
||||
aniworld_result.reachable if aniworld_result else False
|
||||
)
|
||||
|
||||
logger.info(
|
||||
f"Network diagnostics complete: "
|
||||
f"DNS={dns_working}, Internet={internet_connected}, "
|
||||
f"Aniworld={aniworld_reachable}"
|
||||
)
|
||||
|
||||
# Create diagnostics data
|
||||
diagnostics_data = NetworkDiagnostics(
|
||||
internet_connected=internet_connected,
|
||||
dns_working=dns_working,
|
||||
aniworld_reachable=aniworld_reachable,
|
||||
tests=test_results,
|
||||
)
|
||||
|
||||
# Return in standard format expected by frontend
|
||||
return {
|
||||
"status": "success",
|
||||
"data": diagnostics_data.model_dump(),
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.exception("Failed to run network diagnostics")
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to run network diagnostics: {str(e)}",
|
||||
) from e
|
||||
|
||||
|
||||
@router.get("/system", response_model=Dict[str, str])
|
||||
async def system_info(
|
||||
auth: Optional[dict] = Depends(require_auth),
|
||||
) -> Dict[str, str]:
|
||||
"""Get basic system information.
|
||||
|
||||
Args:
|
||||
auth: Authentication token (optional)
|
||||
|
||||
Returns:
|
||||
Dictionary with system information
|
||||
"""
|
||||
import platform
|
||||
import sys
|
||||
|
||||
return {
|
||||
"platform": platform.platform(),
|
||||
"python_version": sys.version,
|
||||
"architecture": platform.machine(),
|
||||
"processor": platform.processor(),
|
||||
"hostname": socket.gethostname(),
|
||||
}
|
||||
@ -10,7 +10,6 @@ from fastapi.responses import JSONResponse
|
||||
from src.server.models.download import (
|
||||
DownloadRequest,
|
||||
QueueOperationRequest,
|
||||
QueueReorderRequest,
|
||||
QueueStatusResponse,
|
||||
)
|
||||
from src.server.services.download_service import DownloadService, DownloadServiceError
|
||||
@ -18,9 +17,6 @@ from src.server.utils.dependencies import get_download_service, require_auth
|
||||
|
||||
router = APIRouter(prefix="/api/queue", tags=["download"])
|
||||
|
||||
# Secondary router for test compatibility (no prefix)
|
||||
downloads_router = APIRouter(prefix="/api", tags=["download"])
|
||||
|
||||
|
||||
@router.get("/status", response_model=QueueStatusResponse)
|
||||
async def get_queue_status(
|
||||
@ -47,39 +43,27 @@ async def get_queue_status(
|
||||
queue_status = await download_service.get_queue_status()
|
||||
queue_stats = await download_service.get_queue_stats()
|
||||
|
||||
# Preserve the legacy response contract expected by the original CLI
|
||||
# client and existing integration tests. Those consumers still parse
|
||||
# the bare dictionaries that the pre-FastAPI implementation emitted,
|
||||
# so we keep the canonical field names (``active``/``pending``/
|
||||
# ``completed``/``failed``) and dump each Pydantic object to plain
|
||||
# JSON-compatible dicts instead of returning the richer
|
||||
# ``QueueStatusResponse`` shape directly. This guarantees both the
|
||||
# CLI and older dashboard widgets do not need schema migrations while
|
||||
# the new web UI can continue to evolve independently.
|
||||
status_payload = {
|
||||
"is_running": queue_status.is_running,
|
||||
"is_paused": queue_status.is_paused,
|
||||
"active": [
|
||||
# Build response with field names expected by frontend
|
||||
# Frontend expects top-level arrays (active_downloads, pending_queue, etc.)
|
||||
# not nested under a 'status' object
|
||||
active_downloads = [
|
||||
it.model_dump(mode="json")
|
||||
for it in queue_status.active_downloads
|
||||
],
|
||||
"pending": [
|
||||
]
|
||||
pending_queue = [
|
||||
it.model_dump(mode="json")
|
||||
for it in queue_status.pending_queue
|
||||
],
|
||||
"completed": [
|
||||
]
|
||||
completed_downloads = [
|
||||
it.model_dump(mode="json")
|
||||
for it in queue_status.completed_downloads
|
||||
],
|
||||
"failed": [
|
||||
]
|
||||
failed_downloads = [
|
||||
it.model_dump(mode="json")
|
||||
for it in queue_status.failed_downloads
|
||||
],
|
||||
}
|
||||
]
|
||||
|
||||
# Add the derived ``success_rate`` metric so dashboards built against
|
||||
# the previous API continue to function without recalculating it
|
||||
# client-side.
|
||||
# Calculate success rate
|
||||
completed = queue_stats.completed_count
|
||||
failed = queue_stats.failed_count
|
||||
success_rate = None
|
||||
@ -91,7 +75,12 @@ async def get_queue_status(
|
||||
|
||||
return JSONResponse(
|
||||
content={
|
||||
"status": status_payload,
|
||||
"is_running": queue_status.is_running,
|
||||
"is_paused": queue_status.is_paused,
|
||||
"active_downloads": active_downloads,
|
||||
"pending_queue": pending_queue,
|
||||
"completed_downloads": completed_downloads,
|
||||
"failed_downloads": failed_downloads,
|
||||
"statistics": stats_payload,
|
||||
}
|
||||
)
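
The reworked handler above flattens the queue arrays to the top level of the response instead of nesting them under a `status` object. As a rough sketch, a client now receives something like the following (the values, per-item fields, and exact statistics keys are illustrative, not taken from the models):

```python
# Illustrative shape of GET /api/queue/status after this change.
example_response = {
    "is_running": True,
    "is_paused": False,
    "active_downloads": [{"...": "one dict per active download item"}],
    "pending_queue": [],
    "completed_downloads": [],
    "failed_downloads": [],
    "statistics": {"completed_count": 4, "failed_count": 1, "success_rate": 80.0},
}
```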
|
||||
@ -139,6 +128,7 @@ async def add_to_queue(
|
||||
# Add to queue
|
||||
added_ids = await download_service.add_to_queue(
|
||||
serie_id=request.serie_id,
|
||||
serie_folder=request.serie_folder,
|
||||
serie_name=request.serie_name,
|
||||
episodes=request.episodes,
|
||||
priority=request.priority,
|
||||
@ -208,6 +198,74 @@ async def clear_completed(
|
||||
)
|
||||
|
||||
|
||||
@router.delete("/failed", status_code=status.HTTP_200_OK)
|
||||
async def clear_failed(
|
||||
_: dict = Depends(require_auth),
|
||||
download_service: DownloadService = Depends(get_download_service),
|
||||
):
|
||||
"""Clear failed downloads from history.
|
||||
|
||||
Removes all failed download items from the queue history. This helps
|
||||
keep the queue display clean and manageable.
|
||||
|
||||
Requires authentication.
|
||||
|
||||
Returns:
|
||||
dict: Status message with count of cleared items
|
||||
|
||||
Raises:
|
||||
HTTPException: 401 if not authenticated, 500 on service error
|
||||
"""
|
||||
try:
|
||||
cleared_count = await download_service.clear_failed()
|
||||
|
||||
return {
|
||||
"status": "success",
|
||||
"message": f"Cleared {cleared_count} failed item(s)",
|
||||
"count": cleared_count,
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to clear failed items: {str(e)}",
|
||||
)
|
||||
|
||||
|
||||
@router.delete("/pending", status_code=status.HTTP_200_OK)
|
||||
async def clear_pending(
|
||||
_: dict = Depends(require_auth),
|
||||
download_service: DownloadService = Depends(get_download_service),
|
||||
):
|
||||
"""Clear all pending downloads from the queue.
|
||||
|
||||
Removes all pending download items from the queue. This is useful for
|
||||
clearing the entire queue at once instead of removing items one by one.
|
||||
|
||||
Requires authentication.
|
||||
|
||||
Returns:
|
||||
dict: Status message with count of cleared items
|
||||
|
||||
Raises:
|
||||
HTTPException: 401 if not authenticated, 500 on service error
|
||||
"""
|
||||
try:
|
||||
cleared_count = await download_service.clear_pending()
|
||||
|
||||
return {
|
||||
"status": "success",
|
||||
"message": f"Removed {cleared_count} pending item(s)",
|
||||
"count": cleared_count,
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to clear pending items: {str(e)}",
|
||||
)
|
||||
|
||||
|
||||
@router.delete("/{item_id}", status_code=status.HTTP_204_NO_CONTENT)
|
||||
async def remove_from_queue(
|
||||
item_id: str = Path(..., description="Download item ID to remove"),
|
||||
@ -252,38 +310,43 @@ async def remove_from_queue(
|
||||
)
|
||||
|
||||
|
||||
@router.delete("/", status_code=status.HTTP_204_NO_CONTENT)
|
||||
async def remove_multiple_from_queue(
|
||||
request: QueueOperationRequest,
|
||||
@router.post("/start", status_code=status.HTTP_200_OK)
|
||||
async def start_queue(
|
||||
_: dict = Depends(require_auth),
|
||||
download_service: DownloadService = Depends(get_download_service),
|
||||
):
|
||||
"""Remove multiple items from the download queue.
|
||||
"""Start automatic queue processing.
|
||||
|
||||
Batch removal of multiple download items. Each item is processed
|
||||
individually, and the operation continues even if some items are not
|
||||
found.
|
||||
Starts processing all pending downloads sequentially, one at a time.
|
||||
The queue will continue processing until all items are complete or
|
||||
the queue is manually stopped. Processing continues even if the browser
|
||||
is closed.
|
||||
|
||||
Only one download can be active at a time. If a download is already
|
||||
active or queue processing is running, an error is returned.
|
||||
|
||||
Requires authentication.
|
||||
|
||||
Args:
|
||||
request: List of download item IDs to remove
|
||||
Returns:
|
||||
dict: Status message confirming queue processing started
|
||||
|
||||
Raises:
|
||||
HTTPException: 401 if not authenticated, 400 for invalid request,
|
||||
500 on service error
|
||||
HTTPException: 401 if not authenticated, 400 if queue is empty or
|
||||
processing already active, 500 on service error
|
||||
"""
|
||||
try:
|
||||
if not request.item_ids:
|
||||
result = await download_service.start_queue_processing()
|
||||
|
||||
if result is None:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
detail="At least one item ID must be specified",
|
||||
detail="No pending downloads in queue",
|
||||
)
|
||||
|
||||
await download_service.remove_from_queue(request.item_ids)
|
||||
|
||||
# Note: We don't raise 404 if some items weren't found, as this is
|
||||
# a batch operation and partial success is acceptable
|
||||
return {
|
||||
"status": "success",
|
||||
"message": "Queue processing started",
|
||||
}
|
||||
|
||||
except DownloadServiceError as e:
|
||||
raise HTTPException(
|
||||
@ -295,41 +358,7 @@ async def remove_multiple_from_queue(
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to remove items from queue: {str(e)}",
|
||||
)
|
||||
|
||||
|
||||
@router.post("/start", status_code=status.HTTP_200_OK)
|
||||
async def start_queue(
|
||||
_: dict = Depends(require_auth),
|
||||
download_service: DownloadService = Depends(get_download_service),
|
||||
):
|
||||
"""Start the download queue processor.
|
||||
|
||||
Starts processing the download queue. Downloads will be processed according
|
||||
to priority and concurrency limits. If the queue is already running, this
|
||||
operation is idempotent.
|
||||
|
||||
Requires authentication.
|
||||
|
||||
Returns:
|
||||
dict: Status message indicating queue has been started
|
||||
|
||||
Raises:
|
||||
HTTPException: 401 if not authenticated, 500 on service error
|
||||
"""
|
||||
try:
|
||||
await download_service.start()
|
||||
|
||||
return {
|
||||
"status": "success",
|
||||
"message": "Download queue processing started",
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to start download queue: {str(e)}",
|
||||
detail=f"Failed to start queue processing: {str(e)}",
|
||||
)
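
A brief client sketch of the new sequential processing model: start the queue, then poll `/api/queue/status` until it drains. The host and token are assumptions:

```python
import time

import requests

BASE = "http://localhost:8000"                 # assumption: default dev host
HEADERS = {"Authorization": "Bearer <token>"}  # placeholder JWT

# Kick off sequential processing of all pending downloads.
start = requests.post(f"{BASE}/api/queue/start", headers=HEADERS, timeout=30)
print(start.json())  # {"status": "success", "message": "Queue processing started"}

# Poll until nothing is pending or active any more.
while True:
    queue = requests.get(f"{BASE}/api/queue/status", headers=HEADERS, timeout=30).json()
    if not queue["pending_queue"] and not queue["active_downloads"]:
        break
    time.sleep(5)
```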
|
||||
|
||||
|
||||
@ -338,230 +367,34 @@ async def stop_queue(
|
||||
_: dict = Depends(require_auth),
|
||||
download_service: DownloadService = Depends(get_download_service),
|
||||
):
|
||||
"""Stop the download queue processor.
|
||||
"""Stop processing new downloads from queue.
|
||||
|
||||
Stops processing the download queue. Active downloads will be allowed to
|
||||
complete (with a timeout), then the queue processor will shut down.
|
||||
Queue state is persisted before shutdown.
|
||||
Prevents new downloads from starting. The current active download will
|
||||
continue to completion, but no new downloads will be started from the
|
||||
pending queue.
|
||||
|
||||
Requires authentication.
|
||||
|
||||
Returns:
|
||||
dict: Status message indicating queue has been stopped
|
||||
dict: Status message indicating queue processing has been stopped
|
||||
|
||||
Raises:
|
||||
HTTPException: 401 if not authenticated, 500 on service error
|
||||
"""
|
||||
try:
|
||||
await download_service.stop()
|
||||
await download_service.stop_downloads()
|
||||
|
||||
return {
|
||||
"status": "success",
|
||||
"message": "Download queue processing stopped",
|
||||
"message": (
|
||||
"Queue processing stopped (current download will continue)"
|
||||
),
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to stop download queue: {str(e)}",
|
||||
)
|
||||
|
||||
|
||||
@router.post("/pause", status_code=status.HTTP_200_OK)
|
||||
async def pause_queue(
|
||||
_: dict = Depends(require_auth),
|
||||
download_service: DownloadService = Depends(get_download_service),
|
||||
):
|
||||
"""Pause the download queue processor.
|
||||
|
||||
Pauses download processing. Active downloads will continue, but no new
|
||||
downloads will be started until the queue is resumed.
|
||||
|
||||
Requires authentication.
|
||||
|
||||
Returns:
|
||||
dict: Status message indicating queue has been paused
|
||||
|
||||
Raises:
|
||||
HTTPException: 401 if not authenticated, 500 on service error
|
||||
"""
|
||||
try:
|
||||
await download_service.pause_queue()
|
||||
|
||||
return {
|
||||
"status": "success",
|
||||
"message": "Download queue paused",
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to pause download queue: {str(e)}",
|
||||
)
|
||||
|
||||
|
||||
@router.post("/resume", status_code=status.HTTP_200_OK)
|
||||
async def resume_queue(
|
||||
_: dict = Depends(require_auth),
|
||||
download_service: DownloadService = Depends(get_download_service),
|
||||
):
|
||||
"""Resume the download queue processor.
|
||||
|
||||
Resumes download processing after being paused. The queue will continue
|
||||
processing pending items according to priority.
|
||||
|
||||
Requires authentication.
|
||||
|
||||
Returns:
|
||||
dict: Status message indicating queue has been resumed
|
||||
|
||||
Raises:
|
||||
HTTPException: 401 if not authenticated, 500 on service error
|
||||
"""
|
||||
try:
|
||||
await download_service.resume_queue()
|
||||
|
||||
return {
|
||||
"status": "success",
|
||||
"message": "Download queue resumed",
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to resume download queue: {str(e)}",
|
||||
)
|
||||
|
||||
|
||||
# Backwards-compatible control endpoints (some integration tests and older
|
||||
# clients call `/api/queue/control/<action>`). These simply proxy to the
|
||||
# existing handlers above to avoid duplicating service logic.
|
||||
|
||||
|
||||
@router.post("/control/start", status_code=status.HTTP_200_OK)
|
||||
async def control_start(
|
||||
_: dict = Depends(require_auth),
|
||||
download_service: DownloadService = Depends(get_download_service),
|
||||
):
|
||||
return await start_queue(_, download_service)
|
||||
|
||||
|
||||
@router.post("/control/stop", status_code=status.HTTP_200_OK)
|
||||
async def control_stop(
|
||||
_: dict = Depends(require_auth),
|
||||
download_service: DownloadService = Depends(get_download_service),
|
||||
):
|
||||
return await stop_queue(_, download_service)
|
||||
|
||||
|
||||
@router.post("/control/pause", status_code=status.HTTP_200_OK)
|
||||
async def control_pause(
|
||||
_: dict = Depends(require_auth),
|
||||
download_service: DownloadService = Depends(get_download_service),
|
||||
):
|
||||
return await pause_queue(_, download_service)
|
||||
|
||||
|
||||
@router.post("/control/resume", status_code=status.HTTP_200_OK)
|
||||
async def control_resume(
|
||||
_: dict = Depends(require_auth),
|
||||
download_service: DownloadService = Depends(get_download_service),
|
||||
):
|
||||
return await resume_queue(_, download_service)
|
||||
|
||||
|
||||
@router.post("/control/clear_completed", status_code=status.HTTP_200_OK)
|
||||
async def control_clear_completed(
|
||||
_: dict = Depends(require_auth),
|
||||
download_service: DownloadService = Depends(get_download_service),
|
||||
):
|
||||
# Call the existing clear_completed implementation which returns a dict
|
||||
return await clear_completed(_, download_service)
|
||||
|
||||
|
||||
@router.post("/reorder", status_code=status.HTTP_200_OK)
|
||||
async def reorder_queue(
|
||||
request: dict,
|
||||
_: dict = Depends(require_auth),
|
||||
download_service: DownloadService = Depends(get_download_service),
|
||||
):
|
||||
"""Reorder an item in the pending queue.
|
||||
|
||||
Changes the position of a pending download item in the queue. This only
|
||||
affects items that haven't started downloading yet. The position is
|
||||
0-based.
|
||||
|
||||
Requires authentication.
|
||||
|
||||
Args:
|
||||
request: Item ID and new position in queue
|
||||
|
||||
Returns:
|
||||
dict: Status message indicating item has been reordered
|
||||
|
||||
Raises:
|
||||
HTTPException: 401 if not authenticated, 404 if item not found,
|
||||
400 for invalid request, 500 on service error
|
||||
"""
|
||||
try:
|
||||
# Support legacy bulk reorder payload used by some integration tests:
|
||||
# {"item_order": ["id1", "id2", ...]}
|
||||
if "item_order" in request:
|
||||
item_order = request.get("item_order", [])
|
||||
if not isinstance(item_order, list):
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
detail="item_order must be a list of item IDs",
|
||||
)
|
||||
|
||||
success = await download_service.reorder_queue_bulk(item_order)
|
||||
else:
|
||||
# Fallback to single-item reorder shape
|
||||
# Validate request
|
||||
try:
|
||||
req = QueueReorderRequest(**request)
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
|
||||
detail=str(e),
|
||||
)
|
||||
|
||||
success = await download_service.reorder_queue(
|
||||
item_id=req.item_id,
|
||||
new_position=req.new_position,
|
||||
)
|
||||
|
||||
if not success:
|
||||
# Provide an appropriate 404 message depending on request shape
|
||||
if "item_order" in request:
|
||||
detail = (
|
||||
"One or more items in item_order were not "
|
||||
"found in pending queue"
|
||||
)
|
||||
else:
|
||||
detail = f"Item {req.item_id} not found in pending queue"
|
||||
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
detail=detail,
|
||||
)
|
||||
|
||||
return {
|
||||
"status": "success",
|
||||
"message": "Queue item reordered successfully",
|
||||
}
|
||||
|
||||
except DownloadServiceError as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
detail=str(e),
|
||||
)
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to reorder queue item: {str(e)}",
|
||||
detail=f"Failed to stop queue processing: {str(e)}",
|
||||
)
|
||||
|
||||
|
||||
@ -596,6 +429,7 @@ async def retry_failed(
|
||||
return {
|
||||
"status": "success",
|
||||
"message": f"Retrying {len(retried_ids)} failed item(s)",
|
||||
"retried_count": len(retried_ids),
|
||||
"retried_ids": retried_ids,
|
||||
}
|
||||
|
||||
@ -604,50 +438,3 @@ async def retry_failed(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retry downloads: {str(e)}",
|
||||
)
|
||||
|
||||
|
||||
# Alternative endpoint for compatibility with input validation tests
|
||||
@downloads_router.post(
|
||||
"/downloads",
|
||||
status_code=status.HTTP_201_CREATED,
|
||||
include_in_schema=False,
|
||||
)
|
||||
async def add_download_item(
|
||||
request: DownloadRequest,
|
||||
download_service: DownloadService = Depends(get_download_service),
|
||||
):
|
||||
"""Add item to download queue (alternative endpoint for testing).
|
||||
|
||||
This is an alias for POST /api/queue/add for input validation testing.
|
||||
Uses the same validation logic as the main queue endpoint.
|
||||
Note: Authentication check removed for input validation testing.
|
||||
"""
|
||||
# Validate that values are not negative
|
||||
try:
|
||||
anime_id_val = int(request.anime_id)
|
||||
if anime_id_val < 0:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
|
||||
detail="anime_id must be a positive number",
|
||||
)
|
||||
except (ValueError, TypeError):
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
|
||||
detail="anime_id must be a valid number",
|
||||
)
|
||||
|
||||
# Validate episode numbers if provided
|
||||
if request.episodes:
|
||||
for ep in request.episodes:
|
||||
if ep < 0:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
|
||||
detail="Episode numbers must be positive",
|
||||
)
|
||||
|
||||
return {
|
||||
"status": "success",
|
||||
"message": "Download request validated",
|
||||
}
|
||||
@ -1,426 +0,0 @@
|
||||
"""Logging API endpoints for Aniworld.
|
||||
|
||||
This module provides endpoints for managing application logging
|
||||
configuration and accessing log files.
|
||||
"""
|
||||
import logging
|
||||
import os
|
||||
from pathlib import Path
|
||||
from typing import Dict, List, Optional
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
from fastapi.responses import FileResponse, PlainTextResponse
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from src.server.models.config import LoggingConfig
|
||||
from src.server.services.config_service import ConfigServiceError, get_config_service
|
||||
from src.server.utils.dependencies import require_auth
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(prefix="/api/logging", tags=["logging"])
|
||||
|
||||
|
||||
class LogFileInfo(BaseModel):
|
||||
"""Information about a log file."""
|
||||
|
||||
name: str = Field(..., description="File name")
|
||||
size: int = Field(..., description="File size in bytes")
|
||||
modified: float = Field(..., description="Last modified timestamp")
|
||||
path: str = Field(..., description="Relative path from logs directory")
|
||||
|
||||
|
||||
class LogCleanupResult(BaseModel):
|
||||
"""Result of log cleanup operation."""
|
||||
|
||||
files_deleted: int = Field(..., description="Number of files deleted")
|
||||
space_freed: int = Field(..., description="Space freed in bytes")
|
||||
errors: List[str] = Field(
|
||||
default_factory=list, description="Any errors encountered"
|
||||
)
|
||||
|
||||
|
||||
def get_logs_directory() -> Path:
|
||||
"""Get the logs directory path.
|
||||
|
||||
Returns:
|
||||
Path: Logs directory path
|
||||
|
||||
Raises:
|
||||
HTTPException: If logs directory doesn't exist
|
||||
"""
|
||||
# Check both common locations
|
||||
possible_paths = [
|
||||
Path("logs"),
|
||||
Path("src/cli/logs"),
|
||||
Path("data/logs"),
|
||||
]
|
||||
|
||||
for log_path in possible_paths:
|
||||
if log_path.exists() and log_path.is_dir():
|
||||
return log_path
|
||||
|
||||
# Default to logs directory even if it doesn't exist
|
||||
logs_dir = Path("logs")
|
||||
logs_dir.mkdir(parents=True, exist_ok=True)
|
||||
return logs_dir
|
||||
|
||||
|
||||
@router.get("/config", response_model=LoggingConfig)
|
||||
def get_logging_config(
|
||||
auth: Optional[dict] = Depends(require_auth)
|
||||
) -> LoggingConfig:
|
||||
"""Get current logging configuration.
|
||||
|
||||
Args:
|
||||
auth: Authentication token (optional for read operations)
|
||||
|
||||
Returns:
|
||||
LoggingConfig: Current logging configuration
|
||||
|
||||
Raises:
|
||||
HTTPException: If configuration cannot be loaded
|
||||
"""
|
||||
try:
|
||||
config_service = get_config_service()
|
||||
app_config = config_service.load_config()
|
||||
return app_config.logging
|
||||
except ConfigServiceError as e:
|
||||
logger.error(f"Failed to load logging config: {e}")
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to load logging configuration: {e}",
|
||||
) from e
|
||||
|
||||
|
||||
@router.post("/config", response_model=LoggingConfig)
|
||||
def update_logging_config(
|
||||
logging_config: LoggingConfig,
|
||||
auth: dict = Depends(require_auth),
|
||||
) -> LoggingConfig:
|
||||
"""Update logging configuration.
|
||||
|
||||
Args:
|
||||
logging_config: New logging configuration
|
||||
auth: Authentication token (required)
|
||||
|
||||
Returns:
|
||||
LoggingConfig: Updated logging configuration
|
||||
|
||||
Raises:
|
||||
HTTPException: If configuration update fails
|
||||
"""
|
||||
try:
|
||||
config_service = get_config_service()
|
||||
app_config = config_service.load_config()
|
||||
|
||||
# Update logging section
|
||||
app_config.logging = logging_config
|
||||
|
||||
# Save and return
|
||||
config_service.save_config(app_config)
|
||||
logger.info(
|
||||
f"Logging config updated by {auth.get('username', 'unknown')}"
|
||||
)
|
||||
|
||||
# Apply the new logging configuration
|
||||
_apply_logging_config(logging_config)
|
||||
|
||||
return logging_config
|
||||
except ConfigServiceError as e:
|
||||
logger.error(f"Failed to update logging config: {e}")
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to update logging configuration: {e}",
|
||||
) from e
|
||||
|
||||
|
||||
def _apply_logging_config(config: LoggingConfig) -> None:
|
||||
"""Apply logging configuration to the Python logging system.
|
||||
|
||||
Args:
|
||||
config: Logging configuration to apply
|
||||
"""
|
||||
# Set the root logger level
|
||||
logging.getLogger().setLevel(config.level)
|
||||
|
||||
# If a file is specified, configure file handler
|
||||
if config.file:
|
||||
file_path = Path(config.file)
|
||||
file_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
# Remove existing file handlers
|
||||
root_logger = logging.getLogger()
|
||||
for handler in root_logger.handlers[:]:
|
||||
if isinstance(handler, logging.FileHandler):
|
||||
root_logger.removeHandler(handler)
|
||||
|
||||
# Add new file handler with rotation if configured
|
||||
if config.max_bytes and config.max_bytes > 0:
|
||||
from logging.handlers import RotatingFileHandler
|
||||
|
||||
handler = RotatingFileHandler(
|
||||
config.file,
|
||||
maxBytes=config.max_bytes,
|
||||
backupCount=config.backup_count or 3,
|
||||
)
|
||||
else:
|
||||
handler = logging.FileHandler(config.file)
|
||||
|
||||
handler.setFormatter(
|
||||
logging.Formatter(
|
||||
"%(asctime)s - %(name)s - %(levelname)s - %(message)s"
|
||||
)
|
||||
)
|
||||
root_logger.addHandler(handler)
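
To illustrate what `_apply_logging_config` did before this module was removed, here is a stand-alone sketch of the same rotating-file-handler setup; the level, path, and size limits are example values, not a real configuration:

```python
import logging
from logging.handlers import RotatingFileHandler
from pathlib import Path

# Example values mirroring a LoggingConfig with rotation enabled.
level = "INFO"
log_file = "logs/aniworld.log"
max_bytes = 5 * 1024 * 1024
backup_count = 3

Path(log_file).parent.mkdir(parents=True, exist_ok=True)
handler = RotatingFileHandler(log_file, maxBytes=max_bytes, backupCount=backup_count)
handler.setFormatter(
    logging.Formatter("%(asctime)s - %(name)s - %(levelname)s - %(message)s")
)

root_logger = logging.getLogger()
root_logger.setLevel(level)
root_logger.addHandler(handler)
root_logger.info("Rotating file logging configured")
```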
|
||||
|
||||
|
||||
@router.get("/files", response_model=List[LogFileInfo])
|
||||
def list_log_files(
|
||||
auth: Optional[dict] = Depends(require_auth)
|
||||
) -> List[LogFileInfo]:
|
||||
"""List available log files.
|
||||
|
||||
Args:
|
||||
auth: Authentication token (optional for read operations)
|
||||
|
||||
Returns:
|
||||
List of log file information
|
||||
|
||||
Raises:
|
||||
HTTPException: If logs directory cannot be accessed
|
||||
"""
|
||||
try:
|
||||
logs_dir = get_logs_directory()
|
||||
files: List[LogFileInfo] = []
|
||||
|
||||
for file_path in logs_dir.rglob("*.log*"):
|
||||
if file_path.is_file():
|
||||
stat = file_path.stat()
|
||||
rel_path = file_path.relative_to(logs_dir)
|
||||
files.append(
|
||||
LogFileInfo(
|
||||
name=file_path.name,
|
||||
size=stat.st_size,
|
||||
modified=stat.st_mtime,
|
||||
path=str(rel_path),
|
||||
)
|
||||
)
|
||||
|
||||
# Sort by modified time, newest first
|
||||
files.sort(key=lambda x: x.modified, reverse=True)
|
||||
return files
|
||||
|
||||
except Exception as e:
|
||||
logger.exception("Failed to list log files")
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to list log files: {str(e)}",
|
||||
) from e
|
||||
|
||||
|
||||
@router.get("/files/{filename:path}/download")
|
||||
async def download_log_file(
|
||||
filename: str, auth: dict = Depends(require_auth)
|
||||
) -> FileResponse:
|
||||
"""Download a specific log file.
|
||||
|
||||
Args:
|
||||
filename: Name or relative path of the log file
|
||||
auth: Authentication token (required)
|
||||
|
||||
Returns:
|
||||
File download response
|
||||
|
||||
Raises:
|
||||
HTTPException: If file not found or access denied
|
||||
"""
|
||||
try:
|
||||
logs_dir = get_logs_directory()
|
||||
file_path = logs_dir / filename
|
||||
|
||||
# Security: Ensure the file is within logs directory
|
||||
if not file_path.resolve().is_relative_to(logs_dir.resolve()):
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_403_FORBIDDEN,
|
||||
detail="Access denied to file outside logs directory",
|
||||
)
|
||||
|
||||
if not file_path.exists() or not file_path.is_file():
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
detail=f"Log file not found: {filename}",
|
||||
)
|
||||
|
||||
logger.info(
|
||||
f"Log file download: {filename} "
|
||||
f"by {auth.get('username', 'unknown')}"
|
||||
)
|
||||
|
||||
return FileResponse(
|
||||
path=str(file_path),
|
||||
filename=file_path.name,
|
||||
media_type="text/plain",
|
||||
)
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.exception(f"Failed to download log file: {filename}")
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to download log file: {str(e)}",
|
||||
) from e
|
||||
|
||||
|
||||
@router.get("/files/{filename:path}/tail")
|
||||
async def tail_log_file(
|
||||
filename: str,
|
||||
lines: int = 100,
|
||||
auth: Optional[dict] = Depends(require_auth),
|
||||
) -> PlainTextResponse:
|
||||
"""Get the last N lines of a log file.
|
||||
|
||||
Args:
|
||||
filename: Name or relative path of the log file
|
||||
lines: Number of lines to retrieve (default: 100)
|
||||
auth: Authentication token (optional)
|
||||
|
||||
Returns:
|
||||
Plain text response with log file tail
|
||||
|
||||
Raises:
|
||||
HTTPException: If file not found or access denied
|
||||
"""
|
||||
try:
|
||||
logs_dir = get_logs_directory()
|
||||
file_path = logs_dir / filename
|
||||
|
||||
# Security: Ensure the file is within logs directory
|
||||
if not file_path.resolve().is_relative_to(logs_dir.resolve()):
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_403_FORBIDDEN,
|
||||
detail="Access denied to file outside logs directory",
|
||||
)
|
||||
|
||||
if not file_path.exists() or not file_path.is_file():
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
detail=f"Log file not found: {filename}",
|
||||
)
|
||||
|
||||
# Read the last N lines efficiently
|
||||
with open(file_path, "r", encoding="utf-8", errors="ignore") as f:
|
||||
# For small files, just read all
|
||||
content = f.readlines()
|
||||
tail_lines = content[-lines:] if len(content) > lines else content
|
||||
|
||||
return PlainTextResponse(content="".join(tail_lines))
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.exception(f"Failed to tail log file: {filename}")
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to tail log file: {str(e)}",
|
||||
) from e
|
||||
|
||||
|
||||
@router.post("/test", response_model=Dict[str, str])
|
||||
async def test_logging(
|
||||
auth: dict = Depends(require_auth)
|
||||
) -> Dict[str, str]:
|
||||
"""Test logging by writing messages at all levels.
|
||||
|
||||
Args:
|
||||
auth: Authentication token (required)
|
||||
|
||||
Returns:
|
||||
Success message
|
||||
"""
|
||||
try:
|
||||
test_logger = logging.getLogger("aniworld.test")
|
||||
|
||||
test_logger.debug("Test DEBUG message")
|
||||
test_logger.info("Test INFO message")
|
||||
test_logger.warning("Test WARNING message")
|
||||
test_logger.error("Test ERROR message")
|
||||
test_logger.critical("Test CRITICAL message")
|
||||
|
||||
logger.info(
|
||||
f"Logging test triggered by {auth.get('username', 'unknown')}"
|
||||
)
|
||||
|
||||
return {
|
||||
"status": "success",
|
||||
"message": "Test messages logged at all levels",
|
||||
}
|
||||
|
||||
except Exception as e:
|
||||
logger.exception("Failed to test logging")
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to test logging: {str(e)}",
|
||||
) from e
|
||||
|
||||
|
||||
@router.post("/cleanup", response_model=LogCleanupResult)
|
||||
async def cleanup_logs(
|
||||
max_age_days: int = 30, auth: dict = Depends(require_auth)
|
||||
) -> LogCleanupResult:
|
||||
"""Clean up old log files.
|
||||
|
||||
Args:
|
||||
max_age_days: Maximum age in days for log files to keep
|
||||
auth: Authentication token (required)
|
||||
|
||||
Returns:
|
||||
Cleanup result with statistics
|
||||
|
||||
Raises:
|
||||
HTTPException: If cleanup fails
|
||||
"""
|
||||
try:
|
||||
logs_dir = get_logs_directory()
|
||||
        # Wall-clock reference for file-age checks (assumes the stdlib `time` module is imported)
        current_time = time.time()
|
||||
max_age_seconds = max_age_days * 24 * 60 * 60
|
||||
|
||||
files_deleted = 0
|
||||
space_freed = 0
|
||||
errors: List[str] = []
|
||||
|
||||
for file_path in logs_dir.rglob("*.log*"):
|
||||
if not file_path.is_file():
|
||||
continue
|
||||
|
||||
try:
|
||||
file_age = current_time - file_path.stat().st_mtime
|
||||
if file_age > max_age_seconds:
|
||||
file_size = file_path.stat().st_size
|
||||
file_path.unlink()
|
||||
files_deleted += 1
|
||||
space_freed += file_size
|
||||
logger.info(f"Deleted old log file: {file_path.name}")
|
||||
except Exception as e:
|
||||
error_msg = f"Failed to delete {file_path.name}: {str(e)}"
|
||||
errors.append(error_msg)
|
||||
logger.warning(error_msg)
|
||||
|
||||
logger.info(
|
||||
f"Log cleanup by {auth.get('username', 'unknown')}: "
|
||||
f"{files_deleted} files, {space_freed} bytes"
|
||||
)
|
||||
|
||||
return LogCleanupResult(
|
||||
files_deleted=files_deleted,
|
||||
space_freed=space_freed,
|
||||
errors=errors,
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.exception("Failed to cleanup logs")
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to cleanup logs: {str(e)}",
|
||||
) from e
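
For reference, a minimal client call against this endpoint could look like the following; the base URL, bearer token, and the `/api/logging` mount point are assumptions, not values confirmed by this diff.

```python
import requests

BASE_URL = "http://127.0.0.1:8000"      # assumed local deployment
TOKEN = "<jwt-token>"                    # placeholder credential

resp = requests.post(
    f"{BASE_URL}/api/logging/cleanup",   # assumes the logging router is mounted under /api/logging
    params={"max_age_days": 30},
    headers={"Authorization": f"Bearer {TOKEN}"},
    timeout=30,
)
resp.raise_for_status()
print(resp.json())  # e.g. {"files_deleted": 3, "space_freed": 10485760, "errors": []}
```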
@ -1,459 +0,0 @@
|
||||
"""Maintenance API endpoints for system housekeeping and diagnostics.
|
||||
|
||||
This module exposes cleanup routines, system statistics, maintenance
|
||||
operations, and health reporting endpoints that rely on the shared system
|
||||
utilities and monitoring services. The routes allow administrators to
|
||||
prune logs, inspect disk usage, vacuum or analyze the database, and gather
|
||||
holistic health metrics for AniWorld deployments."""
|
||||
|
||||
import logging
|
||||
from typing import Any, Dict
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from src.infrastructure.security.database_integrity import DatabaseIntegrityChecker
|
||||
from src.server.services.monitoring_service import get_monitoring_service
|
||||
from src.server.utils.dependencies import get_database_session
|
||||
from src.server.utils.system import get_system_utilities
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(prefix="/api/maintenance", tags=["maintenance"])
|
||||
|
||||
|
||||
def get_system_utils():
|
||||
"""Dependency to get system utilities."""
|
||||
return get_system_utilities()
|
||||
|
||||
|
||||
@router.post("/cleanup")
|
||||
async def cleanup_temporary_files(
|
||||
max_age_days: int = 30,
|
||||
system_utils=Depends(get_system_utils),
|
||||
) -> Dict[str, Any]:
|
||||
"""Clean up temporary and old files.
|
||||
|
||||
Args:
|
||||
max_age_days: Delete files older than this many days.
|
||||
system_utils: System utilities dependency.
|
||||
|
||||
Returns:
|
||||
dict: Cleanup results.
|
||||
"""
|
||||
try:
|
||||
deleted_logs = system_utils.cleanup_directory(
|
||||
"logs", "*.log", max_age_days
|
||||
)
|
||||
deleted_temp = system_utils.cleanup_directory(
|
||||
"Temp", "*", max_age_days
|
||||
)
|
||||
deleted_dirs = system_utils.cleanup_empty_directories("logs")
|
||||
|
||||
return {
|
||||
"success": True,
|
||||
"deleted_logs": deleted_logs,
|
||||
"deleted_temp_files": deleted_temp,
|
||||
"deleted_empty_dirs": deleted_dirs,
|
||||
"total_deleted": deleted_logs + deleted_temp + deleted_dirs,
|
||||
}
|
||||
except Exception as e:
|
||||
logger.error(f"Cleanup failed: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
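
The `cleanup_directory` implementation lives in the shared system utilities and is not shown in this diff; a rough sketch of what an age-based cleanup helper can look like, using only `pathlib` and `time`, is:

```python
import time
from pathlib import Path


def cleanup_directory(directory: str, pattern: str, max_age_days: int) -> int:
    """Delete files matching `pattern` older than `max_age_days`; return the count removed."""
    root = Path(directory)
    if not root.is_dir():
        return 0

    cutoff = time.time() - max_age_days * 24 * 60 * 60
    deleted = 0
    for path in root.rglob(pattern):
        if path.is_file() and path.stat().st_mtime < cutoff:
            try:
                path.unlink()
                deleted += 1
            except OSError:
                # Skip files that are locked or already gone
                continue
    return deleted
```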
@router.get("/stats")
|
||||
async def get_maintenance_stats(
|
||||
db: AsyncSession = Depends(get_database_session),
|
||||
system_utils=Depends(get_system_utils),
|
||||
) -> Dict[str, Any]:
|
||||
"""Get system maintenance statistics.
|
||||
|
||||
Args:
|
||||
db: Database session dependency.
|
||||
system_utils: System utilities dependency.
|
||||
|
||||
Returns:
|
||||
dict: Maintenance statistics.
|
||||
"""
|
||||
try:
|
||||
monitoring = get_monitoring_service()
|
||||
|
||||
# Get disk usage
|
||||
disk_info = system_utils.get_disk_usage("/")
|
||||
|
||||
# Get logs directory size
|
||||
logs_size = system_utils.get_directory_size("logs")
|
||||
data_size = system_utils.get_directory_size("data")
|
||||
temp_size = system_utils.get_directory_size("Temp")
|
||||
|
||||
# Get system info
|
||||
system_info = system_utils.get_system_info()
|
||||
|
||||
# Get queue metrics
|
||||
queue_metrics = await monitoring.get_queue_metrics(db)
|
||||
|
||||
return {
|
||||
"disk": {
|
||||
"total_gb": disk_info.total_bytes / (1024**3),
|
||||
"used_gb": disk_info.used_bytes / (1024**3),
|
||||
"free_gb": disk_info.free_bytes / (1024**3),
|
||||
"percent_used": disk_info.percent_used,
|
||||
},
|
||||
"directories": {
|
||||
"logs_mb": logs_size / (1024 * 1024),
|
||||
"data_mb": data_size / (1024 * 1024),
|
||||
"temp_mb": temp_size / (1024 * 1024),
|
||||
},
|
||||
"system": system_info,
|
||||
"queue": {
|
||||
"total_items": queue_metrics.total_items,
|
||||
"downloaded_gb": queue_metrics.downloaded_bytes / (1024**3),
|
||||
"total_gb": queue_metrics.total_size_bytes / (1024**3),
|
||||
},
|
||||
}
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get maintenance stats: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.post("/vacuum")
|
||||
async def vacuum_database(
|
||||
db: AsyncSession = Depends(get_database_session),
|
||||
) -> Dict[str, Any]:
|
||||
"""Optimize database (vacuum).
|
||||
|
||||
Args:
|
||||
db: Database session dependency.
|
||||
|
||||
Returns:
|
||||
dict: Vacuum result.
|
||||
"""
|
||||
try:
|
||||
from sqlalchemy import text
|
||||
|
||||
# VACUUM command to optimize database
|
||||
await db.execute(text("VACUUM"))
|
||||
await db.commit()
|
||||
|
||||
logger.info("Database vacuumed successfully")
|
||||
return {
|
||||
"success": True,
|
||||
"message": "Database optimized successfully",
|
||||
}
|
||||
except Exception as e:
|
||||
logger.error(f"Database vacuum failed: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
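
One caveat: SQLite refuses to run `VACUUM` inside an open transaction, so depending on how the async session is configured the statement may need a connection in autocommit mode. A minimal standalone maintenance sketch with the stdlib `sqlite3` driver (the database path is a placeholder, and this is not the endpoint's implementation) is:

```python
import sqlite3


def vacuum_sqlite(db_path: str = "data/aniworld.db") -> None:  # placeholder path
    # isolation_level=None puts sqlite3 into autocommit mode, which VACUUM requires.
    conn = sqlite3.connect(db_path, isolation_level=None)
    try:
        conn.execute("VACUUM")
    finally:
        conn.close()
```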
@router.post("/rebuild-index")
|
||||
async def rebuild_database_indexes(
|
||||
db: AsyncSession = Depends(get_database_session),
|
||||
) -> Dict[str, Any]:
|
||||
"""Rebuild database indexes.
|
||||
|
||||
Note: This is a placeholder as SQLite doesn't have REINDEX
|
||||
for most operations. For production databases, implement
|
||||
specific index rebuilding logic.
|
||||
|
||||
Args:
|
||||
db: Database session dependency.
|
||||
|
||||
Returns:
|
||||
dict: Rebuild result.
|
||||
"""
|
||||
try:
|
||||
from sqlalchemy import text
|
||||
|
||||
# Analyze database for query optimization
|
||||
await db.execute(text("ANALYZE"))
|
||||
await db.commit()
|
||||
|
||||
logger.info("Database indexes analyzed successfully")
|
||||
return {
|
||||
"success": True,
|
||||
"message": "Database indexes analyzed successfully",
|
||||
}
|
||||
except Exception as e:
|
||||
logger.error(f"Index rebuild failed: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.post("/prune-logs")
|
||||
async def prune_old_logs(
|
||||
days: int = 7,
|
||||
system_utils=Depends(get_system_utils),
|
||||
) -> Dict[str, Any]:
|
||||
"""Remove log files older than specified days.
|
||||
|
||||
Args:
|
||||
days: Keep logs from last N days.
|
||||
system_utils: System utilities dependency.
|
||||
|
||||
Returns:
|
||||
dict: Pruning results.
|
||||
"""
|
||||
try:
|
||||
deleted = system_utils.cleanup_directory(
|
||||
"logs", "*.log", max_age_days=days
|
||||
)
|
||||
|
||||
logger.info(f"Pruned {deleted} log files")
|
||||
return {
|
||||
"success": True,
|
||||
"deleted_count": deleted,
|
||||
"message": f"Deleted {deleted} log files older than {days} days",
|
||||
}
|
||||
except Exception as e:
|
||||
logger.error(f"Log pruning failed: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.get("/disk-usage")
|
||||
async def get_disk_usage(
|
||||
system_utils=Depends(get_system_utils),
|
||||
) -> Dict[str, Any]:
|
||||
"""Get detailed disk usage information.
|
||||
|
||||
Args:
|
||||
system_utils: System utilities dependency.
|
||||
|
||||
Returns:
|
||||
dict: Disk usage for all partitions.
|
||||
"""
|
||||
try:
|
||||
disk_infos = system_utils.get_all_disk_usage()
|
||||
|
||||
partitions = []
|
||||
for disk_info in disk_infos:
|
||||
partitions.append(
|
||||
{
|
||||
"path": disk_info.path,
|
||||
"total_gb": disk_info.total_bytes / (1024**3),
|
||||
"used_gb": disk_info.used_bytes / (1024**3),
|
||||
"free_gb": disk_info.free_bytes / (1024**3),
|
||||
"percent_used": disk_info.percent_used,
|
||||
}
|
||||
)
|
||||
|
||||
return {
|
||||
"success": True,
|
||||
"partitions": partitions,
|
||||
"total_partitions": len(partitions),
|
||||
}
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get disk usage: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
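
`get_all_disk_usage` is provided by the system utilities module; a comparable standalone sketch built directly on the third-party `psutil` package (an assumption about the underlying mechanism, not a copy of the real helper) could look like this:

```python
from typing import Any, Dict, List

import psutil


def all_disk_usage() -> List[Dict[str, Any]]:
    """Collect usage statistics for every mounted partition."""
    partitions = []
    for part in psutil.disk_partitions(all=False):
        try:
            usage = psutil.disk_usage(part.mountpoint)
        except (PermissionError, OSError):
            # Skip mounts we cannot stat (e.g. optical drives, restricted paths)
            continue
        partitions.append(
            {
                "path": part.mountpoint,
                "total_gb": usage.total / (1024**3),
                "used_gb": usage.used / (1024**3),
                "free_gb": usage.free / (1024**3),
                "percent_used": usage.percent,
            }
        )
    return partitions
```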
@router.get("/processes")
|
||||
async def get_running_processes(
|
||||
limit: int = 10,
|
||||
system_utils=Depends(get_system_utils),
|
||||
) -> Dict[str, Any]:
|
||||
"""Get running processes information.
|
||||
|
||||
Args:
|
||||
limit: Maximum number of processes to return.
|
||||
system_utils: System utilities dependency.
|
||||
|
||||
Returns:
|
||||
dict: Running processes information.
|
||||
"""
|
||||
try:
|
||||
processes = system_utils.get_all_processes()
|
||||
|
||||
# Sort by memory usage and get top N
|
||||
sorted_processes = sorted(
|
||||
processes, key=lambda x: x.memory_mb, reverse=True
|
||||
)
|
||||
|
||||
top_processes = []
|
||||
for proc in sorted_processes[:limit]:
|
||||
top_processes.append(
|
||||
{
|
||||
"pid": proc.pid,
|
||||
"name": proc.name,
|
||||
"cpu_percent": round(proc.cpu_percent, 2),
|
||||
"memory_mb": round(proc.memory_mb, 2),
|
||||
"status": proc.status,
|
||||
}
|
||||
)
|
||||
|
||||
return {
|
||||
"success": True,
|
||||
"processes": top_processes,
|
||||
"total_processes": len(processes),
|
||||
}
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get processes: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.post("/health-check")
|
||||
async def full_health_check(
|
||||
db: AsyncSession = Depends(get_database_session),
|
||||
system_utils=Depends(get_system_utils),
|
||||
) -> Dict[str, Any]:
|
||||
"""Perform full system health check and generate report.
|
||||
|
||||
Args:
|
||||
db: Database session dependency.
|
||||
system_utils: System utilities dependency.
|
||||
|
||||
Returns:
|
||||
dict: Complete health check report.
|
||||
"""
|
||||
try:
|
||||
monitoring = get_monitoring_service()
|
||||
|
||||
# Check database and filesystem
|
||||
from src.server.api.health import check_database_health
|
||||
from src.server.api.health import check_filesystem_health as check_fs
|
||||
db_health = await check_database_health(db)
|
||||
fs_health = check_fs()
|
||||
|
||||
# Get system metrics
|
||||
system_metrics = monitoring.get_system_metrics()
|
||||
|
||||
# Get error metrics
|
||||
error_metrics = monitoring.get_error_metrics()
|
||||
|
||||
# Get queue metrics
|
||||
queue_metrics = await monitoring.get_queue_metrics(db)
|
||||
|
||||
# Determine overall health
|
||||
issues = []
|
||||
if db_health.status != "healthy":
|
||||
issues.append("Database connectivity issue")
|
||||
if fs_health.get("status") != "healthy":
|
||||
issues.append("Filesystem accessibility issue")
|
||||
if system_metrics.cpu_percent > 80:
|
||||
issues.append(f"High CPU usage: {system_metrics.cpu_percent}%")
|
||||
if system_metrics.memory_percent > 80:
|
||||
issues.append(
|
||||
f"High memory usage: {system_metrics.memory_percent}%"
|
||||
)
|
||||
if error_metrics.error_rate_per_hour > 1.0:
|
||||
issues.append(
|
||||
f"High error rate: "
|
||||
f"{error_metrics.error_rate_per_hour:.2f} errors/hour"
|
||||
)
|
||||
|
||||
overall_health = "healthy"
|
||||
if issues:
|
||||
overall_health = "degraded" if len(issues) < 3 else "unhealthy"
|
||||
|
||||
return {
|
||||
"overall_health": overall_health,
|
||||
"issues": issues,
|
||||
"metrics": {
|
||||
"database": {
|
||||
"status": db_health.status,
|
||||
"connection_time_ms": db_health.connection_time_ms,
|
||||
},
|
||||
"filesystem": fs_health,
|
||||
"system": {
|
||||
"cpu_percent": system_metrics.cpu_percent,
|
||||
"memory_percent": system_metrics.memory_percent,
|
||||
"disk_percent": system_metrics.disk_percent,
|
||||
},
|
||||
"queue": {
|
||||
"total_items": queue_metrics.total_items,
|
||||
"failed_items": queue_metrics.failed_items,
|
||||
"success_rate": round(queue_metrics.success_rate, 2),
|
||||
},
|
||||
"errors": {
|
||||
"errors_24h": error_metrics.errors_24h,
|
||||
"rate_per_hour": round(
|
||||
error_metrics.error_rate_per_hour, 2
|
||||
),
|
||||
},
|
||||
},
|
||||
}
|
||||
except Exception as e:
|
||||
logger.error(f"Health check failed: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
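
The threshold logic above (CPU or memory above 80%, error rate above 1 per hour, fewer than three issues counting as "degraded") is easy to unit-test when pulled into a small pure function. A sketch of that refactor, taking plain values instead of the service objects, is:

```python
from typing import List, Tuple


def classify_health(
    db_ok: bool,
    fs_ok: bool,
    cpu_percent: float,
    memory_percent: float,
    error_rate_per_hour: float,
) -> Tuple[str, List[str]]:
    """Return (overall_health, issues) using the same thresholds as the endpoint."""
    issues: List[str] = []
    if not db_ok:
        issues.append("Database connectivity issue")
    if not fs_ok:
        issues.append("Filesystem accessibility issue")
    if cpu_percent > 80:
        issues.append(f"High CPU usage: {cpu_percent}%")
    if memory_percent > 80:
        issues.append(f"High memory usage: {memory_percent}%")
    if error_rate_per_hour > 1.0:
        issues.append(f"High error rate: {error_rate_per_hour:.2f} errors/hour")

    if not issues:
        return "healthy", issues
    return ("degraded" if len(issues) < 3 else "unhealthy"), issues


# Example: two issues (CPU and memory) classify the system as "degraded".
assert classify_health(True, True, 95.0, 85.0, 0.2)[0] == "degraded"
```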
@router.get("/integrity/check")
|
||||
async def check_database_integrity(
|
||||
db: AsyncSession = Depends(get_database_session),
|
||||
) -> Dict[str, Any]:
|
||||
"""Check database integrity.
|
||||
|
||||
Verifies:
|
||||
- No orphaned records
|
||||
- Valid foreign key references
|
||||
- No duplicate keys
|
||||
- Data consistency
|
||||
|
||||
Args:
|
||||
db: Database session dependency.
|
||||
|
||||
Returns:
|
||||
dict: Integrity check results with issues found.
|
||||
"""
|
||||
try:
|
||||
# Convert async session to sync for the checker
|
||||
# Note: This is a temporary solution. In production,
|
||||
# consider implementing async version of integrity checker.
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
sync_session = Session(bind=db.sync_session.bind)
|
||||
|
||||
checker = DatabaseIntegrityChecker(sync_session)
|
||||
results = checker.check_all()
|
||||
|
||||
if results["total_issues"] > 0:
|
||||
logger.warning(
|
||||
f"Database integrity check found {results['total_issues']} "
|
||||
f"issues"
|
||||
)
|
||||
else:
|
||||
logger.info("Database integrity check passed")
|
||||
|
||||
return {
|
||||
"success": True,
|
||||
"timestamp": None, # Add timestamp if needed
|
||||
"results": results,
|
||||
}
|
||||
except Exception as e:
|
||||
logger.error(f"Integrity check failed: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
|
||||
|
||||
@router.post("/integrity/repair")
|
||||
async def repair_database_integrity(
|
||||
db: AsyncSession = Depends(get_database_session),
|
||||
) -> Dict[str, Any]:
|
||||
"""Repair database integrity by removing orphaned records.
|
||||
|
||||
**Warning**: This operation will delete orphaned records permanently.
|
||||
|
||||
Args:
|
||||
db: Database session dependency.
|
||||
|
||||
Returns:
|
||||
dict: Repair results with count of records removed.
|
||||
"""
|
||||
try:
|
||||
from sqlalchemy.orm import Session
|
||||
|
||||
sync_session = Session(bind=db.sync_session.bind)
|
||||
|
||||
checker = DatabaseIntegrityChecker(sync_session)
|
||||
removed_count = checker.repair_orphaned_records()
|
||||
|
||||
logger.info(f"Removed {removed_count} orphaned records")
|
||||
|
||||
return {
|
||||
"success": True,
|
||||
"removed_records": removed_count,
|
||||
"message": (
|
||||
f"Successfully removed {removed_count} orphaned records"
|
||||
),
|
||||
}
|
||||
except Exception as e:
|
||||
logger.error(f"Integrity repair failed: {e}")
|
||||
raise HTTPException(status_code=500, detail=str(e))
|
||||
@ -1,531 +0,0 @@
|
||||
"""Provider management API endpoints.
|
||||
|
||||
This module provides REST API endpoints for monitoring and managing
|
||||
anime providers, including health checks, configuration, and failover.
|
||||
"""
|
||||
import logging
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from src.core.providers.config_manager import ProviderSettings, get_config_manager
|
||||
from src.core.providers.failover import get_failover
|
||||
from src.core.providers.health_monitor import get_health_monitor
|
||||
from src.server.utils.dependencies import require_auth
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(prefix="/api/providers", tags=["providers"])
|
||||
|
||||
|
||||
# Request/Response Models
|
||||
|
||||
|
||||
class ProviderHealthResponse(BaseModel):
|
||||
"""Response model for provider health status."""
|
||||
|
||||
provider_name: str
|
||||
is_available: bool
|
||||
last_check_time: Optional[str] = None
|
||||
total_requests: int
|
||||
successful_requests: int
|
||||
failed_requests: int
|
||||
success_rate: float
|
||||
average_response_time_ms: float
|
||||
last_error: Optional[str] = None
|
||||
last_error_time: Optional[str] = None
|
||||
consecutive_failures: int
|
||||
total_bytes_downloaded: int
|
||||
uptime_percentage: float
|
||||
|
||||
|
||||
class HealthSummaryResponse(BaseModel):
|
||||
"""Response model for overall health summary."""
|
||||
|
||||
total_providers: int
|
||||
available_providers: int
|
||||
availability_percentage: float
|
||||
average_success_rate: float
|
||||
average_response_time_ms: float
|
||||
providers: Dict[str, Dict[str, Any]]
|
||||
|
||||
|
||||
class ProviderSettingsRequest(BaseModel):
|
||||
"""Request model for updating provider settings."""
|
||||
|
||||
enabled: Optional[bool] = None
|
||||
priority: Optional[int] = None
|
||||
timeout_seconds: Optional[int] = Field(None, gt=0)
|
||||
max_retries: Optional[int] = Field(None, ge=0)
|
||||
retry_delay_seconds: Optional[float] = Field(None, gt=0)
|
||||
max_concurrent_downloads: Optional[int] = Field(None, gt=0)
|
||||
bandwidth_limit_mbps: Optional[float] = Field(None, gt=0)
|
||||
|
||||
|
||||
class ProviderSettingsResponse(BaseModel):
|
||||
"""Response model for provider settings."""
|
||||
|
||||
name: str
|
||||
enabled: bool
|
||||
priority: int
|
||||
timeout_seconds: int
|
||||
max_retries: int
|
||||
retry_delay_seconds: float
|
||||
max_concurrent_downloads: int
|
||||
bandwidth_limit_mbps: Optional[float] = None
|
||||
|
||||
|
||||
class FailoverStatsResponse(BaseModel):
|
||||
"""Response model for failover statistics."""
|
||||
|
||||
total_providers: int
|
||||
providers: List[str]
|
||||
current_provider: str
|
||||
max_retries: int
|
||||
retry_delay: float
|
||||
health_monitoring_enabled: bool
|
||||
available_providers: Optional[List[str]] = None
|
||||
unavailable_providers: Optional[List[str]] = None
|
||||
|
||||
|
||||
# Health Monitoring Endpoints
|
||||
|
||||
|
||||
@router.get("/health", response_model=HealthSummaryResponse)
|
||||
async def get_providers_health(
|
||||
auth: Optional[dict] = Depends(require_auth),
|
||||
) -> HealthSummaryResponse:
|
||||
"""Get overall provider health summary.
|
||||
|
||||
Args:
|
||||
auth: Authentication token (optional).
|
||||
|
||||
Returns:
|
||||
Health summary for all providers.
|
||||
"""
|
||||
try:
|
||||
health_monitor = get_health_monitor()
|
||||
summary = health_monitor.get_health_summary()
|
||||
return HealthSummaryResponse(**summary)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get provider health: {e}", exc_info=True)
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve provider health: {str(e)}",
|
||||
)
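
The summary returned here aggregates per-provider metrics into fleet-wide numbers. A small sketch of that aggregation is shown below; the field names follow the response model above, while the input shape is an assumption about what the health monitor hands back, not its actual code.

```python
from typing import Any, Dict


def summarize(providers: Dict[str, Dict[str, Any]]) -> Dict[str, Any]:
    """Aggregate per-provider metrics into an overall health summary."""
    total = len(providers)
    available = sum(1 for p in providers.values() if p.get("is_available"))
    success_rates = [p.get("success_rate", 0.0) for p in providers.values()]
    response_times = [p.get("average_response_time_ms", 0.0) for p in providers.values()]

    return {
        "total_providers": total,
        "available_providers": available,
        "availability_percentage": (available / total * 100) if total else 0.0,
        "average_success_rate": sum(success_rates) / total if total else 0.0,
        "average_response_time_ms": sum(response_times) / total if total else 0.0,
        "providers": providers,
    }
```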
@router.get("/health/{provider_name}", response_model=ProviderHealthResponse) # noqa: E501
|
||||
async def get_provider_health(
|
||||
provider_name: str,
|
||||
auth: Optional[dict] = Depends(require_auth),
|
||||
) -> ProviderHealthResponse:
|
||||
"""Get health status for a specific provider.
|
||||
|
||||
Args:
|
||||
provider_name: Name of the provider.
|
||||
auth: Authentication token (optional).
|
||||
|
||||
Returns:
|
||||
Health metrics for the provider.
|
||||
|
||||
Raises:
|
||||
HTTPException: If provider not found or error occurs.
|
||||
"""
|
||||
try:
|
||||
health_monitor = get_health_monitor()
|
||||
metrics = health_monitor.get_provider_metrics(provider_name)
|
||||
|
||||
if not metrics:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
detail=f"Provider '{provider_name}' not found",
|
||||
)
|
||||
|
||||
return ProviderHealthResponse(**metrics.to_dict())
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Failed to get health for {provider_name}: {e}",
|
||||
exc_info=True,
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve provider health: {str(e)}",
|
||||
)
|
||||
|
||||
|
||||
@router.get("/available", response_model=List[str])
|
||||
async def get_available_providers(
|
||||
auth: Optional[dict] = Depends(require_auth),
|
||||
) -> List[str]:
|
||||
"""Get list of currently available providers.
|
||||
|
||||
Args:
|
||||
auth: Authentication token (optional).
|
||||
|
||||
Returns:
|
||||
List of available provider names.
|
||||
"""
|
||||
try:
|
||||
health_monitor = get_health_monitor()
|
||||
return health_monitor.get_available_providers()
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get available providers: {e}", exc_info=True) # noqa: E501
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve available providers: {str(e)}",
|
||||
)
|
||||
|
||||
|
||||
@router.get("/best", response_model=Dict[str, str])
|
||||
async def get_best_provider(
|
||||
auth: Optional[dict] = Depends(require_auth),
|
||||
) -> Dict[str, str]:
|
||||
"""Get the best performing provider.
|
||||
|
||||
Args:
|
||||
auth: Authentication token (optional).
|
||||
|
||||
Returns:
|
||||
Dictionary with best provider name.
|
||||
"""
|
||||
try:
|
||||
health_monitor = get_health_monitor()
|
||||
best = health_monitor.get_best_provider()
|
||||
|
||||
if not best:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
|
||||
detail="No available providers",
|
||||
)
|
||||
|
||||
return {"provider": best}
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get best provider: {e}", exc_info=True)
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to determine best provider: {str(e)}",
|
||||
)
|
||||
|
||||
|
||||
@router.post("/health/{provider_name}/reset")
|
||||
async def reset_provider_health(
|
||||
provider_name: str,
|
||||
auth: Optional[dict] = Depends(require_auth),
|
||||
) -> Dict[str, str]:
|
||||
"""Reset health metrics for a specific provider.
|
||||
|
||||
Args:
|
||||
provider_name: Name of the provider.
|
||||
auth: Authentication token (optional).
|
||||
|
||||
Returns:
|
||||
Success message.
|
||||
|
||||
Raises:
|
||||
HTTPException: If provider not found or error occurs.
|
||||
"""
|
||||
try:
|
||||
health_monitor = get_health_monitor()
|
||||
success = health_monitor.reset_provider_metrics(provider_name)
|
||||
|
||||
if not success:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
detail=f"Provider '{provider_name}' not found",
|
||||
)
|
||||
|
||||
return {"message": f"Reset metrics for provider: {provider_name}"}
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Failed to reset health for {provider_name}: {e}",
|
||||
exc_info=True,
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to reset provider health: {str(e)}",
|
||||
)
|
||||
|
||||
|
||||
# Configuration Endpoints
|
||||
|
||||
|
||||
@router.get("/config", response_model=List[ProviderSettingsResponse])
|
||||
async def get_all_provider_configs(
|
||||
auth: Optional[dict] = Depends(require_auth),
|
||||
) -> List[ProviderSettingsResponse]:
|
||||
"""Get configuration for all providers.
|
||||
|
||||
Args:
|
||||
auth: Authentication token (optional).
|
||||
|
||||
Returns:
|
||||
List of provider configurations.
|
||||
"""
|
||||
try:
|
||||
config_manager = get_config_manager()
|
||||
all_settings = config_manager.get_all_provider_settings()
|
||||
return [
|
||||
ProviderSettingsResponse(**settings.to_dict())
|
||||
for settings in all_settings.values()
|
||||
]
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get provider configs: {e}", exc_info=True)
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve provider configurations: {str(e)}", # noqa: E501
|
||||
)
|
||||
|
||||
|
||||
@router.get(
|
||||
"/config/{provider_name}", response_model=ProviderSettingsResponse
|
||||
)
|
||||
async def get_provider_config(
|
||||
provider_name: str,
|
||||
auth: Optional[dict] = Depends(require_auth),
|
||||
) -> ProviderSettingsResponse:
|
||||
"""Get configuration for a specific provider.
|
||||
|
||||
Args:
|
||||
provider_name: Name of the provider.
|
||||
auth: Authentication token (optional).
|
||||
|
||||
Returns:
|
||||
Provider configuration.
|
||||
|
||||
Raises:
|
||||
HTTPException: If provider not found or error occurs.
|
||||
"""
|
||||
try:
|
||||
config_manager = get_config_manager()
|
||||
settings = config_manager.get_provider_settings(provider_name)
|
||||
|
||||
if not settings:
|
||||
# Return default settings
|
||||
settings = ProviderSettings(name=provider_name)
|
||||
|
||||
return ProviderSettingsResponse(**settings.to_dict())
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Failed to get config for {provider_name}: {e}",
|
||||
exc_info=True,
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve provider configuration: {str(e)}", # noqa: E501
|
||||
)
|
||||
|
||||
|
||||
@router.put(
|
||||
"/config/{provider_name}", response_model=ProviderSettingsResponse
|
||||
)
|
||||
async def update_provider_config(
|
||||
provider_name: str,
|
||||
settings: ProviderSettingsRequest,
|
||||
auth: Optional[dict] = Depends(require_auth),
|
||||
) -> ProviderSettingsResponse:
|
||||
"""Update configuration for a specific provider.
|
||||
|
||||
Args:
|
||||
provider_name: Name of the provider.
|
||||
settings: Settings to update.
|
||||
auth: Authentication token (optional).
|
||||
|
||||
Returns:
|
||||
Updated provider configuration.
|
||||
"""
|
||||
try:
|
||||
config_manager = get_config_manager()
|
||||
|
||||
# Update settings
|
||||
update_dict = settings.dict(exclude_unset=True)
|
||||
config_manager.update_provider_settings(
|
||||
provider_name, **update_dict
|
||||
)
|
||||
|
||||
# Get updated settings
|
||||
updated = config_manager.get_provider_settings(provider_name)
|
||||
if not updated:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail="Failed to retrieve updated configuration",
|
||||
)
|
||||
|
||||
return ProviderSettingsResponse(**updated.to_dict())
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Failed to update config for {provider_name}: {e}",
|
||||
exc_info=True,
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to update provider configuration: {str(e)}",
|
||||
)
|
||||
|
||||
|
||||
@router.post("/config/{provider_name}/enable")
|
||||
async def enable_provider(
|
||||
provider_name: str,
|
||||
auth: Optional[dict] = Depends(require_auth),
|
||||
) -> Dict[str, str]:
|
||||
"""Enable a provider.
|
||||
|
||||
Args:
|
||||
provider_name: Name of the provider.
|
||||
auth: Authentication token (optional).
|
||||
|
||||
Returns:
|
||||
Success message.
|
||||
"""
|
||||
try:
|
||||
config_manager = get_config_manager()
|
||||
config_manager.update_provider_settings(
|
||||
provider_name, enabled=True
|
||||
)
|
||||
return {"message": f"Enabled provider: {provider_name}"}
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Failed to enable {provider_name}: {e}", exc_info=True
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to enable provider: {str(e)}",
|
||||
)
|
||||
|
||||
|
||||
@router.post("/config/{provider_name}/disable")
|
||||
async def disable_provider(
|
||||
provider_name: str,
|
||||
auth: Optional[dict] = Depends(require_auth),
|
||||
) -> Dict[str, str]:
|
||||
"""Disable a provider.
|
||||
|
||||
Args:
|
||||
provider_name: Name of the provider.
|
||||
auth: Authentication token (optional).
|
||||
|
||||
Returns:
|
||||
Success message.
|
||||
"""
|
||||
try:
|
||||
config_manager = get_config_manager()
|
||||
config_manager.update_provider_settings(
|
||||
provider_name, enabled=False
|
||||
)
|
||||
return {"message": f"Disabled provider: {provider_name}"}
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Failed to disable {provider_name}: {e}", exc_info=True
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to disable provider: {str(e)}",
|
||||
)
|
||||
|
||||
|
||||
# Failover Endpoints
|
||||
|
||||
|
||||
@router.get("/failover", response_model=FailoverStatsResponse)
|
||||
async def get_failover_stats(
|
||||
auth: Optional[dict] = Depends(require_auth),
|
||||
) -> FailoverStatsResponse:
|
||||
"""Get failover statistics and configuration.
|
||||
|
||||
Args:
|
||||
auth: Authentication token (optional).
|
||||
|
||||
Returns:
|
||||
Failover statistics.
|
||||
"""
|
||||
try:
|
||||
failover = get_failover()
|
||||
stats = failover.get_failover_stats()
|
||||
return FailoverStatsResponse(**stats)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get failover stats: {e}", exc_info=True)
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to retrieve failover statistics: {str(e)}",
|
||||
)
|
||||
|
||||
|
||||
@router.post("/failover/{provider_name}/add")
|
||||
async def add_provider_to_failover(
|
||||
provider_name: str,
|
||||
auth: Optional[dict] = Depends(require_auth),
|
||||
) -> Dict[str, str]:
|
||||
"""Add a provider to the failover chain.
|
||||
|
||||
Args:
|
||||
provider_name: Name of the provider.
|
||||
auth: Authentication token (optional).
|
||||
|
||||
Returns:
|
||||
Success message.
|
||||
"""
|
||||
try:
|
||||
failover = get_failover()
|
||||
failover.add_provider(provider_name)
|
||||
return {"message": f"Added provider to failover: {provider_name}"}
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Failed to add {provider_name} to failover: {e}",
|
||||
exc_info=True,
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to add provider to failover: {str(e)}",
|
||||
)
|
||||
|
||||
|
||||
@router.delete("/failover/{provider_name}")
|
||||
async def remove_provider_from_failover(
|
||||
provider_name: str,
|
||||
auth: Optional[dict] = Depends(require_auth),
|
||||
) -> Dict[str, str]:
|
||||
"""Remove a provider from the failover chain.
|
||||
|
||||
Args:
|
||||
provider_name: Name of the provider.
|
||||
auth: Authentication token (optional).
|
||||
|
||||
Returns:
|
||||
Success message.
|
||||
|
||||
Raises:
|
||||
HTTPException: If provider not found in failover chain.
|
||||
"""
|
||||
try:
|
||||
failover = get_failover()
|
||||
success = failover.remove_provider(provider_name)
|
||||
|
||||
if not success:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
detail=f"Provider '{provider_name}' not in failover chain", # noqa: E501
|
||||
)
|
||||
|
||||
return {
|
||||
"message": f"Removed provider from failover: {provider_name}"
|
||||
}
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
f"Failed to remove {provider_name} from failover: {e}",
|
||||
exc_info=True,
|
||||
)
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Failed to remove provider from failover: {str(e)}",
|
||||
)
|
||||
@ -1,176 +0,0 @@
|
||||
"""File upload API endpoints with security validation.
|
||||
|
||||
This module provides secure file upload endpoints with comprehensive
|
||||
validation for file size, type, extensions, and content.
|
||||
"""
|
||||
from fastapi import APIRouter, File, HTTPException, UploadFile, status
|
||||
|
||||
router = APIRouter(prefix="/api/upload", tags=["upload"])
|
||||
|
||||
# Security configurations
|
||||
MAX_FILE_SIZE = 50 * 1024 * 1024 # 50 MB
|
||||
ALLOWED_EXTENSIONS = {".jpg", ".jpeg", ".png", ".gif", ".txt", ".json", ".xml"}
|
||||
DANGEROUS_EXTENSIONS = {
|
||||
".exe",
|
||||
".sh",
|
||||
".bat",
|
||||
".cmd",
|
||||
".php",
|
||||
".jsp",
|
||||
".asp",
|
||||
".aspx",
|
||||
".py",
|
||||
".rb",
|
||||
".pl",
|
||||
".cgi",
|
||||
}
|
||||
ALLOWED_MIME_TYPES = {
|
||||
"image/jpeg",
|
||||
"image/png",
|
||||
"image/gif",
|
||||
"text/plain",
|
||||
"application/json",
|
||||
"application/xml",
|
||||
}
|
||||
|
||||
|
||||
def validate_file_extension(filename: str) -> None:
|
||||
"""Validate file extension against security rules.
|
||||
|
||||
Args:
|
||||
filename: Name of the file to validate
|
||||
|
||||
Raises:
|
||||
HTTPException: 415 if extension is dangerous or not allowed
|
||||
"""
|
||||
# Check for double extensions (e.g., file.jpg.php)
|
||||
parts = filename.split(".")
|
||||
if len(parts) > 2:
|
||||
# Check all extension parts, not just the last one
|
||||
for part in parts[1:]:
|
||||
ext = f".{part.lower()}"
|
||||
if ext in DANGEROUS_EXTENSIONS:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_415_UNSUPPORTED_MEDIA_TYPE,
|
||||
detail=f"Dangerous file extension detected: {ext}",
|
||||
)
|
||||
|
||||
# Get the actual extension
|
||||
if "." not in filename:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_415_UNSUPPORTED_MEDIA_TYPE,
|
||||
detail="File must have an extension",
|
||||
)
|
||||
|
||||
ext = "." + filename.rsplit(".", 1)[1].lower()
|
||||
|
||||
if ext in DANGEROUS_EXTENSIONS:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_415_UNSUPPORTED_MEDIA_TYPE,
|
||||
detail=f"File extension not allowed: {ext}",
|
||||
)
|
||||
|
||||
if ext not in ALLOWED_EXTENSIONS:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_415_UNSUPPORTED_MEDIA_TYPE,
|
||||
detail=(
|
||||
f"File extension not allowed: {ext}. "
|
||||
f"Allowed: {ALLOWED_EXTENSIONS}"
|
||||
),
|
||||
)
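
A couple of pytest-style checks, written as if `validate_file_extension` is imported from this module, illustrate the double-extension handling; `fastapi.HTTPException` carries the status code asserted here.

```python
import pytest
from fastapi import HTTPException


def test_double_extension_is_rejected():
    # file.jpg.php hides a dangerous extension behind an allowed one
    with pytest.raises(HTTPException) as exc:
        validate_file_extension("avatar.jpg.php")
    assert exc.value.status_code == 415


def test_plain_image_is_accepted():
    # Should not raise for an allowed extension
    validate_file_extension("poster.png")
```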
def validate_mime_type(content_type: str, content: bytes) -> None:
|
||||
"""Validate MIME type and content.
|
||||
|
||||
Args:
|
||||
content_type: Declared MIME type
|
||||
content: Actual file content
|
||||
|
||||
Raises:
|
||||
HTTPException: 415 if MIME type is not allowed or content is suspicious
|
||||
"""
|
||||
if content_type not in ALLOWED_MIME_TYPES:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_415_UNSUPPORTED_MEDIA_TYPE,
|
||||
detail=f"MIME type not allowed: {content_type}",
|
||||
)
|
||||
|
||||
# Basic content validation for PHP code
|
||||
dangerous_patterns = [
|
||||
b"<?php",
|
||||
b"<script",
|
||||
b"javascript:",
|
||||
b"<iframe",
|
||||
]
|
||||
|
||||
for pattern in dangerous_patterns:
|
||||
if pattern in content[:1024]: # Check first 1KB
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_415_UNSUPPORTED_MEDIA_TYPE,
|
||||
detail="Suspicious file content detected",
|
||||
)
|
||||
|
||||
|
||||
@router.post("")
|
||||
async def upload_file(
|
||||
file: UploadFile = File(...),
|
||||
):
|
||||
"""Upload a file with comprehensive security validation.
|
||||
|
||||
Validates:
|
||||
- File size (max 50MB)
|
||||
- File extension (blocks dangerous extensions)
|
||||
- Double extension bypass attempts
|
||||
- MIME type
|
||||
- Content inspection for malicious code
|
||||
|
||||
Note: Authentication removed for security testing purposes.
|
||||
|
||||
Args:
|
||||
file: The file to upload
|
||||
|
||||
Returns:
|
||||
dict: Upload confirmation with file details
|
||||
|
||||
Raises:
|
||||
HTTPException: 413 if file too large
|
||||
HTTPException: 415 if file type not allowed
|
||||
HTTPException: 400 if validation fails
|
||||
"""
|
||||
# Validate filename exists
|
||||
if not file.filename:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
detail="Filename is required",
|
||||
)
|
||||
|
||||
# Validate file extension
|
||||
validate_file_extension(file.filename)
|
||||
|
||||
# Read file content
|
||||
content = await file.read()
|
||||
|
||||
# Validate file size
|
||||
if len(content) > MAX_FILE_SIZE:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE,
|
||||
detail=(
|
||||
f"File size exceeds maximum allowed size "
|
||||
f"of {MAX_FILE_SIZE} bytes"
|
||||
),
|
||||
)
|
||||
|
||||
# Validate MIME type and content
|
||||
content_type = file.content_type or "application/octet-stream"
|
||||
validate_mime_type(content_type, content)
|
||||
|
||||
# In a real implementation, save the file here
|
||||
# For now, just return success
|
||||
|
||||
return {
|
||||
"status": "success",
|
||||
"filename": file.filename,
|
||||
"size": len(content),
|
||||
"content_type": content_type,
|
||||
}
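
With the route wired into the application, the whole validation chain can be exercised through FastAPI's TestClient. The application import path below is an assumption for illustration.

```python
from fastapi.testclient import TestClient

from src.server.fastapi_app import app  # assumed application import path

client = TestClient(app)

# An allowed text file passes all checks.
ok = client.post("/api/upload", files={"file": ("notes.txt", b"hello", "text/plain")})
assert ok.status_code == 200

# A PHP payload hidden behind a double extension is rejected with 415.
blocked = client.post(
    "/api/upload",
    files={"file": ("shell.jpg.php", b"<?php echo 1; ?>", "image/jpeg")},
)
assert blocked.status_code == 415
```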
@ -8,14 +8,14 @@ from typing import Optional
|
||||
from fastapi import APIRouter, Depends
|
||||
|
||||
from src.core.SeriesApp import SeriesApp
|
||||
from src.server.utils.dependencies import get_optional_series_app
|
||||
from src.server.utils.dependencies import get_series_app
|
||||
|
||||
router = APIRouter(prefix="/health", tags=["health"])
|
||||
|
||||
|
||||
@router.get("")
|
||||
async def health_check(
|
||||
series_app: Optional[SeriesApp] = Depends(get_optional_series_app)
|
||||
series_app: Optional[SeriesApp] = Depends(get_series_app)
|
||||
):
|
||||
"""Health check endpoint for monitoring."""
|
||||
return {
|
||||
|
||||
@ -5,10 +5,8 @@ This module provides the main FastAPI application with proper CORS
|
||||
configuration, middleware setup, static file serving, and Jinja2 template
|
||||
integration.
|
||||
"""
|
||||
import logging
|
||||
from contextlib import asynccontextmanager
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
import uvicorn
|
||||
from fastapi import FastAPI, HTTPException, Request
|
||||
@ -18,19 +16,12 @@ from fastapi.staticfiles import StaticFiles
|
||||
from src.config.settings import settings
|
||||
|
||||
# Import core functionality
|
||||
from src.core.SeriesApp import SeriesApp
|
||||
from src.infrastructure.logging import setup_logging
|
||||
from src.server.api.analytics import router as analytics_router
|
||||
from src.server.api.anime import router as anime_router
|
||||
from src.server.api.auth import router as auth_router
|
||||
from src.server.api.config import router as config_router
|
||||
from src.server.api.diagnostics import router as diagnostics_router
|
||||
from src.server.api.download import downloads_router
|
||||
from src.server.api.download import router as download_router
|
||||
from src.server.api.logging import router as logging_router
|
||||
from src.server.api.providers import router as providers_router
|
||||
from src.server.api.scheduler import router as scheduler_router
|
||||
from src.server.api.upload import router as upload_router
|
||||
from src.server.api.websocket import router as websocket_router
|
||||
from src.server.controllers.error_controller import (
|
||||
not_found_handler,
|
||||
@ -53,8 +44,8 @@ from src.server.services.websocket_service import get_websocket_service
|
||||
@asynccontextmanager
|
||||
async def lifespan(app: FastAPI):
|
||||
"""Manage application lifespan (startup and shutdown)."""
|
||||
# Setup logging first
|
||||
logger = setup_logging()
|
||||
# Setup logging first with DEBUG level
|
||||
logger = setup_logging(log_level="DEBUG")
|
||||
|
||||
# Startup
|
||||
try:
|
||||
@ -76,36 +67,24 @@ async def lifespan(app: FastAPI):
|
||||
except Exception as e:
|
||||
logger.warning("Failed to load config from config.json: %s", e)
|
||||
|
||||
# Initialize SeriesApp with configured directory and store it on
|
||||
# application state so it can be injected via dependencies.
|
||||
if settings.anime_directory:
|
||||
app.state.series_app = SeriesApp(settings.anime_directory)
|
||||
logger.info(
|
||||
"SeriesApp initialized with directory: %s",
|
||||
settings.anime_directory
|
||||
)
|
||||
else:
|
||||
# Log warning when anime directory is not configured
|
||||
logger.warning(
|
||||
"ANIME_DIRECTORY not configured. "
|
||||
"Some features may be unavailable."
|
||||
)
|
||||
|
||||
# Initialize progress service with websocket callback
|
||||
# Initialize progress service with event subscription
|
||||
progress_service = get_progress_service()
|
||||
ws_service = get_websocket_service()
|
||||
|
||||
async def broadcast_callback(
|
||||
message_type: str, data: dict, room: str
|
||||
) -> None:
|
||||
"""Broadcast progress updates via WebSocket."""
|
||||
message = {
|
||||
"type": message_type,
|
||||
"data": data,
|
||||
}
|
||||
await ws_service.manager.broadcast_to_room(message, room)
|
||||
async def progress_event_handler(event) -> None:
|
||||
"""Handle progress events and broadcast via WebSocket.
|
||||
|
||||
progress_service.set_broadcast_callback(broadcast_callback)
|
||||
Args:
|
||||
event: ProgressEvent containing progress update data
|
||||
"""
|
||||
message = {
|
||||
"type": event.event_type,
|
||||
"data": event.progress.to_dict(),
|
||||
}
|
||||
await ws_service.manager.broadcast_to_room(message, event.room)
|
||||
|
||||
# Subscribe to progress events
|
||||
progress_service.subscribe("progress_updated", progress_event_handler)
|
||||
|
||||
logger.info("FastAPI application started successfully")
|
||||
logger.info("Server running on http://127.0.0.1:8000")
|
||||
@ -123,15 +102,6 @@ async def lifespan(app: FastAPI):
|
||||
logger.info("FastAPI application shutting down")
|
||||
|
||||
|
||||
def get_series_app() -> Optional[SeriesApp]:
|
||||
"""Dependency to retrieve the SeriesApp instance from application state.
|
||||
|
||||
Returns None when the application wasn't configured with an anime
|
||||
directory (for example during certain test runs).
|
||||
"""
|
||||
return getattr(app.state, "series_app", None)
|
||||
|
||||
|
||||
# Initialize FastAPI app with lifespan
|
||||
app = FastAPI(
|
||||
title="Aniworld Download Manager",
|
||||
@ -172,14 +142,8 @@ app.include_router(page_router)
|
||||
app.include_router(auth_router)
|
||||
app.include_router(config_router)
|
||||
app.include_router(scheduler_router)
|
||||
app.include_router(logging_router)
|
||||
app.include_router(diagnostics_router)
|
||||
app.include_router(analytics_router)
|
||||
app.include_router(anime_router)
|
||||
app.include_router(download_router)
|
||||
app.include_router(downloads_router) # Alias for input validation tests
|
||||
app.include_router(providers_router)
|
||||
app.include_router(upload_router)
|
||||
app.include_router(websocket_router)
|
||||
|
||||
# Register exception handlers
|
||||
@ -204,5 +168,5 @@ if __name__ == "__main__":
|
||||
host="127.0.0.1",
|
||||
port=8000,
|
||||
reload=True,
|
||||
log_level="info"
|
||||
log_level="debug"
|
||||
)
|
||||
|
||||
@ -1,331 +0,0 @@
|
||||
"""Rate limiting middleware for API endpoints.
|
||||
|
||||
This module provides comprehensive rate limiting with support for:
|
||||
- Endpoint-specific rate limits
|
||||
- IP-based limiting
|
||||
- User-based rate limiting
|
||||
- Bypass mechanisms for authenticated users
|
||||
"""
|
||||
|
||||
import time
|
||||
from collections import defaultdict
|
||||
from typing import Callable, Dict, Optional, Tuple
|
||||
|
||||
from fastapi import Request, status
|
||||
from starlette.middleware.base import BaseHTTPMiddleware
|
||||
from starlette.responses import JSONResponse
|
||||
|
||||
|
||||
class RateLimitConfig:
|
||||
"""Configuration for rate limiting rules."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
requests_per_minute: int = 60,
|
||||
requests_per_hour: int = 1000,
|
||||
authenticated_multiplier: float = 2.0,
|
||||
):
|
||||
"""Initialize rate limit configuration.
|
||||
|
||||
Args:
|
||||
requests_per_minute: Max requests per minute for
|
||||
unauthenticated users
|
||||
requests_per_hour: Max requests per hour for
|
||||
unauthenticated users
|
||||
authenticated_multiplier: Multiplier for authenticated users
|
||||
"""
|
||||
self.requests_per_minute = requests_per_minute
|
||||
self.requests_per_hour = requests_per_hour
|
||||
self.authenticated_multiplier = authenticated_multiplier
|
||||
|
||||
|
||||
class RateLimitStore:
|
||||
"""In-memory store for rate limit tracking."""
|
||||
|
||||
def __init__(self):
|
||||
"""Initialize the rate limit store."""
|
||||
# Store format: {identifier: [(timestamp, count), ...]}
|
||||
self._minute_store: Dict[str, list] = defaultdict(list)
|
||||
self._hour_store: Dict[str, list] = defaultdict(list)
|
||||
|
||||
def check_limit(
|
||||
self,
|
||||
identifier: str,
|
||||
max_per_minute: int,
|
||||
max_per_hour: int,
|
||||
) -> Tuple[bool, Optional[int]]:
|
||||
"""Check if the identifier has exceeded rate limits.
|
||||
|
||||
Args:
|
||||
identifier: Unique identifier (IP or user ID)
|
||||
max_per_minute: Maximum requests allowed per minute
|
||||
max_per_hour: Maximum requests allowed per hour
|
||||
|
||||
Returns:
|
||||
Tuple of (allowed, retry_after_seconds)
|
||||
"""
|
||||
current_time = time.time()
|
||||
|
||||
# Clean up old entries
|
||||
self._cleanup_old_entries(identifier, current_time)
|
||||
|
||||
# Check minute limit
|
||||
minute_count = len(self._minute_store[identifier])
|
||||
if minute_count >= max_per_minute:
|
||||
# Calculate retry after time
|
||||
oldest_entry = self._minute_store[identifier][0]
|
||||
retry_after = int(60 - (current_time - oldest_entry))
|
||||
return False, max(retry_after, 1)
|
||||
|
||||
# Check hour limit
|
||||
hour_count = len(self._hour_store[identifier])
|
||||
if hour_count >= max_per_hour:
|
||||
# Calculate retry after time
|
||||
oldest_entry = self._hour_store[identifier][0]
|
||||
retry_after = int(3600 - (current_time - oldest_entry))
|
||||
return False, max(retry_after, 1)
|
||||
|
||||
return True, None
|
||||
|
||||
def record_request(self, identifier: str) -> None:
|
||||
"""Record a request for the identifier.
|
||||
|
||||
Args:
|
||||
identifier: Unique identifier (IP or user ID)
|
||||
"""
|
||||
current_time = time.time()
|
||||
self._minute_store[identifier].append(current_time)
|
||||
self._hour_store[identifier].append(current_time)
|
||||
|
||||
def get_remaining_requests(
|
||||
self, identifier: str, max_per_minute: int, max_per_hour: int
|
||||
) -> Tuple[int, int]:
|
||||
"""Get remaining requests for the identifier.
|
||||
|
||||
Args:
|
||||
identifier: Unique identifier
|
||||
max_per_minute: Maximum per minute
|
||||
max_per_hour: Maximum per hour
|
||||
|
||||
Returns:
|
||||
Tuple of (remaining_per_minute, remaining_per_hour)
|
||||
"""
|
||||
minute_used = len(self._minute_store.get(identifier, []))
|
||||
hour_used = len(self._hour_store.get(identifier, []))
|
||||
return (
|
||||
max(0, max_per_minute - minute_used),
|
||||
max(0, max_per_hour - hour_used)
|
||||
)
|
||||
|
||||
def _cleanup_old_entries(
|
||||
self, identifier: str, current_time: float
|
||||
) -> None:
|
||||
"""Remove entries older than the time windows.
|
||||
|
||||
Args:
|
||||
identifier: Unique identifier
|
||||
current_time: Current timestamp
|
||||
"""
|
||||
# Remove entries older than 1 minute
|
||||
minute_cutoff = current_time - 60
|
||||
self._minute_store[identifier] = [
|
||||
ts for ts in self._minute_store[identifier] if ts > minute_cutoff
|
||||
]
|
||||
|
||||
# Remove entries older than 1 hour
|
||||
hour_cutoff = current_time - 3600
|
||||
self._hour_store[identifier] = [
|
||||
ts for ts in self._hour_store[identifier] if ts > hour_cutoff
|
||||
]
|
||||
|
||||
# Clean up empty entries
|
||||
if not self._minute_store[identifier]:
|
||||
del self._minute_store[identifier]
|
||||
if not self._hour_store[identifier]:
|
||||
del self._hour_store[identifier]
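
The store can be exercised on its own, independent of the middleware, which makes the sliding-window behaviour easy to verify:

```python
store = RateLimitStore()

# Allow 3 requests per minute for this client, then start rejecting.
for i in range(4):
    allowed, retry_after = store.check_limit(
        "ip:203.0.113.7", max_per_minute=3, max_per_hour=100
    )
    if allowed:
        store.record_request("ip:203.0.113.7")
        print(f"request {i + 1}: allowed")
    else:
        print(f"request {i + 1}: rejected, retry after {retry_after}s")
```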
class RateLimitMiddleware(BaseHTTPMiddleware):
|
||||
"""Middleware for API rate limiting."""
|
||||
|
||||
# Endpoint-specific rate limits (overrides defaults)
|
||||
ENDPOINT_LIMITS: Dict[str, RateLimitConfig] = {
|
||||
"/api/auth/login": RateLimitConfig(
|
||||
requests_per_minute=5,
|
||||
requests_per_hour=20,
|
||||
),
|
||||
"/api/auth/register": RateLimitConfig(
|
||||
requests_per_minute=3,
|
||||
requests_per_hour=10,
|
||||
),
|
||||
"/api/download": RateLimitConfig(
|
||||
requests_per_minute=10,
|
||||
requests_per_hour=100,
|
||||
authenticated_multiplier=3.0,
|
||||
),
|
||||
}
|
||||
|
||||
# Paths that bypass rate limiting
|
||||
BYPASS_PATHS = {
|
||||
"/health",
|
||||
"/health/detailed",
|
||||
"/docs",
|
||||
"/redoc",
|
||||
"/openapi.json",
|
||||
"/static",
|
||||
"/ws",
|
||||
}
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
app,
|
||||
default_config: Optional[RateLimitConfig] = None,
|
||||
):
|
||||
"""Initialize rate limiting middleware.
|
||||
|
||||
Args:
|
||||
app: FastAPI application
|
||||
default_config: Default rate limit configuration
|
||||
"""
|
||||
super().__init__(app)
|
||||
self.default_config = default_config or RateLimitConfig()
|
||||
self.store = RateLimitStore()
|
||||
|
||||
async def dispatch(self, request: Request, call_next: Callable):
|
||||
"""Process request and apply rate limiting.
|
||||
|
||||
Args:
|
||||
request: Incoming HTTP request
|
||||
call_next: Next middleware or endpoint handler
|
||||
|
||||
Returns:
|
||||
HTTP response (either rate limit error or normal response)
|
||||
"""
|
||||
# Check if path should bypass rate limiting
|
||||
if self._should_bypass(request.url.path):
|
||||
return await call_next(request)
|
||||
|
||||
# Get identifier (user ID if authenticated, otherwise IP)
|
||||
identifier = self._get_identifier(request)
|
||||
|
||||
# Get rate limit configuration for this endpoint
|
||||
config = self._get_endpoint_config(request.url.path)
|
||||
|
||||
# Apply authenticated user multiplier if applicable
|
||||
is_authenticated = self._is_authenticated(request)
|
||||
max_per_minute = int(
|
||||
config.requests_per_minute *
|
||||
(config.authenticated_multiplier if is_authenticated else 1.0)
|
||||
)
|
||||
max_per_hour = int(
|
||||
config.requests_per_hour *
|
||||
(config.authenticated_multiplier if is_authenticated else 1.0)
|
||||
)
|
||||
|
||||
# Check rate limit
|
||||
allowed, retry_after = self.store.check_limit(
|
||||
identifier,
|
||||
max_per_minute,
|
||||
max_per_hour,
|
||||
)
|
||||
|
||||
if not allowed:
|
||||
return JSONResponse(
|
||||
status_code=status.HTTP_429_TOO_MANY_REQUESTS,
|
||||
content={"detail": "Rate limit exceeded"},
|
||||
headers={"Retry-After": str(retry_after)},
|
||||
)
|
||||
|
||||
# Record the request
|
||||
self.store.record_request(identifier)
|
||||
|
||||
# Add rate limit headers to response
|
||||
response = await call_next(request)
|
||||
response.headers["X-RateLimit-Limit-Minute"] = str(max_per_minute)
|
||||
response.headers["X-RateLimit-Limit-Hour"] = str(max_per_hour)
|
||||
|
||||
minute_remaining, hour_remaining = self.store.get_remaining_requests(
|
||||
identifier, max_per_minute, max_per_hour
|
||||
)
|
||||
|
||||
response.headers["X-RateLimit-Remaining-Minute"] = str(
|
||||
minute_remaining
|
||||
)
|
||||
response.headers["X-RateLimit-Remaining-Hour"] = str(
|
||||
hour_remaining
|
||||
)
|
||||
|
||||
return response
|
||||
|
||||
def _should_bypass(self, path: str) -> bool:
|
||||
"""Check if path should bypass rate limiting.
|
||||
|
||||
Args:
|
||||
path: Request path
|
||||
|
||||
Returns:
|
||||
True if path should bypass rate limiting
|
||||
"""
|
||||
for bypass_path in self.BYPASS_PATHS:
|
||||
if path.startswith(bypass_path):
|
||||
return True
|
||||
return False
|
||||
|
||||
def _get_identifier(self, request: Request) -> str:
|
||||
"""Get unique identifier for rate limiting.
|
||||
|
||||
Args:
|
||||
request: HTTP request
|
||||
|
||||
Returns:
|
||||
Unique identifier (user ID or IP address)
|
||||
"""
|
||||
# Try to get user ID from request state (set by auth middleware)
|
||||
user_id = getattr(request.state, "user_id", None)
|
||||
if user_id:
|
||||
return f"user:{user_id}"
|
||||
|
||||
# Fall back to IP address
|
||||
# Check for X-Forwarded-For header (proxy/load balancer)
|
||||
forwarded_for = request.headers.get("X-Forwarded-For")
|
||||
if forwarded_for:
|
||||
# Take the first IP in the chain
|
||||
client_ip = forwarded_for.split(",")[0].strip()
|
||||
else:
|
||||
client_ip = request.client.host if request.client else "unknown"
|
||||
|
||||
return f"ip:{client_ip}"
|
||||
|
||||
def _get_endpoint_config(self, path: str) -> RateLimitConfig:
|
||||
"""Get rate limit configuration for endpoint.
|
||||
|
||||
Args:
|
||||
path: Request path
|
||||
|
||||
Returns:
|
||||
Rate limit configuration
|
||||
"""
|
||||
# Check for exact match
|
||||
if path in self.ENDPOINT_LIMITS:
|
||||
return self.ENDPOINT_LIMITS[path]
|
||||
|
||||
# Check for prefix match
|
||||
for endpoint_path, config in self.ENDPOINT_LIMITS.items():
|
||||
if path.startswith(endpoint_path):
|
||||
return config
|
||||
|
||||
return self.default_config
|
||||
|
||||
def _is_authenticated(self, request: Request) -> bool:
|
||||
"""Check if request is from authenticated user.
|
||||
|
||||
Args:
|
||||
request: HTTP request
|
||||
|
||||
Returns:
|
||||
True if user is authenticated
|
||||
"""
|
||||
return (
|
||||
hasattr(request.state, "user_id") and
|
||||
request.state.user_id is not None
|
||||
)
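
Wiring the middleware into an application is a one-liner via `add_middleware`; tighter defaults can be supplied through `RateLimitConfig`. The import path and the limits below are illustrative.

```python
from fastapi import FastAPI

from src.server.middleware.rate_limit import (  # assumed module path
    RateLimitConfig,
    RateLimitMiddleware,
)

app = FastAPI()
app.add_middleware(
    RateLimitMiddleware,
    default_config=RateLimitConfig(requests_per_minute=30, requests_per_hour=500),
)
```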
@ -10,7 +10,7 @@ from datetime import datetime, timezone
|
||||
from enum import Enum
|
||||
from typing import List, Optional
|
||||
|
||||
from pydantic import BaseModel, Field, HttpUrl
|
||||
from pydantic import BaseModel, Field, HttpUrl, field_validator
|
||||
|
||||
|
||||
class DownloadStatus(str, Enum):
|
||||
@ -27,9 +27,9 @@ class DownloadStatus(str, Enum):
|
||||
class DownloadPriority(str, Enum):
|
||||
"""Priority level for download queue items."""
|
||||
|
||||
LOW = "low"
|
||||
NORMAL = "normal"
|
||||
HIGH = "high"
|
||||
LOW = "LOW"
|
||||
NORMAL = "NORMAL"
|
||||
HIGH = "HIGH"
|
||||
|
||||
|
||||
class EpisodeIdentifier(BaseModel):
|
||||
@ -66,7 +66,10 @@ class DownloadItem(BaseModel):
|
||||
"""Represents a single download item in the queue."""
|
||||
|
||||
id: str = Field(..., description="Unique download item identifier")
|
||||
serie_id: str = Field(..., description="Series identifier")
|
||||
serie_id: str = Field(..., description="Series identifier (provider key)")
|
||||
serie_folder: Optional[str] = Field(
|
||||
None, description="Series folder name on disk"
|
||||
)
|
||||
serie_name: str = Field(..., min_length=1, description="Series name")
|
||||
episode: EpisodeIdentifier = Field(
|
||||
..., description="Episode identification"
|
||||
@ -157,7 +160,10 @@ class QueueStats(BaseModel):
|
||||
class DownloadRequest(BaseModel):
|
||||
"""Request to add episode(s) to the download queue."""
|
||||
|
||||
serie_id: str = Field(..., description="Series identifier")
|
||||
serie_id: str = Field(..., description="Series identifier (provider key)")
|
||||
serie_folder: Optional[str] = Field(
|
||||
None, description="Series folder name on disk"
|
||||
)
|
||||
serie_name: str = Field(
|
||||
..., min_length=1, description="Series name for display"
|
||||
)
|
||||
@ -168,6 +174,14 @@ class DownloadRequest(BaseModel):
|
||||
DownloadPriority.NORMAL, description="Priority level for queue items"
|
||||
)
|
||||
|
||||
@field_validator('priority', mode='before')
|
||||
@classmethod
|
||||
def normalize_priority(cls, v):
|
||||
"""Normalize priority to uppercase for case-insensitive matching."""
|
||||
if isinstance(v, str):
|
||||
return v.upper()
|
||||
return v
|
||||
|
||||
|
||||
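Since the enum members now carry uppercase values, the `mode='before'` validator above is what keeps lowercase client input such as `"high"` valid. A small self-contained sketch of the same pattern (model and field names here are illustrative, not the project's schema):

```python
from enum import Enum

from pydantic import BaseModel, field_validator


class Priority(str, Enum):
    LOW = "LOW"
    NORMAL = "NORMAL"
    HIGH = "HIGH"


class Request(BaseModel):
    priority: Priority = Priority.NORMAL

    @field_validator("priority", mode="before")
    @classmethod
    def normalize_priority(cls, v):
        # Uppercase incoming strings so "high", "High" and "HIGH" all validate.
        return v.upper() if isinstance(v, str) else v


print(Request(priority="high").priority)  # Priority.HIGH
print(Request().priority)                 # Priority.NORMAL
```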
class DownloadResponse(BaseModel):
|
||||
"""Response after adding items to the download queue."""
|
||||
|
||||
@@ -1,423 +0,0 @@
|
||||
"""Analytics service for downloads, popularity, and performance metrics.
|
||||
|
||||
This module provides comprehensive analytics tracking including download
|
||||
statistics, series popularity analysis, storage usage trends, and
|
||||
performance reporting.
|
||||
"""
|
||||
|
||||
import json
|
||||
import logging
|
||||
from dataclasses import asdict, dataclass, field
|
||||
from datetime import datetime, timedelta
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
import psutil
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from src.server.database.models import DownloadQueueItem, DownloadStatus
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
ANALYTICS_FILE = Path("data") / "analytics.json"
|
||||
|
||||
|
||||
@dataclass
|
||||
class DownloadStats:
|
||||
"""Download statistics snapshot."""
|
||||
|
||||
total_downloads: int = 0
|
||||
successful_downloads: int = 0
|
||||
failed_downloads: int = 0
|
||||
total_bytes_downloaded: int = 0
|
||||
average_speed_mbps: float = 0.0
|
||||
success_rate: float = 0.0
|
||||
average_duration_seconds: float = 0.0
|
||||
|
||||
|
||||
@dataclass
|
||||
class SeriesPopularity:
|
||||
"""Series popularity metrics."""
|
||||
|
||||
series_name: str
|
||||
download_count: int
|
||||
total_size_bytes: int
|
||||
last_download: Optional[str] = None
|
||||
success_rate: float = 0.0
|
||||
|
||||
|
||||
@dataclass
|
||||
class StorageAnalysis:
|
||||
"""Storage usage analysis."""
|
||||
|
||||
total_storage_bytes: int = 0
|
||||
used_storage_bytes: int = 0
|
||||
free_storage_bytes: int = 0
|
||||
storage_percent_used: float = 0.0
|
||||
downloads_directory_size_bytes: int = 0
|
||||
cache_directory_size_bytes: int = 0
|
||||
logs_directory_size_bytes: int = 0
|
||||
|
||||
|
||||
@dataclass
|
||||
class PerformanceReport:
|
||||
"""Performance metrics and trends."""
|
||||
|
||||
period_start: str
|
||||
period_end: str
|
||||
downloads_per_hour: float = 0.0
|
||||
average_queue_size: float = 0.0
|
||||
peak_memory_usage_mb: float = 0.0
|
||||
average_cpu_percent: float = 0.0
|
||||
uptime_seconds: float = 0.0
|
||||
error_rate: float = 0.0
|
||||
samples: List[Dict[str, Any]] = field(default_factory=list)
|
||||
|
||||
|
||||
class AnalyticsService:
|
||||
"""Service for tracking and reporting analytics data."""
|
||||
|
||||
def __init__(self):
|
||||
"""Initialize the analytics service."""
|
||||
self.analytics_file = ANALYTICS_FILE
|
||||
self._ensure_analytics_file()
|
||||
|
||||
def _ensure_analytics_file(self) -> None:
|
||||
"""Ensure analytics file exists with default data."""
|
||||
if not self.analytics_file.exists():
|
||||
default_data = {
|
||||
"created_at": datetime.now().isoformat(),
|
||||
"last_updated": datetime.now().isoformat(),
|
||||
"download_stats": asdict(DownloadStats()),
|
||||
"series_popularity": [],
|
||||
"storage_history": [],
|
||||
"performance_samples": [],
|
||||
}
|
||||
self.analytics_file.write_text(json.dumps(default_data, indent=2))
|
||||
|
||||
def _load_analytics(self) -> Dict[str, Any]:
|
||||
"""Load analytics data from file."""
|
||||
try:
|
||||
return json.loads(self.analytics_file.read_text())
|
||||
except (FileNotFoundError, json.JSONDecodeError):
|
||||
self._ensure_analytics_file()
|
||||
return json.loads(self.analytics_file.read_text())
|
||||
|
||||
def _save_analytics(self, data: Dict[str, Any]) -> None:
|
||||
"""Save analytics data to file."""
|
||||
data["last_updated"] = datetime.now().isoformat()
|
||||
self.analytics_file.write_text(json.dumps(data, indent=2))
|
||||
|
||||
async def get_download_stats(
|
||||
self, db: AsyncSession, days: int = 30
|
||||
) -> DownloadStats:
|
||||
"""Get download statistics for the specified period.
|
||||
|
||||
Args:
|
||||
db: Database session
|
||||
days: Number of days to analyze
|
||||
|
||||
Returns:
|
||||
DownloadStats with aggregated download data
|
||||
"""
|
||||
cutoff_date = datetime.now() - timedelta(days=days)
|
||||
|
||||
# Query downloads within period
|
||||
stmt = select(DownloadQueueItem).where(
|
||||
DownloadQueueItem.created_at >= cutoff_date
|
||||
)
|
||||
result = await db.execute(stmt)
|
||||
downloads = result.scalars().all()
|
||||
|
||||
if not downloads:
|
||||
return DownloadStats()
|
||||
|
||||
successful = [d for d in downloads
|
||||
if d.status == DownloadStatus.COMPLETED]
|
||||
failed = [d for d in downloads
|
||||
if d.status == DownloadStatus.FAILED]
|
||||
|
||||
total_bytes = sum(d.total_bytes or 0 for d in successful)
|
||||
avg_speed_list = [
|
||||
d.download_speed or 0.0 for d in successful if d.download_speed
|
||||
]
|
||||
avg_speed_mbps = (
|
||||
sum(avg_speed_list) / len(avg_speed_list) / (1024 * 1024)
|
||||
if avg_speed_list
|
||||
else 0.0
|
||||
)
|
||||
|
||||
success_rate = (
|
||||
len(successful) / len(downloads) * 100 if downloads else 0.0
|
||||
)
|
||||
|
||||
return DownloadStats(
|
||||
total_downloads=len(downloads),
|
||||
successful_downloads=len(successful),
|
||||
failed_downloads=len(failed),
|
||||
total_bytes_downloaded=total_bytes,
|
||||
average_speed_mbps=avg_speed_mbps,
|
||||
success_rate=success_rate,
|
||||
average_duration_seconds=0.0, # Not available in model
|
||||
)
|
||||
|
||||
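The aggregation above is plain arithmetic; a worked example with assumed numbers (45 of 50 items completed, speeds in bytes per second, as the division by 1024 * 1024 suggests):

```python
# Assumed sample: 50 downloads in the window, 45 completed, speeds in bytes/s.
completed_speeds_bps = [2_500_000, 3_000_000, 2_000_000]

success_rate = 45 / 50 * 100                          # 90.0
avg_speed_mbps = (
    sum(completed_speeds_bps) / len(completed_speeds_bps) / (1024 * 1024)
)                                                     # about 2.38

print(f"{success_rate:.1f}% success, {avg_speed_mbps:.2f} MB/s average")
```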
async def get_series_popularity(
|
||||
self, db: AsyncSession, limit: int = 10
|
||||
) -> List[SeriesPopularity]:
|
||||
"""Get most popular series by download count.
|
||||
|
||||
Args:
|
||||
db: Database session
|
||||
limit: Maximum number of series to return
|
||||
|
||||
Returns:
|
||||
List of SeriesPopularity objects
|
||||
"""
|
||||
# Use raw SQL approach since we need to group and join
|
||||
from sqlalchemy import text
|
||||
|
||||
query = text("""
|
||||
SELECT
|
||||
s.title as series_name,
|
||||
COUNT(d.id) as download_count,
|
||||
SUM(d.total_bytes) as total_size,
|
||||
MAX(d.created_at) as last_download,
|
||||
SUM(CASE WHEN d.status = 'COMPLETED'
|
||||
THEN 1 ELSE 0 END) as successful
|
||||
FROM download_queue d
|
||||
JOIN anime_series s ON d.series_id = s.id
|
||||
GROUP BY s.id, s.title
|
||||
ORDER BY download_count DESC
|
||||
LIMIT :limit
|
||||
""")
|
||||
|
||||
result = await db.execute(query, {"limit": limit})
|
||||
rows = result.all()
|
||||
|
||||
popularity = []
|
||||
for row in rows:
|
||||
success_rate = 0.0
|
||||
download_count = row[1] or 0
|
||||
if download_count > 0:
|
||||
successful = row[4] or 0
|
||||
success_rate = (successful / download_count * 100)
|
||||
|
||||
popularity.append(
|
||||
SeriesPopularity(
|
||||
series_name=row[0] or "Unknown",
|
||||
download_count=download_count,
|
||||
total_size_bytes=row[2] or 0,
|
||||
last_download=row[3].isoformat()
|
||||
if row[3]
|
||||
else None,
|
||||
success_rate=success_rate,
|
||||
)
|
||||
)
|
||||
|
||||
return popularity
|
||||
|
||||
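Each popularity row comes back as `(series_name, download_count, total_size, last_download, successful)`. A tiny sketch of mapping one such row, with made-up values, onto the `SeriesPopularity` dataclass defined above:

```python
from datetime import datetime

# Assumed row from the popularity query; SeriesPopularity is defined above.
row = ("Frieren", 24, 38_654_705_664, datetime(2024, 5, 1, 20, 15), 22)

popularity = SeriesPopularity(
    series_name=row[0] or "Unknown",
    download_count=row[1] or 0,
    total_size_bytes=row[2] or 0,
    last_download=row[3].isoformat() if row[3] else None,
    success_rate=(row[4] / row[1] * 100) if row[1] else 0.0,
)
print(round(popularity.success_rate, 1))  # 91.7
```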
def get_storage_analysis(self) -> StorageAnalysis:
|
||||
"""Get current storage usage analysis.
|
||||
|
||||
Returns:
|
||||
StorageAnalysis with storage breakdown
|
||||
"""
|
||||
try:
|
||||
# Get disk usage for data directory
|
||||
disk = psutil.disk_usage("/")
|
||||
total = disk.total
|
||||
used = disk.used
|
||||
free = disk.free
|
||||
|
||||
analysis = StorageAnalysis(
|
||||
total_storage_bytes=total,
|
||||
used_storage_bytes=used,
|
||||
free_storage_bytes=free,
|
||||
storage_percent_used=disk.percent,
|
||||
downloads_directory_size_bytes=self._get_dir_size(
|
||||
Path("data")
|
||||
),
|
||||
cache_directory_size_bytes=self._get_dir_size(
|
||||
Path("data") / "cache"
|
||||
),
|
||||
logs_directory_size_bytes=self._get_dir_size(
|
||||
Path("logs")
|
||||
),
|
||||
)
|
||||
|
||||
return analysis
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Storage analysis failed: {e}")
|
||||
return StorageAnalysis()
|
||||
|
||||
def _get_dir_size(self, path: Path) -> int:
|
||||
"""Calculate total size of directory.
|
||||
|
||||
Args:
|
||||
path: Directory path
|
||||
|
||||
Returns:
|
||||
Total size in bytes
|
||||
"""
|
||||
if not path.exists():
|
||||
return 0
|
||||
|
||||
total = 0
|
||||
try:
|
||||
for item in path.rglob("*"):
|
||||
if item.is_file():
|
||||
total += item.stat().st_size
|
||||
except (OSError, PermissionError):
|
||||
pass
|
||||
|
||||
return total
|
||||
|
||||
async def get_performance_report(
|
||||
self, db: AsyncSession, hours: int = 24
|
||||
) -> PerformanceReport:
|
||||
"""Get performance metrics for the specified period.
|
||||
|
||||
Args:
|
||||
db: Database session
|
||||
hours: Number of hours to analyze
|
||||
|
||||
Returns:
|
||||
PerformanceReport with performance metrics
|
||||
"""
|
||||
cutoff_time = datetime.now() - timedelta(hours=hours)
|
||||
|
||||
# Get download metrics
|
||||
stmt = select(DownloadQueueItem).where(
|
||||
DownloadQueueItem.created_at >= cutoff_time
|
||||
)
|
||||
result = await db.execute(stmt)
|
||||
downloads = result.scalars().all()
|
||||
|
||||
downloads_per_hour = len(downloads) / max(hours, 1)
|
||||
|
||||
# Get queue size over time (estimated from analytics)
|
||||
analytics = self._load_analytics()
|
||||
performance_samples = analytics.get("performance_samples", [])
|
||||
|
||||
# Filter recent samples
|
||||
recent_samples = [
|
||||
s
|
||||
for s in performance_samples
|
||||
if datetime.fromisoformat(s.get("timestamp", "2000-01-01"))
|
||||
>= cutoff_time
|
||||
]
|
||||
|
||||
avg_queue = sum(
|
||||
s.get("queue_size", 0) for s in recent_samples
|
||||
) / len(recent_samples) if recent_samples else 0.0
|
||||
|
||||
# Get memory and CPU stats
|
||||
process = psutil.Process()
|
||||
memory_info = process.memory_info()
|
||||
peak_memory_mb = memory_info.rss / (1024 * 1024)
|
||||
|
||||
cpu_percent = process.cpu_percent(interval=1)
|
||||
|
||||
# Calculate error rate
|
||||
failed_count = sum(
|
||||
1 for d in downloads
|
||||
if d.status == DownloadStatus.FAILED
|
||||
)
|
||||
error_rate = (
|
||||
failed_count / len(downloads) * 100 if downloads else 0.0
|
||||
)
|
||||
|
||||
# Get uptime
|
||||
boot_time = datetime.fromtimestamp(psutil.boot_time())
|
||||
uptime_seconds = (datetime.now() - boot_time).total_seconds()
|
||||
|
||||
return PerformanceReport(
|
||||
period_start=cutoff_time.isoformat(),
|
||||
period_end=datetime.now().isoformat(),
|
||||
downloads_per_hour=downloads_per_hour,
|
||||
average_queue_size=avg_queue,
|
||||
peak_memory_usage_mb=peak_memory_mb,
|
||||
average_cpu_percent=cpu_percent,
|
||||
uptime_seconds=uptime_seconds,
|
||||
error_rate=error_rate,
|
||||
samples=recent_samples[-100:], # Keep last 100 samples
|
||||
)
|
||||
|
||||
def record_performance_sample(
|
||||
self,
|
||||
queue_size: int,
|
||||
active_downloads: int,
|
||||
cpu_percent: float,
|
||||
memory_mb: float,
|
||||
) -> None:
|
||||
"""Record a performance metric sample.
|
||||
|
||||
Args:
|
||||
queue_size: Current queue size
|
||||
active_downloads: Number of active downloads
|
||||
cpu_percent: Current CPU usage percentage
|
||||
memory_mb: Current memory usage in MB
|
||||
"""
|
||||
analytics = self._load_analytics()
|
||||
samples = analytics.get("performance_samples", [])
|
||||
|
||||
sample = {
|
||||
"timestamp": datetime.now().isoformat(),
|
||||
"queue_size": queue_size,
|
||||
"active_downloads": active_downloads,
|
||||
"cpu_percent": cpu_percent,
|
||||
"memory_mb": memory_mb,
|
||||
}
|
||||
|
||||
samples.append(sample)
|
||||
|
||||
# Keep only recent samples (7 days worth at 1 sample per minute)
|
||||
max_samples = 7 * 24 * 60
|
||||
if len(samples) > max_samples:
|
||||
samples = samples[-max_samples:]
|
||||
|
||||
analytics["performance_samples"] = samples
|
||||
self._save_analytics(analytics)
|
||||
|
||||
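A sampling cadence of roughly one per minute is assumed by the retention cap, but nothing in the service schedules it. One possible way to feed `record_performance_sample` periodically is sketched below; the interval and the queue accessors are assumptions, not part of the service.

```python
import asyncio

import psutil


async def sample_loop(analytics, queue, interval_seconds: int = 60) -> None:
    """Record one performance sample per interval until the task is cancelled."""
    process = psutil.Process()
    while True:
        analytics.record_performance_sample(
            queue_size=queue.pending_count(),        # assumed accessor
            active_downloads=queue.active_count(),   # assumed accessor
            cpu_percent=process.cpu_percent(interval=None),
            memory_mb=process.memory_info().rss / (1024 * 1024),
        )
        await asyncio.sleep(interval_seconds)
```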
async def generate_summary_report(
|
||||
self, db: AsyncSession
|
||||
) -> Dict[str, Any]:
|
||||
"""Generate comprehensive analytics summary.
|
||||
|
||||
Args:
|
||||
db: Database session
|
||||
|
||||
Returns:
|
||||
Summary report with all analytics
|
||||
"""
|
||||
download_stats = await self.get_download_stats(db)
|
||||
series_popularity = await self.get_series_popularity(db, limit=5)
|
||||
storage = self.get_storage_analysis()
|
||||
performance = await self.get_performance_report(db)
|
||||
|
||||
return {
|
||||
"timestamp": datetime.now().isoformat(),
|
||||
"download_stats": asdict(download_stats),
|
||||
"series_popularity": [
|
||||
asdict(s) for s in series_popularity
|
||||
],
|
||||
"storage_analysis": asdict(storage),
|
||||
"performance_report": asdict(performance),
|
||||
}
|
||||
|
||||
|
||||
_analytics_service_instance: Optional[AnalyticsService] = None
|
||||
|
||||
|
||||
def get_analytics_service() -> AnalyticsService:
|
||||
"""Get or create singleton analytics service instance.
|
||||
|
||||
Returns:
|
||||
AnalyticsService instance
|
||||
"""
|
||||
global _analytics_service_instance
|
||||
if _analytics_service_instance is None:
|
||||
_analytics_service_instance = AnalyticsService()
|
||||
return _analytics_service_instance
|
||||
@@ -1,9 +1,8 @@
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
from functools import lru_cache
|
||||
-from typing import Callable, List, Optional
+from typing import List, Optional
|
||||
|
||||
import structlog
|
||||
|
||||
@@ -22,116 +21,184 @@ class AnimeServiceError(Exception):
|
||||
|
||||
|
||||
class AnimeService:
-"""Wraps the blocking SeriesApp for use in the FastAPI web layer.
+"""Wraps SeriesApp for use in the FastAPI web layer.

-- Runs blocking operations in a threadpool
+- SeriesApp methods are now async, no need for threadpool
+- Subscribes to SeriesApp events for progress tracking
|
||||
- Exposes async methods
|
||||
- Adds simple in-memory caching for read operations
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
directory: str,
|
||||
max_workers: int = 4,
|
||||
series_app: SeriesApp,
|
||||
progress_service: Optional[ProgressService] = None,
|
||||
):
|
||||
self._directory = directory
|
||||
self._executor = ThreadPoolExecutor(max_workers=max_workers)
|
||||
self._app = series_app
|
||||
self._directory = series_app.directory_to_search
|
||||
self._progress_service = progress_service or get_progress_service()
|
||||
# SeriesApp is blocking; instantiate per-service
|
||||
# Subscribe to SeriesApp events
|
||||
# Note: Events library uses assignment (=), not += operator
|
||||
try:
|
||||
self._app = SeriesApp(directory)
|
||||
self._app.download_status = self._on_download_status
|
||||
self._app.scan_status = self._on_scan_status
|
||||
logger.debug("Successfully subscribed to SeriesApp events")
|
||||
except Exception as e:
|
||||
logger.exception("Failed to initialize SeriesApp")
|
||||
logger.exception("Failed to subscribe to SeriesApp events")
|
||||
raise AnimeServiceError("Initialization failed") from e
|
||||
|
||||
async def _run_in_executor(self, func, *args, **kwargs):
|
||||
loop = asyncio.get_event_loop()
|
||||
def _on_download_status(self, args) -> None:
|
||||
"""Handle download status events from SeriesApp.
|
||||
|
||||
Args:
|
||||
args: DownloadStatusEventArgs from SeriesApp
|
||||
"""
|
||||
try:
|
||||
return await loop.run_in_executor(self._executor, lambda: func(*args, **kwargs))
|
||||
except Exception as e:
|
||||
logger.exception("Executor task failed")
|
||||
raise AnimeServiceError(str(e)) from e
|
||||
# Map SeriesApp download events to progress service
|
||||
if args.status == "started":
|
||||
asyncio.create_task(
|
||||
self._progress_service.start_progress(
|
||||
progress_id=f"download_{args.serie_folder}_{args.season}_{args.episode}", # noqa: E501
|
||||
progress_type=ProgressType.DOWNLOAD,
|
||||
title=f"Downloading {args.serie_folder}",
|
||||
message=f"S{args.season:02d}E{args.episode:02d}",
|
||||
)
|
||||
)
|
||||
elif args.status == "progress":
|
||||
asyncio.create_task(
|
||||
self._progress_service.update_progress(
|
||||
progress_id=f"download_{args.serie_folder}_{args.season}_{args.episode}", # noqa: E501
|
||||
current=int(args.progress),
|
||||
total=100,
|
||||
message=args.message or "Downloading...",
|
||||
)
|
||||
)
|
||||
elif args.status == "completed":
|
||||
asyncio.create_task(
|
||||
self._progress_service.complete_progress(
|
||||
progress_id=f"download_{args.serie_folder}_{args.season}_{args.episode}", # noqa: E501
|
||||
message="Download completed",
|
||||
)
|
||||
)
|
||||
elif args.status == "failed":
|
||||
asyncio.create_task(
|
||||
self._progress_service.fail_progress(
|
||||
progress_id=f"download_{args.serie_folder}_{args.season}_{args.episode}", # noqa: E501
|
||||
error_message=args.message or str(args.error),
|
||||
)
|
||||
)
|
||||
except Exception as exc:
|
||||
logger.error(
|
||||
"Error handling download status event",
|
||||
error=str(exc)
|
||||
)
|
||||
|
||||
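The handlers above run synchronously inside SeriesApp's event dispatch, so they hand coroutines to `asyncio.create_task` rather than awaiting them, which requires a running event loop. A stripped-down sketch of that bridge, using stand-ins for the event args and the progress service:

```python
import asyncio
from dataclasses import dataclass


@dataclass
class DownloadStatusArgs:
    """Stand-in for SeriesApp's download status event args."""
    serie_folder: str
    season: int
    episode: int
    status: str
    progress: float = 0.0


class StubProgressService:
    """Stand-in for ProgressService; only the call shape matters here."""

    async def update_progress(self, progress_id: str, current: int) -> None:
        print(f"{progress_id}: {current}%")


progress = StubProgressService()


def on_download_status(args: DownloadStatusArgs) -> None:
    """Synchronous callback: schedule the async update instead of awaiting it."""
    if args.status == "progress":
        asyncio.create_task(
            progress.update_progress(
                progress_id=(
                    f"download_{args.serie_folder}_{args.season}_{args.episode}"
                ),
                current=int(args.progress),
            )
        )


async def main() -> None:
    on_download_status(DownloadStatusArgs("One Piece", 1, 3, "progress", 42.0))
    await asyncio.sleep(0)  # give the scheduled task a chance to run


asyncio.run(main())
```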
def _on_scan_status(self, args) -> None:
|
||||
"""Handle scan status events from SeriesApp.
|
||||
|
||||
Args:
|
||||
args: ScanStatusEventArgs from SeriesApp
|
||||
"""
|
||||
try:
|
||||
scan_id = "library_scan"
|
||||
|
||||
# Map SeriesApp scan events to progress service
|
||||
if args.status == "started":
|
||||
asyncio.create_task(
|
||||
self._progress_service.start_progress(
|
||||
progress_id=scan_id,
|
||||
progress_type=ProgressType.SCAN,
|
||||
title="Scanning anime library",
|
||||
message=args.message or "Initializing scan...",
|
||||
)
|
||||
)
|
||||
elif args.status == "progress":
|
||||
asyncio.create_task(
|
||||
self._progress_service.update_progress(
|
||||
progress_id=scan_id,
|
||||
current=args.current,
|
||||
total=args.total,
|
||||
message=args.message or f"Scanning: {args.folder}",
|
||||
)
|
||||
)
|
||||
elif args.status == "completed":
|
||||
asyncio.create_task(
|
||||
self._progress_service.complete_progress(
|
||||
progress_id=scan_id,
|
||||
message=args.message or "Scan completed",
|
||||
)
|
||||
)
|
||||
elif args.status == "failed":
|
||||
asyncio.create_task(
|
||||
self._progress_service.fail_progress(
|
||||
progress_id=scan_id,
|
||||
error_message=args.message or str(args.error),
|
||||
)
|
||||
)
|
||||
elif args.status == "cancelled":
|
||||
asyncio.create_task(
|
||||
self._progress_service.fail_progress(
|
||||
progress_id=scan_id,
|
||||
error_message=args.message or "Scan cancelled",
|
||||
)
|
||||
)
|
||||
except Exception as exc:
|
||||
logger.error("Error handling scan status event", error=str(exc))
|
||||
|
||||
@lru_cache(maxsize=128)
|
||||
def _cached_list_missing(self) -> List[dict]:
|
||||
# Synchronous cached call used by async wrapper
|
||||
# Synchronous cached call - SeriesApp.series_list is populated
|
||||
# during initialization
|
||||
try:
|
||||
series = self._app.series_list
|
||||
# normalize to simple dicts
|
||||
return [s.to_dict() if hasattr(s, "to_dict") else s for s in series]
|
||||
except Exception as e:
|
||||
return [
|
||||
s.to_dict() if hasattr(s, "to_dict") else s
|
||||
for s in series
|
||||
]
|
||||
except Exception:
|
||||
logger.exception("Failed to get missing episodes list")
|
||||
raise
|
||||
|
||||
async def list_missing(self) -> List[dict]:
|
||||
"""Return list of series with missing episodes."""
|
||||
try:
|
||||
return await self._run_in_executor(self._cached_list_missing)
|
||||
# series_list is already populated, just access it
|
||||
return self._cached_list_missing()
|
||||
except AnimeServiceError:
|
||||
raise
|
||||
except Exception as e:
|
||||
except Exception as exc:
|
||||
logger.exception("list_missing failed")
|
||||
raise AnimeServiceError("Failed to list missing series") from e
|
||||
raise AnimeServiceError("Failed to list missing series") from exc
|
||||
|
||||
async def search(self, query: str) -> List[dict]:
|
||||
"""Search for series using underlying loader.Search."""
|
||||
"""Search for series using underlying loader.
|
||||
|
||||
Args:
|
||||
query: Search query string
|
||||
|
||||
Returns:
|
||||
List of search results as dictionaries
|
||||
"""
|
||||
if not query:
|
||||
return []
|
||||
try:
|
||||
result = await self._run_in_executor(self._app.search, query)
|
||||
# result may already be list of dicts or objects
|
||||
# SeriesApp.search is now async
|
||||
result = await self._app.search(query)
|
||||
return result
|
||||
except Exception as e:
|
||||
except Exception as exc:
|
||||
logger.exception("search failed")
|
||||
raise AnimeServiceError("Search failed") from e
|
||||
raise AnimeServiceError("Search failed") from exc
|
||||
|
||||
async def rescan(self, callback: Optional[Callable] = None) -> None:
|
||||
"""Trigger a re-scan. Accepts an optional callback function.
|
||||
async def rescan(self) -> None:
|
||||
"""Trigger a re-scan.
|
||||
|
||||
The callback is executed in the threadpool by SeriesApp.
|
||||
Progress updates are tracked and broadcasted via ProgressService.
|
||||
The SeriesApp now handles progress tracking via events which are
|
||||
forwarded to the ProgressService through event handlers.
|
||||
"""
|
||||
scan_id = "library_scan"
|
||||
|
||||
try:
|
||||
# Start progress tracking
|
||||
await self._progress_service.start_progress(
|
||||
progress_id=scan_id,
|
||||
progress_type=ProgressType.SCAN,
|
||||
title="Scanning anime library",
|
||||
message="Initializing scan...",
|
||||
)
|
||||
|
||||
# Create wrapped callback for progress updates
|
||||
def progress_callback(progress_data: dict) -> None:
|
||||
"""Update progress during scan."""
|
||||
try:
|
||||
if callback:
|
||||
callback(progress_data)
|
||||
|
||||
# Update progress service
|
||||
current = progress_data.get("current", 0)
|
||||
total = progress_data.get("total", 0)
|
||||
message = progress_data.get("message", "Scanning...")
|
||||
|
||||
# Schedule the coroutine without waiting for it
|
||||
# This is safe because we don't need the result
|
||||
loop = asyncio.get_event_loop()
|
||||
if loop.is_running():
|
||||
asyncio.ensure_future(
|
||||
self._progress_service.update_progress(
|
||||
progress_id=scan_id,
|
||||
current=current,
|
||||
total=total,
|
||||
message=message,
|
||||
)
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error("Scan progress callback error", error=str(e))
|
||||
|
||||
# Run scan
|
||||
await self._run_in_executor(self._app.ReScan, progress_callback)
|
||||
# SeriesApp.re_scan is now async and handles events internally
|
||||
await self._app.re_scan()
|
||||
|
||||
# invalidate cache
|
||||
try:
|
||||
@@ -139,36 +206,37 @@ class AnimeService:
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# Complete progress tracking
|
||||
await self._progress_service.complete_progress(
|
||||
progress_id=scan_id,
|
||||
message="Scan completed successfully",
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
except Exception as exc:
|
||||
logger.exception("rescan failed")
|
||||
raise AnimeServiceError("Rescan failed") from exc
|
||||
|
||||
# Fail progress tracking
|
||||
await self._progress_service.fail_progress(
|
||||
progress_id=scan_id,
|
||||
error_message=str(e),
|
||||
)
|
||||
async def download(
|
||||
self,
|
||||
serie_folder: str,
|
||||
season: int,
|
||||
episode: int,
|
||||
key: str,
|
||||
) -> bool:
|
||||
"""Start a download.
|
||||
|
||||
raise AnimeServiceError("Rescan failed") from e
|
||||
|
||||
async def download(self, serie_folder: str, season: int, episode: int, key: str, callback=None) -> bool:
|
||||
"""Start a download via the underlying loader.
|
||||
The SeriesApp now handles progress tracking via events which are
|
||||
forwarded to the ProgressService through event handlers.
|
||||
|
||||
Returns True on success or raises AnimeServiceError on failure.
|
||||
"""
|
||||
try:
|
||||
result = await self._run_in_executor(self._app.download, serie_folder, season, episode, key, callback)
|
||||
return bool(result)
|
||||
except Exception as e:
|
||||
# SeriesApp.download is now async and handles events internally
|
||||
return await self._app.download(
|
||||
serie_folder=serie_folder,
|
||||
season=season,
|
||||
episode=episode,
|
||||
key=key,
|
||||
)
|
||||
except Exception as exc:
|
||||
logger.exception("download failed")
|
||||
raise AnimeServiceError("Download failed") from e
|
||||
raise AnimeServiceError("Download failed") from exc
|
||||
|
||||
|
||||
def get_anime_service(directory: str = "./") -> AnimeService:
|
||||
"""Factory used by FastAPI dependency injection."""
|
||||
return AnimeService(directory)
|
||||
def get_anime_service(series_app: SeriesApp) -> AnimeService:
|
||||
"""Factory used for creating AnimeService with a SeriesApp instance."""
|
||||
return AnimeService(series_app)
|
||||
|
||||
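With the factory now taking a `SeriesApp` rather than a directory, the FastAPI wiring presumably builds the `SeriesApp` once and reuses one service instance. A sketch of one way to do that; the library path, route path, and shared-instance choice are assumptions, and imports of the project modules are omitted:

```python
from fastapi import Depends, FastAPI

# SeriesApp, AnimeService and get_anime_service come from the modules above;
# the library path and the route are assumptions for this sketch.
series_app = SeriesApp("/data/anime")
service = get_anime_service(series_app)

app = FastAPI()


def anime_service_dependency() -> AnimeService:
    # One shared instance, so SeriesApp events are only subscribed once.
    return service


@app.get("/api/anime/missing")
async def missing(anime: AnimeService = Depends(anime_service_dependency)):
    return await anime.list_missing()
```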
@@ -1,610 +0,0 @@
|
||||
"""
|
||||
Audit Service for AniWorld.
|
||||
|
||||
This module provides comprehensive audit logging for security-critical
|
||||
operations including authentication, configuration changes, and downloads.
|
||||
"""
|
||||
|
||||
import json
|
||||
import logging
|
||||
from datetime import datetime, timedelta
|
||||
from enum import Enum
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class AuditEventType(str, Enum):
|
||||
"""Types of audit events."""
|
||||
|
||||
# Authentication events
|
||||
AUTH_SETUP = "auth.setup"
|
||||
AUTH_LOGIN_SUCCESS = "auth.login.success"
|
||||
AUTH_LOGIN_FAILURE = "auth.login.failure"
|
||||
AUTH_LOGOUT = "auth.logout"
|
||||
AUTH_TOKEN_REFRESH = "auth.token.refresh"
|
||||
AUTH_TOKEN_INVALID = "auth.token.invalid"
|
||||
|
||||
# Configuration events
|
||||
CONFIG_READ = "config.read"
|
||||
CONFIG_UPDATE = "config.update"
|
||||
CONFIG_BACKUP = "config.backup"
|
||||
CONFIG_RESTORE = "config.restore"
|
||||
CONFIG_DELETE = "config.delete"
|
||||
|
||||
# Download events
|
||||
DOWNLOAD_ADDED = "download.added"
|
||||
DOWNLOAD_STARTED = "download.started"
|
||||
DOWNLOAD_COMPLETED = "download.completed"
|
||||
DOWNLOAD_FAILED = "download.failed"
|
||||
DOWNLOAD_CANCELLED = "download.cancelled"
|
||||
DOWNLOAD_REMOVED = "download.removed"
|
||||
|
||||
# Queue events
|
||||
QUEUE_STARTED = "queue.started"
|
||||
QUEUE_STOPPED = "queue.stopped"
|
||||
QUEUE_PAUSED = "queue.paused"
|
||||
QUEUE_RESUMED = "queue.resumed"
|
||||
QUEUE_CLEARED = "queue.cleared"
|
||||
|
||||
# System events
|
||||
SYSTEM_STARTUP = "system.startup"
|
||||
SYSTEM_SHUTDOWN = "system.shutdown"
|
||||
SYSTEM_ERROR = "system.error"
|
||||
|
||||
|
||||
class AuditEventSeverity(str, Enum):
|
||||
"""Severity levels for audit events."""
|
||||
|
||||
DEBUG = "debug"
|
||||
INFO = "info"
|
||||
WARNING = "warning"
|
||||
ERROR = "error"
|
||||
CRITICAL = "critical"
|
||||
|
||||
|
||||
class AuditEvent(BaseModel):
|
||||
"""Audit event model."""
|
||||
|
||||
timestamp: datetime = Field(default_factory=datetime.utcnow)
|
||||
event_type: AuditEventType
|
||||
severity: AuditEventSeverity = AuditEventSeverity.INFO
|
||||
user_id: Optional[str] = None
|
||||
ip_address: Optional[str] = None
|
||||
user_agent: Optional[str] = None
|
||||
resource: Optional[str] = None
|
||||
action: Optional[str] = None
|
||||
status: str = "success"
|
||||
message: str
|
||||
details: Optional[Dict[str, Any]] = None
|
||||
session_id: Optional[str] = None
|
||||
|
||||
class Config:
|
||||
"""Pydantic config."""
|
||||
|
||||
json_encoders = {datetime: lambda v: v.isoformat()}
|
||||
|
||||
|
||||
class AuditLogStorage:
|
||||
"""Base class for audit log storage backends."""
|
||||
|
||||
async def write_event(self, event: AuditEvent) -> None:
|
||||
"""
|
||||
Write an audit event to storage.
|
||||
|
||||
Args:
|
||||
event: Audit event to write
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
async def read_events(
|
||||
self,
|
||||
start_time: Optional[datetime] = None,
|
||||
end_time: Optional[datetime] = None,
|
||||
event_types: Optional[List[AuditEventType]] = None,
|
||||
user_id: Optional[str] = None,
|
||||
limit: int = 100,
|
||||
) -> List[AuditEvent]:
|
||||
"""
|
||||
Read audit events from storage.
|
||||
|
||||
Args:
|
||||
start_time: Start of time range
|
||||
end_time: End of time range
|
||||
event_types: Filter by event types
|
||||
user_id: Filter by user ID
|
||||
limit: Maximum number of events to return
|
||||
|
||||
Returns:
|
||||
List of audit events
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
async def cleanup_old_events(self, days: int = 90) -> int:
|
||||
"""
|
||||
Clean up audit events older than specified days.
|
||||
|
||||
Args:
|
||||
days: Number of days to retain
|
||||
|
||||
Returns:
|
||||
Number of events deleted
|
||||
"""
|
||||
raise NotImplementedError
|
||||
|
||||
|
||||
class FileAuditLogStorage(AuditLogStorage):
|
||||
"""File-based audit log storage."""
|
||||
|
||||
def __init__(self, log_directory: str = "logs/audit"):
|
||||
"""
|
||||
Initialize file-based audit log storage.
|
||||
|
||||
Args:
|
||||
log_directory: Directory to store audit logs
|
||||
"""
|
||||
self.log_directory = Path(log_directory)
|
||||
self.log_directory.mkdir(parents=True, exist_ok=True)
|
||||
self._current_date: Optional[str] = None
|
||||
self._current_file: Optional[Path] = None
|
||||
|
||||
def _get_log_file(self, date: datetime) -> Path:
|
||||
"""
|
||||
Get log file path for a specific date.
|
||||
|
||||
Args:
|
||||
date: Date for log file
|
||||
|
||||
Returns:
|
||||
Path to log file
|
||||
"""
|
||||
date_str = date.strftime("%Y-%m-%d")
|
||||
return self.log_directory / f"audit_{date_str}.jsonl"
|
||||
|
||||
async def write_event(self, event: AuditEvent) -> None:
|
||||
"""
|
||||
Write an audit event to file.
|
||||
|
||||
Args:
|
||||
event: Audit event to write
|
||||
"""
|
||||
log_file = self._get_log_file(event.timestamp)
|
||||
|
||||
try:
|
||||
with open(log_file, "a", encoding="utf-8") as f:
|
||||
f.write(event.model_dump_json() + "\n")
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to write audit event to file: {e}")
|
||||
|
||||
async def read_events(
|
||||
self,
|
||||
start_time: Optional[datetime] = None,
|
||||
end_time: Optional[datetime] = None,
|
||||
event_types: Optional[List[AuditEventType]] = None,
|
||||
user_id: Optional[str] = None,
|
||||
limit: int = 100,
|
||||
) -> List[AuditEvent]:
|
||||
"""
|
||||
Read audit events from files.
|
||||
|
||||
Args:
|
||||
start_time: Start of time range
|
||||
end_time: End of time range
|
||||
event_types: Filter by event types
|
||||
user_id: Filter by user ID
|
||||
limit: Maximum number of events to return
|
||||
|
||||
Returns:
|
||||
List of audit events
|
||||
"""
|
||||
if start_time is None:
|
||||
start_time = datetime.utcnow() - timedelta(days=7)
|
||||
if end_time is None:
|
||||
end_time = datetime.utcnow()
|
||||
|
||||
events: List[AuditEvent] = []
|
||||
current_date = start_time.date()
|
||||
end_date = end_time.date()
|
||||
|
||||
# Read from all log files in date range
|
||||
while current_date <= end_date and len(events) < limit:
|
||||
log_file = self._get_log_file(datetime.combine(current_date, datetime.min.time()))
|
||||
|
||||
if log_file.exists():
|
||||
try:
|
||||
with open(log_file, "r", encoding="utf-8") as f:
|
||||
for line in f:
|
||||
if len(events) >= limit:
|
||||
break
|
||||
|
||||
try:
|
||||
event_data = json.loads(line.strip())
|
||||
event = AuditEvent(**event_data)
|
||||
|
||||
# Apply filters
|
||||
if event.timestamp < start_time or event.timestamp > end_time:
|
||||
continue
|
||||
|
||||
if event_types and event.event_type not in event_types:
|
||||
continue
|
||||
|
||||
if user_id and event.user_id != user_id:
|
||||
continue
|
||||
|
||||
events.append(event)
|
||||
|
||||
except (json.JSONDecodeError, ValueError) as e:
|
||||
logger.warning(f"Failed to parse audit event: {e}")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to read audit log file {log_file}: {e}")
|
||||
|
||||
current_date += timedelta(days=1)
|
||||
|
||||
# Sort by timestamp descending
|
||||
events.sort(key=lambda e: e.timestamp, reverse=True)
|
||||
return events[:limit]
|
||||
|
||||
async def cleanup_old_events(self, days: int = 90) -> int:
|
||||
"""
|
||||
Clean up audit events older than specified days.
|
||||
|
||||
Args:
|
||||
days: Number of days to retain
|
||||
|
||||
Returns:
|
||||
Number of files deleted
|
||||
"""
|
||||
cutoff_date = datetime.utcnow() - timedelta(days=days)
|
||||
deleted_count = 0
|
||||
|
||||
for log_file in self.log_directory.glob("audit_*.jsonl"):
|
||||
try:
|
||||
# Extract date from filename
|
||||
date_str = log_file.stem.replace("audit_", "")
|
||||
file_date = datetime.strptime(date_str, "%Y-%m-%d")
|
||||
|
||||
if file_date < cutoff_date:
|
||||
log_file.unlink()
|
||||
deleted_count += 1
|
||||
logger.info(f"Deleted old audit log: {log_file}")
|
||||
|
||||
except (ValueError, OSError) as e:
|
||||
logger.warning(f"Failed to process audit log file {log_file}: {e}")
|
||||
|
||||
return deleted_count
|
||||
|
||||
|
||||
class AuditService:
|
||||
"""Main audit service for logging security events."""
|
||||
|
||||
def __init__(self, storage: Optional[AuditLogStorage] = None):
|
||||
"""
|
||||
Initialize audit service.
|
||||
|
||||
Args:
|
||||
storage: Storage backend for audit logs
|
||||
"""
|
||||
self.storage = storage or FileAuditLogStorage()
|
||||
|
||||
async def log_event(
|
||||
self,
|
||||
event_type: AuditEventType,
|
||||
message: str,
|
||||
severity: AuditEventSeverity = AuditEventSeverity.INFO,
|
||||
user_id: Optional[str] = None,
|
||||
ip_address: Optional[str] = None,
|
||||
user_agent: Optional[str] = None,
|
||||
resource: Optional[str] = None,
|
||||
action: Optional[str] = None,
|
||||
status: str = "success",
|
||||
details: Optional[Dict[str, Any]] = None,
|
||||
session_id: Optional[str] = None,
|
||||
) -> None:
|
||||
"""
|
||||
Log an audit event.
|
||||
|
||||
Args:
|
||||
event_type: Type of event
|
||||
message: Human-readable message
|
||||
severity: Event severity
|
||||
user_id: User identifier
|
||||
ip_address: Client IP address
|
||||
user_agent: Client user agent
|
||||
resource: Resource being accessed
|
||||
action: Action performed
|
||||
status: Operation status
|
||||
details: Additional details
|
||||
session_id: Session identifier
|
||||
"""
|
||||
event = AuditEvent(
|
||||
event_type=event_type,
|
||||
severity=severity,
|
||||
user_id=user_id,
|
||||
ip_address=ip_address,
|
||||
user_agent=user_agent,
|
||||
resource=resource,
|
||||
action=action,
|
||||
status=status,
|
||||
message=message,
|
||||
details=details,
|
||||
session_id=session_id,
|
||||
)
|
||||
|
||||
await self.storage.write_event(event)
|
||||
|
||||
# Also log to application logger for high severity events
|
||||
if severity in [AuditEventSeverity.ERROR, AuditEventSeverity.CRITICAL]:
|
||||
logger.error(f"Audit: {message}", extra={"audit_event": event.model_dump()})
|
||||
elif severity == AuditEventSeverity.WARNING:
|
||||
logger.warning(f"Audit: {message}", extra={"audit_event": event.model_dump()})
|
||||
|
||||
async def log_auth_setup(
|
||||
self, user_id: str, ip_address: Optional[str] = None
|
||||
) -> None:
|
||||
"""Log initial authentication setup."""
|
||||
await self.log_event(
|
||||
event_type=AuditEventType.AUTH_SETUP,
|
||||
message=f"Authentication configured by user {user_id}",
|
||||
user_id=user_id,
|
||||
ip_address=ip_address,
|
||||
action="setup",
|
||||
)
|
||||
|
||||
async def log_login_success(
|
||||
self,
|
||||
user_id: str,
|
||||
ip_address: Optional[str] = None,
|
||||
user_agent: Optional[str] = None,
|
||||
session_id: Optional[str] = None,
|
||||
) -> None:
|
||||
"""Log successful login."""
|
||||
await self.log_event(
|
||||
event_type=AuditEventType.AUTH_LOGIN_SUCCESS,
|
||||
message=f"User {user_id} logged in successfully",
|
||||
user_id=user_id,
|
||||
ip_address=ip_address,
|
||||
user_agent=user_agent,
|
||||
session_id=session_id,
|
||||
action="login",
|
||||
)
|
||||
|
||||
async def log_login_failure(
|
||||
self,
|
||||
user_id: Optional[str] = None,
|
||||
ip_address: Optional[str] = None,
|
||||
user_agent: Optional[str] = None,
|
||||
reason: str = "Invalid credentials",
|
||||
) -> None:
|
||||
"""Log failed login attempt."""
|
||||
await self.log_event(
|
||||
event_type=AuditEventType.AUTH_LOGIN_FAILURE,
|
||||
message=f"Login failed for user {user_id or 'unknown'}: {reason}",
|
||||
severity=AuditEventSeverity.WARNING,
|
||||
user_id=user_id,
|
||||
ip_address=ip_address,
|
||||
user_agent=user_agent,
|
||||
status="failure",
|
||||
action="login",
|
||||
details={"reason": reason},
|
||||
)
|
||||
|
||||
async def log_logout(
|
||||
self,
|
||||
user_id: str,
|
||||
ip_address: Optional[str] = None,
|
||||
session_id: Optional[str] = None,
|
||||
) -> None:
|
||||
"""Log user logout."""
|
||||
await self.log_event(
|
||||
event_type=AuditEventType.AUTH_LOGOUT,
|
||||
message=f"User {user_id} logged out",
|
||||
user_id=user_id,
|
||||
ip_address=ip_address,
|
||||
session_id=session_id,
|
||||
action="logout",
|
||||
)
|
||||
|
||||
async def log_config_update(
|
||||
self,
|
||||
user_id: str,
|
||||
changes: Dict[str, Any],
|
||||
ip_address: Optional[str] = None,
|
||||
) -> None:
|
||||
"""Log configuration update."""
|
||||
await self.log_event(
|
||||
event_type=AuditEventType.CONFIG_UPDATE,
|
||||
message=f"Configuration updated by user {user_id}",
|
||||
user_id=user_id,
|
||||
ip_address=ip_address,
|
||||
resource="config",
|
||||
action="update",
|
||||
details={"changes": changes},
|
||||
)
|
||||
|
||||
async def log_config_backup(
|
||||
self, user_id: str, backup_file: str, ip_address: Optional[str] = None
|
||||
) -> None:
|
||||
"""Log configuration backup."""
|
||||
await self.log_event(
|
||||
event_type=AuditEventType.CONFIG_BACKUP,
|
||||
message=f"Configuration backed up by user {user_id}",
|
||||
user_id=user_id,
|
||||
ip_address=ip_address,
|
||||
resource="config",
|
||||
action="backup",
|
||||
details={"backup_file": backup_file},
|
||||
)
|
||||
|
||||
async def log_config_restore(
|
||||
self, user_id: str, backup_file: str, ip_address: Optional[str] = None
|
||||
) -> None:
|
||||
"""Log configuration restore."""
|
||||
await self.log_event(
|
||||
event_type=AuditEventType.CONFIG_RESTORE,
|
||||
message=f"Configuration restored by user {user_id}",
|
||||
user_id=user_id,
|
||||
ip_address=ip_address,
|
||||
resource="config",
|
||||
action="restore",
|
||||
details={"backup_file": backup_file},
|
||||
)
|
||||
|
||||
async def log_download_added(
|
||||
self,
|
||||
user_id: str,
|
||||
series_name: str,
|
||||
episodes: List[str],
|
||||
ip_address: Optional[str] = None,
|
||||
) -> None:
|
||||
"""Log download added to queue."""
|
||||
await self.log_event(
|
||||
event_type=AuditEventType.DOWNLOAD_ADDED,
|
||||
message=f"Download added by user {user_id}: {series_name}",
|
||||
user_id=user_id,
|
||||
ip_address=ip_address,
|
||||
resource=series_name,
|
||||
action="add",
|
||||
details={"episodes": episodes},
|
||||
)
|
||||
|
||||
async def log_download_completed(
|
||||
self, series_name: str, episode: str, file_path: str
|
||||
) -> None:
|
||||
"""Log completed download."""
|
||||
await self.log_event(
|
||||
event_type=AuditEventType.DOWNLOAD_COMPLETED,
|
||||
message=f"Download completed: {series_name} - {episode}",
|
||||
resource=series_name,
|
||||
action="download",
|
||||
details={"episode": episode, "file_path": file_path},
|
||||
)
|
||||
|
||||
async def log_download_failed(
|
||||
self, series_name: str, episode: str, error: str
|
||||
) -> None:
|
||||
"""Log failed download."""
|
||||
await self.log_event(
|
||||
event_type=AuditEventType.DOWNLOAD_FAILED,
|
||||
message=f"Download failed: {series_name} - {episode}",
|
||||
severity=AuditEventSeverity.ERROR,
|
||||
resource=series_name,
|
||||
action="download",
|
||||
status="failure",
|
||||
details={"episode": episode, "error": error},
|
||||
)
|
||||
|
||||
async def log_queue_operation(
|
||||
self,
|
||||
user_id: str,
|
||||
operation: str,
|
||||
ip_address: Optional[str] = None,
|
||||
details: Optional[Dict[str, Any]] = None,
|
||||
) -> None:
|
||||
"""Log queue operation."""
|
||||
event_type_map = {
|
||||
"start": AuditEventType.QUEUE_STARTED,
|
||||
"stop": AuditEventType.QUEUE_STOPPED,
|
||||
"pause": AuditEventType.QUEUE_PAUSED,
|
||||
"resume": AuditEventType.QUEUE_RESUMED,
|
||||
"clear": AuditEventType.QUEUE_CLEARED,
|
||||
}
|
||||
|
||||
event_type = event_type_map.get(operation, AuditEventType.SYSTEM_ERROR)
|
||||
await self.log_event(
|
||||
event_type=event_type,
|
||||
message=f"Queue {operation} by user {user_id}",
|
||||
user_id=user_id,
|
||||
ip_address=ip_address,
|
||||
resource="queue",
|
||||
action=operation,
|
||||
details=details,
|
||||
)
|
||||
|
||||
async def log_system_error(
|
||||
self, error: str, details: Optional[Dict[str, Any]] = None
|
||||
) -> None:
|
||||
"""Log system error."""
|
||||
await self.log_event(
|
||||
event_type=AuditEventType.SYSTEM_ERROR,
|
||||
message=f"System error: {error}",
|
||||
severity=AuditEventSeverity.ERROR,
|
||||
status="error",
|
||||
details=details,
|
||||
)
|
||||
|
||||
async def get_events(
|
||||
self,
|
||||
start_time: Optional[datetime] = None,
|
||||
end_time: Optional[datetime] = None,
|
||||
event_types: Optional[List[AuditEventType]] = None,
|
||||
user_id: Optional[str] = None,
|
||||
limit: int = 100,
|
||||
) -> List[AuditEvent]:
|
||||
"""
|
||||
Get audit events with filters.
|
||||
|
||||
Args:
|
||||
start_time: Start of time range
|
||||
end_time: End of time range
|
||||
event_types: Filter by event types
|
||||
user_id: Filter by user ID
|
||||
limit: Maximum number of events to return
|
||||
|
||||
Returns:
|
||||
List of audit events
|
||||
"""
|
||||
return await self.storage.read_events(
|
||||
start_time=start_time,
|
||||
end_time=end_time,
|
||||
event_types=event_types,
|
||||
user_id=user_id,
|
||||
limit=limit,
|
||||
)
|
||||
|
||||
async def cleanup_old_events(self, days: int = 90) -> int:
|
||||
"""
|
||||
Clean up old audit events.
|
||||
|
||||
Args:
|
||||
days: Number of days to retain
|
||||
|
||||
Returns:
|
||||
Number of events deleted
|
||||
"""
|
||||
return await self.storage.cleanup_old_events(days)
|
||||
|
||||
|
||||
# Global audit service instance
|
||||
_audit_service: Optional[AuditService] = None
|
||||
|
||||
|
||||
def get_audit_service() -> AuditService:
|
||||
"""
|
||||
Get the global audit service instance.
|
||||
|
||||
Returns:
|
||||
AuditService instance
|
||||
"""
|
||||
global _audit_service
|
||||
if _audit_service is None:
|
||||
_audit_service = AuditService()
|
||||
return _audit_service
|
||||
|
||||
|
||||
def configure_audit_service(storage: Optional[AuditLogStorage] = None) -> AuditService:
|
||||
"""
|
||||
Configure the global audit service.
|
||||
|
||||
Args:
|
||||
storage: Custom storage backend
|
||||
|
||||
Returns:
|
||||
Configured AuditService instance
|
||||
"""
|
||||
global _audit_service
|
||||
_audit_service = AuditService(storage=storage)
|
||||
return _audit_service
|
||||
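A short usage sketch of the audit service through its singleton accessor; the identifiers and addresses are made up, and the project import is omitted:

```python
import asyncio

# get_audit_service and AuditEventType come from the audit module above.


async def main() -> None:
    audit = get_audit_service()

    await audit.log_login_failure(
        user_id="admin",
        ip_address="203.0.113.7",
        reason="Invalid credentials",
    )

    events = await audit.get_events(
        event_types=[AuditEventType.AUTH_LOGIN_FAILURE],
        limit=10,
    )
    for event in events:
        print(event.timestamp.isoformat(), event.message)


asyncio.run(main())
```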
@@ -1,432 +0,0 @@
|
||||
"""Backup and restore service for configuration and data management."""
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
import shutil
|
||||
import tarfile
|
||||
from dataclasses import dataclass
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass
|
||||
class BackupInfo:
|
||||
"""Information about a backup."""
|
||||
|
||||
name: str
|
||||
timestamp: datetime
|
||||
size_bytes: int
|
||||
backup_type: str # 'config', 'data', 'full'
|
||||
description: Optional[str] = None
|
||||
|
||||
|
||||
class BackupService:
|
||||
"""Service for managing backups and restores."""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
backup_dir: str = "data/backups",
|
||||
config_dir: str = "data",
|
||||
database_path: str = "data/aniworld.db",
|
||||
):
|
||||
"""Initialize backup service.
|
||||
|
||||
Args:
|
||||
backup_dir: Directory to store backups.
|
||||
config_dir: Directory containing configuration files.
|
||||
database_path: Path to the database file.
|
||||
"""
|
||||
self.backup_dir = Path(backup_dir)
|
||||
self.config_dir = Path(config_dir)
|
||||
self.database_path = Path(database_path)
|
||||
|
||||
# Create backup directory if it doesn't exist
|
||||
self.backup_dir.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
def backup_configuration(
|
||||
self, description: str = ""
|
||||
) -> Optional[BackupInfo]:
|
||||
"""Create a configuration backup.
|
||||
|
||||
Args:
|
||||
description: Optional description for the backup.
|
||||
|
||||
Returns:
|
||||
BackupInfo: Information about the created backup.
|
||||
"""
|
||||
try:
|
||||
timestamp = datetime.now()
|
||||
backup_name = (
|
||||
f"config_{timestamp.strftime('%Y%m%d_%H%M%S')}.tar.gz"
|
||||
)
|
||||
backup_path = self.backup_dir / backup_name
|
||||
|
||||
with tarfile.open(backup_path, "w:gz") as tar:
|
||||
# Add configuration files
|
||||
config_files = [
|
||||
self.config_dir / "config.json",
|
||||
]
|
||||
|
||||
for config_file in config_files:
|
||||
if config_file.exists():
|
||||
tar.add(config_file, arcname=config_file.name)
|
||||
|
||||
size_bytes = backup_path.stat().st_size
|
||||
|
||||
info = BackupInfo(
|
||||
name=backup_name,
|
||||
timestamp=timestamp,
|
||||
size_bytes=size_bytes,
|
||||
backup_type="config",
|
||||
description=description,
|
||||
)
|
||||
|
||||
logger.info(f"Configuration backup created: {backup_name}")
|
||||
return info
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to create configuration backup: {e}")
|
||||
return None
|
||||
|
||||
def backup_database(
|
||||
self, description: str = ""
|
||||
) -> Optional[BackupInfo]:
|
||||
"""Create a database backup.
|
||||
|
||||
Args:
|
||||
description: Optional description for the backup.
|
||||
|
||||
Returns:
|
||||
BackupInfo: Information about the created backup.
|
||||
"""
|
||||
try:
|
||||
if not self.database_path.exists():
|
||||
logger.warning(
|
||||
f"Database file not found: {self.database_path}"
|
||||
)
|
||||
return None
|
||||
|
||||
timestamp = datetime.now()
|
||||
backup_name = (
|
||||
f"database_{timestamp.strftime('%Y%m%d_%H%M%S')}.tar.gz"
|
||||
)
|
||||
backup_path = self.backup_dir / backup_name
|
||||
|
||||
with tarfile.open(backup_path, "w:gz") as tar:
|
||||
tar.add(self.database_path, arcname=self.database_path.name)
|
||||
|
||||
size_bytes = backup_path.stat().st_size
|
||||
|
||||
info = BackupInfo(
|
||||
name=backup_name,
|
||||
timestamp=timestamp,
|
||||
size_bytes=size_bytes,
|
||||
backup_type="data",
|
||||
description=description,
|
||||
)
|
||||
|
||||
logger.info(f"Database backup created: {backup_name}")
|
||||
return info
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to create database backup: {e}")
|
||||
return None
|
||||
|
||||
def backup_full(
|
||||
self, description: str = ""
|
||||
) -> Optional[BackupInfo]:
|
||||
"""Create a full system backup.
|
||||
|
||||
Args:
|
||||
description: Optional description for the backup.
|
||||
|
||||
Returns:
|
||||
BackupInfo: Information about the created backup.
|
||||
"""
|
||||
try:
|
||||
timestamp = datetime.now()
|
||||
backup_name = f"full_{timestamp.strftime('%Y%m%d_%H%M%S')}.tar.gz"
|
||||
backup_path = self.backup_dir / backup_name
|
||||
|
||||
with tarfile.open(backup_path, "w:gz") as tar:
|
||||
# Add configuration
|
||||
config_file = self.config_dir / "config.json"
|
||||
if config_file.exists():
|
||||
tar.add(config_file, arcname=config_file.name)
|
||||
|
||||
# Add database
|
||||
if self.database_path.exists():
|
||||
tar.add(
|
||||
self.database_path,
|
||||
arcname=self.database_path.name,
|
||||
)
|
||||
|
||||
# Add download queue
|
||||
queue_file = self.config_dir / "download_queue.json"
|
||||
if queue_file.exists():
|
||||
tar.add(queue_file, arcname=queue_file.name)
|
||||
|
||||
size_bytes = backup_path.stat().st_size
|
||||
|
||||
info = BackupInfo(
|
||||
name=backup_name,
|
||||
timestamp=timestamp,
|
||||
size_bytes=size_bytes,
|
||||
backup_type="full",
|
||||
description=description,
|
||||
)
|
||||
|
||||
logger.info(f"Full backup created: {backup_name}")
|
||||
return info
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to create full backup: {e}")
|
||||
return None
|
||||
|
||||
def restore_configuration(self, backup_name: str) -> bool:
|
||||
"""Restore configuration from backup.
|
||||
|
||||
Args:
|
||||
backup_name: Name of the backup to restore.
|
||||
|
||||
Returns:
|
||||
bool: True if restore was successful.
|
||||
"""
|
||||
try:
|
||||
backup_path = self.backup_dir / backup_name
|
||||
|
||||
if not backup_path.exists():
|
||||
logger.error(f"Backup file not found: {backup_name}")
|
||||
return False
|
||||
|
||||
# Extract to temporary directory
|
||||
temp_dir = self.backup_dir / "temp_restore"
|
||||
temp_dir.mkdir(exist_ok=True)
|
||||
|
||||
with tarfile.open(backup_path, "r:gz") as tar:
|
||||
tar.extractall(temp_dir)
|
||||
|
||||
# Copy configuration file back
|
||||
config_file = temp_dir / "config.json"
|
||||
if config_file.exists():
|
||||
shutil.copy(config_file, self.config_dir / "config.json")
|
||||
|
||||
# Cleanup
|
||||
shutil.rmtree(temp_dir)
|
||||
|
||||
logger.info(f"Configuration restored from: {backup_name}")
|
||||
return True
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to restore configuration: {e}")
|
||||
return False
|
||||
|
||||
def restore_database(self, backup_name: str) -> bool:
|
||||
"""Restore database from backup.
|
||||
|
||||
Args:
|
||||
backup_name: Name of the backup to restore.
|
||||
|
||||
Returns:
|
||||
bool: True if restore was successful.
|
||||
"""
|
||||
try:
|
||||
backup_path = self.backup_dir / backup_name
|
||||
|
||||
if not backup_path.exists():
|
||||
logger.error(f"Backup file not found: {backup_name}")
|
||||
return False
|
||||
|
||||
# Create backup of current database
|
||||
if self.database_path.exists():
|
||||
current_backup = (
|
||||
self.database_path.parent
|
||||
/ f"{self.database_path.name}.backup"
|
||||
)
|
||||
shutil.copy(self.database_path, current_backup)
|
||||
logger.info(f"Current database backed up to: {current_backup}")
|
||||
|
||||
# Extract to temporary directory
|
||||
temp_dir = self.backup_dir / "temp_restore"
|
||||
temp_dir.mkdir(exist_ok=True)
|
||||
|
||||
with tarfile.open(backup_path, "r:gz") as tar:
|
||||
tar.extractall(temp_dir)
|
||||
|
||||
# Copy database file back
|
||||
db_file = temp_dir / self.database_path.name
|
||||
if db_file.exists():
|
||||
shutil.copy(db_file, self.database_path)
|
||||
|
||||
# Cleanup
|
||||
shutil.rmtree(temp_dir)
|
||||
|
||||
logger.info(f"Database restored from: {backup_name}")
|
||||
return True
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to restore database: {e}")
|
||||
return False
|
||||
|
||||
def list_backups(
|
||||
self, backup_type: Optional[str] = None
|
||||
) -> List[Dict[str, Any]]:
|
||||
"""List available backups.
|
||||
|
||||
Args:
|
||||
backup_type: Optional filter by backup type.
|
||||
|
||||
Returns:
|
||||
list: List of backup information.
|
||||
"""
|
||||
try:
|
||||
backups = []
|
||||
|
||||
for backup_file in sorted(self.backup_dir.glob("*.tar.gz")):
|
||||
# Extract type from filename
|
||||
filename = backup_file.name
|
||||
file_type = filename.split("_")[0]
|
||||
|
||||
if backup_type and file_type != backup_type:
|
||||
continue
|
||||
|
||||
# Extract timestamp
|
||||
timestamp_str = (
|
||||
filename.split("_", 1)[1].replace(".tar.gz", "")
|
||||
)
|
||||
|
||||
backups.append(
|
||||
{
|
||||
"name": filename,
|
||||
"type": file_type,
|
||||
"size_bytes": backup_file.stat().st_size,
|
||||
"created": timestamp_str,
|
||||
}
|
||||
)
|
||||
|
||||
return sorted(backups, key=lambda x: x["created"], reverse=True)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to list backups: {e}")
|
||||
return []
|
||||
|
||||
def delete_backup(self, backup_name: str) -> bool:
|
||||
"""Delete a backup.
|
||||
|
||||
Args:
|
||||
backup_name: Name of the backup to delete.
|
||||
|
||||
Returns:
|
||||
bool: True if delete was successful.
|
||||
"""
|
||||
try:
|
||||
backup_path = self.backup_dir / backup_name
|
||||
|
||||
if not backup_path.exists():
|
||||
logger.warning(f"Backup not found: {backup_name}")
|
||||
return False
|
||||
|
||||
backup_path.unlink()
|
||||
logger.info(f"Backup deleted: {backup_name}")
|
||||
return True
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to delete backup: {e}")
|
||||
return False
|
||||
|
||||
def cleanup_old_backups(
|
||||
self, max_backups: int = 10, backup_type: Optional[str] = None
|
||||
) -> int:
|
||||
"""Remove old backups, keeping only the most recent ones.
|
||||
|
||||
Args:
|
||||
max_backups: Maximum number of backups to keep.
|
||||
backup_type: Optional filter by backup type.
|
||||
|
||||
Returns:
|
||||
int: Number of backups deleted.
|
||||
"""
|
||||
try:
|
||||
backups = self.list_backups(backup_type)
|
||||
|
||||
if len(backups) <= max_backups:
|
||||
return 0
|
||||
|
||||
deleted_count = 0
|
||||
for backup in backups[max_backups:]:
|
||||
if self.delete_backup(backup["name"]):
|
||||
deleted_count += 1
|
||||
|
||||
logger.info(f"Cleaned up {deleted_count} old backups")
|
||||
return deleted_count
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to cleanup old backups: {e}")
|
||||
return 0
|
||||
|
||||
def export_anime_data(
|
||||
self, output_file: str
|
||||
) -> bool:
|
||||
"""Export anime library data to JSON.
|
||||
|
||||
Args:
|
||||
output_file: Path to export file.
|
||||
|
||||
Returns:
|
||||
bool: True if export was successful.
|
||||
"""
|
||||
try:
|
||||
# This would integrate with the anime service
|
||||
# to export anime library data
|
||||
export_data = {
|
||||
"timestamp": datetime.now().isoformat(),
|
||||
"anime_count": 0,
|
||||
"data": [],
|
||||
}
|
||||
|
||||
with open(output_file, "w") as f:
|
||||
json.dump(export_data, f, indent=2)
|
||||
|
||||
logger.info(f"Anime data exported to: {output_file}")
|
||||
return True
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to export anime data: {e}")
|
||||
return False
|
||||
|
||||
def import_anime_data(self, input_file: str) -> bool:
|
||||
"""Import anime library data from JSON.
|
||||
|
||||
Args:
|
||||
input_file: Path to import file.
|
||||
|
||||
Returns:
|
||||
bool: True if import was successful.
|
||||
"""
|
||||
try:
|
||||
if not os.path.exists(input_file):
|
||||
logger.error(f"Import file not found: {input_file}")
|
||||
return False
|
||||
|
||||
with open(input_file, "r") as f:
|
||||
json.load(f) # Load and validate JSON
|
||||
|
||||
# This would integrate with the anime service
|
||||
# to import anime library data
|
||||
|
||||
logger.info(f"Anime data imported from: {input_file}")
|
||||
return True
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to import anime data: {e}")
|
||||
return False
|
||||
|
||||
|
||||
# Global backup service instance
|
||||
_backup_service: Optional[BackupService] = None
|
||||
|
||||
|
||||
def get_backup_service() -> BackupService:
|
||||
"""Get or create the global backup service instance.
|
||||
|
||||
Returns:
|
||||
BackupService: The backup service instance.
|
||||
"""
|
||||
global _backup_service
|
||||
if _backup_service is None:
|
||||
_backup_service = BackupService()
|
||||
return _backup_service
|
||||
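A short usage sketch of the backup workflow; the description text and retention count are arbitrary, and the project import is omitted:

```python
# get_backup_service comes from the backup module above.
backups = get_backup_service()

# Take a full backup, then prune config backups down to the last five.
info = backups.backup_full(description="before upgrade")
if info is not None:
    print(f"created {info.name} ({info.size_bytes} bytes)")

for entry in backups.list_backups(backup_type="config"):
    print(entry["name"], entry["created"])

removed = backups.cleanup_old_backups(max_backups=5, backup_type="config")
print(f"removed {removed} old config backups")
```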
@@ -1,8 +1,8 @@
|
||||
"""Download queue service for managing anime episode downloads.
|
||||
|
||||
-This module provides a comprehensive queue management system for handling
-concurrent anime episode downloads with priority-based scheduling, progress
-tracking, persistence, and automatic retry functionality.
+This module provides a simplified queue management system for handling
+anime episode downloads with manual start/stop controls, progress tracking,
+persistence, and retry functionality.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
@@ -13,25 +13,20 @@ from collections import deque
|
||||
from concurrent.futures import ThreadPoolExecutor
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
from typing import Callable, Dict, List, Optional
|
||||
from typing import Dict, List, Optional
|
||||
|
||||
import structlog
|
||||
|
||||
from src.server.models.download import (
|
||||
DownloadItem,
|
||||
DownloadPriority,
|
||||
DownloadProgress,
|
||||
DownloadStatus,
|
||||
EpisodeIdentifier,
|
||||
QueueStats,
|
||||
QueueStatus,
|
||||
)
|
||||
from src.server.services.anime_service import AnimeService, AnimeServiceError
|
||||
from src.server.services.progress_service import (
|
||||
ProgressService,
|
||||
ProgressType,
|
||||
get_progress_service,
|
||||
)
|
||||
from src.server.services.progress_service import ProgressService, get_progress_service
|
||||
|
||||
logger = structlog.get_logger(__name__)
|
||||
|
||||
@ -41,11 +36,11 @@ class DownloadServiceError(Exception):
|
||||
|
||||
|
||||
class DownloadService:
|
||||
"""Manages the download queue with concurrent processing and persistence.
|
||||
"""Manages the download queue with manual start/stop controls.
|
||||
|
||||
Features:
|
||||
- Priority-based queue management
|
||||
- Concurrent download processing
|
||||
- Manual download start/stop
|
||||
- FIFO queue processing
|
||||
- Real-time progress tracking
|
||||
- Queue persistence and recovery
|
||||
- Automatic retry logic
|
||||
@ -55,7 +50,6 @@ class DownloadService:
|
||||
def __init__(
|
||||
self,
|
||||
anime_service: AnimeService,
|
||||
max_concurrent_downloads: int = 2,
|
||||
max_retries: int = 3,
|
||||
persistence_path: str = "./data/download_queue.json",
|
||||
progress_service: Optional[ProgressService] = None,
|
||||
@ -64,13 +58,11 @@ class DownloadService:
|
||||
|
||||
Args:
|
||||
anime_service: Service for anime operations
|
||||
max_concurrent_downloads: Maximum simultaneous downloads
|
||||
max_retries: Maximum retry attempts for failed downloads
|
||||
persistence_path: Path to persist queue state
|
||||
progress_service: Optional progress service for tracking
|
||||
"""
|
||||
self._anime_service = anime_service
|
||||
self._max_concurrent = max_concurrent_downloads
|
||||
self._max_retries = max_retries
|
||||
self._persistence_path = Path(persistence_path)
|
||||
self._progress_service = progress_service or get_progress_service()
|
||||
@ -79,22 +71,15 @@ class DownloadService:
|
||||
self._pending_queue: deque[DownloadItem] = deque()
|
||||
# Helper dict for O(1) lookup of pending items by ID
|
||||
self._pending_items_by_id: Dict[str, DownloadItem] = {}
|
||||
self._active_downloads: Dict[str, DownloadItem] = {}
|
||||
self._active_download: Optional[DownloadItem] = None
|
||||
self._completed_items: deque[DownloadItem] = deque(maxlen=100)
|
||||
self._failed_items: deque[DownloadItem] = deque(maxlen=50)
|
||||
|
||||
# Control flags
|
||||
self._is_running = False
|
||||
self._is_paused = False
|
||||
self._shutdown_event = asyncio.Event()
|
||||
self._is_stopped = True # Queue processing is stopped by default
|
||||
|
||||
# Executor for blocking operations
|
||||
self._executor = ThreadPoolExecutor(
|
||||
max_workers=max_concurrent_downloads
|
||||
)
|
||||
|
||||
# WebSocket broadcast callback
|
||||
self._broadcast_callback: Optional[Callable] = None
|
||||
self._executor = ThreadPoolExecutor(max_workers=1)
|
||||
|
||||
# Statistics tracking
|
||||
self._total_downloaded_mb: float = 0.0
|
||||
@ -103,12 +88,27 @@ class DownloadService:
|
||||
# Load persisted queue
|
||||
self._load_queue()
|
||||
|
||||
# Initialize queue progress tracking
|
||||
asyncio.create_task(self._init_queue_progress())
|
||||
|
||||
logger.info(
|
||||
"DownloadService initialized",
|
||||
max_concurrent=max_concurrent_downloads,
|
||||
max_retries=max_retries,
|
||||
)
|
||||
|
||||
async def _init_queue_progress(self) -> None:
|
||||
"""Initialize the download queue progress tracking."""
|
||||
try:
|
||||
from src.server.services.progress_service import ProgressType
|
||||
await self._progress_service.start_progress(
|
||||
progress_id="download_queue",
|
||||
progress_type=ProgressType.QUEUE,
|
||||
title="Download Queue",
|
||||
message="Queue ready",
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error("Failed to initialize queue progress", error=str(e))
|
||||
|
||||
def _add_to_pending_queue(
|
||||
self, item: DownloadItem, front: bool = False
|
||||
) -> None:
|
||||
@ -149,28 +149,6 @@ class DownloadService:
|
||||
except (ValueError, KeyError):
|
||||
return None
|
||||
|
||||
def set_broadcast_callback(self, callback: Callable) -> None:
|
||||
"""Set callback for broadcasting status updates via WebSocket."""
|
||||
self._broadcast_callback = callback
|
||||
logger.debug("Broadcast callback registered")
|
||||
|
||||
async def _broadcast_update(self, update_type: str, data: dict) -> None:
|
||||
"""Broadcast update to connected WebSocket clients.
|
||||
|
||||
Args:
|
||||
update_type: Type of update (download_progress, queue_status, etc.)
|
||||
data: Update data to broadcast
|
||||
"""
|
||||
if self._broadcast_callback:
|
||||
try:
|
||||
await self._broadcast_callback(update_type, data)
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Failed to broadcast update",
|
||||
update_type=update_type,
|
||||
error=str(e),
|
||||
)
|
||||
|
||||
def _generate_item_id(self) -> str:
|
||||
"""Generate unique identifier for download items."""
|
||||
return str(uuid.uuid4())
|
||||
@ -212,14 +190,17 @@ class DownloadService:
|
||||
try:
|
||||
self._persistence_path.parent.mkdir(parents=True, exist_ok=True)
|
||||
|
||||
active_items = (
|
||||
[self._active_download] if self._active_download else []
|
||||
)
|
||||
|
||||
data = {
|
||||
"pending": [
|
||||
item.model_dump(mode="json")
|
||||
for item in self._pending_queue
|
||||
],
|
||||
"active": [
|
||||
item.model_dump(mode="json")
|
||||
for item in self._active_downloads.values()
|
||||
item.model_dump(mode="json") for item in active_items
|
||||
],
|
||||
"failed": [
|
||||
item.model_dump(mode="json")
|
||||
@ -238,17 +219,19 @@ class DownloadService:
|
||||
async def add_to_queue(
|
||||
self,
|
||||
serie_id: str,
|
||||
serie_folder: str,
|
||||
serie_name: str,
|
||||
episodes: List[EpisodeIdentifier],
|
||||
priority: DownloadPriority = DownloadPriority.NORMAL,
|
||||
) -> List[str]:
|
||||
"""Add episodes to the download queue.
|
||||
"""Add episodes to the download queue (FIFO order).
|
||||
|
||||
Args:
|
||||
serie_id: Series identifier
|
||||
serie_id: Series identifier (provider key)
|
||||
serie_folder: Series folder name on disk
|
||||
serie_name: Series display name
|
||||
episodes: List of episodes to download
|
||||
priority: Queue priority level
|
||||
priority: Queue priority level (ignored, kept for compatibility)
|
||||
|
||||
Returns:
|
||||
List of created download item IDs
|
||||
@ -263,6 +246,7 @@ class DownloadService:
|
||||
item = DownloadItem(
|
||||
id=self._generate_item_id(),
|
||||
serie_id=serie_id,
|
||||
serie_folder=serie_folder,
|
||||
serie_name=serie_name,
|
||||
episode=episode,
|
||||
status=DownloadStatus.PENDING,
|
||||
@ -270,12 +254,8 @@ class DownloadService:
|
||||
added_at=datetime.now(timezone.utc),
|
||||
)
|
||||
|
||||
# Insert based on priority. High-priority downloads jump the
|
||||
# line via appendleft so they execute before existing work;
|
||||
# everything else is appended to preserve FIFO order.
|
||||
self._add_to_pending_queue(
|
||||
item, front=(priority == DownloadPriority.HIGH)
|
||||
)
|
||||
# Always append to end (FIFO order)
|
||||
self._add_to_pending_queue(item, front=False)
|
||||
|
||||
created_ids.append(item.id)
|
||||
|
||||
@ -285,20 +265,21 @@ class DownloadService:
|
||||
serie=serie_name,
|
||||
season=episode.season,
|
||||
episode=episode.episode,
|
||||
priority=priority.value,
|
||||
)
|
||||
|
||||
self._save_queue()
|
||||
|
||||
# Broadcast queue status update
|
||||
# Notify via progress service
|
||||
queue_status = await self.get_queue_status()
|
||||
await self._broadcast_update(
|
||||
"queue_status",
|
||||
{
|
||||
await self._progress_service.update_progress(
|
||||
progress_id="download_queue",
|
||||
message=f"Added {len(created_ids)} items to queue",
|
||||
metadata={
|
||||
"action": "items_added",
|
||||
"added_ids": created_ids,
|
||||
"queue_status": queue_status.model_dump(mode="json"),
|
||||
},
|
||||
force_broadcast=True,
|
||||
)
|
||||
|
||||
return created_ids
|
||||
@ -324,12 +305,13 @@ class DownloadService:
|
||||
try:
|
||||
for item_id in item_ids:
|
||||
# Check if item is currently downloading
|
||||
if item_id in self._active_downloads:
|
||||
item = self._active_downloads[item_id]
|
||||
active = self._active_download
|
||||
if active and active.id == item_id:
|
||||
item = active
|
||||
item.status = DownloadStatus.CANCELLED
|
||||
item.completed_at = datetime.now(timezone.utc)
|
||||
self._failed_items.append(item)
|
||||
del self._active_downloads[item_id]
|
||||
self._active_download = None
|
||||
removed_ids.append(item_id)
|
||||
logger.info("Cancelled active download", item_id=item_id)
|
||||
continue
|
||||
@ -346,15 +328,17 @@ class DownloadService:
|
||||
|
||||
if removed_ids:
|
||||
self._save_queue()
|
||||
# Broadcast queue status update
|
||||
# Notify via progress service
|
||||
queue_status = await self.get_queue_status()
|
||||
await self._broadcast_update(
|
||||
"queue_status",
|
||||
{
|
||||
await self._progress_service.update_progress(
|
||||
progress_id="download_queue",
|
||||
message=f"Removed {len(removed_ids)} items from queue",
|
||||
metadata={
|
||||
"action": "items_removed",
|
||||
"removed_ids": removed_ids,
|
||||
"queue_status": queue_status.model_dump(mode="json"),
|
||||
},
|
||||
force_broadcast=True,
|
||||
)
|
||||
|
||||
return removed_ids
|
||||
@ -365,118 +349,151 @@ class DownloadService:
|
||||
f"Failed to remove items: {str(e)}"
|
||||
) from e
|
||||
|
||||
async def reorder_queue(self, item_id: str, new_position: int) -> bool:
|
||||
"""Reorder an item in the pending queue.
|
||||
async def start_queue_processing(self) -> Optional[str]:
|
||||
"""Start automatic queue processing of all pending downloads.
|
||||
|
||||
Args:
|
||||
item_id: Download item ID to reorder
|
||||
new_position: New position in queue (0-based)
|
||||
This will process all pending downloads one by one until the queue
|
||||
is empty or stopped. The processing continues even if the browser
|
||||
is closed.
|
||||
|
||||
Returns:
|
||||
True if reordering was successful
|
||||
The string "queue_started" once processing begins, or None if the queue is empty
|
||||
|
||||
Raises:
|
||||
DownloadServiceError: If reordering fails
|
||||
DownloadServiceError: If queue processing is already active
|
||||
"""
|
||||
try:
|
||||
# Find and remove item - O(1) lookup using helper dict
|
||||
item_to_move = self._pending_items_by_id.get(item_id)
|
||||
|
||||
if not item_to_move:
|
||||
# Check if download already active
|
||||
if self._active_download:
|
||||
raise DownloadServiceError(
|
||||
f"Item {item_id} not found in pending queue"
|
||||
"Queue processing is already active"
|
||||
)
|
||||
|
||||
# Remove from current position
|
||||
self._pending_queue.remove(item_to_move)
|
||||
del self._pending_items_by_id[item_id]
|
||||
# Check if queue is empty
|
||||
if not self._pending_queue:
|
||||
logger.info("No pending downloads to start")
|
||||
return None
|
||||
|
||||
# Insert at new position
|
||||
queue_list = list(self._pending_queue)
|
||||
new_position = max(0, min(new_position, len(queue_list)))
|
||||
queue_list.insert(new_position, item_to_move)
|
||||
self._pending_queue = deque(queue_list)
|
||||
# Re-add to helper dict
|
||||
self._pending_items_by_id[item_id] = item_to_move
|
||||
# Mark queue as running
|
||||
self._is_stopped = False
|
||||
|
||||
self._save_queue()
|
||||
# Start queue processing in background
|
||||
asyncio.create_task(self._process_queue())
|
||||
|
||||
# Broadcast queue status update
|
||||
queue_status = await self.get_queue_status()
|
||||
await self._broadcast_update(
|
||||
"queue_status",
|
||||
{
|
||||
"action": "queue_reordered",
|
||||
"item_id": item_id,
|
||||
"new_position": new_position,
|
||||
"queue_status": queue_status.model_dump(mode="json"),
|
||||
},
|
||||
)
|
||||
logger.info("Queue processing started")
|
||||
|
||||
logger.info(
|
||||
"Queue item reordered",
|
||||
item_id=item_id,
|
||||
new_position=new_position
|
||||
)
|
||||
return True
|
||||
return "queue_started"
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to reorder queue", error=str(e))
|
||||
logger.error("Failed to start queue processing", error=str(e))
|
||||
raise DownloadServiceError(
|
||||
f"Failed to reorder: {str(e)}"
|
||||
f"Failed to start queue processing: {str(e)}"
|
||||
) from e
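As a rough end-to-end sketch of the FIFO flow described above (enqueue, then kick off processing), assuming `EpisodeIdentifier` accepts `season` and `episode` keyword arguments, which matches how the service reads `item.episode` but is not shown in this hunk:

```python
from src.server.models.download import EpisodeIdentifier
from src.server.services.download_service import DownloadService


async def queue_first_three(service: DownloadService) -> None:
    # Hypothetical series identifiers; replace with real provider keys/folders.
    episodes = [EpisodeIdentifier(season=1, episode=n) for n in (1, 2, 3)]

    # Items are always appended in FIFO order; the priority argument is kept
    # for compatibility but ignored by this implementation.
    item_ids = await service.add_to_queue(
        serie_id="example-provider-key",
        serie_folder="Example Series",
        serie_name="Example Series",
        episodes=episodes,
    )
    print(f"Queued {len(item_ids)} episodes")

    # Starts the background task that drains the queue one item at a time.
    await service.start_queue_processing()
```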
|
||||
|
||||
async def reorder_queue_bulk(self, item_order: List[str]) -> bool:
|
||||
"""Reorder pending queue to match provided item order for the specified
|
||||
item IDs. Any pending items not mentioned will be appended after the
|
||||
ordered items preserving their relative order.
|
||||
async def _process_queue(self) -> None:
|
||||
"""Process all items in the queue sequentially.
|
||||
|
||||
Args:
|
||||
item_order: Desired ordering of item IDs for pending queue
|
||||
|
||||
Returns:
|
||||
True if operation completed
|
||||
This runs continuously until the queue is empty or stopped.
|
||||
Each download is processed one at a time, and the next one starts
|
||||
automatically after the previous one completes.
|
||||
"""
|
||||
logger.info("Queue processor started")
|
||||
|
||||
while not self._is_stopped and len(self._pending_queue) > 0:
|
||||
try:
|
||||
# Map existing pending items by id
|
||||
existing = {item.id: item for item in list(self._pending_queue)}
|
||||
# Get next item from queue
|
||||
item = self._pending_queue.popleft()
|
||||
del self._pending_items_by_id[item.id]
|
||||
|
||||
new_queue: List[DownloadItem] = []
|
||||
|
||||
# Add items in the requested order if present
|
||||
for item_id in item_order:
|
||||
item = existing.pop(item_id, None)
|
||||
if item:
|
||||
new_queue.append(item)
|
||||
|
||||
# Append any remaining items preserving original order
|
||||
for item in list(self._pending_queue):
|
||||
if item.id in existing:
|
||||
new_queue.append(item)
|
||||
existing.pop(item.id, None)
|
||||
|
||||
# Replace pending queue
|
||||
self._pending_queue = deque(new_queue)
|
||||
|
||||
self._save_queue()
|
||||
|
||||
# Broadcast queue status update
|
||||
queue_status = await self.get_queue_status()
|
||||
await self._broadcast_update(
|
||||
"queue_status",
|
||||
{
|
||||
"action": "queue_bulk_reordered",
|
||||
"item_order": item_order,
|
||||
"queue_status": queue_status.model_dump(mode="json"),
|
||||
},
|
||||
logger.info(
|
||||
"Processing next item from queue",
|
||||
item_id=item.id,
|
||||
serie=item.serie_name,
|
||||
remaining=len(self._pending_queue)
|
||||
)
|
||||
|
||||
logger.info("Bulk queue reorder applied", ordered_count=len(item_order))
|
||||
return True
|
||||
# Notify via progress service
|
||||
queue_status = await self.get_queue_status()
|
||||
msg = (
|
||||
f"Started: {item.serie_name} "
|
||||
f"S{item.episode.season:02d}E{item.episode.episode:02d}"
|
||||
)
|
||||
await self._progress_service.update_progress(
|
||||
progress_id="download_queue",
|
||||
message=msg,
|
||||
metadata={
|
||||
"action": "download_started",
|
||||
"item_id": item.id,
|
||||
"serie_name": item.serie_name,
|
||||
"season": item.episode.season,
|
||||
"episode": item.episode.episode,
|
||||
"queue_status": queue_status.model_dump(mode="json"),
|
||||
},
|
||||
force_broadcast=True,
|
||||
)
|
||||
|
||||
# Process the download (this will wait until complete)
|
||||
await self._process_download(item)
|
||||
|
||||
# Small delay between downloads
|
||||
await asyncio.sleep(1)
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Failed to apply bulk reorder", error=str(e))
|
||||
raise DownloadServiceError(f"Failed to reorder: {str(e)}") from e
|
||||
logger.error(
|
||||
"Error in queue processing loop",
|
||||
error=str(e),
|
||||
exc_info=True
|
||||
)
|
||||
# Continue with next item even if one fails
|
||||
await asyncio.sleep(2)
|
||||
|
||||
# Queue processing completed
|
||||
self._is_stopped = True
|
||||
|
||||
if len(self._pending_queue) == 0:
|
||||
logger.info("Queue processing completed - all items processed")
|
||||
queue_status = await self.get_queue_status()
|
||||
await self._progress_service.complete_progress(
|
||||
progress_id="download_queue",
|
||||
message="All downloads completed",
|
||||
metadata={
|
||||
"queue_status": queue_status.model_dump(mode="json")
|
||||
},
|
||||
)
|
||||
else:
|
||||
logger.info("Queue processing stopped by user")
|
||||
|
||||
async def start_next_download(self) -> Optional[str]:
|
||||
"""Legacy method - redirects to start_queue_processing.
|
||||
|
||||
Returns:
|
||||
The string "queue_started" from start_queue_processing, or None if the queue is empty
|
||||
|
||||
Raises:
|
||||
DownloadServiceError: If a download is already active
|
||||
"""
|
||||
return await self.start_queue_processing()
|
||||
|
||||
async def stop_downloads(self) -> None:
|
||||
"""Stop processing new downloads from queue.
|
||||
|
||||
Current download will continue, but no new downloads will start.
|
||||
"""
|
||||
self._is_stopped = True
|
||||
logger.info("Download processing stopped")
|
||||
|
||||
# Notify via progress service
|
||||
queue_status = await self.get_queue_status()
|
||||
await self._progress_service.update_progress(
|
||||
progress_id="download_queue",
|
||||
message="Queue processing stopped",
|
||||
metadata={
|
||||
"action": "queue_stopped",
|
||||
"is_stopped": True,
|
||||
"queue_status": queue_status.model_dump(mode="json"),
|
||||
},
|
||||
force_broadcast=True,
|
||||
)
|
||||
|
||||
async def get_queue_status(self) -> QueueStatus:
|
||||
"""Get current status of all queues.
|
||||
@ -484,10 +501,13 @@ class DownloadService:
|
||||
Returns:
|
||||
Complete queue status with all items
|
||||
"""
|
||||
active_downloads = (
|
||||
[self._active_download] if self._active_download else []
|
||||
)
|
||||
return QueueStatus(
|
||||
is_running=self._is_running,
|
||||
is_paused=self._is_paused,
|
||||
active_downloads=list(self._active_downloads.values()),
|
||||
is_running=not self._is_stopped,
|
||||
is_paused=False, # Kept for compatibility
|
||||
active_downloads=active_downloads,
|
||||
pending_queue=list(self._pending_queue),
|
||||
completed_downloads=list(self._completed_items),
|
||||
failed_downloads=list(self._failed_items),
|
||||
@ -499,7 +519,7 @@ class DownloadService:
|
||||
Returns:
|
||||
Statistics about the download queue
|
||||
"""
|
||||
active_count = len(self._active_downloads)
|
||||
active_count = 1 if self._active_download else 0
|
||||
pending_count = len(self._pending_queue)
|
||||
completed_count = len(self._completed_items)
|
||||
failed_count = len(self._failed_items)
|
||||
@ -532,36 +552,6 @@ class DownloadService:
|
||||
estimated_time_remaining=eta_seconds,
|
||||
)
|
||||
|
||||
async def pause_queue(self) -> None:
|
||||
"""Pause download processing."""
|
||||
self._is_paused = True
|
||||
logger.info("Download queue paused")
|
||||
|
||||
# Broadcast queue status update
|
||||
queue_status = await self.get_queue_status()
|
||||
await self._broadcast_update(
|
||||
"queue_paused",
|
||||
{
|
||||
"is_paused": True,
|
||||
"queue_status": queue_status.model_dump(mode="json"),
|
||||
},
|
||||
)
|
||||
|
||||
async def resume_queue(self) -> None:
|
||||
"""Resume download processing."""
|
||||
self._is_paused = False
|
||||
logger.info("Download queue resumed")
|
||||
|
||||
# Broadcast queue status update
|
||||
queue_status = await self.get_queue_status()
|
||||
await self._broadcast_update(
|
||||
"queue_resumed",
|
||||
{
|
||||
"is_paused": False,
|
||||
"queue_status": queue_status.model_dump(mode="json"),
|
||||
},
|
||||
)
|
||||
|
||||
async def clear_completed(self) -> int:
|
||||
"""Clear completed downloads from history.
|
||||
|
||||
@ -572,16 +562,74 @@ class DownloadService:
|
||||
self._completed_items.clear()
|
||||
logger.info("Cleared completed items", count=count)
|
||||
|
||||
# Broadcast queue status update
|
||||
# Notify via progress service
|
||||
if count > 0:
|
||||
queue_status = await self.get_queue_status()
|
||||
await self._broadcast_update(
|
||||
"queue_status",
|
||||
{
|
||||
await self._progress_service.update_progress(
|
||||
progress_id="download_queue",
|
||||
message=f"Cleared {count} completed items",
|
||||
metadata={
|
||||
"action": "completed_cleared",
|
||||
"cleared_count": count,
|
||||
"queue_status": queue_status.model_dump(mode="json"),
|
||||
},
|
||||
force_broadcast=True,
|
||||
)
|
||||
|
||||
return count
|
||||
|
||||
async def clear_failed(self) -> int:
|
||||
"""Clear failed downloads from history.
|
||||
|
||||
Returns:
|
||||
Number of items cleared
|
||||
"""
|
||||
count = len(self._failed_items)
|
||||
self._failed_items.clear()
|
||||
logger.info("Cleared failed items", count=count)
|
||||
|
||||
# Notify via progress service
|
||||
if count > 0:
|
||||
queue_status = await self.get_queue_status()
|
||||
await self._progress_service.update_progress(
|
||||
progress_id="download_queue",
|
||||
message=f"Cleared {count} failed items",
|
||||
metadata={
|
||||
"action": "failed_cleared",
|
||||
"cleared_count": count,
|
||||
"queue_status": queue_status.model_dump(mode="json"),
|
||||
},
|
||||
force_broadcast=True,
|
||||
)
|
||||
|
||||
return count
|
||||
|
||||
async def clear_pending(self) -> int:
|
||||
"""Clear all pending downloads from the queue.
|
||||
|
||||
Returns:
|
||||
Number of items cleared
|
||||
"""
|
||||
count = len(self._pending_queue)
|
||||
self._pending_queue.clear()
|
||||
self._pending_items_by_id.clear()
|
||||
logger.info("Cleared pending items", count=count)
|
||||
|
||||
# Save queue state
|
||||
self._save_queue()
|
||||
|
||||
# Notify via progress service
|
||||
if count > 0:
|
||||
queue_status = await self.get_queue_status()
|
||||
await self._progress_service.update_progress(
|
||||
progress_id="download_queue",
|
||||
message=f"Cleared {count} pending items",
|
||||
metadata={
|
||||
"action": "pending_cleared",
|
||||
"cleared_count": count,
|
||||
"queue_status": queue_status.model_dump(mode="json"),
|
||||
},
|
||||
force_broadcast=True,
|
||||
)
|
||||
|
||||
return count
|
||||
@ -628,15 +676,17 @@ class DownloadService:
|
||||
|
||||
if retried_ids:
|
||||
self._save_queue()
|
||||
# Broadcast queue status update
|
||||
# Notify via progress service
|
||||
queue_status = await self.get_queue_status()
|
||||
await self._broadcast_update(
|
||||
"queue_status",
|
||||
{
|
||||
await self._progress_service.update_progress(
|
||||
progress_id="download_queue",
|
||||
message=f"Retried {len(retried_ids)} failed items",
|
||||
metadata={
|
||||
"action": "items_retried",
|
||||
"retried_ids": retried_ids,
|
||||
"queue_status": queue_status.model_dump(mode="json"),
|
||||
},
|
||||
force_broadcast=True,
|
||||
)
|
||||
|
||||
return retried_ids
|
||||
@ -647,67 +697,6 @@ class DownloadService:
|
||||
f"Failed to retry: {str(e)}"
|
||||
) from e
|
||||
|
||||
def _create_progress_callback(self, item: DownloadItem) -> Callable:
|
||||
"""Create a progress callback for a download item.
|
||||
|
||||
Args:
|
||||
item: Download item to track progress for
|
||||
|
||||
Returns:
|
||||
Callback function for progress updates
|
||||
"""
|
||||
def progress_callback(progress_data: dict) -> None:
|
||||
"""Update progress and broadcast to clients."""
|
||||
try:
|
||||
# Update item progress
|
||||
item.progress = DownloadProgress(
|
||||
percent=progress_data.get("percent", 0.0),
|
||||
downloaded_mb=progress_data.get("downloaded_mb", 0.0),
|
||||
total_mb=progress_data.get("total_mb"),
|
||||
speed_mbps=progress_data.get("speed_mbps"),
|
||||
eta_seconds=progress_data.get("eta_seconds"),
|
||||
)
|
||||
|
||||
# Track speed for statistics
|
||||
if item.progress.speed_mbps:
|
||||
self._download_speeds.append(item.progress.speed_mbps)
|
||||
|
||||
# Update progress service
|
||||
if item.progress.total_mb and item.progress.total_mb > 0:
|
||||
current_mb = int(item.progress.downloaded_mb)
|
||||
total_mb = int(item.progress.total_mb)
|
||||
|
||||
asyncio.create_task(
|
||||
self._progress_service.update_progress(
|
||||
progress_id=f"download_{item.id}",
|
||||
current=current_mb,
|
||||
total=total_mb,
|
||||
metadata={
|
||||
"speed_mbps": item.progress.speed_mbps,
|
||||
"eta_seconds": item.progress.eta_seconds,
|
||||
},
|
||||
)
|
||||
)
|
||||
|
||||
# Broadcast update (fire and forget)
|
||||
asyncio.create_task(
|
||||
self._broadcast_update(
|
||||
"download_progress",
|
||||
{
|
||||
"download_id": item.id,
|
||||
"item_id": item.id,
|
||||
"serie_name": item.serie_name,
|
||||
"season": item.episode.season,
|
||||
"episode": item.episode.episode,
|
||||
"progress": item.progress.model_dump(mode="json"),
|
||||
},
|
||||
)
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error("Progress callback error", error=str(e))
|
||||
|
||||
return progress_callback
|
||||
|
||||
async def _process_download(self, item: DownloadItem) -> None:
|
||||
"""Process a single download item.
|
||||
|
||||
@ -718,7 +707,7 @@ class DownloadService:
|
||||
# Update status
|
||||
item.status = DownloadStatus.DOWNLOADING
|
||||
item.started_at = datetime.now(timezone.utc)
|
||||
self._active_downloads[item.id] = item
|
||||
self._active_download = item
|
||||
|
||||
logger.info(
|
||||
"Starting download",
|
||||
@ -728,32 +717,17 @@ class DownloadService:
|
||||
episode=item.episode.episode,
|
||||
)
|
||||
|
||||
# Start progress tracking
|
||||
await self._progress_service.start_progress(
|
||||
progress_id=f"download_{item.id}",
|
||||
progress_type=ProgressType.DOWNLOAD,
|
||||
title=f"Downloading {item.serie_name}",
|
||||
message=(
|
||||
f"S{item.episode.season:02d}E{item.episode.episode:02d}"
|
||||
),
|
||||
metadata={
|
||||
"item_id": item.id,
|
||||
"serie_name": item.serie_name,
|
||||
"season": item.episode.season,
|
||||
"episode": item.episode.episode,
|
||||
},
|
||||
)
|
||||
|
||||
# Create progress callback
|
||||
progress_callback = self._create_progress_callback(item)
|
||||
|
||||
# Execute download via anime service
|
||||
# AnimeService handles ALL progress via SeriesApp events:
|
||||
# - download started/progress/completed/failed events
|
||||
# - All updates forwarded to ProgressService
|
||||
# - ProgressService broadcasts to WebSocket clients
|
||||
folder = item.serie_folder if item.serie_folder else item.serie_id
|
||||
success = await self._anime_service.download(
|
||||
serie_folder=item.serie_id,
|
||||
serie_folder=folder,
|
||||
season=item.episode.season,
|
||||
episode=item.episode.episode,
|
||||
key=item.serie_id, # Assuming serie_id is the provider key
|
||||
callback=progress_callback,
|
||||
key=item.serie_id,
|
||||
)
|
||||
|
||||
# Handle result
|
||||
@ -770,31 +744,6 @@ class DownloadService:
|
||||
logger.info(
|
||||
"Download completed successfully", item_id=item.id
|
||||
)
|
||||
|
||||
# Complete progress tracking
|
||||
await self._progress_service.complete_progress(
|
||||
progress_id=f"download_{item.id}",
|
||||
message="Download completed successfully",
|
||||
metadata={
|
||||
"downloaded_mb": item.progress.downloaded_mb
|
||||
if item.progress
|
||||
else 0,
|
||||
},
|
||||
)
|
||||
|
||||
await self._broadcast_update(
|
||||
"download_complete",
|
||||
{
|
||||
"download_id": item.id,
|
||||
"item_id": item.id,
|
||||
"serie_name": item.serie_name,
|
||||
"season": item.episode.season,
|
||||
"episode": item.episode.episode,
|
||||
"downloaded_mb": item.progress.downloaded_mb
|
||||
if item.progress
|
||||
else 0,
|
||||
},
|
||||
)
|
||||
else:
|
||||
raise AnimeServiceError("Download returned False")
|
||||
|
||||
@ -811,106 +760,36 @@ class DownloadService:
|
||||
error=str(e),
|
||||
retry_count=item.retry_count,
|
||||
)
|
||||
|
||||
# Fail progress tracking
|
||||
await self._progress_service.fail_progress(
|
||||
progress_id=f"download_{item.id}",
|
||||
error_message=str(e),
|
||||
metadata={"retry_count": item.retry_count},
|
||||
)
|
||||
|
||||
await self._broadcast_update(
|
||||
"download_failed",
|
||||
{
|
||||
"download_id": item.id,
|
||||
"item_id": item.id,
|
||||
"serie_name": item.serie_name,
|
||||
"season": item.episode.season,
|
||||
"episode": item.episode.episode,
|
||||
"error": item.error,
|
||||
"retry_count": item.retry_count,
|
||||
},
|
||||
)
|
||||
# Note: Failure is already broadcast by AnimeService
|
||||
# via ProgressService when SeriesApp fires failed event
|
||||
|
||||
finally:
|
||||
# Remove from active downloads
|
||||
if item.id in self._active_downloads:
|
||||
del self._active_downloads[item.id]
|
||||
if self._active_download and self._active_download.id == item.id:
|
||||
self._active_download = None
|
||||
|
||||
self._save_queue()
|
||||
|
||||
async def _queue_processor(self) -> None:
|
||||
"""Main queue processing loop."""
|
||||
logger.info("Queue processor started")
|
||||
|
||||
while not self._shutdown_event.is_set():
|
||||
try:
|
||||
# Wait if paused
|
||||
if self._is_paused:
|
||||
await asyncio.sleep(1)
|
||||
continue
|
||||
|
||||
# Check if we can start more downloads
|
||||
if len(self._active_downloads) >= self._max_concurrent:
|
||||
await asyncio.sleep(1)
|
||||
continue
|
||||
|
||||
# Get next item from queue
|
||||
if not self._pending_queue:
|
||||
await asyncio.sleep(1)
|
||||
continue
|
||||
|
||||
item = self._pending_queue.popleft()
|
||||
|
||||
# Process download in background
|
||||
asyncio.create_task(self._process_download(item))
|
||||
|
||||
except Exception as e:
|
||||
logger.error("Queue processor error", error=str(e))
|
||||
await asyncio.sleep(5)
|
||||
|
||||
logger.info("Queue processor stopped")
|
||||
|
||||
async def start(self) -> None:
|
||||
"""Start the download queue processor."""
|
||||
if self._is_running:
|
||||
logger.warning("Queue processor already running")
|
||||
return
|
||||
"""Initialize the download queue service (compatibility method).
|
||||
|
||||
self._is_running = True
|
||||
self._shutdown_event.clear()
|
||||
|
||||
# Start processor task
|
||||
asyncio.create_task(self._queue_processor())
|
||||
|
||||
logger.info("Download queue service started")
|
||||
|
||||
# Broadcast queue started event
|
||||
queue_status = await self.get_queue_status()
|
||||
await self._broadcast_update(
|
||||
"queue_started",
|
||||
{
|
||||
"is_running": True,
|
||||
"queue_status": queue_status.model_dump(mode="json"),
|
||||
},
|
||||
)
|
||||
Note: Downloads are started manually via start_next_download().
|
||||
"""
|
||||
logger.info("Download queue service initialized")
|
||||
|
||||
async def stop(self) -> None:
|
||||
"""Stop the download queue processor."""
|
||||
if not self._is_running:
|
||||
return
|
||||
"""Stop the download queue service and wait for active download.
|
||||
|
||||
Note: This waits for the current download to complete.
|
||||
"""
|
||||
logger.info("Stopping download queue service...")
|
||||
|
||||
self._is_running = False
|
||||
self._shutdown_event.set()
|
||||
|
||||
# Wait for active downloads to complete (with timeout)
|
||||
# Wait for active download to complete (with timeout)
|
||||
timeout = 30 # seconds
|
||||
start_time = asyncio.get_event_loop().time()
|
||||
|
||||
while (
|
||||
self._active_downloads
|
||||
self._active_download
|
||||
and (asyncio.get_event_loop().time() - start_time) < timeout
|
||||
):
|
||||
await asyncio.sleep(1)
|
||||
@ -923,16 +802,6 @@ class DownloadService:
|
||||
|
||||
logger.info("Download queue service stopped")
|
||||
|
||||
# Broadcast queue stopped event
|
||||
queue_status = await self.get_queue_status()
|
||||
await self._broadcast_update(
|
||||
"queue_stopped",
|
||||
{
|
||||
"is_running": False,
|
||||
"queue_status": queue_status.model_dump(mode="json"),
|
||||
},
|
||||
)
|
||||
|
||||
|
||||
# Singleton instance
|
||||
_download_service_instance: Optional[DownloadService] = None
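Because `start()` is now only an initializer and `stop()` waits for the active download, wiring the service into application startup and shutdown stays simple. A sketch, assuming the app owns the singleton via `get_download_service()` and that the accessor's import path is `src.server.dependencies` (the path is not shown in this hunk):

```python
# Hypothetical lifespan wiring; the accessor import path is an assumption.
from contextlib import asynccontextmanager

from fastapi import FastAPI

from src.server.dependencies import get_download_service


@asynccontextmanager
async def lifespan(app: FastAPI):
    service = get_download_service()
    await service.start()  # compatibility no-op: just logs initialization
    yield
    await service.stop()   # waits up to ~30s for the active download to finish


app = FastAPI(lifespan=lifespan)
```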
|
||||
|
||||
@ -1,324 +0,0 @@
|
||||
"""Monitoring service for system resource tracking and metrics collection."""
|
||||
|
||||
import logging
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
import psutil
|
||||
from sqlalchemy import select
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from src.server.database.models import DownloadQueueItem
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass
|
||||
class QueueMetrics:
|
||||
"""Download queue statistics and metrics."""
|
||||
|
||||
total_items: int = 0
|
||||
pending_items: int = 0
|
||||
downloading_items: int = 0
|
||||
completed_items: int = 0
|
||||
failed_items: int = 0
|
||||
total_size_bytes: int = 0
|
||||
downloaded_bytes: int = 0
|
||||
average_speed_mbps: float = 0.0
|
||||
estimated_time_remaining: Optional[timedelta] = None
|
||||
success_rate: float = 0.0
|
||||
|
||||
|
||||
@dataclass
|
||||
class SystemMetrics:
|
||||
"""System resource metrics at a point in time."""
|
||||
|
||||
timestamp: datetime
|
||||
cpu_percent: float
|
||||
memory_percent: float
|
||||
memory_available_mb: float
|
||||
disk_percent: float
|
||||
disk_free_mb: float
|
||||
uptime_seconds: float
|
||||
|
||||
|
||||
@dataclass
|
||||
class ErrorMetrics:
|
||||
"""Error tracking and statistics."""
|
||||
|
||||
total_errors: int = 0
|
||||
errors_24h: int = 0
|
||||
most_common_errors: Dict[str, int] = field(default_factory=dict)
|
||||
last_error_time: Optional[datetime] = None
|
||||
error_rate_per_hour: float = 0.0
|
||||
|
||||
|
||||
class MonitoringService:
|
||||
"""Service for monitoring system resources and application metrics."""
|
||||
|
||||
def __init__(self):
|
||||
"""Initialize monitoring service."""
|
||||
self._error_log: List[tuple[datetime, str]] = []
|
||||
self._performance_samples: List[SystemMetrics] = []
|
||||
self._max_samples = 1440 # Keep 24 hours of minute samples
|
||||
|
||||
def get_system_metrics(self) -> SystemMetrics:
|
||||
"""Get current system resource metrics.
|
||||
|
||||
Returns:
|
||||
SystemMetrics: Current system metrics.
|
||||
"""
|
||||
try:
|
||||
import time
|
||||
|
||||
cpu_percent = psutil.cpu_percent(interval=1)
|
||||
memory_info = psutil.virtual_memory()
|
||||
disk_info = psutil.disk_usage("/")
|
||||
boot_time = psutil.boot_time()
|
||||
uptime_seconds = time.time() - boot_time
|
||||
|
||||
metrics = SystemMetrics(
|
||||
timestamp=datetime.now(),
|
||||
cpu_percent=cpu_percent,
|
||||
memory_percent=memory_info.percent,
|
||||
memory_available_mb=memory_info.available / (1024 * 1024),
|
||||
disk_percent=disk_info.percent,
|
||||
disk_free_mb=disk_info.free / (1024 * 1024),
|
||||
uptime_seconds=uptime_seconds,
|
||||
)
|
||||
|
||||
# Store sample
|
||||
self._performance_samples.append(metrics)
|
||||
if len(self._performance_samples) > self._max_samples:
|
||||
self._performance_samples.pop(0)
|
||||
|
||||
return metrics
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get system metrics: {e}")
|
||||
raise
|
||||
|
||||
async def get_queue_metrics(self, db: AsyncSession) -> QueueMetrics:
|
||||
"""Get download queue metrics.
|
||||
|
||||
Args:
|
||||
db: Database session.
|
||||
|
||||
Returns:
|
||||
QueueMetrics: Queue statistics and progress.
|
||||
"""
|
||||
try:
|
||||
# Get all queue items
|
||||
result = await db.execute(select(DownloadQueueItem))
|
||||
items = result.scalars().all()
|
||||
|
||||
if not items:
|
||||
return QueueMetrics()
|
||||
|
||||
# Calculate metrics
|
||||
total_items = len(items)
|
||||
pending_items = sum(1 for i in items if i.status == "PENDING")
|
||||
downloading_items = sum(
|
||||
1 for i in items if i.status == "DOWNLOADING"
|
||||
)
|
||||
completed_items = sum(1 for i in items if i.status == "COMPLETED")
|
||||
failed_items = sum(1 for i in items if i.status == "FAILED")
|
||||
|
||||
total_size_bytes = sum(
|
||||
(i.total_bytes or 0) for i in items
|
||||
)
|
||||
downloaded_bytes = sum(
|
||||
(i.downloaded_bytes or 0) for i in items
|
||||
)
|
||||
|
||||
# Calculate average speed from active downloads
|
||||
speeds = [
|
||||
i.download_speed for i in items
|
||||
if i.status == "DOWNLOADING" and i.download_speed
|
||||
]
|
||||
average_speed_mbps = (
|
||||
sum(speeds) / len(speeds) / (1024 * 1024) if speeds else 0
|
||||
)
|
||||
|
||||
# Calculate success rate
|
||||
success_rate = (
|
||||
(completed_items / total_items * 100) if total_items > 0 else 0
|
||||
)
|
||||
|
||||
# Estimate time remaining
|
||||
estimated_time_remaining = None
|
||||
if average_speed_mbps > 0 and total_size_bytes > downloaded_bytes:
|
||||
remaining_bytes = total_size_bytes - downloaded_bytes
|
||||
remaining_seconds = remaining_bytes / average_speed_mbps
|
||||
estimated_time_remaining = timedelta(seconds=remaining_seconds)
|
||||
|
||||
return QueueMetrics(
|
||||
total_items=total_items,
|
||||
pending_items=pending_items,
|
||||
downloading_items=downloading_items,
|
||||
completed_items=completed_items,
|
||||
failed_items=failed_items,
|
||||
total_size_bytes=total_size_bytes,
|
||||
downloaded_bytes=downloaded_bytes,
|
||||
average_speed_mbps=average_speed_mbps,
|
||||
estimated_time_remaining=estimated_time_remaining,
|
||||
success_rate=success_rate,
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get queue metrics: {e}")
|
||||
raise
|
||||
|
||||
def log_error(self, error_message: str) -> None:
|
||||
"""Log an error for tracking purposes.
|
||||
|
||||
Args:
|
||||
error_message: The error message to log.
|
||||
"""
|
||||
self._error_log.append((datetime.now(), error_message))
|
||||
logger.debug(f"Error logged: {error_message}")
|
||||
|
||||
def get_error_metrics(self) -> ErrorMetrics:
|
||||
"""Get error tracking metrics.
|
||||
|
||||
Returns:
|
||||
ErrorMetrics: Error statistics and trends.
|
||||
"""
|
||||
total_errors = len(self._error_log)
|
||||
|
||||
# Get errors from last 24 hours
|
||||
cutoff_time = datetime.now() - timedelta(hours=24)
|
||||
recent_errors = [
|
||||
(time, msg) for time, msg in self._error_log
|
||||
if time >= cutoff_time
|
||||
]
|
||||
errors_24h = len(recent_errors)
|
||||
|
||||
# Count error types
|
||||
error_counts: Dict[str, int] = {}
|
||||
for _, msg in recent_errors:
|
||||
error_type = msg.split(":")[0]
|
||||
error_counts[error_type] = error_counts.get(error_type, 0) + 1
|
||||
|
||||
# Sort by count
|
||||
most_common_errors = dict(
|
||||
sorted(error_counts.items(), key=lambda x: x[1], reverse=True)[:10]
|
||||
)
|
||||
|
||||
# Get last error time
|
||||
last_error_time = (
|
||||
recent_errors[-1][0] if recent_errors else None
|
||||
)
|
||||
|
||||
# Calculate error rate per hour
|
||||
error_rate_per_hour = (
|
||||
errors_24h / 24 if errors_24h > 0 else 0
|
||||
)
|
||||
|
||||
return ErrorMetrics(
|
||||
total_errors=total_errors,
|
||||
errors_24h=errors_24h,
|
||||
most_common_errors=most_common_errors,
|
||||
last_error_time=last_error_time,
|
||||
error_rate_per_hour=error_rate_per_hour,
|
||||
)
|
||||
|
||||
def get_performance_summary(self) -> Dict[str, Any]:
|
||||
"""Get performance summary from collected samples.
|
||||
|
||||
Returns:
|
||||
dict: Performance statistics.
|
||||
"""
|
||||
if not self._performance_samples:
|
||||
return {}
|
||||
|
||||
cpu_values = [m.cpu_percent for m in self._performance_samples]
|
||||
memory_values = [m.memory_percent for m in self._performance_samples]
|
||||
disk_values = [m.disk_percent for m in self._performance_samples]
|
||||
|
||||
return {
|
||||
"cpu": {
|
||||
"current": cpu_values[-1],
|
||||
"average": sum(cpu_values) / len(cpu_values),
|
||||
"max": max(cpu_values),
|
||||
"min": min(cpu_values),
|
||||
},
|
||||
"memory": {
|
||||
"current": memory_values[-1],
|
||||
"average": sum(memory_values) / len(memory_values),
|
||||
"max": max(memory_values),
|
||||
"min": min(memory_values),
|
||||
},
|
||||
"disk": {
|
||||
"current": disk_values[-1],
|
||||
"average": sum(disk_values) / len(disk_values),
|
||||
"max": max(disk_values),
|
||||
"min": min(disk_values),
|
||||
},
|
||||
"sample_count": len(self._performance_samples),
|
||||
}
|
||||
|
||||
async def get_comprehensive_status(
|
||||
self, db: AsyncSession
|
||||
) -> Dict[str, Any]:
|
||||
"""Get comprehensive system status summary.
|
||||
|
||||
Args:
|
||||
db: Database session.
|
||||
|
||||
Returns:
|
||||
dict: Complete system status.
|
||||
"""
|
||||
try:
|
||||
system_metrics = self.get_system_metrics()
|
||||
queue_metrics = await self.get_queue_metrics(db)
|
||||
error_metrics = self.get_error_metrics()
|
||||
performance = self.get_performance_summary()
|
||||
|
||||
return {
|
||||
"timestamp": datetime.now().isoformat(),
|
||||
"system": {
|
||||
"cpu_percent": system_metrics.cpu_percent,
|
||||
"memory_percent": system_metrics.memory_percent,
|
||||
"disk_percent": system_metrics.disk_percent,
|
||||
"uptime_seconds": system_metrics.uptime_seconds,
|
||||
},
|
||||
"queue": {
|
||||
"total_items": queue_metrics.total_items,
|
||||
"pending": queue_metrics.pending_items,
|
||||
"downloading": queue_metrics.downloading_items,
|
||||
"completed": queue_metrics.completed_items,
|
||||
"failed": queue_metrics.failed_items,
|
||||
"success_rate": round(queue_metrics.success_rate, 2),
|
||||
"average_speed_mbps": round(
|
||||
queue_metrics.average_speed_mbps, 2
|
||||
),
|
||||
},
|
||||
"errors": {
|
||||
"total": error_metrics.total_errors,
|
||||
"last_24h": error_metrics.errors_24h,
|
||||
"rate_per_hour": round(
|
||||
error_metrics.error_rate_per_hour, 2
|
||||
),
|
||||
"most_common": error_metrics.most_common_errors,
|
||||
},
|
||||
"performance": performance,
|
||||
}
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get comprehensive status: {e}")
|
||||
raise
|
||||
|
||||
|
||||
# Global monitoring service instance
|
||||
_monitoring_service: Optional[MonitoringService] = None
|
||||
|
||||
|
||||
def get_monitoring_service() -> MonitoringService:
|
||||
"""Get or create the global monitoring service instance.
|
||||
|
||||
Returns:
|
||||
MonitoringService: The monitoring service instance.
|
||||
"""
|
||||
global _monitoring_service
|
||||
if _monitoring_service is None:
|
||||
_monitoring_service = MonitoringService()
|
||||
return _monitoring_service
|
||||
@ -11,7 +11,7 @@ import asyncio
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime, timezone
|
||||
from enum import Enum
|
||||
from typing import Any, Callable, Dict, Optional
|
||||
from typing import Any, Callable, Dict, List, Optional
|
||||
|
||||
import structlog
|
||||
|
||||
@ -85,6 +85,30 @@ class ProgressUpdate:
|
||||
}
|
||||
|
||||
|
||||
@dataclass
|
||||
class ProgressEvent:
|
||||
"""Represents a progress event for subscribers.
|
||||
|
||||
Attributes:
|
||||
event_type: Type of event (e.g., 'download_progress')
|
||||
progress_id: Unique identifier for the progress operation
|
||||
progress: The progress update data
|
||||
room: WebSocket room to broadcast to (default: 'progress')
|
||||
"""
|
||||
|
||||
event_type: str
|
||||
progress_id: str
|
||||
progress: ProgressUpdate
|
||||
room: str = "progress"
|
||||
|
||||
def to_dict(self) -> Dict[str, Any]:
|
||||
"""Convert event to dictionary for broadcasting."""
|
||||
return {
|
||||
"type": self.event_type,
|
||||
"data": self.progress.to_dict(),
|
||||
}
|
||||
|
||||
|
||||
class ProgressServiceError(Exception):
|
||||
"""Service-level exception for progress operations."""
|
||||
|
||||
@ -109,42 +133,80 @@ class ProgressService:
|
||||
self._history: Dict[str, ProgressUpdate] = {}
|
||||
self._max_history_size = 50
|
||||
|
||||
# WebSocket broadcast callback
|
||||
self._broadcast_callback: Optional[Callable] = None
|
||||
# Event subscribers: event_name -> list of handlers
|
||||
self._event_handlers: Dict[
|
||||
str, List[Callable[[ProgressEvent], None]]
|
||||
] = {}
|
||||
|
||||
# Lock for thread-safe operations
|
||||
self._lock = asyncio.Lock()
|
||||
|
||||
logger.info("ProgressService initialized")
|
||||
|
||||
def set_broadcast_callback(self, callback: Callable) -> None:
|
||||
"""Set callback for broadcasting progress updates via WebSocket.
|
||||
def subscribe(
|
||||
self, event_name: str, handler: Callable[[ProgressEvent], None]
|
||||
) -> None:
|
||||
"""Subscribe to progress events.
|
||||
|
||||
Args:
|
||||
callback: Async function to call for broadcasting updates
|
||||
event_name: Name of event to subscribe to
|
||||
(e.g., 'progress_updated')
|
||||
handler: Async function to call when event occurs
|
||||
"""
|
||||
self._broadcast_callback = callback
|
||||
logger.debug("Progress broadcast callback registered")
|
||||
if event_name not in self._event_handlers:
|
||||
self._event_handlers[event_name] = []
|
||||
|
||||
async def _broadcast(self, update: ProgressUpdate, room: str) -> None:
|
||||
"""Broadcast progress update to WebSocket clients.
|
||||
self._event_handlers[event_name].append(handler)
|
||||
logger.debug("Event handler subscribed", event=event_name)
|
||||
|
||||
def unsubscribe(
|
||||
self, event_name: str, handler: Callable[[ProgressEvent], None]
|
||||
) -> None:
|
||||
"""Unsubscribe from progress events.
|
||||
|
||||
Args:
|
||||
update: Progress update to broadcast
|
||||
room: WebSocket room to broadcast to
|
||||
event_name: Name of event to unsubscribe from
|
||||
handler: Handler function to remove
|
||||
"""
|
||||
if self._broadcast_callback:
|
||||
if event_name in self._event_handlers:
|
||||
try:
|
||||
await self._broadcast_callback(
|
||||
message_type=f"{update.type.value}_progress",
|
||||
data=update.to_dict(),
|
||||
room=room,
|
||||
self._event_handlers[event_name].remove(handler)
|
||||
logger.debug("Event handler unsubscribed", event=event_name)
|
||||
except ValueError:
|
||||
logger.warning(
|
||||
"Handler not found for unsubscribe", event=event_name
|
||||
)
|
||||
except Exception as e:
|
||||
|
||||
async def _emit_event(self, event: ProgressEvent) -> None:
|
||||
"""Emit event to all subscribers.
|
||||
|
||||
Args:
|
||||
event: Progress event to emit
|
||||
|
||||
Note:
|
||||
Errors in individual handlers are logged but do not
|
||||
prevent other handlers from executing.
|
||||
"""
|
||||
event_name = "progress_updated"
|
||||
|
||||
if event_name in self._event_handlers:
|
||||
handlers = self._event_handlers[event_name]
|
||||
if handlers:
|
||||
# Execute all handlers, capturing exceptions
|
||||
tasks = [handler(event) for handler in handlers]
|
||||
# Ignore type error - tasks will be coroutines at runtime
|
||||
results = await asyncio.gather(
|
||||
*tasks, return_exceptions=True
|
||||
) # type: ignore[arg-type]
|
||||
|
||||
# Log any exceptions that occurred
|
||||
for idx, result in enumerate(results):
|
||||
if isinstance(result, Exception):
|
||||
logger.error(
|
||||
"Failed to broadcast progress update",
|
||||
error=str(e),
|
||||
progress_id=update.id,
|
||||
"Event handler raised exception",
|
||||
event=event_name,
|
||||
error=str(result),
|
||||
handler_index=idx,
|
||||
)
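To make the new subscription model concrete, here is a minimal sketch of registering an async handler; `get_progress_service` is the module-level accessor already imported elsewhere in this change, and the handler simply logs each event:

```python
from src.server.services.progress_service import ProgressEvent, get_progress_service


async def log_progress(event: ProgressEvent) -> None:
    # Handlers are awaited via asyncio.gather, so they must be coroutines;
    # exceptions raised here are logged by _emit_event and do not stop others.
    print(f"[{event.room}] {event.event_type}: {event.progress_id}")


service = get_progress_service()
service.subscribe("progress_updated", log_progress)

# ...later, if the handler should stop receiving events:
service.unsubscribe("progress_updated", log_progress)
```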
|
||||
|
||||
async def start_progress(
|
||||
@ -197,9 +259,15 @@ class ProgressService:
|
||||
title=title,
|
||||
)
|
||||
|
||||
# Broadcast to appropriate room
|
||||
# Emit event to subscribers
|
||||
room = f"{progress_type.value}_progress"
|
||||
await self._broadcast(update, room)
|
||||
event = ProgressEvent(
|
||||
event_type=f"{progress_type.value}_progress",
|
||||
progress_id=progress_id,
|
||||
progress=update,
|
||||
room=room,
|
||||
)
|
||||
await self._emit_event(event)
|
||||
|
||||
return update
|
||||
|
||||
@ -262,7 +330,13 @@ class ProgressService:
|
||||
|
||||
if should_broadcast:
|
||||
room = f"{update.type.value}_progress"
|
||||
await self._broadcast(update, room)
|
||||
event = ProgressEvent(
|
||||
event_type=f"{update.type.value}_progress",
|
||||
progress_id=progress_id,
|
||||
progress=update,
|
||||
room=room,
|
||||
)
|
||||
await self._emit_event(event)
|
||||
|
||||
return update
|
||||
|
||||
@ -311,9 +385,15 @@ class ProgressService:
|
||||
type=update.type.value,
|
||||
)
|
||||
|
||||
# Broadcast completion
|
||||
# Emit completion event
|
||||
room = f"{update.type.value}_progress"
|
||||
await self._broadcast(update, room)
|
||||
event = ProgressEvent(
|
||||
event_type=f"{update.type.value}_progress",
|
||||
progress_id=progress_id,
|
||||
progress=update,
|
||||
room=room,
|
||||
)
|
||||
await self._emit_event(event)
|
||||
|
||||
return update
|
||||
|
||||
@ -361,9 +441,15 @@ class ProgressService:
|
||||
error=error_message,
|
||||
)
|
||||
|
||||
# Broadcast failure
|
||||
# Emit failure event
|
||||
room = f"{update.type.value}_progress"
|
||||
await self._broadcast(update, room)
|
||||
event = ProgressEvent(
|
||||
event_type=f"{update.type.value}_progress",
|
||||
progress_id=progress_id,
|
||||
progress=update,
|
||||
room=room,
|
||||
)
|
||||
await self._emit_event(event)
|
||||
|
||||
return update
|
||||
|
||||
@ -405,9 +491,15 @@ class ProgressService:
|
||||
type=update.type.value,
|
||||
)
|
||||
|
||||
# Broadcast cancellation
|
||||
# Emit cancellation event
|
||||
room = f"{update.type.value}_progress"
|
||||
await self._broadcast(update, room)
|
||||
event = ProgressEvent(
|
||||
event_type=f"{update.type.value}_progress",
|
||||
progress_id=progress_id,
|
||||
progress=update,
|
||||
room=room,
|
||||
)
|
||||
await self._emit_event(event)
|
||||
|
||||
return update
|
||||
|
||||
|
||||
@ -103,40 +103,6 @@ def reset_series_app() -> None:
|
||||
_series_app = None
|
||||
|
||||
|
||||
def get_optional_series_app() -> Optional[SeriesApp]:
|
||||
"""
|
||||
Dependency to optionally get SeriesApp instance.
|
||||
|
||||
Returns None if not configured instead of raising an exception.
|
||||
Useful for endpoints that can validate input before needing the service.
|
||||
|
||||
Returns:
|
||||
Optional[SeriesApp]: The main application instance or None
|
||||
"""
|
||||
global _series_app
|
||||
|
||||
# Try to load anime_directory from config.json if not in settings
|
||||
if not settings.anime_directory:
|
||||
try:
|
||||
from src.server.services.config_service import get_config_service
|
||||
config_service = get_config_service()
|
||||
config = config_service.load_config()
|
||||
if config.other and config.other.get("anime_directory"):
|
||||
settings.anime_directory = str(config.other["anime_directory"])
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
if not settings.anime_directory:
|
||||
return None
|
||||
|
||||
if _series_app is None:
|
||||
try:
|
||||
_series_app = SeriesApp(settings.anime_directory)
|
||||
except Exception:
|
||||
return None
|
||||
|
||||
return _series_app
|
||||
|
||||
|
||||
async def get_database_session() -> AsyncGenerator:
|
||||
"""
|
||||
@ -389,7 +355,9 @@ def get_anime_service() -> "AnimeService":
|
||||
try:
|
||||
from src.server.services.anime_service import AnimeService
|
||||
|
||||
_anime_service = AnimeService(settings.anime_directory)
|
||||
# Get the singleton SeriesApp instance
|
||||
series_app = get_series_app()
|
||||
_anime_service = AnimeService(series_app)
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
@ -416,39 +384,14 @@ def get_download_service() -> "DownloadService":
|
||||
|
||||
if _download_service is None:
|
||||
try:
|
||||
from src.server.services import (
|
||||
websocket_service as websocket_service_module,
|
||||
)
|
||||
from src.server.services.download_service import DownloadService
|
||||
|
||||
anime_service = get_anime_service()
|
||||
_download_service = DownloadService(anime_service)
|
||||
|
||||
ws_service = websocket_service_module.get_websocket_service()
|
||||
|
||||
async def broadcast_callback(update_type: str, data: dict) -> None:
|
||||
"""Broadcast download updates via WebSocket."""
|
||||
if update_type == "download_progress":
|
||||
await ws_service.broadcast_download_progress(
|
||||
data.get("download_id", ""),
|
||||
data,
|
||||
)
|
||||
elif update_type == "download_complete":
|
||||
await ws_service.broadcast_download_complete(
|
||||
data.get("download_id", ""),
|
||||
data,
|
||||
)
|
||||
elif update_type == "download_failed":
|
||||
await ws_service.broadcast_download_failed(
|
||||
data.get("download_id", ""),
|
||||
data,
|
||||
)
|
||||
elif update_type == "queue_status":
|
||||
await ws_service.broadcast_queue_status(data)
|
||||
else:
|
||||
await ws_service.broadcast_queue_status(data)
|
||||
|
||||
_download_service.set_broadcast_callback(broadcast_callback)
|
||||
# Note: DownloadService no longer needs broadcast callbacks.
|
||||
# Progress updates flow through:
|
||||
# SeriesApp → AnimeService → ProgressService → WebSocketService
|
||||
|
||||
except HTTPException:
|
||||
raise
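Since the download service is now obtained purely through this dependency (no broadcast wiring), a route can request it with `Depends`. A sketch, assuming the accessor lives at `src.server.dependencies` (the exact module path is not shown in this hunk) and an illustrative route path:

```python
from fastapi import APIRouter, Depends

from src.server.dependencies import get_download_service  # path assumed
from src.server.services.download_service import DownloadService

router = APIRouter()


@router.get("/api/queue/status")
async def read_queue_status(
    service: DownloadService = Depends(get_download_service),
) -> dict:
    # QueueStatus is a pydantic model, so model_dump(mode="json") is JSON-safe.
    status = await service.get_queue_status()
    return status.model_dump(mode="json")
```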
|
||||
|
||||
@ -1218,6 +1218,52 @@ body {
|
||||
background: linear-gradient(90deg, rgba(var(--color-accent-rgb), 0.05) 0%, transparent 10%);
|
||||
}
|
||||
|
||||
/* Drag and Drop Styles */
|
||||
.draggable-item {
|
||||
cursor: move;
|
||||
user-select: none;
|
||||
}
|
||||
|
||||
.draggable-item.dragging {
|
||||
opacity: 0.5;
|
||||
transform: scale(0.98);
|
||||
cursor: grabbing;
|
||||
}
|
||||
|
||||
.draggable-item.drag-over {
|
||||
border-top: 3px solid var(--color-primary);
|
||||
margin-top: 8px;
|
||||
}
|
||||
|
||||
.drag-handle {
|
||||
position: absolute;
|
||||
left: 8px;
|
||||
top: 50%;
|
||||
transform: translateY(-50%);
|
||||
color: var(--color-text-tertiary);
|
||||
cursor: grab;
|
||||
font-size: 1.2rem;
|
||||
padding: var(--spacing-xs);
|
||||
transition: color var(--transition-duration);
|
||||
}
|
||||
|
||||
.drag-handle:hover {
|
||||
color: var(--color-primary);
|
||||
}
|
||||
|
||||
.drag-handle:active {
|
||||
cursor: grabbing;
|
||||
}
|
||||
|
||||
.sortable-list {
|
||||
position: relative;
|
||||
min-height: 100px;
|
||||
}
|
||||
|
||||
.pending-queue-list {
|
||||
position: relative;
|
||||
}
|
||||
|
||||
.download-header {
|
||||
display: flex;
|
||||
justify-content: space-between;
|
||||
@ -1261,11 +1307,11 @@ body {
|
||||
.queue-position {
|
||||
position: absolute;
|
||||
top: var(--spacing-sm);
|
||||
left: var(--spacing-sm);
|
||||
left: 48px;
|
||||
background: var(--color-warning);
|
||||
color: white;
|
||||
width: 24px;
|
||||
height: 24px;
|
||||
width: 28px;
|
||||
height: 28px;
|
||||
border-radius: 50%;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
@ -1275,7 +1321,18 @@ body {
|
||||
}
|
||||
|
||||
.download-card.pending .download-info {
|
||||
margin-left: 40px;
|
||||
margin-left: 80px;
|
||||
}
|
||||
|
||||
.download-card.pending .download-header {
|
||||
padding-left: 0;
|
||||
}
|
||||
|
||||
.empty-state small {
|
||||
display: block;
|
||||
margin-top: var(--spacing-sm);
|
||||
font-size: var(--font-size-small);
|
||||
opacity: 0.7;
|
||||
}
|
||||
|
||||
/* Progress Bars */
|
||||
|
||||
@ -1,77 +0,0 @@
|
||||
/**
|
||||
* Accessibility Features Module
|
||||
* Enhances accessibility for all users
|
||||
*/
|
||||
|
||||
(function() {
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* Initialize accessibility features
|
||||
*/
|
||||
function initAccessibilityFeatures() {
|
||||
setupFocusManagement();
|
||||
setupAriaLabels();
|
||||
console.log('[Accessibility Features] Initialized');
|
||||
}
|
||||
|
||||
/**
|
||||
* Setup focus management
|
||||
*/
|
||||
function setupFocusManagement() {
|
||||
// Add focus visible class for keyboard navigation
|
||||
document.addEventListener('keydown', (e) => {
|
||||
if (e.key === 'Tab') {
|
||||
document.body.classList.add('keyboard-navigation');
|
||||
}
|
||||
});
|
||||
|
||||
document.addEventListener('mousedown', () => {
|
||||
document.body.classList.remove('keyboard-navigation');
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Setup ARIA labels for dynamic content
|
||||
*/
|
||||
function setupAriaLabels() {
|
||||
// Ensure all interactive elements have proper ARIA labels
|
||||
const buttons = document.querySelectorAll('button:not([aria-label])');
|
||||
buttons.forEach(button => {
|
||||
if (!button.getAttribute('aria-label') && button.title) {
|
||||
button.setAttribute('aria-label', button.title);
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Announce message to screen readers
|
||||
*/
|
||||
function announceToScreenReader(message, priority = 'polite') {
|
||||
const announcement = document.createElement('div');
|
||||
announcement.setAttribute('role', 'status');
|
||||
announcement.setAttribute('aria-live', priority);
|
||||
announcement.setAttribute('aria-atomic', 'true');
|
||||
announcement.className = 'sr-only';
|
||||
announcement.textContent = message;
|
||||
|
||||
document.body.appendChild(announcement);
|
||||
|
||||
setTimeout(() => {
|
||||
announcement.remove();
|
||||
}, 1000);
|
||||
}
|
||||
|
||||
// Export functions
|
||||
window.Accessibility = {
|
||||
announce: announceToScreenReader
|
||||
};
|
||||
|
||||
// Initialize on DOM ready
|
||||
if (document.readyState === 'loading') {
|
||||
document.addEventListener('DOMContentLoaded', initAccessibilityFeatures);
|
||||
} else {
|
||||
initAccessibilityFeatures();
|
||||
}
|
||||
|
||||
})();
|
||||
@@ -1,29 +0,0 @@
/**
* Advanced Search Module
* Provides advanced search and filtering capabilities
*/

(function() {
'use strict';

/**
* Initialize advanced search
*/
function initAdvancedSearch() {
console.log('[Advanced Search] Module loaded (functionality to be implemented)');

// TODO: Implement advanced search features
// - Filter by genre
// - Filter by year
// - Filter by status
// - Sort options
}

// Initialize on DOM ready
if (document.readyState === 'loading') {
document.addEventListener('DOMContentLoaded', initAdvancedSearch);
} else {
initAdvancedSearch();
}

})();
@@ -26,10 +26,6 @@ class AniWorldApp {
this.loadSeries();
this.initTheme();
this.updateConnectionStatus();
this.startProcessStatusMonitoring();

// Initialize Mobile & Accessibility features
this.initMobileAndAccessibility();
}

async checkAuthentication() {
@@ -196,7 +192,6 @@ class AniWorldApp {

this.showToast(this.localization.getText('connected-server'), 'success');
this.updateConnectionStatus();
this.checkProcessLocks();
});

this.socket.on('disconnect', () => {
@@ -505,19 +500,6 @@ class AniWorldApp {
this.hideStatus();
});

// Download controls
document.getElementById('pause-download').addEventListener('click', () => {
this.pauseDownload();
});

document.getElementById('resume-download').addEventListener('click', () => {
this.resumeDownload();
});

document.getElementById('cancel-download').addEventListener('click', () => {
this.cancelDownload();
});

// Logout functionality
document.getElementById('logout-btn').addEventListener('click', () => {
this.logout();
@@ -834,10 +816,13 @@ class AniWorldApp {
if (!response) return;
const data = await response.json();

if (data.status === 'success') {
// Check if response is a direct array (new format) or wrapped object (legacy)
if (Array.isArray(data)) {
this.displaySearchResults(data);
} else if (data.status === 'success') {
this.displaySearchResults(data.results);
} else {
this.showToast(`Search error: ${data.message}`, 'error');
this.showToast(`Search error: ${data.message || 'Unknown error'}`, 'error');
}
} catch (error) {
console.error('Search error:', error);
@@ -902,6 +887,7 @@ class AniWorldApp {
}

async downloadSelected() {
console.log('=== downloadSelected v1.1 - DEBUG VERSION ===');
if (this.selectedSeries.size === 0) {
this.showToast('No series selected', 'warning');
return;
@@ -909,22 +895,104 @@
|
||||
|
||||
try {
|
||||
const folders = Array.from(this.selectedSeries);
|
||||
console.log('=== Starting download for selected series ===');
|
||||
console.log('Selected folders:', folders);
|
||||
console.log('seriesData:', this.seriesData);
|
||||
let totalEpisodesAdded = 0;
|
||||
let failedSeries = [];
|
||||
|
||||
const response = await this.makeAuthenticatedRequest('/api/anime/download', {
|
||||
// For each selected series, get its missing episodes and add to queue
|
||||
for (const folder of folders) {
|
||||
const serie = this.seriesData.find(s => s.folder === folder);
|
||||
if (!serie || !serie.episodeDict) {
|
||||
console.error('Serie not found or has no episodeDict:', folder, serie);
|
||||
failedSeries.push(folder);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Validate required fields
|
||||
if (!serie.key) {
|
||||
console.error('Serie missing key:', serie);
|
||||
failedSeries.push(folder);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Convert episodeDict format {season: [episodes]} to episode identifiers
|
||||
const episodes = [];
|
||||
for (const [season, episodeNumbers] of Object.entries(serie.episodeDict)) {
|
||||
if (Array.isArray(episodeNumbers)) {
|
||||
for (const episode of episodeNumbers) {
|
||||
episodes.push({
|
||||
season: parseInt(season),
|
||||
episode: episode
|
||||
});
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if (episodes.length === 0) {
|
||||
console.log('No episodes to add for serie:', serie.name);
|
||||
continue;
|
||||
}
|
||||
|
||||
// Use folder name as fallback if serie name is empty
|
||||
const serieName = serie.name && serie.name.trim() ? serie.name : serie.folder;
|
||||
|
||||
// Add episodes to download queue
|
||||
const requestBody = {
|
||||
serie_id: serie.key,
|
||||
serie_folder: serie.folder,
|
||||
serie_name: serieName,
|
||||
episodes: episodes,
|
||||
priority: 'NORMAL'
|
||||
};
|
||||
console.log('Sending queue add request:', requestBody);
|
||||
|
||||
const response = await this.makeAuthenticatedRequest('/api/queue/add', {
|
||||
method: 'POST',
|
||||
headers: {
|
||||
'Content-Type': 'application/json',
|
||||
},
|
||||
body: JSON.stringify({ folders })
|
||||
body: JSON.stringify(requestBody)
|
||||
});
|
||||
|
||||
if (!response) return;
|
||||
const data = await response.json();
|
||||
if (!response) {
|
||||
failedSeries.push(folder);
|
||||
continue;
|
||||
}
|
||||
|
||||
if (data.status === 'success') {
|
||||
this.showToast('Download started', 'success');
|
||||
const data = await response.json();
|
||||
console.log('Queue add response:', response.status, data);
|
||||
|
||||
// Log validation errors in detail
|
||||
if (data.detail && Array.isArray(data.detail)) {
|
||||
console.error('Validation errors:', JSON.stringify(data.detail, null, 2));
|
||||
}
|
||||
|
||||
if (response.ok && data.status === 'success') {
|
||||
totalEpisodesAdded += episodes.length;
|
||||
} else {
|
||||
this.showToast(`Download error: ${data.message}`, 'error');
|
||||
console.error('Failed to add to queue:', data);
|
||||
failedSeries.push(folder);
|
||||
}
|
||||
}
|
||||
|
||||
// Show result message
|
||||
console.log('=== Download request complete ===');
|
||||
console.log('Total episodes added:', totalEpisodesAdded);
|
||||
console.log('Failed series:', failedSeries);
|
||||
|
||||
if (totalEpisodesAdded > 0) {
|
||||
const message = failedSeries.length > 0
|
||||
? `Added ${totalEpisodesAdded} episode(s) to queue (${failedSeries.length} series failed)`
|
||||
: `Added ${totalEpisodesAdded} episode(s) to download queue`;
|
||||
this.showToast(message, 'success');
|
||||
} else {
|
||||
const errorDetails = failedSeries.length > 0
|
||||
? `Failed series: ${failedSeries.join(', ')}`
|
||||
: 'No episodes were added. Check browser console for details.';
|
||||
console.error('Failed to add episodes. Details:', errorDetails);
|
||||
this.showToast('Failed to add episodes to queue. Check console for details.', 'error');
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Download error:', error);
|
||||
@@ -1099,74 +1167,6 @@ class AniWorldApp {
|
||||
}
|
||||
}
|
||||
|
||||
async checkProcessLocks() {
|
||||
try {
|
||||
const response = await this.makeAuthenticatedRequest('/api/anime/process/locks');
|
||||
if (!response) {
|
||||
// If no response, set status as idle
|
||||
this.updateProcessStatus('rescan', false);
|
||||
this.updateProcessStatus('download', false);
|
||||
return;
|
||||
}
|
||||
|
||||
// Check if response is actually JSON and not HTML (login page)
|
||||
const contentType = response.headers.get('content-type');
|
||||
if (!contentType || !contentType.includes('application/json')) {
|
||||
console.warn('Process locks API returned non-JSON response, likely authentication issue');
|
||||
// Set status as idle if we can't get proper response
|
||||
this.updateProcessStatus('rescan', false);
|
||||
this.updateProcessStatus('download', false);
|
||||
return;
|
||||
}
|
||||
|
||||
const data = await response.json();
|
||||
|
||||
if (data.success) {
|
||||
const locks = data.locks;
|
||||
this.updateProcessStatus('rescan', locks.rescan?.is_locked || false);
|
||||
this.updateProcessStatus('download', locks.download?.is_locked || false);
|
||||
|
||||
// Update button states
|
||||
const rescanBtn = document.getElementById('rescan-btn');
|
||||
if (rescanBtn) {
|
||||
if (locks.rescan?.is_locked) {
|
||||
rescanBtn.disabled = true;
|
||||
const span = rescanBtn.querySelector('span');
|
||||
if (span) span.textContent = 'Scanning...';
|
||||
} else {
|
||||
rescanBtn.disabled = false;
|
||||
const span = rescanBtn.querySelector('span');
|
||||
if (span) span.textContent = 'Rescan';
|
||||
}
|
||||
}
|
||||
} else {
|
||||
// If API returns error, set status as idle
|
||||
console.warn('Process locks API returned error:', data.error);
|
||||
this.updateProcessStatus('rescan', false);
|
||||
this.updateProcessStatus('download', false);
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Error checking process locks:', error);
|
||||
// On error, set status as idle
|
||||
this.updateProcessStatus('rescan', false);
|
||||
this.updateProcessStatus('download', false);
|
||||
}
|
||||
}
|
||||
|
||||
startProcessStatusMonitoring() {
|
||||
// Initial check on page load
|
||||
this.checkProcessLocks();
|
||||
|
||||
// Check process status every 5 seconds
|
||||
setInterval(() => {
|
||||
if (this.isConnected) {
|
||||
this.checkProcessLocks();
|
||||
}
|
||||
}, 5000);
|
||||
|
||||
console.log('Process status monitoring started');
|
||||
}
|
||||
|
||||
async showConfigModal() {
|
||||
const modal = document.getElementById('config-modal');
|
||||
|
||||
@@ -1723,155 +1723,6 @@ class AniWorldApp {
|
||||
}
|
||||
}
|
||||
|
||||
showBackupsModal(backups) {
|
||||
// Create modal to show backups
|
||||
const modal = document.createElement('div');
|
||||
modal.className = 'modal';
|
||||
modal.style.display = 'block';
|
||||
|
||||
const modalContent = document.createElement('div');
|
||||
modalContent.className = 'modal-content';
|
||||
modalContent.style.maxWidth = '60%';
|
||||
|
||||
const header = document.createElement('div');
|
||||
header.innerHTML = '<h3>Configuration Backups</h3>';
|
||||
|
||||
const backupList = document.createElement('div');
|
||||
backupList.className = 'backup-list';
|
||||
|
||||
if (backups.length === 0) {
|
||||
backupList.innerHTML = '<div class="backup-item"><span>No backups found</span></div>';
|
||||
} else {
|
||||
backups.forEach(backup => {
|
||||
const item = document.createElement('div');
|
||||
item.className = 'backup-item';
|
||||
|
||||
const info = document.createElement('div');
|
||||
info.className = 'backup-info';
|
||||
|
||||
const name = document.createElement('div');
|
||||
name.className = 'backup-name';
|
||||
name.textContent = backup.filename;
|
||||
|
||||
const details = document.createElement('div');
|
||||
details.className = 'backup-details';
|
||||
details.textContent = `Size: ${backup.size_kb} KB • Modified: ${backup.modified_display}`;
|
||||
|
||||
info.appendChild(name);
|
||||
info.appendChild(details);
|
||||
|
||||
const actions = document.createElement('div');
|
||||
actions.className = 'backup-actions';
|
||||
|
||||
const restoreBtn = document.createElement('button');
|
||||
restoreBtn.className = 'btn btn-xs btn-primary';
|
||||
restoreBtn.textContent = 'Restore';
|
||||
restoreBtn.onclick = () => {
|
||||
if (confirm('Are you sure you want to restore this backup? Current configuration will be overwritten.')) {
|
||||
this.restoreBackup(backup.filename);
|
||||
document.body.removeChild(modal);
|
||||
}
|
||||
};
|
||||
|
||||
const downloadBtn = document.createElement('button');
|
||||
downloadBtn.className = 'btn btn-xs btn-secondary';
|
||||
downloadBtn.textContent = 'Download';
|
||||
downloadBtn.onclick = () => this.downloadBackup(backup.filename);
|
||||
|
||||
actions.appendChild(restoreBtn);
|
||||
actions.appendChild(downloadBtn);
|
||||
|
||||
item.appendChild(info);
|
||||
item.appendChild(actions);
|
||||
|
||||
backupList.appendChild(item);
|
||||
});
|
||||
}
|
||||
|
||||
const closeBtn = document.createElement('button');
|
||||
closeBtn.textContent = 'Close';
|
||||
closeBtn.className = 'btn btn-secondary';
|
||||
closeBtn.onclick = () => document.body.removeChild(modal);
|
||||
|
||||
modalContent.appendChild(header);
|
||||
modalContent.appendChild(backupList);
|
||||
modalContent.appendChild(closeBtn);
|
||||
modal.appendChild(modalContent);
|
||||
document.body.appendChild(modal);
|
||||
|
||||
// Close on background click
|
||||
modal.onclick = (e) => {
|
||||
if (e.target === modal) {
|
||||
document.body.removeChild(modal);
|
||||
}
|
||||
};
|
||||
}
|
||||
|
||||
async restoreBackup(filename) {
|
||||
try {
|
||||
const response = await this.makeAuthenticatedRequest(`/api/config/backup/${encodeURIComponent(filename)}/restore`, {
|
||||
method: 'POST'
|
||||
});
|
||||
|
||||
if (!response) return;
|
||||
const data = await response.json();
|
||||
|
||||
if (data.success) {
|
||||
this.showToast('Configuration restored successfully', 'success');
|
||||
// Reload the config modal
|
||||
setTimeout(() => {
|
||||
this.hideConfigModal();
|
||||
this.showConfigModal();
|
||||
}, 1000);
|
||||
} else {
|
||||
this.showToast(`Failed to restore backup: ${data.error}`, 'error');
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Error restoring backup:', error);
|
||||
this.showToast('Failed to restore backup', 'error');
|
||||
}
|
||||
}
|
||||
|
||||
downloadBackup(filename) {
|
||||
const link = document.createElement('a');
|
||||
link.href = `/api/config/backup/${encodeURIComponent(filename)}/download`;
|
||||
link.download = filename;
|
||||
document.body.appendChild(link);
|
||||
link.click();
|
||||
document.body.removeChild(link);
|
||||
}
|
||||
|
||||
async exportConfig() {
|
||||
try {
|
||||
const includeSensitive = confirm('Include sensitive data (passwords, salts)? Click Cancel for safe export without sensitive data.');
|
||||
|
||||
const response = await this.makeAuthenticatedRequest('/api/config/export', {
|
||||
method: 'POST',
|
||||
body: JSON.stringify({ include_sensitive: includeSensitive })
|
||||
});
|
||||
|
||||
if (response && response.ok) {
|
||||
// Handle file download
|
||||
const blob = await response.blob();
|
||||
const url = window.URL.createObjectURL(blob);
|
||||
const link = document.createElement('a');
|
||||
link.href = url;
|
||||
link.download = `aniworld_config_${new Date().toISOString().slice(0, 19).replace(/:/g, '-')}.json`;
|
||||
document.body.appendChild(link);
|
||||
link.click();
|
||||
document.body.removeChild(link);
|
||||
window.URL.revokeObjectURL(url);
|
||||
|
||||
this.showToast('Configuration exported successfully', 'success');
|
||||
} else {
|
||||
this.showToast('Failed to export configuration', 'error');
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Error exporting config:', error);
|
||||
this.showToast('Failed to export configuration', 'error');
|
||||
}
|
||||
}
|
||||
|
||||
async validateConfig() {
|
||||
try {
|
||||
const response = await this.makeAuthenticatedRequest('/api/config/validate', {
|
||||
@@ -1956,57 +1807,6 @@ class AniWorldApp {
|
||||
}
|
||||
}
|
||||
|
||||
async pauseDownload() {
|
||||
if (!this.isDownloading || this.isPaused) return;
|
||||
|
||||
try {
|
||||
const response = await this.makeAuthenticatedRequest('/api/queue/pause', { method: 'POST' });
|
||||
if (!response) return;
|
||||
const data = await response.json();
|
||||
|
||||
document.getElementById('pause-download').classList.add('hidden');
|
||||
document.getElementById('resume-download').classList.remove('hidden');
|
||||
this.showToast('Queue paused', 'warning');
|
||||
} catch (error) {
|
||||
console.error('Pause error:', error);
|
||||
this.showToast('Failed to pause queue', 'error');
|
||||
}
|
||||
}
|
||||
|
||||
async resumeDownload() {
|
||||
if (!this.isDownloading || !this.isPaused) return;
|
||||
|
||||
try {
|
||||
const response = await this.makeAuthenticatedRequest('/api/queue/resume', { method: 'POST' });
|
||||
if (!response) return;
|
||||
const data = await response.json();
|
||||
|
||||
document.getElementById('pause-download').classList.remove('hidden');
|
||||
document.getElementById('resume-download').classList.add('hidden');
|
||||
this.showToast('Queue resumed', 'success');
|
||||
} catch (error) {
|
||||
console.error('Resume error:', error);
|
||||
this.showToast('Failed to resume queue', 'error');
|
||||
}
|
||||
}
|
||||
|
||||
async cancelDownload() {
|
||||
if (!this.isDownloading) return;
|
||||
|
||||
if (confirm('Are you sure you want to stop the download queue?')) {
|
||||
try {
|
||||
const response = await this.makeAuthenticatedRequest('/api/queue/stop', { method: 'POST' });
|
||||
if (!response) return;
|
||||
const data = await response.json();
|
||||
|
||||
this.showToast('Queue stopped', 'warning');
|
||||
} catch (error) {
|
||||
console.error('Stop error:', error);
|
||||
this.showToast('Failed to stop queue', 'error');
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
showDownloadQueue(data) {
|
||||
const queueSection = document.getElementById('download-queue-section');
|
||||
const queueProgress = document.getElementById('queue-progress');
|
||||
|
||||
@@ -1,29 +0,0 @@
|
||||
/**
|
||||
* Bulk Operations Module
|
||||
* Handles bulk selection and operations on multiple series
|
||||
*/
|
||||
|
||||
(function() {
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* Initialize bulk operations
|
||||
*/
|
||||
function initBulkOperations() {
|
||||
console.log('[Bulk Operations] Module loaded (functionality to be implemented)');
|
||||
|
||||
// TODO: Implement bulk operations
|
||||
// - Select multiple series
|
||||
// - Bulk download
|
||||
// - Bulk mark as watched
|
||||
// - Bulk delete
|
||||
}
|
||||
|
||||
// Initialize on DOM ready
|
||||
if (document.readyState === 'loading') {
|
||||
document.addEventListener('DOMContentLoaded', initBulkOperations);
|
||||
} else {
|
||||
initBulkOperations();
|
||||
}
|
||||
|
||||
})();
|
||||
@@ -1,42 +0,0 @@
|
||||
/**
|
||||
* Color Contrast Compliance Module
|
||||
* Ensures WCAG color contrast compliance
|
||||
*/
|
||||
|
||||
(function() {
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* Initialize color contrast compliance
|
||||
*/
|
||||
function initColorContrastCompliance() {
|
||||
checkContrastCompliance();
|
||||
console.log('[Color Contrast Compliance] Initialized');
|
||||
}
|
||||
|
||||
/**
|
||||
* Check if color contrast meets WCAG standards
|
||||
*/
|
||||
function checkContrastCompliance() {
|
||||
// This would typically check computed styles
|
||||
// For now, we rely on CSS variables defined in styles.css
|
||||
console.log('[Color Contrast] Relying on predefined WCAG-compliant color scheme');
|
||||
}
|
||||
|
||||
/**
|
||||
* Calculate contrast ratio between two colors
|
||||
*/
|
||||
function calculateContrastRatio(color1, color2) {
|
||||
// Simplified contrast calculation
|
||||
// Real implementation would use relative luminance
|
||||
return 4.5; // Placeholder
|
||||
}
|
||||
|
||||
// Initialize on DOM ready
|
||||
if (document.readyState === 'loading') {
|
||||
document.addEventListener('DOMContentLoaded', initColorContrastCompliance);
|
||||
} else {
|
||||
initColorContrastCompliance();
|
||||
}
|
||||
|
||||
})();
|
||||
@@ -1,26 +0,0 @@
|
||||
/**
|
||||
* Drag and Drop Module
|
||||
* Handles drag-and-drop functionality for series cards
|
||||
*/
|
||||
|
||||
(function() {
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* Initialize drag and drop
|
||||
*/
|
||||
function initDragDrop() {
|
||||
console.log('[Drag & Drop] Module loaded (functionality to be implemented)');
|
||||
|
||||
// TODO: Implement drag-and-drop for series cards
|
||||
// This will allow users to reorder series or add to queue via drag-and-drop
|
||||
}
|
||||
|
||||
// Initialize on DOM ready
|
||||
if (document.readyState === 'loading') {
|
||||
document.addEventListener('DOMContentLoaded', initDragDrop);
|
||||
} else {
|
||||
initDragDrop();
|
||||
}
|
||||
|
||||
})();
|
||||
@@ -1,144 +0,0 @@
|
||||
/**
|
||||
* Keyboard Shortcuts Module
|
||||
* Handles keyboard navigation and shortcuts for improved accessibility
|
||||
*/
|
||||
|
||||
(function() {
|
||||
'use strict';
|
||||
|
||||
// Keyboard shortcuts configuration
|
||||
const shortcuts = {
|
||||
'ctrl+k': 'focusSearch',
|
||||
'ctrl+r': 'triggerRescan',
|
||||
'ctrl+q': 'openQueue',
|
||||
'escape': 'closeModals',
|
||||
'tab': 'navigationMode',
|
||||
'/': 'focusSearch'
|
||||
};
|
||||
|
||||
/**
|
||||
* Initialize keyboard shortcuts
|
||||
*/
|
||||
function initKeyboardShortcuts() {
|
||||
document.addEventListener('keydown', handleKeydown);
|
||||
console.log('[Keyboard Shortcuts] Initialized');
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle keydown events
|
||||
*/
|
||||
function handleKeydown(event) {
|
||||
const key = getKeyCombo(event);
|
||||
|
||||
if (shortcuts[key]) {
|
||||
const action = shortcuts[key];
|
||||
handleShortcut(action, event);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get key combination string
|
||||
*/
|
||||
function getKeyCombo(event) {
|
||||
const parts = [];
|
||||
|
||||
if (event.ctrlKey) parts.push('ctrl');
|
||||
if (event.altKey) parts.push('alt');
|
||||
if (event.shiftKey) parts.push('shift');
|
||||
|
||||
const key = event.key.toLowerCase();
|
||||
parts.push(key);
|
||||
|
||||
return parts.join('+');
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle keyboard shortcut action
|
||||
*/
|
||||
function handleShortcut(action, event) {
|
||||
switch(action) {
|
||||
case 'focusSearch':
|
||||
event.preventDefault();
|
||||
focusSearchInput();
|
||||
break;
|
||||
case 'triggerRescan':
|
||||
event.preventDefault();
|
||||
triggerRescan();
|
||||
break;
|
||||
case 'openQueue':
|
||||
event.preventDefault();
|
||||
openQueue();
|
||||
break;
|
||||
case 'closeModals':
|
||||
closeAllModals();
|
||||
break;
|
||||
case 'navigationMode':
|
||||
handleTabNavigation(event);
|
||||
break;
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Focus search input
|
||||
*/
|
||||
function focusSearchInput() {
|
||||
const searchInput = document.getElementById('search-input');
|
||||
if (searchInput) {
|
||||
searchInput.focus();
|
||||
searchInput.select();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Trigger rescan
|
||||
*/
|
||||
function triggerRescan() {
|
||||
const rescanBtn = document.getElementById('rescan-btn');
|
||||
if (rescanBtn && !rescanBtn.disabled) {
|
||||
rescanBtn.click();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Open queue page
|
||||
*/
|
||||
function openQueue() {
|
||||
window.location.href = '/queue';
|
||||
}
|
||||
|
||||
/**
|
||||
* Close all open modals
|
||||
*/
|
||||
function closeAllModals() {
|
||||
const modals = document.querySelectorAll('.modal.active');
|
||||
modals.forEach(modal => {
|
||||
modal.classList.remove('active');
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle tab navigation with visual indicators
|
||||
*/
|
||||
function handleTabNavigation(event) {
|
||||
// Add keyboard-focus class to focused element
|
||||
const previousFocus = document.querySelector('.keyboard-focus');
|
||||
if (previousFocus) {
|
||||
previousFocus.classList.remove('keyboard-focus');
|
||||
}
|
||||
|
||||
// Will be applied after tab completes
|
||||
setTimeout(() => {
|
||||
if (document.activeElement) {
|
||||
document.activeElement.classList.add('keyboard-focus');
|
||||
}
|
||||
}, 0);
|
||||
}
|
||||
|
||||
// Initialize on DOM ready
|
||||
if (document.readyState === 'loading') {
|
||||
document.addEventListener('DOMContentLoaded', initKeyboardShortcuts);
|
||||
} else {
|
||||
initKeyboardShortcuts();
|
||||
}
|
||||
|
||||
})();
|
||||
@@ -1,80 +0,0 @@
|
||||
/**
|
||||
* Mobile Responsive Module
|
||||
* Handles mobile-specific functionality and responsive behavior
|
||||
*/
|
||||
|
||||
(function() {
|
||||
'use strict';
|
||||
|
||||
let isMobile = false;
|
||||
|
||||
/**
|
||||
* Initialize mobile responsive features
|
||||
*/
|
||||
function initMobileResponsive() {
|
||||
detectMobile();
|
||||
setupResponsiveHandlers();
|
||||
console.log('[Mobile Responsive] Initialized');
|
||||
}
|
||||
|
||||
/**
|
||||
* Detect if device is mobile
|
||||
*/
|
||||
function detectMobile() {
|
||||
isMobile = /Android|webOS|iPhone|iPad|iPod|BlackBerry|IEMobile|Opera Mini/i.test(navigator.userAgent);
|
||||
|
||||
if (isMobile) {
|
||||
document.body.classList.add('mobile-device');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Setup responsive event handlers
|
||||
*/
|
||||
function setupResponsiveHandlers() {
|
||||
window.addEventListener('resize', handleResize);
|
||||
handleResize(); // Initial call
|
||||
}
|
||||
|
||||
/**
|
||||
* Handle window resize
|
||||
*/
|
||||
function handleResize() {
|
||||
const width = window.innerWidth;
|
||||
|
||||
if (width < 768) {
|
||||
applyMobileLayout();
|
||||
} else {
|
||||
applyDesktopLayout();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply mobile-specific layout
|
||||
*/
|
||||
function applyMobileLayout() {
|
||||
document.body.classList.add('mobile-layout');
|
||||
document.body.classList.remove('desktop-layout');
|
||||
}
|
||||
|
||||
/**
|
||||
* Apply desktop-specific layout
|
||||
*/
|
||||
function applyDesktopLayout() {
|
||||
document.body.classList.add('desktop-layout');
|
||||
document.body.classList.remove('mobile-layout');
|
||||
}
|
||||
|
||||
// Export functions
|
||||
window.MobileResponsive = {
|
||||
isMobile: () => isMobile
|
||||
};
|
||||
|
||||
// Initialize on DOM ready
|
||||
if (document.readyState === 'loading') {
|
||||
document.addEventListener('DOMContentLoaded', initMobileResponsive);
|
||||
} else {
|
||||
initMobileResponsive();
|
||||
}
|
||||
|
||||
})();
|
||||
@@ -1,76 +0,0 @@
|
||||
/**
|
||||
* Multi-Screen Support Module
|
||||
* Handles multi-monitor and window management
|
||||
*/
|
||||
|
||||
(function() {
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* Initialize multi-screen support
|
||||
*/
|
||||
function initMultiScreenSupport() {
|
||||
if ('screen' in window) {
|
||||
detectScreens();
|
||||
console.log('[Multi-Screen Support] Initialized');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Detect available screens
|
||||
*/
|
||||
function detectScreens() {
|
||||
// Modern browsers support window.screen
|
||||
const screenInfo = {
|
||||
width: window.screen.width,
|
||||
height: window.screen.height,
|
||||
availWidth: window.screen.availWidth,
|
||||
availHeight: window.screen.availHeight,
|
||||
colorDepth: window.screen.colorDepth,
|
||||
pixelDepth: window.screen.pixelDepth
|
||||
};
|
||||
|
||||
console.log('[Multi-Screen] Screen info:', screenInfo);
|
||||
}
|
||||
|
||||
/**
|
||||
* Request fullscreen
|
||||
*/
|
||||
function requestFullscreen() {
|
||||
const elem = document.documentElement;
|
||||
if (elem.requestFullscreen) {
|
||||
elem.requestFullscreen();
|
||||
} else if (elem.webkitRequestFullscreen) {
|
||||
elem.webkitRequestFullscreen();
|
||||
} else if (elem.msRequestFullscreen) {
|
||||
elem.msRequestFullscreen();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Exit fullscreen
|
||||
*/
|
||||
function exitFullscreen() {
|
||||
if (document.exitFullscreen) {
|
||||
document.exitFullscreen();
|
||||
} else if (document.webkitExitFullscreen) {
|
||||
document.webkitExitFullscreen();
|
||||
} else if (document.msExitFullscreen) {
|
||||
document.msExitFullscreen();
|
||||
}
|
||||
}
|
||||
|
||||
// Export functions
|
||||
window.MultiScreen = {
|
||||
requestFullscreen: requestFullscreen,
|
||||
exitFullscreen: exitFullscreen
|
||||
};
|
||||
|
||||
// Initialize on DOM ready
|
||||
if (document.readyState === 'loading') {
|
||||
document.addEventListener('DOMContentLoaded', initMultiScreenSupport);
|
||||
} else {
|
||||
initMultiScreenSupport();
|
||||
}
|
||||
|
||||
})();
|
||||
@@ -6,7 +6,7 @@ class QueueManager {
constructor() {
this.socket = null;
this.refreshInterval = null;
this.isReordering = false;
this.pendingProgressUpdates = new Map(); // Store progress updates waiting for cards

this.init();
}
@@ -15,8 +15,9 @@ class QueueManager {
this.initSocket();
this.bindEvents();
this.initTheme();
this.startRefreshTimer();
this.loadQueueData();
// Remove polling - use WebSocket events for real-time updates
// this.startRefreshTimer(); // ← REMOVED
this.loadQueueData(); // Load initial data once
}

initSocket() {
@@ -55,21 +56,21 @@ class QueueManager {
|
||||
}
|
||||
});
|
||||
|
||||
this.socket.on('download_progress_update', (data) => {
|
||||
this.updateDownloadProgress(data);
|
||||
});
|
||||
|
||||
// Download queue events
|
||||
this.socket.on('download_started', () => {
|
||||
this.showToast('Download queue started', 'success');
|
||||
this.loadQueueData(); // Refresh data
|
||||
// Full reload needed - queue structure changed
|
||||
this.loadQueueData();
|
||||
});
|
||||
this.socket.on('queue_started', () => {
|
||||
this.showToast('Download queue started', 'success');
|
||||
this.loadQueueData(); // Refresh data
|
||||
// Full reload needed - queue structure changed
|
||||
this.loadQueueData();
|
||||
});
|
||||
|
||||
this.socket.on('download_progress', (data) => {
|
||||
// Update progress in real-time without reloading all data
|
||||
console.log('Received download progress:', data);
|
||||
this.updateDownloadProgress(data);
|
||||
});
|
||||
|
||||
@@ -78,7 +79,15 @@ class QueueManager {
|
||||
const serieName = data.serie_name || data.serie || 'Unknown';
|
||||
const episode = data.episode || '';
|
||||
this.showToast(`Completed: ${serieName}${episode ? ' - Episode ' + episode : ''}`, 'success');
|
||||
this.loadQueueData(); // Refresh data
|
||||
|
||||
// Clear any pending progress updates for this download
|
||||
const downloadId = data.item_id || data.download_id || data.id;
|
||||
if (downloadId) {
|
||||
this.pendingProgressUpdates.delete(downloadId);
|
||||
}
|
||||
|
||||
// Full reload needed - item moved from active to completed
|
||||
this.loadQueueData();
|
||||
};
|
||||
this.socket.on('download_completed', handleDownloadComplete);
|
||||
this.socket.on('download_complete', handleDownloadComplete);
|
||||
@@ -87,14 +96,29 @@ class QueueManager {
|
||||
const handleDownloadError = (data) => {
|
||||
const message = data.error || data.message || 'Unknown error';
|
||||
this.showToast(`Download failed: ${message}`, 'error');
|
||||
this.loadQueueData(); // Refresh data
|
||||
|
||||
// Clear any pending progress updates for this download
|
||||
const downloadId = data.item_id || data.download_id || data.id;
|
||||
if (downloadId) {
|
||||
this.pendingProgressUpdates.delete(downloadId);
|
||||
}
|
||||
|
||||
// Full reload needed - item moved from active to failed
|
||||
this.loadQueueData();
|
||||
};
|
||||
this.socket.on('download_error', handleDownloadError);
|
||||
this.socket.on('download_failed', handleDownloadError);
|
||||
|
||||
this.socket.on('download_queue_completed', () => {
|
||||
this.showToast('All downloads completed!', 'success');
|
||||
this.loadQueueData(); // Refresh data
|
||||
// Full reload needed - queue state changed
|
||||
this.loadQueueData();
|
||||
});
|
||||
|
||||
this.socket.on('queue_completed', () => {
|
||||
this.showToast('All downloads completed!', 'success');
|
||||
// Full reload needed - queue state changed
|
||||
this.loadQueueData();
|
||||
});
|
||||
|
||||
this.socket.on('download_stop_requested', () => {
|
||||
@@ -104,7 +128,8 @@ class QueueManager {
|
||||
// Handle both old and new queue stopped events
|
||||
const handleQueueStopped = () => {
|
||||
this.showToast('Download queue stopped', 'success');
|
||||
this.loadQueueData(); // Refresh data
|
||||
// Full reload needed - queue state changed
|
||||
this.loadQueueData();
|
||||
};
|
||||
this.socket.on('download_stopped', handleQueueStopped);
|
||||
this.socket.on('queue_stopped', handleQueueStopped);
|
||||
@@ -112,11 +137,13 @@ class QueueManager {
|
||||
// Handle queue paused/resumed
|
||||
this.socket.on('queue_paused', () => {
|
||||
this.showToast('Queue paused', 'info');
|
||||
// Full reload needed - queue state changed
|
||||
this.loadQueueData();
|
||||
});
|
||||
|
||||
this.socket.on('queue_resumed', () => {
|
||||
this.showToast('Queue resumed', 'success');
|
||||
// Full reload needed - queue state changed
|
||||
this.loadQueueData();
|
||||
});
|
||||
}
|
||||
@@ -128,10 +155,6 @@ class QueueManager {
|
||||
});
|
||||
|
||||
// Queue management actions
|
||||
document.getElementById('clear-queue-btn').addEventListener('click', () => {
|
||||
this.clearQueue('pending');
|
||||
});
|
||||
|
||||
document.getElementById('clear-completed-btn').addEventListener('click', () => {
|
||||
this.clearQueue('completed');
|
||||
});
|
||||
@@ -140,29 +163,21 @@ class QueueManager {
|
||||
this.clearQueue('failed');
|
||||
});
|
||||
|
||||
document.getElementById('clear-pending-btn').addEventListener('click', () => {
|
||||
this.clearQueue('pending');
|
||||
});
|
||||
|
||||
document.getElementById('retry-all-btn').addEventListener('click', () => {
|
||||
this.retryAllFailed();
|
||||
});
|
||||
|
||||
document.getElementById('reorder-queue-btn').addEventListener('click', () => {
|
||||
this.toggleReorderMode();
|
||||
});
|
||||
|
||||
// Download controls
|
||||
document.getElementById('start-queue-btn').addEventListener('click', () => {
|
||||
this.startDownloadQueue();
|
||||
this.startDownload();
|
||||
});
|
||||
|
||||
document.getElementById('stop-queue-btn').addEventListener('click', () => {
|
||||
this.stopDownloadQueue();
|
||||
});
|
||||
|
||||
document.getElementById('pause-all-btn').addEventListener('click', () => {
|
||||
this.pauseAllDownloads();
|
||||
});
|
||||
|
||||
document.getElementById('resume-all-btn').addEventListener('click', () => {
|
||||
this.resumeAllDownloads();
|
||||
this.stopDownloads();
|
||||
});
|
||||
|
||||
// Modal events
|
||||
@@ -218,6 +233,9 @@ class QueueManager {
|
||||
const data = await response.json();
|
||||
this.updateQueueDisplay(data);
|
||||
|
||||
// Process any pending progress updates after queue is loaded
|
||||
this.processPendingProgressUpdates();
|
||||
|
||||
} catch (error) {
|
||||
console.error('Error loading queue data:', error);
|
||||
}
|
||||
@@ -244,18 +262,26 @@ class QueueManager {
|
||||
}
|
||||
|
||||
updateStatistics(stats, data) {
|
||||
document.getElementById('total-items').textContent = stats.total_items || 0;
|
||||
document.getElementById('pending-items').textContent = (data.pending_queue || []).length;
|
||||
document.getElementById('completed-items').textContent = stats.completed_items || 0;
|
||||
document.getElementById('failed-items').textContent = stats.failed_items || 0;
|
||||
// Ensure stats object exists
|
||||
const statistics = stats || {};
|
||||
|
||||
document.getElementById('current-speed').textContent = stats.current_speed || '0 MB/s';
|
||||
document.getElementById('average-speed').textContent = stats.average_speed || '0 MB/s';
|
||||
document.getElementById('total-items').textContent = statistics.total_items || 0;
|
||||
document.getElementById('pending-items').textContent = (data.pending_queue || []).length;
|
||||
document.getElementById('completed-items').textContent = statistics.completed_items || 0;
|
||||
document.getElementById('failed-items').textContent = statistics.failed_items || 0;
|
||||
|
||||
// Update section counts
|
||||
document.getElementById('queue-count').textContent = (data.pending_queue || []).length;
|
||||
document.getElementById('completed-count').textContent = statistics.completed_items || 0;
|
||||
document.getElementById('failed-count').textContent = statistics.failed_items || 0;
|
||||
|
||||
document.getElementById('current-speed').textContent = statistics.current_speed || '0 MB/s';
|
||||
document.getElementById('average-speed').textContent = statistics.average_speed || '0 MB/s';
|
||||
|
||||
// Format ETA
|
||||
const etaElement = document.getElementById('eta-time');
|
||||
if (stats.eta) {
|
||||
const eta = new Date(stats.eta);
|
||||
if (statistics.eta) {
|
||||
const eta = new Date(statistics.eta);
|
||||
const now = new Date();
|
||||
const diffMs = eta - now;
|
||||
|
||||
@@ -271,6 +297,159 @@ class QueueManager {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Update download progress in real-time
|
||||
* @param {Object} data - Progress data from WebSocket
|
||||
*/
|
||||
updateDownloadProgress(data) {
|
||||
console.log('updateDownloadProgress called with:', JSON.stringify(data, null, 2));
|
||||
|
||||
// Extract download ID - prioritize metadata.item_id (actual item ID)
|
||||
// Progress service sends id with "download_" prefix, but we need the actual item ID
|
||||
let downloadId = null;
|
||||
|
||||
// First try metadata.item_id (this is the actual download item ID)
|
||||
if (data.metadata && data.metadata.item_id) {
|
||||
downloadId = data.metadata.item_id;
|
||||
}
|
||||
|
||||
// Fallback to other ID fields
|
||||
if (!downloadId) {
|
||||
downloadId = data.item_id || data.download_id;
|
||||
}
|
||||
|
||||
// If ID starts with "download_", extract the actual ID
|
||||
if (!downloadId && data.id) {
|
||||
if (data.id.startsWith('download_')) {
|
||||
downloadId = data.id.substring(9); // Remove "download_" prefix
|
||||
} else {
|
||||
downloadId = data.id;
|
||||
}
|
||||
}
|
||||
|
||||
// Check if data is wrapped in another 'data' property
|
||||
if (!downloadId && data.data) {
|
||||
if (data.data.metadata && data.data.metadata.item_id) {
|
||||
downloadId = data.data.metadata.item_id;
|
||||
} else if (data.data.item_id) {
|
||||
downloadId = data.data.item_id;
|
||||
} else if (data.data.id && data.data.id.startsWith('download_')) {
|
||||
downloadId = data.data.id.substring(9);
|
||||
} else {
|
||||
downloadId = data.data.id || data.data.download_id;
|
||||
}
|
||||
data = data.data; // Use nested data
|
||||
}
|
||||
|
||||
if (!downloadId) {
|
||||
console.warn('No download ID in progress data');
|
||||
console.warn('Data structure:', data);
|
||||
console.warn('Available keys:', Object.keys(data));
|
||||
return;
|
||||
}
|
||||
|
||||
console.log(`Looking for download card with ID: ${downloadId}`);
|
||||
|
||||
// Find the download card in active downloads
|
||||
const card = document.querySelector(`[data-download-id="${downloadId}"]`);
|
||||
if (!card) {
|
||||
// Card not found - store update and reload queue
|
||||
console.warn(`Download card not found for ID: ${downloadId}`);
|
||||
|
||||
// Debug: Log all existing download cards
|
||||
const allCards = document.querySelectorAll('[data-download-id]');
|
||||
console.log(`Found ${allCards.length} download cards:`);
|
||||
allCards.forEach(c => console.log(` - ${c.getAttribute('data-download-id')}`));
|
||||
|
||||
// Store this progress update to retry after queue loads
|
||||
console.log(`Storing progress update for ${downloadId} to retry after reload`);
|
||||
this.pendingProgressUpdates.set(downloadId, data);
|
||||
|
||||
// Reload queue to sync state
|
||||
console.log('Reloading queue to sync state...');
|
||||
this.loadQueueData();
|
||||
return;
|
||||
}
|
||||
|
||||
console.log(`Found download card for ID: ${downloadId}, updating progress`);
|
||||
|
||||
// Extract progress information - handle both ProgressService and yt-dlp formats
|
||||
const progress = data.progress || data;
|
||||
const percent = progress.percent || 0;
|
||||
|
||||
// Check if we have detailed yt-dlp progress (downloaded_mb, total_mb, speed_mbps)
|
||||
// or basic ProgressService progress (current, total)
|
||||
let downloaded, total, speed;
|
||||
|
||||
if (progress.downloaded_mb !== undefined && progress.total_mb !== undefined) {
|
||||
// yt-dlp detailed format
|
||||
downloaded = progress.downloaded_mb.toFixed(1);
|
||||
total = progress.total_mb.toFixed(1);
|
||||
speed = progress.speed_mbps ? progress.speed_mbps.toFixed(1) : '0.0';
|
||||
} else if (progress.current !== undefined && progress.total !== undefined) {
|
||||
// ProgressService basic format - convert bytes to MB
|
||||
downloaded = (progress.current / (1024 * 1024)).toFixed(1);
|
||||
total = progress.total > 0 ? (progress.total / (1024 * 1024)).toFixed(1) : 'Unknown';
|
||||
speed = '0.0'; // Speed not available in basic format
|
||||
} else {
|
||||
// Fallback
|
||||
downloaded = '0.0';
|
||||
total = 'Unknown';
|
||||
speed = '0.0';
|
||||
}
|
||||
|
||||
// Update progress bar
|
||||
const progressFill = card.querySelector('.progress-fill');
|
||||
if (progressFill) {
|
||||
progressFill.style.width = `${percent}%`;
|
||||
}
|
||||
|
||||
// Update progress text
|
||||
const progressInfo = card.querySelector('.progress-info');
|
||||
if (progressInfo) {
|
||||
const percentSpan = progressInfo.querySelector('span:first-child');
|
||||
const speedSpan = progressInfo.querySelector('.download-speed');
|
||||
|
||||
if (percentSpan) {
|
||||
percentSpan.textContent = `${percent.toFixed(1)}% (${downloaded} MB / ${total} MB)`;
|
||||
}
|
||||
if (speedSpan) {
|
||||
speedSpan.textContent = `${speed} MB/s`;
|
||||
}
|
||||
}
|
||||
|
||||
console.log(`Updated progress for ${downloadId}: ${percent.toFixed(1)}%`);
|
||||
}
|
||||
|
||||
processPendingProgressUpdates() {
|
||||
if (this.pendingProgressUpdates.size === 0) {
|
||||
return;
|
||||
}
|
||||
|
||||
console.log(`Processing ${this.pendingProgressUpdates.size} pending progress updates...`);
|
||||
|
||||
// Process each pending update
|
||||
const processed = [];
|
||||
for (const [downloadId, data] of this.pendingProgressUpdates.entries()) {
|
||||
// Check if card now exists
|
||||
const card = document.querySelector(`[data-download-id="${downloadId}"]`);
|
||||
if (card) {
|
||||
console.log(`Retrying progress update for ${downloadId}`);
|
||||
this.updateDownloadProgress(data);
|
||||
processed.push(downloadId);
|
||||
} else {
|
||||
console.log(`Card still not found for ${downloadId}, will retry on next reload`);
|
||||
}
|
||||
}
|
||||
|
||||
// Remove processed updates
|
||||
processed.forEach(id => this.pendingProgressUpdates.delete(id));
|
||||
|
||||
if (processed.length > 0) {
|
||||
console.log(`Successfully processed ${processed.length} pending updates`);
|
||||
}
|
||||
}
|
||||
|
||||
renderActiveDownloads(downloads) {
|
||||
const container = document.getElementById('active-downloads');
|
||||
|
||||
@@ -295,20 +474,12 @@ class QueueManager {
|
||||
const total = progress.total_mb ? `${progress.total_mb.toFixed(1)} MB` : 'Unknown';
|
||||
|
||||
return `
|
||||
<div class="download-card active">
|
||||
<div class="download-card active" data-download-id="${download.id}">
|
||||
<div class="download-header">
|
||||
<div class="download-info">
|
||||
<h4>${this.escapeHtml(download.serie_name)}</h4>
|
||||
<p>${this.escapeHtml(download.episode.season)}x${String(download.episode.episode).padStart(2, '0')} - ${this.escapeHtml(download.episode.title || 'Episode ' + download.episode.episode)}</p>
|
||||
</div>
|
||||
<div class="download-actions">
|
||||
<button class="btn btn-small btn-secondary" onclick="queueManager.pauseDownload('${download.id}')">
|
||||
<i class="fas fa-pause"></i>
|
||||
</button>
|
||||
<button class="btn btn-small btn-error" onclick="queueManager.cancelDownload('${download.id}')">
|
||||
<i class="fas fa-stop"></i>
|
||||
</button>
|
||||
</div>
|
||||
</div>
|
||||
<div class="download-progress">
|
||||
<div class="progress-bar">
|
||||
@@ -331,6 +502,7 @@ class QueueManager {
|
||||
<div class="empty-state">
|
||||
<i class="fas fa-list"></i>
|
||||
<p>No items in queue</p>
|
||||
<small>Add episodes from the main page to start downloading</small>
|
||||
</div>
|
||||
`;
|
||||
return;
|
||||
@@ -341,10 +513,11 @@ class QueueManager {
|
||||
|
||||
createPendingQueueCard(download, index) {
|
||||
const addedAt = new Date(download.added_at).toLocaleString();
|
||||
const priorityClass = download.priority === 'high' ? 'high-priority' : '';
|
||||
|
||||
return `
|
||||
<div class="download-card pending ${priorityClass}" data-id="${download.id}">
|
||||
<div class="download-card pending"
|
||||
data-id="${download.id}"
|
||||
data-index="${index}">
|
||||
<div class="queue-position">${index + 1}</div>
|
||||
<div class="download-header">
|
||||
<div class="download-info">
|
||||
@@ -353,7 +526,6 @@ class QueueManager {
|
||||
<small>Added: ${addedAt}</small>
|
||||
</div>
|
||||
<div class="download-actions">
|
||||
${download.priority === 'high' ? '<i class="fas fa-arrow-up priority-indicator" title="High Priority"></i>' : ''}
|
||||
<button class="btn btn-small btn-secondary" onclick="queueManager.removeFromQueue('${download.id}')">
|
||||
<i class="fas fa-trash"></i>
|
||||
</button>
|
||||
@@ -420,7 +592,7 @@ class QueueManager {
|
||||
const retryCount = download.retry_count || 0;
|
||||
|
||||
return `
|
||||
<div class="download-card failed">
|
||||
<div class="download-card failed" data-id="${download.id}">
|
||||
<div class="download-header">
|
||||
<div class="download-info">
|
||||
<h4>${this.escapeHtml(download.serie_name)}</h4>
|
||||
@@ -441,10 +613,23 @@ class QueueManager {
|
||||
`;
|
||||
}
|
||||
|
||||
async removeFailedDownload(downloadId) {
|
||||
await this.removeFromQueue(downloadId);
|
||||
}
|
||||
|
||||
updateButtonStates(data) {
|
||||
const hasActive = (data.active_downloads || []).length > 0;
|
||||
const hasPending = (data.pending_queue || []).length > 0;
|
||||
const hasFailed = (data.failed_downloads || []).length > 0;
|
||||
const hasCompleted = (data.completed_downloads || []).length > 0;
|
||||
|
||||
console.log('Button states update:', {
|
||||
hasPending,
|
||||
pendingCount: (data.pending_queue || []).length,
|
||||
hasActive,
|
||||
hasFailed,
|
||||
hasCompleted
|
||||
});
|
||||
|
||||
// Enable start button only if there are pending items and no active downloads
|
||||
document.getElementById('start-queue-btn').disabled = !hasPending || hasActive;
|
||||
@@ -459,23 +644,31 @@ class QueueManager {
|
||||
document.getElementById('start-queue-btn').style.display = 'inline-flex';
|
||||
}
|
||||
|
||||
document.getElementById('pause-all-btn').disabled = !hasActive;
|
||||
document.getElementById('clear-queue-btn').disabled = !hasPending;
|
||||
document.getElementById('reorder-queue-btn').disabled = !hasPending || (data.pending_queue || []).length < 2;
|
||||
document.getElementById('retry-all-btn').disabled = !hasFailed;
|
||||
document.getElementById('clear-completed-btn').disabled = !hasCompleted;
|
||||
document.getElementById('clear-failed-btn').disabled = !hasFailed;
|
||||
|
||||
// Update clear pending button if it exists
|
||||
const clearPendingBtn = document.getElementById('clear-pending-btn');
|
||||
if (clearPendingBtn) {
|
||||
clearPendingBtn.disabled = !hasPending;
|
||||
console.log('Clear pending button updated:', { disabled: !hasPending, hasPending });
|
||||
} else {
|
||||
console.error('Clear pending button not found in DOM');
|
||||
}
|
||||
}
|
||||
|
||||
async clearQueue(type) {
|
||||
const titles = {
|
||||
pending: 'Clear Queue',
|
||||
completed: 'Clear Completed Downloads',
|
||||
failed: 'Clear Failed Downloads'
|
||||
failed: 'Clear Failed Downloads',
|
||||
pending: 'Remove All Pending Downloads'
|
||||
};
|
||||
|
||||
const messages = {
|
||||
pending: 'Are you sure you want to clear all pending downloads from the queue?',
|
||||
completed: 'Are you sure you want to clear all completed downloads?',
|
||||
failed: 'Are you sure you want to clear all failed downloads?'
|
||||
failed: 'Are you sure you want to clear all failed downloads?',
|
||||
pending: 'Are you sure you want to remove all pending downloads from the queue?'
|
||||
};
|
||||
|
||||
const confirmed = await this.showConfirmModal(titles[type], messages[type]);
|
||||
@@ -483,7 +676,6 @@ class QueueManager {
|
||||
|
||||
try {
|
||||
if (type === 'completed') {
|
||||
// Use the new DELETE /api/queue/completed endpoint
|
||||
const response = await this.makeAuthenticatedRequest('/api/queue/completed', {
|
||||
method: 'DELETE'
|
||||
});
|
||||
@@ -491,11 +683,28 @@ class QueueManager {
|
||||
if (!response) return;
|
||||
const data = await response.json();
|
||||
|
||||
this.showToast(`Cleared ${data.cleared_count} completed downloads`, 'success');
|
||||
this.showToast(`Cleared ${data.count} completed downloads`, 'success');
|
||||
this.loadQueueData();
|
||||
} else if (type === 'failed') {
|
||||
const response = await this.makeAuthenticatedRequest('/api/queue/failed', {
|
||||
method: 'DELETE'
|
||||
});
|
||||
|
||||
if (!response) return;
|
||||
const data = await response.json();
|
||||
|
||||
this.showToast(`Cleared ${data.count} failed downloads`, 'success');
|
||||
this.loadQueueData();
|
||||
} else if (type === 'pending') {
|
||||
const response = await this.makeAuthenticatedRequest('/api/queue/pending', {
|
||||
method: 'DELETE'
|
||||
});
|
||||
|
||||
if (!response) return;
|
||||
const data = await response.json();
|
||||
|
||||
this.showToast(`Removed ${data.count} pending downloads`, 'success');
|
||||
this.loadQueueData();
|
||||
} else {
|
||||
// For pending and failed, use the old logic (TODO: implement backend endpoints)
|
||||
this.showToast(`Clear ${type} not yet implemented`, 'warning');
|
||||
}
|
||||
|
||||
} catch (error) {
|
||||
@ -528,14 +737,31 @@ class QueueManager {
|
||||
const confirmed = await this.showConfirmModal('Retry All Failed Downloads', 'Are you sure you want to retry all failed downloads?');
|
||||
if (!confirmed) return;
|
||||
|
||||
// Get all failed downloads and retry them individually
|
||||
try {
|
||||
// Get all failed download IDs
|
||||
const failedCards = document.querySelectorAll('#failed-downloads .download-card.failed');
|
||||
const itemIds = Array.from(failedCards).map(card => card.dataset.id).filter(id => id);
|
||||
|
||||
for (const card of failedCards) {
|
||||
const downloadId = card.dataset.id;
|
||||
if (downloadId) {
|
||||
await this.retryDownload(downloadId);
|
||||
if (itemIds.length === 0) {
|
||||
this.showToast('No failed downloads to retry', 'info');
|
||||
return;
|
||||
}
|
||||
|
||||
const response = await this.makeAuthenticatedRequest('/api/queue/retry', {
|
||||
method: 'POST',
|
||||
headers: { 'Content-Type': 'application/json' },
|
||||
body: JSON.stringify({ item_ids: itemIds })
|
||||
});
|
||||
|
||||
if (!response) return;
|
||||
const data = await response.json();
|
||||
|
||||
this.showToast(`Retried ${data.retried_count || itemIds.length} download(s)`, 'success');
|
||||
this.loadQueueData();
|
||||
|
||||
} catch (error) {
|
||||
console.error('Error retrying failed downloads:', error);
|
||||
this.showToast('Failed to retry downloads', 'error');
|
||||
}
|
||||
}
|
||||
|
||||
@@ -571,7 +797,7 @@ class QueueManager {
|
||||
return `${minutes}m ${seconds}s`;
|
||||
}
|
||||
|
||||
async startDownloadQueue() {
|
||||
async startDownload() {
|
||||
try {
|
||||
const response = await this.makeAuthenticatedRequest('/api/queue/start', {
|
||||
method: 'POST'
|
||||
@@ -581,22 +807,24 @@ class QueueManager {
|
||||
const data = await response.json();
|
||||
|
||||
if (data.status === 'success') {
|
||||
this.showToast('Download queue started', 'success');
|
||||
this.showToast('Queue processing started - all items will download automatically', 'success');
|
||||
|
||||
// Update UI
|
||||
document.getElementById('start-queue-btn').style.display = 'none';
|
||||
document.getElementById('stop-queue-btn').style.display = 'inline-flex';
|
||||
document.getElementById('stop-queue-btn').disabled = false;
|
||||
|
||||
this.loadQueueData(); // Refresh display
|
||||
} else {
|
||||
this.showToast(`Failed to start queue: ${data.message}`, 'error');
|
||||
this.showToast(`Failed to start queue: ${data.message || 'Unknown error'}`, 'error');
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Error starting download queue:', error);
|
||||
this.showToast('Failed to start download queue', 'error');
|
||||
console.error('Error starting queue:', error);
|
||||
this.showToast('Failed to start queue processing', 'error');
|
||||
}
|
||||
}
|
||||
|
||||
async stopDownloadQueue() {
|
||||
async stopDownloads() {
|
||||
try {
|
||||
const response = await this.makeAuthenticatedRequest('/api/queue/stop', {
|
||||
method: 'POST'
|
||||
@@ -606,36 +834,23 @@ class QueueManager {
|
||||
const data = await response.json();
|
||||
|
||||
if (data.status === 'success') {
|
||||
this.showToast('Download queue stopped', 'success');
|
||||
this.showToast('Queue processing stopped', 'success');
|
||||
|
||||
// Update UI
|
||||
document.getElementById('stop-queue-btn').style.display = 'none';
|
||||
document.getElementById('start-queue-btn').style.display = 'inline-flex';
|
||||
document.getElementById('start-queue-btn').disabled = false;
|
||||
|
||||
this.loadQueueData(); // Refresh display
|
||||
} else {
|
||||
this.showToast(`Failed to stop queue: ${data.message}`, 'error');
|
||||
}
|
||||
} catch (error) {
|
||||
console.error('Error stopping download queue:', error);
|
||||
this.showToast('Failed to stop download queue', 'error');
|
||||
console.error('Error stopping queue:', error);
|
||||
this.showToast('Failed to stop queue', 'error');
|
||||
}
|
||||
}
|
||||
|
||||
pauseAllDownloads() {
|
||||
// TODO: Implement pause functionality
|
||||
this.showToast('Pause functionality not yet implemented', 'info');
|
||||
}
|
||||
|
||||
resumeAllDownloads() {
|
||||
// TODO: Implement resume functionality
|
||||
this.showToast('Resume functionality not yet implemented', 'info');
|
||||
}
|
||||
|
||||
toggleReorderMode() {
|
||||
// TODO: Implement reorder functionality
|
||||
this.showToast('Reorder functionality not yet implemented', 'info');
|
||||
}
|
||||
|
||||
async makeAuthenticatedRequest(url, options = {}) {
|
||||
// Get JWT token from localStorage
|
||||
const token = localStorage.getItem('access_token');
|
||||
|
||||
@@ -1,66 +0,0 @@
|
||||
/**
|
||||
* Touch Gestures Module
|
||||
* Handles touch gestures for mobile devices
|
||||
*/
|
||||
|
||||
(function() {
|
||||
'use strict';
|
||||
|
||||
/**
|
||||
* Initialize touch gestures
|
||||
*/
|
||||
function initTouchGestures() {
|
||||
if ('ontouchstart' in window) {
|
||||
setupSwipeGestures();
|
||||
console.log('[Touch Gestures] Initialized');
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Setup swipe gesture handlers
|
||||
*/
|
||||
function setupSwipeGestures() {
|
||||
let touchStartX = 0;
|
||||
let touchStartY = 0;
|
||||
let touchEndX = 0;
|
||||
let touchEndY = 0;
|
||||
|
||||
document.addEventListener('touchstart', (e) => {
|
||||
touchStartX = e.changedTouches[0].screenX;
|
||||
touchStartY = e.changedTouches[0].screenY;
|
||||
}, { passive: true });
|
||||
|
||||
document.addEventListener('touchend', (e) => {
|
||||
touchEndX = e.changedTouches[0].screenX;
|
||||
touchEndY = e.changedTouches[0].screenY;
|
||||
handleSwipe();
|
||||
}, { passive: true });
|
||||
|
||||
function handleSwipe() {
|
||||
const deltaX = touchEndX - touchStartX;
|
||||
const deltaY = touchEndY - touchStartY;
|
||||
const minSwipeDistance = 50;
|
||||
|
||||
if (Math.abs(deltaX) > Math.abs(deltaY)) {
|
||||
// Horizontal swipe
|
||||
if (Math.abs(deltaX) > minSwipeDistance) {
|
||||
if (deltaX > 0) {
|
||||
// Swipe right
|
||||
console.log('[Touch Gestures] Swipe right detected');
|
||||
} else {
|
||||
// Swipe left
|
||||
console.log('[Touch Gestures] Swipe left detected');
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
// Initialize on DOM ready
|
||||
if (document.readyState === 'loading') {
|
||||
document.addEventListener('DOMContentLoaded', initTouchGestures);
|
||||
} else {
|
||||
initTouchGestures();
|
||||
}
|
||||
|
||||
})();
|
||||
@@ -1,111 +0,0 @@
/**
 * Undo/Redo Module
 * Provides undo/redo functionality for user actions
 */

(function() {
    'use strict';

    const actionHistory = [];
    let currentIndex = -1;

    /**
     * Initialize undo/redo system
     */
    function initUndoRedo() {
        setupKeyboardShortcuts();
        console.log('[Undo/Redo] Initialized');
    }

    /**
     * Setup keyboard shortcuts for undo/redo
     */
    function setupKeyboardShortcuts() {
        document.addEventListener('keydown', (event) => {
            if (event.ctrlKey || event.metaKey) {
                if (event.key === 'z' && !event.shiftKey) {
                    event.preventDefault();
                    undo();
                } else if ((event.key === 'z' && event.shiftKey) || event.key === 'y') {
                    event.preventDefault();
                    redo();
                }
            }
        });
    }

    /**
     * Add action to history
     */
    function addAction(action) {
        // Remove any actions after current index
        actionHistory.splice(currentIndex + 1);

        // Add new action
        actionHistory.push(action);
        currentIndex++;

        // Limit history size
        if (actionHistory.length > 50) {
            actionHistory.shift();
            currentIndex--;
        }
    }

    /**
     * Undo last action
     */
    function undo() {
        if (currentIndex >= 0) {
            const action = actionHistory[currentIndex];
            if (action && action.undo) {
                action.undo();
                currentIndex--;
                showNotification('Action undone');
            }
        }
    }

    /**
     * Redo last undone action
     */
    function redo() {
        if (currentIndex < actionHistory.length - 1) {
            currentIndex++;
            const action = actionHistory[currentIndex];
            if (action && action.redo) {
                action.redo();
                showNotification('Action redone');
            }
        }
    }

    /**
     * Show undo/redo notification
     */
    function showNotification(message) {
        const notification = document.createElement('div');
        notification.className = 'undo-notification';
        notification.textContent = message;
        document.body.appendChild(notification);

        setTimeout(() => {
            notification.remove();
        }, 2000);
    }

    // Export functions
    window.UndoRedo = {
        add: addAction,
        undo: undo,
        redo: redo
    };

    // Initialize on DOM ready
    if (document.readyState === 'loading') {
        document.addEventListener('DOMContentLoaded', initUndoRedo);
    } else {
        initUndoRedo();
    }

})();
@ -102,6 +102,8 @@ class WebSocketClient {
            const message = JSON.parse(data);
            const { type, data: payload, timestamp } = message;

            console.log(`WebSocket message: type=${type}`, payload);

            // Emit event with payload
            if (type) {
                this.emit(type, payload || {});
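The client code above destructures a `{type, data, timestamp}` envelope and re-emits it by type. As a hedged sketch (not part of this change set), the matching server-side send in FastAPI could look like the following; only the three envelope keys are confirmed by the client code, everything else is an assumption.

```python
# Hypothetical sketch of the server-side envelope the WebSocket client expects.
from datetime import datetime, timezone

from fastapi import WebSocket


async def send_event(websocket: WebSocket, event_type: str, payload: dict) -> None:
    """Send one event in the {type, data, timestamp} envelope."""
    await websocket.send_json({
        "type": event_type,            # e.g. "download_progress"
        "data": payload,               # becomes `payload` on the client
        "timestamp": datetime.now(timezone.utc).isoformat(),
    })
```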
@ -171,21 +171,7 @@
                    </div>
                    <div id="progress-text" class="progress-text">0%</div>
                </div>
                <div id="download-controls" class="download-controls hidden">
                    <button id="pause-download" class="btn btn-secondary btn-small">
                        <i class="fas fa-pause"></i>
                        <span data-text="pause">Pause</span>
                    </button>
                    <button id="resume-download" class="btn btn-primary btn-small hidden">
                        <i class="fas fa-play"></i>
                        <span data-text="resume">Resume</span>
                    </button>
                    <button id="cancel-download" class="btn btn-small"
                        style="background-color: var(--color-error); color: white;">
                        <i class="fas fa-stop"></i>
                        <span data-text="cancel">Cancel</span>
                    </button>
                </div>
                <!-- Download controls removed - use dedicated queue page -->
            </div>
        </div>
@ -106,16 +106,6 @@
                    <i class="fas fa-play-circle"></i>
                    Active Downloads
                </h2>
                <div class="section-actions">
                    <button id="pause-all-btn" class="btn btn-secondary" disabled>
                        <i class="fas fa-pause"></i>
                        Pause All
                    </button>
                    <button id="resume-all-btn" class="btn btn-primary" disabled style="display: none;">
                        <i class="fas fa-play"></i>
                        Resume All
                    </button>
                </div>
            </div>

            <div class="active-downloads-list" id="active-downloads">
@ -131,24 +121,20 @@
                <div class="section-header">
                    <h2>
                        <i class="fas fa-clock"></i>
                        Download Queue
                        Download Queue (<span id="queue-count">0</span>)
                    </h2>
                    <div class="section-actions">
                        <button id="clear-pending-btn" class="btn btn-secondary" disabled>
                            <i class="fas fa-trash-alt"></i>
                            Remove All
                        </button>
                        <button id="start-queue-btn" class="btn btn-primary" disabled>
                            <i class="fas fa-play"></i>
                            Start Downloads
                            Start
                        </button>
                        <button id="stop-queue-btn" class="btn btn-secondary" disabled style="display: none;">
                            <i class="fas fa-stop"></i>
                            Stop Downloads
                        </button>
                        <button id="clear-queue-btn" class="btn btn-secondary" disabled>
                            <i class="fas fa-trash"></i>
                            Clear Queue
                        </button>
                        <button id="reorder-queue-btn" class="btn btn-secondary" disabled>
                            <i class="fas fa-sort"></i>
                            Reorder
                            Stop
                        </button>
                    </div>
                </div>
@ -157,6 +143,7 @@
                    <div class="empty-state">
                        <i class="fas fa-list"></i>
                        <p>No items in queue</p>
                        <small>Add episodes from the main page to start downloading</small>
                    </div>
                </div>
            </section>
@ -166,10 +153,10 @@
                <div class="section-header">
                    <h2>
                        <i class="fas fa-check-circle"></i>
                        Recent Completed
                        Completed (<span id="completed-count">0</span>)
                    </h2>
                    <div class="section-actions">
                        <button id="clear-completed-btn" class="btn btn-secondary">
                        <button id="clear-completed-btn" class="btn btn-secondary" disabled>
                            <i class="fas fa-broom"></i>
                            Clear Completed
                        </button>
@ -178,8 +165,9 @@

                <div class="completed-downloads-list" id="completed-downloads">
                    <div class="empty-state">
                        <i class="fas fa-check-circle"></i>
                        <i class="fas fa-check-circle text-success"></i>
                        <p>No completed downloads</p>
                        <small>Completed episodes will appear here</small>
                    </div>
                </div>
            </section>
@ -189,14 +177,14 @@
                <div class="section-header">
                    <h2>
                        <i class="fas fa-exclamation-triangle"></i>
                        Failed Downloads
                        Failed (<span id="failed-count">0</span>)
                    </h2>
                    <div class="section-actions">
                        <button id="retry-all-btn" class="btn btn-warning" disabled>
                            <i class="fas fa-redo"></i>
                            Retry All
                        </button>
                        <button id="clear-failed-btn" class="btn btn-secondary">
                        <button id="clear-failed-btn" class="btn btn-secondary" disabled>
                            <i class="fas fa-trash"></i>
                            Clear Failed
                        </button>
@ -207,6 +195,7 @@
                    <div class="empty-state">
                        <i class="fas fa-check-circle text-success"></i>
                        <p>No failed downloads</p>
                        <small>Failed episodes can be retried or removed</small>
                    </div>
                </div>
            </section>
23
stop_server.sh
Normal file
@ -0,0 +1,23 @@
#!/bin/bash
# Stop Aniworld FastAPI Server

echo "Stopping Aniworld server..."

# Method 1: Kill uvicorn processes
pkill -f "uvicorn.*fastapi_app:app" && echo "✓ Stopped uvicorn processes"

# Method 2: Kill any process using port 8000
PORT_PID=$(lsof -ti:8000)
if [ -n "$PORT_PID" ]; then
    kill -9 $PORT_PID
    echo "✓ Killed process on port 8000 (PID: $PORT_PID)"
else
    echo "✓ Port 8000 is already free"
fi

# Method 3: Kill any python processes running the server
pkill -f "run_server.py" && echo "✓ Stopped run_server.py processes"

echo ""
echo "Server stopped successfully!"
echo "You can restart it with: ./start_server.sh"
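The script above relies on pkill and lsof, which are Linux-specific. As a rough, hypothetical cross-platform equivalent of "Method 2" only (not part of this change set), the same "free port 8000" step could be done with psutil, which the analytics tests already reference:

```python
# Hypothetical sketch: free port 8000 without pkill/lsof. Assumes psutil is
# installed; the port and function name are illustrative choices.
import psutil


def stop_server_on_port(port: int = 8000) -> None:
    """Terminate any process that is listening on the given TCP port."""
    for conn in psutil.net_connections(kind="tcp"):
        if conn.laddr and conn.laddr.port == port and conn.pid:
            proc = psutil.Process(conn.pid)
            print(f"Stopping PID {conn.pid} ({proc.name()}) on port {port}")
            proc.terminate()


if __name__ == "__main__":
    stop_server_on_port()
```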
@ -1,154 +0,0 @@
|
||||
"""Integration tests for analytics API endpoints.
|
||||
|
||||
Tests analytics API endpoints including download statistics,
|
||||
series popularity, storage analysis, and performance reports.
|
||||
"""
|
||||
|
||||
from unittest.mock import AsyncMock, patch
|
||||
|
||||
import pytest
|
||||
from httpx import ASGITransport, AsyncClient
|
||||
|
||||
from src.server.fastapi_app import app
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_analytics_downloads_endpoint():
|
||||
"""Test GET /api/analytics/downloads endpoint."""
|
||||
transport = ASGITransport(app=app)
|
||||
async with AsyncClient(
|
||||
transport=transport, base_url="http://test"
|
||||
) as client:
|
||||
with patch("src.server.api.analytics.get_db_session") as mock_get_db:
|
||||
mock_db = AsyncMock()
|
||||
mock_get_db.return_value = mock_db
|
||||
|
||||
response = await client.get("/api/analytics/downloads?days=30")
|
||||
|
||||
assert response.status_code in [200, 422, 500]
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_analytics_series_popularity_endpoint():
|
||||
"""Test GET /api/analytics/series-popularity endpoint."""
|
||||
transport = ASGITransport(app=app)
|
||||
async with AsyncClient(
|
||||
transport=transport, base_url="http://test"
|
||||
) as client:
|
||||
with patch("src.server.api.analytics.get_db_session") as mock_get_db:
|
||||
mock_db = AsyncMock()
|
||||
mock_get_db.return_value = mock_db
|
||||
|
||||
response = await client.get(
|
||||
"/api/analytics/series-popularity?limit=10"
|
||||
)
|
||||
|
||||
assert response.status_code in [200, 422, 500]
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_analytics_storage_endpoint():
|
||||
"""Test GET /api/analytics/storage endpoint."""
|
||||
transport = ASGITransport(app=app)
|
||||
async with AsyncClient(
|
||||
transport=transport, base_url="http://test"
|
||||
) as client:
|
||||
with patch("psutil.disk_usage") as mock_disk:
|
||||
mock_disk.return_value = {
|
||||
"total": 1024 * 1024 * 1024,
|
||||
"used": 512 * 1024 * 1024,
|
||||
"free": 512 * 1024 * 1024,
|
||||
"percent": 50.0,
|
||||
}
|
||||
|
||||
response = await client.get("/api/analytics/storage")
|
||||
|
||||
assert response.status_code in [200, 401, 500]
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_analytics_performance_endpoint():
|
||||
"""Test GET /api/analytics/performance endpoint."""
|
||||
transport = ASGITransport(app=app)
|
||||
async with AsyncClient(
|
||||
transport=transport, base_url="http://test"
|
||||
) as client:
|
||||
with patch("src.server.api.analytics.get_db_session") as mock_get_db:
|
||||
mock_db = AsyncMock()
|
||||
mock_get_db.return_value = mock_db
|
||||
|
||||
response = await client.get(
|
||||
"/api/analytics/performance?hours=24"
|
||||
)
|
||||
|
||||
assert response.status_code in [200, 422, 500]
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_analytics_summary_endpoint():
|
||||
"""Test GET /api/analytics/summary endpoint."""
|
||||
transport = ASGITransport(app=app)
|
||||
async with AsyncClient(
|
||||
transport=transport, base_url="http://test"
|
||||
) as client:
|
||||
with patch("src.server.api.analytics.get_db_session") as mock_get_db:
|
||||
mock_db = AsyncMock()
|
||||
mock_get_db.return_value = mock_db
|
||||
|
||||
response = await client.get("/api/analytics/summary")
|
||||
|
||||
assert response.status_code in [200, 500]
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_analytics_downloads_with_query_params():
|
||||
"""Test /api/analytics/downloads with different query params."""
|
||||
transport = ASGITransport(app=app)
|
||||
async with AsyncClient(
|
||||
transport=transport, base_url="http://test"
|
||||
) as client:
|
||||
with patch("src.server.api.analytics.get_db_session") as mock_get_db:
|
||||
mock_db = AsyncMock()
|
||||
mock_get_db.return_value = mock_db
|
||||
|
||||
response = await client.get("/api/analytics/downloads?days=7")
|
||||
|
||||
assert response.status_code in [200, 422, 500]
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_analytics_series_with_different_limits():
|
||||
"""Test /api/analytics/series-popularity with different limits."""
|
||||
transport = ASGITransport(app=app)
|
||||
async with AsyncClient(
|
||||
transport=transport, base_url="http://test"
|
||||
) as client:
|
||||
with patch("src.server.api.analytics.get_db_session") as mock_get_db:
|
||||
mock_db = AsyncMock()
|
||||
mock_get_db.return_value = mock_db
|
||||
|
||||
for limit in [5, 10, 20]:
|
||||
response = await client.get(
|
||||
f"/api/analytics/series-popularity?limit={limit}"
|
||||
)
|
||||
assert response.status_code in [200, 422, 500]
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_analytics_performance_with_different_hours():
|
||||
"""Test /api/analytics/performance with different hour ranges."""
|
||||
transport = ASGITransport(app=app)
|
||||
async with AsyncClient(
|
||||
transport=transport, base_url="http://test"
|
||||
) as client:
|
||||
with patch("src.server.api.analytics.get_db_session") as mock_get_db:
|
||||
mock_db = AsyncMock()
|
||||
mock_get_db.return_value = mock_db
|
||||
|
||||
for hours in [1, 12, 24, 72]:
|
||||
response = await client.get(
|
||||
f"/api/analytics/performance?hours={hours}"
|
||||
)
|
||||
assert response.status_code in [200, 422, 500]
|
||||
|
||||
|
||||
@ -43,6 +43,16 @@ class FakeSeriesApp:
|
||||
"""Trigger rescan with callback."""
|
||||
callback()
|
||||
|
||||
def add(self, serie):
|
||||
"""Add a serie to the list."""
|
||||
# Check if already exists
|
||||
if not any(s.key == serie.key for s in self._items):
|
||||
self._items.append(serie)
|
||||
|
||||
def refresh_series_list(self):
|
||||
"""Refresh series list."""
|
||||
pass
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
|
||||
def reset_auth_state():
|
||||
@ -144,3 +154,61 @@ async def test_get_anime_detail_endpoint_unauthorized():
|
||||
response = await client.get("/api/v1/anime/1")
|
||||
# Should work or require auth
|
||||
assert response.status_code in (200, 401, 404, 503)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_add_series_endpoint_unauthorized():
|
||||
"""Test POST /api/anime/add without authentication."""
|
||||
transport = ASGITransport(app=app)
|
||||
async with AsyncClient(transport=transport, base_url="http://test") as client:
|
||||
response = await client.post(
|
||||
"/api/anime/add",
|
||||
json={"link": "test-link", "name": "Test Anime"}
|
||||
)
|
||||
# Should require auth
|
||||
assert response.status_code == 401
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_add_series_endpoint_authenticated(authenticated_client):
|
||||
"""Test POST /api/anime/add with authentication."""
|
||||
response = await authenticated_client.post(
|
||||
"/api/anime/add",
|
||||
json={"link": "test-anime-link", "name": "Test New Anime"}
|
||||
)
|
||||
|
||||
# The endpoint should succeed (returns 200 or may fail if series exists)
|
||||
assert response.status_code in (200, 400)
|
||||
data = response.json()
|
||||
|
||||
if response.status_code == 200:
|
||||
assert data["status"] == "success"
|
||||
assert "Test New Anime" in data["message"]
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_add_series_endpoint_empty_name(authenticated_client):
|
||||
"""Test POST /api/anime/add with empty name."""
|
||||
response = await authenticated_client.post(
|
||||
"/api/anime/add",
|
||||
json={"link": "test-link", "name": ""}
|
||||
)
|
||||
|
||||
# Should return 400 for empty name
|
||||
assert response.status_code == 400
|
||||
data = response.json()
|
||||
assert "name" in data["detail"].lower()
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_add_series_endpoint_empty_link(authenticated_client):
|
||||
"""Test POST /api/anime/add with empty link."""
|
||||
response = await authenticated_client.post(
|
||||
"/api/anime/add",
|
||||
json={"link": "", "name": "Test Anime"}
|
||||
)
|
||||
|
||||
# Should return 400 for empty link
|
||||
assert response.status_code == 400
|
||||
data = response.json()
|
||||
assert "link" in data["detail"].lower()
|
||||
|
||||
@ -92,14 +92,9 @@ def mock_download_service():
|
||||
# Mock remove_from_queue
|
||||
service.remove_from_queue = AsyncMock(return_value=["item-id-1"])
|
||||
|
||||
# Mock reorder_queue
|
||||
service.reorder_queue = AsyncMock(return_value=True)
|
||||
|
||||
# Mock start/stop/pause/resume
|
||||
service.start = AsyncMock()
|
||||
service.stop = AsyncMock()
|
||||
service.pause_queue = AsyncMock()
|
||||
service.resume_queue = AsyncMock()
|
||||
# Mock start/stop
|
||||
service.start_next_download = AsyncMock(return_value="item-id-1")
|
||||
service.stop_downloads = AsyncMock()
|
||||
|
||||
# Mock clear_completed and retry_failed
|
||||
service.clear_completed = AsyncMock(return_value=5)
|
||||
@ -116,10 +111,16 @@ async def test_get_queue_status(authenticated_client, mock_download_service):
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
|
||||
assert "status" in data
|
||||
# Updated to match new response structure
|
||||
assert "is_running" in data
|
||||
assert "is_paused" in data
|
||||
assert "active_downloads" in data
|
||||
assert "pending_queue" in data
|
||||
assert "completed_downloads" in data
|
||||
assert "failed_downloads" in data
|
||||
assert "statistics" in data
|
||||
assert data["status"]["is_running"] is True
|
||||
assert data["status"]["is_paused"] is False
|
||||
assert data["is_running"] is True
|
||||
assert data["is_paused"] is False
|
||||
|
||||
mock_download_service.get_queue_status.assert_called_once()
|
||||
mock_download_service.get_queue_stats.assert_called_once()
|
||||
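For reference, the updated assertions above imply a flattened response from GET /api/queue/status roughly like the sketch below. Only the keys the test checks are confirmed; the example values and inner statistics fields are illustrative.

```python
# Assumed shape of GET /api/queue/status after this change; keys are taken
# from the assertions above, values are placeholders.
queue_status_example = {
    "is_running": True,
    "is_paused": False,
    "active_downloads": [],        # items currently downloading
    "pending_queue": [],           # items waiting to be processed
    "completed_downloads": [],     # finished items kept for display
    "failed_downloads": [],        # items that errored and can be retried
    "statistics": {
        "completed_count": 0,
        "failed_count": 0,
    },
}
```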
@ -259,54 +260,56 @@ async def test_remove_from_queue_not_found(
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_remove_multiple_from_queue(
|
||||
async def test_start_download_success(
|
||||
authenticated_client, mock_download_service
|
||||
):
|
||||
"""Test DELETE /api/queue/ with multiple items."""
|
||||
request_data = {"item_ids": ["item-id-1", "item-id-2"]}
|
||||
|
||||
response = await authenticated_client.request(
|
||||
"DELETE", "/api/queue/", json=request_data
|
||||
)
|
||||
|
||||
assert response.status_code == 204
|
||||
|
||||
mock_download_service.remove_from_queue.assert_called_once_with(
|
||||
["item-id-1", "item-id-2"]
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_remove_multiple_empty_list(
|
||||
authenticated_client, mock_download_service
|
||||
):
|
||||
"""Test removing with empty item list returns 400."""
|
||||
request_data = {"item_ids": []}
|
||||
|
||||
response = await authenticated_client.request(
|
||||
"DELETE", "/api/queue/", json=request_data
|
||||
)
|
||||
|
||||
assert response.status_code == 400
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_start_queue(authenticated_client, mock_download_service):
|
||||
"""Test POST /api/queue/start endpoint."""
|
||||
"""Test POST /api/queue/start starts first pending download."""
|
||||
response = await authenticated_client.post("/api/queue/start")
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
|
||||
assert data["status"] == "success"
|
||||
assert "started" in data["message"].lower()
|
||||
assert "item_id" in data
|
||||
assert data["item_id"] == "item-id-1"
|
||||
|
||||
mock_download_service.start.assert_called_once()
|
||||
mock_download_service.start_next_download.assert_called_once()
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_stop_queue(authenticated_client, mock_download_service):
|
||||
"""Test POST /api/queue/stop endpoint."""
|
||||
async def test_start_download_empty_queue(
|
||||
authenticated_client, mock_download_service
|
||||
):
|
||||
"""Test starting download with empty queue returns 400."""
|
||||
mock_download_service.start_next_download.return_value = None
|
||||
|
||||
response = await authenticated_client.post("/api/queue/start")
|
||||
|
||||
assert response.status_code == 400
|
||||
data = response.json()
|
||||
detail = data["detail"].lower()
|
||||
assert "empty" in detail or "no pending" in detail
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_start_download_already_active(
|
||||
authenticated_client, mock_download_service
|
||||
):
|
||||
"""Test starting download while one is active returns 400."""
|
||||
mock_download_service.start_next_download.side_effect = (
|
||||
DownloadServiceError("A download is already in progress")
|
||||
)
|
||||
|
||||
response = await authenticated_client.post("/api/queue/start")
|
||||
|
||||
assert response.status_code == 400
|
||||
data = response.json()
|
||||
assert "already" in data["detail"].lower()
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_stop_downloads(authenticated_client, mock_download_service):
|
||||
"""Test POST /api/queue/stop stops queue processing."""
|
||||
response = await authenticated_client.post("/api/queue/stop")
|
||||
|
||||
assert response.status_code == 200
|
||||
@ -315,70 +318,7 @@ async def test_stop_queue(authenticated_client, mock_download_service):
|
||||
assert data["status"] == "success"
|
||||
assert "stopped" in data["message"].lower()
|
||||
|
||||
mock_download_service.stop.assert_called_once()
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_pause_queue(authenticated_client, mock_download_service):
|
||||
"""Test POST /api/queue/pause endpoint."""
|
||||
response = await authenticated_client.post("/api/queue/pause")
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
|
||||
assert data["status"] == "success"
|
||||
assert "paused" in data["message"].lower()
|
||||
|
||||
mock_download_service.pause_queue.assert_called_once()
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_resume_queue(authenticated_client, mock_download_service):
|
||||
"""Test POST /api/queue/resume endpoint."""
|
||||
response = await authenticated_client.post("/api/queue/resume")
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
|
||||
assert data["status"] == "success"
|
||||
assert "resumed" in data["message"].lower()
|
||||
|
||||
mock_download_service.resume_queue.assert_called_once()
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_reorder_queue(authenticated_client, mock_download_service):
|
||||
"""Test POST /api/queue/reorder endpoint."""
|
||||
request_data = {"item_id": "item-id-1", "new_position": 0}
|
||||
|
||||
response = await authenticated_client.post(
|
||||
"/api/queue/reorder", json=request_data
|
||||
)
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
|
||||
assert data["status"] == "success"
|
||||
|
||||
mock_download_service.reorder_queue.assert_called_once_with(
|
||||
item_id="item-id-1", new_position=0
|
||||
)
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_reorder_queue_not_found(
|
||||
authenticated_client, mock_download_service
|
||||
):
|
||||
"""Test reordering non-existent item returns 404."""
|
||||
mock_download_service.reorder_queue.return_value = False
|
||||
|
||||
request_data = {"item_id": "non-existent", "new_position": 0}
|
||||
|
||||
response = await authenticated_client.post(
|
||||
"/api/queue/reorder", json=request_data
|
||||
)
|
||||
|
||||
assert response.status_code == 404
|
||||
mock_download_service.stop_downloads.assert_called_once()
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
@ -395,6 +335,22 @@ async def test_clear_completed(authenticated_client, mock_download_service):
|
||||
mock_download_service.clear_completed.assert_called_once()
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_clear_pending(authenticated_client, mock_download_service):
|
||||
"""Test DELETE /api/queue/pending endpoint."""
|
||||
mock_download_service.clear_pending = AsyncMock(return_value=3)
|
||||
|
||||
response = await authenticated_client.delete("/api/queue/pending")
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
|
||||
assert data["status"] == "success"
|
||||
assert data["count"] == 3
|
||||
|
||||
mock_download_service.clear_pending.assert_called_once()
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_retry_failed(authenticated_client, mock_download_service):
|
||||
"""Test POST /api/queue/retry endpoint."""
|
||||
@ -444,8 +400,6 @@ async def test_queue_endpoints_require_auth(mock_download_service):
|
||||
("DELETE", "/api/queue/item-1"),
|
||||
("POST", "/api/queue/start"),
|
||||
("POST", "/api/queue/stop"),
|
||||
("POST", "/api/queue/pause"),
|
||||
("POST", "/api/queue/resume"),
|
||||
]
|
||||
|
||||
for method, url in endpoints:
|
||||
@ -456,7 +410,8 @@ async def test_queue_endpoints_require_auth(mock_download_service):
|
||||
elif method == "DELETE":
|
||||
response = await client.delete(url)
|
||||
|
||||
# Should return 401 or 503 (503 if service not available)
|
||||
# Should return 401 or 503 (503 if service unavailable)
|
||||
assert response.status_code in (401, 503), (
|
||||
f"{method} {url} should require auth, got {response.status_code}"
|
||||
f"{method} {url} should require auth, "
|
||||
f"got {response.status_code}"
|
||||
)
|
||||
|
||||
466
tests/api/test_queue_features.py
Normal file
@ -0,0 +1,466 @@
|
||||
"""Tests for queue management features.
|
||||
|
||||
This module tests the queue page functionality including:
|
||||
- Display of queued items in organized lists
|
||||
- Drag-and-drop reordering
|
||||
- Starting and stopping queue processing
|
||||
- Filtering completed and failed downloads
|
||||
"""
|
||||
import pytest
|
||||
from httpx import ASGITransport, AsyncClient
|
||||
|
||||
from src.server.fastapi_app import app
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
async def client():
|
||||
"""Create an async test client."""
|
||||
transport = ASGITransport(app=app)
|
||||
async with AsyncClient(
|
||||
transport=transport, base_url="http://test"
|
||||
) as client:
|
||||
yield client
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
async def auth_headers(client: AsyncClient):
|
||||
"""Get authentication headers with valid JWT token."""
|
||||
# Setup auth
|
||||
await client.post(
|
||||
"/api/auth/setup",
|
||||
json={"master_password": "TestPass123!"}
|
||||
)
|
||||
|
||||
# Login
|
||||
response = await client.post(
|
||||
"/api/auth/login",
|
||||
json={"password": "TestPass123!"}
|
||||
)
|
||||
|
||||
data = response.json()
|
||||
token = data["access_token"]
|
||||
|
||||
return {"Authorization": f"Bearer {token}"}
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def sample_download_request():
|
||||
"""Sample download request for testing."""
|
||||
return {
|
||||
"serie_id": "test-series",
|
||||
"serie_name": "Test Series",
|
||||
"episodes": [
|
||||
{"season": 1, "episode": 1},
|
||||
{"season": 1, "episode": 2}
|
||||
],
|
||||
"priority": "normal"
|
||||
}
|
||||
|
||||
|
||||
class TestQueueDisplay:
|
||||
"""Test queue display and organization."""
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_queue_status_includes_all_sections(
|
||||
self, client: AsyncClient, auth_headers: dict
|
||||
):
|
||||
"""Test queue status includes all sections."""
|
||||
response = await client.get(
|
||||
"/api/queue/status",
|
||||
headers=auth_headers
|
||||
)
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
|
||||
# Verify structure
|
||||
assert "status" in data
|
||||
assert "statistics" in data
|
||||
|
||||
status = data["status"]
|
||||
assert "active" in status
|
||||
assert "pending" in status
|
||||
assert "completed" in status
|
||||
assert "failed" in status
|
||||
assert "is_running" in status
|
||||
assert "is_paused" in status
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_queue_items_have_required_fields(
|
||||
self, client: AsyncClient, auth_headers: dict,
|
||||
sample_download_request: dict
|
||||
):
|
||||
"""Test queue items have required display fields."""
|
||||
# Add an item to the queue
|
||||
add_response = await client.post(
|
||||
"/api/queue/add",
|
||||
json=sample_download_request,
|
||||
headers=auth_headers
|
||||
)
|
||||
assert add_response.status_code == 201
|
||||
|
||||
# Get queue status
|
||||
response = await client.get(
|
||||
"/api/queue/status",
|
||||
headers=auth_headers
|
||||
)
|
||||
assert response.status_code == 200
|
||||
|
||||
data = response.json()
|
||||
pending = data["status"]["pending"]
|
||||
|
||||
assert len(pending) > 0
|
||||
item = pending[0]
|
||||
|
||||
# Verify required fields for display
|
||||
assert "id" in item
|
||||
assert "serie_name" in item
|
||||
assert "episode" in item
|
||||
assert "priority" in item
|
||||
assert "added_at" in item
|
||||
|
||||
# Verify episode structure
|
||||
episode = item["episode"]
|
||||
assert "season" in episode
|
||||
assert "episode" in episode
|
||||
|
||||
|
||||
class TestQueueReordering:
|
||||
"""Test queue reordering functionality."""
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_reorder_queue_with_item_ids(
|
||||
self, client: AsyncClient, auth_headers: dict
|
||||
):
|
||||
"""Test reordering queue using item_ids array."""
|
||||
# Clear existing queue first
|
||||
status_response = await client.get(
|
||||
"/api/queue/status",
|
||||
headers=auth_headers
|
||||
)
|
||||
existing_items = [
|
||||
item["id"]
|
||||
for item in status_response.json()["status"]["pending"]
|
||||
]
|
||||
if existing_items:
|
||||
await client.request(
|
||||
"DELETE",
|
||||
"/api/queue/",
|
||||
json={"item_ids": existing_items},
|
||||
headers=auth_headers
|
||||
)
|
||||
|
||||
# Add exactly 3 items
|
||||
added_ids = []
|
||||
for i in range(3):
|
||||
response = await client.post(
|
||||
"/api/queue/add",
|
||||
json={
|
||||
"serie_id": f"test-{i}",
|
||||
"serie_name": f"Test Series {i}",
|
||||
"episodes": [{"season": 1, "episode": i+1}],
|
||||
"priority": "normal"
|
||||
},
|
||||
headers=auth_headers
|
||||
)
|
||||
if response.status_code == 201:
|
||||
data = response.json()
|
||||
if "added_items" in data and data["added_items"]:
|
||||
added_ids.extend(data["added_items"])
|
||||
|
||||
assert len(added_ids) == 3, f"Expected 3 items, got {len(added_ids)}"
|
||||
|
||||
# Reverse the order
|
||||
new_order = list(reversed(added_ids))
|
||||
|
||||
# Reorder
|
||||
reorder_response = await client.post(
|
||||
"/api/queue/reorder",
|
||||
json={"item_ids": new_order},
|
||||
headers=auth_headers
|
||||
)
|
||||
|
||||
assert reorder_response.status_code == 200
|
||||
assert reorder_response.json()["status"] == "success"
|
||||
|
||||
# Verify new order
|
||||
status_response = await client.get(
|
||||
"/api/queue/status",
|
||||
headers=auth_headers
|
||||
)
|
||||
current_order = [
|
||||
item["id"]
|
||||
for item in status_response.json()["status"]["pending"]
|
||||
]
|
||||
|
||||
assert current_order == new_order
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_reorder_with_invalid_ids(
|
||||
self, client: AsyncClient, auth_headers: dict
|
||||
):
|
||||
"""Test reordering with non-existent IDs succeeds (idempotent)."""
|
||||
response = await client.post(
|
||||
"/api/queue/reorder",
|
||||
json={"item_ids": ["invalid-id-1", "invalid-id-2"]},
|
||||
headers=auth_headers
|
||||
)
|
||||
|
||||
# Bulk reorder is idempotent and succeeds even with invalid IDs
|
||||
# It just ignores items that don't exist
|
||||
assert response.status_code == 200
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_reorder_empty_list(
|
||||
self, client: AsyncClient, auth_headers: dict
|
||||
):
|
||||
"""Test reordering with empty list."""
|
||||
response = await client.post(
|
||||
"/api/queue/reorder",
|
||||
json={"item_ids": []},
|
||||
headers=auth_headers
|
||||
)
|
||||
|
||||
# Should succeed but do nothing
|
||||
assert response.status_code in [200, 404]
|
||||
|
||||
|
||||
class TestQueueControl:
|
||||
"""Test queue start/stop functionality."""
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_start_queue(
|
||||
self, client: AsyncClient, auth_headers: dict
|
||||
):
|
||||
"""Test starting the download queue."""
|
||||
response = await client.post(
|
||||
"/api/queue/start",
|
||||
headers=auth_headers
|
||||
)
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["status"] == "success"
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_stop_queue(
|
||||
self, client: AsyncClient, auth_headers: dict
|
||||
):
|
||||
"""Test stopping the download queue."""
|
||||
# Start first
|
||||
await client.post("/api/queue/start", headers=auth_headers)
|
||||
|
||||
# Then stop
|
||||
response = await client.post(
|
||||
"/api/queue/stop",
|
||||
headers=auth_headers
|
||||
)
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert data["status"] == "success"
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_queue_status_reflects_running_state(
|
||||
self, client: AsyncClient, auth_headers: dict
|
||||
):
|
||||
"""Test queue status reflects running state."""
|
||||
# Initially not running
|
||||
status = await client.get(
|
||||
"/api/queue/status",
|
||||
headers=auth_headers
|
||||
)
|
||||
assert status.json()["status"]["is_running"] is False
|
||||
|
||||
# Start queue
|
||||
await client.post("/api/queue/start", headers=auth_headers)
|
||||
|
||||
# Should be running
|
||||
status = await client.get(
|
||||
"/api/queue/status",
|
||||
headers=auth_headers
|
||||
)
|
||||
assert status.json()["status"]["is_running"] is True
|
||||
|
||||
# Stop queue
|
||||
await client.post("/api/queue/stop", headers=auth_headers)
|
||||
|
||||
# Should not be running
|
||||
status = await client.get(
|
||||
"/api/queue/status",
|
||||
headers=auth_headers
|
||||
)
|
||||
assert status.json()["status"]["is_running"] is False
|
||||
|
||||
|
||||
class TestCompletedDownloads:
|
||||
"""Test completed downloads management."""
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_clear_completed_downloads(
|
||||
self, client: AsyncClient, auth_headers: dict
|
||||
):
|
||||
"""Test clearing completed downloads."""
|
||||
response = await client.delete(
|
||||
"/api/queue/completed",
|
||||
headers=auth_headers
|
||||
)
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert "count" in data
|
||||
assert data["status"] == "success"
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_completed_section_count(
|
||||
self, client: AsyncClient, auth_headers: dict
|
||||
):
|
||||
"""Test that completed count is accurate."""
|
||||
status = await client.get(
|
||||
"/api/queue/status",
|
||||
headers=auth_headers
|
||||
)
|
||||
data = status.json()
|
||||
|
||||
completed_count = data["statistics"]["completed_count"]
|
||||
completed_list = len(data["status"]["completed"])
|
||||
|
||||
# Count should match list length
|
||||
assert completed_count == completed_list
|
||||
|
||||
|
||||
class TestFailedDownloads:
|
||||
"""Test failed downloads management."""
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_clear_failed_downloads(
|
||||
self, client: AsyncClient, auth_headers: dict
|
||||
):
|
||||
"""Test clearing failed downloads."""
|
||||
response = await client.delete(
|
||||
"/api/queue/failed",
|
||||
headers=auth_headers
|
||||
)
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert "count" in data
|
||||
assert data["status"] == "success"
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_retry_failed_downloads(
|
||||
self, client: AsyncClient, auth_headers: dict
|
||||
):
|
||||
"""Test retrying failed downloads."""
|
||||
response = await client.post(
|
||||
"/api/queue/retry",
|
||||
json={"item_ids": []},
|
||||
headers=auth_headers
|
||||
)
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert "retried_count" in data
|
||||
assert data["status"] == "success"
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_retry_specific_failed_download(
|
||||
self, client: AsyncClient, auth_headers: dict
|
||||
):
|
||||
"""Test retrying a specific failed download."""
|
||||
# Test the endpoint accepts the format
|
||||
response = await client.post(
|
||||
"/api/queue/retry",
|
||||
json={"item_ids": ["some-id"]},
|
||||
headers=auth_headers
|
||||
)
|
||||
|
||||
# Should succeed even if ID doesn't exist (idempotent)
|
||||
assert response.status_code == 200
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_failed_section_count(
|
||||
self, client: AsyncClient, auth_headers: dict
|
||||
):
|
||||
"""Test that failed count is accurate."""
|
||||
status = await client.get(
|
||||
"/api/queue/status",
|
||||
headers=auth_headers
|
||||
)
|
||||
data = status.json()
|
||||
|
||||
failed_count = data["statistics"]["failed_count"]
|
||||
failed_list = len(data["status"]["failed"])
|
||||
|
||||
# Count should match list length
|
||||
assert failed_count == failed_list
|
||||
|
||||
|
||||
class TestBulkOperations:
|
||||
"""Test bulk queue operations."""
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_remove_multiple_items(
|
||||
self, client: AsyncClient, auth_headers: dict
|
||||
):
|
||||
"""Test removing multiple items from queue."""
|
||||
# Add multiple items
|
||||
item_ids = []
|
||||
for i in range(3):
|
||||
add_response = await client.post(
|
||||
"/api/queue/add",
|
||||
json={
|
||||
"serie_id": f"bulk-test-{i}",
|
||||
"serie_name": f"Bulk Test {i}",
|
||||
"episodes": [{"season": 1, "episode": i+1}],
|
||||
"priority": "normal"
|
||||
},
|
||||
headers=auth_headers
|
||||
)
|
||||
if add_response.status_code == 201:
|
||||
data = add_response.json()
|
||||
if "added_items" in data and len(data["added_items"]) > 0:
|
||||
item_ids.append(data["added_items"][0])
|
||||
|
||||
# Remove all at once
|
||||
if item_ids:
|
||||
response = await client.request(
|
||||
"DELETE",
|
||||
"/api/queue/",
|
||||
json={"item_ids": item_ids},
|
||||
headers=auth_headers
|
||||
)
|
||||
|
||||
assert response.status_code == 204
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_clear_entire_pending_queue(
|
||||
self, client: AsyncClient, auth_headers: dict
|
||||
):
|
||||
"""Test clearing entire pending queue."""
|
||||
# Get all pending items
|
||||
status = await client.get(
|
||||
"/api/queue/status",
|
||||
headers=auth_headers
|
||||
)
|
||||
pending = status.json()["status"]["pending"]
|
||||
|
||||
if pending:
|
||||
item_ids = [item["id"] for item in pending]
|
||||
|
||||
# Remove all
|
||||
response = await client.request(
|
||||
"DELETE",
|
||||
"/api/queue/",
|
||||
json={"item_ids": item_ids},
|
||||
headers=auth_headers
|
||||
)
|
||||
|
||||
assert response.status_code == 204
|
||||
|
||||
# Verify queue is empty
|
||||
status = await client.get(
|
||||
"/api/queue/status",
|
||||
headers=auth_headers
|
||||
)
|
||||
assert len(status.json()["status"]["pending"]) == 0
|
||||
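The fixtures in this new test file encode the full client flow (auth setup, login, add to queue, read status). A minimal sketch of the same flow outside pytest, assuming only the endpoints and payloads shown above (password and series values are placeholders):

```python
# Minimal sketch mirroring the fixtures above; runnable against the app in-process.
import asyncio

from httpx import ASGITransport, AsyncClient

from src.server.fastapi_app import app


async def main() -> None:
    transport = ASGITransport(app=app)
    async with AsyncClient(transport=transport, base_url="http://test") as client:
        # One-time setup, then login to obtain a JWT
        await client.post("/api/auth/setup", json={"master_password": "TestPass123!"})
        login = await client.post("/api/auth/login", json={"password": "TestPass123!"})
        headers = {"Authorization": f"Bearer {login.json()['access_token']}"}

        # Queue one episode, then inspect the queue
        await client.post(
            "/api/queue/add",
            json={
                "serie_id": "test-series",
                "serie_name": "Test Series",
                "episodes": [{"season": 1, "episode": 1}],
                "priority": "normal",
            },
            headers=headers,
        )
        status = await client.get("/api/queue/status", headers=headers)
        print(status.json()["statistics"])


asyncio.run(main())
```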
@ -197,7 +197,7 @@ class TestFrontendAnimeAPI:
|
||||
assert isinstance(data, list)
|
||||
# Search should return results (actual API call)
|
||||
if len(data) > 0:
|
||||
assert "title" in data[0]
|
||||
assert "name" in data[0]
|
||||
|
||||
async def test_rescan_anime(self, authenticated_client):
|
||||
"""Test POST /api/anime/rescan triggers rescan."""
|
||||
@ -247,23 +247,17 @@ class TestFrontendDownloadAPI:
|
||||
assert "status" in data or "statistics" in data
|
||||
|
||||
async def test_start_download_queue(self, authenticated_client):
|
||||
"""Test POST /api/queue/start starts queue."""
|
||||
"""Test POST /api/queue/start starts next download."""
|
||||
response = await authenticated_client.post("/api/queue/start")
|
||||
|
||||
assert response.status_code == 200
|
||||
# Should return 200 with item_id, or 400 if queue is empty
|
||||
assert response.status_code in [200, 400]
|
||||
data = response.json()
|
||||
assert "message" in data or "status" in data
|
||||
|
||||
async def test_pause_download_queue(self, authenticated_client):
|
||||
"""Test POST /api/queue/pause pauses queue."""
|
||||
response = await authenticated_client.post("/api/queue/pause")
|
||||
|
||||
assert response.status_code == 200
|
||||
data = response.json()
|
||||
assert "message" in data or "status" in data
|
||||
if response.status_code == 200:
|
||||
assert "item_id" in data
|
||||
|
||||
async def test_stop_download_queue(self, authenticated_client):
|
||||
"""Test POST /api/queue/stop stops queue."""
|
||||
"""Test POST /api/queue/stop stops processing new downloads."""
|
||||
response = await authenticated_client.post("/api/queue/stop")
|
||||
|
||||
assert response.status_code == 200
|
||||
|
||||
@ -323,8 +323,8 @@ class TestProtectedEndpoints:
|
||||
endpoints = [
|
||||
("/api/queue/status", "GET"),
|
||||
("/api/queue/add", "POST"),
|
||||
("/api/queue/control/start", "POST"),
|
||||
("/api/queue/control/pause", "POST"),
|
||||
("/api/queue/start", "POST"),
|
||||
("/api/queue/pause", "POST"),
|
||||
]
|
||||
|
||||
token = await self.get_valid_token(client)
|
||||
|
||||
@ -153,16 +153,15 @@ class TestDownloadFlowEndToEnd:
|
||||
if response.status_code == 200:
|
||||
data = response.json()
|
||||
|
||||
# Verify status structure
|
||||
assert "status" in data
|
||||
# Verify status structure (updated for new response format)
|
||||
assert "is_running" in data
|
||||
assert "is_paused" in data
|
||||
assert "pending_queue" in data
|
||||
assert "active_downloads" in data
|
||||
assert "completed_downloads" in data
|
||||
assert "failed_downloads" in data
|
||||
assert "statistics" in data
|
||||
|
||||
status = data["status"]
|
||||
assert "pending" in status
|
||||
assert "active" in status
|
||||
assert "completed" in status
|
||||
assert "failed" in status
|
||||
|
||||
async def test_add_with_different_priorities(self, authenticated_client):
|
||||
"""Test adding episodes with different priority levels."""
|
||||
priorities = ["high", "normal", "low"]
|
||||
@ -216,36 +215,7 @@ class TestQueueControlOperations:
|
||||
|
||||
async def test_start_queue_processing(self, authenticated_client):
|
||||
"""Test starting the queue processor."""
|
||||
response = await authenticated_client.post("/api/queue/control/start")
|
||||
|
||||
assert response.status_code in [200, 503]
|
||||
|
||||
if response.status_code == 200:
|
||||
data = response.json()
|
||||
assert data["status"] == "success"
|
||||
|
||||
async def test_pause_queue_processing(self, authenticated_client):
|
||||
"""Test pausing the queue processor."""
|
||||
# Start first
|
||||
await authenticated_client.post("/api/queue/control/start")
|
||||
|
||||
# Then pause
|
||||
response = await authenticated_client.post("/api/queue/control/pause")
|
||||
|
||||
assert response.status_code in [200, 503]
|
||||
|
||||
if response.status_code == 200:
|
||||
data = response.json()
|
||||
assert data["status"] == "success"
|
||||
|
||||
async def test_resume_queue_processing(self, authenticated_client):
|
||||
"""Test resuming the queue processor."""
|
||||
# Start and pause first
|
||||
await authenticated_client.post("/api/queue/control/start")
|
||||
await authenticated_client.post("/api/queue/control/pause")
|
||||
|
||||
# Then resume
|
||||
response = await authenticated_client.post("/api/queue/control/resume")
|
||||
response = await authenticated_client.post("/api/queue/start")
|
||||
|
||||
assert response.status_code in [200, 503]
|
||||
|
||||
@ -255,7 +225,7 @@ class TestQueueControlOperations:
|
||||
|
||||
async def test_clear_completed_downloads(self, authenticated_client):
|
||||
"""Test clearing completed downloads from the queue."""
|
||||
response = await authenticated_client.post("/api/queue/control/clear_completed")
|
||||
response = await authenticated_client.delete("/api/queue/completed")
|
||||
|
||||
assert response.status_code in [200, 503]
|
||||
|
||||
@ -294,36 +264,9 @@ class TestQueueItemOperations:
|
||||
# For now, test the endpoint with a dummy ID
|
||||
response = await authenticated_client.post("/api/queue/items/dummy-id/retry")
|
||||
|
||||
# Should return 404 if item doesn't exist, or 503 if service unavailable
|
||||
# Should return 404 if item doesn't exist, or 503 if unavailable
|
||||
assert response.status_code in [200, 404, 503]
|
||||
|
||||
async def test_reorder_queue_items(self, authenticated_client):
|
||||
"""Test reordering queue items."""
|
||||
# Add multiple items
|
||||
item_ids = []
|
||||
for i in range(3):
|
||||
add_response = await authenticated_client.post(
|
||||
"/api/queue/add",
|
||||
json={
|
||||
"serie_id": f"series-{i}",
|
||||
"serie_name": f"Series {i}",
|
||||
"episodes": [{"season": 1, "episode": 1}],
|
||||
"priority": "normal"
|
||||
}
|
||||
)
|
||||
|
||||
if add_response.status_code == 201:
|
||||
item_ids.extend(add_response.json()["item_ids"])
|
||||
|
||||
if len(item_ids) >= 2:
|
||||
# Reorder items
|
||||
response = await authenticated_client.post(
|
||||
"/api/queue/reorder",
|
||||
json={"item_order": list(reversed(item_ids))}
|
||||
)
|
||||
|
||||
assert response.status_code in [200, 503]
|
||||
|
||||
|
||||
class TestDownloadProgressTracking:
|
||||
"""Test progress tracking during downloads."""
|
||||
@ -348,11 +291,11 @@ class TestDownloadProgressTracking:
|
||||
|
||||
if response.status_code == 200:
|
||||
data = response.json()
|
||||
assert "status" in data
|
||||
# Updated for new response format
|
||||
assert "active_downloads" in data
|
||||
|
||||
# Check that items can have progress
|
||||
status = data["status"]
|
||||
for item in status.get("active", []):
|
||||
for item in data.get("active_downloads", []):
|
||||
if "progress" in item and item["progress"]:
|
||||
assert "percentage" in item["progress"]
|
||||
assert "current_mb" in item["progress"]
|
||||
@ -414,13 +357,18 @@ class TestErrorHandlingAndRetries:
|
||||
|
||||
if add_response.status_code == 201:
|
||||
# Get queue status to check retry count
|
||||
status_response = await authenticated_client.get("/api/queue/status")
|
||||
status_response = await authenticated_client.get(
|
||||
"/api/queue/status"
|
||||
)
|
||||
|
||||
if status_response.status_code == 200:
|
||||
data = status_response.json()
|
||||
# Verify structure includes retry_count field
|
||||
for item_list in [data["status"].get("pending", []),
|
||||
data["status"].get("failed", [])]:
|
||||
# Updated to match new response structure
|
||||
for item_list in [
|
||||
data.get("pending_queue", []),
|
||||
data.get("failed_downloads", [])
|
||||
]:
|
||||
for item in item_list:
|
||||
assert "retry_count" in item
|
||||
|
||||
@ -448,7 +396,7 @@ class TestAuthenticationRequirements:
|
||||
|
||||
async def test_queue_control_requires_auth(self, client):
|
||||
"""Test that queue control endpoints require authentication."""
|
||||
response = await client.post("/api/queue/control/start")
|
||||
response = await client.post("/api/queue/start")
|
||||
assert response.status_code == 401
|
||||
|
||||
async def test_item_operations_require_auth(self, client):
|
||||
@ -598,33 +546,7 @@ class TestCompleteDownloadWorkflow:
|
||||
assert progress_response.status_code in [200, 503]
|
||||
|
||||
# 5. Verify final state (completed or still processing)
|
||||
final_response = await authenticated_client.get("/api/queue/status")
|
||||
assert final_response.status_code in [200, 503]
|
||||
|
||||
async def test_workflow_with_pause_and_resume(self, authenticated_client):
|
||||
"""Test download workflow with pause and resume."""
|
||||
# Add items
|
||||
await authenticated_client.post(
|
||||
"/api/queue/add",
|
||||
json={
|
||||
"serie_id": "pause-test",
|
||||
"serie_name": "Pause Test Series",
|
||||
"episodes": [{"season": 1, "episode": 1}],
|
||||
"priority": "normal"
|
||||
}
|
||||
final_response = await authenticated_client.get(
|
||||
"/api/queue/status"
|
||||
)
|
||||
|
||||
# Start processing
|
||||
await authenticated_client.post("/api/queue/control/start")
|
||||
|
||||
# Pause
|
||||
pause_response = await authenticated_client.post("/api/queue/control/pause")
|
||||
assert pause_response.status_code in [200, 503]
|
||||
|
||||
# Resume
|
||||
resume_response = await authenticated_client.post("/api/queue/control/resume")
|
||||
assert resume_response.status_code in [200, 503]
|
||||
|
||||
# Verify queue status
|
||||
status_response = await authenticated_client.get("/api/queue/status")
|
||||
assert status_response.status_code in [200, 503]
|
||||
assert final_response.status_code in [200, 503]
|
||||
|
||||
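The workflow test above only checks that the final status call succeeds. As a hedged sketch, the "verify final state" step could instead poll GET /api/queue/status until the queue drains; the attempt limit and sleep interval below are arbitrary choices, not part of the tests.

```python
# Hedged sketch: wait for the download queue to drain by polling the status
# endpoint shown above. `client` is an httpx.AsyncClient, `headers` the auth headers.
import asyncio


async def wait_for_queue_drain(client, headers, attempts: int = 30) -> bool:
    """Return True once no downloads are pending or active."""
    for _ in range(attempts):
        response = await client.get("/api/queue/status", headers=headers)
        if response.status_code == 200:
            data = response.json()
            if not data.get("pending_queue") and not data.get("active_downloads"):
                return True
        await asyncio.sleep(1.0)
    return False
```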
398
tests/integration/test_download_progress_integration.py
Normal file
@ -0,0 +1,398 @@
|
||||
"""Integration tests for download progress WebSocket real-time updates.
|
||||
|
||||
This module tests the end-to-end flow of download progress from the
|
||||
download service through the WebSocket service to connected clients.
|
||||
"""
|
||||
import asyncio
|
||||
from typing import Any, Dict, List
|
||||
from unittest.mock import Mock, patch
|
||||
|
||||
import pytest
|
||||
|
||||
from src.server.models.download import EpisodeIdentifier
|
||||
from src.server.services.anime_service import AnimeService
|
||||
from src.server.services.download_service import DownloadService
|
||||
from src.server.services.progress_service import ProgressService
|
||||
from src.server.services.websocket_service import WebSocketService
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_series_app():
|
||||
"""Mock SeriesApp for testing."""
|
||||
app = Mock()
|
||||
app.series_list = []
|
||||
app.search = Mock(return_value=[])
|
||||
app.ReScan = Mock()
|
||||
|
||||
def mock_download(
|
||||
serie_folder, season, episode, key, callback=None, **kwargs
|
||||
):
|
||||
"""Simulate download with realistic progress updates."""
|
||||
if callback:
|
||||
# Simulate yt-dlp progress updates
|
||||
for percent in [10, 25, 50, 75, 90, 100]:
|
||||
callback({
|
||||
'percent': float(percent),
|
||||
'downloaded_mb': percent,
|
||||
'total_mb': 100.0,
|
||||
'speed_mbps': 2.5,
|
||||
'eta_seconds': int((100 - percent) / 2.5),
|
||||
})
|
||||
|
||||
result = Mock()
|
||||
result.success = True
|
||||
result.message = "Download completed"
|
||||
return result
|
||||
|
||||
app.download = Mock(side_effect=mock_download)
|
||||
return app
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def progress_service():
|
||||
"""Create a ProgressService instance."""
|
||||
return ProgressService()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def websocket_service():
|
||||
"""Create a WebSocketService instance."""
|
||||
return WebSocketService()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
async def anime_service(mock_series_app, progress_service):
|
||||
"""Create an AnimeService."""
|
||||
with patch(
|
||||
"src.server.services.anime_service.SeriesApp",
|
||||
return_value=mock_series_app
|
||||
):
|
||||
service = AnimeService(
|
||||
directory="/test/anime",
|
||||
progress_service=progress_service,
|
||||
)
|
||||
yield service
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
async def download_service(anime_service, progress_service):
|
||||
"""Create a DownloadService."""
|
||||
service = DownloadService(
|
||||
anime_service=anime_service,
|
||||
progress_service=progress_service,
|
||||
persistence_path="/tmp/test_integration_progress_queue.json",
|
||||
)
|
||||
yield service
|
||||
await service.stop()
|
||||
|
||||
|
||||
class TestDownloadProgressIntegration:
|
||||
"""Integration tests for download progress WebSocket flow."""
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_full_progress_flow_with_websocket(
|
||||
self, download_service, websocket_service
|
||||
):
|
||||
"""Test complete flow from download to WebSocket broadcast."""
|
||||
# Track all messages sent via WebSocket
|
||||
sent_messages: List[Dict[str, Any]] = []
|
||||
|
||||
# Mock WebSocket broadcast methods
|
||||
original_broadcast_progress = (
|
||||
websocket_service.broadcast_download_progress
|
||||
)
|
||||
|
||||
async def mock_broadcast_progress(download_id: str, data: dict):
|
||||
"""Capture broadcast calls."""
|
||||
sent_messages.append({
|
||||
'type': 'download_progress',
|
||||
'download_id': download_id,
|
||||
'data': data,
|
||||
})
|
||||
# Call original to maintain functionality
|
||||
await original_broadcast_progress(download_id, data)
|
||||
|
||||
websocket_service.broadcast_download_progress = (
|
||||
mock_broadcast_progress
|
||||
)
|
||||
|
||||
# Connect download service to WebSocket service
|
||||
async def broadcast_callback(update_type: str, data: dict):
|
||||
"""Bridge download service to WebSocket service."""
|
||||
if update_type == "download_progress":
|
||||
await websocket_service.broadcast_download_progress(
|
||||
data.get("download_id", ""),
|
||||
data,
|
||||
)
|
||||
|
||||
download_service.set_broadcast_callback(broadcast_callback)
|
||||
|
||||
# Add download to queue
|
||||
await download_service.add_to_queue(
|
||||
serie_id="integration_test",
|
||||
serie_folder="test_folder",
|
||||
serie_name="Integration Test Anime",
|
||||
episodes=[EpisodeIdentifier(season=1, episode=1)],
|
||||
)
|
||||
|
||||
# Start processing
|
||||
await download_service.start_queue_processing()
|
||||
|
||||
# Wait for download to complete
|
||||
await asyncio.sleep(1.0)
|
||||
|
||||
# Verify progress messages were sent
|
||||
progress_messages = [
|
||||
m for m in sent_messages if m['type'] == 'download_progress'
|
||||
]
|
||||
|
||||
assert len(progress_messages) >= 3 # Multiple progress updates
|
||||
|
||||
# Verify progress increases
|
||||
percentages = [
|
||||
m['data'].get('progress', {}).get('percent', 0)
|
||||
for m in progress_messages
|
||||
]
|
||||
|
||||
# Should have increasing percentages
|
||||
for i in range(1, len(percentages)):
|
||||
assert percentages[i] >= percentages[i - 1]
|
||||
|
||||
# Last update should be close to 100%
|
||||
assert percentages[-1] >= 90
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_websocket_client_receives_progress(
|
||||
self, download_service, websocket_service
|
||||
):
|
||||
"""Test that WebSocket clients receive progress messages."""
|
||||
# Track messages received by clients
|
||||
client_messages: List[Dict[str, Any]] = []
|
||||
|
||||
# Mock WebSocket client
|
||||
class MockWebSocket:
|
||||
"""Mock WebSocket for testing."""
|
||||
|
||||
async def accept(self):
|
||||
pass
|
||||
|
||||
async def send_json(self, data):
|
||||
"""Capture sent messages."""
|
||||
client_messages.append(data)
|
||||
|
||||
async def receive_json(self):
|
||||
# Keep connection open
|
||||
await asyncio.sleep(10)
|
||||
|
||||
mock_ws = MockWebSocket()
|
||||
|
||||
# Connect mock client
|
||||
connection_id = "test_client_1"
|
||||
await websocket_service.connect(mock_ws, connection_id)
|
||||
|
||||
# Connect download service to WebSocket service
|
||||
async def broadcast_callback(update_type: str, data: dict):
|
||||
if update_type == "download_progress":
|
||||
await websocket_service.broadcast_download_progress(
|
||||
data.get("download_id", ""),
|
||||
data,
|
||||
)
|
||||
|
||||
download_service.set_broadcast_callback(broadcast_callback)
|
||||
|
||||
# Add and start download
|
||||
await download_service.add_to_queue(
|
||||
serie_id="client_test",
|
||||
serie_folder="test_folder",
|
||||
serie_name="Client Test Anime",
|
||||
episodes=[EpisodeIdentifier(season=1, episode=1)],
|
||||
)
|
||||
|
||||
await download_service.start_queue_processing()
|
||||
await asyncio.sleep(1.0)
|
||||
|
||||
# Verify client received messages
|
||||
progress_messages = [
|
||||
m for m in client_messages
|
||||
if m.get('type') == 'download_progress'
|
||||
]
|
||||
|
||||
assert len(progress_messages) >= 2
|
||||
|
||||
# Cleanup
|
||||
await websocket_service.disconnect(connection_id)
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_multiple_clients_receive_same_progress(
|
||||
self, download_service, websocket_service
|
||||
):
|
||||
"""Test that all connected clients receive progress updates."""
|
||||
# Track messages for each client
|
||||
client1_messages: List[Dict] = []
|
||||
client2_messages: List[Dict] = []
|
||||
|
||||
class MockWebSocket:
|
||||
"""Mock WebSocket for testing."""
|
||||
|
||||
def __init__(self, message_list):
|
||||
self.messages = message_list
|
||||
|
||||
async def accept(self):
|
||||
pass
|
||||
|
||||
async def send_json(self, data):
|
||||
self.messages.append(data)
|
||||
|
||||
async def receive_json(self):
|
||||
await asyncio.sleep(10)
|
||||
|
||||
# Connect two clients
|
||||
client1 = MockWebSocket(client1_messages)
|
||||
client2 = MockWebSocket(client2_messages)
|
||||
|
||||
await websocket_service.connect(client1, "client1")
|
||||
await websocket_service.connect(client2, "client2")
|
||||
|
||||
# Connect download service
|
||||
async def broadcast_callback(update_type: str, data: dict):
|
||||
if update_type == "download_progress":
|
||||
await websocket_service.broadcast_download_progress(
|
||||
data.get("download_id", ""),
|
||||
data,
|
||||
)
|
||||
|
||||
download_service.set_broadcast_callback(broadcast_callback)
|
||||
|
||||
# Start download
|
||||
await download_service.add_to_queue(
|
||||
serie_id="multi_client_test",
|
||||
serie_folder="test_folder",
|
||||
serie_name="Multi Client Test",
|
||||
episodes=[EpisodeIdentifier(season=1, episode=1)],
|
||||
)
|
||||
|
||||
await download_service.start_queue_processing()
|
||||
await asyncio.sleep(1.0)
|
||||
|
||||
# Both clients should receive progress
|
||||
client1_progress = [
|
||||
m for m in client1_messages
|
||||
if m.get('type') == 'download_progress'
|
||||
]
|
||||
client2_progress = [
|
||||
m for m in client2_messages
|
||||
if m.get('type') == 'download_progress'
|
||||
]
|
||||
|
||||
assert len(client1_progress) >= 2
|
||||
assert len(client2_progress) >= 2
|
||||
|
||||
# Both should have similar number of updates
|
||||
assert abs(len(client1_progress) - len(client2_progress)) <= 2
|
||||
|
||||
# Cleanup
|
||||
await websocket_service.disconnect("client1")
|
||||
await websocket_service.disconnect("client2")
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_progress_data_structure_matches_frontend_expectations(
|
||||
self, download_service, websocket_service
|
||||
):
|
||||
"""Test that progress data structure matches frontend requirements."""
|
||||
captured_data: List[Dict] = []
|
||||
|
||||
async def capture_broadcast(update_type: str, data: dict):
|
||||
if update_type == "download_progress":
|
||||
captured_data.append(data)
|
||||
await websocket_service.broadcast_download_progress(
|
||||
data.get("download_id", ""),
|
||||
data,
|
||||
)
|
||||
|
||||
download_service.set_broadcast_callback(capture_broadcast)
|
||||
|
||||
await download_service.add_to_queue(
|
||||
serie_id="structure_test",
|
||||
serie_folder="test_folder",
|
||||
serie_name="Structure Test",
|
||||
episodes=[EpisodeIdentifier(season=2, episode=3)],
|
||||
)
|
||||
|
||||
await download_service.start_queue_processing()
|
||||
await asyncio.sleep(1.0)
|
||||
|
||||
assert len(captured_data) > 0
|
||||
|
||||
# Verify data structure matches frontend expectations
|
||||
for data in captured_data:
|
||||
# Required fields for frontend (queue.js)
|
||||
assert 'download_id' in data or 'item_id' in data
|
||||
assert 'serie_name' in data
|
||||
assert 'season' in data
|
||||
assert 'episode' in data
|
||||
assert 'progress' in data
|
||||
|
||||
# Progress object structure
|
||||
progress = data['progress']
|
||||
assert 'percent' in progress
|
||||
assert 'downloaded_mb' in progress
|
||||
assert 'total_mb' in progress
|
||||
|
||||
# Verify episode info
|
||||
assert data['season'] == 2
|
||||
assert data['episode'] == 3
|
||||
assert data['serie_name'] == "Structure Test"
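# For reference, a payload that satisfies the checks above would look roughly
# like this (field names come from the assertions; values are illustrative):
#
#     {
#         "download_id": "structure_test-s02e03",  # or "item_id"
#         "serie_name": "Structure Test",
#         "season": 2,
#         "episode": 3,
#         "progress": {"percent": 42.0, "downloaded_mb": 42.0, "total_mb": 100.0},
#     }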
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_disconnected_client_doesnt_receive_progress(
|
||||
self, download_service, websocket_service
|
||||
):
|
||||
"""Test that disconnected clients don't receive updates."""
|
||||
client_messages: List[Dict] = []
|
||||
|
||||
class MockWebSocket:
|
||||
async def accept(self):
|
||||
pass
|
||||
|
||||
async def send_json(self, data):
|
||||
client_messages.append(data)
|
||||
|
||||
async def receive_json(self):
|
||||
await asyncio.sleep(10)
|
||||
|
||||
mock_ws = MockWebSocket()
|
||||
|
||||
# Connect and then disconnect
|
||||
connection_id = "temp_client"
|
||||
await websocket_service.connect(mock_ws, connection_id)
|
||||
await websocket_service.disconnect(connection_id)
|
||||
|
||||
# Connect download service
|
||||
async def broadcast_callback(update_type: str, data: dict):
|
||||
if update_type == "download_progress":
|
||||
await websocket_service.broadcast_download_progress(
|
||||
data.get("download_id", ""),
|
||||
data,
|
||||
)
|
||||
|
||||
download_service.set_broadcast_callback(broadcast_callback)
|
||||
|
||||
# Start download after disconnect
|
||||
await download_service.add_to_queue(
|
||||
serie_id="disconnect_test",
|
||||
serie_folder="test_folder",
|
||||
serie_name="Disconnect Test",
|
||||
episodes=[EpisodeIdentifier(season=1, episode=1)],
|
||||
)
|
||||
|
||||
initial_message_count = len(client_messages)
|
||||
await download_service.start_queue_processing()
|
||||
await asyncio.sleep(1.0)
|
||||
|
||||
# Should not receive progress updates after disconnect
|
||||
progress_messages = [
|
||||
m for m in client_messages[initial_message_count:]
|
||||
if m.get('type') == 'download_progress'
|
||||
]
|
||||
|
||||
assert len(progress_messages) == 0
|
||||
@@ -60,7 +60,6 @@ async def download_service(anime_service, progress_service):
|
||||
"""Create a DownloadService with dependencies."""
|
||||
service = DownloadService(
|
||||
anime_service=anime_service,
|
||||
max_concurrent_downloads=2,
|
||||
progress_service=progress_service,
|
||||
persistence_path="/tmp/test_queue.json",
|
||||
)
|
||||
@@ -173,40 +172,6 @@ class TestWebSocketDownloadIntegration:
|
||||
assert stop_broadcast is not None
|
||||
assert stop_broadcast["data"]["is_running"] is False
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_queue_pause_resume_broadcast(
|
||||
self, download_service
|
||||
):
|
||||
"""Test that pause/resume operations broadcast updates."""
|
||||
broadcasts: List[Dict[str, Any]] = []
|
||||
|
||||
async def mock_broadcast(update_type: str, data: dict):
|
||||
broadcasts.append({"type": update_type, "data": data})
|
||||
|
||||
download_service.set_broadcast_callback(mock_broadcast)
|
||||
|
||||
# Pause queue
|
||||
await download_service.pause_queue()
|
||||
|
||||
# Resume queue
|
||||
await download_service.resume_queue()
|
||||
|
||||
# Find pause/resume broadcasts
|
||||
pause_broadcast = next(
|
||||
(b for b in broadcasts if b["type"] == "queue_paused"),
|
||||
None,
|
||||
)
|
||||
resume_broadcast = next(
|
||||
(b for b in broadcasts if b["type"] == "queue_resumed"),
|
||||
None,
|
||||
)
|
||||
|
||||
assert pause_broadcast is not None
|
||||
assert pause_broadcast["data"]["is_paused"] is True
|
||||
|
||||
assert resume_broadcast is not None
|
||||
assert resume_broadcast["data"]["is_paused"] is False
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_clear_completed_broadcast(
|
||||
self, download_service
|
||||
|
||||
@@ -322,74 +322,3 @@ class TestAPIParameterValidation:
|
||||
# Should not grant admin from parameter
|
||||
data = response.json()
|
||||
assert not data.get("data", {}).get("is_admin", False)
|
||||
|
||||
|
||||
@pytest.mark.security
|
||||
class TestFileUploadSecurity:
|
||||
"""Security tests for file upload handling."""
|
||||
|
||||
@pytest.fixture
|
||||
async def client(self):
|
||||
"""Create async HTTP client for testing."""
|
||||
from httpx import ASGITransport
|
||||
|
||||
async with AsyncClient(
|
||||
transport=ASGITransport(app=app), base_url="http://test"
|
||||
) as ac:
|
||||
yield ac
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_malicious_file_extension(self, client):
|
||||
"""Test handling of dangerous file extensions."""
|
||||
dangerous_extensions = [
|
||||
".exe",
|
||||
".sh",
|
||||
".bat",
|
||||
".cmd",
|
||||
".php",
|
||||
".jsp",
|
||||
]
|
||||
|
||||
for ext in dangerous_extensions:
|
||||
files = {"file": (f"test{ext}", b"malicious content")}
|
||||
response = await client.post("/api/upload", files=files)
|
||||
|
||||
# Should reject dangerous files
|
||||
assert response.status_code in [400, 403, 415]
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_file_size_limit(self, client):
|
||||
"""Test enforcement of file size limits."""
|
||||
# Try to upload very large file
|
||||
large_content = b"A" * (100 * 1024 * 1024) # 100MB
|
||||
|
||||
files = {"file": ("large.txt", large_content)}
|
||||
response = await client.post("/api/upload", files=files)
|
||||
|
||||
# Should reject oversized files
|
||||
assert response.status_code in [413, 422]
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_double_extension_bypass(self, client):
|
||||
"""Test protection against double extension bypass."""
|
||||
files = {"file": ("image.jpg.php", b"<?php phpinfo(); ?>")}
|
||||
response = await client.post("/api/upload", files=files)
|
||||
|
||||
# Should detect and reject
|
||||
assert response.status_code in [400, 403, 415]
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_mime_type_validation(self, client):
|
||||
"""Test MIME type validation."""
|
||||
# PHP file with image MIME type
|
||||
files = {
|
||||
"file": (
|
||||
"image.jpg",
|
||||
b"<?php phpinfo(); ?>",
|
||||
"image/jpeg",
|
||||
)
|
||||
}
|
||||
response = await client.post("/api/upload", files=files)
|
||||
|
||||
# Should validate actual content, not just MIME type
|
||||
assert response.status_code in [400, 403, 415]
|
||||
|
||||
@@ -1,315 +0,0 @@
|
||||
"""Unit tests for analytics service.
|
||||
|
||||
Tests analytics service functionality including download statistics,
|
||||
series popularity tracking, storage analysis, and performance reporting.
|
||||
"""
|
||||
|
||||
import json
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from unittest.mock import AsyncMock, MagicMock, patch
|
||||
|
||||
import pytest
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from src.server.services.analytics_service import (
|
||||
AnalyticsService,
|
||||
DownloadStats,
|
||||
PerformanceReport,
|
||||
StorageAnalysis,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def analytics_service(tmp_path):
|
||||
"""Create analytics service with temp directory."""
|
||||
with patch("src.server.services.analytics_service.ANALYTICS_FILE",
|
||||
tmp_path / "analytics.json"):
|
||||
service = AnalyticsService()
|
||||
yield service
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
async def mock_db():
|
||||
"""Create mock database session."""
|
||||
db = AsyncMock(spec=AsyncSession)
|
||||
return db
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_analytics_service_initialization(analytics_service):
|
||||
"""Test analytics service initializes with default data."""
|
||||
assert analytics_service.analytics_file.exists()
|
||||
|
||||
data = json.loads(analytics_service.analytics_file.read_text())
|
||||
assert "created_at" in data
|
||||
assert "download_stats" in data
|
||||
assert "series_popularity" in data
|
||||
assert data["download_stats"]["total_downloads"] == 0
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_get_download_stats_no_data(
|
||||
analytics_service, mock_db
|
||||
):
|
||||
"""Test download statistics with no download data."""
|
||||
mock_db.execute = AsyncMock(return_value=MagicMock(
|
||||
scalars=MagicMock(return_value=MagicMock(all=MagicMock(
|
||||
return_value=[]
|
||||
)))
|
||||
))
|
||||
|
||||
stats = await analytics_service.get_download_stats(mock_db)
|
||||
|
||||
assert isinstance(stats, DownloadStats)
|
||||
assert stats.total_downloads == 0
|
||||
assert stats.successful_downloads == 0
|
||||
assert stats.success_rate == 0.0
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_get_download_stats_with_data(
|
||||
analytics_service, mock_db
|
||||
):
|
||||
"""Test download statistics with download data."""
|
||||
# Mock downloads - updated to use actual model fields
|
||||
download1 = MagicMock()
|
||||
download1.status = "completed"
|
||||
download1.total_bytes = 1024 * 1024 * 100 # 100 MB
|
||||
download1.download_speed = 1024 * 1024 * 10 # 10 MB/s
|
||||
|
||||
download2 = MagicMock()
|
||||
download2.status = "failed"
|
||||
download2.total_bytes = 0
|
||||
download2.download_speed = None
|
||||
|
||||
mock_db.execute = AsyncMock(return_value=MagicMock(
|
||||
scalars=MagicMock(return_value=MagicMock(all=MagicMock(
|
||||
return_value=[download1, download2]
|
||||
)))
|
||||
))
|
||||
|
||||
stats = await analytics_service.get_download_stats(mock_db)
|
||||
|
||||
assert stats.total_downloads == 2
|
||||
assert stats.successful_downloads == 1
|
||||
assert stats.failed_downloads == 1
|
||||
assert stats.success_rate == 50.0
|
||||
assert stats.total_bytes_downloaded == 1024 * 1024 * 100
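# With one completed and one failed download, success_rate is
# 1 / 2 * 100 = 50.0, and only the completed item's bytes are counted.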
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_get_series_popularity_empty(
|
||||
analytics_service, mock_db
|
||||
):
|
||||
"""Test series popularity with no data."""
|
||||
mock_db.execute = AsyncMock(return_value=MagicMock(
|
||||
all=MagicMock(return_value=[])
|
||||
))
|
||||
|
||||
popularity = await analytics_service.get_series_popularity(
|
||||
mock_db, limit=10
|
||||
)
|
||||
|
||||
assert isinstance(popularity, list)
|
||||
assert len(popularity) == 0
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_get_series_popularity_with_data(
|
||||
analytics_service, mock_db
|
||||
):
|
||||
"""Test series popularity with data."""
|
||||
# Mock returns tuples:
|
||||
# (series_name, download_count, total_size, last_download, successful)
|
||||
row = (
|
||||
"Test Anime",
|
||||
5,
|
||||
1024 * 1024 * 500,
|
||||
datetime.now(),
|
||||
4
|
||||
)
|
||||
|
||||
mock_db.execute = AsyncMock(return_value=MagicMock(
|
||||
all=MagicMock(return_value=[row])
|
||||
))
|
||||
|
||||
popularity = await analytics_service.get_series_popularity(
|
||||
mock_db, limit=10
|
||||
)
|
||||
|
||||
assert len(popularity) == 1
|
||||
assert popularity[0].series_name == "Test Anime"
|
||||
assert popularity[0].download_count == 5
|
||||
assert popularity[0].success_rate == 80.0
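# The 80.0 follows from the mocked row: 4 successful of 5 downloads,
# i.e. 4 / 5 * 100 = 80.0 percent.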
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_get_storage_analysis(analytics_service):
|
||||
"""Test storage analysis retrieval."""
|
||||
with patch("psutil.disk_usage") as mock_disk:
|
||||
mock_disk.return_value = MagicMock(
|
||||
total=1024 * 1024 * 1024 * 1024,
|
||||
used=512 * 1024 * 1024 * 1024,
|
||||
free=512 * 1024 * 1024 * 1024,
|
||||
percent=50.0,
|
||||
)
|
||||
|
||||
analysis = analytics_service.get_storage_analysis()
|
||||
|
||||
assert isinstance(analysis, StorageAnalysis)
|
||||
assert analysis.total_storage_bytes > 0
|
||||
assert analysis.storage_percent_used == 50.0
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_get_performance_report_no_data(
|
||||
analytics_service, mock_db
|
||||
):
|
||||
"""Test performance report with no data."""
|
||||
mock_db.execute = AsyncMock(return_value=MagicMock(
|
||||
scalars=MagicMock(return_value=MagicMock(all=MagicMock(
|
||||
return_value=[]
|
||||
)))
|
||||
))
|
||||
|
||||
with patch("psutil.Process") as mock_process:
|
||||
mock_process.return_value = MagicMock(
|
||||
memory_info=MagicMock(
|
||||
return_value=MagicMock(rss=100 * 1024 * 1024)
|
||||
),
|
||||
cpu_percent=MagicMock(return_value=10.0),
|
||||
)
|
||||
|
||||
report = await analytics_service.get_performance_report(
|
||||
mock_db, hours=24
|
||||
)
|
||||
|
||||
assert isinstance(report, PerformanceReport)
|
||||
assert report.downloads_per_hour == 0.0
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_record_performance_sample(analytics_service):
|
||||
"""Test recording performance samples."""
|
||||
analytics_service.record_performance_sample(
|
||||
queue_size=5,
|
||||
active_downloads=2,
|
||||
cpu_percent=25.0,
|
||||
memory_mb=512.0,
|
||||
)
|
||||
|
||||
data = json.loads(
|
||||
analytics_service.analytics_file.read_text()
|
||||
)
|
||||
assert len(data["performance_samples"]) == 1
|
||||
sample = data["performance_samples"][0]
|
||||
assert sample["queue_size"] == 5
|
||||
assert sample["active_downloads"] == 2
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_record_multiple_performance_samples(
|
||||
analytics_service
|
||||
):
|
||||
"""Test recording multiple performance samples."""
|
||||
for i in range(5):
|
||||
analytics_service.record_performance_sample(
|
||||
queue_size=i,
|
||||
active_downloads=i % 2,
|
||||
cpu_percent=10.0 + i,
|
||||
memory_mb=256.0 + i * 50,
|
||||
)
|
||||
|
||||
data = json.loads(
|
||||
analytics_service.analytics_file.read_text()
|
||||
)
|
||||
assert len(data["performance_samples"]) == 5
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_generate_summary_report(
|
||||
analytics_service, mock_db
|
||||
):
|
||||
"""Test generating comprehensive summary report."""
|
||||
mock_db.execute = AsyncMock(return_value=MagicMock(
|
||||
scalars=MagicMock(return_value=MagicMock(all=MagicMock(
|
||||
return_value=[]
|
||||
))),
|
||||
all=MagicMock(return_value=[]),
|
||||
))
|
||||
|
||||
with patch("psutil.disk_usage") as mock_disk:
|
||||
mock_disk.return_value = MagicMock(
|
||||
total=1024 * 1024 * 1024,
|
||||
used=512 * 1024 * 1024,
|
||||
free=512 * 1024 * 1024,
|
||||
percent=50.0,
|
||||
)
|
||||
|
||||
with patch("psutil.Process"):
|
||||
report = await analytics_service.generate_summary_report(
|
||||
mock_db
|
||||
)
|
||||
|
||||
assert "timestamp" in report
|
||||
assert "download_stats" in report
|
||||
assert "series_popularity" in report
|
||||
assert "storage_analysis" in report
|
||||
assert "performance_report" in report
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_get_dir_size(analytics_service, tmp_path):
|
||||
"""Test directory size calculation."""
|
||||
# Create test files
|
||||
(tmp_path / "file1.txt").write_text("test content")
|
||||
(tmp_path / "file2.txt").write_text("more test content")
|
||||
subdir = tmp_path / "subdir"
|
||||
subdir.mkdir()
|
||||
(subdir / "file3.txt").write_text("nested content")
|
||||
|
||||
size = analytics_service._get_dir_size(tmp_path)
|
||||
|
||||
assert size > 0
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_get_dir_size_nonexistent(analytics_service):
|
||||
"""Test directory size for nonexistent directory."""
|
||||
size = analytics_service._get_dir_size(
|
||||
Path("/nonexistent/directory")
|
||||
)
|
||||
|
||||
assert size == 0
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_analytics_persistence(analytics_service):
|
||||
"""Test analytics data persistence."""
|
||||
analytics_service.record_performance_sample(
|
||||
queue_size=10,
|
||||
active_downloads=3,
|
||||
cpu_percent=50.0,
|
||||
memory_mb=1024.0,
|
||||
)
|
||||
|
||||
# Create new service instance
|
||||
analytics_service2 = AnalyticsService()
|
||||
analytics_service2.analytics_file = analytics_service.analytics_file
|
||||
|
||||
data = json.loads(
|
||||
analytics_service2.analytics_file.read_text()
|
||||
)
|
||||
assert len(data["performance_samples"]) == 1
|
||||
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_analytics_service_singleton(analytics_service):
|
||||
"""Test analytics service singleton pattern."""
|
||||
from src.server.services.analytics_service import get_analytics_service
|
||||
|
||||
service1 = get_analytics_service()
|
||||
service2 = get_analytics_service()
|
||||
|
||||
assert service1 is service2
|
||||
@@ -1,259 +0,0 @@
|
||||
"""Unit tests for backup service."""
|
||||
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
|
||||
from src.server.services.backup_service import BackupService, get_backup_service
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def temp_backup_env():
|
||||
"""Create temporary directories for testing."""
|
||||
with tempfile.TemporaryDirectory() as tmpdir:
|
||||
backup_dir = Path(tmpdir) / "backups"
|
||||
config_dir = Path(tmpdir) / "config"
|
||||
config_dir.mkdir()
|
||||
|
||||
# Create mock config files
|
||||
(config_dir / "config.json").write_text('{"test": "config"}')
|
||||
(config_dir / "download_queue.json").write_text('{"queue": []}')
|
||||
|
||||
yield {
|
||||
"backup_dir": str(backup_dir),
|
||||
"config_dir": str(config_dir),
|
||||
"tmpdir": tmpdir,
|
||||
}
|
||||
|
||||
|
||||
def test_backup_service_initialization(temp_backup_env):
|
||||
"""Test backup service initialization."""
|
||||
service = BackupService(
|
||||
backup_dir=temp_backup_env["backup_dir"],
|
||||
config_dir=temp_backup_env["config_dir"],
|
||||
)
|
||||
|
||||
assert service is not None
|
||||
assert service.backup_dir.exists()
|
||||
|
||||
|
||||
def test_backup_configuration(temp_backup_env):
|
||||
"""Test configuration backup creation."""
|
||||
service = BackupService(
|
||||
backup_dir=temp_backup_env["backup_dir"],
|
||||
config_dir=temp_backup_env["config_dir"],
|
||||
)
|
||||
|
||||
backup_info = service.backup_configuration("Test backup")
|
||||
|
||||
assert backup_info is not None
|
||||
assert backup_info.backup_type == "config"
|
||||
assert backup_info.size_bytes > 0
|
||||
assert "config_" in backup_info.name
|
||||
|
||||
|
||||
def test_backup_configuration_no_config(temp_backup_env):
|
||||
"""Test configuration backup with missing config file."""
|
||||
service = BackupService(
|
||||
backup_dir=temp_backup_env["backup_dir"],
|
||||
config_dir=temp_backup_env["config_dir"],
|
||||
)
|
||||
|
||||
# Remove config file
|
||||
(Path(temp_backup_env["config_dir"]) / "config.json").unlink()
|
||||
|
||||
# Should still create backup (empty tar)
|
||||
backup_info = service.backup_configuration()
|
||||
|
||||
assert backup_info is not None
|
||||
|
||||
|
||||
def test_backup_database(temp_backup_env):
|
||||
"""Test database backup creation."""
|
||||
# Create mock database file
|
||||
db_path = Path(temp_backup_env["tmpdir"]) / "aniworld.db"
|
||||
db_path.write_bytes(b"mock database content")
|
||||
|
||||
service = BackupService(
|
||||
backup_dir=temp_backup_env["backup_dir"],
|
||||
config_dir=temp_backup_env["config_dir"],
|
||||
database_path=str(db_path),
|
||||
)
|
||||
|
||||
backup_info = service.backup_database("DB backup")
|
||||
|
||||
assert backup_info is not None
|
||||
assert backup_info.backup_type == "data"
|
||||
assert backup_info.size_bytes > 0
|
||||
assert "database_" in backup_info.name
|
||||
|
||||
|
||||
def test_backup_database_not_found(temp_backup_env):
|
||||
"""Test database backup with missing database."""
|
||||
service = BackupService(
|
||||
backup_dir=temp_backup_env["backup_dir"],
|
||||
config_dir=temp_backup_env["config_dir"],
|
||||
database_path="/nonexistent/database.db",
|
||||
)
|
||||
|
||||
backup_info = service.backup_database()
|
||||
|
||||
assert backup_info is None
|
||||
|
||||
|
||||
def test_backup_full(temp_backup_env):
|
||||
"""Test full system backup."""
|
||||
service = BackupService(
|
||||
backup_dir=temp_backup_env["backup_dir"],
|
||||
config_dir=temp_backup_env["config_dir"],
|
||||
)
|
||||
|
||||
backup_info = service.backup_full("Full backup")
|
||||
|
||||
assert backup_info is not None
|
||||
assert backup_info.backup_type == "full"
|
||||
assert backup_info.size_bytes > 0
|
||||
|
||||
|
||||
def test_list_backups(temp_backup_env):
|
||||
"""Test listing backups."""
|
||||
service = BackupService(
|
||||
backup_dir=temp_backup_env["backup_dir"],
|
||||
config_dir=temp_backup_env["config_dir"],
|
||||
)
|
||||
|
||||
# Create several backups
|
||||
service.backup_configuration()
|
||||
service.backup_full()
|
||||
|
||||
backups = service.list_backups()
|
||||
|
||||
assert len(backups) >= 2
|
||||
assert all("name" in b for b in backups)
|
||||
assert all("type" in b for b in backups)
|
||||
|
||||
|
||||
def test_list_backups_by_type(temp_backup_env):
|
||||
"""Test listing backups filtered by type."""
|
||||
service = BackupService(
|
||||
backup_dir=temp_backup_env["backup_dir"],
|
||||
config_dir=temp_backup_env["config_dir"],
|
||||
)
|
||||
|
||||
# Create different types of backups
|
||||
service.backup_configuration()
|
||||
service.backup_full()
|
||||
|
||||
config_backups = service.list_backups("config")
|
||||
|
||||
assert all(b["type"] == "config" for b in config_backups)
|
||||
|
||||
|
||||
def test_delete_backup(temp_backup_env):
|
||||
"""Test backup deletion."""
|
||||
service = BackupService(
|
||||
backup_dir=temp_backup_env["backup_dir"],
|
||||
config_dir=temp_backup_env["config_dir"],
|
||||
)
|
||||
|
||||
backup_info = service.backup_configuration()
|
||||
assert backup_info is not None
|
||||
|
||||
backups_before = service.list_backups()
|
||||
assert len(backups_before) > 0
|
||||
|
||||
result = service.delete_backup(backup_info.name)
|
||||
|
||||
assert result is True
|
||||
backups_after = service.list_backups()
|
||||
assert len(backups_after) < len(backups_before)
|
||||
|
||||
|
||||
def test_delete_backup_not_found(temp_backup_env):
|
||||
"""Test deleting non-existent backup."""
|
||||
service = BackupService(
|
||||
backup_dir=temp_backup_env["backup_dir"],
|
||||
config_dir=temp_backup_env["config_dir"],
|
||||
)
|
||||
|
||||
result = service.delete_backup("nonexistent_backup.tar.gz")
|
||||
|
||||
assert result is False
|
||||
|
||||
|
||||
def test_cleanup_old_backups(temp_backup_env):
|
||||
"""Test cleanup of old backups."""
|
||||
import time
|
||||
|
||||
service = BackupService(
|
||||
backup_dir=temp_backup_env["backup_dir"],
|
||||
config_dir=temp_backup_env["config_dir"],
|
||||
)
|
||||
|
||||
# Create multiple backups with small delays to ensure unique timestamps
|
||||
for i in range(5):
|
||||
service.backup_configuration()
|
||||
time.sleep(1) # Ensure different timestamps
|
||||
|
||||
backups_before = service.list_backups()
|
||||
assert len(backups_before) == 5
|
||||
|
||||
# Keep only 2 backups
|
||||
deleted = service.cleanup_old_backups(max_backups=2)
|
||||
|
||||
backups_after = service.list_backups()
|
||||
assert len(backups_after) <= 2
|
||||
assert deleted == 3
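# Keeping max_backups=2 of the 5 created backups leaves 5 - 2 = 3 to delete.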
|
||||
|
||||
|
||||
def test_export_anime_data(temp_backup_env):
|
||||
"""Test anime data export."""
|
||||
service = BackupService(
|
||||
backup_dir=temp_backup_env["backup_dir"],
|
||||
config_dir=temp_backup_env["config_dir"],
|
||||
)
|
||||
|
||||
export_file = Path(temp_backup_env["tmpdir"]) / "anime_export.json"
|
||||
result = service.export_anime_data(str(export_file))
|
||||
|
||||
assert result is True
|
||||
assert export_file.exists()
|
||||
assert "timestamp" in export_file.read_text()
|
||||
|
||||
|
||||
def test_import_anime_data(temp_backup_env):
|
||||
"""Test anime data import."""
|
||||
service = BackupService(
|
||||
backup_dir=temp_backup_env["backup_dir"],
|
||||
config_dir=temp_backup_env["config_dir"],
|
||||
)
|
||||
|
||||
# Create import file
|
||||
import_file = Path(temp_backup_env["tmpdir"]) / "anime_import.json"
|
||||
import_file.write_text('{"timestamp": "2025-01-01T00:00:00", "data": []}')
|
||||
|
||||
result = service.import_anime_data(str(import_file))
|
||||
|
||||
assert result is True
|
||||
|
||||
|
||||
def test_import_anime_data_not_found(temp_backup_env):
|
||||
"""Test anime data import with missing file."""
|
||||
service = BackupService(
|
||||
backup_dir=temp_backup_env["backup_dir"],
|
||||
config_dir=temp_backup_env["config_dir"],
|
||||
)
|
||||
|
||||
result = service.import_anime_data("/nonexistent/file.json")
|
||||
|
||||
assert result is False
|
||||
|
||||
|
||||
def test_get_backup_service():
|
||||
"""Test singleton backup service."""
|
||||
service1 = get_backup_service()
|
||||
service2 = get_backup_service()
|
||||
|
||||
assert service1 is service2
|
||||
assert isinstance(service1, BackupService)
|
||||
@@ -1,227 +0,0 @@
|
||||
"""Unit tests for diagnostics endpoints."""
|
||||
from unittest.mock import MagicMock, patch
|
||||
|
||||
import pytest
|
||||
|
||||
from src.server.api.diagnostics import (
|
||||
NetworkTestResult,
|
||||
check_dns,
|
||||
check_host_connectivity,
|
||||
network_diagnostics,
|
||||
)
|
||||
|
||||
|
||||
class TestDiagnosticsEndpoint:
|
||||
"""Test diagnostics API endpoints."""
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_network_diagnostics_returns_standard_format(self):
|
||||
"""Test that network diagnostics returns the expected format."""
|
||||
# Mock authentication
|
||||
mock_auth = {"user_id": "test_user"}
|
||||
|
||||
# Mock the helper functions
|
||||
with patch(
|
||||
"src.server.api.diagnostics.check_dns",
|
||||
return_value=True
|
||||
), patch(
|
||||
"src.server.api.diagnostics.check_host_connectivity",
|
||||
side_effect=[
|
||||
NetworkTestResult(
|
||||
host="google.com",
|
||||
reachable=True,
|
||||
response_time_ms=50.5
|
||||
),
|
||||
NetworkTestResult(
|
||||
host="cloudflare.com",
|
||||
reachable=True,
|
||||
response_time_ms=30.2
|
||||
),
|
||||
NetworkTestResult(
|
||||
host="github.com",
|
||||
reachable=True,
|
||||
response_time_ms=100.0
|
||||
),
|
||||
NetworkTestResult(
|
||||
host="aniworld.to",
|
||||
reachable=True,
|
||||
response_time_ms=75.3
|
||||
),
|
||||
]
|
||||
):
|
||||
# Call the endpoint
|
||||
result = await network_diagnostics(auth=mock_auth)
|
||||
|
||||
# Verify response structure
|
||||
assert isinstance(result, dict)
|
||||
assert "status" in result
|
||||
assert "data" in result
|
||||
assert result["status"] == "success"
|
||||
|
||||
# Verify data structure
|
||||
data = result["data"]
|
||||
assert "internet_connected" in data
|
||||
assert "dns_working" in data
|
||||
assert "aniworld_reachable" in data
|
||||
assert "tests" in data
|
||||
|
||||
# Verify values
|
||||
assert data["internet_connected"] is True
|
||||
assert data["dns_working"] is True
|
||||
assert data["aniworld_reachable"] is True
|
||||
assert len(data["tests"]) == 4
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_network_diagnostics_aniworld_unreachable(self):
|
||||
"""Test diagnostics when aniworld.to is unreachable."""
|
||||
mock_auth = {"user_id": "test_user"}
|
||||
|
||||
with patch(
|
||||
"src.server.api.diagnostics.check_dns",
|
||||
return_value=True
|
||||
), patch(
|
||||
"src.server.api.diagnostics.check_host_connectivity",
|
||||
side_effect=[
|
||||
NetworkTestResult(
|
||||
host="google.com",
|
||||
reachable=True,
|
||||
response_time_ms=50.5
|
||||
),
|
||||
NetworkTestResult(
|
||||
host="cloudflare.com",
|
||||
reachable=True,
|
||||
response_time_ms=30.2
|
||||
),
|
||||
NetworkTestResult(
|
||||
host="github.com",
|
||||
reachable=True,
|
||||
response_time_ms=100.0
|
||||
),
|
||||
NetworkTestResult(
|
||||
host="aniworld.to",
|
||||
reachable=False,
|
||||
error="Connection timeout"
|
||||
),
|
||||
]
|
||||
):
|
||||
result = await network_diagnostics(auth=mock_auth)
|
||||
|
||||
# Verify aniworld is marked as unreachable
|
||||
assert result["status"] == "success"
|
||||
assert result["data"]["aniworld_reachable"] is False
|
||||
assert result["data"]["internet_connected"] is True
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_network_diagnostics_all_unreachable(self):
|
||||
"""Test diagnostics when all hosts are unreachable."""
|
||||
mock_auth = {"user_id": "test_user"}
|
||||
|
||||
with patch(
|
||||
"src.server.api.diagnostics.check_dns",
|
||||
return_value=False
|
||||
), patch(
|
||||
"src.server.api.diagnostics.check_host_connectivity",
|
||||
side_effect=[
|
||||
NetworkTestResult(
|
||||
host="google.com",
|
||||
reachable=False,
|
||||
error="Connection timeout"
|
||||
),
|
||||
NetworkTestResult(
|
||||
host="cloudflare.com",
|
||||
reachable=False,
|
||||
error="Connection timeout"
|
||||
),
|
||||
NetworkTestResult(
|
||||
host="github.com",
|
||||
reachable=False,
|
||||
error="Connection timeout"
|
||||
),
|
||||
NetworkTestResult(
|
||||
host="aniworld.to",
|
||||
reachable=False,
|
||||
error="Connection timeout"
|
||||
),
|
||||
]
|
||||
):
|
||||
result = await network_diagnostics(auth=mock_auth)
|
||||
|
||||
# Verify all are unreachable
|
||||
assert result["status"] == "success"
|
||||
assert result["data"]["internet_connected"] is False
|
||||
assert result["data"]["dns_working"] is False
|
||||
assert result["data"]["aniworld_reachable"] is False
|
||||
|
||||
|
||||
class TestNetworkHelpers:
|
||||
"""Test network helper functions."""
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_check_dns_success(self):
|
||||
"""Test DNS check when DNS is working."""
|
||||
with patch("socket.gethostbyname", return_value="142.250.185.78"):
|
||||
result = await check_dns()
|
||||
assert result is True
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_check_dns_failure(self):
|
||||
"""Test DNS check when DNS fails."""
|
||||
import socket
|
||||
with patch(
|
||||
"socket.gethostbyname",
|
||||
side_effect=socket.gaierror("DNS lookup failed")
|
||||
):
|
||||
result = await check_dns()
|
||||
assert result is False
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_host_connectivity_success(self):
|
||||
"""Test host connectivity check when host is reachable."""
|
||||
with patch(
|
||||
"socket.create_connection",
|
||||
return_value=MagicMock()
|
||||
):
|
||||
result = await check_host_connectivity("google.com", 80)
|
||||
assert result.host == "google.com"
|
||||
assert result.reachable is True
|
||||
assert result.response_time_ms is not None
|
||||
assert result.response_time_ms >= 0
|
||||
assert result.error is None
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_host_connectivity_timeout(self):
|
||||
"""Test host connectivity when connection times out."""
|
||||
import asyncio
|
||||
with patch(
|
||||
"socket.create_connection",
|
||||
side_effect=asyncio.TimeoutError()
|
||||
):
|
||||
result = await check_host_connectivity("example.com", 80, 1.0)
|
||||
assert result.host == "example.com"
|
||||
assert result.reachable is False
|
||||
assert result.error == "Connection timeout"
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_host_connectivity_dns_failure(self):
|
||||
"""Test host connectivity when DNS resolution fails."""
|
||||
import socket
|
||||
with patch(
|
||||
"socket.create_connection",
|
||||
side_effect=socket.gaierror("Name resolution failed")
|
||||
):
|
||||
result = await check_host_connectivity("invalid.host", 80)
|
||||
assert result.host == "invalid.host"
|
||||
assert result.reachable is False
|
||||
assert "DNS resolution failed" in result.error
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_host_connectivity_connection_refused(self):
|
||||
"""Test host connectivity when connection is refused."""
|
||||
with patch(
|
||||
"socket.create_connection",
|
||||
side_effect=ConnectionRefusedError()
|
||||
):
|
||||
result = await check_host_connectivity("localhost", 12345)
|
||||
assert result.host == "localhost"
|
||||
assert result.reachable is False
|
||||
assert result.error == "Connection refused"
|
||||
403
tests/unit/test_download_progress_websocket.py
Normal file
403
tests/unit/test_download_progress_websocket.py
Normal file
@@ -0,0 +1,403 @@
|
||||
"""Unit tests for download progress WebSocket updates.
|
||||
|
||||
This module tests the integration between download service progress tracking
|
||||
and WebSocket broadcasting to ensure real-time updates are properly sent
|
||||
to connected clients.
|
||||
"""
|
||||
import asyncio
|
||||
from typing import Any, Dict, List
|
||||
from unittest.mock import Mock, patch
|
||||
|
||||
import pytest
|
||||
|
||||
from src.server.models.download import (
|
||||
DownloadPriority,
|
||||
DownloadProgress,
|
||||
EpisodeIdentifier,
|
||||
)
|
||||
from src.server.services.anime_service import AnimeService
|
||||
from src.server.services.download_service import DownloadService
|
||||
from src.server.services.progress_service import ProgressService
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def mock_series_app():
|
||||
"""Mock SeriesApp for testing."""
|
||||
app = Mock()
|
||||
app.series_list = []
|
||||
app.search = Mock(return_value=[])
|
||||
app.ReScan = Mock()
|
||||
|
||||
# Mock download with progress callback
|
||||
def mock_download(
|
||||
serie_folder, season, episode, key, callback=None, **kwargs
|
||||
):
|
||||
"""Simulate download with progress updates."""
|
||||
if callback:
|
||||
# Simulate progress updates
|
||||
callback({
|
||||
'percent': 25.0,
|
||||
'downloaded_mb': 25.0,
|
||||
'total_mb': 100.0,
|
||||
'speed_mbps': 2.5,
|
||||
'eta_seconds': 30,
|
||||
})
|
||||
callback({
|
||||
'percent': 50.0,
|
||||
'downloaded_mb': 50.0,
|
||||
'total_mb': 100.0,
|
||||
'speed_mbps': 2.5,
|
||||
'eta_seconds': 20,
|
||||
})
|
||||
callback({
|
||||
'percent': 100.0,
|
||||
'downloaded_mb': 100.0,
|
||||
'total_mb': 100.0,
|
||||
'speed_mbps': 2.5,
|
||||
'eta_seconds': 0,
|
||||
})
|
||||
|
||||
# Return success result
|
||||
result = Mock()
|
||||
result.success = True
|
||||
result.message = "Download completed"
|
||||
return result
|
||||
|
||||
app.download = Mock(side_effect=mock_download)
|
||||
return app
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
def progress_service():
|
||||
"""Create a ProgressService instance for testing."""
|
||||
return ProgressService()
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
async def anime_service(mock_series_app, progress_service):
|
||||
"""Create an AnimeService with mocked dependencies."""
|
||||
with patch(
|
||||
"src.server.services.anime_service.SeriesApp",
|
||||
return_value=mock_series_app
|
||||
):
|
||||
service = AnimeService(
|
||||
directory="/test/anime",
|
||||
progress_service=progress_service,
|
||||
)
|
||||
yield service
|
||||
|
||||
|
||||
@pytest.fixture
|
||||
async def download_service(anime_service, progress_service):
|
||||
"""Create a DownloadService with dependencies."""
|
||||
service = DownloadService(
|
||||
anime_service=anime_service,
|
||||
progress_service=progress_service,
|
||||
persistence_path="/tmp/test_download_progress_queue.json",
|
||||
)
|
||||
yield service
|
||||
await service.stop()
|
||||
|
||||
|
||||
class TestDownloadProgressWebSocket:
|
||||
"""Test download progress WebSocket broadcasting."""
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_progress_callback_broadcasts_updates(
|
||||
self, download_service
|
||||
):
|
||||
"""Test that progress callback broadcasts updates via WebSocket."""
|
||||
broadcasts: List[Dict[str, Any]] = []
|
||||
|
||||
async def mock_broadcast(update_type: str, data: dict):
|
||||
"""Capture broadcast calls."""
|
||||
broadcasts.append({"type": update_type, "data": data})
|
||||
|
||||
download_service.set_broadcast_callback(mock_broadcast)
|
||||
|
||||
# Add item to queue
|
||||
item_ids = await download_service.add_to_queue(
|
||||
serie_id="test_serie_1",
|
||||
serie_folder="test_folder",
|
||||
serie_name="Test Anime",
|
||||
episodes=[EpisodeIdentifier(season=1, episode=1)],
|
||||
priority=DownloadPriority.NORMAL,
|
||||
)
|
||||
|
||||
assert len(item_ids) == 1
|
||||
|
||||
# Start processing - this should trigger download with progress
|
||||
result = await download_service.start_queue_processing()
|
||||
assert result is not None
|
||||
|
||||
# Wait for download to process
|
||||
await asyncio.sleep(0.5)
|
||||
|
||||
# Filter progress broadcasts
|
||||
progress_broadcasts = [
|
||||
b for b in broadcasts if b["type"] == "download_progress"
|
||||
]
|
||||
|
||||
# Should have received multiple progress updates
|
||||
assert len(progress_broadcasts) >= 2
|
||||
|
||||
# Verify progress data structure
|
||||
for broadcast in progress_broadcasts:
|
||||
data = broadcast["data"]
|
||||
assert "download_id" in data or "item_id" in data
|
||||
assert "progress" in data
|
||||
|
||||
progress = data["progress"]
|
||||
assert "percent" in progress
|
||||
assert "downloaded_mb" in progress
|
||||
assert "total_mb" in progress
|
||||
assert 0 <= progress["percent"] <= 100
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_progress_updates_include_episode_info(
|
||||
self, download_service
|
||||
):
|
||||
"""Test that progress updates include episode information."""
|
||||
broadcasts: List[Dict[str, Any]] = []
|
||||
|
||||
async def mock_broadcast(update_type: str, data: dict):
|
||||
broadcasts.append({"type": update_type, "data": data})
|
||||
|
||||
download_service.set_broadcast_callback(mock_broadcast)
|
||||
|
||||
# Add item with specific episode info
|
||||
await download_service.add_to_queue(
|
||||
serie_id="test_serie_2",
|
||||
serie_folder="test_folder",
|
||||
serie_name="My Test Anime",
|
||||
episodes=[EpisodeIdentifier(season=2, episode=5)],
|
||||
priority=DownloadPriority.HIGH,
|
||||
)
|
||||
|
||||
# Start processing
|
||||
await download_service.start_queue_processing()
|
||||
await asyncio.sleep(0.5)
|
||||
|
||||
# Find progress broadcasts
|
||||
progress_broadcasts = [
|
||||
b for b in broadcasts if b["type"] == "download_progress"
|
||||
]
|
||||
|
||||
assert len(progress_broadcasts) > 0
|
||||
|
||||
# Verify episode info is included
|
||||
data = progress_broadcasts[0]["data"]
|
||||
assert data["serie_name"] == "My Test Anime"
|
||||
assert data["season"] == 2
|
||||
assert data["episode"] == 5
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_progress_percent_increases(self, download_service):
|
||||
"""Test that progress percentage increases over time."""
|
||||
broadcasts: List[Dict[str, Any]] = []
|
||||
|
||||
async def mock_broadcast(update_type: str, data: dict):
|
||||
broadcasts.append({"type": update_type, "data": data})
|
||||
|
||||
download_service.set_broadcast_callback(mock_broadcast)
|
||||
|
||||
await download_service.add_to_queue(
|
||||
serie_id="test_serie_3",
|
||||
serie_folder="test_folder",
|
||||
serie_name="Progress Test",
|
||||
episodes=[EpisodeIdentifier(season=1, episode=1)],
|
||||
)
|
||||
|
||||
await download_service.start_queue_processing()
|
||||
await asyncio.sleep(0.5)
|
||||
|
||||
# Get progress broadcasts in order
|
||||
progress_broadcasts = [
|
||||
b for b in broadcasts if b["type"] == "download_progress"
|
||||
]
|
||||
|
||||
# Verify we have multiple updates
|
||||
assert len(progress_broadcasts) >= 2
|
||||
|
||||
# Verify progress increases
|
||||
percentages = [
|
||||
b["data"]["progress"]["percent"] for b in progress_broadcasts
|
||||
]
|
||||
|
||||
# Each percentage should be >= the previous one
|
||||
for i in range(1, len(percentages)):
|
||||
assert percentages[i] >= percentages[i - 1]
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_progress_includes_speed_and_eta(self, download_service):
|
||||
"""Test that progress updates include speed and ETA."""
|
||||
broadcasts: List[Dict[str, Any]] = []
|
||||
|
||||
async def mock_broadcast(update_type: str, data: dict):
|
||||
broadcasts.append({"type": update_type, "data": data})
|
||||
|
||||
download_service.set_broadcast_callback(mock_broadcast)
|
||||
|
||||
await download_service.add_to_queue(
|
||||
serie_id="test_serie_4",
|
||||
serie_folder="test_folder",
|
||||
serie_name="Speed Test",
|
||||
episodes=[EpisodeIdentifier(season=1, episode=1)],
|
||||
)
|
||||
|
||||
await download_service.start_queue_processing()
|
||||
await asyncio.sleep(0.5)
|
||||
|
||||
progress_broadcasts = [
|
||||
b for b in broadcasts if b["type"] == "download_progress"
|
||||
]
|
||||
|
||||
assert len(progress_broadcasts) > 0
|
||||
|
||||
# Check that speed and ETA are present
|
||||
progress = progress_broadcasts[0]["data"]["progress"]
|
||||
assert "speed_mbps" in progress
|
||||
assert "eta_seconds" in progress
|
||||
|
||||
# Speed and ETA should be numeric (or None)
|
||||
if progress["speed_mbps"] is not None:
|
||||
assert isinstance(progress["speed_mbps"], (int, float))
|
||||
if progress["eta_seconds"] is not None:
|
||||
assert isinstance(progress["eta_seconds"], (int, float))
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_no_broadcast_without_callback(self, download_service):
|
||||
"""Test that no errors occur when broadcast callback is not set."""
|
||||
# Don't set broadcast callback
|
||||
|
||||
await download_service.add_to_queue(
|
||||
serie_id="test_serie_5",
|
||||
serie_folder="test_folder",
|
||||
serie_name="No Broadcast Test",
|
||||
episodes=[EpisodeIdentifier(season=1, episode=1)],
|
||||
)
|
||||
|
||||
# Should complete without errors
|
||||
await download_service.start_queue_processing()
|
||||
await asyncio.sleep(0.5)
|
||||
|
||||
# Verify download completed successfully
|
||||
status = await download_service.get_queue_status()
|
||||
assert len(status.completed_downloads) == 1
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_broadcast_error_handling(self, download_service):
|
||||
"""Test that broadcast errors don't break download process."""
|
||||
error_count = 0
|
||||
|
||||
async def failing_broadcast(update_type: str, data: dict):
|
||||
"""Broadcast that always fails."""
|
||||
nonlocal error_count
|
||||
error_count += 1
|
||||
raise RuntimeError("Broadcast failed")
|
||||
|
||||
download_service.set_broadcast_callback(failing_broadcast)
|
||||
|
||||
await download_service.add_to_queue(
|
||||
serie_id="test_serie_6",
|
||||
serie_folder="test_folder",
|
||||
serie_name="Error Handling Test",
|
||||
episodes=[EpisodeIdentifier(season=1, episode=1)],
|
||||
)
|
||||
|
||||
# Should complete despite broadcast errors
|
||||
await download_service.start_queue_processing()
|
||||
await asyncio.sleep(0.5)
|
||||
|
||||
# Verify download still completed
|
||||
status = await download_service.get_queue_status()
|
||||
assert len(status.completed_downloads) == 1
|
||||
|
||||
# Verify broadcast was attempted
|
||||
assert error_count > 0
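# The behaviour asserted above implies the service wraps the broadcast call in
# a guard, e.g. (assumed pattern, not taken from the implementation):
#
#     try:
#         await self._broadcast_callback("download_progress", payload)
#     except Exception:
#         logger.warning("Broadcast failed; download continues")
#
# so a failing WebSocket layer never aborts an in-flight download.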
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_multiple_downloads_broadcast_separately(
|
||||
self, download_service
|
||||
):
|
||||
"""Test that multiple downloads broadcast their progress separately."""
|
||||
broadcasts: List[Dict[str, Any]] = []
|
||||
|
||||
async def mock_broadcast(update_type: str, data: dict):
|
||||
broadcasts.append({"type": update_type, "data": data})
|
||||
|
||||
download_service.set_broadcast_callback(mock_broadcast)
|
||||
|
||||
# Add multiple episodes
|
||||
item_ids = await download_service.add_to_queue(
|
||||
serie_id="test_serie_7",
|
||||
serie_folder="test_folder",
|
||||
serie_name="Multi Episode Test",
|
||||
episodes=[
|
||||
EpisodeIdentifier(season=1, episode=1),
|
||||
EpisodeIdentifier(season=1, episode=2),
|
||||
],
|
||||
)
|
||||
|
||||
assert len(item_ids) == 2
|
||||
|
||||
# Start processing
|
||||
await download_service.start_queue_processing()
|
||||
await asyncio.sleep(1.0) # Give time for both downloads
|
||||
|
||||
# Get progress broadcasts
|
||||
progress_broadcasts = [
|
||||
b for b in broadcasts if b["type"] == "download_progress"
|
||||
]
|
||||
|
||||
# Should have progress for both episodes
|
||||
assert len(progress_broadcasts) >= 4 # At least 2 updates per episode
|
||||
|
||||
# Verify different download IDs
|
||||
download_ids = set()
|
||||
for broadcast in progress_broadcasts:
|
||||
download_id = (
|
||||
broadcast["data"].get("download_id")
|
||||
or broadcast["data"].get("item_id")
|
||||
)
|
||||
if download_id:
|
||||
download_ids.add(download_id)
|
||||
|
||||
# Should have at least 2 unique download IDs
|
||||
assert len(download_ids) >= 2
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_progress_data_format_matches_model(self, download_service):
|
||||
"""Test that broadcast data matches DownloadProgress model."""
|
||||
broadcasts: List[Dict[str, Any]] = []
|
||||
|
||||
async def mock_broadcast(update_type: str, data: dict):
|
||||
broadcasts.append({"type": update_type, "data": data})
|
||||
|
||||
download_service.set_broadcast_callback(mock_broadcast)
|
||||
|
||||
await download_service.add_to_queue(
|
||||
serie_id="test_serie_8",
|
||||
serie_folder="test_folder",
|
||||
serie_name="Model Test",
|
||||
episodes=[EpisodeIdentifier(season=1, episode=1)],
|
||||
)
|
||||
|
||||
await download_service.start_queue_processing()
|
||||
await asyncio.sleep(0.5)
|
||||
|
||||
progress_broadcasts = [
|
||||
b for b in broadcasts if b["type"] == "download_progress"
|
||||
]
|
||||
|
||||
assert len(progress_broadcasts) > 0
|
||||
|
||||
# Verify progress can be parsed as DownloadProgress
|
||||
progress_data = progress_broadcasts[0]["data"]["progress"]
|
||||
progress = DownloadProgress(**progress_data)
|
||||
|
||||
# Verify required fields
|
||||
assert isinstance(progress.percent, float)
|
||||
assert isinstance(progress.downloaded_mb, float)
|
||||
assert 0 <= progress.percent <= 100
|
||||
assert progress.downloaded_mb >= 0
|
||||
@@ -1,7 +1,7 @@
|
||||
"""Unit tests for the download queue service.
|
||||
|
||||
Tests cover queue management, priority handling, persistence,
|
||||
concurrent downloads, and error scenarios.
|
||||
Tests cover queue management, manual download control, persistence,
|
||||
and error scenarios for the simplified download service.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
@@ -42,7 +42,6 @@ def download_service(mock_anime_service, temp_persistence_path):
|
||||
"""Create a DownloadService instance for testing."""
|
||||
return DownloadService(
|
||||
anime_service=mock_anime_service,
|
||||
max_concurrent_downloads=2,
|
||||
max_retries=3,
|
||||
persistence_path=temp_persistence_path,
|
||||
)
|
||||
@@ -61,11 +60,10 @@ class TestDownloadServiceInitialization:
|
||||
)
|
||||
|
||||
assert len(service._pending_queue) == 0
|
||||
assert len(service._active_downloads) == 0
|
||||
assert service._active_download is None
|
||||
assert len(service._completed_items) == 0
|
||||
assert len(service._failed_items) == 0
|
||||
assert service._is_running is False
|
||||
assert service._is_paused is False
|
||||
assert service._is_stopped is True
|
||||
|
||||
def test_initialization_loads_persisted_queue(
|
||||
self, mock_anime_service, temp_persistence_path
|
||||
@@ -152,29 +150,6 @@ class TestQueueManagement:
|
||||
assert len(item_ids) == 3
|
||||
assert len(download_service._pending_queue) == 3
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_add_high_priority_to_front(self, download_service):
|
||||
"""Test that high priority items are added to front of queue."""
|
||||
# Add normal priority item
|
||||
await download_service.add_to_queue(
|
||||
serie_id="series-1",
|
||||
serie_name="Test Series",
|
||||
episodes=[EpisodeIdentifier(season=1, episode=1)],
|
||||
priority=DownloadPriority.NORMAL,
|
||||
)
|
||||
|
||||
# Add high priority item
|
||||
await download_service.add_to_queue(
|
||||
serie_id="series-2",
|
||||
serie_name="Priority Series",
|
||||
episodes=[EpisodeIdentifier(season=1, episode=1)],
|
||||
priority=DownloadPriority.HIGH,
|
||||
)
|
||||
|
||||
# High priority should be at front
|
||||
assert download_service._pending_queue[0].serie_id == "series-2"
|
||||
assert download_service._pending_queue[1].serie_id == "series-1"
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_remove_from_pending_queue(self, download_service):
|
||||
"""Test removing items from pending queue."""
|
||||
@@ -191,32 +166,108 @@ class TestQueueManagement:
|
||||
assert len(download_service._pending_queue) == 0
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_reorder_queue(self, download_service):
|
||||
"""Test reordering items in queue."""
|
||||
# Add three items
|
||||
async def test_start_next_download(self, download_service):
|
||||
"""Test starting the next download from queue."""
|
||||
# Add items to queue
|
||||
item_ids = await download_service.add_to_queue(
|
||||
serie_id="series-1",
|
||||
serie_name="Test Series",
|
||||
episodes=[
|
||||
EpisodeIdentifier(season=1, episode=1),
|
||||
EpisodeIdentifier(season=1, episode=2),
|
||||
],
|
||||
)
|
||||
|
||||
# Start next download
|
||||
started_id = await download_service.start_next_download()
|
||||
|
||||
assert started_id is not None
|
||||
assert started_id == item_ids[0]
|
||||
assert len(download_service._pending_queue) == 1
|
||||
assert download_service._is_stopped is False
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_start_next_download_empty_queue(self, download_service):
|
||||
"""Test starting download with empty queue returns None."""
|
||||
result = await download_service.start_next_download()
|
||||
assert result is None
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_start_next_download_already_active(
|
||||
self, download_service, mock_anime_service
|
||||
):
|
||||
"""Test that starting download while one is active raises error."""
|
||||
# Add items and start one
|
||||
await download_service.add_to_queue(
|
||||
serie_id="series-1",
|
||||
serie_name="Series 1",
|
||||
episodes=[EpisodeIdentifier(season=1, episode=1)],
|
||||
serie_name="Test Series",
|
||||
episodes=[
|
||||
EpisodeIdentifier(season=1, episode=1),
|
||||
EpisodeIdentifier(season=1, episode=2),
|
||||
],
|
||||
)
|
||||
|
||||
# Make download slow so it stays active
|
||||
async def slow_download(**kwargs):
|
||||
await asyncio.sleep(10)
|
||||
|
||||
mock_anime_service.download = AsyncMock(side_effect=slow_download)
|
||||
|
||||
# Start first download (will block for 10s in background)
|
||||
item_id = await download_service.start_next_download()
|
||||
assert item_id is not None
|
||||
await asyncio.sleep(0.1) # Let it start processing
|
||||
|
||||
# Try to start another - should fail because one is active
|
||||
with pytest.raises(DownloadServiceError, match="already in progress"):
|
||||
await download_service.start_next_download()
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_stop_downloads(self, download_service):
|
||||
"""Test stopping queue processing."""
|
||||
await download_service.stop_downloads()
|
||||
assert download_service._is_stopped is True
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_download_completion_moves_to_list(
|
||||
self, download_service, mock_anime_service
|
||||
):
|
||||
"""Test successful download moves item to completed list."""
|
||||
# Add item
|
||||
await download_service.add_to_queue(
|
||||
serie_id="series-2",
|
||||
serie_name="Series 2",
|
||||
episodes=[EpisodeIdentifier(season=1, episode=1)],
|
||||
)
|
||||
await download_service.add_to_queue(
|
||||
serie_id="series-3",
|
||||
serie_name="Series 3",
|
||||
serie_id="series-1",
|
||||
serie_name="Test Series",
|
||||
episodes=[EpisodeIdentifier(season=1, episode=1)],
|
||||
)
|
||||
|
||||
# Move last item to position 0
|
||||
item_to_move = download_service._pending_queue[2].id
|
||||
success = await download_service.reorder_queue(item_to_move, 0)
|
||||
# Start and wait for completion
|
||||
await download_service.start_next_download()
|
||||
await asyncio.sleep(0.2) # Wait for download to complete
|
||||
|
||||
assert success is True
|
||||
assert download_service._pending_queue[0].id == item_to_move
|
||||
assert download_service._pending_queue[0].serie_id == "series-3"
|
||||
assert len(download_service._completed_items) == 1
|
||||
assert download_service._active_download is None
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_download_failure_moves_to_list(
|
||||
self, download_service, mock_anime_service
|
||||
):
|
||||
"""Test failed download moves item to failed list."""
|
||||
# Make download fail
|
||||
mock_anime_service.download = AsyncMock(return_value=False)
|
||||
|
||||
# Add item
|
||||
await download_service.add_to_queue(
|
||||
serie_id="series-1",
|
||||
serie_name="Test Series",
|
||||
episodes=[EpisodeIdentifier(season=1, episode=1)],
|
||||
)
|
||||
|
||||
# Start and wait for failure
|
||||
await download_service.start_next_download()
|
||||
await asyncio.sleep(0.2) # Wait for download to fail
|
||||
|
||||
assert len(download_service._failed_items) == 1
|
||||
assert download_service._active_download is None
|
||||
|
||||
|
||||
class TestQueueStatus:
|
||||
@@ -237,6 +288,7 @@ class TestQueueStatus:
|
||||
|
||||
status = await download_service.get_queue_status()
|
||||
|
||||
# Queue is stopped until start_next_download() is called
|
||||
assert status.is_running is False
|
||||
assert status.is_paused is False
|
||||
assert len(status.pending_queue) == 2
|
||||
@@ -270,19 +322,6 @@ class TestQueueStatus:
|
||||
class TestQueueControl:
|
||||
"""Test queue control operations."""
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_pause_queue(self, download_service):
|
||||
"""Test pausing the queue."""
|
||||
await download_service.pause_queue()
|
||||
assert download_service._is_paused is True
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_resume_queue(self, download_service):
|
||||
"""Test resuming the queue."""
|
||||
await download_service.pause_queue()
|
||||
await download_service.resume_queue()
|
||||
assert download_service._is_paused is False
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_clear_completed(self, download_service):
|
||||
"""Test clearing completed downloads."""
|
||||
@@ -301,6 +340,37 @@ class TestQueueControl:
|
||||
assert count == 1
|
||||
assert len(download_service._completed_items) == 0
|
||||
|
||||
    @pytest.mark.asyncio
    async def test_clear_pending(self, download_service):
        """Test clearing all pending downloads from the queue."""
        # Add multiple items to the queue
        await download_service.add_to_queue(
            serie_id="series-1",
            serie_folder="test-series-1",
            serie_name="Test Series 1",
            episodes=[EpisodeIdentifier(season=1, episode=1)],
        )
        await download_service.add_to_queue(
            serie_id="series-2",
            serie_folder="test-series-2",
            serie_name="Test Series 2",
            episodes=[
                EpisodeIdentifier(season=1, episode=2),
                EpisodeIdentifier(season=1, episode=3),
            ],
        )

        # Verify items were added
        assert len(download_service._pending_queue) == 3

        # Clear pending queue
        count = await download_service.clear_pending()

        # Verify all pending items were cleared
        assert count == 3
        assert len(download_service._pending_queue) == 0
        assert len(download_service._pending_items_by_id) == 0


class TestPersistence:
    """Test queue persistence functionality."""
@@ -431,6 +501,82 @@ class TestBroadcastCallbacks:
        # Verify callback was called
        mock_callback.assert_called()

    @pytest.mark.asyncio
    async def test_progress_callback_format(self, download_service):
        """Test that progress callback receives correct data format."""
        # Set up a mock callback to capture progress updates
        progress_updates = []

        def capture_progress(progress_data: dict):
            progress_updates.append(progress_data)

        # Mock download to simulate progress
        async def mock_download_with_progress(*args, **kwargs):
            # Get the callback from kwargs
            callback = kwargs.get('callback')
            if callback:
                # Simulate progress updates with the expected format
                callback({
                    'percent': 50.0,
                    'downloaded_mb': 250.5,
                    'total_mb': 501.0,
                    'speed_mbps': 5.2,
                    'eta_seconds': 48,
                })
            return True

        download_service._anime_service.download = mock_download_with_progress
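        # Note: from here the test drives the queue by hand. It pops the
        # pending item and calls _process_download() directly instead of
        # going through start_next_download(), so every captured progress
        # update is known to come from this single item.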

        # Add an item to the queue
        await download_service.add_to_queue(
            serie_id="series-1",
            serie_name="Test Series",
            episodes=[EpisodeIdentifier(season=1, episode=1)],
        )

        # Process the download
        item = download_service._pending_queue.popleft()
        del download_service._pending_items_by_id[item.id]

        # Replace the progress callback with our capture function
        original_callback = download_service._create_progress_callback

        def wrapper(item):
            callback = original_callback(item)

            def wrapped_callback(data):
                capture_progress(data)
                callback(data)

            return wrapped_callback

        download_service._create_progress_callback = wrapper

        await download_service._process_download(item)

        # Verify progress callback was called with correct format
        assert len(progress_updates) > 0
        progress_data = progress_updates[0]

        # Check all expected keys are present
        assert 'percent' in progress_data
        assert 'downloaded_mb' in progress_data
        assert 'total_mb' in progress_data
        assert 'speed_mbps' in progress_data
        assert 'eta_seconds' in progress_data

        # Verify values are of correct type
        assert isinstance(progress_data['percent'], (int, float))
        assert isinstance(progress_data['downloaded_mb'], (int, float))
        assert (
            progress_data['total_mb'] is None
            or isinstance(progress_data['total_mb'], (int, float))
        )
        assert (
            progress_data['speed_mbps'] is None
            or isinstance(progress_data['speed_mbps'], (int, float))
        )


class TestServiceLifecycle:
    """Test service start and stop operations."""
@@ -438,33 +584,29 @@ class TestServiceLifecycle:
    @pytest.mark.asyncio
    async def test_start_service(self, download_service):
        """Test starting the service."""
        # start() is now just for initialization/compatibility
        await download_service.start()
        # No _is_running attribute - simplified service doesn't track this

    @pytest.mark.asyncio
    async def test_stop_service(self, download_service):
        """Test stopping the service."""
        await download_service.start()
        await download_service.stop()
        # Verifies service can be stopped without errors
        # No _is_running attribute in simplified service

    @pytest.mark.asyncio
    async def test_start_already_running(self, download_service):
        """Test starting service when already running."""
        await download_service.start()
        await download_service.start()  # Should not raise error
        # No _is_running attribute in simplified service


class TestErrorHandling:
    """Test error handling in download service."""

    @pytest.mark.asyncio
    async def test_reorder_nonexistent_item(self, download_service):
        """Test reordering non-existent item raises error."""
        with pytest.raises(DownloadServiceError):
            await download_service.reorder_queue("nonexistent-id", 0)

    @pytest.mark.asyncio
    async def test_download_failure_moves_to_failed(self, download_service):
        """Test that download failures are handled correctly."""

@@ -1,237 +0,0 @@
"""Unit tests for monitoring service."""

from datetime import datetime, timedelta
from unittest.mock import AsyncMock, MagicMock

import pytest

from src.server.services.monitoring_service import (
    ErrorMetrics,
    MonitoringService,
    QueueMetrics,
    SystemMetrics,
    get_monitoring_service,
)


def test_monitoring_service_initialization():
    """Test monitoring service initialization."""
    service = MonitoringService()

    assert service is not None
    assert service._error_log == []
    assert service._performance_samples == []


def test_get_system_metrics():
    """Test system metrics collection."""
    service = MonitoringService()
    metrics = service.get_system_metrics()

    assert isinstance(metrics, SystemMetrics)
    assert metrics.cpu_percent >= 0
    assert metrics.memory_percent >= 0
    assert metrics.disk_percent >= 0
    assert metrics.uptime_seconds > 0
    assert metrics.memory_available_mb > 0
    assert metrics.disk_free_mb > 0


def test_system_metrics_stored():
    """Test that system metrics are stored for performance tracking."""
    service = MonitoringService()

    metrics1 = service.get_system_metrics()
    metrics2 = service.get_system_metrics()

    assert len(service._performance_samples) == 2
    assert service._performance_samples[0] == metrics1
    assert service._performance_samples[1] == metrics2


@pytest.mark.asyncio
async def test_get_queue_metrics_empty():
    """Test queue metrics with no items."""
    service = MonitoringService()
    mock_db = AsyncMock()

    # Mock empty result
    mock_scalars = AsyncMock()
    mock_scalars.all = MagicMock(return_value=[])

    mock_result = AsyncMock()
    mock_result.scalars = MagicMock(return_value=mock_scalars)

    mock_db.execute = AsyncMock(return_value=mock_result)

    metrics = await service.get_queue_metrics(mock_db)

    assert isinstance(metrics, QueueMetrics)
    assert metrics.total_items == 0
    assert metrics.success_rate == 0.0


@pytest.mark.asyncio
async def test_get_queue_metrics_with_items():
    """Test queue metrics with download items."""
    service = MonitoringService()
    mock_db = AsyncMock()

    # Create mock queue items
    item1 = MagicMock()
    item1.status = "COMPLETED"
    item1.total_bytes = 1000000
    item1.downloaded_bytes = 1000000
    item1.download_speed = 1000000

    item2 = MagicMock()
    item2.status = "DOWNLOADING"
    item2.total_bytes = 2000000
    item2.downloaded_bytes = 1000000
    item2.download_speed = 500000

    item3 = MagicMock()
    item3.status = "FAILED"
    item3.total_bytes = 500000
    item3.downloaded_bytes = 0
    item3.download_speed = None
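    # Aggregates across the three mock items: 3,500,000 total bytes and
    # 2,000,000 downloaded bytes, with one of three items completed, which
    # is what the assertions below check.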

    # Mock result
    mock_scalars = AsyncMock()
    mock_scalars.all = MagicMock(return_value=[item1, item2, item3])

    mock_result = AsyncMock()
    mock_result.scalars = MagicMock(return_value=mock_scalars)

    mock_db.execute = AsyncMock(return_value=mock_result)

    metrics = await service.get_queue_metrics(mock_db)

    assert metrics.total_items == 3
    assert metrics.completed_items == 1
    assert metrics.downloading_items == 1
    assert metrics.failed_items == 1
    assert metrics.total_size_bytes == 3500000
    assert metrics.downloaded_bytes == 2000000
    assert metrics.success_rate > 0


def test_log_error():
    """Test error logging."""
    service = MonitoringService()

    service.log_error("Test error 1")
    service.log_error("Test error 2")

    assert len(service._error_log) == 2
    assert service._error_log[0][1] == "Test error 1"
    assert service._error_log[1][1] == "Test error 2"


def test_get_error_metrics_empty():
    """Test error metrics with no errors."""
    service = MonitoringService()
    metrics = service.get_error_metrics()

    assert isinstance(metrics, ErrorMetrics)
    assert metrics.total_errors == 0
    assert metrics.errors_24h == 0
    assert metrics.error_rate_per_hour == 0.0


def test_get_error_metrics_with_errors():
    """Test error metrics with multiple errors."""
    service = MonitoringService()

    service.log_error("ConnectionError: Failed to connect")
    service.log_error("ConnectionError: Timeout")
    service.log_error("TimeoutError: Download timeout")

    metrics = service.get_error_metrics()

    assert metrics.total_errors == 3
    assert metrics.errors_24h == 3
    assert metrics.last_error_time is not None
    assert len(metrics.most_common_errors) > 0


def test_get_error_metrics_old_errors():
    """Test error metrics excludes old errors."""
    service = MonitoringService()

    # Add old error (simulate by directly adding to log)
    old_time = datetime.now() - timedelta(hours=25)
    service._error_log.append((old_time, "Old error"))

    # Add recent error
    service.log_error("Recent error")

    metrics = service.get_error_metrics()

    assert metrics.total_errors == 2
    assert metrics.errors_24h == 1


def test_get_performance_summary():
    """Test performance summary generation."""
    service = MonitoringService()

    # Collect some samples
    service.get_system_metrics()
    service.get_system_metrics()
    service.get_system_metrics()

    summary = service.get_performance_summary()

    assert "cpu" in summary
    assert "memory" in summary
    assert "disk" in summary
    assert "sample_count" in summary
    assert summary["sample_count"] == 3
    assert "current" in summary["cpu"]
    assert "average" in summary["cpu"]
    assert "max" in summary["cpu"]
    assert "min" in summary["cpu"]


def test_get_performance_summary_empty():
    """Test performance summary with no samples."""
    service = MonitoringService()
    summary = service.get_performance_summary()

    assert summary == {}


@pytest.mark.asyncio
async def test_get_comprehensive_status():
    """Test comprehensive system status."""
    service = MonitoringService()
    mock_db = AsyncMock()

    # Mock empty queue
    mock_scalars = AsyncMock()
    mock_scalars.all = MagicMock(return_value=[])

    mock_result = AsyncMock()
    mock_result.scalars = MagicMock(return_value=mock_scalars)

    mock_db.execute = AsyncMock(return_value=mock_result)

    status = await service.get_comprehensive_status(mock_db)

    assert "timestamp" in status
    assert "system" in status
    assert "queue" in status
    assert "errors" in status
    assert "performance" in status
    assert status["system"]["cpu_percent"] >= 0
    assert status["queue"]["total_items"] == 0


def test_get_monitoring_service():
    """Test singleton monitoring service."""
    service1 = get_monitoring_service()
    service2 = get_monitoring_service()

    assert service1 is service2
    assert isinstance(service1, MonitoringService)
@@ -1,269 +0,0 @@
"""Tests for rate limiting middleware."""

from typing import Optional

import httpx
from fastapi import FastAPI, Request
from fastapi.testclient import TestClient

from src.server.middleware.rate_limit import (
    RateLimitConfig,
    RateLimitMiddleware,
    RateLimitStore,
)

# Shim for environments where httpx.Client.__init__ doesn't accept an
# 'app' kwarg (some httpx versions have a different signature). The
# TestClient in Starlette passes `app=` through; to keep tests portable
# we pop it before calling the real initializer.
_orig_httpx_init = httpx.Client.__init__


def _httpx_init_shim(self, *args, **kwargs):
    kwargs.pop("app", None)
    return _orig_httpx_init(self, *args, **kwargs)


httpx.Client.__init__ = _httpx_init_shim


class TestRateLimitStore:
    """Tests for RateLimitStore class."""

    def test_check_limit_allows_within_limits(self):
        """Test that requests within limits are allowed."""
        store = RateLimitStore()

        # First request should be allowed
        allowed, retry_after = store.check_limit("test_id", 10, 100)
        assert allowed is True
        assert retry_after is None

        # Record the request
        store.record_request("test_id")

        # Next request should still be allowed
        allowed, retry_after = store.check_limit("test_id", 10, 100)
        assert allowed is True
        assert retry_after is None

    def test_check_limit_blocks_over_minute_limit(self):
        """Test that requests over minute limit are blocked."""
        store = RateLimitStore()

        # Fill up to the minute limit
        for _ in range(5):
            store.record_request("test_id")

        # Next request should be blocked
        allowed, retry_after = store.check_limit("test_id", 5, 100)
        assert allowed is False
        assert retry_after is not None
        assert retry_after > 0

    def test_check_limit_blocks_over_hour_limit(self):
        """Test that requests over hour limit are blocked."""
        store = RateLimitStore()

        # Fill up to hour limit
        for _ in range(10):
            store.record_request("test_id")

        # Next request should be blocked
        allowed, retry_after = store.check_limit("test_id", 100, 10)
        assert allowed is False
        assert retry_after is not None
        assert retry_after > 0

    def test_get_remaining_requests(self):
        """Test getting remaining requests."""
        store = RateLimitStore()

        # Initially, all requests are remaining
        minute_rem, hour_rem = store.get_remaining_requests(
            "test_id", 10, 100
        )
        assert minute_rem == 10
        assert hour_rem == 100

        # After one request
        store.record_request("test_id")
        minute_rem, hour_rem = store.get_remaining_requests(
            "test_id", 10, 100
        )
        assert minute_rem == 9
        assert hour_rem == 99


class TestRateLimitConfig:
    """Tests for RateLimitConfig class."""

    def test_default_config(self):
        """Test default configuration values."""
        config = RateLimitConfig()
        assert config.requests_per_minute == 60
        assert config.requests_per_hour == 1000
        assert config.authenticated_multiplier == 2.0

    def test_custom_config(self):
        """Test custom configuration values."""
        config = RateLimitConfig(
            requests_per_minute=10,
            requests_per_hour=100,
            authenticated_multiplier=3.0,
        )
        assert config.requests_per_minute == 10
        assert config.requests_per_hour == 100
        assert config.authenticated_multiplier == 3.0


class TestRateLimitMiddleware:
    """Tests for RateLimitMiddleware class."""

    def create_app(
        self, default_config: Optional[RateLimitConfig] = None
    ) -> FastAPI:
        """Create a test FastAPI app with rate limiting.

        Args:
            default_config: Optional default configuration

        Returns:
            Configured FastAPI app
        """
        app = FastAPI()

        # Add rate limiting middleware
        app.add_middleware(
            RateLimitMiddleware,
            default_config=default_config,
        )

        @app.get("/api/test")
        async def test_endpoint():
            return {"message": "success"}

        @app.get("/health")
        async def health_endpoint():
            return {"status": "ok"}

        @app.get("/api/auth/login")
        async def login_endpoint():
            return {"message": "login"}
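        # These three routes give the tests below a default-limited endpoint
        # (/api/test), a health check that bypasses rate limiting (/health),
        # and a login route with a stricter per-endpoint limit
        # (/api/auth/login).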

        return app

    def test_allows_requests_within_limit(self):
        """Test that requests within limit are allowed."""
        app = self.create_app()
        client = TestClient(app)

        # Make several requests within limit
        for _ in range(5):
            response = client.get("/api/test")
            assert response.status_code == 200

    def test_blocks_requests_over_limit(self):
        """Test that requests over limit are blocked."""
        config = RateLimitConfig(
            requests_per_minute=3,
            requests_per_hour=100,
        )
        app = self.create_app(config)
        client = TestClient(app, raise_server_exceptions=False)

        # Make requests up to limit
        for _ in range(3):
            response = client.get("/api/test")
            assert response.status_code == 200

        # Next request should be rate limited
        response = client.get("/api/test")
        assert response.status_code == 429
        assert "Retry-After" in response.headers

    def test_bypass_health_endpoint(self):
        """Test that health endpoint bypasses rate limiting."""
        config = RateLimitConfig(
            requests_per_minute=1,
            requests_per_hour=1,
        )
        app = self.create_app(config)
        client = TestClient(app)

        # Make many requests to health endpoint
        for _ in range(10):
            response = client.get("/health")
            assert response.status_code == 200

    def test_endpoint_specific_limits(self):
        """Test that endpoint-specific limits are applied."""
        app = self.create_app()
        client = TestClient(app, raise_server_exceptions=False)

        # Login endpoint has strict limit (5 per minute)
        for _ in range(5):
            response = client.get("/api/auth/login")
            assert response.status_code == 200

        # Next login request should be rate limited
        response = client.get("/api/auth/login")
        assert response.status_code == 429

    def test_rate_limit_headers(self):
        """Test that rate limit headers are added to response."""
        app = self.create_app()
        client = TestClient(app)

        response = client.get("/api/test")
        assert response.status_code == 200
        assert "X-RateLimit-Limit-Minute" in response.headers
        assert "X-RateLimit-Limit-Hour" in response.headers
        assert "X-RateLimit-Remaining-Minute" in response.headers
        assert "X-RateLimit-Remaining-Hour" in response.headers

    def test_authenticated_user_multiplier(self):
        """Test that authenticated users get higher limits."""
        config = RateLimitConfig(
            requests_per_minute=5,
            requests_per_hour=100,
            authenticated_multiplier=2.0,
        )
        app = self.create_app(config)

        # Add middleware to simulate authentication
        @app.middleware("http")
        async def add_user_to_state(request: Request, call_next):
            request.state.user_id = "user123"
            response = await call_next(request)
            return response

        client = TestClient(app, raise_server_exceptions=False)

        # Should be able to make 10 requests (5 * 2.0)
        for _ in range(10):
            response = client.get("/api/test")
            assert response.status_code == 200

        # Next request should be rate limited
        response = client.get("/api/test")
        assert response.status_code == 429

    def test_different_ips_tracked_separately(self):
        """Test that different IPs are tracked separately."""
        config = RateLimitConfig(
            requests_per_minute=2,
            requests_per_hour=100,
        )
        app = self.create_app(config)
        client = TestClient(app, raise_server_exceptions=False)

        # Make requests from "different" IPs
        # Note: TestClient uses same IP, but we can test the logic
        for _ in range(2):
            response = client.get("/api/test")
            assert response.status_code == 200

        # Third request should be rate limited
        response = client.get("/api/test")
        assert response.status_code == 429
@@ -353,59 +353,6 @@ class TestSeriesAppReScan:
        assert "cancelled" in result.message.lower()


class TestSeriesAppAsync:
    """Test async operations."""

    @pytest.mark.asyncio
    @patch('src.core.SeriesApp.Loaders')
    @patch('src.core.SeriesApp.SerieScanner')
    @patch('src.core.SeriesApp.SerieList')
    async def test_async_download(
        self, mock_serie_list, mock_scanner, mock_loaders
    ):
        """Test async download."""
        test_dir = "/test/anime"
        app = SeriesApp(test_dir)

        # Mock download
        app.loader.Download = Mock()

        # Perform async download
        result = await app.async_download(
            "anime_folder",
            season=1,
            episode=1,
            key="anime_key"
        )

        # Verify result
        assert isinstance(result, OperationResult)
        assert result.success is True

    @pytest.mark.asyncio
    @patch('src.core.SeriesApp.Loaders')
    @patch('src.core.SeriesApp.SerieScanner')
    @patch('src.core.SeriesApp.SerieList')
    async def test_async_rescan(
        self, mock_serie_list, mock_scanner, mock_loaders
    ):
        """Test async rescan."""
        test_dir = "/test/anime"
        app = SeriesApp(test_dir)

        # Mock scanner
        app.SerieScanner.GetTotalToScan = Mock(return_value=5)
        app.SerieScanner.Reinit = Mock()
        app.SerieScanner.Scan = Mock()

        # Perform async rescan
        result = await app.async_rescan()

        # Verify result
        assert isinstance(result, OperationResult)
        assert result.success is True


class TestSeriesAppCancellation:
    """Test operation cancellation."""