remove part 1

This commit is contained in:
Lukas 2025-10-30 20:06:45 +01:00
parent 627f8b0cc4
commit 4649cf562d
11 changed files with 128 additions and 1765 deletions

View File

@ -17,7 +17,7 @@
"keep_days": 30 "keep_days": 30
}, },
"other": { "other": {
"master_password_hash": "$pbkdf2-sha256$29000$yvlfq9V6z5kzBgDAuNfamw$yOIAkdvscVcnLca5C0CY/1rM3PblB.50gnmiYPycaAk", "master_password_hash": "$pbkdf2-sha256$29000$RghBiDGmVCrFWAvhnDNGiA$b6P/Dl0GF7SJUfbEq7HcZQL5ljyqswE6Gyq3YSoLtOs",
"anime_directory": "/home/lukas/Volume/serien/" "anime_directory": "/home/lukas/Volume/serien/"
}, },
"version": "1.0.0" "version": "1.0.0"

View File

@ -0,0 +1,24 @@
{
"name": "Aniworld",
"data_dir": "data",
"scheduler": {
"enabled": true,
"interval_minutes": 60
},
"logging": {
"level": "INFO",
"file": null,
"max_bytes": null,
"backup_count": 3
},
"backup": {
"enabled": false,
"path": "data/backups",
"keep_days": 30
},
"other": {
"master_password_hash": "$pbkdf2-sha256$29000$8D5nbO3d23sPASAE4FzLWQ$CqXdc8Zryr9Jgyb4AEC/9GrMBnKrmFYt/rEBuYyHFqo",
"anime_directory": "/home/lukas/Volume/serien/"
},
"version": "1.0.0"
}

View File

@ -0,0 +1,24 @@
{
"name": "Aniworld",
"data_dir": "data",
"scheduler": {
"enabled": true,
"interval_minutes": 60
},
"logging": {
"level": "INFO",
"file": null,
"max_bytes": null,
"backup_count": 3
},
"backup": {
"enabled": false,
"path": "data/backups",
"keep_days": 30
},
"other": {
"master_password_hash": "$pbkdf2-sha256$29000$qLVW6j3nPIeQUioFwLjX.g$w.mBJsPDLs5wO9E2NL1wcHrBgVOFaDwSh684x8f1FDg",
"anime_directory": "/home/lukas/Volume/serien/"
},
"version": "1.0.0"
}

View File

@ -1,7 +1,7 @@
{ {
"pending": [ "pending": [
{ {
"id": "c7f0d083-d220-4b77-8436-a63cb1a3cd41", "id": "215e91a4-e9e8-43cb-be9d-19fbe371c29c",
"serie_id": "workflow-series", "serie_id": "workflow-series",
"serie_name": "Workflow Test Series", "serie_name": "Workflow Test Series",
"episode": { "episode": {
@ -11,7 +11,7 @@
}, },
"status": "pending", "status": "pending",
"priority": "high", "priority": "high",
"added_at": "2025-10-30T18:54:21.361837Z", "added_at": "2025-10-30T19:05:25.215695Z",
"started_at": null, "started_at": null,
"completed_at": null, "completed_at": null,
"progress": null, "progress": null,
@ -20,7 +20,7 @@
"source_url": null "source_url": null
}, },
{ {
"id": "9a7081cb-670e-4eb0-85be-f93ad2ee76ef", "id": "c275473f-2df3-4dd0-a4a0-62183694745e",
"serie_id": "series-2", "serie_id": "series-2",
"serie_name": "Series 2", "serie_name": "Series 2",
"episode": { "episode": {
@ -30,7 +30,7 @@
}, },
"status": "pending", "status": "pending",
"priority": "normal", "priority": "normal",
"added_at": "2025-10-30T18:54:20.910955Z", "added_at": "2025-10-30T19:05:24.806513Z",
"started_at": null, "started_at": null,
"completed_at": null, "completed_at": null,
"progress": null, "progress": null,
@ -39,7 +39,7 @@
"source_url": null "source_url": null
}, },
{ {
"id": "f8218676-c3f8-4037-a8e2-7d7a92f5a220", "id": "95b55f5f-90d7-48d0-b42f-d3501a1749bf",
"serie_id": "series-1", "serie_id": "series-1",
"serie_name": "Series 1", "serie_name": "Series 1",
"episode": { "episode": {
@ -49,7 +49,7 @@
}, },
"status": "pending", "status": "pending",
"priority": "normal", "priority": "normal",
"added_at": "2025-10-30T18:54:20.908397Z", "added_at": "2025-10-30T19:05:24.803908Z",
"started_at": null, "started_at": null,
"completed_at": null, "completed_at": null,
"progress": null, "progress": null,
@ -58,7 +58,7 @@
"source_url": null "source_url": null
}, },
{ {
"id": "ddbcdfd8-1d09-48e0-8727-36682c773dae", "id": "b0946fb4-4dca-49d1-a740-459f2d8ddd07",
"serie_id": "series-0", "serie_id": "series-0",
"serie_name": "Series 0", "serie_name": "Series 0",
"episode": { "episode": {
@ -68,7 +68,7 @@
}, },
"status": "pending", "status": "pending",
"priority": "normal", "priority": "normal",
"added_at": "2025-10-30T18:54:20.906192Z", "added_at": "2025-10-30T19:05:24.801620Z",
"started_at": null, "started_at": null,
"completed_at": null, "completed_at": null,
"progress": null, "progress": null,
@ -77,7 +77,7 @@
"source_url": null "source_url": null
}, },
{ {
"id": "835c59d2-7272-4ca3-b60c-0f564908d173", "id": "d3bb12ec-dfeb-46cb-8122-40bb9c736514",
"serie_id": "series-high", "serie_id": "series-high",
"serie_name": "Series High", "serie_name": "Series High",
"episode": { "episode": {
@ -87,7 +87,7 @@
}, },
"status": "pending", "status": "pending",
"priority": "high", "priority": "high",
"added_at": "2025-10-30T18:54:20.574207Z", "added_at": "2025-10-30T19:05:24.503113Z",
"started_at": null, "started_at": null,
"completed_at": null, "completed_at": null,
"progress": null, "progress": null,
@ -96,7 +96,7 @@
"source_url": null "source_url": null
}, },
{ {
"id": "d02479b2-c831-4ff2-af0b-6aaedd502980", "id": "5efb1e06-ce1f-4c92-8966-fb1b0b7b1cae",
"serie_id": "test-series-2", "serie_id": "test-series-2",
"serie_name": "Another Series", "serie_name": "Another Series",
"episode": { "episode": {
@ -106,7 +106,7 @@
}, },
"status": "pending", "status": "pending",
"priority": "high", "priority": "high",
"added_at": "2025-10-30T18:54:20.507314Z", "added_at": "2025-10-30T19:05:24.467651Z",
"started_at": null, "started_at": null,
"completed_at": null, "completed_at": null,
"progress": null, "progress": null,
@ -115,7 +115,7 @@
"source_url": null "source_url": null
}, },
{ {
"id": "b7ac0d47-2cd7-49cb-bbf3-9f3ba2490785", "id": "c4ea258d-43c7-4fa3-8931-65889d2b8f51",
"serie_id": "test-series-1", "serie_id": "test-series-1",
"serie_name": "Test Anime Series", "serie_name": "Test Anime Series",
"episode": { "episode": {
@ -125,7 +125,7 @@
}, },
"status": "pending", "status": "pending",
"priority": "normal", "priority": "normal",
"added_at": "2025-10-30T18:54:20.465257Z", "added_at": "2025-10-30T19:05:24.434002Z",
"started_at": null, "started_at": null,
"completed_at": null, "completed_at": null,
"progress": null, "progress": null,
@ -134,7 +134,7 @@
"source_url": null "source_url": null
}, },
{ {
"id": "e001144e-e536-4104-8a90-1ddd9916c32c", "id": "ee2d8903-e721-4c36-a61e-65fe2a6df9fb",
"serie_id": "test-series-1", "serie_id": "test-series-1",
"serie_name": "Test Anime Series", "serie_name": "Test Anime Series",
"episode": { "episode": {
@ -144,7 +144,7 @@
}, },
"status": "pending", "status": "pending",
"priority": "normal", "priority": "normal",
"added_at": "2025-10-30T18:54:20.465365Z", "added_at": "2025-10-30T19:05:24.434111Z",
"started_at": null, "started_at": null,
"completed_at": null, "completed_at": null,
"progress": null, "progress": null,
@ -153,7 +153,7 @@
"source_url": null "source_url": null
}, },
{ {
"id": "7315d49d-8e84-497d-acaa-b5b8b19980fe", "id": "199e17cc-5eed-4c07-8ae9-ba88298edd49",
"serie_id": "series-normal", "serie_id": "series-normal",
"serie_name": "Series Normal", "serie_name": "Series Normal",
"episode": { "episode": {
@ -163,7 +163,7 @@
}, },
"status": "pending", "status": "pending",
"priority": "normal", "priority": "normal",
"added_at": "2025-10-30T18:54:20.576631Z", "added_at": "2025-10-30T19:05:24.505310Z",
"started_at": null, "started_at": null,
"completed_at": null, "completed_at": null,
"progress": null, "progress": null,
@ -172,7 +172,7 @@
"source_url": null "source_url": null
}, },
{ {
"id": "50c70333-4d65-4a69-8b32-8f1644867681", "id": "6cff3820-65c9-4e40-a4e6-b4303caa3540",
"serie_id": "series-low", "serie_id": "series-low",
"serie_name": "Series Low", "serie_name": "Series Low",
"episode": { "episode": {
@ -182,7 +182,7 @@
}, },
"status": "pending", "status": "pending",
"priority": "low", "priority": "low",
"added_at": "2025-10-30T18:54:20.581167Z", "added_at": "2025-10-30T19:05:24.507439Z",
"started_at": null, "started_at": null,
"completed_at": null, "completed_at": null,
"progress": null, "progress": null,
@ -191,7 +191,7 @@
"source_url": null "source_url": null
}, },
{ {
"id": "4276f2e3-45e9-413d-87ab-e7ffd65f754e", "id": "34b7cded-3dba-4fc6-b022-b27d99a11bee",
"serie_id": "test-series", "serie_id": "test-series",
"serie_name": "Test Series", "serie_name": "Test Series",
"episode": { "episode": {
@ -201,7 +201,7 @@
}, },
"status": "pending", "status": "pending",
"priority": "normal", "priority": "normal",
"added_at": "2025-10-30T18:54:20.841051Z", "added_at": "2025-10-30T19:05:24.737027Z",
"started_at": null, "started_at": null,
"completed_at": null, "completed_at": null,
"progress": null, "progress": null,
@ -210,7 +210,7 @@
"source_url": null "source_url": null
}, },
{ {
"id": "6f47264e-c7a7-4991-b5d3-569c99228580", "id": "a3e6f4a4-a6e3-4840-94c2-ba5911da9207",
"serie_id": "test-series", "serie_id": "test-series",
"serie_name": "Test Series", "serie_name": "Test Series",
"episode": { "episode": {
@ -220,7 +220,7 @@
}, },
"status": "pending", "status": "pending",
"priority": "normal", "priority": "normal",
"added_at": "2025-10-30T18:54:20.948517Z", "added_at": "2025-10-30T19:05:24.839668Z",
"started_at": null, "started_at": null,
"completed_at": null, "completed_at": null,
"progress": null, "progress": null,
@ -229,7 +229,7 @@
"source_url": null "source_url": null
}, },
{ {
"id": "4135367a-cfe9-4d09-95be-a909805e66b7", "id": "21e46b9e-edbf-4021-9f58-64a39f36fe76",
"serie_id": "invalid-series", "serie_id": "invalid-series",
"serie_name": "Invalid Series", "serie_name": "Invalid Series",
"episode": { "episode": {
@ -239,7 +239,7 @@
}, },
"status": "pending", "status": "pending",
"priority": "normal", "priority": "normal",
"added_at": "2025-10-30T18:54:21.031222Z", "added_at": "2025-10-30T19:05:24.906811Z",
"started_at": null, "started_at": null,
"completed_at": null, "completed_at": null,
"progress": null, "progress": null,
@ -248,7 +248,7 @@
"source_url": null "source_url": null
}, },
{ {
"id": "7005ac80-386a-43a3-ad6f-6a2b60aab3b3", "id": "4d0192e7-70e1-49d6-b1d2-f94e5e51c57e",
"serie_id": "test-series", "serie_id": "test-series",
"serie_name": "Test Series", "serie_name": "Test Series",
"episode": { "episode": {
@ -258,7 +258,7 @@
}, },
"status": "pending", "status": "pending",
"priority": "normal", "priority": "normal",
"added_at": "2025-10-30T18:54:21.072761Z", "added_at": "2025-10-30T19:05:24.938376Z",
"started_at": null, "started_at": null,
"completed_at": null, "completed_at": null,
"progress": null, "progress": null,
@ -267,26 +267,7 @@
"source_url": null "source_url": null
}, },
{ {
"id": "32e35da2-d255-405e-829a-02c1a5ba64a5", "id": "dc26caca-98b1-415d-812e-db892d99a059",
"serie_id": "series-2",
"serie_name": "Series 2",
"episode": {
"season": 1,
"episode": 1,
"title": null
},
"status": "pending",
"priority": "normal",
"added_at": "2025-10-30T18:54:21.166090Z",
"started_at": null,
"completed_at": null,
"progress": null,
"error": null,
"retry_count": 0,
"source_url": null
},
{
"id": "28d8d198-2ae2-4e29-89e6-812a444cb5d2",
"serie_id": "series-0", "serie_id": "series-0",
"serie_name": "Series 0", "serie_name": "Series 0",
"episode": { "episode": {
@ -296,7 +277,7 @@
}, },
"status": "pending", "status": "pending",
"priority": "normal", "priority": "normal",
"added_at": "2025-10-30T18:54:21.169681Z", "added_at": "2025-10-30T19:05:25.021924Z",
"started_at": null, "started_at": null,
"completed_at": null, "completed_at": null,
"progress": null, "progress": null,
@ -305,7 +286,7 @@
"source_url": null "source_url": null
}, },
{ {
"id": "f0776c79-a61c-4237-ac57-7eed248431c2", "id": "d7075bc3-6ac4-4d6d-9896-c8fadd6b86b9",
"serie_id": "series-1", "serie_id": "series-1",
"serie_name": "Series 1", "serie_name": "Series 1",
"episode": { "episode": {
@ -315,7 +296,7 @@
}, },
"status": "pending", "status": "pending",
"priority": "normal", "priority": "normal",
"added_at": "2025-10-30T18:54:21.171115Z", "added_at": "2025-10-30T19:05:25.022834Z",
"started_at": null, "started_at": null,
"completed_at": null, "completed_at": null,
"progress": null, "progress": null,
@ -324,26 +305,7 @@
"source_url": null "source_url": null
}, },
{ {
"id": "57101762-1c77-48c6-b8ac-e21bf649f468", "id": "03f547b2-7533-428d-b90f-bb999b7b47ea",
"serie_id": "series-3",
"serie_name": "Series 3",
"episode": {
"season": 1,
"episode": 1,
"title": null
},
"status": "pending",
"priority": "normal",
"added_at": "2025-10-30T18:54:21.171777Z",
"started_at": null,
"completed_at": null,
"progress": null,
"error": null,
"retry_count": 0,
"source_url": null
},
{
"id": "9ef218df-b877-4ff9-be74-a770e5f865b5",
"serie_id": "series-4", "serie_id": "series-4",
"serie_name": "Series 4", "serie_name": "Series 4",
"episode": { "episode": {
@ -353,7 +315,7 @@
}, },
"status": "pending", "status": "pending",
"priority": "normal", "priority": "normal",
"added_at": "2025-10-30T18:54:21.172560Z", "added_at": "2025-10-30T19:05:25.023529Z",
"started_at": null, "started_at": null,
"completed_at": null, "completed_at": null,
"progress": null, "progress": null,
@ -362,7 +324,45 @@
"source_url": null "source_url": null
}, },
{ {
"id": "3fe68c06-4755-4f02-bdd5-a4760f79064f", "id": "a01825cc-430b-470f-a883-e6cf052386d5",
"serie_id": "series-2",
"serie_name": "Series 2",
"episode": {
"season": 1,
"episode": 1,
"title": null
},
"status": "pending",
"priority": "normal",
"added_at": "2025-10-30T19:05:25.024174Z",
"started_at": null,
"completed_at": null,
"progress": null,
"error": null,
"retry_count": 0,
"source_url": null
},
{
"id": "68039d81-8bcf-4aa7-ad33-55545bd6405f",
"serie_id": "series-3",
"serie_name": "Series 3",
"episode": {
"season": 1,
"episode": 1,
"title": null
},
"status": "pending",
"priority": "normal",
"added_at": "2025-10-30T19:05:25.026402Z",
"started_at": null,
"completed_at": null,
"progress": null,
"error": null,
"retry_count": 0,
"source_url": null
},
{
"id": "7a2d009a-778e-4ba4-9e9a-b845e1c36429",
"serie_id": "persistent-series", "serie_id": "persistent-series",
"serie_name": "Persistent Series", "serie_name": "Persistent Series",
"episode": { "episode": {
@ -372,7 +372,7 @@
}, },
"status": "pending", "status": "pending",
"priority": "normal", "priority": "normal",
"added_at": "2025-10-30T18:54:21.254000Z", "added_at": "2025-10-30T19:05:25.107603Z",
"started_at": null, "started_at": null,
"completed_at": null, "completed_at": null,
"progress": null, "progress": null,
@ -381,7 +381,7 @@
"source_url": null "source_url": null
}, },
{ {
"id": "bcd2328c-ba3b-4a5b-a364-1964963324c2", "id": "bb16d9b7-b0ab-45c3-b02f-8b733bda1075",
"serie_id": "ws-series", "serie_id": "ws-series",
"serie_name": "WebSocket Series", "serie_name": "WebSocket Series",
"episode": { "episode": {
@ -391,7 +391,7 @@
}, },
"status": "pending", "status": "pending",
"priority": "normal", "priority": "normal",
"added_at": "2025-10-30T18:54:21.320033Z", "added_at": "2025-10-30T19:05:25.178269Z",
"started_at": null, "started_at": null,
"completed_at": null, "completed_at": null,
"progress": null, "progress": null,
@ -400,7 +400,7 @@
"source_url": null "source_url": null
}, },
{ {
"id": "83e98629-7fe4-46e5-ad15-d60b5e2c2d09", "id": "c8240c65-5ac7-4731-900c-9f02083c1eb4",
"serie_id": "pause-test", "serie_id": "pause-test",
"serie_name": "Pause Test Series", "serie_name": "Pause Test Series",
"episode": { "episode": {
@ -410,7 +410,7 @@
}, },
"status": "pending", "status": "pending",
"priority": "normal", "priority": "normal",
"added_at": "2025-10-30T18:54:21.509480Z", "added_at": "2025-10-30T19:05:25.355677Z",
"started_at": null, "started_at": null,
"completed_at": null, "completed_at": null,
"progress": null, "progress": null,
@ -421,5 +421,5 @@
], ],
"active": [], "active": [],
"failed": [], "failed": [],
"timestamp": "2025-10-30T18:54:21.509760+00:00" "timestamp": "2025-10-30T19:05:25.355950+00:00"
} }

View File

@ -1,214 +0,0 @@
"""Diagnostics API endpoints for Aniworld.
This module provides endpoints for system diagnostics and health checks.
"""
import asyncio
import logging
import socket
from typing import Dict, List, Optional
from fastapi import APIRouter, Depends, HTTPException, status
from pydantic import BaseModel, Field
from src.server.utils.dependencies import require_auth
# Module-level logger, named after this module for filterable output.
logger = logging.getLogger(__name__)
# All diagnostics endpoints are mounted under /api/diagnostics.
router = APIRouter(prefix="/api/diagnostics", tags=["diagnostics"])
class NetworkTestResult(BaseModel):
    """Result of a single network connectivity test.

    Produced by ``check_host_connectivity``: on success
    ``response_time_ms`` is set; on failure ``error`` carries the reason.
    """

    host: str = Field(..., description="Hostname or URL tested")
    reachable: bool = Field(..., description="Whether host is reachable")
    # Only populated when the connection succeeded.
    response_time_ms: Optional[float] = Field(
        None, description="Response time in milliseconds"
    )
    # Only populated when the connection failed.
    error: Optional[str] = Field(None, description="Error message if failed")
class NetworkDiagnostics(BaseModel):
    """Aggregated network diagnostics results for the /network endpoint."""

    # True when at least one probed host was reachable.
    internet_connected: bool = Field(
        ..., description="Overall internet connectivity status"
    )
    dns_working: bool = Field(..., description="DNS resolution status")
    aniworld_reachable: bool = Field(
        ..., description="Aniworld.to connectivity status"
    )
    # One entry per probed host.
    tests: List[NetworkTestResult] = Field(
        ..., description="Individual network tests"
    )
async def check_dns() -> bool:
    """Check whether DNS resolution is working.

    Resolves a well-known hostname in a worker thread so the blocking
    ``socket.gethostbyname`` call does not stall the event loop (the
    original called it directly inside the coroutine).

    Returns:
        bool: True if the lookup succeeded, False on resolution failure.
    """
    loop = asyncio.get_running_loop()
    try:
        # gethostbyname is a blocking C call; run it in the default executor.
        await loop.run_in_executor(None, socket.gethostbyname, "google.com")
        return True
    except socket.gaierror:
        return False
async def check_host_connectivity(
    host: str, port: int = 80, timeout: float = 5.0
) -> NetworkTestResult:
    """Test TCP connectivity to a specific host.

    Args:
        host: Hostname or IP address to test.
        port: Port to test (default: 80).
        timeout: Timeout in seconds (default: 5.0).

    Returns:
        NetworkTestResult with latency on success or the failure reason.
        Never raises; every error is folded into the result object.
    """
    import time

    def _probe() -> None:
        # create_connection returns a live socket; close it right away so
        # repeated probes do not leak file descriptors (the original
        # discarded the socket without closing it).
        sock = socket.create_connection((host, port), timeout=timeout)
        sock.close()

    # monotonic() is immune to wall-clock adjustments, unlike time.time().
    start_time = time.monotonic()
    try:
        loop = asyncio.get_running_loop()
        await asyncio.wait_for(
            loop.run_in_executor(None, _probe),
            timeout=timeout,
        )
        response_time = (time.monotonic() - start_time) * 1000
        return NetworkTestResult(
            host=host,
            reachable=True,
            response_time_ms=round(response_time, 2),
        )
    except asyncio.TimeoutError:
        return NetworkTestResult(
            host=host, reachable=False, error="Connection timeout"
        )
    except socket.gaierror as e:
        return NetworkTestResult(
            host=host, reachable=False, error=f"DNS resolution failed: {e}"
        )
    except ConnectionRefusedError:
        return NetworkTestResult(
            host=host, reachable=False, error="Connection refused"
        )
    except Exception as e:
        # Deliberate catch-all: this is a diagnostic probe and must report
        # rather than raise.
        return NetworkTestResult(
            host=host, reachable=False, error=f"Connection error: {str(e)}"
        )
@router.get("/network")
async def network_diagnostics(
auth: Optional[dict] = Depends(require_auth),
) -> Dict:
"""Run network connectivity diagnostics.
Tests DNS resolution and connectivity to common services including
aniworld.to.
Args:
auth: Authentication token (optional)
Returns:
Dict with status and diagnostics data
Raises:
HTTPException: If diagnostics fail
"""
try:
logger.info("Running network diagnostics")
# Check DNS
dns_working = await check_dns()
# Test connectivity to various hosts including aniworld.to
test_hosts = [
("google.com", 80),
("cloudflare.com", 80),
("github.com", 443),
("aniworld.to", 443),
]
# Run all tests concurrently
test_tasks = [
check_host_connectivity(host, port) for host, port in test_hosts
]
test_results = await asyncio.gather(*test_tasks)
# Determine overall internet connectivity
internet_connected = any(result.reachable for result in test_results)
# Check if aniworld.to is reachable
aniworld_result = next(
(r for r in test_results if r.host == "aniworld.to"),
None
)
aniworld_reachable = (
aniworld_result.reachable if aniworld_result else False
)
logger.info(
f"Network diagnostics complete: "
f"DNS={dns_working}, Internet={internet_connected}, "
f"Aniworld={aniworld_reachable}"
)
# Create diagnostics data
diagnostics_data = NetworkDiagnostics(
internet_connected=internet_connected,
dns_working=dns_working,
aniworld_reachable=aniworld_reachable,
tests=test_results,
)
# Return in standard format expected by frontend
return {
"status": "success",
"data": diagnostics_data.model_dump(),
}
except Exception as e:
logger.exception("Failed to run network diagnostics")
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Failed to run network diagnostics: {str(e)}",
) from e
@router.get("/system", response_model=Dict[str, str])
async def system_info(
auth: Optional[dict] = Depends(require_auth),
) -> Dict[str, str]:
"""Get basic system information.
Args:
auth: Authentication token (optional)
Returns:
Dictionary with system information
"""
import platform
import sys
return {
"platform": platform.platform(),
"python_version": sys.version,
"architecture": platform.machine(),
"processor": platform.processor(),
"hostname": socket.gethostname(),
}

View File

@ -1,459 +0,0 @@
"""Maintenance API endpoints for system housekeeping and diagnostics.
This module exposes cleanup routines, system statistics, maintenance
operations, and health reporting endpoints that rely on the shared system
utilities and monitoring services. The routes allow administrators to
prune logs, inspect disk usage, vacuum or analyze the database, and gather
holistic health metrics for AniWorld deployments."""
import logging
from typing import Any, Dict
from fastapi import APIRouter, Depends, HTTPException
from sqlalchemy.ext.asyncio import AsyncSession
from src.infrastructure.security.database_integrity import DatabaseIntegrityChecker
from src.server.services.monitoring_service import get_monitoring_service
from src.server.utils.dependencies import get_database_session
from src.server.utils.system import get_system_utilities
# Module-level logger, named after this module for filterable output.
logger = logging.getLogger(__name__)
# All maintenance endpoints are mounted under /api/maintenance.
router = APIRouter(prefix="/api/maintenance", tags=["maintenance"])
def get_system_utils():
    """FastAPI dependency wrapper around the shared system utilities."""
    utils = get_system_utilities()
    return utils
@router.post("/cleanup")
async def cleanup_temporary_files(
max_age_days: int = 30,
system_utils=Depends(get_system_utils),
) -> Dict[str, Any]:
"""Clean up temporary and old files.
Args:
max_age_days: Delete files older than this many days.
system_utils: System utilities dependency.
Returns:
dict: Cleanup results.
"""
try:
deleted_logs = system_utils.cleanup_directory(
"logs", "*.log", max_age_days
)
deleted_temp = system_utils.cleanup_directory(
"Temp", "*", max_age_days
)
deleted_dirs = system_utils.cleanup_empty_directories("logs")
return {
"success": True,
"deleted_logs": deleted_logs,
"deleted_temp_files": deleted_temp,
"deleted_empty_dirs": deleted_dirs,
"total_deleted": deleted_logs + deleted_temp + deleted_dirs,
}
except Exception as e:
logger.error(f"Cleanup failed: {e}")
raise HTTPException(status_code=500, detail=str(e))
@router.get("/stats")
async def get_maintenance_stats(
db: AsyncSession = Depends(get_database_session),
system_utils=Depends(get_system_utils),
) -> Dict[str, Any]:
"""Get system maintenance statistics.
Args:
db: Database session dependency.
system_utils: System utilities dependency.
Returns:
dict: Maintenance statistics.
"""
try:
monitoring = get_monitoring_service()
# Get disk usage
disk_info = system_utils.get_disk_usage("/")
# Get logs directory size
logs_size = system_utils.get_directory_size("logs")
data_size = system_utils.get_directory_size("data")
temp_size = system_utils.get_directory_size("Temp")
# Get system info
system_info = system_utils.get_system_info()
# Get queue metrics
queue_metrics = await monitoring.get_queue_metrics(db)
return {
"disk": {
"total_gb": disk_info.total_bytes / (1024**3),
"used_gb": disk_info.used_bytes / (1024**3),
"free_gb": disk_info.free_bytes / (1024**3),
"percent_used": disk_info.percent_used,
},
"directories": {
"logs_mb": logs_size / (1024 * 1024),
"data_mb": data_size / (1024 * 1024),
"temp_mb": temp_size / (1024 * 1024),
},
"system": system_info,
"queue": {
"total_items": queue_metrics.total_items,
"downloaded_gb": queue_metrics.downloaded_bytes / (1024**3),
"total_gb": queue_metrics.total_size_bytes / (1024**3),
},
}
except Exception as e:
logger.error(f"Failed to get maintenance stats: {e}")
raise HTTPException(status_code=500, detail=str(e))
@router.post("/vacuum")
async def vacuum_database(
db: AsyncSession = Depends(get_database_session),
) -> Dict[str, Any]:
"""Optimize database (vacuum).
Args:
db: Database session dependency.
Returns:
dict: Vacuum result.
"""
try:
from sqlalchemy import text
# VACUUM command to optimize database
await db.execute(text("VACUUM"))
await db.commit()
logger.info("Database vacuumed successfully")
return {
"success": True,
"message": "Database optimized successfully",
}
except Exception as e:
logger.error(f"Database vacuum failed: {e}")
raise HTTPException(status_code=500, detail=str(e))
@router.post("/rebuild-index")
async def rebuild_database_indexes(
db: AsyncSession = Depends(get_database_session),
) -> Dict[str, Any]:
"""Rebuild database indexes.
Note: This is a placeholder as SQLite doesn't have REINDEX
for most operations. For production databases, implement
specific index rebuilding logic.
Args:
db: Database session dependency.
Returns:
dict: Rebuild result.
"""
try:
from sqlalchemy import text
# Analyze database for query optimization
await db.execute(text("ANALYZE"))
await db.commit()
logger.info("Database indexes analyzed successfully")
return {
"success": True,
"message": "Database indexes analyzed successfully",
}
except Exception as e:
logger.error(f"Index rebuild failed: {e}")
raise HTTPException(status_code=500, detail=str(e))
@router.post("/prune-logs")
async def prune_old_logs(
days: int = 7,
system_utils=Depends(get_system_utils),
) -> Dict[str, Any]:
"""Remove log files older than specified days.
Args:
days: Keep logs from last N days.
system_utils: System utilities dependency.
Returns:
dict: Pruning results.
"""
try:
deleted = system_utils.cleanup_directory(
"logs", "*.log", max_age_days=days
)
logger.info(f"Pruned {deleted} log files")
return {
"success": True,
"deleted_count": deleted,
"message": f"Deleted {deleted} log files older than {days} days",
}
except Exception as e:
logger.error(f"Log pruning failed: {e}")
raise HTTPException(status_code=500, detail=str(e))
@router.get("/disk-usage")
async def get_disk_usage(
system_utils=Depends(get_system_utils),
) -> Dict[str, Any]:
"""Get detailed disk usage information.
Args:
system_utils: System utilities dependency.
Returns:
dict: Disk usage for all partitions.
"""
try:
disk_infos = system_utils.get_all_disk_usage()
partitions = []
for disk_info in disk_infos:
partitions.append(
{
"path": disk_info.path,
"total_gb": disk_info.total_bytes / (1024**3),
"used_gb": disk_info.used_bytes / (1024**3),
"free_gb": disk_info.free_bytes / (1024**3),
"percent_used": disk_info.percent_used,
}
)
return {
"success": True,
"partitions": partitions,
"total_partitions": len(partitions),
}
except Exception as e:
logger.error(f"Failed to get disk usage: {e}")
raise HTTPException(status_code=500, detail=str(e))
@router.get("/processes")
async def get_running_processes(
limit: int = 10,
system_utils=Depends(get_system_utils),
) -> Dict[str, Any]:
"""Get running processes information.
Args:
limit: Maximum number of processes to return.
system_utils: System utilities dependency.
Returns:
dict: Running processes information.
"""
try:
processes = system_utils.get_all_processes()
# Sort by memory usage and get top N
sorted_processes = sorted(
processes, key=lambda x: x.memory_mb, reverse=True
)
top_processes = []
for proc in sorted_processes[:limit]:
top_processes.append(
{
"pid": proc.pid,
"name": proc.name,
"cpu_percent": round(proc.cpu_percent, 2),
"memory_mb": round(proc.memory_mb, 2),
"status": proc.status,
}
)
return {
"success": True,
"processes": top_processes,
"total_processes": len(processes),
}
except Exception as e:
logger.error(f"Failed to get processes: {e}")
raise HTTPException(status_code=500, detail=str(e))
@router.post("/health-check")
async def full_health_check(
db: AsyncSession = Depends(get_database_session),
system_utils=Depends(get_system_utils),
) -> Dict[str, Any]:
"""Perform full system health check and generate report.
Args:
db: Database session dependency.
system_utils: System utilities dependency.
Returns:
dict: Complete health check report.
"""
try:
monitoring = get_monitoring_service()
# Check database and filesystem
from src.server.api.health import check_database_health
from src.server.api.health import check_filesystem_health as check_fs
db_health = await check_database_health(db)
fs_health = check_fs()
# Get system metrics
system_metrics = monitoring.get_system_metrics()
# Get error metrics
error_metrics = monitoring.get_error_metrics()
# Get queue metrics
queue_metrics = await monitoring.get_queue_metrics(db)
# Determine overall health
issues = []
if db_health.status != "healthy":
issues.append("Database connectivity issue")
if fs_health.get("status") != "healthy":
issues.append("Filesystem accessibility issue")
if system_metrics.cpu_percent > 80:
issues.append(f"High CPU usage: {system_metrics.cpu_percent}%")
if system_metrics.memory_percent > 80:
issues.append(
f"High memory usage: {system_metrics.memory_percent}%"
)
if error_metrics.error_rate_per_hour > 1.0:
issues.append(
f"High error rate: "
f"{error_metrics.error_rate_per_hour:.2f} errors/hour"
)
overall_health = "healthy"
if issues:
overall_health = "degraded" if len(issues) < 3 else "unhealthy"
return {
"overall_health": overall_health,
"issues": issues,
"metrics": {
"database": {
"status": db_health.status,
"connection_time_ms": db_health.connection_time_ms,
},
"filesystem": fs_health,
"system": {
"cpu_percent": system_metrics.cpu_percent,
"memory_percent": system_metrics.memory_percent,
"disk_percent": system_metrics.disk_percent,
},
"queue": {
"total_items": queue_metrics.total_items,
"failed_items": queue_metrics.failed_items,
"success_rate": round(queue_metrics.success_rate, 2),
},
"errors": {
"errors_24h": error_metrics.errors_24h,
"rate_per_hour": round(
error_metrics.error_rate_per_hour, 2
),
},
},
}
except Exception as e:
logger.error(f"Health check failed: {e}")
raise HTTPException(status_code=500, detail=str(e))
@router.get("/integrity/check")
async def check_database_integrity(
db: AsyncSession = Depends(get_database_session),
) -> Dict[str, Any]:
"""Check database integrity.
Verifies:
- No orphaned records
- Valid foreign key references
- No duplicate keys
- Data consistency
Args:
db: Database session dependency.
Returns:
dict: Integrity check results with issues found.
"""
try:
# Convert async session to sync for the checker
# Note: This is a temporary solution. In production,
# consider implementing async version of integrity checker.
from sqlalchemy.orm import Session
sync_session = Session(bind=db.sync_session.bind)
checker = DatabaseIntegrityChecker(sync_session)
results = checker.check_all()
if results["total_issues"] > 0:
logger.warning(
f"Database integrity check found {results['total_issues']} "
f"issues"
)
else:
logger.info("Database integrity check passed")
return {
"success": True,
"timestamp": None, # Add timestamp if needed
"results": results,
}
except Exception as e:
logger.error(f"Integrity check failed: {e}")
raise HTTPException(status_code=500, detail=str(e))
@router.post("/integrity/repair")
async def repair_database_integrity(
db: AsyncSession = Depends(get_database_session),
) -> Dict[str, Any]:
"""Repair database integrity by removing orphaned records.
**Warning**: This operation will delete orphaned records permanently.
Args:
db: Database session dependency.
Returns:
dict: Repair results with count of records removed.
"""
try:
from sqlalchemy.orm import Session
sync_session = Session(bind=db.sync_session.bind)
checker = DatabaseIntegrityChecker(sync_session)
removed_count = checker.repair_orphaned_records()
logger.info(f"Removed {removed_count} orphaned records")
return {
"success": True,
"removed_records": removed_count,
"message": (
f"Successfully removed {removed_count} orphaned records"
),
}
except Exception as e:
logger.error(f"Integrity repair failed: {e}")
raise HTTPException(status_code=500, detail=str(e))

View File

@ -1,531 +0,0 @@
"""Provider management API endpoints.
This module provides REST API endpoints for monitoring and managing
anime providers, including health checks, configuration, and failover.
"""
import logging
from typing import Any, Dict, List, Optional
from fastapi import APIRouter, Depends, HTTPException, status
from pydantic import BaseModel, Field
from src.core.providers.config_manager import ProviderSettings, get_config_manager
from src.core.providers.failover import get_failover
from src.core.providers.health_monitor import get_health_monitor
from src.server.utils.dependencies import require_auth
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/api/providers", tags=["providers"])
# Request/Response Models
class ProviderHealthResponse(BaseModel):
    """Response model for provider health status.

    Field set mirrors the dict produced by the health monitor's
    ``metrics.to_dict()`` (see ``get_provider_health``).
    """
    provider_name: str  # provider identifier as used in API paths
    is_available: bool  # whether the provider is currently usable
    last_check_time: Optional[str] = None  # timestamp of last health check — format set by the monitor
    total_requests: int  # all requests issued to this provider
    successful_requests: int  # requests that completed successfully
    failed_requests: int  # requests that errored
    success_rate: float  # successful/total ratio — units (0-1 vs percent) defined by the monitor
    average_response_time_ms: float  # mean response time in milliseconds
    last_error: Optional[str] = None  # message of the most recent failure, if any
    last_error_time: Optional[str] = None  # when the most recent failure occurred
    consecutive_failures: int  # current unbroken failure streak
    total_bytes_downloaded: int  # cumulative payload bytes fetched
    uptime_percentage: float  # availability over the tracked window
class HealthSummaryResponse(BaseModel):
    """Response model for overall health summary.

    Built directly from ``health_monitor.get_health_summary()`` in
    ``get_providers_health``.
    """
    total_providers: int  # number of providers being monitored
    available_providers: int  # how many are currently available
    availability_percentage: float  # available/total as a percentage
    average_success_rate: float  # mean success rate across providers
    average_response_time_ms: float  # mean response time in milliseconds
    providers: Dict[str, Dict[str, Any]]  # per-provider metric dicts keyed by provider name
class ProviderSettingsRequest(BaseModel):
    """Request model for updating provider settings.

    Every field is optional; only fields the client actually sends are
    applied (``dict(exclude_unset=True)`` in ``update_provider_config``).
    """
    enabled: Optional[bool] = None  # enable/disable the provider
    priority: Optional[int] = None  # selection priority — ordering semantics defined by config manager
    timeout_seconds: Optional[int] = Field(None, gt=0)  # per-request timeout, must be positive
    max_retries: Optional[int] = Field(None, ge=0)  # retry attempts, 0 allowed
    retry_delay_seconds: Optional[float] = Field(None, gt=0)  # delay between retries
    max_concurrent_downloads: Optional[int] = Field(None, gt=0)  # parallel download cap
    bandwidth_limit_mbps: Optional[float] = Field(None, gt=0)  # throughput cap in Mbps
class ProviderSettingsResponse(BaseModel):
    """Response model for provider settings.

    Populated from ``ProviderSettings.to_dict()`` — field names must stay
    in sync with that dict's keys.
    """
    name: str  # provider identifier
    enabled: bool  # whether the provider may be used
    priority: int  # selection priority
    timeout_seconds: int  # per-request timeout
    max_retries: int  # retry attempts on failure
    retry_delay_seconds: float  # delay between retries
    max_concurrent_downloads: int  # parallel download cap
    bandwidth_limit_mbps: Optional[float] = None  # throughput cap; None means unlimited — TODO confirm
class FailoverStatsResponse(BaseModel):
    """Response model for failover statistics.

    Populated from ``failover.get_failover_stats()`` in
    ``get_failover_stats``.
    """
    total_providers: int  # providers registered in the failover chain
    providers: List[str]  # provider names in the chain
    current_provider: str  # provider currently selected
    max_retries: int  # retries before failing over
    retry_delay: float  # delay between retries, in seconds — TODO confirm unit
    health_monitoring_enabled: bool  # whether health checks drive failover
    available_providers: Optional[List[str]] = None  # present only when health monitoring is enabled — TODO confirm
    unavailable_providers: Optional[List[str]] = None  # present only when health monitoring is enabled — TODO confirm
# Health Monitoring Endpoints
@router.get("/health", response_model=HealthSummaryResponse)
async def get_providers_health(
    auth: Optional[dict] = Depends(require_auth),
) -> HealthSummaryResponse:
    """Return the aggregated health summary for all providers.

    Args:
        auth: Authentication token (optional).

    Returns:
        Health summary for all providers.
    """
    try:
        monitor = get_health_monitor()
        return HealthSummaryResponse(**monitor.get_health_summary())
    except Exception as exc:
        logger.error(f"Failed to get provider health: {exc}", exc_info=True)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to retrieve provider health: {str(exc)}",
        )
@router.get("/health/{provider_name}", response_model=ProviderHealthResponse)  # noqa: E501
async def get_provider_health(
    provider_name: str,
    auth: Optional[dict] = Depends(require_auth),
) -> ProviderHealthResponse:
    """Return health metrics for one provider.

    Args:
        provider_name: Name of the provider.
        auth: Authentication token (optional).

    Returns:
        Health metrics for the provider.

    Raises:
        HTTPException: 404 if the provider is unknown, 500 on failure.
    """
    try:
        metrics = get_health_monitor().get_provider_metrics(provider_name)
        if not metrics:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail=f"Provider '{provider_name}' not found",
            )
        return ProviderHealthResponse(**metrics.to_dict())
    except HTTPException:
        # Re-raise our own 404 untouched.
        raise
    except Exception as exc:
        logger.error(
            f"Failed to get health for {provider_name}: {exc}",
            exc_info=True,
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to retrieve provider health: {str(exc)}",
        )
@router.get("/available", response_model=List[str])
async def get_available_providers(
    auth: Optional[dict] = Depends(require_auth),
) -> List[str]:
    """Return the names of providers that are currently available.

    Args:
        auth: Authentication token (optional).

    Returns:
        List of available provider names.
    """
    try:
        return get_health_monitor().get_available_providers()
    except Exception as exc:
        logger.error(f"Failed to get available providers: {exc}", exc_info=True)  # noqa: E501
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to retrieve available providers: {str(exc)}",
        )
@router.get("/best", response_model=Dict[str, str])
async def get_best_provider(
    auth: Optional[dict] = Depends(require_auth),
) -> Dict[str, str]:
    """Return the best performing provider.

    Args:
        auth: Authentication token (optional).

    Returns:
        Dictionary with best provider name.

    Raises:
        HTTPException: 503 if no provider is available, 500 on failure.
    """
    try:
        best = get_health_monitor().get_best_provider()
        if not best:
            raise HTTPException(
                status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
                detail="No available providers",
            )
    except HTTPException:
        # Propagate our own 503 untouched.
        raise
    except Exception as exc:
        logger.error(f"Failed to get best provider: {exc}", exc_info=True)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to determine best provider: {str(exc)}",
        )
    return {"provider": best}
@router.post("/health/{provider_name}/reset")
async def reset_provider_health(
    provider_name: str,
    auth: Optional[dict] = Depends(require_auth),
) -> Dict[str, str]:
    """Reset the recorded health metrics of one provider.

    Args:
        provider_name: Name of the provider.
        auth: Authentication token (optional).

    Returns:
        Success message.

    Raises:
        HTTPException: 404 if the provider is unknown, 500 on failure.
    """
    try:
        if not get_health_monitor().reset_provider_metrics(provider_name):
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail=f"Provider '{provider_name}' not found",
            )
    except HTTPException:
        raise
    except Exception as exc:
        logger.error(
            f"Failed to reset health for {provider_name}: {exc}",
            exc_info=True,
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to reset provider health: {str(exc)}",
        )
    return {"message": f"Reset metrics for provider: {provider_name}"}
# Configuration Endpoints
@router.get("/config", response_model=List[ProviderSettingsResponse])
async def get_all_provider_configs(
    auth: Optional[dict] = Depends(require_auth),
) -> List[ProviderSettingsResponse]:
    """Return the configuration of every known provider.

    Args:
        auth: Authentication token (optional).

    Returns:
        List of provider configurations.
    """
    try:
        all_settings = get_config_manager().get_all_provider_settings()
        return [
            ProviderSettingsResponse(**entry.to_dict())
            for entry in all_settings.values()
        ]
    except Exception as exc:
        logger.error(f"Failed to get provider configs: {exc}", exc_info=True)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to retrieve provider configurations: {str(exc)}",  # noqa: E501
        )
@router.get(
    "/config/{provider_name}", response_model=ProviderSettingsResponse
)
async def get_provider_config(
    provider_name: str,
    auth: Optional[dict] = Depends(require_auth),
) -> ProviderSettingsResponse:
    """Return the configuration of one provider.

    Unknown providers are answered with default settings rather than 404.

    Args:
        provider_name: Name of the provider.
        auth: Authentication token (optional).

    Returns:
        Provider configuration.

    Raises:
        HTTPException: 500 on failure.
    """
    try:
        current = get_config_manager().get_provider_settings(provider_name)
        # Fall back to defaults when the provider has no stored settings.
        effective = current or ProviderSettings(name=provider_name)
        return ProviderSettingsResponse(**effective.to_dict())
    except Exception as exc:
        logger.error(
            f"Failed to get config for {provider_name}: {exc}",
            exc_info=True,
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to retrieve provider configuration: {str(exc)}",  # noqa: E501
        )
@router.put(
    "/config/{provider_name}", response_model=ProviderSettingsResponse
)
async def update_provider_config(
    provider_name: str,
    settings: ProviderSettingsRequest,
    auth: Optional[dict] = Depends(require_auth),
) -> ProviderSettingsResponse:
    """Apply partial settings updates to one provider.

    Args:
        provider_name: Name of the provider.
        settings: Settings to update; only fields the client sent apply.
        auth: Authentication token (optional).

    Returns:
        Updated provider configuration.

    Raises:
        HTTPException: 500 on failure.
    """
    try:
        manager = get_config_manager()
        # Only fields explicitly provided by the client are applied.
        changes = settings.dict(exclude_unset=True)
        manager.update_provider_settings(provider_name, **changes)
        refreshed = manager.get_provider_settings(provider_name)
        if not refreshed:
            raise HTTPException(
                status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
                detail="Failed to retrieve updated configuration",
            )
        return ProviderSettingsResponse(**refreshed.to_dict())
    except HTTPException:
        raise
    except Exception as exc:
        logger.error(
            f"Failed to update config for {provider_name}: {exc}",
            exc_info=True,
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to update provider configuration: {str(exc)}",
        )
@router.post("/config/{provider_name}/enable")
async def enable_provider(
    provider_name: str,
    auth: Optional[dict] = Depends(require_auth),
) -> Dict[str, str]:
    """Mark a provider as enabled.

    Args:
        provider_name: Name of the provider.
        auth: Authentication token (optional).

    Returns:
        Success message.
    """
    try:
        get_config_manager().update_provider_settings(
            provider_name, enabled=True
        )
    except Exception as exc:
        logger.error(
            f"Failed to enable {provider_name}: {exc}", exc_info=True
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to enable provider: {str(exc)}",
        )
    return {"message": f"Enabled provider: {provider_name}"}
@router.post("/config/{provider_name}/disable")
async def disable_provider(
    provider_name: str,
    auth: Optional[dict] = Depends(require_auth),
) -> Dict[str, str]:
    """Mark a provider as disabled.

    Args:
        provider_name: Name of the provider.
        auth: Authentication token (optional).

    Returns:
        Success message.
    """
    try:
        get_config_manager().update_provider_settings(
            provider_name, enabled=False
        )
    except Exception as exc:
        logger.error(
            f"Failed to disable {provider_name}: {exc}", exc_info=True
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to disable provider: {str(exc)}",
        )
    return {"message": f"Disabled provider: {provider_name}"}
# Failover Endpoints
@router.get("/failover", response_model=FailoverStatsResponse)
async def get_failover_stats(
    auth: Optional[dict] = Depends(require_auth),
) -> FailoverStatsResponse:
    """Return failover statistics and configuration.

    Args:
        auth: Authentication token (optional).

    Returns:
        Failover statistics.
    """
    try:
        return FailoverStatsResponse(**get_failover().get_failover_stats())
    except Exception as exc:
        logger.error(f"Failed to get failover stats: {exc}", exc_info=True)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to retrieve failover statistics: {str(exc)}",
        )
@router.post("/failover/{provider_name}/add")
async def add_provider_to_failover(
    provider_name: str,
    auth: Optional[dict] = Depends(require_auth),
) -> Dict[str, str]:
    """Append a provider to the failover chain.

    Args:
        provider_name: Name of the provider.
        auth: Authentication token (optional).

    Returns:
        Success message.
    """
    try:
        get_failover().add_provider(provider_name)
    except Exception as exc:
        logger.error(
            f"Failed to add {provider_name} to failover: {exc}",
            exc_info=True,
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to add provider to failover: {str(exc)}",
        )
    return {"message": f"Added provider to failover: {provider_name}"}
@router.delete("/failover/{provider_name}")
async def remove_provider_from_failover(
    provider_name: str,
    auth: Optional[dict] = Depends(require_auth),
) -> Dict[str, str]:
    """Remove a provider from the failover chain.

    Args:
        provider_name: Name of the provider.
        auth: Authentication token (optional).

    Returns:
        Success message.

    Raises:
        HTTPException: 404 if the provider is not in the chain, 500 on
            failure.
    """
    try:
        was_removed = get_failover().remove_provider(provider_name)
        if not was_removed:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail=f"Provider '{provider_name}' not in failover chain",  # noqa: E501
            )
    except HTTPException:
        raise
    except Exception as exc:
        logger.error(
            f"Failed to remove {provider_name} from failover: {exc}",
            exc_info=True,
        )
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to remove provider from failover: {str(exc)}",
        )
    return {
        "message": f"Removed provider from failover: {provider_name}"
    }

View File

@ -1,176 +0,0 @@
"""File upload API endpoints with security validation.
This module provides secure file upload endpoints with comprehensive
validation for file size, type, extensions, and content.
"""
from fastapi import APIRouter, File, HTTPException, UploadFile, status
router = APIRouter(prefix="/api/upload", tags=["upload"])
# Security configurations for /api/upload.
# Hard cap on upload size; enforced after reading the request body.
MAX_FILE_SIZE = 50 * 1024 * 1024  # 50 MB
# Whitelist: only these final extensions are accepted.
ALLOWED_EXTENSIONS = {".jpg", ".jpeg", ".png", ".gif", ".txt", ".json", ".xml"}
# Blacklist: any of these appearing in ANY extension segment (including
# double extensions like "image.jpg.php") rejects the upload.
DANGEROUS_EXTENSIONS = {
    ".exe",
    ".sh",
    ".bat",
    ".cmd",
    ".php",
    ".jsp",
    ".asp",
    ".aspx",
    ".py",
    ".rb",
    ".pl",
    ".cgi",
}
# Declared Content-Type values accepted by validate_mime_type.
ALLOWED_MIME_TYPES = {
    "image/jpeg",
    "image/png",
    "image/gif",
    "text/plain",
    "application/json",
    "application/xml",
}
def validate_file_extension(filename: str) -> None:
    """Validate a filename's extension against the security rules.

    Args:
        filename: Name of the file to validate

    Raises:
        HTTPException: 415 if extension is dangerous or not allowed
    """
    segments = filename.split(".")
    # Guard against double-extension bypasses such as "image.jpg.php":
    # inspect every suffix segment, not just the final one.
    if len(segments) > 2:
        for segment in segments[1:]:
            candidate = f".{segment.lower()}"
            if candidate in DANGEROUS_EXTENSIONS:
                raise HTTPException(
                    status_code=status.HTTP_415_UNSUPPORTED_MEDIA_TYPE,
                    detail=f"Dangerous file extension detected: {candidate}",
                )
    if "." not in filename:
        raise HTTPException(
            status_code=status.HTTP_415_UNSUPPORTED_MEDIA_TYPE,
            detail="File must have an extension",
        )
    final_ext = "." + filename.rsplit(".", 1)[1].lower()
    if final_ext in DANGEROUS_EXTENSIONS:
        raise HTTPException(
            status_code=status.HTTP_415_UNSUPPORTED_MEDIA_TYPE,
            detail=f"File extension not allowed: {final_ext}",
        )
    if final_ext not in ALLOWED_EXTENSIONS:
        raise HTTPException(
            status_code=status.HTTP_415_UNSUPPORTED_MEDIA_TYPE,
            detail=(
                f"File extension not allowed: {final_ext}. "
                f"Allowed: {ALLOWED_EXTENSIONS}"
            ),
        )
def validate_mime_type(content_type: str, content: bytes) -> None:
    """Validate MIME type and content.

    Args:
        content_type: Declared MIME type
        content: Actual file content

    Raises:
        HTTPException: 415 if MIME type is not allowed or content is suspicious
    """
    if content_type not in ALLOWED_MIME_TYPES:
        raise HTTPException(
            status_code=status.HTTP_415_UNSUPPORTED_MEDIA_TYPE,
            detail=f"MIME type not allowed: {content_type}",
        )
    # Basic content sniffing for embedded code; only the first 1KB is
    # inspected (best-effort, same scope as before).
    dangerous_patterns = [
        b"<?php",
        b"<script",
        b"javascript:",
        b"<iframe",
    ]
    # Fix: compare case-insensitively so uppercase variants such as
    # b"<?PHP" or b"<SCRIPT" can no longer bypass the check.
    head = content[:1024].lower()
    for pattern in dangerous_patterns:
        if pattern in head:
            raise HTTPException(
                status_code=status.HTTP_415_UNSUPPORTED_MEDIA_TYPE,
                detail="Suspicious file content detected",
            )
@router.post("")
async def upload_file(
    file: UploadFile = File(...),
):
    """Upload a file with comprehensive security validation.

    Validates:
    - File size (max 50MB), enforced while streaming the body
    - File extension (blocks dangerous extensions)
    - Double extension bypass attempts
    - MIME type
    - Content inspection for malicious code

    Note: Authentication removed for security testing purposes.

    Args:
        file: The file to upload

    Returns:
        dict: Upload confirmation with file details

    Raises:
        HTTPException: 413 if file too large
        HTTPException: 415 if file type not allowed
        HTTPException: 400 if validation fails
    """
    # Validate filename exists
    if not file.filename:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Filename is required",
        )
    # Validate file extension before touching the body
    validate_file_extension(file.filename)
    # Fix: read the body in bounded chunks instead of buffering the whole
    # upload first, so an oversized file is rejected without letting a
    # client exhaust server memory.
    chunks = []
    total_size = 0
    while True:
        chunk = await file.read(1024 * 1024)  # 1 MiB per read
        if not chunk:
            break
        total_size += len(chunk)
        if total_size > MAX_FILE_SIZE:
            raise HTTPException(
                status_code=status.HTTP_413_REQUEST_ENTITY_TOO_LARGE,
                detail=(
                    f"File size exceeds maximum allowed size "
                    f"of {MAX_FILE_SIZE} bytes"
                ),
            )
        chunks.append(chunk)
    content = b"".join(chunks)
    # Validate MIME type and content
    content_type = file.content_type or "application/octet-stream"
    validate_mime_type(content_type, content)
    # In a real implementation, save the file here
    # For now, just return success
    return {
        "status": "success",
        "filename": file.filename,
        "size": len(content),
        "content_type": content_type,
    }

View File

@ -5,7 +5,6 @@ This module provides the main FastAPI application with proper CORS
configuration, middleware setup, static file serving, and Jinja2 template configuration, middleware setup, static file serving, and Jinja2 template
integration. integration.
""" """
import logging
from contextlib import asynccontextmanager from contextlib import asynccontextmanager
from pathlib import Path from pathlib import Path
from typing import Optional from typing import Optional
@ -24,13 +23,10 @@ from src.server.api.analytics import router as analytics_router
from src.server.api.anime import router as anime_router from src.server.api.anime import router as anime_router
from src.server.api.auth import router as auth_router from src.server.api.auth import router as auth_router
from src.server.api.config import router as config_router from src.server.api.config import router as config_router
from src.server.api.diagnostics import router as diagnostics_router
from src.server.api.download import downloads_router from src.server.api.download import downloads_router
from src.server.api.download import router as download_router from src.server.api.download import router as download_router
from src.server.api.logging import router as logging_router from src.server.api.logging import router as logging_router
from src.server.api.providers import router as providers_router
from src.server.api.scheduler import router as scheduler_router from src.server.api.scheduler import router as scheduler_router
from src.server.api.upload import router as upload_router
from src.server.api.websocket import router as websocket_router from src.server.api.websocket import router as websocket_router
from src.server.controllers.error_controller import ( from src.server.controllers.error_controller import (
not_found_handler, not_found_handler,
@ -173,13 +169,10 @@ app.include_router(auth_router)
app.include_router(config_router) app.include_router(config_router)
app.include_router(scheduler_router) app.include_router(scheduler_router)
app.include_router(logging_router) app.include_router(logging_router)
app.include_router(diagnostics_router)
app.include_router(analytics_router) app.include_router(analytics_router)
app.include_router(anime_router) app.include_router(anime_router)
app.include_router(download_router) app.include_router(download_router)
app.include_router(downloads_router) # Alias for input validation tests app.include_router(downloads_router) # Alias for input validation tests
app.include_router(providers_router)
app.include_router(upload_router)
app.include_router(websocket_router) app.include_router(websocket_router)
# Register exception handlers # Register exception handlers

View File

@ -322,74 +322,3 @@ class TestAPIParameterValidation:
# Should not grant admin from parameter # Should not grant admin from parameter
data = response.json() data = response.json()
assert not data.get("data", {}).get("is_admin", False) assert not data.get("data", {}).get("is_admin", False)
@pytest.mark.security
class TestFileUploadSecurity:
    """Security tests for file upload handling."""

    @pytest.fixture
    async def client(self):
        """Create async HTTP client for testing."""
        from httpx import ASGITransport

        transport = ASGITransport(app=app)
        async with AsyncClient(
            transport=transport, base_url="http://test"
        ) as async_client:
            yield async_client

    @pytest.mark.asyncio
    async def test_malicious_file_extension(self, client):
        """Test handling of dangerous file extensions."""
        for ext in (".exe", ".sh", ".bat", ".cmd", ".php", ".jsp"):
            payload = {"file": (f"test{ext}", b"malicious content")}
            response = await client.post("/api/upload", files=payload)
            # Dangerous extensions must be refused outright.
            assert response.status_code in [400, 403, 415]

    @pytest.mark.asyncio
    async def test_file_size_limit(self, client):
        """Test enforcement of file size limits."""
        oversized = b"A" * (100 * 1024 * 1024)  # 100MB
        response = await client.post(
            "/api/upload", files={"file": ("large.txt", oversized)}
        )
        # Oversized uploads must be rejected.
        assert response.status_code in [413, 422]

    @pytest.mark.asyncio
    async def test_double_extension_bypass(self, client):
        """Test protection against double extension bypass."""
        payload = {"file": ("image.jpg.php", b"<?php phpinfo(); ?>")}
        response = await client.post("/api/upload", files=payload)
        # The trailing .php must be detected despite the .jpg infix.
        assert response.status_code in [400, 403, 415]

    @pytest.mark.asyncio
    async def test_mime_type_validation(self, client):
        """Test MIME type validation."""
        payload = {
            "file": ("image.jpg", b"<?php phpinfo(); ?>", "image/jpeg")
        }
        response = await client.post("/api/upload", files=payload)
        # Content inspection must override the declared MIME type.
        assert response.status_code in [400, 403, 415]

View File

@ -1,227 +0,0 @@
"""Unit tests for diagnostics endpoints."""
from unittest.mock import MagicMock, patch
import pytest
from src.server.api.diagnostics import (
NetworkTestResult,
check_dns,
check_host_connectivity,
network_diagnostics,
)
class TestDiagnosticsEndpoint:
    """Test diagnostics API endpoints.

    Each test patches check_dns and check_host_connectivity; the
    side_effect lists are consumed in call order, so the entries must
    stay aligned with the host order the endpoint probes
    (google.com, cloudflare.com, github.com, aniworld.to).
    """
    @pytest.mark.asyncio
    async def test_network_diagnostics_returns_standard_format(self):
        """Test that network diagnostics returns the expected format."""
        # Mock authentication
        mock_auth = {"user_id": "test_user"}
        # Mock the helper functions
        with patch(
            "src.server.api.diagnostics.check_dns",
            return_value=True
        ), patch(
            "src.server.api.diagnostics.check_host_connectivity",
            # One result per probed host, in call order.
            side_effect=[
                NetworkTestResult(
                    host="google.com",
                    reachable=True,
                    response_time_ms=50.5
                ),
                NetworkTestResult(
                    host="cloudflare.com",
                    reachable=True,
                    response_time_ms=30.2
                ),
                NetworkTestResult(
                    host="github.com",
                    reachable=True,
                    response_time_ms=100.0
                ),
                NetworkTestResult(
                    host="aniworld.to",
                    reachable=True,
                    response_time_ms=75.3
                ),
            ]
        ):
            # Call the endpoint
            result = await network_diagnostics(auth=mock_auth)
            # Verify response structure
            assert isinstance(result, dict)
            assert "status" in result
            assert "data" in result
            assert result["status"] == "success"
            # Verify data structure
            data = result["data"]
            assert "internet_connected" in data
            assert "dns_working" in data
            assert "aniworld_reachable" in data
            assert "tests" in data
            # Verify values
            assert data["internet_connected"] is True
            assert data["dns_working"] is True
            assert data["aniworld_reachable"] is True
            assert len(data["tests"]) == 4
    @pytest.mark.asyncio
    async def test_network_diagnostics_aniworld_unreachable(self):
        """Test diagnostics when aniworld.to is unreachable."""
        mock_auth = {"user_id": "test_user"}
        with patch(
            "src.server.api.diagnostics.check_dns",
            return_value=True
        ), patch(
            "src.server.api.diagnostics.check_host_connectivity",
            # First three hosts up, aniworld.to down.
            side_effect=[
                NetworkTestResult(
                    host="google.com",
                    reachable=True,
                    response_time_ms=50.5
                ),
                NetworkTestResult(
                    host="cloudflare.com",
                    reachable=True,
                    response_time_ms=30.2
                ),
                NetworkTestResult(
                    host="github.com",
                    reachable=True,
                    response_time_ms=100.0
                ),
                NetworkTestResult(
                    host="aniworld.to",
                    reachable=False,
                    error="Connection timeout"
                ),
            ]
        ):
            result = await network_diagnostics(auth=mock_auth)
            # Verify aniworld is marked as unreachable
            assert result["status"] == "success"
            assert result["data"]["aniworld_reachable"] is False
            assert result["data"]["internet_connected"] is True
    @pytest.mark.asyncio
    async def test_network_diagnostics_all_unreachable(self):
        """Test diagnostics when all hosts are unreachable."""
        mock_auth = {"user_id": "test_user"}
        with patch(
            "src.server.api.diagnostics.check_dns",
            return_value=False
        ), patch(
            "src.server.api.diagnostics.check_host_connectivity",
            # Every probe fails: total connectivity outage.
            side_effect=[
                NetworkTestResult(
                    host="google.com",
                    reachable=False,
                    error="Connection timeout"
                ),
                NetworkTestResult(
                    host="cloudflare.com",
                    reachable=False,
                    error="Connection timeout"
                ),
                NetworkTestResult(
                    host="github.com",
                    reachable=False,
                    error="Connection timeout"
                ),
                NetworkTestResult(
                    host="aniworld.to",
                    reachable=False,
                    error="Connection timeout"
                ),
            ]
        ):
            result = await network_diagnostics(auth=mock_auth)
            # Verify all are unreachable
            assert result["status"] == "success"
            assert result["data"]["internet_connected"] is False
            assert result["data"]["dns_working"] is False
            assert result["data"]["aniworld_reachable"] is False
class TestNetworkHelpers:
    """Test network helper functions.

    Patches socket-level primitives (gethostbyname,
    create_connection) so no real network traffic occurs.
    """
    @pytest.mark.asyncio
    async def test_check_dns_success(self):
        """Test DNS check when DNS is working."""
        with patch("socket.gethostbyname", return_value="142.250.185.78"):
            result = await check_dns()
            assert result is True
    @pytest.mark.asyncio
    async def test_check_dns_failure(self):
        """Test DNS check when DNS fails."""
        import socket
        with patch(
            "socket.gethostbyname",
            side_effect=socket.gaierror("DNS lookup failed")
        ):
            result = await check_dns()
            assert result is False
    @pytest.mark.asyncio
    async def test_host_connectivity_success(self):
        """Test host connectivity check when host is reachable."""
        with patch(
            "socket.create_connection",
            return_value=MagicMock()
        ):
            result = await check_host_connectivity("google.com", 80)
            assert result.host == "google.com"
            assert result.reachable is True
            # Timing is real wall-clock, so only bounds are asserted.
            assert result.response_time_ms is not None
            assert result.response_time_ms >= 0
            assert result.error is None
    @pytest.mark.asyncio
    async def test_host_connectivity_timeout(self):
        """Test host connectivity when connection times out."""
        import asyncio
        with patch(
            "socket.create_connection",
            side_effect=asyncio.TimeoutError()
        ):
            result = await check_host_connectivity("example.com", 80, 1.0)
            assert result.host == "example.com"
            assert result.reachable is False
            assert result.error == "Connection timeout"
    @pytest.mark.asyncio
    async def test_host_connectivity_dns_failure(self):
        """Test host connectivity when DNS resolution fails."""
        import socket
        with patch(
            "socket.create_connection",
            side_effect=socket.gaierror("Name resolution failed")
        ):
            result = await check_host_connectivity("invalid.host", 80)
            assert result.host == "invalid.host"
            assert result.reachable is False
            assert "DNS resolution failed" in result.error
    @pytest.mark.asyncio
    async def test_host_connectivity_connection_refused(self):
        """Test host connectivity when connection is refused."""
        with patch(
            "socket.create_connection",
            side_effect=ConnectionRefusedError()
        ):
            result = await check_host_connectivity("localhost", 12345)
            assert result.host == "localhost"
            assert result.reachable is False
            assert result.error == "Connection refused"