remove part 2

Lukas 2025-10-30 20:11:38 +01:00
parent 4649cf562d
commit fd76be02fd
8 changed files with 138 additions and 1260 deletions

View File

@@ -17,7 +17,7 @@
"keep_days": 30
},
"other": {
"master_password_hash": "$pbkdf2-sha256$29000$RghBiDGmVCrFWAvhnDNGiA$b6P/Dl0GF7SJUfbEq7HcZQL5ljyqswE6Gyq3YSoLtOs",
"master_password_hash": "$pbkdf2-sha256$29000$h/BeKwXA2BvDOAdgjFHqvQ$2Yg8AHGbPwrNDbxJbES3N3GiCI/hE2TthGAQ61AFir0",
"anime_directory": "/home/lukas/Volume/serien/"
},
"version": "1.0.0"

View File

@@ -0,0 +1,24 @@
{
"name": "Aniworld",
"data_dir": "data",
"scheduler": {
"enabled": true,
"interval_minutes": 60
},
"logging": {
"level": "INFO",
"file": null,
"max_bytes": null,
"backup_count": 3
},
"backup": {
"enabled": false,
"path": "data/backups",
"keep_days": 30
},
"other": {
"master_password_hash": "$pbkdf2-sha256$29000$.r9Xag1BSKl17j3H.P./tw$XLhO13awaCA0TUXFdd6pMlefl41zxL1rLWON/wy.FSU",
"anime_directory": "/home/lukas/Volume/serien/"
},
"version": "1.0.0"
}
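The file added above mirrors the structure the server's config service expects. A hedged sketch of pydantic models that would validate it; LoggingConfig follows the fields used by src.server.models.config later in this commit, while the other class names are illustrative assumptions:

```python
import json
from typing import Any, Dict, Optional

from pydantic import BaseModel


class SchedulerConfig(BaseModel):
    enabled: bool = True
    interval_minutes: int = 60


class LoggingConfig(BaseModel):
    level: str = "INFO"
    file: Optional[str] = None
    max_bytes: Optional[int] = None
    backup_count: int = 3


class BackupConfig(BaseModel):
    enabled: bool = False
    path: str = "data/backups"
    keep_days: int = 30


class AppConfig(BaseModel):
    name: str
    data_dir: str
    scheduler: SchedulerConfig
    logging: LoggingConfig
    backup: BackupConfig
    other: Dict[str, Any]
    version: str


with open("config.json", encoding="utf-8") as f:
    config = AppConfig(**json.load(f))  # works with pydantic v1 and v2
print(config.scheduler.interval_minutes)
```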

View File

@@ -1,7 +1,7 @@
{
"pending": [
{
"id": "215e91a4-e9e8-43cb-be9d-19fbe371c29c",
"id": "e2f16d76-6a15-4958-a523-dfc58d25cf40",
"serie_id": "workflow-series",
"serie_name": "Workflow Test Series",
"episode": {
@@ -11,7 +11,7 @@
},
"status": "pending",
"priority": "high",
"added_at": "2025-10-30T19:05:25.215695Z",
"added_at": "2025-10-30T19:09:58.786271Z",
"started_at": null,
"completed_at": null,
"progress": null,
@@ -20,7 +20,7 @@
"source_url": null
},
{
"id": "c275473f-2df3-4dd0-a4a0-62183694745e",
"id": "a2391f20-1430-4533-8928-2d501c0ffb3a",
"serie_id": "series-2",
"serie_name": "Series 2",
"episode": {
@@ -30,7 +30,7 @@
},
"status": "pending",
"priority": "normal",
"added_at": "2025-10-30T19:05:24.806513Z",
"added_at": "2025-10-30T19:09:58.375433Z",
"started_at": null,
"completed_at": null,
"progress": null,
@@ -39,7 +39,7 @@
"source_url": null
},
{
"id": "95b55f5f-90d7-48d0-b42f-d3501a1749bf",
"id": "2335d591-6b56-4b6e-b6fc-84f3b8f86907",
"serie_id": "series-1",
"serie_name": "Series 1",
"episode": {
@@ -49,7 +49,7 @@
},
"status": "pending",
"priority": "normal",
"added_at": "2025-10-30T19:05:24.803908Z",
"added_at": "2025-10-30T19:09:58.372955Z",
"started_at": null,
"completed_at": null,
"progress": null,
@@ -58,7 +58,7 @@
"source_url": null
},
{
"id": "b0946fb4-4dca-49d1-a740-459f2d8ddd07",
"id": "65e99af4-9ebc-4048-b5d7-30206e3116a4",
"serie_id": "series-0",
"serie_name": "Series 0",
"episode": {
@@ -68,7 +68,7 @@
},
"status": "pending",
"priority": "normal",
"added_at": "2025-10-30T19:05:24.801620Z",
"added_at": "2025-10-30T19:09:58.370802Z",
"started_at": null,
"completed_at": null,
"progress": null,
@@ -77,7 +77,7 @@
"source_url": null
},
{
"id": "d3bb12ec-dfeb-46cb-8122-40bb9c736514",
"id": "b4d3cf14-316d-4c80-ba88-6f3379983fd6",
"serie_id": "series-high",
"serie_name": "Series High",
"episode": {
@@ -87,7 +87,7 @@
},
"status": "pending",
"priority": "high",
"added_at": "2025-10-30T19:05:24.503113Z",
"added_at": "2025-10-30T19:09:58.067288Z",
"started_at": null,
"completed_at": null,
"progress": null,
@@ -96,7 +96,7 @@
"source_url": null
},
{
"id": "5efb1e06-ce1f-4c92-8966-fb1b0b7b1cae",
"id": "2d236bcd-816c-4d64-ba69-b46fd8913134",
"serie_id": "test-series-2",
"serie_name": "Another Series",
"episode": {
@@ -106,7 +106,7 @@
},
"status": "pending",
"priority": "high",
"added_at": "2025-10-30T19:05:24.467651Z",
"added_at": "2025-10-30T19:09:58.029347Z",
"started_at": null,
"completed_at": null,
"progress": null,
@@ -115,7 +115,7 @@
"source_url": null
},
{
"id": "c4ea258d-43c7-4fa3-8931-65889d2b8f51",
"id": "8547af6d-5f7f-46da-83c8-f4ba06d4f59c",
"serie_id": "test-series-1",
"serie_name": "Test Anime Series",
"episode": {
@@ -125,7 +125,7 @@
},
"status": "pending",
"priority": "normal",
"added_at": "2025-10-30T19:05:24.434002Z",
"added_at": "2025-10-30T19:09:57.994538Z",
"started_at": null,
"completed_at": null,
"progress": null,
@@ -134,7 +134,7 @@
"source_url": null
},
{
"id": "ee2d8903-e721-4c36-a61e-65fe2a6df9fb",
"id": "42e8ac82-def0-49be-91cb-eb248a9018ab",
"serie_id": "test-series-1",
"serie_name": "Test Anime Series",
"episode": {
@@ -144,7 +144,7 @@
},
"status": "pending",
"priority": "normal",
"added_at": "2025-10-30T19:05:24.434111Z",
"added_at": "2025-10-30T19:09:57.994647Z",
"started_at": null,
"completed_at": null,
"progress": null,
@@ -153,7 +153,7 @@
"source_url": null
},
{
"id": "199e17cc-5eed-4c07-8ae9-ba88298edd49",
"id": "0091bf48-5377-4a9a-9a55-a0abd60a3258",
"serie_id": "series-normal",
"serie_name": "Series Normal",
"episode": {
@@ -163,7 +163,7 @@
},
"status": "pending",
"priority": "normal",
"added_at": "2025-10-30T19:05:24.505310Z",
"added_at": "2025-10-30T19:09:58.069543Z",
"started_at": null,
"completed_at": null,
"progress": null,
@@ -172,7 +172,7 @@
"source_url": null
},
{
"id": "6cff3820-65c9-4e40-a4e6-b4303caa3540",
"id": "c6f5cf39-94c8-4ba6-b397-a62c8d183329",
"serie_id": "series-low",
"serie_name": "Series Low",
"episode": {
@@ -182,7 +182,7 @@
},
"status": "pending",
"priority": "low",
"added_at": "2025-10-30T19:05:24.507439Z",
"added_at": "2025-10-30T19:09:58.071479Z",
"started_at": null,
"completed_at": null,
"progress": null,
@@ -191,7 +191,7 @@
"source_url": null
},
{
"id": "34b7cded-3dba-4fc6-b022-b27d99a11bee",
"id": "c939d7ec-1918-4b1e-9ad4-d8bf9256ccf6",
"serie_id": "test-series",
"serie_name": "Test Series",
"episode": {
@@ -201,7 +201,7 @@
},
"status": "pending",
"priority": "normal",
"added_at": "2025-10-30T19:05:24.737027Z",
"added_at": "2025-10-30T19:09:58.306482Z",
"started_at": null,
"completed_at": null,
"progress": null,
@@ -210,7 +210,7 @@
"source_url": null
},
{
"id": "a3e6f4a4-a6e3-4840-94c2-ba5911da9207",
"id": "b471abef-dc4e-4f0d-a26a-3342ed54f4de",
"serie_id": "test-series",
"serie_name": "Test Series",
"episode": {
@@ -220,7 +220,7 @@
},
"status": "pending",
"priority": "normal",
"added_at": "2025-10-30T19:05:24.839668Z",
"added_at": "2025-10-30T19:09:58.410862Z",
"started_at": null,
"completed_at": null,
"progress": null,
@@ -229,7 +229,7 @@
"source_url": null
},
{
"id": "21e46b9e-edbf-4021-9f58-64a39f36fe76",
"id": "b52c97d5-6897-4762-a72a-4ed19701b187",
"serie_id": "invalid-series",
"serie_name": "Invalid Series",
"episode": {
@@ -239,7 +239,7 @@
},
"status": "pending",
"priority": "normal",
"added_at": "2025-10-30T19:05:24.906811Z",
"added_at": "2025-10-30T19:09:58.481423Z",
"started_at": null,
"completed_at": null,
"progress": null,
@@ -248,7 +248,7 @@
"source_url": null
},
{
"id": "4d0192e7-70e1-49d6-b1d2-f94e5e51c57e",
"id": "7a6fda0d-0402-47fe-a833-597fc625463c",
"serie_id": "test-series",
"serie_name": "Test Series",
"episode": {
@@ -258,7 +258,7 @@
},
"status": "pending",
"priority": "normal",
"added_at": "2025-10-30T19:05:24.938376Z",
"added_at": "2025-10-30T19:09:58.515500Z",
"started_at": null,
"completed_at": null,
"progress": null,
@@ -267,83 +267,7 @@
"source_url": null
},
{
"id": "dc26caca-98b1-415d-812e-db892d99a059",
"serie_id": "series-0",
"serie_name": "Series 0",
"episode": {
"season": 1,
"episode": 1,
"title": null
},
"status": "pending",
"priority": "normal",
"added_at": "2025-10-30T19:05:25.021924Z",
"started_at": null,
"completed_at": null,
"progress": null,
"error": null,
"retry_count": 0,
"source_url": null
},
{
"id": "d7075bc3-6ac4-4d6d-9896-c8fadd6b86b9",
"serie_id": "series-1",
"serie_name": "Series 1",
"episode": {
"season": 1,
"episode": 1,
"title": null
},
"status": "pending",
"priority": "normal",
"added_at": "2025-10-30T19:05:25.022834Z",
"started_at": null,
"completed_at": null,
"progress": null,
"error": null,
"retry_count": 0,
"source_url": null
},
{
"id": "03f547b2-7533-428d-b90f-bb999b7b47ea",
"serie_id": "series-4",
"serie_name": "Series 4",
"episode": {
"season": 1,
"episode": 1,
"title": null
},
"status": "pending",
"priority": "normal",
"added_at": "2025-10-30T19:05:25.023529Z",
"started_at": null,
"completed_at": null,
"progress": null,
"error": null,
"retry_count": 0,
"source_url": null
},
{
"id": "a01825cc-430b-470f-a883-e6cf052386d5",
"serie_id": "series-2",
"serie_name": "Series 2",
"episode": {
"season": 1,
"episode": 1,
"title": null
},
"status": "pending",
"priority": "normal",
"added_at": "2025-10-30T19:05:25.024174Z",
"started_at": null,
"completed_at": null,
"progress": null,
"error": null,
"retry_count": 0,
"source_url": null
},
{
"id": "68039d81-8bcf-4aa7-ad33-55545bd6405f",
"id": "9edd6b21-8b50-417c-a971-38ed5dfd2fec",
"serie_id": "series-3",
"serie_name": "Series 3",
"episode": {
@@ -353,7 +277,7 @@
},
"status": "pending",
"priority": "normal",
"added_at": "2025-10-30T19:05:25.026402Z",
"added_at": "2025-10-30T19:09:58.601566Z",
"started_at": null,
"completed_at": null,
"progress": null,
@@ -362,7 +286,83 @@
"source_url": null
},
{
"id": "7a2d009a-778e-4ba4-9e9a-b845e1c36429",
"id": "5a076539-ff06-4ec6-9ff4-4f83288bb0f7",
"serie_id": "series-4",
"serie_name": "Series 4",
"episode": {
"season": 1,
"episode": 1,
"title": null
},
"status": "pending",
"priority": "normal",
"added_at": "2025-10-30T19:09:58.602644Z",
"started_at": null,
"completed_at": null,
"progress": null,
"error": null,
"retry_count": 0,
"source_url": null
},
{
"id": "2d4da5dc-9a8a-4b7a-94ba-65d6992649d1",
"serie_id": "series-1",
"serie_name": "Series 1",
"episode": {
"season": 1,
"episode": 1,
"title": null
},
"status": "pending",
"priority": "normal",
"added_at": "2025-10-30T19:09:58.603367Z",
"started_at": null,
"completed_at": null,
"progress": null,
"error": null,
"retry_count": 0,
"source_url": null
},
{
"id": "f1cc1316-271f-497e-a89c-b40f4ae55af3",
"serie_id": "series-0",
"serie_name": "Series 0",
"episode": {
"season": 1,
"episode": 1,
"title": null
},
"status": "pending",
"priority": "normal",
"added_at": "2025-10-30T19:09:58.604139Z",
"started_at": null,
"completed_at": null,
"progress": null,
"error": null,
"retry_count": 0,
"source_url": null
},
{
"id": "5dcf3947-3b48-459a-a5b3-7a7b80bde64d",
"serie_id": "series-2",
"serie_name": "Series 2",
"episode": {
"season": 1,
"episode": 1,
"title": null
},
"status": "pending",
"priority": "normal",
"added_at": "2025-10-30T19:09:58.605026Z",
"started_at": null,
"completed_at": null,
"progress": null,
"error": null,
"retry_count": 0,
"source_url": null
},
{
"id": "981cee6f-9404-4973-975a-8f8988c9bd15",
"serie_id": "persistent-series",
"serie_name": "Persistent Series",
"episode": {
@@ -372,7 +372,7 @@
},
"status": "pending",
"priority": "normal",
"added_at": "2025-10-30T19:05:25.107603Z",
"added_at": "2025-10-30T19:09:58.686065Z",
"started_at": null,
"completed_at": null,
"progress": null,
@@ -381,7 +381,7 @@
"source_url": null
},
{
"id": "bb16d9b7-b0ab-45c3-b02f-8b733bda1075",
"id": "5a33815e-e68f-4708-a0de-d8a23243ef0b",
"serie_id": "ws-series",
"serie_name": "WebSocket Series",
"episode": {
@@ -391,7 +391,7 @@
},
"status": "pending",
"priority": "normal",
"added_at": "2025-10-30T19:05:25.178269Z",
"added_at": "2025-10-30T19:09:58.752568Z",
"started_at": null,
"completed_at": null,
"progress": null,
@@ -400,7 +400,7 @@
"source_url": null
},
{
"id": "c8240c65-5ac7-4731-900c-9f02083c1eb4",
"id": "58ecc4b3-483a-4c85-ab68-47bf663edad0",
"serie_id": "pause-test",
"serie_name": "Pause Test Series",
"episode": {
@@ -410,7 +410,7 @@
},
"status": "pending",
"priority": "normal",
"added_at": "2025-10-30T19:05:25.355677Z",
"added_at": "2025-10-30T19:09:58.927908Z",
"started_at": null,
"completed_at": null,
"progress": null,
@@ -421,5 +421,5 @@
],
"active": [],
"failed": [],
"timestamp": "2025-10-30T19:05:25.355950+00:00"
"timestamp": "2025-10-30T19:09:58.928247+00:00"
}
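The queue file rewritten above keeps a stable shape per entry (id, serie_id, episode, status, priority, timestamps); only the IDs and timestamps change between test runs. A small sketch for reading the pending list; the dataclass is an assumption, the field names follow the JSON:

```python
import json
from dataclasses import dataclass


@dataclass
class QueueItem:
    id: str
    serie_id: str
    serie_name: str
    season: int
    episode: int
    status: str
    priority: str
    added_at: str


def load_pending(path: str) -> list[QueueItem]:
    """Parse the 'pending' entries of a queue state file like the one above."""
    with open(path, encoding="utf-8") as f:
        state = json.load(f)
    return [
        QueueItem(
            id=item["id"],
            serie_id=item["serie_id"],
            serie_name=item["serie_name"],
            season=item["episode"]["season"],
            episode=item["episode"]["episode"],
            status=item["status"],
            priority=item["priority"],
            added_at=item["added_at"],
        )
        for item in state["pending"]
    ]
```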

View File

@@ -1,258 +0,0 @@
"""Analytics API endpoints for accessing system analytics and reports.
Provides REST API endpoints for querying analytics data including download
statistics, series popularity, storage analysis, and performance reports.
"""
from typing import Optional
from fastapi import APIRouter, Depends, HTTPException
from pydantic import BaseModel
from sqlalchemy.ext.asyncio import AsyncSession
from src.server.database.connection import get_db_session
from src.server.services.analytics_service import get_analytics_service
router = APIRouter(prefix="/api/analytics", tags=["analytics"])
class DownloadStatsResponse(BaseModel):
"""Download statistics response model."""
total_downloads: int
successful_downloads: int
failed_downloads: int
total_bytes_downloaded: int
average_speed_mbps: float
success_rate: float
average_duration_seconds: float
class SeriesPopularityResponse(BaseModel):
"""Series popularity response model."""
series_name: str
download_count: int
total_size_bytes: int
last_download: Optional[str]
success_rate: float
class StorageAnalysisResponse(BaseModel):
"""Storage analysis response model."""
total_storage_bytes: int
used_storage_bytes: int
free_storage_bytes: int
storage_percent_used: float
downloads_directory_size_bytes: int
cache_directory_size_bytes: int
logs_directory_size_bytes: int
class PerformanceReportResponse(BaseModel):
"""Performance report response model."""
period_start: str
period_end: str
downloads_per_hour: float
average_queue_size: float
peak_memory_usage_mb: float
average_cpu_percent: float
uptime_seconds: float
error_rate: float
class SummaryReportResponse(BaseModel):
"""Comprehensive analytics summary response."""
timestamp: str
download_stats: DownloadStatsResponse
series_popularity: list[SeriesPopularityResponse]
storage_analysis: StorageAnalysisResponse
performance_report: PerformanceReportResponse
@router.get("/downloads", response_model=DownloadStatsResponse)
async def get_download_statistics(
days: int = 30,
db: AsyncSession = Depends(get_db_session),
) -> DownloadStatsResponse:
"""Get download statistics for specified period.
Args:
days: Number of days to analyze (default: 30)
db: Database session
Returns:
Download statistics including success rates and speeds
"""
try:
service = get_analytics_service()
stats = await service.get_download_stats(db, days=days)
return DownloadStatsResponse(
total_downloads=stats.total_downloads,
successful_downloads=stats.successful_downloads,
failed_downloads=stats.failed_downloads,
total_bytes_downloaded=stats.total_bytes_downloaded,
average_speed_mbps=stats.average_speed_mbps,
success_rate=stats.success_rate,
average_duration_seconds=stats.average_duration_seconds,
)
except Exception as e:
raise HTTPException(
status_code=500,
detail=f"Failed to get download statistics: {str(e)}",
)
@router.get(
"/series-popularity",
response_model=list[SeriesPopularityResponse]
)
async def get_series_popularity(
limit: int = 10,
db: AsyncSession = Depends(get_db_session),
) -> list[SeriesPopularityResponse]:
"""Get most popular series by download count.
Args:
limit: Maximum number of series (default: 10)
db: Database session
Returns:
List of series sorted by popularity
"""
try:
service = get_analytics_service()
popularity = await service.get_series_popularity(db, limit=limit)
return [
SeriesPopularityResponse(
series_name=p.series_name,
download_count=p.download_count,
total_size_bytes=p.total_size_bytes,
last_download=p.last_download,
success_rate=p.success_rate,
)
for p in popularity
]
except Exception as e:
raise HTTPException(
status_code=500,
detail=f"Failed to get series popularity: {str(e)}",
)
@router.get(
"/storage",
response_model=StorageAnalysisResponse
)
async def get_storage_analysis() -> StorageAnalysisResponse:
"""Get current storage usage analysis.
Returns:
Storage breakdown including disk and directory usage
"""
try:
service = get_analytics_service()
analysis = service.get_storage_analysis()
return StorageAnalysisResponse(
total_storage_bytes=analysis.total_storage_bytes,
used_storage_bytes=analysis.used_storage_bytes,
free_storage_bytes=analysis.free_storage_bytes,
storage_percent_used=analysis.storage_percent_used,
downloads_directory_size_bytes=(
analysis.downloads_directory_size_bytes
),
cache_directory_size_bytes=(
analysis.cache_directory_size_bytes
),
logs_directory_size_bytes=(
analysis.logs_directory_size_bytes
),
)
except Exception as e:
raise HTTPException(
status_code=500,
detail=f"Failed to get storage analysis: {str(e)}",
)
@router.get(
"/performance",
response_model=PerformanceReportResponse
)
async def get_performance_report(
hours: int = 24,
db: AsyncSession = Depends(get_db_session),
) -> PerformanceReportResponse:
"""Get performance metrics for specified period.
Args:
hours: Number of hours to analyze (default: 24)
db: Database session
Returns:
Performance metrics including speeds and system usage
"""
try:
service = get_analytics_service()
report = await service.get_performance_report(db, hours=hours)
return PerformanceReportResponse(
period_start=report.period_start,
period_end=report.period_end,
downloads_per_hour=report.downloads_per_hour,
average_queue_size=report.average_queue_size,
peak_memory_usage_mb=report.peak_memory_usage_mb,
average_cpu_percent=report.average_cpu_percent,
uptime_seconds=report.uptime_seconds,
error_rate=report.error_rate,
)
except Exception as e:
raise HTTPException(
status_code=500,
detail=f"Failed to get performance report: {str(e)}",
)
@router.get("/summary", response_model=SummaryReportResponse)
async def get_summary_report(
db: AsyncSession = Depends(get_db_session),
) -> SummaryReportResponse:
"""Get comprehensive analytics summary.
Args:
db: Database session
Returns:
Complete analytics report with all metrics
"""
try:
service = get_analytics_service()
summary = await service.generate_summary_report(db)
return SummaryReportResponse(
timestamp=summary["timestamp"],
download_stats=DownloadStatsResponse(
**summary["download_stats"]
),
series_popularity=[
SeriesPopularityResponse(**p)
for p in summary["series_popularity"]
],
storage_analysis=StorageAnalysisResponse(
**summary["storage_analysis"]
),
performance_report=PerformanceReportResponse(
**summary["performance_report"]
),
)
except Exception as e:
raise HTTPException(
status_code=500,
detail=f"Failed to generate summary report: {str(e)}",
)
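Before this commit, the router above was reachable under /api/analytics. A hedged usage sketch of what a client call looked like; host and port are assumptions:

```python
import httpx

# Sketch only; the base URL is an assumption.
with httpx.Client(base_url="http://localhost:8000") as client:
    stats = client.get("/api/analytics/downloads", params={"days": 7}).json()
    print(stats["success_rate"], stats["average_speed_mbps"])
```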

View File

@@ -1,304 +0,0 @@
"""Backup management API endpoints."""
import logging
from typing import Any, Dict, List, Optional
from fastapi import APIRouter, Depends, HTTPException
from pydantic import BaseModel
from src.server.services.backup_service import BackupService, get_backup_service
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/api/backup", tags=["backup"])
class BackupCreateRequest(BaseModel):
"""Request to create a backup."""
backup_type: str # 'config', 'database', 'full'
description: Optional[str] = None
class BackupResponse(BaseModel):
"""Response for backup creation."""
success: bool
message: str
backup_name: Optional[str] = None
size_bytes: Optional[int] = None
class BackupListResponse(BaseModel):
"""Response for listing backups."""
backups: List[Dict[str, Any]]
total_count: int
class RestoreRequest(BaseModel):
"""Request to restore from backup."""
backup_name: str
class RestoreResponse(BaseModel):
"""Response for restore operation."""
success: bool
message: str
def get_backup_service_dep() -> BackupService:
"""Dependency to get backup service."""
return get_backup_service()
@router.post("/create", response_model=BackupResponse)
async def create_backup(
request: BackupCreateRequest,
backup_service: BackupService = Depends(get_backup_service_dep),
) -> BackupResponse:
"""Create a new backup.
Args:
request: Backup creation request.
backup_service: Backup service dependency.
Returns:
BackupResponse: Result of backup creation.
"""
try:
backup_info = None
if request.backup_type == "config":
backup_info = backup_service.backup_configuration(
request.description or ""
)
elif request.backup_type == "database":
backup_info = backup_service.backup_database(
request.description or ""
)
elif request.backup_type == "full":
backup_info = backup_service.backup_full(
request.description or ""
)
else:
raise ValueError(f"Invalid backup type: {request.backup_type}")
if backup_info is None:
return BackupResponse(
success=False,
message=f"Failed to create {request.backup_type} backup",
)
return BackupResponse(
success=True,
message=(
f"{request.backup_type.capitalize()} backup created "
"successfully"
),
backup_name=backup_info.name,
size_bytes=backup_info.size_bytes,
)
except Exception as e:
logger.error(f"Failed to create backup: {e}")
raise HTTPException(status_code=500, detail=str(e))
@router.get("/list", response_model=BackupListResponse)
async def list_backups(
backup_type: Optional[str] = None,
backup_service: BackupService = Depends(get_backup_service_dep),
) -> BackupListResponse:
"""List available backups.
Args:
backup_type: Optional filter by backup type.
backup_service: Backup service dependency.
Returns:
BackupListResponse: List of available backups.
"""
try:
backups = backup_service.list_backups(backup_type)
return BackupListResponse(backups=backups, total_count=len(backups))
except Exception as e:
logger.error(f"Failed to list backups: {e}")
raise HTTPException(status_code=500, detail=str(e))
@router.post("/restore", response_model=RestoreResponse)
async def restore_backup(
request: RestoreRequest,
backup_type: Optional[str] = None,
backup_service: BackupService = Depends(get_backup_service_dep),
) -> RestoreResponse:
"""Restore from a backup.
Args:
request: Restore request.
backup_type: Type of backup to restore.
backup_service: Backup service dependency.
Returns:
RestoreResponse: Result of restore operation.
"""
try:
# Determine backup type from filename if not provided
if backup_type is None:
if "config" in request.backup_name:
backup_type = "config"
elif "database" in request.backup_name:
backup_type = "database"
else:
backup_type = "full"
success = False
if backup_type == "config":
success = backup_service.restore_configuration(
request.backup_name
)
elif backup_type == "database":
success = backup_service.restore_database(request.backup_name)
else:
raise ValueError(f"Cannot restore backup type: {backup_type}")
if not success:
return RestoreResponse(
success=False,
message=f"Failed to restore {backup_type} backup",
)
return RestoreResponse(
success=True,
message=f"{backup_type.capitalize()} backup restored successfully",
)
except Exception as e:
logger.error(f"Failed to restore backup: {e}")
raise HTTPException(status_code=500, detail=str(e))
@router.delete("/{backup_name}", response_model=Dict[str, Any])
async def delete_backup(
backup_name: str,
backup_service: BackupService = Depends(get_backup_service_dep),
) -> Dict[str, Any]:
"""Delete a backup.
Args:
backup_name: Name of the backup to delete.
backup_service: Backup service dependency.
Returns:
dict: Result of delete operation.
"""
try:
success = backup_service.delete_backup(backup_name)
if not success:
raise HTTPException(status_code=404, detail="Backup not found")
return {"success": True, "message": "Backup deleted successfully"}
except HTTPException:
raise
except Exception as e:
logger.error(f"Failed to delete backup: {e}")
raise HTTPException(status_code=500, detail=str(e))
@router.post("/cleanup", response_model=Dict[str, Any])
async def cleanup_backups(
max_backups: int = 10,
backup_type: Optional[str] = None,
backup_service: BackupService = Depends(get_backup_service_dep),
) -> Dict[str, Any]:
"""Clean up old backups.
Args:
max_backups: Maximum number of backups to keep.
backup_type: Optional filter by backup type.
backup_service: Backup service dependency.
Returns:
dict: Number of backups deleted.
"""
try:
deleted_count = backup_service.cleanup_old_backups(
max_backups, backup_type
)
return {
"success": True,
"message": "Cleanup completed",
"deleted_count": deleted_count,
}
except Exception as e:
logger.error(f"Failed to cleanup backups: {e}")
raise HTTPException(status_code=500, detail=str(e))
@router.post("/export/anime", response_model=Dict[str, Any])
async def export_anime_data(
backup_service: BackupService = Depends(get_backup_service_dep),
) -> Dict[str, Any]:
"""Export anime library data.
Args:
backup_service: Backup service dependency.
Returns:
dict: Result of export operation.
"""
try:
output_file = "data/backups/anime_export.json"
success = backup_service.export_anime_data(output_file)
if not success:
raise HTTPException(
status_code=500, detail="Failed to export anime data"
)
return {
"success": True,
"message": "Anime data exported successfully",
"export_file": output_file,
}
except HTTPException:
raise
except Exception as e:
logger.error(f"Failed to export anime data: {e}")
raise HTTPException(status_code=500, detail=str(e))
@router.post("/import/anime", response_model=Dict[str, Any])
async def import_anime_data(
import_file: str,
backup_service: BackupService = Depends(get_backup_service_dep),
) -> Dict[str, Any]:
"""Import anime library data.
Args:
import_file: Path to import file.
backup_service: Backup service dependency.
Returns:
dict: Result of import operation.
"""
try:
success = backup_service.import_anime_data(import_file)
if not success:
raise HTTPException(
status_code=400, detail="Failed to import anime data"
)
return {
"success": True,
"message": "Anime data imported successfully",
}
except HTTPException:
raise
except Exception as e:
logger.error(f"Failed to import anime data: {e}")
raise HTTPException(status_code=500, detail=str(e))
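Likewise, the backup router above accepted typed JSON bodies. A hedged sketch of the create/restore round trip it supported; host and port are assumptions:

```python
import httpx

# Sketch only; the base URL is an assumption.
resp = httpx.post(
    "http://localhost:8000/api/backup/create",
    json={"backup_type": "config", "description": "before removal"},
)
resp.raise_for_status()
name = resp.json()["backup_name"]

httpx.post(
    "http://localhost:8000/api/backup/restore",
    json={"backup_name": name},
).raise_for_status()
```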

View File

@@ -1,426 +0,0 @@
"""Logging API endpoints for Aniworld.
This module provides endpoints for managing application logging
configuration and accessing log files.
"""
import logging
import time
from pathlib import Path
from typing import Dict, List, Optional
from fastapi import APIRouter, Depends, HTTPException, status
from fastapi.responses import FileResponse, PlainTextResponse
from pydantic import BaseModel, Field
from src.server.models.config import LoggingConfig
from src.server.services.config_service import ConfigServiceError, get_config_service
from src.server.utils.dependencies import require_auth
logger = logging.getLogger(__name__)
router = APIRouter(prefix="/api/logging", tags=["logging"])
class LogFileInfo(BaseModel):
"""Information about a log file."""
name: str = Field(..., description="File name")
size: int = Field(..., description="File size in bytes")
modified: float = Field(..., description="Last modified timestamp")
path: str = Field(..., description="Relative path from logs directory")
class LogCleanupResult(BaseModel):
"""Result of log cleanup operation."""
files_deleted: int = Field(..., description="Number of files deleted")
space_freed: int = Field(..., description="Space freed in bytes")
errors: List[str] = Field(
default_factory=list, description="Any errors encountered"
)
def get_logs_directory() -> Path:
"""Get the logs directory path.
Returns:
Path: Logs directory path
Raises:
HTTPException: If logs directory doesn't exist
"""
# Check both common locations
possible_paths = [
Path("logs"),
Path("src/cli/logs"),
Path("data/logs"),
]
for log_path in possible_paths:
if log_path.exists() and log_path.is_dir():
return log_path
    # Fall back to ./logs, creating it if necessary
logs_dir = Path("logs")
logs_dir.mkdir(parents=True, exist_ok=True)
return logs_dir
@router.get("/config", response_model=LoggingConfig)
def get_logging_config(
auth: Optional[dict] = Depends(require_auth)
) -> LoggingConfig:
"""Get current logging configuration.
Args:
auth: Authentication token (optional for read operations)
Returns:
LoggingConfig: Current logging configuration
Raises:
HTTPException: If configuration cannot be loaded
"""
try:
config_service = get_config_service()
app_config = config_service.load_config()
return app_config.logging
except ConfigServiceError as e:
logger.error(f"Failed to load logging config: {e}")
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Failed to load logging configuration: {e}",
) from e
@router.post("/config", response_model=LoggingConfig)
def update_logging_config(
logging_config: LoggingConfig,
auth: dict = Depends(require_auth),
) -> LoggingConfig:
"""Update logging configuration.
Args:
logging_config: New logging configuration
auth: Authentication token (required)
Returns:
LoggingConfig: Updated logging configuration
Raises:
HTTPException: If configuration update fails
"""
try:
config_service = get_config_service()
app_config = config_service.load_config()
# Update logging section
app_config.logging = logging_config
# Save and return
config_service.save_config(app_config)
logger.info(
f"Logging config updated by {auth.get('username', 'unknown')}"
)
# Apply the new logging configuration
_apply_logging_config(logging_config)
return logging_config
except ConfigServiceError as e:
logger.error(f"Failed to update logging config: {e}")
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Failed to update logging configuration: {e}",
) from e
def _apply_logging_config(config: LoggingConfig) -> None:
"""Apply logging configuration to the Python logging system.
Args:
config: Logging configuration to apply
"""
# Set the root logger level
logging.getLogger().setLevel(config.level)
# If a file is specified, configure file handler
if config.file:
file_path = Path(config.file)
file_path.parent.mkdir(parents=True, exist_ok=True)
# Remove existing file handlers
root_logger = logging.getLogger()
for handler in root_logger.handlers[:]:
if isinstance(handler, logging.FileHandler):
root_logger.removeHandler(handler)
# Add new file handler with rotation if configured
if config.max_bytes and config.max_bytes > 0:
from logging.handlers import RotatingFileHandler
handler = RotatingFileHandler(
config.file,
maxBytes=config.max_bytes,
backupCount=config.backup_count or 3,
)
else:
handler = logging.FileHandler(config.file)
handler.setFormatter(
logging.Formatter(
"%(asctime)s - %(name)s - %(levelname)s - %(message)s"
)
)
root_logger.addHandler(handler)
@router.get("/files", response_model=List[LogFileInfo])
def list_log_files(
auth: Optional[dict] = Depends(require_auth)
) -> List[LogFileInfo]:
"""List available log files.
Args:
auth: Authentication token (optional for read operations)
Returns:
List of log file information
Raises:
HTTPException: If logs directory cannot be accessed
"""
try:
logs_dir = get_logs_directory()
files: List[LogFileInfo] = []
for file_path in logs_dir.rglob("*.log*"):
if file_path.is_file():
stat = file_path.stat()
rel_path = file_path.relative_to(logs_dir)
files.append(
LogFileInfo(
name=file_path.name,
size=stat.st_size,
modified=stat.st_mtime,
path=str(rel_path),
)
)
# Sort by modified time, newest first
files.sort(key=lambda x: x.modified, reverse=True)
return files
except Exception as e:
logger.exception("Failed to list log files")
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Failed to list log files: {str(e)}",
) from e
@router.get("/files/{filename:path}/download")
async def download_log_file(
filename: str, auth: dict = Depends(require_auth)
) -> FileResponse:
"""Download a specific log file.
Args:
filename: Name or relative path of the log file
auth: Authentication token (required)
Returns:
File download response
Raises:
HTTPException: If file not found or access denied
"""
try:
logs_dir = get_logs_directory()
file_path = logs_dir / filename
# Security: Ensure the file is within logs directory
if not file_path.resolve().is_relative_to(logs_dir.resolve()):
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail="Access denied to file outside logs directory",
)
if not file_path.exists() or not file_path.is_file():
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=f"Log file not found: {filename}",
)
logger.info(
f"Log file download: {filename} "
f"by {auth.get('username', 'unknown')}"
)
return FileResponse(
path=str(file_path),
filename=file_path.name,
media_type="text/plain",
)
except HTTPException:
raise
except Exception as e:
logger.exception(f"Failed to download log file: {filename}")
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Failed to download log file: {str(e)}",
) from e
@router.get("/files/{filename:path}/tail")
async def tail_log_file(
filename: str,
lines: int = 100,
auth: Optional[dict] = Depends(require_auth),
) -> PlainTextResponse:
"""Get the last N lines of a log file.
Args:
filename: Name or relative path of the log file
lines: Number of lines to retrieve (default: 100)
auth: Authentication token (optional)
Returns:
Plain text response with log file tail
Raises:
HTTPException: If file not found or access denied
"""
try:
logs_dir = get_logs_directory()
file_path = logs_dir / filename
# Security: Ensure the file is within logs directory
if not file_path.resolve().is_relative_to(logs_dir.resolve()):
raise HTTPException(
status_code=status.HTTP_403_FORBIDDEN,
detail="Access denied to file outside logs directory",
)
if not file_path.exists() or not file_path.is_file():
raise HTTPException(
status_code=status.HTTP_404_NOT_FOUND,
detail=f"Log file not found: {filename}",
)
        # Read the whole file and slice off the tail; simple, and fine for
        # moderately sized log files
        with open(file_path, "r", encoding="utf-8", errors="ignore") as f:
content = f.readlines()
tail_lines = content[-lines:] if len(content) > lines else content
return PlainTextResponse(content="".join(tail_lines))
except HTTPException:
raise
except Exception as e:
logger.exception(f"Failed to tail log file: {filename}")
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Failed to tail log file: {str(e)}",
) from e
@router.post("/test", response_model=Dict[str, str])
async def test_logging(
auth: dict = Depends(require_auth)
) -> Dict[str, str]:
"""Test logging by writing messages at all levels.
Args:
auth: Authentication token (required)
Returns:
Success message
"""
try:
test_logger = logging.getLogger("aniworld.test")
test_logger.debug("Test DEBUG message")
test_logger.info("Test INFO message")
test_logger.warning("Test WARNING message")
test_logger.error("Test ERROR message")
test_logger.critical("Test CRITICAL message")
logger.info(
f"Logging test triggered by {auth.get('username', 'unknown')}"
)
return {
"status": "success",
"message": "Test messages logged at all levels",
}
except Exception as e:
logger.exception("Failed to test logging")
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Failed to test logging: {str(e)}",
) from e
@router.post("/cleanup", response_model=LogCleanupResult)
async def cleanup_logs(
max_age_days: int = 30, auth: dict = Depends(require_auth)
) -> LogCleanupResult:
"""Clean up old log files.
Args:
max_age_days: Maximum age in days for log files to keep
auth: Authentication token (required)
Returns:
Cleanup result with statistics
Raises:
HTTPException: If cleanup fails
"""
try:
logs_dir = get_logs_directory()
        current_time = time.time()  # wall-clock "now"; the directory mtime is not the current time
max_age_seconds = max_age_days * 24 * 60 * 60
files_deleted = 0
space_freed = 0
errors: List[str] = []
for file_path in logs_dir.rglob("*.log*"):
if not file_path.is_file():
continue
try:
file_age = current_time - file_path.stat().st_mtime
if file_age > max_age_seconds:
file_size = file_path.stat().st_size
file_path.unlink()
files_deleted += 1
space_freed += file_size
logger.info(f"Deleted old log file: {file_path.name}")
except Exception as e:
error_msg = f"Failed to delete {file_path.name}: {str(e)}"
errors.append(error_msg)
logger.warning(error_msg)
logger.info(
f"Log cleanup by {auth.get('username', 'unknown')}: "
f"{files_deleted} files, {space_freed} bytes"
)
return LogCleanupResult(
files_deleted=files_deleted,
space_freed=space_freed,
errors=errors,
)
except Exception as e:
logger.exception("Failed to cleanup logs")
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Failed to cleanup logs: {str(e)}",
) from e
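The tail endpoint above returned plain text, so a client only needed a filename, a line count, and credentials. A hedged sketch; the base URL and token are assumptions:

```python
import httpx

# Sketch only; host/port and token are assumptions.
resp = httpx.get(
    "http://localhost:8000/api/logging/files/app.log/tail",
    params={"lines": 50},
    headers={"Authorization": "Bearer <token>"},
)
print(resp.text)
```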

View File

@@ -19,13 +19,11 @@ from src.config.settings import settings
# Import core functionality
from src.core.SeriesApp import SeriesApp
from src.infrastructure.logging import setup_logging
from src.server.api.analytics import router as analytics_router
from src.server.api.anime import router as anime_router
from src.server.api.auth import router as auth_router
from src.server.api.config import router as config_router
from src.server.api.download import downloads_router
from src.server.api.download import router as download_router
from src.server.api.logging import router as logging_router
from src.server.api.scheduler import router as scheduler_router
from src.server.api.websocket import router as websocket_router
from src.server.controllers.error_controller import (
@@ -168,8 +166,6 @@ app.include_router(page_router)
app.include_router(auth_router)
app.include_router(config_router)
app.include_router(scheduler_router)
app.include_router(logging_router)
app.include_router(analytics_router)
app.include_router(anime_router)
app.include_router(download_router)
app.include_router(downloads_router) # Alias for input validation tests
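With the two include_router calls dropped, requests to the removed prefixes should now return 404. A quick hedged check, assuming no catch-all route shadows these paths:

```python
from fastapi.testclient import TestClient

from src.server.fastapi_app import app

client = TestClient(app)
for path in ("/api/analytics/summary", "/api/logging/config"):
    assert client.get(path).status_code == 404  # routers are gone
```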

View File

@@ -1,154 +0,0 @@
"""Integration tests for analytics API endpoints.
Tests analytics API endpoints including download statistics,
series popularity, storage analysis, and performance reports.
"""
from collections import namedtuple
from unittest.mock import AsyncMock, patch
import pytest
from httpx import ASGITransport, AsyncClient
from src.server.fastapi_app import app
@pytest.mark.asyncio
async def test_analytics_downloads_endpoint():
"""Test GET /api/analytics/downloads endpoint."""
transport = ASGITransport(app=app)
async with AsyncClient(
transport=transport, base_url="http://test"
) as client:
with patch("src.server.api.analytics.get_db_session") as mock_get_db:
mock_db = AsyncMock()
mock_get_db.return_value = mock_db
response = await client.get("/api/analytics/downloads?days=30")
assert response.status_code in [200, 422, 500]
@pytest.mark.asyncio
async def test_analytics_series_popularity_endpoint():
"""Test GET /api/analytics/series-popularity endpoint."""
transport = ASGITransport(app=app)
async with AsyncClient(
transport=transport, base_url="http://test"
) as client:
with patch("src.server.api.analytics.get_db_session") as mock_get_db:
mock_db = AsyncMock()
mock_get_db.return_value = mock_db
response = await client.get(
"/api/analytics/series-popularity?limit=10"
)
assert response.status_code in [200, 422, 500]
@pytest.mark.asyncio
async def test_analytics_storage_endpoint():
"""Test GET /api/analytics/storage endpoint."""
transport = ASGITransport(app=app)
async with AsyncClient(
transport=transport, base_url="http://test"
) as client:
with patch("psutil.disk_usage") as mock_disk:
mock_disk.return_value = {
"total": 1024 * 1024 * 1024,
"used": 512 * 1024 * 1024,
"free": 512 * 1024 * 1024,
"percent": 50.0,
}
response = await client.get("/api/analytics/storage")
assert response.status_code in [200, 401, 500]
@pytest.mark.asyncio
async def test_analytics_performance_endpoint():
"""Test GET /api/analytics/performance endpoint."""
transport = ASGITransport(app=app)
async with AsyncClient(
transport=transport, base_url="http://test"
) as client:
with patch("src.server.api.analytics.get_db_session") as mock_get_db:
mock_db = AsyncMock()
mock_get_db.return_value = mock_db
response = await client.get(
"/api/analytics/performance?hours=24"
)
assert response.status_code in [200, 422, 500]
@pytest.mark.asyncio
async def test_analytics_summary_endpoint():
"""Test GET /api/analytics/summary endpoint."""
transport = ASGITransport(app=app)
async with AsyncClient(
transport=transport, base_url="http://test"
) as client:
with patch("src.server.api.analytics.get_db_session") as mock_get_db:
mock_db = AsyncMock()
mock_get_db.return_value = mock_db
response = await client.get("/api/analytics/summary")
assert response.status_code in [200, 500]
@pytest.mark.asyncio
async def test_analytics_downloads_with_query_params():
"""Test /api/analytics/downloads with different query params."""
transport = ASGITransport(app=app)
async with AsyncClient(
transport=transport, base_url="http://test"
) as client:
with patch("src.server.api.analytics.get_db_session") as mock_get_db:
mock_db = AsyncMock()
mock_get_db.return_value = mock_db
response = await client.get("/api/analytics/downloads?days=7")
assert response.status_code in [200, 422, 500]
@pytest.mark.asyncio
async def test_analytics_series_with_different_limits():
"""Test /api/analytics/series-popularity with different limits."""
transport = ASGITransport(app=app)
async with AsyncClient(
transport=transport, base_url="http://test"
) as client:
with patch("src.server.api.analytics.get_db_session") as mock_get_db:
mock_db = AsyncMock()
mock_get_db.return_value = mock_db
for limit in [5, 10, 20]:
response = await client.get(
f"/api/analytics/series-popularity?limit={limit}"
)
assert response.status_code in [200, 422, 500]
@pytest.mark.asyncio
async def test_analytics_performance_with_different_hours():
"""Test /api/analytics/performance with different hour ranges."""
transport = ASGITransport(app=app)
async with AsyncClient(
transport=transport, base_url="http://test"
) as client:
with patch("src.server.api.analytics.get_db_session") as mock_get_db:
mock_db = AsyncMock()
mock_get_db.return_value = mock_db
for hours in [1, 12, 24, 72]:
response = await client.get(
f"/api/analytics/performance?hours={hours}"
)
assert response.status_code in [200, 422, 500]
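One caveat about the pattern in these tests: patch("src.server.api.analytics.get_db_session") rebinds the module attribute, but Depends(get_db_session) captured the original function when the router was defined, so the mock never reaches the endpoint; that is why each assertion also tolerates 500. A hedged sketch of the dependency_overrides alternative:

```python
# Sketch only; replaces the session dependency through FastAPI itself.
from src.server.database.connection import get_db_session


async def _fake_db_session():
    yield AsyncMock()  # stand-in for an AsyncSession


app.dependency_overrides[get_db_session] = _fake_db_session
try:
    pass  # issue requests against `app` here; endpoints receive the mock session
finally:
    app.dependency_overrides.clear()
```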