remove part 2
This commit is contained in:
@@ -1,258 +0,0 @@
|
||||
"""Analytics API endpoints for accessing system analytics and reports.
|
||||
|
||||
Provides REST API endpoints for querying analytics data including download
|
||||
statistics, series popularity, storage analysis, and performance reports.
|
||||
"""
|
||||
|
||||
from typing import Optional
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from pydantic import BaseModel
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from src.server.database.connection import get_db_session
|
||||
from src.server.services.analytics_service import get_analytics_service
|
||||
|
||||
router = APIRouter(prefix="/api/analytics", tags=["analytics"])
|
||||
|
||||
|
||||
class DownloadStatsResponse(BaseModel):
    """Download statistics response model.

    Aggregated download metrics for a reporting period, as returned by
    the analytics service (see ``get_download_statistics``).
    """

    total_downloads: int  # all download attempts in the period
    successful_downloads: int
    failed_downloads: int
    total_bytes_downloaded: int
    average_speed_mbps: float  # mean transfer speed; presumably Mbit/s — confirm against service
    success_rate: float  # NOTE(review): scale (0-1 vs 0-100) not visible here — verify
    average_duration_seconds: float  # mean duration of a single download
||||
class SeriesPopularityResponse(BaseModel):
    """Series popularity response model.

    One entry per series, ranked by download count.
    """

    series_name: str
    download_count: int
    total_size_bytes: int
    last_download: Optional[str]  # timestamp of most recent download; None if never — format set by service
    success_rate: float  # NOTE(review): scale (0-1 vs 0-100) not visible here — verify
||||
class StorageAnalysisResponse(BaseModel):
    """Storage analysis response model.

    Disk-level totals plus per-directory sizes for the application's
    data directories. All sizes are in bytes.
    """

    total_storage_bytes: int  # capacity of the underlying volume
    used_storage_bytes: int
    free_storage_bytes: int
    storage_percent_used: float
    downloads_directory_size_bytes: int
    cache_directory_size_bytes: int
    logs_directory_size_bytes: int
||||
class PerformanceReportResponse(BaseModel):
    """Performance report response model.

    System/application performance metrics for the window
    ``period_start``..``period_end``.
    """

    period_start: str  # timestamps serialized as strings by the service
    period_end: str
    downloads_per_hour: float
    average_queue_size: float
    peak_memory_usage_mb: float
    average_cpu_percent: float
    uptime_seconds: float
    error_rate: float  # NOTE(review): scale (0-1 vs 0-100) not visible here — verify
||||
class SummaryReportResponse(BaseModel):
    """Comprehensive analytics summary response.

    Bundles all individual analytics reports into one payload
    (see ``get_summary_report``).
    """

    timestamp: str  # when the summary was generated
    download_stats: DownloadStatsResponse
    series_popularity: list[SeriesPopularityResponse]
    storage_analysis: StorageAnalysisResponse
    performance_report: PerformanceReportResponse
||||
@router.get("/downloads", response_model=DownloadStatsResponse)
async def get_download_statistics(
    days: int = 30,
    db: AsyncSession = Depends(get_db_session),
) -> DownloadStatsResponse:
    """Get download statistics for specified period.

    Args:
        days: Number of days to analyze (default: 30)
        db: Database session

    Returns:
        Download statistics including success rates and speeds

    Raises:
        HTTPException: 500 if the analytics service fails
    """
    try:
        service = get_analytics_service()
        stats = await service.get_download_stats(db, days=days)

        return DownloadStatsResponse(
            total_downloads=stats.total_downloads,
            successful_downloads=stats.successful_downloads,
            failed_downloads=stats.failed_downloads,
            total_bytes_downloaded=stats.total_bytes_downloaded,
            average_speed_mbps=stats.average_speed_mbps,
            success_rate=stats.success_rate,
            average_duration_seconds=stats.average_duration_seconds,
        )
    except Exception as e:
        # FIX: chain the original exception (`from e`) so the traceback
        # context is preserved — matches the logging router's style.
        raise HTTPException(
            status_code=500,
            detail=f"Failed to get download statistics: {str(e)}",
        ) from e
||||
@router.get(
    "/series-popularity",
    response_model=list[SeriesPopularityResponse]
)
async def get_series_popularity(
    limit: int = 10,
    db: AsyncSession = Depends(get_db_session),
) -> list[SeriesPopularityResponse]:
    """Return the most popular series, ranked by download count.

    Args:
        limit: Maximum number of series (default: 10)
        db: Database session

    Returns:
        List of series sorted by popularity
    """
    try:
        entries = await get_analytics_service().get_series_popularity(
            db, limit=limit
        )
        responses: list[SeriesPopularityResponse] = []
        for entry in entries:
            responses.append(
                SeriesPopularityResponse(
                    series_name=entry.series_name,
                    download_count=entry.download_count,
                    total_size_bytes=entry.total_size_bytes,
                    last_download=entry.last_download,
                    success_rate=entry.success_rate,
                )
            )
        return responses
    except Exception as e:
        raise HTTPException(
            status_code=500,
            detail=f"Failed to get series popularity: {str(e)}",
        )
||||
@router.get(
    "/storage",
    response_model=StorageAnalysisResponse
)
async def get_storage_analysis() -> StorageAnalysisResponse:
    """Return the current storage usage analysis.

    Returns:
        Storage breakdown including disk and directory usage
    """
    try:
        snapshot = get_analytics_service().get_storage_analysis()
        return StorageAnalysisResponse(
            total_storage_bytes=snapshot.total_storage_bytes,
            used_storage_bytes=snapshot.used_storage_bytes,
            free_storage_bytes=snapshot.free_storage_bytes,
            storage_percent_used=snapshot.storage_percent_used,
            downloads_directory_size_bytes=(
                snapshot.downloads_directory_size_bytes
            ),
            cache_directory_size_bytes=snapshot.cache_directory_size_bytes,
            logs_directory_size_bytes=snapshot.logs_directory_size_bytes,
        )
    except Exception as e:
        raise HTTPException(
            status_code=500,
            detail=f"Failed to get storage analysis: {str(e)}",
        )
||||
@router.get(
    "/performance",
    response_model=PerformanceReportResponse
)
async def get_performance_report(
    hours: int = 24,
    db: AsyncSession = Depends(get_db_session),
) -> PerformanceReportResponse:
    """Return performance metrics for the requested window.

    Args:
        hours: Number of hours to analyze (default: 24)
        db: Database session

    Returns:
        Performance metrics including speeds and system usage
    """
    try:
        metrics = await get_analytics_service().get_performance_report(
            db, hours=hours
        )
        return PerformanceReportResponse(
            period_start=metrics.period_start,
            period_end=metrics.period_end,
            downloads_per_hour=metrics.downloads_per_hour,
            average_queue_size=metrics.average_queue_size,
            peak_memory_usage_mb=metrics.peak_memory_usage_mb,
            average_cpu_percent=metrics.average_cpu_percent,
            uptime_seconds=metrics.uptime_seconds,
            error_rate=metrics.error_rate,
        )
    except Exception as e:
        raise HTTPException(
            status_code=500,
            detail=f"Failed to get performance report: {str(e)}",
        )
||||
@router.get("/summary", response_model=SummaryReportResponse)
async def get_summary_report(
    db: AsyncSession = Depends(get_db_session),
) -> SummaryReportResponse:
    """Return the comprehensive analytics summary.

    Args:
        db: Database session

    Returns:
        Complete analytics report with all metrics
    """
    try:
        report = await get_analytics_service().generate_summary_report(db)

        # The service hands back plain dicts; re-hydrate them into the
        # typed response models declared above.
        popularity = [
            SeriesPopularityResponse(**item)
            for item in report["series_popularity"]
        ]
        return SummaryReportResponse(
            timestamp=report["timestamp"],
            download_stats=DownloadStatsResponse(**report["download_stats"]),
            series_popularity=popularity,
            storage_analysis=StorageAnalysisResponse(
                **report["storage_analysis"]
            ),
            performance_report=PerformanceReportResponse(
                **report["performance_report"]
            ),
        )
    except Exception as e:
        raise HTTPException(
            status_code=500,
            detail=f"Failed to generate summary report: {str(e)}",
        )
||||
@@ -1,304 +0,0 @@
|
||||
"""Backup management API endpoints."""
|
||||
|
||||
import logging
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException
|
||||
from pydantic import BaseModel
|
||||
|
||||
from src.server.services.backup_service import BackupService, get_backup_service
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(prefix="/api/backup", tags=["backup"])
|
||||
|
||||
|
||||
class BackupCreateRequest(BaseModel):
    """Request to create a backup."""

    backup_type: str  # one of 'config', 'database', 'full' (validated in the endpoint)
    description: Optional[str] = None  # free-text label stored with the backup
||||
class BackupResponse(BaseModel):
    """Response for backup creation."""

    success: bool
    message: str  # human-readable outcome description
    backup_name: Optional[str] = None  # set only on success
    size_bytes: Optional[int] = None  # set only on success
||||
class BackupListResponse(BaseModel):
    """Response for listing backups."""

    backups: List[Dict[str, Any]]  # raw backup metadata dicts from the service
    total_count: int  # convenience count, equals len(backups)
||||
class RestoreRequest(BaseModel):
    """Request to restore from backup."""

    backup_name: str  # name as returned by the list endpoint
||||
class RestoreResponse(BaseModel):
    """Response for restore operation."""

    success: bool
    message: str  # human-readable outcome description
||||
def get_backup_service_dep() -> BackupService:
    """FastAPI dependency that resolves the shared backup service."""
    service = get_backup_service()
    return service
||||
@router.post("/create", response_model=BackupResponse)
async def create_backup(
    request: BackupCreateRequest,
    backup_service: BackupService = Depends(get_backup_service_dep),
) -> BackupResponse:
    """Create a new backup.

    Args:
        request: Backup creation request.
        backup_service: Backup service dependency.

    Returns:
        BackupResponse: Result of backup creation.
    """
    try:
        # Dispatch table instead of an if/elif chain; unknown types fall
        # through to the ValueError below, exactly as before.
        creators = {
            "config": backup_service.backup_configuration,
            "database": backup_service.backup_database,
            "full": backup_service.backup_full,
        }
        creator = creators.get(request.backup_type)
        if creator is None:
            raise ValueError(f"Invalid backup type: {request.backup_type}")

        backup_info = creator(request.description or "")

        if backup_info is None:
            return BackupResponse(
                success=False,
                message=f"Failed to create {request.backup_type} backup",
            )

        return BackupResponse(
            success=True,
            message=(
                f"{request.backup_type.capitalize()} backup created "
                "successfully"
            ),
            backup_name=backup_info.name,
            size_bytes=backup_info.size_bytes,
        )
    except Exception as e:
        logger.error(f"Failed to create backup: {e}")
        raise HTTPException(status_code=500, detail=str(e))
||||
@router.get("/list", response_model=BackupListResponse)
async def list_backups(
    backup_type: Optional[str] = None,
    backup_service: BackupService = Depends(get_backup_service_dep),
) -> BackupListResponse:
    """List available backups.

    Args:
        backup_type: Optional filter by backup type.
        backup_service: Backup service dependency.

    Returns:
        BackupListResponse: List of available backups.
    """
    try:
        available = backup_service.list_backups(backup_type)
        return BackupListResponse(
            backups=available,
            total_count=len(available),
        )
    except Exception as e:
        logger.error(f"Failed to list backups: {e}")
        raise HTTPException(status_code=500, detail=str(e))
||||
@router.post("/restore", response_model=RestoreResponse)
async def restore_backup(
    request: RestoreRequest,
    backup_type: Optional[str] = None,
    backup_service: BackupService = Depends(get_backup_service_dep),
) -> RestoreResponse:
    """Restore from a backup.

    Args:
        request: Restore request.
        backup_type: Type of backup to restore.
        backup_service: Backup service dependency.

    Returns:
        RestoreResponse: Result of restore operation.
    """
    try:
        name = request.backup_name

        # No explicit type given — infer it from the backup's filename.
        if backup_type is None:
            if "config" in name:
                backup_type = "config"
            elif "database" in name:
                backup_type = "database"
            else:
                backup_type = "full"

        if backup_type == "config":
            restored = backup_service.restore_configuration(name)
        elif backup_type == "database":
            restored = backup_service.restore_database(name)
        else:
            # 'full' (and anything else) has no restore path.
            raise ValueError(f"Cannot restore backup type: {backup_type}")

        if restored:
            return RestoreResponse(
                success=True,
                message=(
                    f"{backup_type.capitalize()} backup restored successfully"
                ),
            )
        return RestoreResponse(
            success=False,
            message=f"Failed to restore {backup_type} backup",
        )
    except Exception as e:
        logger.error(f"Failed to restore backup: {e}")
        raise HTTPException(status_code=500, detail=str(e))
||||
@router.delete("/{backup_name}", response_model=Dict[str, Any])
async def delete_backup(
    backup_name: str,
    backup_service: BackupService = Depends(get_backup_service_dep),
) -> Dict[str, Any]:
    """Delete a backup.

    Args:
        backup_name: Name of the backup to delete.
        backup_service: Backup service dependency.

    Returns:
        dict: Result of delete operation.
    """
    try:
        if not backup_service.delete_backup(backup_name):
            raise HTTPException(status_code=404, detail="Backup not found")
        return {"success": True, "message": "Backup deleted successfully"}
    except HTTPException:
        # Re-raise our own 404 untouched.
        raise
    except Exception as e:
        logger.error(f"Failed to delete backup: {e}")
        raise HTTPException(status_code=500, detail=str(e))
||||
@router.post("/cleanup", response_model=Dict[str, Any])
async def cleanup_backups(
    max_backups: int = 10,
    backup_type: Optional[str] = None,
    backup_service: BackupService = Depends(get_backup_service_dep),
) -> Dict[str, Any]:
    """Clean up old backups.

    Args:
        max_backups: Maximum number of backups to keep.
        backup_type: Optional filter by backup type.
        backup_service: Backup service dependency.

    Returns:
        dict: Number of backups deleted.
    """
    try:
        removed = backup_service.cleanup_old_backups(max_backups, backup_type)
        return {
            "success": True,
            "message": "Cleanup completed",
            "deleted_count": removed,
        }
    except Exception as e:
        logger.error(f"Failed to cleanup backups: {e}")
        raise HTTPException(status_code=500, detail=str(e))
||||
@router.post("/export/anime", response_model=Dict[str, Any])
async def export_anime_data(
    backup_service: BackupService = Depends(get_backup_service_dep),
) -> Dict[str, Any]:
    """Export anime library data.

    Args:
        backup_service: Backup service dependency.

    Returns:
        dict: Result of export operation.
    """
    try:
        export_path = "data/backups/anime_export.json"
        if not backup_service.export_anime_data(export_path):
            raise HTTPException(
                status_code=500, detail="Failed to export anime data"
            )
        return {
            "success": True,
            "message": "Anime data exported successfully",
            "export_file": export_path,
        }
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Failed to export anime data: {e}")
        raise HTTPException(status_code=500, detail=str(e))
||||
@router.post("/import/anime", response_model=Dict[str, Any])
async def import_anime_data(
    import_file: str,
    backup_service: BackupService = Depends(get_backup_service_dep),
) -> Dict[str, Any]:
    """Import anime library data.

    Args:
        import_file: Path to import file.
        backup_service: Backup service dependency.

    Returns:
        dict: Result of import operation.
    """
    try:
        if not backup_service.import_anime_data(import_file):
            raise HTTPException(
                status_code=400, detail="Failed to import anime data"
            )
        return {
            "success": True,
            "message": "Anime data imported successfully",
        }
    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Failed to import anime data: {e}")
        raise HTTPException(status_code=500, detail=str(e))
||||
@@ -1,426 +0,0 @@
|
||||
"""Logging API endpoints for Aniworld.
|
||||
|
||||
This module provides endpoints for managing application logging
|
||||
configuration and accessing log files.
|
||||
"""
|
||||
import logging
|
||||
import os
|
||||
from pathlib import Path
|
||||
from typing import Dict, List, Optional
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
from fastapi.responses import FileResponse, PlainTextResponse
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from src.server.models.config import LoggingConfig
|
||||
from src.server.services.config_service import ConfigServiceError, get_config_service
|
||||
from src.server.utils.dependencies import require_auth
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(prefix="/api/logging", tags=["logging"])
|
||||
|
||||
|
||||
class LogFileInfo(BaseModel):
    """Information about a log file, as listed by ``list_log_files``."""

    name: str = Field(..., description="File name")
    size: int = Field(..., description="File size in bytes")
    modified: float = Field(..., description="Last modified timestamp")
    path: str = Field(..., description="Relative path from logs directory")
||||
class LogCleanupResult(BaseModel):
    """Result of log cleanup operation (see ``cleanup_logs``)."""

    files_deleted: int = Field(..., description="Number of files deleted")
    space_freed: int = Field(..., description="Space freed in bytes")
    errors: List[str] = Field(
        default_factory=list, description="Any errors encountered"
    )
||||
def get_logs_directory() -> Path:
    """Locate the application's logs directory.

    Probes a few known locations and returns the first existing
    directory. If none exists, ``logs/`` is created and returned, so the
    result is always an existing directory.

    Returns:
        Path: Logs directory path
    """
    candidates = (
        Path("logs"),
        Path("src/cli/logs"),
        Path("data/logs"),
    )
    for candidate in candidates:
        # Path.is_dir() is False for nonexistent paths, so this covers
        # both the exists() and is_dir() checks.
        if candidate.is_dir():
            return candidate

    fallback = Path("logs")
    fallback.mkdir(parents=True, exist_ok=True)
    return fallback
||||
@router.get("/config", response_model=LoggingConfig)
def get_logging_config(
    auth: Optional[dict] = Depends(require_auth)
) -> LoggingConfig:
    """Get current logging configuration.

    Args:
        auth: Authentication token (optional for read operations)

    Returns:
        LoggingConfig: Current logging configuration

    Raises:
        HTTPException: If configuration cannot be loaded
    """
    try:
        return get_config_service().load_config().logging
    except ConfigServiceError as e:
        logger.error(f"Failed to load logging config: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to load logging configuration: {e}",
        ) from e
||||
@router.post("/config", response_model=LoggingConfig)
def update_logging_config(
    logging_config: LoggingConfig,
    auth: dict = Depends(require_auth),
) -> LoggingConfig:
    """Update logging configuration.

    Persists the new logging section to the app config, then applies it
    to the live logging system.

    Args:
        logging_config: New logging configuration
        auth: Authentication token (required)

    Returns:
        LoggingConfig: Updated logging configuration

    Raises:
        HTTPException: If configuration update fails
    """
    try:
        service = get_config_service()
        current = service.load_config()
        current.logging = logging_config
        service.save_config(current)

        logger.info(
            f"Logging config updated by {auth.get('username', 'unknown')}"
        )

        # Take effect immediately, not just on next restart.
        _apply_logging_config(logging_config)
        return logging_config
    except ConfigServiceError as e:
        logger.error(f"Failed to update logging config: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to update logging configuration: {e}",
        ) from e
||||
def _apply_logging_config(config: LoggingConfig) -> None:
    """Apply logging configuration to the Python logging system.

    Sets the root logger level and, when a log file is configured,
    swaps the root logger's file handler(s) for a fresh one.

    Args:
        config: Logging configuration to apply
    """
    # Set the root logger level
    logging.getLogger().setLevel(config.level)

    # If a file is specified, configure file handler
    if config.file:
        file_path = Path(config.file)
        file_path.parent.mkdir(parents=True, exist_ok=True)

        # Remove existing file handlers
        root_logger = logging.getLogger()
        for handler in root_logger.handlers[:]:
            if isinstance(handler, logging.FileHandler):
                root_logger.removeHandler(handler)
                # FIX: close the detached handler — otherwise its file
                # descriptor leaks on every config update.
                handler.close()

        # Add new file handler with rotation if configured
        if config.max_bytes and config.max_bytes > 0:
            from logging.handlers import RotatingFileHandler

            new_handler: logging.FileHandler = RotatingFileHandler(
                config.file,
                maxBytes=config.max_bytes,
                backupCount=config.backup_count or 3,
            )
        else:
            new_handler = logging.FileHandler(config.file)

        new_handler.setFormatter(
            logging.Formatter(
                "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
            )
        )
        root_logger.addHandler(new_handler)
||||
@router.get("/files", response_model=List[LogFileInfo])
def list_log_files(
    auth: Optional[dict] = Depends(require_auth)
) -> List[LogFileInfo]:
    """List available log files.

    Args:
        auth: Authentication token (optional for read operations)

    Returns:
        List of log file information, newest first

    Raises:
        HTTPException: If logs directory cannot be accessed
    """
    try:
        logs_dir = get_logs_directory()
        entries: List[LogFileInfo] = []

        for log_file in logs_dir.rglob("*.log*"):
            if not log_file.is_file():
                continue
            info = log_file.stat()
            entries.append(
                LogFileInfo(
                    name=log_file.name,
                    size=info.st_size,
                    modified=info.st_mtime,
                    path=str(log_file.relative_to(logs_dir)),
                )
            )

        # Newest first
        return sorted(entries, key=lambda f: f.modified, reverse=True)

    except Exception as e:
        logger.exception("Failed to list log files")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to list log files: {str(e)}",
        ) from e
||||
@router.get("/files/{filename:path}/download")
async def download_log_file(
    filename: str, auth: dict = Depends(require_auth)
) -> FileResponse:
    """Download a specific log file.

    Args:
        filename: Name or relative path of the log file
        auth: Authentication token (required)

    Returns:
        File download response

    Raises:
        HTTPException: If file not found or access denied
    """
    try:
        logs_dir = get_logs_directory()
        file_path = logs_dir / filename

        # Security: Ensure the file is within logs directory
        if not file_path.resolve().is_relative_to(logs_dir.resolve()):
            raise HTTPException(
                status_code=status.HTTP_403_FORBIDDEN,
                detail="Access denied to file outside logs directory",
            )

        if not file_path.exists() or not file_path.is_file():
            # FIX: the f-string had no placeholder; include the filename.
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail=f"Log file not found: {filename}",
            )

        logger.info(
            f"Log file download: {filename} "
            f"by {auth.get('username', 'unknown')}"
        )

        return FileResponse(
            path=str(file_path),
            filename=file_path.name,
            media_type="text/plain",
        )

    except HTTPException:
        raise
    except Exception as e:
        logger.exception(f"Failed to download log file: {filename}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to download log file: {str(e)}",
        ) from e
||||
@router.get("/files/{filename:path}/tail")
async def tail_log_file(
    filename: str,
    lines: int = 100,
    auth: Optional[dict] = Depends(require_auth),
) -> PlainTextResponse:
    """Get the last N lines of a log file.

    Args:
        filename: Name or relative path of the log file
        lines: Number of lines to retrieve (default: 100)
        auth: Authentication token (optional)

    Returns:
        Plain text response with log file tail

    Raises:
        HTTPException: If file not found or access denied
    """
    from collections import deque

    try:
        logs_dir = get_logs_directory()
        file_path = logs_dir / filename

        # Security: Ensure the file is within logs directory
        if not file_path.resolve().is_relative_to(logs_dir.resolve()):
            raise HTTPException(
                status_code=status.HTTP_403_FORBIDDEN,
                detail="Access denied to file outside logs directory",
            )

        if not file_path.exists() or not file_path.is_file():
            # FIX: the f-string had no placeholder; include the filename.
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail=f"Log file not found: {filename}",
            )

        # FIX: stream the file through a bounded deque instead of
        # readlines() — keeps only the last N lines in memory, and a
        # non-positive `lines` now yields an empty tail rather than the
        # whole file (content[-0:] returned everything).
        with open(file_path, "r", encoding="utf-8", errors="ignore") as f:
            tail_lines = deque(f, maxlen=max(lines, 0))

        return PlainTextResponse(content="".join(tail_lines))

    except HTTPException:
        raise
    except Exception as e:
        logger.exception(f"Failed to tail log file: {filename}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to tail log file: {str(e)}",
        ) from e
||||
@router.post("/test", response_model=Dict[str, str])
async def test_logging(
    auth: dict = Depends(require_auth)
) -> Dict[str, str]:
    """Test logging by writing messages at all levels.

    Args:
        auth: Authentication token (required)

    Returns:
        Success message
    """
    try:
        test_logger = logging.getLogger("aniworld.test")

        # Emit one message at each severity level.
        for level_name in ("debug", "info", "warning", "error", "critical"):
            emit = getattr(test_logger, level_name)
            emit(f"Test {level_name.upper()} message")

        logger.info(
            f"Logging test triggered by {auth.get('username', 'unknown')}"
        )

        return {
            "status": "success",
            "message": "Test messages logged at all levels",
        }

    except Exception as e:
        logger.exception("Failed to test logging")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to test logging: {str(e)}",
        ) from e
||||
@router.post("/cleanup", response_model=LogCleanupResult)
async def cleanup_logs(
    max_age_days: int = 30, auth: dict = Depends(require_auth)
) -> LogCleanupResult:
    """Clean up old log files.

    Deletes every ``*.log*`` file under the logs directory older than
    ``max_age_days``; failures on individual files are collected rather
    than aborting the sweep.

    Args:
        max_age_days: Maximum age in days for log files to keep
        auth: Authentication token (required)

    Returns:
        Cleanup result with statistics

    Raises:
        HTTPException: If cleanup fails
    """
    import time

    try:
        logs_dir = get_logs_directory()
        # BUG FIX: the age baseline was os.path.getmtime(logs_dir) — the
        # directory's own mtime, not the current time — so computed file
        # ages were wrong (often negative). Use the wall clock.
        current_time = time.time()
        max_age_seconds = max_age_days * 24 * 60 * 60

        files_deleted = 0
        space_freed = 0
        errors: List[str] = []

        for file_path in logs_dir.rglob("*.log*"):
            if not file_path.is_file():
                continue

            try:
                file_age = current_time - file_path.stat().st_mtime
                if file_age > max_age_seconds:
                    file_size = file_path.stat().st_size
                    file_path.unlink()
                    files_deleted += 1
                    space_freed += file_size
                    logger.info(f"Deleted old log file: {file_path.name}")
            except Exception as e:
                # Best-effort: record the failure and keep going.
                error_msg = f"Failed to delete {file_path.name}: {str(e)}"
                errors.append(error_msg)
                logger.warning(error_msg)

        logger.info(
            f"Log cleanup by {auth.get('username', 'unknown')}: "
            f"{files_deleted} files, {space_freed} bytes"
        )

        return LogCleanupResult(
            files_deleted=files_deleted,
            space_freed=space_freed,
            errors=errors,
        )

    except Exception as e:
        logger.exception("Failed to cleanup logs")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to cleanup logs: {str(e)}",
        ) from e
||||
@@ -19,13 +19,11 @@ from src.config.settings import settings
|
||||
# Import core functionality
|
||||
from src.core.SeriesApp import SeriesApp
|
||||
from src.infrastructure.logging import setup_logging
|
||||
from src.server.api.analytics import router as analytics_router
|
||||
from src.server.api.anime import router as anime_router
|
||||
from src.server.api.auth import router as auth_router
|
||||
from src.server.api.config import router as config_router
|
||||
from src.server.api.download import downloads_router
|
||||
from src.server.api.download import router as download_router
|
||||
from src.server.api.logging import router as logging_router
|
||||
from src.server.api.scheduler import router as scheduler_router
|
||||
from src.server.api.websocket import router as websocket_router
|
||||
from src.server.controllers.error_controller import (
|
||||
@@ -168,8 +166,6 @@ app.include_router(page_router)
|
||||
app.include_router(auth_router)
|
||||
app.include_router(config_router)
|
||||
app.include_router(scheduler_router)
|
||||
app.include_router(logging_router)
|
||||
app.include_router(analytics_router)
|
||||
app.include_router(anime_router)
|
||||
app.include_router(download_router)
|
||||
app.include_router(downloads_router) # Alias for input validation tests
|
||||
|
||||
Reference in New Issue
Block a user