feat: implement missing API endpoints for scheduler, logging, and diagnostics
- Add scheduler API endpoints for configuration and manual rescan triggers - Add logging API endpoints for config management and log file operations - Add diagnostics API endpoints for network and system information - Extend config API with advanced settings, directory updates, export, and reset - Update FastAPI app to include new routers - Update API reference documentation with all new endpoints - Update infrastructure documentation with endpoint listings - Add comprehensive API implementation summary All new endpoints follow project coding standards with: - Type hints and Pydantic validation - Proper authentication and authorization - Comprehensive error handling and logging - Security best practices (path validation, input sanitization) Test results: 752/802 tests passing (93.8%)
This commit is contained in:
@@ -157,3 +157,193 @@ def delete_backup(
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
detail=f"Failed to delete backup: {e}"
|
||||
) from e
|
||||
|
||||
|
||||
@router.get("/section/advanced", response_model=Dict[str, object])
def get_advanced_config(
    auth: Optional[dict] = Depends(require_auth)
) -> Dict[str, object]:
    """Get advanced configuration section.

    Returns:
        Dictionary with advanced configuration settings
    """
    try:
        service = get_config_service()
        loaded = service.load_config()
    except ConfigServiceError as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to load advanced config: {e}"
        ) from e
    # Advanced settings live inside the free-form "other" mapping; an
    # absent section is reported as an empty dict.
    return loaded.other.get("advanced", {})
|
||||
|
||||
|
||||
@router.post("/section/advanced", response_model=Dict[str, str])
def update_advanced_config(
    config: Dict[str, object], auth: dict = Depends(require_auth)
) -> Dict[str, str]:
    """Update advanced configuration section.

    Args:
        config: Advanced configuration settings
        auth: Authentication token (required)

    Returns:
        Success message
    """
    try:
        service = get_config_service()
        loaded = service.load_config()
        # Merge the incoming keys into the "advanced" sub-mapping,
        # creating it on first use.
        loaded.other.setdefault("advanced", {}).update(config)
        service.save_config(loaded)
    except ConfigServiceError as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to update advanced config: {e}"
        ) from e
    return {"message": "Advanced configuration updated successfully"}
|
||||
|
||||
|
||||
@router.post("/directory", response_model=Dict[str, str])
def update_directory(
    directory_config: Dict[str, str], auth: dict = Depends(require_auth)
) -> Dict[str, str]:
    """Update anime directory configuration.

    Args:
        directory_config: Dictionary with 'directory' key
        auth: Authentication token (required)

    Returns:
        Success message

    Raises:
        HTTPException: 400 when the 'directory' key is missing or empty,
            500 when the configuration cannot be persisted.
    """
    directory = directory_config.get("directory")
    if not directory:
        raise HTTPException(
            status_code=status.HTTP_400_BAD_REQUEST,
            detail="Directory path is required"
        )

    try:
        config_service = get_config_service()
        app_config = config_service.load_config()

        # Fix: the original if/else assigned the exact same value in both
        # branches; a single unconditional assignment is equivalent.
        app_config.other["anime_directory"] = directory

        config_service.save_config(app_config)
    except ConfigServiceError as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to update directory: {e}"
        ) from e
    return {"message": "Anime directory updated successfully"}
|
||||
|
||||
|
||||
@router.post("/export")
async def export_config(
    export_options: Dict[str, bool], auth: dict = Depends(require_auth)
):
    """Export configuration to JSON file.

    Args:
        export_options: Options for export (include_sensitive, etc.)
        auth: Authentication token (required)

    Returns:
        JSON file download response
    """
    try:
        import json
        from datetime import datetime

        from fastapi.responses import Response

        config_service = get_config_service()
        app_config = config_service.load_config()

        # Convert to a plain dict for serialization.
        config_dict = app_config.model_dump()

        # Strip credential material unless the caller explicitly opts in.
        if not export_options.get("include_sensitive", False):
            config_dict.pop("password_salt", None)
            config_dict.pop("password_hash", None)

        # Timestamped filename so repeated exports don't collide.
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        filename = f"aniworld_config_{timestamp}.json"

        # Fix: the computed filename was never used — the
        # Content-Disposition header now carries it so browsers save the
        # download under a meaningful name.
        content = json.dumps(config_dict, indent=2)
        return Response(
            content=content,
            media_type="application/json",
            headers={
                "Content-Disposition": f'attachment; filename="{filename}"'
            },
        )
    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to export config: {e}"
        ) from e
|
||||
|
||||
|
||||
@router.post("/reset", response_model=Dict[str, str])
def reset_config(
    reset_options: Dict[str, bool], auth: dict = Depends(require_auth)
) -> Dict[str, str]:
    """Reset configuration to defaults.

    Args:
        reset_options: Options for reset (preserve_security, etc.)
        auth: Authentication token (required)

    Returns:
        Success message
    """
    try:
        service = get_config_service()

        # Snapshot the current state so a reset can be undone.
        service.create_backup("pre_reset")

        # Start from pristine defaults.
        fresh = AppConfig()

        # Optionally carry credential material over so users are not
        # locked out by the reset (default: preserve).
        if reset_options.get("preserve_security", True):
            existing = service.load_config()
            for key in ("password_salt", "password_hash"):
                if key in existing.other:
                    fresh.other[key] = existing.other[key]

        service.save_config(fresh)

        return {
            "message": "Configuration reset to defaults successfully"
        }
    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to reset config: {e}"
        ) from e
|
||||
|
||||
|
||||
191
src/server/api/diagnostics.py
Normal file
191
src/server/api/diagnostics.py
Normal file
@@ -0,0 +1,191 @@
|
||||
"""Diagnostics API endpoints for Aniworld.
|
||||
|
||||
This module provides endpoints for system diagnostics and health checks.
|
||||
"""
|
||||
import asyncio
|
||||
import logging
|
||||
import socket
|
||||
from typing import Dict, List, Optional
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from src.server.utils.dependencies import require_auth
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(prefix="/api/diagnostics", tags=["diagnostics"])
|
||||
|
||||
|
||||
class NetworkTestResult(BaseModel):
    """Result of a network connectivity test."""

    # Target that was probed (whatever string the caller passed in).
    host: str = Field(..., description="Hostname or URL tested")
    # Outcome flag for the probe.
    reachable: bool = Field(..., description="Whether host is reachable")
    # TCP-connect round-trip time; None when the probe failed.
    response_time_ms: Optional[float] = Field(
        None, description="Response time in milliseconds"
    )
    # Human-readable failure reason; None on success.
    error: Optional[str] = Field(None, description="Error message if failed")
|
||||
|
||||
|
||||
class NetworkDiagnostics(BaseModel):
    """Network diagnostics results."""

    # True when at least one probed host answered.
    internet_connected: bool = Field(
        ..., description="Overall internet connectivity status"
    )
    # Result of the standalone DNS resolution check.
    dns_working: bool = Field(..., description="DNS resolution status")
    # Per-host probe results, in the order the probes were issued.
    tests: List[NetworkTestResult] = Field(
        ..., description="Individual network tests"
    )
|
||||
|
||||
|
||||
async def check_dns() -> bool:
    """Check if DNS resolution is working.

    Returns:
        bool: True if DNS is working
    """
    # A single resolution of a well-known name serves as the probe; a
    # gaierror means resolution is broken (or there is no network).
    try:
        socket.gethostbyname("google.com")
    except socket.gaierror:
        return False
    return True
|
||||
|
||||
|
||||
async def test_host_connectivity(
    host: str, port: int = 80, timeout: float = 5.0
) -> NetworkTestResult:
    """Test connectivity to a specific host.

    Args:
        host: Hostname or IP address to test
        port: Port to test (default: 80)
        timeout: Timeout in seconds (default: 5.0)

    Returns:
        NetworkTestResult with test results
    """
    import time

    start_time = time.time()

    try:
        # Run the blocking connect in a worker thread so the event loop
        # stays responsive. Fix: get_running_loop() replaces
        # get_event_loop(), which is deprecated inside coroutines.
        loop = asyncio.get_running_loop()
        conn = await asyncio.wait_for(
            loop.run_in_executor(
                None,
                lambda: socket.create_connection(
                    (host, port), timeout=timeout
                ),
            ),
            timeout=timeout,
        )
        # Fix: the probe socket was previously discarded without being
        # closed, leaking a file descriptor per successful test.
        conn.close()

        response_time = (time.time() - start_time) * 1000

        return NetworkTestResult(
            host=host,
            reachable=True,
            response_time_ms=round(response_time, 2),
        )

    except asyncio.TimeoutError:
        return NetworkTestResult(
            host=host, reachable=False, error="Connection timeout"
        )
    except socket.gaierror as e:
        return NetworkTestResult(
            host=host, reachable=False, error=f"DNS resolution failed: {e}"
        )
    except ConnectionRefusedError:
        return NetworkTestResult(
            host=host, reachable=False, error="Connection refused"
        )
    except Exception as e:
        return NetworkTestResult(
            host=host, reachable=False, error=f"Connection error: {str(e)}"
        )
|
||||
|
||||
|
||||
@router.get("/network", response_model=NetworkDiagnostics)
async def network_diagnostics(
    auth: Optional[dict] = Depends(require_auth),
) -> NetworkDiagnostics:
    """Run network connectivity diagnostics.

    Tests DNS resolution and connectivity to common services.

    Args:
        auth: Authentication token (optional)

    Returns:
        NetworkDiagnostics with test results

    Raises:
        HTTPException: If diagnostics fail
    """
    try:
        logger.info("Running network diagnostics")

        dns_ok = await check_dns()

        # Probe a few well-known services concurrently.
        targets = (
            ("google.com", 80),
            ("cloudflare.com", 80),
            ("github.com", 443),
        )
        results = await asyncio.gather(
            *(test_host_connectivity(h, p) for h, p in targets)
        )

        # A single reachable host is enough to call the internet "up".
        online = any(r.reachable for r in results)

        logger.info(
            f"Network diagnostics complete: "
            f"DNS={dns_ok}, Internet={online}"
        )

        return NetworkDiagnostics(
            internet_connected=online,
            dns_working=dns_ok,
            tests=results,
        )

    except Exception as e:
        logger.exception("Failed to run network diagnostics")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to run network diagnostics: {str(e)}",
        ) from e
|
||||
|
||||
|
||||
@router.get("/system", response_model=Dict[str, str])
async def system_info(
    auth: Optional[dict] = Depends(require_auth),
) -> Dict[str, str]:
    """Get basic system information.

    Args:
        auth: Authentication token (optional)

    Returns:
        Dictionary with system information
    """
    # Imported lazily; only this endpoint needs them.
    import platform
    import sys

    info: Dict[str, str] = {
        "platform": platform.platform(),
        "python_version": sys.version,
        "architecture": platform.machine(),
        "processor": platform.processor(),
        "hostname": socket.gethostname(),
    }
    return info
|
||||
426
src/server/api/logging.py
Normal file
426
src/server/api/logging.py
Normal file
@@ -0,0 +1,426 @@
|
||||
"""Logging API endpoints for Aniworld.
|
||||
|
||||
This module provides endpoints for managing application logging
|
||||
configuration and accessing log files.
|
||||
"""
|
||||
import logging
|
||||
import os
|
||||
from pathlib import Path
|
||||
from typing import Dict, List, Optional
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
from fastapi.responses import FileResponse, PlainTextResponse
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
from src.server.models.config import LoggingConfig
|
||||
from src.server.services.config_service import ConfigServiceError, get_config_service
|
||||
from src.server.utils.dependencies import require_auth
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(prefix="/api/logging", tags=["logging"])
|
||||
|
||||
|
||||
class LogFileInfo(BaseModel):
    """Information about a log file."""

    # Base name of the file (no directory component).
    name: str = Field(..., description="File name")
    # On-disk size in bytes at listing time.
    size: int = Field(..., description="File size in bytes")
    # POSIX mtime; listings are sorted on this, newest first.
    modified: float = Field(..., description="Last modified timestamp")
    # Path relative to the logs root; used by the download/tail routes.
    path: str = Field(..., description="Relative path from logs directory")
|
||||
|
||||
|
||||
class LogCleanupResult(BaseModel):
    """Result of log cleanup operation."""

    # How many files the sweep removed.
    files_deleted: int = Field(..., description="Number of files deleted")
    # Total bytes reclaimed by the deletions.
    space_freed: int = Field(..., description="Space freed in bytes")
    # Per-file failure messages; empty when everything succeeded.
    errors: List[str] = Field(
        default_factory=list, description="Any errors encountered"
    )
|
||||
|
||||
|
||||
def get_logs_directory() -> Path:
    """Get the logs directory path.

    Returns:
        Path: Logs directory path

    Raises:
        HTTPException: If logs directory doesn't exist
    """
    # Probe the known locations in priority order and use the first
    # existing directory.
    candidates = (Path("logs"), Path("src/cli/logs"), Path("data/logs"))
    found = next(
        (p for p in candidates if p.exists() and p.is_dir()), None
    )
    if found is not None:
        return found

    # Nothing found: fall back to ./logs, creating it on demand.
    fallback = Path("logs")
    fallback.mkdir(parents=True, exist_ok=True)
    return fallback
|
||||
|
||||
|
||||
@router.get("/config", response_model=LoggingConfig)
def get_logging_config(
    auth: Optional[dict] = Depends(require_auth)
) -> LoggingConfig:
    """Get current logging configuration.

    Args:
        auth: Authentication token (optional for read operations)

    Returns:
        LoggingConfig: Current logging configuration

    Raises:
        HTTPException: If configuration cannot be loaded
    """
    try:
        service = get_config_service()
        loaded = service.load_config()
    except ConfigServiceError as e:
        logger.error(f"Failed to load logging config: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to load logging configuration: {e}",
        ) from e
    # Only the logging section of the full app config is exposed here.
    return loaded.logging
|
||||
|
||||
|
||||
@router.post("/config", response_model=LoggingConfig)
def update_logging_config(
    logging_config: LoggingConfig,
    auth: dict = Depends(require_auth),
) -> LoggingConfig:
    """Update logging configuration.

    Args:
        logging_config: New logging configuration
        auth: Authentication token (required)

    Returns:
        LoggingConfig: Updated logging configuration

    Raises:
        HTTPException: If configuration update fails
    """
    try:
        service = get_config_service()
        current = service.load_config()

        # Swap in the new logging section and persist it.
        current.logging = logging_config
        service.save_config(current)

        logger.info(
            f"Logging config updated by {auth.get('username', 'unknown')}"
        )

        # Make the new settings take effect immediately for this process.
        _apply_logging_config(logging_config)

        return logging_config
    except ConfigServiceError as e:
        logger.error(f"Failed to update logging config: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to update logging configuration: {e}",
        ) from e
|
||||
|
||||
|
||||
def _apply_logging_config(config: LoggingConfig) -> None:
    """Apply logging configuration to the Python logging system.

    Args:
        config: Logging configuration to apply
    """
    root_logger = logging.getLogger()
    root_logger.setLevel(config.level)

    # Without a file target there is nothing more to configure.
    if not config.file:
        return

    # Make sure the target directory exists before attaching a handler.
    Path(config.file).parent.mkdir(parents=True, exist_ok=True)

    # Drop previously attached file handlers so output does not keep
    # going to stale destinations (RotatingFileHandler is a subclass of
    # FileHandler, so it is removed too).
    for existing in list(root_logger.handlers):
        if isinstance(existing, logging.FileHandler):
            root_logger.removeHandler(existing)

    # Rotating handler when a size cap is configured, plain otherwise.
    if config.max_bytes and config.max_bytes > 0:
        from logging.handlers import RotatingFileHandler

        new_handler: logging.Handler = RotatingFileHandler(
            config.file,
            maxBytes=config.max_bytes,
            backupCount=config.backup_count or 3,
        )
    else:
        new_handler = logging.FileHandler(config.file)

    new_handler.setFormatter(
        logging.Formatter(
            "%(asctime)s - %(name)s - %(levelname)s - %(message)s"
        )
    )
    root_logger.addHandler(new_handler)
|
||||
|
||||
|
||||
@router.get("/files", response_model=List[LogFileInfo])
def list_log_files(
    auth: Optional[dict] = Depends(require_auth)
) -> List[LogFileInfo]:
    """List available log files.

    Args:
        auth: Authentication token (optional for read operations)

    Returns:
        List of log file information

    Raises:
        HTTPException: If logs directory cannot be accessed
    """
    try:
        logs_dir = get_logs_directory()

        def _describe(path: Path) -> LogFileInfo:
            # Single stat() call per file for size and mtime.
            st = path.stat()
            return LogFileInfo(
                name=path.name,
                size=st.st_size,
                modified=st.st_mtime,
                path=str(path.relative_to(logs_dir)),
            )

        entries = [
            _describe(p) for p in logs_dir.rglob("*.log*") if p.is_file()
        ]
        # Newest first.
        return sorted(entries, key=lambda info: info.modified, reverse=True)

    except Exception as e:
        logger.exception("Failed to list log files")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to list log files: {str(e)}",
        ) from e
|
||||
|
||||
|
||||
@router.get("/files/{filename:path}/download")
async def download_log_file(
    filename: str, auth: dict = Depends(require_auth)
) -> FileResponse:
    """Download a specific log file.

    Args:
        filename: Name or relative path of the log file
        auth: Authentication token (required)

    Returns:
        File download response

    Raises:
        HTTPException: If file not found or access denied
    """
    try:
        logs_dir = get_logs_directory()
        file_path = logs_dir / filename

        # Security: Ensure the file is within logs directory
        # (blocks ../ traversal out of the logs root).
        if not file_path.resolve().is_relative_to(logs_dir.resolve()):
            raise HTTPException(
                status_code=status.HTTP_403_FORBIDDEN,
                detail="Access denied to file outside logs directory",
            )

        if not file_path.exists() or not file_path.is_file():
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                # Fix: the requested name was missing from the message.
                detail=f"Log file not found: {filename}",
            )

        logger.info(
            f"Log file download: {filename} "
            f"by {auth.get('username', 'unknown')}"
        )

        return FileResponse(
            path=str(file_path),
            filename=file_path.name,
            media_type="text/plain",
        )

    except HTTPException:
        # Let deliberate 403/404 responses pass through unchanged.
        raise
    except Exception as e:
        logger.exception(f"Failed to download log file: {filename}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to download log file: {str(e)}",
        ) from e
|
||||
|
||||
|
||||
@router.get("/files/{filename:path}/tail")
async def tail_log_file(
    filename: str,
    lines: int = 100,
    auth: Optional[dict] = Depends(require_auth),
) -> PlainTextResponse:
    """Get the last N lines of a log file.

    Args:
        filename: Name or relative path of the log file
        lines: Number of lines to retrieve (default: 100)
        auth: Authentication token (optional)

    Returns:
        Plain text response with log file tail

    Raises:
        HTTPException: If file not found, access denied, or ``lines``
            is not positive
    """
    try:
        # Robustness fix: lines=0 made content[-0:] return the WHOLE
        # file, and negative values make no sense; reject both.
        if lines < 1:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="lines must be a positive integer",
            )

        logs_dir = get_logs_directory()
        file_path = logs_dir / filename

        # Security: Ensure the file is within logs directory
        if not file_path.resolve().is_relative_to(logs_dir.resolve()):
            raise HTTPException(
                status_code=status.HTTP_403_FORBIDDEN,
                detail="Access denied to file outside logs directory",
            )

        if not file_path.exists() or not file_path.is_file():
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                # Fix: the requested name was missing from the message.
                detail=f"Log file not found: {filename}",
            )

        # Negative slicing already handles files shorter than `lines`,
        # so the extra length check was redundant.
        with open(file_path, "r", encoding="utf-8", errors="ignore") as f:
            tail_lines = f.readlines()[-lines:]

        return PlainTextResponse(content="".join(tail_lines))

    except HTTPException:
        raise
    except Exception as e:
        logger.exception(f"Failed to tail log file: {filename}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to tail log file: {str(e)}",
        ) from e
|
||||
|
||||
|
||||
@router.post("/test", response_model=Dict[str, str])
async def test_logging(
    auth: dict = Depends(require_auth)
) -> Dict[str, str]:
    """Test logging by writing messages at all levels.

    Args:
        auth: Authentication token (required)

    Returns:
        Success message
    """
    try:
        test_logger = logging.getLogger("aniworld.test")

        # Emit one message per severity so handler/level configuration
        # can be verified end to end.
        for level_name in ("DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"):
            test_logger.log(
                getattr(logging, level_name), f"Test {level_name} message"
            )

        logger.info(
            f"Logging test triggered by {auth.get('username', 'unknown')}"
        )

        return {
            "status": "success",
            "message": "Test messages logged at all levels",
        }

    except Exception as e:
        logger.exception("Failed to test logging")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to test logging: {str(e)}",
        ) from e
|
||||
|
||||
|
||||
@router.post("/cleanup", response_model=LogCleanupResult)
async def cleanup_logs(
    max_age_days: int = 30, auth: dict = Depends(require_auth)
) -> LogCleanupResult:
    """Clean up old log files.

    Args:
        max_age_days: Maximum age in days for log files to keep
        auth: Authentication token (required)

    Returns:
        Cleanup result with statistics

    Raises:
        HTTPException: If cleanup fails
    """
    import time

    try:
        logs_dir = get_logs_directory()
        # Bug fix: ages were computed against the logs directory's own
        # mtime (os.path.getmtime(logs_dir)), which changes whenever a
        # file is created or removed and is not "now". Use the clock.
        current_time = time.time()
        max_age_seconds = max_age_days * 24 * 60 * 60

        files_deleted = 0
        space_freed = 0
        errors: List[str] = []

        for file_path in logs_dir.rglob("*.log*"):
            if not file_path.is_file():
                continue

            try:
                stat = file_path.stat()
                if current_time - stat.st_mtime > max_age_seconds:
                    # Capture the size before the file disappears.
                    file_size = stat.st_size
                    file_path.unlink()
                    files_deleted += 1
                    space_freed += file_size
                    logger.info(f"Deleted old log file: {file_path.name}")
            except Exception as e:
                # Best-effort sweep: record the failure and keep going.
                error_msg = f"Failed to delete {file_path.name}: {str(e)}"
                errors.append(error_msg)
                logger.warning(error_msg)

        logger.info(
            f"Log cleanup by {auth.get('username', 'unknown')}: "
            f"{files_deleted} files, {space_freed} bytes"
        )

        return LogCleanupResult(
            files_deleted=files_deleted,
            space_freed=space_freed,
            errors=errors,
        )

    except Exception as e:
        logger.exception("Failed to cleanup logs")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to cleanup logs: {str(e)}",
        ) from e
|
||||
130
src/server/api/scheduler.py
Normal file
130
src/server/api/scheduler.py
Normal file
@@ -0,0 +1,130 @@
|
||||
"""Scheduler API endpoints for Aniworld.
|
||||
|
||||
This module provides endpoints for managing scheduled tasks such as
|
||||
automatic anime library rescans.
|
||||
"""
|
||||
import logging
|
||||
from typing import Dict, Optional
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, status
|
||||
|
||||
from src.server.models.config import SchedulerConfig
|
||||
from src.server.services.config_service import ConfigServiceError, get_config_service
|
||||
from src.server.utils.dependencies import require_auth
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
router = APIRouter(prefix="/api/scheduler", tags=["scheduler"])
|
||||
|
||||
|
||||
@router.get("/config", response_model=SchedulerConfig)
def get_scheduler_config(
    auth: Optional[dict] = Depends(require_auth)
) -> SchedulerConfig:
    """Get current scheduler configuration.

    Args:
        auth: Authentication token (optional for read operations)

    Returns:
        SchedulerConfig: Current scheduler configuration

    Raises:
        HTTPException: If configuration cannot be loaded
    """
    try:
        service = get_config_service()
        loaded = service.load_config()
    except ConfigServiceError as e:
        logger.error(f"Failed to load scheduler config: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to load scheduler configuration: {e}",
        ) from e
    # Only the scheduler section of the full app config is exposed here.
    return loaded.scheduler
|
||||
|
||||
|
||||
@router.post("/config", response_model=SchedulerConfig)
def update_scheduler_config(
    scheduler_config: SchedulerConfig,
    auth: dict = Depends(require_auth),
) -> SchedulerConfig:
    """Update scheduler configuration.

    Args:
        scheduler_config: New scheduler configuration
        auth: Authentication token (required)

    Returns:
        SchedulerConfig: Updated scheduler configuration

    Raises:
        HTTPException: If configuration update fails
    """
    try:
        service = get_config_service()
        current = service.load_config()

        # Swap in the new scheduler section and persist it.
        current.scheduler = scheduler_config
        service.save_config(current)

        logger.info(
            f"Scheduler config updated by {auth.get('username', 'unknown')}"
        )

        return scheduler_config
    except ConfigServiceError as e:
        logger.error(f"Failed to update scheduler config: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to update scheduler configuration: {e}",
        ) from e
|
||||
|
||||
|
||||
@router.post("/trigger-rescan", response_model=Dict[str, str])
async def trigger_rescan(auth: dict = Depends(require_auth)) -> Dict[str, str]:
    """Manually trigger a library rescan.

    This endpoint triggers an immediate anime library rescan, bypassing
    the scheduler interval.

    Args:
        auth: Authentication token (required)

    Returns:
        Dict with success message

    Raises:
        HTTPException: If rescan cannot be triggered
    """
    try:
        # Imported lazily to avoid a circular dependency with the app
        # module.
        from src.server.fastapi_app import get_series_app

        if not get_series_app():
            raise HTTPException(
                status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
                detail="SeriesApp not initialized",
            )

        logger.info(
            f"Manual rescan triggered by {auth.get('username', 'unknown')}"
        )

        # Delegate to the existing rescan implementation in the anime API.
        from src.server.api.anime import trigger_rescan as do_rescan

        return await do_rescan()

    except HTTPException:
        # Let the 503 above (and any other deliberate HTTP error) pass
        # through untouched.
        raise
    except Exception as e:
        logger.exception("Failed to trigger manual rescan")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to trigger rescan: {str(e)}",
        ) from e
|
||||
Reference in New Issue
Block a user