- Added NFOConfig model with TMDB API key, auto-create, media downloads, image size settings - Created NFO settings section in UI with form fields and validation - Implemented nfo-config.js module for loading, saving, and testing TMDB connection - Added TMDB API key validation endpoint (POST /api/config/tmdb/validate) - Integrated NFO config into AppConfig and ConfigUpdate models - Added 5 unit tests for NFO config model validation - Added API test for TMDB validation endpoint - All 16 config model tests passing, all 10 config API tests passing - Documented in docs/task7_status.md (100% complete)
177 lines
5.9 KiB
Python
177 lines
5.9 KiB
Python
from typing import Dict, List, Optional
|
|
|
|
from pydantic import BaseModel, Field, ValidationError, field_validator
|
|
|
|
|
|
class SchedulerConfig(BaseModel):
    """Settings that control the background scheduler."""

    # Master switch for the scheduler component.
    enabled: bool = Field(
        description="Whether the scheduler is enabled",
        default=True,
    )
    # How often the scheduler fires; must be a positive number of minutes.
    interval_minutes: int = Field(
        description="Scheduler interval in minutes",
        default=60,
        ge=1,
    )
|
|
|
|
|
|
class BackupConfig(BaseModel):
    """Settings for automatic backups of application data."""

    # Backups are opt-in: disabled unless explicitly enabled.
    enabled: bool = Field(
        description="Whether backups are enabled",
        default=False,
    )
    # Destination directory for backup archives; may be None
    # (AppConfig.validate_config flags enabled-but-pathless setups).
    path: Optional[str] = Field(
        description="Path to store backups",
        default="data/backups",
    )
    # Retention window in days; non-negative.
    keep_days: int = Field(
        description="How many days to keep backups",
        default=30,
        ge=0,
    )
|
|
|
|
|
|
class LoggingConfig(BaseModel):
    """Logging configuration with basic validation for level."""

    # Symbolic logging level; normalised to upper case by the validator.
    level: str = Field(description="Logging level", default="INFO")
    # When set, log output is also written to this file path.
    file: Optional[str] = Field(
        description="Optional file path for log output",
        default=None,
    )
    # Rotation threshold; None leaves rotation unconfigured.
    max_bytes: Optional[int] = Field(
        description="Max bytes per log file for rotation",
        default=None,
        ge=0,
    )
    # How many rotated files to retain.
    backup_count: Optional[int] = Field(
        description="Number of rotated log files to keep",
        default=3,
        ge=0,
    )

    @field_validator("level")
    @classmethod
    def validate_level(cls, v: str) -> str:
        """Upper-case the level and reject names outside the standard set."""
        normalized = (v or "").upper()
        if normalized in {"DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"}:
            return normalized
        raise ValueError(f"invalid logging level: {v}")
|
|
|
|
|
|
class NFOConfig(BaseModel):
    """NFO metadata configuration."""

    # Credential for the TMDB metadata service; None disables scraping.
    tmdb_api_key: Optional[str] = Field(
        description="TMDB API key for metadata scraping",
        default=None,
    )
    # Whether NFO files are generated automatically for new series.
    auto_create: bool = Field(
        description="Auto-create NFO files for new series",
        default=False,
    )
    # Whether existing NFO files are refreshed during a rescan.
    update_on_scan: bool = Field(
        description="Update existing NFO files on rescan",
        default=False,
    )
    # Per-artwork download toggles.
    download_poster: bool = Field(
        description="Download poster.jpg",
        default=True,
    )
    download_logo: bool = Field(
        description="Download logo.png",
        default=True,
    )
    download_fanart: bool = Field(
        description="Download fanart.jpg",
        default=True,
    )
    # Image resolution requested from TMDB; validated below.
    image_size: str = Field(
        description="Image size (original or w500)",
        default="original",
    )

    @field_validator("image_size")
    @classmethod
    def validate_image_size(cls, v: str) -> str:
        """Lower-case the size and accept only the two supported values."""
        normalized = (v or "").lower()
        if normalized not in ("original", "w500"):
            raise ValueError(
                f"invalid image size: {v}. Must be 'original' or 'w500'"
            )
        return normalized
|
|
|
|
|
|
class ValidationResult(BaseModel):
    """Result of a configuration validation attempt."""

    # True when no validation errors were collected.
    valid: bool = Field(..., description="Whether the configuration is valid")
    # Human-readable messages; empty when `valid` is True.
    errors: List[str] = Field(
        # `list` is the idiomatic factory; the original wrapped it in a
        # needless `lambda: []`.
        default_factory=list,
        description="List of validation error messages",
    )
|
|
|
|
|
|
class AppConfig(BaseModel):
    """Top-level application configuration model used by the web layer.

    This model intentionally keeps things small and serializable to JSON.
    """

    name: str = Field(default="Aniworld", description="Application name")
    data_dir: str = Field(default="data", description="Base data directory")
    scheduler: SchedulerConfig = Field(default_factory=SchedulerConfig)
    logging: LoggingConfig = Field(default_factory=LoggingConfig)
    backup: BackupConfig = Field(default_factory=BackupConfig)
    nfo: NFOConfig = Field(default_factory=NFOConfig)
    other: Dict[str, object] = Field(
        default_factory=dict, description="Arbitrary other settings"
    )

    def validate_config(self) -> ValidationResult:
        """Perform light-weight validation and return a ValidationResult.

        This method intentionally avoids performing IO (no filesystem checks)
        so it remains fast and side-effect free for unit tests and API use.
        """
        errors: List[str] = []

        # Pydantic field validators already run on construction; re-run a
        # quick check for common constraints (e.g. fields mutated after
        # construction) and collect messages.
        try:
            # Reconstruct to ensure nested validators are executed.
            AppConfig(**self.model_dump())
        except ValidationError as exc:
            for e in exc.errors():
                loc = ".".join(str(x) for x in e.get("loc", []))
                errors.append(f"{loc}: {e.get('msg')}")

        # backup.path must be set when backups are enabled.  Typed attribute
        # access replaces the original's second model_dump()/dict lookup.
        if self.backup.enabled and not self.backup.path:
            errors.append(
                "backup.path must be set when backups.enabled is true"
            )

        return ValidationResult(valid=not errors, errors=errors)
|
|
|
|
|
|
class ConfigUpdate(BaseModel):
    """Partial configuration payload; only the supplied sections change."""

    scheduler: Optional[SchedulerConfig] = None
    logging: Optional[LoggingConfig] = None
    backup: Optional[BackupConfig] = None
    nfo: Optional[NFOConfig] = None
    other: Optional[Dict[str, object]] = None

    def apply_to(self, current: AppConfig) -> AppConfig:
        """Return a new AppConfig with updates applied to the current config.

        Performs a shallow merge for `other`.
        """
        data = current.model_dump()
        # Structured sections are replaced wholesale when provided.
        for section in ("scheduler", "logging", "backup", "nfo"):
            update = getattr(self, section)
            if update is not None:
                data[section] = update.model_dump()
        # `other` is merged key-by-key instead of being replaced.
        if self.other is not None:
            combined = dict(current.other or {})
            combined.update(self.other)
            data["other"] = combined
        return AppConfig(**data)
|