Files
Aniworld/src/server/models/config.py
Lukas 0265ae2a70 feat: cron-based scheduler with auto-download after rescan
- Replace asyncio sleep loop with APScheduler AsyncIOScheduler + CronTrigger
- Add schedule_time (HH:MM), schedule_days (days of week), auto_download_after_rescan fields to SchedulerConfig
- Add _auto_download_missing() to queue missing episodes after rescan
- Reload config live via reload_config(SchedulerConfig) without restart
- Update GET/POST /api/scheduler/config to return {success, config, status} envelope
- Add day-of-week pill toggles to Settings -> Scheduler section in UI
- Update JS loadSchedulerConfig / saveSchedulerConfig for new API shape
- Add 29 unit tests for SchedulerConfig model, 18 unit tests for SchedulerService
- Rewrite 23 endpoint tests and 36 integration tests for APScheduler behaviour
- Coverage: 96% api/scheduler, 95% scheduler_service, 90% total (>= 80% threshold)
- Update docs: API.md, CONFIGURATION.md, features.md, CHANGELOG.md
2026-02-21 08:56:17 +01:00

228 lines
7.9 KiB
Python
Raw Blame History

This file contains ambiguous Unicode characters
This file contains Unicode characters that might be confused with other characters. If you think that this is intentional, you can safely ignore this warning. Use the Escape button to reveal them.
import re
from typing import Dict, List, Optional

from pydantic import BaseModel, Field, ValidationError, field_validator
# Valid day-of-week abbreviations accepted in ``schedule_days``; a frozenset
# gives O(1) membership tests in the validator.
_VALID_DAYS = frozenset(["mon", "tue", "wed", "thu", "fri", "sat", "sun"])
# Ordered list of all days, used to build the default for ``schedule_days``.
_ALL_DAYS = ["mon", "tue", "wed", "thu", "fri", "sat", "sun"]
class SchedulerConfig(BaseModel):
    """Scheduler related configuration.

    Cron-based scheduling is configured via ``schedule_time`` and
    ``schedule_days``. The legacy ``interval_minutes`` field is kept for
    backward compatibility but is **deprecated** and ignored when
    ``schedule_time`` is set.
    """

    enabled: bool = Field(
        default=True, description="Whether the scheduler is enabled"
    )
    interval_minutes: int = Field(
        default=60,
        ge=1,
        description="[Deprecated] Scheduler interval in minutes. "
        "Use schedule_time + schedule_days instead.",
    )
    schedule_time: str = Field(
        default="03:00",
        description="Daily run time in 24-hour HH:MM format (e.g. '03:00')",
    )
    schedule_days: List[str] = Field(
        default_factory=lambda: list(_ALL_DAYS),
        description="Days of week to run the scheduler (3-letter lowercase "
        "abbreviations: mon, tue, wed, thu, fri, sat, sun). "
        "Empty list means disabled.",
    )
    auto_download_after_rescan: bool = Field(
        default=False,
        description="Automatically queue and start downloads for all missing "
        "episodes after a scheduled rescan completes.",
    )

    @field_validator("schedule_time")
    @classmethod
    def validate_schedule_time(cls, v: str) -> str:
        """Validate HH:MM format within 00:00-23:59.

        Raises:
            ValueError: If ``v`` is not a valid 24-hour HH:MM string.
        """
        # Hours 00-23 and minutes 00-59; ``v or ""`` guards against None.
        # ``re`` is imported at module level (was previously re-imported on
        # every validation call).
        if not re.fullmatch(r"([01]\d|2[0-3]):[0-5]\d", v or ""):
            raise ValueError(
                f"Invalid schedule_time '{v}'. "
                "Expected HH:MM in 24-hour format (00:00-23:59)."
            )
        return v

    @field_validator("schedule_days")
    @classmethod
    def validate_schedule_days(cls, v: List[str]) -> List[str]:
        """Validate each entry is a valid 3-letter lowercase day abbreviation.

        Raises:
            ValueError: If any entry is not one of mon..sun.
        """
        invalid = [d for d in v if d not in _VALID_DAYS]
        if invalid:
            raise ValueError(
                f"Invalid day(s) in schedule_days: {invalid}. "
                f"Allowed values: {sorted(_VALID_DAYS)}"
            )
        return v
class BackupConfig(BaseModel):
    """Configuration for automatic backups of application data."""

    # Master switch. When True, ``path`` must also be set — enforced by
    # AppConfig.validate_config, not by this model.
    enabled: bool = Field(
        default=False, description="Whether backups are enabled"
    )
    # Target directory for backup files; relative paths resolve against the
    # process working directory (no filesystem checks are performed here).
    path: Optional[str] = Field(
        default="data/backups", description="Path to store backups"
    )
    # Retention window in days; must be >= 0.
    keep_days: int = Field(
        default=30, ge=0, description="How many days to keep backups"
    )
class LoggingConfig(BaseModel):
    """Logging configuration with basic validation for level."""

    level: str = Field(
        default="INFO", description="Logging level"
    )
    file: Optional[str] = Field(
        default=None, description="Optional file path for log output"
    )
    max_bytes: Optional[int] = Field(
        default=None, ge=0, description="Max bytes per log file for rotation"
    )
    backup_count: Optional[int] = Field(
        default=3, ge=0, description="Number of rotated log files to keep"
    )

    @field_validator("level")
    @classmethod
    def validate_level(cls, v: str) -> str:
        """Upper-case the level and reject names outside the standard set."""
        # Accept any input casing; None is treated as empty (and rejected).
        normalised = (v or "").upper()
        if normalised in {"DEBUG", "INFO", "WARNING", "ERROR", "CRITICAL"}:
            return normalised
        raise ValueError(f"invalid logging level: {v}")
class NFOConfig(BaseModel):
    """NFO metadata configuration."""

    tmdb_api_key: Optional[str] = Field(
        default=None, description="TMDB API key for metadata scraping"
    )
    auto_create: bool = Field(
        default=False, description="Auto-create NFO files for new series"
    )
    update_on_scan: bool = Field(
        default=False, description="Update existing NFO files on rescan"
    )
    download_poster: bool = Field(
        default=True, description="Download poster.jpg"
    )
    download_logo: bool = Field(
        default=True, description="Download logo.png"
    )
    download_fanart: bool = Field(
        default=True, description="Download fanart.jpg"
    )
    image_size: str = Field(
        default="original", description="Image size (original or w500)"
    )

    @field_validator("image_size")
    @classmethod
    def validate_image_size(cls, v: str) -> str:
        """Lower-case the size and ensure it is 'original' or 'w500'."""
        # Accept any input casing; None becomes "" and is rejected below.
        normalised = (v or "").lower()
        if normalised not in ("original", "w500"):
            raise ValueError(
                f"invalid image size: {v}. Must be 'original' or 'w500'"
            )
        return normalised
class ValidationResult(BaseModel):
    """Result of a configuration validation attempt."""

    # True when no validation errors were collected.
    valid: bool = Field(..., description="Whether the configuration is valid")
    # ``list`` itself is the idiomatic default factory (was ``lambda: []``).
    errors: List[str] = Field(
        default_factory=list,
        description="List of validation error messages"
    )
class AppConfig(BaseModel):
    """Top-level application configuration model used by the web layer.

    This model intentionally keeps things small and serializable to JSON.
    """

    name: str = Field(default="Aniworld", description="Application name")
    data_dir: str = Field(default="data", description="Base data directory")
    scheduler: SchedulerConfig = Field(
        default_factory=SchedulerConfig
    )
    logging: LoggingConfig = Field(default_factory=LoggingConfig)
    backup: BackupConfig = Field(default_factory=BackupConfig)
    nfo: NFOConfig = Field(default_factory=NFOConfig)
    other: Dict[str, object] = Field(
        default_factory=dict, description="Arbitrary other settings"
    )

    def validate_config(self) -> ValidationResult:
        """Perform light-weight validation and return a ValidationResult.

        This method intentionally avoids performing IO (no filesystem checks)
        so it remains fast and side-effect free for unit tests and API use.

        Returns:
            ValidationResult: ``valid`` is True when no errors were found;
            ``errors`` holds one human-readable message per problem.
        """
        errors: List[str] = []
        # Dump once and reuse — the original called self.model_dump() twice
        # (once for the re-validation, once for the backup check).
        data = self.model_dump()
        # Pydantic field validators already run on construction; re-run a
        # quick check for common constraints and collect messages.
        try:
            # Reconstruct to ensure nested validators are executed
            AppConfig(**data)
        except ValidationError as exc:
            for e in exc.errors():
                loc = ".".join(str(x) for x in e.get("loc", []))
                errors.append(f"{loc}: {e.get('msg')}")
        # backup.path must be set when backups are enabled
        backup_data = data.get("backup", {})
        if backup_data.get("enabled") and not backup_data.get("path"):
            errors.append(
                "backup.path must be set when backups.enabled is true"
            )
        return ValidationResult(valid=not errors, errors=errors)
class ConfigUpdate(BaseModel):
    """Partial configuration update; fields left as ``None`` are untouched."""

    scheduler: Optional[SchedulerConfig] = None
    logging: Optional[LoggingConfig] = None
    backup: Optional[BackupConfig] = None
    nfo: Optional[NFOConfig] = None
    other: Optional[Dict[str, object]] = None

    def apply_to(self, current: AppConfig) -> AppConfig:
        """Return a new AppConfig with updates applied to the current config.

        Performs a shallow merge for `other`.
        """
        data = current.model_dump()
        # Each provided sub-config replaces the current section wholesale.
        for section in ("scheduler", "logging", "backup", "nfo"):
            update = getattr(self, section)
            if update is not None:
                data[section] = update.model_dump()
        # ``other`` is merged key-by-key rather than replaced.
        if self.other is not None:
            data["other"] = {**(current.other or {}), **self.other}
        return AppConfig(**data)