feat: cron-based scheduler with auto-download after rescan
- Replace asyncio sleep loop with APScheduler AsyncIOScheduler + CronTrigger
- Add schedule_time (HH:MM), schedule_days (days of week), auto_download_after_rescan fields to SchedulerConfig
- Add _auto_download_missing() to queue missing episodes after rescan
- Reload config live via reload_config(SchedulerConfig) without restart
- Update GET/POST /api/scheduler/config to return {success, config, status} envelope
- Add day-of-week pill toggles to Settings -> Scheduler section in UI
- Update JS loadSchedulerConfig / saveSchedulerConfig for new API shape
- Add 29 unit tests for SchedulerConfig model, 18 unit tests for SchedulerService
- Rewrite 23 endpoint tests and 36 integration tests for APScheduler behaviour
- Coverage: 96% api/scheduler, 95% scheduler_service, 90% total (>= 80% threshold)
- Update docs: API.md, CONFIGURATION.md, features.md, CHANGELOG.md
This commit is contained in:
115
docs/API.md
115
docs/API.md
@@ -660,7 +660,10 @@ Return current application configuration.
|
|||||||
"data_dir": "data",
|
"data_dir": "data",
|
||||||
"scheduler": {
|
"scheduler": {
|
||||||
"enabled": true,
|
"enabled": true,
|
||||||
"interval_minutes": 60
|
"interval_minutes": 60,
|
||||||
|
"schedule_time": "03:00",
|
||||||
|
"schedule_days": ["mon", "tue", "wed", "thu", "fri", "sat", "sun"],
|
||||||
|
"auto_download_after_rescan": false
|
||||||
},
|
},
|
||||||
"logging": {
|
"logging": {
|
||||||
"level": "INFO",
|
"level": "INFO",
|
||||||
@@ -691,7 +694,9 @@ Apply an update to the configuration.
|
|||||||
{
|
{
|
||||||
"scheduler": {
|
"scheduler": {
|
||||||
"enabled": true,
|
"enabled": true,
|
||||||
"interval_minutes": 30
|
"interval_minutes": 60,
|
||||||
|
"schedule_time": "06:30",
|
||||||
|
"schedule_days": ["mon", "wed", "fri"]
|
||||||
},
|
},
|
||||||
"logging": {
|
"logging": {
|
||||||
"level": "DEBUG"
|
"level": "DEBUG"
|
||||||
@@ -1177,47 +1182,21 @@ Source: [src/server/api/nfo.py](../src/server/api/nfo.py#L637-L684)
|
|||||||
|
|
||||||
Prefix: `/api/scheduler`
|
Prefix: `/api/scheduler`
|
||||||
|
|
||||||
Source: [src/server/api/scheduler.py](../src/server/api/scheduler.py#L1-L122)
|
All GET/POST config responses share the same envelope:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"success": true,
|
||||||
|
"config": { ... },
|
||||||
|
"status": { ... }
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Source: [src/server/api/scheduler.py](../src/server/api/scheduler.py)
|
||||||
|
|
||||||
### GET /api/scheduler/config
|
### GET /api/scheduler/config
|
||||||
|
|
||||||
Get current scheduler configuration.
|
Get current scheduler configuration and runtime status.
|
||||||
|
|
||||||
**Authentication:** Required
|
|
||||||
|
|
||||||
**Response (200 OK):**
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"enabled": true,
|
|
||||||
"interval_minutes": 60
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
Source: [src/server/api/scheduler.py](../src/server/api/scheduler.py#L22-L42)
|
|
||||||
|
|
||||||
### POST /api/scheduler/config
|
|
||||||
|
|
||||||
Update scheduler configuration.
|
|
||||||
|
|
||||||
**Authentication:** Required
|
|
||||||
|
|
||||||
**Request Body:**
|
|
||||||
|
|
||||||
```json
|
|
||||||
{
|
|
||||||
"enabled": true,
|
|
||||||
"interval_minutes": 30
|
|
||||||
}
|
|
||||||
```
|
|
||||||
|
|
||||||
**Response (200 OK):** Updated scheduler configuration
|
|
||||||
|
|
||||||
Source: [src/server/api/scheduler.py](../src/server/api/scheduler.py#L45-L75)
|
|
||||||
|
|
||||||
### POST /api/scheduler/trigger-rescan
|
|
||||||
|
|
||||||
Manually trigger a library rescan.
|
|
||||||
|
|
||||||
**Authentication:** Required
|
**Authentication:** Required
|
||||||
|
|
||||||
@@ -1226,11 +1205,65 @@ Manually trigger a library rescan.
|
|||||||
```json
|
```json
|
||||||
{
|
{
|
||||||
"success": true,
|
"success": true,
|
||||||
|
"config": {
|
||||||
|
"enabled": true,
|
||||||
|
"interval_minutes": 60,
|
||||||
|
"schedule_time": "03:00",
|
||||||
|
"schedule_days": ["mon", "tue", "wed", "thu", "fri", "sat", "sun"],
|
||||||
|
"auto_download_after_rescan": false
|
||||||
|
},
|
||||||
|
"status": {
|
||||||
|
"is_running": true,
|
||||||
|
"next_run": "2025-07-15T03:00:00+00:00",
|
||||||
|
"last_run": null,
|
||||||
|
"scan_in_progress": false
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### POST /api/scheduler/config
|
||||||
|
|
||||||
|
Update scheduler configuration and apply changes immediately.
|
||||||
|
|
||||||
|
**Authentication:** Required
|
||||||
|
|
||||||
|
**Request Body (all fields optional, uses model defaults):**
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"enabled": true,
|
||||||
|
"schedule_time": "06:30",
|
||||||
|
"schedule_days": ["mon", "wed", "fri"],
|
||||||
|
"auto_download_after_rescan": true
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Response (200 OK):** Same envelope as GET, reflecting saved values.
|
||||||
|
|
||||||
|
**Validation errors (422):**
|
||||||
|
|
||||||
|
- `schedule_time` must match `HH:MM` (00:00–23:59)
|
||||||
|
- `schedule_days` entries must be one of `mon tue wed thu fri sat sun`
|
||||||
|
- `interval_minutes` must be ≥ 1
|
||||||
|
|
||||||
|
### POST /api/scheduler/trigger-rescan
|
||||||
|
|
||||||
|
Manually trigger a library rescan (and auto-download if configured).
|
||||||
|
|
||||||
|
**Authentication:** Required
|
||||||
|
|
||||||
|
**Response (200 OK):**
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
"message": "Rescan started successfully"
|
"message": "Rescan started successfully"
|
||||||
}
|
}
|
||||||
```
|
```
|
||||||
|
|
||||||
Source: [src/server/api/scheduler.py](../src/server/api/scheduler.py#L78-L122)
|
**Error responses:**
|
||||||
|
|
||||||
|
- `503` — SeriesApp not yet initialised
|
||||||
|
- `500` — Rescan failed unexpectedly
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
|
|||||||
@@ -39,6 +39,23 @@ This changelog follows [Keep a Changelog](https://keepachangelog.com/) principle
|
|||||||
|
|
||||||
## [Unreleased] - 2026-01-18
|
## [Unreleased] - 2026-01-18
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- **Cron-based Scheduler**: Replaced the asyncio sleep-loop with APScheduler's `AsyncIOScheduler + CronTrigger`
|
||||||
|
- Schedule rescans at a specific **time of day** (`HH:MM`) on selected **days of the week**
|
||||||
|
- New `SchedulerConfig` fields: `schedule_time` (default `"03:00"`), `schedule_days` (default all 7), `auto_download_after_rescan` (default `false`)
|
||||||
|
- Old `interval_minutes` field retained for backward compatibility
|
||||||
|
- **Auto-download after rescan**: When `auto_download_after_rescan` is enabled, missing episodes are automatically queued for download after each scheduled rescan
|
||||||
|
- **Day-of-week UI**: New day-of-week pill toggles (Mon–Sun) in the Settings → Scheduler section
|
||||||
|
- **Live config reload**: POST `/api/scheduler/config` reschedules the APScheduler job without restarting the application
|
||||||
|
- **Enriched API response**: GET/POST `/api/scheduler/config` now returns `{"success", "config", "status"}` envelope including `next_run`, `last_run`, and `scan_in_progress`
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- Scheduler API response format: previously returned flat config; now returns `{"success": true, "config": {...}, "status": {...}}`
|
||||||
|
- `reload_config()` is now a synchronous method accepting a `SchedulerConfig` argument (previously async, no arguments)
|
||||||
|
- Dependencies: added `APScheduler>=3.10.4` to `requirements.txt`
|
||||||
|
|
||||||
### Fixed
|
### Fixed
|
||||||
|
|
||||||
- **Series Visibility**: Fixed issue where series added to the database weren't appearing in the API/UI
|
- **Series Visibility**: Fixed issue where series added to the database weren't appearing in the API/UI
|
||||||
|
|||||||
@@ -114,7 +114,10 @@ Location: `data/config.json`
|
|||||||
"data_dir": "data",
|
"data_dir": "data",
|
||||||
"scheduler": {
|
"scheduler": {
|
||||||
"enabled": true,
|
"enabled": true,
|
||||||
"interval_minutes": 60
|
"interval_minutes": 60,
|
||||||
|
"schedule_time": "03:00",
|
||||||
|
"schedule_days": ["mon", "tue", "wed", "thu", "fri", "sat", "sun"],
|
||||||
|
"auto_download_after_rescan": false
|
||||||
},
|
},
|
||||||
"logging": {
|
"logging": {
|
||||||
"level": "INFO",
|
"level": "INFO",
|
||||||
@@ -161,12 +164,17 @@ Source: [src/server/models/config.py](../src/server/models/config.py#L62-L66)
|
|||||||
|
|
||||||
### 4.2 Scheduler Settings
|
### 4.2 Scheduler Settings
|
||||||
|
|
||||||
Controls automatic library rescanning.
|
Controls automatic cron-based library rescanning (powered by APScheduler).
|
||||||
|
|
||||||
| Field | Type | Default | Description |
|
| Field | Type | Default | Description |
|
||||||
| ---------------------------- | ---- | ------- | -------------------------------------------- |
|
| -------------------------------------- | ------------ | --------------------------------------------- | -------------------------------------------------------------------- |
|
||||||
| `scheduler.enabled` | bool | `true` | Enable/disable automatic scans. |
|
| `scheduler.enabled` | bool | `true` | Enable/disable automatic scans. |
|
||||||
| `scheduler.interval_minutes` | int | `60` | Minutes between automatic scans. Minimum: 1. |
|
| `scheduler.interval_minutes` | int | `60` | Legacy field kept for backward compatibility. Minimum: 1. |
|
||||||
|
| `scheduler.schedule_time` | string | `"03:00"` | Daily run time in 24-h `HH:MM` format. |
|
||||||
|
| `scheduler.schedule_days` | list[string] | `["mon","tue","wed","thu","fri","sat","sun"]` | Days of the week to run the scan. Empty list disables the cron job. |
|
||||||
|
| `scheduler.auto_download_after_rescan` | bool | `false` | Automatically queue missing episodes for download after each rescan. |
|
||||||
|
|
||||||
|
Valid day abbreviations: `mon`, `tue`, `wed`, `thu`, `fri`, `sat`, `sun`.
|
||||||
|
|
||||||
Source: [src/server/models/config.py](../src/server/models/config.py#L5-L12)
|
Source: [src/server/models/config.py](../src/server/models/config.py#L5-L12)
|
||||||
|
|
||||||
|
|||||||
@@ -19,7 +19,7 @@ The application now features a comprehensive configuration system that allows us
|
|||||||
- Organized into logical sections with clear labels and help text
|
- Organized into logical sections with clear labels and help text
|
||||||
- Real-time saving with immediate feedback
|
- Real-time saving with immediate feedback
|
||||||
- Configuration validation to prevent invalid settings
|
- Configuration validation to prevent invalid settings
|
||||||
- Full control over scheduler interval, logging options, and backup settings
|
- Full control over cron-based scheduler (time, days of week, auto-download), logging options, and backup settings
|
||||||
|
|
||||||
---
|
---
|
||||||
|
|
||||||
@@ -35,14 +35,14 @@ The application now features a comprehensive configuration system that allows us
|
|||||||
- General Settings: Application name and data directory configuration
|
- General Settings: Application name and data directory configuration
|
||||||
- Security Settings: Master password setup with strength indicator
|
- Security Settings: Master password setup with strength indicator
|
||||||
- Anime Directory: Primary directory path for anime storage
|
- Anime Directory: Primary directory path for anime storage
|
||||||
- Scheduler Settings: Enable/disable scheduler and configure check interval (in minutes)
|
- Scheduler Settings: Enable/disable scheduler, configure daily run time, select days of week, and optionally auto-download missing episodes after rescan
|
||||||
- Logging Settings: Configure log level, file path, file size limits, and backup count
|
- Logging Settings: Configure log level, file path, file size limits, and backup count
|
||||||
- Backup Settings: Enable automatic backups with configurable path and retention period
|
- Backup Settings: Enable automatic backups with configurable path and retention period
|
||||||
- NFO Settings: TMDB API key, auto-creation options, and media file download preferences
|
- NFO Settings: TMDB API key, auto-creation options, and media file download preferences
|
||||||
- **Enhanced Settings/Config Modal**: Comprehensive configuration interface accessible from main page:
|
- **Enhanced Settings/Config Modal**: Comprehensive configuration interface accessible from main page:
|
||||||
- General Settings: Edit application name and data directory
|
- General Settings: Edit application name and data directory
|
||||||
- Anime Directory: Modify anime storage location with browse functionality
|
- Anime Directory: Modify anime storage location with browse functionality
|
||||||
- Scheduler Configuration: Enable/disable and configure check interval for automated operations
|
- Scheduler Configuration: Enable/disable, set cron run time (`HH:MM`), select active days of the week, and toggle auto-download after rescan
|
||||||
- Logging Configuration: Full control over logging level, file rotation, and backup count
|
- Logging Configuration: Full control over logging level, file rotation, and backup count
|
||||||
- Backup Configuration: Configure automatic backup settings including path and retention
|
- Backup Configuration: Configure automatic backup settings including path and retention
|
||||||
- NFO Settings: Complete control over TMDB integration and media file downloads
|
- NFO Settings: Complete control over TMDB integration and media file downloads
|
||||||
|
|||||||
@@ -17,4 +17,5 @@ sqlalchemy>=2.0.35
|
|||||||
aiosqlite>=0.19.0
|
aiosqlite>=0.19.0
|
||||||
aiohttp>=3.9.0
|
aiohttp>=3.9.0
|
||||||
lxml>=5.0.0
|
lxml>=5.0.0
|
||||||
pillow>=10.0.0
|
pillow>=10.0.0
|
||||||
|
APScheduler>=3.10.4
|
||||||
@@ -4,12 +4,13 @@ This module provides endpoints for managing scheduled tasks such as
|
|||||||
automatic anime library rescans.
|
automatic anime library rescans.
|
||||||
"""
|
"""
|
||||||
import logging
|
import logging
|
||||||
from typing import Dict, Optional
|
from typing import Any, Dict, Optional
|
||||||
|
|
||||||
from fastapi import APIRouter, Depends, HTTPException, status
|
from fastapi import APIRouter, Depends, HTTPException, status
|
||||||
|
|
||||||
from src.server.models.config import SchedulerConfig
|
from src.server.models.config import SchedulerConfig
|
||||||
from src.server.services.config_service import ConfigServiceError, get_config_service
|
from src.server.services.config_service import ConfigServiceError, get_config_service
|
||||||
|
from src.server.services.scheduler_service import get_scheduler_service
|
||||||
from src.server.utils.dependencies import require_auth
|
from src.server.utils.dependencies import require_auth
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
logger = logging.getLogger(__name__)
|
||||||
@@ -17,78 +18,105 @@ logger = logging.getLogger(__name__)
|
|||||||
router = APIRouter(prefix="/api/scheduler", tags=["scheduler"])
|
router = APIRouter(prefix="/api/scheduler", tags=["scheduler"])
|
||||||
|
|
||||||
|
|
||||||
@router.get("/config", response_model=SchedulerConfig)
|
def _build_response(config: SchedulerConfig) -> Dict[str, Any]:
|
||||||
def get_scheduler_config(
|
"""Build a standardised GET/POST response combining config + runtime status."""
|
||||||
auth: Optional[dict] = Depends(require_auth)
|
scheduler_service = get_scheduler_service()
|
||||||
) -> SchedulerConfig:
|
runtime = scheduler_service.get_status()
|
||||||
"""Get current scheduler configuration.
|
|
||||||
|
|
||||||
Args:
|
return {
|
||||||
auth: Authentication token (optional for read operations)
|
"success": True,
|
||||||
|
"config": {
|
||||||
|
"enabled": config.enabled,
|
||||||
|
"interval_minutes": config.interval_minutes,
|
||||||
|
"schedule_time": config.schedule_time,
|
||||||
|
"schedule_days": config.schedule_days,
|
||||||
|
"auto_download_after_rescan": config.auto_download_after_rescan,
|
||||||
|
},
|
||||||
|
"status": {
|
||||||
|
"is_running": runtime.get("is_running", False),
|
||||||
|
"next_run": runtime.get("next_run"),
|
||||||
|
"last_run": runtime.get("last_run"),
|
||||||
|
"scan_in_progress": runtime.get("scan_in_progress", False),
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@router.get("/config")
|
||||||
|
def get_scheduler_config(
|
||||||
|
auth: Optional[dict] = Depends(require_auth),
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
"""Get current scheduler configuration along with runtime status.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
SchedulerConfig: Current scheduler configuration
|
Combined config and status response.
|
||||||
|
|
||||||
Raises:
|
Raises:
|
||||||
HTTPException: If configuration cannot be loaded
|
HTTPException: 500 if configuration cannot be loaded.
|
||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
config_service = get_config_service()
|
config_service = get_config_service()
|
||||||
app_config = config_service.load_config()
|
app_config = config_service.load_config()
|
||||||
return app_config.scheduler
|
return _build_response(app_config.scheduler)
|
||||||
except ConfigServiceError as e:
|
except ConfigServiceError as exc:
|
||||||
logger.error(f"Failed to load scheduler config: {e}")
|
logger.error("Failed to load scheduler config: %s", exc)
|
||||||
raise HTTPException(
|
raise HTTPException(
|
||||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||||
detail=f"Failed to load scheduler configuration: {e}",
|
detail=f"Failed to load scheduler configuration: {exc}",
|
||||||
) from e
|
) from exc
|
||||||
|
|
||||||
|
|
||||||
@router.post("/config", response_model=SchedulerConfig)
|
@router.post("/config")
|
||||||
def update_scheduler_config(
|
def update_scheduler_config(
|
||||||
scheduler_config: SchedulerConfig,
|
scheduler_config: SchedulerConfig,
|
||||||
auth: dict = Depends(require_auth),
|
auth: dict = Depends(require_auth),
|
||||||
) -> SchedulerConfig:
|
) -> Dict[str, Any]:
|
||||||
"""Update scheduler configuration.
|
"""Update scheduler configuration and apply changes immediately.
|
||||||
|
|
||||||
Args:
|
Accepts the full SchedulerConfig body; any fields not supplied default
|
||||||
scheduler_config: New scheduler configuration
|
to their model defaults (backward compatible).
|
||||||
auth: Authentication token (required)
|
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
SchedulerConfig: Updated scheduler configuration
|
Combined config and status response reflecting the saved config.
|
||||||
|
|
||||||
Raises:
|
Raises:
|
||||||
HTTPException: If configuration update fails
|
HTTPException: 422 on validation errors (handled by FastAPI/Pydantic),
|
||||||
|
500 on save or scheduler failure.
|
||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
config_service = get_config_service()
|
config_service = get_config_service()
|
||||||
app_config = config_service.load_config()
|
app_config = config_service.load_config()
|
||||||
|
|
||||||
# Update scheduler section
|
|
||||||
app_config.scheduler = scheduler_config
|
app_config.scheduler = scheduler_config
|
||||||
|
|
||||||
# Save and return
|
|
||||||
config_service.save_config(app_config)
|
config_service.save_config(app_config)
|
||||||
|
|
||||||
logger.info(
|
logger.info(
|
||||||
f"Scheduler config updated by {auth.get('username', 'unknown')}"
|
"Scheduler config updated by %s: time=%s days=%s auto_dl=%s",
|
||||||
|
auth.get("username", "unknown"),
|
||||||
|
scheduler_config.schedule_time,
|
||||||
|
scheduler_config.schedule_days,
|
||||||
|
scheduler_config.auto_download_after_rescan,
|
||||||
)
|
)
|
||||||
|
|
||||||
return scheduler_config
|
# Apply changes to the running scheduler without restart
|
||||||
except ConfigServiceError as e:
|
try:
|
||||||
logger.error(f"Failed to update scheduler config: {e}")
|
sched_svc = get_scheduler_service()
|
||||||
|
sched_svc.reload_config(scheduler_config)
|
||||||
|
except Exception as sched_exc: # pylint: disable=broad-exception-caught
|
||||||
|
logger.error("Scheduler reload after config update failed: %s", sched_exc)
|
||||||
|
# Config was saved — don't fail the request, just warn
|
||||||
|
|
||||||
|
return _build_response(scheduler_config)
|
||||||
|
|
||||||
|
except ConfigServiceError as exc:
|
||||||
|
logger.error("Failed to update scheduler config: %s", exc)
|
||||||
raise HTTPException(
|
raise HTTPException(
|
||||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||||
detail=f"Failed to update scheduler configuration: {e}",
|
detail=f"Failed to update scheduler configuration: {exc}",
|
||||||
) from e
|
) from exc
|
||||||
|
|
||||||
|
|
||||||
@router.post("/trigger-rescan", response_model=Dict[str, str])
|
@router.post("/trigger-rescan", response_model=Dict[str, str])
|
||||||
async def trigger_rescan(auth: dict = Depends(require_auth)) -> Dict[str, str]:
|
async def trigger_rescan(auth: dict = Depends(require_auth)) -> Dict[str, str]:
|
||||||
"""Manually trigger a library rescan.
|
"""Manually trigger a library rescan (and auto-download if configured).
|
||||||
|
|
||||||
This endpoint triggers an immediate anime library rescan, bypassing
|
|
||||||
the scheduler interval.
|
|
||||||
|
|
||||||
Args:
|
Args:
|
||||||
auth: Authentication token (required)
|
auth: Authentication token (required)
|
||||||
@@ -100,8 +128,7 @@ async def trigger_rescan(auth: dict = Depends(require_auth)) -> Dict[str, str]:
|
|||||||
HTTPException: If rescan cannot be triggered
|
HTTPException: If rescan cannot be triggered
|
||||||
"""
|
"""
|
||||||
try:
|
try:
|
||||||
# Import here to avoid circular dependency
|
from src.server.utils.dependencies import get_series_app # noqa: PLC0415
|
||||||
from src.server.utils.dependencies import get_series_app
|
|
||||||
|
|
||||||
series_app = get_series_app()
|
series_app = get_series_app()
|
||||||
if not series_app:
|
if not series_app:
|
||||||
@@ -110,21 +137,19 @@ async def trigger_rescan(auth: dict = Depends(require_auth)) -> Dict[str, str]:
|
|||||||
detail="SeriesApp not initialized",
|
detail="SeriesApp not initialized",
|
||||||
)
|
)
|
||||||
|
|
||||||
# Trigger the rescan
|
|
||||||
logger.info(
|
logger.info(
|
||||||
f"Manual rescan triggered by {auth.get('username', 'unknown')}"
|
"Manual rescan triggered by %s", auth.get("username", "unknown")
|
||||||
)
|
)
|
||||||
|
|
||||||
# Use existing rescan logic from anime API
|
from src.server.api.anime import trigger_rescan as do_rescan # noqa: PLC0415
|
||||||
from src.server.api.anime import trigger_rescan as do_rescan
|
|
||||||
|
|
||||||
return await do_rescan()
|
return await do_rescan()
|
||||||
|
|
||||||
except HTTPException:
|
except HTTPException:
|
||||||
raise
|
raise
|
||||||
except Exception as e:
|
except Exception as exc:
|
||||||
logger.exception("Failed to trigger manual rescan")
|
logger.exception("Failed to trigger manual rescan")
|
||||||
raise HTTPException(
|
raise HTTPException(
|
||||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||||
detail=f"Failed to trigger rescan: {str(e)}",
|
detail=f"Failed to trigger rescan: {exc}",
|
||||||
) from e
|
) from exc
|
||||||
|
|||||||
@@ -2,16 +2,67 @@ from typing import Dict, List, Optional
|
|||||||
|
|
||||||
from pydantic import BaseModel, Field, ValidationError, field_validator
|
from pydantic import BaseModel, Field, ValidationError, field_validator
|
||||||
|
|
||||||
|
_VALID_DAYS = frozenset(["mon", "tue", "wed", "thu", "fri", "sat", "sun"])
|
||||||
|
_ALL_DAYS = ["mon", "tue", "wed", "thu", "fri", "sat", "sun"]
|
||||||
|
|
||||||
|
|
||||||
class SchedulerConfig(BaseModel):
|
class SchedulerConfig(BaseModel):
|
||||||
"""Scheduler related configuration."""
|
"""Scheduler related configuration.
|
||||||
|
|
||||||
|
Cron-based scheduling is configured via ``schedule_time`` and
|
||||||
|
``schedule_days``. The legacy ``interval_minutes`` field is kept for
|
||||||
|
backward compatibility but is **deprecated** and ignored when
|
||||||
|
``schedule_time`` is set.
|
||||||
|
"""
|
||||||
|
|
||||||
enabled: bool = Field(
|
enabled: bool = Field(
|
||||||
default=True, description="Whether the scheduler is enabled"
|
default=True, description="Whether the scheduler is enabled"
|
||||||
)
|
)
|
||||||
interval_minutes: int = Field(
|
interval_minutes: int = Field(
|
||||||
default=60, ge=1, description="Scheduler interval in minutes"
|
default=60,
|
||||||
|
ge=1,
|
||||||
|
description="[Deprecated] Scheduler interval in minutes. "
|
||||||
|
"Use schedule_time + schedule_days instead.",
|
||||||
)
|
)
|
||||||
|
schedule_time: str = Field(
|
||||||
|
default="03:00",
|
||||||
|
description="Daily run time in 24-hour HH:MM format (e.g. '03:00')",
|
||||||
|
)
|
||||||
|
schedule_days: List[str] = Field(
|
||||||
|
default_factory=lambda: list(_ALL_DAYS),
|
||||||
|
description="Days of week to run the scheduler (3-letter lowercase "
|
||||||
|
"abbreviations: mon, tue, wed, thu, fri, sat, sun). "
|
||||||
|
"Empty list means disabled.",
|
||||||
|
)
|
||||||
|
auto_download_after_rescan: bool = Field(
|
||||||
|
default=False,
|
||||||
|
description="Automatically queue and start downloads for all missing "
|
||||||
|
"episodes after a scheduled rescan completes.",
|
||||||
|
)
|
||||||
|
|
||||||
|
@field_validator("schedule_time")
|
||||||
|
@classmethod
|
||||||
|
def validate_schedule_time(cls, v: str) -> str:
|
||||||
|
"""Validate HH:MM format within 00:00–23:59."""
|
||||||
|
import re
|
||||||
|
if not re.fullmatch(r"([01]\d|2[0-3]):[0-5]\d", v or ""):
|
||||||
|
raise ValueError(
|
||||||
|
f"Invalid schedule_time '{v}'. "
|
||||||
|
"Expected HH:MM in 24-hour format (00:00–23:59)."
|
||||||
|
)
|
||||||
|
return v
|
||||||
|
|
||||||
|
@field_validator("schedule_days")
|
||||||
|
@classmethod
|
||||||
|
def validate_schedule_days(cls, v: List[str]) -> List[str]:
|
||||||
|
"""Validate each entry is a valid 3-letter lowercase day abbreviation."""
|
||||||
|
invalid = [d for d in v if d not in _VALID_DAYS]
|
||||||
|
if invalid:
|
||||||
|
raise ValueError(
|
||||||
|
f"Invalid day(s) in schedule_days: {invalid}. "
|
||||||
|
f"Allowed values: {sorted(_VALID_DAYS)}"
|
||||||
|
)
|
||||||
|
return v
|
||||||
|
|
||||||
|
|
||||||
class BackupConfig(BaseModel):
|
class BackupConfig(BaseModel):
|
||||||
|
|||||||
@@ -1,305 +1,377 @@
|
|||||||
"""Scheduler service for automatic library rescans.
|
"""Scheduler service for automatic library rescans.
|
||||||
|
|
||||||
This module provides a background scheduler that performs periodic library rescans
|
Uses APScheduler's AsyncIOScheduler with CronTrigger for precise
|
||||||
according to the configured interval. It handles conflict resolution with manual
|
cron-based scheduling. The legacy interval-based loop has been removed
|
||||||
scans and persists scheduler state.
|
in favour of the cron approach.
|
||||||
"""
|
"""
|
||||||
import asyncio
|
from __future__ import annotations
|
||||||
|
|
||||||
from datetime import datetime, timezone
|
from datetime import datetime, timezone
|
||||||
from typing import Optional
|
from typing import List, Optional
|
||||||
|
|
||||||
import structlog
|
import structlog
|
||||||
|
from apscheduler.schedulers.asyncio import AsyncIOScheduler
|
||||||
|
from apscheduler.triggers.cron import CronTrigger
|
||||||
|
|
||||||
from src.server.models.config import SchedulerConfig
|
from src.server.models.config import SchedulerConfig
|
||||||
from src.server.services.config_service import ConfigServiceError, get_config_service
|
from src.server.services.config_service import ConfigServiceError, get_config_service
|
||||||
|
|
||||||
logger = structlog.get_logger(__name__)
|
logger = structlog.get_logger(__name__)
|
||||||
|
|
||||||
|
_JOB_ID = "scheduled_rescan"
|
||||||
|
|
||||||
|
|
||||||
class SchedulerServiceError(Exception):
|
class SchedulerServiceError(Exception):
|
||||||
"""Service-level exception for scheduler operations."""
|
"""Service-level exception for scheduler operations."""
|
||||||
|
|
||||||
|
|
||||||
class SchedulerService:
|
class SchedulerService:
|
||||||
"""Manages automatic library rescans on a configurable schedule.
|
"""Manages automatic library rescans on a cron-based schedule.
|
||||||
|
|
||||||
Features:
|
Uses APScheduler's AsyncIOScheduler so scheduling integrates cleanly
|
||||||
- Periodic library rescans based on configured interval
|
with the running asyncio event loop. Supports:
|
||||||
- Conflict resolution (prevents concurrent scans)
|
|
||||||
- State persistence across restarts
|
- Cron-based scheduling (time of day + days of week)
|
||||||
- Manual trigger capability
|
- Immediate manual trigger
|
||||||
- Enable/disable functionality
|
- Live config reloading without app restart
|
||||||
|
- Auto-queueing downloads of missing episodes after rescan
|
||||||
The scheduler uses a simple interval-based approach where rescans
|
|
||||||
are triggered every N minutes as configured.
|
|
||||||
"""
|
"""
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self) -> None:
|
||||||
"""Initialize the scheduler service."""
|
"""Initialise the scheduler service."""
|
||||||
self._is_running: bool = False
|
self._is_running: bool = False
|
||||||
self._task: Optional[asyncio.Task] = None
|
self._scheduler: Optional[AsyncIOScheduler] = None
|
||||||
self._config: Optional[SchedulerConfig] = None
|
self._config: Optional[SchedulerConfig] = None
|
||||||
self._last_scan_time: Optional[datetime] = None
|
self._last_scan_time: Optional[datetime] = None
|
||||||
self._next_scan_time: Optional[datetime] = None
|
|
||||||
self._scan_in_progress: bool = False
|
self._scan_in_progress: bool = False
|
||||||
|
logger.info("SchedulerService initialised")
|
||||||
logger.info("SchedulerService initialized")
|
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
# Public lifecycle methods
|
||||||
|
# ------------------------------------------------------------------
|
||||||
|
|
||||||
async def start(self) -> None:
|
async def start(self) -> None:
|
||||||
"""Start the scheduler background task.
|
"""Start the APScheduler with the configured cron trigger.
|
||||||
|
|
||||||
Raises:
|
Raises:
|
||||||
SchedulerServiceError: If scheduler is already running
|
SchedulerServiceError: If the scheduler is already running or
|
||||||
|
config cannot be loaded.
|
||||||
"""
|
"""
|
||||||
if self._is_running:
|
if self._is_running:
|
||||||
raise SchedulerServiceError("Scheduler is already running")
|
raise SchedulerServiceError("Scheduler is already running")
|
||||||
|
|
||||||
# Load configuration
|
|
||||||
try:
|
try:
|
||||||
config_service = get_config_service()
|
config_service = get_config_service()
|
||||||
config = config_service.load_config()
|
config = config_service.load_config()
|
||||||
self._config = config.scheduler
|
self._config = config.scheduler
|
||||||
except ConfigServiceError as e:
|
except ConfigServiceError as exc:
|
||||||
logger.error("Failed to load scheduler configuration", error=str(e))
|
logger.error("Failed to load scheduler configuration", error=str(exc))
|
||||||
raise SchedulerServiceError(f"Failed to load config: {e}") from e
|
raise SchedulerServiceError(f"Failed to load config: {exc}") from exc
|
||||||
|
|
||||||
|
self._scheduler = AsyncIOScheduler()
|
||||||
|
|
||||||
if not self._config.enabled:
|
if not self._config.enabled:
|
||||||
logger.info("Scheduler is disabled in configuration")
|
logger.info("Scheduler is disabled in configuration — not adding jobs")
|
||||||
|
self._is_running = True
|
||||||
return
|
return
|
||||||
|
|
||||||
|
trigger = self._build_cron_trigger()
|
||||||
|
if trigger is None:
|
||||||
|
logger.warning(
|
||||||
|
"schedule_days is empty — scheduler started but no job scheduled"
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
self._scheduler.add_job(
|
||||||
|
self._perform_rescan,
|
||||||
|
trigger=trigger,
|
||||||
|
id=_JOB_ID,
|
||||||
|
replace_existing=True,
|
||||||
|
misfire_grace_time=300,
|
||||||
|
)
|
||||||
|
logger.info(
|
||||||
|
"Scheduler started with cron trigger",
|
||||||
|
schedule_time=self._config.schedule_time,
|
||||||
|
schedule_days=self._config.schedule_days,
|
||||||
|
)
|
||||||
|
|
||||||
|
self._scheduler.start()
|
||||||
self._is_running = True
|
self._is_running = True
|
||||||
self._task = asyncio.create_task(self._scheduler_loop())
|
|
||||||
logger.info(
|
|
||||||
"Scheduler started",
|
|
||||||
interval_minutes=self._config.interval_minutes
|
|
||||||
)
|
|
||||||
|
|
||||||
async def stop(self) -> None:
|
async def stop(self) -> None:
|
||||||
"""Stop the scheduler background task gracefully.
|
"""Stop the APScheduler gracefully."""
|
||||||
|
|
||||||
Cancels the running scheduler task and waits for it to complete.
|
|
||||||
"""
|
|
||||||
if not self._is_running:
|
if not self._is_running:
|
||||||
logger.debug("Scheduler stop called but not running")
|
logger.debug("Scheduler stop called but not running")
|
||||||
return
|
return
|
||||||
|
|
||||||
|
if self._scheduler and self._scheduler.running:
|
||||||
|
self._scheduler.shutdown(wait=False)
|
||||||
|
logger.info("Scheduler stopped")
|
||||||
|
|
||||||
self._is_running = False
|
self._is_running = False
|
||||||
|
|
||||||
if self._task and not self._task.done():
|
|
||||||
self._task.cancel()
|
|
||||||
try:
|
|
||||||
await self._task
|
|
||||||
except asyncio.CancelledError:
|
|
||||||
logger.info("Scheduler task cancelled successfully")
|
|
||||||
|
|
||||||
logger.info("Scheduler stopped")
|
|
||||||
|
|
||||||
async def trigger_rescan(self) -> bool:
|
async def trigger_rescan(self) -> bool:
|
||||||
"""Manually trigger a library rescan.
|
"""Manually trigger a library rescan.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
True if rescan was triggered, False if scan already in progress
|
True if rescan was started; False if a scan is already running.
|
||||||
|
|
||||||
Raises:
|
Raises:
|
||||||
SchedulerServiceError: If scheduler is not running
|
SchedulerServiceError: If the scheduler service is not started.
|
||||||
"""
|
"""
|
||||||
if not self._is_running:
|
if not self._is_running:
|
||||||
raise SchedulerServiceError("Scheduler is not running")
|
raise SchedulerServiceError("Scheduler is not running")
|
||||||
|
|
||||||
if self._scan_in_progress:
|
if self._scan_in_progress:
|
||||||
logger.warning("Cannot trigger rescan: scan already in progress")
|
logger.warning("Cannot trigger rescan: scan already in progress")
|
||||||
return False
|
return False
|
||||||
|
|
||||||
logger.info("Manual rescan triggered")
|
logger.info("Manual rescan triggered")
|
||||||
await self._perform_rescan()
|
await self._perform_rescan()
|
||||||
return True
|
return True
|
||||||
|
|
||||||
async def reload_config(self) -> None:
|
def reload_config(self, config: SchedulerConfig) -> None:
|
||||||
"""Reload scheduler configuration from config service.
|
"""Apply a new SchedulerConfig immediately.
|
||||||
|
|
||||||
The scheduler will restart with the new configuration if it's running.
|
If the scheduler is already running the job is rescheduled (or
|
||||||
|
removed) without stopping the scheduler.
|
||||||
Raises:
|
|
||||||
SchedulerServiceError: If config reload fails
|
Args:
|
||||||
|
config: New scheduler configuration to apply.
|
||||||
"""
|
"""
|
||||||
try:
|
self._config = config
|
||||||
config_service = get_config_service()
|
logger.info(
|
||||||
config = config_service.load_config()
|
"Scheduler config reloaded",
|
||||||
old_config = self._config
|
enabled=config.enabled,
|
||||||
self._config = config.scheduler
|
schedule_time=config.schedule_time,
|
||||||
|
schedule_days=config.schedule_days,
|
||||||
logger.info(
|
auto_download=config.auto_download_after_rescan,
|
||||||
"Scheduler configuration reloaded",
|
)
|
||||||
old_enabled=old_config.enabled if old_config else None,
|
|
||||||
new_enabled=self._config.enabled,
|
if not self._scheduler or not self._scheduler.running:
|
||||||
old_interval=old_config.interval_minutes if old_config else None,
|
return
|
||||||
new_interval=self._config.interval_minutes
|
|
||||||
)
|
if not config.enabled:
|
||||||
|
if self._scheduler.get_job(_JOB_ID):
|
||||||
# Restart scheduler if it's running and config changed
|
self._scheduler.remove_job(_JOB_ID)
|
||||||
if self._is_running:
|
logger.info("Scheduler job removed (disabled)")
|
||||||
if not self._config.enabled:
|
return
|
||||||
logger.info("Scheduler disabled, stopping...")
|
|
||||||
await self.stop()
|
trigger = self._build_cron_trigger()
|
||||||
elif old_config and old_config.interval_minutes != self._config.interval_minutes:
|
if trigger is None:
|
||||||
logger.info("Interval changed, restarting scheduler...")
|
if self._scheduler.get_job(_JOB_ID):
|
||||||
await self.stop()
|
self._scheduler.remove_job(_JOB_ID)
|
||||||
await self.start()
|
logger.warning("Scheduler job removed — schedule_days is empty")
|
||||||
elif self._config.enabled and not self._is_running:
|
else:
|
||||||
logger.info("Scheduler enabled, starting...")
|
if self._scheduler.get_job(_JOB_ID):
|
||||||
await self.start()
|
self._scheduler.reschedule_job(_JOB_ID, trigger=trigger)
|
||||||
|
logger.info(
|
||||||
except ConfigServiceError as e:
|
"Scheduler rescheduled with cron trigger",
|
||||||
logger.error("Failed to reload scheduler configuration", error=str(e))
|
schedule_time=config.schedule_time,
|
||||||
raise SchedulerServiceError(f"Failed to reload config: {e}") from e
|
schedule_days=config.schedule_days,
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
self._scheduler.add_job(
|
||||||
|
self._perform_rescan,
|
||||||
|
trigger=trigger,
|
||||||
|
id=_JOB_ID,
|
||||||
|
replace_existing=True,
|
||||||
|
misfire_grace_time=300,
|
||||||
|
)
|
||||||
|
logger.info(
|
||||||
|
"Scheduler job added with cron trigger",
|
||||||
|
schedule_time=config.schedule_time,
|
||||||
|
schedule_days=config.schedule_days,
|
||||||
|
)
|
||||||
|
|
||||||
def get_status(self) -> dict:
|
def get_status(self) -> dict:
|
||||||
"""Get current scheduler status.
|
"""Return current scheduler status including cron configuration.
|
||||||
|
|
||||||
Returns:
|
Returns:
|
||||||
Dict containing scheduler state information
|
Dict containing scheduler state and config fields.
|
||||||
"""
|
"""
|
||||||
|
next_run: Optional[str] = None
|
||||||
|
if self._scheduler and self._scheduler.running:
|
||||||
|
job = self._scheduler.get_job(_JOB_ID)
|
||||||
|
if job and job.next_run_time:
|
||||||
|
next_run = job.next_run_time.isoformat()
|
||||||
|
|
||||||
return {
|
return {
|
||||||
"is_running": self._is_running,
|
"is_running": self._is_running,
|
||||||
"enabled": self._config.enabled if self._config else False,
|
"enabled": self._config.enabled if self._config else False,
|
||||||
"interval_minutes": self._config.interval_minutes if self._config else None,
|
"interval_minutes": self._config.interval_minutes if self._config else None,
|
||||||
"last_scan_time": self._last_scan_time.isoformat() if self._last_scan_time else None,
|
"schedule_time": self._config.schedule_time if self._config else None,
|
||||||
"next_scan_time": self._next_scan_time.isoformat() if self._next_scan_time else None,
|
"schedule_days": self._config.schedule_days if self._config else [],
|
||||||
|
"auto_download_after_rescan": (
|
||||||
|
self._config.auto_download_after_rescan if self._config else False
|
||||||
|
),
|
||||||
|
"last_run": self._last_scan_time.isoformat() if self._last_scan_time else None,
|
||||||
|
"next_run": next_run,
|
||||||
"scan_in_progress": self._scan_in_progress,
|
"scan_in_progress": self._scan_in_progress,
|
||||||
}
|
}
|
||||||
|
|
||||||
async def _scheduler_loop(self) -> None:
|
# ------------------------------------------------------------------
|
||||||
"""Main scheduler loop that runs periodic rescans.
|
# Private helpers
|
||||||
|
# ------------------------------------------------------------------
|
||||||
This coroutine runs indefinitely until cancelled, sleeping for the
|
|
||||||
configured interval between rescans.
|
def _build_cron_trigger(self) -> Optional[CronTrigger]:
|
||||||
|
"""Convert config fields into an APScheduler CronTrigger.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
CronTrigger instance or None if schedule_days is empty.
|
||||||
"""
|
"""
|
||||||
logger.info("Scheduler loop started")
|
if not self._config or not self._config.schedule_days:
|
||||||
|
return None
|
||||||
while self._is_running:
|
|
||||||
try:
|
hour_str, minute_str = self._config.schedule_time.split(":")
|
||||||
if not self._config or not self._config.enabled:
|
day_of_week = ",".join(self._config.schedule_days)
|
||||||
logger.debug("Scheduler disabled, exiting loop")
|
|
||||||
break
|
trigger = CronTrigger(
|
||||||
|
hour=int(hour_str),
|
||||||
# Calculate next scan time
|
minute=int(minute_str),
|
||||||
interval_seconds = self._config.interval_minutes * 60
|
day_of_week=day_of_week,
|
||||||
self._next_scan_time = datetime.now(timezone.utc)
|
)
|
||||||
self._next_scan_time = self._next_scan_time.replace(
|
logger.debug(
|
||||||
second=0, microsecond=0
|
"CronTrigger built",
|
||||||
)
|
hour=hour_str,
|
||||||
|
minute=minute_str,
|
||||||
# Wait for the interval
|
day_of_week=day_of_week,
|
||||||
logger.debug(
|
)
|
||||||
"Waiting for next scan",
|
return trigger
|
||||||
interval_minutes=self._config.interval_minutes,
|
|
||||||
next_scan=self._next_scan_time.isoformat()
|
async def _broadcast(self, event_type: str, data: dict) -> None:
|
||||||
)
|
"""Broadcast a WebSocket event to all connected clients."""
|
||||||
await asyncio.sleep(interval_seconds)
|
try:
|
||||||
|
from src.server.services.websocket_service import ( # noqa: PLC0415
|
||||||
# Perform the rescan
|
get_websocket_service,
|
||||||
if self._is_running: # Check again after sleep
|
)
|
||||||
await self._perform_rescan()
|
|
||||||
|
ws_service = get_websocket_service()
|
||||||
except asyncio.CancelledError:
|
await ws_service.manager.broadcast({"type": event_type, "data": data})
|
||||||
logger.info("Scheduler loop cancelled")
|
except Exception as exc: # pylint: disable=broad-exception-caught
|
||||||
break
|
logger.warning("WebSocket broadcast failed", event=event_type, error=str(exc))
|
||||||
except Exception as e: # pylint: disable=broad-exception-caught
|
|
||||||
logger.error(
|
async def _auto_download_missing(self) -> None:
|
||||||
"Error in scheduler loop",
|
"""Queue and start downloads for all series with missing episodes."""
|
||||||
error=str(e),
|
from src.server.models.download import EpisodeIdentifier # noqa: PLC0415
|
||||||
exc_info=True
|
from src.server.utils.dependencies import ( # noqa: PLC0415
|
||||||
)
|
get_anime_service,
|
||||||
# Continue loop despite errors
|
get_download_service,
|
||||||
await asyncio.sleep(60) # Wait 1 minute before retrying
|
)
|
||||||
|
|
||||||
logger.info("Scheduler loop exited")
|
anime_service = get_anime_service()
|
||||||
|
download_service = get_download_service()
|
||||||
|
|
||||||
|
series_list = anime_service._cached_list_missing()
|
||||||
|
queued_count = 0
|
||||||
|
|
||||||
|
for series in series_list:
|
||||||
|
episode_dict: dict = series.get("episodeDict") or {}
|
||||||
|
if not episode_dict:
|
||||||
|
continue
|
||||||
|
|
||||||
|
episodes: List[EpisodeIdentifier] = []
|
||||||
|
for season_str, ep_numbers in episode_dict.items():
|
||||||
|
for ep_num in ep_numbers:
|
||||||
|
episodes.append(
|
||||||
|
EpisodeIdentifier(season=int(season_str), episode=int(ep_num))
|
||||||
|
)
|
||||||
|
|
||||||
|
if not episodes:
|
||||||
|
continue
|
||||||
|
|
||||||
|
await download_service.add_to_queue(
|
||||||
|
serie_id=series.get("key", ""),
|
||||||
|
serie_folder=series.get("folder", series.get("name", "")),
|
||||||
|
serie_name=series.get("name", ""),
|
||||||
|
episodes=episodes,
|
||||||
|
)
|
||||||
|
queued_count += len(episodes)
|
||||||
|
logger.info(
|
||||||
|
"Auto-download queued episodes",
|
||||||
|
series=series.get("key"),
|
||||||
|
count=len(episodes),
|
||||||
|
)
|
||||||
|
|
||||||
|
if queued_count:
|
||||||
|
await download_service.start_queue_processing()
|
||||||
|
logger.info("Auto-download queue processing started", queued=queued_count)
|
||||||
|
|
||||||
|
await self._broadcast("auto_download_started", {"queued_count": queued_count})
|
||||||
|
logger.info("Auto-download completed", queued_count=queued_count)
|
||||||
|
|
||||||
async def _perform_rescan(self) -> None:
|
async def _perform_rescan(self) -> None:
|
||||||
"""Execute a library rescan.
|
"""Execute a library rescan and optionally trigger auto-download."""
|
||||||
|
|
||||||
This method calls the anime service to perform the actual rescan.
|
|
||||||
It includes conflict detection to prevent concurrent scans.
|
|
||||||
"""
|
|
||||||
if self._scan_in_progress:
|
if self._scan_in_progress:
|
||||||
logger.warning("Skipping rescan: previous scan still in progress")
|
logger.warning("Skipping rescan: previous scan still in progress")
|
||||||
return
|
return
|
||||||
|
|
||||||
self._scan_in_progress = True
|
self._scan_in_progress = True
|
||||||
scan_start = datetime.now(timezone.utc)
|
scan_start = datetime.now(timezone.utc)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
logger.info("Starting scheduled library rescan")
|
logger.info("Starting scheduled library rescan")
|
||||||
|
|
||||||
# Import here to avoid circular dependency
|
from src.server.utils.dependencies import get_anime_service # noqa: PLC0415
|
||||||
from src.server.services.websocket_service import get_websocket_service
|
|
||||||
from src.server.utils.dependencies import get_anime_service
|
|
||||||
|
|
||||||
anime_service = get_anime_service()
|
anime_service = get_anime_service()
|
||||||
ws_service = get_websocket_service()
|
|
||||||
|
await self._broadcast(
|
||||||
# Notify clients that scheduled rescan started
|
"scheduled_rescan_started",
|
||||||
await ws_service.manager.broadcast({
|
{"timestamp": scan_start.isoformat()},
|
||||||
"type": "scheduled_rescan_started",
|
)
|
||||||
"data": {
|
|
||||||
"timestamp": scan_start.isoformat()
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
# Perform the rescan
|
|
||||||
await anime_service.rescan()
|
await anime_service.rescan()
|
||||||
|
|
||||||
self._last_scan_time = datetime.now(timezone.utc)
|
self._last_scan_time = datetime.now(timezone.utc)
|
||||||
|
duration = (self._last_scan_time - scan_start).total_seconds()
|
||||||
logger.info(
|
|
||||||
"Scheduled library rescan completed",
|
logger.info("Scheduled library rescan completed", duration_seconds=duration)
|
||||||
duration_seconds=(self._last_scan_time - scan_start).total_seconds()
|
|
||||||
)
|
await self._broadcast(
|
||||||
|
"scheduled_rescan_completed",
|
||||||
# Notify clients that rescan completed
|
{
|
||||||
await ws_service.manager.broadcast({
|
|
||||||
"type": "scheduled_rescan_completed",
|
|
||||||
"data": {
|
|
||||||
"timestamp": self._last_scan_time.isoformat(),
|
"timestamp": self._last_scan_time.isoformat(),
|
||||||
"duration_seconds": (self._last_scan_time - scan_start).total_seconds()
|
"duration_seconds": duration,
|
||||||
}
|
},
|
||||||
})
|
|
||||||
|
|
||||||
except Exception as e: # pylint: disable=broad-exception-caught
|
|
||||||
logger.error(
|
|
||||||
"Scheduled rescan failed",
|
|
||||||
error=str(e),
|
|
||||||
exc_info=True
|
|
||||||
)
|
)
|
||||||
|
|
||||||
# Notify clients of error
|
# Auto-download after rescan
|
||||||
try:
|
if self._config and self._config.auto_download_after_rescan:
|
||||||
from src.server.services.websocket_service import get_websocket_service
|
logger.info("Auto-download after rescan is enabled — starting")
|
||||||
ws_service = get_websocket_service()
|
try:
|
||||||
await ws_service.manager.broadcast({
|
await self._auto_download_missing()
|
||||||
"type": "scheduled_rescan_error",
|
except Exception as dl_exc: # pylint: disable=broad-exception-caught
|
||||||
"data": {
|
logger.error(
|
||||||
"error": str(e),
|
"Auto-download after rescan failed",
|
||||||
"timestamp": datetime.now(timezone.utc).isoformat()
|
error=str(dl_exc),
|
||||||
}
|
exc_info=True,
|
||||||
})
|
)
|
||||||
except Exception: # pylint: disable=broad-exception-caught
|
await self._broadcast(
|
||||||
pass # Don't fail if WebSocket notification fails
|
"auto_download_error", {"error": str(dl_exc)}
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
logger.debug("Auto-download after rescan is disabled — skipping")
|
||||||
|
|
||||||
|
except Exception as exc: # pylint: disable=broad-exception-caught
|
||||||
|
logger.error("Scheduled rescan failed", error=str(exc), exc_info=True)
|
||||||
|
await self._broadcast(
|
||||||
|
"scheduled_rescan_error",
|
||||||
|
{"error": str(exc), "timestamp": datetime.now(timezone.utc).isoformat()},
|
||||||
|
)
|
||||||
|
|
||||||
finally:
|
finally:
|
||||||
self._scan_in_progress = False
|
self._scan_in_progress = False
|
||||||
|
|
||||||
|
|
||||||
# Module-level singleton instance
|
# ---------------------------------------------------------------------------
|
||||||
|
# Module-level singleton
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
_scheduler_service: Optional[SchedulerService] = None
|
_scheduler_service: Optional[SchedulerService] = None
|
||||||
|
|
||||||
|
|
||||||
def get_scheduler_service() -> SchedulerService:
|
def get_scheduler_service() -> SchedulerService:
|
||||||
"""Get the singleton scheduler service instance.
|
"""Return the singleton SchedulerService instance."""
|
||||||
|
|
||||||
Returns:
|
|
||||||
SchedulerService singleton
|
|
||||||
"""
|
|
||||||
global _scheduler_service
|
global _scheduler_service
|
||||||
if _scheduler_service is None:
|
if _scheduler_service is None:
|
||||||
_scheduler_service = SchedulerService()
|
_scheduler_service = SchedulerService()
|
||||||
@@ -307,6 +379,6 @@ def get_scheduler_service() -> SchedulerService:
|
|||||||
|
|
||||||
|
|
||||||
def reset_scheduler_service() -> None:
|
def reset_scheduler_service() -> None:
|
||||||
"""Reset the scheduler service singleton (for testing)."""
|
"""Reset the singleton (used in tests)."""
|
||||||
global _scheduler_service
|
global _scheduler_service
|
||||||
_scheduler_service = None
|
_scheduler_service = None
|
||||||
|
|||||||
@@ -228,3 +228,122 @@
|
|||||||
font-size: var(--font-size-title);
|
font-size: var(--font-size-title);
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
/* ============================================================
|
||||||
|
Scheduler day-of-week toggle pills
|
||||||
|
============================================================ */
|
||||||
|
|
||||||
|
.scheduler-days-container {
|
||||||
|
display: flex;
|
||||||
|
flex-wrap: wrap;
|
||||||
|
gap: var(--spacing-sm);
|
||||||
|
margin-top: var(--spacing-sm);
|
||||||
|
}
|
||||||
|
|
||||||
|
.scheduler-day-toggle-label {
|
||||||
|
display: inline-flex;
|
||||||
|
align-items: center;
|
||||||
|
cursor: pointer;
|
||||||
|
user-select: none;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Hide the raw checkbox visually */
|
||||||
|
.scheduler-day-toggle-label .scheduler-day-checkbox {
|
||||||
|
position: absolute;
|
||||||
|
opacity: 0;
|
||||||
|
width: 0;
|
||||||
|
height: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Pill styling */
|
||||||
|
.scheduler-day-label {
|
||||||
|
display: inline-flex;
|
||||||
|
align-items: center;
|
||||||
|
justify-content: center;
|
||||||
|
min-width: 2.6rem;
|
||||||
|
padding: var(--spacing-xs) var(--spacing-sm);
|
||||||
|
border: 1px solid var(--color-border);
|
||||||
|
border-radius: var(--border-radius-xl);
|
||||||
|
font-size: var(--font-size-caption);
|
||||||
|
font-weight: 600;
|
||||||
|
color: var(--color-text-secondary);
|
||||||
|
background-color: var(--color-bg-secondary);
|
||||||
|
transition: background-color var(--transition-duration) var(--transition-easing),
|
||||||
|
color var(--transition-duration) var(--transition-easing),
|
||||||
|
border-color var(--transition-duration) var(--transition-easing);
|
||||||
|
cursor: pointer;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Checked state – filled accent */
|
||||||
|
.scheduler-day-checkbox:checked + .scheduler-day-label {
|
||||||
|
background-color: var(--color-accent);
|
||||||
|
border-color: var(--color-accent);
|
||||||
|
color: #ffffff;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Hover for unchecked */
|
||||||
|
.scheduler-day-toggle-label:hover .scheduler-day-label {
|
||||||
|
border-color: var(--color-accent);
|
||||||
|
color: var(--color-accent);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Hover for checked */
|
||||||
|
.scheduler-day-toggle-label:hover .scheduler-day-checkbox:checked + .scheduler-day-label {
|
||||||
|
background-color: var(--color-accent-hover);
|
||||||
|
border-color: var(--color-accent-hover);
|
||||||
|
color: #ffffff;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Dark theme overrides */
|
||||||
|
[data-theme="dark"] .scheduler-day-label {
|
||||||
|
border-color: var(--color-border-dark);
|
||||||
|
color: var(--color-text-secondary-dark);
|
||||||
|
background-color: var(--color-bg-secondary-dark);
|
||||||
|
}
|
||||||
|
|
||||||
|
[data-theme="dark"] .scheduler-day-checkbox:checked + .scheduler-day-label {
|
||||||
|
background-color: var(--color-accent-dark);
|
||||||
|
border-color: var(--color-accent-dark);
|
||||||
|
color: var(--color-bg-primary-dark);
|
||||||
|
}
|
||||||
|
|
||||||
|
[data-theme="dark"] .scheduler-day-toggle-label:hover .scheduler-day-label {
|
||||||
|
border-color: var(--color-accent-dark);
|
||||||
|
color: var(--color-accent-dark);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Next run display */
|
||||||
|
#scheduler-next-run {
|
||||||
|
font-style: italic;
|
||||||
|
font-size: var(--font-size-caption);
|
||||||
|
color: var(--color-text-tertiary);
|
||||||
|
}
|
||||||
|
|
||||||
|
[data-theme="dark"] #scheduler-next-run {
|
||||||
|
color: var(--color-text-tertiary-dark);
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Advanced/collapsible section */
|
||||||
|
.config-advanced {
|
||||||
|
font-size: var(--font-size-caption);
|
||||||
|
color: var(--color-text-secondary);
|
||||||
|
margin-top: var(--spacing-sm);
|
||||||
|
}
|
||||||
|
|
||||||
|
.config-advanced summary {
|
||||||
|
cursor: pointer;
|
||||||
|
padding: var(--spacing-xs) 0;
|
||||||
|
font-weight: 500;
|
||||||
|
}
|
||||||
|
|
||||||
|
/* Responsive: wrap day pills to 2 rows on mobile */
|
||||||
|
@media (max-width: 480px) {
|
||||||
|
.scheduler-days-container {
|
||||||
|
gap: var(--spacing-xs);
|
||||||
|
}
|
||||||
|
|
||||||
|
.scheduler-day-label {
|
||||||
|
min-width: 2.2rem;
|
||||||
|
padding: var(--spacing-xs);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|||||||
@@ -1554,24 +1554,42 @@ class AniWorldApp {
|
|||||||
const data = await response.json();
|
const data = await response.json();
|
||||||
|
|
||||||
if (data.success) {
|
if (data.success) {
|
||||||
const config = data.config;
|
const config = data.config || {};
|
||||||
|
const schedulerStatus = data.status || {};
|
||||||
|
|
||||||
// Update UI elements
|
// Update UI elements
|
||||||
document.getElementById('scheduled-rescan-enabled').checked = config.enabled;
|
document.getElementById('scheduled-rescan-enabled').checked = !!config.enabled;
|
||||||
document.getElementById('scheduled-rescan-time').value = config.time || '03:00';
|
document.getElementById('scheduled-rescan-time').value = config.schedule_time || '03:00';
|
||||||
document.getElementById('auto-download-after-rescan').checked = config.auto_download_after_rescan;
|
document.getElementById('auto-download-after-rescan').checked = !!config.auto_download_after_rescan;
|
||||||
|
|
||||||
|
// Update day-of-week checkboxes
|
||||||
|
const days = Array.isArray(config.schedule_days) ? config.schedule_days : ['mon','tue','wed','thu','fri','sat','sun'];
|
||||||
|
['mon','tue','wed','thu','fri','sat','sun'].forEach(day => {
|
||||||
|
const cb = document.getElementById(`scheduler-day-${day}`);
|
||||||
|
if (cb) cb.checked = days.includes(day);
|
||||||
|
});
|
||||||
|
|
||||||
// Update status display
|
// Update status display
|
||||||
document.getElementById('next-rescan-time').textContent =
|
const nextRunEl = document.getElementById('scheduler-next-run');
|
||||||
config.next_run ? new Date(config.next_run).toLocaleString() : 'Not scheduled';
|
if (nextRunEl) {
|
||||||
document.getElementById('last-rescan-time').textContent =
|
nextRunEl.textContent = schedulerStatus.next_run
|
||||||
config.last_run ? new Date(config.last_run).toLocaleString() : 'Never';
|
? new Date(schedulerStatus.next_run).toLocaleString()
|
||||||
|
: 'Not scheduled';
|
||||||
|
}
|
||||||
|
const lastRunEl = document.getElementById('last-rescan-time');
|
||||||
|
if (lastRunEl) {
|
||||||
|
lastRunEl.textContent = schedulerStatus.last_run
|
||||||
|
? new Date(schedulerStatus.last_run).toLocaleString()
|
||||||
|
: 'Never';
|
||||||
|
}
|
||||||
|
|
||||||
const statusBadge = document.getElementById('scheduler-running-status');
|
const statusBadge = document.getElementById('scheduler-running-status');
|
||||||
statusBadge.textContent = config.is_running ? 'Running' : 'Stopped';
|
if (statusBadge) {
|
||||||
statusBadge.className = `info-value status-badge ${config.is_running ? 'running' : 'stopped'}`;
|
statusBadge.textContent = schedulerStatus.is_running ? 'Running' : 'Stopped';
|
||||||
|
statusBadge.className = `info-value status-badge ${schedulerStatus.is_running ? 'running' : 'stopped'}`;
|
||||||
|
}
|
||||||
|
|
||||||
// Enable/disable time input based on checkbox
|
// Enable/disable time/day inputs based on checkbox
|
||||||
this.toggleSchedulerTimeInput();
|
this.toggleSchedulerTimeInput();
|
||||||
}
|
}
|
||||||
} catch (error) {
|
} catch (error) {
|
||||||
@@ -1583,17 +1601,23 @@ class AniWorldApp {
|
|||||||
async saveSchedulerConfig() {
|
async saveSchedulerConfig() {
|
||||||
try {
|
try {
|
||||||
const enabled = document.getElementById('scheduled-rescan-enabled').checked;
|
const enabled = document.getElementById('scheduled-rescan-enabled').checked;
|
||||||
const time = document.getElementById('scheduled-rescan-time').value;
|
const scheduleTime = document.getElementById('scheduled-rescan-time').value || '03:00';
|
||||||
const autoDownload = document.getElementById('auto-download-after-rescan').checked;
|
const autoDownload = document.getElementById('auto-download-after-rescan').checked;
|
||||||
|
|
||||||
|
// Collect checked day-of-week values
|
||||||
|
const scheduleDays = ['mon','tue','wed','thu','fri','sat','sun']
|
||||||
|
.filter(day => {
|
||||||
|
const cb = document.getElementById(`scheduler-day-${day}`);
|
||||||
|
return cb ? cb.checked : true;
|
||||||
|
});
|
||||||
|
|
||||||
const response = await this.makeAuthenticatedRequest('/api/scheduler/config', {
|
const response = await this.makeAuthenticatedRequest('/api/scheduler/config', {
|
||||||
method: 'POST',
|
method: 'POST',
|
||||||
headers: {
|
headers: { 'Content-Type': 'application/json' },
|
||||||
'Content-Type': 'application/json',
|
|
||||||
},
|
|
||||||
body: JSON.stringify({
|
body: JSON.stringify({
|
||||||
enabled: enabled,
|
enabled: enabled,
|
||||||
time: time,
|
schedule_time: scheduleTime,
|
||||||
|
schedule_days: scheduleDays,
|
||||||
auto_download_after_rescan: autoDownload
|
auto_download_after_rescan: autoDownload
|
||||||
})
|
})
|
||||||
});
|
});
|
||||||
@@ -1603,7 +1627,12 @@ class AniWorldApp {
|
|||||||
|
|
||||||
if (data.success) {
|
if (data.success) {
|
||||||
this.showToast('Scheduler configuration saved successfully', 'success');
|
this.showToast('Scheduler configuration saved successfully', 'success');
|
||||||
// Reload config to update display
|
// Update next-run display from response
|
||||||
|
const nextRunEl = document.getElementById('scheduler-next-run');
|
||||||
|
if (nextRunEl && data.status && data.status.next_run) {
|
||||||
|
nextRunEl.textContent = new Date(data.status.next_run).toLocaleString();
|
||||||
|
}
|
||||||
|
// Reload config to sync the full UI
|
||||||
await this.loadSchedulerConfig();
|
await this.loadSchedulerConfig();
|
||||||
} else {
|
} else {
|
||||||
this.showToast(`Failed to save config: ${data.error}`, 'error');
|
this.showToast(`Failed to save config: ${data.error}`, 'error');
|
||||||
@@ -1637,11 +1666,19 @@ class AniWorldApp {
|
|||||||
toggleSchedulerTimeInput() {
|
toggleSchedulerTimeInput() {
|
||||||
const enabled = document.getElementById('scheduled-rescan-enabled').checked;
|
const enabled = document.getElementById('scheduled-rescan-enabled').checked;
|
||||||
const timeConfig = document.getElementById('rescan-time-config');
|
const timeConfig = document.getElementById('rescan-time-config');
|
||||||
|
const daysConfig = document.getElementById('rescan-days-config');
|
||||||
|
const nextRunEl = document.getElementById('scheduler-next-run');
|
||||||
|
|
||||||
if (enabled) {
|
if (timeConfig) {
|
||||||
timeConfig.classList.add('enabled');
|
timeConfig.classList.toggle('enabled', enabled);
|
||||||
} else {
|
}
|
||||||
timeConfig.classList.remove('enabled');
|
if (daysConfig) {
|
||||||
|
daysConfig.classList.toggle('enabled', enabled);
|
||||||
|
}
|
||||||
|
if (nextRunEl) {
|
||||||
|
nextRunEl.parentElement && nextRunEl.parentElement.parentElement
|
||||||
|
? nextRunEl.parentElement.parentElement.classList.toggle('hidden', !enabled)
|
||||||
|
: null;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
|
|||||||
@@ -254,17 +254,46 @@
|
|||||||
</label>
|
</label>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div class="config-item" id="rescan-interval-config">
|
<div class="config-item" id="rescan-time-config">
|
||||||
<label for="scheduled-rescan-interval" data-text="rescan-interval">Check Interval (minutes):</label>
|
<label for="scheduled-rescan-time" data-text="rescan-time">Run at:</label>
|
||||||
<input type="number" id="scheduled-rescan-interval" value="60" min="1" class="input-field">
|
<input type="time" id="scheduled-rescan-time" value="03:00" class="input-field">
|
||||||
<small class="config-hint" data-text="rescan-interval-hint">
|
|
||||||
How often to check for new episodes (minimum 1 minute)
|
|
||||||
</small>
|
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div class="config-item" id="rescan-time-config">
|
<div class="config-item" id="rescan-days-config">
|
||||||
<label for="scheduled-rescan-time" data-text="rescan-time">Rescan Time (24h format):</label>
|
<label data-text="rescan-days">Days of week:</label>
|
||||||
<input type="time" id="scheduled-rescan-time" value="03:00" class="input-field">
|
<div class="scheduler-days-container">
|
||||||
|
<label class="scheduler-day-toggle-label">
|
||||||
|
<input type="checkbox" id="scheduler-day-mon" checked class="scheduler-day-checkbox">
|
||||||
|
<span class="scheduler-day-label" data-text="day-mon">Mon</span>
|
||||||
|
</label>
|
||||||
|
<label class="scheduler-day-toggle-label">
|
||||||
|
<input type="checkbox" id="scheduler-day-tue" checked class="scheduler-day-checkbox">
|
||||||
|
<span class="scheduler-day-label" data-text="day-tue">Tue</span>
|
||||||
|
</label>
|
||||||
|
<label class="scheduler-day-toggle-label">
|
||||||
|
<input type="checkbox" id="scheduler-day-wed" checked class="scheduler-day-checkbox">
|
||||||
|
<span class="scheduler-day-label" data-text="day-wed">Wed</span>
|
||||||
|
</label>
|
||||||
|
<label class="scheduler-day-toggle-label">
|
||||||
|
<input type="checkbox" id="scheduler-day-thu" checked class="scheduler-day-checkbox">
|
||||||
|
<span class="scheduler-day-label" data-text="day-thu">Thu</span>
|
||||||
|
</label>
|
||||||
|
<label class="scheduler-day-toggle-label">
|
||||||
|
<input type="checkbox" id="scheduler-day-fri" checked class="scheduler-day-checkbox">
|
||||||
|
<span class="scheduler-day-label" data-text="day-fri">Fri</span>
|
||||||
|
</label>
|
||||||
|
<label class="scheduler-day-toggle-label">
|
||||||
|
<input type="checkbox" id="scheduler-day-sat" checked class="scheduler-day-checkbox">
|
||||||
|
<span class="scheduler-day-label" data-text="day-sat">Sat</span>
|
||||||
|
</label>
|
||||||
|
<label class="scheduler-day-toggle-label">
|
||||||
|
<input type="checkbox" id="scheduler-day-sun" checked class="scheduler-day-checkbox">
|
||||||
|
<span class="scheduler-day-label" data-text="day-sun">Sun</span>
|
||||||
|
</label>
|
||||||
|
</div>
|
||||||
|
<small class="config-hint" data-text="rescan-days-hint">
|
||||||
|
Scheduler runs at the selected time on checked days. Uncheck all to disable scheduling.
|
||||||
|
</small>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
<div class="config-item">
|
<div class="config-item">
|
||||||
@@ -276,11 +305,23 @@
|
|||||||
</label>
|
</label>
|
||||||
</div>
|
</div>
|
||||||
|
|
||||||
|
<!-- Advanced: legacy interval (hidden by default) -->
|
||||||
|
<details class="config-advanced">
|
||||||
|
<summary data-text="advanced-settings">Advanced</summary>
|
||||||
|
<div class="config-item" id="rescan-interval-config">
|
||||||
|
<label for="scheduled-rescan-interval" data-text="rescan-interval">Legacy Check Interval (minutes):</label>
|
||||||
|
<input type="number" id="scheduled-rescan-interval" value="60" min="1" class="input-field">
|
||||||
|
<small class="config-hint" data-text="rescan-interval-hint">
|
||||||
|
Deprecated: only used if cron scheduling is not configured
|
||||||
|
</small>
|
||||||
|
</div>
|
||||||
|
</details>
|
||||||
|
|
||||||
<div class="config-item scheduler-status" id="scheduler-status">
|
<div class="config-item scheduler-status" id="scheduler-status">
|
||||||
<div class="scheduler-info">
|
<div class="scheduler-info">
|
||||||
<div class="info-row">
|
<div class="info-row">
|
||||||
<span data-text="next-rescan">Next Scheduled Rescan:</span>
|
<span data-text="next-rescan">Next Scheduled Rescan:</span>
|
||||||
<span id="next-rescan-time" class="info-value">-</span>
|
<span id="scheduler-next-run" class="info-value">-</span>
|
||||||
</div>
|
</div>
|
||||||
<div class="info-row">
|
<div class="info-row">
|
||||||
<span data-text="last-rescan">Last Scheduled Rescan:</span>
|
<span data-text="last-rescan">Last Scheduled Rescan:</span>
|
||||||
|
|||||||
@@ -36,393 +36,409 @@ async def client():
|
|||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
async def authenticated_client(client):
|
async def authenticated_client(client):
|
||||||
"""Create an authenticated test client with token."""
|
"""Create an authenticated test client with token."""
|
||||||
# Login to get token
|
|
||||||
response = await client.post(
|
response = await client.post(
|
||||||
"/api/auth/login",
|
"/api/auth/login",
|
||||||
json={"password": "TestPass123!"}
|
json={"password": "TestPass123!"}
|
||||||
)
|
)
|
||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
token = response.json()["access_token"]
|
token = response.json()["access_token"]
|
||||||
|
|
||||||
# Add token to default headers
|
|
||||||
client.headers.update({"Authorization": f"Bearer {token}"})
|
client.headers.update({"Authorization": f"Bearer {token}"})
|
||||||
yield client
|
yield client
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def mock_config_service():
|
def mock_config_service():
|
||||||
"""Create mock configuration service."""
|
"""Create mock configuration service with default SchedulerConfig."""
|
||||||
service = Mock()
|
service = Mock()
|
||||||
|
|
||||||
# Mock config object with scheduler section
|
|
||||||
config = Mock()
|
config = Mock()
|
||||||
config.scheduler = SchedulerConfig(
|
config.scheduler = SchedulerConfig(
|
||||||
enabled=True,
|
enabled=True,
|
||||||
interval_minutes=60
|
interval_minutes=60,
|
||||||
|
schedule_time="03:00",
|
||||||
|
schedule_days=["mon", "tue", "wed", "thu", "fri", "sat", "sun"],
|
||||||
|
auto_download_after_rescan=False,
|
||||||
)
|
)
|
||||||
|
|
||||||
def save_config_side_effect(new_config):
|
def save_config_side_effect(new_config):
|
||||||
"""Update the scheduler config when save is called."""
|
|
||||||
config.scheduler = new_config.scheduler
|
config.scheduler = new_config.scheduler
|
||||||
|
|
||||||
service.load_config = Mock(return_value=config)
|
service.load_config = Mock(return_value=config)
|
||||||
service.save_config = Mock(side_effect=save_config_side_effect)
|
service.save_config = Mock(side_effect=save_config_side_effect)
|
||||||
|
|
||||||
return service
|
return service
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture
|
||||||
|
def mock_scheduler_service():
|
||||||
|
"""Create a mock scheduler service returning a basic status."""
|
||||||
|
svc = Mock()
|
||||||
|
svc.get_status = Mock(return_value={
|
||||||
|
"is_running": True,
|
||||||
|
"next_run": None,
|
||||||
|
"last_run": None,
|
||||||
|
"scan_in_progress": False,
|
||||||
|
})
|
||||||
|
svc.reload_config = Mock()
|
||||||
|
return svc
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# GET /api/scheduler/config
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
class TestGetSchedulerConfig:
|
class TestGetSchedulerConfig:
|
||||||
"""Tests for GET /api/scheduler/config endpoint."""
|
"""Tests for GET /api/scheduler/config endpoint."""
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_get_scheduler_config_success(
|
async def test_returns_success_envelope(
|
||||||
self,
|
self, authenticated_client, mock_config_service, mock_scheduler_service
|
||||||
authenticated_client,
|
|
||||||
mock_config_service
|
|
||||||
):
|
):
|
||||||
"""Test successful scheduler configuration retrieval."""
|
"""Response carries the top-level success/config/status envelope."""
|
||||||
with patch(
|
with patch("src.server.api.scheduler.get_config_service", return_value=mock_config_service), \
|
||||||
'src.server.api.scheduler.get_config_service',
|
patch("src.server.api.scheduler.get_scheduler_service", return_value=mock_scheduler_service):
|
||||||
return_value=mock_config_service
|
|
||||||
):
|
|
||||||
response = await authenticated_client.get("/api/scheduler/config")
|
response = await authenticated_client.get("/api/scheduler/config")
|
||||||
|
|
||||||
assert response.status_code == 200
|
assert response.status_code == 200
|
||||||
data = response.json()
|
data = response.json()
|
||||||
assert data["enabled"] is True
|
assert data["success"] is True
|
||||||
assert data["interval_minutes"] == 60
|
assert "config" in data
|
||||||
mock_config_service.load_config.assert_called_once()
|
assert "status" in data
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_get_scheduler_config_unauthorized(self, client):
|
async def test_config_contains_all_fields(
|
||||||
"""Test scheduler config retrieval without authentication."""
|
self, authenticated_client, mock_config_service, mock_scheduler_service
|
||||||
|
):
|
||||||
|
"""Config block includes all SchedulerConfig fields."""
|
||||||
|
with patch("src.server.api.scheduler.get_config_service", return_value=mock_config_service), \
|
||||||
|
patch("src.server.api.scheduler.get_scheduler_service", return_value=mock_scheduler_service):
|
||||||
|
response = await authenticated_client.get("/api/scheduler/config")
|
||||||
|
|
||||||
|
cfg = response.json()["config"]
|
||||||
|
assert cfg["enabled"] is True
|
||||||
|
assert cfg["interval_minutes"] == 60
|
||||||
|
assert cfg["schedule_time"] == "03:00"
|
||||||
|
assert cfg["schedule_days"] == ["mon", "tue", "wed", "thu", "fri", "sat", "sun"]
|
||||||
|
assert cfg["auto_download_after_rescan"] is False
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_status_block_present(
|
||||||
|
self, authenticated_client, mock_config_service, mock_scheduler_service
|
||||||
|
):
|
||||||
|
"""Status block includes runtime keys."""
|
||||||
|
with patch("src.server.api.scheduler.get_config_service", return_value=mock_config_service), \
|
||||||
|
patch("src.server.api.scheduler.get_scheduler_service", return_value=mock_scheduler_service):
|
||||||
|
response = await authenticated_client.get("/api/scheduler/config")
|
||||||
|
|
||||||
|
st = response.json()["status"]
|
||||||
|
for key in ("is_running", "next_run", "last_run", "scan_in_progress"):
|
||||||
|
assert key in st
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_unauthorized(self, client):
|
||||||
|
"""GET without auth token returns 401."""
|
||||||
response = await client.get("/api/scheduler/config")
|
response = await client.get("/api/scheduler/config")
|
||||||
assert response.status_code == 401
|
assert response.status_code == 401
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_get_scheduler_config_load_failure(
|
async def test_config_load_failure_returns_500(
|
||||||
self,
|
self, authenticated_client, mock_config_service, mock_scheduler_service
|
||||||
authenticated_client,
|
|
||||||
mock_config_service
|
|
||||||
):
|
):
|
||||||
"""Test scheduler config retrieval when config loading fails."""
|
"""500 when config_service.load_config raises ConfigServiceError."""
|
||||||
from src.server.services.config_service import ConfigServiceError
|
from src.server.services.config_service import ConfigServiceError
|
||||||
|
|
||||||
mock_config_service.load_config.side_effect = ConfigServiceError(
|
mock_config_service.load_config.side_effect = ConfigServiceError("disk error")
|
||||||
"Failed to load config"
|
|
||||||
)
|
with patch("src.server.api.scheduler.get_config_service", return_value=mock_config_service), \
|
||||||
|
patch("src.server.api.scheduler.get_scheduler_service", return_value=mock_scheduler_service):
|
||||||
with patch(
|
|
||||||
'src.server.api.scheduler.get_config_service',
|
|
||||||
return_value=mock_config_service
|
|
||||||
):
|
|
||||||
response = await authenticated_client.get("/api/scheduler/config")
|
response = await authenticated_client.get("/api/scheduler/config")
|
||||||
|
|
||||||
assert response.status_code == 500
|
assert response.status_code == 500
|
||||||
assert "Failed to load scheduler configuration" in response.text
|
assert "Failed to load scheduler configuration" in response.text
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# POST /api/scheduler/config
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
class TestUpdateSchedulerConfig:
|
class TestUpdateSchedulerConfig:
|
||||||
"""Tests for POST /api/scheduler/config endpoint."""
|
"""Tests for POST /api/scheduler/config endpoint."""
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_update_scheduler_config_success(
|
async def test_update_returns_success_envelope(
|
||||||
self,
|
self, authenticated_client, mock_config_service, mock_scheduler_service
|
||||||
authenticated_client,
|
|
||||||
mock_config_service
|
|
||||||
):
|
):
|
||||||
"""Test successful scheduler configuration update."""
|
"""POST returns success envelope with saved values."""
|
||||||
new_config = {
|
payload = {
|
||||||
"enabled": False,
|
"enabled": False,
|
||||||
"interval_minutes": 120
|
"interval_minutes": 120,
|
||||||
|
"schedule_time": "06:30",
|
||||||
|
"schedule_days": ["mon", "wed", "fri"],
|
||||||
|
"auto_download_after_rescan": True,
|
||||||
}
|
}
|
||||||
|
|
||||||
with patch(
|
with patch("src.server.api.scheduler.get_config_service", return_value=mock_config_service), \
|
||||||
'src.server.api.scheduler.get_config_service',
|
patch("src.server.api.scheduler.get_scheduler_service", return_value=mock_scheduler_service):
|
||||||
return_value=mock_config_service
|
response = await authenticated_client.post("/api/scheduler/config", json=payload)
|
||||||
):
|
|
||||||
response = await authenticated_client.post(
|
assert response.status_code == 200
|
||||||
"/api/scheduler/config",
|
data = response.json()
|
||||||
json=new_config
|
assert data["success"] is True
|
||||||
)
|
assert data["config"]["enabled"] is False
|
||||||
|
assert data["config"]["schedule_time"] == "06:30"
|
||||||
assert response.status_code == 200
|
assert data["config"]["schedule_days"] == ["mon", "wed", "fri"]
|
||||||
data = response.json()
|
assert data["config"]["auto_download_after_rescan"] is True
|
||||||
assert data["enabled"] is False
|
|
||||||
assert data["interval_minutes"] == 120
|
|
||||||
|
|
||||||
mock_config_service.load_config.assert_called_once()
|
|
||||||
mock_config_service.save_config.assert_called_once()
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_update_scheduler_config_unauthorized(self, client):
|
async def test_update_persists_to_config_service(
|
||||||
"""Test scheduler config update without authentication."""
|
self, authenticated_client, mock_config_service, mock_scheduler_service
|
||||||
new_config = {
|
):
|
||||||
"enabled": False,
|
"""POST calls save_config exactly once."""
|
||||||
"interval_minutes": 120
|
payload = {"enabled": True, "interval_minutes": 30}
|
||||||
}
|
|
||||||
|
with patch("src.server.api.scheduler.get_config_service", return_value=mock_config_service), \
|
||||||
|
patch("src.server.api.scheduler.get_scheduler_service", return_value=mock_scheduler_service):
|
||||||
|
response = await authenticated_client.post("/api/scheduler/config", json=payload)
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
mock_config_service.save_config.assert_called_once()
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_reload_config_called_after_save(
|
||||||
|
self, authenticated_client, mock_config_service, mock_scheduler_service
|
||||||
|
):
|
||||||
|
"""POST calls scheduler_service.reload_config(SchedulerConfig) after save."""
|
||||||
|
payload = {"enabled": True, "schedule_time": "10:00"}
|
||||||
|
|
||||||
|
with patch("src.server.api.scheduler.get_config_service", return_value=mock_config_service), \
|
||||||
|
patch("src.server.api.scheduler.get_scheduler_service", return_value=mock_scheduler_service):
|
||||||
|
await authenticated_client.post("/api/scheduler/config", json=payload)
|
||||||
|
|
||||||
|
mock_scheduler_service.reload_config.assert_called_once()
|
||||||
|
call_arg = mock_scheduler_service.reload_config.call_args[0][0]
|
||||||
|
assert isinstance(call_arg, SchedulerConfig)
|
||||||
|
assert call_arg.schedule_time == "10:00"
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_update_unauthorized(self, client):
|
||||||
|
"""POST without auth token returns 401."""
|
||||||
response = await client.post(
|
response = await client.post(
|
||||||
"/api/scheduler/config",
|
"/api/scheduler/config",
|
||||||
json=new_config
|
json={"enabled": False, "interval_minutes": 120},
|
||||||
)
|
)
|
||||||
assert response.status_code == 401
|
assert response.status_code == 401
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_update_scheduler_config_invalid_data(
|
async def test_invalid_interval_returns_422(self, authenticated_client):
|
||||||
self,
|
"""interval_minutes < 1 triggers Pydantic validation error (422)."""
|
||||||
authenticated_client
|
|
||||||
):
|
|
||||||
"""Test scheduler config update with invalid data."""
|
|
||||||
invalid_config = {
|
|
||||||
"enabled": "not_a_boolean", # Should be boolean
|
|
||||||
"interval_minutes": -1 # Should be positive (>= 1)
|
|
||||||
}
|
|
||||||
|
|
||||||
response = await authenticated_client.post(
|
response = await authenticated_client.post(
|
||||||
"/api/scheduler/config",
|
"/api/scheduler/config",
|
||||||
json=invalid_config
|
json={"enabled": True, "interval_minutes": 0},
|
||||||
)
|
)
|
||||||
# Pydantic validation should fail with 422
|
|
||||||
assert response.status_code == 422
|
assert response.status_code == 422
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_update_scheduler_config_save_failure(
|
async def test_invalid_schedule_time_returns_422(self, authenticated_client):
|
||||||
self,
|
"""Bad schedule_time format triggers validation error (422)."""
|
||||||
authenticated_client,
|
response = await authenticated_client.post(
|
||||||
mock_config_service
|
"/api/scheduler/config",
|
||||||
):
|
json={"enabled": True, "schedule_time": "25:00"},
|
||||||
"""Test scheduler config update when save fails."""
|
|
||||||
from src.server.services.config_service import ConfigServiceError
|
|
||||||
|
|
||||||
mock_config_service.save_config.side_effect = ConfigServiceError(
|
|
||||||
"Failed to save config"
|
|
||||||
)
|
)
|
||||||
|
assert response.status_code == 422
|
||||||
new_config = {
|
|
||||||
"enabled": False,
|
|
||||||
"rescan_interval_hours": 48,
|
|
||||||
"rescan_on_startup": True
|
|
||||||
}
|
|
||||||
|
|
||||||
with patch(
|
|
||||||
'src.server.api.scheduler.get_config_service',
|
|
||||||
return_value=mock_config_service
|
|
||||||
):
|
|
||||||
response = await authenticated_client.post(
|
|
||||||
"/api/scheduler/config",
|
|
||||||
json=new_config
|
|
||||||
)
|
|
||||||
|
|
||||||
assert response.status_code == 500
|
|
||||||
assert "Failed to update scheduler configuration" in response.text
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_update_scheduler_enable_disable_toggle(
|
async def test_invalid_schedule_days_returns_422(self, authenticated_client):
|
||||||
self,
|
"""Unknown day abbreviation triggers validation error (422)."""
|
||||||
authenticated_client,
|
response = await authenticated_client.post(
|
||||||
mock_config_service
|
"/api/scheduler/config",
|
||||||
):
|
json={"enabled": True, "schedule_days": ["monday"]},
|
||||||
"""Test toggling scheduler enabled state."""
|
)
|
||||||
# First enable
|
assert response.status_code == 422
|
||||||
with patch(
|
|
||||||
'src.server.api.scheduler.get_config_service',
|
|
||||||
return_value=mock_config_service
|
|
||||||
):
|
|
||||||
response = await authenticated_client.post(
|
|
||||||
"/api/scheduler/config",
|
|
||||||
json={
|
|
||||||
"enabled": True,
|
|
||||||
"interval_minutes": 60
|
|
||||||
}
|
|
||||||
)
|
|
||||||
assert response.status_code == 200
|
|
||||||
assert response.json()["enabled"] is True
|
|
||||||
|
|
||||||
# Then disable
|
|
||||||
response = await authenticated_client.post(
|
|
||||||
"/api/scheduler/config",
|
|
||||||
json={
|
|
||||||
"enabled": False,
|
|
||||||
"interval_minutes": 60
|
|
||||||
}
|
|
||||||
)
|
|
||||||
assert response.status_code == 200
|
|
||||||
assert response.json()["enabled"] is False
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_update_scheduler_interval_validation(
|
async def test_empty_schedule_days_accepted(
|
||||||
self,
|
self, authenticated_client, mock_config_service, mock_scheduler_service
|
||||||
authenticated_client,
|
|
||||||
mock_config_service
|
|
||||||
):
|
):
|
||||||
"""Test scheduler interval value validation."""
|
"""Empty schedule_days list is valid (disables the cron job)."""
|
||||||
with patch(
|
payload = {"enabled": True, "schedule_days": []}
|
||||||
'src.server.api.scheduler.get_config_service',
|
|
||||||
return_value=mock_config_service
|
with patch("src.server.api.scheduler.get_config_service", return_value=mock_config_service), \
|
||||||
):
|
patch("src.server.api.scheduler.get_scheduler_service", return_value=mock_scheduler_service):
|
||||||
# Test minimum interval (1 minute)
|
response = await authenticated_client.post("/api/scheduler/config", json=payload)
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
assert response.json()["config"]["schedule_days"] == []
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_update_enable_disable_toggle(
|
||||||
|
self, authenticated_client, mock_config_service, mock_scheduler_service
|
||||||
|
):
|
||||||
|
"""Toggling enabled is reflected in the returned config."""
|
||||||
|
with patch("src.server.api.scheduler.get_config_service", return_value=mock_config_service), \
|
||||||
|
patch("src.server.api.scheduler.get_scheduler_service", return_value=mock_scheduler_service):
|
||||||
|
r1 = await authenticated_client.post(
|
||||||
|
"/api/scheduler/config",
|
||||||
|
json={"enabled": True, "interval_minutes": 60},
|
||||||
|
)
|
||||||
|
assert r1.json()["config"]["enabled"] is True
|
||||||
|
|
||||||
|
r2 = await authenticated_client.post(
|
||||||
|
"/api/scheduler/config",
|
||||||
|
json={"enabled": False, "interval_minutes": 60},
|
||||||
|
)
|
||||||
|
assert r2.json()["config"]["enabled"] is False
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_save_failure_returns_500(
|
||||||
|
self, authenticated_client, mock_config_service, mock_scheduler_service
|
||||||
|
):
|
||||||
|
"""500 when config_service.save_config raises ConfigServiceError."""
|
||||||
|
from src.server.services.config_service import ConfigServiceError
|
||||||
|
|
||||||
|
mock_config_service.save_config.side_effect = ConfigServiceError("disk full")
|
||||||
|
|
||||||
|
with patch("src.server.api.scheduler.get_config_service", return_value=mock_config_service), \
|
||||||
|
patch("src.server.api.scheduler.get_scheduler_service", return_value=mock_scheduler_service):
|
||||||
response = await authenticated_client.post(
|
response = await authenticated_client.post(
|
||||||
"/api/scheduler/config",
|
"/api/scheduler/config",
|
||||||
json={
|
json={"enabled": False},
|
||||||
"enabled": True,
|
|
||||||
"interval_minutes": 1
|
|
||||||
}
|
|
||||||
)
|
)
|
||||||
assert response.status_code == 200
|
|
||||||
|
assert response.status_code == 500
|
||||||
# Test large interval (7 days = 10080 minutes)
|
assert "Failed to update scheduler configuration" in response.text
|
||||||
response = await authenticated_client.post(
|
|
||||||
"/api/scheduler/config",
|
@pytest.mark.asyncio
|
||||||
json={
|
async def test_backward_compat_minimal_payload(
|
||||||
"enabled": True,
|
self, authenticated_client, mock_config_service, mock_scheduler_service
|
||||||
"interval_minutes": 10080
|
):
|
||||||
}
|
"""Payload with only legacy fields fills new fields with model defaults."""
|
||||||
)
|
payload = {"enabled": True, "interval_minutes": 60}
|
||||||
assert response.status_code == 200
|
|
||||||
|
with patch("src.server.api.scheduler.get_config_service", return_value=mock_config_service), \
|
||||||
|
patch("src.server.api.scheduler.get_scheduler_service", return_value=mock_scheduler_service):
|
||||||
|
response = await authenticated_client.post("/api/scheduler/config", json=payload)
|
||||||
|
|
||||||
|
assert response.status_code == 200
|
||||||
|
cfg = response.json()["config"]
|
||||||
|
assert cfg["schedule_time"] == "03:00"
|
||||||
|
assert cfg["auto_download_after_rescan"] is False
|
||||||
|
assert len(cfg["schedule_days"]) == 7
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_interval_boundary_values(
|
||||||
|
self, authenticated_client, mock_config_service, mock_scheduler_service
|
||||||
|
):
|
||||||
|
"""interval_minutes = 1 and 10080 (1 week) are both valid."""
|
||||||
|
with patch("src.server.api.scheduler.get_config_service", return_value=mock_config_service), \
|
||||||
|
patch("src.server.api.scheduler.get_scheduler_service", return_value=mock_scheduler_service):
|
||||||
|
for minutes in (1, 10080):
|
||||||
|
r = await authenticated_client.post(
|
||||||
|
"/api/scheduler/config",
|
||||||
|
json={"enabled": True, "interval_minutes": minutes},
|
||||||
|
)
|
||||||
|
assert r.status_code == 200
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# POST /api/scheduler/trigger-rescan
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
class TestTriggerRescan:
|
class TestTriggerRescan:
|
||||||
"""Tests for POST /api/scheduler/trigger-rescan endpoint."""
|
"""Tests for POST /api/scheduler/trigger-rescan endpoint."""
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_trigger_rescan_success(self, authenticated_client):
|
async def test_trigger_rescan_success(self, authenticated_client):
|
||||||
"""Test successful manual rescan trigger."""
|
"""Successful trigger returns 200 with a message."""
|
||||||
mock_trigger = AsyncMock(return_value={"message": "Rescan triggered"})
|
mock_trigger = AsyncMock(return_value={"message": "Rescan triggered"})
|
||||||
mock_series_app = Mock()
|
mock_series_app = Mock()
|
||||||
|
|
||||||
with patch(
|
with patch("src.server.utils.dependencies.get_series_app", return_value=mock_series_app), \
|
||||||
'src.server.utils.dependencies.get_series_app',
|
patch("src.server.api.anime.trigger_rescan", mock_trigger):
|
||||||
return_value=mock_series_app
|
response = await authenticated_client.post("/api/scheduler/trigger-rescan")
|
||||||
), patch(
|
|
||||||
'src.server.api.anime.trigger_rescan',
|
assert response.status_code == 200
|
||||||
mock_trigger
|
assert "message" in response.json()
|
||||||
):
|
mock_trigger.assert_called_once()
|
||||||
response = await authenticated_client.post(
|
|
||||||
"/api/scheduler/trigger-rescan"
|
|
||||||
)
|
|
||||||
|
|
||||||
assert response.status_code == 200
|
|
||||||
data = response.json()
|
|
||||||
assert "message" in data
|
|
||||||
mock_trigger.assert_called_once()
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_trigger_rescan_unauthorized(self, client):
|
async def test_trigger_rescan_unauthorized(self, client):
|
||||||
"""Test manual rescan trigger without authentication."""
|
"""Trigger without auth token returns 401."""
|
||||||
response = await client.post("/api/scheduler/trigger-rescan")
|
response = await client.post("/api/scheduler/trigger-rescan")
|
||||||
assert response.status_code == 401
|
assert response.status_code == 401
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_trigger_rescan_series_app_not_initialized(
|
async def test_trigger_rescan_series_app_not_initialized(
|
||||||
self,
|
self, authenticated_client
|
||||||
authenticated_client
|
|
||||||
):
|
):
|
||||||
"""Test manual rescan trigger when SeriesApp not initialized."""
|
"""503 when SeriesApp is not yet initialised."""
|
||||||
with patch(
|
with patch("src.server.utils.dependencies.get_series_app", return_value=None):
|
||||||
'src.server.utils.dependencies.get_series_app',
|
response = await authenticated_client.post("/api/scheduler/trigger-rescan")
|
||||||
return_value=None
|
|
||||||
):
|
assert response.status_code == 503
|
||||||
response = await authenticated_client.post(
|
assert "SeriesApp not initialized" in response.text
|
||||||
"/api/scheduler/trigger-rescan"
|
|
||||||
)
|
|
||||||
|
|
||||||
assert response.status_code == 503
|
|
||||||
assert "SeriesApp not initialized" in response.text
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_trigger_rescan_failure(self, authenticated_client):
|
async def test_trigger_rescan_failure(self, authenticated_client):
|
||||||
"""Test manual rescan trigger when rescan fails."""
|
"""500 when underlying rescan call raises an exception."""
|
||||||
mock_trigger = AsyncMock(
|
mock_trigger = AsyncMock(side_effect=Exception("Rescan failed"))
|
||||||
side_effect=Exception("Rescan failed")
|
|
||||||
)
|
|
||||||
mock_series_app = Mock()
|
mock_series_app = Mock()
|
||||||
|
|
||||||
with patch(
|
with patch("src.server.utils.dependencies.get_series_app", return_value=mock_series_app), \
|
||||||
'src.server.utils.dependencies.get_series_app',
|
patch("src.server.api.anime.trigger_rescan", mock_trigger):
|
||||||
return_value=mock_series_app
|
response = await authenticated_client.post("/api/scheduler/trigger-rescan")
|
||||||
), patch(
|
|
||||||
'src.server.api.anime.trigger_rescan',
|
assert response.status_code == 500
|
||||||
mock_trigger
|
assert "Failed to trigger rescan" in response.text
|
||||||
):
|
|
||||||
response = await authenticated_client.post(
|
|
||||||
"/api/scheduler/trigger-rescan"
|
|
||||||
)
|
|
||||||
|
|
||||||
assert response.status_code == 500
|
|
||||||
assert "Failed to trigger rescan" in response.text
|
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Multi-step integration tests
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
class TestSchedulerEndpointsIntegration:
|
class TestSchedulerEndpointsIntegration:
|
||||||
"""Integration tests for scheduler endpoints."""
|
"""Multi-step integration tests for scheduler endpoints."""
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_full_config_workflow(
|
async def test_full_config_workflow(
|
||||||
self,
|
self, authenticated_client, mock_config_service, mock_scheduler_service
|
||||||
authenticated_client,
|
|
||||||
mock_config_service
|
|
||||||
):
|
):
|
||||||
"""Test complete workflow: get config, update, get again."""
|
"""GET → POST → verify save called and response consistent."""
|
||||||
with patch(
|
with patch("src.server.api.scheduler.get_config_service", return_value=mock_config_service), \
|
||||||
'src.server.api.scheduler.get_config_service',
|
patch("src.server.api.scheduler.get_scheduler_service", return_value=mock_scheduler_service):
|
||||||
return_value=mock_config_service
|
r = await authenticated_client.get("/api/scheduler/config")
|
||||||
):
|
assert r.status_code == 200
|
||||||
# Get initial config
|
assert r.json()["config"]["enabled"] is True
|
||||||
response = await authenticated_client.get("/api/scheduler/config")
|
|
||||||
assert response.status_code == 200
|
r = await authenticated_client.post(
|
||||||
initial_config = response.json()
|
|
||||||
assert initial_config["enabled"] is True
|
|
||||||
|
|
||||||
# Update config
|
|
||||||
new_config = {
|
|
||||||
"enabled": False,
|
|
||||||
"interval_minutes": 30
|
|
||||||
}
|
|
||||||
response = await authenticated_client.post(
|
|
||||||
"/api/scheduler/config",
|
"/api/scheduler/config",
|
||||||
json=new_config
|
json={
|
||||||
|
"enabled": False,
|
||||||
|
"interval_minutes": 30,
|
||||||
|
"schedule_time": "12:00",
|
||||||
|
"schedule_days": ["mon", "fri"],
|
||||||
|
},
|
||||||
)
|
)
|
||||||
assert response.status_code == 200
|
assert r.status_code == 200
|
||||||
updated_config = response.json()
|
cfg = r.json()["config"]
|
||||||
assert updated_config["enabled"] is False
|
assert cfg["enabled"] is False
|
||||||
assert updated_config["interval_minutes"] == 30
|
assert cfg["interval_minutes"] == 30
|
||||||
|
assert cfg["schedule_time"] == "12:00"
|
||||||
# Verify config persisted
|
assert cfg["schedule_days"] == ["mon", "fri"]
|
||||||
mock_config_service.save_config.assert_called_once()
|
|
||||||
|
mock_config_service.save_config.assert_called_once()
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_trigger_rescan_after_config_update(
|
async def test_trigger_rescan_after_config_update(
|
||||||
self,
|
self, authenticated_client, mock_config_service, mock_scheduler_service
|
||||||
authenticated_client,
|
|
||||||
mock_config_service
|
|
||||||
):
|
):
|
||||||
"""Test triggering rescan after updating config."""
|
"""POST config then POST trigger-rescan both succeed."""
|
||||||
mock_trigger = AsyncMock(return_value={"message": "Rescan triggered"})
|
mock_trigger = AsyncMock(return_value={"message": "Rescan triggered"})
|
||||||
mock_series_app = Mock()
|
mock_series_app = Mock()
|
||||||
|
|
||||||
with patch(
|
with patch("src.server.api.scheduler.get_config_service", return_value=mock_config_service), \
|
||||||
'src.server.api.scheduler.get_config_service',
|
patch("src.server.api.scheduler.get_scheduler_service", return_value=mock_scheduler_service), \
|
||||||
return_value=mock_config_service
|
patch("src.server.utils.dependencies.get_series_app", return_value=mock_series_app), \
|
||||||
), patch(
|
patch("src.server.api.anime.trigger_rescan", mock_trigger):
|
||||||
'src.server.utils.dependencies.get_series_app',
|
r = await authenticated_client.post(
|
||||||
return_value=mock_series_app
|
|
||||||
), patch(
|
|
||||||
'src.server.api.anime.trigger_rescan',
|
|
||||||
mock_trigger
|
|
||||||
):
|
|
||||||
# Update config to enable scheduler
|
|
||||||
response = await authenticated_client.post(
|
|
||||||
"/api/scheduler/config",
|
"/api/scheduler/config",
|
||||||
json={
|
json={"enabled": True, "interval_minutes": 360},
|
||||||
"enabled": True,
|
|
||||||
"interval_minutes": 360
|
|
||||||
}
|
|
||||||
)
|
)
|
||||||
assert response.status_code == 200
|
assert r.status_code == 200
|
||||||
|
|
||||||
# Trigger manual rescan
|
r = await authenticated_client.post("/api/scheduler/trigger-rescan")
|
||||||
response = await authenticated_client.post(
|
assert r.status_code == 200
|
||||||
"/api/scheduler/trigger-rescan"
|
|
||||||
)
|
|
||||||
assert response.status_code == 200
|
|
||||||
mock_trigger.assert_called_once()
|
mock_trigger.assert_called_once()
|
||||||
|
|||||||
@@ -1,10 +1,8 @@
|
|||||||
"""Integration tests for scheduler workflow.
|
"""Integration tests for scheduler workflow.
|
||||||
|
|
||||||
This module tests end-to-end scheduler workflows including:
|
Tests end-to-end scheduler workflows with the APScheduler-based
|
||||||
- Scheduler trigger → library rescan → database update workflow
|
SchedulerService, covering lifecycle, manual triggers, config reloading,
|
||||||
- Configuration changes apply immediately
|
WebSocket broadcasting, auto-download, and concurrency protection.
|
||||||
- Scheduler persistence after application restart
|
|
||||||
- Concurrent manual and automated scan handling
|
|
||||||
"""
|
"""
|
||||||
import asyncio
|
import asyncio
|
||||||
from datetime import datetime, timezone
|
from datetime import datetime, timezone
|
||||||
@@ -15,499 +13,511 @@ import pytest
|
|||||||
from src.server.models.config import AppConfig, SchedulerConfig
|
from src.server.models.config import AppConfig, SchedulerConfig
|
||||||
from src.server.services.scheduler_service import (
|
from src.server.services.scheduler_service import (
|
||||||
SchedulerService,
|
SchedulerService,
|
||||||
|
SchedulerServiceError,
|
||||||
|
_JOB_ID,
|
||||||
get_scheduler_service,
|
get_scheduler_service,
|
||||||
reset_scheduler_service,
|
reset_scheduler_service,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Shared fixtures
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def mock_config_service():
|
def mock_config_service():
|
||||||
"""Create a mock configuration service."""
|
"""Patch get_config_service used by SchedulerService.start()."""
|
||||||
with patch("src.server.services.scheduler_service.get_config_service") as mock:
|
with patch("src.server.services.scheduler_service.get_config_service") as mock:
|
||||||
config_service = Mock()
|
config_service = Mock()
|
||||||
|
|
||||||
# Default configuration
|
|
||||||
app_config = AppConfig(
|
app_config = AppConfig(
|
||||||
scheduler=SchedulerConfig(
|
scheduler=SchedulerConfig(
|
||||||
enabled=True,
|
enabled=True,
|
||||||
interval_minutes=1 # Short interval for testing
|
schedule_time="03:00",
|
||||||
|
schedule_days=["mon", "tue", "wed", "thu", "fri", "sat", "sun"],
|
||||||
|
auto_download_after_rescan=False,
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
config_service.load_config.return_value = app_config
|
config_service.load_config.return_value = app_config
|
||||||
config_service.update_config = Mock()
|
|
||||||
|
|
||||||
mock.return_value = config_service
|
mock.return_value = config_service
|
||||||
yield config_service
|
yield config_service
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def mock_anime_service():
|
def mock_anime_service():
|
||||||
"""Create a mock anime service that simulates database updates."""
|
"""Patch get_anime_service used inside _perform_rescan."""
|
||||||
with patch("src.server.utils.dependencies.get_anime_service") as mock:
|
with patch("src.server.utils.dependencies.get_anime_service") as mock:
|
||||||
service = Mock()
|
service = Mock()
|
||||||
service.rescan = AsyncMock()
|
service.rescan = AsyncMock()
|
||||||
service.series_list = []
|
|
||||||
|
|
||||||
# Simulate database update during rescan
|
|
||||||
async def rescan_side_effect():
|
|
||||||
# Simulate finding new series
|
|
||||||
service.series_list = [
|
|
||||||
{"key": "series1", "name": "New Series 1"},
|
|
||||||
{"key": "series2", "name": "New Series 2"}
|
|
||||||
]
|
|
||||||
await asyncio.sleep(0.1) # Simulate work
|
|
||||||
|
|
||||||
service.rescan.side_effect = rescan_side_effect
|
|
||||||
mock.return_value = service
|
mock.return_value = service
|
||||||
yield service
|
yield service
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def mock_websocket_service():
|
def mock_websocket_service():
|
||||||
"""Create a mock WebSocket service that tracks broadcasts."""
|
"""Patch get_websocket_service to capture broadcasts."""
|
||||||
with patch("src.server.services.websocket_service.get_websocket_service") as mock:
|
with patch("src.server.services.websocket_service.get_websocket_service") as mock:
|
||||||
service = Mock()
|
service = Mock()
|
||||||
service.manager = Mock()
|
service.manager = Mock()
|
||||||
service.broadcasts = [] # Track all broadcasts
|
service.broadcasts = []
|
||||||
|
|
||||||
async def broadcast_side_effect(message):
|
async def broadcast_side_effect(message):
|
||||||
service.broadcasts.append(message)
|
service.broadcasts.append(message)
|
||||||
|
|
||||||
service.manager.broadcast = AsyncMock(side_effect=broadcast_side_effect)
|
service.manager.broadcast = AsyncMock(side_effect=broadcast_side_effect)
|
||||||
mock.return_value = service
|
mock.return_value = service
|
||||||
yield service
|
yield service
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
async def scheduler_service():
|
async def scheduler_service(mock_config_service):
|
||||||
"""Create a fresh scheduler service instance for each test."""
|
"""Fresh SchedulerService instance; stopped automatically after each test."""
|
||||||
reset_scheduler_service()
|
reset_scheduler_service()
|
||||||
service = SchedulerService()
|
svc = SchedulerService()
|
||||||
yield service
|
yield svc
|
||||||
# Cleanup
|
if svc._is_running:
|
||||||
if service._is_running:
|
await svc.stop()
|
||||||
await service.stop()
|
|
||||||
|
|
||||||
|
|
||||||
class TestSchedulerWorkflow:
|
# ---------------------------------------------------------------------------
|
||||||
"""Tests for end-to-end scheduler workflows."""
|
# TestSchedulerLifecycle
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
class TestSchedulerLifecycle:
|
||||||
|
"""Tests for SchedulerService start/stop lifecycle."""
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_scheduled_rescan_updates_database(
|
async def test_start_sets_is_running(self, scheduler_service):
|
||||||
self,
|
"""start() sets _is_running to True."""
|
||||||
scheduler_service,
|
|
||||||
mock_config_service,
|
|
||||||
mock_anime_service,
|
|
||||||
mock_websocket_service
|
|
||||||
):
|
|
||||||
"""Test that scheduled rescan updates the database with new series."""
|
|
||||||
# Start scheduler
|
|
||||||
await scheduler_service.start()
|
|
||||||
|
|
||||||
# Wait for at least one scan cycle (1 minute + buffer)
|
|
||||||
await asyncio.sleep(65)
|
|
||||||
|
|
||||||
# Verify database was updated
|
|
||||||
assert mock_anime_service.rescan.call_count >= 1
|
|
||||||
assert len(mock_anime_service.series_list) == 2
|
|
||||||
|
|
||||||
# Verify WebSocket notifications were sent
|
|
||||||
assert len(mock_websocket_service.broadcasts) >= 2
|
|
||||||
|
|
||||||
# Check for rescan events
|
|
||||||
event_types = [b["type"] for b in mock_websocket_service.broadcasts]
|
|
||||||
assert "scheduled_rescan_started" in event_types
|
|
||||||
assert "scheduled_rescan_completed" in event_types
|
|
||||||
|
|
||||||
# Cleanup
|
|
||||||
await scheduler_service.stop()
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_configuration_change_applies_immediately(
|
|
||||||
self,
|
|
||||||
scheduler_service,
|
|
||||||
mock_config_service,
|
|
||||||
mock_anime_service,
|
|
||||||
mock_websocket_service
|
|
||||||
):
|
|
||||||
"""Test that configuration changes are applied immediately."""
|
|
||||||
# Start with 1 minute interval
|
|
||||||
await scheduler_service.start()
|
|
||||||
original_interval = scheduler_service._config.interval_minutes
|
|
||||||
assert original_interval == 1
|
|
||||||
|
|
||||||
# Change interval to 2 minutes
|
|
||||||
new_config = AppConfig(
|
|
||||||
scheduler=SchedulerConfig(
|
|
||||||
enabled=True,
|
|
||||||
interval_minutes=2
|
|
||||||
)
|
|
||||||
)
|
|
||||||
mock_config_service.load_config.return_value = new_config
|
|
||||||
|
|
||||||
# Reload configuration
|
|
||||||
await scheduler_service.reload_config()
|
|
||||||
|
|
||||||
# Verify new interval is applied
|
|
||||||
assert scheduler_service._config.interval_minutes == 2
|
|
||||||
assert scheduler_service._is_running is True # Should still be running
|
|
||||||
|
|
||||||
# Cleanup
|
|
||||||
await scheduler_service.stop()
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_disable_scheduler_stops_execution(
|
|
||||||
self,
|
|
||||||
scheduler_service,
|
|
||||||
mock_config_service,
|
|
||||||
mock_anime_service,
|
|
||||||
mock_websocket_service
|
|
||||||
):
|
|
||||||
"""Test that disabling scheduler stops future rescans."""
|
|
||||||
# Start scheduler
|
|
||||||
await scheduler_service.start()
|
await scheduler_service.start()
|
||||||
assert scheduler_service._is_running is True
|
assert scheduler_service._is_running is True
|
||||||
|
|
||||||
# Wait for one scan to complete
|
@pytest.mark.asyncio
|
||||||
await asyncio.sleep(65)
|
async def test_stop_clears_is_running(self, scheduler_service):
|
||||||
initial_scan_count = mock_anime_service.rescan.call_count
|
"""stop() sets _is_running to False."""
|
||||||
assert initial_scan_count >= 1
|
await scheduler_service.start()
|
||||||
|
await scheduler_service.stop()
|
||||||
# Disable scheduler
|
|
||||||
disabled_config = AppConfig(
|
|
||||||
scheduler=SchedulerConfig(
|
|
||||||
enabled=False,
|
|
||||||
interval_minutes=1
|
|
||||||
)
|
|
||||||
)
|
|
||||||
mock_config_service.load_config.return_value = disabled_config
|
|
||||||
await scheduler_service.reload_config()
|
|
||||||
|
|
||||||
# Verify scheduler stopped
|
|
||||||
assert scheduler_service._is_running is False
|
assert scheduler_service._is_running is False
|
||||||
|
|
||||||
# Wait another scan cycle
|
|
||||||
await asyncio.sleep(65)
|
|
||||||
|
|
||||||
# Verify no additional scans occurred
|
|
||||||
assert mock_anime_service.rescan.call_count == initial_scan_count
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_manual_scan_blocks_scheduled_scan(
|
async def test_start_twice_raises(self, scheduler_service):
|
||||||
self,
|
"""Calling start() when already running raises SchedulerServiceError."""
|
||||||
scheduler_service,
|
|
||||||
mock_config_service,
|
|
||||||
mock_anime_service,
|
|
||||||
mock_websocket_service
|
|
||||||
):
|
|
||||||
"""Test that manual scan prevents concurrent scheduled scan."""
|
|
||||||
await scheduler_service.start()
|
await scheduler_service.start()
|
||||||
|
with pytest.raises(SchedulerServiceError, match="already running"):
|
||||||
# Make rescan slow to simulate long-running operation
|
await scheduler_service.start()
|
||||||
async def slow_rescan():
|
|
||||||
await asyncio.sleep(2)
|
|
||||||
|
|
||||||
mock_anime_service.rescan.side_effect = slow_rescan
|
|
||||||
|
|
||||||
# Trigger manual scan
|
|
||||||
task1 = asyncio.create_task(scheduler_service._perform_rescan())
|
|
||||||
|
|
||||||
# Wait a bit to ensure manual scan is in progress
|
|
||||||
await asyncio.sleep(0.5)
|
|
||||||
assert scheduler_service._scan_in_progress is True
|
|
||||||
|
|
||||||
# Try to trigger another scan (simulating scheduled trigger)
|
|
||||||
result = await scheduler_service.trigger_rescan()
|
|
||||||
|
|
||||||
# Second scan should be blocked
|
|
||||||
assert result is False
|
|
||||||
|
|
||||||
# Wait for first scan to complete
|
|
||||||
await task1
|
|
||||||
|
|
||||||
# Verify only one scan executed
|
|
||||||
assert mock_anime_service.rescan.call_count == 1
|
|
||||||
|
|
||||||
# Cleanup
|
|
||||||
await scheduler_service.stop()
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_scheduler_state_persists_across_restart(
|
async def test_stop_when_not_running_is_noop(self, scheduler_service):
|
||||||
self,
|
"""stop() when not started does not raise."""
|
||||||
mock_config_service,
|
await scheduler_service.stop() # should not raise
|
||||||
mock_anime_service,
|
assert scheduler_service._is_running is False
|
||||||
mock_websocket_service
|
|
||||||
):
|
@pytest.mark.asyncio
|
||||||
"""Test that scheduler can restart with same configuration."""
|
async def test_start_loads_config(self, scheduler_service, mock_config_service):
|
||||||
# Create and start first scheduler instance
|
"""start() loads configuration via config_service."""
|
||||||
|
await scheduler_service.start()
|
||||||
|
mock_config_service.load_config.assert_called_once()
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_start_disabled_scheduler_no_job(self, mock_config_service):
|
||||||
|
"""Disabled scheduler starts but does not add an APScheduler job."""
|
||||||
|
mock_config_service.load_config.return_value = AppConfig(
|
||||||
|
scheduler=SchedulerConfig(enabled=False)
|
||||||
|
)
|
||||||
reset_scheduler_service()
|
reset_scheduler_service()
|
||||||
scheduler1 = SchedulerService()
|
svc = SchedulerService()
|
||||||
await scheduler1.start()
|
await svc.start()
|
||||||
|
assert svc._is_running is True
|
||||||
# Record configuration
|
# No job should be registered
|
||||||
original_config = scheduler1._config
|
assert svc._scheduler.get_job(_JOB_ID) is None
|
||||||
assert scheduler1._is_running is True
|
await svc.stop()
|
||||||
|
|
||||||
# Stop scheduler (simulating app shutdown)
|
|
||||||
await scheduler1.stop()
|
|
||||||
assert scheduler1._is_running is False
|
|
||||||
|
|
||||||
# Create new scheduler instance (simulating app restart)
|
|
||||||
reset_scheduler_service()
|
|
||||||
scheduler2 = SchedulerService()
|
|
||||||
|
|
||||||
# Start new scheduler with same configuration
|
|
||||||
await scheduler2.start()
|
|
||||||
|
|
||||||
# Verify it has same configuration and is running
|
|
||||||
assert scheduler2._is_running is True
|
|
||||||
assert scheduler2._config.enabled == original_config.enabled
|
|
||||||
assert scheduler2._config.interval_minutes == original_config.interval_minutes
|
|
||||||
|
|
||||||
# Cleanup
|
|
||||||
await scheduler2.stop()
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_scheduler_recovers_from_rescan_failure(
|
async def test_start_registers_apscheduler_job(self, scheduler_service):
|
||||||
self,
|
"""Enabled scheduler registers a job with _JOB_ID."""
|
||||||
scheduler_service,
|
await scheduler_service.start()
|
||||||
mock_config_service,
|
job = scheduler_service._scheduler.get_job(_JOB_ID)
|
||||||
mock_anime_service,
|
assert job is not None
|
||||||
mock_websocket_service
|
|
||||||
):
|
@pytest.mark.asyncio
|
||||||
"""Test that scheduler continues after rescan failure."""
|
async def test_restart_after_stop(self, scheduler_service):
|
||||||
# Make first rescan fail, subsequent rescans succeed
|
"""Service can be started again after being stopped."""
|
||||||
call_count = {"count": 0}
|
|
||||||
|
|
||||||
async def failing_rescan():
|
|
||||||
call_count["count"] += 1
|
|
||||||
if call_count["count"] == 1:
|
|
||||||
raise Exception("Database connection error")
|
|
||||||
# Subsequent calls succeed
|
|
||||||
|
|
||||||
mock_anime_service.rescan.side_effect = failing_rescan
|
|
||||||
|
|
||||||
await scheduler_service.start()
|
await scheduler_service.start()
|
||||||
|
|
||||||
# Wait for multiple scan cycles (2 minutes + buffer)
|
|
||||||
await asyncio.sleep(130)
|
|
||||||
|
|
||||||
# Verify multiple scans were attempted despite failure
|
|
||||||
assert mock_anime_service.rescan.call_count >= 2
|
|
||||||
|
|
||||||
# Verify error was broadcast
|
|
||||||
error_broadcasts = [
|
|
||||||
b for b in mock_websocket_service.broadcasts
|
|
||||||
if b.get("type") == "scheduled_rescan_error"
|
|
||||||
]
|
|
||||||
assert len(error_broadcasts) >= 1
|
|
||||||
|
|
||||||
# Cleanup
|
|
||||||
await scheduler_service.stop()
|
await scheduler_service.stop()
|
||||||
|
await scheduler_service.start()
|
||||||
|
assert scheduler_service._is_running is True
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# TestSchedulerTriggerRescan
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
class TestSchedulerTriggerRescan:
|
||||||
|
"""Tests for manual trigger_rescan workflow."""
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_full_workflow_trigger_rescan_update_notify(
|
async def test_trigger_rescan_calls_anime_service(
|
||||||
self,
|
self, scheduler_service, mock_anime_service, mock_websocket_service
|
||||||
scheduler_service,
|
|
||||||
mock_config_service,
|
|
||||||
mock_anime_service,
|
|
||||||
mock_websocket_service
|
|
||||||
):
|
):
|
||||||
"""Test complete workflow: trigger → rescan → update → notify."""
|
"""trigger_rescan() calls anime_service.rescan()."""
|
||||||
await scheduler_service.start()
|
await scheduler_service.start()
|
||||||
|
|
||||||
# Trigger manual rescan
|
|
||||||
result = await scheduler_service.trigger_rescan()
|
result = await scheduler_service.trigger_rescan()
|
||||||
assert result is True
|
assert result is True
|
||||||
|
mock_anime_service.rescan.assert_called_once()
|
||||||
# Verify workflow steps
|
|
||||||
# 1. Rescan was performed
|
@pytest.mark.asyncio
|
||||||
assert mock_anime_service.rescan.call_count == 1
|
async def test_trigger_rescan_records_last_run(
|
||||||
|
self, scheduler_service, mock_anime_service, mock_websocket_service
|
||||||
# 2. Database was updated with new series
|
):
|
||||||
assert len(mock_anime_service.series_list) == 2
|
"""trigger_rescan() updates _last_scan_time."""
|
||||||
|
await scheduler_service.start()
|
||||||
# 3. WebSocket notifications were sent
|
await scheduler_service.trigger_rescan()
|
||||||
assert len(mock_websocket_service.broadcasts) >= 2
|
|
||||||
|
|
||||||
# 4. Verify event sequence
|
|
||||||
event_types = [b["type"] for b in mock_websocket_service.broadcasts]
|
|
||||||
start_index = event_types.index("scheduled_rescan_started")
|
|
||||||
complete_index = event_types.index("scheduled_rescan_completed")
|
|
||||||
assert complete_index > start_index # Complete comes after start
|
|
||||||
|
|
||||||
# 5. Verify scan time was recorded
|
|
||||||
assert scheduler_service._last_scan_time is not None
|
assert scheduler_service._last_scan_time is not None
|
||||||
assert isinstance(scheduler_service._last_scan_time, datetime)
|
assert isinstance(scheduler_service._last_scan_time, datetime)
|
||||||
|
|
||||||
# 6. Scan is no longer in progress
|
@pytest.mark.asyncio
|
||||||
|
async def test_trigger_rescan_when_not_running_raises(self, scheduler_service):
|
||||||
|
"""trigger_rescan() without start() raises SchedulerServiceError."""
|
||||||
|
with pytest.raises(SchedulerServiceError, match="not running"):
|
||||||
|
await scheduler_service.trigger_rescan()
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_trigger_rescan_blocked_during_scan(
|
||||||
|
self, scheduler_service, mock_anime_service, mock_websocket_service
|
||||||
|
):
|
||||||
|
"""Second trigger_rescan() returns False while a scan is in progress."""
|
||||||
|
async def slow_rescan():
|
||||||
|
await asyncio.sleep(0.3)
|
||||||
|
|
||||||
|
mock_anime_service.rescan.side_effect = slow_rescan
|
||||||
|
await scheduler_service.start()
|
||||||
|
|
||||||
|
task = asyncio.create_task(scheduler_service._perform_rescan())
|
||||||
|
await asyncio.sleep(0.05)
|
||||||
|
assert scheduler_service._scan_in_progress is True
|
||||||
|
|
||||||
|
result = await scheduler_service.trigger_rescan()
|
||||||
|
assert result is False
|
||||||
|
|
||||||
|
await task
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_trigger_rescan_scan_in_progress_false_after_completion(
|
||||||
|
self, scheduler_service, mock_anime_service, mock_websocket_service
|
||||||
|
):
|
||||||
|
"""scan_in_progress returns to False after trigger_rescan completes."""
|
||||||
|
await scheduler_service.start()
|
||||||
|
await scheduler_service.trigger_rescan()
|
||||||
assert scheduler_service._scan_in_progress is False
|
assert scheduler_service._scan_in_progress is False
|
||||||
|
|
||||||
# Cleanup
|
|
||||||
await scheduler_service.stop()
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_multiple_sequential_rescans(
|
async def test_multiple_sequential_rescans(
|
||||||
self,
|
self, scheduler_service, mock_anime_service, mock_websocket_service
|
||||||
scheduler_service,
|
|
||||||
mock_config_service,
|
|
||||||
mock_anime_service,
|
|
||||||
mock_websocket_service
|
|
||||||
):
|
):
|
||||||
"""Test multiple sequential rescans execute successfully."""
|
"""Three sequential manual rescans all execute successfully."""
|
||||||
await scheduler_service.start()
|
await scheduler_service.start()
|
||||||
|
for _ in range(3):
|
||||||
# Trigger 3 manual rescans sequentially
|
|
||||||
for i in range(3):
|
|
||||||
result = await scheduler_service.trigger_rescan()
|
result = await scheduler_service.trigger_rescan()
|
||||||
assert result is True
|
assert result is True
|
||||||
# Small delay between rescans
|
|
||||||
await asyncio.sleep(0.1)
|
|
||||||
|
|
||||||
# Verify all 3 rescans executed
|
|
||||||
assert mock_anime_service.rescan.call_count == 3
|
assert mock_anime_service.rescan.call_count == 3
|
||||||
|
|
||||||
# Verify 6 WebSocket broadcasts (start + complete for each scan)
|
|
||||||
assert len(mock_websocket_service.broadcasts) >= 6
|
# ---------------------------------------------------------------------------
|
||||||
|
# TestSchedulerWebSocketBroadcasts
|
||||||
# Cleanup
|
# ---------------------------------------------------------------------------
|
||||||
await scheduler_service.stop()
|
|
||||||
|
class TestSchedulerWebSocketBroadcasts:
|
||||||
|
"""Tests for WebSocket event emission during rescan."""
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_scheduler_status_accuracy_during_workflow(
|
async def test_rescan_broadcasts_started_event(
|
||||||
self,
|
self, scheduler_service, mock_anime_service, mock_websocket_service
|
||||||
scheduler_service,
|
|
||||||
mock_config_service,
|
|
||||||
mock_anime_service,
|
|
||||||
mock_websocket_service
|
|
||||||
):
|
):
|
||||||
"""Test that status accurately reflects scheduler state during workflow."""
|
"""_perform_rescan() broadcasts 'scheduled_rescan_started'."""
|
||||||
# Initial status
|
await scheduler_service.start()
|
||||||
|
await scheduler_service.trigger_rescan()
|
||||||
|
|
||||||
|
event_types = [b["type"] for b in mock_websocket_service.broadcasts]
|
||||||
|
assert "scheduled_rescan_started" in event_types
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_rescan_broadcasts_completed_event(
|
||||||
|
self, scheduler_service, mock_anime_service, mock_websocket_service
|
||||||
|
):
|
||||||
|
"""_perform_rescan() broadcasts 'scheduled_rescan_completed'."""
|
||||||
|
await scheduler_service.start()
|
||||||
|
await scheduler_service.trigger_rescan()
|
||||||
|
|
||||||
|
event_types = [b["type"] for b in mock_websocket_service.broadcasts]
|
||||||
|
assert "scheduled_rescan_completed" in event_types
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_rescan_broadcasts_error_on_failure(
|
||||||
|
self, scheduler_service, mock_anime_service, mock_websocket_service
|
||||||
|
):
|
||||||
|
"""_perform_rescan() broadcasts 'scheduled_rescan_error' when rescan raises."""
|
||||||
|
mock_anime_service.rescan.side_effect = RuntimeError("DB failure")
|
||||||
|
await scheduler_service.start()
|
||||||
|
await scheduler_service._perform_rescan()
|
||||||
|
|
||||||
|
error_events = [
|
||||||
|
b for b in mock_websocket_service.broadcasts
|
||||||
|
if b["type"] == "scheduled_rescan_error"
|
||||||
|
]
|
||||||
|
assert len(error_events) >= 1
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_rescan_completed_event_order(
|
||||||
|
self, scheduler_service, mock_anime_service, mock_websocket_service
|
||||||
|
):
|
||||||
|
"""'started' event precedes 'completed' event in broadcast sequence."""
|
||||||
|
await scheduler_service.start()
|
||||||
|
await scheduler_service.trigger_rescan()
|
||||||
|
|
||||||
|
types = [b["type"] for b in mock_websocket_service.broadcasts]
|
||||||
|
started_idx = types.index("scheduled_rescan_started")
|
||||||
|
completed_idx = types.index("scheduled_rescan_completed")
|
||||||
|
assert completed_idx > started_idx
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# TestSchedulerGetStatus
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
class TestSchedulerGetStatus:
|
||||||
|
"""Tests for get_status() accuracy."""
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_status_not_running_before_start(self, scheduler_service):
|
||||||
|
"""is_running is False before start()."""
|
||||||
status = scheduler_service.get_status()
|
status = scheduler_service.get_status()
|
||||||
assert status["is_running"] is False
|
assert status["is_running"] is False
|
||||||
assert status["scan_in_progress"] is False
|
assert status["scan_in_progress"] is False
|
||||||
|
|
||||||
# Start scheduler
|
@pytest.mark.asyncio
|
||||||
|
async def test_status_is_running_after_start(self, scheduler_service):
|
||||||
|
"""is_running is True after start()."""
|
||||||
await scheduler_service.start()
|
await scheduler_service.start()
|
||||||
status = scheduler_service.get_status()
|
status = scheduler_service.get_status()
|
||||||
assert status["is_running"] is True
|
assert status["is_running"] is True
|
||||||
assert status["enabled"] is True
|
assert status["enabled"] is True
|
||||||
assert status["interval_minutes"] == 1
|
|
||||||
|
|
||||||
# Make rescan slow to check in-progress status
|
|
||||||
async def slow_rescan():
|
|
||||||
await asyncio.sleep(0.5)
|
|
||||||
|
|
||||||
mock_anime_service.rescan.side_effect = slow_rescan
|
|
||||||
|
|
||||||
# Start rescan
|
|
||||||
task = asyncio.create_task(scheduler_service._perform_rescan())
|
|
||||||
|
|
||||||
# Check status during rescan
|
|
||||||
await asyncio.sleep(0.1)
|
|
||||||
status = scheduler_service.get_status()
|
|
||||||
assert status["scan_in_progress"] is True
|
|
||||||
|
|
||||||
# Wait for rescan to complete
|
|
||||||
await task
|
|
||||||
|
|
||||||
# Check status after rescan
|
|
||||||
status = scheduler_service.get_status()
|
|
||||||
assert status["scan_in_progress"] is False
|
|
||||||
assert status["last_scan_time"] is not None
|
|
||||||
|
|
||||||
# Cleanup
|
|
||||||
await scheduler_service.stop()
|
|
||||||
|
|
||||||
# Final status
|
|
||||||
status = scheduler_service.get_status()
|
|
||||||
assert status["is_running"] is False
|
|
||||||
|
|
||||||
|
|
||||||
class TestSchedulerEdgeCases:
|
|
||||||
"""Tests for edge cases in scheduler workflows."""
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
@pytest.mark.asyncio
|
||||||
async def test_rapid_enable_disable_cycles(
|
async def test_status_last_run_populated_after_rescan(
|
||||||
self,
|
self, scheduler_service, mock_anime_service, mock_websocket_service
|
||||||
mock_config_service,
|
|
||||||
mock_anime_service,
|
|
||||||
mock_websocket_service
|
|
||||||
):
|
):
|
||||||
"""Test rapid enable/disable cycles don't cause issues."""
|
"""last_run is not None after a successful rescan."""
|
||||||
reset_scheduler_service()
|
|
||||||
scheduler = SchedulerService()
|
|
||||||
|
|
||||||
# Rapidly enable and disable 5 times
|
|
||||||
for i in range(5):
|
|
||||||
enabled_config = AppConfig(
|
|
||||||
scheduler=SchedulerConfig(
|
|
||||||
enabled=True,
|
|
||||||
interval_minutes=1
|
|
||||||
)
|
|
||||||
)
|
|
||||||
disabled_config = AppConfig(
|
|
||||||
scheduler=SchedulerConfig(
|
|
||||||
enabled=False,
|
|
||||||
interval_minutes=1
|
|
||||||
)
|
|
||||||
)
|
|
||||||
|
|
||||||
if i % 2 == 0:
|
|
||||||
mock_config_service.load_config.return_value = enabled_config
|
|
||||||
await scheduler.reload_config()
|
|
||||||
else:
|
|
||||||
mock_config_service.load_config.return_value = disabled_config
|
|
||||||
await scheduler.reload_config()
|
|
||||||
|
|
||||||
await asyncio.sleep(0.1)
|
|
||||||
|
|
||||||
# Final state should match last configuration (i=4 is even, so enabled)
|
|
||||||
status = scheduler.get_status()
|
|
||||||
assert status["is_running"] is True # Last config (i=4) was enabled
|
|
||||||
|
|
||||||
# Cleanup
|
|
||||||
if scheduler._is_running:
|
|
||||||
await scheduler.stop()
|
|
||||||
|
|
||||||
@pytest.mark.asyncio
|
|
||||||
async def test_interval_change_during_active_scan(
|
|
||||||
self,
|
|
||||||
scheduler_service,
|
|
||||||
mock_config_service,
|
|
||||||
mock_anime_service,
|
|
||||||
mock_websocket_service
|
|
||||||
):
|
|
||||||
"""Test configuration change during active scan."""
|
|
||||||
await scheduler_service.start()
|
await scheduler_service.start()
|
||||||
|
await scheduler_service.trigger_rescan()
|
||||||
# Make rescan slow
|
status = scheduler_service.get_status()
|
||||||
|
assert status["last_run"] is not None
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_status_scan_in_progress_during_slow_rescan(
|
||||||
|
self, scheduler_service, mock_anime_service, mock_websocket_service
|
||||||
|
):
|
||||||
|
"""scan_in_progress is True while rescan is executing."""
|
||||||
async def slow_rescan():
|
async def slow_rescan():
|
||||||
await asyncio.sleep(1)
|
await asyncio.sleep(0.3)
|
||||||
|
|
||||||
mock_anime_service.rescan.side_effect = slow_rescan
|
mock_anime_service.rescan.side_effect = slow_rescan
|
||||||
|
await scheduler_service.start()
|
||||||
# Start a rescan
|
|
||||||
task = asyncio.create_task(scheduler_service._perform_rescan())
|
task = asyncio.create_task(scheduler_service._perform_rescan())
|
||||||
|
await asyncio.sleep(0.05)
|
||||||
# Change interval while scan is in progress
|
assert scheduler_service.get_status()["scan_in_progress"] is True
|
||||||
await asyncio.sleep(0.2)
|
|
||||||
new_config = AppConfig(
|
|
||||||
scheduler=SchedulerConfig(
|
|
||||||
enabled=True,
|
|
||||||
interval_minutes=5
|
|
||||||
)
|
|
||||||
)
|
|
||||||
mock_config_service.load_config.return_value = new_config
|
|
||||||
|
|
||||||
# Reload config (should restart scheduler)
|
|
||||||
await scheduler_service.reload_config()
|
|
||||||
|
|
||||||
# Wait for scan to complete
|
|
||||||
await task
|
await task
|
||||||
|
|
||||||
# Verify new interval is applied
|
@pytest.mark.asyncio
|
||||||
assert scheduler_service._config.interval_minutes == 5
|
async def test_status_is_running_false_after_stop(self, scheduler_service):
|
||||||
|
"""is_running is False after stop()."""
|
||||||
# Cleanup
|
await scheduler_service.start()
|
||||||
await scheduler_service.stop()
|
await scheduler_service.stop()
|
||||||
|
assert scheduler_service.get_status()["is_running"] is False
|
||||||
|
|
||||||
|
@pytest.mark.asyncio
|
||||||
|
async def test_status_includes_cron_fields(self, scheduler_service):
|
||||||
|
"""get_status() includes schedule_time, schedule_days, auto_download keys."""
|
||||||
|
await scheduler_service.start()
|
||||||
|
status = scheduler_service.get_status()
|
||||||
|
for key in ("schedule_time", "schedule_days", "auto_download_after_rescan", "next_run"):
|
||||||
|
assert key in status
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# TestReloadConfig
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
class TestReloadConfig:
    """Tests for reload_config() live reconfiguration."""

    @pytest.mark.asyncio
    async def test_reload_reschedules_job_on_time_change(self, scheduler_service):
        """Changing schedule_time reschedules the existing job."""
        await scheduler_service.start()
        assert scheduler_service._scheduler.get_job(_JOB_ID) is not None

        scheduler_service.reload_config(
            SchedulerConfig(enabled=True, schedule_time="08:00")
        )

        rescheduled = scheduler_service._scheduler.get_job(_JOB_ID)
        assert rescheduled is not None
        assert scheduler_service._config.schedule_time == "08:00"

    @pytest.mark.asyncio
    async def test_reload_removes_job_when_disabled(self, scheduler_service):
        """Setting enabled=False removes the APScheduler job."""
        await scheduler_service.start()
        assert scheduler_service._scheduler.get_job(_JOB_ID) is not None

        disabled = SchedulerConfig(enabled=False)
        scheduler_service.reload_config(disabled)
        assert scheduler_service._scheduler.get_job(_JOB_ID) is None

    @pytest.mark.asyncio
    async def test_reload_removes_job_when_days_empty(self, scheduler_service):
        """Empty schedule_days removes the APScheduler job."""
        await scheduler_service.start()
        no_days = SchedulerConfig(enabled=True, schedule_days=[])
        scheduler_service.reload_config(no_days)
        assert scheduler_service._scheduler.get_job(_JOB_ID) is None

    @pytest.mark.asyncio
    async def test_reload_adds_job_when_reenabling(self, scheduler_service):
        """Re-enabling after disable adds a new job."""
        await scheduler_service.start()
        scheduler_service.reload_config(SchedulerConfig(enabled=False))
        assert scheduler_service._scheduler.get_job(_JOB_ID) is None

        scheduler_service.reload_config(
            SchedulerConfig(enabled=True, schedule_time="09:00")
        )
        assert scheduler_service._scheduler.get_job(_JOB_ID) is not None

    @pytest.mark.asyncio
    async def test_reload_updates_config_attribute(self, scheduler_service):
        """reload_config() updates self._config with the supplied instance."""
        await scheduler_service.start()
        replacement = SchedulerConfig(
            enabled=True, schedule_time="14:30", schedule_days=["mon"]
        )
        scheduler_service.reload_config(replacement)
        assert scheduler_service._config.schedule_time == "14:30"
        assert scheduler_service._config.schedule_days == ["mon"]

    def test_reload_before_start_stores_config(self, scheduler_service):
        """reload_config() before start() stores config without raising."""
        scheduler_service.reload_config(
            SchedulerConfig(enabled=True, schedule_time="22:00")
        )
        assert scheduler_service._config.schedule_time == "22:00"
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# TestAutoDownloadWorkflow
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
class TestAutoDownloadWorkflow:
    """Tests for auto-download-after-rescan integration."""

    @staticmethod
    def _arm(service, *, auto_download: bool) -> None:
        """Put the service in a running state with the given auto-download flag."""
        service._config = SchedulerConfig(
            enabled=True,
            auto_download_after_rescan=auto_download,
        )
        service._is_running = True

    @pytest.mark.asyncio
    async def test_auto_download_triggered_when_enabled(
        self, scheduler_service, mock_anime_service, mock_websocket_service
    ):
        """_auto_download_missing() is called when auto_download_after_rescan=True."""
        self._arm(scheduler_service, auto_download=True)

        invocations = []

        async def record_call():
            invocations.append(True)

        scheduler_service._auto_download_missing = record_call
        await scheduler_service._perform_rescan()
        assert invocations == [True]

    @pytest.mark.asyncio
    async def test_auto_download_not_called_when_disabled(
        self, scheduler_service, mock_anime_service, mock_websocket_service
    ):
        """_auto_download_missing() is NOT called when auto_download_after_rescan=False."""
        self._arm(scheduler_service, auto_download=False)

        invocations = []

        async def record_call():
            invocations.append(True)

        scheduler_service._auto_download_missing = record_call
        await scheduler_service._perform_rescan()
        assert invocations == []

    @pytest.mark.asyncio
    async def test_auto_download_error_broadcasts_event(
        self, scheduler_service, mock_anime_service, mock_websocket_service
    ):
        """Error in _auto_download_missing broadcasts 'auto_download_error'."""
        self._arm(scheduler_service, auto_download=True)

        async def explode():
            raise RuntimeError("download failed")

        scheduler_service._auto_download_missing = explode
        await scheduler_service._perform_rescan()

        error_events = [
            event
            for event in mock_websocket_service.broadcasts
            if event["type"] == "auto_download_error"
        ]
        assert len(error_events) == 1
|
||||||
|
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# TestSchedulerSingletonHelpers
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
|
||||||
|
class TestSchedulerSingletonHelpers:
    """Tests for module-level singleton helpers.

    get_scheduler_service() memoises a module-global instance, so each test
    resets the singleton in a finally-block to avoid leaking state (or a
    still-running service) into unrelated tests when an assertion fails.
    """

    def test_get_scheduler_service_returns_same_instance(self):
        """get_scheduler_service() returns the same object on repeated calls."""
        try:
            svc1 = get_scheduler_service()
            svc2 = get_scheduler_service()
            assert svc1 is svc2
        finally:
            # Drop the singleton created above so later tests start clean.
            reset_scheduler_service()

    def test_reset_clears_singleton(self):
        """reset_scheduler_service() causes get_scheduler_service() to return a new instance."""
        try:
            svc1 = get_scheduler_service()
            reset_scheduler_service()
            svc2 = get_scheduler_service()
            assert svc1 is not svc2
        finally:
            reset_scheduler_service()

    @pytest.mark.asyncio
    async def test_state_persists_across_restart(self, mock_config_service):
        """Stopping and restarting loads config from service each time."""
        reset_scheduler_service()
        svc = SchedulerService()
        await svc.start()
        try:
            original_time = svc._config.schedule_time
            assert svc._is_running is True

            await svc.stop()
            assert svc._is_running is False

            reset_scheduler_service()
            svc2 = SchedulerService()
            await svc2.start()
            try:
                assert svc2._is_running is True
                # A fresh instance re-reads config, so the cron time matches.
                assert svc2._config.schedule_time == original_time
            finally:
                await svc2.stop()
        finally:
            # Ensure no running service or stale singleton survives a failure.
            if svc._is_running:
                await svc.stop()
            reset_scheduler_service()
|
||||||
|
|||||||
129
tests/unit/test_scheduler_config_model.py
Normal file
129
tests/unit/test_scheduler_config_model.py
Normal file
@@ -0,0 +1,129 @@
|
|||||||
|
"""Unit tests for SchedulerConfig model fields and validators (Task 3)."""
|
||||||
|
import pytest
|
||||||
|
from pydantic import ValidationError
|
||||||
|
|
||||||
|
from src.server.models.config import SchedulerConfig
|
||||||
|
|
||||||
|
ALL_DAYS = ["mon", "tue", "wed", "thu", "fri", "sat", "sun"]
|
||||||
|
|
||||||
|
|
||||||
|
class TestSchedulerConfigDefaults:
    """3.1 – Default values."""

    def test_default_schedule_time(self) -> None:
        # Nightly 03:00 is the out-of-the-box cron time.
        assert SchedulerConfig().schedule_time == "03:00"

    def test_default_schedule_days(self) -> None:
        # By default the job runs every day of the week.
        assert SchedulerConfig().schedule_days == ALL_DAYS

    def test_default_auto_download(self) -> None:
        assert SchedulerConfig().auto_download_after_rescan is False

    def test_default_enabled(self) -> None:
        assert SchedulerConfig().enabled is True

    def test_default_interval_minutes(self) -> None:
        assert SchedulerConfig().interval_minutes == 60
|
||||||
|
|
||||||
|
|
||||||
|
class TestSchedulerConfigValidScheduleTime:
    """3.2 – Valid schedule_time values."""

    @pytest.mark.parametrize("candidate", ["00:00", "03:00", "12:30", "23:59"])
    def test_valid_times(self, candidate: str) -> None:
        # A well-formed zero-padded HH:MM string is stored unchanged.
        assert SchedulerConfig(schedule_time=candidate).schedule_time == candidate
|
||||||
|
|
||||||
|
|
||||||
|
class TestSchedulerConfigInvalidScheduleTime:
    """3.3 – Invalid schedule_time values must raise ValidationError."""

    # Covers out-of-range hours/minutes, 12-hour suffixes, empty string,
    # and non-zero-padded hours.
    @pytest.mark.parametrize(
        "candidate",
        ["25:00", "3pm", "", "3:00pm", "24:00", "-1:00", "9:00", "1:60"],
    )
    def test_invalid_times(self, candidate: str) -> None:
        with pytest.raises(ValidationError):
            SchedulerConfig(schedule_time=candidate)
|
||||||
|
|
||||||
|
|
||||||
|
class TestSchedulerConfigValidScheduleDays:
    """3.4 – Valid schedule_days values."""

    def test_single_day(self) -> None:
        assert SchedulerConfig(schedule_days=["mon"]).schedule_days == ["mon"]

    def test_multiple_days(self) -> None:
        picked = ["mon", "fri"]
        assert SchedulerConfig(schedule_days=picked).schedule_days == picked

    def test_all_days(self) -> None:
        assert SchedulerConfig(schedule_days=ALL_DAYS).schedule_days == ALL_DAYS

    def test_empty_list(self) -> None:
        # An empty list is valid input; the model accepts it as-is.
        assert SchedulerConfig(schedule_days=[]).schedule_days == []
|
||||||
|
|
||||||
|
|
||||||
|
class TestSchedulerConfigInvalidScheduleDays:
    """3.5 – Invalid schedule_days values must raise ValidationError."""

    @pytest.mark.parametrize(
        "days",
        [
            ["monday"],  # full names are rejected; only 3-letter codes
            ["xyz"],
            ["Mon"],  # Case-sensitive — must be lowercase
            [""],
            ["mon", "xyz"],  # one invalid entry fails the whole list
        ],
    )
    def test_invalid_days(self, days: list[str]) -> None:
        with pytest.raises(ValidationError):
            SchedulerConfig(schedule_days=days)
|
||||||
|
|
||||||
|
|
||||||
|
class TestSchedulerConfigAutoDownload:
    """3.6 – auto_download_after_rescan field."""

    def test_set_true(self) -> None:
        cfg = SchedulerConfig(auto_download_after_rescan=True)
        assert cfg.auto_download_after_rescan is True

    def test_set_false(self) -> None:
        cfg = SchedulerConfig(auto_download_after_rescan=False)
        assert cfg.auto_download_after_rescan is False
|
||||||
|
|
||||||
|
|
||||||
|
class TestSchedulerConfigBackwardCompat:
    """3.7 – Backward compatibility: old fields still work."""

    def test_legacy_fields_use_defaults(self) -> None:
        cfg = SchedulerConfig(enabled=True, interval_minutes=30)
        # New cron fields fall back to their defaults...
        assert cfg.schedule_time == "03:00"
        assert cfg.schedule_days == ALL_DAYS
        assert cfg.auto_download_after_rescan is False
        # ...while the legacy fields keep the values supplied by the caller.
        assert cfg.enabled is True
        assert cfg.interval_minutes == 30
|
||||||
|
|
||||||
|
|
||||||
|
class TestSchedulerConfigSerialisation:
    """3.8 – Serialisation roundtrip."""

    def test_roundtrip(self) -> None:
        # Dump to a plain dict and rebuild; all five fields must survive.
        source = SchedulerConfig(
            enabled=True,
            interval_minutes=120,
            schedule_time="04:30",
            schedule_days=["mon", "wed", "fri"],
            auto_download_after_rescan=True,
        )
        rebuilt = SchedulerConfig(**source.model_dump())
        assert rebuilt == source
|
||||||
File diff suppressed because it is too large
Load Diff
Reference in New Issue
Block a user