Implement async series data loading with background processing
- Add loading status fields to AnimeSeries model
- Create BackgroundLoaderService for async task processing
- Update POST /api/anime/add to return 202 Accepted immediately
- Add GET /api/anime/{key}/loading-status endpoint
- Integrate background loader with startup/shutdown lifecycle
- Create database migration script for loading status fields
- Add unit tests for BackgroundLoaderService (10 tests, all passing)
- Update AnimeSeriesService.create() to accept loading status fields
Architecture follows clean separation with no code duplication:
- BackgroundLoader orchestrates, doesn't reimplement
- Reuses existing AnimeService, NFOService, WebSocket patterns
- Database-backed status survives restarts
This commit is contained in:
@@ -1,5 +1,4 @@
|
||||
import logging
|
||||
import os
|
||||
import warnings
|
||||
from typing import Any, List, Optional
|
||||
|
||||
@@ -16,6 +15,10 @@ from src.server.exceptions import (
|
||||
ValidationError,
|
||||
)
|
||||
from src.server.services.anime_service import AnimeService, AnimeServiceError
|
||||
from src.server.services.background_loader_service import (
|
||||
BackgroundLoaderService,
|
||||
get_background_loader_service,
|
||||
)
|
||||
from src.server.utils.dependencies import (
|
||||
get_anime_service,
|
||||
get_optional_database_session,
|
||||
@@ -688,23 +691,27 @@ async def _perform_search(
|
||||
) from exc
|
||||
|
||||
|
||||
@router.post("/add")
|
||||
@router.post("/add", status_code=status.HTTP_202_ACCEPTED)
|
||||
async def add_series(
|
||||
request: AddSeriesRequest,
|
||||
_auth: dict = Depends(require_auth),
|
||||
series_app: Any = Depends(get_series_app),
|
||||
db: Optional[AsyncSession] = Depends(get_optional_database_session),
|
||||
anime_service: AnimeService = Depends(get_anime_service),
|
||||
background_loader: BackgroundLoaderService = Depends(get_background_loader_service),
|
||||
) -> dict:
|
||||
"""Add a new series to the library with full initialization.
|
||||
"""Add a new series to the library with asynchronous data loading.
|
||||
|
||||
This endpoint performs the complete series addition flow:
|
||||
This endpoint performs immediate series addition and queues background loading:
|
||||
1. Validates inputs and extracts the series key from the link URL
|
||||
2. Creates a sanitized folder name from the display name
|
||||
3. Saves the series to the database (if available)
|
||||
3. Saves the series to the database with loading_status="pending"
|
||||
4. Creates the folder on disk with the sanitized name
|
||||
5. Triggers a targeted scan for missing episodes (only this series)
|
||||
5. Queues background loading task for episodes, NFO, and images
|
||||
6. Returns immediately (202 Accepted) without waiting for data loading
|
||||
|
||||
Data loading happens asynchronously in the background, with real-time
|
||||
status updates via WebSocket.
|
||||
|
||||
The `key` is the URL-safe identifier used for all lookups.
|
||||
The `name` is stored as display metadata and used to derive
|
||||
the filesystem folder name (sanitized for filesystem safety).
|
||||
@@ -716,7 +723,7 @@ async def add_series(
|
||||
_auth: Ensures the caller is authenticated (value unused)
|
||||
series_app: Core `SeriesApp` instance provided via dependency
|
||||
db: Optional database session for async operations
|
||||
anime_service: AnimeService for scanning operations
|
||||
background_loader: BackgroundLoaderService for async data loading
|
||||
|
||||
Returns:
|
||||
Dict[str, Any]: Status payload with:
|
||||
@@ -725,8 +732,8 @@ async def add_series(
|
||||
- key: Series unique identifier
|
||||
- folder: Created folder path
|
||||
- db_id: Database ID (if saved to DB)
|
||||
- missing_episodes: Dict of missing episodes by season
|
||||
- total_missing: Total count of missing episodes
|
||||
- loading_status: Current loading status
|
||||
- loading_progress: Dict of what data is being loaded
|
||||
|
||||
Raises:
|
||||
HTTPException: If adding the series fails or link is invalid
|
||||
@@ -792,8 +799,6 @@ async def add_series(
|
||||
)
|
||||
|
||||
db_id = None
|
||||
missing_episodes: dict = {}
|
||||
scan_error: Optional[str] = None
|
||||
|
||||
# Step C: Save to database if available
|
||||
if db is not None:
|
||||
@@ -806,11 +811,16 @@ async def add_series(
|
||||
"key": key,
|
||||
"folder": existing.folder,
|
||||
"db_id": existing.id,
|
||||
"missing_episodes": {},
|
||||
"total_missing": 0
|
||||
"loading_status": existing.loading_status,
|
||||
"loading_progress": {
|
||||
"episodes": existing.episodes_loaded,
|
||||
"nfo": existing.has_nfo,
|
||||
"logo": existing.logo_loaded,
|
||||
"images": existing.images_loaded
|
||||
}
|
||||
}
|
||||
|
||||
# Save to database using AnimeSeriesService
|
||||
# Save to database using AnimeSeriesService with loading status
|
||||
anime_series = await AnimeSeriesService.create(
|
||||
db=db,
|
||||
key=key,
|
||||
@@ -818,11 +828,16 @@ async def add_series(
|
||||
site="aniworld.to",
|
||||
folder=folder,
|
||||
year=year,
|
||||
loading_status="pending",
|
||||
episodes_loaded=False,
|
||||
logo_loaded=False,
|
||||
images_loaded=False,
|
||||
loading_started_at=None,
|
||||
)
|
||||
db_id = anime_series.id
|
||||
|
||||
logger.info(
|
||||
"Added series to database: %s (key=%s, db_id=%d, year=%s)",
|
||||
"Added series to database: %s (key=%s, db_id=%d, year=%s, loading=pending)",
|
||||
name,
|
||||
key,
|
||||
db_id,
|
||||
@@ -851,80 +866,43 @@ async def add_series(
|
||||
year
|
||||
)
|
||||
|
||||
# Step E: Trigger targeted scan for missing episodes
|
||||
# Step E: Queue background loading task for episodes, NFO, and images
|
||||
try:
|
||||
if series_app and hasattr(series_app, "serie_scanner"):
|
||||
missing_episodes = series_app.serie_scanner.scan_single_series(
|
||||
key=key,
|
||||
folder=folder
|
||||
)
|
||||
logger.info(
|
||||
"Targeted scan completed for %s: found %d missing episodes",
|
||||
key,
|
||||
sum(len(eps) for eps in missing_episodes.values())
|
||||
)
|
||||
|
||||
# Update the serie in keyDict with the missing episodes
|
||||
if hasattr(series_app, "list") and hasattr(series_app.list, "keyDict"):
|
||||
if key in series_app.list.keyDict:
|
||||
series_app.list.keyDict[key].episodeDict = missing_episodes
|
||||
|
||||
# Save missing episodes to database
|
||||
if db is not None and missing_episodes:
|
||||
from src.server.database.service import EpisodeService
|
||||
|
||||
for season, episode_numbers in missing_episodes.items():
|
||||
for episode_number in episode_numbers:
|
||||
await EpisodeService.create(
|
||||
db=db,
|
||||
series_id=db_id,
|
||||
season=season,
|
||||
episode_number=episode_number,
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"Saved %d missing episodes to database for %s",
|
||||
sum(len(eps) for eps in missing_episodes.values()),
|
||||
key
|
||||
)
|
||||
else:
|
||||
# Scanner not available - this shouldn't happen in normal operation
|
||||
logger.warning(
|
||||
"Scanner not available for targeted scan of %s",
|
||||
key
|
||||
)
|
||||
await background_loader.add_series_loading_task(
|
||||
key=key,
|
||||
folder=folder,
|
||||
name=name,
|
||||
year=year
|
||||
)
|
||||
logger.info(
|
||||
"Queued background loading for %s (key=%s)",
|
||||
name,
|
||||
key
|
||||
)
|
||||
except Exception as e:
|
||||
# Scan failure is not critical - series was still added
|
||||
scan_error = str(e)
|
||||
# Background loading queue failure is not critical - series was still added
|
||||
logger.warning(
|
||||
"Targeted scan failed for %s: %s (series still added)",
|
||||
"Failed to queue background loading for %s: %s",
|
||||
key,
|
||||
e
|
||||
)
|
||||
|
||||
# Convert missing episodes keys to strings for JSON serialization
|
||||
missing_episodes_serializable = {
|
||||
str(season): episodes
|
||||
for season, episodes in missing_episodes.items()
|
||||
}
|
||||
|
||||
# Calculate total missing
|
||||
total_missing = sum(len(eps) for eps in missing_episodes.values())
|
||||
|
||||
# Step F: Return response
|
||||
# Step F: Return immediate response (202 Accepted)
|
||||
response = {
|
||||
"status": "success",
|
||||
"message": f"Successfully added series: {name}",
|
||||
"message": f"Series added successfully: {name}. Data will be loaded in background.",
|
||||
"key": key,
|
||||
"folder": folder,
|
||||
"db_id": db_id,
|
||||
"missing_episodes": missing_episodes_serializable,
|
||||
"total_missing": total_missing
|
||||
"loading_status": "pending",
|
||||
"loading_progress": {
|
||||
"episodes": False,
|
||||
"nfo": False,
|
||||
"logo": False,
|
||||
"images": False
|
||||
}
|
||||
}
|
||||
|
||||
if scan_error:
|
||||
response["scan_warning"] = f"Scan partially failed: {scan_error}"
|
||||
|
||||
return response
|
||||
|
||||
except HTTPException:
|
||||
@@ -941,6 +919,97 @@ async def add_series(
|
||||
) from exc
|
||||
|
||||
|
||||
@router.get("/{anime_key}/loading-status")
async def get_loading_status(
    anime_key: str,
    _auth: dict = Depends(require_auth),
    db: Optional[AsyncSession] = Depends(get_optional_database_session),
) -> dict:
    """Report the background-loading progress for a single series.

    Looks the series up by key and returns its current loading status,
    per-artifact progress flags, timestamps, and a human-readable message.

    Args:
        anime_key: Series unique identifier (key)
        _auth: Ensures the caller is authenticated
        db: Optional database session

    Returns:
        Dict with loading status information:
        - key: Series identifier
        - loading_status: Current status (pending, loading_*, completed, failed)
        - progress: Dict of what data is loaded
        - started_at: When loading started (ISO string or None)
        - completed_at: When loading completed (ISO string or None)
        - message: Human-readable status message
        - error: Error message if failed

    Raises:
        HTTPException: 503 if no database, 404 if series unknown,
            500 on unexpected failures
    """
    if db is None:
        raise HTTPException(
            status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
            detail="Database not available"
        )

    try:
        from src.server.database.service import AnimeSeriesService

        series = await AnimeSeriesService.get_by_key(db, anime_key)
        if not series:
            raise HTTPException(
                status_code=status.HTTP_404_NOT_FOUND,
                detail=f"Series not found: {anime_key}"
            )

        # Static status -> message table; "failed" needs the stored error
        # text, so it is handled separately.
        status_messages = {
            "pending": "Queued for loading...",
            "loading_episodes": "Loading episodes...",
            "loading_nfo": "Generating NFO file...",
            "loading_logo": "Downloading logo...",
            "loading_images": "Downloading images...",
            "completed": "All data loaded successfully",
        }
        if series.loading_status == "failed":
            message = f"Loading failed: {series.loading_error}"
        else:
            message = status_messages.get(series.loading_status, "Loading...")

        started = series.loading_started_at
        completed = series.loading_completed_at
        return {
            "key": series.key,
            "loading_status": series.loading_status,
            "progress": {
                "episodes": series.episodes_loaded,
                "nfo": series.has_nfo,
                "logo": series.logo_loaded,
                "images": series.images_loaded
            },
            "started_at": started.isoformat() if started else None,
            "completed_at": completed.isoformat() if completed else None,
            "message": message,
            "error": series.loading_error
        }

    except HTTPException:
        raise
    except Exception as exc:
        logger.error("Failed to get loading status: %s", exc, exc_info=True)
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Failed to get loading status: {str(exc)}"
        ) from exc
|
||||
|
||||
|
||||
@router.get("/{anime_id}", response_model=AnimeDetail)
|
||||
async def get_anime(
|
||||
anime_id: str,
|
||||
|
||||
@@ -100,6 +100,37 @@ class AnimeSeries(Base, TimestampMixin):
|
||||
doc="TVDB (TheTVDB) ID for series metadata"
|
||||
)
|
||||
|
||||
# Loading status fields for asynchronous data loading
|
||||
loading_status: Mapped[str] = mapped_column(
|
||||
String(50), nullable=False, default="completed", server_default="completed",
|
||||
doc="Loading status: pending, loading_episodes, loading_nfo, loading_logo, "
|
||||
"loading_images, completed, failed"
|
||||
)
|
||||
episodes_loaded: Mapped[bool] = mapped_column(
|
||||
Boolean, nullable=False, default=True, server_default="1",
|
||||
doc="Whether episodes have been scanned and loaded"
|
||||
)
|
||||
logo_loaded: Mapped[bool] = mapped_column(
|
||||
Boolean, nullable=False, default=False, server_default="0",
|
||||
doc="Whether logo.png has been downloaded"
|
||||
)
|
||||
images_loaded: Mapped[bool] = mapped_column(
|
||||
Boolean, nullable=False, default=False, server_default="0",
|
||||
doc="Whether poster/fanart images have been downloaded"
|
||||
)
|
||||
loading_started_at: Mapped[Optional[datetime]] = mapped_column(
|
||||
DateTime(timezone=True), nullable=True,
|
||||
doc="Timestamp when background loading started"
|
||||
)
|
||||
loading_completed_at: Mapped[Optional[datetime]] = mapped_column(
|
||||
DateTime(timezone=True), nullable=True,
|
||||
doc="Timestamp when background loading completed"
|
||||
)
|
||||
loading_error: Mapped[Optional[str]] = mapped_column(
|
||||
String(1000), nullable=True,
|
||||
doc="Error message if loading failed"
|
||||
)
|
||||
|
||||
# Relationships
|
||||
episodes: Mapped[List["Episode"]] = relationship(
|
||||
"Episode",
|
||||
|
||||
@@ -65,6 +65,11 @@ class AnimeSeriesService:
|
||||
site: str,
|
||||
folder: str,
|
||||
year: int | None = None,
|
||||
loading_status: str = "completed",
|
||||
episodes_loaded: bool = True,
|
||||
logo_loaded: bool = False,
|
||||
images_loaded: bool = False,
|
||||
loading_started_at: datetime | None = None,
|
||||
) -> AnimeSeries:
|
||||
"""Create a new anime series.
|
||||
|
||||
@@ -75,6 +80,11 @@ class AnimeSeriesService:
|
||||
site: Provider site URL
|
||||
folder: Local filesystem path
|
||||
year: Release year (optional)
|
||||
loading_status: Initial loading status (default: "completed")
|
||||
episodes_loaded: Whether episodes are loaded (default: True for backward compat)
|
||||
logo_loaded: Whether logo is loaded (default: False)
|
||||
images_loaded: Whether images are loaded (default: False)
|
||||
loading_started_at: When loading started (optional)
|
||||
|
||||
Returns:
|
||||
Created AnimeSeries instance
|
||||
@@ -88,6 +98,11 @@ class AnimeSeriesService:
|
||||
site=site,
|
||||
folder=folder,
|
||||
year=year,
|
||||
loading_status=loading_status,
|
||||
episodes_loaded=episodes_loaded,
|
||||
logo_loaded=logo_loaded,
|
||||
images_loaded=images_loaded,
|
||||
loading_started_at=loading_started_at,
|
||||
)
|
||||
db.add(series)
|
||||
await db.flush()
|
||||
|
||||
@@ -44,6 +44,66 @@ from src.server.services.websocket_service import get_websocket_service
|
||||
# module-level globals. This makes testing and multi-instance hosting safer.
|
||||
|
||||
|
||||
async def _check_incomplete_series_on_startup(background_loader) -> None:
    """Queue background loading for every series whose data is incomplete.

    Scans the database once at startup and re-queues any series whose
    loading_status is not "completed" or whose episode/NFO/logo/image
    flags indicate missing artifacts.

    Args:
        background_loader: BackgroundLoaderService instance
    """
    logger = setup_logging(log_level="INFO")

    def needs_loading(series) -> bool:
        # Incomplete when loading never finished, or any artifact flag is off.
        if series.loading_status != "completed":
            return True
        return not (
            series.episodes_loaded
            and series.has_nfo
            and series.logo_loaded
            and series.images_loaded
        )

    try:
        from src.server.database.connection import get_db_session
        from src.server.database.service import AnimeSeriesService

        async for db in get_db_session():
            try:
                all_series = await AnimeSeriesService.get_all(db)
                incomplete_series = [s for s in all_series if needs_loading(s)]

                if not incomplete_series:
                    logger.info("All series data is complete. No background loading needed.")
                else:
                    logger.info(
                        f"Found {len(incomplete_series)} series with missing data. "
                        f"Queuing for background loading..."
                    )
                    for series in incomplete_series:
                        await background_loader.add_series_loading_task(
                            key=series.key,
                            folder=series.folder,
                            name=series.name,
                            year=series.year
                        )
                        logger.debug(
                            f"Queued background loading for series: {series.key}"
                        )
                    logger.info("All incomplete series queued for background loading")

            except Exception as e:
                logger.error(f"Error checking incomplete series: {e}", exc_info=True)

            break  # Only the first yielded session is needed

    except Exception as e:
        logger.error(f"Failed to check incomplete series on startup: {e}", exc_info=True)
|
||||
|
||||
|
||||
@asynccontextmanager
|
||||
async def lifespan(_application: FastAPI):
|
||||
"""Manage application lifespan (startup and shutdown).
|
||||
@@ -156,6 +216,15 @@ async def lifespan(_application: FastAPI):
|
||||
download_service = get_download_service()
|
||||
await download_service.initialize()
|
||||
logger.info("Download service initialized and queue restored")
|
||||
|
||||
# Initialize background loader service
|
||||
from src.server.utils.dependencies import get_background_loader_service
|
||||
background_loader = get_background_loader_service()
|
||||
await background_loader.start()
|
||||
logger.info("Background loader service started")
|
||||
|
||||
# Check for incomplete series and queue background loading
|
||||
await _check_incomplete_series_on_startup(background_loader)
|
||||
else:
|
||||
logger.info(
|
||||
"Download service initialization skipped - "
|
||||
@@ -191,7 +260,22 @@ async def lifespan(_application: FastAPI):
|
||||
elapsed = time.monotonic() - shutdown_start
|
||||
return max(0.0, SHUTDOWN_TIMEOUT - elapsed)
|
||||
|
||||
# 1. Broadcast shutdown notification via WebSocket
|
||||
# 1. Stop background loader service
|
||||
try:
|
||||
from src.server.utils.dependencies import _background_loader_service
|
||||
if _background_loader_service is not None:
|
||||
logger.info("Stopping background loader service...")
|
||||
await asyncio.wait_for(
|
||||
_background_loader_service.stop(),
|
||||
timeout=min(10.0, remaining_time())
|
||||
)
|
||||
logger.info("Background loader service stopped")
|
||||
except asyncio.TimeoutError:
|
||||
logger.warning("Background loader service shutdown timed out")
|
||||
except Exception as e: # pylint: disable=broad-exception-caught
|
||||
logger.error("Error stopping background loader service: %s", e, exc_info=True)
|
||||
|
||||
# 2. Broadcast shutdown notification via WebSocket
|
||||
try:
|
||||
ws_service = get_websocket_service()
|
||||
logger.info("Broadcasting shutdown notification to WebSocket clients...")
|
||||
@@ -205,7 +289,7 @@ async def lifespan(_application: FastAPI):
|
||||
except Exception as e: # pylint: disable=broad-exception-caught
|
||||
logger.error("Error during WebSocket shutdown: %s", e, exc_info=True)
|
||||
|
||||
# 2. Shutdown download service and persist active downloads
|
||||
# 3. Shutdown download service and persist active downloads
|
||||
try:
|
||||
from src.server.services.download_service import ( # noqa: E501
|
||||
_download_service_instance,
|
||||
@@ -218,7 +302,7 @@ async def lifespan(_application: FastAPI):
|
||||
except Exception as e: # pylint: disable=broad-exception-caught
|
||||
logger.error("Error stopping download service: %s", e, exc_info=True)
|
||||
|
||||
# 3. Shutdown SeriesApp and cleanup thread pool
|
||||
# 4. Shutdown SeriesApp and cleanup thread pool
|
||||
try:
|
||||
from src.server.utils.dependencies import _series_app
|
||||
if _series_app is not None:
|
||||
@@ -228,7 +312,7 @@ async def lifespan(_application: FastAPI):
|
||||
except Exception as e: # pylint: disable=broad-exception-caught
|
||||
logger.error("Error during SeriesApp shutdown: %s", e, exc_info=True)
|
||||
|
||||
# 4. Cleanup progress service
|
||||
# 5. Cleanup progress service
|
||||
try:
|
||||
progress_service = get_progress_service()
|
||||
logger.info("Cleaning up progress service...")
|
||||
|
||||
520
src/server/services/background_loader_service.py
Normal file
520
src/server/services/background_loader_service.py
Normal file
@@ -0,0 +1,520 @@
|
||||
"""Background loader service for asynchronous series data loading.
|
||||
|
||||
This service orchestrates background loading of series metadata (episodes, NFO files,
|
||||
logos, images) without blocking the user. It provides a task queue system for managing
|
||||
loading operations and real-time status updates via WebSocket.
|
||||
|
||||
Key Features:
|
||||
- Asynchronous task queue for series data loading
|
||||
- Reuses existing services (AnimeService, NFOService) to avoid code duplication
|
||||
- Real-time progress updates via WebSocket
|
||||
- Graceful startup and shutdown handling
|
||||
- Error handling with retry logic
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import asyncio
|
||||
from dataclasses import dataclass, field
|
||||
from datetime import datetime, timezone
|
||||
from enum import Enum
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
import structlog
|
||||
|
||||
from src.server.services.websocket_service import WebSocketService
|
||||
|
||||
logger = structlog.get_logger(__name__)
|
||||
|
||||
|
||||
class LoadingStatus(str, Enum):
    """Lifecycle states for a background series-loading task."""

    PENDING = "pending"                    # queued, not yet picked up
    LOADING_EPISODES = "loading_episodes"  # episode scan in progress
    LOADING_NFO = "loading_nfo"            # NFO generation in progress
    LOADING_LOGO = "loading_logo"          # logo download in progress
    LOADING_IMAGES = "loading_images"      # poster/fanart download in progress
    COMPLETED = "completed"                # all data loaded successfully
    FAILED = "failed"                      # loading aborted with an error
||||
|
||||
|
||||
@dataclass
class SeriesLoadingTask:
    """Mutable record tracking one series' background-loading progress.

    Attributes:
        key: Series unique identifier (primary key)
        folder: Series folder name (metadata only)
        name: Series display name
        year: Series release year, if known
        status: Current loading status
        progress: Per-artifact completion flags (episodes/nfo/logo/images)
        started_at: When loading started
        completed_at: When loading completed
        error: Error message if loading failed
    """

    key: str
    folder: str
    name: str
    year: Optional[int] = None
    status: LoadingStatus = LoadingStatus.PENDING
    # Each artifact starts un-loaded; the worker flips flags as it goes.
    progress: Dict[str, bool] = field(
        default_factory=lambda: dict.fromkeys(
            ("episodes", "nfo", "logo", "images"), False
        )
    )
    started_at: Optional[datetime] = None
    completed_at: Optional[datetime] = None
    error: Optional[str] = None
||||
|
||||
|
||||
class BackgroundLoaderService:
|
||||
"""Service for managing background loading of series metadata.
|
||||
|
||||
This service orchestrates asynchronous loading by delegating to existing
|
||||
services (AnimeService for episodes, NFOService for NFO/images) rather
|
||||
than reimplementing logic. It provides task queuing, status tracking,
|
||||
and WebSocket notifications.
|
||||
|
||||
Attributes:
|
||||
websocket_service: Service for broadcasting status updates
|
||||
anime_service: Service for episode scanning (reused)
|
||||
series_app: Core SeriesApp instance for NFO service access
|
||||
task_queue: Queue of pending loading tasks
|
||||
active_tasks: Dict of currently processing tasks
|
||||
worker_task: Background worker task
|
||||
"""
|
||||
|
||||
def __init__(
    self,
    websocket_service: WebSocketService,
    anime_service: Any,  # AnimeService - avoiding circular import
    series_app: Any,  # SeriesApp - avoiding circular import
):
    """Wire up collaborators and prepare empty task-tracking state.

    Args:
        websocket_service: WebSocket service for status broadcasts
        anime_service: AnimeService instance for episode operations
        series_app: SeriesApp instance for NFO operations
    """
    # Collaborating services (injected, never created here).
    self.websocket_service = websocket_service
    self.anime_service = anime_service
    self.series_app = series_app

    # Task bookkeeping: the FIFO queue feeds the worker; active_tasks is
    # keyed by series key for duplicate suppression and status lookups.
    self.task_queue: asyncio.Queue[SeriesLoadingTask] = asyncio.Queue()
    self.active_tasks: Dict[str, SeriesLoadingTask] = {}
    self.worker_task: Optional[asyncio.Task] = None
    self._shutdown = False

    logger.info("BackgroundLoaderService initialized")
|
||||
|
||||
async def start(self) -> None:
    """Launch the queue-processing worker unless one is already running."""
    existing = self.worker_task
    if existing is not None and not existing.done():
        logger.warning("Background worker already running")
        return

    # Reset the shutdown flag so a stopped service can be restarted.
    self._shutdown = False
    self.worker_task = asyncio.create_task(self._worker())
    logger.info("Background worker started")
|
||||
|
||||
async def stop(self) -> None:
    """Stop the background worker gracefully."""
    worker = self.worker_task
    if worker is None:
        return

    logger.info("Stopping background worker...")
    self._shutdown = True

    # Cancel and await the worker so cancellation fully completes
    # before this coroutine returns.
    if not worker.done():
        worker.cancel()
        try:
            await worker
        except asyncio.CancelledError:
            pass  # expected: cancellation is the normal stop path

    logger.info("Background worker stopped")
|
||||
|
||||
async def add_series_loading_task(
    self,
    key: str,
    folder: str,
    name: str,
    year: Optional[int] = None
) -> None:
    """Enqueue a series for background loading (no-op if already tracked).

    Args:
        key: Series unique identifier (primary key)
        folder: Series folder name (metadata only)
        name: Series display name
        year: Series release year
    """
    # Duplicate suppression: a series that already has an active task
    # keeps the existing one.
    if key in self.active_tasks:
        logger.debug(f"Task for series {key} already exists, skipping")
        return

    task = SeriesLoadingTask(
        key=key,
        folder=folder,
        name=name,
        year=year,
        started_at=datetime.now(timezone.utc)
    )
    self.active_tasks[key] = task
    await self.task_queue.put(task)
    logger.info(f"Added loading task for series: {key}")

    # Let WebSocket clients see the task immediately as "pending".
    await self._broadcast_status(task)
|
||||
|
||||
async def check_missing_data(
    self,
    key: str,
    folder: str,
    anime_directory: str,
    db: Any
) -> Dict[str, bool]:
    """Determine which artifacts (episodes/NFO/logo/images) a series lacks.

    Args:
        key: Series unique identifier
        folder: Series folder name
        anime_directory: Base anime directory path
        db: Database session

    Returns:
        Dict indicating what data is missing (True = missing, False = exists)
    """
    from src.server.database.service import AnimeSeriesService

    series_db = await AnimeSeriesService.get_by_key(db, key)
    if not series_db:
        # Unknown series: everything still needs to be loaded.
        return {"episodes": True, "nfo": True, "logo": True, "images": True}

    series_dir = Path(anime_directory) / folder
    nfo_path = series_dir / "tvshow.nfo"
    logo_path = series_dir / "logo.png"
    poster_path = series_dir / "poster.jpg"
    fanart_path = series_dir / "fanart.jpg"

    # An artifact counts as missing when either the file is absent on
    # disk or the database flag says it was never loaded.
    return {
        "episodes": not series_db.episodes_loaded,
        "nfo": not nfo_path.exists() or not series_db.has_nfo,
        "logo": not logo_path.exists() or not series_db.logo_loaded,
        "images": (
            not (poster_path.exists() and fanart_path.exists())
            or not series_db.images_loaded
        ),
    }
|
||||
|
||||
async def _worker(self) -> None:
    """Consume loading tasks from the queue until shutdown or cancellation."""
    logger.info("Background worker started processing tasks")

    while not self._shutdown:
        # Short timeout so the shutdown flag is re-checked regularly
        # even when the queue is empty.
        try:
            task = await asyncio.wait_for(self.task_queue.get(), timeout=1.0)
        except asyncio.TimeoutError:
            continue  # nothing queued; poll again
        except asyncio.CancelledError:
            logger.info("Worker task cancelled")
            break

        try:
            logger.info(f"Processing loading task for series: {task.key}")
            await self._load_series_data(task)
            self.task_queue.task_done()
        except asyncio.CancelledError:
            logger.info("Worker task cancelled")
            break
        except Exception as e:
            # One failed task must not kill the worker loop.
            logger.exception(f"Error in background worker: {e}")
            continue

    logger.info("Background worker stopped")
|
||||
|
||||
async def _load_series_data(self, task: SeriesLoadingTask) -> None:
    """Load every missing artifact for one series and persist the outcome.

    Delegates to the _load_* helpers (which reuse AnimeService and
    NFOService) rather than reimplementing logic, then records success or
    failure both on the task and in the database, broadcasting the final
    status over WebSocket.

    Args:
        task: The loading task to process
    """
    try:
        from src.server.database.connection import get_db_session
        from src.server.database.service import AnimeSeriesService

        async for db in get_db_session():
            try:
                missing = await self.check_missing_data(
                    task.key,
                    task.folder,
                    self.series_app.directory_to_search,
                    db
                )

                # Episodes: load only when missing, else mark done.
                if missing["episodes"]:
                    await self._load_episodes(task, db)
                else:
                    task.progress["episodes"] = True

                # NFO, logo, and images are produced together by one helper.
                if missing["nfo"] or missing["logo"] or missing["images"]:
                    await self._load_nfo_and_images(task, db)
                else:
                    task.progress["nfo"] = True
                    task.progress["logo"] = True
                    task.progress["images"] = True

                task.completed_at = datetime.now(timezone.utc)
                task.status = LoadingStatus.COMPLETED

                # Persist completion so the status survives restarts.
                record = await AnimeSeriesService.get_by_key(db, task.key)
                if record:
                    record.loading_status = "completed"
                    record.loading_completed_at = task.completed_at
                    record.loading_error = None
                    await db.commit()

                await self._broadcast_status(task)
                logger.info(f"Successfully loaded all data for series: {task.key}")

            except Exception as e:
                logger.exception(f"Error loading series data: {e}")
                task.completed_at = datetime.now(timezone.utc)
                task.status = LoadingStatus.FAILED
                task.error = str(e)

                # Persist the failure for later inspection.
                record = await AnimeSeriesService.get_by_key(db, task.key)
                if record:
                    record.loading_status = "failed"
                    record.loading_error = str(e)
                    record.loading_completed_at = task.completed_at
                    await db.commit()

                await self._broadcast_status(task)

            break  # only the first yielded session is used

    finally:
        # Always release the duplicate-suppression slot, success or not.
        self.active_tasks.pop(task.key, None)
|
||||
|
||||
async def _load_episodes(self, task: SeriesLoadingTask, db: Any) -> None:
    """Load episodes for a series by delegating to the existing AnimeService.

    Moves the task into the LOADING_EPISODES state, announces it over
    WebSocket, runs the rescan, then persists the progress flags.

    Args:
        task: The loading task being processed.
        db: Database session used to persist progress.

    Raises:
        Exception: Any rescan/persistence failure is logged and re-raised
            so the caller can mark the task as failed.
    """
    task.status = LoadingStatus.LOADING_EPISODES
    await self._broadcast_status(task, "Loading episodes...")

    try:
        # Delegate to AnimeService so the existing episode-detection
        # logic is reused rather than reimplemented here.
        await self.anime_service.rescan()

        task.progress["episodes"] = True

        # Persist the progress so status survives a server restart.
        from src.server.database.service import AnimeSeriesService
        record = await AnimeSeriesService.get_by_key(db, task.key)
        if record:
            record.episodes_loaded = True
            record.loading_status = "loading_episodes"
            await db.commit()

        logger.info(f"Episodes loaded for series: {task.key}")

    except Exception as e:
        logger.exception(f"Failed to load episodes for {task.key}: {e}")
        raise
|
||||
|
||||
async def _load_nfo_and_images(self, task: SeriesLoadingTask, db: Any) -> None:
    """Load NFO file and images for a series by reusing NFOService.

    Best-effort stage: failures are logged and reflected in the task's
    progress flags, but never abort the overall loading task.

    Args:
        task: The loading task
        db: Database session
    """
    task.status = LoadingStatus.LOADING_NFO
    await self._broadcast_status(task, "Generating NFO file...")

    try:
        # NFOService is optional; without it this stage is skipped entirely.
        if not self.series_app.nfo_service:
            logger.warning(
                f"NFOService not available, skipping NFO/images for {task.key}"
            )
            task.progress["nfo"] = False
            task.progress["logo"] = False
            task.progress["images"] = False
            return

        # Use existing NFOService to create the NFO with all images.
        # This reuses all existing TMDB API logic and image downloading.
        # (The returned path is not needed here, so it is not kept —
        # the previous unused `nfo_path` local has been removed.)
        await self.series_app.nfo_service.create_tvshow_nfo(
            serie_name=task.name,
            serie_folder=task.folder,
            year=task.year,
            download_poster=True,
            download_logo=True,
            download_fanart=True
        )

        # Update in-memory task progress.
        task.progress["nfo"] = True
        task.progress["logo"] = True
        task.progress["images"] = True

        # Persist the progress so status survives a server restart.
        from src.server.database.service import AnimeSeriesService
        series_db = await AnimeSeriesService.get_by_key(db, task.key)
        if series_db:
            series_db.has_nfo = True
            series_db.nfo_created_at = datetime.now(timezone.utc)
            series_db.logo_loaded = True
            series_db.images_loaded = True
            series_db.loading_status = "loading_nfo"
            await db.commit()

        logger.info(f"NFO and images loaded for series: {task.key}")

    except Exception as e:
        logger.exception(f"Failed to load NFO/images for {task.key}: {e}")
        # Don't fail the entire task if NFO fails — mark progress as
        # incomplete and let the caller continue.
        task.progress["nfo"] = False
        task.progress["logo"] = False
        task.progress["images"] = False
|
||||
|
||||
async def _broadcast_status(
    self,
    task: SeriesLoadingTask,
    message: Optional[str] = None
) -> None:
    """Broadcast loading status update via WebSocket.

    Args:
        task: The loading task
        message: Optional status message; when empty/None a default is
            derived from the task's current status.
    """
    if not message:
        # Map each terminal/intermediate status to its default message;
        # FAILED is handled separately because it embeds the error text.
        defaults = {
            LoadingStatus.PENDING: "Queued for loading...",
            LoadingStatus.LOADING_EPISODES: "Loading episodes...",
            LoadingStatus.LOADING_NFO: "Generating NFO file...",
            LoadingStatus.COMPLETED: "All data loaded successfully",
        }
        if task.status == LoadingStatus.FAILED:
            message = f"Loading failed: {task.error}"
        else:
            message = defaults.get(task.status, "Loading...")

    await self.websocket_service.broadcast({
        "type": "series_loading_update",
        "key": task.key,
        "folder": task.folder,
        "loading_status": task.status.value,
        "progress": task.progress,
        "message": message,
        "timestamp": datetime.now(timezone.utc).isoformat(),
        "error": task.error
    })
|
||||
|
||||
|
||||
# Singleton instance, created by init_background_loader_service() and
# retrieved via get_background_loader_service().
_background_loader_service: Optional[BackgroundLoaderService] = None
|
||||
|
||||
|
||||
def init_background_loader_service(
    websocket_service: WebSocketService,
    anime_service: Any,
    series_app: Any
) -> BackgroundLoaderService:
    """Initialize the background loader service singleton.

    Idempotent: calling this again returns the already-created instance
    and ignores the supplied arguments.

    Args:
        websocket_service: WebSocket service for broadcasts
        anime_service: AnimeService instance
        series_app: SeriesApp instance

    Returns:
        BackgroundLoaderService instance
    """
    global _background_loader_service

    # Guard clause: reuse the existing singleton when already built.
    if _background_loader_service is not None:
        return _background_loader_service

    _background_loader_service = BackgroundLoaderService(
        websocket_service=websocket_service,
        anime_service=anime_service,
        series_app=series_app
    )
    return _background_loader_service
|
||||
|
||||
|
||||
def get_background_loader_service() -> BackgroundLoaderService:
    """Get the background loader service singleton.

    Returns:
        BackgroundLoaderService instance

    Raises:
        RuntimeError: If service not initialized
    """
    service = _background_loader_service
    if service is None:
        # Initialization must happen during app startup, before any
        # request-time lookup lands here.
        raise RuntimeError(
            "BackgroundLoaderService not initialized. "
            "Call init_background_loader_service() first."
        )
    return service
|
||||
@@ -27,6 +27,7 @@ logger = logging.getLogger(__name__)
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from src.server.services.anime_service import AnimeService
|
||||
from src.server.services.background_loader_service import BackgroundLoaderService
|
||||
from src.server.services.download_service import DownloadService
|
||||
|
||||
# Security scheme for JWT authentication
|
||||
@@ -40,6 +41,7 @@ _series_app: Optional[SeriesApp] = None
|
||||
# Global service instances — module-level singletons, created lazily by
# the corresponding get_* dependency functions in this module.
_anime_service: Optional["AnimeService"] = None
_download_service: Optional["DownloadService"] = None
_background_loader_service: Optional["BackgroundLoaderService"] = None
|
||||
|
||||
|
||||
@dataclass
|
||||
@@ -452,3 +454,51 @@ def reset_download_service() -> None:
|
||||
"""Reset global DownloadService instance (for testing/config changes)."""
|
||||
global _download_service
|
||||
_download_service = None
|
||||
|
||||
|
||||
def get_background_loader_service() -> "BackgroundLoaderService":
    """
    Dependency to get BackgroundLoaderService instance.

    Lazily builds the singleton on first use from the other global
    services, converting any construction failure into an HTTP 500.

    Returns:
        BackgroundLoaderService: The background loader service for async data loading

    Raises:
        HTTPException: If BackgroundLoaderService initialization fails
    """
    global _background_loader_service

    # Guard clause: reuse the existing singleton when already built.
    if _background_loader_service is not None:
        return _background_loader_service

    try:
        from src.server.services.background_loader_service import (
            BackgroundLoaderService,
        )
        from src.server.services.websocket_service import get_websocket_service

        # Resolve dependencies in the same order as before so any failure
        # surfaces from the same getter.
        anime_service = get_anime_service()
        series_app = get_series_app()
        websocket_service = get_websocket_service()

        _background_loader_service = BackgroundLoaderService(
            websocket_service=websocket_service,
            anime_service=anime_service,
            series_app=series_app
        )
    except HTTPException:
        # Dependency getters may raise HTTPException themselves; pass through.
        raise
    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=(
                "Failed to initialize BackgroundLoaderService: "
                f"{str(e)}"
            ),
        ) from e

    return _background_loader_service
|
||||
|
||||
|
||||
def reset_background_loader_service() -> None:
    """Reset global BackgroundLoaderService instance (for testing/config changes).

    The next call to get_background_loader_service() will rebuild it.
    """
    global _background_loader_service
    _background_loader_service = None
|
||||
|
||||
Reference in New Issue
Block a user