fix: handle lifespan errors gracefully

- Add error tracking in lifespan context manager
- Only cleanup services that were successfully initialized
- Properly handle startup errors without breaking async context
- Fixes RuntimeError: generator didn't stop after athrow()
This commit is contained in:
2026-01-23 17:13:30 +01:00
parent 314f535446
commit 611798b786
4 changed files with 409 additions and 349 deletions

View File

@@ -118,59 +118,3 @@ For each task completed:
---
## TODO List:
### Completed Tasks:
1. **NFO/Artwork Loading Isolation** (Completed: 2026-01-23)
- Task: Ensure during anime add, NFO, logo, art, etc. is loaded only for the specific anime being added
- Status: VERIFIED - Implementation is correct
- Details: BackgroundLoaderService.\_load_nfo_and_images() correctly passes serie_folder to create_tvshow_nfo(), which only processes that specific series
2. **Setup Redirect Flow** (Completed: 2026-01-23)
- Task: Implement redirect flow: setup -> loading -> login when user completes setup
- Changes:
- Added /loading to exempt paths in setup_redirect middleware
- Setup page redirects to loading with initialization in background
- Loading page connects to WebSocket for real-time progress
- After completion, loading redirects to login
3. **Close Setup and Loading Pages** (Completed: 2026-01-23)
- Task: Make setup and loading pages unavailable after completion to prevent re-access
- Changes:
- Check if setup is complete before allowing access to /setup
- Redirect to login if accessing /setup after completion
- Check if initialization is complete before allowing access to /loading
- Redirect to login if accessing /loading after initialization complete
4. **Fix Loading Page WebSocket Auth** (Completed: 2026-01-23)
- Task: Fix 403 Forbidden error on WebSocket connection
- Issue: Loading page was connecting to /ws/progress (doesn't exist)
- Changes:
- Changed WebSocket URL from /ws/progress to /ws/connect (correct endpoint)
- Added /ws/connect to exempt paths in auth middleware
- Subscribe to 'system' room after connection for progress updates
- Fixed message data handling to match WebSocket format
5. **Fix WebSocket Room Subscription Format** (Completed: 2026-01-23)
- Task: Fix "Invalid room name: ." error
- Issue: WebSocket message format was incorrect - sending {action: 'join', room: 'system'} instead of {action: 'join', data: {room: 'system'}}
- Changes: Updated loading.html to send correct message format with nested data object
6. **Fix Async Generator Exception Handling** (Completed: 2026-01-23)
- Task: Fix "RuntimeError: generator didn't stop after athrow()" error during anime add
- Issue: get_optional_database_session() async generator wasn't properly handling exceptions
- Changes:
- Simplified exception handling in both get_database_session() and get_optional_database_session()
- Removed nested try-except inside yield that was causing generator issues
- Let async context manager handle exceptions naturally
7. **Initialization Restart Protection** (Completed: 2026-01-23)
- Task: Ensure initialization doesn't restart on server restart by using database flags
- Status: VERIFIED - Already implemented correctly
- Details:
- SystemSettings.initial_scan_completed flag already prevents re-running initialization
- initialization_service checks this flag before performing series sync
- All initialization steps (scan, NFO, media) have individual completion flags
- Server restart properly skips already-completed steps
### Active Tasks:
(No active tasks - awaiting new requirements)
```
```

View File

@@ -182,9 +182,11 @@ class SeriesManagerService:
f"update: {serie_key}"
)
# Create or update NFO file if configured
# Create NFO file only if it doesn't exist and auto_create enabled
if not nfo_exists and self.auto_create_nfo:
logger.info(f"Creating NFO for '{serie_name}' ({serie_folder})")
logger.info(
f"Creating NFO for '{serie_name}' ({serie_folder})"
)
await self.nfo_service.create_tvshow_nfo(
serie_name=serie_name,
serie_folder=serie_folder,
@@ -194,20 +196,10 @@ class SeriesManagerService:
download_fanart=self.download_fanart
)
logger.info(f"Successfully created NFO for '{serie_name}'")
elif nfo_exists and self.update_on_scan:
logger.info(f"Updating NFO for '{serie_name}' ({serie_folder})")
try:
await self.nfo_service.update_tvshow_nfo(
serie_folder=serie_folder,
download_media=True
)
logger.info(f"Successfully updated NFO for '{serie_name}'")
except NotImplementedError:
logger.warning(
f"NFO update not yet implemented for '{serie_name}'. "
"Delete tvshow.nfo to recreate."
)
elif nfo_exists:
logger.debug(
f"NFO exists for '{serie_name}', skipping download"
)
except TMDBAPIError as e:
logger.error(f"TMDB API error processing '{serie_name}': {e}")

View File

@@ -113,7 +113,15 @@ async def lifespan(_application: FastAPI):
# Setup logging first with INFO level
logger = setup_logging(log_level="INFO")
# Track successful initialization steps
initialized = {
'database': False,
'services': False,
'background_loader': False
}
# Startup
startup_error = None
try:
logger.info("Starting FastAPI application...")
@@ -121,9 +129,11 @@ async def lifespan(_application: FastAPI):
try:
from src.server.database.connection import init_db
await init_db()
initialized['database'] = True
logger.info("Database initialized successfully")
except Exception as e:
logger.error("Failed to initialize database: %s", e, exc_info=True)
startup_error = e
raise # Database is required, fail startup if it fails
# Load configuration from config.json and sync with settings
@@ -216,6 +226,7 @@ async def lifespan(_application: FastAPI):
from src.server.utils.dependencies import get_download_service
download_service = get_download_service()
await download_service.initialize()
initialized['services'] = True
logger.info("Download service initialized and queue restored")
# Initialize background loader service
@@ -231,6 +242,7 @@ async def lifespan(_application: FastAPI):
series_app=series_app_instance
)
await background_loader.start()
initialized['background_loader'] = True
logger.info("Background loader service started")
# Run media scan only on first run
@@ -251,14 +263,34 @@ async def lifespan(_application: FastAPI):
)
except Exception as e:
logger.error("Error during startup: %s", e, exc_info=True)
raise # Re-raise to prevent app from starting in broken state
startup_error = e
# Don't re-raise here, let the finally/cleanup handle shutdown
# Yield control to the application
yield
# Yield control to the application (or immediately go to cleanup on error)
if startup_error is None:
try:
yield
except Exception as e:
logger.error("Error during application runtime: %s", e, exc_info=True)
else:
# Startup failed, but we still need to yield to satisfy the protocol
# The app won't actually run since we'll raise after cleanup
try:
yield
finally:
# After cleanup, re-raise the startup error
pass
# Shutdown - execute in proper order with timeout protection
logger.info("FastAPI application shutting down (graceful shutdown initiated)")
# Only cleanup what was successfully initialized
if not initialized['database']:
logger.info("Database was not initialized, skipping all cleanup")
if startup_error:
raise startup_error
return
# Define shutdown timeout (total time allowed for all shutdown operations)
SHUTDOWN_TIMEOUT = 30.0
@@ -270,20 +302,21 @@ async def lifespan(_application: FastAPI):
elapsed = time.monotonic() - shutdown_start
return max(0.0, SHUTDOWN_TIMEOUT - elapsed)
# 1. Stop background loader service
try:
from src.server.utils.dependencies import _background_loader_service
if _background_loader_service is not None:
logger.info("Stopping background loader service...")
await asyncio.wait_for(
_background_loader_service.stop(),
timeout=min(10.0, remaining_time())
)
logger.info("Background loader service stopped")
except asyncio.TimeoutError:
logger.warning("Background loader service shutdown timed out")
except Exception as e: # pylint: disable=broad-exception-caught
logger.error("Error stopping background loader service: %s", e, exc_info=True)
# 1. Stop background loader service (only if initialized)
if initialized['background_loader']:
try:
from src.server.utils.dependencies import _background_loader_service
if _background_loader_service is not None:
logger.info("Stopping background loader service...")
await asyncio.wait_for(
_background_loader_service.stop(),
timeout=min(10.0, remaining_time())
)
logger.info("Background loader service stopped")
except asyncio.TimeoutError:
logger.warning("Background loader service shutdown timed out")
except Exception as e: # pylint: disable=broad-exception-caught
logger.error("Error stopping background loader service: %s", e, exc_info=True)
# 2. Broadcast shutdown notification via WebSocket
try:
@@ -353,6 +386,10 @@ async def lifespan(_application: FastAPI):
"FastAPI application shutdown complete (took %.2fs)",
elapsed_total
)
# Re-raise startup error if it occurred
if startup_error:
raise startup_error
# Initialize FastAPI app with lifespan

View File

@@ -1,5 +1,5 @@
"""Centralized initialization service for application startup and setup."""
from typing import Optional
from typing import Callable
import structlog
@@ -9,6 +9,174 @@ from src.server.services.anime_service import sync_series_from_data_files
logger = structlog.get_logger(__name__)
async def _check_scan_status(
    check_method: Callable,
    scan_type: str,
    log_completed_msg: str | None = None,
    log_not_completed_msg: str | None = None
) -> bool:
    """Generic check for whether a one-time scan has been completed.

    Args:
        check_method: Awaitable taking (SystemSettingsService, db) that
            returns the completion flag for this scan.
        scan_type: Human-readable scan name used in log messages
            (e.g. "initial", "NFO", "media").
        log_completed_msg: Optional message logged when the scan is done.
        log_not_completed_msg: Optional message logged when it is not.

    Returns:
        bool: True if the scan was completed; False if not, or if the
        status could not be determined (fail-open so the scan re-runs).
    """
    from src.server.database.connection import get_db_session
    from src.server.database.system_settings_service import SystemSettingsService
    try:
        async with get_db_session() as db:
            is_completed = await check_method(SystemSettingsService, db)
        if is_completed and log_completed_msg:
            logger.info(log_completed_msg)
        elif not is_completed and log_not_completed_msg:
            logger.info(log_not_completed_msg)
        return is_completed
    except Exception as e:
        # A failed status check is treated as "not done" so a subsequent
        # scan can run; this is deliberately best-effort.
        logger.warning(
            "Failed to check %s scan status: %s, assuming not done",
            scan_type,
            e
        )
        return False
async def _mark_scan_completed(
    mark_method: Callable,
    scan_type: str
) -> None:
    """Persist the completed flag for a one-time scan.

    Args:
        mark_method: Awaitable taking (SystemSettingsService, db) that
            writes the completion flag for this scan.
        scan_type: Human-readable scan name used in log messages.
    """
    from src.server.database.connection import get_db_session
    from src.server.database.system_settings_service import SystemSettingsService
    try:
        async with get_db_session() as session:
            await mark_method(SystemSettingsService, session)
        logger.info("Marked %s scan as completed", scan_type)
    except Exception as exc:
        # Best-effort: a failed write only means the scan re-runs next start.
        logger.warning("Failed to mark %s scan as completed: %s", scan_type, exc)
async def _check_initial_scan_status() -> bool:
    """Report whether the one-time initial scan has already run.

    Returns:
        bool: True if the initial scan was completed previously.
    """
    return await _check_scan_status(
        check_method=lambda svc, db: svc.is_initial_scan_completed(db),
        scan_type="initial",
        log_completed_msg=(
            "Initial scan already completed, skipping data file sync"
        ),
        log_not_completed_msg=(
            "Initial scan not completed, performing first-time setup"
        )
    )
async def _mark_initial_scan_completed() -> None:
    """Record in system settings that the initial scan has finished."""
    await _mark_scan_completed(
        mark_method=lambda service, session: (
            service.mark_initial_scan_completed(session)
        ),
        scan_type="initial"
    )
async def _sync_anime_folders(progress_service=None) -> int:
    """Scan the anime directory and sync discovered series to the database.

    Args:
        progress_service: Optional ProgressService used to emit updates
            under the "series_sync" progress id.

    Returns:
        int: Number of series added by the sync.
    """
    async def _report(current: int, message: str) -> None:
        # Progress reporting is optional; no-op when no service is given.
        if progress_service:
            await progress_service.update_progress(
                progress_id="series_sync",
                current=current,
                message=message,
                metadata={"step_id": "series_sync"}
            )

    logger.info("Performing initial anime folder scan...")
    await _report(25, "Scanning anime folders...")
    sync_count = await sync_series_from_data_files(settings.anime_directory)
    logger.info("Data file sync complete. Added %d series.", sync_count)
    await _report(75, f"Synced {sync_count} series from data files")
    return sync_count
async def _load_series_into_memory(progress_service=None) -> None:
    """Populate SeriesApp's in-memory cache from the database.

    Args:
        progress_service: Optional ProgressService used to mark the
            "series_sync" progress as complete afterwards.
    """
    from src.server.utils.dependencies import get_anime_service

    # NOTE(review): this reaches into the anime service's private
    # _load_series_from_db() hook — confirm it stays supported.
    service = get_anime_service()
    await service._load_series_from_db()
    logger.info("Series loaded from database into memory")
    if progress_service:
        await progress_service.complete_progress(
            progress_id="series_sync",
            message="Series loaded into memory",
            metadata={"step_id": "series_sync"}
        )
async def _validate_anime_directory(progress_service=None) -> bool:
    """Check whether an anime directory has been configured.

    Args:
        progress_service: Optional ProgressService; when the directory is
            missing, the "series_sync" progress is marked complete.

    Returns:
        bool: True when settings.anime_directory is set, False otherwise.
    """
    logger.info(
        "Checking anime_directory setting: '%s'",
        settings.anime_directory
    )
    if settings.anime_directory:
        return True
    # No directory configured: log, close out progress, and signal skip.
    logger.info("Initialization skipped - anime directory not configured")
    if progress_service:
        await progress_service.complete_progress(
            progress_id="series_sync",
            message="No anime directory configured",
            metadata={"step_id": "series_sync"}
        )
    return False
async def perform_initial_setup(progress_service=None):
"""Perform initial setup including series sync and scan completion marking.
@@ -21,14 +189,11 @@ async def perform_initial_setup(progress_service=None):
5. Media scan is performed
Args:
progress_service: Optional ProgressService instance for emitting updates
progress_service: Optional ProgressService for emitting updates
Returns:
bool: True if initialization was performed, False if skipped
"""
from src.server.database.connection import get_db_session
from src.server.database.system_settings_service import SystemSettingsService
# Send initial progress update
if progress_service:
from src.server.services.progress_service import ProgressType
@@ -41,110 +206,31 @@ async def perform_initial_setup(progress_service=None):
metadata={"step_id": "series_sync"}
)
# Check if initial setup has been completed
try:
async with get_db_session() as db:
is_initial_scan_done = (
await SystemSettingsService.is_initial_scan_completed(db)
)
if is_initial_scan_done:
logger.info(
"Initial scan already completed, skipping data file sync"
)
if progress_service:
await progress_service.complete_progress(
progress_id="series_sync",
message="Already completed",
metadata={"step_id": "series_sync"}
)
return False
else:
logger.info(
"Initial scan not completed, "
"performing first-time setup"
)
except Exception as e:
logger.warning(
"Failed to check system settings: %s, assuming first run", e
)
is_initial_scan_done = False
# Sync series from data files to database (only on first run)
try:
logger.info(
"Checking anime_directory setting: '%s'",
settings.anime_directory
)
if not settings.anime_directory:
logger.info(
"Initialization skipped - anime directory not configured"
)
if progress_service:
await progress_service.complete_progress(
progress_id="series_sync",
message="No anime directory configured",
metadata={"step_id": "series_sync"}
)
return False
# Only sync from data files on first run
if not is_initial_scan_done:
logger.info("Performing initial anime folder scan...")
if progress_service:
await progress_service.update_progress(
progress_id="series_sync",
current=25,
message="Scanning anime folders...",
metadata={"step_id": "series_sync"}
)
sync_count = await sync_series_from_data_files(
settings.anime_directory
)
logger.info(
"Data file sync complete. Added %d series.", sync_count
)
if progress_service:
await progress_service.update_progress(
progress_id="series_sync",
current=75,
message=f"Synced {sync_count} series from data files",
metadata={"step_id": "series_sync"}
)
# Mark initial scan as completed
try:
async with get_db_session() as db:
await (
SystemSettingsService
.mark_initial_scan_completed(db)
)
logger.info("Marked initial scan as completed")
except Exception as e:
logger.warning(
"Failed to mark initial scan as completed: %s", e
)
else:
logger.info(
"Skipping initial scan - "
"already completed on previous run"
)
# Load series from database into SeriesApp's in-memory cache
from src.server.utils.dependencies import get_anime_service
anime_service = get_anime_service()
await anime_service._load_series_from_db()
logger.info("Series loaded from database into memory")
# Check if initial setup has already been completed
is_initial_scan_done = await _check_initial_scan_status()
if is_initial_scan_done:
if progress_service:
await progress_service.complete_progress(
progress_id="series_sync",
message="Series loaded into memory",
message="Already completed",
metadata={"step_id": "series_sync"}
)
return False
# Validate that anime directory is configured
if not await _validate_anime_directory(progress_service):
return False
# Perform the actual initialization
try:
# Sync series from anime folders to database
await _sync_anime_folders(progress_service)
# Mark the initial scan as completed
await _mark_initial_scan_completed()
# Load series into memory from database
await _load_series_into_memory(progress_service)
return True
@@ -153,15 +239,86 @@ async def perform_initial_setup(progress_service=None):
return False
async def _check_nfo_scan_status() -> bool:
    """Report whether the one-time NFO scan has already run.

    Returns:
        bool: True if the NFO scan was completed previously.
    """
    return await _check_scan_status(
        check_method=lambda service, session: (
            service.is_initial_nfo_scan_completed(session)
        ),
        scan_type="NFO"
    )
async def _mark_nfo_scan_completed() -> None:
    """Record in system settings that the NFO scan has finished."""
    await _mark_scan_completed(
        mark_method=lambda service, session: (
            service.mark_initial_nfo_scan_completed(session)
        ),
        scan_type="NFO"
    )
async def _is_nfo_scan_configured() -> bool:
    """Check whether NFO scanning is properly configured.

    NFO scanning requires a TMDB API key plus at least one enabled NFO
    feature (auto-create or update-on-scan).

    Returns:
        bool: True if the TMDB API key is set and an NFO feature is on.
    """
    # bool() so the declared return type holds: a bare `a and (b or c)`
    # would leak the falsy key value ('' or None) instead of False.
    return bool(
        settings.tmdb_api_key
        and (settings.nfo_auto_create or settings.nfo_update_on_scan)
    )
async def _execute_nfo_scan(progress_service=None) -> None:
    """Run the NFO scan, enriching series metadata with TMDB data.

    Args:
        progress_service: Optional ProgressService used to emit updates
            under the "nfo_scan" progress id.

    Raises:
        Exception: Propagated from the underlying scan if it fails.
    """
    from src.core.services.series_manager_service import SeriesManagerService
    logger.info("Performing initial NFO scan...")
    if progress_service:
        await progress_service.update_progress(
            progress_id="nfo_scan",
            current=25,
            message="Scanning series for NFO files...",
            metadata={"step_id": "nfo_scan"}
        )
    manager = SeriesManagerService.from_settings()
    try:
        if progress_service:
            await progress_service.update_progress(
                progress_id="nfo_scan",
                current=50,
                message="Processing NFO files with TMDB data...",
                metadata={"step_id": "nfo_scan"}
            )
        await manager.scan_and_process_nfo()
    finally:
        # Always release the manager's resources: previously a scan
        # failure skipped close() and leaked the open client.
        await manager.close()
    logger.info("Initial NFO scan completed")
    if progress_service:
        await progress_service.complete_progress(
            progress_id="nfo_scan",
            message="NFO scan completed successfully",
            metadata={"step_id": "nfo_scan"}
        )
async def perform_nfo_scan_if_needed(progress_service=None):
"""Perform initial NFO scan if not yet completed and configured.
Args:
progress_service: Optional ProgressService instance for emitting updates
progress_service: Optional ProgressService for emitting updates
"""
from src.server.database.connection import get_db_session
from src.server.database.system_settings_service import SystemSettingsService
if progress_service:
from src.server.services.progress_service import ProgressType
await progress_service.start_progress(
@@ -173,104 +330,17 @@ async def perform_nfo_scan_if_needed(progress_service=None):
metadata={"step_id": "nfo_scan"}
)
# Check if initial NFO scan has been completed
try:
async with get_db_session() as db:
is_nfo_scan_done = (
await SystemSettingsService
.is_initial_nfo_scan_completed(db)
)
except Exception as e:
logger.warning(
"Failed to check NFO scan status: %s, assuming not done",
e
)
is_nfo_scan_done = False
# Check if NFO scan was already completed
is_nfo_scan_done = await _check_nfo_scan_status()
# Run NFO scan only on first run (if configured)
if settings.tmdb_api_key and (
settings.nfo_auto_create or settings.nfo_update_on_scan
):
if not is_nfo_scan_done:
logger.info("Performing initial NFO scan...")
if progress_service:
await progress_service.update_progress(
progress_id="nfo_scan",
current=25,
message="Scanning series for NFO files...",
metadata={"step_id": "nfo_scan"}
)
try:
from src.core.services.series_manager_service import (
SeriesManagerService,
)
manager = SeriesManagerService.from_settings()
if progress_service:
await progress_service.update_progress(
progress_id="nfo_scan",
current=50,
message="Processing NFO files with TMDB data...",
metadata={"step_id": "nfo_scan"}
)
await manager.scan_and_process_nfo()
await manager.close()
logger.info("Initial NFO scan completed")
if progress_service:
await progress_service.complete_progress(
progress_id="nfo_scan",
message="NFO scan completed successfully",
metadata={"step_id": "nfo_scan"}
)
# Mark NFO scan as completed
try:
async with get_db_session() as db:
await (
SystemSettingsService
.mark_initial_nfo_scan_completed(db)
)
logger.info("Marked NFO scan as completed")
except Exception as e:
logger.warning(
"Failed to mark NFO scan as completed: %s",
e
)
except Exception as e:
logger.error(
"Failed to complete NFO scan: %s",
e,
exc_info=True
)
if progress_service:
await progress_service.fail_progress(
progress_id="nfo_scan",
error_message=f"NFO scan failed: {str(e)}",
metadata={"step_id": "nfo_scan"}
)
else:
logger.info(
"Skipping NFO scan - already completed on previous run"
)
if progress_service:
await progress_service.complete_progress(
progress_id="nfo_scan",
message="Already completed",
metadata={"step_id": "nfo_scan"}
)
else:
if not settings.tmdb_api_key:
logger.info(
"NFO scan skipped - TMDB API key not configured"
)
message = "Skipped - TMDB API key not configured"
else:
message = "Skipped - NFO features disabled"
# Check if NFO features are configured
if not await _is_nfo_scan_configured():
message = (
"Skipped - TMDB API key not configured"
if not settings.tmdb_api_key
else "Skipped - NFO features disabled"
)
logger.info(f"NFO scan skipped: {message}")
if progress_service:
await progress_service.complete_progress(
@@ -278,11 +348,67 @@ async def perform_nfo_scan_if_needed(progress_service=None):
message=message,
metadata={"step_id": "nfo_scan"}
)
else:
logger.info(
"NFO scan skipped - auto_create and update_on_scan "
"both disabled"
return
# Skip if already completed
if is_nfo_scan_done:
logger.info("Skipping NFO scan - already completed on previous run")
if progress_service:
await progress_service.complete_progress(
progress_id="nfo_scan",
message="Already completed",
metadata={"step_id": "nfo_scan"}
)
return
# Execute the NFO scan
try:
await _execute_nfo_scan(progress_service)
await _mark_nfo_scan_completed()
except Exception as e:
logger.error("Failed to complete NFO scan: %s", e, exc_info=True)
if progress_service:
await progress_service.fail_progress(
progress_id="nfo_scan",
error_message=f"NFO scan failed: {str(e)}",
metadata={"step_id": "nfo_scan"}
)
async def _check_media_scan_status() -> bool:
    """Report whether the one-time media scan has already run.

    Returns:
        bool: True if the media scan was completed previously.
    """
    return await _check_scan_status(
        check_method=lambda service, session: (
            service.is_initial_media_scan_completed(session)
        ),
        scan_type="media"
    )
async def _mark_media_scan_completed() -> None:
    """Record in system settings that the media scan has finished."""
    await _mark_scan_completed(
        mark_method=lambda service, session: (
            service.mark_initial_media_scan_completed(session)
        ),
        scan_type="media"
    )
async def _execute_media_scan(background_loader) -> None:
    """Run the initial media scan and queue incomplete series for loading.

    Args:
        background_loader: Background loader service that receives the
            queued work.

    Raises:
        Exception: Propagated from the startup check if it fails.
    """
    # Imported at call time — presumably to avoid a circular import with
    # fastapi_app; confirm before moving to module level.
    from src.server.fastapi_app import _check_incomplete_series_on_startup

    logger.info("Performing initial media scan...")
    await _check_incomplete_series_on_startup(background_loader)
    logger.info("Initial media scan completed")
async def perform_media_scan_if_needed(background_loader):
@@ -291,55 +417,16 @@ async def perform_media_scan_if_needed(background_loader):
Args:
background_loader: The background loader service instance
"""
from src.server.database.connection import get_db_session
from src.server.database.system_settings_service import SystemSettingsService
# Check if initial media scan has been completed
is_media_scan_done = False
try:
async with get_db_session() as db:
is_media_scan_done = (
await SystemSettingsService
.is_initial_media_scan_completed(db)
)
except Exception as e:
logger.warning(
"Failed to check media scan status: %s, assuming not done",
e
)
is_media_scan_done = False
# Check if media scan was already completed
is_media_scan_done = await _check_media_scan_status()
# Run media scan only on first run
if not is_media_scan_done:
logger.info("Performing initial media scan...")
try:
# Import the helper function from fastapi_app
from src.server.fastapi_app import _check_incomplete_series_on_startup
# Check for incomplete series and queue background loading
await _check_incomplete_series_on_startup(background_loader)
logger.info("Initial media scan completed")
# Mark media scan as completed
try:
async with get_db_session() as db:
await (
SystemSettingsService
.mark_initial_media_scan_completed(db)
)
logger.info("Marked media scan as completed")
except Exception as e:
logger.warning(
"Failed to mark media scan as completed: %s",
e
)
except Exception as e:
logger.error(
"Failed to complete media scan: %s",
e,
exc_info=True
)
else:
logger.info(
"Skipping media scan - already completed on previous run"
)
if is_media_scan_done:
logger.info("Skipping media scan - already completed on previous run")
return
# Execute the media scan
try:
await _execute_media_scan(background_loader)
await _mark_media_scan_completed()
except Exception as e:
logger.error("Failed to complete media scan: %s", e, exc_info=True)