fix: prevent folder scan during NFO processing on startup
- Modified SeriesManagerService to create SerieList with skip_load=True
- Changed scan_and_process_nfo() to load series from database instead of filesystem
- Fixed database transaction issue by creating a separate session per task
- Verified scans only run once during initial setup, not on normal startup
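The core of the fix is the session-per-task pattern: instead of threading one shared session through every concurrent NFO task, each task opens and closes its own. Below is a minimal, self-contained sketch of that pattern, not the project's actual code: `get_db_session` is stubbed here and only stands in for the real helper in `src.server.database.connection`, and the batch size and 2-second pause mirror the TMDB rate-limit handling in the diff.

```python
# Sketch of the session-per-task pattern (stubbed; get_db_session stands in
# for the real helper in src.server.database.connection).
import asyncio
from contextlib import asynccontextmanager


@asynccontextmanager
async def get_db_session():
    # Stand-in session factory: yields a fresh "session" per call, so
    # concurrent tasks never share a transaction.
    session = object()
    try:
        yield session
    finally:
        pass  # the real helper would commit/rollback and close here


async def process_one(serie_key: str) -> None:
    # Each task owns its session for its whole lifetime; a failure in one
    # task cannot poison a transaction shared with sibling tasks.
    async with get_db_session() as db:
        await asyncio.sleep(0)  # placeholder for the query/update/commit


async def process_all(keys: list[str], batch_size: int = 5) -> None:
    tasks = [process_one(k) for k in keys]
    for i in range(0, len(tasks), batch_size):
        # return_exceptions=True keeps one failed task from cancelling the rest
        await asyncio.gather(*tasks[i:i + batch_size], return_exceptions=True)
        if i + batch_size < len(tasks):
            await asyncio.sleep(2)  # pause between batches for TMDB rate limits


asyncio.run(process_all([f"series-{n}" for n in range(12)]))
```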
@@ -58,7 +58,8 @@ class SeriesManagerService:
             image_size: Image size to download
         """
         self.anime_directory = anime_directory
-        self.serie_list = SerieList(anime_directory)
+        # Skip automatic folder scanning - we load from database instead
+        self.serie_list = SerieList(anime_directory, skip_load=True)

         # NFO configuration
         self.auto_create_nfo = auto_create_nfo
@@ -103,12 +104,11 @@ class SeriesManagerService:
         )

     async def process_nfo_for_series(
         self,
         serie_folder: str,
         serie_name: str,
         serie_key: str,
-        year: Optional[int] = None,
-        db=None
+        year: Optional[int] = None
     ):
         """Process NFO file for a series (create or update).

@@ -117,7 +117,6 @@ class SeriesManagerService:
             serie_name: Series display name
             serie_key: Series unique identifier for database updates
             year: Release year (helps with TMDB matching)
-            db: Optional database session for updating IDs
         """
         if not self.nfo_service:
             return
@@ -128,56 +127,60 @@ class SeriesManagerService:
         nfo_exists = await self.nfo_service.check_nfo_exists(serie_folder)

         # If NFO exists, parse IDs and update database
-        if nfo_exists and db:
+        if nfo_exists:
             logger.debug(f"Parsing IDs from existing NFO for '{serie_name}'")
             ids = self.nfo_service.parse_nfo_ids(nfo_path)

             if ids["tmdb_id"] or ids["tvdb_id"]:
-                # Update database with extracted IDs
+                # Create database session for this task
                 from datetime import datetime, timezone

                 from sqlalchemy import select

+                from src.server.database.connection import get_db_session
                 from src.server.database.models import AnimeSeries

-                result = await db.execute(
-                    select(AnimeSeries).filter(AnimeSeries.key == serie_key)
-                )
-                series = result.scalars().first()
-
-                if series:
-                    now = datetime.now(timezone.utc)
-                    series.has_nfo = True
-
-                    if series.nfo_created_at is None:
-                        series.nfo_created_at = now
-                    series.nfo_updated_at = now
-
-                    if ids["tmdb_id"] is not None:
-                        series.tmdb_id = ids["tmdb_id"]
-                        logger.debug(
-                            f"Updated TMDB ID for '{serie_name}': "
-                            f"{ids['tmdb_id']}"
-                        )
-
-                    if ids["tvdb_id"] is not None:
-                        series.tvdb_id = ids["tvdb_id"]
-                        logger.debug(
-                            f"Updated TVDB ID for '{serie_name}': "
-                            f"{ids['tvdb_id']}"
-                        )
-
-                    await db.commit()
-                    logger.info(
-                        f"Updated database with IDs from NFO for "
-                        f"'{serie_name}' - TMDB: {ids['tmdb_id']}, "
-                        f"TVDB: {ids['tvdb_id']}"
-                    )
-                else:
-                    logger.warning(
-                        f"Series not found in database for NFO ID update: "
-                        f"{serie_key}"
-                    )
+                async with get_db_session() as db:
+                    result = await db.execute(
+                        select(AnimeSeries).filter(
+                            AnimeSeries.key == serie_key
+                        )
+                    )
+                    series = result.scalars().first()
+
+                    if series:
+                        now = datetime.now(timezone.utc)
+                        series.has_nfo = True
+
+                        if series.nfo_created_at is None:
+                            series.nfo_created_at = now
+                        series.nfo_updated_at = now
+
+                        if ids["tmdb_id"] is not None:
+                            series.tmdb_id = ids["tmdb_id"]
+                            logger.debug(
+                                f"Updated TMDB ID for '{serie_name}': "
+                                f"{ids['tmdb_id']}"
+                            )
+
+                        if ids["tvdb_id"] is not None:
+                            series.tvdb_id = ids["tvdb_id"]
+                            logger.debug(
+                                f"Updated TVDB ID for '{serie_name}': "
+                                f"{ids['tvdb_id']}"
+                            )
+
+                        await db.commit()
+                        logger.info(
+                            f"Updated database with IDs from NFO for "
+                            f"'{serie_name}' - TMDB: {ids['tmdb_id']}, "
+                            f"TVDB: {ids['tvdb_id']}"
+                        )
+                    else:
+                        logger.warning(
+                            f"Series not found in database for NFO ID "
+                            f"update: {serie_key}"
+                        )

         # Create or update NFO file if configured
         if not nfo_exists and self.auto_create_nfo:
@@ -218,8 +221,9 @@ class SeriesManagerService:
         """Scan all series and process NFO files based on configuration.

         This method:
-        1. Uses SerieList to scan series folders
-        2. For each series with existing NFO, reads TMDB/TVDB IDs and updates database
+        1. Loads series from database (avoiding filesystem scan)
+        2. For each series with existing NFO, reads TMDB/TVDB IDs
+           and updates database
         3. For each series without NFO (if auto_create=True), creates one
         4. For each series with NFO (if update_on_scan=True), updates it
         5. Runs operations concurrently for better performance
@@ -228,51 +232,46 @@ class SeriesManagerService:
             logger.info("NFO service not enabled, skipping NFO processing")
             return

-        # Get all series from SerieList
-        all_series = self.serie_list.get_all()
+        # Import database dependencies
+        from src.server.database.connection import get_db_session
+        from src.server.database.service import AnimeSeriesService

-        if not all_series:
-            logger.info("No series found to process")
+        # Load series from database (not from filesystem)
+        async with get_db_session() as db:
+            anime_series_list = await AnimeSeriesService.get_all(
+                db, with_episodes=False
+            )
+
+        if not anime_series_list:
+            logger.info("No series found in database to process")
             return

-        logger.info(f"Processing NFO for {len(all_series)} series...")
+        logger.info(f"Processing NFO for {len(anime_series_list)} series...")

-        # Import database session
-        from src.server.database.connection import get_db_session
-
-        # Create database session for ID updates
-        async with get_db_session() as db:
-            # Create tasks for concurrent processing
-            tasks = []
-            for serie in all_series:
-                # Extract year from first air date if available
-                year = None
-                if hasattr(serie, 'year') and serie.year:
-                    year = serie.year
-
-                task = self.process_nfo_for_series(
-                    serie_folder=serie.folder,
-                    serie_name=serie.name,
-                    serie_key=serie.key,
-                    year=year,
-                    db=db
-                )
-                tasks.append(task)
+        # Create tasks for concurrent processing
+        # Each task creates its own database session
+        tasks = []
+        for anime_series in anime_series_list:
+            # Extract year if available
+            year = getattr(anime_series, 'year', None)

-            # Process in batches to avoid overwhelming TMDB API
-            batch_size = 5
-            for i in range(0, len(tasks), batch_size):
-                batch = tasks[i:i + batch_size]
-                await asyncio.gather(*batch, return_exceptions=True)
-
-                # Small delay between batches to respect rate limits
-                if i + batch_size < len(tasks):
-                    await asyncio.sleep(2)
+            task = self.process_nfo_for_series(
+                serie_folder=anime_series.folder,
+                serie_name=anime_series.name,
+                serie_key=anime_series.key,
+                year=year
+            )
+            tasks.append(task)
+
+        # Process in batches to avoid overwhelming TMDB API
+        batch_size = 5
+        for i in range(0, len(tasks), batch_size):
+            batch = tasks[i:i + batch_size]
+            await asyncio.gather(*batch, return_exceptions=True)
+
+            # Small delay between batches to respect rate limits
+            if i + batch_size < len(tasks):
+                await asyncio.sleep(2)

         logger.info("NFO processing complete")

     def get_serie_list(self) -> SerieList:
         """Get the underlying SerieList instance.

         Returns:
             SerieList instance
@@ -150,7 +150,7 @@ class TestMediaScanStartup:
     async def test_check_incomplete_series_integration(self):
         """Test the _check_incomplete_series_on_startup function behavior."""
         from src.server.database.models import AnimeSeries
-
+
         # Mock database session
         mock_db = AsyncMock()

@@ -185,7 +185,7 @@ class TestMediaScanStartup:

         # Import and call the function
         from src.server.fastapi_app import _check_incomplete_series_on_startup
-
+
         # Mock get_db_session (it's imported inside the function)
         with patch('src.server.database.connection.get_db_session') as mock_get_db:
             mock_get_db.return_value.__aenter__.return_value = mock_db
@@ -277,7 +277,7 @@ class TestMediaScanStartup:
             mock_get_db.side_effect = Exception("Database connection error")

             from src.server.fastapi_app import _check_incomplete_series_on_startup
-
+
             # Should not raise exception
             try:
                 await _check_incomplete_series_on_startup(mock_background_loader)

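For context on the test above: patching `get_db_session` only works if the mock behaves like an async context manager, i.e. `__aenter__` must resolve to the session object, which is exactly what the `mock_get_db.return_value.__aenter__.return_value = mock_db` line arranges. A generic, runnable sketch of that wiring follows; it is not the project's actual test, and `fetch_value` is a made-up method used only to show the awaited call path.

```python
# Generic sketch of mocking an async context manager with unittest.mock.
import asyncio
from unittest.mock import AsyncMock, MagicMock

mock_db = AsyncMock()
mock_db.fetch_value.return_value = 42  # awaiting mock_db.fetch_value() -> 42

mock_get_db = MagicMock()  # the session factory itself is called synchronously
mock_get_db.return_value.__aenter__.return_value = mock_db  # as in the test


async def code_under_test(get_db_session) -> int:
    # Mirrors the startup check: open a session, run one awaited query.
    async with get_db_session() as db:
        return await db.fetch_value()


print(asyncio.run(code_under_test(mock_get_db)))  # prints 42
```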