Better DB model

This commit is contained in:
2025-12-04 19:22:42 +01:00
parent 942f14f746
commit 798461a1ea
18 changed files with 551 additions and 2161 deletions

View File

@@ -1,479 +0,0 @@
"""Example integration of database service with existing services.
This file demonstrates how to integrate the database service layer with
existing application services like AnimeService and DownloadService.
These examples show patterns for:
- Persisting scan results to database
- Loading queue from database on startup
- Syncing download progress to database
- Maintaining consistency between in-memory state and database
"""
from __future__ import annotations
import logging
from typing import List, Optional
from sqlalchemy.ext.asyncio import AsyncSession
from src.core.entities.series import Serie
from src.server.database.models import DownloadPriority, DownloadStatus
from src.server.database.service import (
AnimeSeriesService,
DownloadQueueService,
EpisodeService,
)
logger = logging.getLogger(__name__)
# ============================================================================
# Example 1: Persist Scan Results
# ============================================================================
async def persist_scan_results(
    db: AsyncSession,
    series_list: List[Serie],
) -> None:
    """Write scan results from SerieScanner into the database.

    For every scanned series, the matching row (looked up by provider
    key) is updated in place, or a fresh row is inserted when none
    exists; episode rows are then reconciled for that series.

    Args:
        db: Database session
        series_list: List of Serie objects from scan
    """
    logger.info(f"Persisting {len(series_list)} series to database")
    for serie in series_list:
        record = await AnimeSeriesService.get_by_key(db, serie.key)
        if record is None:
            # No row for this provider key yet: insert one.
            created = await AnimeSeriesService.create(
                db,
                key=serie.key,
                name=serie.name,
                site=serie.site,
                folder=serie.folder,
                episode_dict=serie.episode_dict,
            )
            target_id = created.id
        else:
            # Row exists: refresh its mutable fields from the scan.
            await AnimeSeriesService.update(
                db,
                record.id,
                name=serie.name,
                site=serie.site,
                folder=serie.folder,
                episode_dict=serie.episode_dict,
            )
            target_id = record.id
        # Bring episode rows in line with the scanned episode_dict.
        await _update_episodes(db, target_id, serie)
    await db.commit()
    logger.info("Scan results persisted successfully")
async def _update_episodes(
    db: AsyncSession,
    series_id: int,
    serie: Serie,
) -> None:
    """Reconcile database episode rows for a single series.

    Args:
        db: Database session
        series_id: Series ID in database
        serie: Serie object with episode information
    """
    # Index current rows by (season, episode_number) for O(1) lookups.
    current = await EpisodeService.get_by_series(db, series_id)
    by_key = {(row.season, row.episode_number): row for row in current}
    for season, episodes in serie.episode_dict.items():
        season_no = int(season)
        for ep_num in episodes:
            found = by_key.get((season_no, int(ep_num)))
            if found is None:
                # Unknown episode: insert a fresh, not-yet-downloaded row.
                await EpisodeService.create(
                    db,
                    series_id=series_id,
                    season=season_no,
                    episode_number=int(ep_num),
                    is_downloaded=False,
                )
            elif not found.is_downloaded:
                # Row exists without a recorded download; reconciling the
                # local file state (via serie.local_episodes) would go here.
                pass
# ============================================================================
# Example 2: Load Queue from Database
# ============================================================================
async def load_queue_from_database(
    db: AsyncSession,
) -> List[dict]:
    """Load download queue from database.

    Fetches pending and active queue items and flattens each one into a
    plain dictionary suitable for DownloadService.

    Args:
        db: Database session

    Returns:
        List of download items as dictionaries
    """
    logger.info("Loading download queue from database")
    pending = await DownloadQueueService.get_pending(db)
    active = await DownloadQueueService.get_active(db)
    # Build the serialized queue in one pass over both result sets.
    queue_items = [
        {
            "id": entry.id,
            "series_id": entry.series_id,
            "season": entry.season,
            "episode_number": entry.episode_number,
            "status": entry.status.value,
            "priority": entry.priority.value,
            "progress_percent": entry.progress_percent,
            "downloaded_bytes": entry.downloaded_bytes,
            "total_bytes": entry.total_bytes,
            "download_speed": entry.download_speed,
            "error_message": entry.error_message,
            "retry_count": entry.retry_count,
        }
        for entry in pending + active
    ]
    logger.info(f"Loaded {len(queue_items)} items from database")
    return queue_items
# ============================================================================
# Example 3: Sync Download Progress to Database
# ============================================================================
async def sync_download_progress(
    db: AsyncSession,
    item_id: int,
    progress_percent: float,
    downloaded_bytes: int,
    total_bytes: Optional[int] = None,
    download_speed: Optional[float] = None,
) -> None:
    """Sync download progress to database.

    Persists the latest progress figures for a queue item and commits;
    intended to be called from the download progress callback.

    Args:
        db: Database session
        item_id: Download queue item ID
        progress_percent: Progress percentage (0-100)
        downloaded_bytes: Bytes downloaded
        total_bytes: Optional total file size
        download_speed: Optional current speed (bytes/sec)
    """
    await DownloadQueueService.update_progress(
        db,
        item_id,
        progress_percent=progress_percent,
        downloaded_bytes=downloaded_bytes,
        total_bytes=total_bytes,
        download_speed=download_speed,
    )
    await db.commit()
async def mark_download_complete(
    db: AsyncSession,
    item_id: int,
    file_path: str,
    file_size: int,
) -> None:
    """Mark download as complete in database.

    Sets the queue item status to COMPLETED and records the episode as
    downloaded, inserting the episode row first if it does not exist.

    Args:
        db: Database session
        item_id: Download queue item ID
        file_path: Path to downloaded file
        file_size: File size in bytes
    """
    item = await DownloadQueueService.get_by_id(db, item_id)
    if not item:
        logger.error(f"Download item {item_id} not found")
        return
    await DownloadQueueService.update_status(
        db,
        item_id,
        DownloadStatus.COMPLETED,
    )
    # Find the episode row this queue item refers to, if any.
    episode = await EpisodeService.get_by_episode(
        db,
        item.series_id,
        item.season,
        item.episode_number,
    )
    if episode:
        await EpisodeService.mark_downloaded(
            db,
            episode.id,
            file_path,
            file_size,
        )
    else:
        # No episode row yet (e.g. queued before any scan): create it
        # already marked as downloaded. Fix: the return value is not
        # used afterwards, so the previous unused local binding is gone.
        await EpisodeService.create(
            db,
            series_id=item.series_id,
            season=item.season,
            episode_number=item.episode_number,
            file_path=file_path,
            file_size=file_size,
            is_downloaded=True,
        )
    await db.commit()
    logger.info(
        f"Marked download complete: S{item.season:02d}E{item.episode_number:02d}"
    )
async def mark_download_failed(
    db: AsyncSession,
    item_id: int,
    error_message: str,
) -> None:
    """Mark download as failed in database.

    Args:
        db: Database session
        item_id: Download queue item ID
        error_message: Error description
    """
    # Record the failure and its reason, then persist immediately.
    await DownloadQueueService.update_status(
        db, item_id, DownloadStatus.FAILED, error_message=error_message
    )
    await db.commit()
# ============================================================================
# Example 4: Add Episodes to Download Queue
# ============================================================================
async def add_episodes_to_queue(
    db: AsyncSession,
    series_key: str,
    episodes: List[tuple[int, int]],  # List of (season, episode) tuples
    priority: DownloadPriority = DownloadPriority.NORMAL,
) -> int:
    """Add multiple episodes to download queue.

    Skips episodes that are already pending or downloading.

    Args:
        db: Database session
        series_key: Series provider key
        episodes: List of (season, episode_number) tuples
        priority: Download priority

    Returns:
        Number of episodes added to queue
    """
    series = await AnimeSeriesService.get_by_key(db, series_key)
    if not series:
        logger.error(f"Series not found: {series_key}")
        return 0
    # Fix: fetch the queue ONCE instead of once per episode (the fetch is
    # loop-invariant), and precompute the already-queued keys as a set so
    # each membership test is O(1) instead of a full scan.
    existing_items = await DownloadQueueService.get_all(db)
    queued_keys = {
        (item.season, item.episode_number)
        for item in existing_items
        if item.series_id == series.id
        and item.status in (DownloadStatus.PENDING, DownloadStatus.DOWNLOADING)
    }
    added_count = 0
    for season, episode_number in episodes:
        if (season, episode_number) in queued_keys:
            continue
        await DownloadQueueService.create(
            db,
            series_id=series.id,
            season=season,
            episode_number=episode_number,
            priority=priority,
        )
        # Track the new entry so duplicates within `episodes` are skipped,
        # matching the re-query behavior of the per-iteration fetch.
        queued_keys.add((season, episode_number))
        added_count += 1
    await db.commit()
    logger.info(f"Added {added_count} episodes to download queue")
    return added_count
# ============================================================================
# Example 5: Integration with AnimeService
# ============================================================================
class EnhancedAnimeService:
    """Enhanced AnimeService with database persistence.

    This is an example of how to wrap the existing AnimeService with
    database persistence capabilities.
    """

    def __init__(self, db_session_factory):
        """Initialize enhanced anime service.

        Args:
            db_session_factory: Async session factory for database access
        """
        self.db_session_factory = db_session_factory

    async def rescan_with_persistence(self, directory: str) -> dict:
        """Rescan directory and persist results.

        Args:
            directory: Directory to scan

        Returns:
            Scan results dictionary
        """
        # Local import avoids a circular dependency at module load time.
        from src.core.SeriesApp import SeriesApp

        scanner = SeriesApp(directory)
        found = scanner.ReScan()
        # Write the scan outcome through the shared persistence helper.
        async with self.db_session_factory() as db:
            await persist_scan_results(db, found)
        return {
            "total_series": len(found),
            "message": "Scan completed and persisted to database",
        }

    async def get_series_with_missing_episodes(self) -> List[dict]:
        """Get series with missing episodes from database.

        Returns:
            List of series with missing episodes
        """
        async with self.db_session_factory() as db:
            rows = await AnimeSeriesService.get_all(
                db,
                with_episodes=True,
            )
            report: List[dict] = []
            for series in rows:
                # Series without an episode_dict cannot be evaluated.
                if not series.episode_dict:
                    continue
                expected = sum(
                    len(eps) for eps in series.episode_dict.values()
                )
                have = sum(
                    1 for ep in series.episodes if ep.is_downloaded
                )
                if have < expected:
                    report.append({
                        "id": series.id,
                        "key": series.key,
                        "name": series.name,
                        "total_episodes": expected,
                        "downloaded_episodes": have,
                        "missing_episodes": expected - have,
                    })
            return report
# ============================================================================
# Usage Example
# ============================================================================
async def example_usage():
    """Example usage of database service integration."""
    from src.server.database import get_db_session

    async with get_db_session() as db:
        # Queue a few episodes at high priority.
        count = await add_episodes_to_queue(
            db,
            series_key="attack-on-titan",
            episodes=[(1, 1), (1, 2), (1, 3)],
            priority=DownloadPriority.HIGH,
        )
        print(f"Added {count} episodes to queue")

        # Read the queue back out of the database.
        items = await load_queue_from_database(db)
        print(f"Queue has {len(items)} items")

        if items:
            first_id = items[0]["id"]
            # Report mid-download progress on the first item...
            await sync_download_progress(
                db,
                item_id=first_id,
                progress_percent=50.0,
                downloaded_bytes=500000,
                total_bytes=1000000,
            )
            # ...then mark it finished.
            await mark_download_complete(
                db,
                item_id=first_id,
                file_path="/path/to/file.mp4",
                file_size=1000000,
            )
# Allow running this example module directly as a script.
if __name__ == "__main__":
    import asyncio
    asyncio.run(example_usage())

View File

@@ -47,7 +47,7 @@ EXPECTED_INDEXES = {
"episodes": ["ix_episodes_series_id"],
"download_queue": [
"ix_download_queue_series_id",
"ix_download_queue_status",
"ix_download_queue_episode_id",
],
"user_sessions": [
"ix_user_sessions_session_id",

View File

@@ -15,18 +15,7 @@ from datetime import datetime, timezone
from enum import Enum
from typing import List, Optional
from sqlalchemy import (
JSON,
Boolean,
DateTime,
Float,
ForeignKey,
Integer,
String,
Text,
func,
)
from sqlalchemy import Enum as SQLEnum
from sqlalchemy import Boolean, DateTime, ForeignKey, Integer, String, Text, func
from sqlalchemy.orm import Mapped, mapped_column, relationship, validates
from src.server.database.base import Base, TimestampMixin
@@ -51,10 +40,6 @@ class AnimeSeries(Base, TimestampMixin):
name: Display name of the series
site: Provider site URL
folder: Filesystem folder name (metadata only, not for lookups)
description: Optional series description
status: Current status (ongoing, completed, etc.)
total_episodes: Total number of episodes
cover_url: URL to series cover image
episodes: Relationship to Episode models (via id foreign key)
download_items: Relationship to DownloadQueueItem models (via id foreign key)
created_at: Creation timestamp (from TimestampMixin)
@@ -89,30 +74,6 @@ class AnimeSeries(Base, TimestampMixin):
doc="Filesystem folder name - METADATA ONLY, not for lookups"
)
# Metadata
description: Mapped[Optional[str]] = mapped_column(
Text, nullable=True,
doc="Series description"
)
status: Mapped[Optional[str]] = mapped_column(
String(50), nullable=True,
doc="Series status (ongoing, completed, etc.)"
)
total_episodes: Mapped[Optional[int]] = mapped_column(
Integer, nullable=True,
doc="Total number of episodes"
)
cover_url: Mapped[Optional[str]] = mapped_column(
String(1000), nullable=True,
doc="URL to cover image"
)
# JSON field for episode dictionary (season -> [episodes])
episode_dict: Mapped[Optional[dict]] = mapped_column(
JSON, nullable=True,
doc="Episode dictionary {season: [episodes]}"
)
# Relationships
episodes: Mapped[List["Episode"]] = relationship(
"Episode",
@@ -161,22 +122,6 @@ class AnimeSeries(Base, TimestampMixin):
raise ValueError("Folder path must be 1000 characters or less")
return value.strip()
@validates('cover_url')
def validate_cover_url(self, key: str, value: Optional[str]) -> Optional[str]:
"""Validate cover URL length."""
if value is not None and len(value) > 1000:
raise ValueError("Cover URL must be 1000 characters or less")
return value
@validates('total_episodes')
def validate_total_episodes(self, key: str, value: Optional[int]) -> Optional[int]:
"""Validate total episodes is positive."""
if value is not None and value < 0:
raise ValueError("Total episodes must be non-negative")
if value is not None and value > 10000:
raise ValueError("Total episodes must be 10000 or less")
return value
def __repr__(self) -> str:
return f"<AnimeSeries(id={self.id}, key='{self.key}', name='{self.name}')>"
@@ -194,9 +139,7 @@ class Episode(Base, TimestampMixin):
episode_number: Episode number within season
title: Episode title
file_path: Local file path if downloaded
file_size: File size in bytes
is_downloaded: Whether episode is downloaded
download_date: When episode was downloaded
series: Relationship to AnimeSeries
created_at: Creation timestamp (from TimestampMixin)
updated_at: Last update timestamp (from TimestampMixin)
@@ -234,18 +177,10 @@ class Episode(Base, TimestampMixin):
String(1000), nullable=True,
doc="Local file path"
)
file_size: Mapped[Optional[int]] = mapped_column(
Integer, nullable=True,
doc="File size in bytes"
)
is_downloaded: Mapped[bool] = mapped_column(
Boolean, default=False, nullable=False,
doc="Whether episode is downloaded"
)
download_date: Mapped[Optional[datetime]] = mapped_column(
DateTime(timezone=True), nullable=True,
doc="When episode was downloaded"
)
# Relationship
series: Mapped["AnimeSeries"] = relationship(
@@ -287,13 +222,6 @@ class Episode(Base, TimestampMixin):
raise ValueError("File path must be 1000 characters or less")
return value
@validates('file_size')
def validate_file_size(self, key: str, value: Optional[int]) -> Optional[int]:
"""Validate file size is non-negative."""
if value is not None and value < 0:
raise ValueError("File size must be non-negative")
return value
def __repr__(self) -> str:
return (
f"<Episode(id={self.id}, series_id={self.series_id}, "
@@ -321,27 +249,20 @@ class DownloadPriority(str, Enum):
class DownloadQueueItem(Base, TimestampMixin):
"""SQLAlchemy model for download queue items.
Tracks download queue with status, progress, and error information.
Tracks download queue with error information.
Provides persistence for the DownloadService queue state.
Attributes:
id: Primary key
series_id: Foreign key to AnimeSeries
season: Season number
episode_number: Episode number
status: Current download status
priority: Download priority
progress_percent: Download progress (0-100)
downloaded_bytes: Bytes downloaded
total_bytes: Total file size
download_speed: Current speed in bytes/sec
episode_id: Foreign key to Episode
error_message: Error description if failed
retry_count: Number of retry attempts
download_url: Provider download URL
file_destination: Target file path
started_at: When download started
completed_at: When download completed
series: Relationship to AnimeSeries
episode: Relationship to Episode
created_at: Creation timestamp (from TimestampMixin)
updated_at: Last update timestamp (from TimestampMixin)
"""
@@ -359,47 +280,11 @@ class DownloadQueueItem(Base, TimestampMixin):
index=True
)
# Episode identification
season: Mapped[int] = mapped_column(
Integer, nullable=False,
doc="Season number"
)
episode_number: Mapped[int] = mapped_column(
Integer, nullable=False,
doc="Episode number"
)
# Queue management
status: Mapped[str] = mapped_column(
SQLEnum(DownloadStatus),
default=DownloadStatus.PENDING,
# Foreign key to episode
episode_id: Mapped[int] = mapped_column(
ForeignKey("episodes.id", ondelete="CASCADE"),
nullable=False,
index=True,
doc="Current download status"
)
priority: Mapped[str] = mapped_column(
SQLEnum(DownloadPriority),
default=DownloadPriority.NORMAL,
nullable=False,
doc="Download priority"
)
# Progress tracking
progress_percent: Mapped[float] = mapped_column(
Float, default=0.0, nullable=False,
doc="Progress percentage (0-100)"
)
downloaded_bytes: Mapped[int] = mapped_column(
Integer, default=0, nullable=False,
doc="Bytes downloaded"
)
total_bytes: Mapped[Optional[int]] = mapped_column(
Integer, nullable=True,
doc="Total file size"
)
download_speed: Mapped[Optional[float]] = mapped_column(
Float, nullable=True,
doc="Current download speed (bytes/sec)"
index=True
)
# Error handling
@@ -407,10 +292,6 @@ class DownloadQueueItem(Base, TimestampMixin):
Text, nullable=True,
doc="Error description"
)
retry_count: Mapped[int] = mapped_column(
Integer, default=0, nullable=False,
doc="Number of retry attempts"
)
# Download details
download_url: Mapped[Optional[str]] = mapped_column(
@@ -437,67 +318,9 @@ class DownloadQueueItem(Base, TimestampMixin):
"AnimeSeries",
back_populates="download_items"
)
@validates('season')
def validate_season(self, key: str, value: int) -> int:
"""Validate season number is positive."""
if value < 0:
raise ValueError("Season number must be non-negative")
if value > 1000:
raise ValueError("Season number must be 1000 or less")
return value
@validates('episode_number')
def validate_episode_number(self, key: str, value: int) -> int:
"""Validate episode number is positive."""
if value < 0:
raise ValueError("Episode number must be non-negative")
if value > 10000:
raise ValueError("Episode number must be 10000 or less")
return value
@validates('progress_percent')
def validate_progress_percent(self, key: str, value: float) -> float:
"""Validate progress is between 0 and 100."""
if value < 0.0:
raise ValueError("Progress percent must be non-negative")
if value > 100.0:
raise ValueError("Progress percent cannot exceed 100")
return value
@validates('downloaded_bytes')
def validate_downloaded_bytes(self, key: str, value: int) -> int:
"""Validate downloaded bytes is non-negative."""
if value < 0:
raise ValueError("Downloaded bytes must be non-negative")
return value
@validates('total_bytes')
def validate_total_bytes(
self, key: str, value: Optional[int]
) -> Optional[int]:
"""Validate total bytes is non-negative."""
if value is not None and value < 0:
raise ValueError("Total bytes must be non-negative")
return value
@validates('download_speed')
def validate_download_speed(
self, key: str, value: Optional[float]
) -> Optional[float]:
"""Validate download speed is non-negative."""
if value is not None and value < 0.0:
raise ValueError("Download speed must be non-negative")
return value
@validates('retry_count')
def validate_retry_count(self, key: str, value: int) -> int:
"""Validate retry count is non-negative."""
if value < 0:
raise ValueError("Retry count must be non-negative")
if value > 100:
raise ValueError("Retry count cannot exceed 100")
return value
episode: Mapped["Episode"] = relationship(
"Episode"
)
@validates('download_url')
def validate_download_url(
@@ -523,8 +346,7 @@ class DownloadQueueItem(Base, TimestampMixin):
return (
f"<DownloadQueueItem(id={self.id}, "
f"series_id={self.series_id}, "
f"S{self.season:02d}E{self.episode_number:02d}, "
f"status={self.status})>"
f"episode_id={self.episode_id})>"
)

View File

@@ -15,7 +15,7 @@ from __future__ import annotations
import logging
from datetime import datetime, timedelta, timezone
from typing import Dict, List, Optional
from typing import List, Optional
from sqlalchemy import delete, select, update
from sqlalchemy.ext.asyncio import AsyncSession
@@ -23,9 +23,7 @@ from sqlalchemy.orm import Session, selectinload
from src.server.database.models import (
AnimeSeries,
DownloadPriority,
DownloadQueueItem,
DownloadStatus,
Episode,
UserSession,
)
@@ -57,11 +55,6 @@ class AnimeSeriesService:
name: str,
site: str,
folder: str,
description: Optional[str] = None,
status: Optional[str] = None,
total_episodes: Optional[int] = None,
cover_url: Optional[str] = None,
episode_dict: Optional[Dict] = None,
) -> AnimeSeries:
"""Create a new anime series.
@@ -71,11 +64,6 @@ class AnimeSeriesService:
name: Series name
site: Provider site URL
folder: Local filesystem path
description: Optional series description
status: Optional series status
total_episodes: Optional total episode count
cover_url: Optional cover image URL
episode_dict: Optional episode dictionary
Returns:
Created AnimeSeries instance
@@ -88,11 +76,6 @@ class AnimeSeriesService:
name=name,
site=site,
folder=folder,
description=description,
status=status,
total_episodes=total_episodes,
cover_url=cover_url,
episode_dict=episode_dict,
)
db.add(series)
await db.flush()
@@ -262,7 +245,6 @@ class EpisodeService:
episode_number: int,
title: Optional[str] = None,
file_path: Optional[str] = None,
file_size: Optional[int] = None,
is_downloaded: bool = False,
) -> Episode:
"""Create a new episode.
@@ -274,7 +256,6 @@ class EpisodeService:
episode_number: Episode number within season
title: Optional episode title
file_path: Optional local file path
file_size: Optional file size in bytes
is_downloaded: Whether episode is downloaded
Returns:
@@ -286,9 +267,7 @@ class EpisodeService:
episode_number=episode_number,
title=title,
file_path=file_path,
file_size=file_size,
is_downloaded=is_downloaded,
download_date=datetime.now(timezone.utc) if is_downloaded else None,
)
db.add(episode)
await db.flush()
@@ -372,7 +351,6 @@ class EpisodeService:
db: AsyncSession,
episode_id: int,
file_path: str,
file_size: int,
) -> Optional[Episode]:
"""Mark episode as downloaded.
@@ -380,7 +358,6 @@ class EpisodeService:
db: Database session
episode_id: Episode primary key
file_path: Local file path
file_size: File size in bytes
Returns:
Updated Episode instance or None if not found
@@ -391,8 +368,6 @@ class EpisodeService:
episode.is_downloaded = True
episode.file_path = file_path
episode.file_size = file_size
episode.download_date = datetime.now(timezone.utc)
await db.flush()
await db.refresh(episode)
@@ -427,17 +402,14 @@ class EpisodeService:
class DownloadQueueService:
"""Service for download queue CRUD operations.
Provides methods for managing the download queue with status tracking,
priority management, and progress updates.
Provides methods for managing the download queue.
"""
@staticmethod
async def create(
db: AsyncSession,
series_id: int,
season: int,
episode_number: int,
priority: DownloadPriority = DownloadPriority.NORMAL,
episode_id: int,
download_url: Optional[str] = None,
file_destination: Optional[str] = None,
) -> DownloadQueueItem:
@@ -446,9 +418,7 @@ class DownloadQueueService:
Args:
db: Database session
series_id: Foreign key to AnimeSeries
season: Season number
episode_number: Episode number
priority: Download priority
episode_id: Foreign key to Episode
download_url: Optional provider download URL
file_destination: Optional target file path
@@ -457,10 +427,7 @@ class DownloadQueueService:
"""
item = DownloadQueueItem(
series_id=series_id,
season=season,
episode_number=episode_number,
status=DownloadStatus.PENDING,
priority=priority,
episode_id=episode_id,
download_url=download_url,
file_destination=file_destination,
)
@@ -468,8 +435,8 @@ class DownloadQueueService:
await db.flush()
await db.refresh(item)
logger.info(
f"Added to download queue: S{season:02d}E{episode_number:02d} "
f"for series_id={series_id} with priority={priority}"
f"Added to download queue: episode_id={episode_id} "
f"for series_id={series_id}"
)
return item
@@ -493,68 +460,25 @@ class DownloadQueueService:
return result.scalar_one_or_none()
@staticmethod
async def get_by_status(
async def get_by_episode(
db: AsyncSession,
status: DownloadStatus,
limit: Optional[int] = None,
) -> List[DownloadQueueItem]:
"""Get download queue items by status.
episode_id: int,
) -> Optional[DownloadQueueItem]:
"""Get download queue item by episode ID.
Args:
db: Database session
status: Download status filter
limit: Optional limit for results
episode_id: Foreign key to Episode
Returns:
List of DownloadQueueItem instances
DownloadQueueItem instance or None if not found
"""
query = select(DownloadQueueItem).where(
DownloadQueueItem.status == status
)
# Order by priority (HIGH first) then creation time
query = query.order_by(
DownloadQueueItem.priority.desc(),
DownloadQueueItem.created_at.asc(),
)
if limit:
query = query.limit(limit)
result = await db.execute(query)
return list(result.scalars().all())
@staticmethod
async def get_pending(
db: AsyncSession,
limit: Optional[int] = None,
) -> List[DownloadQueueItem]:
"""Get pending download queue items.
Args:
db: Database session
limit: Optional limit for results
Returns:
List of pending DownloadQueueItem instances ordered by priority
"""
return await DownloadQueueService.get_by_status(
db, DownloadStatus.PENDING, limit
)
@staticmethod
async def get_active(db: AsyncSession) -> List[DownloadQueueItem]:
"""Get active download queue items.
Args:
db: Database session
Returns:
List of downloading DownloadQueueItem instances
"""
return await DownloadQueueService.get_by_status(
db, DownloadStatus.DOWNLOADING
result = await db.execute(
select(DownloadQueueItem).where(
DownloadQueueItem.episode_id == episode_id
)
)
return result.scalar_one_or_none()
@staticmethod
async def get_all(
@@ -576,7 +500,6 @@ class DownloadQueueService:
query = query.options(selectinload(DownloadQueueItem.series))
query = query.order_by(
DownloadQueueItem.priority.desc(),
DownloadQueueItem.created_at.asc(),
)
@@ -584,19 +507,17 @@ class DownloadQueueService:
return list(result.scalars().all())
@staticmethod
async def update_status(
async def set_error(
db: AsyncSession,
item_id: int,
status: DownloadStatus,
error_message: Optional[str] = None,
error_message: str,
) -> Optional[DownloadQueueItem]:
"""Update download queue item status.
"""Set error message on download queue item.
Args:
db: Database session
item_id: Item primary key
status: New download status
error_message: Optional error message for failed status
error_message: Error description
Returns:
Updated DownloadQueueItem instance or None if not found
@@ -605,61 +526,11 @@ class DownloadQueueService:
if not item:
return None
item.status = status
# Update timestamps based on status
if status == DownloadStatus.DOWNLOADING and not item.started_at:
item.started_at = datetime.now(timezone.utc)
elif status in (DownloadStatus.COMPLETED, DownloadStatus.FAILED):
item.completed_at = datetime.now(timezone.utc)
# Set error message for failed downloads
if status == DownloadStatus.FAILED and error_message:
item.error_message = error_message
item.retry_count += 1
await db.flush()
await db.refresh(item)
logger.debug(f"Updated download queue item {item_id} status to {status}")
return item
@staticmethod
async def update_progress(
db: AsyncSession,
item_id: int,
progress_percent: float,
downloaded_bytes: int,
total_bytes: Optional[int] = None,
download_speed: Optional[float] = None,
) -> Optional[DownloadQueueItem]:
"""Update download progress.
Args:
db: Database session
item_id: Item primary key
progress_percent: Progress percentage (0-100)
downloaded_bytes: Bytes downloaded
total_bytes: Optional total file size
download_speed: Optional current speed (bytes/sec)
Returns:
Updated DownloadQueueItem instance or None if not found
"""
item = await DownloadQueueService.get_by_id(db, item_id)
if not item:
return None
item.progress_percent = progress_percent
item.downloaded_bytes = downloaded_bytes
if total_bytes is not None:
item.total_bytes = total_bytes
if download_speed is not None:
item.download_speed = download_speed
item.error_message = error_message
await db.flush()
await db.refresh(item)
logger.debug(f"Set error on download queue item {item_id}")
return item
@staticmethod
@@ -682,57 +553,30 @@ class DownloadQueueService:
return deleted
@staticmethod
async def clear_completed(db: AsyncSession) -> int:
"""Clear completed downloads from queue.
async def delete_by_episode(
db: AsyncSession,
episode_id: int,
) -> bool:
"""Delete download queue item by episode ID.
Args:
db: Database session
episode_id: Foreign key to Episode
Returns:
Number of items cleared
True if deleted, False if not found
"""
result = await db.execute(
delete(DownloadQueueItem).where(
DownloadQueueItem.status == DownloadStatus.COMPLETED
DownloadQueueItem.episode_id == episode_id
)
)
count = result.rowcount
logger.info(f"Cleared {count} completed downloads from queue")
return count
@staticmethod
async def retry_failed(
db: AsyncSession,
max_retries: int = 3,
) -> List[DownloadQueueItem]:
"""Retry failed downloads that haven't exceeded max retries.
Args:
db: Database session
max_retries: Maximum number of retry attempts
Returns:
List of items marked for retry
"""
result = await db.execute(
select(DownloadQueueItem).where(
DownloadQueueItem.status == DownloadStatus.FAILED,
DownloadQueueItem.retry_count < max_retries,
deleted = result.rowcount > 0
if deleted:
logger.info(
f"Deleted download queue item with episode_id={episode_id}"
)
)
items = list(result.scalars().all())
for item in items:
item.status = DownloadStatus.PENDING
item.error_message = None
item.progress_percent = 0.0
item.downloaded_bytes = 0
item.started_at = None
item.completed_at = None
await db.flush()
logger.info(f"Marked {len(items)} failed downloads for retry")
return items
return deleted
# ============================================================================