From 48daeba0125597773da194ea7b52e32e80817d03 Mon Sep 17 00:00:00 2001 From: Lukas Date: Tue, 2 Dec 2025 14:15:19 +0100 Subject: [PATCH] added instruction for queue db data --- instructions.md | 1015 +++++++++++++++++++++++++++++++++++++++++++++++ 1 file changed, 1015 insertions(+) diff --git a/instructions.md b/instructions.md index 73e5e8e..962a421 100644 --- a/instructions.md +++ b/instructions.md @@ -120,3 +120,1018 @@ For each task completed: - Good foundation for future enhancements if needed --- + +## ๐Ÿ—„๏ธ Task: Migrate Download Queue from JSON to SQLite Database + +### Background + +The project currently has a **hybrid data persistence approach**: + +| Data Type | Current Storage | Target Storage | +| ------------------ | ------------------------------------------ | ------------------- | +| Anime Series | SQLite Database | โœ… Done | +| Episodes | SQLite Database | โœ… Done | +| User Sessions | SQLite Database | โœ… Done | +| **Download Queue** | **JSON File** (`data/download_queue.json`) | **SQLite Database** | + +The database infrastructure already exists in `src/server/database/`: + +- `DownloadQueueItem` model in `models.py` โœ… +- `DownloadQueueService` with full CRUD operations in `service.py` โœ… +- `DownloadStatus` and `DownloadPriority` enums โœ… + +**However**, the `DownloadService` in `src/server/services/download_service.py` still uses JSON file persistence instead of the database service. + +### Goal + +Migrate `DownloadService` to use SQLite via `DownloadQueueService` for queue persistence instead of JSON files. + +--- + +### Task 1: Create Database Queue Repository Adapter + +**File:** `src/server/services/queue_repository.py` + +**Objective:** Create a repository adapter that wraps `DownloadQueueService` and provides the interface needed by `DownloadService`. + +**Requirements:** + +- [ ] Create `QueueRepository` class with async methods +- [ ] Implement `save_item(item: DownloadItem) -> DownloadItem` +- [ ] Implement `get_item(item_id: str) -> Optional[DownloadItem]` +- [ ] Implement `get_pending_items() -> List[DownloadItem]` +- [ ] Implement `get_active_item() -> Optional[DownloadItem]` +- [ ] Implement `get_completed_items(limit: int) -> List[DownloadItem]` +- [ ] Implement `get_failed_items(limit: int) -> List[DownloadItem]` +- [ ] Implement `update_status(item_id: str, status: DownloadStatus, error: Optional[str]) -> bool` +- [ ] Implement `update_progress(item_id: str, progress: float, downloaded: int, total: int, speed: float) -> bool` +- [ ] Implement `delete_item(item_id: str) -> bool` +- [ ] Implement `clear_completed() -> int` +- [ ] Convert between `DownloadItem` (Pydantic model) and `DownloadQueueItem` (SQLAlchemy model) +- [ ] Handle database session management properly +- [ ] Add proper error handling and logging + +**Acceptance Criteria:** + +- Repository provides clean interface for queue operations +- All database operations are properly async +- Proper error handling for database failures +- Type hints for all methods + +--- + +### Task 2: Refactor DownloadService to Use Repository Pattern + +**File:** `src/server/services/download_service.py` + +**Objective:** Replace JSON file persistence with the new `QueueRepository`. 
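+
+As a rough orientation for this refactor, here is a minimal, hedged sketch of the intended constructor injection plus the startup and enqueue paths. It is not the project's actual implementation; the cache attribute, the `DownloadItem` construction, and the assumed field defaults are illustrative additions layered on top of the repository interface listed in Task 1.
+
+```python
+# Illustrative sketch only: DownloadService backed by QueueRepository instead of JSON.
+# Names not listed in the task requirements (e.g. _items_cache) are assumptions.
+from typing import Dict
+
+from src.server.models.download import DownloadItem, DownloadStatus
+from src.server.services.queue_repository import QueueRepository
+
+
+class DownloadService:
+    def __init__(self, anime_service, queue_repository: QueueRepository) -> None:
+        self._anime_service = anime_service
+        self._repository = queue_repository              # replaces _persistence_path / JSON I/O
+        self._items_cache: Dict[str, DownloadItem] = {}  # in-memory cache for active operations
+
+    async def initialize(self) -> None:
+        """Load persisted queue state from the database on startup (the _sync_from_database step)."""
+        for item in await self._repository.get_pending_items():
+            self._items_cache[item.id] = item
+
+    async def add_to_queue(self, series_key: str, season: int, episode: int) -> DownloadItem:
+        """Persist a new queue entry via the repository instead of _save_queue()."""
+        item = DownloadItem(  # assumes the remaining DownloadItem fields have sensible defaults
+            series_key=series_key,
+            season=season,
+            episode=episode,
+            status=DownloadStatus.PENDING,
+        )
+        saved = await self._repository.save_item(item)
+        self._items_cache[saved.id] = saved
+        return saved
+```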
+ +**Requirements:** + +- [ ] Inject `QueueRepository` via constructor +- [ ] Remove `_persistence_path` attribute and JSON file handling +- [ ] Remove `_load_queue()` JSON loading method +- [ ] Remove `_save_queue()` JSON saving method +- [ ] Replace in-memory `deque` storage with database calls for persistence +- [ ] Keep in-memory cache for active operations (performance) +- [ ] Implement `_sync_from_database()` method for startup initialization +- [ ] Update `add_to_queue()` to save to database +- [ ] Update `_process_download()` to update database on status changes +- [ ] Update progress tracking to persist to database +- [ ] Update `remove_from_queue()` to delete from database +- [ ] Update `clear_completed()` to clear from database +- [ ] Ensure graceful shutdown persists final state + +**Acceptance Criteria:** + +- No JSON file operations remain in DownloadService +- Queue state persists across server restarts via SQLite +- Active downloads recover correctly after restart +- Performance remains acceptable (use caching where needed) +- All existing functionality preserved + +--- + +### Task 3: Update Dependency Injection and Application Startup + +**File:** `src/server/fastapi_app.py` and related files + +**Objective:** Wire up the new database-backed queue system. + +**Requirements:** + +- [ ] Update `DownloadService` initialization to use `QueueRepository` +- [ ] Ensure database session is available for queue operations +- [ ] Update any direct `DownloadService` instantiation +- [ ] Remove references to JSON persistence path configuration +- [ ] Update health check endpoints if they reference queue file + +**Acceptance Criteria:** + +- Application starts successfully with database-backed queue +- No JSON file references remain in startup code +- Dependency injection properly configured + +--- + +### Task 4: Update API Endpoints for Database-Backed Queue + +**File:** `src/server/api/download_routes.py` (or equivalent) + +**Objective:** Ensure all download API endpoints work with database-backed queue. + +**Requirements:** + +- [ ] Verify `GET /api/queue` returns items from database +- [ ] Verify `POST /api/queue` adds items to database +- [ ] Verify `DELETE /api/queue/{id}` removes from database +- [ ] Verify queue statistics reflect database state +- [ ] Verify WebSocket broadcasts still work correctly +- [ ] Update any endpoint that directly accessed JSON file +- [ ] Add new endpoint `GET /api/queue/history` for completed/failed items (optional) + +**Acceptance Criteria:** + +- All existing API contracts maintained +- Queue operations reflect database state +- Real-time updates via WebSocket work correctly + +--- + +### Task 5: Cleanup and Documentation + +**Objective:** Remove deprecated code and update documentation. 
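+
+For the schema-documentation requirement below, the columns referenced by the tests in this document suggest a table shape roughly like the following. Treat it as a hedged reconstruction to be verified against `src/server/database/models.py`, not as the authoritative schema:
+
+| Column             | Type      | Notes                                                    |
+| ------------------ | --------- | -------------------------------------------------------- |
+| `id`               | INTEGER   | Primary key                                               |
+| `series_id`        | INTEGER   | FK to the anime series row, cascade delete (per tests)    |
+| `season`           | INTEGER   |                                                           |
+| `episode_number`   | INTEGER   |                                                           |
+| `status`           | ENUM      | `DownloadStatus` (pending/downloading/completed/failed)   |
+| `priority`         | ENUM      | `DownloadPriority` (low/normal/high)                      |
+| `progress_percent` | FLOAT     |                                                           |
+| `downloaded_bytes` | INTEGER   |                                                           |
+| `total_bytes`      | INTEGER   | Nullable                                                  |
+| `download_speed`   | FLOAT     | Nullable                                                  |
+| `error_message`    | TEXT      | Nullable                                                  |
+| `retry_count`      | INTEGER   | Incremented on failure (per tests)                        |
+| `started_at`       | TIMESTAMP | Set when status becomes `DOWNLOADING`                     |
+| `completed_at`     | TIMESTAMP | Set when status becomes `COMPLETED`                       |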
+ +**Requirements:** + +- [ ] Remove deprecated JSON persistence code from codebase +- [ ] Delete `data/download_queue.json` if it exists +- [ ] Update `infrastructure.md` with new queue architecture +- [ ] Update API documentation if needed +- [ ] Add database schema documentation for download_queue table +- [ ] Update configuration documentation (remove JSON path config) + +**Acceptance Criteria:** + +- No dead code remains +- Documentation accurately reflects new architecture + +--- + +## ๐Ÿงช Tests for Download Queue Database Migration + +### Unit Tests + +**File:** `tests/unit/test_queue_repository.py` + +```python +"""Unit tests for QueueRepository database adapter.""" +import pytest +from unittest.mock import AsyncMock, MagicMock, patch +from datetime import datetime, timezone + +from src.server.services.queue_repository import QueueRepository +from src.server.models.download import DownloadItem, DownloadStatus, DownloadPriority +from src.server.database.models import DownloadQueueItem as DBDownloadQueueItem + + +class TestQueueRepository: + """Test suite for QueueRepository.""" + + @pytest.fixture + def mock_db_session(self): + """Create mock database session.""" + session = AsyncMock() + return session + + @pytest.fixture + def repository(self, mock_db_session): + """Create repository instance with mock session.""" + return QueueRepository(db_session_factory=lambda: mock_db_session) + + @pytest.fixture + def sample_download_item(self): + """Create sample DownloadItem for testing.""" + return DownloadItem( + id="test-uuid-123", + series_key="attack-on-titan", + series_name="Attack on Titan", + season=1, + episode=5, + status=DownloadStatus.PENDING, + priority=DownloadPriority.NORMAL, + progress_percent=0.0, + downloaded_bytes=0, + total_bytes=None, + ) + + # === Conversion Tests === + + async def test_convert_to_db_model(self, repository, sample_download_item): + """Test converting DownloadItem to database model.""" + # Arrange + series_id = 42 + + # Act + db_item = repository._to_db_model(sample_download_item, series_id) + + # Assert + assert db_item.series_id == series_id + assert db_item.season == sample_download_item.season + assert db_item.episode_number == sample_download_item.episode + assert db_item.status == sample_download_item.status + assert db_item.priority == sample_download_item.priority + + async def test_convert_from_db_model(self, repository): + """Test converting database model to DownloadItem.""" + # Arrange + db_item = MagicMock() + db_item.id = 1 + db_item.series_id = 42 + db_item.series.key = "attack-on-titan" + db_item.series.name = "Attack on Titan" + db_item.season = 1 + db_item.episode_number = 5 + db_item.status = DownloadStatus.PENDING + db_item.priority = DownloadPriority.NORMAL + db_item.progress_percent = 25.5 + db_item.downloaded_bytes = 1024000 + db_item.total_bytes = 4096000 + + # Act + item = repository._from_db_model(db_item) + + # Assert + assert item.series_key == "attack-on-titan" + assert item.series_name == "Attack on Titan" + assert item.season == 1 + assert item.episode == 5 + assert item.progress_percent == 25.5 + + # === CRUD Operation Tests === + + async def test_save_item_creates_new_record(self, repository, mock_db_session, sample_download_item): + """Test saving a new download item to database.""" + # Arrange + mock_db_session.execute.return_value.scalar_one_or_none.return_value = MagicMock(id=42) + + # Act + result = await repository.save_item(sample_download_item) + + # Assert + mock_db_session.add.assert_called_once() + 
mock_db_session.flush.assert_called_once() + assert result is not None + + async def test_get_pending_items_returns_ordered_list(self, repository, mock_db_session): + """Test retrieving pending items ordered by priority.""" + # Arrange + mock_items = [MagicMock(), MagicMock()] + mock_db_session.execute.return_value.scalars.return_value.all.return_value = mock_items + + # Act + result = await repository.get_pending_items() + + # Assert + assert len(result) == 2 + mock_db_session.execute.assert_called_once() + + async def test_update_status_success(self, repository, mock_db_session): + """Test updating item status.""" + # Arrange + mock_item = MagicMock() + mock_db_session.execute.return_value.scalar_one_or_none.return_value = mock_item + + # Act + result = await repository.update_status("test-id", DownloadStatus.DOWNLOADING) + + # Assert + assert result is True + assert mock_item.status == DownloadStatus.DOWNLOADING + + async def test_update_status_item_not_found(self, repository, mock_db_session): + """Test updating status for non-existent item.""" + # Arrange + mock_db_session.execute.return_value.scalar_one_or_none.return_value = None + + # Act + result = await repository.update_status("non-existent", DownloadStatus.DOWNLOADING) + + # Assert + assert result is False + + async def test_update_progress(self, repository, mock_db_session): + """Test updating download progress.""" + # Arrange + mock_item = MagicMock() + mock_db_session.execute.return_value.scalar_one_or_none.return_value = mock_item + + # Act + result = await repository.update_progress( + item_id="test-id", + progress=50.0, + downloaded=2048000, + total=4096000, + speed=1024000.0 + ) + + # Assert + assert result is True + assert mock_item.progress_percent == 50.0 + assert mock_item.downloaded_bytes == 2048000 + + async def test_delete_item_success(self, repository, mock_db_session): + """Test deleting download item.""" + # Arrange + mock_db_session.execute.return_value.rowcount = 1 + + # Act + result = await repository.delete_item("test-id") + + # Assert + assert result is True + + async def test_clear_completed_returns_count(self, repository, mock_db_session): + """Test clearing completed items returns count.""" + # Arrange + mock_db_session.execute.return_value.rowcount = 5 + + # Act + result = await repository.clear_completed() + + # Assert + assert result == 5 + + +class TestQueueRepositoryErrorHandling: + """Test error handling in QueueRepository.""" + + @pytest.fixture + def mock_db_session(self): + """Create mock database session.""" + return AsyncMock() + + @pytest.fixture + def repository(self, mock_db_session): + """Create repository instance.""" + return QueueRepository(db_session_factory=lambda: mock_db_session) + + async def test_save_item_handles_database_error(self, repository, mock_db_session): + """Test handling database errors on save.""" + # Arrange + mock_db_session.execute.side_effect = Exception("Database connection failed") + + # Act & Assert + with pytest.raises(Exception): + await repository.save_item(MagicMock()) + + async def test_get_items_handles_database_error(self, repository, mock_db_session): + """Test handling database errors on query.""" + # Arrange + mock_db_session.execute.side_effect = Exception("Query failed") + + # Act & Assert + with pytest.raises(Exception): + await repository.get_pending_items() +``` + +--- + +**File:** `tests/unit/test_download_service_database.py` + +```python +"""Unit tests for DownloadService with database persistence.""" +import pytest +from unittest.mock import 
AsyncMock, MagicMock, patch +from datetime import datetime, timezone + +from src.server.services.download_service import DownloadService +from src.server.models.download import DownloadItem, DownloadStatus, DownloadPriority + + +class TestDownloadServiceDatabasePersistence: + """Test DownloadService database persistence.""" + + @pytest.fixture + def mock_anime_service(self): + """Create mock anime service.""" + return AsyncMock() + + @pytest.fixture + def mock_queue_repository(self): + """Create mock queue repository.""" + repo = AsyncMock() + repo.get_pending_items.return_value = [] + repo.get_active_item.return_value = None + repo.get_completed_items.return_value = [] + repo.get_failed_items.return_value = [] + return repo + + @pytest.fixture + def download_service(self, mock_anime_service, mock_queue_repository): + """Create download service with mocked dependencies.""" + return DownloadService( + anime_service=mock_anime_service, + queue_repository=mock_queue_repository, + ) + + # === Persistence Tests === + + async def test_add_to_queue_saves_to_database( + self, download_service, mock_queue_repository + ): + """Test that adding to queue persists to database.""" + # Arrange + mock_queue_repository.save_item.return_value = MagicMock(id="new-id") + + # Act + result = await download_service.add_to_queue( + series_key="test-series", + season=1, + episode=1, + ) + + # Assert + mock_queue_repository.save_item.assert_called_once() + + async def test_startup_loads_from_database( + self, mock_anime_service, mock_queue_repository + ): + """Test that startup loads queue state from database.""" + # Arrange + pending_items = [ + MagicMock(id="1", status=DownloadStatus.PENDING), + MagicMock(id="2", status=DownloadStatus.PENDING), + ] + mock_queue_repository.get_pending_items.return_value = pending_items + + # Act + service = DownloadService( + anime_service=mock_anime_service, + queue_repository=mock_queue_repository, + ) + await service.initialize() + + # Assert + mock_queue_repository.get_pending_items.assert_called() + + async def test_download_completion_updates_database( + self, download_service, mock_queue_repository + ): + """Test that download completion updates database status.""" + # Arrange + item = MagicMock(id="test-id") + + # Act + await download_service._mark_completed(item) + + # Assert + mock_queue_repository.update_status.assert_called_with( + "test-id", DownloadStatus.COMPLETED, error=None + ) + + async def test_download_failure_updates_database( + self, download_service, mock_queue_repository + ): + """Test that download failure updates database with error.""" + # Arrange + item = MagicMock(id="test-id") + error_message = "Network timeout" + + # Act + await download_service._mark_failed(item, error_message) + + # Assert + mock_queue_repository.update_status.assert_called_with( + "test-id", DownloadStatus.FAILED, error=error_message + ) + + async def test_progress_update_persists_to_database( + self, download_service, mock_queue_repository + ): + """Test that progress updates are persisted.""" + # Arrange + item = MagicMock(id="test-id") + + # Act + await download_service._update_progress( + item, progress=50.0, downloaded=2048, total=4096, speed=1024.0 + ) + + # Assert + mock_queue_repository.update_progress.assert_called_with( + item_id="test-id", + progress=50.0, + downloaded=2048, + total=4096, + speed=1024.0, + ) + + async def test_remove_from_queue_deletes_from_database( + self, download_service, mock_queue_repository + ): + """Test that removing from queue deletes from 
database.""" + # Arrange + mock_queue_repository.delete_item.return_value = True + + # Act + result = await download_service.remove_from_queue("test-id") + + # Assert + mock_queue_repository.delete_item.assert_called_with("test-id") + assert result is True + + async def test_clear_completed_clears_database( + self, download_service, mock_queue_repository + ): + """Test that clearing completed items updates database.""" + # Arrange + mock_queue_repository.clear_completed.return_value = 5 + + # Act + result = await download_service.clear_completed() + + # Assert + mock_queue_repository.clear_completed.assert_called_once() + assert result == 5 + + +class TestDownloadServiceNoJsonFile: + """Verify DownloadService no longer uses JSON files.""" + + async def test_no_json_file_operations(self): + """Verify no JSON file read/write operations exist.""" + import inspect + from src.server.services.download_service import DownloadService + + source = inspect.getsource(DownloadService) + + # Assert no JSON file operations + assert "download_queue.json" not in source + assert "_load_queue" not in source or "database" in source.lower() + assert "_save_queue" not in source or "database" in source.lower() +``` + +--- + +### Integration Tests + +**File:** `tests/integration/test_queue_database_integration.py` + +```python +"""Integration tests for download queue database operations.""" +import pytest +from datetime import datetime, timezone + +from sqlalchemy.ext.asyncio import create_async_engine, AsyncSession +from sqlalchemy.orm import sessionmaker + +from src.server.database.base import Base +from src.server.database.models import AnimeSeries, DownloadQueueItem, DownloadStatus, DownloadPriority +from src.server.database.service import DownloadQueueService, AnimeSeriesService +from src.server.services.queue_repository import QueueRepository + + +@pytest.fixture +async def async_engine(): + """Create async test database engine.""" + engine = create_async_engine("sqlite+aiosqlite:///:memory:", echo=False) + async with engine.begin() as conn: + await conn.run_sync(Base.metadata.create_all) + yield engine + await engine.dispose() + + +@pytest.fixture +async def async_session(async_engine): + """Create async session for tests.""" + async_session_maker = sessionmaker( + async_engine, class_=AsyncSession, expire_on_commit=False + ) + async with async_session_maker() as session: + yield session + await session.rollback() + + +@pytest.fixture +async def test_series(async_session): + """Create test anime series.""" + series = await AnimeSeriesService.create( + db=async_session, + key="test-anime", + name="Test Anime", + site="https://example.com/test-anime", + folder="Test Anime (2024)", + ) + await async_session.commit() + return series + + +class TestQueueDatabaseIntegration: + """Integration tests for queue database operations.""" + + async def test_create_and_retrieve_queue_item(self, async_session, test_series): + """Test creating and retrieving a queue item.""" + # Create + item = await DownloadQueueService.create( + db=async_session, + series_id=test_series.id, + season=1, + episode_number=5, + priority=DownloadPriority.HIGH, + ) + await async_session.commit() + + # Retrieve + retrieved = await DownloadQueueService.get_by_id(async_session, item.id) + + # Assert + assert retrieved is not None + assert retrieved.series_id == test_series.id + assert retrieved.season == 1 + assert retrieved.episode_number == 5 + assert retrieved.priority == DownloadPriority.HIGH + assert retrieved.status == 
DownloadStatus.PENDING + + async def test_update_download_progress(self, async_session, test_series): + """Test updating download progress.""" + # Create item + item = await DownloadQueueService.create( + db=async_session, + series_id=test_series.id, + season=1, + episode_number=1, + ) + await async_session.commit() + + # Update progress + updated = await DownloadQueueService.update_progress( + db=async_session, + item_id=item.id, + progress_percent=75.5, + downloaded_bytes=3072000, + total_bytes=4096000, + download_speed=1024000.0, + ) + await async_session.commit() + + # Assert + assert updated.progress_percent == 75.5 + assert updated.downloaded_bytes == 3072000 + assert updated.total_bytes == 4096000 + assert updated.download_speed == 1024000.0 + + async def test_status_transitions(self, async_session, test_series): + """Test download status transitions.""" + # Create pending item + item = await DownloadQueueService.create( + db=async_session, + series_id=test_series.id, + season=1, + episode_number=1, + ) + await async_session.commit() + assert item.status == DownloadStatus.PENDING + + # Transition to downloading + item = await DownloadQueueService.update_status( + async_session, item.id, DownloadStatus.DOWNLOADING + ) + await async_session.commit() + assert item.status == DownloadStatus.DOWNLOADING + assert item.started_at is not None + + # Transition to completed + item = await DownloadQueueService.update_status( + async_session, item.id, DownloadStatus.COMPLETED + ) + await async_session.commit() + assert item.status == DownloadStatus.COMPLETED + assert item.completed_at is not None + + async def test_failed_download_with_retry(self, async_session, test_series): + """Test failed download with error message and retry count.""" + # Create item + item = await DownloadQueueService.create( + db=async_session, + series_id=test_series.id, + season=1, + episode_number=1, + ) + await async_session.commit() + + # Mark as failed with error + item = await DownloadQueueService.update_status( + async_session, + item.id, + DownloadStatus.FAILED, + error_message="Connection timeout", + ) + await async_session.commit() + + # Assert + assert item.status == DownloadStatus.FAILED + assert item.error_message == "Connection timeout" + assert item.retry_count == 1 + + async def test_get_pending_items_ordered_by_priority(self, async_session, test_series): + """Test retrieving pending items ordered by priority.""" + # Create items with different priorities + await DownloadQueueService.create( + async_session, test_series.id, 1, 1, priority=DownloadPriority.LOW + ) + await DownloadQueueService.create( + async_session, test_series.id, 1, 2, priority=DownloadPriority.HIGH + ) + await DownloadQueueService.create( + async_session, test_series.id, 1, 3, priority=DownloadPriority.NORMAL + ) + await async_session.commit() + + # Get pending items + pending = await DownloadQueueService.get_pending(async_session) + + # Assert order: HIGH -> NORMAL -> LOW + assert len(pending) == 3 + assert pending[0].priority == DownloadPriority.HIGH + assert pending[1].priority == DownloadPriority.NORMAL + assert pending[2].priority == DownloadPriority.LOW + + async def test_clear_completed_items(self, async_session, test_series): + """Test clearing completed download items.""" + # Create items + item1 = await DownloadQueueService.create( + async_session, test_series.id, 1, 1 + ) + item2 = await DownloadQueueService.create( + async_session, test_series.id, 1, 2 + ) + item3 = await DownloadQueueService.create( + async_session, 
test_series.id, 1, 3 + ) + + # Complete first two + await DownloadQueueService.update_status( + async_session, item1.id, DownloadStatus.COMPLETED + ) + await DownloadQueueService.update_status( + async_session, item2.id, DownloadStatus.COMPLETED + ) + await async_session.commit() + + # Clear completed + cleared = await DownloadQueueService.clear_completed(async_session) + await async_session.commit() + + # Assert + assert cleared == 2 + + # Verify pending item remains + remaining = await DownloadQueueService.get_all(async_session) + assert len(remaining) == 1 + assert remaining[0].id == item3.id + + async def test_cascade_delete_with_series(self, async_session, test_series): + """Test that queue items are deleted when series is deleted.""" + # Create queue items + await DownloadQueueService.create( + async_session, test_series.id, 1, 1 + ) + await DownloadQueueService.create( + async_session, test_series.id, 1, 2 + ) + await async_session.commit() + + # Delete series + await AnimeSeriesService.delete(async_session, test_series.id) + await async_session.commit() + + # Verify queue items are gone + all_items = await DownloadQueueService.get_all(async_session) + assert len(all_items) == 0 +``` + +--- + +### API Tests + +**File:** `tests/api/test_queue_endpoints_database.py` + +```python +"""API tests for queue endpoints with database persistence.""" +import pytest +from httpx import AsyncClient +from unittest.mock import patch, AsyncMock + + +class TestQueueAPIWithDatabase: + """Test queue API endpoints with database backend.""" + + @pytest.fixture + def auth_headers(self): + """Get authentication headers.""" + return {"Authorization": "Bearer test-token"} + + async def test_get_queue_returns_database_items( + self, client: AsyncClient, auth_headers + ): + """Test GET /api/queue returns items from database.""" + response = await client.get("/api/queue", headers=auth_headers) + + assert response.status_code == 200 + data = response.json() + assert "pending" in data + assert "active" in data + assert "completed" in data + + async def test_add_to_queue_persists_to_database( + self, client: AsyncClient, auth_headers + ): + """Test POST /api/queue persists item to database.""" + payload = { + "series_key": "test-anime", + "season": 1, + "episode": 1, + "priority": "normal", + } + + response = await client.post( + "/api/queue", + json=payload, + headers=auth_headers, + ) + + assert response.status_code == 201 + data = response.json() + assert "id" in data + + async def test_remove_from_queue_deletes_from_database( + self, client: AsyncClient, auth_headers + ): + """Test DELETE /api/queue/{id} removes from database.""" + # First add an item + add_response = await client.post( + "/api/queue", + json={"series_key": "test-anime", "season": 1, "episode": 1}, + headers=auth_headers, + ) + item_id = add_response.json()["id"] + + # Then delete it + response = await client.delete( + f"/api/queue/{item_id}", + headers=auth_headers, + ) + + assert response.status_code == 200 + + # Verify it's gone + get_response = await client.get("/api/queue", headers=auth_headers) + queue_data = get_response.json() + item_ids = [item["id"] for item in queue_data.get("pending", [])] + assert item_id not in item_ids + + async def test_queue_survives_server_restart( + self, client: AsyncClient, auth_headers + ): + """Test that queue items persist across simulated restart.""" + # Add item + add_response = await client.post( + "/api/queue", + json={"series_key": "test-anime", "season": 1, "episode": 5}, + headers=auth_headers, + 
) + item_id = add_response.json()["id"] + + # Simulate restart by clearing in-memory cache + # (In real scenario, this would be a server restart) + + # Verify item still exists + response = await client.get("/api/queue", headers=auth_headers) + queue_data = response.json() + item_ids = [item["id"] for item in queue_data.get("pending", [])] + assert item_id in item_ids + + async def test_clear_completed_endpoint( + self, client: AsyncClient, auth_headers + ): + """Test POST /api/queue/clear-completed endpoint.""" + response = await client.post( + "/api/queue/clear-completed", + headers=auth_headers, + ) + + assert response.status_code == 200 + data = response.json() + assert "cleared_count" in data +``` + +--- + +### Performance Tests + +**File:** `tests/performance/test_queue_database_performance.py` + +```python +"""Performance tests for database-backed download queue.""" +import pytest +import asyncio +import time +from datetime import datetime + + +class TestQueueDatabasePerformance: + """Performance tests for queue database operations.""" + + @pytest.mark.performance + async def test_bulk_insert_performance(self, async_session, test_series): + """Test performance of bulk queue item insertion.""" + from src.server.database.service import DownloadQueueService + + start_time = time.time() + + # Insert 100 queue items + for i in range(100): + await DownloadQueueService.create( + async_session, + test_series.id, + season=1, + episode_number=i + 1, + ) + await async_session.commit() + + elapsed = time.time() - start_time + + # Should complete in under 2 seconds + assert elapsed < 2.0, f"Bulk insert took {elapsed:.2f}s, expected < 2s" + + @pytest.mark.performance + async def test_query_performance_with_many_items(self, async_session, test_series): + """Test query performance with many queue items.""" + from src.server.database.service import DownloadQueueService + + # Setup: Create 500 items + for i in range(500): + await DownloadQueueService.create( + async_session, + test_series.id, + season=(i // 12) + 1, + episode_number=(i % 12) + 1, + ) + await async_session.commit() + + # Test query performance + start_time = time.time() + + pending = await DownloadQueueService.get_pending(async_session) + + elapsed = time.time() - start_time + + # Query should complete in under 100ms + assert elapsed < 0.1, f"Query took {elapsed*1000:.1f}ms, expected < 100ms" + assert len(pending) == 500 + + @pytest.mark.performance + async def test_progress_update_performance(self, async_session, test_series): + """Test performance of frequent progress updates.""" + from src.server.database.service import DownloadQueueService + + # Create item + item = await DownloadQueueService.create( + async_session, test_series.id, 1, 1 + ) + await async_session.commit() + + start_time = time.time() + + # Simulate 100 progress updates (like during download) + for i in range(100): + await DownloadQueueService.update_progress( + async_session, + item.id, + progress_percent=i, + downloaded_bytes=i * 10240, + total_bytes=1024000, + download_speed=102400.0, + ) + await async_session.commit() + + elapsed = time.time() - start_time + + # 100 updates should complete in under 1 second + assert elapsed < 1.0, f"Progress updates took {elapsed:.2f}s, expected < 1s" +``` + +--- + +## Summary + +These tasks will migrate the download queue from JSON file persistence to SQLite database, providing: + +1. **Data Integrity**: ACID-compliant storage with proper relationships +2. **Query Capability**: Efficient filtering, sorting, and pagination +3. 
**Consistency**: Single source of truth for all application data +4. **Scalability**: Better performance for large queues +5. **Recovery**: Robust handling of crashes and restarts + +The existing database infrastructure (`DownloadQueueItem` model and `DownloadQueueService`) is already in place, making this primarily an integration task rather than new development.
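+
+To make the integration concrete, a minimal, hedged sketch of the Task 3 wiring is shown below. The database URL, the `app.state` attribute names, and the pre-existing anime service are assumptions; the real code in `src/server/fastapi_app.py` will differ in detail.
+
+```python
+# Hedged sketch of startup wiring only, not the project's actual fastapi_app.py.
+# The engine URL and the app.state attribute names are assumptions.
+from contextlib import asynccontextmanager
+
+from fastapi import FastAPI
+from sqlalchemy.ext.asyncio import AsyncSession, create_async_engine
+from sqlalchemy.orm import sessionmaker
+
+from src.server.services.download_service import DownloadService
+from src.server.services.queue_repository import QueueRepository
+
+
+@asynccontextmanager
+async def lifespan(app: FastAPI):
+    engine = create_async_engine("sqlite+aiosqlite:///data/app.db")  # URL is an assumption
+    session_factory = sessionmaker(engine, class_=AsyncSession, expire_on_commit=False)
+
+    repository = QueueRepository(db_session_factory=session_factory)
+    app.state.download_service = DownloadService(
+        anime_service=app.state.anime_service,  # assumed to be initialized earlier in the real app
+        queue_repository=repository,
+    )
+    await app.state.download_service.initialize()  # replaces the old JSON _load_queue() step
+
+    yield
+
+    await engine.dispose()
+
+
+app = FastAPI(lifespan=lifespan)
+```
+
+Health checks and any endpoints that previously touched the JSON file would then resolve the queue through `app.state.download_service` (or an equivalent injected dependency) rather than a file path.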