Compare commits

No commits in common. "e42e223f282f285edd46a5ff9819705170e040cf" and "17c7a2e29542f68f469b4d13a2d0db6027b6e5b5" have entirely different histories.

20 changed files with 547 additions and 1508 deletions

View File

@ -17,8 +17,7 @@
"keep_days": 30
},
"other": {
"master_password_hash": "$pbkdf2-sha256$29000$AsCYU2pNCYHwHoPwnlPqXQ$uHLpvUnvj9GmNFgkAAgk3Yvvp2WzLyMNUBwKMyH79CQ",
"anime_directory": "/mnt/server/serien/Serien/"
"master_password_hash": "$pbkdf2-sha256$29000$SKlVihGiVIpR6v1fi9H6Xw$rElvHKWqc8WesNfrOJe4CjQI2janLKJPSy6XSOnkq2c"
},
"version": "1.0.0"
}
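
The master_password_hash values above are in Modular Crypt Format ($pbkdf2-sha256$rounds$salt$checksum), which is the format produced by passlib's pbkdf2_sha256 handler. A hedged sketch of creating and verifying such a hash, assuming passlib is the hashing backend (the hashing code itself is not part of this diff):

# Assumption: passlib is used for the "$pbkdf2-sha256$..." hashes; not confirmed here.
from passlib.hash import pbkdf2_sha256

# Create a hash with 29000 rounds, matching the stored values above.
stored = pbkdf2_sha256.using(rounds=29000).hash("my-master-password")

# Later, verify a login attempt against the stored string.
assert pbkdf2_sha256.verify("my-master-password", stored)
assert not pbkdf2_sha256.verify("wrong-password", stored)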

View File

@ -0,0 +1,23 @@
{
    "name": "Aniworld",
    "data_dir": "data",
    "scheduler": {
        "enabled": true,
        "interval_minutes": 60
    },
    "logging": {
        "level": "INFO",
        "file": null,
        "max_bytes": null,
        "backup_count": 3
    },
    "backup": {
        "enabled": false,
        "path": "data/backups",
        "keep_days": 30
    },
    "other": {
        "master_password_hash": "$pbkdf2-sha256$29000$MWaMUao1Zuw9hzAmJKS0lg$sV8jdXHeNgzuJEDSbeg/wkwOf5uZpNlYJx3jz/g.eQc"
    },
    "version": "1.0.0"
}

View File

@ -0,0 +1,23 @@
{
    "name": "Aniworld",
    "data_dir": "data",
    "scheduler": {
        "enabled": true,
        "interval_minutes": 60
    },
    "logging": {
        "level": "INFO",
        "file": null,
        "max_bytes": null,
        "backup_count": 3
    },
    "backup": {
        "enabled": false,
        "path": "data/backups",
        "keep_days": 30
    },
    "other": {
        "master_password_hash": "$pbkdf2-sha256$29000$2HtvzRljzPk/R2gN4ZwTIg$3E0ARhmzzt..GN4KMmiJpZbIgR0D23bAPX1HF/v4XlQ"
    },
    "version": "1.0.0"
}

View File

@ -0,0 +1,23 @@
{
    "name": "Aniworld",
    "data_dir": "data",
    "scheduler": {
        "enabled": true,
        "interval_minutes": 60
    },
    "logging": {
        "level": "INFO",
        "file": null,
        "max_bytes": null,
        "backup_count": 3
    },
    "backup": {
        "enabled": false,
        "path": "data/backups",
        "keep_days": 30
    },
    "other": {
        "master_password_hash": "$pbkdf2-sha256$29000$SanV.v8/x1jL.f8fQwghBA$5qbS2ezRPEPpKwzA71U/yLIyPY6c5JkcRdE.bXAebug"
    },
    "version": "1.0.0"
}

View File

@ -0,0 +1,23 @@
{
    "name": "Aniworld",
    "data_dir": "data",
    "scheduler": {
        "enabled": true,
        "interval_minutes": 60
    },
    "logging": {
        "level": "INFO",
        "file": null,
        "max_bytes": null,
        "backup_count": 3
    },
    "backup": {
        "enabled": false,
        "path": "data/backups",
        "keep_days": 30
    },
    "other": {
        "master_password_hash": "$pbkdf2-sha256$29000$eM/5nzPG2Psfo5TSujcGwA$iOo948ox9MUD5.YcCAZoF5Mi1DRzV1OeXXCcEFOFkco"
    },
    "version": "1.0.0"
}

View File

@ -0,0 +1,23 @@
{
    "name": "Aniworld",
    "data_dir": "data",
    "scheduler": {
        "enabled": true,
        "interval_minutes": 60
    },
    "logging": {
        "level": "INFO",
        "file": null,
        "max_bytes": null,
        "backup_count": 3
    },
    "backup": {
        "enabled": false,
        "path": "data/backups",
        "keep_days": 30
    },
    "other": {
        "master_password_hash": "$pbkdf2-sha256$29000$TCnlPMe4F2LMmdOa87639g$UGaXOWv2SrWpKoO92Uo5V/Zce07WpHR8qIN8MmTQ8cM"
    },
    "version": "1.0.0"
}
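
All five added config.json files share this shape; a short illustrative sketch of reading one with the standard json module (field names come from the files above, the loader itself is not the project's config service):

import json
from pathlib import Path

# Illustrative only; the project's own config service is not shown in this diff.
cfg = json.loads(Path("data/config.json").read_text(encoding="utf-8"))

scan_interval = cfg["scheduler"]["interval_minutes"]    # 60
log_level = cfg["logging"]["level"]                     # "INFO"
backups_enabled = cfg["backup"]["enabled"]              # False
password_hash = cfg["other"]["master_password_hash"]    # pbkdf2-sha256 MCF string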

File diff suppressed because one or more lines are too long

File diff suppressed because it is too large

View File

@ -33,7 +33,6 @@ class DownloadStatusEventArgs:
error: Optional[Exception] = None,
eta: Optional[int] = None,
mbper_sec: Optional[float] = None,
item_id: Optional[str] = None,
):
"""
Initialize download status event arguments.
@ -48,7 +47,6 @@ class DownloadStatusEventArgs:
error: Optional error if status is "failed"
eta: Estimated time remaining in seconds
mbper_sec: Download speed in MB/s
item_id: Optional download queue item ID for tracking
"""
self.serie_folder = serie_folder
self.season = season
@ -59,7 +57,6 @@ class DownloadStatusEventArgs:
self.error = error
self.eta = eta
self.mbper_sec = mbper_sec
self.item_id = item_id
class ScanStatusEventArgs:
"""Event arguments for scan status events."""
@ -206,7 +203,6 @@ class SeriesApp:
episode: int,
key: str,
language: str = "German Dub",
item_id: Optional[str] = None,
) -> bool:
"""
Download an episode (async).
@ -217,7 +213,6 @@ class SeriesApp:
episode: Episode number
key: Serie key
language: Language preference
item_id: Optional download queue item ID for progress tracking
Returns:
True if download succeeded, False otherwise
@ -232,7 +227,6 @@ class SeriesApp:
episode=episode,
status="started",
message="Download started",
item_id=item_id,
)
)
@ -260,7 +254,6 @@ class SeriesApp:
progress=(downloaded / total_bytes) * 100 if total_bytes else 0,
eta=eta,
mbper_sec=mbper_sec,
item_id=item_id,
)
)
# Perform download in thread to avoid blocking event loop
@ -289,7 +282,6 @@ class SeriesApp:
status="completed",
progress=1.0,
message="Download completed successfully",
item_id=item_id,
)
)
else:
@ -305,7 +297,6 @@ class SeriesApp:
episode=episode,
status="failed",
message="Download failed",
item_id=item_id,
)
)
@ -330,7 +321,6 @@ class SeriesApp:
status="failed",
error=e,
message=f"Download error: {str(e)}",
item_id=item_id,
)
)
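
Both sides of this file report download state through DownloadStatusEventArgs. A hedged sketch of a consumer-side handler, using the field names shown above; the payload here is a stand-in and the event attribute name is an assumption (the diff only notes that the events library uses assignment, not +=):

from types import SimpleNamespace

def on_download_status(args) -> None:
    # Field names come from DownloadStatusEventArgs in the hunk above.
    if args.status == "progress":
        print(f"{args.serie_folder} S{args.season:02d}E{args.episode:02d}: "
              f"{args.progress:.1f}% at {args.mbper_sec or 0:.1f} MB/s")
    elif args.status == "failed":
        print(f"failed: {args.message}")

# Stand-in event payload for demonstration; SeriesApp builds the real one.
on_download_status(SimpleNamespace(
    serie_folder="Example Show", season=1, episode=3,
    status="progress", progress=42.0, mbper_sec=5.4, message=None,
))
# Subscription is by assignment per the note in anime_service, e.g.
#   series_app.download_status_changed = on_download_status   (attribute name assumed)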

View File

@ -3,25 +3,24 @@ Health check controller for monitoring and status endpoints.
This module provides health check endpoints for application monitoring.
"""
from fastapi import APIRouter
from typing import Optional
from src.config.settings import settings
from src.server.utils.dependencies import _series_app
from fastapi import APIRouter, Depends
from src.core.SeriesApp import SeriesApp
from src.server.utils.dependencies import get_series_app
router = APIRouter(prefix="/health", tags=["health"])
@router.get("")
async def health_check():
"""Health check endpoint for monitoring.
This endpoint does not depend on anime_directory configuration
and should always return 200 OK for basic health monitoring.
"""
async def health_check(
series_app: Optional[SeriesApp] = Depends(get_series_app)
):
"""Health check endpoint for monitoring."""
return {
"status": "healthy",
"service": "aniworld-api",
"version": "1.0.0",
"series_app_initialized": _series_app is not None,
"anime_directory_configured": bool(settings.anime_directory)
"series_app_initialized": series_app is not None
}
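
One side of this hunk resolves the SeriesApp through Depends(get_series_app) instead of importing the module-level _series_app. A minimal, runnable sketch of that optional-dependency pattern; the provider body and the bare object type are placeholders, only the route shape and response keys come from the diff:

from typing import Optional
from fastapi import APIRouter, Depends, FastAPI
from fastapi.testclient import TestClient

_series_app = None  # stand-in for the real, lazily initialized SeriesApp

def get_series_app() -> Optional[object]:
    # Illustrative provider: returns None until the app has been configured.
    return _series_app

router = APIRouter(prefix="/health", tags=["health"])

@router.get("")
async def health_check(series_app: Optional[object] = Depends(get_series_app)):
    return {
        "status": "healthy",
        "service": "aniworld-api",
        "version": "1.0.0",
        "series_app_initialized": series_app is not None,
    }

app = FastAPI()
app.include_router(router)
print(TestClient(app).get("/health").json())  # series_app_initialized: False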

View File

@ -100,18 +100,6 @@ async def lifespan(app: FastAPI):
# Shutdown
logger.info("FastAPI application shutting down")
# Shutdown download service and its thread pool
try:
from src.server.services.download_service import _download_service_instance
if _download_service_instance is not None:
logger.info("Stopping download service...")
await _download_service_instance.stop()
logger.info("Download service stopped successfully")
except Exception as e:
logger.error("Error stopping download service: %s", e, exc_info=True)
logger.info("FastAPI application shutdown complete")
# Initialize FastAPI app with lifespan
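
The block removed above stops a module-level download service during lifespan teardown and swallows errors so shutdown always completes. A self-contained sketch of that pattern; DummyDownloadService stands in for the project's DownloadService:

import logging
from contextlib import asynccontextmanager
from fastapi import FastAPI

logger = logging.getLogger(__name__)

class DummyDownloadService:
    # Stand-in for the project's DownloadService; only stop() is used here.
    async def stop(self) -> None:
        logger.info("download service stopped")

_download_service_instance = DummyDownloadService()  # module-level singleton, as in the diff

@asynccontextmanager
async def lifespan(app: FastAPI):
    yield  # application serves requests here
    # Shutdown: stop the service but never let an error escape the teardown.
    try:
        if _download_service_instance is not None:
            await _download_service_instance.stop()
    except Exception as e:
        logger.error("Error stopping download service: %s", e, exc_info=True)

app = FastAPI(lifespan=lifespan)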

View File

@ -37,7 +37,6 @@ class AnimeService:
self._app = series_app
self._directory = series_app.directory_to_search
self._progress_service = progress_service or get_progress_service()
self._event_loop: Optional[asyncio.AbstractEventLoop] = None
# Subscribe to SeriesApp events
# Note: Events library uses assignment (=), not += operator
try:
@ -55,77 +54,49 @@ class AnimeService:
args: DownloadStatusEventArgs from SeriesApp
"""
try:
# Get event loop - try running loop first, then stored loop
loop = None
# Check if there's a running event loop
try:
loop = asyncio.get_running_loop()
except RuntimeError:
# No running loop in this thread - use stored loop
loop = self._event_loop
if not loop:
# No running loop - log and skip
logger.debug(
"No event loop available for download status event",
"No running event loop for download status event",
status=args.status
)
return
# Use item_id if available, otherwise fallback to constructing ID
progress_id = (
args.item_id
if args.item_id
else f"download_{args.serie_folder}_{args.season}_{args.episode}"
)
# Map SeriesApp download events to progress service
if args.status == "started":
asyncio.run_coroutine_threadsafe(
loop.create_task(
self._progress_service.start_progress(
progress_id=progress_id,
progress_id=f"download_{args.serie_folder}_{args.season}_{args.episode}", # noqa: E501
progress_type=ProgressType.DOWNLOAD,
title=f"Downloading {args.serie_folder}",
message=f"S{args.season:02d}E{args.episode:02d}",
metadata={"item_id": args.item_id} if args.item_id else None,
),
loop
)
)
elif args.status == "progress":
# Build metadata with item_id and speed
progress_metadata = {}
if args.item_id:
progress_metadata["item_id"] = args.item_id
if args.mbper_sec is not None:
progress_metadata["speed_mbps"] = round(args.mbper_sec, 2)
if args.eta is not None:
progress_metadata["eta"] = args.eta
asyncio.run_coroutine_threadsafe(
loop.create_task(
self._progress_service.update_progress(
progress_id=progress_id,
current=args.progress,
progress_id=f"download_{args.serie_folder}_{args.season}_{args.episode}", # noqa: E501
current=int(args.progress),
total=100,
message=args.message or "Downloading...",
metadata=(
progress_metadata if progress_metadata else None
),
),
loop
)
)
elif args.status == "completed":
asyncio.run_coroutine_threadsafe(
loop.create_task(
self._progress_service.complete_progress(
progress_id=progress_id,
progress_id=f"download_{args.serie_folder}_{args.season}_{args.episode}", # noqa: E501
message="Download completed",
),
loop
)
)
elif args.status == "failed":
asyncio.run_coroutine_threadsafe(
loop.create_task(
self._progress_service.fail_progress(
progress_id=progress_id,
progress_id=f"download_{args.serie_folder}_{args.season}_{args.episode}", # noqa: E501
error_message=args.message or str(args.error),
),
loop
)
)
except Exception as exc:
logger.error(
@ -142,65 +113,56 @@ class AnimeService:
try:
scan_id = "library_scan"
# Get event loop - try running loop first, then stored loop
loop = None
# Check if there's a running event loop
try:
loop = asyncio.get_running_loop()
except RuntimeError:
# No running loop in this thread - use stored loop
loop = self._event_loop
if not loop:
# No running loop - log and skip
logger.debug(
"No event loop available for scan status event",
"No running event loop for scan status event",
status=args.status
)
return
# Map SeriesApp scan events to progress service
if args.status == "started":
asyncio.run_coroutine_threadsafe(
loop.create_task(
self._progress_service.start_progress(
progress_id=scan_id,
progress_type=ProgressType.SCAN,
title="Scanning anime library",
message=args.message or "Initializing scan...",
),
loop
)
)
elif args.status == "progress":
asyncio.run_coroutine_threadsafe(
loop.create_task(
self._progress_service.update_progress(
progress_id=scan_id,
current=args.current,
total=args.total,
message=args.message or f"Scanning: {args.folder}",
),
loop
)
)
elif args.status == "completed":
asyncio.run_coroutine_threadsafe(
loop.create_task(
self._progress_service.complete_progress(
progress_id=scan_id,
message=args.message or "Scan completed",
),
loop
)
)
elif args.status == "failed":
asyncio.run_coroutine_threadsafe(
loop.create_task(
self._progress_service.fail_progress(
progress_id=scan_id,
error_message=args.message or str(args.error),
),
loop
)
)
elif args.status == "cancelled":
asyncio.run_coroutine_threadsafe(
loop.create_task(
self._progress_service.fail_progress(
progress_id=scan_id,
error_message=args.message or "Scan cancelled",
),
loop
)
)
except Exception as exc:
logger.error("Error handling scan status event", error=str(exc))
@ -257,9 +219,6 @@ class AnimeService:
forwarded to the ProgressService through event handlers.
"""
try:
# Store event loop for event handlers
self._event_loop = asyncio.get_running_loop()
# SeriesApp.rescan is now async and handles events internally
await self._app.rescan()
@ -279,33 +238,21 @@ class AnimeService:
season: int,
episode: int,
key: str,
item_id: Optional[str] = None,
) -> bool:
"""Start a download.
The SeriesApp now handles progress tracking via events which are
forwarded to the ProgressService through event handlers.
Args:
serie_folder: Serie folder name
season: Season number
episode: Episode number
key: Serie key
item_id: Optional download queue item ID for tracking
Returns True on success or raises AnimeServiceError on failure.
"""
try:
# Store event loop for event handlers
self._event_loop = asyncio.get_running_loop()
# SeriesApp.download is now async and handles events internally
return await self._app.download(
serie_folder=serie_folder,
season=season,
episode=episode,
key=key,
item_id=item_id,
)
except Exception as exc:
logger.exception("download failed")
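
The two sides of this file differ in how progress-service coroutines are handed to the event loop from SeriesApp's event callbacks: one wraps them in asyncio.run_coroutine_threadsafe(..., loop) with a stored-loop fallback, the other calls loop.create_task(...) and skips the event when no loop is running in the current thread. A self-contained sketch of the thread-safe variant, with toy names:

import asyncio
import threading

# run_coroutine_threadsafe() is the thread-safe way to schedule a coroutine on a
# running loop from a plain worker thread; loop.create_task() must only be called
# on the loop's own thread.
async def report_progress(percent: float) -> None:
    print(f"progress: {percent:.1f}%")

def worker(loop: asyncio.AbstractEventLoop) -> None:
    # Runs in a non-async thread, e.g. a download callback.
    future = asyncio.run_coroutine_threadsafe(report_progress(50.0), loop)
    future.result(timeout=5)  # optionally wait for completion

async def main() -> None:
    loop = asyncio.get_running_loop()
    t = threading.Thread(target=worker, args=(loop,))
    t.start()
    await asyncio.sleep(0.1)  # give the scheduled coroutine time to run
    t.join()

asyncio.run(main())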

View File

@ -77,14 +77,10 @@ class DownloadService:
# Control flags
self._is_stopped = True # Queue processing is stopped by default
self._is_shutting_down = False # Flag to indicate shutdown
# Executor for blocking operations
self._executor = ThreadPoolExecutor(max_workers=1)
# Track active download task for cancellation
self._active_download_task: Optional[asyncio.Task] = None
# Statistics tracking
self._total_downloaded_mb: float = 0.0
self._download_speeds: deque[float] = deque(maxlen=10)
@ -504,11 +500,7 @@ class DownloadService:
)
# Process the download (this will wait until complete)
self._active_download_task = asyncio.create_task(
self._process_download(item)
)
await self._active_download_task
self._active_download_task = None
await self._process_download(item)
# Small delay between downloads
await asyncio.sleep(1)
@ -779,11 +771,6 @@ class DownloadService:
item: Download item to process
"""
try:
# Check if shutting down
if self._is_shutting_down:
logger.info("Skipping download due to shutdown")
return
# Update status
item.status = DownloadStatus.DOWNLOADING
item.started_at = datetime.now(timezone.utc)
@ -808,7 +795,6 @@ class DownloadService:
season=item.episode.season,
episode=item.episode.episode,
key=item.serie_id,
item_id=item.id,
)
# Handle result
@ -828,19 +814,6 @@ class DownloadService:
else:
raise AnimeServiceError("Download returned False")
except asyncio.CancelledError:
# Handle task cancellation during shutdown
logger.info(
"Download cancelled during shutdown",
item_id=item.id,
)
item.status = DownloadStatus.CANCELLED
item.completed_at = datetime.now(timezone.utc)
# Return item to pending queue if not shutting down
if not self._is_shutting_down:
self._add_to_pending_queue(item, front=True)
raise # Re-raise to properly cancel the task
except Exception as e:
# Handle failure
item.status = DownloadStatus.FAILED
@ -872,31 +845,27 @@ class DownloadService:
logger.info("Download queue service initialized")
async def stop(self) -> None:
"""Stop the download queue service and cancel active downloads.
"""Stop the download queue service and wait for active download.
Cancels any active download and shuts down the thread pool immediately.
Note: This waits for the current download to complete.
"""
logger.info("Stopping download queue service...")
# Set shutdown flag
self._is_shutting_down = True
self._is_stopped = True
# Wait for active download to complete (with timeout)
timeout = 30 # seconds
start_time = asyncio.get_event_loop().time()
# Cancel active download task if running
if self._active_download_task and not self._active_download_task.done():
logger.info("Cancelling active download task...")
self._active_download_task.cancel()
try:
await self._active_download_task
except asyncio.CancelledError:
logger.info("Active download task cancelled")
while (
self._active_download
and (asyncio.get_event_loop().time() - start_time) < timeout
):
await asyncio.sleep(1)
# Save final state
self._save_queue()
# Shutdown executor immediately, don't wait for tasks
logger.info("Shutting down thread pool executor...")
self._executor.shutdown(wait=False, cancel_futures=True)
# Shutdown executor
self._executor.shutdown(wait=True)
logger.info("Download queue service stopped")

View File

@ -48,15 +48,8 @@ class QueueManager {
this.updateQueueDisplay(data);
});
this.socket.on('queue_status', (data) => {
// New backend sends queue_status messages with nested structure
if (data.status && data.statistics) {
// Transform nested structure to flat structure
const queueData = {
...data.status,
statistics: data.statistics
};
this.updateQueueDisplay(queueData);
} else if (data.queue_status) {
// New backend sends queue_status messages
if (data.queue_status) {
this.updateQueueDisplay(data.queue_status);
} else {
this.updateQueueDisplay(data);
@ -235,20 +228,10 @@ class QueueManager {
async loadQueueData() {
try {
const response = await this.makeAuthenticatedRequest('/api/queue/status');
if (!response) {
return;
}
if (!response) return;
const data = await response.json();
// API returns nested structure with 'status' and 'statistics'
// Transform it to the expected flat structure
const queueData = {
...data.status, // includes is_running, active_downloads, pending_queue, etc.
statistics: data.statistics
};
this.updateQueueDisplay(queueData);
this.updateQueueDisplay(data);
// Process any pending progress updates after queue is loaded
this.processPendingProgressUpdates();
@ -393,7 +376,6 @@ class QueueManager {
// Extract progress information - handle both ProgressService and yt-dlp formats
const progress = data.progress || data;
const percent = progress.percent || 0;
const metadata = progress.metadata || data.metadata || {};
// Check if we have detailed yt-dlp progress (downloaded_mb, total_mb, speed_mbps)
// or basic ProgressService progress (current, total)
@ -408,13 +390,12 @@ class QueueManager {
// ProgressService basic format - convert bytes to MB
downloaded = (progress.current / (1024 * 1024)).toFixed(1);
total = progress.total > 0 ? (progress.total / (1024 * 1024)).toFixed(1) : 'Unknown';
// Check for speed in metadata
speed = metadata.speed_mbps ? metadata.speed_mbps.toFixed(1) : '0.0';
speed = '0.0'; // Speed not available in basic format
} else {
// Fallback
downloaded = '0.0';
total = 'Unknown';
speed = metadata.speed_mbps ? metadata.speed_mbps.toFixed(1) : '0.0';
speed = '0.0';
}
// Update progress bar
@ -430,7 +411,7 @@ class QueueManager {
const speedSpan = progressInfo.querySelector('.download-speed');
if (percentSpan) {
percentSpan.textContent = percent > 0 ? `${percent.toFixed(1)}%` : 'Starting...';
percentSpan.textContent = `${percent.toFixed(1)}% (${downloaded} MB / ${total} MB)`;
}
if (speedSpan) {
speedSpan.textContent = `${speed} MB/s`;
@ -489,8 +470,8 @@ class QueueManager {
const progress = download.progress || {};
const progressPercent = progress.percent || 0;
const speed = progress.speed_mbps ? `${progress.speed_mbps.toFixed(1)} MB/s` : '0 MB/s';
const downloaded = progress.downloaded_mb ? `${progress.downloaded_mb.toFixed(1)} MB` : '0.0';
const total = progress.total_mb ? `${progress.total_mb.toFixed(1)} MB` : '0.0';
const downloaded = progress.downloaded_mb ? `${progress.downloaded_mb.toFixed(1)} MB` : '0 MB';
const total = progress.total_mb ? `${progress.total_mb.toFixed(1)} MB` : 'Unknown';
return `
<div class="download-card active" data-download-id="${download.id}">
@ -505,7 +486,7 @@ class QueueManager {
<div class="progress-fill" style="width: ${progressPercent}%"></div>
</div>
<div class="progress-info">
<span>${progressPercent > 0 ? `${progressPercent.toFixed(1)}%` : 'Starting...'}</span>
<span>${progressPercent.toFixed(1)}% (${downloaded} / ${total})</span>
<span class="download-speed">${speed}</span>
</div>
</div>
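
The left-hand JavaScript above normalizes two WebSocket payload shapes before rendering. A small Python sketch of the same normalization; the payload shapes are inferred from the JavaScript, not from server code:

def normalize_queue_payload(data: dict) -> dict:
    # Mirrors the handling queue.js applies to 'queue_status' messages.
    if "status" in data and "statistics" in data:
        # Nested shape: flatten status fields and attach statistics.
        return {**data["status"], "statistics": data["statistics"]}
    if "queue_status" in data:
        return data["queue_status"]
    return data  # already flat

nested = {
    "status": {"is_running": True, "active_downloads": [], "pending_queue": []},
    "statistics": {"total_downloaded_mb": 0.0},
}
print(normalize_queue_payload(nested)["is_running"])  # True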

View File

@ -442,18 +442,15 @@ class TestFrontendJavaScriptIntegration:
async def test_queue_operations_compatibility(self, authenticated_client):
"""Test queue operations match queue.js expectations."""
# Test start - should return 400 when queue is empty (valid behavior)
# Test start
response = await authenticated_client.post("/api/queue/start")
assert response.status_code in [200, 400]
if response.status_code == 400:
# Verify error message indicates empty queue
assert "No pending downloads" in response.json()["detail"]
assert response.status_code == 200
# Test pause - always succeeds even if nothing is processing
# Test pause
response = await authenticated_client.post("/api/queue/pause")
assert response.status_code == 200
# Test stop - always succeeds even if nothing is processing
# Test stop
response = await authenticated_client.post("/api/queue/stop")
assert response.status_code == 200

View File

@ -26,33 +26,11 @@ from src.server.models.download import (
)
from src.server.services.anime_service import AnimeService
from src.server.services.auth_service import auth_service
from src.server.services.config_service import get_config_service
from src.server.services.download_service import DownloadService
from src.server.services.progress_service import get_progress_service
from src.server.services.websocket_service import get_websocket_service
@pytest.fixture(autouse=True)
def setup_temp_config(tmp_path):
"""Setup temporary config directory for tests."""
config_service = get_config_service()
original_path = config_service.config_path
original_backup_dir = config_service.backup_dir
# Set temporary paths
temp_data = tmp_path / "data"
temp_data.mkdir(exist_ok=True)
config_service.config_path = temp_data / "config.json"
config_service.backup_dir = temp_data / "config_backups"
config_service.backup_dir.mkdir(exist_ok=True)
yield
# Restore original paths
config_service.config_path = original_path
config_service.backup_dir = original_backup_dir
@pytest.fixture(autouse=True)
def reset_auth():
"""Reset authentication state before each test."""

View File

@ -8,7 +8,6 @@ concurrent requests and maintain acceptable response times.
import asyncio
import time
from typing import Any, Dict, List
from unittest.mock import AsyncMock, MagicMock
import pytest
from httpx import ASGITransport, AsyncClient
@ -21,22 +20,6 @@ from src.server.fastapi_app import app
class TestAPILoadTesting:
"""Load testing for API endpoints."""
@pytest.fixture(autouse=True)
def mock_series_app_dependency(self):
"""Mock SeriesApp dependency for performance tests."""
from src.server.utils.dependencies import get_series_app
mock_app = MagicMock()
mock_app.list = MagicMock()
mock_app.list.GetMissingEpisode = MagicMock(return_value=[])
mock_app.search = AsyncMock(return_value=[])
app.dependency_overrides[get_series_app] = lambda: mock_app
yield
app.dependency_overrides.clear()
@pytest.fixture
async def client(self):
"""Create async HTTP client."""

View File

@ -17,28 +17,12 @@ class TestSQLInjection:
@pytest.fixture
async def client(self):
"""Create async HTTP client for testing."""
from unittest.mock import AsyncMock, MagicMock
from httpx import ASGITransport
from src.server.utils.dependencies import get_series_app
# Mock SeriesApp to avoid 503 errors
mock_app = MagicMock()
mock_app.list = MagicMock()
mock_app.list.GetMissingEpisode = MagicMock(return_value=[])
mock_app.search = AsyncMock(return_value=[])
# Override dependency
app.dependency_overrides[get_series_app] = lambda: mock_app
async with AsyncClient(
transport=ASGITransport(app=app), base_url="http://test"
) as ac:
yield ac
# Cleanup
app.dependency_overrides.clear()
# Classic SQL Injection payloads
SQL_INJECTION_PAYLOADS = [
@ -154,28 +138,12 @@ class TestNoSQLInjection:
@pytest.fixture
async def client(self):
"""Create async HTTP client for testing."""
from unittest.mock import AsyncMock, MagicMock
from httpx import ASGITransport
from src.server.utils.dependencies import get_series_app
# Mock SeriesApp to avoid 503 errors
mock_app = MagicMock()
mock_app.list = MagicMock()
mock_app.list.GetMissingEpisode = MagicMock(return_value=[])
mock_app.search = AsyncMock(return_value=[])
# Override dependency
app.dependency_overrides[get_series_app] = lambda: mock_app
async with AsyncClient(
transport=ASGITransport(app=app), base_url="http://test"
) as ac:
yield ac
# Cleanup
app.dependency_overrides.clear()
@pytest.mark.asyncio
async def test_nosql_injection_in_query(self, client):
@ -272,33 +240,17 @@ class TestORMInjection:
@pytest.mark.security
class TestDatabaseSecurity:
"""Security tests for database access patterns."""
"""General database security tests."""
@pytest.fixture
async def client(self):
"""Create async HTTP client for testing."""
from unittest.mock import AsyncMock, MagicMock
from httpx import ASGITransport
from src.server.utils.dependencies import get_series_app
# Mock SeriesApp to avoid 503 errors
mock_app = MagicMock()
mock_app.list = MagicMock()
mock_app.list.GetMissingEpisode = MagicMock(return_value=[])
mock_app.search = AsyncMock(return_value=[])
# Override dependency
app.dependency_overrides[get_series_app] = lambda: mock_app
async with AsyncClient(
transport=ASGITransport(app=app), base_url="http://test"
) as ac:
yield ac
# Cleanup
app.dependency_overrides.clear()
@pytest.mark.asyncio
async def test_error_messages_no_leak_info(self, client):
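
The fixtures removed throughout this file share one pattern: stub the get_series_app dependency so endpoints do not return 503 during tests. A condensed restatement of that pattern; module paths are the ones shown in the diff, everything else is illustrative:

from unittest.mock import AsyncMock, MagicMock

import pytest
from httpx import ASGITransport, AsyncClient

from src.server.fastapi_app import app
from src.server.utils.dependencies import get_series_app

@pytest.fixture
async def client():
    # Mock SeriesApp and override the dependency for the lifetime of the test.
    mock_app = MagicMock()
    mock_app.list.GetMissingEpisode = MagicMock(return_value=[])
    mock_app.search = AsyncMock(return_value=[])
    app.dependency_overrides[get_series_app] = lambda: mock_app
    async with AsyncClient(
        transport=ASGITransport(app=app), base_url="http://test"
    ) as ac:
        yield ac
    app.dependency_overrides.clear()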

View File

@ -247,7 +247,6 @@ class TestDownload:
season=1,
episode=1,
key="test_key",
item_id=None,
)
@pytest.mark.asyncio
@ -273,7 +272,6 @@ class TestDownload:
season=1,
episode=1,
key="test_key",
item_id=None,
)
@pytest.mark.asyncio

View File

@ -1,7 +1,8 @@
"""Unit tests for setup redirect middleware."""
import pytest
from fastapi import FastAPI
from httpx import ASGITransport, AsyncClient
from fastapi.testclient import TestClient
from starlette.responses import JSONResponse
from src.server.middleware.setup_redirect import SetupRedirectMiddleware
from src.server.services.auth_service import auth_service
@ -45,11 +46,9 @@ def app():
@pytest.fixture
async def client(app):
"""Create an async test client."""
transport = ASGITransport(app=app)
async with AsyncClient(transport=transport, base_url="http://test") as ac:
yield ac
def client(app):
"""Create a test client."""
return TestClient(app)
@pytest.fixture(autouse=True)
@ -96,11 +95,10 @@ def reset_config_service():
class TestSetupRedirectMiddleware:
"""Test cases for setup redirect middleware."""
@pytest.mark.asyncio
async def test_redirect_to_setup_when_not_configured(self, client):
"""Test that HTML requests redirect to /setup when not configured."""
def test_redirect_to_setup_when_not_configured(self, client):
"""Test that HTML requests are redirected to /setup when not configured."""
# Request home page with HTML accept header (don't follow redirects)
response = await client.get(
response = client.get(
"/", headers={"Accept": "text/html"}, follow_redirects=False
)
@ -108,40 +106,36 @@ class TestSetupRedirectMiddleware:
assert response.status_code == 302
assert response.headers["location"] == "/setup"
@pytest.mark.asyncio
async def test_setup_page_accessible_without_config(self, client):
"""Test that /setup page is accessible when not configured."""
response = await client.get("/setup")
def test_setup_page_accessible_without_config(self, client):
"""Test that /setup page is accessible even when not configured."""
response = client.get("/setup")
# Should not redirect
assert response.status_code == 200
assert response.json()["message"] == "Setup page"
@pytest.mark.asyncio
async def test_api_returns_503_when_not_configured(self, client):
def test_api_returns_503_when_not_configured(self, client):
"""Test that API requests return 503 when not configured."""
response = await client.get("/api/data")
response = client.get("/api/data")
# Should return 503 Service Unavailable
assert response.status_code == 503
assert "setup_url" in response.json()
assert response.json()["setup_url"] == "/setup"
@pytest.mark.asyncio
async def test_exempt_api_endpoints_accessible(self, client):
def test_exempt_api_endpoints_accessible(self, client):
"""Test that exempt API endpoints are accessible without setup."""
# Health endpoint should be accessible
response = await client.get("/api/health")
response = client.get("/api/health")
assert response.status_code == 200
assert response.json()["status"] == "ok"
# Auth status endpoint should be accessible
response = await client.get("/api/auth/status")
response = client.get("/api/auth/status")
assert response.status_code == 200
assert response.json()["configured"] is False
@pytest.mark.asyncio
async def test_no_redirect_when_configured(self, client):
def test_no_redirect_when_configured(self, client):
"""Test that no redirect happens when auth and config are set up."""
# Configure auth service
auth_service.setup_master_password("Test@Password123")
@ -153,14 +147,13 @@ class TestSetupRedirectMiddleware:
config_service.save_config(config, create_backup=False)
# Request home page
response = await client.get("/", headers={"Accept": "text/html"})
response = client.get("/", headers={"Accept": "text/html"})
# Should not redirect
assert response.status_code == 200
assert response.json()["message"] == "Home page"
@pytest.mark.asyncio
async def test_api_works_when_configured(self, client):
def test_api_works_when_configured(self, client):
"""Test that API requests work normally when configured."""
# Configure auth service
auth_service.setup_master_password("Test@Password123")
@ -172,44 +165,44 @@ class TestSetupRedirectMiddleware:
config_service.save_config(config, create_backup=False)
# Request API endpoint
response = await client.get("/api/data")
response = client.get("/api/data")
# Should work normally
assert response.status_code == 200
assert response.json()["data"] == "some data"
@pytest.mark.asyncio
async def test_static_files_always_accessible(self, client, app):
def test_static_files_always_accessible(self, client):
"""Test that static file paths are always accessible."""
# Create a route that mimics static file serving
from fastapi import FastAPI
app = client.app
@app.get("/static/css/style.css")
async def static_css():
return {"content": "css"}
# Request static file
response = await client.get("/static/css/style.css")
response = client.get("/static/css/style.css")
# Should be accessible even without setup
assert response.status_code == 200
@pytest.mark.asyncio
async def test_redirect_when_only_auth_configured(self, client):
def test_redirect_when_only_auth_configured(self, client):
"""Test redirect when auth is configured but config is invalid."""
# Configure auth but don't create config file
auth_service.setup_master_password("Test@Password123")
# Request home page
response = await client.get("/", headers={"Accept": "text/html"})
response = client.get("/", headers={"Accept": "text/html"})
# Should still work because load_config creates default config
# This is the current behavior - may need to adjust if we want
# stricter setup requirements
assert response.status_code in [200, 302]
@pytest.mark.asyncio
async def test_root_path_redirect(self, client):
def test_root_path_redirect(self, client):
"""Test that root path redirects to setup when not configured."""
response = await client.get(
response = client.get(
"/", headers={"Accept": "text/html"}, follow_redirects=False
)
@ -217,8 +210,8 @@ class TestSetupRedirectMiddleware:
assert response.status_code == 302
assert response.headers["location"] == "/setup"
def test_path_matching_exact_and_prefix(self):
"""Test that path matching works for both exact and prefix."""
def test_path_matching_exact_and_prefix(self, client):
"""Test that path matching works for both exact and prefix matches."""
middleware = SetupRedirectMiddleware(app=FastAPI())
# Exact matches
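
This test file moves between httpx's AsyncClient over ASGITransport and the synchronous TestClient. A minimal side-by-side sketch of the two ways to exercise the same app; the example app is a stand-in, not the middleware under test:

import asyncio

from fastapi import FastAPI
from fastapi.testclient import TestClient
from httpx import ASGITransport, AsyncClient

app = FastAPI()

@app.get("/setup")
async def setup_page():
    return {"message": "Setup page"}

# Synchronous style, as used on one side of the diff.
def test_sync() -> None:
    client = TestClient(app)
    assert client.get("/setup").json()["message"] == "Setup page"

# Async style with ASGITransport, as used on the other side.
async def test_async() -> None:
    transport = ASGITransport(app=app)
    async with AsyncClient(transport=transport, base_url="http://test") as ac:
        response = await ac.get("/setup", follow_redirects=False)
        assert response.status_code == 200

test_sync()
asyncio.run(test_async())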