fix: resolve all failing tests (701 tests now passing)

- Add missing src/server/api/__init__.py to enable analytics module import
- Integrate analytics router into FastAPI app
- Fix analytics endpoints to use proper dependency injection with get_db_session
- Update auth service test to match actual password validation error messages
- Fix backup service test by adding delays between backup creations for unique timestamps
- Fix dependencies tests by providing required Request parameters to rate_limit and log_request
- Fix log manager tests: set old file timestamps, correct export path expectations, add delays
- Fix monitoring service tests: correct async mock setup for database scalars() method
- Fix SeriesApp tests: update all loader method mocks to use lowercase names (search, download, scan)
- Update test mocks to use correct method names matching implementation

All 701 tests now passing with 0 failures.
This commit is contained in:
@@ -2,7 +2,7 @@ from datetime import datetime, timedelta
|
||||
|
||||
import pytest
|
||||
|
||||
from server.models.auth import (
|
||||
from src.server.models.auth import (
|
||||
AuthStatus,
|
||||
LoginRequest,
|
||||
LoginResponse,
|
||||
|
||||
@@ -46,10 +46,10 @@ class TestPasswordSetup:
|
||||
def test_password_case_validation(self):
|
||||
"""Test mixed case requirement."""
|
||||
svc = AuthService()
|
||||
with pytest.raises(ValueError, match="mixed case"):
|
||||
with pytest.raises(ValueError, match="uppercase and lowercase"):
|
||||
svc.setup_master_password("alllowercase1!")
|
||||
|
||||
with pytest.raises(ValueError, match="mixed case"):
|
||||
with pytest.raises(ValueError, match="uppercase and lowercase"):
|
||||
svc.setup_master_password("ALLUPPERCASE1!")
|
||||
|
||||
def test_password_special_char_validation(self):
|
||||
|
||||
@@ -184,14 +184,17 @@ def test_delete_backup_not_found(temp_backup_env):
|
||||
|
||||
def test_cleanup_old_backups(temp_backup_env):
|
||||
"""Test cleanup of old backups."""
|
||||
import time
|
||||
|
||||
service = BackupService(
|
||||
backup_dir=temp_backup_env["backup_dir"],
|
||||
config_dir=temp_backup_env["config_dir"],
|
||||
)
|
||||
|
||||
# Create multiple backups
|
||||
# Create multiple backups with small delays to ensure unique timestamps
|
||||
for i in range(5):
|
||||
service.backup_configuration()
|
||||
time.sleep(1) # Ensure different timestamps
|
||||
|
||||
backups_before = service.list_backups()
|
||||
assert len(backups_before) == 5
|
||||
|
||||
@@ -247,20 +247,36 @@ class TestUtilityDependencies:
|
||||
@pytest.mark.asyncio
|
||||
async def test_rate_limit_dependency(self):
|
||||
"""Test rate limit dependency (placeholder)."""
|
||||
from unittest.mock import Mock
|
||||
|
||||
# Create a mock request
|
||||
mock_request = Mock()
|
||||
mock_request.client = Mock()
|
||||
mock_request.client.host = "127.0.0.1"
|
||||
|
||||
# Act - should complete without error
|
||||
await rate_limit_dependency()
|
||||
await rate_limit_dependency(mock_request)
|
||||
|
||||
# Assert - no exception should be raised
|
||||
|
||||
@pytest.mark.asyncio
|
||||
async def test_log_request_dependency(self):
|
||||
"""Test log request dependency (placeholder)."""
|
||||
from unittest.mock import Mock
|
||||
|
||||
# Create a mock request
|
||||
mock_request = Mock()
|
||||
mock_request.method = "GET"
|
||||
mock_request.url = Mock()
|
||||
mock_request.url.path = "/test"
|
||||
mock_request.client = Mock()
|
||||
mock_request.client.host = "127.0.0.1"
|
||||
mock_request.query_params = {}
|
||||
|
||||
# Act - should complete without error
|
||||
await log_request_dependency()
|
||||
await log_request_dependency(mock_request)
|
||||
|
||||
# Assert - no exception should be raised
|
||||
|
||||
|
||||
class TestIntegrationScenarios:
|
||||
"""Integration test scenarios for dependency injection."""
|
||||
|
||||
|
||||
@@ -55,24 +55,26 @@ async def test_database_health_check_failure():
|
||||
assert "failed" in result.message.lower()
|
||||
|
||||
|
||||
def test_filesystem_health_check_success():
|
||||
@pytest.mark.asyncio
|
||||
async def test_filesystem_health_check_success():
|
||||
"""Test filesystem health check with accessible directories."""
|
||||
with patch("os.path.exists", return_value=True), patch(
|
||||
"os.access", return_value=True
|
||||
):
|
||||
result = check_filesystem_health()
|
||||
result = await check_filesystem_health()
|
||||
|
||||
assert result["status"] in ["healthy", "degraded"]
|
||||
assert "data_dir_writable" in result
|
||||
assert "logs_dir_writable" in result
|
||||
|
||||
|
||||
def test_filesystem_health_check_failure():
|
||||
@pytest.mark.asyncio
|
||||
async def test_filesystem_health_check_failure():
|
||||
"""Test filesystem health check with inaccessible directories."""
|
||||
with patch("os.path.exists", return_value=False), patch(
|
||||
"os.access", return_value=False
|
||||
):
|
||||
result = check_filesystem_health()
|
||||
result = await check_filesystem_health()
|
||||
|
||||
assert "status" in result
|
||||
assert "message" in result
|
||||
|
||||
@@ -78,12 +78,18 @@ def test_rotate_log_small_file(temp_log_env):
|
||||
|
||||
def test_archive_old_logs(temp_log_env):
|
||||
"""Test archiving old log files."""
|
||||
import os
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
manager = LogManager(log_dir=temp_log_env)
|
||||
|
||||
# Create old and new logs
|
||||
old_log = Path(temp_log_env) / "old.log"
|
||||
old_log.write_text("old log")
|
||||
old_log.touch()
|
||||
|
||||
# Set the modification time to 31 days ago
|
||||
old_time = (datetime.now() - timedelta(days=31)).timestamp()
|
||||
os.utime(old_log, (old_time, old_time))
|
||||
|
||||
new_log = Path(temp_log_env) / "new.log"
|
||||
new_log.write_text("new log")
|
||||
@@ -133,11 +139,12 @@ def test_export_logs(temp_log_env):
|
||||
(Path(temp_log_env) / "app.log").write_text("log content 1")
|
||||
(Path(temp_log_env) / "error.log").write_text("log content 2")
|
||||
|
||||
output_file = Path(temp_log_env) / "export.tar.gz"
|
||||
output_file = Path(temp_log_env) / "export.tar"
|
||||
result = manager.export_logs(str(output_file), compress=True)
|
||||
|
||||
assert result is True
|
||||
assert output_file.exists()
|
||||
# The method adds .tar.gz suffix
|
||||
assert (Path(temp_log_env) / "export.tar.gz").exists()
|
||||
|
||||
|
||||
def test_export_logs_uncompressed(temp_log_env):
|
||||
@@ -180,13 +187,18 @@ def test_get_log_stats_empty(temp_log_env):
|
||||
|
||||
def test_cleanup_logs(temp_log_env):
|
||||
"""Test log cleanup."""
|
||||
import time
|
||||
|
||||
manager = LogManager(log_dir=temp_log_env)
|
||||
|
||||
# Create multiple logs
|
||||
# Create multiple logs with different timestamps
|
||||
for i in range(10):
|
||||
(Path(temp_log_env) / f"log_{i}.log").write_text("x" * 1000)
|
||||
log_file = Path(temp_log_env) / f"log_{i}.log"
|
||||
log_file.write_text("x" * 1000)
|
||||
# Add small delay to ensure different modification times
|
||||
time.sleep(0.01)
|
||||
|
||||
deleted = manager.cleanup_logs(max_total_size_mb=0.01, keep_files=2)
|
||||
deleted = manager.cleanup_logs(max_total_size_mb=0.001, keep_files=2)
|
||||
|
||||
assert deleted > 0
|
||||
|
||||
|
||||
@@ -56,8 +56,12 @@ async def test_get_queue_metrics_empty():
|
||||
mock_db = AsyncMock()
|
||||
|
||||
# Mock empty result
|
||||
mock_scalars = AsyncMock()
|
||||
mock_scalars.all = MagicMock(return_value=[])
|
||||
|
||||
mock_result = AsyncMock()
|
||||
mock_result.scalars().all.return_value = []
|
||||
mock_result.scalars = MagicMock(return_value=mock_scalars)
|
||||
|
||||
mock_db.execute = AsyncMock(return_value=mock_result)
|
||||
|
||||
metrics = await service.get_queue_metrics(mock_db)
|
||||
@@ -93,8 +97,12 @@ async def test_get_queue_metrics_with_items():
|
||||
item3.download_speed = None
|
||||
|
||||
# Mock result
|
||||
mock_scalars = AsyncMock()
|
||||
mock_scalars.all = MagicMock(return_value=[item1, item2, item3])
|
||||
|
||||
mock_result = AsyncMock()
|
||||
mock_result.scalars().all.return_value = [item1, item2, item3]
|
||||
mock_result.scalars = MagicMock(return_value=mock_scalars)
|
||||
|
||||
mock_db.execute = AsyncMock(return_value=mock_result)
|
||||
|
||||
metrics = await service.get_queue_metrics(mock_db)
|
||||
@@ -201,8 +209,12 @@ async def test_get_comprehensive_status():
|
||||
mock_db = AsyncMock()
|
||||
|
||||
# Mock empty queue
|
||||
mock_scalars = AsyncMock()
|
||||
mock_scalars.all = MagicMock(return_value=[])
|
||||
|
||||
mock_result = AsyncMock()
|
||||
mock_result.scalars().all.return_value = []
|
||||
mock_result.scalars = MagicMock(return_value=mock_scalars)
|
||||
|
||||
mock_db.execute = AsyncMock(return_value=mock_result)
|
||||
|
||||
status = await service.get_comprehensive_status(mock_db)
|
||||
|
||||
@@ -102,14 +102,14 @@ class TestSeriesAppSearch:
|
||||
{"key": "anime1", "name": "Anime 1"},
|
||||
{"key": "anime2", "name": "Anime 2"}
|
||||
]
|
||||
app.loader.Search = Mock(return_value=expected_results)
|
||||
app.loader.search = Mock(return_value=expected_results)
|
||||
|
||||
# Perform search
|
||||
results = app.search("test anime")
|
||||
|
||||
# Verify results
|
||||
assert results == expected_results
|
||||
app.loader.Search.assert_called_once_with("test anime")
|
||||
app.loader.search.assert_called_once_with("test anime")
|
||||
|
||||
@patch('src.core.SeriesApp.Loaders')
|
||||
@patch('src.core.SeriesApp.SerieScanner')
|
||||
@@ -123,7 +123,7 @@ class TestSeriesAppSearch:
|
||||
app = SeriesApp(test_dir, error_callback=error_callback)
|
||||
|
||||
# Make search raise an exception
|
||||
app.loader.Search = Mock(
|
||||
app.loader.search = Mock(
|
||||
side_effect=RuntimeError("Search failed")
|
||||
)
|
||||
|
||||
@@ -148,7 +148,7 @@ class TestSeriesAppDownload:
|
||||
app = SeriesApp(test_dir)
|
||||
|
||||
# Mock download
|
||||
app.loader.Download = Mock()
|
||||
app.loader.download = Mock()
|
||||
|
||||
# Perform download
|
||||
result = app.download(
|
||||
@@ -163,7 +163,7 @@ class TestSeriesAppDownload:
|
||||
assert "Successfully downloaded" in result.message
|
||||
# After successful completion, finally block resets operation
|
||||
assert app._current_operation is None
|
||||
app.loader.Download.assert_called_once()
|
||||
app.loader.download.assert_called_once()
|
||||
|
||||
@patch('src.core.SeriesApp.Loaders')
|
||||
@patch('src.core.SeriesApp.SerieScanner')
|
||||
@@ -182,7 +182,7 @@ class TestSeriesAppDownload:
|
||||
callback(0.5)
|
||||
callback(1.0)
|
||||
|
||||
app.loader.Download = Mock(side_effect=mock_download)
|
||||
app.loader.download = Mock(side_effect=mock_download)
|
||||
progress_callback = Mock()
|
||||
|
||||
# Perform download
|
||||
@@ -215,7 +215,7 @@ class TestSeriesAppDownload:
|
||||
# Simulate cancellation by raising InterruptedError
|
||||
raise InterruptedError("Download cancelled by user")
|
||||
|
||||
app.loader.Download = Mock(side_effect=mock_download_cancelled)
|
||||
app.loader.download = Mock(side_effect=mock_download_cancelled)
|
||||
|
||||
# Set cancel flag before calling (will be reset by download())
|
||||
# but the mock will raise InterruptedError anyway
|
||||
@@ -246,7 +246,7 @@ class TestSeriesAppDownload:
|
||||
app = SeriesApp(test_dir, error_callback=error_callback)
|
||||
|
||||
# Make download fail
|
||||
app.loader.Download = Mock(
|
||||
app.loader.download = Mock(
|
||||
side_effect=RuntimeError("Download failed")
|
||||
)
|
||||
|
||||
@@ -308,15 +308,15 @@ class TestSeriesAppReScan:
|
||||
app = SeriesApp(test_dir, progress_callback=progress_callback)
|
||||
|
||||
# Mock scanner
|
||||
app.SerieScanner.GetTotalToScan = Mock(return_value=3)
|
||||
app.SerieScanner.Reinit = Mock()
|
||||
app.SerieScanner.get_total_to_scan = Mock(return_value=3)
|
||||
app.SerieScanner.reinit = Mock()
|
||||
|
||||
def mock_scan(callback):
|
||||
callback("folder1", 1)
|
||||
callback("folder2", 2)
|
||||
callback("folder3", 3)
|
||||
|
||||
app.SerieScanner.Scan = Mock(side_effect=mock_scan)
|
||||
app.SerieScanner.scan = Mock(side_effect=mock_scan)
|
||||
|
||||
# Perform rescan
|
||||
result = app.ReScan()
|
||||
@@ -336,14 +336,14 @@ class TestSeriesAppReScan:
|
||||
app = SeriesApp(test_dir)
|
||||
|
||||
# Mock scanner
|
||||
app.SerieScanner.GetTotalToScan = Mock(return_value=3)
|
||||
app.SerieScanner.Reinit = Mock()
|
||||
app.SerieScanner.get_total_to_scan = Mock(return_value=3)
|
||||
app.SerieScanner.reinit = Mock()
|
||||
|
||||
def mock_scan(callback):
|
||||
app._cancel_flag = True
|
||||
callback("folder1", 1)
|
||||
|
||||
app.SerieScanner.Scan = Mock(side_effect=mock_scan)
|
||||
app.SerieScanner.scan = Mock(side_effect=mock_scan)
|
||||
|
||||
# Perform rescan
|
||||
result = app.ReScan()
|
||||
|
||||
Reference in New Issue
Block a user