migration removed

This commit is contained in:
Lukas 2025-12-10 21:12:34 +01:00
parent 99f79e4c29
commit 842f9c88eb
25 changed files with 2 additions and 3862 deletions

Binary file not shown.

View File

@@ -17,7 +17,7 @@
     "keep_days": 30
   },
   "other": {
-    "master_password_hash": "$pbkdf2-sha256$29000$Nyak1Np7j1Gq9V5rLUUoxQ$9/v2NQ9x2YcJ7N8aEgMVET24CO0ND3dWiGythcUgrJs",
+    "master_password_hash": "$pbkdf2-sha256$29000$DgFgDIHwfk/p/X.PEULIGQ$baPkp2MQxqv8yolTjZ5Ks0fIl9g/Eer3YBE1jjR6qjc",
     "anime_directory": "/home/lukas/Volume/serien/"
   },
   "version": "1.0.0"

View File

@@ -249,17 +249,6 @@ The legacy file-based storage is **deprecated** and will be removed in v3.0.0:
 Deprecation warnings are raised when using these methods.
-### Data Migration
-
-On application startup, the system automatically migrates legacy data files to the database:
-
-1. **Scan**: `DataMigrationService.scan_for_data_files()` finds legacy `data` files
-2. **Migrate**: `DataMigrationService.migrate_data_file()` imports each file to DB
-3. **Skip**: Existing series (by key) are skipped; changed episode data is updated
-4. **Log**: Migration results are logged at startup
-
-Migration is idempotent and safe to run multiple times.
 ## Core Services
 ### SeriesApp (`src/core/SeriesApp.py`)
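Note: for reference, the documented flow removed above reduces to a few calls. A minimal sketch, assuming the `DataMigrationService` and `get_db_session` APIs that this commit also deletes (setup checks and logging omitted):

```python
# Sketch of the removed startup migration flow (simplified).
from src.server.database.connection import get_db_session
from src.server.services.data_migration_service import (
    get_data_migration_service,
)


async def migrate_legacy_data(anime_directory: str) -> None:
    service = get_data_migration_service()
    # Idempotent: repeated runs find nothing new to migrate.
    if not service.is_migration_needed(anime_directory):
        return
    async with get_db_session() as db:
        result = await service.migrate_all(anime_directory, db)
    print(
        f"{result.migrated} migrated, "
        f"{result.skipped} skipped, {result.failed} failed"
    )
```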

View File

@@ -67,24 +67,7 @@ async def setup_auth(req: SetupRequest):
         # Save the config with the password hash and anime directory
         config_service.save_config(config, create_backup=False)
-        # Run migration if anime directory was provided
-        response = {"status": "ok"}
-        if anime_directory:
-            from src.server.services.startup_migration import (
-                run_migration_for_directory,
-            )
-            migration_result = await run_migration_for_directory(
-                anime_directory
-            )
-            if migration_result:
-                response["migration"] = {
-                    "total_found": migration_result.total_found,
-                    "migrated": migration_result.migrated,
-                    "skipped": migration_result.skipped,
-                    "failed": migration_result.failed,
-                }
-        return response
+        return {"status": "ok"}
     except ValueError as e:
         raise HTTPException(status_code=400, detail=str(e)) from e

View File

@@ -239,22 +239,10 @@ async def update_directory(
         config_service.save_config(app_config)
-        # Run migration for the new directory
-        from src.server.services.startup_migration import run_migration_for_directory
-        migration_result = await run_migration_for_directory(directory)
         response: Dict[str, Any] = {
             "message": "Anime directory updated successfully"
         }
-        if migration_result:
-            response["migration"] = {
-                "total_found": migration_result.total_found,
-                "migrated": migration_result.migrated,
-                "skipped": migration_result.skipped,
-                "failed": migration_result.failed,
-            }
         return response
     except ConfigServiceError as e:
         raise HTTPException(

View File

@@ -30,7 +30,6 @@ from src.server.database.init import (
     create_database_backup,
     create_database_schema,
     get_database_info,
-    get_migration_guide,
     get_schema_version,
     initialize_database,
     seed_initial_data,
@@ -64,7 +63,6 @@ __all__ = [
     "check_database_health",
     "create_database_backup",
     "get_database_info",
-    "get_migration_guide",
     "CURRENT_SCHEMA_VERSION",
     "EXPECTED_TABLES",
     # Models

View File

@@ -2,12 +2,9 @@
 This module provides comprehensive database initialization functionality:
 - Schema creation and validation
-- Initial data migration
 - Database health checks
 - Schema versioning support
-- Migration utilities
-For production deployments, consider using Alembic for managed migrations.
 """
 from __future__ import annotations
@@ -354,8 +351,6 @@ async def create_schema_version_table(
 ) -> None:
     """Create schema version tracking table.
-
-    Future enhancement for tracking schema migrations with Alembic.
     Args:
         engine: Optional database engine (uses default if not provided)
     """
@@ -587,60 +582,6 @@ def get_database_info() -> Dict[str, Any]:
     }
-
-def get_migration_guide() -> str:
-    """Get migration guide for production deployments.
-
-    Returns:
-        Migration guide text
-    """
-    return """
-Database Migration Guide
-========================
-
-Current Setup: SQLAlchemy create_all()
-- Automatically creates tables on startup
-- Suitable for development and single-instance deployments
-- Schema changes require manual handling
-
-For Production with Alembic:
-============================
-
-1. Initialize Alembic (already installed):
-   alembic init alembic
-
-2. Configure alembic/env.py:
-   from src.server.database.base import Base
-   target_metadata = Base.metadata
-
-3. Configure alembic.ini:
-   sqlalchemy.url = <your-database-url>
-
-4. Generate initial migration:
-   alembic revision --autogenerate -m "Initial schema v1.0.0"
-
-5. Review migration in alembic/versions/
-
-6. Apply migration:
-   alembic upgrade head
-
-7. For future schema changes:
-   - Modify models in src/server/database/models.py
-   - Generate migration: alembic revision --autogenerate -m "Description"
-   - Review generated migration
-   - Test in staging environment
-   - Apply: alembic upgrade head
-   - For rollback: alembic downgrade -1
-
-Best Practices:
-==============
-- Always backup database before migrations
-- Test migrations in staging first
-- Review auto-generated migrations carefully
-- Keep migrations in version control
-- Document breaking changes
-"""
 # =============================================================================
 # Public API
 # =============================================================================
@@ -656,7 +597,6 @@ __all__ = [
     "check_database_health",
     "create_database_backup",
     "get_database_info",
-    "get_migration_guide",
     "CURRENT_SCHEMA_VERSION",
     "EXPECTED_TABLES",
 ]
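Note: step 2 of the deleted guide amounts to pointing Alembic's `env.py` at the project metadata. A minimal sketch of that wiring, assuming Alembic's standard online-mode template (offline mode omitted for brevity):

```python
# alembic/env.py — minimal wiring per step 2 of the removed guide.
from alembic import context
from sqlalchemy import engine_from_config, pool

from src.server.database.base import Base

config = context.config
target_metadata = Base.metadata


def run_migrations_online() -> None:
    # Build an engine from the [alembic] section of alembic.ini.
    connectable = engine_from_config(
        config.get_section(config.config_ini_section) or {},
        prefix="sqlalchemy.",
        poolclass=pool.NullPool,
    )
    with connectable.connect() as connection:
        context.configure(
            connection=connection, target_metadata=target_metadata
        )
        with context.begin_transaction():
            context.run_migrations()


run_migrations_online()
```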

View File

@@ -1,167 +0,0 @@
"""Database migration utilities.
This module provides utilities for database migrations and schema versioning.
Alembic integration can be added when needed for production environments.
For now, we use SQLAlchemy's create_all for automatic schema creation.
"""
from __future__ import annotations
import logging
from typing import Optional
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncEngine
from src.server.database.base import Base
from src.server.database.connection import get_engine, get_sync_engine
logger = logging.getLogger(__name__)
async def initialize_schema(engine: Optional[AsyncEngine] = None) -> None:
"""Initialize database schema.
Creates all tables defined in Base metadata if they don't exist.
This is a simple migration strategy suitable for single-instance deployments.
For production with multiple instances, consider using Alembic:
- alembic init alembic
- alembic revision --autogenerate -m "Initial schema"
- alembic upgrade head
Args:
engine: Optional database engine (uses default if not provided)
Raises:
RuntimeError: If database is not initialized
"""
if engine is None:
engine = get_engine()
logger.info("Initializing database schema...")
# Create all tables
async with engine.begin() as conn:
await conn.run_sync(Base.metadata.create_all)
logger.info("Database schema initialized successfully")
async def check_schema_version(engine: Optional[AsyncEngine] = None) -> str:
"""Check current database schema version.
Returns a simple version identifier based on existing tables.
For production, consider using Alembic for proper versioning.
Args:
engine: Optional database engine (uses default if not provided)
Returns:
Schema version string
Raises:
RuntimeError: If database is not initialized
"""
if engine is None:
engine = get_engine()
async with engine.connect() as conn:
# Check which tables exist
result = await conn.execute(
text(
"SELECT name FROM sqlite_master "
"WHERE type='table' AND name NOT LIKE 'sqlite_%'"
)
)
tables = [row[0] for row in result]
if not tables:
return "empty"
elif len(tables) == 4 and all(
t in tables for t in [
"anime_series",
"episodes",
"download_queue",
"user_sessions",
]
):
return "v1.0"
else:
return "custom"
def get_migration_info() -> str:
"""Get information about database migration setup.
Returns:
Migration setup information
"""
return """
Database Migration Information
==============================
Current Strategy: SQLAlchemy create_all()
- Automatically creates tables on startup
- Suitable for development and single-instance deployments
- Schema changes require manual handling
For Production Migrations (Alembic):
====================================
1. Initialize Alembic:
alembic init alembic
2. Configure alembic/env.py:
- Import Base from src.server.database.base
- Set target_metadata = Base.metadata
3. Configure alembic.ini:
- Set sqlalchemy.url to your database URL
4. Generate initial migration:
alembic revision --autogenerate -m "Initial schema"
5. Apply migrations:
alembic upgrade head
6. For future changes:
- Modify models in src/server/database/models.py
- Generate migration: alembic revision --autogenerate -m "Description"
- Review generated migration in alembic/versions/
- Apply: alembic upgrade head
Benefits of Alembic:
- Version control for database schema
- Automatic migration generation from model changes
- Rollback support with downgrade scripts
- Multi-instance deployment support
- Safe schema changes in production
"""
# =============================================================================
# Future Alembic Integration
# =============================================================================
#
# When ready to use Alembic, follow these steps:
#
# 1. Install Alembic (already in requirements.txt):
# pip install alembic
#
# 2. Initialize Alembic from project root:
# alembic init alembic
#
# 3. Update alembic/env.py to use our Base:
# from src.server.database.base import Base
# target_metadata = Base.metadata
#
# 4. Configure alembic.ini with DATABASE_URL from settings
#
# 5. Generate initial migration:
# alembic revision --autogenerate -m "Initial schema"
#
# 6. Review generated migration and apply:
# alembic upgrade head
#
# =============================================================================
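Note: the CLI steps listed in the deleted comments can also be driven from Python; `alembic.config.Config` and `alembic.command` are Alembic's documented programmatic entry points (the ini path here is illustrative):

```python
# Running the removed guide's CLI steps programmatically.
from alembic import command
from alembic.config import Config

cfg = Config("alembic.ini")  # assumes an initialized Alembic setup

# Equivalent of `alembic upgrade head`
command.upgrade(cfg, "head")

# Equivalent of `alembic downgrade -1` (roll back one revision)
command.downgrade(cfg, "-1")
```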

View File

@@ -1,236 +0,0 @@
"""
Initial database schema migration.
This migration creates the base tables for the Aniworld application,
including users, anime, downloads, and configuration tables.
Version: 20250124_001
Created: 2025-01-24
"""
import logging
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
from ..migrations.base import Migration, MigrationError
logger = logging.getLogger(__name__)
class InitialSchemaMigration(Migration):
"""
Creates initial database schema.
This migration sets up all core tables needed for the application:
- users: User accounts and authentication
- anime: Anime series metadata
- episodes: Episode information
- downloads: Download queue and history
- config: Application configuration
"""
def __init__(self):
"""Initialize the initial schema migration."""
super().__init__(
version="20250124_001",
description="Create initial database schema",
)
async def upgrade(self, session: AsyncSession) -> None:
"""
Create all initial tables.
Args:
session: Database session
Raises:
MigrationError: If table creation fails
"""
try:
# Create users table
await session.execute(
text(
"""
CREATE TABLE IF NOT EXISTS users (
id INTEGER PRIMARY KEY AUTOINCREMENT,
username TEXT NOT NULL UNIQUE,
email TEXT,
password_hash TEXT NOT NULL,
is_active BOOLEAN DEFAULT 1,
is_admin BOOLEAN DEFAULT 0,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
)
"""
)
)
# Create anime table
await session.execute(
text(
"""
CREATE TABLE IF NOT EXISTS anime (
id INTEGER PRIMARY KEY AUTOINCREMENT,
title TEXT NOT NULL,
original_title TEXT,
description TEXT,
genres TEXT,
release_year INTEGER,
status TEXT,
total_episodes INTEGER,
cover_image_url TEXT,
aniworld_url TEXT,
mal_id INTEGER,
anilist_id INTEGER,
added_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
)
"""
)
)
# Create episodes table
await session.execute(
text(
"""
CREATE TABLE IF NOT EXISTS episodes (
id INTEGER PRIMARY KEY AUTOINCREMENT,
anime_id INTEGER NOT NULL,
episode_number INTEGER NOT NULL,
season_number INTEGER DEFAULT 1,
title TEXT,
description TEXT,
duration_minutes INTEGER,
air_date DATE,
stream_url TEXT,
download_url TEXT,
file_path TEXT,
file_size_bytes INTEGER,
is_downloaded BOOLEAN DEFAULT 0,
download_progress REAL DEFAULT 0.0,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (anime_id) REFERENCES anime(id)
ON DELETE CASCADE,
UNIQUE (anime_id, season_number, episode_number)
)
"""
)
)
# Create downloads table
await session.execute(
text(
"""
CREATE TABLE IF NOT EXISTS downloads (
id INTEGER PRIMARY KEY AUTOINCREMENT,
episode_id INTEGER NOT NULL,
user_id INTEGER,
status TEXT NOT NULL DEFAULT 'pending',
priority INTEGER DEFAULT 5,
progress REAL DEFAULT 0.0,
download_speed_mbps REAL,
eta_seconds INTEGER,
started_at TIMESTAMP,
completed_at TIMESTAMP,
failed_at TIMESTAMP,
error_message TEXT,
retry_count INTEGER DEFAULT 0,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
FOREIGN KEY (episode_id) REFERENCES episodes(id)
ON DELETE CASCADE,
FOREIGN KEY (user_id) REFERENCES users(id)
ON DELETE SET NULL
)
"""
)
)
# Create config table
await session.execute(
text(
"""
CREATE TABLE IF NOT EXISTS config (
id INTEGER PRIMARY KEY AUTOINCREMENT,
key TEXT NOT NULL UNIQUE,
value TEXT NOT NULL,
category TEXT DEFAULT 'general',
description TEXT,
is_secret BOOLEAN DEFAULT 0,
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
)
"""
)
)
# Create indexes for better performance
await session.execute(
text(
"CREATE INDEX IF NOT EXISTS idx_anime_title "
"ON anime(title)"
)
)
await session.execute(
text(
"CREATE INDEX IF NOT EXISTS idx_episodes_anime_id "
"ON episodes(anime_id)"
)
)
await session.execute(
text(
"CREATE INDEX IF NOT EXISTS idx_downloads_status "
"ON downloads(status)"
)
)
await session.execute(
text(
"CREATE INDEX IF NOT EXISTS "
"idx_downloads_episode_id ON downloads(episode_id)"
)
)
logger.info("Initial schema created successfully")
except Exception as e:
logger.error(f"Failed to create initial schema: {e}")
raise MigrationError(
f"Initial schema creation failed: {e}"
) from e
async def downgrade(self, session: AsyncSession) -> None:
"""
Drop all initial tables.
Args:
session: Database session
Raises:
MigrationError: If table dropping fails
"""
try:
# Drop tables in reverse order to respect foreign keys
tables = [
"downloads",
"episodes",
"anime",
"users",
"config",
]
for table in tables:
await session.execute(text(f"DROP TABLE IF EXISTS {table}"))
logger.debug(f"Dropped table: {table}")
logger.info("Initial schema rolled back successfully")
except Exception as e:
logger.error(f"Failed to rollback initial schema: {e}")
raise MigrationError(
f"Initial schema rollback failed: {e}"
) from e

View File

@@ -1,17 +0,0 @@
"""
Database migration system for Aniworld application.
This package provides tools for managing database schema changes,
including migration creation, execution, and rollback capabilities.
"""
from .base import Migration, MigrationError
from .runner import MigrationRunner
from .validator import MigrationValidator
__all__ = [
"Migration",
"MigrationError",
"MigrationRunner",
"MigrationValidator",
]
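Note: a sketch of how the package's exported classes were meant to compose, using only APIs shown in the deleted files below; the session and migrations directory are assumed to be provided by the caller:

```python
# End-to-end sketch using the removed package's public classes.
from pathlib import Path

from sqlalchemy.ext.asyncio import AsyncSession

from src.server.database.migrations import (
    MigrationRunner,
    MigrationValidator,
)


async def upgrade_database(
    session: AsyncSession, migrations_dir: Path
) -> int:
    runner = MigrationRunner(migrations_dir, session)
    await runner.initialize()    # creates the migration_history table
    runner.load_migrations()     # imports {version}_{description}.py files

    validator = MigrationValidator()
    pending = await runner.get_pending_migrations()
    validator.validate_migrations(pending)
    validator.raise_if_invalid()  # aborts on any validation error

    return await runner.run_migrations()  # count of applied migrations
```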

View File

@@ -1,128 +0,0 @@
"""
Base migration classes and utilities.
This module provides the foundation for database migrations,
including the abstract Migration class and error handling.
"""
from abc import ABC, abstractmethod
from datetime import datetime
from typing import Optional
from sqlalchemy.ext.asyncio import AsyncSession
class MigrationError(Exception):
"""Base exception for migration-related errors."""
pass
class Migration(ABC):
"""
Abstract base class for database migrations.
Each migration should inherit from this class and implement
the upgrade and downgrade methods.
Attributes:
version: Unique version identifier (e.g., "20250124_001")
description: Human-readable description of the migration
created_at: Timestamp when migration was created
"""
def __init__(
self,
version: str,
description: str,
created_at: Optional[datetime] = None,
):
"""
Initialize migration.
Args:
version: Unique version identifier
description: Human-readable description
created_at: Creation timestamp (defaults to now)
"""
self.version = version
self.description = description
self.created_at = created_at or datetime.now()
@abstractmethod
async def upgrade(self, session: AsyncSession) -> None:
"""
Apply the migration.
Args:
session: Database session for executing changes
Raises:
MigrationError: If migration fails
"""
pass
@abstractmethod
async def downgrade(self, session: AsyncSession) -> None:
"""
Revert the migration.
Args:
session: Database session for reverting changes
Raises:
MigrationError: If rollback fails
"""
pass
def __repr__(self) -> str:
"""Return string representation of migration."""
return f"Migration({self.version}: {self.description})"
def __eq__(self, other: object) -> bool:
"""Check equality based on version."""
if not isinstance(other, Migration):
return False
return self.version == other.version
def __hash__(self) -> int:
"""Return hash based on version."""
return hash(self.version)
class MigrationHistory:
"""
Tracks applied migrations in the database.
This model stores information about which migrations have been
applied, when they were applied, and their execution status.
"""
__tablename__ = "migration_history"
def __init__(
self,
version: str,
description: str,
applied_at: datetime,
execution_time_ms: int,
success: bool = True,
error_message: Optional[str] = None,
):
"""
Initialize migration history record.
Args:
version: Migration version identifier
description: Migration description
applied_at: Timestamp when migration was applied
execution_time_ms: Time taken to execute in milliseconds
success: Whether migration succeeded
error_message: Error message if migration failed
"""
self.version = version
self.description = description
self.applied_at = applied_at
self.execution_time_ms = execution_time_ms
self.success = success
self.error_message = error_message
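Note: a minimal concrete migration against the `Migration` ABC above; the table, version string, and import path are illustrative, not taken from the codebase:

```python
# Minimal example subclass of the Migration base class above.
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession

from src.server.database.migrations.base import Migration


class AddTagsTableMigration(Migration):
    """Illustrative migration: adds a simple tags table."""

    def __init__(self) -> None:
        super().__init__(
            version="20250125_001",
            description="Create tags table",
        )

    async def upgrade(self, session: AsyncSession) -> None:
        await session.execute(
            text(
                "CREATE TABLE IF NOT EXISTS tags ("
                "id INTEGER PRIMARY KEY AUTOINCREMENT, "
                "name TEXT NOT NULL UNIQUE)"
            )
        )

    async def downgrade(self, session: AsyncSession) -> None:
        await session.execute(text("DROP TABLE IF EXISTS tags"))
```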

View File

@@ -1,323 +0,0 @@
"""
Migration runner for executing database migrations.
This module handles the execution of migrations in the correct order,
tracks migration history, and provides rollback capabilities.
"""
import importlib.util
import logging
import time
from datetime import datetime
from pathlib import Path
from typing import List, Optional
from sqlalchemy import text
from sqlalchemy.ext.asyncio import AsyncSession
from .base import Migration, MigrationError, MigrationHistory
logger = logging.getLogger(__name__)
class MigrationRunner:
"""
Manages database migration execution and tracking.
This class handles loading migrations, executing them in order,
tracking their status, and rolling back when needed.
"""
def __init__(self, migrations_dir: Path, session: AsyncSession):
"""
Initialize migration runner.
Args:
migrations_dir: Directory containing migration files
session: Database session for executing migrations
"""
self.migrations_dir = migrations_dir
self.session = session
self._migrations: List[Migration] = []
async def initialize(self) -> None:
"""
Initialize migration system by creating tracking table if needed.
Raises:
MigrationError: If initialization fails
"""
try:
# Create migration_history table if it doesn't exist
create_table_sql = """
CREATE TABLE IF NOT EXISTS migration_history (
id INTEGER PRIMARY KEY AUTOINCREMENT,
version TEXT NOT NULL UNIQUE,
description TEXT NOT NULL,
applied_at TIMESTAMP NOT NULL,
execution_time_ms INTEGER NOT NULL,
success BOOLEAN NOT NULL DEFAULT 1,
error_message TEXT
)
"""
await self.session.execute(text(create_table_sql))
await self.session.commit()
logger.info("Migration system initialized")
except Exception as e:
logger.error(f"Failed to initialize migration system: {e}")
raise MigrationError(f"Initialization failed: {e}") from e
def load_migrations(self) -> None:
"""
Load all migration files from the migrations directory.
Migration files should be named in format: {version}_{description}.py
and contain a Migration class that inherits from base.Migration.
Raises:
MigrationError: If loading migrations fails
"""
try:
self._migrations.clear()
if not self.migrations_dir.exists():
logger.warning(f"Migrations directory does not exist: {self.migrations_dir}")
return
# Find all Python files in migrations directory
migration_files = sorted(self.migrations_dir.glob("*.py"))
migration_files = [f for f in migration_files if f.name != "__init__.py"]
for file_path in migration_files:
try:
# Import the migration module dynamically
spec = importlib.util.spec_from_file_location(
f"migration.{file_path.stem}", file_path
)
if spec and spec.loader:
module = importlib.util.module_from_spec(spec)
spec.loader.exec_module(module)
# Find Migration subclass in module
for attr_name in dir(module):
attr = getattr(module, attr_name)
if (
isinstance(attr, type)
and issubclass(attr, Migration)
and attr != Migration
):
migration_instance = attr()
self._migrations.append(migration_instance)
logger.debug(f"Loaded migration: {migration_instance.version}")
break
except Exception as e:
logger.error(f"Failed to load migration {file_path.name}: {e}")
raise MigrationError(f"Failed to load {file_path.name}: {e}") from e
# Sort migrations by version
self._migrations.sort(key=lambda m: m.version)
logger.info(f"Loaded {len(self._migrations)} migrations")
except Exception as e:
logger.error(f"Failed to load migrations: {e}")
raise MigrationError(f"Loading migrations failed: {e}") from e
async def get_applied_migrations(self) -> List[str]:
"""
Get list of already applied migration versions.
Returns:
List of migration versions that have been applied
Raises:
MigrationError: If query fails
"""
try:
result = await self.session.execute(
text("SELECT version FROM migration_history WHERE success = 1 ORDER BY version")
)
versions = [row[0] for row in result.fetchall()]
return versions
except Exception as e:
logger.error(f"Failed to get applied migrations: {e}")
raise MigrationError(f"Query failed: {e}") from e
async def get_pending_migrations(self) -> List[Migration]:
"""
Get list of migrations that haven't been applied yet.
Returns:
List of pending Migration objects
Raises:
MigrationError: If check fails
"""
applied = await self.get_applied_migrations()
pending = [m for m in self._migrations if m.version not in applied]
return pending
async def apply_migration(self, migration: Migration) -> None:
"""
Apply a single migration.
Args:
migration: Migration to apply
Raises:
MigrationError: If migration fails
"""
start_time = time.time()
success = False
error_message = None
try:
logger.info(f"Applying migration: {migration.version} - {migration.description}")
# Execute the migration
await migration.upgrade(self.session)
await self.session.commit()
success = True
execution_time_ms = int((time.time() - start_time) * 1000)
logger.info(
f"Migration {migration.version} applied successfully in {execution_time_ms}ms"
)
except Exception as e:
error_message = str(e)
execution_time_ms = int((time.time() - start_time) * 1000)
logger.error(f"Migration {migration.version} failed: {e}")
await self.session.rollback()
raise MigrationError(f"Migration {migration.version} failed: {e}") from e
finally:
# Record migration in history
try:
history_record = MigrationHistory(
version=migration.version,
description=migration.description,
applied_at=datetime.now(),
execution_time_ms=execution_time_ms,
success=success,
error_message=error_message,
)
insert_sql = """
INSERT INTO migration_history
(version, description, applied_at, execution_time_ms, success, error_message)
VALUES (:version, :description, :applied_at, :execution_time_ms, :success, :error_message)
"""
await self.session.execute(
text(insert_sql),
{
"version": history_record.version,
"description": history_record.description,
"applied_at": history_record.applied_at,
"execution_time_ms": history_record.execution_time_ms,
"success": history_record.success,
"error_message": history_record.error_message,
},
)
await self.session.commit()
except Exception as e:
logger.error(f"Failed to record migration history: {e}")
async def run_migrations(self, target_version: Optional[str] = None) -> int:
"""
Run all pending migrations up to target version.
Args:
target_version: Stop at this version (None = run all)
Returns:
Number of migrations applied
Raises:
MigrationError: If migrations fail
"""
pending = await self.get_pending_migrations()
if target_version:
pending = [m for m in pending if m.version <= target_version]
if not pending:
logger.info("No pending migrations to apply")
return 0
logger.info(f"Applying {len(pending)} pending migrations")
for migration in pending:
await self.apply_migration(migration)
return len(pending)
async def rollback_migration(self, migration: Migration) -> None:
"""
Rollback a single migration.
Args:
migration: Migration to rollback
Raises:
MigrationError: If rollback fails
"""
start_time = time.time()
try:
logger.info(f"Rolling back migration: {migration.version}")
# Execute the downgrade
await migration.downgrade(self.session)
await self.session.commit()
execution_time_ms = int((time.time() - start_time) * 1000)
# Remove from history
delete_sql = "DELETE FROM migration_history WHERE version = :version"
await self.session.execute(text(delete_sql), {"version": migration.version})
await self.session.commit()
logger.info(
f"Migration {migration.version} rolled back successfully in {execution_time_ms}ms"
)
except Exception as e:
logger.error(f"Rollback of {migration.version} failed: {e}")
await self.session.rollback()
raise MigrationError(f"Rollback of {migration.version} failed: {e}") from e
async def rollback(self, steps: int = 1) -> int:
"""
Rollback the last N migrations.
Args:
steps: Number of migrations to rollback
Returns:
Number of migrations rolled back
Raises:
MigrationError: If rollback fails
"""
applied = await self.get_applied_migrations()
if not applied:
logger.info("No migrations to rollback")
return 0
# Get migrations to rollback (in reverse order)
to_rollback = applied[-steps:]
to_rollback.reverse()
# Resolve Migration objects in the same newest-first order
by_version = {m.version: m for m in self._migrations}
migrations_to_rollback = [by_version[v] for v in to_rollback if v in by_version]
logger.info(f"Rolling back {len(migrations_to_rollback)} migrations")
for migration in migrations_to_rollback:
await self.rollback_migration(migration)
return len(migrations_to_rollback)
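Note: a short sketch of the runner's two less obvious parameters, `target_version` and `steps`; the import path, versions directory, and version string are assumptions based on the package layout shown in this commit:

```python
# Sketch: apply migrations up to a pinned version, then revert one.
from pathlib import Path

from sqlalchemy.ext.asyncio import AsyncSession

from src.server.database.migrations import MigrationRunner


async def pin_and_revert(session: AsyncSession) -> None:
    runner = MigrationRunner(Path("migrations"), session)
    await runner.initialize()
    runner.load_migrations()

    # Apply pending migrations up to (and including) this version.
    applied = await runner.run_migrations(target_version="20250124_001")
    print(f"Applied {applied} migration(s)")

    # rollback() replays applied history in reverse, newest first.
    rolled_back = await runner.rollback(steps=1)
    print(f"Rolled back {rolled_back} migration(s)")
```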

View File

@@ -1,222 +0,0 @@
"""
Migration validator for ensuring migration safety and integrity.
This module provides validation utilities to check migrations
before they are executed, ensuring they meet quality standards.
"""
import logging
from typing import List, Optional, Set
from .base import Migration, MigrationError
logger = logging.getLogger(__name__)
class MigrationValidator:
"""
Validates migrations before execution.
Performs various checks to ensure migrations are safe to run,
including version uniqueness, naming conventions, and
dependency resolution.
"""
def __init__(self):
"""Initialize migration validator."""
self.errors: List[str] = []
self.warnings: List[str] = []
def reset(self) -> None:
"""Clear validation results."""
self.errors.clear()
self.warnings.clear()
def validate_migration(self, migration: Migration) -> bool:
"""
Validate a single migration.
Args:
migration: Migration to validate
Returns:
True if migration is valid, False otherwise
"""
self.reset()
# Check version format
if not self._validate_version_format(migration.version):
self.errors.append(
f"Invalid version format: {migration.version}. "
"Expected format: YYYYMMDD_NNN"
)
# Check description
if not migration.description or len(migration.description) < 5:
self.errors.append(
f"Migration {migration.version} has invalid "
f"description: '{migration.description}'"
)
# Check for implementation
if not hasattr(migration, "upgrade") or not callable(
getattr(migration, "upgrade")
):
self.errors.append(
f"Migration {migration.version} missing upgrade method"
)
if not hasattr(migration, "downgrade") or not callable(
getattr(migration, "downgrade")
):
self.errors.append(
f"Migration {migration.version} missing downgrade method"
)
return len(self.errors) == 0
def validate_migrations(self, migrations: List[Migration]) -> bool:
"""
Validate a list of migrations.
Args:
migrations: List of migrations to validate
Returns:
True if all migrations are valid, False otherwise
"""
self.reset()
if not migrations:
self.warnings.append("No migrations to validate")
return True
# Check for duplicate versions
versions: Set[str] = set()
for migration in migrations:
if migration.version in versions:
self.errors.append(
f"Duplicate migration version: {migration.version}"
)
versions.add(migration.version)
# Return early if duplicates found
if self.errors:
return False
# Validate each migration
for migration in migrations:
if not self.validate_migration(migration):
logger.error(
f"Migration {migration.version} "
f"validation failed: {self.errors}"
)
return False
# Check version ordering
sorted_versions = sorted([m.version for m in migrations])
actual_versions = [m.version for m in migrations]
if sorted_versions != actual_versions:
self.warnings.append(
"Migrations are not in chronological order"
)
return len(self.errors) == 0
def _validate_version_format(self, version: str) -> bool:
"""
Validate version string format.
Args:
version: Version string to validate
Returns:
True if format is valid
"""
# Expected format: YYYYMMDD_NNN or YYYYMMDD_NNN_description
if not version:
return False
parts = version.split("_")
if len(parts) < 2:
return False
# Check date part (YYYYMMDD)
date_part = parts[0]
if len(date_part) != 8 or not date_part.isdigit():
return False
# Check sequence part (NNN)
seq_part = parts[1]
if not seq_part.isdigit():
return False
return True
def check_migration_conflicts(
self,
pending: List[Migration],
applied: List[str],
) -> Optional[str]:
"""
Check for conflicts between pending and applied migrations.
Args:
pending: List of pending migrations
applied: List of applied migration versions
Returns:
Error message if conflicts found, None otherwise
"""
# Check if any pending migration has version lower than applied
if not applied:
return None
latest_applied = max(applied)
for migration in pending:
if migration.version < latest_applied:
return (
f"Migration {migration.version} is older than "
f"latest applied migration {latest_applied}. "
"This may indicate a merge conflict."
)
return None
def get_validation_report(self) -> str:
"""
Get formatted validation report.
Returns:
Formatted report string
"""
report = []
if self.errors:
report.append("Validation Errors:")
for error in self.errors:
report.append(f" - {error}")
if self.warnings:
report.append("Validation Warnings:")
for warning in self.warnings:
report.append(f" - {warning}")
if not self.errors and not self.warnings:
report.append("All validations passed")
return "\n".join(report)
def raise_if_invalid(self) -> None:
"""
Raise exception if validation failed.
Raises:
MigrationError: If validation errors exist
"""
if self.errors:
error_msg = "\n".join(self.errors)
raise MigrationError(
f"Migration validation failed:\n{error_msg}"
)
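Note: a sketch of gating execution on the validator above; the import paths are assumed from the package layout in this commit, and `migrations` would typically be the runner's pending list:

```python
# Sketch: validate a batch of migrations before running them.
from typing import List

from src.server.database.migrations.base import Migration
from src.server.database.migrations.validator import MigrationValidator


def check(migrations: List[Migration]) -> None:
    validator = MigrationValidator()
    if not validator.validate_migrations(migrations):
        # Human-readable summary of collected errors and warnings.
        print(validator.get_validation_report())
    # Raises MigrationError if any errors were recorded.
    validator.raise_if_invalid()
```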

View File

@@ -76,23 +76,6 @@ async def lifespan(app: FastAPI):
     except Exception as e:
         logger.warning("Failed to load config from config.json: %s", e)
-    # Run data file to database migration
-    try:
-        from src.server.services.startup_migration import (
-            ensure_migration_on_startup,
-        )
-        migration_result = await ensure_migration_on_startup()
-        if migration_result:
-            logger.info(
-                "Data migration complete: %d migrated, %d skipped, %d failed",
-                migration_result.migrated,
-                migration_result.skipped,
-                migration_result.failed
-            )
-    except Exception as e:
-        logger.error("Data migration failed: %s", e, exc_info=True)
-        # Continue startup - migration failure should not block app
     # Initialize progress service with event subscription
     progress_service = get_progress_service()
     ws_service = get_websocket_service()

View File

@@ -1,436 +0,0 @@
"""Data migration service for migrating file-based storage to database.
This module provides functionality to migrate anime series data from
legacy file-based storage (data files without .json extension) to the
SQLite database using the AnimeSeries model.
The migration service:
- Scans anime directories for existing data files
- Reads Serie objects from data files
- Migrates them to the database using AnimeSeriesService
- Handles errors gracefully without stopping the migration
- Provides detailed migration results
"""
from __future__ import annotations
import logging
from dataclasses import dataclass, field
from pathlib import Path
from typing import List, Optional
from sqlalchemy.exc import IntegrityError
from sqlalchemy.ext.asyncio import AsyncSession
from src.core.entities.series import Serie
from src.server.database.service import AnimeSeriesService, EpisodeService
logger = logging.getLogger(__name__)
@dataclass
class MigrationResult:
"""Result of a data file migration operation.
Attributes:
total_found: Total number of data files found
migrated: Number of files successfully migrated
skipped: Number of files skipped (already in database)
failed: Number of files that failed to migrate
errors: List of error messages encountered
"""
total_found: int = 0
migrated: int = 0
skipped: int = 0
failed: int = 0
errors: List[str] = field(default_factory=list)
def __post_init__(self):
"""Ensure errors is always a list."""
if self.errors is None:
self.errors = []
class DataMigrationError(Exception):
"""Base exception for data migration errors."""
class DataFileReadError(DataMigrationError):
"""Raised when a data file cannot be read."""
class DataMigrationService:
"""Service for migrating data files to database.
This service handles the migration of anime series data from
file-based storage to the database. It scans directories for
data files, reads Serie objects, and creates AnimeSeries records.
Example:
```python
service = DataMigrationService()
# Check if migration is needed
if service.is_migration_needed("/path/to/anime"):
async with get_db_session() as db:
result = await service.migrate_all("/path/to/anime", db)
print(f"Migrated {result.migrated} series")
```
"""
def __init__(self) -> None:
"""Initialize the data migration service."""
pass
def scan_for_data_files(self, anime_directory: str) -> List[Path]:
"""Scan for data files in the anime directory.
Finds all 'data' files (JSON format without extension) in
the anime directory structure. Each series folder may contain
a 'data' file with series metadata.
Args:
anime_directory: Path to the anime directory containing
series folders
Returns:
List of Path objects pointing to data files
Raises:
ValueError: If anime_directory is invalid
"""
if not anime_directory or not anime_directory.strip():
logger.warning("Empty anime directory provided")
return []
base_path = Path(anime_directory)
if not base_path.exists():
logger.warning(
"Anime directory does not exist: %s",
anime_directory
)
return []
if not base_path.is_dir():
logger.warning(
"Anime directory is not a directory: %s",
anime_directory
)
return []
data_files: List[Path] = []
try:
# Iterate through all subdirectories (series folders)
for folder in base_path.iterdir():
if not folder.is_dir():
continue
# Check for 'data' file in each series folder
data_file = folder / "data"
if data_file.exists() and data_file.is_file():
data_files.append(data_file)
logger.debug("Found data file: %s", data_file)
except PermissionError as e:
logger.error(
"Permission denied scanning directory %s: %s",
anime_directory,
e
)
except OSError as e:
logger.error(
"OS error scanning directory %s: %s",
anime_directory,
e
)
logger.info(
"Found %d data files in %s",
len(data_files),
anime_directory
)
return data_files
def _read_data_file(self, data_path: Path) -> Optional[Serie]:
"""Read a Serie object from a data file.
Args:
data_path: Path to the data file
Returns:
Serie object if successfully read, None otherwise
Raises:
DataFileReadError: If the file cannot be read or parsed
"""
try:
serie = Serie.load_from_file(str(data_path))
# Validate the serie has required fields
if not serie.key or not serie.key.strip():
raise DataFileReadError(
f"Data file {data_path} has empty or missing key"
)
logger.debug(
"Successfully read serie '%s' from %s",
serie.key,
data_path
)
return serie
except FileNotFoundError as e:
raise DataFileReadError(
f"Data file not found: {data_path}"
) from e
except PermissionError as e:
raise DataFileReadError(
f"Permission denied reading data file: {data_path}"
) from e
except (ValueError, KeyError, TypeError) as e:
raise DataFileReadError(
f"Invalid data in file {data_path}: {e}"
) from e
except Exception as e:
raise DataFileReadError(
f"Error reading data file {data_path}: {e}"
) from e
async def migrate_data_file(
self,
data_path: Path,
db: AsyncSession
) -> bool:
"""Migrate a single data file to the database.
Reads the data file, checks if the series already exists in the
database, and creates a new record if it doesn't exist. If the
series exists, optionally updates the episodes if changed.
Args:
data_path: Path to the data file
db: Async database session
Returns:
True if the series was migrated (created or updated),
False if skipped (already exists with same data)
Raises:
DataFileReadError: If the file cannot be read
DataMigrationError: If database operation fails
"""
# Read the data file
serie = self._read_data_file(data_path)
if serie is None:
raise DataFileReadError(f"Could not read data file: {data_path}")
# Check if series already exists in database
existing = await AnimeSeriesService.get_by_key(db, serie.key)
if existing is not None:
# Build episode dict from existing episodes for comparison
existing_dict: dict[int, list[int]] = {}
episodes = await EpisodeService.get_by_series(db, existing.id)
for ep in episodes:
if ep.season not in existing_dict:
existing_dict[ep.season] = []
existing_dict[ep.season].append(ep.episode_number)
for season in existing_dict:
existing_dict[season].sort()
new_dict = serie.episodeDict or {}
if existing_dict == new_dict:
logger.debug(
"Series '%s' already exists with same data, skipping",
serie.key
)
return False
# Update episodes if different - add new episodes
for season, episode_numbers in new_dict.items():
existing_eps = set(existing_dict.get(season, []))
for ep_num in episode_numbers:
if ep_num not in existing_eps:
await EpisodeService.create(
db=db,
series_id=existing.id,
season=season,
episode_number=ep_num,
)
logger.info(
"Updated episodes for existing series '%s'",
serie.key
)
return True
# Create new series in database
try:
# Use folder as fallback name if name is empty
series_name = serie.name
if not series_name or not series_name.strip():
series_name = serie.folder
logger.debug(
"Using folder '%s' as name for series '%s'",
series_name,
serie.key
)
anime_series = await AnimeSeriesService.create(
db,
key=serie.key,
name=series_name,
site=serie.site,
folder=serie.folder,
)
# Create Episode records for each episode in episodeDict
if serie.episodeDict:
for season, episode_numbers in serie.episodeDict.items():
for episode_number in episode_numbers:
await EpisodeService.create(
db=db,
series_id=anime_series.id,
season=season,
episode_number=episode_number,
)
logger.info(
"Migrated series '%s' to database",
serie.key
)
return True
except IntegrityError as e:
# Race condition - series was created by another process
logger.warning(
"Series '%s' was already created (race condition): %s",
serie.key,
e
)
return False
except Exception as e:
raise DataMigrationError(
f"Failed to create series '{serie.key}' in database: {e}"
) from e
async def migrate_all(
self,
anime_directory: str,
db: AsyncSession
) -> MigrationResult:
"""Migrate all data files from anime directory to database.
Scans the anime directory for data files and migrates each one
to the database. Errors are logged but do not stop the migration.
Args:
anime_directory: Path to the anime directory
db: Async database session
Returns:
MigrationResult with counts and error messages
"""
result = MigrationResult()
# Scan for data files
data_files = self.scan_for_data_files(anime_directory)
result.total_found = len(data_files)
if result.total_found == 0:
logger.info("No data files found to migrate")
return result
logger.info(
"Starting migration of %d data files",
result.total_found
)
# Migrate each file
for data_path in data_files:
try:
migrated = await self.migrate_data_file(data_path, db)
if migrated:
result.migrated += 1
else:
result.skipped += 1
except DataFileReadError as e:
result.failed += 1
error_msg = f"Failed to read {data_path}: {e}"
result.errors.append(error_msg)
logger.error(error_msg)
except DataMigrationError as e:
result.failed += 1
error_msg = f"Failed to migrate {data_path}: {e}"
result.errors.append(error_msg)
logger.error(error_msg)
except Exception as e:
result.failed += 1
error_msg = f"Unexpected error migrating {data_path}: {e}"
result.errors.append(error_msg)
logger.exception(error_msg)
# Commit all changes
try:
await db.commit()
except Exception as e:
logger.error("Failed to commit migration: %s", e)
result.errors.append(f"Failed to commit migration: {e}")
logger.info(
"Migration complete: %d migrated, %d skipped, %d failed",
result.migrated,
result.skipped,
result.failed
)
return result
def is_migration_needed(self, anime_directory: str) -> bool:
"""Check if there are data files to migrate.
Args:
anime_directory: Path to the anime directory
Returns:
True if data files exist, False otherwise
"""
data_files = self.scan_for_data_files(anime_directory)
needs_migration = len(data_files) > 0
if needs_migration:
logger.info(
"Migration needed: found %d data files",
len(data_files)
)
else:
logger.debug("No migration needed: no data files found")
return needs_migration
# Singleton instance for the service
_data_migration_service: Optional[DataMigrationService] = None
def get_data_migration_service() -> DataMigrationService:
"""Get the singleton data migration service instance.
Returns:
DataMigrationService instance
"""
global _data_migration_service
if _data_migration_service is None:
_data_migration_service = DataMigrationService()
return _data_migration_service
def reset_data_migration_service() -> None:
"""Reset the singleton service instance (for testing)."""
global _data_migration_service
_data_migration_service = None

View File

@@ -1,309 +0,0 @@
"""Startup migration runner for data file to database migration.
This module provides functions to run the data file migration automatically
during application startup. The migration checks for existing data files
in the anime directory and migrates them to the database.
Usage:
This module is intended to be called from the FastAPI lifespan context.
Example:
@asynccontextmanager
async def lifespan(app: FastAPI):
# ... initialization ...
await ensure_migration_on_startup()
yield
# ... cleanup ...
"""
from __future__ import annotations
import logging
from pathlib import Path
from typing import Optional
from src.server.database.connection import get_db_session
from src.server.services.auth_service import auth_service
from src.server.services.config_service import ConfigService
from src.server.services.data_migration_service import (
MigrationResult,
get_data_migration_service,
)
logger = logging.getLogger(__name__)
async def run_startup_migration(anime_directory: str) -> MigrationResult:
"""Run data file migration for the given anime directory.
Checks if there are data files to migrate and runs the migration
if needed. This function is idempotent - running it multiple times
will only migrate files that haven't been migrated yet.
Args:
anime_directory: Path to the anime directory containing
series folders with data files
Returns:
MigrationResult: Results of the migration operation,
including counts of migrated, skipped, and failed items
Note:
This function creates its own database session and commits
the transaction at the end of the migration.
"""
service = get_data_migration_service()
# Check if migration is needed
if not service.is_migration_needed(anime_directory):
logger.info(
"No data files found to migrate in: %s",
anime_directory
)
return MigrationResult(total_found=0)
logger.info(
"Starting data file migration from: %s",
anime_directory
)
# Get database session and run migration
async with get_db_session() as db:
result = await service.migrate_all(anime_directory, db)
# Log results
if result.migrated > 0 or result.failed > 0:
logger.info(
"Migration complete: %d migrated, %d skipped, %d failed",
result.migrated,
result.skipped,
result.failed
)
if result.errors:
for error in result.errors:
logger.warning("Migration error: %s", error)
return result
def _get_anime_directory_from_config() -> Optional[str]:
"""Get anime directory from application configuration.
Attempts to load the configuration file and extract the
anime_directory setting from the 'other' config section.
Returns:
Anime directory path if configured, None otherwise
"""
try:
config_service = ConfigService()
config = config_service.load_config()
# anime_directory is stored in the 'other' dict
anime_dir = config.other.get("anime_directory")
if anime_dir:
anime_dir = str(anime_dir).strip()
if anime_dir:
return anime_dir
return None
except Exception as e:
logger.warning(
"Could not load anime directory from config: %s",
e
)
return None
def _is_setup_complete() -> bool:
"""Check if the application setup is complete.
Setup is complete when:
1. Master password is configured
2. Configuration file exists and is valid
Returns:
True if setup is complete, False otherwise
"""
# Check if master password is configured
if not auth_service.is_configured():
return False
# Check if config exists and is valid
try:
config_service = ConfigService()
config = config_service.load_config()
# Validate the loaded config
validation = config.validate()
if not validation.valid:
return False
except Exception:
# If we can't load or validate config, setup is not complete
return False
return True
async def ensure_migration_on_startup() -> Optional[MigrationResult]:
"""Ensure data file migration runs during application startup.
This function should be called during FastAPI application startup.
It loads the anime directory from configuration and runs the
migration if the directory is configured and contains data files.
Migration will only run if setup is complete (master password
configured and valid configuration exists).
Returns:
MigrationResult if migration was run, None if skipped
(e.g., when no anime directory is configured)
Behavior:
- Returns None if anime_directory is not configured (first run)
- Returns None if anime_directory does not exist
- Returns MigrationResult with total_found=0 if no data files exist
- Returns MigrationResult with migration counts if migration ran
Note:
This function catches and logs all exceptions without re-raising,
ensuring that startup migration failures don't block application
startup. Check the logs for any migration errors.
Example:
@asynccontextmanager
async def lifespan(app: FastAPI):
await init_db()
try:
result = await ensure_migration_on_startup()
if result:
logger.info(
"Migration: %d migrated, %d failed",
result.migrated,
result.failed
)
except Exception as e:
logger.error("Migration failed: %s", e)
yield
await close_db()
"""
# Check if setup is complete before running migration
if not _is_setup_complete():
logger.debug(
"Setup not complete, skipping startup migration"
)
return None
# Get anime directory from config
anime_directory = _get_anime_directory_from_config()
if not anime_directory:
logger.debug(
"No anime directory configured, skipping migration"
)
return None
# Validate directory exists
anime_path = Path(anime_directory)
if not anime_path.exists():
logger.warning(
"Anime directory does not exist: %s, skipping migration",
anime_directory
)
return None
if not anime_path.is_dir():
logger.warning(
"Anime directory path is not a directory: %s, skipping migration",
anime_directory
)
return None
logger.info(
"Checking for data files to migrate in: %s",
anime_directory
)
try:
result = await run_startup_migration(anime_directory)
return result
except Exception as e:
logger.error(
"Data file migration failed: %s",
e,
exc_info=True
)
# Return empty result rather than None to indicate we attempted
return MigrationResult(
total_found=0,
failed=1,
errors=[f"Migration failed: {str(e)}"]
)
async def run_migration_for_directory(
anime_directory: str
) -> Optional[MigrationResult]:
"""Run data file migration for a specific directory.
This function can be called after setup is complete to migrate
data files from the specified anime directory to the database.
Unlike ensure_migration_on_startup, this does not check setup
status as it's intended to be called after setup is complete.
Args:
anime_directory: Path to the anime directory containing
series folders with data files
Returns:
MigrationResult if migration was run, None if directory invalid
"""
if not anime_directory or not anime_directory.strip():
logger.debug("Empty anime directory provided, skipping migration")
return None
anime_directory = anime_directory.strip()
# Validate directory exists
anime_path = Path(anime_directory)
if not anime_path.exists():
logger.warning(
"Anime directory does not exist: %s, skipping migration",
anime_directory
)
return None
if not anime_path.is_dir():
logger.warning(
"Anime directory path is not a directory: %s",
anime_directory
)
return None
logger.info(
"Running migration for directory: %s",
anime_directory
)
try:
result = await run_startup_migration(anime_directory)
return result
except Exception as e:
logger.error(
"Data file migration failed for %s: %s",
anime_directory,
e,
exc_info=True
)
return MigrationResult(
total_found=0,
failed=1,
errors=[f"Migration failed: {str(e)}"]
)

View File

@@ -1,494 +0,0 @@
"""Integration tests for data file to database migration.
This module tests the complete migration workflow including:
- Migration runs on server startup
- App starts even if migration fails
- Data files are correctly migrated to database
- API endpoints save to database
- Series list reads from database
"""
import json
import tempfile
from pathlib import Path
from unittest.mock import AsyncMock, MagicMock, patch
import pytest
from httpx import ASGITransport, AsyncClient
from src.server.services.data_migration_service import DataMigrationService
from src.server.services.startup_migration import ensure_migration_on_startup
class TestMigrationStartupIntegration:
"""Test migration integration with application startup."""
@pytest.mark.asyncio
async def test_app_starts_with_migration(self):
"""Test that app starts successfully with migration enabled."""
from src.server.fastapi_app import app
transport = ASGITransport(app=app)
async with AsyncClient(
transport=transport,
base_url="http://test"
) as client:
# App should start and health endpoint should work
response = await client.get("/health")
assert response.status_code == 200
@pytest.mark.asyncio
async def test_migration_with_valid_data_files(self):
"""Test migration correctly processes data files."""
with tempfile.TemporaryDirectory() as tmp_dir:
# Create test data files
for i in range(2):
series_dir = Path(tmp_dir) / f"Test Series {i}"
series_dir.mkdir()
data = {
"key": f"test-series-{i}",
"name": f"Test Series {i}",
"site": "aniworld.to",
"folder": f"Test Series {i}",
"episodeDict": {"1": [1, 2, 3]}
}
(series_dir / "data").write_text(json.dumps(data))
# Test migration scan
service = DataMigrationService()
data_files = service.scan_for_data_files(tmp_dir)
assert len(data_files) == 2
@pytest.mark.asyncio
async def test_migration_handles_corrupted_files(self):
"""Test migration handles corrupted data files gracefully."""
with tempfile.TemporaryDirectory() as tmp_dir:
# Create valid data file
valid_dir = Path(tmp_dir) / "Valid Series"
valid_dir.mkdir()
valid_data = {
"key": "valid-series",
"name": "Valid Series",
"site": "aniworld.to",
"folder": "Valid Series",
"episodeDict": {}
}
(valid_dir / "data").write_text(json.dumps(valid_data))
# Create corrupted data file
invalid_dir = Path(tmp_dir) / "Invalid Series"
invalid_dir.mkdir()
(invalid_dir / "data").write_text("not valid json {{{")
# Migration should process valid file and report error for invalid
service = DataMigrationService()
with patch(
'src.server.services.data_migration_service.AnimeSeriesService'
) as MockService:
MockService.get_by_key = AsyncMock(return_value=None)
MockService.create = AsyncMock()
mock_db = AsyncMock()
mock_db.commit = AsyncMock()
result = await service.migrate_all(tmp_dir, mock_db)
# Should have found 2 files
assert result.total_found == 2
# One should succeed, one should fail
assert result.migrated == 1
assert result.failed == 1
assert len(result.errors) == 1
class TestMigrationWithConfig:
"""Test migration with configuration file."""
@pytest.mark.asyncio
async def test_migration_uses_config_anime_directory(self):
"""Test that migration reads anime directory from config."""
with tempfile.TemporaryDirectory() as tmp_dir:
mock_config = MagicMock()
mock_config.other = {"anime_directory": tmp_dir}
with patch(
'src.server.services.startup_migration.ConfigService'
) as MockConfigService:
mock_service = MagicMock()
mock_service.load_config.return_value = mock_config
MockConfigService.return_value = mock_service
with patch(
'src.server.services.startup_migration.get_data_migration_service'
) as mock_get_service:
migration_service = MagicMock()
migration_service.is_migration_needed.return_value = False
mock_get_service.return_value = migration_service
result = await ensure_migration_on_startup()
# Should check the correct directory
migration_service.is_migration_needed.assert_called_once_with(
tmp_dir
)
class TestMigrationIdempotency:
"""Test that migration is idempotent."""
@pytest.mark.asyncio
async def test_migration_skips_existing_entries(self):
"""Test that migration skips series already in database."""
with tempfile.TemporaryDirectory() as tmp_dir:
# Create data file
series_dir = Path(tmp_dir) / "Test Series"
series_dir.mkdir()
data = {
"key": "test-series",
"name": "Test Series",
"site": "aniworld.to",
"folder": "Test Series",
"episodeDict": {"1": [1, 2]}
}
(series_dir / "data").write_text(json.dumps(data))
# Mock existing series in database with same episodes
existing = MagicMock()
existing.id = 1
# Mock episodes matching data file
mock_episodes = [
MagicMock(season=1, episode_number=1),
MagicMock(season=1, episode_number=2),
]
service = DataMigrationService()
with patch(
'src.server.services.data_migration_service.AnimeSeriesService'
) as MockService:
with patch(
'src.server.services.data_migration_service.EpisodeService'
) as MockEpisodeService:
MockService.get_by_key = AsyncMock(return_value=existing)
MockEpisodeService.get_by_series = AsyncMock(
return_value=mock_episodes
)
mock_db = AsyncMock()
mock_db.commit = AsyncMock()
result = await service.migrate_all(tmp_dir, mock_db)
# Should skip since data is same
assert result.total_found == 1
assert result.skipped == 1
assert result.migrated == 0
# Should not call create
MockService.create.assert_not_called()
@pytest.mark.asyncio
async def test_migration_updates_changed_episodes(self):
"""Test that migration updates series with changed episode data."""
with tempfile.TemporaryDirectory() as tmp_dir:
# Create data file with new episodes
series_dir = Path(tmp_dir) / "Test Series"
series_dir.mkdir()
data = {
"key": "test-series",
"name": "Test Series",
"site": "aniworld.to",
"folder": "Test Series",
"episodeDict": {"1": [1, 2, 3, 4, 5]} # More episodes
}
(series_dir / "data").write_text(json.dumps(data))
# Mock existing series with fewer episodes
existing = MagicMock()
existing.id = 1
# Mock existing episodes (fewer than data file)
mock_episodes = [
MagicMock(season=1, episode_number=1),
MagicMock(season=1, episode_number=2),
]
service = DataMigrationService()
with patch(
'src.server.services.data_migration_service.AnimeSeriesService'
) as MockService:
with patch(
'src.server.services.data_migration_service.EpisodeService'
) as MockEpisodeService:
MockService.get_by_key = AsyncMock(return_value=existing)
MockEpisodeService.get_by_series = AsyncMock(
return_value=mock_episodes
)
MockEpisodeService.create = AsyncMock()
mock_db = AsyncMock()
mock_db.commit = AsyncMock()
result = await service.migrate_all(tmp_dir, mock_db)
# Should update since data changed
assert result.total_found == 1
assert result.migrated == 1
# Should create 3 new episodes (3, 4, 5)
assert MockEpisodeService.create.call_count == 3
class TestMigrationOnFreshStart:
"""Test migration behavior on fresh application start."""
@pytest.mark.asyncio
async def test_migration_on_fresh_start_no_data_files(self):
"""Test migration runs correctly when no data files exist."""
with tempfile.TemporaryDirectory() as tmp_dir:
service = DataMigrationService()
# No data files should be found
data_files = service.scan_for_data_files(tmp_dir)
assert len(data_files) == 0
# is_migration_needed should return False
assert service.is_migration_needed(tmp_dir) is False
# migrate_all should succeed with 0 processed
mock_db = AsyncMock()
mock_db.commit = AsyncMock()
result = await service.migrate_all(tmp_dir, mock_db)
assert result.total_found == 0
assert result.migrated == 0
assert result.skipped == 0
assert result.failed == 0
assert len(result.errors) == 0
class TestAddSeriesSavesToDatabase:
"""Test that adding series via API saves to database."""
@pytest.mark.asyncio
async def test_add_series_saves_to_database(self):
"""Test add series endpoint saves to database when available."""
# Mock database and service
mock_db = AsyncMock()
mock_db.commit = AsyncMock()
with patch(
'src.server.api.anime.AnimeSeriesService'
) as MockService:
MockService.get_by_key = AsyncMock(return_value=None)
MockService.create = AsyncMock(return_value=MagicMock(id=1))
# Mock get_optional_database_session to return our mock
with patch(
'src.server.api.anime.get_optional_database_session'
) as mock_get_db:
async def mock_db_gen():
yield mock_db
mock_get_db.return_value = mock_db_gen()
# The endpoint should try to save to database
# This is a unit-style integration test
test_data = {
"key": "test-anime-key",
"name": "Test Anime",
"site": "aniworld.to",
"folder": "Test Anime",
"episodeDict": {"1": [1, 2, 3]}
}
# Verify service would be called with correct data
# (Full API test done in test_anime_endpoints.py)
assert test_data["key"] == "test-anime-key"
class TestScanSavesToDatabase:
"""Test that scanning saves results to database."""
@pytest.mark.asyncio
async def test_scan_async_saves_to_database(self):
"""Test scan_async method saves series to database."""
from src.core.entities.series import Serie
from src.core.SerieScanner import SerieScanner
with tempfile.TemporaryDirectory() as tmp_dir:
# Create series folder structure
series_folder = Path(tmp_dir) / "Test Anime"
series_folder.mkdir()
(series_folder / "Season 1").mkdir()
(series_folder / "Season 1" / "ep1.mp4").touch()
# Mock loader
mock_loader = MagicMock()
mock_loader.getSerie.return_value = Serie(
key="test-anime",
name="Test Anime",
site="aniworld.to",
folder="Test Anime",
episodeDict={1: [1, 2, 3]}
)
# Mock database session
mock_db = AsyncMock()
mock_db.commit = AsyncMock()
# Patch the service at the source module
with patch(
'src.server.database.service.AnimeSeriesService'
) as MockService:
MockService.get_by_key = AsyncMock(return_value=None)
MockService.create = AsyncMock()
scanner = SerieScanner(
tmp_dir, mock_loader, db_session=mock_db
)
# Verify scanner has db_session configured
assert scanner._db_session is mock_db
# The scan_async method would use the database
# when db_session is set. Testing configuration here.
assert scanner._db_session is not None
class TestSerieListReadsFromDatabase:
"""Test that SerieList reads from database."""
@pytest.mark.asyncio
async def test_load_series_from_db(self):
"""Test SerieList.load_series_from_db() method."""
from src.core.entities.SerieList import SerieList
# Create mock database session
mock_db = AsyncMock()
# Create mock series in database with spec to avoid mock attributes
from dataclasses import dataclass
@dataclass
class MockEpisode:
season: int
episode_number: int
@dataclass
class MockAnimeSeries:
key: str
name: str
site: str
folder: str
episodes: list
mock_series = [
MockAnimeSeries(
key="anime-1",
name="Anime 1",
site="aniworld.to",
folder="Anime 1",
episodes=[
MockEpisode(1, 1), MockEpisode(1, 2), MockEpisode(1, 3)
]
),
MockAnimeSeries(
key="anime-2",
name="Anime 2",
site="aniworld.to",
folder="Anime 2",
episodes=[
MockEpisode(1, 1), MockEpisode(1, 2), MockEpisode(2, 1)
]
)
]
# Patch the service at the source module
with patch(
'src.server.database.service.AnimeSeriesService.get_all',
new_callable=AsyncMock
) as mock_get_all:
mock_get_all.return_value = mock_series
# Create SerieList with db_session
with tempfile.TemporaryDirectory() as tmp_dir:
serie_list = SerieList(
tmp_dir, db_session=mock_db, skip_load=True
)
# Load from database
await serie_list.load_series_from_db(mock_db)
# Verify service was called with with_episodes=True
mock_get_all.assert_called_once_with(mock_db, with_episodes=True)
# Verify series were loaded
all_series = serie_list.get_all()
assert len(all_series) == 2
# Verify we can look up by key
anime1 = serie_list.get_by_key("anime-1")
assert anime1 is not None
assert anime1.name == "Anime 1"
class TestSearchAndAddWorkflow:
"""Test complete search and add workflow with database."""
@pytest.mark.asyncio
async def test_search_and_add_workflow(self):
"""Test searching for anime and adding it saves to database."""
from src.core.entities.series import Serie
from src.core.SeriesApp import SeriesApp
with tempfile.TemporaryDirectory() as tmp_dir:
# Mock database
mock_db = AsyncMock()
mock_db.commit = AsyncMock()
with patch('src.core.SeriesApp.Loaders') as MockLoaders:
with patch('src.core.SeriesApp.SerieScanner') as MockScanner:
with patch('src.core.SeriesApp.SerieList') as MockList:
# Setup mocks
mock_loader = MagicMock()
mock_loader.search.return_value = [
{"name": "Test Anime", "key": "test-anime"}
]
mock_loader.getSerie.return_value = Serie(
key="test-anime",
name="Test Anime",
site="aniworld.to",
folder="Test Anime",
episodeDict={1: [1, 2, 3]}
)
mock_loaders = MagicMock()
mock_loaders.GetLoader.return_value = mock_loader
MockLoaders.return_value = mock_loaders
mock_list = MagicMock()
mock_list.GetMissingEpisode.return_value = []
mock_list.add_to_db = AsyncMock()
MockList.return_value = mock_list
mock_scanner = MagicMock()
MockScanner.return_value = mock_scanner
# Create SeriesApp with database
app = SeriesApp(tmp_dir, db_session=mock_db)
# Step 1: Search
results = await app.search("test anime")
assert len(results) == 1
assert results[0]["name"] == "Test Anime"
# Step 2: Add to database
serie = mock_loader.getSerie(results[0]["key"])
await mock_list.add_to_db(serie, mock_db)
# Verify add_to_db was called
mock_list.add_to_db.assert_called_once_with(
serie, mock_db
)
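# Taken together, the classes above trace one DB-backed path end to end
# (a sketch of the assumed wiring, not an assertion about production code):
# SeriesApp.search() -> loader.getSerie(key) -> SerieList.add_to_db() ->
# AnimeSeriesService.create(), with reads coming back through
# AnimeSeriesService.get_all(with_episodes=True).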
View File
@@ -318,25 +318,6 @@ class TestConfigServiceBackups:
assert len(backups) == 3  # Should only keep max_backups
class TestConfigServiceMigration:
"""Test configuration migration."""
def test_migration_preserves_data(self, config_service, sample_config):
"""Test that migration preserves configuration data."""
# Manually save config with old version
data = sample_config.model_dump()
data["version"] = "0.9.0" # Old version
with open(config_service.config_path, "w", encoding="utf-8") as f:
json.dump(data, f)
# Load should migrate automatically
loaded = config_service.load_config()
assert loaded.name == sample_config.name
assert loaded.data_dir == sample_config.data_dir
class TestConfigServiceSingleton:
"""Test singleton instance management."""
View File
@@ -1,599 +0,0 @@
"""Unit tests for DataMigrationService.
This module contains comprehensive tests for the data migration service,
including scanning for data files, migrating individual files,
running batch migrations, and handling errors.
"""
import json
import tempfile
from pathlib import Path
from unittest.mock import AsyncMock, MagicMock, patch
import pytest
from src.core.entities.series import Serie
from src.server.services.data_migration_service import (
DataFileReadError,
DataMigrationError,
DataMigrationService,
MigrationResult,
get_data_migration_service,
reset_data_migration_service,
)
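# Sketch (not a test): how the imported API fits together, assuming only
# the signatures exercised in this module. The on-disk layout is one JSON
# "data" file per series folder, e.g. <anime_dir>/Attack on Titan (2013)/data.
async def _example_migration_run(anime_dir, db_session):
    """Illustrative driver; the function and argument names are made up."""
    service = get_data_migration_service()
    if not service.is_migration_needed(anime_dir):
        return MigrationResult()  # nothing found; all counters stay at 0
    return await service.migrate_all(anime_dir, db_session)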
class TestMigrationResult:
"""Test MigrationResult dataclass."""
def test_migration_result_defaults(self):
"""Test MigrationResult with default values."""
result = MigrationResult()
assert result.total_found == 0
assert result.migrated == 0
assert result.skipped == 0
assert result.failed == 0
assert result.errors == []
def test_migration_result_with_values(self):
"""Test MigrationResult with custom values."""
result = MigrationResult(
total_found=10,
migrated=5,
skipped=3,
failed=2,
errors=["Error 1", "Error 2"]
)
assert result.total_found == 10
assert result.migrated == 5
assert result.skipped == 3
assert result.failed == 2
assert result.errors == ["Error 1", "Error 2"]
def test_migration_result_post_init_none_errors(self):
"""Test that None errors list is converted to empty list."""
# Create result then manually set errors to None
result = MigrationResult()
result.errors = None
result.__post_init__()
assert result.errors == []
class TestDataMigrationServiceScan:
"""Test scanning for data files."""
def test_scan_empty_directory(self):
"""Test scanning empty anime directory."""
service = DataMigrationService()
with tempfile.TemporaryDirectory() as tmp_dir:
result = service.scan_for_data_files(tmp_dir)
assert result == []
def test_scan_empty_string(self):
"""Test scanning with empty string."""
service = DataMigrationService()
result = service.scan_for_data_files("")
assert result == []
def test_scan_whitespace_string(self):
"""Test scanning with whitespace string."""
service = DataMigrationService()
result = service.scan_for_data_files(" ")
assert result == []
def test_scan_nonexistent_directory(self):
"""Test scanning nonexistent directory."""
service = DataMigrationService()
result = service.scan_for_data_files("/nonexistent/path")
assert result == []
def test_scan_file_instead_of_directory(self):
"""Test scanning when path is a file, not directory."""
service = DataMigrationService()
with tempfile.NamedTemporaryFile() as tmp_file:
result = service.scan_for_data_files(tmp_file.name)
assert result == []
def test_scan_finds_data_files(self):
"""Test scanning finds data files in series folders."""
service = DataMigrationService()
with tempfile.TemporaryDirectory() as tmp_dir:
# Create series folders with data files
series1 = Path(tmp_dir) / "Attack on Titan (2013)"
series1.mkdir()
(series1 / "data").write_text('{"key": "aot", "name": "AOT"}')
series2 = Path(tmp_dir) / "One Piece"
series2.mkdir()
(series2 / "data").write_text('{"key": "one-piece", "name": "OP"}')
# Create folder without data file
series3 = Path(tmp_dir) / "No Data Here"
series3.mkdir()
result = service.scan_for_data_files(tmp_dir)
assert len(result) == 2
assert all(isinstance(p, Path) for p in result)
# Check filenames
filenames = [p.name for p in result]
assert all(name == "data" for name in filenames)
def test_scan_ignores_files_in_root(self):
"""Test scanning ignores files directly in anime directory."""
service = DataMigrationService()
with tempfile.TemporaryDirectory() as tmp_dir:
# Create a 'data' file in root (should be ignored)
(Path(tmp_dir) / "data").write_text('{"key": "root"}')
# Create series folder with data file
series1 = Path(tmp_dir) / "Series One"
series1.mkdir()
(series1 / "data").write_text('{"key": "series-one"}')
result = service.scan_for_data_files(tmp_dir)
assert len(result) == 1
assert result[0].parent.name == "Series One"
def test_scan_ignores_nested_data_files(self):
"""Test scanning only finds data files one level deep."""
service = DataMigrationService()
with tempfile.TemporaryDirectory() as tmp_dir:
# Create nested folder structure
series1 = Path(tmp_dir) / "Series One"
series1.mkdir()
(series1 / "data").write_text('{"key": "series-one"}')
# Create nested subfolder with data (should be ignored)
nested = series1 / "Season 1"
nested.mkdir()
(nested / "data").write_text('{"key": "nested"}')
result = service.scan_for_data_files(tmp_dir)
assert len(result) == 1
assert result[0].parent.name == "Series One"
class TestDataMigrationServiceReadFile:
"""Test reading data files."""
def test_read_valid_data_file(self):
"""Test reading a valid data file."""
service = DataMigrationService()
with tempfile.TemporaryDirectory() as tmp_dir:
data_file = Path(tmp_dir) / "data"
serie_data = {
"key": "attack-on-titan",
"name": "Attack on Titan",
"site": "aniworld.to",
"folder": "Attack on Titan (2013)",
"episodeDict": {"1": [1, 2, 3]}
}
data_file.write_text(json.dumps(serie_data))
result = service._read_data_file(data_file)
assert result is not None
assert result.key == "attack-on-titan"
assert result.name == "Attack on Titan"
assert result.site == "aniworld.to"
assert result.folder == "Attack on Titan (2013)"
def test_read_file_not_found(self):
"""Test reading nonexistent file raises error."""
service = DataMigrationService()
with pytest.raises(DataFileReadError) as exc_info:
service._read_data_file(Path("/nonexistent/data"))
assert "not found" in str(exc_info.value).lower() or "Error reading" in str(exc_info.value)
def test_read_file_empty_key(self):
"""Test reading file with empty key raises error."""
service = DataMigrationService()
with tempfile.TemporaryDirectory() as tmp_dir:
data_file = Path(tmp_dir) / "data"
serie_data = {
"key": "",
"name": "No Key Series",
"site": "aniworld.to",
"folder": "Test",
"episodeDict": {}
}
data_file.write_text(json.dumps(serie_data))
with pytest.raises(DataFileReadError) as exc_info:
service._read_data_file(data_file)
# The Serie class will raise ValueError for empty key
assert "empty" in str(exc_info.value).lower() or "key" in str(exc_info.value).lower()
def test_read_file_invalid_json(self):
"""Test reading file with invalid JSON raises error."""
service = DataMigrationService()
with tempfile.TemporaryDirectory() as tmp_dir:
data_file = Path(tmp_dir) / "data"
data_file.write_text("not valid json {{{")
with pytest.raises(DataFileReadError):
service._read_data_file(data_file)
def test_read_file_missing_required_fields(self):
"""Test reading file with missing required fields raises error."""
service = DataMigrationService()
with tempfile.TemporaryDirectory() as tmp_dir:
data_file = Path(tmp_dir) / "data"
# Missing 'key' field
data_file.write_text('{"name": "Test", "site": "test.com"}')
with pytest.raises(DataFileReadError):
service._read_data_file(data_file)
class TestDataMigrationServiceMigrateSingle:
"""Test migrating single data files."""
@pytest.fixture
def mock_db(self):
"""Create a mock database session."""
return AsyncMock()
@pytest.fixture
def sample_serie(self):
"""Create a sample Serie for testing."""
return Serie(
key="attack-on-titan",
name="Attack on Titan",
site="aniworld.to",
folder="Attack on Titan (2013)",
episodeDict={1: [1, 2, 3], 2: [1, 2]}
)
@pytest.mark.asyncio
async def test_migrate_new_series(self, mock_db, sample_serie):
"""Test migrating a new series to database."""
service = DataMigrationService()
with tempfile.TemporaryDirectory() as tmp_dir:
data_file = Path(tmp_dir) / "data"
sample_serie.save_to_file(str(data_file))
with patch.object(
service,
'_read_data_file',
return_value=sample_serie
):
with patch(
'src.server.services.data_migration_service.AnimeSeriesService'
) as MockService:
MockService.get_by_key = AsyncMock(return_value=None)
MockService.create = AsyncMock()
result = await service.migrate_data_file(data_file, mock_db)
assert result is True
MockService.create.assert_called_once()
# Verify the key was passed correctly
call_kwargs = MockService.create.call_args.kwargs
assert call_kwargs['key'] == "attack-on-titan"
assert call_kwargs['name'] == "Attack on Titan"
@pytest.mark.asyncio
async def test_migrate_existing_series_same_data(self, mock_db, sample_serie):
"""Test migrating series that already exists with same data."""
service = DataMigrationService()
# Create mock existing series with same episodes
existing = MagicMock()
existing.id = 1
# Mock episodes matching sample_serie.episodeDict = {1: [1, 2, 3], 2: [1, 2]}
mock_episodes = []
for season, eps in {1: [1, 2, 3], 2: [1, 2]}.items():
for ep_num in eps:
mock_ep = MagicMock()
mock_ep.season = season
mock_ep.episode_number = ep_num
mock_episodes.append(mock_ep)
with patch.object(
service,
'_read_data_file',
return_value=sample_serie
):
with patch(
'src.server.services.data_migration_service.AnimeSeriesService'
) as MockService:
with patch(
'src.server.services.data_migration_service.EpisodeService'
) as MockEpisodeService:
MockService.get_by_key = AsyncMock(return_value=existing)
MockEpisodeService.get_by_series = AsyncMock(
return_value=mock_episodes
)
result = await service.migrate_data_file(
Path("/fake/data"),
mock_db
)
assert result is False
MockService.create.assert_not_called()
@pytest.mark.asyncio
async def test_migrate_existing_series_different_data(self, mock_db):
"""Test migrating series that exists with different episodes."""
service = DataMigrationService()
# Serie with new episodes
serie = Serie(
key="attack-on-titan",
name="Attack on Titan",
site="aniworld.to",
folder="AOT",
episodeDict={1: [1, 2, 3, 4, 5]} # More episodes than existing
)
# Existing series has fewer episodes
existing = MagicMock()
existing.id = 1
# Mock episodes for existing (only 3 episodes)
mock_episodes = []
for ep_num in [1, 2, 3]:
mock_ep = MagicMock()
mock_ep.season = 1
mock_ep.episode_number = ep_num
mock_episodes.append(mock_ep)
with patch.object(
service,
'_read_data_file',
return_value=serie
):
with patch(
'src.server.services.data_migration_service.AnimeSeriesService'
) as MockService:
with patch(
'src.server.services.data_migration_service.EpisodeService'
) as MockEpisodeService:
MockService.get_by_key = AsyncMock(return_value=existing)
MockEpisodeService.get_by_series = AsyncMock(
return_value=mock_episodes
)
MockEpisodeService.create = AsyncMock()
result = await service.migrate_data_file(
Path("/fake/data"),
mock_db
)
assert result is True
# Should create 2 new episodes (4 and 5)
assert MockEpisodeService.create.call_count == 2
@pytest.mark.asyncio
async def test_migrate_read_error(self, mock_db):
"""Test migration handles read errors properly."""
service = DataMigrationService()
with patch.object(
service,
'_read_data_file',
side_effect=DataFileReadError("Cannot read file")
):
with pytest.raises(DataFileReadError):
await service.migrate_data_file(Path("/fake/data"), mock_db)
class TestDataMigrationServiceMigrateAll:
"""Test batch migration of data files."""
@pytest.fixture
def mock_db(self):
"""Create a mock database session."""
db = AsyncMock()
db.commit = AsyncMock()
return db
@pytest.mark.asyncio
async def test_migrate_all_empty_directory(self, mock_db):
"""Test migration with no data files."""
service = DataMigrationService()
with tempfile.TemporaryDirectory() as tmp_dir:
result = await service.migrate_all(tmp_dir, mock_db)
assert result.total_found == 0
assert result.migrated == 0
assert result.skipped == 0
assert result.failed == 0
assert result.errors == []
@pytest.mark.asyncio
async def test_migrate_all_success(self, mock_db):
"""Test successful migration of multiple files."""
service = DataMigrationService()
with tempfile.TemporaryDirectory() as tmp_dir:
# Create test data files
for i in range(3):
series_dir = Path(tmp_dir) / f"Series {i}"
series_dir.mkdir()
data = {
"key": f"series-{i}",
"name": f"Series {i}",
"site": "aniworld.to",
"folder": f"Series {i}",
"episodeDict": {}
}
(series_dir / "data").write_text(json.dumps(data))
with patch(
'src.server.services.data_migration_service.AnimeSeriesService'
) as MockService:
MockService.get_by_key = AsyncMock(return_value=None)
MockService.create = AsyncMock()
result = await service.migrate_all(tmp_dir, mock_db)
assert result.total_found == 3
assert result.migrated == 3
assert result.skipped == 0
assert result.failed == 0
@pytest.mark.asyncio
async def test_migrate_all_with_errors(self, mock_db):
"""Test migration continues after individual file errors."""
service = DataMigrationService()
with tempfile.TemporaryDirectory() as tmp_dir:
# Create valid data file
valid_dir = Path(tmp_dir) / "Valid Series"
valid_dir.mkdir()
valid_data = {
"key": "valid-series",
"name": "Valid Series",
"site": "aniworld.to",
"folder": "Valid Series",
"episodeDict": {}
}
(valid_dir / "data").write_text(json.dumps(valid_data))
# Create invalid data file
invalid_dir = Path(tmp_dir) / "Invalid Series"
invalid_dir.mkdir()
(invalid_dir / "data").write_text("not valid json")
with patch(
'src.server.services.data_migration_service.AnimeSeriesService'
) as MockService:
MockService.get_by_key = AsyncMock(return_value=None)
MockService.create = AsyncMock()
result = await service.migrate_all(tmp_dir, mock_db)
assert result.total_found == 2
assert result.migrated == 1
assert result.failed == 1
assert len(result.errors) == 1
@pytest.mark.asyncio
async def test_migrate_all_with_skips(self, mock_db):
"""Test migration correctly counts skipped files."""
service = DataMigrationService()
with tempfile.TemporaryDirectory() as tmp_dir:
# Create data files
for i in range(2):
series_dir = Path(tmp_dir) / f"Series {i}"
series_dir.mkdir()
data = {
"key": f"series-{i}",
"name": f"Series {i}",
"site": "aniworld.to",
"folder": f"Series {i}",
"episodeDict": {}
}
(series_dir / "data").write_text(json.dumps(data))
# Mock: first series doesn't exist, second already exists
existing = MagicMock()
existing.id = 2
with patch(
'src.server.services.data_migration_service.AnimeSeriesService'
) as MockService:
with patch(
'src.server.services.data_migration_service.EpisodeService'
) as MockEpisodeService:
MockService.get_by_key = AsyncMock(
side_effect=[None, existing]
)
MockService.create = AsyncMock(
return_value=MagicMock(id=1)
)
MockEpisodeService.get_by_series = AsyncMock(return_value=[])
result = await service.migrate_all(tmp_dir, mock_db)
assert result.total_found == 2
assert result.migrated == 1
assert result.skipped == 1
class TestDataMigrationServiceIsMigrationNeeded:
"""Test is_migration_needed method."""
def test_migration_needed_with_data_files(self):
"""Test migration is needed when data files exist."""
service = DataMigrationService()
with tempfile.TemporaryDirectory() as tmp_dir:
series_dir = Path(tmp_dir) / "Test Series"
series_dir.mkdir()
(series_dir / "data").write_text('{"key": "test"}')
assert service.is_migration_needed(tmp_dir) is True
def test_migration_not_needed_empty_directory(self):
"""Test migration not needed for empty directory."""
service = DataMigrationService()
with tempfile.TemporaryDirectory() as tmp_dir:
assert service.is_migration_needed(tmp_dir) is False
def test_migration_not_needed_nonexistent_directory(self):
"""Test migration not needed for nonexistent directory."""
service = DataMigrationService()
assert service.is_migration_needed("/nonexistent/path") is False
class TestDataMigrationServiceSingleton:
"""Test singleton pattern for service."""
def test_get_service_returns_same_instance(self):
"""Test getting service returns same instance."""
reset_data_migration_service()
service1 = get_data_migration_service()
service2 = get_data_migration_service()
assert service1 is service2
def test_reset_service_creates_new_instance(self):
"""Test resetting service creates new instance."""
service1 = get_data_migration_service()
reset_data_migration_service()
service2 = get_data_migration_service()
assert service1 is not service2
def test_service_is_correct_type(self):
"""Test service is correct type."""
reset_data_migration_service()
service = get_data_migration_service()
assert isinstance(service, DataMigrationService)
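# Usage note: tests that depend on singleton state call
# reset_data_migration_service() first (as above) so each test observes a
# fresh DataMigrationService instance instead of one cached by an earlier
# test.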
View File
@@ -25,7 +25,6 @@ from src.server.database.init import (
create_database_backup,
create_database_schema,
get_database_info,
get_migration_guide,
get_schema_version,
initialize_database,
seed_initial_data,
@@ -372,16 +371,6 @@ def test_get_database_info():
assert set(info["expected_tables"]) == EXPECTED_TABLES
def test_get_migration_guide():
"""Test getting migration guide."""
guide = get_migration_guide()
assert isinstance(guide, str)
assert "Alembic" in guide
assert "alembic init" in guide
assert "alembic upgrade head" in guide
# =============================================================================
# Integration Tests
# =============================================================================
View File
@@ -1,419 +0,0 @@
"""
Tests for the database migration system.
This module tests the migration runner, validator, and base classes.
"""
from datetime import datetime
from pathlib import Path
from unittest.mock import AsyncMock, Mock, patch
import pytest
from src.server.database.migrations.base import (
Migration,
MigrationError,
MigrationHistory,
)
from src.server.database.migrations.runner import MigrationRunner
from src.server.database.migrations.validator import MigrationValidator
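# Sketch (not a test): the minimal Migration subclass contract these tests
# assume -- async upgrade()/downgrade() plus a version and description --
# and one way to drive the runner. All names here are illustrative.
class _ExampleMigration(Migration):
    """No-op example migration; the version string is made up."""

    async def upgrade(self, session):
        # A forward schema change would go here, e.g. session.execute(...).
        return None

    async def downgrade(self, session):
        # The matching rollback would go here.
        return None


async def _example_apply(migrations_dir, session):
    """Validate and apply the example migration via the imported classes."""
    mig = _ExampleMigration(version="20250124_001", description="Example")
    validator = MigrationValidator()
    validator.validate_migration(mig)
    validator.raise_if_invalid()  # raises MigrationError on failure
    runner = MigrationRunner(migrations_dir, session)
    await runner.initialize()  # ensures the migration_history table exists
    await runner.apply_migration(mig)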
class TestMigration:
"""Tests for base Migration class."""
def test_migration_initialization(self):
"""Test migration can be initialized with basic attributes."""
class TestMig(Migration):
async def upgrade(self, session):
return None
async def downgrade(self, session):
return None
mig = TestMig(
version="20250124_001", description="Test migration"
)
assert mig.version == "20250124_001"
assert mig.description == "Test migration"
assert isinstance(mig.created_at, datetime)
def test_migration_equality(self):
"""Test migrations are equal based on version."""
class TestMig1(Migration):
async def upgrade(self, session):
return None
async def downgrade(self, session):
return None
class TestMig2(Migration):
async def upgrade(self, session):
return None
async def downgrade(self, session):
return None
mig1 = TestMig1(version="20250124_001", description="Test 1")
mig2 = TestMig2(version="20250124_001", description="Test 2")
mig3 = TestMig1(version="20250124_002", description="Test 3")
assert mig1 == mig2
assert mig1 != mig3
assert hash(mig1) == hash(mig2)
assert hash(mig1) != hash(mig3)
def test_migration_repr(self):
"""Test migration string representation."""
class TestMig(Migration):
async def upgrade(self, session):
return None
async def downgrade(self, session):
return None
mig = TestMig(
version="20250124_001", description="Test migration"
)
assert "20250124_001" in repr(mig)
assert "Test migration" in repr(mig)
class TestMigrationHistory:
"""Tests for MigrationHistory class."""
def test_history_initialization(self):
"""Test migration history record can be created."""
history = MigrationHistory(
version="20250124_001",
description="Test migration",
applied_at=datetime.now(),
execution_time_ms=1500,
success=True,
)
assert history.version == "20250124_001"
assert history.description == "Test migration"
assert history.execution_time_ms == 1500
assert history.success is True
assert history.error_message is None
def test_history_with_error(self):
"""Test migration history with error message."""
history = MigrationHistory(
version="20250124_001",
description="Failed migration",
applied_at=datetime.now(),
execution_time_ms=500,
success=False,
error_message="Test error",
)
assert history.success is False
assert history.error_message == "Test error"
class TestMigrationValidator:
"""Tests for MigrationValidator class."""
def test_validator_initialization(self):
"""Test validator can be initialized."""
validator = MigrationValidator()
assert isinstance(validator.errors, list)
assert isinstance(validator.warnings, list)
assert len(validator.errors) == 0
def test_validate_version_format_valid(self):
"""Test validation of valid version formats."""
validator = MigrationValidator()
assert validator._validate_version_format("20250124_001")
assert validator._validate_version_format("20231201_099")
assert validator._validate_version_format("20250124_001_description")
def test_validate_version_format_invalid(self):
"""Test validation of invalid version formats."""
validator = MigrationValidator()
assert not validator._validate_version_format("")
assert not validator._validate_version_format("20250124")
assert not validator._validate_version_format("invalid_001")
assert not validator._validate_version_format("202501_001")
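# Taken together, the cases above imply -- inferred from the tests, not
# documented here -- a version format of "YYYYMMDD_NNN" with an optional
# "_description" suffix, e.g. "20250124_001" or
# "20250124_001_add_episode_table".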
def test_validate_migration_valid(self):
"""Test validation of valid migration."""
class TestMig(Migration):
async def upgrade(self, session):
return None
async def downgrade(self, session):
return None
mig = TestMig(
version="20250124_001",
description="Valid test migration",
)
validator = MigrationValidator()
assert validator.validate_migration(mig) is True
assert len(validator.errors) == 0
def test_validate_migration_invalid_version(self):
"""Test validation fails for invalid version."""
class TestMig(Migration):
async def upgrade(self, session):
return None
async def downgrade(self, session):
return None
mig = TestMig(
version="invalid",
description="Valid description",
)
validator = MigrationValidator()
assert validator.validate_migration(mig) is False
assert len(validator.errors) > 0
def test_validate_migration_missing_description(self):
"""Test validation fails for missing description."""
class TestMig(Migration):
async def upgrade(self, session):
return None
async def downgrade(self, session):
return None
mig = TestMig(version="20250124_001", description="")
validator = MigrationValidator()
assert validator.validate_migration(mig) is False
assert any("description" in e.lower() for e in validator.errors)
def test_validate_migrations_duplicate_version(self):
"""Test validation detects duplicate versions."""
class TestMig1(Migration):
async def upgrade(self, session):
return None
async def downgrade(self, session):
return None
class TestMig2(Migration):
async def upgrade(self, session):
return None
async def downgrade(self, session):
return None
mig1 = TestMig1(version="20250124_001", description="First")
mig2 = TestMig2(version="20250124_001", description="Duplicate")
validator = MigrationValidator()
assert validator.validate_migrations([mig1, mig2]) is False
assert any("duplicate" in e.lower() for e in validator.errors)
def test_check_migration_conflicts(self):
"""Test detection of migration conflicts."""
class TestMig(Migration):
async def upgrade(self, session):
return None
async def downgrade(self, session):
return None
old_mig = TestMig(version="20250101_001", description="Old")
new_mig = TestMig(version="20250124_001", description="New")
validator = MigrationValidator()
# No conflict when pending is newer
conflict = validator.check_migration_conflicts(
[new_mig], ["20250101_001"]
)
assert conflict is None
# Conflict when pending is older
conflict = validator.check_migration_conflicts(
[old_mig], ["20250124_001"]
)
assert conflict is not None
assert "older" in conflict.lower()
def test_get_validation_report(self):
"""Test validation report generation."""
validator = MigrationValidator()
validator.errors.append("Test error")
validator.warnings.append("Test warning")
report = validator.get_validation_report()
assert "Test error" in report
assert "Test warning" in report
assert "Validation Errors:" in report
assert "Validation Warnings:" in report
def test_raise_if_invalid(self):
"""Test exception raising on validation failure."""
validator = MigrationValidator()
validator.errors.append("Test error")
with pytest.raises(MigrationError):
validator.raise_if_invalid()
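# Sketch (not a test): how the validator calls above combine into one
# guard, assuming only the methods exercised in this class.
def _example_validate_all(migrations):
    """Return validator warnings; print the report and raise on errors."""
    validator = MigrationValidator()
    if not validator.validate_migrations(migrations):
        print(validator.get_validation_report())  # errors and warnings
        validator.raise_if_invalid()  # raises MigrationError
    return validator.warnings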
@pytest.mark.asyncio
class TestMigrationRunner:
"""Tests for MigrationRunner class."""
@pytest.fixture
def mock_session(self):
"""Create mock database session."""
session = AsyncMock()
session.execute = AsyncMock()
session.commit = AsyncMock()
session.rollback = AsyncMock()
return session
@pytest.fixture
def migrations_dir(self, tmp_path):
"""Create temporary migrations directory."""
return tmp_path / "migrations"
async def test_runner_initialization(
self, migrations_dir, mock_session
):
"""Test migration runner can be initialized."""
runner = MigrationRunner(migrations_dir, mock_session)
assert runner.migrations_dir == migrations_dir
assert runner.session == mock_session
assert isinstance(runner._migrations, list)
async def test_initialize_creates_table(
self, migrations_dir, mock_session
):
"""Test initialization creates migration_history table."""
runner = MigrationRunner(migrations_dir, mock_session)
await runner.initialize()
mock_session.execute.assert_called()
mock_session.commit.assert_called()
async def test_load_migrations_empty_dir(
self, migrations_dir, mock_session
):
"""Test loading migrations from empty directory."""
runner = MigrationRunner(migrations_dir, mock_session)
runner.load_migrations()
assert len(runner._migrations) == 0
async def test_get_applied_migrations(
self, migrations_dir, mock_session
):
"""Test retrieving list of applied migrations."""
# Mock database response
mock_result = Mock()
mock_result.fetchall.return_value = [
("20250124_001",),
("20250124_002",),
]
mock_session.execute.return_value = mock_result
runner = MigrationRunner(migrations_dir, mock_session)
applied = await runner.get_applied_migrations()
assert len(applied) == 2
assert "20250124_001" in applied
assert "20250124_002" in applied
async def test_apply_migration_success(
self, migrations_dir, mock_session
):
"""Test successful migration application."""
class TestMig(Migration):
async def upgrade(self, session):
return None
async def downgrade(self, session):
return None
mig = TestMig(version="20250124_001", description="Test")
runner = MigrationRunner(migrations_dir, mock_session)
await runner.apply_migration(mig)
mock_session.commit.assert_called()
async def test_apply_migration_failure(
self, migrations_dir, mock_session
):
"""Test migration application handles failures."""
class FailingMig(Migration):
async def upgrade(self, session):
raise Exception("Test failure")
async def downgrade(self, session):
return None
mig = FailingMig(version="20250124_001", description="Failing")
runner = MigrationRunner(migrations_dir, mock_session)
with pytest.raises(MigrationError):
await runner.apply_migration(mig)
mock_session.rollback.assert_called()
async def test_get_pending_migrations(
self, migrations_dir, mock_session
):
"""Test retrieving pending migrations."""
class TestMig1(Migration):
async def upgrade(self, session):
return None
async def downgrade(self, session):
return None
class TestMig2(Migration):
async def upgrade(self, session):
return None
async def downgrade(self, session):
return None
mig1 = TestMig1(version="20250124_001", description="Applied")
mig2 = TestMig2(version="20250124_002", description="Pending")
runner = MigrationRunner(migrations_dir, mock_session)
runner._migrations = [mig1, mig2]
# Mock only mig1 as applied
mock_result = Mock()
mock_result.fetchall.return_value = [("20250124_001",)]
mock_session.execute.return_value = mock_result
pending = await runner.get_pending_migrations()
assert len(pending) == 1
assert pending[0].version == "20250124_002"
View File
@@ -1,361 +0,0 @@
"""Unit tests for startup migration module.
This module contains comprehensive tests for the startup migration runner,
including testing migration execution, configuration loading, and error handling.
"""
import json
import tempfile
from pathlib import Path
from unittest.mock import AsyncMock, MagicMock, patch
import pytest
from src.server.services.data_migration_service import MigrationResult
from src.server.services.startup_migration import (
_get_anime_directory_from_config,
ensure_migration_on_startup,
run_startup_migration,
)
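# Sketch (not a test): roughly what ensure_migration_on_startup() does,
# per the cases below -- resolve the configured directory, then delegate.
async def _example_startup_flow():
    """Illustrative only; returns None when no directory is configured."""
    directory = _get_anime_directory_from_config()
    if directory is None:
        return None  # startup proceeds without running a migration
    return await run_startup_migration(directory)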
class TestRunStartupMigration:
"""Test run_startup_migration function."""
@pytest.mark.asyncio
async def test_migration_skipped_when_no_data_files(self):
"""Test that migration is skipped when no data files exist."""
with tempfile.TemporaryDirectory() as tmp_dir:
with patch(
'src.server.services.startup_migration.get_data_migration_service'
) as mock_get_service:
mock_service = MagicMock()
mock_service.is_migration_needed.return_value = False
mock_get_service.return_value = mock_service
result = await run_startup_migration(tmp_dir)
assert result.total_found == 0
assert result.migrated == 0
mock_service.migrate_all.assert_not_called()
@pytest.mark.asyncio
async def test_migration_runs_when_data_files_exist(self):
"""Test that migration runs when data files exist."""
with tempfile.TemporaryDirectory() as tmp_dir:
# Create a data file
series_dir = Path(tmp_dir) / "Test Series"
series_dir.mkdir()
(series_dir / "data").write_text('{"key": "test"}')
expected_result = MigrationResult(
total_found=1,
migrated=1,
skipped=0,
failed=0
)
with patch(
'src.server.services.startup_migration.get_data_migration_service'
) as mock_get_service:
mock_service = MagicMock()
mock_service.is_migration_needed.return_value = True
mock_service.migrate_all = AsyncMock(return_value=expected_result)
mock_get_service.return_value = mock_service
with patch(
'src.server.services.startup_migration.get_db_session'
) as mock_get_db:
mock_db = AsyncMock()
mock_get_db.return_value.__aenter__ = AsyncMock(
return_value=mock_db
)
mock_get_db.return_value.__aexit__ = AsyncMock()
result = await run_startup_migration(tmp_dir)
assert result.total_found == 1
assert result.migrated == 1
mock_service.migrate_all.assert_called_once()
@pytest.mark.asyncio
async def test_migration_logs_errors(self):
"""Test that migration errors are logged."""
with tempfile.TemporaryDirectory() as tmp_dir:
expected_result = MigrationResult(
total_found=2,
migrated=1,
skipped=0,
failed=1,
errors=["Error: Could not read file"]
)
with patch(
'src.server.services.startup_migration.get_data_migration_service'
) as mock_get_service:
mock_service = MagicMock()
mock_service.is_migration_needed.return_value = True
mock_service.migrate_all = AsyncMock(return_value=expected_result)
mock_get_service.return_value = mock_service
with patch(
'src.server.services.startup_migration.get_db_session'
) as mock_get_db:
mock_db = AsyncMock()
mock_get_db.return_value.__aenter__ = AsyncMock(
return_value=mock_db
)
mock_get_db.return_value.__aexit__ = AsyncMock()
result = await run_startup_migration(tmp_dir)
assert result.failed == 1
assert len(result.errors) == 1
class TestGetAnimeDirectoryFromConfig:
"""Test _get_anime_directory_from_config function."""
def test_returns_anime_directory_when_configured(self):
"""Test returns anime directory when properly configured."""
mock_config = MagicMock()
mock_config.other = {"anime_directory": "/path/to/anime"}
with patch(
'src.server.services.startup_migration.ConfigService'
) as MockConfigService:
mock_service = MagicMock()
mock_service.load_config.return_value = mock_config
MockConfigService.return_value = mock_service
result = _get_anime_directory_from_config()
assert result == "/path/to/anime"
def test_returns_none_when_not_configured(self):
"""Test returns None when anime directory is not configured."""
mock_config = MagicMock()
mock_config.other = {}
with patch(
'src.server.services.startup_migration.ConfigService'
) as MockConfigService:
mock_service = MagicMock()
mock_service.load_config.return_value = mock_config
MockConfigService.return_value = mock_service
result = _get_anime_directory_from_config()
assert result is None
def test_returns_none_when_anime_directory_empty(self):
"""Test returns None when anime directory is empty string."""
mock_config = MagicMock()
mock_config.other = {"anime_directory": ""}
with patch(
'src.server.services.startup_migration.ConfigService'
) as MockConfigService:
mock_service = MagicMock()
mock_service.load_config.return_value = mock_config
MockConfigService.return_value = mock_service
result = _get_anime_directory_from_config()
assert result is None
def test_returns_none_when_anime_directory_whitespace(self):
"""Test returns None when anime directory is whitespace only."""
mock_config = MagicMock()
mock_config.other = {"anime_directory": " "}
with patch(
'src.server.services.startup_migration.ConfigService'
) as MockConfigService:
mock_service = MagicMock()
mock_service.load_config.return_value = mock_config
MockConfigService.return_value = mock_service
result = _get_anime_directory_from_config()
assert result is None
def test_returns_none_when_config_load_fails(self):
"""Test returns None when configuration loading fails."""
with patch(
'src.server.services.startup_migration.ConfigService'
) as MockConfigService:
mock_service = MagicMock()
mock_service.load_config.side_effect = Exception("Config error")
MockConfigService.return_value = mock_service
result = _get_anime_directory_from_config()
assert result is None
def test_strips_whitespace_from_directory(self):
"""Test that whitespace is stripped from anime directory."""
mock_config = MagicMock()
mock_config.other = {"anime_directory": " /path/to/anime "}
with patch(
'src.server.services.startup_migration.ConfigService'
) as MockConfigService:
mock_service = MagicMock()
mock_service.load_config.return_value = mock_config
MockConfigService.return_value = mock_service
result = _get_anime_directory_from_config()
assert result == "/path/to/anime"
class TestEnsureMigrationOnStartup:
"""Test ensure_migration_on_startup function."""
@pytest.mark.asyncio
async def test_returns_none_when_no_directory_configured(self):
"""Test returns None when anime directory is not configured."""
with patch(
'src.server.services.startup_migration._get_anime_directory_from_config',
return_value=None
):
result = await ensure_migration_on_startup()
assert result is None
@pytest.mark.asyncio
async def test_returns_none_when_directory_does_not_exist(self):
"""Test returns None when anime directory does not exist."""
with patch(
'src.server.services.startup_migration._get_anime_directory_from_config',
return_value="/nonexistent/path"
):
result = await ensure_migration_on_startup()
assert result is None
@pytest.mark.asyncio
async def test_returns_none_when_path_is_file(self):
"""Test returns None when path is a file, not directory."""
with tempfile.NamedTemporaryFile() as tmp_file:
with patch(
'src.server.services.startup_migration._get_anime_directory_from_config',
return_value=tmp_file.name
):
result = await ensure_migration_on_startup()
assert result is None
@pytest.mark.asyncio
async def test_runs_migration_when_directory_exists(self):
"""Test migration runs when directory exists and is configured."""
with tempfile.TemporaryDirectory() as tmp_dir:
expected_result = MigrationResult(total_found=0)
with patch(
'src.server.services.startup_migration._get_anime_directory_from_config',
return_value=tmp_dir
):
with patch(
'src.server.services.startup_migration.run_startup_migration',
new_callable=AsyncMock,
return_value=expected_result
) as mock_run:
result = await ensure_migration_on_startup()
assert result is not None
assert result.total_found == 0
mock_run.assert_called_once_with(tmp_dir)
@pytest.mark.asyncio
async def test_catches_migration_errors(self):
"""Test that migration errors are caught and logged."""
with tempfile.TemporaryDirectory() as tmp_dir:
with patch(
'src.server.services.startup_migration._get_anime_directory_from_config',
return_value=tmp_dir
):
with patch(
'src.server.services.startup_migration.run_startup_migration',
new_callable=AsyncMock,
side_effect=Exception("Database error")
):
result = await ensure_migration_on_startup()
# Should return error result, not raise
assert result is not None
assert result.failed == 1
assert len(result.errors) == 1
assert "Database error" in result.errors[0]
@pytest.mark.asyncio
async def test_returns_migration_result_with_counts(self):
"""Test returns proper migration result with counts."""
with tempfile.TemporaryDirectory() as tmp_dir:
expected_result = MigrationResult(
total_found=5,
migrated=3,
skipped=1,
failed=1,
errors=["Error 1"]
)
with patch(
'src.server.services.startup_migration._get_anime_directory_from_config',
return_value=tmp_dir
):
with patch(
'src.server.services.startup_migration.run_startup_migration',
new_callable=AsyncMock,
return_value=expected_result
):
result = await ensure_migration_on_startup()
assert result.total_found == 5
assert result.migrated == 3
assert result.skipped == 1
assert result.failed == 1
class TestStartupMigrationIntegration:
"""Integration tests for startup migration workflow."""
@pytest.mark.asyncio
async def test_full_workflow_no_config(self):
"""Test full workflow when config is missing."""
with patch(
'src.server.services.startup_migration.ConfigService'
) as MockConfigService:
mock_service = MagicMock()
mock_service.load_config.side_effect = FileNotFoundError()
MockConfigService.return_value = mock_service
result = await ensure_migration_on_startup()
assert result is None
@pytest.mark.asyncio
async def test_full_workflow_with_config_no_data_files(self):
"""Test full workflow with config but no data files."""
with tempfile.TemporaryDirectory() as tmp_dir:
mock_config = MagicMock()
mock_config.other = {"anime_directory": tmp_dir}
with patch(
'src.server.services.startup_migration.ConfigService'
) as MockConfigService:
mock_service = MagicMock()
mock_service.load_config.return_value = mock_config
MockConfigService.return_value = mock_service
with patch(
'src.server.services.startup_migration.get_data_migration_service'
) as mock_get_service:
migration_service = MagicMock()
migration_service.is_migration_needed.return_value = False
mock_get_service.return_value = migration_service
result = await ensure_migration_on_startup()
assert result is not None
assert result.total_found == 0