migration removed
This commit is contained in:
@@ -67,24 +67,7 @@ async def setup_auth(req: SetupRequest):
|
||||
# Save the config with the password hash and anime directory
|
||||
config_service.save_config(config, create_backup=False)
|
||||
|
||||
# Run migration if anime directory was provided
|
||||
response = {"status": "ok"}
|
||||
if anime_directory:
|
||||
from src.server.services.startup_migration import (
|
||||
run_migration_for_directory,
|
||||
)
|
||||
migration_result = await run_migration_for_directory(
|
||||
anime_directory
|
||||
)
|
||||
if migration_result:
|
||||
response["migration"] = {
|
||||
"total_found": migration_result.total_found,
|
||||
"migrated": migration_result.migrated,
|
||||
"skipped": migration_result.skipped,
|
||||
"failed": migration_result.failed,
|
||||
}
|
||||
|
||||
return response
|
||||
return {"status": "ok"}
|
||||
|
||||
except ValueError as e:
|
||||
raise HTTPException(status_code=400, detail=str(e)) from e
|
||||
|
||||
@@ -239,22 +239,10 @@ async def update_directory(
|
||||
|
||||
config_service.save_config(app_config)
|
||||
|
||||
# Run migration for the new directory
|
||||
from src.server.services.startup_migration import run_migration_for_directory
|
||||
migration_result = await run_migration_for_directory(directory)
|
||||
|
||||
response: Dict[str, Any] = {
|
||||
"message": "Anime directory updated successfully"
|
||||
}
|
||||
|
||||
if migration_result:
|
||||
response["migration"] = {
|
||||
"total_found": migration_result.total_found,
|
||||
"migrated": migration_result.migrated,
|
||||
"skipped": migration_result.skipped,
|
||||
"failed": migration_result.failed,
|
||||
}
|
||||
|
||||
return response
|
||||
except ConfigServiceError as e:
|
||||
raise HTTPException(
|
||||
|
||||
@@ -30,7 +30,6 @@ from src.server.database.init import (
|
||||
create_database_backup,
|
||||
create_database_schema,
|
||||
get_database_info,
|
||||
get_migration_guide,
|
||||
get_schema_version,
|
||||
initialize_database,
|
||||
seed_initial_data,
|
||||
@@ -64,7 +63,6 @@ __all__ = [
|
||||
"check_database_health",
|
||||
"create_database_backup",
|
||||
"get_database_info",
|
||||
"get_migration_guide",
|
||||
"CURRENT_SCHEMA_VERSION",
|
||||
"EXPECTED_TABLES",
|
||||
# Models
|
||||
|
||||
@@ -2,12 +2,9 @@
|
||||
|
||||
This module provides comprehensive database initialization functionality:
|
||||
- Schema creation and validation
|
||||
- Initial data migration
|
||||
- Database health checks
|
||||
- Schema versioning support
|
||||
- Migration utilities
|
||||
|
||||
For production deployments, consider using Alembic for managed migrations.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
@@ -354,8 +351,6 @@ async def create_schema_version_table(
|
||||
) -> None:
|
||||
"""Create schema version tracking table.
|
||||
|
||||
Future enhancement for tracking schema migrations with Alembic.
|
||||
|
||||
Args:
|
||||
engine: Optional database engine (uses default if not provided)
|
||||
"""
|
||||
@@ -587,60 +582,6 @@ def get_database_info() -> Dict[str, Any]:
|
||||
}
|
||||
|
||||
|
||||
def get_migration_guide() -> str:
|
||||
"""Get migration guide for production deployments.
|
||||
|
||||
Returns:
|
||||
Migration guide text
|
||||
"""
|
||||
return """
|
||||
Database Migration Guide
|
||||
========================
|
||||
|
||||
Current Setup: SQLAlchemy create_all()
|
||||
- Automatically creates tables on startup
|
||||
- Suitable for development and single-instance deployments
|
||||
- Schema changes require manual handling
|
||||
|
||||
For Production with Alembic:
|
||||
============================
|
||||
|
||||
1. Initialize Alembic (already installed):
|
||||
alembic init alembic
|
||||
|
||||
2. Configure alembic/env.py:
|
||||
from src.server.database.base import Base
|
||||
target_metadata = Base.metadata
|
||||
|
||||
3. Configure alembic.ini:
|
||||
sqlalchemy.url = <your-database-url>
|
||||
|
||||
4. Generate initial migration:
|
||||
alembic revision --autogenerate -m "Initial schema v1.0.0"
|
||||
|
||||
5. Review migration in alembic/versions/
|
||||
|
||||
6. Apply migration:
|
||||
alembic upgrade head
|
||||
|
||||
7. For future schema changes:
|
||||
- Modify models in src/server/database/models.py
|
||||
- Generate migration: alembic revision --autogenerate -m "Description"
|
||||
- Review generated migration
|
||||
- Test in staging environment
|
||||
- Apply: alembic upgrade head
|
||||
- For rollback: alembic downgrade -1
|
||||
|
||||
Best Practices:
|
||||
==============
|
||||
- Always backup database before migrations
|
||||
- Test migrations in staging first
|
||||
- Review auto-generated migrations carefully
|
||||
- Keep migrations in version control
|
||||
- Document breaking changes
|
||||
"""
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Public API
|
||||
# =============================================================================
|
||||
@@ -656,7 +597,6 @@ __all__ = [
|
||||
"check_database_health",
|
||||
"create_database_backup",
|
||||
"get_database_info",
|
||||
"get_migration_guide",
|
||||
"CURRENT_SCHEMA_VERSION",
|
||||
"EXPECTED_TABLES",
|
||||
]
|
||||
|
||||
@@ -1,167 +0,0 @@
|
||||
"""Database migration utilities.
|
||||
|
||||
This module provides utilities for database migrations and schema versioning.
|
||||
Alembic integration can be added when needed for production environments.
|
||||
|
||||
For now, we use SQLAlchemy's create_all for automatic schema creation.
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy import text
|
||||
from sqlalchemy.ext.asyncio import AsyncEngine
|
||||
|
||||
from src.server.database.base import Base
|
||||
from src.server.database.connection import get_engine, get_sync_engine
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def initialize_schema(engine: Optional[AsyncEngine] = None) -> None:
|
||||
"""Initialize database schema.
|
||||
|
||||
Creates all tables defined in Base metadata if they don't exist.
|
||||
This is a simple migration strategy suitable for single-instance deployments.
|
||||
|
||||
For production with multiple instances, consider using Alembic:
|
||||
- alembic init alembic
|
||||
- alembic revision --autogenerate -m "Initial schema"
|
||||
- alembic upgrade head
|
||||
|
||||
Args:
|
||||
engine: Optional database engine (uses default if not provided)
|
||||
|
||||
Raises:
|
||||
RuntimeError: If database is not initialized
|
||||
"""
|
||||
if engine is None:
|
||||
engine = get_engine()
|
||||
|
||||
logger.info("Initializing database schema...")
|
||||
|
||||
# Create all tables
|
||||
async with engine.begin() as conn:
|
||||
await conn.run_sync(Base.metadata.create_all)
|
||||
|
||||
logger.info("Database schema initialized successfully")
|
||||
|
||||
|
||||
async def check_schema_version(engine: Optional[AsyncEngine] = None) -> str:
|
||||
"""Check current database schema version.
|
||||
|
||||
Returns a simple version identifier based on existing tables.
|
||||
For production, consider using Alembic for proper versioning.
|
||||
|
||||
Args:
|
||||
engine: Optional database engine (uses default if not provided)
|
||||
|
||||
Returns:
|
||||
Schema version string
|
||||
|
||||
Raises:
|
||||
RuntimeError: If database is not initialized
|
||||
"""
|
||||
if engine is None:
|
||||
engine = get_engine()
|
||||
|
||||
async with engine.connect() as conn:
|
||||
# Check which tables exist
|
||||
result = await conn.execute(
|
||||
text(
|
||||
"SELECT name FROM sqlite_master "
|
||||
"WHERE type='table' AND name NOT LIKE 'sqlite_%'"
|
||||
)
|
||||
)
|
||||
tables = [row[0] for row in result]
|
||||
|
||||
if not tables:
|
||||
return "empty"
|
||||
elif len(tables) == 4 and all(
|
||||
t in tables for t in [
|
||||
"anime_series",
|
||||
"episodes",
|
||||
"download_queue",
|
||||
"user_sessions",
|
||||
]
|
||||
):
|
||||
return "v1.0"
|
||||
else:
|
||||
return "custom"
|
||||
|
||||
|
||||
def get_migration_info() -> str:
|
||||
"""Get information about database migration setup.
|
||||
|
||||
Returns:
|
||||
Migration setup information
|
||||
"""
|
||||
return """
|
||||
Database Migration Information
|
||||
==============================
|
||||
|
||||
Current Strategy: SQLAlchemy create_all()
|
||||
- Automatically creates tables on startup
|
||||
- Suitable for development and single-instance deployments
|
||||
- Schema changes require manual handling
|
||||
|
||||
For Production Migrations (Alembic):
|
||||
====================================
|
||||
|
||||
1. Initialize Alembic:
|
||||
alembic init alembic
|
||||
|
||||
2. Configure alembic/env.py:
|
||||
- Import Base from src.server.database.base
|
||||
- Set target_metadata = Base.metadata
|
||||
|
||||
3. Configure alembic.ini:
|
||||
- Set sqlalchemy.url to your database URL
|
||||
|
||||
4. Generate initial migration:
|
||||
alembic revision --autogenerate -m "Initial schema"
|
||||
|
||||
5. Apply migrations:
|
||||
alembic upgrade head
|
||||
|
||||
6. For future changes:
|
||||
- Modify models in src/server/database/models.py
|
||||
- Generate migration: alembic revision --autogenerate -m "Description"
|
||||
- Review generated migration in alembic/versions/
|
||||
- Apply: alembic upgrade head
|
||||
|
||||
Benefits of Alembic:
|
||||
- Version control for database schema
|
||||
- Automatic migration generation from model changes
|
||||
- Rollback support with downgrade scripts
|
||||
- Multi-instance deployment support
|
||||
- Safe schema changes in production
|
||||
"""
|
||||
|
||||
|
||||
# =============================================================================
|
||||
# Future Alembic Integration
|
||||
# =============================================================================
|
||||
#
|
||||
# When ready to use Alembic, follow these steps:
|
||||
#
|
||||
# 1. Install Alembic (already in requirements.txt):
|
||||
# pip install alembic
|
||||
#
|
||||
# 2. Initialize Alembic from project root:
|
||||
# alembic init alembic
|
||||
#
|
||||
# 3. Update alembic/env.py to use our Base:
|
||||
# from src.server.database.base import Base
|
||||
# target_metadata = Base.metadata
|
||||
#
|
||||
# 4. Configure alembic.ini with DATABASE_URL from settings
|
||||
#
|
||||
# 5. Generate initial migration:
|
||||
# alembic revision --autogenerate -m "Initial schema"
|
||||
#
|
||||
# 6. Review generated migration and apply:
|
||||
# alembic upgrade head
|
||||
#
|
||||
# =============================================================================
|
||||
@@ -1,236 +0,0 @@
|
||||
"""
|
||||
Initial database schema migration.
|
||||
|
||||
This migration creates the base tables for the Aniworld application,
|
||||
including users, anime, downloads, and configuration tables.
|
||||
|
||||
Version: 20250124_001
|
||||
Created: 2025-01-24
|
||||
"""
|
||||
|
||||
import logging
|
||||
|
||||
from sqlalchemy import text
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from ..migrations.base import Migration, MigrationError
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class InitialSchemaMigration(Migration):
|
||||
"""
|
||||
Creates initial database schema.
|
||||
|
||||
This migration sets up all core tables needed for the application:
|
||||
- users: User accounts and authentication
|
||||
- anime: Anime series metadata
|
||||
- episodes: Episode information
|
||||
- downloads: Download queue and history
|
||||
- config: Application configuration
|
||||
"""
|
||||
|
||||
def __init__(self):
|
||||
"""Initialize the initial schema migration."""
|
||||
super().__init__(
|
||||
version="20250124_001",
|
||||
description="Create initial database schema",
|
||||
)
|
||||
|
||||
async def upgrade(self, session: AsyncSession) -> None:
|
||||
"""
|
||||
Create all initial tables.
|
||||
|
||||
Args:
|
||||
session: Database session
|
||||
|
||||
Raises:
|
||||
MigrationError: If table creation fails
|
||||
"""
|
||||
try:
|
||||
# Create users table
|
||||
await session.execute(
|
||||
text(
|
||||
"""
|
||||
CREATE TABLE IF NOT EXISTS users (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
username TEXT NOT NULL UNIQUE,
|
||||
email TEXT,
|
||||
password_hash TEXT NOT NULL,
|
||||
is_active BOOLEAN DEFAULT 1,
|
||||
is_admin BOOLEAN DEFAULT 0,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
)
|
||||
"""
|
||||
)
|
||||
)
|
||||
|
||||
# Create anime table
|
||||
await session.execute(
|
||||
text(
|
||||
"""
|
||||
CREATE TABLE IF NOT EXISTS anime (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
title TEXT NOT NULL,
|
||||
original_title TEXT,
|
||||
description TEXT,
|
||||
genres TEXT,
|
||||
release_year INTEGER,
|
||||
status TEXT,
|
||||
total_episodes INTEGER,
|
||||
cover_image_url TEXT,
|
||||
aniworld_url TEXT,
|
||||
mal_id INTEGER,
|
||||
anilist_id INTEGER,
|
||||
added_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
)
|
||||
"""
|
||||
)
|
||||
)
|
||||
|
||||
# Create episodes table
|
||||
await session.execute(
|
||||
text(
|
||||
"""
|
||||
CREATE TABLE IF NOT EXISTS episodes (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
anime_id INTEGER NOT NULL,
|
||||
episode_number INTEGER NOT NULL,
|
||||
season_number INTEGER DEFAULT 1,
|
||||
title TEXT,
|
||||
description TEXT,
|
||||
duration_minutes INTEGER,
|
||||
air_date DATE,
|
||||
stream_url TEXT,
|
||||
download_url TEXT,
|
||||
file_path TEXT,
|
||||
file_size_bytes INTEGER,
|
||||
is_downloaded BOOLEAN DEFAULT 0,
|
||||
download_progress REAL DEFAULT 0.0,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (anime_id) REFERENCES anime(id)
|
||||
ON DELETE CASCADE,
|
||||
UNIQUE (anime_id, season_number, episode_number)
|
||||
)
|
||||
"""
|
||||
)
|
||||
)
|
||||
|
||||
# Create downloads table
|
||||
await session.execute(
|
||||
text(
|
||||
"""
|
||||
CREATE TABLE IF NOT EXISTS downloads (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
episode_id INTEGER NOT NULL,
|
||||
user_id INTEGER,
|
||||
status TEXT NOT NULL DEFAULT 'pending',
|
||||
priority INTEGER DEFAULT 5,
|
||||
progress REAL DEFAULT 0.0,
|
||||
download_speed_mbps REAL,
|
||||
eta_seconds INTEGER,
|
||||
started_at TIMESTAMP,
|
||||
completed_at TIMESTAMP,
|
||||
failed_at TIMESTAMP,
|
||||
error_message TEXT,
|
||||
retry_count INTEGER DEFAULT 0,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (episode_id) REFERENCES episodes(id)
|
||||
ON DELETE CASCADE,
|
||||
FOREIGN KEY (user_id) REFERENCES users(id)
|
||||
ON DELETE SET NULL
|
||||
)
|
||||
"""
|
||||
)
|
||||
)
|
||||
|
||||
# Create config table
|
||||
await session.execute(
|
||||
text(
|
||||
"""
|
||||
CREATE TABLE IF NOT EXISTS config (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
key TEXT NOT NULL UNIQUE,
|
||||
value TEXT NOT NULL,
|
||||
category TEXT DEFAULT 'general',
|
||||
description TEXT,
|
||||
is_secret BOOLEAN DEFAULT 0,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
)
|
||||
"""
|
||||
)
|
||||
)
|
||||
|
||||
# Create indexes for better performance
|
||||
await session.execute(
|
||||
text(
|
||||
"CREATE INDEX IF NOT EXISTS idx_anime_title "
|
||||
"ON anime(title)"
|
||||
)
|
||||
)
|
||||
|
||||
await session.execute(
|
||||
text(
|
||||
"CREATE INDEX IF NOT EXISTS idx_episodes_anime_id "
|
||||
"ON episodes(anime_id)"
|
||||
)
|
||||
)
|
||||
|
||||
await session.execute(
|
||||
text(
|
||||
"CREATE INDEX IF NOT EXISTS idx_downloads_status "
|
||||
"ON downloads(status)"
|
||||
)
|
||||
)
|
||||
|
||||
await session.execute(
|
||||
text(
|
||||
"CREATE INDEX IF NOT EXISTS "
|
||||
"idx_downloads_episode_id ON downloads(episode_id)"
|
||||
)
|
||||
)
|
||||
|
||||
logger.info("Initial schema created successfully")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to create initial schema: {e}")
|
||||
raise MigrationError(
|
||||
f"Initial schema creation failed: {e}"
|
||||
) from e
|
||||
|
||||
async def downgrade(self, session: AsyncSession) -> None:
|
||||
"""
|
||||
Drop all initial tables.
|
||||
|
||||
Args:
|
||||
session: Database session
|
||||
|
||||
Raises:
|
||||
MigrationError: If table dropping fails
|
||||
"""
|
||||
try:
|
||||
# Drop tables in reverse order to respect foreign keys
|
||||
tables = [
|
||||
"downloads",
|
||||
"episodes",
|
||||
"anime",
|
||||
"users",
|
||||
"config",
|
||||
]
|
||||
|
||||
for table in tables:
|
||||
await session.execute(text(f"DROP TABLE IF EXISTS {table}"))
|
||||
logger.debug(f"Dropped table: {table}")
|
||||
|
||||
logger.info("Initial schema rolled back successfully")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to rollback initial schema: {e}")
|
||||
raise MigrationError(
|
||||
f"Initial schema rollback failed: {e}"
|
||||
) from e
|
||||
@@ -1,17 +0,0 @@
|
||||
"""
|
||||
Database migration system for Aniworld application.
|
||||
|
||||
This package provides tools for managing database schema changes,
|
||||
including migration creation, execution, and rollback capabilities.
|
||||
"""
|
||||
|
||||
from .base import Migration, MigrationError
|
||||
from .runner import MigrationRunner
|
||||
from .validator import MigrationValidator
|
||||
|
||||
__all__ = [
|
||||
"Migration",
|
||||
"MigrationError",
|
||||
"MigrationRunner",
|
||||
"MigrationValidator",
|
||||
]
|
||||
@@ -1,128 +0,0 @@
|
||||
"""
|
||||
Base migration classes and utilities.
|
||||
|
||||
This module provides the foundation for database migrations,
|
||||
including the abstract Migration class and error handling.
|
||||
"""
|
||||
|
||||
from abc import ABC, abstractmethod
|
||||
from datetime import datetime
|
||||
from typing import Optional
|
||||
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
|
||||
class MigrationError(Exception):
|
||||
"""Base exception for migration-related errors."""
|
||||
|
||||
pass
|
||||
|
||||
|
||||
class Migration(ABC):
|
||||
"""
|
||||
Abstract base class for database migrations.
|
||||
|
||||
Each migration should inherit from this class and implement
|
||||
the upgrade and downgrade methods.
|
||||
|
||||
Attributes:
|
||||
version: Unique version identifier (e.g., "20250124_001")
|
||||
description: Human-readable description of the migration
|
||||
created_at: Timestamp when migration was created
|
||||
"""
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
version: str,
|
||||
description: str,
|
||||
created_at: Optional[datetime] = None,
|
||||
):
|
||||
"""
|
||||
Initialize migration.
|
||||
|
||||
Args:
|
||||
version: Unique version identifier
|
||||
description: Human-readable description
|
||||
created_at: Creation timestamp (defaults to now)
|
||||
"""
|
||||
self.version = version
|
||||
self.description = description
|
||||
self.created_at = created_at or datetime.now()
|
||||
|
||||
@abstractmethod
|
||||
async def upgrade(self, session: AsyncSession) -> None:
|
||||
"""
|
||||
Apply the migration.
|
||||
|
||||
Args:
|
||||
session: Database session for executing changes
|
||||
|
||||
Raises:
|
||||
MigrationError: If migration fails
|
||||
"""
|
||||
pass
|
||||
|
||||
@abstractmethod
|
||||
async def downgrade(self, session: AsyncSession) -> None:
|
||||
"""
|
||||
Revert the migration.
|
||||
|
||||
Args:
|
||||
session: Database session for reverting changes
|
||||
|
||||
Raises:
|
||||
MigrationError: If rollback fails
|
||||
"""
|
||||
pass
|
||||
|
||||
def __repr__(self) -> str:
|
||||
"""Return string representation of migration."""
|
||||
return f"Migration({self.version}: {self.description})"
|
||||
|
||||
def __eq__(self, other: object) -> bool:
|
||||
"""Check equality based on version."""
|
||||
if not isinstance(other, Migration):
|
||||
return False
|
||||
return self.version == other.version
|
||||
|
||||
def __hash__(self) -> int:
|
||||
"""Return hash based on version."""
|
||||
return hash(self.version)
|
||||
|
||||
|
||||
class MigrationHistory:
|
||||
"""
|
||||
Tracks applied migrations in the database.
|
||||
|
||||
This model stores information about which migrations have been
|
||||
applied, when they were applied, and their execution status.
|
||||
"""
|
||||
|
||||
__tablename__ = "migration_history"
|
||||
|
||||
def __init__(
|
||||
self,
|
||||
version: str,
|
||||
description: str,
|
||||
applied_at: datetime,
|
||||
execution_time_ms: int,
|
||||
success: bool = True,
|
||||
error_message: Optional[str] = None,
|
||||
):
|
||||
"""
|
||||
Initialize migration history record.
|
||||
|
||||
Args:
|
||||
version: Migration version identifier
|
||||
description: Migration description
|
||||
applied_at: Timestamp when migration was applied
|
||||
execution_time_ms: Time taken to execute in milliseconds
|
||||
success: Whether migration succeeded
|
||||
error_message: Error message if migration failed
|
||||
"""
|
||||
self.version = version
|
||||
self.description = description
|
||||
self.applied_at = applied_at
|
||||
self.execution_time_ms = execution_time_ms
|
||||
self.success = success
|
||||
self.error_message = error_message
|
||||
@@ -1,323 +0,0 @@
|
||||
"""
|
||||
Migration runner for executing database migrations.
|
||||
|
||||
This module handles the execution of migrations in the correct order,
|
||||
tracks migration history, and provides rollback capabilities.
|
||||
"""
|
||||
|
||||
import importlib.util
|
||||
import logging
|
||||
import time
|
||||
from datetime import datetime
|
||||
from pathlib import Path
|
||||
from typing import List, Optional
|
||||
|
||||
from sqlalchemy import text
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from .base import Migration, MigrationError, MigrationHistory
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class MigrationRunner:
|
||||
"""
|
||||
Manages database migration execution and tracking.
|
||||
|
||||
This class handles loading migrations, executing them in order,
|
||||
tracking their status, and rolling back when needed.
|
||||
"""
|
||||
|
||||
def __init__(self, migrations_dir: Path, session: AsyncSession):
|
||||
"""
|
||||
Initialize migration runner.
|
||||
|
||||
Args:
|
||||
migrations_dir: Directory containing migration files
|
||||
session: Database session for executing migrations
|
||||
"""
|
||||
self.migrations_dir = migrations_dir
|
||||
self.session = session
|
||||
self._migrations: List[Migration] = []
|
||||
|
||||
async def initialize(self) -> None:
|
||||
"""
|
||||
Initialize migration system by creating tracking table if needed.
|
||||
|
||||
Raises:
|
||||
MigrationError: If initialization fails
|
||||
"""
|
||||
try:
|
||||
# Create migration_history table if it doesn't exist
|
||||
create_table_sql = """
|
||||
CREATE TABLE IF NOT EXISTS migration_history (
|
||||
id INTEGER PRIMARY KEY AUTOINCREMENT,
|
||||
version TEXT NOT NULL UNIQUE,
|
||||
description TEXT NOT NULL,
|
||||
applied_at TIMESTAMP NOT NULL,
|
||||
execution_time_ms INTEGER NOT NULL,
|
||||
success BOOLEAN NOT NULL DEFAULT 1,
|
||||
error_message TEXT
|
||||
)
|
||||
"""
|
||||
await self.session.execute(text(create_table_sql))
|
||||
await self.session.commit()
|
||||
logger.info("Migration system initialized")
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to initialize migration system: {e}")
|
||||
raise MigrationError(f"Initialization failed: {e}") from e
|
||||
|
||||
def load_migrations(self) -> None:
|
||||
"""
|
||||
Load all migration files from the migrations directory.
|
||||
|
||||
Migration files should be named in format: {version}_{description}.py
|
||||
and contain a Migration class that inherits from base.Migration.
|
||||
|
||||
Raises:
|
||||
MigrationError: If loading migrations fails
|
||||
"""
|
||||
try:
|
||||
self._migrations.clear()
|
||||
|
||||
if not self.migrations_dir.exists():
|
||||
logger.warning(f"Migrations directory does not exist: {self.migrations_dir}")
|
||||
return
|
||||
|
||||
# Find all Python files in migrations directory
|
||||
migration_files = sorted(self.migrations_dir.glob("*.py"))
|
||||
migration_files = [f for f in migration_files if f.name != "__init__.py"]
|
||||
|
||||
for file_path in migration_files:
|
||||
try:
|
||||
# Import the migration module dynamically
|
||||
spec = importlib.util.spec_from_file_location(
|
||||
f"migration.{file_path.stem}", file_path
|
||||
)
|
||||
if spec and spec.loader:
|
||||
module = importlib.util.module_from_spec(spec)
|
||||
spec.loader.exec_module(module)
|
||||
|
||||
# Find Migration subclass in module
|
||||
for attr_name in dir(module):
|
||||
attr = getattr(module, attr_name)
|
||||
if (
|
||||
isinstance(attr, type)
|
||||
and issubclass(attr, Migration)
|
||||
and attr != Migration
|
||||
):
|
||||
migration_instance = attr()
|
||||
self._migrations.append(migration_instance)
|
||||
logger.debug(f"Loaded migration: {migration_instance.version}")
|
||||
break
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to load migration {file_path.name}: {e}")
|
||||
raise MigrationError(f"Failed to load {file_path.name}: {e}") from e
|
||||
|
||||
# Sort migrations by version
|
||||
self._migrations.sort(key=lambda m: m.version)
|
||||
logger.info(f"Loaded {len(self._migrations)} migrations")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to load migrations: {e}")
|
||||
raise MigrationError(f"Loading migrations failed: {e}") from e
|
||||
|
||||
async def get_applied_migrations(self) -> List[str]:
|
||||
"""
|
||||
Get list of already applied migration versions.
|
||||
|
||||
Returns:
|
||||
List of migration versions that have been applied
|
||||
|
||||
Raises:
|
||||
MigrationError: If query fails
|
||||
"""
|
||||
try:
|
||||
result = await self.session.execute(
|
||||
text("SELECT version FROM migration_history WHERE success = 1 ORDER BY version")
|
||||
)
|
||||
versions = [row[0] for row in result.fetchall()]
|
||||
return versions
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to get applied migrations: {e}")
|
||||
raise MigrationError(f"Query failed: {e}") from e
|
||||
|
||||
async def get_pending_migrations(self) -> List[Migration]:
|
||||
"""
|
||||
Get list of migrations that haven't been applied yet.
|
||||
|
||||
Returns:
|
||||
List of pending Migration objects
|
||||
|
||||
Raises:
|
||||
MigrationError: If check fails
|
||||
"""
|
||||
applied = await self.get_applied_migrations()
|
||||
pending = [m for m in self._migrations if m.version not in applied]
|
||||
return pending
|
||||
|
||||
async def apply_migration(self, migration: Migration) -> None:
|
||||
"""
|
||||
Apply a single migration.
|
||||
|
||||
Args:
|
||||
migration: Migration to apply
|
||||
|
||||
Raises:
|
||||
MigrationError: If migration fails
|
||||
"""
|
||||
start_time = time.time()
|
||||
success = False
|
||||
error_message = None
|
||||
|
||||
try:
|
||||
logger.info(f"Applying migration: {migration.version} - {migration.description}")
|
||||
|
||||
# Execute the migration
|
||||
await migration.upgrade(self.session)
|
||||
await self.session.commit()
|
||||
|
||||
success = True
|
||||
execution_time_ms = int((time.time() - start_time) * 1000)
|
||||
|
||||
logger.info(
|
||||
f"Migration {migration.version} applied successfully in {execution_time_ms}ms"
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
error_message = str(e)
|
||||
execution_time_ms = int((time.time() - start_time) * 1000)
|
||||
logger.error(f"Migration {migration.version} failed: {e}")
|
||||
await self.session.rollback()
|
||||
raise MigrationError(f"Migration {migration.version} failed: {e}") from e
|
||||
|
||||
finally:
|
||||
# Record migration in history
|
||||
try:
|
||||
history_record = MigrationHistory(
|
||||
version=migration.version,
|
||||
description=migration.description,
|
||||
applied_at=datetime.now(),
|
||||
execution_time_ms=execution_time_ms,
|
||||
success=success,
|
||||
error_message=error_message,
|
||||
)
|
||||
|
||||
insert_sql = """
|
||||
INSERT INTO migration_history
|
||||
(version, description, applied_at, execution_time_ms, success, error_message)
|
||||
VALUES (:version, :description, :applied_at, :execution_time_ms, :success, :error_message)
|
||||
"""
|
||||
|
||||
await self.session.execute(
|
||||
text(insert_sql),
|
||||
{
|
||||
"version": history_record.version,
|
||||
"description": history_record.description,
|
||||
"applied_at": history_record.applied_at,
|
||||
"execution_time_ms": history_record.execution_time_ms,
|
||||
"success": history_record.success,
|
||||
"error_message": history_record.error_message,
|
||||
},
|
||||
)
|
||||
await self.session.commit()
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to record migration history: {e}")
|
||||
|
||||
async def run_migrations(self, target_version: Optional[str] = None) -> int:
|
||||
"""
|
||||
Run all pending migrations up to target version.
|
||||
|
||||
Args:
|
||||
target_version: Stop at this version (None = run all)
|
||||
|
||||
Returns:
|
||||
Number of migrations applied
|
||||
|
||||
Raises:
|
||||
MigrationError: If migrations fail
|
||||
"""
|
||||
pending = await self.get_pending_migrations()
|
||||
|
||||
if target_version:
|
||||
pending = [m for m in pending if m.version <= target_version]
|
||||
|
||||
if not pending:
|
||||
logger.info("No pending migrations to apply")
|
||||
return 0
|
||||
|
||||
logger.info(f"Applying {len(pending)} pending migrations")
|
||||
|
||||
for migration in pending:
|
||||
await self.apply_migration(migration)
|
||||
|
||||
return len(pending)
|
||||
|
||||
async def rollback_migration(self, migration: Migration) -> None:
|
||||
"""
|
||||
Rollback a single migration.
|
||||
|
||||
Args:
|
||||
migration: Migration to rollback
|
||||
|
||||
Raises:
|
||||
MigrationError: If rollback fails
|
||||
"""
|
||||
start_time = time.time()
|
||||
|
||||
try:
|
||||
logger.info(f"Rolling back migration: {migration.version}")
|
||||
|
||||
# Execute the downgrade
|
||||
await migration.downgrade(self.session)
|
||||
await self.session.commit()
|
||||
|
||||
execution_time_ms = int((time.time() - start_time) * 1000)
|
||||
|
||||
# Remove from history
|
||||
delete_sql = "DELETE FROM migration_history WHERE version = :version"
|
||||
await self.session.execute(text(delete_sql), {"version": migration.version})
|
||||
await self.session.commit()
|
||||
|
||||
logger.info(
|
||||
f"Migration {migration.version} rolled back successfully in {execution_time_ms}ms"
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Rollback of {migration.version} failed: {e}")
|
||||
await self.session.rollback()
|
||||
raise MigrationError(f"Rollback of {migration.version} failed: {e}") from e
|
||||
|
||||
async def rollback(self, steps: int = 1) -> int:
|
||||
"""
|
||||
Rollback the last N migrations.
|
||||
|
||||
Args:
|
||||
steps: Number of migrations to rollback
|
||||
|
||||
Returns:
|
||||
Number of migrations rolled back
|
||||
|
||||
Raises:
|
||||
MigrationError: If rollback fails
|
||||
"""
|
||||
applied = await self.get_applied_migrations()
|
||||
|
||||
if not applied:
|
||||
logger.info("No migrations to rollback")
|
||||
return 0
|
||||
|
||||
# Get migrations to rollback (in reverse order)
|
||||
to_rollback = applied[-steps:]
|
||||
to_rollback.reverse()
|
||||
|
||||
migrations_to_rollback = [m for m in self._migrations if m.version in to_rollback]
|
||||
|
||||
logger.info(f"Rolling back {len(migrations_to_rollback)} migrations")
|
||||
|
||||
for migration in migrations_to_rollback:
|
||||
await self.rollback_migration(migration)
|
||||
|
||||
return len(migrations_to_rollback)
|
||||
@@ -1,222 +0,0 @@
|
||||
"""
|
||||
Migration validator for ensuring migration safety and integrity.
|
||||
|
||||
This module provides validation utilities to check migrations
|
||||
before they are executed, ensuring they meet quality standards.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import List, Optional, Set
|
||||
|
||||
from .base import Migration, MigrationError
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class MigrationValidator:
    """
    Pre-flight validation for database migrations.

    Accumulates errors and warnings while checking version formats,
    descriptions, required upgrade/downgrade hooks, duplicate versions,
    and chronological ordering, so a migration set can be vetted before
    anything is executed against the database.
    """

    def __init__(self):
        """Create a validator with empty error and warning accumulators."""
        self.errors: List[str] = []
        self.warnings: List[str] = []

    def reset(self) -> None:
        """Discard all previously collected errors and warnings."""
        del self.errors[:]
        del self.warnings[:]

    def validate_migration(self, migration: Migration) -> bool:
        """
        Validate a single migration.

        Resets prior state, then records an error for each failed check:
        version format, minimum description length, and the presence of
        callable upgrade/downgrade methods.

        Args:
            migration: Migration to validate

        Returns:
            True if no errors were recorded, False otherwise
        """
        self.reset()

        if not self._validate_version_format(migration.version):
            self.errors.append(
                f"Invalid version format: {migration.version}. "
                "Expected format: YYYYMMDD_NNN"
            )

        # Descriptions shorter than five characters count as missing.
        if not migration.description or len(migration.description) < 5:
            self.errors.append(
                f"Migration {migration.version} has invalid "
                f"description: '{migration.description}'"
            )

        for hook in ("upgrade", "downgrade"):
            if not callable(getattr(migration, hook, None)):
                self.errors.append(
                    f"Migration {migration.version} missing {hook} method"
                )

        return not self.errors

    def validate_migrations(self, migrations: List[Migration]) -> bool:
        """
        Validate a list of migrations.

        Checks for duplicate versions first (fatal), then validates each
        migration individually, and finally warns when versions are not
        in chronological order.

        Args:
            migrations: List of migrations to validate

        Returns:
            True if all migrations are valid, False otherwise
        """
        self.reset()

        if not migrations:
            self.warnings.append("No migrations to validate")
            return True

        # One error per repeated version.
        seen: Set[str] = set()
        for candidate in migrations:
            if candidate.version in seen:
                self.errors.append(
                    f"Duplicate migration version: {candidate.version}"
                )
            seen.add(candidate.version)

        if self.errors:
            return False

        # NOTE: validate_migration() resets state, so after this loop the
        # accumulators reflect only the most recently checked migration.
        for candidate in migrations:
            if not self.validate_migration(candidate):
                logger.error(
                    f"Migration {candidate.version} "
                    f"validation failed: {self.errors}"
                )
                return False

        order = [m.version for m in migrations]
        if order != sorted(order):
            self.warnings.append(
                "Migrations are not in chronological order"
            )

        return not self.errors

    def _validate_version_format(self, version: str) -> bool:
        """
        Validate version string format.

        Accepts YYYYMMDD_NNN with an optional trailing _description;
        calendar validity of the date digits is not checked.

        Args:
            version: Version string to validate

        Returns:
            True if format is valid
        """
        if not version:
            return False

        pieces = version.split("_")
        if len(pieces) < 2:
            return False

        date_token = pieces[0]
        if len(date_token) != 8 or not date_token.isdigit():
            return False

        return pieces[1].isdigit()

    def check_migration_conflicts(
        self,
        pending: List[Migration],
        applied: List[str],
    ) -> Optional[str]:
        """
        Check for conflicts between pending and applied migrations.

        Args:
            pending: List of pending migrations
            applied: List of applied migration versions

        Returns:
            Error message if a pending migration is older than the newest
            applied one, None otherwise
        """
        if not applied:
            return None

        # Plain string comparison; assumes versions share the zero-padded
        # YYYYMMDD_NNN layout so lexicographic order matches time order.
        newest_applied = max(applied)

        for candidate in pending:
            if candidate.version < newest_applied:
                return (
                    f"Migration {candidate.version} is older than "
                    f"latest applied migration {newest_applied}. "
                    "This may indicate a merge conflict."
                )

        return None

    def get_validation_report(self) -> str:
        """
        Get formatted validation report.

        Returns:
            Formatted report string
        """
        lines: List[str] = []

        if self.errors:
            lines.append("Validation Errors:")
            lines.extend(f"  - {issue}" for issue in self.errors)

        if self.warnings:
            lines.append("Validation Warnings:")
            lines.extend(f"  - {issue}" for issue in self.warnings)

        if not lines:
            lines.append("All validations passed")

        return "\n".join(lines)

    def raise_if_invalid(self) -> None:
        """
        Raise exception if validation failed.

        Raises:
            MigrationError: If validation errors exist
        """
        if not self.errors:
            return
        raise MigrationError(
            "Migration validation failed:\n" + "\n".join(self.errors)
        )
|
||||
@@ -76,23 +76,6 @@ async def lifespan(app: FastAPI):
|
||||
except Exception as e:
|
||||
logger.warning("Failed to load config from config.json: %s", e)
|
||||
|
||||
# Run data file to database migration
|
||||
try:
|
||||
from src.server.services.startup_migration import (
|
||||
ensure_migration_on_startup,
|
||||
)
|
||||
migration_result = await ensure_migration_on_startup()
|
||||
if migration_result:
|
||||
logger.info(
|
||||
"Data migration complete: %d migrated, %d skipped, %d failed",
|
||||
migration_result.migrated,
|
||||
migration_result.skipped,
|
||||
migration_result.failed
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error("Data migration failed: %s", e, exc_info=True)
|
||||
# Continue startup - migration failure should not block app
|
||||
|
||||
# Initialize progress service with event subscription
|
||||
progress_service = get_progress_service()
|
||||
ws_service = get_websocket_service()
|
||||
|
||||
@@ -1,436 +0,0 @@
|
||||
"""Data migration service for migrating file-based storage to database.
|
||||
|
||||
This module provides functionality to migrate anime series data from
|
||||
legacy file-based storage (data files without .json extension) to the
|
||||
SQLite database using the AnimeSeries model.
|
||||
|
||||
The migration service:
|
||||
- Scans anime directories for existing data files
|
||||
- Reads Serie objects from data files
|
||||
- Migrates them to the database using AnimeSeriesService
|
||||
- Handles errors gracefully without stopping the migration
|
||||
- Provides detailed migration results
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from dataclasses import dataclass, field
|
||||
from pathlib import Path
|
||||
from typing import List, Optional
|
||||
|
||||
from sqlalchemy.exc import IntegrityError
|
||||
from sqlalchemy.ext.asyncio import AsyncSession
|
||||
|
||||
from src.core.entities.series import Serie
|
||||
from src.server.database.service import AnimeSeriesService, EpisodeService
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@dataclass
class MigrationResult:
    """Summary of one data-file migration run.

    Attributes:
        total_found: Total number of data files found
        migrated: Number of files successfully migrated
        skipped: Number of files skipped (already in database)
        failed: Number of files that failed to migrate
        errors: List of error messages encountered
    """
    total_found: int = 0
    migrated: int = 0
    skipped: int = 0
    failed: int = 0
    errors: List[str] = field(default_factory=list)

    def __post_init__(self):
        """Normalise an explicit errors=None into an empty list."""
        self.errors = [] if self.errors is None else self.errors
|
||||
|
||||
|
||||
class DataMigrationError(Exception):
    """Base exception for all data-file migration failures."""
|
||||
|
||||
|
||||
class DataFileReadError(DataMigrationError):
    """Raised when a data file cannot be read, parsed, or lacks a key."""
|
||||
|
||||
|
||||
class DataMigrationService:
    """Service for migrating data files to database.

    This service handles the migration of anime series data from
    file-based storage to the database. It scans directories for
    data files, reads Serie objects, and creates AnimeSeries records.

    Example:
        ```python
        service = DataMigrationService()

        # Check if migration is needed (synchronous filesystem check)
        if service.is_migration_needed("/path/to/anime"):
            async with get_db_session() as db:
                result = await service.migrate_all("/path/to/anime", db)
                print(f"Migrated {result.migrated} series")
        ```
    """

    def __init__(self) -> None:
        """Initialize the data migration service (stateless)."""
        pass

    def scan_for_data_files(self, anime_directory: str) -> List[Path]:
        """Scan for data files in the anime directory.

        Finds all 'data' files (JSON format without extension) in
        the anime directory structure. Each series folder may contain
        a 'data' file with series metadata. Only immediate child
        folders are examined (non-recursive).

        Args:
            anime_directory: Path to the anime directory containing
                series folders

        Returns:
            List of Path objects pointing to data files; an empty list
            when the directory is blank, missing, not a directory, or
            unreadable (this method never raises)
        """
        if not anime_directory or not anime_directory.strip():
            logger.warning("Empty anime directory provided")
            return []

        base_path = Path(anime_directory)

        if not base_path.exists():
            logger.warning(
                "Anime directory does not exist: %s",
                anime_directory
            )
            return []

        if not base_path.is_dir():
            logger.warning(
                "Anime directory is not a directory: %s",
                anime_directory
            )
            return []

        data_files: List[Path] = []

        try:
            # Iterate through all subdirectories (series folders)
            for folder in base_path.iterdir():
                if not folder.is_dir():
                    continue

                # Check for 'data' file in each series folder
                data_file = folder / "data"
                if data_file.exists() and data_file.is_file():
                    data_files.append(data_file)
                    logger.debug("Found data file: %s", data_file)

        # Scan errors are logged, not raised; whatever was collected so
        # far is still returned.
        except PermissionError as e:
            logger.error(
                "Permission denied scanning directory %s: %s",
                anime_directory,
                e
            )
        except OSError as e:
            logger.error(
                "OS error scanning directory %s: %s",
                anime_directory,
                e
            )

        logger.info(
            "Found %d data files in %s",
            len(data_files),
            anime_directory
        )
        return data_files

    def _read_data_file(self, data_path: Path) -> Optional[Serie]:
        """Read a Serie object from a data file.

        Args:
            data_path: Path to the data file

        Returns:
            Serie object on success (every failure path raises, so None
            is never actually returned)

        Raises:
            DataFileReadError: If the file cannot be read or parsed, or
                if the loaded serie has an empty/missing key
        """
        try:
            serie = Serie.load_from_file(str(data_path))

            # Validate the serie has required fields
            if not serie.key or not serie.key.strip():
                raise DataFileReadError(
                    f"Data file {data_path} has empty or missing key"
                )

            logger.debug(
                "Successfully read serie '%s' from %s",
                serie.key,
                data_path
            )
            return serie

        # BUG FIX: the empty-key DataFileReadError raised above used to be
        # re-caught by the catch-all `except Exception` below and wrapped a
        # second time ("Error reading data file ...: Data file ... has
        # empty or missing key"). Let it propagate unchanged.
        except DataFileReadError:
            raise
        except FileNotFoundError as e:
            raise DataFileReadError(
                f"Data file not found: {data_path}"
            ) from e
        except PermissionError as e:
            raise DataFileReadError(
                f"Permission denied reading data file: {data_path}"
            ) from e
        except (ValueError, KeyError, TypeError) as e:
            raise DataFileReadError(
                f"Invalid data in file {data_path}: {e}"
            ) from e
        # Catch-all so any unexpected parser failure still surfaces to the
        # caller as a DataFileReadError.
        except Exception as e:
            raise DataFileReadError(
                f"Error reading data file {data_path}: {e}"
            ) from e

    async def migrate_data_file(
        self,
        data_path: Path,
        db: AsyncSession
    ) -> bool:
        """Migrate a single data file to the database.

        Reads the data file, checks if the series already exists in the
        database, and creates a new record if it doesn't exist. If the
        series exists, missing episodes are added; episodes present in
        the database but absent from the file are NOT removed. The
        commit is left to the caller (migrate_all commits once at the
        end).

        Args:
            data_path: Path to the data file
            db: Async database session

        Returns:
            True if the series was migrated (created or updated),
            False if skipped (already exists with same data)

        Raises:
            DataFileReadError: If the file cannot be read
            DataMigrationError: If database operation fails
        """
        # Read the data file
        serie = self._read_data_file(data_path)
        if serie is None:
            raise DataFileReadError(f"Could not read data file: {data_path}")

        # Check if series already exists in database
        existing = await AnimeSeriesService.get_by_key(db, serie.key)

        if existing is not None:
            # Build episode dict from existing episodes for comparison
            existing_dict: dict[int, list[int]] = {}
            episodes = await EpisodeService.get_by_series(db, existing.id)
            for ep in episodes:
                if ep.season not in existing_dict:
                    existing_dict[ep.season] = []
                existing_dict[ep.season].append(ep.episode_number)
            # Sorted so the dict compares equal regardless of row order.
            for season in existing_dict:
                existing_dict[season].sort()

            new_dict = serie.episodeDict or {}

            if existing_dict == new_dict:
                logger.debug(
                    "Series '%s' already exists with same data, skipping",
                    serie.key
                )
                return False

            # Update episodes if different - add new episodes only
            for season, episode_numbers in new_dict.items():
                existing_eps = set(existing_dict.get(season, []))
                for ep_num in episode_numbers:
                    if ep_num not in existing_eps:
                        await EpisodeService.create(
                            db=db,
                            series_id=existing.id,
                            season=season,
                            episode_number=ep_num,
                        )
            logger.info(
                "Updated episodes for existing series '%s'",
                serie.key
            )
            return True

        # Create new series in database
        try:
            # Use folder as fallback name if name is empty
            series_name = serie.name
            if not series_name or not series_name.strip():
                series_name = serie.folder
                logger.debug(
                    "Using folder '%s' as name for series '%s'",
                    series_name,
                    serie.key
                )

            anime_series = await AnimeSeriesService.create(
                db,
                key=serie.key,
                name=series_name,
                site=serie.site,
                folder=serie.folder,
            )

            # Create Episode records for each episode in episodeDict
            if serie.episodeDict:
                for season, episode_numbers in serie.episodeDict.items():
                    for episode_number in episode_numbers:
                        await EpisodeService.create(
                            db=db,
                            series_id=anime_series.id,
                            season=season,
                            episode_number=episode_number,
                        )

            logger.info(
                "Migrated series '%s' to database",
                serie.key
            )
            return True

        except IntegrityError as e:
            # Race condition - series was created by another process;
            # treat it like a skip rather than a failure.
            logger.warning(
                "Series '%s' was already created (race condition): %s",
                serie.key,
                e
            )
            return False
        except Exception as e:
            raise DataMigrationError(
                f"Failed to create series '{serie.key}' in database: {e}"
            ) from e

    async def migrate_all(
        self,
        anime_directory: str,
        db: AsyncSession
    ) -> MigrationResult:
        """Migrate all data files from anime directory to database.

        Scans the anime directory for data files and migrates each one
        to the database. Errors are logged but do not stop the
        migration; a single commit is issued after all files are
        processed.

        Args:
            anime_directory: Path to the anime directory
            db: Async database session

        Returns:
            MigrationResult with counts and error messages
        """
        result = MigrationResult()

        # Scan for data files
        data_files = self.scan_for_data_files(anime_directory)
        result.total_found = len(data_files)

        if result.total_found == 0:
            logger.info("No data files found to migrate")
            return result

        logger.info(
            "Starting migration of %d data files",
            result.total_found
        )

        # Migrate each file; per-file failures are recorded and the loop
        # continues with the next file.
        for data_path in data_files:
            try:
                migrated = await self.migrate_data_file(data_path, db)

                if migrated:
                    result.migrated += 1
                else:
                    result.skipped += 1

            except DataFileReadError as e:
                result.failed += 1
                error_msg = f"Failed to read {data_path}: {e}"
                result.errors.append(error_msg)
                logger.error(error_msg)

            except DataMigrationError as e:
                result.failed += 1
                error_msg = f"Failed to migrate {data_path}: {e}"
                result.errors.append(error_msg)
                logger.error(error_msg)

            except Exception as e:
                result.failed += 1
                error_msg = f"Unexpected error migrating {data_path}: {e}"
                result.errors.append(error_msg)
                logger.exception(error_msg)

        # Commit all changes in one transaction-ending step.
        try:
            await db.commit()
        except Exception as e:
            logger.error("Failed to commit migration: %s", e)
            result.errors.append(f"Failed to commit migration: {e}")

        logger.info(
            "Migration complete: %d migrated, %d skipped, %d failed",
            result.migrated,
            result.skipped,
            result.failed
        )

        return result

    def is_migration_needed(self, anime_directory: str) -> bool:
        """Check if there are data files to migrate.

        Synchronous: only touches the filesystem, never the database.

        Args:
            anime_directory: Path to the anime directory

        Returns:
            True if data files exist, False otherwise
        """
        data_files = self.scan_for_data_files(anime_directory)
        needs_migration = len(data_files) > 0

        if needs_migration:
            logger.info(
                "Migration needed: found %d data files",
                len(data_files)
            )
        else:
            logger.debug("No migration needed: no data files found")

        return needs_migration
|
||||
|
||||
|
||||
# Singleton instance for the service
|
||||
_data_migration_service: Optional[DataMigrationService] = None
|
||||
|
||||
|
||||
def get_data_migration_service() -> DataMigrationService:
    """Return the process-wide DataMigrationService, creating it lazily.

    Returns:
        The shared DataMigrationService instance
    """
    global _data_migration_service
    if _data_migration_service is not None:
        return _data_migration_service
    _data_migration_service = DataMigrationService()
    return _data_migration_service
|
||||
|
||||
|
||||
def reset_data_migration_service() -> None:
    """Drop the cached singleton so the next lookup builds a fresh one.

    Intended for test isolation.
    """
    global _data_migration_service
    _data_migration_service = None
|
||||
@@ -1,309 +0,0 @@
|
||||
"""Startup migration runner for data file to database migration.
|
||||
|
||||
This module provides functions to run the data file migration automatically
|
||||
during application startup. The migration checks for existing data files
|
||||
in the anime directory and migrates them to the database.
|
||||
|
||||
Usage:
|
||||
This module is intended to be called from the FastAPI lifespan context.
|
||||
|
||||
Example:
|
||||
@asynccontextmanager
|
||||
async def lifespan(app: FastAPI):
|
||||
# ... initialization ...
|
||||
await ensure_migration_on_startup()
|
||||
yield
|
||||
# ... cleanup ...
|
||||
"""
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
from src.server.database.connection import get_db_session
|
||||
from src.server.services.auth_service import auth_service
|
||||
from src.server.services.config_service import ConfigService
|
||||
from src.server.services.data_migration_service import (
|
||||
MigrationResult,
|
||||
get_data_migration_service,
|
||||
)
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
async def run_startup_migration(anime_directory: str) -> MigrationResult:
    """Migrate data files under *anime_directory* into the database.

    Idempotent: series already present with identical data are skipped
    on subsequent runs, so calling this repeatedly is safe.

    Args:
        anime_directory: Path to the anime directory containing series
            folders with data files

    Returns:
        MigrationResult with migrated/skipped/failed counts and any
        error messages collected along the way

    Note:
        Opens its own database session; migrate_all commits the
        transaction before the session is closed.
    """
    migrator = get_data_migration_service()

    # Cheap filesystem check before touching the database at all.
    if not migrator.is_migration_needed(anime_directory):
        logger.info(
            "No data files found to migrate in: %s",
            anime_directory
        )
        return MigrationResult(total_found=0)

    logger.info(
        "Starting data file migration from: %s",
        anime_directory
    )

    async with get_db_session() as session:
        outcome = await migrator.migrate_all(anime_directory, session)

    if outcome.migrated or outcome.failed:
        logger.info(
            "Migration complete: %d migrated, %d skipped, %d failed",
            outcome.migrated,
            outcome.skipped,
            outcome.failed
        )

    for message in outcome.errors:
        logger.warning("Migration error: %s", message)

    return outcome
|
||||
|
||||
|
||||
def _get_anime_directory_from_config() -> Optional[str]:
    """Read the configured anime directory, if any.

    Loads the application config and pulls anime_directory out of its
    free-form 'other' section.

    Returns:
        The stripped directory string, or None when the value is unset,
        blank, or the config cannot be loaded.
    """
    try:
        config = ConfigService().load_config()

        # anime_directory lives in the free-form 'other' dict.
        raw_value = config.other.get("anime_directory")
        if not raw_value:
            return None

        cleaned = str(raw_value).strip()
        return cleaned or None

    except Exception as e:
        logger.warning(
            "Could not load anime directory from config: %s",
            e
        )
        return None
|
||||
|
||||
|
||||
def _is_setup_complete() -> bool:
    """Report whether first-run setup has finished.

    Setup counts as complete once a master password is configured and
    the configuration file both loads and validates.

    Returns:
        True when both checks pass, False otherwise
    """
    # No master password means setup never ran.
    if not auth_service.is_configured():
        return False

    try:
        loaded = ConfigService().load_config()
        return bool(loaded.validate().valid)
    except Exception:
        # Unreadable or invalid config: treat setup as incomplete.
        return False
|
||||
|
||||
|
||||
async def ensure_migration_on_startup() -> Optional[MigrationResult]:
    """Run the data-file migration as part of application startup.

    Intended to be awaited from the FastAPI lifespan context. Loads the
    anime directory from configuration and migrates any data files it
    contains, but only when setup is already complete (master password
    configured and the config file valid).

    Returns:
        MigrationResult if migration was attempted, None when it was
        skipped entirely.

    Behavior:
        - None when setup is incomplete (e.g. first run)
        - None when no anime directory is configured
        - None when the configured path is missing or not a directory
        - MigrationResult with total_found=0 when no data files exist
        - MigrationResult with counts when migration actually ran

    Note:
        All exceptions from the migration itself are caught and logged;
        a failure is reported through the returned MigrationResult so
        application startup is never blocked.
    """
    # Migration only makes sense once setup has produced a valid config.
    if not _is_setup_complete():
        logger.debug(
            "Setup not complete, skipping startup migration"
        )
        return None

    anime_directory = _get_anime_directory_from_config()
    if not anime_directory:
        logger.debug(
            "No anime directory configured, skipping migration"
        )
        return None

    # Validate the configured path before handing it to the migrator.
    target = Path(anime_directory)
    if not target.exists():
        logger.warning(
            "Anime directory does not exist: %s, skipping migration",
            anime_directory
        )
        return None

    if not target.is_dir():
        logger.warning(
            "Anime directory path is not a directory: %s, skipping migration",
            anime_directory
        )
        return None

    logger.info(
        "Checking for data files to migrate in: %s",
        anime_directory
    )

    try:
        return await run_startup_migration(anime_directory)
    except Exception as e:
        logger.error(
            "Data file migration failed: %s",
            e,
            exc_info=True
        )
        # Return empty result rather than None to indicate we attempted.
        return MigrationResult(
            total_found=0,
            failed=1,
            errors=[f"Migration failed: {str(e)}"]
        )
|
||||
|
||||
|
||||
async def run_migration_for_directory(
    anime_directory: str
) -> Optional[MigrationResult]:
    """Run the data-file migration for one explicit directory.

    Meant for calls made after setup has finished (e.g. when the user
    changes the anime directory); unlike ensure_migration_on_startup it
    performs no setup-status check.

    Args:
        anime_directory: Path to the anime directory containing series
            folders with data files

    Returns:
        MigrationResult if migration ran (including attempted-but-failed
        runs), None when the directory is blank, missing, or not a
        directory.
    """
    if not anime_directory or not anime_directory.strip():
        logger.debug("Empty anime directory provided, skipping migration")
        return None

    anime_directory = anime_directory.strip()

    # Validate the path before handing it to the migrator.
    candidate = Path(anime_directory)
    if not candidate.exists():
        logger.warning(
            "Anime directory does not exist: %s, skipping migration",
            anime_directory
        )
        return None

    if not candidate.is_dir():
        logger.warning(
            "Anime directory path is not a directory: %s",
            anime_directory
        )
        return None

    logger.info(
        "Running migration for directory: %s",
        anime_directory
    )

    try:
        return await run_startup_migration(anime_directory)
    except Exception as e:
        logger.error(
            "Data file migration failed for %s: %s",
            anime_directory,
            e,
            exc_info=True
        )
        # Report the failure through the result instead of raising.
        return MigrationResult(
            total_found=0,
            failed=1,
            errors=[f"Migration failed: {str(e)}"]
        )
|
||||
Reference in New Issue
Block a user