2025-10-12 18:05:31 +02:00
parent 57d49bcf78
commit 7a71715183
130 changed files with 30010 additions and 50631 deletions


@@ -1,782 +0,0 @@
"""
Anime Management API Endpoints
This module provides REST API endpoints for anime CRUD operations,
including creation, reading, updating, deletion, and search functionality.
"""
import uuid
from typing import Any, Dict, List, Optional
from fastapi import APIRouter, Depends, HTTPException, Query, status
from pydantic import BaseModel, Field
# Import SeriesApp for business logic
from src.core.SeriesApp import SeriesApp
# FastAPI dependencies and models
from src.server.fastapi_app import get_current_user, settings
# Flask machinery and shared helpers used by the legacy @anime_bp endpoints below
from flask import Blueprint, request
from ...shared.auth_decorators import require_auth, optional_auth
from ...shared.error_handlers import handle_api_errors, APIException, NotFoundError, ValidationError
from ...shared.validators import validate_json_input, validate_id_parameter, validate_pagination_params
from ...shared.response_helpers import (
    create_success_response, create_paginated_response, extract_pagination_params,
    format_anime_response  # location assumed; adjust if this helper lives elsewhere
)
# Data-layer objects used by the legacy endpoints (module paths assumed)
try:
    from src.data.anime_repository import anime_repository
    from src.data.models import AnimeMetadata
except ImportError:  # development fallback, mirroring the other controllers
    anime_repository = None
    AnimeMetadata = None
# Pydantic models for requests
class AnimeSearchRequest(BaseModel):
"""Request model for anime search."""
query: str = Field(..., min_length=1, max_length=100)
status: Optional[str] = Field(None, pattern="^(ongoing|completed|planned|dropped|paused)$")
genre: Optional[str] = None
year: Optional[int] = Field(None, ge=1900, le=2100)
class AnimeResponse(BaseModel):
"""Response model for anime data."""
id: str
title: str
description: Optional[str] = None
status: str = "Unknown"
folder: Optional[str] = None
episodes: int = 0
class AnimeCreateRequest(BaseModel):
"""Request model for creating anime entries."""
name: str = Field(..., min_length=1, max_length=255)
folder: str = Field(..., min_length=1)
description: Optional[str] = None
status: str = Field(default="planned", pattern="^(ongoing|completed|planned|dropped|paused)$")
genre: Optional[str] = None
year: Optional[int] = Field(None, ge=1900, le=2100)
class AnimeUpdateRequest(BaseModel):
"""Request model for updating anime entries."""
name: Optional[str] = Field(None, min_length=1, max_length=255)
folder: Optional[str] = None
description: Optional[str] = None
status: Optional[str] = Field(None, pattern="^(ongoing|completed|planned|dropped|paused)$")
genre: Optional[str] = None
year: Optional[int] = Field(None, ge=1900, le=2100)
class PaginatedAnimeResponse(BaseModel):
"""Paginated response model for anime lists."""
success: bool = True
data: List[AnimeResponse]
pagination: Dict[str, Any]
class AnimeSearchResponse(BaseModel):
"""Response model for anime search results."""
success: bool = True
data: List[AnimeResponse]
pagination: Dict[str, Any]
search: Dict[str, Any]
class RescanResponse(BaseModel):
"""Response model for rescan operations."""
success: bool
message: str
total_series: int
# Dependency to get SeriesApp instance
def get_series_app() -> SeriesApp:
"""Get SeriesApp instance for business logic operations."""
if not settings.anime_directory:
raise HTTPException(
status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
detail="Anime directory not configured"
)
return SeriesApp(settings.anime_directory)
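# Note (illustrative, not in the original module): since get_series_app is an
# ordinary FastAPI dependency, tests can point these endpoints at a scratch
# directory without touching settings, e.g.
#   app.dependency_overrides[get_series_app] = lambda: SeriesApp("/tmp/anime")
# where `app` is the FastAPI application instance (name assumed).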
# Create FastAPI router for anime management endpoints
router = APIRouter(prefix='/api/v1/anime', tags=['anime'])
# Legacy Flask blueprint still backing the @anime_bp endpoints during the migration
anime_bp = Blueprint('anime', __name__, url_prefix='/api/v1/anime')
@router.get('', response_model=PaginatedAnimeResponse)
async def list_anime(
status: Optional[str] = Query(None, pattern="^(ongoing|completed|planned|dropped|paused)$"),
genre: Optional[str] = Query(None),
year: Optional[int] = Query(None, ge=1900, le=2100),
search: Optional[str] = Query(None),
page: int = Query(1, ge=1),
per_page: int = Query(50, ge=1, le=1000),
current_user: Optional[Dict] = Depends(get_current_user),
series_app: SeriesApp = Depends(get_series_app)
) -> PaginatedAnimeResponse:
"""
Get all anime with optional filtering and pagination.
Query Parameters:
- status: Filter by anime status (ongoing, completed, planned, dropped, paused)
- genre: Filter by genre
- year: Filter by release year
- search: Search in name and description
- page: Page number (default: 1)
- per_page: Items per page (default: 50, max: 1000)
Returns:
Paginated list of anime with metadata
"""
try:
        # Get the series list from SeriesApp.
        # NOTE: the status/genre/year query parameters are accepted but not yet
        # applied below; only the free-text search filter is honored.
        anime_list = series_app.series_list
# Convert to list of AnimeResponse objects
anime_responses = []
for series_item in anime_list:
anime_response = AnimeResponse(
id=getattr(series_item, 'id', str(uuid.uuid4())),
title=getattr(series_item, 'name', 'Unknown'),
folder=getattr(series_item, 'folder', ''),
description=getattr(series_item, 'description', ''),
status='ongoing', # Default status
episodes=getattr(series_item, 'total_episodes', 0)
)
# Apply search filter if provided
if search:
if search.lower() not in anime_response.title.lower():
continue
anime_responses.append(anime_response)
# Apply pagination
total = len(anime_responses)
start_idx = (page - 1) * per_page
end_idx = start_idx + per_page
paginated_anime = anime_responses[start_idx:end_idx]
return PaginatedAnimeResponse(
data=paginated_anime,
pagination={
"page": page,
"per_page": per_page,
"total": total,
"pages": (total + per_page - 1) // per_page,
"has_next": end_idx < total,
"has_prev": page > 1
}
)
except Exception as e:
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Error retrieving anime list: {str(e)}"
)
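# Worked example of the pagination arithmetic above (illustrative): with
# total=101 and per_page=50, pages = (101 + 50 - 1) // 50 = 3; requesting
# page=3 slices items [100:150], returning the single remaining item with
# has_next=False and has_prev=True.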
@anime_bp.route('/<int:anime_id>', methods=['GET'])
@handle_api_errors
@validate_id_parameter('anime_id')
@optional_auth
def get_anime(anime_id: int) -> Dict[str, Any]:
"""
Get specific anime by ID.
Args:
anime_id: Unique identifier for the anime
Returns:
Anime details with episodes summary
"""
if not anime_repository:
raise APIException("Anime repository not available", 503)
anime = anime_repository.get_anime_by_id(anime_id)
if not anime:
raise NotFoundError("Anime not found")
# Format anime data
anime_data = format_anime_response(anime.__dict__)
# Add episodes summary
episodes_summary = anime_repository.get_episodes_summary(anime_id)
anime_data['episodes_summary'] = episodes_summary
return create_success_response(anime_data)
@anime_bp.route('', methods=['POST'])
@handle_api_errors
@validate_json_input(
required_fields=['name', 'folder'],
optional_fields=['key', 'description', 'genres', 'release_year', 'status', 'total_episodes', 'poster_url', 'custom_metadata'],
field_types={
'name': str,
'folder': str,
'key': str,
'description': str,
'genres': list,
'release_year': int,
'status': str,
'total_episodes': int,
'poster_url': str,
'custom_metadata': dict
}
)
@require_auth
def create_anime() -> Dict[str, Any]:
"""
Create a new anime record.
Required Fields:
- name: Anime name
- folder: Folder path where anime files are stored
Optional Fields:
- key: Unique key identifier
- description: Anime description
- genres: List of genres
- release_year: Year of release
- status: Status (ongoing, completed, planned, dropped, paused)
- total_episodes: Total number of episodes
- poster_url: URL to poster image
- custom_metadata: Additional metadata as key-value pairs
Returns:
Created anime details with generated ID
"""
if not anime_repository:
raise APIException("Anime repository not available", 503)
data = request.get_json()
# Validate status if provided
if 'status' in data and data['status'] not in ['ongoing', 'completed', 'planned', 'dropped', 'paused']:
raise ValidationError("Status must be one of: ongoing, completed, planned, dropped, paused")
# Check if anime with same folder already exists
existing_anime = anime_repository.get_anime_by_folder(data['folder'])
if existing_anime:
raise ValidationError("Anime with this folder already exists")
# Create anime metadata object
try:
anime = AnimeMetadata(
anime_id=str(uuid.uuid4()),
name=data['name'],
folder=data['folder'],
key=data.get('key'),
description=data.get('description'),
genres=data.get('genres', []),
release_year=data.get('release_year'),
status=data.get('status', 'planned'),
total_episodes=data.get('total_episodes'),
poster_url=data.get('poster_url'),
custom_metadata=data.get('custom_metadata', {})
)
except Exception as e:
raise ValidationError(f"Invalid anime data: {str(e)}")
# Save to database
success = anime_repository.create_anime(anime)
if not success:
raise APIException("Failed to create anime", 500)
# Return created anime
anime_data = format_anime_response(anime.__dict__)
return create_success_response(
data=anime_data,
message="Anime created successfully",
status_code=201
)
@anime_bp.route('/<int:anime_id>', methods=['PUT'])
@handle_api_errors
@validate_id_parameter('anime_id')
@validate_json_input(
optional_fields=['name', 'folder', 'key', 'description', 'genres', 'release_year', 'status', 'total_episodes', 'poster_url', 'custom_metadata'],
field_types={
'name': str,
'folder': str,
'key': str,
'description': str,
'genres': list,
'release_year': int,
'status': str,
'total_episodes': int,
'poster_url': str,
'custom_metadata': dict
}
)
@require_auth
def update_anime(anime_id: int) -> Dict[str, Any]:
"""
Update an existing anime record.
Args:
anime_id: Unique identifier for the anime
Optional Fields:
- name: Anime name
- folder: Folder path where anime files are stored
- key: Unique key identifier
- description: Anime description
- genres: List of genres
- release_year: Year of release
- status: Status (ongoing, completed, planned, dropped, paused)
- total_episodes: Total number of episodes
- poster_url: URL to poster image
- custom_metadata: Additional metadata as key-value pairs
Returns:
Updated anime details
"""
if not anime_repository:
raise APIException("Anime repository not available", 503)
data = request.get_json()
# Get existing anime
existing_anime = anime_repository.get_anime_by_id(anime_id)
if not existing_anime:
raise NotFoundError("Anime not found")
# Validate status if provided
if 'status' in data and data['status'] not in ['ongoing', 'completed', 'planned', 'dropped', 'paused']:
raise ValidationError("Status must be one of: ongoing, completed, planned, dropped, paused")
# Check if folder is being changed and if it conflicts
if 'folder' in data and data['folder'] != existing_anime.folder:
conflicting_anime = anime_repository.get_anime_by_folder(data['folder'])
if conflicting_anime and conflicting_anime.anime_id != anime_id:
raise ValidationError("Another anime with this folder already exists")
# Update fields
update_fields = {}
for field in ['name', 'folder', 'key', 'description', 'genres', 'release_year', 'status', 'total_episodes', 'poster_url']:
if field in data:
update_fields[field] = data[field]
# Handle custom metadata update (merge instead of replace)
if 'custom_metadata' in data:
existing_metadata = existing_anime.custom_metadata or {}
existing_metadata.update(data['custom_metadata'])
update_fields['custom_metadata'] = existing_metadata
# Perform update
success = anime_repository.update_anime(anime_id, update_fields)
if not success:
raise APIException("Failed to update anime", 500)
# Get updated anime
updated_anime = anime_repository.get_anime_by_id(anime_id)
anime_data = format_anime_response(updated_anime.__dict__)
return create_success_response(
data=anime_data,
message="Anime updated successfully"
)
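# Merge semantics of custom_metadata above (illustrative): if the stored value
# is {"source": "manual", "rating": 7} and the request sends {"rating": 8},
# the result is {"source": "manual", "rating": 8} - keys are merged into the
# existing mapping rather than replacing it wholesale.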
@anime_bp.route('/<int:anime_id>', methods=['DELETE'])
@handle_api_errors
@validate_id_parameter('anime_id')
@require_auth
def delete_anime(anime_id: int) -> Dict[str, Any]:
"""
Delete an anime record and all related data.
Args:
anime_id: Unique identifier for the anime
Query Parameters:
- force: Set to 'true' to force deletion even if episodes exist
Returns:
Deletion confirmation
"""
if not anime_repository:
raise APIException("Anime repository not available", 503)
# Check if anime exists
existing_anime = anime_repository.get_anime_by_id(anime_id)
if not existing_anime:
raise NotFoundError("Anime not found")
# Check for existing episodes unless force deletion
force_delete = request.args.get('force', 'false').lower() == 'true'
if not force_delete:
episode_count = anime_repository.get_episode_count(anime_id)
if episode_count > 0:
raise ValidationError(
f"Cannot delete anime with {episode_count} episodes. "
"Use ?force=true to force deletion or delete episodes first."
)
# Perform deletion (this should cascade to episodes, downloads, etc.)
success = anime_repository.delete_anime(anime_id)
if not success:
raise APIException("Failed to delete anime", 500)
return create_success_response(
message=f"Anime '{existing_anime.name}' deleted successfully"
)
@router.get('/search', response_model=AnimeSearchResponse)
async def search_anime(
q: str = Query(..., min_length=2, description="Search query"),
page: int = Query(1, ge=1),
per_page: int = Query(20, ge=1, le=100),
current_user: Optional[Dict] = Depends(get_current_user),
series_app: SeriesApp = Depends(get_series_app)
) -> AnimeSearchResponse:
"""
Search anime by name using SeriesApp.
Query Parameters:
- q: Search query (required, min 2 characters)
- page: Page number (default: 1)
- per_page: Items per page (default: 20, max: 100)
Returns:
Paginated search results
"""
try:
# Use SeriesApp to perform search
search_results = series_app.search(q)
# Convert search results to AnimeResponse objects
anime_responses = []
for result in search_results:
anime_response = AnimeResponse(
id=getattr(result, 'id', str(uuid.uuid4())),
title=getattr(result, 'name', getattr(result, 'title', 'Unknown')),
description=getattr(result, 'description', ''),
status='available',
episodes=getattr(result, 'episodes', 0),
folder=getattr(result, 'key', '')
)
anime_responses.append(anime_response)
# Apply pagination
total = len(anime_responses)
start_idx = (page - 1) * per_page
end_idx = start_idx + per_page
paginated_results = anime_responses[start_idx:end_idx]
return AnimeSearchResponse(
data=paginated_results,
pagination={
"page": page,
"per_page": per_page,
"total": total,
"pages": (total + per_page - 1) // per_page,
"has_next": end_idx < total,
"has_prev": page > 1
},
search={
"query": q,
"total_results": total
}
)
except Exception as e:
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Search failed: {str(e)}"
)
@anime_bp.route('/<int:anime_id>/episodes', methods=['GET'])
@handle_api_errors
@validate_id_parameter('anime_id')
@validate_pagination_params
@optional_auth
def get_anime_episodes(anime_id: int) -> Dict[str, Any]:
"""
Get all episodes for a specific anime.
Args:
anime_id: Unique identifier for the anime
Query Parameters:
- status: Filter by episode status
- downloaded: Filter by download status (true/false)
- page: Page number (default: 1)
- per_page: Items per page (default: 50, max: 1000)
Returns:
Paginated list of episodes for the anime
"""
if not anime_repository:
raise APIException("Anime repository not available", 503)
# Check if anime exists
anime = anime_repository.get_anime_by_id(anime_id)
if not anime:
raise NotFoundError("Anime not found")
# Get filters
status_filter = request.args.get('status')
downloaded_filter = request.args.get('downloaded')
# Validate downloaded filter
if downloaded_filter and downloaded_filter.lower() not in ['true', 'false']:
raise ValidationError("Downloaded filter must be 'true' or 'false'")
# Get pagination parameters
page, per_page = extract_pagination_params()
# Get episodes
episodes = anime_repository.get_episodes_for_anime(
anime_id=anime_id,
status_filter=status_filter,
downloaded_filter=downloaded_filter.lower() == 'true' if downloaded_filter else None
)
# Format episodes (this would use episode formatting from episodes.py)
formatted_episodes = []
for episode in episodes:
formatted_episodes.append({
'id': episode.id,
'episode_number': episode.episode_number,
'title': episode.title,
'url': episode.url,
'status': episode.status,
'is_downloaded': episode.is_downloaded,
'file_path': episode.file_path,
'file_size': episode.file_size,
'created_at': episode.created_at.isoformat() if episode.created_at else None,
'updated_at': episode.updated_at.isoformat() if episode.updated_at else None
})
# Apply pagination
total = len(formatted_episodes)
start_idx = (page - 1) * per_page
end_idx = start_idx + per_page
paginated_episodes = formatted_episodes[start_idx:end_idx]
return create_paginated_response(
data=paginated_episodes,
page=page,
per_page=per_page,
total=total,
endpoint='anime.get_anime_episodes',
anime_id=anime_id
)
@anime_bp.route('/bulk', methods=['POST'])
@handle_api_errors
@validate_json_input(
required_fields=['action', 'anime_ids'],
optional_fields=['data'],
field_types={
'action': str,
'anime_ids': list,
'data': dict
}
)
@require_auth
def bulk_anime_operation() -> Dict[str, Any]:
"""
Perform bulk operations on multiple anime.
Required Fields:
- action: Operation to perform (update_status, delete, update_metadata)
- anime_ids: List of anime IDs to operate on
Optional Fields:
- data: Additional data for the operation
Returns:
Results of the bulk operation
"""
if not anime_repository:
raise APIException("Anime repository not available", 503)
data = request.get_json()
action = data['action']
anime_ids = data['anime_ids']
operation_data = data.get('data', {})
# Validate action
valid_actions = ['update_status', 'delete', 'update_metadata', 'update_genres']
if action not in valid_actions:
raise ValidationError(f"Invalid action. Must be one of: {', '.join(valid_actions)}")
# Validate anime_ids
if not isinstance(anime_ids, list) or not anime_ids:
raise ValidationError("anime_ids must be a non-empty list")
if len(anime_ids) > 100:
raise ValidationError("Cannot operate on more than 100 anime at once")
# Validate anime IDs are integers
try:
anime_ids = [int(aid) for aid in anime_ids]
except ValueError:
raise ValidationError("All anime_ids must be valid integers")
# Perform bulk operation
successful_items = []
failed_items = []
for anime_id in anime_ids:
try:
if action == 'update_status':
if 'status' not in operation_data:
raise ValueError("Status is required for update_status action")
success = anime_repository.update_anime(anime_id, {'status': operation_data['status']})
if success:
successful_items.append({'anime_id': anime_id, 'action': 'status_updated'})
else:
failed_items.append({'anime_id': anime_id, 'error': 'Update failed'})
elif action == 'delete':
success = anime_repository.delete_anime(anime_id)
if success:
successful_items.append({'anime_id': anime_id, 'action': 'deleted'})
else:
failed_items.append({'anime_id': anime_id, 'error': 'Deletion failed'})
elif action == 'update_metadata':
success = anime_repository.update_anime(anime_id, operation_data)
if success:
successful_items.append({'anime_id': anime_id, 'action': 'metadata_updated'})
else:
failed_items.append({'anime_id': anime_id, 'error': 'Metadata update failed'})
except Exception as e:
failed_items.append({'anime_id': anime_id, 'error': str(e)})
# Create batch response
from ...shared.response_helpers import create_batch_response
return create_batch_response(
successful_items=successful_items,
failed_items=failed_items,
message=f"Bulk {action} operation completed"
)
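# Example request (illustrative):
#   POST /api/v1/anime/bulk
#   {"action": "update_status", "anime_ids": [1, 2, 3],
#    "data": {"status": "completed"}}
# The batch response reports successful_items and failed_items separately, so
# one failing ID does not abort the whole operation.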
@router.post('/rescan', response_model=RescanResponse)
async def rescan_anime_directory(
current_user: Dict = Depends(get_current_user),
series_app: SeriesApp = Depends(get_series_app)
) -> RescanResponse:
"""
Rescan the anime directory for new episodes and series.
Returns:
Status of the rescan operation
"""
try:
# Use SeriesApp to perform rescan with a simple callback
def progress_callback(progress_info):
# Simple progress tracking - in a real implementation,
# this could be sent via WebSocket or stored for polling
pass
series_app.ReScan(progress_callback)
return RescanResponse(
success=True,
message="Anime directory rescanned successfully",
total_series=len(series_app.series_list) if hasattr(series_app, 'series_list') else 0
)
except Exception as e:
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=f"Rescan failed: {str(e)}"
)
# Additional endpoints for legacy API compatibility
class AddSeriesRequest(BaseModel):
"""Request model for adding a new series."""
link: str = Field(..., min_length=1)
name: str = Field(..., min_length=1, max_length=255)
class AddSeriesResponse(BaseModel):
"""Response model for add series operation."""
status: str
message: str
class DownloadRequest(BaseModel):
"""Request model for downloading series."""
folders: List[str] = Field(..., min_items=1)
class DownloadResponse(BaseModel):
"""Response model for download operation."""
status: str
message: str
@router.post('/add_series', response_model=AddSeriesResponse)
async def add_series(
request_data: AddSeriesRequest,
current_user: Dict = Depends(get_current_user),
series_app: SeriesApp = Depends(get_series_app)
) -> AddSeriesResponse:
"""
Add a new series to the collection.
Args:
request_data: Contains link and name of the series to add
Returns:
Status of the add operation
"""
try:
# For now, just return success - actual implementation would use SeriesApp
# to add the series to the collection
return AddSeriesResponse(
status="success",
message=f"Series '{request_data.name}' added successfully"
)
except Exception as e:
return AddSeriesResponse(
status="error",
message=f"Failed to add series: {str(e)}"
)
@router.post('/download', response_model=DownloadResponse)
async def download_series(
request_data: DownloadRequest,
current_user: Dict = Depends(get_current_user),
series_app: SeriesApp = Depends(get_series_app)
) -> DownloadResponse:
"""
Start downloading selected series folders.
Args:
request_data: Contains list of folder names to download
Returns:
Status of the download operation
"""
try:
# For now, just return success - actual implementation would use SeriesApp
# to start downloads
folder_count = len(request_data.folders)
return DownloadResponse(
status="success",
message=f"Download started for {folder_count} series"
)
except Exception as e:
return DownloadResponse(
status="error",
message=f"Failed to start download: {str(e)}"
)
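# Illustrative usage sketch (not part of the original module): exercising the
# FastAPI endpoints above with a plain HTTP client. Host, port and the bearer
# token are assumptions.
if __name__ == "__main__":
    import requests

    BASE = "http://localhost:8000/api/v1/anime"
    headers = {"Authorization": "Bearer <token>"}  # auth scheme assumed

    # Search for a series (q must be at least 2 characters; results are paginated)
    resp = requests.get(f"{BASE}/search",
                        params={"q": "fullmetal", "per_page": 5}, headers=headers)
    print(resp.json()["pagination"])

    # Trigger a rescan of the configured anime directory
    print(requests.post(f"{BASE}/rescan", headers=headers).json())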


@@ -1,773 +0,0 @@
"""
Authentication API endpoints.
This module handles all authentication-related operations including:
- User authentication
- Session management
- Password management
- API key management
"""
from flask import Blueprint, request, session, jsonify
from typing import Dict, List, Any, Optional, Tuple
import logging
import hashlib
import secrets
import time
from datetime import datetime, timedelta
# Import shared utilities
try:
from src.server.web.controllers.shared.auth_decorators import require_auth, optional_auth
from src.server.web.controllers.shared.error_handlers import handle_api_errors
from src.server.web.controllers.shared.validators import (
validate_json_input, validate_query_params, is_valid_email, sanitize_string
)
from src.server.web.controllers.shared.response_helpers import (
create_success_response, create_error_response, format_user_data
)
except ImportError:
# Fallback imports for development
def require_auth(f): return f
def optional_auth(f): return f
def handle_api_errors(f): return f
def validate_json_input(**kwargs): return lambda f: f
def validate_query_params(**kwargs): return lambda f: f
def is_valid_email(email): return '@' in email
def sanitize_string(s): return str(s).strip()
def create_success_response(msg, code=200, data=None): return jsonify({'success': True, 'message': msg, 'data': data}), code
def create_error_response(msg, code=400, details=None): return jsonify({'error': msg, 'details': details}), code
    def format_user_data(data, include_sensitive=False): return data
# Import authentication components
try:
from src.data.user_manager import UserManager
from src.data.session_manager import SessionManager
from src.data.api_key_manager import APIKeyManager
except ImportError:
# Fallback for development
    class UserManager:
        def authenticate_user(self, username, password): return None
        def get_user_by_id(self, id): return None
        def get_user_by_username(self, username): return None
        def get_user_by_email(self, email): return None
        def create_user(self, **kwargs): return 1
        def update_user(self, id, **kwargs): return True
        def delete_user(self, id): return True
        def change_password(self, id, new_password): return True
        def reset_password(self, email): return 'reset_token'
        def create_password_reset_token(self, user_id): return 'reset_token'
        def verify_reset_token(self, token): return None
        def get_user_sessions(self, user_id): return []
        def get_user_activity(self, user_id, limit=50, offset=0): return []
    class SessionManager:
        def create_session(self, user_id, **kwargs): return 'session_token'
        def validate_session(self, token): return None
        def destroy_session(self, token): return True
        def destroy_all_sessions(self, user_id, **kwargs): return True
        def get_session_info(self, token): return None
        def update_session_activity(self, token): return True
    class APIKeyManager:
        def create_api_key(self, user_id, name, **kwargs): return {'id': 1, 'key': 'api_key', 'name': name}
        def get_user_api_keys(self, user_id): return []
        def revoke_api_key(self, key_id, user_id=None): return True
        def validate_api_key(self, key): return None
# Create blueprint
auth_bp = Blueprint('auth', __name__)
# Initialize managers
user_manager = UserManager()
session_manager = SessionManager()
api_key_manager = APIKeyManager()
logger = logging.getLogger(__name__)
@auth_bp.route('/auth/login', methods=['POST'])
@handle_api_errors
@validate_json_input(
required_fields=['username', 'password'],
optional_fields=['remember_me'],
field_types={'username': str, 'password': str, 'remember_me': bool}
)
def login() -> Tuple[Any, int]:
"""
Authenticate user and create session.
Request Body:
- username: Username or email
- password: User password
- remember_me: Extend session duration (optional)
Returns:
JSON response with authentication result
"""
data = request.get_json()
username = sanitize_string(data['username'])
password = data['password']
remember_me = data.get('remember_me', False)
try:
# Authenticate user
user = user_manager.authenticate_user(username, password)
if not user:
logger.warning(f"Failed login attempt for username: {username}")
return create_error_response("Invalid username or password", 401)
# Create session
session_token = session_manager.create_session(
user['id'],
extended=remember_me
)
# Set session data
session['user_id'] = user['id']
session['username'] = user['username']
session['session_token'] = session_token
session.permanent = remember_me
# Format user data (exclude sensitive information)
user_data = format_user_data(user, include_sensitive=False)
response_data = {
'user': user_data,
'session_token': session_token,
'expires_at': (datetime.now() + timedelta(days=30 if remember_me else 7)).isoformat()
}
logger.info(f"User {user['username']} (ID: {user['id']}) logged in successfully")
return create_success_response("Login successful", 200, response_data)
except Exception as e:
logger.error(f"Error during login for username {username}: {str(e)}")
return create_error_response("Login failed", 500)
@auth_bp.route('/auth/logout', methods=['POST'])
@require_auth
@handle_api_errors
def logout() -> Tuple[Any, int]:
"""
Logout user and destroy session.
Returns:
JSON response with logout result
"""
try:
# Get session token
session_token = session.get('session_token')
user_id = session.get('user_id')
if session_token:
# Destroy session in database
session_manager.destroy_session(session_token)
# Clear Flask session
session.clear()
logger.info(f"User ID {user_id} logged out successfully")
return create_success_response("Logout successful")
except Exception as e:
logger.error(f"Error during logout: {str(e)}")
return create_error_response("Logout failed", 500)
@auth_bp.route('/auth/register', methods=['POST'])
@handle_api_errors
@validate_json_input(
required_fields=['username', 'email', 'password'],
optional_fields=['full_name'],
field_types={'username': str, 'email': str, 'password': str, 'full_name': str}
)
def register() -> Tuple[Any, int]:
"""
Register new user account.
Request Body:
- username: Unique username
- email: User email address
- password: User password
- full_name: User's full name (optional)
Returns:
JSON response with registration result
"""
data = request.get_json()
username = sanitize_string(data['username'])
email = sanitize_string(data['email'])
password = data['password']
full_name = sanitize_string(data.get('full_name', ''))
# Validate input
if len(username) < 3:
return create_error_response("Username must be at least 3 characters long", 400)
if len(password) < 8:
return create_error_response("Password must be at least 8 characters long", 400)
if not is_valid_email(email):
return create_error_response("Invalid email address", 400)
try:
# Check if username already exists
existing_user = user_manager.get_user_by_username(username)
if existing_user:
return create_error_response("Username already exists", 409)
# Check if email already exists
existing_email = user_manager.get_user_by_email(email)
if existing_email:
return create_error_response("Email already registered", 409)
# Create user
user_id = user_manager.create_user(
username=username,
email=email,
password=password,
full_name=full_name
)
# Get created user
user = user_manager.get_user_by_id(user_id)
user_data = format_user_data(user, include_sensitive=False)
logger.info(f"New user registered: {username} (ID: {user_id})")
return create_success_response("Registration successful", 201, user_data)
except Exception as e:
logger.error(f"Error during registration for username {username}: {str(e)}")
return create_error_response("Registration failed", 500)
@auth_bp.route('/auth/me', methods=['GET'])
@require_auth
@handle_api_errors
def get_current_user() -> Tuple[Any, int]:
"""
Get current user information.
Returns:
JSON response with current user data
"""
try:
user_id = session.get('user_id')
user = user_manager.get_user_by_id(user_id)
if not user:
return create_error_response("User not found", 404)
user_data = format_user_data(user, include_sensitive=False)
return create_success_response("User information retrieved", 200, user_data)
except Exception as e:
logger.error(f"Error getting current user: {str(e)}")
return create_error_response("Failed to get user information", 500)
@auth_bp.route('/auth/me', methods=['PUT'])
@require_auth
@handle_api_errors
@validate_json_input(
optional_fields=['email', 'full_name'],
field_types={'email': str, 'full_name': str}
)
def update_current_user() -> Tuple[Any, int]:
"""
Update current user information.
Request Body:
- email: New email address (optional)
- full_name: New full name (optional)
Returns:
JSON response with update result
"""
data = request.get_json()
user_id = session.get('user_id')
# Validate email if provided
if 'email' in data and not is_valid_email(data['email']):
return create_error_response("Invalid email address", 400)
try:
# Check if email is already taken by another user
if 'email' in data:
existing_user = user_manager.get_user_by_email(data['email'])
if existing_user and existing_user['id'] != user_id:
return create_error_response("Email already registered", 409)
# Update user
success = user_manager.update_user(user_id, **data)
if success:
# Get updated user
user = user_manager.get_user_by_id(user_id)
user_data = format_user_data(user, include_sensitive=False)
logger.info(f"User {user_id} updated their profile")
return create_success_response("Profile updated successfully", 200, user_data)
else:
return create_error_response("Failed to update profile", 500)
except Exception as e:
logger.error(f"Error updating user {user_id}: {str(e)}")
return create_error_response("Failed to update profile", 500)
@auth_bp.route('/auth/change-password', methods=['PUT'])
@require_auth
@handle_api_errors
@validate_json_input(
required_fields=['current_password', 'new_password'],
field_types={'current_password': str, 'new_password': str}
)
def change_password() -> Tuple[Any, int]:
"""
Change user password.
Request Body:
- current_password: Current password
- new_password: New password
Returns:
JSON response with change result
"""
data = request.get_json()
user_id = session.get('user_id')
current_password = data['current_password']
new_password = data['new_password']
# Validate new password
if len(new_password) < 8:
return create_error_response("New password must be at least 8 characters long", 400)
try:
# Get user
user = user_manager.get_user_by_id(user_id)
# Verify current password
authenticated_user = user_manager.authenticate_user(user['username'], current_password)
if not authenticated_user:
return create_error_response("Current password is incorrect", 401)
# Change password
success = user_manager.change_password(user_id, new_password)
if success:
logger.info(f"User {user_id} changed their password")
return create_success_response("Password changed successfully")
else:
return create_error_response("Failed to change password", 500)
except Exception as e:
logger.error(f"Error changing password for user {user_id}: {str(e)}")
return create_error_response("Failed to change password", 500)
@auth_bp.route('/auth/forgot-password', methods=['POST'])
@handle_api_errors
@validate_json_input(
required_fields=['email'],
field_types={'email': str}
)
def forgot_password() -> Tuple[Any, int]:
"""
Request password reset.
Request Body:
- email: User email address
Returns:
JSON response with reset result
"""
data = request.get_json()
email = sanitize_string(data['email'])
if not is_valid_email(email):
return create_error_response("Invalid email address", 400)
try:
# Check if user exists
user = user_manager.get_user_by_email(email)
if user:
# Generate reset token
reset_token = user_manager.reset_password(email)
# In a real application, you would send this token via email
logger.info(f"Password reset requested for user {user['id']} (email: {email})")
# For security, always return success even if email doesn't exist
return create_success_response("If the email exists, a reset link has been sent")
else:
# For security, don't reveal that email doesn't exist
logger.warning(f"Password reset requested for non-existent email: {email}")
return create_success_response("If the email exists, a reset link has been sent")
except Exception as e:
logger.error(f"Error processing password reset for email {email}: {str(e)}")
return create_error_response("Failed to process password reset", 500)
@auth_bp.route('/auth/reset-password', methods=['POST'])
@handle_api_errors
@validate_json_input(
required_fields=['token', 'new_password'],
field_types={'token': str, 'new_password': str}
)
def reset_password() -> Tuple[Any, int]:
"""
Reset password using token.
Request Body:
- token: Password reset token
- new_password: New password
Returns:
JSON response with reset result
"""
data = request.get_json()
token = data['token']
new_password = data['new_password']
# Validate new password
if len(new_password) < 8:
return create_error_response("New password must be at least 8 characters long", 400)
try:
# Verify reset token
user = user_manager.verify_reset_token(token)
if not user:
return create_error_response("Invalid or expired reset token", 400)
# Change password
success = user_manager.change_password(user['id'], new_password)
if success:
logger.info(f"Password reset completed for user {user['id']}")
return create_success_response("Password reset successfully")
else:
return create_error_response("Failed to reset password", 500)
except Exception as e:
logger.error(f"Error resetting password with token: {str(e)}")
return create_error_response("Failed to reset password", 500)
@auth_bp.route('/auth/sessions', methods=['GET'])
@require_auth
@handle_api_errors
def get_user_sessions() -> Tuple[Any, int]:
"""
Get user's active sessions.
Returns:
JSON response with user sessions
"""
try:
user_id = session.get('user_id')
sessions = user_manager.get_user_sessions(user_id)
return create_success_response("Sessions retrieved successfully", 200, sessions)
except Exception as e:
logger.error(f"Error getting user sessions: {str(e)}")
return create_error_response("Failed to get sessions", 500)
@auth_bp.route('/auth/sessions', methods=['DELETE'])
@require_auth
@handle_api_errors
def destroy_all_sessions() -> Tuple[Any, int]:
"""
Destroy all user sessions except current one.
Returns:
JSON response with operation result
"""
try:
user_id = session.get('user_id')
current_token = session.get('session_token')
# Destroy all sessions except current
success = session_manager.destroy_all_sessions(user_id, except_token=current_token)
if success:
logger.info(f"All sessions destroyed for user {user_id}")
return create_success_response("All other sessions destroyed successfully")
else:
return create_error_response("Failed to destroy sessions", 500)
except Exception as e:
logger.error(f"Error destroying sessions: {str(e)}")
return create_error_response("Failed to destroy sessions", 500)
@auth_bp.route('/auth/api-keys', methods=['GET'])
@require_auth
@handle_api_errors
def get_api_keys() -> Tuple[Any, int]:
"""
Get user's API keys.
Returns:
JSON response with API keys
"""
try:
user_id = session.get('user_id')
api_keys = api_key_manager.get_user_api_keys(user_id)
return create_success_response("API keys retrieved successfully", 200, api_keys)
except Exception as e:
logger.error(f"Error getting API keys: {str(e)}")
return create_error_response("Failed to get API keys", 500)
@auth_bp.route('/auth/api-keys', methods=['POST'])
@require_auth
@handle_api_errors
@validate_json_input(
required_fields=['name'],
optional_fields=['description'],
field_types={'name': str, 'description': str}
)
def create_api_key() -> Tuple[Any, int]:
"""
Create new API key.
Request Body:
- name: API key name
- description: API key description (optional)
Returns:
JSON response with created API key
"""
data = request.get_json()
user_id = session.get('user_id')
name = sanitize_string(data['name'])
description = sanitize_string(data.get('description', ''))
try:
# Create API key
api_key = api_key_manager.create_api_key(
user_id=user_id,
name=name,
description=description
)
logger.info(f"API key created for user {user_id}: {name}")
return create_success_response("API key created successfully", 201, api_key)
except Exception as e:
logger.error(f"Error creating API key for user {user_id}: {str(e)}")
return create_error_response("Failed to create API key", 500)
@auth_bp.route('/auth/api-keys/<int:key_id>', methods=['DELETE'])
@require_auth
@handle_api_errors
def revoke_api_key(key_id: int) -> Tuple[Any, int]:
"""
Revoke API key.
Args:
key_id: API key ID
Returns:
JSON response with revocation result
"""
try:
user_id = session.get('user_id')
# Verify key belongs to user and revoke
success = api_key_manager.revoke_api_key(key_id, user_id)
if success:
logger.info(f"API key {key_id} revoked by user {user_id}")
return create_success_response("API key revoked successfully")
else:
return create_error_response("API key not found or access denied", 404)
except Exception as e:
logger.error(f"Error revoking API key {key_id}: {str(e)}")
return create_error_response("Failed to revoke API key", 500)
@auth_bp.route('/auth/password-reset', methods=['POST'])
@handle_api_errors
@validate_json_input(
required_fields=['email'],
field_types={'email': str}
)
def request_password_reset() -> Tuple[Any, int]:
"""
Request password reset for user email.
Request Body:
- email: User email address
Returns:
JSON response with password reset request result
"""
data = request.get_json()
email = sanitize_string(data['email'])
try:
# Validate email format
if not is_valid_email(email):
return create_error_response("Invalid email format", 400)
# Check if user exists
user = user_manager.get_user_by_email(email)
if not user:
# Don't reveal if email exists or not for security
logger.warning(f"Password reset requested for non-existent email: {email}")
return create_success_response("If the email exists, a password reset link has been sent")
# Generate reset token
reset_token = user_manager.create_password_reset_token(user['id'])
# In a real implementation, you would send an email here
# For now, we'll just log it and return success
logger.info(f"Password reset token generated for user {user['id']}: {reset_token}")
return create_success_response("If the email exists, a password reset link has been sent")
except Exception as e:
logger.error(f"Error during password reset request for {email}: {str(e)}")
return create_error_response("Failed to process password reset request", 500)
@auth_bp.route('/auth/password-reset/confirm', methods=['POST'])
@handle_api_errors
@validate_json_input(
required_fields=['token', 'new_password'],
field_types={'token': str, 'new_password': str}
)
def confirm_password_reset() -> Tuple[Any, int]:
"""
Confirm password reset with token.
Request Body:
- token: Password reset token
- new_password: New password
Returns:
JSON response with password reset confirmation result
"""
data = request.get_json()
token = data['token']
new_password = data['new_password']
try:
# Validate password strength
if len(new_password) < 8:
return create_error_response("Password must be at least 8 characters long", 400)
# Verify reset token
user_id = user_manager.verify_reset_token(token)
if not user_id:
return create_error_response("Invalid or expired reset token", 400)
# Update password
success = user_manager.change_password(user_id, new_password)
if not success:
return create_error_response("Failed to update password", 500)
# Invalidate all existing sessions for security
session_manager.destroy_all_sessions(user_id)
logger.info(f"Password reset completed for user ID {user_id}")
return create_success_response("Password has been successfully reset")
except Exception as e:
logger.error(f"Error during password reset confirmation: {str(e)}")
return create_error_response("Failed to reset password", 500)
@auth_bp.route('/auth/refresh', methods=['POST'])
@handle_api_errors
def refresh_token() -> Tuple[Any, int]:
"""
Refresh authentication token.
Returns:
JSON response with new token
"""
try:
# Get current session token
session_token = session.get('session_token')
if not session_token:
return create_error_response("No active session found", 401)
# Validate current session
session_info = session_manager.get_session_info(session_token)
if not session_info or session_info.get('expired', True):
session.clear()
return create_error_response("Session expired", 401)
# Create new session token
user_id = session_info['user_id']
new_session_token = session_manager.create_session(user_id)
# Destroy old session
session_manager.destroy_session(session_token)
# Update session data
session['session_token'] = new_session_token
session_manager.update_session_activity(new_session_token)
# Get user data
user = user_manager.get_user_by_id(user_id)
user_data = format_user_data(user, include_sensitive=False)
response_data = {
'user': user_data,
'session_token': new_session_token,
'expires_at': (datetime.now() + timedelta(days=7)).isoformat()
}
logger.info(f"Token refreshed for user ID {user_id}")
return create_success_response("Token refreshed successfully", 200, response_data)
except Exception as e:
logger.error(f"Error during token refresh: {str(e)}")
return create_error_response("Failed to refresh token", 500)
@auth_bp.route('/auth/activity', methods=['GET'])
@require_auth
@handle_api_errors
@validate_query_params(
allowed_params=['limit', 'offset'],
param_types={'limit': int, 'offset': int}
)
def get_user_activity() -> Tuple[Any, int]:
"""
Get user activity log.
Query Parameters:
- limit: Number of activities to return (default: 50, max: 200)
- offset: Number of activities to skip (default: 0)
Returns:
JSON response with user activity
"""
limit = min(request.args.get('limit', 50, type=int), 200)
offset = request.args.get('offset', 0, type=int)
try:
user_id = session.get('user_id')
activity = user_manager.get_user_activity(user_id, limit=limit, offset=offset)
return create_success_response("User activity retrieved successfully", 200, activity)
except Exception as e:
logger.error(f"Error getting user activity: {str(e)}")
return create_error_response("Failed to get user activity", 500)
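# Illustrative usage sketch (not part of the original module): a login/refresh
# round-trip against the blueprint above. Host, port and the mount point of
# auth_bp are assumptions; a requests.Session keeps the Flask session cookie.
if __name__ == "__main__":
    import requests

    s = requests.Session()
    r = s.post("http://localhost:5000/auth/login",
               json={"username": "alice", "password": "secret", "remember_me": True})
    print(r.json())

    # Rotate the session token, then confirm the authenticated user
    s.post("http://localhost:5000/auth/refresh")
    print(s.get("http://localhost:5000/auth/me").json())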


@@ -1,649 +0,0 @@
"""
Backup Management API Endpoints
This module provides REST API endpoints for database backup operations,
including backup creation, restoration, and cleanup functionality.
"""
from flask import Blueprint, request, send_file
from typing import Dict, List, Any, Optional
import os
from datetime import datetime
from ...shared.auth_decorators import require_auth, optional_auth
from ...shared.error_handlers import handle_api_errors, APIException, NotFoundError, ValidationError
from ...shared.validators import validate_json_input, validate_id_parameter, validate_pagination_params
from ...shared.response_helpers import (
create_success_response, create_paginated_response, extract_pagination_params
)
# Import backup components (these imports would need to be adjusted based on actual structure)
try:
from database_manager import backup_manager, BackupInfo
except ImportError:
# Fallback for development/testing
backup_manager = None
BackupInfo = None
# Blueprint for backup management endpoints
backups_bp = Blueprint('backups', __name__, url_prefix='/api/v1/backups')
@backups_bp.route('', methods=['GET'])
@handle_api_errors
@validate_pagination_params
@optional_auth
def list_backups() -> Dict[str, Any]:
"""
List all available backups with optional filtering.
Query Parameters:
- backup_type: Filter by backup type (full, metadata_only, incremental)
- date_from: Filter from date (ISO format)
- date_to: Filter to date (ISO format)
- min_size_mb: Minimum backup size in MB
- max_size_mb: Maximum backup size in MB
- page: Page number (default: 1)
- per_page: Items per page (default: 50, max: 1000)
Returns:
Paginated list of backups
"""
if not backup_manager:
raise APIException("Backup manager not available", 503)
# Extract filters
backup_type_filter = request.args.get('backup_type')
date_from = request.args.get('date_from')
date_to = request.args.get('date_to')
min_size_mb = request.args.get('min_size_mb')
max_size_mb = request.args.get('max_size_mb')
# Validate filters
valid_types = ['full', 'metadata_only', 'incremental']
if backup_type_filter and backup_type_filter not in valid_types:
raise ValidationError(f"backup_type must be one of: {', '.join(valid_types)}")
# Validate dates
if date_from:
try:
datetime.fromisoformat(date_from.replace('Z', '+00:00'))
except ValueError:
raise ValidationError("date_from must be in ISO format")
if date_to:
try:
datetime.fromisoformat(date_to.replace('Z', '+00:00'))
except ValueError:
raise ValidationError("date_to must be in ISO format")
# Validate size filters
if min_size_mb:
try:
min_size_mb = float(min_size_mb)
if min_size_mb < 0:
raise ValueError()
except ValueError:
raise ValidationError("min_size_mb must be a non-negative number")
if max_size_mb:
try:
max_size_mb = float(max_size_mb)
if max_size_mb < 0:
raise ValueError()
except ValueError:
raise ValidationError("max_size_mb must be a non-negative number")
# Get pagination parameters
page, per_page = extract_pagination_params()
# Get backups with filters
backups = backup_manager.list_backups(
backup_type=backup_type_filter,
date_from=date_from,
date_to=date_to,
min_size_bytes=int(min_size_mb * 1024 * 1024) if min_size_mb else None,
max_size_bytes=int(max_size_mb * 1024 * 1024) if max_size_mb else None
)
# Format backup data
backup_data = []
for backup in backups:
backup_data.append({
'backup_id': backup.backup_id,
'backup_type': backup.backup_type,
'created_at': backup.created_at.isoformat(),
'size_mb': round(backup.size_bytes / (1024 * 1024), 2),
'size_bytes': backup.size_bytes,
'description': backup.description,
'tables_included': backup.tables_included,
'backup_path': backup.backup_path,
'is_compressed': backup.is_compressed,
'checksum': backup.checksum,
'status': backup.status
})
# Apply pagination
total = len(backup_data)
start_idx = (page - 1) * per_page
end_idx = start_idx + per_page
paginated_backups = backup_data[start_idx:end_idx]
return create_paginated_response(
data=paginated_backups,
page=page,
per_page=per_page,
total=total,
endpoint='backups.list_backups'
)
@backups_bp.route('/<backup_id>', methods=['GET'])
@handle_api_errors
@validate_id_parameter('backup_id')
@optional_auth
def get_backup(backup_id: str) -> Dict[str, Any]:
"""
Get detailed information about a specific backup.
Args:
backup_id: Unique identifier for the backup
Returns:
Detailed backup information
"""
if not backup_manager:
raise APIException("Backup manager not available", 503)
backup = backup_manager.get_backup_by_id(backup_id)
if not backup:
raise NotFoundError("Backup not found")
# Get additional details
backup_details = {
'backup_id': backup.backup_id,
'backup_type': backup.backup_type,
'created_at': backup.created_at.isoformat(),
'size_mb': round(backup.size_bytes / (1024 * 1024), 2),
'size_bytes': backup.size_bytes,
'description': backup.description,
'tables_included': backup.tables_included,
'backup_path': backup.backup_path,
'is_compressed': backup.is_compressed,
'checksum': backup.checksum,
'status': backup.status,
'creation_duration_seconds': backup.creation_duration_seconds,
'file_exists': os.path.exists(backup.backup_path),
'validation_status': backup_manager.validate_backup(backup_id)
}
return create_success_response(backup_details)
@backups_bp.route('', methods=['POST'])
@handle_api_errors
@validate_json_input(
required_fields=['backup_type'],
optional_fields=['description', 'tables', 'compress', 'encryption_key'],
field_types={
'backup_type': str,
'description': str,
'tables': list,
'compress': bool,
'encryption_key': str
}
)
@require_auth
def create_backup() -> Dict[str, Any]:
"""
Create a new database backup.
    Required Fields:
    - backup_type: Type of backup (full, metadata_only, incremental, selective)
Optional Fields:
- description: Backup description
- tables: Specific tables to backup (for selective backups)
- compress: Whether to compress the backup (default: true)
- encryption_key: Key for backup encryption
Returns:
Created backup information
"""
if not backup_manager:
raise APIException("Backup manager not available", 503)
data = request.get_json()
backup_type = data['backup_type']
    # Validate backup type
    valid_types = ['full', 'metadata_only', 'incremental', 'selective']
    if backup_type not in valid_types:
        raise ValidationError(f"backup_type must be one of: {', '.join(valid_types)}")
description = data.get('description')
tables = data.get('tables')
compress = data.get('compress', True)
encryption_key = data.get('encryption_key')
# Validate tables if provided
if tables:
if not isinstance(tables, list) or not all(isinstance(t, str) for t in tables):
raise ValidationError("tables must be a list of table names")
# Validate table names exist
valid_tables = backup_manager.get_available_tables()
invalid_tables = [t for t in tables if t not in valid_tables]
if invalid_tables:
raise ValidationError(f"Invalid tables: {', '.join(invalid_tables)}")
try:
# Create backup based on type
if backup_type == 'full':
backup_info = backup_manager.create_full_backup(
description=description,
compress=compress,
encryption_key=encryption_key
)
elif backup_type == 'metadata_only':
backup_info = backup_manager.create_metadata_backup(
description=description,
compress=compress,
encryption_key=encryption_key
)
elif backup_type == 'incremental':
backup_info = backup_manager.create_incremental_backup(
description=description,
compress=compress,
encryption_key=encryption_key
)
        else:  # backup_type == 'selective'
            if not tables:
                raise ValidationError("tables is required for selective backups")
            backup_info = backup_manager.create_selective_backup(
tables=tables,
description=description,
compress=compress,
encryption_key=encryption_key
)
if not backup_info:
raise APIException("Failed to create backup", 500)
backup_data = {
'backup_id': backup_info.backup_id,
'backup_type': backup_info.backup_type,
'size_mb': round(backup_info.size_bytes / (1024 * 1024), 2),
'created_at': backup_info.created_at.isoformat(),
'description': backup_info.description,
'tables_included': backup_info.tables_included,
'is_compressed': backup_info.is_compressed,
'checksum': backup_info.checksum
}
return create_success_response(
data=backup_data,
message=f"{backup_type.title()} backup created successfully",
status_code=201
)
except Exception as e:
raise APIException(f"Failed to create backup: {str(e)}", 500)
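# Example request body (illustrative; table names are placeholders):
#   POST /api/v1/backups
#   {"backup_type": "selective", "tables": ["anime", "episodes"],
#    "description": "pre-migration snapshot", "compress": true}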
@backups_bp.route('/<backup_id>/restore', methods=['POST'])
@handle_api_errors
@validate_id_parameter('backup_id')
@validate_json_input(
optional_fields=['confirm', 'tables', 'target_database', 'restore_data', 'restore_schema'],
field_types={
'confirm': bool,
'tables': list,
'target_database': str,
'restore_data': bool,
'restore_schema': bool
}
)
@require_auth
def restore_backup(backup_id: str) -> Dict[str, Any]:
"""
Restore from a backup.
Args:
backup_id: Unique identifier for the backup
Optional Fields:
- confirm: Confirmation flag (required for production)
- tables: Specific tables to restore
- target_database: Target database path (for restore to different location)
- restore_data: Whether to restore data (default: true)
- restore_schema: Whether to restore schema (default: true)
Returns:
Restoration results
"""
if not backup_manager:
raise APIException("Backup manager not available", 503)
data = request.get_json() or {}
# Check if backup exists
backup = backup_manager.get_backup_by_id(backup_id)
if not backup:
raise NotFoundError("Backup not found")
# Validate backup file exists
if not os.path.exists(backup.backup_path):
raise APIException("Backup file not found", 404)
# Require confirmation for production environments
confirm = data.get('confirm', False)
if not confirm:
# Check if this is a production environment
from config import config
if hasattr(config, 'environment') and config.environment == 'production':
raise ValidationError("Confirmation required for restore operation in production")
tables = data.get('tables')
target_database = data.get('target_database')
restore_data = data.get('restore_data', True)
restore_schema = data.get('restore_schema', True)
# Validate tables if provided
if tables:
if not isinstance(tables, list) or not all(isinstance(t, str) for t in tables):
raise ValidationError("tables must be a list of table names")
try:
# Perform restoration
restore_result = backup_manager.restore_backup(
backup_id=backup_id,
tables=tables,
target_database=target_database,
restore_data=restore_data,
restore_schema=restore_schema
)
if restore_result.success:
return create_success_response(
data={
'backup_id': backup_id,
'restore_time': restore_result.restore_time.isoformat(),
'restored_tables': restore_result.restored_tables,
'restored_records': restore_result.restored_records,
'duration_seconds': restore_result.duration_seconds
},
message="Backup restored successfully"
)
else:
raise APIException(f"Restore failed: {restore_result.error_message}", 500)
except Exception as e:
raise APIException(f"Failed to restore backup: {str(e)}", 500)
@backups_bp.route('/<backup_id>/download', methods=['GET'])
@handle_api_errors
@validate_id_parameter('backup_id')
@require_auth
def download_backup(backup_id: str):
"""
Download a backup file.
Args:
backup_id: Unique identifier for the backup
Returns:
Backup file download
"""
if not backup_manager:
raise APIException("Backup manager not available", 503)
# Check if backup exists
backup = backup_manager.get_backup_by_id(backup_id)
if not backup:
raise NotFoundError("Backup not found")
# Check if backup file exists
if not os.path.exists(backup.backup_path):
raise NotFoundError("Backup file not found")
# Generate filename
timestamp = backup.created_at.strftime('%Y%m%d_%H%M%S')
filename = f"backup_{backup.backup_type}_{timestamp}_{backup_id[:8]}.db"
if backup.is_compressed:
filename += ".gz"
try:
return send_file(
backup.backup_path,
as_attachment=True,
download_name=filename,
mimetype='application/octet-stream'
)
except Exception as e:
raise APIException(f"Failed to download backup: {str(e)}", 500)
@backups_bp.route('/<backup_id>/validate', methods=['POST'])
@handle_api_errors
@validate_id_parameter('backup_id')
@optional_auth
def validate_backup(backup_id: str) -> Dict[str, Any]:
"""
Validate a backup file integrity.
Args:
backup_id: Unique identifier for the backup
Returns:
Validation results
"""
if not backup_manager:
raise APIException("Backup manager not available", 503)
# Check if backup exists
backup = backup_manager.get_backup_by_id(backup_id)
if not backup:
raise NotFoundError("Backup not found")
try:
validation_result = backup_manager.validate_backup(backup_id)
return create_success_response(
data={
'backup_id': backup_id,
'is_valid': validation_result.is_valid,
'file_exists': validation_result.file_exists,
'checksum_valid': validation_result.checksum_valid,
'database_readable': validation_result.database_readable,
'tables_count': validation_result.tables_count,
'records_count': validation_result.records_count,
'validation_errors': validation_result.errors,
'validated_at': datetime.utcnow().isoformat()
}
)
except Exception as e:
raise APIException(f"Failed to validate backup: {str(e)}", 500)
@backups_bp.route('/<backup_id>', methods=['DELETE'])
@handle_api_errors
@validate_id_parameter('backup_id')
@require_auth
def delete_backup(backup_id: str) -> Dict[str, Any]:
"""
Delete a backup.
Args:
backup_id: Unique identifier for the backup
Query Parameters:
        - delete_file: Set to 'false' to keep the backup file on disk (default: 'true')
Returns:
Deletion confirmation
"""
if not backup_manager:
raise APIException("Backup manager not available", 503)
# Check if backup exists
backup = backup_manager.get_backup_by_id(backup_id)
if not backup:
raise NotFoundError("Backup not found")
delete_file = request.args.get('delete_file', 'true').lower() == 'true'
try:
success = backup_manager.delete_backup(backup_id, delete_file=delete_file)
if success:
message = f"Backup {backup_id} deleted successfully"
if delete_file:
message += " (including file)"
return create_success_response(message=message)
else:
raise APIException("Failed to delete backup", 500)
    except APIException:
        raise
    except Exception as e:
        raise APIException(f"Failed to delete backup: {str(e)}", 500)
@backups_bp.route('/cleanup', methods=['POST'])
@handle_api_errors
@validate_json_input(
optional_fields=['keep_days', 'keep_count', 'backup_types', 'dry_run'],
field_types={
'keep_days': int,
'keep_count': int,
'backup_types': list,
'dry_run': bool
}
)
@require_auth
def cleanup_backups() -> Dict[str, Any]:
"""
Clean up old backup files based on retention policy.
Optional Fields:
- keep_days: Keep backups newer than this many days (default: 30)
- keep_count: Keep at least this many backups (default: 10)
- backup_types: Types of backups to clean up (default: all)
- dry_run: Preview what would be deleted without actually deleting
Returns:
Cleanup results
"""
if not backup_manager:
raise APIException("Backup manager not available", 503)
data = request.get_json() or {}
keep_days = data.get('keep_days', 30)
keep_count = data.get('keep_count', 10)
backup_types = data.get('backup_types', ['full', 'metadata_only', 'incremental'])
dry_run = data.get('dry_run', False)
# Validate parameters
if keep_days < 1:
raise ValidationError("keep_days must be at least 1")
if keep_count < 1:
raise ValidationError("keep_count must be at least 1")
valid_types = ['full', 'metadata_only', 'incremental']
if not all(bt in valid_types for bt in backup_types):
raise ValidationError(f"backup_types must contain only: {', '.join(valid_types)}")
try:
cleanup_result = backup_manager.cleanup_old_backups(
keep_days=keep_days,
keep_count=keep_count,
backup_types=backup_types,
dry_run=dry_run
)
return create_success_response(
data={
'dry_run': dry_run,
'deleted_count': cleanup_result.deleted_count,
'deleted_backups': cleanup_result.deleted_backups,
'space_freed_mb': round(cleanup_result.space_freed_bytes / (1024 * 1024), 2),
'kept_count': cleanup_result.kept_count,
'retention_policy': {
'keep_days': keep_days,
'keep_count': keep_count,
'backup_types': backup_types
}
},
message=f"Backup cleanup {'simulated' if dry_run else 'completed'}"
)
except Exception as e:
raise APIException(f"Failed to cleanup backups: {str(e)}", 500)
@backups_bp.route('/schedule', methods=['GET'])
@handle_api_errors
@optional_auth
def get_backup_schedule() -> Dict[str, Any]:
"""
Get current backup schedule configuration.
Returns:
Backup schedule information
"""
if not backup_manager:
raise APIException("Backup manager not available", 503)
try:
schedule_config = backup_manager.get_backup_schedule()
return create_success_response(data=schedule_config)
except Exception as e:
raise APIException(f"Failed to get backup schedule: {str(e)}", 500)
@backups_bp.route('/schedule', methods=['PUT'])
@handle_api_errors
@validate_json_input(
optional_fields=['enabled', 'full_backup_interval', 'incremental_interval', 'retention_days', 'cleanup_enabled'],
field_types={
'enabled': bool,
'full_backup_interval': str,
'incremental_interval': str,
'retention_days': int,
'cleanup_enabled': bool
}
)
@require_auth
def update_backup_schedule() -> Dict[str, Any]:
"""
Update backup schedule configuration.
Optional Fields:
- enabled: Enable/disable automatic backups
- full_backup_interval: Cron expression for full backups
- incremental_interval: Cron expression for incremental backups
- retention_days: Number of days to keep backups
- cleanup_enabled: Enable/disable automatic cleanup
Returns:
Updated schedule configuration
"""
if not backup_manager:
raise APIException("Backup manager not available", 503)
data = request.get_json()
try:
updated_config = backup_manager.update_backup_schedule(data)
return create_success_response(
data=updated_config,
message="Backup schedule updated successfully"
)
except Exception as e:
raise APIException(f"Failed to update backup schedule: {str(e)}", 500)

View File

@@ -1,341 +0,0 @@
"""
Bulk Operations API endpoints
Provides REST API for bulk series management operations.
"""
from flask import Blueprint, request, jsonify, send_file
import asyncio
import threading
from typing import Dict, Any
import uuid
import io
from bulk_operations import bulk_operations_manager
bulk_api_bp = Blueprint('bulk_api', __name__, url_prefix='/api/bulk')
# Store active operations
active_operations = {}
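# Illustrative entry layout, as built by the endpoints below (values are examples only):
#   active_operations[task_id] = {
#       'id': <client-supplied operation_id>, 'type': 'download', 'status': 'running',
#       'progress': {'completed': 0, 'total': <len(series_ids)>, 'message': 'Starting download...'}
#   }
# Note that this dict is shared between the request thread and the worker threads without a lock.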
@bulk_api_bp.route('/download', methods=['POST'])
def bulk_download():
"""Start bulk download operation."""
try:
data = request.get_json()
operation_id = data.get('operation_id')
series_ids = data.get('series_ids', [])
if not series_ids:
return jsonify({'success': False, 'error': 'No series IDs provided'}), 400
# Create task ID
task_id = str(uuid.uuid4())
# Store operation info
active_operations[task_id] = {
'id': operation_id,
'type': 'download',
'status': 'running',
'progress': {
'completed': 0,
'total': len(series_ids),
'message': 'Starting download...'
}
}
# Start async operation
def run_bulk_download():
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
result = loop.run_until_complete(
bulk_operations_manager.bulk_download(series_ids, operation_id)
)
active_operations[task_id]['status'] = 'completed'
active_operations[task_id]['result'] = result
except Exception as e:
active_operations[task_id]['status'] = 'failed'
active_operations[task_id]['error'] = str(e)
finally:
loop.close()
thread = threading.Thread(target=run_bulk_download)
thread.start()
return jsonify({'success': True, 'task_id': task_id})
except Exception as e:
return jsonify({'success': False, 'error': str(e)}), 500
@bulk_api_bp.route('/update', methods=['POST'])
def bulk_update():
"""Start bulk update operation."""
try:
data = request.get_json()
operation_id = data.get('operation_id')
series_ids = data.get('series_ids', [])
if not series_ids:
return jsonify({'success': False, 'error': 'No series IDs provided'}), 400
task_id = str(uuid.uuid4())
active_operations[task_id] = {
'id': operation_id,
'type': 'update',
'status': 'running',
'progress': {
'completed': 0,
'total': len(series_ids),
'message': 'Starting update...'
}
}
def run_bulk_update():
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
result = loop.run_until_complete(
bulk_operations_manager.bulk_update(series_ids, operation_id)
)
active_operations[task_id]['status'] = 'completed'
active_operations[task_id]['result'] = result
except Exception as e:
active_operations[task_id]['status'] = 'failed'
active_operations[task_id]['error'] = str(e)
finally:
loop.close()
thread = threading.Thread(target=run_bulk_update)
thread.start()
return jsonify({'success': True, 'task_id': task_id})
except Exception as e:
return jsonify({'success': False, 'error': str(e)}), 500
@bulk_api_bp.route('/organize', methods=['POST'])
def bulk_organize():
"""Start bulk organize operation."""
try:
data = request.get_json()
operation_id = data.get('operation_id')
series_ids = data.get('series_ids', [])
options = data.get('options', {})
if not series_ids:
return jsonify({'success': False, 'error': 'No series IDs provided'}), 400
task_id = str(uuid.uuid4())
active_operations[task_id] = {
'id': operation_id,
'type': 'organize',
'status': 'running',
'progress': {
'completed': 0,
'total': len(series_ids),
'message': 'Starting organization...'
}
}
def run_bulk_organize():
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
result = loop.run_until_complete(
bulk_operations_manager.bulk_organize(series_ids, options, operation_id)
)
active_operations[task_id]['status'] = 'completed'
active_operations[task_id]['result'] = result
except Exception as e:
active_operations[task_id]['status'] = 'failed'
active_operations[task_id]['error'] = str(e)
finally:
loop.close()
thread = threading.Thread(target=run_bulk_organize)
thread.start()
return jsonify({'success': True, 'task_id': task_id})
except Exception as e:
return jsonify({'success': False, 'error': str(e)}), 500
@bulk_api_bp.route('/delete', methods=['DELETE'])
def bulk_delete():
"""Start bulk delete operation."""
try:
data = request.get_json()
operation_id = data.get('operation_id')
series_ids = data.get('series_ids', [])
if not series_ids:
return jsonify({'success': False, 'error': 'No series IDs provided'}), 400
task_id = str(uuid.uuid4())
active_operations[task_id] = {
'id': operation_id,
'type': 'delete',
'status': 'running',
'progress': {
'completed': 0,
'total': len(series_ids),
'message': 'Starting deletion...'
}
}
def run_bulk_delete():
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
result = loop.run_until_complete(
bulk_operations_manager.bulk_delete(series_ids, operation_id)
)
active_operations[task_id]['status'] = 'completed'
active_operations[task_id]['result'] = result
except Exception as e:
active_operations[task_id]['status'] = 'failed'
active_operations[task_id]['error'] = str(e)
finally:
loop.close()
thread = threading.Thread(target=run_bulk_delete)
thread.start()
return jsonify({'success': True, 'task_id': task_id})
except Exception as e:
return jsonify({'success': False, 'error': str(e)}), 500
@bulk_api_bp.route('/export', methods=['POST'])
def bulk_export():
"""Export series data."""
try:
data = request.get_json()
series_ids = data.get('series_ids', [])
format_type = data.get('format', 'json')
if not series_ids:
return jsonify({'success': False, 'error': 'No series IDs provided'}), 400
# Generate export data
loop = asyncio.new_event_loop()
asyncio.set_event_loop(loop)
try:
export_data = loop.run_until_complete(
bulk_operations_manager.export_series_data(series_ids, format_type)
)
finally:
loop.close()
# Determine content type and filename
content_types = {
'json': 'application/json',
'csv': 'text/csv',
'xml': 'application/xml'
}
content_type = content_types.get(format_type, 'application/octet-stream')
filename = f'series_export_{len(series_ids)}_items.{format_type}'
return send_file(
io.BytesIO(export_data),
mimetype=content_type,
as_attachment=True,
download_name=filename
)
except Exception as e:
return jsonify({'success': False, 'error': str(e)}), 500
@bulk_api_bp.route('/status/<task_id>', methods=['GET'])
def get_operation_status(task_id):
"""Get operation status and progress."""
try:
if task_id not in active_operations:
return jsonify({'error': 'Task not found'}), 404
operation = active_operations[task_id]
response = {
'complete': operation['status'] in ['completed', 'failed'],
'success': operation['status'] == 'completed',
'status': operation['status']
}
if 'progress' in operation:
response.update(operation['progress'])
if 'error' in operation:
response['error'] = operation['error']
if 'result' in operation:
response['result'] = operation['result']
return jsonify(response)
except Exception as e:
return jsonify({'error': str(e)}), 500
@bulk_api_bp.route('/cancel/<task_id>', methods=['POST'])
def cancel_operation(task_id):
"""Cancel a running operation."""
try:
if task_id not in active_operations:
return jsonify({'error': 'Task not found'}), 404
        # Mark the operation as cancelled (the already-running worker thread is not interrupted)
active_operations[task_id]['status'] = 'cancelled'
return jsonify({'success': True, 'message': 'Operation cancelled'})
except Exception as e:
return jsonify({'error': str(e)}), 500
@bulk_api_bp.route('/history', methods=['GET'])
def get_operation_history():
"""Get history of bulk operations."""
try:
# Return completed/failed operations
history = []
for task_id, operation in active_operations.items():
if operation['status'] in ['completed', 'failed', 'cancelled']:
history.append({
'task_id': task_id,
'operation_id': operation['id'],
'type': operation['type'],
'status': operation['status'],
'progress': operation.get('progress', {}),
'error': operation.get('error'),
'result': operation.get('result')
})
        # Sort by number of completed items (operations do not record timestamps)
history.sort(key=lambda x: x.get('progress', {}).get('completed', 0), reverse=True)
return jsonify({'history': history})
except Exception as e:
return jsonify({'error': str(e)}), 500
@bulk_api_bp.route('/cleanup', methods=['POST'])
def cleanup_completed_operations():
"""Clean up completed/failed operations."""
try:
to_remove = []
for task_id, operation in active_operations.items():
if operation['status'] in ['completed', 'failed', 'cancelled']:
to_remove.append(task_id)
for task_id in to_remove:
del active_operations[task_id]
return jsonify({
'success': True,
'cleaned_up': len(to_remove),
'message': f'Cleaned up {len(to_remove)} completed operations'
})
except Exception as e:
return jsonify({'error': str(e)}), 500

View File

@@ -1,454 +0,0 @@
"""
API endpoints for configuration management.
Provides comprehensive configuration management with validation, backup, and restore functionality.
"""
import json
import logging
import os
from datetime import datetime
from typing import Any, Dict, Optional
from fastapi import APIRouter, Depends, File, Form, HTTPException, UploadFile, status
from fastapi.responses import FileResponse
from pydantic import BaseModel
# Import SeriesApp for business logic
from src.core.SeriesApp import SeriesApp
# FastAPI dependencies and models
from src.server.fastapi_app import get_current_user, settings
logger = logging.getLogger(__name__)
# Create FastAPI router for config management endpoints
router = APIRouter(prefix='/api/v1/config', tags=['config'])
# Pydantic models for requests and responses
class ConfigResponse(BaseModel):
"""Response model for configuration data."""
success: bool = True
config: Dict[str, Any]
    config_schema: Optional[Dict[str, Any]] = None  # named "config_schema" because "schema" shadows a BaseModel attribute
class ConfigUpdateRequest(BaseModel):
"""Request model for configuration updates."""
config: Dict[str, Any]
validate: bool = True
class ConfigImportResponse(BaseModel):
"""Response model for configuration import operations."""
success: bool
message: str
imported_keys: Optional[list] = None
skipped_keys: Optional[list] = None
# Dependency to get SeriesApp instance
def get_series_app() -> SeriesApp:
"""Get SeriesApp instance for business logic operations."""
if not settings.anime_directory:
raise HTTPException(
status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
detail="Anime directory not configured"
)
return SeriesApp(settings.anime_directory)
@router.get('/', response_model=ConfigResponse)
async def get_full_config(
current_user: Optional[Dict] = Depends(get_current_user)
) -> ConfigResponse:
"""Get complete configuration (without sensitive data)."""
try:
# For now, return a basic config structure
# TODO: Replace with actual config management logic
config_data = {
"anime_directory": settings.anime_directory if hasattr(settings, 'anime_directory') else None,
"download_settings": {},
"display_settings": {},
"security_settings": {}
}
schema = {
"anime_directory": {"type": "string", "required": True},
"download_settings": {"type": "object"},
"display_settings": {"type": "object"},
"security_settings": {"type": "object"}
}
return ConfigResponse(
success=True,
config=config_data,
            config_schema=schema
)
except Exception as e:
logger.error(f"Error getting configuration: {e}")
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=str(e)
)
@router.post('/', response_model=ConfigImportResponse)
async def update_config(
config_update: ConfigUpdateRequest,
current_user: Optional[Dict] = Depends(get_current_user)
) -> ConfigImportResponse:
"""Update configuration with validation."""
try:
# For now, just return success
# TODO: Replace with actual config management logic
logger.info("Configuration updated successfully")
return ConfigImportResponse(
success=True,
message="Configuration updated successfully",
imported_keys=list(config_update.config.keys()),
skipped_keys=[]
)
except Exception as e:
logger.error(f"Error updating configuration: {e}")
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=str(e)
)
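# NOTE: the endpoints below still use the legacy Flask blueprint style (config_bp, request,
# jsonify, send_file, secure_filename, require_auth and the global "config" object) and have
# not yet been migrated to the FastAPI router defined above; those names are not imported in
# this module as shown here.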
@config_bp.route('/validate', methods=['POST'])
@require_auth
def validate_config():
"""Validate configuration without saving."""
try:
data = request.get_json() or {}
validation_result = config.validate_config(data)
return jsonify({
'success': True,
'validation': validation_result
})
except Exception as e:
logger.error(f"Error validating configuration: {e}")
return jsonify({
'success': False,
'error': str(e)
}), 500
@config_bp.route('/section/<section_name>', methods=['GET'])
@require_auth
def get_config_section(section_name):
"""Get specific configuration section."""
try:
section_data = config.get(section_name, {})
return jsonify({
'success': True,
'section': section_name,
'config': section_data
})
except Exception as e:
logger.error(f"Error getting config section {section_name}: {e}")
return jsonify({
'success': False,
'error': str(e)
}), 500
@config_bp.route('/section/<section_name>', methods=['POST'])
@require_auth
def update_config_section(section_name):
"""Update specific configuration section."""
try:
data = request.get_json() or {}
# Get current config
current_config = config.export_config(include_sensitive=True)
# Update the specific section
current_config[section_name] = data
# Validate and save
result = config.import_config(current_config, validate=True)
if result['success']:
logger.info(f"Configuration section '{section_name}' updated successfully")
return jsonify({
'success': True,
'message': f'Configuration section "{section_name}" updated successfully',
'warnings': result.get('warnings', [])
})
else:
return jsonify({
'success': False,
'error': 'Configuration validation failed',
'errors': result['errors'],
'warnings': result.get('warnings', [])
}), 400
except Exception as e:
logger.error(f"Error updating config section {section_name}: {e}")
return jsonify({
'success': False,
'error': str(e)
}), 500
@config_bp.route('/backup', methods=['POST'])
@require_auth
def create_backup():
"""Create configuration backup."""
try:
data = request.get_json() or {}
backup_name = data.get('name', '')
# Generate backup filename
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
if backup_name:
# Sanitize backup name
backup_name = secure_filename(backup_name)
filename = f"config_backup_{backup_name}_{timestamp}.json"
else:
filename = f"config_backup_{timestamp}.json"
backup_path = config.backup_config(filename)
logger.info(f"Configuration backup created: {backup_path}")
return jsonify({
'success': True,
'message': 'Backup created successfully',
'backup_path': backup_path,
'filename': filename
})
except Exception as e:
logger.error(f"Error creating backup: {e}")
return jsonify({
'success': False,
'error': str(e)
}), 500
@config_bp.route('/backups', methods=['GET'])
@require_auth
def list_backups():
"""List available configuration backups."""
try:
backups = []
# Scan current directory for backup files
for filename in os.listdir('.'):
if filename.startswith('config_backup_') and filename.endswith('.json'):
file_path = os.path.abspath(filename)
file_size = os.path.getsize(filename)
file_modified = datetime.fromtimestamp(os.path.getmtime(filename))
backups.append({
'filename': filename,
'path': file_path,
'size': file_size,
'size_kb': round(file_size / 1024, 2),
'modified': file_modified.isoformat(),
'modified_display': file_modified.strftime('%Y-%m-%d %H:%M:%S')
})
# Sort by modification date (newest first)
backups.sort(key=lambda x: x['modified'], reverse=True)
return jsonify({
'success': True,
'backups': backups
})
except Exception as e:
logger.error(f"Error listing backups: {e}")
return jsonify({
'success': False,
'error': str(e)
}), 500
@config_bp.route('/backup/<filename>/restore', methods=['POST'])
@require_auth
def restore_backup(filename):
"""Restore configuration from backup."""
try:
# Security: Only allow config backup files
if not filename.startswith('config_backup_') or not filename.endswith('.json'):
return jsonify({
'success': False,
'error': 'Invalid backup file'
}), 400
# Security: Check if file exists
if not os.path.exists(filename):
return jsonify({
'success': False,
'error': 'Backup file not found'
}), 404
success = config.restore_config(filename)
if success:
logger.info(f"Configuration restored from backup: {filename}")
return jsonify({
'success': True,
'message': 'Configuration restored successfully'
})
else:
return jsonify({
'success': False,
'error': 'Failed to restore configuration'
}), 500
except Exception as e:
logger.error(f"Error restoring backup {filename}: {e}")
return jsonify({
'success': False,
'error': str(e)
}), 500
@config_bp.route('/backup/<filename>/download', methods=['GET'])
@require_auth
def download_backup(filename):
"""Download configuration backup file."""
try:
# Security: Only allow config backup files
if not filename.startswith('config_backup_') or not filename.endswith('.json'):
return jsonify({
'success': False,
'error': 'Invalid backup file'
}), 400
# Security: Check if file exists
if not os.path.exists(filename):
return jsonify({
'success': False,
'error': 'Backup file not found'
}), 404
return send_file(
filename,
as_attachment=True,
download_name=filename
)
except Exception as e:
logger.error(f"Error downloading backup {filename}: {e}")
return jsonify({
'success': False,
'error': str(e)
}), 500
@config_bp.route('/export', methods=['POST'])
@require_auth
def export_config():
"""Export current configuration to JSON."""
try:
data = request.get_json() or {}
include_sensitive = data.get('include_sensitive', False)
config_data = config.export_config(include_sensitive=include_sensitive)
# Create filename with timestamp
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
filename = f"aniworld_config_export_{timestamp}.json"
# Write to temporary file
with open(filename, 'w', encoding='utf-8') as f:
json.dump(config_data, f, indent=4)
return send_file(
filename,
as_attachment=True,
download_name=filename,
mimetype='application/json'
)
except Exception as e:
logger.error(f"Error exporting configuration: {e}")
return jsonify({
'success': False,
'error': str(e)
}), 500
@router.post('/import', response_model=ConfigImportResponse)
async def import_config(
config_file: UploadFile = File(...),
current_user: Optional[Dict] = Depends(get_current_user)
) -> ConfigImportResponse:
"""Import configuration from uploaded JSON file."""
try:
# Validate file type
if not config_file.filename:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="No file selected"
)
if not config_file.filename.endswith('.json'):
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail="Invalid file type. Only JSON files are allowed."
)
# Read and parse JSON
try:
content = await config_file.read()
config_data = json.loads(content.decode('utf-8'))
except json.JSONDecodeError as e:
raise HTTPException(
status_code=status.HTTP_400_BAD_REQUEST,
detail=f"Invalid JSON format: {e}"
)
# For now, just return success with the keys that would be imported
# TODO: Replace with actual config management logic
logger.info(f"Configuration imported from file: {config_file.filename}")
return ConfigImportResponse(
success=True,
message="Configuration imported successfully",
imported_keys=list(config_data.keys()) if isinstance(config_data, dict) else [],
skipped_keys=[]
)
except HTTPException:
raise
except Exception as e:
logger.error(f"Error importing configuration: {e}")
raise HTTPException(
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
detail=str(e)
)
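# Illustrative client call (the host and port are assumptions, not part of this module):
#   curl -X POST http://localhost:8000/api/v1/config/import -F "config_file=@my_config.json"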
@config_bp.route('/reset', methods=['POST'])
@require_auth
def reset_config():
"""Reset configuration to defaults (preserves security settings)."""
try:
data = request.get_json() or {}
preserve_security = data.get('preserve_security', True)
# Get current security settings
current_security = config.get('security', {}) if preserve_security else {}
# Reset to defaults
config._config = config.default_config.copy()
# Restore security settings if requested
if preserve_security and current_security:
config._config['security'] = current_security
success = config.save_config()
if success:
logger.info("Configuration reset to defaults")
return jsonify({
'success': True,
'message': 'Configuration reset to defaults'
})
else:
return jsonify({
'success': False,
'error': 'Failed to save configuration'
}), 500
except Exception as e:
logger.error(f"Error resetting configuration: {e}")
return jsonify({
'success': False,
'error': str(e)
}), 500

View File

@@ -1,649 +0,0 @@
"""
Database & Storage Management API Endpoints
This module provides REST API endpoints for database operations,
backup management, and storage monitoring.
"""
from flask import Blueprint, request, jsonify, send_file
from auth import require_auth, optional_auth
from error_handler import handle_api_errors, RetryableError, NonRetryableError
from database_manager import (
database_manager, anime_repository, backup_manager, storage_manager,
AnimeMetadata
)
import uuid
from datetime import datetime
import os
# Blueprint for database management endpoints
database_bp = Blueprint('database', __name__)
# Database Information Endpoints
@database_bp.route('/api/database/info')
@handle_api_errors
@optional_auth
def get_database_info():
"""Get database information and statistics."""
try:
# Get schema version
schema_version = database_manager.get_current_version()
# Get table statistics
stats_query = """
SELECT
(SELECT COUNT(*) FROM anime_metadata) as anime_count,
(SELECT COUNT(*) FROM episode_metadata) as episode_count,
(SELECT COUNT(*) FROM episode_metadata WHERE is_downloaded = 1) as downloaded_count,
(SELECT COUNT(*) FROM download_history) as download_history_count
"""
results = database_manager.execute_query(stats_query)
stats = dict(results[0]) if results else {}
# Get database file size
db_size = os.path.getsize(database_manager.db_path) if os.path.exists(database_manager.db_path) else 0
return jsonify({
'status': 'success',
'data': {
'schema_version': schema_version,
'database_path': database_manager.db_path,
'database_size_mb': round(db_size / (1024 * 1024), 2),
'statistics': {
'anime_count': stats.get('anime_count', 0),
'episode_count': stats.get('episode_count', 0),
'downloaded_count': stats.get('downloaded_count', 0),
'download_history_count': stats.get('download_history_count', 0)
}
}
})
except Exception as e:
raise RetryableError(f"Failed to get database info: {e}")
# Anime Metadata Endpoints
@database_bp.route('/api/database/anime')
@handle_api_errors
@optional_auth
def get_all_anime():
"""Get all anime from database."""
try:
status_filter = request.args.get('status')
anime_list = anime_repository.get_all_anime(status_filter)
# Convert to serializable format
anime_data = []
for anime in anime_list:
anime_data.append({
'anime_id': anime.anime_id,
'name': anime.name,
'folder': anime.folder,
'key': anime.key,
'description': anime.description,
'genres': anime.genres,
'release_year': anime.release_year,
'status': anime.status,
'total_episodes': anime.total_episodes,
'poster_url': anime.poster_url,
'last_updated': anime.last_updated.isoformat(),
'created_at': anime.created_at.isoformat(),
'custom_metadata': anime.custom_metadata
})
return jsonify({
'status': 'success',
'data': {
'anime': anime_data,
'count': len(anime_data)
}
})
except Exception as e:
raise RetryableError(f"Failed to get anime list: {e}")
@database_bp.route('/api/database/anime/<anime_id>')
@handle_api_errors
@optional_auth
def get_anime_by_id(anime_id):
"""Get specific anime by ID."""
try:
query = "SELECT * FROM anime_metadata WHERE anime_id = ?"
results = database_manager.execute_query(query, (anime_id,))
if not results:
return jsonify({
'status': 'error',
'message': 'Anime not found'
}), 404
row = results[0]
anime_data = {
'anime_id': row['anime_id'],
'name': row['name'],
'folder': row['folder'],
'key': row['key'],
'description': row['description'],
'genres': row['genres'],
'release_year': row['release_year'],
'status': row['status'],
'total_episodes': row['total_episodes'],
'poster_url': row['poster_url'],
'last_updated': row['last_updated'],
'created_at': row['created_at'],
'custom_metadata': row['custom_metadata']
}
return jsonify({
'status': 'success',
'data': anime_data
})
except Exception as e:
raise RetryableError(f"Failed to get anime: {e}")
@database_bp.route('/api/database/anime', methods=['POST'])
@handle_api_errors
@require_auth
def create_anime():
"""Create new anime record."""
try:
data = request.get_json()
# Validate required fields
required_fields = ['name', 'folder']
for field in required_fields:
if field not in data:
return jsonify({
'status': 'error',
'message': f'Missing required field: {field}'
}), 400
# Create anime metadata
anime = AnimeMetadata(
anime_id=str(uuid.uuid4()),
name=data['name'],
folder=data['folder'],
key=data.get('key'),
description=data.get('description'),
genres=data.get('genres', []),
release_year=data.get('release_year'),
status=data.get('status', 'ongoing'),
total_episodes=data.get('total_episodes'),
poster_url=data.get('poster_url'),
custom_metadata=data.get('custom_metadata', {})
)
success = anime_repository.create_anime(anime)
if success:
return jsonify({
'status': 'success',
'message': 'Anime created successfully',
'data': {
'anime_id': anime.anime_id
}
}), 201
else:
return jsonify({
'status': 'error',
'message': 'Failed to create anime'
}), 500
except Exception as e:
raise RetryableError(f"Failed to create anime: {e}")
@database_bp.route('/api/database/anime/<anime_id>', methods=['PUT'])
@handle_api_errors
@require_auth
def update_anime(anime_id):
"""Update anime metadata."""
try:
data = request.get_json()
        # Look up the existing anime by folder, so the request body must include the current folder
existing = anime_repository.get_anime_by_folder(data.get('folder', ''))
if not existing or existing.anime_id != anime_id:
return jsonify({
'status': 'error',
'message': 'Anime not found'
}), 404
# Update fields
if 'name' in data:
existing.name = data['name']
if 'key' in data:
existing.key = data['key']
if 'description' in data:
existing.description = data['description']
if 'genres' in data:
existing.genres = data['genres']
if 'release_year' in data:
existing.release_year = data['release_year']
if 'status' in data:
existing.status = data['status']
if 'total_episodes' in data:
existing.total_episodes = data['total_episodes']
if 'poster_url' in data:
existing.poster_url = data['poster_url']
if 'custom_metadata' in data:
existing.custom_metadata.update(data['custom_metadata'])
success = anime_repository.update_anime(existing)
if success:
return jsonify({
'status': 'success',
'message': 'Anime updated successfully'
})
else:
return jsonify({
'status': 'error',
'message': 'Failed to update anime'
}), 500
except Exception as e:
raise RetryableError(f"Failed to update anime: {e}")
@database_bp.route('/api/database/anime/<anime_id>', methods=['DELETE'])
@handle_api_errors
@require_auth
def delete_anime(anime_id):
"""Delete anime and related data."""
try:
success = anime_repository.delete_anime(anime_id)
if success:
return jsonify({
'status': 'success',
'message': 'Anime deleted successfully'
})
else:
return jsonify({
'status': 'error',
'message': 'Anime not found'
}), 404
except Exception as e:
raise RetryableError(f"Failed to delete anime: {e}")
@database_bp.route('/api/database/anime/search')
@handle_api_errors
@optional_auth
def search_anime():
"""Search anime by name or description."""
try:
search_term = request.args.get('q', '').strip()
if not search_term:
return jsonify({
'status': 'error',
'message': 'Search term is required'
}), 400
results = anime_repository.search_anime(search_term)
# Convert to serializable format
anime_data = []
for anime in results:
anime_data.append({
'anime_id': anime.anime_id,
'name': anime.name,
'folder': anime.folder,
'key': anime.key,
'description': anime.description,
'genres': anime.genres,
'release_year': anime.release_year,
'status': anime.status
})
return jsonify({
'status': 'success',
'data': {
'results': anime_data,
'count': len(anime_data),
'search_term': search_term
}
})
except Exception as e:
raise RetryableError(f"Failed to search anime: {e}")
# Backup Management Endpoints
@database_bp.route('/api/database/backups')
@handle_api_errors
@optional_auth
def list_backups():
"""List all available backups."""
try:
backups = backup_manager.list_backups()
backup_data = []
for backup in backups:
backup_data.append({
'backup_id': backup.backup_id,
'backup_type': backup.backup_type,
'created_at': backup.created_at.isoformat(),
'size_mb': round(backup.size_bytes / (1024 * 1024), 2),
'description': backup.description,
'tables_included': backup.tables_included
})
return jsonify({
'status': 'success',
'data': {
'backups': backup_data,
'count': len(backup_data)
}
})
except Exception as e:
raise RetryableError(f"Failed to list backups: {e}")
@database_bp.route('/api/database/backups/create', methods=['POST'])
@handle_api_errors
@require_auth
def create_backup():
"""Create a new database backup."""
try:
data = request.get_json() or {}
backup_type = data.get('backup_type', 'full')
description = data.get('description')
if backup_type not in ['full', 'metadata_only']:
return jsonify({
'status': 'error',
'message': 'Backup type must be "full" or "metadata_only"'
}), 400
if backup_type == 'full':
backup_info = backup_manager.create_full_backup(description)
else:
backup_info = backup_manager.create_metadata_backup(description)
if backup_info:
return jsonify({
'status': 'success',
'message': f'{backup_type.title()} backup created successfully',
'data': {
'backup_id': backup_info.backup_id,
'backup_type': backup_info.backup_type,
'size_mb': round(backup_info.size_bytes / (1024 * 1024), 2),
'created_at': backup_info.created_at.isoformat()
}
}), 201
else:
return jsonify({
'status': 'error',
'message': 'Failed to create backup'
}), 500
except Exception as e:
raise RetryableError(f"Failed to create backup: {e}")
@database_bp.route('/api/database/backups/<backup_id>/restore', methods=['POST'])
@handle_api_errors
@require_auth
def restore_backup(backup_id):
"""Restore from a backup."""
try:
success = backup_manager.restore_backup(backup_id)
if success:
return jsonify({
'status': 'success',
'message': 'Backup restored successfully'
})
else:
return jsonify({
'status': 'error',
'message': 'Failed to restore backup'
}), 500
except Exception as e:
raise RetryableError(f"Failed to restore backup: {e}")
@database_bp.route('/api/database/backups/<backup_id>/download')
@handle_api_errors
@require_auth
def download_backup(backup_id):
"""Download a backup file."""
try:
backups = backup_manager.list_backups()
target_backup = None
for backup in backups:
if backup.backup_id == backup_id:
target_backup = backup
break
if not target_backup:
return jsonify({
'status': 'error',
'message': 'Backup not found'
}), 404
if not os.path.exists(target_backup.backup_path):
return jsonify({
'status': 'error',
'message': 'Backup file not found'
}), 404
filename = os.path.basename(target_backup.backup_path)
return send_file(target_backup.backup_path, as_attachment=True, download_name=filename)
except Exception as e:
raise RetryableError(f"Failed to download backup: {e}")
@database_bp.route('/api/database/backups/cleanup', methods=['POST'])
@handle_api_errors
@require_auth
def cleanup_backups():
"""Clean up old backup files."""
try:
data = request.get_json() or {}
keep_days = data.get('keep_days', 30)
keep_count = data.get('keep_count', 10)
if keep_days < 1 or keep_count < 1:
return jsonify({
'status': 'error',
'message': 'keep_days and keep_count must be positive integers'
}), 400
backup_manager.cleanup_old_backups(keep_days, keep_count)
return jsonify({
'status': 'success',
'message': f'Backup cleanup completed (keeping {keep_count} backups, max {keep_days} days old)'
})
except Exception as e:
raise RetryableError(f"Failed to cleanup backups: {e}")
# Storage Management Endpoints
@database_bp.route('/api/database/storage/summary')
@handle_api_errors
@optional_auth
def get_storage_summary():
"""Get storage usage summary."""
try:
summary = storage_manager.get_storage_summary()
return jsonify({
'status': 'success',
'data': summary
})
except Exception as e:
raise RetryableError(f"Failed to get storage summary: {e}")
@database_bp.route('/api/database/storage/locations')
@handle_api_errors
@optional_auth
def get_storage_locations():
"""Get all storage locations."""
try:
query = """
SELECT sl.*, am.name as anime_name
FROM storage_locations sl
LEFT JOIN anime_metadata am ON sl.anime_id = am.anime_id
WHERE sl.is_active = 1
ORDER BY sl.location_type, sl.path
"""
results = database_manager.execute_query(query)
locations = []
for row in results:
locations.append({
'location_id': row['location_id'],
'anime_id': row['anime_id'],
'anime_name': row['anime_name'],
'path': row['path'],
'location_type': row['location_type'],
'free_space_gb': (row['free_space_bytes'] / (1024**3)) if row['free_space_bytes'] else None,
'total_space_gb': (row['total_space_bytes'] / (1024**3)) if row['total_space_bytes'] else None,
                'usage_percent': ((row['total_space_bytes'] - row['free_space_bytes']) / row['total_space_bytes'] * 100) if row['total_space_bytes'] and row['free_space_bytes'] is not None else None,
'last_checked': row['last_checked']
})
return jsonify({
'status': 'success',
'data': {
'locations': locations,
'count': len(locations)
}
})
except Exception as e:
raise RetryableError(f"Failed to get storage locations: {e}")
@database_bp.route('/api/database/storage/locations', methods=['POST'])
@handle_api_errors
@require_auth
def add_storage_location():
"""Add a new storage location."""
try:
data = request.get_json()
path = data.get('path')
location_type = data.get('location_type', 'primary')
anime_id = data.get('anime_id')
if not path:
return jsonify({
'status': 'error',
'message': 'Path is required'
}), 400
if location_type not in ['primary', 'backup', 'cache']:
return jsonify({
'status': 'error',
'message': 'Location type must be primary, backup, or cache'
}), 400
location_id = storage_manager.add_storage_location(path, location_type, anime_id)
return jsonify({
'status': 'success',
'message': 'Storage location added successfully',
'data': {
'location_id': location_id
}
}), 201
except Exception as e:
raise RetryableError(f"Failed to add storage location: {e}")
@database_bp.route('/api/database/storage/locations/<location_id>/update', methods=['POST'])
@handle_api_errors
@require_auth
def update_storage_location(location_id):
"""Update storage location statistics."""
try:
storage_manager.update_storage_stats(location_id)
return jsonify({
'status': 'success',
'message': 'Storage statistics updated successfully'
})
except Exception as e:
raise RetryableError(f"Failed to update storage location: {e}")
# Database Maintenance Endpoints
@database_bp.route('/api/database/maintenance/vacuum', methods=['POST'])
@handle_api_errors
@require_auth
def vacuum_database():
"""Perform database VACUUM operation to reclaim space."""
try:
with database_manager.get_connection() as conn:
conn.execute("VACUUM")
return jsonify({
'status': 'success',
'message': 'Database vacuum completed successfully'
})
except Exception as e:
raise RetryableError(f"Failed to vacuum database: {e}")
@database_bp.route('/api/database/maintenance/analyze', methods=['POST'])
@handle_api_errors
@require_auth
def analyze_database():
"""Perform database ANALYZE operation to update statistics."""
try:
with database_manager.get_connection() as conn:
conn.execute("ANALYZE")
return jsonify({
'status': 'success',
'message': 'Database analysis completed successfully'
})
except Exception as e:
raise RetryableError(f"Failed to analyze database: {e}")
@database_bp.route('/api/database/maintenance/integrity-check', methods=['POST'])
@handle_api_errors
@require_auth
def integrity_check():
"""Perform database integrity check."""
try:
with database_manager.get_connection() as conn:
cursor = conn.execute("PRAGMA integrity_check")
results = cursor.fetchall()
# Check if database is OK
is_ok = len(results) == 1 and results[0][0] == 'ok'
return jsonify({
'status': 'success',
'data': {
'integrity_ok': is_ok,
'results': [row[0] for row in results]
}
})
except Exception as e:
raise RetryableError(f"Failed to check database integrity: {e}")
# Export the blueprint
__all__ = ['database_bp']

View File

@@ -1,581 +0,0 @@
"""
Diagnostics API endpoints.
This module handles all diagnostic and monitoring operations including:
- System health checks
- Performance monitoring
- Error reporting
- Network diagnostics
"""
from flask import Blueprint, request, jsonify
from typing import Dict, List, Any, Optional, Tuple
import logging
import psutil
import socket
import requests
import time
import platform
import sys
import os
from datetime import datetime, timedelta
# Import shared utilities
try:
from src.server.web.controllers.shared.auth_decorators import require_auth, optional_auth
from src.server.web.controllers.shared.error_handlers import handle_api_errors
from src.server.web.controllers.shared.validators import validate_query_params
from src.server.web.controllers.shared.response_helpers import (
create_success_response, create_error_response, format_datetime, format_file_size
)
except ImportError:
# Fallback imports for development
def require_auth(f): return f
def optional_auth(f): return f
def handle_api_errors(f): return f
def validate_query_params(**kwargs): return lambda f: f
def create_success_response(msg, code=200, data=None): return jsonify({'success': True, 'message': msg, 'data': data}), code
def create_error_response(msg, code=400, details=None): return jsonify({'error': msg, 'details': details}), code
def format_datetime(dt): return str(dt) if dt else None
def format_file_size(size): return f"{size} bytes"
# Import diagnostic components
try:
from src.server.data.error_manager import ErrorManager
from src.server.data.performance_manager import PerformanceManager
from src.server.data.system_manager import SystemManager
except ImportError:
# Fallback for development
class ErrorManager:
def get_recent_errors(self, **kwargs): return []
def get_error_stats(self): return {}
def clear_errors(self): return True
def report_error(self, **kwargs): return 1
class PerformanceManager:
def get_performance_metrics(self): return {}
def get_performance_history(self, **kwargs): return []
def record_metric(self, **kwargs): return True
class SystemManager:
def get_system_info(self): return {}
def get_disk_usage(self): return {}
def get_network_status(self): return {}
def test_network_connectivity(self, url): return {'success': True, 'response_time': 0.1}
# Create blueprint
diagnostics_bp = Blueprint('diagnostics', __name__)
# Initialize managers
error_manager = ErrorManager()
performance_manager = PerformanceManager()
system_manager = SystemManager()
logger = logging.getLogger(__name__)
@diagnostics_bp.route('/diagnostics/health', methods=['GET'])
@optional_auth
@handle_api_errors
def health_check() -> Tuple[Any, int]:
"""
Perform comprehensive system health check.
Returns:
JSON response with system health status
"""
try:
health_status = {
'status': 'healthy',
'timestamp': datetime.now().isoformat(),
'checks': {},
'overall_score': 100
}
# System resource checks
cpu_percent = psutil.cpu_percent(interval=1)
memory = psutil.virtual_memory()
disk = psutil.disk_usage('/')
# CPU check
health_status['checks']['cpu'] = {
'status': 'healthy' if cpu_percent < 80 else 'warning' if cpu_percent < 95 else 'critical',
'usage_percent': cpu_percent,
'details': f"CPU usage: {cpu_percent}%"
}
# Memory check
memory_percent = memory.percent
health_status['checks']['memory'] = {
'status': 'healthy' if memory_percent < 80 else 'warning' if memory_percent < 95 else 'critical',
'usage_percent': memory_percent,
'total': format_file_size(memory.total),
'available': format_file_size(memory.available),
'details': f"Memory usage: {memory_percent}%"
}
# Disk check
disk_percent = disk.percent
health_status['checks']['disk'] = {
'status': 'healthy' if disk_percent < 80 else 'warning' if disk_percent < 95 else 'critical',
'usage_percent': disk_percent,
'total': format_file_size(disk.total),
'free': format_file_size(disk.free),
'details': f"Disk usage: {disk_percent}%"
}
# Database connectivity check
try:
# This would test actual database connection
health_status['checks']['database'] = {
'status': 'healthy',
'details': 'Database connection successful'
}
except Exception as e:
health_status['checks']['database'] = {
'status': 'critical',
'details': f'Database connection failed: {str(e)}'
}
# Network connectivity check
try:
response = requests.get('https://httpbin.org/status/200', timeout=5)
if response.status_code == 200:
health_status['checks']['network'] = {
'status': 'healthy',
'details': 'Internet connectivity available'
}
else:
health_status['checks']['network'] = {
'status': 'warning',
'details': f'Network response: {response.status_code}'
}
except Exception as e:
health_status['checks']['network'] = {
'status': 'warning',
'details': f'Network connectivity issues: {str(e)}'
}
# Calculate overall health score
check_statuses = [check['status'] for check in health_status['checks'].values()]
critical_count = check_statuses.count('critical')
warning_count = check_statuses.count('warning')
if critical_count > 0:
health_status['status'] = 'critical'
health_status['overall_score'] = max(0, 100 - (critical_count * 30) - (warning_count * 10))
elif warning_count > 0:
health_status['status'] = 'warning'
health_status['overall_score'] = max(50, 100 - (warning_count * 15))
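        # Worked example of the scoring above: one critical and one warning check give
        # max(0, 100 - 1*30 - 1*10) = 60 with status 'critical'; two warnings and no
        # criticals give max(50, 100 - 2*15) = 70 with status 'warning'.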
return create_success_response("Health check completed", 200, health_status)
except Exception as e:
logger.error(f"Error during health check: {str(e)}")
return create_error_response("Health check failed", 500)
@diagnostics_bp.route('/diagnostics/system', methods=['GET'])
@require_auth
@handle_api_errors
def get_system_info() -> Tuple[Any, int]:
"""
Get detailed system information.
Returns:
JSON response with system information
"""
try:
system_info = {
'platform': {
'system': platform.system(),
'release': platform.release(),
'version': platform.version(),
'machine': platform.machine(),
'processor': platform.processor(),
'architecture': platform.architecture()
},
'python': {
'version': sys.version,
'executable': sys.executable,
'path': sys.path[:5] # First 5 paths only
},
'resources': {
'cpu': {
'count_logical': psutil.cpu_count(logical=True),
'count_physical': psutil.cpu_count(logical=False),
'frequency': psutil.cpu_freq()._asdict() if psutil.cpu_freq() else None,
'usage_percent': psutil.cpu_percent(interval=1),
'usage_per_cpu': psutil.cpu_percent(interval=1, percpu=True)
},
'memory': {
**psutil.virtual_memory()._asdict(),
'swap': psutil.swap_memory()._asdict()
},
'disk': {
'usage': psutil.disk_usage('/')._asdict(),
'io_counters': psutil.disk_io_counters()._asdict() if psutil.disk_io_counters() else None
},
'network': {
'io_counters': psutil.net_io_counters()._asdict(),
'connections': len(psutil.net_connections()),
                    'interfaces': {name: [addr._asdict() for addr in addrs] for name, addrs in psutil.net_if_addrs().items()}
}
},
'process': {
'pid': os.getpid(),
'memory_info': psutil.Process().memory_info()._asdict(),
'cpu_percent': psutil.Process().cpu_percent(),
'num_threads': psutil.Process().num_threads(),
'create_time': format_datetime(datetime.fromtimestamp(psutil.Process().create_time())),
'open_files': len(psutil.Process().open_files())
},
'uptime': {
'boot_time': format_datetime(datetime.fromtimestamp(psutil.boot_time())),
'uptime_seconds': time.time() - psutil.boot_time()
}
}
return create_success_response("System information retrieved", 200, system_info)
except Exception as e:
logger.error(f"Error getting system info: {str(e)}")
return create_error_response("Failed to get system information", 500)
@diagnostics_bp.route('/diagnostics/performance', methods=['GET'])
@require_auth
@handle_api_errors
@validate_query_params(
allowed_params=['hours', 'metric'],
param_types={'hours': int}
)
def get_performance_metrics() -> Tuple[Any, int]:
"""
Get performance metrics and history.
Query Parameters:
- hours: Hours of history to retrieve (default: 24, max: 168)
- metric: Specific metric to retrieve (optional)
Returns:
JSON response with performance metrics
"""
hours = min(request.args.get('hours', 24, type=int), 168) # Max 1 week
metric = request.args.get('metric')
try:
# Current performance metrics
current_metrics = {
'timestamp': datetime.now().isoformat(),
'cpu': {
'usage_percent': psutil.cpu_percent(interval=1),
'load_average': os.getloadavg() if hasattr(os, 'getloadavg') else None
},
'memory': {
'usage_percent': psutil.virtual_memory().percent,
'available_gb': psutil.virtual_memory().available / (1024**3)
},
'disk': {
'usage_percent': psutil.disk_usage('/').percent,
'free_gb': psutil.disk_usage('/').free / (1024**3)
},
'network': {
'bytes_sent': psutil.net_io_counters().bytes_sent,
'bytes_recv': psutil.net_io_counters().bytes_recv,
'packets_sent': psutil.net_io_counters().packets_sent,
'packets_recv': psutil.net_io_counters().packets_recv
}
}
# Historical data
historical_data = performance_manager.get_performance_history(
hours=hours,
metric=metric
)
response_data = {
'current': current_metrics,
'history': historical_data,
'summary': {
'period_hours': hours,
'data_points': len(historical_data),
'metric_filter': metric
}
}
return create_success_response("Performance metrics retrieved", 200, response_data)
except Exception as e:
logger.error(f"Error getting performance metrics: {str(e)}")
return create_error_response("Failed to get performance metrics", 500)
@diagnostics_bp.route('/diagnostics/errors', methods=['GET'])
@require_auth
@handle_api_errors
@validate_query_params(
allowed_params=['hours', 'level', 'limit'],
param_types={'hours': int, 'limit': int}
)
def get_recent_errors() -> Tuple[Any, int]:
"""
Get recent errors and error statistics.
Query Parameters:
- hours: Hours of errors to retrieve (default: 24, max: 168)
- level: Error level filter (error, warning, critical)
- limit: Maximum number of errors to return (default: 100, max: 1000)
Returns:
JSON response with recent errors
"""
hours = min(request.args.get('hours', 24, type=int), 168)
level = request.args.get('level')
limit = min(request.args.get('limit', 100, type=int), 1000)
try:
# Get recent errors
errors = error_manager.get_recent_errors(
hours=hours,
level=level,
limit=limit
)
# Get error statistics
error_stats = error_manager.get_error_stats()
response_data = {
'errors': errors,
'statistics': error_stats,
'summary': {
'period_hours': hours,
'level_filter': level,
'total_returned': len(errors),
'limit': limit
}
}
return create_success_response("Recent errors retrieved", 200, response_data)
except Exception as e:
logger.error(f"Error getting recent errors: {str(e)}")
return create_error_response("Failed to get recent errors", 500)
@diagnostics_bp.route('/diagnostics/errors', methods=['DELETE'])
@require_auth
@handle_api_errors
def clear_errors() -> Tuple[Any, int]:
"""
Clear error log.
Returns:
JSON response with clear operation result
"""
try:
success = error_manager.clear_errors()
if success:
logger.info("Error log cleared")
return create_success_response("Error log cleared successfully")
else:
return create_error_response("Failed to clear error log", 500)
except Exception as e:
logger.error(f"Error clearing error log: {str(e)}")
return create_error_response("Failed to clear error log", 500)
@diagnostics_bp.route('/diagnostics/network', methods=['GET'])
@require_auth
@handle_api_errors
def test_network_connectivity() -> Tuple[Any, int]:
"""
Test network connectivity to various services.
Returns:
JSON response with network connectivity results
"""
try:
test_urls = [
'https://google.com',
'https://github.com',
'https://pypi.org',
'https://httpbin.org/status/200'
]
results = []
for url in test_urls:
try:
start_time = time.time()
response = requests.get(url, timeout=10)
response_time = time.time() - start_time
results.append({
'url': url,
'status': 'success',
'status_code': response.status_code,
'response_time_ms': round(response_time * 1000, 2),
'accessible': response.status_code == 200
})
except requests.exceptions.Timeout:
results.append({
'url': url,
'status': 'timeout',
'error': 'Request timed out',
'accessible': False
})
except Exception as e:
results.append({
'url': url,
'status': 'error',
'error': str(e),
'accessible': False
})
# Network interface information
interfaces = {}
for interface, addresses in psutil.net_if_addrs().items():
interfaces[interface] = [addr._asdict() for addr in addresses]
# Network I/O statistics
net_io = psutil.net_io_counters()._asdict()
response_data = {
'connectivity_tests': results,
'interfaces': interfaces,
'io_statistics': net_io,
'summary': {
'total_tests': len(results),
'successful': len([r for r in results if r['accessible']]),
'failed': len([r for r in results if not r['accessible']])
}
}
return create_success_response("Network connectivity test completed", 200, response_data)
except Exception as e:
logger.error(f"Error testing network connectivity: {str(e)}")
return create_error_response("Failed to test network connectivity", 500)
@diagnostics_bp.route('/diagnostics/logs', methods=['GET'])
@require_auth
@handle_api_errors
@validate_query_params(
allowed_params=['lines', 'level', 'component'],
param_types={'lines': int}
)
def get_application_logs() -> Tuple[Any, int]:
"""
Get recent application logs.
Query Parameters:
- lines: Number of log lines to retrieve (default: 100, max: 1000)
- level: Log level filter (debug, info, warning, error, critical)
- component: Component filter (optional)
Returns:
JSON response with application logs
"""
lines = min(request.args.get('lines', 100, type=int), 1000)
level = request.args.get('level')
component = request.args.get('component')
try:
# This would read from actual log files
log_entries = []
# For demonstration, return sample log structure
response_data = {
'logs': log_entries,
'summary': {
'lines_requested': lines,
'level_filter': level,
'component_filter': component,
'total_returned': len(log_entries)
}
}
return create_success_response("Application logs retrieved", 200, response_data)
except Exception as e:
logger.error(f"Error getting application logs: {str(e)}")
return create_error_response("Failed to get application logs", 500)
@diagnostics_bp.route('/diagnostics/report', methods=['POST'])
@require_auth
@handle_api_errors
def generate_diagnostic_report() -> Tuple[Any, int]:
"""
Generate comprehensive diagnostic report.
Returns:
JSON response with diagnostic report
"""
try:
report = {
'generated_at': datetime.now().isoformat(),
'report_id': f"diag_{int(time.time())}",
'sections': {}
}
# System information
report['sections']['system'] = {
'platform': platform.platform(),
'python_version': sys.version,
'cpu_count': psutil.cpu_count(),
'memory_total_gb': round(psutil.virtual_memory().total / (1024**3), 2),
'disk_total_gb': round(psutil.disk_usage('/').total / (1024**3), 2)
}
# Current resource usage
report['sections']['resources'] = {
'cpu_percent': psutil.cpu_percent(interval=1),
'memory_percent': psutil.virtual_memory().percent,
'disk_percent': psutil.disk_usage('/').percent,
'load_average': os.getloadavg() if hasattr(os, 'getloadavg') else None
}
# Error summary
error_stats = error_manager.get_error_stats()
report['sections']['errors'] = error_stats
# Performance summary
performance_metrics = performance_manager.get_performance_metrics()
report['sections']['performance'] = performance_metrics
# Network status
report['sections']['network'] = {
'interfaces_count': len(psutil.net_if_addrs()),
'connections_count': len(psutil.net_connections()),
'bytes_sent': psutil.net_io_counters().bytes_sent,
'bytes_recv': psutil.net_io_counters().bytes_recv
}
logger.info(f"Diagnostic report generated: {report['report_id']}")
return create_success_response("Diagnostic report generated", 200, report)
except Exception as e:
logger.error(f"Error generating diagnostic report: {str(e)}")
return create_error_response("Failed to generate diagnostic report", 500)
@diagnostics_bp.route('/diagnostics/ping', methods=['GET'])
@optional_auth
@handle_api_errors
def ping() -> Tuple[Any, int]:
"""
Simple ping endpoint for health monitoring.
Returns:
JSON response with ping result
"""
return create_success_response("pong", 200, {
'timestamp': datetime.now().isoformat(),
'status': 'alive'
})

View File

@@ -1,640 +0,0 @@
"""
Download Management API Endpoints
This module provides REST API endpoints for download operations,
including queue management, progress tracking, and download history.
"""
from flask import Blueprint, request
from typing import Dict, List, Any, Optional
import uuid
from datetime import datetime
from ...shared.auth_decorators import require_auth, optional_auth
from ...shared.error_handlers import handle_api_errors, APIException, NotFoundError, ValidationError
from ...shared.validators import validate_json_input, validate_id_parameter, validate_pagination_params
from ...shared.response_helpers import (
create_success_response, create_paginated_response, format_download_response,
extract_pagination_params, create_batch_response
)
# Import download components (these imports would need to be adjusted based on actual structure)
try:
from download_manager import download_queue, download_manager, DownloadItem
from database_manager import episode_repository, anime_repository
except ImportError:
# Fallback for development/testing
download_queue = None
download_manager = None
DownloadItem = None
episode_repository = None
anime_repository = None
# Blueprint for download management endpoints
downloads_bp = Blueprint('downloads', __name__, url_prefix='/api/v1/downloads')
@downloads_bp.route('', methods=['GET'])
@handle_api_errors
@validate_pagination_params
@optional_auth
def list_downloads() -> Dict[str, Any]:
"""
Get all downloads with optional filtering and pagination.
Query Parameters:
- status: Filter by download status (pending, downloading, completed, failed, paused, cancelled)
- anime_id: Filter by anime ID
- episode_id: Filter by episode ID
- active_only: Show only active downloads (true/false)
- page: Page number (default: 1)
- per_page: Items per page (default: 50, max: 1000)
Returns:
Paginated list of downloads
"""
if not download_manager:
raise APIException("Download manager not available", 503)
# Extract filters
status_filter = request.args.get('status')
anime_id = request.args.get('anime_id')
episode_id = request.args.get('episode_id')
active_only = request.args.get('active_only', 'false').lower() == 'true'
# Validate filters
valid_statuses = ['pending', 'downloading', 'completed', 'failed', 'paused', 'cancelled']
if status_filter and status_filter not in valid_statuses:
raise ValidationError(f"Status must be one of: {', '.join(valid_statuses)}")
if anime_id:
try:
anime_id = int(anime_id)
except ValueError:
raise ValidationError("anime_id must be a valid integer")
if episode_id:
try:
episode_id = int(episode_id)
except ValueError:
raise ValidationError("episode_id must be a valid integer")
# Get pagination parameters
page, per_page = extract_pagination_params()
# Get downloads with filters
downloads = download_manager.get_downloads(
status_filter=status_filter,
anime_id=anime_id,
episode_id=episode_id,
active_only=active_only
)
# Format download data
formatted_downloads = [format_download_response(download.__dict__) for download in downloads]
# Apply pagination
total = len(formatted_downloads)
start_idx = (page - 1) * per_page
end_idx = start_idx + per_page
paginated_downloads = formatted_downloads[start_idx:end_idx]
return create_paginated_response(
data=paginated_downloads,
page=page,
per_page=per_page,
total=total,
endpoint='downloads.list_downloads'
)
@downloads_bp.route('/<int:download_id>', methods=['GET'])
@handle_api_errors
@validate_id_parameter('download_id')
@optional_auth
def get_download(download_id: int) -> Dict[str, Any]:
"""
Get specific download by ID.
Args:
download_id: Unique identifier for the download
Returns:
Download details with progress information
"""
if not download_manager:
raise APIException("Download manager not available", 503)
download = download_manager.get_download_by_id(download_id)
if not download:
raise NotFoundError("Download not found")
# Format download data
download_data = format_download_response(download.__dict__)
# Add detailed progress information
progress_info = download_manager.get_download_progress(download_id)
if progress_info:
download_data['progress_details'] = progress_info
return create_success_response(download_data)
@downloads_bp.route('', methods=['POST'])
@handle_api_errors
@validate_json_input(
required_fields=['episode_id'],
optional_fields=['priority', 'quality', 'subtitle_language', 'download_path'],
field_types={
'episode_id': int,
'priority': int,
'quality': str,
'subtitle_language': str,
'download_path': str
}
)
@require_auth
def create_download() -> Dict[str, Any]:
"""
Create a new download request.
Required Fields:
- episode_id: ID of the episode to download
Optional Fields:
- priority: Download priority (1-10, higher is more priority)
- quality: Preferred quality (720p, 1080p, etc.)
- subtitle_language: Preferred subtitle language
- download_path: Custom download path
Returns:
Created download details
"""
if not download_manager or not download_queue or not DownloadItem or not episode_repository:
raise APIException("Download manager not available", 503)
data = request.get_json()
episode_id = data['episode_id']
# Validate episode exists
episode = episode_repository.get_episode_by_id(episode_id)
if not episode:
raise ValidationError("Episode not found")
# Check if episode is already downloaded
if episode.status == 'downloaded':
raise ValidationError("Episode is already downloaded")
# Check if download already exists for this episode
existing_download = download_manager.get_download_by_episode(episode_id)
if existing_download and existing_download.status in ['pending', 'downloading']:
raise ValidationError("Download already in progress for this episode")
# Validate priority
priority = data.get('priority', 5)
if not 1 <= priority <= 10:
raise ValidationError("Priority must be between 1 and 10")
# Create download item
try:
download_item = DownloadItem(
download_id=str(uuid.uuid4()),
episode_id=episode_id,
anime_id=episode.anime_id,
priority=priority,
quality=data.get('quality'),
subtitle_language=data.get('subtitle_language'),
download_path=data.get('download_path'),
status='pending',
created_at=datetime.utcnow()
)
except Exception as e:
raise ValidationError(f"Invalid download data: {str(e)}")
# Add to download queue
success = download_queue.add_download(download_item)
if not success:
raise APIException("Failed to create download", 500)
# Return created download
download_data = format_download_response(download_item.__dict__)
return create_success_response(
data=download_data,
message="Download queued successfully",
status_code=201
)
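# --- Illustrative usage sketch (not part of the original file) ---
# Queueing a download through the endpoint above, assuming the service runs on
# localhost:5000 and `requests` is available; the episode ID, preference values
# and token are made-up example values.
#
#     import requests
#
#     payload = {
#         "episode_id": 42,
#         "priority": 7,
#         "quality": "1080p",
#         "subtitle_language": "en",
#     }
#     resp = requests.post(
#         "http://localhost:5000/api/v1/downloads",
#         json=payload,
#         headers={"Authorization": "Bearer <token>"},
#         timeout=10,
#     )
#     # A successful request returns HTTP 201 with the queued download details.
#     print(resp.status_code, resp.json())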
@downloads_bp.route('/<int:download_id>/pause', methods=['POST'])
@handle_api_errors
@validate_id_parameter('download_id')
@require_auth
def pause_download(download_id: int) -> Dict[str, Any]:
"""
Pause a download.
Args:
download_id: Unique identifier for the download
Returns:
Updated download status
"""
if not download_manager:
raise APIException("Download manager not available", 503)
download = download_manager.get_download_by_id(download_id)
if not download:
raise NotFoundError("Download not found")
if download.status not in ['pending', 'downloading']:
raise ValidationError(f"Cannot pause download with status '{download.status}'")
success = download_manager.pause_download(download_id)
if not success:
raise APIException("Failed to pause download", 500)
# Get updated download
updated_download = download_manager.get_download_by_id(download_id)
download_data = format_download_response(updated_download.__dict__)
return create_success_response(
data=download_data,
message="Download paused successfully"
)
@downloads_bp.route('/<int:download_id>/resume', methods=['POST'])
@handle_api_errors
@validate_id_parameter('download_id')
@require_auth
def resume_download(download_id: int) -> Dict[str, Any]:
"""
Resume a paused download.
Args:
download_id: Unique identifier for the download
Returns:
Updated download status
"""
if not download_manager:
raise APIException("Download manager not available", 503)
download = download_manager.get_download_by_id(download_id)
if not download:
raise NotFoundError("Download not found")
if download.status != 'paused':
raise ValidationError(f"Cannot resume download with status '{download.status}'")
success = download_manager.resume_download(download_id)
if not success:
raise APIException("Failed to resume download", 500)
# Get updated download
updated_download = download_manager.get_download_by_id(download_id)
download_data = format_download_response(updated_download.__dict__)
return create_success_response(
data=download_data,
message="Download resumed successfully"
)
@downloads_bp.route('/<int:download_id>/cancel', methods=['POST'])
@handle_api_errors
@validate_id_parameter('download_id')
@require_auth
def cancel_download(download_id: int) -> Dict[str, Any]:
"""
Cancel a download.
Args:
download_id: Unique identifier for the download
Query Parameters:
- delete_partial: Set to 'true' to delete partially downloaded files
Returns:
Cancellation confirmation
"""
if not download_manager:
raise APIException("Download manager not available", 503)
download = download_manager.get_download_by_id(download_id)
if not download:
raise NotFoundError("Download not found")
if download.status in ['completed', 'cancelled']:
raise ValidationError(f"Cannot cancel download with status '{download.status}'")
delete_partial = request.args.get('delete_partial', 'false').lower() == 'true'
success = download_manager.cancel_download(download_id, delete_partial=delete_partial)
if not success:
raise APIException("Failed to cancel download", 500)
message = "Download cancelled successfully"
if delete_partial:
message += " (partial files deleted)"
return create_success_response(message=message)
@downloads_bp.route('/<int:download_id>/retry', methods=['POST'])
@handle_api_errors
@validate_id_parameter('download_id')
@require_auth
def retry_download(download_id: int) -> Dict[str, Any]:
"""
Retry a failed download.
Args:
download_id: Unique identifier for the download
Returns:
Updated download status
"""
if not download_manager:
raise APIException("Download manager not available", 503)
download = download_manager.get_download_by_id(download_id)
if not download:
raise NotFoundError("Download not found")
if download.status != 'failed':
raise ValidationError(f"Cannot retry download with status '{download.status}'")
success = download_manager.retry_download(download_id)
if not success:
raise APIException("Failed to retry download", 500)
# Get updated download
updated_download = download_manager.get_download_by_id(download_id)
download_data = format_download_response(updated_download.__dict__)
return create_success_response(
data=download_data,
message="Download queued for retry"
)
@downloads_bp.route('/bulk', methods=['POST'])
@handle_api_errors
@validate_json_input(
required_fields=['action', 'download_ids'],
optional_fields=['delete_partial'],
field_types={
'action': str,
'download_ids': list,
'delete_partial': bool
}
)
@require_auth
def bulk_download_operation() -> Dict[str, Any]:
"""
Perform bulk operations on multiple downloads.
Required Fields:
- action: Operation to perform (pause, resume, cancel, retry)
- download_ids: List of download IDs to operate on
Optional Fields:
- delete_partial: For cancel action, whether to delete partial files
Returns:
Results of the bulk operation
"""
if not download_manager:
raise APIException("Download manager not available", 503)
data = request.get_json()
action = data['action']
download_ids = data['download_ids']
delete_partial = data.get('delete_partial', False)
# Validate action
valid_actions = ['pause', 'resume', 'cancel', 'retry']
if action not in valid_actions:
raise ValidationError(f"Invalid action. Must be one of: {', '.join(valid_actions)}")
# Validate download_ids
if not isinstance(download_ids, list) or not download_ids:
raise ValidationError("download_ids must be a non-empty list")
if len(download_ids) > 50:
raise ValidationError("Cannot operate on more than 50 downloads at once")
# Validate download IDs are integers
try:
download_ids = [int(did) for did in download_ids]
except ValueError:
raise ValidationError("All download_ids must be valid integers")
# Perform bulk operation
successful_items = []
failed_items = []
for download_id in download_ids:
try:
if action == 'pause':
success = download_manager.pause_download(download_id)
elif action == 'resume':
success = download_manager.resume_download(download_id)
elif action == 'cancel':
success = download_manager.cancel_download(download_id, delete_partial=delete_partial)
elif action == 'retry':
success = download_manager.retry_download(download_id)
if success:
successful_items.append({'download_id': download_id, 'action': action})
else:
failed_items.append({'download_id': download_id, 'error': 'Operation failed'})
except Exception as e:
failed_items.append({'download_id': download_id, 'error': str(e)})
return create_batch_response(
successful_items=successful_items,
failed_items=failed_items,
message=f"Bulk {action} operation completed"
)
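# --- Illustrative usage sketch (not part of the original file) ---
# Pausing several downloads at once via the bulk endpoint above; the IDs,
# host and token are placeholder values.
#
#     import requests
#
#     resp = requests.post(
#         "http://localhost:5000/api/v1/downloads/bulk",
#         json={"action": "pause", "download_ids": [11, 12, 13]},
#         headers={"Authorization": "Bearer <token>"},
#         timeout=10,
#     )
#     print(resp.json())  # batch response listing successful and failed items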
@downloads_bp.route('/queue', methods=['GET'])
@handle_api_errors
@optional_auth
def get_download_queue() -> Dict[str, Any]:
"""
Get current download queue status.
Returns:
Download queue information including active downloads and queue statistics
"""
if not download_queue:
raise APIException("Download queue not available", 503)
queue_info = download_queue.get_queue_status()
return create_success_response(
data={
'queue_size': queue_info.get('queue_size', 0),
'active_downloads': queue_info.get('active_downloads', 0),
'max_concurrent': queue_info.get('max_concurrent', 0),
'paused_downloads': queue_info.get('paused_downloads', 0),
'failed_downloads': queue_info.get('failed_downloads', 0),
'completed_today': queue_info.get('completed_today', 0),
'queue_items': queue_info.get('queue_items', [])
}
)
@downloads_bp.route('/queue/pause', methods=['POST'])
@handle_api_errors
@require_auth
def pause_download_queue() -> Dict[str, Any]:
"""
Pause the entire download queue.
Returns:
Queue pause confirmation
"""
if not download_queue:
raise APIException("Download queue not available", 503)
success = download_queue.pause_queue()
if not success:
raise APIException("Failed to pause download queue", 500)
return create_success_response(message="Download queue paused")
@downloads_bp.route('/queue/resume', methods=['POST'])
@handle_api_errors
@require_auth
def resume_download_queue() -> Dict[str, Any]:
"""
Resume the download queue.
Returns:
Queue resume confirmation
"""
if not download_queue:
raise APIException("Download queue not available", 503)
success = download_queue.resume_queue()
if not success:
raise APIException("Failed to resume download queue", 500)
return create_success_response(message="Download queue resumed")
@downloads_bp.route('/queue/clear', methods=['POST'])
@handle_api_errors
@require_auth
def clear_download_queue() -> Dict[str, Any]:
"""
Clear completed and failed downloads from the queue.
Query Parameters:
- include_failed: Set to 'true' to also clear failed downloads
Returns:
Queue clear confirmation
"""
if not download_queue:
raise APIException("Download queue not available", 503)
include_failed = request.args.get('include_failed', 'false').lower() == 'true'
cleared_count = download_queue.clear_completed(include_failed=include_failed)
message = f"Cleared {cleared_count} completed downloads"
if include_failed:
message += " and failed downloads"
return create_success_response(
data={'cleared_count': cleared_count},
message=message
)
@downloads_bp.route('/history', methods=['GET'])
@handle_api_errors
@validate_pagination_params
@optional_auth
def get_download_history() -> Dict[str, Any]:
"""
Get download history with optional filtering.
Query Parameters:
- status: Filter by status (completed, failed)
- anime_id: Filter by anime ID
- date_from: Filter from date (ISO format)
- date_to: Filter to date (ISO format)
- page: Page number (default: 1)
- per_page: Items per page (default: 50, max: 1000)
Returns:
Paginated download history
"""
if not download_manager:
raise APIException("Download manager not available", 503)
# Extract filters
status_filter = request.args.get('status')
anime_id = request.args.get('anime_id')
date_from = request.args.get('date_from')
date_to = request.args.get('date_to')
# Validate filters
if status_filter and status_filter not in ['completed', 'failed']:
raise ValidationError("Status filter must be 'completed' or 'failed'")
if anime_id:
try:
anime_id = int(anime_id)
except ValueError:
raise ValidationError("anime_id must be a valid integer")
# Validate dates
if date_from:
try:
datetime.fromisoformat(date_from.replace('Z', '+00:00'))
except ValueError:
raise ValidationError("date_from must be in ISO format")
if date_to:
try:
datetime.fromisoformat(date_to.replace('Z', '+00:00'))
except ValueError:
raise ValidationError("date_to must be in ISO format")
# Get pagination parameters
page, per_page = extract_pagination_params()
# Get download history
history = download_manager.get_download_history(
status_filter=status_filter,
anime_id=anime_id,
date_from=date_from,
date_to=date_to
)
# Format history data
formatted_history = [format_download_response(download.__dict__) for download in history]
# Apply pagination
total = len(formatted_history)
start_idx = (page - 1) * per_page
end_idx = start_idx + per_page
paginated_history = formatted_history[start_idx:end_idx]
return create_paginated_response(
data=paginated_history,
page=page,
per_page=per_page,
total=total,
endpoint='downloads.get_download_history'
)
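# --- Illustrative usage sketch (not part of the original file) ---
# Querying the download history above for failed downloads in a date range;
# host, token and dates are placeholder values.
#
#     import requests
#
#     resp = requests.get(
#         "http://localhost:5000/api/v1/downloads/history",
#         params={
#             "status": "failed",
#             "date_from": "2024-01-01T00:00:00Z",
#             "date_to": "2024-01-31T23:59:59Z",
#             "page": 1,
#             "per_page": 25,
#         },
#         timeout=10,
#     )
#     print(resp.json()["pagination"])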

View File

@@ -1,584 +0,0 @@
"""
Episode Management API Endpoints
This module provides REST API endpoints for episode CRUD operations,
including episode status management and metadata operations.
"""
from flask import Blueprint, request
from typing import Dict, List, Any, Optional
import uuid
from ...shared.auth_decorators import require_auth, optional_auth
from ...shared.error_handlers import handle_api_errors, APIException, NotFoundError, ValidationError
from ...shared.validators import validate_json_input, validate_id_parameter, validate_pagination_params
from ...shared.response_helpers import (
create_success_response, create_paginated_response, format_episode_response,
extract_pagination_params, create_batch_response
)
# Import database components (these imports would need to be adjusted based on actual structure)
try:
from database_manager import episode_repository, anime_repository, EpisodeMetadata
except ImportError:
# Fallback for development/testing
episode_repository = None
anime_repository = None
EpisodeMetadata = None
# Blueprint for episode management endpoints
episodes_bp = Blueprint('episodes', __name__, url_prefix='/api/v1/episodes')
@episodes_bp.route('', methods=['GET'])
@handle_api_errors
@validate_pagination_params
@optional_auth
def list_episodes() -> Dict[str, Any]:
"""
Get all episodes with optional filtering and pagination.
Query Parameters:
- anime_id: Filter by anime ID
- status: Filter by episode status
- downloaded: Filter by download status (true/false)
- episode_number: Filter by episode number
- search: Search in episode title
- page: Page number (default: 1)
- per_page: Items per page (default: 50, max: 1000)
Returns:
Paginated list of episodes
"""
if not episode_repository:
raise APIException("Episode repository not available", 503)
# Extract filters
anime_id = request.args.get('anime_id')
status_filter = request.args.get('status')
downloaded_filter = request.args.get('downloaded')
episode_number = request.args.get('episode_number')
search_term = request.args.get('search', '').strip()
# Validate filters
if anime_id:
try:
anime_id = int(anime_id)
except ValueError:
raise ValidationError("anime_id must be a valid integer")
if downloaded_filter and downloaded_filter.lower() not in ['true', 'false']:
raise ValidationError("downloaded filter must be 'true' or 'false'")
if episode_number:
try:
episode_number = int(episode_number)
if episode_number < 1:
raise ValidationError("episode_number must be positive")
except ValueError:
raise ValidationError("episode_number must be a valid integer")
# Get pagination parameters
page, per_page = extract_pagination_params()
# Get episodes with filters
episodes = episode_repository.get_all_episodes(
anime_id=anime_id,
status_filter=status_filter,
downloaded_filter=downloaded_filter.lower() == 'true' if downloaded_filter else None,
episode_number=episode_number,
search_term=search_term
)
# Format episode data
formatted_episodes = [format_episode_response(episode.__dict__) for episode in episodes]
# Apply pagination
total = len(formatted_episodes)
start_idx = (page - 1) * per_page
end_idx = start_idx + per_page
paginated_episodes = formatted_episodes[start_idx:end_idx]
return create_paginated_response(
data=paginated_episodes,
page=page,
per_page=per_page,
total=total,
endpoint='episodes.list_episodes'
)
@episodes_bp.route('/<int:episode_id>', methods=['GET'])
@handle_api_errors
@validate_id_parameter('episode_id')
@optional_auth
def get_episode(episode_id: int) -> Dict[str, Any]:
"""
Get specific episode by ID.
Args:
episode_id: Unique identifier for the episode
Returns:
Episode details with download information
"""
if not episode_repository:
raise APIException("Episode repository not available", 503)
episode = episode_repository.get_episode_by_id(episode_id)
if not episode:
raise NotFoundError("Episode not found")
# Format episode data
episode_data = format_episode_response(episode.__dict__)
# Add download information if available
download_info = episode_repository.get_download_info(episode_id)
if download_info:
episode_data['download_info'] = download_info
return create_success_response(episode_data)
@episodes_bp.route('', methods=['POST'])
@handle_api_errors
@validate_json_input(
required_fields=['anime_id', 'episode_number', 'title', 'url'],
optional_fields=['description', 'status', 'duration', 'air_date', 'custom_metadata'],
field_types={
'anime_id': int,
'episode_number': int,
'title': str,
'url': str,
'description': str,
'status': str,
'duration': int,
'air_date': str,
'custom_metadata': dict
}
)
@require_auth
def create_episode() -> Dict[str, Any]:
"""
Create a new episode record.
Required Fields:
- anime_id: ID of the anime this episode belongs to
- episode_number: Episode number
- title: Episode title
- url: Episode URL
Optional Fields:
- description: Episode description
- status: Episode status (available, unavailable, coming_soon, downloaded)
- duration: Episode duration in minutes
- air_date: Air date in ISO format
- custom_metadata: Additional metadata as key-value pairs
Returns:
Created episode details
"""
if not episode_repository or not anime_repository:
raise APIException("Episode repository not available", 503)
data = request.get_json()
# Validate anime exists
anime = anime_repository.get_anime_by_id(data['anime_id'])
if not anime:
raise ValidationError("Anime not found")
# Validate status if provided
valid_statuses = ['available', 'unavailable', 'coming_soon', 'downloaded']
if 'status' in data and data['status'] not in valid_statuses:
raise ValidationError(f"Status must be one of: {', '.join(valid_statuses)}")
# Check if episode already exists for this anime
existing_episode = episode_repository.get_episode_by_anime_and_number(
data['anime_id'], data['episode_number']
)
if existing_episode:
raise ValidationError(f"Episode {data['episode_number']} already exists for this anime")
# Validate episode number
if data['episode_number'] < 1:
raise ValidationError("Episode number must be positive")
# Create episode metadata object
try:
episode = EpisodeMetadata(
episode_id=str(uuid.uuid4()),
anime_id=data['anime_id'],
episode_number=data['episode_number'],
title=data['title'],
url=data['url'],
description=data.get('description'),
status=data.get('status', 'available'),
duration=data.get('duration'),
air_date=data.get('air_date'),
custom_metadata=data.get('custom_metadata', {})
)
except Exception as e:
raise ValidationError(f"Invalid episode data: {str(e)}")
# Save to database
success = episode_repository.create_episode(episode)
if not success:
raise APIException("Failed to create episode", 500)
# Return created episode
episode_data = format_episode_response(episode.__dict__)
return create_success_response(
data=episode_data,
message="Episode created successfully",
status_code=201
)
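# --- Illustrative usage sketch (not part of the original file) ---
# Creating an episode record through the endpoint above; all field values are
# invented for the example and the host/token are placeholders.
#
#     import requests
#
#     payload = {
#         "anime_id": 7,
#         "episode_number": 1,
#         "title": "Episode 1",
#         "url": "https://example.com/anime/7/episode-1",
#         "status": "available",
#         "duration": 24,
#     }
#     resp = requests.post(
#         "http://localhost:5000/api/v1/episodes",
#         json=payload,
#         headers={"Authorization": "Bearer <token>"},
#         timeout=10,
#     )
#     print(resp.status_code)  # 201 on success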
@episodes_bp.route('/<int:episode_id>', methods=['PUT'])
@handle_api_errors
@validate_id_parameter('episode_id')
@validate_json_input(
optional_fields=['title', 'url', 'description', 'status', 'duration', 'air_date', 'custom_metadata'],
field_types={
'title': str,
'url': str,
'description': str,
'status': str,
'duration': int,
'air_date': str,
'custom_metadata': dict
}
)
@require_auth
def update_episode(episode_id: int) -> Dict[str, Any]:
"""
Update an existing episode record.
Args:
episode_id: Unique identifier for the episode
Optional Fields:
- title: Episode title
- url: Episode URL
- description: Episode description
- status: Episode status (available, unavailable, coming_soon, downloaded)
- duration: Episode duration in minutes
- air_date: Air date in ISO format
- custom_metadata: Additional metadata as key-value pairs
Returns:
Updated episode details
"""
if not episode_repository:
raise APIException("Episode repository not available", 503)
data = request.get_json()
# Get existing episode
existing_episode = episode_repository.get_episode_by_id(episode_id)
if not existing_episode:
raise NotFoundError("Episode not found")
# Validate status if provided
valid_statuses = ['available', 'unavailable', 'coming_soon', 'downloaded']
if 'status' in data and data['status'] not in valid_statuses:
raise ValidationError(f"Status must be one of: {', '.join(valid_statuses)}")
# Update fields
update_fields = {}
for field in ['title', 'url', 'description', 'status', 'duration', 'air_date']:
if field in data:
update_fields[field] = data[field]
# Handle custom metadata update (merge instead of replace)
if 'custom_metadata' in data:
existing_metadata = existing_episode.custom_metadata or {}
existing_metadata.update(data['custom_metadata'])
update_fields['custom_metadata'] = existing_metadata
# Perform update
success = episode_repository.update_episode(episode_id, update_fields)
if not success:
raise APIException("Failed to update episode", 500)
# Get updated episode
updated_episode = episode_repository.get_episode_by_id(episode_id)
episode_data = format_episode_response(updated_episode.__dict__)
return create_success_response(
data=episode_data,
message="Episode updated successfully"
)
@episodes_bp.route('/<int:episode_id>', methods=['DELETE'])
@handle_api_errors
@validate_id_parameter('episode_id')
@require_auth
def delete_episode(episode_id: int) -> Dict[str, Any]:
"""
Delete an episode record.
Args:
episode_id: Unique identifier for the episode
Query Parameters:
- delete_file: Set to 'true' to also delete the downloaded file
Returns:
Deletion confirmation
"""
if not episode_repository:
raise APIException("Episode repository not available", 503)
# Check if episode exists
existing_episode = episode_repository.get_episode_by_id(episode_id)
if not existing_episode:
raise NotFoundError("Episode not found")
# Check if we should also delete the file
delete_file = request.args.get('delete_file', 'false').lower() == 'true'
# Perform deletion
success = episode_repository.delete_episode(episode_id, delete_file=delete_file)
if not success:
raise APIException("Failed to delete episode", 500)
message = f"Episode {existing_episode.episode_number} deleted successfully"
if delete_file:
message += " (including downloaded file)"
return create_success_response(message=message)
@episodes_bp.route('/bulk/status', methods=['PUT'])
@handle_api_errors
@validate_json_input(
required_fields=['episode_ids', 'status'],
field_types={
'episode_ids': list,
'status': str
}
)
@require_auth
def bulk_update_status() -> Dict[str, Any]:
"""
Update status for multiple episodes.
Required Fields:
- episode_ids: List of episode IDs to update
- status: New status for all episodes
Returns:
Results of the bulk operation
"""
if not episode_repository:
raise APIException("Episode repository not available", 503)
data = request.get_json()
episode_ids = data['episode_ids']
new_status = data['status']
# Validate status
valid_statuses = ['available', 'unavailable', 'coming_soon', 'downloaded']
if new_status not in valid_statuses:
raise ValidationError(f"Status must be one of: {', '.join(valid_statuses)}")
# Validate episode_ids
if not isinstance(episode_ids, list) or not episode_ids:
raise ValidationError("episode_ids must be a non-empty list")
if len(episode_ids) > 100:
raise ValidationError("Cannot operate on more than 100 episodes at once")
# Validate episode IDs are integers
try:
episode_ids = [int(eid) for eid in episode_ids]
except ValueError:
raise ValidationError("All episode_ids must be valid integers")
# Perform bulk update
successful_items = []
failed_items = []
for episode_id in episode_ids:
try:
success = episode_repository.update_episode(episode_id, {'status': new_status})
if success:
successful_items.append({'episode_id': episode_id, 'new_status': new_status})
else:
failed_items.append({'episode_id': episode_id, 'error': 'Episode not found'})
except Exception as e:
failed_items.append({'episode_id': episode_id, 'error': str(e)})
return create_batch_response(
successful_items=successful_items,
failed_items=failed_items,
message=f"Bulk status update to '{new_status}' completed"
)
@episodes_bp.route('/anime/<int:anime_id>/sync', methods=['POST'])
@handle_api_errors
@validate_id_parameter('anime_id')
@require_auth
def sync_anime_episodes(anime_id: int) -> Dict[str, Any]:
"""
Synchronize episodes for an anime by scanning the source.
Args:
anime_id: Unique identifier for the anime
Returns:
Synchronization results
"""
if not episode_repository or not anime_repository:
raise APIException("Episode repository not available", 503)
# Check if anime exists
anime = anime_repository.get_anime_by_id(anime_id)
if not anime:
raise NotFoundError("Anime not found")
# This would trigger the episode scanning/syncing process
try:
sync_result = episode_repository.sync_episodes_for_anime(anime_id)
return create_success_response(
data={
'anime_id': anime_id,
'episodes_found': sync_result.get('episodes_found', 0),
'episodes_added': sync_result.get('episodes_added', 0),
'episodes_updated': sync_result.get('episodes_updated', 0),
'episodes_removed': sync_result.get('episodes_removed', 0)
},
message=f"Episode sync completed for '{anime.name}'"
)
except Exception as e:
raise APIException(f"Failed to sync episodes: {str(e)}", 500)
@episodes_bp.route('/<int:episode_id>/download', methods=['POST'])
@handle_api_errors
@validate_id_parameter('episode_id')
@require_auth
def queue_episode_download(episode_id: int) -> Dict[str, Any]:
"""
Queue an episode for download.
Args:
episode_id: Unique identifier for the episode
Returns:
Download queue confirmation
"""
if not episode_repository:
raise APIException("Episode repository not available", 503)
# Check if episode exists
episode = episode_repository.get_episode_by_id(episode_id)
if not episode:
raise NotFoundError("Episode not found")
# Check if episode is already downloaded
if episode.status == 'downloaded':
raise ValidationError("Episode is already downloaded")
# Check if episode is available for download
if episode.status != 'available':
raise ValidationError(f"Episode status '{episode.status}' is not available for download")
# Queue for download (this would integrate with the download system)
try:
from ...download_manager import download_queue
download_id = download_queue.add_episode_download(episode_id)
return create_success_response(
data={'download_id': download_id},
message=f"Episode {episode.episode_number} queued for download"
)
except Exception as e:
raise APIException(f"Failed to queue download: {str(e)}", 500)
@episodes_bp.route('/search', methods=['GET'])
@handle_api_errors
@validate_pagination_params
@optional_auth
def search_episodes() -> Dict[str, Any]:
"""
Search episodes by title or other criteria.
Query Parameters:
- q: Search query (required)
- anime_id: Limit search to specific anime
- status: Filter by episode status
- page: Page number (default: 1)
- per_page: Items per page (default: 50, max: 1000)
Returns:
Paginated search results
"""
if not episode_repository:
raise APIException("Episode repository not available", 503)
search_term = request.args.get('q', '').strip()
if not search_term:
raise ValidationError("Search term 'q' is required")
if len(search_term) < 2:
raise ValidationError("Search term must be at least 2 characters long")
# Get additional filters
anime_id = request.args.get('anime_id')
status_filter = request.args.get('status')
# Validate anime_id if provided
if anime_id:
try:
anime_id = int(anime_id)
except ValueError:
raise ValidationError("anime_id must be a valid integer")
# Get pagination parameters
page, per_page = extract_pagination_params()
# Perform search
search_results = episode_repository.search_episodes(
search_term=search_term,
anime_id=anime_id,
status_filter=status_filter
)
# Format results
formatted_results = [format_episode_response(episode.__dict__) for episode in search_results]
# Apply pagination
total = len(formatted_results)
start_idx = (page - 1) * per_page
end_idx = start_idx + per_page
paginated_results = formatted_results[start_idx:end_idx]
# Create response with search metadata
response = create_paginated_response(
data=paginated_results,
page=page,
per_page=per_page,
total=total,
endpoint='episodes.search_episodes',
q=search_term
)
# Add search metadata
response['search'] = {
'query': search_term,
'total_results': total,
'filters': {
'anime_id': anime_id,
'status': status_filter
}
}
return response
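# --- Illustrative usage sketch (not part of the original file) ---
# Searching episodes by title via the endpoint above; the query and anime ID
# are example values, and the host is a placeholder.
#
#     import requests
#
#     resp = requests.get(
#         "http://localhost:5000/api/v1/episodes/search",
#         params={"q": "final", "anime_id": 7, "per_page": 20},
#         timeout=10,
#     )
#     body = resp.json()
#     print(body["search"]["total_results"], "matches")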

View File

@@ -1,436 +0,0 @@
"""
Health Check Endpoints
This module provides basic health check endpoints for monitoring
the AniWorld application's status.
"""
from flask import Blueprint, jsonify
import time
import os
import psutil
from datetime import datetime
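# NOTE: get_cached_health_data, database_manager, config and memory_monitor
# are referenced further down but not imported in this module; they are
# assumed to be provided elsewhere in the original project. Also note that
# health_bp uses url_prefix='/api/health', so routes below whose paths also
# begin with '/api/health/...' resolve under a doubled prefix.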
# Blueprint for health check endpoints
health_bp = Blueprint('health_check', __name__, url_prefix='/api/health')
@health_bp.route('/status')
def get_basic_health():
"""Get basic application health status."""
try:
# Basic system metrics
memory = psutil.virtual_memory()
disk = psutil.disk_usage('/')
return jsonify({
'status': 'healthy',
'timestamp': datetime.now().isoformat(),
'system': {
'memory_usage_percent': memory.percent,
'disk_usage_percent': disk.percent,
'uptime': time.time()
},
'application': {
'status': 'running',
'version': '1.0.0'
}
})
except Exception as e:
return jsonify({
'status': 'error',
'message': str(e),
'timestamp': datetime.now().isoformat()
}), 500
@health_bp.route('/ping')
def ping():
"""Simple ping endpoint."""
return jsonify({
'status': 'ok',
'timestamp': datetime.now().isoformat()
})
@health_bp.route('/api/health')
def basic_health():
"""Basic health check endpoint for load balancers."""
return jsonify({
'status': 'healthy',
'timestamp': datetime.utcnow().isoformat(),
'service': 'aniworld-web'
})
@health_bp.route('/api/health/system')
def system_health():
"""Comprehensive system health check."""
def check_system_health():
try:
# System metrics
cpu_percent = psutil.cpu_percent(interval=1)
memory = psutil.virtual_memory()
disk = psutil.disk_usage('/')
# Process metrics
process = psutil.Process()
process_memory = process.memory_info()
return {
'status': 'healthy',
'timestamp': datetime.utcnow().isoformat(),
'system': {
'cpu_percent': cpu_percent,
'memory': {
'total_mb': memory.total / 1024 / 1024,
'available_mb': memory.available / 1024 / 1024,
'percent': memory.percent
},
'disk': {
'total_gb': disk.total / 1024 / 1024 / 1024,
'free_gb': disk.free / 1024 / 1024 / 1024,
'percent': (disk.used / disk.total) * 100
}
},
'process': {
'memory_mb': process_memory.rss / 1024 / 1024,
'threads': process.num_threads(),
'cpu_percent': process.cpu_percent()
}
}
except Exception as e:
return {
'status': 'unhealthy',
'error': str(e),
'timestamp': datetime.utcnow().isoformat()
}
return jsonify(get_cached_health_data('system', check_system_health))
@health_bp.route('/api/health/database')
def database_health():
"""Database connectivity and health check."""
def check_database_health():
try:
# Test database connection
start_time = time.time()
with database_manager.get_connection() as conn:
cursor = conn.execute("SELECT 1")
result = cursor.fetchone()
connection_time = (time.time() - start_time) * 1000 # ms
# Get database size and basic stats
db_size = os.path.getsize(database_manager.db_path) if os.path.exists(database_manager.db_path) else 0
# Check schema version
schema_version = database_manager.get_current_version()
# Get table counts
with database_manager.get_connection() as conn:
anime_count = conn.execute("SELECT COUNT(*) FROM anime_metadata").fetchone()[0]
episode_count = conn.execute("SELECT COUNT(*) FROM episode_metadata").fetchone()[0]
return {
'status': 'healthy',
'timestamp': datetime.utcnow().isoformat(),
'database': {
'connected': True,
'connection_time_ms': connection_time,
'size_mb': db_size / 1024 / 1024,
'schema_version': schema_version,
'tables': {
'anime_count': anime_count,
'episode_count': episode_count
}
}
}
except Exception as e:
return {
'status': 'unhealthy',
'timestamp': datetime.utcnow().isoformat(),
'database': {
'connected': False,
'error': str(e)
}
}
return jsonify(get_cached_health_data('database', check_database_health, ttl=60))
@health_bp.route('/api/health/dependencies')
def dependencies_health():
"""Check health of external dependencies."""
def check_dependencies():
dependencies = {
'status': 'healthy',
'timestamp': datetime.utcnow().isoformat(),
'dependencies': {}
}
# Check filesystem access
try:
anime_directory = getattr(config, 'anime_directory', '/app/data')
if os.path.exists(anime_directory):
# Test read/write access
test_file = os.path.join(anime_directory, '.health_check')
with open(test_file, 'w') as f:
f.write('test')
os.remove(test_file)
dependencies['dependencies']['filesystem'] = {
'status': 'healthy',
'path': anime_directory,
'accessible': True
}
else:
dependencies['dependencies']['filesystem'] = {
'status': 'unhealthy',
'path': anime_directory,
'accessible': False,
'error': 'Directory does not exist'
}
dependencies['status'] = 'degraded'
except Exception as e:
dependencies['dependencies']['filesystem'] = {
'status': 'unhealthy',
'error': str(e)
}
dependencies['status'] = 'degraded'
# Check network connectivity (basic)
try:
import socket
socket.create_connection(("8.8.8.8", 53), timeout=3)
dependencies['dependencies']['network'] = {
'status': 'healthy',
'connectivity': True
}
except Exception as e:
dependencies['dependencies']['network'] = {
'status': 'unhealthy',
'connectivity': False,
'error': str(e)
}
dependencies['status'] = 'degraded'
return dependencies
return jsonify(get_cached_health_data('dependencies', check_dependencies, ttl=120))
@health_bp.route('/api/health/performance')
def performance_health():
"""Performance metrics and health indicators."""
def check_performance():
try:
# Memory usage
memory_usage = memory_monitor.get_current_memory_usage() if memory_monitor else 0
is_memory_high = memory_monitor.is_memory_usage_high() if memory_monitor else False
# Thread count
process = psutil.Process()
thread_count = process.num_threads()
# Load average (if available)
load_avg = None
try:
load_avg = os.getloadavg()
except (AttributeError, OSError):
# Not available on all platforms
pass
# Check if performance is within acceptable limits
performance_status = 'healthy'
warnings = []
if is_memory_high:
performance_status = 'degraded'
warnings.append('High memory usage detected')
if thread_count > 100: # Arbitrary threshold
performance_status = 'degraded'
warnings.append(f'High thread count: {thread_count}')
if load_avg and load_avg[0] > 4: # Load average > 4
performance_status = 'degraded'
warnings.append(f'High system load: {load_avg[0]:.2f}')
return {
'status': performance_status,
'timestamp': datetime.utcnow().isoformat(),
'performance': {
'memory_usage_mb': memory_usage,
'memory_high': is_memory_high,
'thread_count': thread_count,
'load_average': load_avg,
'warnings': warnings
}
}
except Exception as e:
return {
'status': 'error',
'timestamp': datetime.utcnow().isoformat(),
'error': str(e)
}
return jsonify(get_cached_health_data('performance', check_performance, ttl=10))
@health_bp.route('/api/health/detailed')
def detailed_health():
"""Comprehensive health check combining all metrics."""
def check_detailed_health():
try:
# Get all health checks
system = get_cached_health_data('system', lambda: system_health().json)
database = get_cached_health_data('database', lambda: database_health().json)
dependencies = get_cached_health_data('dependencies', lambda: dependencies_health().json)
performance = get_cached_health_data('performance', lambda: performance_health().json)
# Determine overall status
statuses = [
system.get('status', 'unknown'),
database.get('status', 'unknown'),
dependencies.get('status', 'unknown'),
performance.get('status', 'unknown')
]
if 'unhealthy' in statuses or 'error' in statuses:
overall_status = 'unhealthy'
elif 'degraded' in statuses:
overall_status = 'degraded'
else:
overall_status = 'healthy'
return {
'status': overall_status,
'timestamp': datetime.utcnow().isoformat(),
'components': {
'system': system,
'database': database,
'dependencies': dependencies,
'performance': performance
}
}
except Exception as e:
return {
'status': 'error',
'timestamp': datetime.utcnow().isoformat(),
'error': str(e)
}
# Don't cache detailed health - always get fresh data
return jsonify(check_detailed_health())
@health_bp.route('/api/health/ready')
def readiness_probe():
"""Kubernetes readiness probe endpoint."""
try:
# Check critical dependencies
with database_manager.get_connection() as conn:
conn.execute("SELECT 1")
# Check if anime directory is accessible
anime_directory = getattr(config, 'anime_directory', '/app/data')
if not os.path.exists(anime_directory):
raise Exception(f"Anime directory not accessible: {anime_directory}")
return jsonify({
'status': 'ready',
'timestamp': datetime.utcnow().isoformat()
})
except Exception as e:
return jsonify({
'status': 'not_ready',
'timestamp': datetime.utcnow().isoformat(),
'error': str(e)
}), 503
@health_bp.route('/api/health/live')
def liveness_probe():
"""Kubernetes liveness probe endpoint."""
try:
# Basic liveness check - just verify the application is responding
return jsonify({
'status': 'alive',
'timestamp': datetime.utcnow().isoformat(),
'uptime_seconds': time.time() - psutil.Process().create_time()
})
except Exception as e:
return jsonify({
'status': 'dead',
'timestamp': datetime.utcnow().isoformat(),
'error': str(e)
}), 503
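# --- Illustrative usage sketch (not part of the original file) ---
# A tiny standalone readiness poll that could back an exec-style health probe,
# assuming `requests` is installed; the URL is a placeholder and, given the
# blueprint prefix noted above, the effective path may differ in practice.
#
#     import sys
#
#     import requests
#
#     def main() -> int:
#         try:
#             resp = requests.get("http://localhost:5000/api/health/ready", timeout=2)
#             return 0 if resp.status_code == 200 else 1
#         except requests.RequestException:
#             return 1
#
#     if __name__ == "__main__":
#         sys.exit(main())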
@health_bp.route('/api/health/metrics')
def prometheus_metrics():
"""Prometheus-compatible metrics endpoint."""
try:
# Generate Prometheus-format metrics
metrics = []
# System metrics
cpu_percent = psutil.cpu_percent()
memory = psutil.virtual_memory()
disk = psutil.disk_usage('/')
metrics.extend([
f"# HELP aniworld_cpu_usage_percent CPU usage percentage",
f"# TYPE aniworld_cpu_usage_percent gauge",
f"aniworld_cpu_usage_percent {cpu_percent}",
f"",
f"# HELP aniworld_memory_usage_percent Memory usage percentage",
f"# TYPE aniworld_memory_usage_percent gauge",
f"aniworld_memory_usage_percent {memory.percent}",
f"",
f"# HELP aniworld_disk_usage_percent Disk usage percentage",
f"# TYPE aniworld_disk_usage_percent gauge",
f"aniworld_disk_usage_percent {(disk.used / disk.total) * 100}",
f"",
])
# Database metrics
try:
with database_manager.get_connection() as conn:
anime_count = conn.execute("SELECT COUNT(*) FROM anime_metadata").fetchone()[0]
episode_count = conn.execute("SELECT COUNT(*) FROM episode_metadata").fetchone()[0]
metrics.extend([
f"# HELP aniworld_anime_total Total number of anime in database",
f"# TYPE aniworld_anime_total counter",
f"aniworld_anime_total {anime_count}",
f"",
f"# HELP aniworld_episodes_total Total number of episodes in database",
f"# TYPE aniworld_episodes_total counter",
f"aniworld_episodes_total {episode_count}",
f"",
])
except Exception:
pass
# Process metrics
process = psutil.Process()
metrics.extend([
f"# HELP aniworld_process_threads Number of threads in process",
f"# TYPE aniworld_process_threads gauge",
f"aniworld_process_threads {process.num_threads()}",
f"",
f"# HELP aniworld_process_memory_bytes Memory usage in bytes",
f"# TYPE aniworld_process_memory_bytes gauge",
f"aniworld_process_memory_bytes {process.memory_info().rss}",
f"",
])
return "\n".join(metrics), 200, {'Content-Type': 'text/plain; charset=utf-8'}
except Exception as e:
return f"# Error generating metrics: {e}", 500, {'Content-Type': 'text/plain'}
# Export the blueprint
__all__ = ['health_bp']

View File

@@ -1,701 +0,0 @@
"""
Integrations API endpoints.
This module handles all external integration operations including:
- API key management
- Webhook configuration
- External service integrations
- Third-party API management
"""
from flask import Blueprint, request, jsonify
from typing import Dict, List, Any, Optional, Tuple
import logging
import requests
import json
import hmac
import hashlib
import time
from datetime import datetime, timedelta
# Import shared utilities
try:
from src.server.web.controllers.shared.auth_decorators import require_auth, optional_auth
from src.server.web.controllers.shared.error_handlers import handle_api_errors
from src.server.web.controllers.shared.validators import (
validate_json_input, validate_query_params, validate_pagination_params,
validate_id_parameter, is_valid_url
)
from src.server.web.controllers.shared.response_helpers import (
create_success_response, create_error_response, create_paginated_response
)
except ImportError:
# Fallback imports for development
def require_auth(f): return f
def optional_auth(f): return f
def handle_api_errors(f): return f
def validate_json_input(**kwargs): return lambda f: f
def validate_query_params(**kwargs): return lambda f: f
def validate_pagination_params(f): return f
def validate_id_parameter(param): return lambda f: f
def is_valid_url(url): return url.startswith(('http://', 'https://'))
def create_success_response(msg, code=200, data=None): return jsonify({'success': True, 'message': msg, 'data': data}), code
def create_error_response(msg, code=400, details=None): return jsonify({'error': msg, 'details': details}), code
def create_paginated_response(items, page, per_page, total, endpoint=None): return jsonify({'data': items, 'pagination': {'page': page, 'per_page': per_page, 'total': total}}), 200
# Import integration components
try:
from src.server.data.integration_manager import IntegrationManager
from src.server.data.webhook_manager import WebhookManager
from src.data.api_key_manager import APIKeyManager
except ImportError:
# Fallback for development
class IntegrationManager:
def get_all_integrations(self, **kwargs): return []
def get_integrations_count(self, **kwargs): return 0
def get_integration_by_id(self, id): return None
def create_integration(self, **kwargs): return 1
def update_integration(self, id, **kwargs): return True
def delete_integration(self, id): return True
def test_integration(self, id): return {'success': True, 'response_time': 0.1}
def get_integration_logs(self, id, **kwargs): return []
def trigger_integration(self, id, data): return {'success': True}
class WebhookManager:
def get_all_webhooks(self, **kwargs): return []
def get_webhooks_count(self, **kwargs): return 0
def get_webhook_by_id(self, id): return None
def create_webhook(self, **kwargs): return 1
def update_webhook(self, id, **kwargs): return True
def delete_webhook(self, id): return True
def test_webhook(self, id): return {'success': True, 'response_time': 0.1}
def get_webhook_deliveries(self, id, **kwargs): return []
def redeliver_webhook(self, delivery_id): return True
def trigger_webhook(self, event, data): return True
class APIKeyManager:
def get_external_api_keys(self, **kwargs): return []
def get_external_api_key_by_id(self, id): return None
def create_external_api_key(self, **kwargs): return 1
def update_external_api_key(self, id, **kwargs): return True
def delete_external_api_key(self, id): return True
def test_external_api_key(self, id): return {'success': True}
def rotate_external_api_key(self, id): return {'new_key': 'new_api_key'}
# Create blueprint
integrations_bp = Blueprint('integrations', __name__)
# Initialize managers
integration_manager = IntegrationManager()
webhook_manager = WebhookManager()
api_key_manager = APIKeyManager()
logger = logging.getLogger(__name__)
@integrations_bp.route('/integrations', methods=['GET'])
@require_auth
@handle_api_errors
@validate_query_params(
allowed_params=['page', 'per_page', 'type', 'status', 'sort_by', 'sort_order'],
param_types={'page': int, 'per_page': int}
)
@validate_pagination_params
def list_integrations() -> Tuple[Any, int]:
"""
List integrations with pagination and filtering.
Query Parameters:
- page: Page number (default: 1)
- per_page: Items per page (default: 20, max: 100)
- type: Filter by integration type
- status: Filter by integration status
- sort_by: Sort field (default: created_at)
- sort_order: Sort order (asc/desc, default: desc)
Returns:
JSON response with paginated integration list
"""
page = request.args.get('page', 1, type=int)
per_page = min(request.args.get('per_page', 20, type=int), 100)
integration_type = request.args.get('type')
status = request.args.get('status')
sort_by = request.args.get('sort_by', 'created_at')
sort_order = request.args.get('sort_order', 'desc')
offset = (page - 1) * per_page
# Get integrations
integrations = integration_manager.get_all_integrations(
offset=offset,
limit=per_page,
integration_type=integration_type,
status=status,
sort_by=sort_by,
sort_order=sort_order
)
# Get total count
total = integration_manager.get_integrations_count(
integration_type=integration_type,
status=status
)
return create_paginated_response(
integrations,
page,
per_page,
total,
endpoint='/api/v1/integrations'
)
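# --- Illustrative usage sketch (not part of the original file) ---
# Listing integrations with filtering and pagination via the endpoint above;
# the host, token and filter values are placeholders.
#
#     import requests
#
#     resp = requests.get(
#         "http://localhost:5000/api/v1/integrations",
#         params={"type": "webhook", "status": "active", "page": 1, "per_page": 20},
#         headers={"Authorization": "Bearer <token>"},
#         timeout=10,
#     )
#     print(resp.json())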
@integrations_bp.route('/integrations/<int:integration_id>', methods=['GET'])
@require_auth
@handle_api_errors
@validate_id_parameter('integration_id')
def get_integration(integration_id: int) -> Tuple[Any, int]:
"""
Get specific integration by ID.
Args:
integration_id: Integration ID
Returns:
JSON response with integration data
"""
integration = integration_manager.get_integration_by_id(integration_id)
if not integration:
return create_error_response("Integration not found", 404)
return create_success_response("Integration retrieved successfully", 200, integration)
@integrations_bp.route('/integrations', methods=['POST'])
@require_auth
@handle_api_errors
@validate_json_input(
required_fields=['name', 'type', 'config'],
optional_fields=['description', 'enabled'],
field_types={'name': str, 'type': str, 'config': dict, 'description': str, 'enabled': bool}
)
def create_integration() -> Tuple[Any, int]:
"""
Create a new integration.
Request Body:
- name: Integration name (required)
- type: Integration type (required)
- config: Integration configuration (required)
- description: Integration description (optional)
- enabled: Whether integration is enabled (optional, default: true)
Returns:
JSON response with created integration data
"""
data = request.get_json()
# Validate integration type
allowed_types = ['webhook', 'api', 'discord', 'slack', 'email', 'custom']
if data['type'] not in allowed_types:
return create_error_response(f"Invalid integration type. Must be one of: {', '.join(allowed_types)}", 400)
# Validate configuration based on type
config_errors = _validate_integration_config(data['type'], data['config'])
if config_errors:
return create_error_response("Configuration validation failed", 400, config_errors)
try:
# Create integration
integration_id = integration_manager.create_integration(
name=data['name'],
integration_type=data['type'],
config=data['config'],
description=data.get('description', ''),
enabled=data.get('enabled', True)
)
# Get created integration
integration = integration_manager.get_integration_by_id(integration_id)
logger.info(f"Created integration {integration_id}: {data['name']} ({data['type']})")
return create_success_response("Integration created successfully", 201, integration)
except Exception as e:
logger.error(f"Error creating integration: {str(e)}")
return create_error_response("Failed to create integration", 500)
@integrations_bp.route('/integrations/<int:integration_id>', methods=['PUT'])
@require_auth
@handle_api_errors
@validate_id_parameter('integration_id')
@validate_json_input(
optional_fields=['name', 'config', 'description', 'enabled'],
field_types={'name': str, 'config': dict, 'description': str, 'enabled': bool}
)
def update_integration(integration_id: int) -> Tuple[Any, int]:
"""
Update an integration.
Args:
integration_id: Integration ID
Request Body:
- name: Integration name (optional)
- config: Integration configuration (optional)
- description: Integration description (optional)
- enabled: Whether integration is enabled (optional)
Returns:
JSON response with update result
"""
integration = integration_manager.get_integration_by_id(integration_id)
if not integration:
return create_error_response("Integration not found", 404)
data = request.get_json()
# Validate configuration if provided
if 'config' in data:
config_errors = _validate_integration_config(integration['type'], data['config'])
if config_errors:
return create_error_response("Configuration validation failed", 400, config_errors)
try:
# Update integration
success = integration_manager.update_integration(integration_id, **data)
if success:
# Get updated integration
updated_integration = integration_manager.get_integration_by_id(integration_id)
logger.info(f"Updated integration {integration_id}")
return create_success_response("Integration updated successfully", 200, updated_integration)
else:
return create_error_response("Failed to update integration", 500)
except Exception as e:
logger.error(f"Error updating integration {integration_id}: {str(e)}")
return create_error_response("Failed to update integration", 500)
@integrations_bp.route('/integrations/<int:integration_id>', methods=['DELETE'])
@require_auth
@handle_api_errors
@validate_id_parameter('integration_id')
def delete_integration(integration_id: int) -> Tuple[Any, int]:
"""
Delete an integration.
Args:
integration_id: Integration ID
Returns:
JSON response with deletion result
"""
integration = integration_manager.get_integration_by_id(integration_id)
if not integration:
return create_error_response("Integration not found", 404)
try:
success = integration_manager.delete_integration(integration_id)
if success:
logger.info(f"Deleted integration {integration_id}: {integration['name']}")
return create_success_response("Integration deleted successfully")
else:
return create_error_response("Failed to delete integration", 500)
except Exception as e:
logger.error(f"Error deleting integration {integration_id}: {str(e)}")
return create_error_response("Failed to delete integration", 500)
@integrations_bp.route('/integrations/<int:integration_id>/test', methods=['POST'])
@require_auth
@handle_api_errors
@validate_id_parameter('integration_id')
def test_integration(integration_id: int) -> Tuple[Any, int]:
"""
Test an integration.
Args:
integration_id: Integration ID
Returns:
JSON response with test result
"""
integration = integration_manager.get_integration_by_id(integration_id)
if not integration:
return create_error_response("Integration not found", 404)
try:
test_result = integration_manager.test_integration(integration_id)
logger.info(f"Tested integration {integration_id}: {test_result}")
return create_success_response("Integration test completed", 200, test_result)
except Exception as e:
logger.error(f"Error testing integration {integration_id}: {str(e)}")
return create_error_response("Failed to test integration", 500)
@integrations_bp.route('/integrations/<int:integration_id>/trigger', methods=['POST'])
@require_auth
@handle_api_errors
@validate_id_parameter('integration_id')
@validate_json_input(
optional_fields=['data'],
field_types={'data': dict}
)
def trigger_integration(integration_id: int) -> Tuple[Any, int]:
"""
Manually trigger an integration.
Args:
integration_id: Integration ID
Request Body:
- data: Custom data to send with trigger (optional)
Returns:
JSON response with trigger result
"""
integration = integration_manager.get_integration_by_id(integration_id)
if not integration:
return create_error_response("Integration not found", 404)
if not integration['enabled']:
return create_error_response("Integration is disabled", 400)
data = request.get_json() or {}
trigger_data = data.get('data', {})
try:
result = integration_manager.trigger_integration(integration_id, trigger_data)
logger.info(f"Triggered integration {integration_id}")
return create_success_response("Integration triggered successfully", 200, result)
except Exception as e:
logger.error(f"Error triggering integration {integration_id}: {str(e)}")
return create_error_response("Failed to trigger integration", 500)
@integrations_bp.route('/integrations/<int:integration_id>/logs', methods=['GET'])
@require_auth
@handle_api_errors
@validate_id_parameter('integration_id')
@validate_query_params(
allowed_params=['page', 'per_page', 'level'],
param_types={'page': int, 'per_page': int}
)
@validate_pagination_params
def get_integration_logs(integration_id: int) -> Tuple[Any, int]:
"""
Get integration execution logs.
Args:
integration_id: Integration ID
Query Parameters:
- page: Page number (default: 1)
- per_page: Items per page (default: 50, max: 200)
- level: Log level filter (optional)
Returns:
JSON response with integration logs
"""
integration = integration_manager.get_integration_by_id(integration_id)
if not integration:
return create_error_response("Integration not found", 404)
page = request.args.get('page', 1, type=int)
per_page = min(request.args.get('per_page', 50, type=int), 200)
level = request.args.get('level')
offset = (page - 1) * per_page
try:
logs = integration_manager.get_integration_logs(
integration_id,
offset=offset,
limit=per_page,
level=level
)
# For pagination, we'd need a count method
total = len(logs) # Simplified for this example
return create_paginated_response(
logs,
page,
per_page,
total,
endpoint=f'/api/v1/integrations/{integration_id}/logs'
)
except Exception as e:
logger.error(f"Error getting integration logs for {integration_id}: {str(e)}")
return create_error_response("Failed to get integration logs", 500)
@integrations_bp.route('/webhooks', methods=['GET'])
@require_auth
@handle_api_errors
@validate_query_params(
allowed_params=['page', 'per_page', 'event', 'status'],
param_types={'page': int, 'per_page': int}
)
@validate_pagination_params
def list_webhooks() -> Tuple[Any, int]:
"""
List webhooks with pagination and filtering.
Query Parameters:
- page: Page number (default: 1)
- per_page: Items per page (default: 20, max: 100)
- event: Filter by event type
- status: Filter by webhook status
Returns:
JSON response with paginated webhook list
"""
page = request.args.get('page', 1, type=int)
per_page = min(request.args.get('per_page', 20, type=int), 100)
event = request.args.get('event')
status = request.args.get('status')
offset = (page - 1) * per_page
# Get webhooks
webhooks = webhook_manager.get_all_webhooks(
offset=offset,
limit=per_page,
event=event,
status=status
)
# Get total count
total = webhook_manager.get_webhooks_count(
event=event,
status=status
)
return create_paginated_response(
webhooks,
page,
per_page,
total,
endpoint='/api/v1/webhooks'
)
@integrations_bp.route('/webhooks', methods=['POST'])
@require_auth
@handle_api_errors
@validate_json_input(
required_fields=['url', 'events'],
optional_fields=['name', 'secret', 'enabled', 'retry_config'],
field_types={'url': str, 'events': list, 'name': str, 'secret': str, 'enabled': bool, 'retry_config': dict}
)
def create_webhook() -> Tuple[Any, int]:
"""
Create a new webhook.
Request Body:
- url: Webhook URL (required)
- events: List of events to subscribe to (required)
- name: Webhook name (optional)
- secret: Webhook secret for signature verification (optional)
- enabled: Whether webhook is enabled (optional, default: true)
- retry_config: Retry configuration (optional)
Returns:
JSON response with created webhook data
"""
data = request.get_json()
# Validate URL
if not is_valid_url(data['url']):
return create_error_response("Invalid webhook URL", 400)
# Validate events
allowed_events = [
'anime.created', 'anime.updated', 'anime.deleted',
'episode.created', 'episode.updated', 'episode.deleted',
'download.started', 'download.completed', 'download.failed',
'backup.created', 'backup.restored', 'system.error'
]
invalid_events = [event for event in data['events'] if event not in allowed_events]
if invalid_events:
return create_error_response(f"Invalid events: {', '.join(invalid_events)}", 400)
try:
# Create webhook
webhook_id = webhook_manager.create_webhook(
url=data['url'],
events=data['events'],
name=data.get('name', ''),
secret=data.get('secret', ''),
enabled=data.get('enabled', True),
retry_config=data.get('retry_config', {})
)
# Get created webhook
webhook = webhook_manager.get_webhook_by_id(webhook_id)
logger.info(f"Created webhook {webhook_id}: {data['url']}")
return create_success_response("Webhook created successfully", 201, webhook)
except Exception as e:
logger.error(f"Error creating webhook: {str(e)}")
return create_error_response("Failed to create webhook", 500)
@integrations_bp.route('/webhooks/<int:webhook_id>/test', methods=['POST'])
@require_auth
@handle_api_errors
@validate_id_parameter('webhook_id')
def test_webhook(webhook_id: int) -> Tuple[Any, int]:
"""
Test a webhook.
Args:
webhook_id: Webhook ID
Returns:
JSON response with test result
"""
webhook = webhook_manager.get_webhook_by_id(webhook_id)
if not webhook:
return create_error_response("Webhook not found", 404)
try:
test_result = webhook_manager.test_webhook(webhook_id)
logger.info(f"Tested webhook {webhook_id}: {test_result}")
return create_success_response("Webhook test completed", 200, test_result)
except Exception as e:
logger.error(f"Error testing webhook {webhook_id}: {str(e)}")
return create_error_response("Failed to test webhook", 500)
@integrations_bp.route('/api-keys/external', methods=['GET'])
@require_auth
@handle_api_errors
@validate_pagination_params
def list_external_api_keys() -> Tuple[Any, int]:
"""
List external API keys.
Returns:
JSON response with external API keys
"""
try:
api_keys = api_key_manager.get_external_api_keys()
return create_success_response("External API keys retrieved successfully", 200, api_keys)
except Exception as e:
logger.error(f"Error getting external API keys: {str(e)}")
return create_error_response("Failed to get external API keys", 500)
@integrations_bp.route('/api-keys/external', methods=['POST'])
@require_auth
@handle_api_errors
@validate_json_input(
required_fields=['service', 'key'],
optional_fields=['name', 'description'],
field_types={'service': str, 'key': str, 'name': str, 'description': str}
)
def create_external_api_key() -> Tuple[Any, int]:
"""
Store external API key.
Request Body:
- service: Service name (required)
- key: API key value (required)
- name: Key name (optional)
- description: Key description (optional)
Returns:
JSON response with created API key data
"""
data = request.get_json()
try:
# Create external API key
key_id = api_key_manager.create_external_api_key(
service=data['service'],
key=data['key'],
name=data.get('name', ''),
description=data.get('description', '')
)
# Get created key (without exposing the actual key)
api_key = api_key_manager.get_external_api_key_by_id(key_id)
logger.info(f"Created external API key {key_id} for service: {data['service']}")
return create_success_response("External API key created successfully", 201, api_key)
except Exception as e:
logger.error(f"Error creating external API key: {str(e)}")
return create_error_response("Failed to create external API key", 500)
def _validate_integration_config(integration_type: str, config: Dict[str, Any]) -> List[str]:
"""
Validate integration configuration based on type.
Args:
integration_type: Type of integration
config: Configuration dictionary
Returns:
List of validation errors (empty if valid)
"""
errors = []
if integration_type == 'webhook':
if 'url' not in config:
errors.append("Webhook URL is required")
elif not is_valid_url(config['url']):
errors.append("Invalid webhook URL")
elif integration_type == 'discord':
if 'webhook_url' not in config:
errors.append("Discord webhook URL is required")
elif not config['webhook_url'].startswith('https://discord.com/api/webhooks/'):
errors.append("Invalid Discord webhook URL")
elif integration_type == 'slack':
if 'webhook_url' not in config:
errors.append("Slack webhook URL is required")
elif not config['webhook_url'].startswith('https://hooks.slack.com/'):
errors.append("Invalid Slack webhook URL")
elif integration_type == 'email':
required_fields = ['smtp_host', 'smtp_port', 'from_email']
for field in required_fields:
if field not in config:
errors.append(f"{field} is required for email integration")
elif integration_type == 'api':
if 'base_url' not in config:
errors.append("Base URL is required for API integration")
elif not is_valid_url(config['base_url']):
errors.append("Invalid API base URL")
return errors
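# Usage sketch for _validate_integration_config (the config values are
# illustrative only):
#
#     errors = _validate_integration_config('email', {'smtp_host': 'mail.example.com'})
#     # -> ['smtp_port is required for email integration',
#     #     'from_email is required for email integration']
#     if errors:
#         return create_error_response('; '.join(errors), 400)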

View File

@@ -1,268 +0,0 @@
"""
API endpoints for logging configuration and management.
"""
from flask import Blueprint, jsonify, request, send_file
from web.controllers.auth_controller import require_auth
from config import config
import logging
import os
from datetime import datetime
logger = logging.getLogger(__name__)
logging_bp = Blueprint('logging', __name__, url_prefix='/api/logging')
@logging_bp.route('/config', methods=['GET'])
@require_auth
def get_logging_config():
"""Get current logging configuration."""
try:
config_data = {
'log_level': config.log_level,
'enable_console_logging': config.enable_console_logging,
'enable_console_progress': config.enable_console_progress,
'enable_fail2ban_logging': config.enable_fail2ban_logging,
'log_files': [
'./logs/aniworld.log',
'./logs/auth_failures.log',
'./logs/downloads.log'
]
}
return jsonify({
'success': True,
'config': config_data
})
except Exception as e:
logger.error(f"Error getting logging config: {e}")
return jsonify({
'success': False,
'error': str(e)
}), 500
@logging_bp.route('/config', methods=['POST'])
@require_auth
def update_logging_config():
"""Update logging configuration."""
try:
data = request.get_json() or {}
# Update log level
log_level = data.get('log_level', config.log_level)
if log_level in ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']:
config.log_level = log_level
# Update console logging settings
if 'enable_console_logging' in data:
config.enable_console_logging = bool(data['enable_console_logging'])
if 'enable_console_progress' in data:
config.enable_console_progress = bool(data['enable_console_progress'])
if 'enable_fail2ban_logging' in data:
config.enable_fail2ban_logging = bool(data['enable_fail2ban_logging'])
# Save configuration
config.save_config()
        # Update runtime logging level
        numeric_level = getattr(logging, config.log_level.upper(), logging.INFO)
        logging.getLogger().setLevel(numeric_level)
logger.info(f"Logging configuration updated: level={config.log_level}, console={config.enable_console_logging}")
return jsonify({
'success': True,
'message': 'Logging configuration updated successfully',
'config': {
'log_level': config.log_level,
'enable_console_logging': config.enable_console_logging,
'enable_console_progress': config.enable_console_progress,
'enable_fail2ban_logging': config.enable_fail2ban_logging
}
})
except Exception as e:
logger.error(f"Error updating logging config: {e}")
return jsonify({
'success': False,
'error': str(e)
}), 500
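# Example request for this endpoint (host and port are assumptions; whatever
# credentials require_auth expects must also be supplied):
#
#   curl -X POST http://localhost:5000/api/logging/config \
#        -H 'Content-Type: application/json' \
#        -d '{"log_level": "DEBUG", "enable_console_logging": true}'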
@logging_bp.route('/files', methods=['GET'])
@require_auth
def list_log_files():
"""Get list of available log files."""
try:
# Return basic log files
log_files = [
'./logs/aniworld.log',
'./logs/auth_failures.log',
'./logs/downloads.log'
]
return jsonify({
'success': True,
'files': log_files
})
except Exception as e:
logger.error(f"Error listing log files: {e}")
return jsonify({
'success': False,
'error': str(e)
}), 500
@logging_bp.route('/files/<filename>/download', methods=['GET'])
@require_auth
def download_log_file(filename):
"""Download a specific log file."""
try:
# Security: Only allow log files
if not filename.endswith('.log'):
return jsonify({
'success': False,
'error': 'Invalid file type'
}), 400
log_directory = "logs"
file_path = os.path.join(log_directory, filename)
# Security: Check if file exists and is within log directory
if not os.path.exists(file_path) or not os.path.abspath(file_path).startswith(os.path.abspath(log_directory)):
return jsonify({
'success': False,
'error': 'File not found'
}), 404
return send_file(
file_path,
as_attachment=True,
download_name=f"{filename}_{datetime.now().strftime('%Y%m%d_%H%M%S')}"
)
except Exception as e:
logger.error(f"Error downloading log file {filename}: {e}")
return jsonify({
'success': False,
'error': str(e)
}), 500
@logging_bp.route('/files/<filename>/tail', methods=['GET'])
@require_auth
def tail_log_file(filename):
"""Get the last N lines from a log file."""
try:
# Security: Only allow log files
if not filename.endswith('.log'):
return jsonify({
'success': False,
'error': 'Invalid file type'
}), 400
        lines = request.args.get('lines', 100, type=int)
        lines = max(1, min(lines, 1000))  # Clamp to a 1-1000 line range
log_directory = "logs"
file_path = os.path.join(log_directory, filename)
# Security: Check if file exists and is within log directory
if not os.path.exists(file_path) or not os.path.abspath(file_path).startswith(os.path.abspath(log_directory)):
return jsonify({
'success': False,
'error': 'File not found'
}), 404
# Read last N lines
with open(file_path, 'r', encoding='utf-8') as f:
all_lines = f.readlines()
tail_lines = all_lines[-lines:] if len(all_lines) > lines else all_lines
return jsonify({
'success': True,
'lines': [line.rstrip('\n\r') for line in tail_lines],
'total_lines': len(all_lines),
'showing_lines': len(tail_lines)
})
except Exception as e:
logger.error(f"Error tailing log file {filename}: {e}")
return jsonify({
'success': False,
'error': str(e)
}), 500
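# The tail handler above reads the whole file so it can report total_lines.
# When only the tail itself is needed, a bounded read keeps memory flat; a
# minimal sketch (not wired into the endpoint):
def _tail_lines(path: str, max_lines: int) -> list:
    """Return the last max_lines lines of a text file without loading it fully."""
    from collections import deque
    with open(path, 'r', encoding='utf-8') as f:
        return [line.rstrip('\n\r') for line in deque(f, maxlen=max_lines)]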
@logging_bp.route('/cleanup', methods=['POST'])
@require_auth
def cleanup_logs():
"""Clean up old log files."""
try:
data = request.get_json() or {}
days = int(data.get('days', 30))
days = max(1, min(days, 365)) # Limit between 1-365 days
        # No cleanup helper exists yet, so report an empty result
        cleaned_files = []  # Actual cleanup logic would go here
logger.info(f"Cleaned up {len(cleaned_files)} old log files (older than {days} days)")
return jsonify({
'success': True,
'message': f'Cleaned up {len(cleaned_files)} log files',
'cleaned_files': cleaned_files
})
except Exception as e:
logger.error(f"Error cleaning up logs: {e}")
return jsonify({
'success': False,
'error': str(e)
}), 500
@logging_bp.route('/test', methods=['POST'])
@require_auth
def test_logging():
"""Test logging at different levels."""
try:
test_message = "Test log message from web interface"
# Test different log levels
logger.debug(f"DEBUG: {test_message}")
logger.info(f"INFO: {test_message}")
logger.warning(f"WARNING: {test_message}")
logger.error(f"ERROR: {test_message}")
        # Fail2ban (log_auth_failure) and download-progress (log_download_progress)
        # test hooks would go here once the logging infrastructure implements them
return jsonify({
'success': True,
'message': 'Test messages logged successfully'
})
except Exception as e:
logger.error(f"Error testing logging: {e}")
return jsonify({
'success': False,
'error': str(e)
}), 500

View File

@@ -1,656 +0,0 @@
"""
Maintenance API endpoints.
This module handles all system maintenance operations including:
- Database maintenance
- System optimization
- Cleanup operations
- Scheduled maintenance tasks
"""
from flask import Blueprint, request, jsonify
from typing import Dict, List, Any, Optional, Tuple
import logging
import os
import time
import sqlite3
from datetime import datetime
# Import shared utilities
try:
from src.server.web.controllers.shared.auth_decorators import require_auth
from src.server.web.controllers.shared.error_handlers import handle_api_errors
from src.server.web.controllers.shared.validators import validate_json_input, validate_query_params
from src.server.web.controllers.shared.response_helpers import (
create_success_response, create_error_response, format_file_size, format_datetime
)
except ImportError:
# Fallback imports for development
def require_auth(f): return f
def handle_api_errors(f): return f
def validate_json_input(**kwargs): return lambda f: f
def validate_query_params(**kwargs): return lambda f: f
def create_success_response(msg, code=200, data=None): return jsonify({'success': True, 'message': msg, 'data': data}), code
def create_error_response(msg, code=400, details=None): return jsonify({'error': msg, 'details': details}), code
def format_file_size(size): return f"{size} bytes"
def format_datetime(dt): return str(dt) if dt else None
# Import maintenance components
try:
from src.server.data.database_manager import DatabaseManager
from src.server.data.cleanup_manager import CleanupManager
from src.server.data.scheduler_manager import SchedulerManager
except ImportError:
# Fallback for development
class DatabaseManager:
def vacuum_database(self): return {'size_before': 1000000, 'size_after': 800000, 'time_taken': 5.2}
def analyze_database(self): return {'tables_analyzed': 10, 'time_taken': 2.1}
def integrity_check(self): return {'status': 'ok', 'errors': [], 'warnings': []}
def reindex_database(self): return {'indexes_rebuilt': 15, 'time_taken': 3.5}
def get_database_stats(self): return {'size': 10000000, 'tables': 10, 'indexes': 15}
        def optimize_database(self, operations=None, force=False): return {'optimizations': operations or ['vacuum', 'analyze', 'reindex'], 'time_taken': 10.7}
def backup_database(self, path): return {'backup_file': path, 'size': 5000000}
def get_slow_queries(self, **kwargs): return []
class CleanupManager:
def cleanup_temp_files(self): return {'files_deleted': 50, 'space_freed': 1048576}
def cleanup_logs(self, **kwargs): return {'logs_deleted': 100, 'space_freed': 2097152}
def cleanup_downloads(self, **kwargs): return {'downloads_cleaned': 25, 'space_freed': 5242880}
def cleanup_cache(self): return {'cache_cleared': True, 'space_freed': 10485760}
def cleanup_old_backups(self, **kwargs): return {'backups_deleted': 5, 'space_freed': 52428800}
def get_cleanup_stats(self): return {'temp_files': 100, 'log_files': 200, 'cache_size': 50000000}
class SchedulerManager:
def get_scheduled_tasks(self): return []
def create_scheduled_task(self, **kwargs): return 1
def update_scheduled_task(self, id, **kwargs): return True
def delete_scheduled_task(self, id): return True
def get_task_history(self, **kwargs): return []
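# For reference, a real SQLite vacuum reduces to a single statement. A minimal
# sketch matching the fallback's return shape (the database path is an
# assumption; the real DatabaseManager may resolve it differently):
def _sqlite_vacuum(db_path: str = 'data/aniworld.db') -> dict:
    size_before = os.path.getsize(db_path)
    start = time.time()
    conn = sqlite3.connect(db_path)
    try:
        conn.execute('VACUUM')
    finally:
        conn.close()
    return {
        'size_before': size_before,
        'size_after': os.path.getsize(db_path),
        'time_taken': round(time.time() - start, 2),
    }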
# Create blueprint
maintenance_bp = Blueprint('maintenance', __name__)
# Initialize managers
database_manager = DatabaseManager()
cleanup_manager = CleanupManager()
scheduler_manager = SchedulerManager()
logger = logging.getLogger(__name__)
@maintenance_bp.route('/maintenance/database/vacuum', methods=['POST'])
@require_auth
@handle_api_errors
def vacuum_database() -> Tuple[Any, int]:
"""
Vacuum the database to reclaim space and optimize performance.
Returns:
JSON response with vacuum operation results
"""
try:
logger.info("Starting database vacuum operation")
start_time = time.time()
result = database_manager.vacuum_database()
operation_time = time.time() - start_time
result['operation_time'] = round(operation_time, 2)
space_saved = result.get('size_before', 0) - result.get('size_after', 0)
result['space_saved'] = format_file_size(space_saved)
logger.info(f"Database vacuum completed in {operation_time:.2f} seconds, saved {space_saved} bytes")
return create_success_response("Database vacuum completed successfully", 200, result)
except Exception as e:
logger.error(f"Error during database vacuum: {str(e)}")
return create_error_response("Database vacuum failed", 500)
@maintenance_bp.route('/maintenance/database/analyze', methods=['POST'])
@require_auth
@handle_api_errors
def analyze_database() -> Tuple[Any, int]:
"""
Analyze the database to update query planner statistics.
Returns:
JSON response with analyze operation results
"""
try:
logger.info("Starting database analyze operation")
start_time = time.time()
result = database_manager.analyze_database()
operation_time = time.time() - start_time
result['operation_time'] = round(operation_time, 2)
logger.info(f"Database analyze completed in {operation_time:.2f} seconds")
return create_success_response("Database analyze completed successfully", 200, result)
except Exception as e:
logger.error(f"Error during database analyze: {str(e)}")
return create_error_response("Database analyze failed", 500)
@maintenance_bp.route('/maintenance/database/integrity-check', methods=['POST'])
@require_auth
@handle_api_errors
def integrity_check() -> Tuple[Any, int]:
"""
Perform database integrity check.
Returns:
JSON response with integrity check results
"""
try:
logger.info("Starting database integrity check")
start_time = time.time()
result = database_manager.integrity_check()
operation_time = time.time() - start_time
result['operation_time'] = round(operation_time, 2)
result['timestamp'] = datetime.now().isoformat()
if result['status'] == 'ok':
logger.info(f"Database integrity check passed in {operation_time:.2f} seconds")
return create_success_response("Database integrity check passed", 200, result)
else:
logger.warning(f"Database integrity check found issues: {result['errors']}")
return create_success_response("Database integrity check completed with issues", 200, result)
except Exception as e:
logger.error(f"Error during database integrity check: {str(e)}")
return create_error_response("Database integrity check failed", 500)
@maintenance_bp.route('/maintenance/database/reindex', methods=['POST'])
@require_auth
@handle_api_errors
def reindex_database() -> Tuple[Any, int]:
"""
Rebuild database indexes for optimal performance.
Returns:
JSON response with reindex operation results
"""
try:
logger.info("Starting database reindex operation")
start_time = time.time()
result = database_manager.reindex_database()
operation_time = time.time() - start_time
result['operation_time'] = round(operation_time, 2)
logger.info(f"Database reindex completed in {operation_time:.2f} seconds, rebuilt {result.get('indexes_rebuilt', 0)} indexes")
return create_success_response("Database reindex completed successfully", 200, result)
except Exception as e:
logger.error(f"Error during database reindex: {str(e)}")
return create_error_response("Database reindex failed", 500)
@maintenance_bp.route('/maintenance/database/optimize', methods=['POST'])
@require_auth
@handle_api_errors
@validate_json_input(
optional_fields=['operations', 'force'],
field_types={'operations': list, 'force': bool}
)
def optimize_database() -> Tuple[Any, int]:
"""
Perform comprehensive database optimization.
Request Body:
- operations: List of operations to perform (optional, default: all)
- force: Force optimization even if recently performed (optional, default: false)
Returns:
JSON response with optimization results
"""
data = request.get_json() or {}
operations = data.get('operations', ['vacuum', 'analyze', 'reindex'])
force = data.get('force', False)
# Validate operations
allowed_operations = ['vacuum', 'analyze', 'reindex', 'integrity_check']
invalid_operations = [op for op in operations if op not in allowed_operations]
if invalid_operations:
return create_error_response(f"Invalid operations: {', '.join(invalid_operations)}", 400)
try:
logger.info(f"Starting database optimization with operations: {operations}")
start_time = time.time()
result = database_manager.optimize_database(
operations=operations,
force=force
)
operation_time = time.time() - start_time
result['operation_time'] = round(operation_time, 2)
result['timestamp'] = datetime.now().isoformat()
logger.info(f"Database optimization completed in {operation_time:.2f} seconds")
return create_success_response("Database optimization completed successfully", 200, result)
except Exception as e:
logger.error(f"Error during database optimization: {str(e)}")
return create_error_response("Database optimization failed", 500)
@maintenance_bp.route('/maintenance/database/stats', methods=['GET'])
@require_auth
@handle_api_errors
def get_database_stats() -> Tuple[Any, int]:
"""
Get database statistics and health information.
Returns:
JSON response with database statistics
"""
try:
stats = database_manager.get_database_stats()
# Add formatted values
if 'size' in stats:
stats['size_formatted'] = format_file_size(stats['size'])
# Add slow queries
slow_queries = database_manager.get_slow_queries(limit=10)
stats['slow_queries'] = slow_queries
return create_success_response("Database statistics retrieved successfully", 200, stats)
except Exception as e:
logger.error(f"Error getting database stats: {str(e)}")
return create_error_response("Failed to get database statistics", 500)
@maintenance_bp.route('/maintenance/cleanup/temp-files', methods=['POST'])
@require_auth
@handle_api_errors
def cleanup_temp_files() -> Tuple[Any, int]:
"""
Clean up temporary files.
Returns:
JSON response with cleanup results
"""
try:
logger.info("Starting temporary files cleanup")
result = cleanup_manager.cleanup_temp_files()
result['space_freed_formatted'] = format_file_size(result.get('space_freed', 0))
result['timestamp'] = datetime.now().isoformat()
logger.info(f"Temporary files cleanup completed: {result['files_deleted']} files deleted, {result['space_freed']} bytes freed")
return create_success_response("Temporary files cleanup completed", 200, result)
except Exception as e:
logger.error(f"Error during temp files cleanup: {str(e)}")
return create_error_response("Temporary files cleanup failed", 500)
@maintenance_bp.route('/maintenance/cleanup/logs', methods=['POST'])
@require_auth
@handle_api_errors
@validate_json_input(
optional_fields=['older_than_days', 'keep_recent'],
field_types={'older_than_days': int, 'keep_recent': int}
)
def cleanup_logs() -> Tuple[Any, int]:
"""
Clean up old log files.
Request Body:
- older_than_days: Delete logs older than this many days (optional, default: 30)
- keep_recent: Number of recent log files to keep (optional, default: 10)
Returns:
JSON response with cleanup results
"""
data = request.get_json() or {}
older_than_days = data.get('older_than_days', 30)
keep_recent = data.get('keep_recent', 10)
try:
logger.info(f"Starting log cleanup: older than {older_than_days} days, keep {keep_recent} recent")
result = cleanup_manager.cleanup_logs(
older_than_days=older_than_days,
keep_recent=keep_recent
)
result['space_freed_formatted'] = format_file_size(result.get('space_freed', 0))
result['timestamp'] = datetime.now().isoformat()
logger.info(f"Log cleanup completed: {result['logs_deleted']} logs deleted, {result['space_freed']} bytes freed")
return create_success_response("Log cleanup completed", 200, result)
except Exception as e:
logger.error(f"Error during log cleanup: {str(e)}")
return create_error_response("Log cleanup failed", 500)
@maintenance_bp.route('/maintenance/cleanup/downloads', methods=['POST'])
@require_auth
@handle_api_errors
@validate_json_input(
optional_fields=['remove_failed', 'remove_incomplete', 'older_than_days'],
field_types={'remove_failed': bool, 'remove_incomplete': bool, 'older_than_days': int}
)
def cleanup_downloads() -> Tuple[Any, int]:
"""
Clean up download files and records.
Request Body:
- remove_failed: Remove failed downloads (optional, default: true)
- remove_incomplete: Remove incomplete downloads (optional, default: false)
- older_than_days: Remove downloads older than this many days (optional)
Returns:
JSON response with cleanup results
"""
data = request.get_json() or {}
remove_failed = data.get('remove_failed', True)
remove_incomplete = data.get('remove_incomplete', False)
older_than_days = data.get('older_than_days')
try:
logger.info(f"Starting download cleanup: failed={remove_failed}, incomplete={remove_incomplete}, older_than={older_than_days}")
result = cleanup_manager.cleanup_downloads(
remove_failed=remove_failed,
remove_incomplete=remove_incomplete,
older_than_days=older_than_days
)
result['space_freed_formatted'] = format_file_size(result.get('space_freed', 0))
result['timestamp'] = datetime.now().isoformat()
logger.info(f"Download cleanup completed: {result['downloads_cleaned']} downloads cleaned, {result['space_freed']} bytes freed")
return create_success_response("Download cleanup completed", 200, result)
except Exception as e:
logger.error(f"Error during download cleanup: {str(e)}")
return create_error_response("Download cleanup failed", 500)
@maintenance_bp.route('/maintenance/cleanup/cache', methods=['POST'])
@require_auth
@handle_api_errors
def cleanup_cache() -> Tuple[Any, int]:
"""
Clear application cache.
Returns:
JSON response with cleanup results
"""
try:
logger.info("Starting cache cleanup")
result = cleanup_manager.cleanup_cache()
result['space_freed_formatted'] = format_file_size(result.get('space_freed', 0))
result['timestamp'] = datetime.now().isoformat()
logger.info(f"Cache cleanup completed: {result['space_freed']} bytes freed")
return create_success_response("Cache cleanup completed", 200, result)
except Exception as e:
logger.error(f"Error during cache cleanup: {str(e)}")
return create_error_response("Cache cleanup failed", 500)
@maintenance_bp.route('/maintenance/cleanup/backups', methods=['POST'])
@require_auth
@handle_api_errors
@validate_json_input(
optional_fields=['keep_count', 'older_than_days'],
field_types={'keep_count': int, 'older_than_days': int}
)
def cleanup_old_backups() -> Tuple[Any, int]:
"""
Clean up old backup files.
Request Body:
- keep_count: Number of recent backups to keep (optional, default: 10)
- older_than_days: Delete backups older than this many days (optional, default: 90)
Returns:
JSON response with cleanup results
"""
data = request.get_json() or {}
keep_count = data.get('keep_count', 10)
older_than_days = data.get('older_than_days', 90)
try:
logger.info(f"Starting backup cleanup: keep {keep_count} backups, older than {older_than_days} days")
result = cleanup_manager.cleanup_old_backups(
keep_count=keep_count,
older_than_days=older_than_days
)
result['space_freed_formatted'] = format_file_size(result.get('space_freed', 0))
result['timestamp'] = datetime.now().isoformat()
logger.info(f"Backup cleanup completed: {result['backups_deleted']} backups deleted, {result['space_freed']} bytes freed")
return create_success_response("Backup cleanup completed", 200, result)
except Exception as e:
logger.error(f"Error during backup cleanup: {str(e)}")
return create_error_response("Backup cleanup failed", 500)
@maintenance_bp.route('/maintenance/cleanup/stats', methods=['GET'])
@require_auth
@handle_api_errors
def get_cleanup_stats() -> Tuple[Any, int]:
"""
Get cleanup statistics and recommendations.
Returns:
JSON response with cleanup statistics
"""
try:
stats = cleanup_manager.get_cleanup_stats()
# Add formatted sizes
for key in ['temp_files_size', 'log_files_size', 'cache_size', 'old_backups_size']:
if key in stats:
stats[f"{key}_formatted"] = format_file_size(stats[key])
# Add recommendations
recommendations = []
if stats.get('temp_files', 0) > 100:
recommendations.append("Consider cleaning temporary files")
if stats.get('log_files_size', 0) > 100 * 1024 * 1024: # 100MB
recommendations.append("Consider cleaning old log files")
if stats.get('cache_size', 0) > 500 * 1024 * 1024: # 500MB
recommendations.append("Consider clearing cache")
stats['recommendations'] = recommendations
return create_success_response("Cleanup statistics retrieved successfully", 200, stats)
except Exception as e:
logger.error(f"Error getting cleanup stats: {str(e)}")
return create_error_response("Failed to get cleanup statistics", 500)
@maintenance_bp.route('/maintenance/scheduled-tasks', methods=['GET'])
@require_auth
@handle_api_errors
def get_scheduled_tasks() -> Tuple[Any, int]:
"""
Get scheduled maintenance tasks.
Returns:
JSON response with scheduled tasks
"""
try:
tasks = scheduler_manager.get_scheduled_tasks()
return create_success_response("Scheduled tasks retrieved successfully", 200, tasks)
except Exception as e:
logger.error(f"Error getting scheduled tasks: {str(e)}")
return create_error_response("Failed to get scheduled tasks", 500)
@maintenance_bp.route('/maintenance/scheduled-tasks', methods=['POST'])
@require_auth
@handle_api_errors
@validate_json_input(
required_fields=['name', 'task_type', 'schedule'],
optional_fields=['config', 'enabled'],
field_types={'name': str, 'task_type': str, 'schedule': str, 'config': dict, 'enabled': bool}
)
def create_scheduled_task() -> Tuple[Any, int]:
"""
Create a new scheduled maintenance task.
Request Body:
- name: Task name (required)
- task_type: Type of task (required)
- schedule: Cron-style schedule (required)
- config: Task configuration (optional)
- enabled: Whether task is enabled (optional, default: true)
Returns:
JSON response with created task
"""
data = request.get_json()
# Validate task type
allowed_task_types = [
'database_vacuum', 'database_analyze', 'cleanup_temp_files',
'cleanup_logs', 'cleanup_downloads', 'cleanup_cache', 'backup_database'
]
if data['task_type'] not in allowed_task_types:
return create_error_response(f"Invalid task type. Must be one of: {', '.join(allowed_task_types)}", 400)
try:
task_id = scheduler_manager.create_scheduled_task(
name=data['name'],
task_type=data['task_type'],
schedule=data['schedule'],
config=data.get('config', {}),
enabled=data.get('enabled', True)
)
logger.info(f"Created scheduled task {task_id}: {data['name']} ({data['task_type']})")
return create_success_response("Scheduled task created successfully", 201, {'id': task_id})
except Exception as e:
logger.error(f"Error creating scheduled task: {str(e)}")
return create_error_response("Failed to create scheduled task", 500)
@maintenance_bp.route('/maintenance/scheduled-tasks/<int:task_id>', methods=['PUT'])
@require_auth
@handle_api_errors
@validate_json_input(
optional_fields=['name', 'schedule', 'config', 'enabled'],
field_types={'name': str, 'schedule': str, 'config': dict, 'enabled': bool}
)
def update_scheduled_task(task_id: int) -> Tuple[Any, int]:
"""
Update a scheduled maintenance task.
Args:
task_id: Task ID
Request Body:
- name: Task name (optional)
- schedule: Cron-style schedule (optional)
- config: Task configuration (optional)
- enabled: Whether task is enabled (optional)
Returns:
JSON response with update result
"""
data = request.get_json()
try:
success = scheduler_manager.update_scheduled_task(task_id, **data)
if success:
logger.info(f"Updated scheduled task {task_id}")
return create_success_response("Scheduled task updated successfully")
else:
return create_error_response("Scheduled task not found", 404)
except Exception as e:
logger.error(f"Error updating scheduled task {task_id}: {str(e)}")
return create_error_response("Failed to update scheduled task", 500)
@maintenance_bp.route('/maintenance/scheduled-tasks/<int:task_id>', methods=['DELETE'])
@require_auth
@handle_api_errors
def delete_scheduled_task(task_id: int) -> Tuple[Any, int]:
"""
Delete a scheduled maintenance task.
Args:
task_id: Task ID
Returns:
JSON response with deletion result
"""
try:
success = scheduler_manager.delete_scheduled_task(task_id)
if success:
logger.info(f"Deleted scheduled task {task_id}")
return create_success_response("Scheduled task deleted successfully")
else:
return create_error_response("Scheduled task not found", 404)
except Exception as e:
logger.error(f"Error deleting scheduled task {task_id}: {str(e)}")
return create_error_response("Failed to delete scheduled task", 500)
@maintenance_bp.route('/maintenance/history', methods=['GET'])
@require_auth
@handle_api_errors
@validate_query_params(
allowed_params=['task_type', 'days', 'limit'],
param_types={'days': int, 'limit': int}
)
def get_maintenance_history() -> Tuple[Any, int]:
"""
Get maintenance task execution history.
Query Parameters:
- task_type: Filter by task type (optional)
- days: Number of days of history (optional, default: 30)
- limit: Maximum number of records (optional, default: 100)
Returns:
JSON response with maintenance history
"""
task_type = request.args.get('task_type')
days = request.args.get('days', 30, type=int)
limit = request.args.get('limit', 100, type=int)
try:
history = scheduler_manager.get_task_history(
task_type=task_type,
days=days,
limit=limit
)
return create_success_response("Maintenance history retrieved successfully", 200, history)
except Exception as e:
logger.error(f"Error getting maintenance history: {str(e)}")
return create_error_response("Failed to get maintenance history", 500)

View File

@@ -1,406 +0,0 @@
"""
Performance Optimization API Endpoints
This module provides REST API endpoints for performance monitoring
and optimization features.
"""
from flask import Blueprint, request, jsonify
from auth import require_auth, optional_auth
from error_handler import handle_api_errors, RetryableError
from performance_optimizer import (
speed_limiter, download_cache, memory_monitor,
download_manager, resume_manager, DownloadTask
)
import uuid
# Blueprint for performance optimization endpoints
performance_bp = Blueprint('performance', __name__)
@performance_bp.route('/api/performance/speed-limit', methods=['GET'])
@handle_api_errors
@optional_auth
def get_speed_limit():
"""Get current download speed limit."""
try:
return jsonify({
'status': 'success',
'data': {
'speed_limit_mbps': speed_limiter.max_speed_mbps,
'current_speed_mbps': speed_limiter.get_current_speed()
}
})
except Exception as e:
raise RetryableError(f"Failed to get speed limit: {e}")
@performance_bp.route('/api/performance/speed-limit', methods=['POST'])
@handle_api_errors
@require_auth
def set_speed_limit():
"""Set download speed limit."""
try:
        data = request.get_json() or {}
speed_mbps = data.get('speed_mbps', 0)
if speed_mbps < 0:
return jsonify({
'status': 'error',
'message': 'Speed limit must be non-negative (0 = unlimited)'
}), 400
speed_limiter.set_speed_limit(speed_mbps)
return jsonify({
'status': 'success',
'message': f'Speed limit set to {speed_mbps} MB/s' if speed_mbps > 0 else 'Speed limit removed',
'data': {
'speed_limit_mbps': speed_mbps
}
})
except Exception as e:
raise RetryableError(f"Failed to set speed limit: {e}")
@performance_bp.route('/api/performance/cache/stats')
@handle_api_errors
@optional_auth
def get_cache_stats():
"""Get cache statistics."""
try:
stats = download_cache.get_stats()
return jsonify({
'status': 'success',
'data': stats
})
except Exception as e:
raise RetryableError(f"Failed to get cache stats: {e}")
@performance_bp.route('/api/performance/cache/clear', methods=['POST'])
@handle_api_errors
@require_auth
def clear_cache():
"""Clear download cache."""
try:
download_cache.clear()
return jsonify({
'status': 'success',
'message': 'Cache cleared successfully'
})
except Exception as e:
raise RetryableError(f"Failed to clear cache: {e}")
@performance_bp.route('/api/performance/memory/stats')
@handle_api_errors
@optional_auth
def get_memory_stats():
"""Get memory usage statistics."""
try:
stats = memory_monitor.get_memory_stats()
return jsonify({
'status': 'success',
'data': stats
})
except Exception as e:
raise RetryableError(f"Failed to get memory stats: {e}")
@performance_bp.route('/api/performance/memory/gc', methods=['POST'])
@handle_api_errors
@require_auth
def force_garbage_collection():
"""Force garbage collection to free memory."""
try:
memory_monitor.force_garbage_collection()
stats = memory_monitor.get_memory_stats()
return jsonify({
'status': 'success',
'message': 'Garbage collection completed',
'data': stats
})
except Exception as e:
raise RetryableError(f"Failed to force garbage collection: {e}")
@performance_bp.route('/api/performance/downloads/workers', methods=['GET'])
@handle_api_errors
@optional_auth
def get_worker_count():
"""Get current number of download workers."""
try:
return jsonify({
'status': 'success',
'data': {
'max_workers': download_manager.max_workers,
'active_tasks': len(download_manager.active_tasks)
}
})
except Exception as e:
raise RetryableError(f"Failed to get worker count: {e}")
@performance_bp.route('/api/performance/downloads/workers', methods=['POST'])
@handle_api_errors
@require_auth
def set_worker_count():
"""Set number of download workers."""
try:
        data = request.get_json() or {}
max_workers = data.get('max_workers', 3)
if not isinstance(max_workers, int) or max_workers < 1 or max_workers > 10:
return jsonify({
'status': 'error',
'message': 'Worker count must be between 1 and 10'
}), 400
download_manager.set_max_workers(max_workers)
return jsonify({
'status': 'success',
'message': f'Worker count set to {max_workers}',
'data': {
'max_workers': max_workers
}
})
except Exception as e:
raise RetryableError(f"Failed to set worker count: {e}")
@performance_bp.route('/api/performance/downloads/stats')
@handle_api_errors
@optional_auth
def get_download_stats():
"""Get download manager statistics."""
try:
stats = download_manager.get_statistics()
return jsonify({
'status': 'success',
'data': stats
})
except Exception as e:
raise RetryableError(f"Failed to get download stats: {e}")
@performance_bp.route('/api/performance/downloads/tasks')
@handle_api_errors
@optional_auth
def get_all_download_tasks():
"""Get all download tasks."""
try:
tasks = download_manager.get_all_tasks()
return jsonify({
'status': 'success',
'data': tasks
})
except Exception as e:
raise RetryableError(f"Failed to get download tasks: {e}")
@performance_bp.route('/api/performance/downloads/tasks/<task_id>')
@handle_api_errors
@optional_auth
def get_download_task(task_id):
"""Get specific download task status."""
try:
task_status = download_manager.get_task_status(task_id)
if not task_status:
return jsonify({
'status': 'error',
'message': 'Task not found'
}), 404
return jsonify({
'status': 'success',
'data': task_status
})
except Exception as e:
raise RetryableError(f"Failed to get task status: {e}")
@performance_bp.route('/api/performance/downloads/add-task', methods=['POST'])
@handle_api_errors
@require_auth
def add_download_task():
"""Add a new download task to the queue."""
try:
        data = request.get_json() or {}
required_fields = ['serie_name', 'season', 'episode', 'key', 'output_path', 'temp_path']
for field in required_fields:
if field not in data:
return jsonify({
'status': 'error',
'message': f'Missing required field: {field}'
}), 400
# Create download task
task = DownloadTask(
task_id=str(uuid.uuid4()),
serie_name=data['serie_name'],
season=int(data['season']),
episode=int(data['episode']),
key=data['key'],
language=data.get('language', 'German Dub'),
output_path=data['output_path'],
temp_path=data['temp_path'],
priority=data.get('priority', 0)
)
task_id = download_manager.add_task(task)
return jsonify({
'status': 'success',
'message': 'Download task added successfully',
'data': {
'task_id': task_id
}
})
except Exception as e:
raise RetryableError(f"Failed to add download task: {e}")
@performance_bp.route('/api/performance/resume/tasks')
@handle_api_errors
@optional_auth
def get_resumable_tasks():
"""Get list of tasks that can be resumed."""
try:
resumable_tasks = resume_manager.get_resumable_tasks()
# Get detailed info for each resumable task
tasks_info = []
for task_id in resumable_tasks:
resume_info = resume_manager.load_resume_info(task_id)
if resume_info:
tasks_info.append({
'task_id': task_id,
'resume_info': resume_info
})
return jsonify({
'status': 'success',
'data': {
'resumable_tasks': tasks_info,
'count': len(tasks_info)
}
})
except Exception as e:
raise RetryableError(f"Failed to get resumable tasks: {e}")
@performance_bp.route('/api/performance/resume/clear/<task_id>', methods=['POST'])
@handle_api_errors
@require_auth
def clear_resume_info(task_id):
"""Clear resume information for a specific task."""
try:
resume_manager.clear_resume_info(task_id)
return jsonify({
'status': 'success',
'message': f'Resume information cleared for task: {task_id}'
})
except Exception as e:
raise RetryableError(f"Failed to clear resume info: {e}")
@performance_bp.route('/api/performance/system/optimize', methods=['POST'])
@handle_api_errors
@require_auth
def optimize_system():
"""Perform system optimization tasks."""
try:
optimization_results = {}
# Force garbage collection
memory_monitor.force_garbage_collection()
memory_stats = memory_monitor.get_memory_stats()
optimization_results['memory_gc'] = {
'completed': True,
'memory_mb': memory_stats.get('rss_mb', 0)
}
        # Clean up expired cache entries (via the cache's internal helper)
download_cache._cleanup_expired()
cache_stats = download_cache.get_stats()
optimization_results['cache_cleanup'] = {
'completed': True,
'entries': cache_stats.get('entry_count', 0),
'size_mb': cache_stats.get('total_size_mb', 0)
}
# Clean up old resume files (older than 7 days)
import os
import time
resume_dir = resume_manager.resume_dir
cleaned_files = 0
try:
for filename in os.listdir(resume_dir):
file_path = os.path.join(resume_dir, filename)
if os.path.isfile(file_path):
file_age = time.time() - os.path.getmtime(file_path)
if file_age > 7 * 24 * 3600: # 7 days in seconds
os.remove(file_path)
cleaned_files += 1
        except Exception:
            pass  # Ignore errors in resume-file cleanup
optimization_results['resume_cleanup'] = {
'completed': True,
'files_removed': cleaned_files
}
return jsonify({
'status': 'success',
'message': 'System optimization completed',
'data': optimization_results
})
except Exception as e:
raise RetryableError(f"System optimization failed: {e}")
@performance_bp.route('/api/performance/config')
@handle_api_errors
@optional_auth
def get_performance_config():
"""Get current performance configuration."""
try:
config = {
'speed_limit': {
'current_mbps': speed_limiter.max_speed_mbps,
'unlimited': speed_limiter.max_speed_mbps == 0
},
'downloads': {
'max_workers': download_manager.max_workers,
'active_tasks': len(download_manager.active_tasks)
},
'cache': {
'max_size_mb': download_cache.max_size_bytes / (1024 * 1024),
**download_cache.get_stats()
},
'memory': {
'warning_threshold_mb': memory_monitor.warning_threshold / (1024 * 1024),
'critical_threshold_mb': memory_monitor.critical_threshold / (1024 * 1024),
**memory_monitor.get_memory_stats()
}
}
return jsonify({
'status': 'success',
'data': config
})
except Exception as e:
raise RetryableError(f"Failed to get performance config: {e}")
# Export the blueprint
__all__ = ['performance_bp']
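# Registration sketch (assumes a Flask application factory elsewhere in the
# project; the blueprint already carries absolute /api/performance/... routes):
#
#     from flask import Flask
#     app = Flask(__name__)
#     app.register_blueprint(performance_bp)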

View File

@@ -1,280 +0,0 @@
from flask import Blueprint, jsonify, request
from web.controllers.auth_controller import require_auth
from shared.utils.process_utils import (
process_lock_manager,
RESCAN_LOCK,
DOWNLOAD_LOCK,
SEARCH_LOCK,
check_process_locks,
get_process_status,
update_process_progress,
is_process_running,
episode_deduplicator,
ProcessLockError
)
import logging
logger = logging.getLogger(__name__)
process_bp = Blueprint('process', __name__, url_prefix='/api/process')
@process_bp.route('/locks/status', methods=['GET'])
@require_auth
def get_all_locks_status():
"""Get status of all process locks."""
try:
# Clean up expired locks first
cleaned = check_process_locks()
if cleaned > 0:
logger.info(f"Cleaned up {cleaned} expired locks")
status = process_lock_manager.get_all_locks_status()
# Add queue deduplication info
status['queue_info'] = {
'active_episodes': episode_deduplicator.get_count(),
'episodes': episode_deduplicator.get_active_episodes()
}
return jsonify({
'success': True,
'locks': status
})
except Exception as e:
logger.error(f"Error getting locks status: {e}")
return jsonify({
'success': False,
'error': str(e)
}), 500
@process_bp.route('/locks/<lock_name>/status', methods=['GET'])
@require_auth
def get_lock_status(lock_name):
"""Get status of a specific process lock."""
try:
if lock_name not in [RESCAN_LOCK, DOWNLOAD_LOCK, SEARCH_LOCK]:
return jsonify({
'success': False,
'error': 'Invalid lock name'
}), 400
status = get_process_status(lock_name)
return jsonify({
'success': True,
'status': status
})
except Exception as e:
logger.error(f"Error getting lock status for {lock_name}: {e}")
return jsonify({
'success': False,
'error': str(e)
}), 500
@process_bp.route('/locks/<lock_name>/acquire', methods=['POST'])
@require_auth
def acquire_lock(lock_name):
"""Manually acquire a process lock."""
try:
if lock_name not in [RESCAN_LOCK, DOWNLOAD_LOCK, SEARCH_LOCK]:
return jsonify({
'success': False,
'error': 'Invalid lock name'
}), 400
data = request.get_json() or {}
locked_by = data.get('locked_by', 'manual')
timeout_minutes = data.get('timeout_minutes', 60)
success = process_lock_manager.acquire_lock(lock_name, locked_by, timeout_minutes)
if success:
return jsonify({
'success': True,
'message': f'Lock {lock_name} acquired successfully'
})
else:
return jsonify({
'success': False,
'error': f'Lock {lock_name} is already held'
}), 409
except Exception as e:
logger.error(f"Error acquiring lock {lock_name}: {e}")
return jsonify({
'success': False,
'error': str(e)
}), 500
@process_bp.route('/locks/<lock_name>/release', methods=['POST'])
@require_auth
def release_lock(lock_name):
"""Manually release a process lock."""
try:
if lock_name not in [RESCAN_LOCK, DOWNLOAD_LOCK, SEARCH_LOCK]:
return jsonify({
'success': False,
'error': 'Invalid lock name'
}), 400
success = process_lock_manager.release_lock(lock_name)
if success:
return jsonify({
'success': True,
'message': f'Lock {lock_name} released successfully'
})
else:
return jsonify({
'success': False,
'error': f'Lock {lock_name} was not held'
}), 404
except Exception as e:
logger.error(f"Error releasing lock {lock_name}: {e}")
return jsonify({
'success': False,
'error': str(e)
}), 500
@process_bp.route('/locks/cleanup', methods=['POST'])
@require_auth
def cleanup_expired_locks():
"""Manually clean up expired locks."""
try:
cleaned = check_process_locks()
return jsonify({
'success': True,
'cleaned_count': cleaned,
'message': f'Cleaned up {cleaned} expired locks'
})
except Exception as e:
logger.error(f"Error cleaning up locks: {e}")
return jsonify({
'success': False,
'error': str(e)
}), 500
@process_bp.route('/locks/force-release-all', methods=['POST'])
@require_auth
def force_release_all_locks():
"""Force release all process locks (emergency use)."""
try:
data = request.get_json() or {}
confirm = data.get('confirm', False)
if not confirm:
return jsonify({
'success': False,
'error': 'Confirmation required for force release'
}), 400
released = process_lock_manager.force_release_all()
# Also clear queue deduplication
episode_deduplicator.clear_all()
return jsonify({
'success': True,
'released_count': released,
'message': f'Force released {released} locks and cleared queue deduplication'
})
except Exception as e:
logger.error(f"Error force releasing locks: {e}")
return jsonify({
'success': False,
'error': str(e)
}), 500
@process_bp.route('/locks/<lock_name>/progress', methods=['POST'])
@require_auth
def update_lock_progress(lock_name):
"""Update progress for a running process."""
try:
if lock_name not in [RESCAN_LOCK, DOWNLOAD_LOCK, SEARCH_LOCK]:
return jsonify({
'success': False,
'error': 'Invalid lock name'
}), 400
if not is_process_running(lock_name):
return jsonify({
'success': False,
'error': f'Process {lock_name} is not running'
}), 404
data = request.get_json() or {}
progress_data = data.get('progress', {})
update_process_progress(lock_name, progress_data)
return jsonify({
'success': True,
'message': 'Progress updated successfully'
})
except Exception as e:
logger.error(f"Error updating progress for {lock_name}: {e}")
return jsonify({
'success': False,
'error': str(e)
}), 500
@process_bp.route('/queue/deduplication', methods=['GET'])
@require_auth
def get_queue_deduplication():
"""Get current queue deduplication status."""
try:
return jsonify({
'success': True,
'deduplication': {
'active_count': episode_deduplicator.get_count(),
'active_episodes': episode_deduplicator.get_active_episodes()
}
})
except Exception as e:
logger.error(f"Error getting queue deduplication: {e}")
return jsonify({
'success': False,
'error': str(e)
}), 500
@process_bp.route('/queue/deduplication/clear', methods=['POST'])
@require_auth
def clear_queue_deduplication():
"""Clear all queue deduplication entries."""
try:
episode_deduplicator.clear_all()
return jsonify({
'success': True,
'message': 'Queue deduplication cleared successfully'
})
except Exception as e:
logger.error(f"Error clearing queue deduplication: {e}")
return jsonify({
'success': False,
'error': str(e)
}), 500
@process_bp.route('/is-running/<process_name>', methods=['GET'])
@require_auth
def check_if_process_running(process_name):
"""Quick check if a specific process is running."""
try:
if process_name not in [RESCAN_LOCK, DOWNLOAD_LOCK, SEARCH_LOCK]:
return jsonify({
'success': False,
'error': 'Invalid process name'
}), 400
is_running = is_process_running(process_name)
return jsonify({
'success': True,
'is_running': is_running,
'process_name': process_name
})
except Exception as e:
logger.error(f"Error checking if process {process_name} is running: {e}")
return jsonify({
'success': False,
'error': str(e)
}), 500
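# Worker-side usage sketch for the lock manager. run_with_lock is a
# hypothetical helper built only from the acquire/release calls used above:
def run_with_lock(lock_name, work, locked_by='worker', timeout_minutes=60):
    """Run a callable while holding a process lock, releasing it on exit."""
    if not process_lock_manager.acquire_lock(lock_name, locked_by, timeout_minutes):
        raise ProcessLockError(f'{lock_name} is already held')
    try:
        return work()
    finally:
        process_lock_manager.release_lock(lock_name)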

View File

@@ -1,187 +0,0 @@
from flask import Blueprint, jsonify, request
from web.controllers.auth_controller import require_auth
from application.services.scheduler_service import get_scheduler
import logging
logger = logging.getLogger(__name__)
scheduler_bp = Blueprint('scheduler', __name__, url_prefix='/api/scheduler')
@scheduler_bp.route('/config', methods=['GET'])
@require_auth
def get_scheduler_config():
"""Get current scheduler configuration."""
try:
scheduler = get_scheduler()
if not scheduler:
return jsonify({
'success': False,
'error': 'Scheduler not initialized'
}), 500
config = scheduler.get_scheduled_rescan_config()
return jsonify({
'success': True,
'config': config
})
except Exception as e:
logger.error(f"Error getting scheduler config: {e}")
return jsonify({
'success': False,
'error': str(e)
}), 500
@scheduler_bp.route('/config', methods=['POST'])
@require_auth
def update_scheduler_config():
"""Update scheduler configuration."""
try:
data = request.get_json() or {}
enabled = data.get('enabled', False)
time_str = data.get('time', '03:00')
auto_download = data.get('auto_download_after_rescan', False)
# Validate inputs
if enabled and not time_str:
return jsonify({
'success': False,
'error': 'Time is required when scheduling is enabled'
}), 400
scheduler = get_scheduler()
if not scheduler:
return jsonify({
'success': False,
'error': 'Scheduler not initialized'
}), 500
# Update configuration
scheduler.update_scheduled_rescan_config(enabled, time_str, auto_download)
# Get updated config
updated_config = scheduler.get_scheduled_rescan_config()
return jsonify({
'success': True,
'message': 'Scheduler configuration updated successfully',
'config': updated_config
})
except ValueError as e:
return jsonify({
'success': False,
'error': str(e)
}), 400
except Exception as e:
logger.error(f"Error updating scheduler config: {e}")
return jsonify({
'success': False,
'error': str(e)
}), 500
@scheduler_bp.route('/status', methods=['GET'])
@require_auth
def get_scheduler_status():
"""Get current scheduler status and next jobs."""
try:
scheduler = get_scheduler()
if not scheduler:
return jsonify({
'success': False,
'error': 'Scheduler not initialized'
}), 500
config = scheduler.get_scheduled_rescan_config()
jobs = scheduler.get_next_scheduled_jobs()
return jsonify({
'success': True,
'status': {
'running': config['is_running'],
'config': config,
'scheduled_jobs': jobs
}
})
except Exception as e:
logger.error(f"Error getting scheduler status: {e}")
return jsonify({
'success': False,
'error': str(e)
}), 500
@scheduler_bp.route('/start', methods=['POST'])
@require_auth
def start_scheduler():
"""Start the scheduler."""
try:
scheduler = get_scheduler()
if not scheduler:
return jsonify({
'success': False,
'error': 'Scheduler not initialized'
}), 500
scheduler.start_scheduler()
return jsonify({
'success': True,
'message': 'Scheduler started successfully'
})
except Exception as e:
logger.error(f"Error starting scheduler: {e}")
return jsonify({
'success': False,
'error': str(e)
}), 500
@scheduler_bp.route('/stop', methods=['POST'])
@require_auth
def stop_scheduler():
"""Stop the scheduler."""
try:
scheduler = get_scheduler()
if not scheduler:
return jsonify({
'success': False,
'error': 'Scheduler not initialized'
}), 500
scheduler.stop_scheduler()
return jsonify({
'success': True,
'message': 'Scheduler stopped successfully'
})
except Exception as e:
logger.error(f"Error stopping scheduler: {e}")
return jsonify({
'success': False,
'error': str(e)
}), 500
@scheduler_bp.route('/trigger-rescan', methods=['POST'])
@require_auth
def trigger_manual_rescan():
"""Manually trigger a scheduled rescan for testing."""
try:
scheduler = get_scheduler()
if not scheduler:
return jsonify({
'success': False,
'error': 'Scheduler not initialized'
}), 500
scheduler.trigger_manual_scheduled_rescan()
return jsonify({
'success': True,
'message': 'Manual scheduled rescan triggered'
})
except Exception as e:
logger.error(f"Error triggering manual rescan: {e}")
return jsonify({
'success': False,
'error': str(e)
}), 500
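# Example payload for POST /api/scheduler/config (values are illustrative;
# 'time' uses the HH:MM format shown in the default above):
#
#   {"enabled": true, "time": "03:00", "auto_download_after_rescan": false}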

View File

@@ -1,637 +0,0 @@
"""
Search API Endpoints
This module provides REST API endpoints for advanced search functionality
across anime, episodes, and other content.
"""
from flask import Blueprint, request
from typing import Dict, List, Any, Optional
import re
from ...shared.auth_decorators import require_auth, optional_auth
from ...shared.error_handlers import handle_api_errors, APIException, ValidationError
from ...shared.validators import validate_pagination_params
from ...shared.response_helpers import (
create_success_response, create_paginated_response, format_anime_response,
format_episode_response, extract_pagination_params
)
# Import search components (these imports would need to be adjusted based on actual structure)
try:
from search_manager import search_engine, SearchResult
from database_manager import anime_repository, episode_repository
except ImportError:
# Fallback for development/testing
search_engine = None
SearchResult = None
anime_repository = None
episode_repository = None
# Blueprint for search endpoints
search_bp = Blueprint('search', __name__, url_prefix='/api/v1/search')
@search_bp.route('', methods=['GET'])
@handle_api_errors
@validate_pagination_params
@optional_auth
def global_search() -> Dict[str, Any]:
"""
Perform a global search across all content types.
Query Parameters:
- q: Search query (required)
- types: Comma-separated list of content types (anime,episodes,all)
- categories: Comma-separated list of categories to search
- min_score: Minimum relevance score (0.0-1.0)
- page: Page number (default: 1)
- per_page: Items per page (default: 50, max: 1000)
Returns:
Paginated search results grouped by content type
"""
if not search_engine:
raise APIException("Search engine not available", 503)
search_query = request.args.get('q', '').strip()
if not search_query:
raise ValidationError("Search query 'q' is required")
if len(search_query) < 2:
raise ValidationError("Search query must be at least 2 characters long")
# Parse search types
search_types = request.args.get('types', 'all').split(',')
valid_types = ['anime', 'episodes', 'all']
search_types = [t.strip() for t in search_types if t.strip() in valid_types]
if not search_types or 'all' in search_types:
search_types = ['anime', 'episodes']
# Parse categories
categories = request.args.get('categories', '').split(',')
categories = [c.strip() for c in categories if c.strip()]
# Parse minimum score
min_score = request.args.get('min_score', '0.0')
try:
min_score = float(min_score)
if not 0.0 <= min_score <= 1.0:
raise ValueError()
except ValueError:
raise ValidationError("min_score must be a number between 0.0 and 1.0")
# Get pagination parameters
page, per_page = extract_pagination_params()
# Perform search
search_results = search_engine.search_all(
query=search_query,
content_types=search_types,
categories=categories,
min_score=min_score
)
# Group results by type
grouped_results = {
'anime': [],
'episodes': [],
'total_results': 0
}
for result in search_results:
if result.content_type == 'anime':
grouped_results['anime'].append({
'id': result.content_id,
'type': 'anime',
'title': result.title,
'description': result.description,
'score': result.relevance_score,
'data': format_anime_response(result.content_data)
})
elif result.content_type == 'episode':
grouped_results['episodes'].append({
'id': result.content_id,
'type': 'episode',
'title': result.title,
'description': result.description,
'score': result.relevance_score,
'data': format_episode_response(result.content_data)
})
grouped_results['total_results'] += 1
# Apply pagination to combined results
all_results = []
for result_type in ['anime', 'episodes']:
all_results.extend(grouped_results[result_type])
# Sort by relevance score
all_results.sort(key=lambda x: x['score'], reverse=True)
total = len(all_results)
start_idx = (page - 1) * per_page
end_idx = start_idx + per_page
paginated_results = all_results[start_idx:end_idx]
response = create_paginated_response(
data=paginated_results,
page=page,
per_page=per_page,
total=total,
endpoint='search.global_search',
q=search_query
)
# Add search metadata
response['search'] = {
'query': search_query,
'types': search_types,
'categories': categories,
'min_score': min_score,
'results_by_type': {
'anime': len(grouped_results['anime']),
'episodes': len(grouped_results['episodes'])
}
}
return response
@search_bp.route('/anime', methods=['GET'])
@handle_api_errors
@validate_pagination_params
@optional_auth
def search_anime() -> Dict[str, Any]:
"""
Search anime with advanced filters.
Query Parameters:
- q: Search query (required)
- genres: Comma-separated list of genres
- status: Anime status filter
- year_from: Starting year filter
- year_to: Ending year filter
- min_episodes: Minimum episode count
- max_episodes: Maximum episode count
- sort_by: Sort field (name, year, episodes, relevance)
- sort_order: Sort order (asc, desc)
- page: Page number (default: 1)
- per_page: Items per page (default: 50, max: 1000)
Returns:
Paginated anime search results
"""
if not anime_repository:
raise APIException("Anime repository not available", 503)
search_query = request.args.get('q', '').strip()
if not search_query:
raise ValidationError("Search query 'q' is required")
# Parse filters
genres = request.args.get('genres', '').split(',')
genres = [g.strip() for g in genres if g.strip()]
status_filter = request.args.get('status')
# Parse year filters
year_from = request.args.get('year_from')
year_to = request.args.get('year_to')
if year_from:
try:
year_from = int(year_from)
if year_from < 1900 or year_from > 2100:
raise ValueError()
except ValueError:
raise ValidationError("year_from must be a valid year between 1900 and 2100")
if year_to:
try:
year_to = int(year_to)
if year_to < 1900 or year_to > 2100:
raise ValueError()
except ValueError:
raise ValidationError("year_to must be a valid year between 1900 and 2100")
# Parse episode count filters
min_episodes = request.args.get('min_episodes')
max_episodes = request.args.get('max_episodes')
if min_episodes:
try:
min_episodes = int(min_episodes)
if min_episodes < 0:
raise ValueError()
except ValueError:
raise ValidationError("min_episodes must be a non-negative integer")
if max_episodes:
try:
max_episodes = int(max_episodes)
if max_episodes < 0:
raise ValueError()
except ValueError:
raise ValidationError("max_episodes must be a non-negative integer")
# Parse sorting
sort_by = request.args.get('sort_by', 'relevance')
sort_order = request.args.get('sort_order', 'desc')
valid_sort_fields = ['name', 'year', 'episodes', 'relevance', 'created_at']
if sort_by not in valid_sort_fields:
raise ValidationError(f"sort_by must be one of: {', '.join(valid_sort_fields)}")
if sort_order not in ['asc', 'desc']:
raise ValidationError("sort_order must be 'asc' or 'desc'")
# Get pagination parameters
page, per_page = extract_pagination_params()
# Perform advanced search
search_results = anime_repository.advanced_search(
query=search_query,
genres=genres,
status=status_filter,
year_from=year_from,
year_to=year_to,
min_episodes=min_episodes,
max_episodes=max_episodes,
sort_by=sort_by,
sort_order=sort_order
)
# Format results
formatted_results = []
for anime in search_results:
anime_data = format_anime_response(anime.__dict__)
# Add search relevance score if available
if hasattr(anime, 'relevance_score'):
anime_data['relevance_score'] = anime.relevance_score
formatted_results.append(anime_data)
# Apply pagination
total = len(formatted_results)
start_idx = (page - 1) * per_page
end_idx = start_idx + per_page
paginated_results = formatted_results[start_idx:end_idx]
response = create_paginated_response(
data=paginated_results,
page=page,
per_page=per_page,
total=total,
endpoint='search.search_anime',
q=search_query
)
# Add search metadata
response['search'] = {
'query': search_query,
'filters': {
'genres': genres,
'status': status_filter,
'year_from': year_from,
'year_to': year_to,
'min_episodes': min_episodes,
'max_episodes': max_episodes
},
'sorting': {
'sort_by': sort_by,
'sort_order': sort_order
}
}
return response
@search_bp.route('/episodes', methods=['GET'])
@handle_api_errors
@validate_pagination_params
@optional_auth
def search_episodes() -> Dict[str, Any]:
"""
Search episodes with advanced filters.
Query Parameters:
- q: Search query (required)
- anime_id: Filter by anime ID
- status: Episode status filter
- downloaded: Filter by download status (true/false)
- episode_range: Episode range filter (e.g., "1-10", "5+")
- duration_min: Minimum duration in minutes
- duration_max: Maximum duration in minutes
- sort_by: Sort field (episode_number, title, duration, relevance)
- sort_order: Sort order (asc, desc)
- page: Page number (default: 1)
- per_page: Items per page (default: 50, max: 1000)
Returns:
Paginated episode search results
"""
if not episode_repository:
raise APIException("Episode repository not available", 503)
search_query = request.args.get('q', '').strip()
if not search_query:
raise ValidationError("Search query 'q' is required")
# Parse filters
anime_id = request.args.get('anime_id')
if anime_id:
try:
anime_id = int(anime_id)
except ValueError:
raise ValidationError("anime_id must be a valid integer")
status_filter = request.args.get('status')
downloaded_filter = request.args.get('downloaded')
if downloaded_filter and downloaded_filter.lower() not in ['true', 'false']:
raise ValidationError("downloaded filter must be 'true' or 'false'")
# Parse episode range
episode_range = request.args.get('episode_range')
episode_min = None
episode_max = None
if episode_range:
range_pattern = r'^(\d+)(?:-(\d+)|\+)?$'
match = re.match(range_pattern, episode_range)
if not match:
raise ValidationError("episode_range must be in format 'N', 'N-M', or 'N+'")
episode_min = int(match.group(1))
if match.group(2):
episode_max = int(match.group(2))
elif episode_range.endswith('+'):
episode_max = None # No upper limit
else:
episode_max = episode_min # Single episode
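        # Illustrative parses of the accepted formats (values are hypothetical):
        #   "7"    -> episode_min=7, episode_max=7     (single episode)
        #   "1-10" -> episode_min=1, episode_max=10    (inclusive range)
        #   "5+"   -> episode_min=5, episode_max=None  (open-ended range)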
# Parse duration filters
duration_min = request.args.get('duration_min')
duration_max = request.args.get('duration_max')
if duration_min:
try:
duration_min = int(duration_min)
if duration_min < 0:
raise ValueError()
except ValueError:
raise ValidationError("duration_min must be a non-negative integer")
if duration_max:
try:
duration_max = int(duration_max)
if duration_max < 0:
raise ValueError()
except ValueError:
raise ValidationError("duration_max must be a non-negative integer")
# Parse sorting
sort_by = request.args.get('sort_by', 'relevance')
sort_order = request.args.get('sort_order', 'desc')
valid_sort_fields = ['episode_number', 'title', 'duration', 'relevance', 'created_at']
if sort_by not in valid_sort_fields:
raise ValidationError(f"sort_by must be one of: {', '.join(valid_sort_fields)}")
if sort_order not in ['asc', 'desc']:
raise ValidationError("sort_order must be 'asc' or 'desc'")
# Get pagination parameters
page, per_page = extract_pagination_params()
# Perform advanced search
search_results = episode_repository.advanced_search(
query=search_query,
anime_id=anime_id,
status=status_filter,
downloaded=downloaded_filter.lower() == 'true' if downloaded_filter else None,
episode_min=episode_min,
episode_max=episode_max,
duration_min=duration_min,
duration_max=duration_max,
sort_by=sort_by,
sort_order=sort_order
)
# Format results
formatted_results = []
for episode in search_results:
episode_data = format_episode_response(episode.__dict__)
# Add search relevance score if available
if hasattr(episode, 'relevance_score'):
episode_data['relevance_score'] = episode.relevance_score
formatted_results.append(episode_data)
# Apply pagination
total = len(formatted_results)
start_idx = (page - 1) * per_page
end_idx = start_idx + per_page
paginated_results = formatted_results[start_idx:end_idx]
response = create_paginated_response(
data=paginated_results,
page=page,
per_page=per_page,
total=total,
endpoint='search.search_episodes',
q=search_query
)
# Add search metadata
response['search'] = {
'query': search_query,
'filters': {
'anime_id': anime_id,
'status': status_filter,
'downloaded': downloaded_filter,
'episode_range': episode_range,
'duration_min': duration_min,
'duration_max': duration_max
},
'sorting': {
'sort_by': sort_by,
'sort_order': sort_order
}
}
return response
@search_bp.route('/suggestions', methods=['GET'])
@handle_api_errors
@optional_auth
def get_search_suggestions() -> Dict[str, Any]:
"""
Get search suggestions based on partial query.
Query Parameters:
- q: Partial search query (required)
- type: Content type (anime, episodes, all)
- limit: Maximum suggestions to return (default: 10, max: 50)
Returns:
List of search suggestions
"""
if not search_engine:
raise APIException("Search engine not available", 503)
query = request.args.get('q', '').strip()
if not query:
raise ValidationError("Query 'q' is required")
    if len(query) < 2:
        # Single-character queries are too short to yield useful suggestions
        return create_success_response(data=[])
content_type = request.args.get('type', 'all')
if content_type not in ['anime', 'episodes', 'all']:
raise ValidationError("type must be 'anime', 'episodes', or 'all'")
limit = request.args.get('limit', '10')
try:
limit = int(limit)
if limit < 1 or limit > 50:
raise ValueError()
except ValueError:
raise ValidationError("limit must be an integer between 1 and 50")
# Get suggestions
suggestions = search_engine.get_suggestions(
query=query,
content_type=content_type,
limit=limit
)
return create_success_response(
data={
'suggestions': suggestions,
'query': query,
'count': len(suggestions)
}
)
@search_bp.route('/autocomplete', methods=['GET'])
@handle_api_errors
@optional_auth
def autocomplete() -> Dict[str, Any]:
"""
Get autocomplete suggestions for search fields.
Query Parameters:
- field: Field to autocomplete (name, genre, status)
- q: Partial value
- limit: Maximum suggestions (default: 10, max: 20)
Returns:
List of autocomplete suggestions
"""
field = request.args.get('field', '').strip()
query = request.args.get('q', '').strip()
if not field:
raise ValidationError("Field parameter is required")
if field not in ['name', 'genre', 'status', 'year']:
raise ValidationError("field must be one of: name, genre, status, year")
limit = request.args.get('limit', '10')
try:
limit = int(limit)
if limit < 1 or limit > 20:
raise ValueError()
except ValueError:
raise ValidationError("limit must be an integer between 1 and 20")
# Get autocomplete suggestions based on field
suggestions = []
if field == 'name':
# Get anime/episode name suggestions
if anime_repository:
anime_names = anime_repository.get_name_suggestions(query, limit)
suggestions.extend(anime_names)
elif field == 'genre':
# Get genre suggestions
if anime_repository:
genres = anime_repository.get_genre_suggestions(query, limit)
suggestions.extend(genres)
elif field == 'status':
# Get status suggestions
valid_statuses = ['ongoing', 'completed', 'planned', 'dropped', 'paused']
suggestions = [s for s in valid_statuses if query.lower() in s.lower()][:limit]
elif field == 'year':
# Get year suggestions
if anime_repository:
years = anime_repository.get_year_suggestions(query, limit)
suggestions.extend(years)
return create_success_response(
data={
'suggestions': suggestions,
'field': field,
'query': query,
'count': len(suggestions)
}
)
@search_bp.route('/trending', methods=['GET'])
@handle_api_errors
@optional_auth
def get_trending_searches() -> Dict[str, Any]:
"""
Get trending search queries.
Query Parameters:
- period: Time period (day, week, month)
- type: Content type (anime, episodes, all)
- limit: Maximum results (default: 10, max: 50)
Returns:
List of trending search queries
"""
if not search_engine:
raise APIException("Search engine not available", 503)
period = request.args.get('period', 'week')
content_type = request.args.get('type', 'all')
if period not in ['day', 'week', 'month']:
raise ValidationError("period must be 'day', 'week', or 'month'")
if content_type not in ['anime', 'episodes', 'all']:
raise ValidationError("type must be 'anime', 'episodes', or 'all'")
limit = request.args.get('limit', '10')
try:
limit = int(limit)
if limit < 1 or limit > 50:
raise ValueError()
except ValueError:
raise ValidationError("limit must be an integer between 1 and 50")
# Get trending searches
trending = search_engine.get_trending_searches(
period=period,
content_type=content_type,
limit=limit
)
return create_success_response(
data={
'trending': trending,
'period': period,
'type': content_type,
'count': len(trending)
}
)
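For illustration, two hedged example requests against the search blueprint above; the host, the port, and the assumption that the endpoints are reachable without a token (they use optional_auth) describe one possible deployment, not this module.

import requests

SEARCH_BASE = "http://localhost:8080/api/v1/search"  # assumed server and mount point

# Global search across anime and episodes with a minimum relevance of 0.4
print(requests.get(SEARCH_BASE, params={
    "q": "fullmetal",
    "types": "all",
    "min_score": 0.4,
    "page": 1,
    "per_page": 20,
}).json())

# Anime-only search with filters and explicit sorting
print(requests.get(f"{SEARCH_BASE}/anime", params={
    "q": "gate",
    "status": "completed",
    "year_from": 2010,
    "sort_by": "year",
    "sort_order": "asc",
}).json())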

View File

@@ -1,332 +0,0 @@
"""
Simple Master Password Authentication Controller for AniWorld.
This module implements a simple authentication system using:
- Single master password (no user registration)
- JWT tokens for session management
- Environment-based configuration
- No email system required
"""
import os
import hashlib
import jwt
from datetime import datetime, timedelta
from flask import Blueprint, request, jsonify
from functools import wraps
import logging
from typing import Dict, Any, Optional, Tuple
# Configure logging
logger = logging.getLogger(__name__)
# Create blueprint
simple_auth_bp = Blueprint('simple_auth', __name__)
# Configuration from environment
JWT_SECRET_KEY = os.getenv('JWT_SECRET_KEY', 'default_jwt_secret')
PASSWORD_SALT = os.getenv('PASSWORD_SALT', 'default_salt')
MASTER_PASSWORD_HASH = os.getenv('MASTER_PASSWORD_HASH')
TOKEN_EXPIRY_HOURS = int(os.getenv('SESSION_TIMEOUT_HOURS', '24'))
def hash_password(password: str) -> str:
"""Hash password with salt using SHA-256."""
salted_password = password + PASSWORD_SALT
return hashlib.sha256(salted_password.encode()).hexdigest()
def verify_master_password(password: str) -> bool:
"""Verify password against master password hash."""
if not MASTER_PASSWORD_HASH:
# If no hash is set, check against environment variable (development only)
dev_password = os.getenv('MASTER_PASSWORD')
if dev_password:
return password == dev_password
return False
password_hash = hash_password(password)
return password_hash == MASTER_PASSWORD_HASH
def generate_jwt_token() -> str:
"""Generate JWT token for authentication."""
payload = {
'user': 'master',
'exp': datetime.utcnow() + timedelta(hours=TOKEN_EXPIRY_HOURS),
'iat': datetime.utcnow(),
'iss': 'aniworld-server'
}
return jwt.encode(payload, JWT_SECRET_KEY, algorithm='HS256')
def verify_jwt_token(token: str) -> Optional[Dict[str, Any]]:
"""Verify and decode JWT token."""
try:
payload = jwt.decode(token, JWT_SECRET_KEY, algorithms=['HS256'])
return payload
except jwt.ExpiredSignatureError:
logger.warning("Token has expired")
return None
except jwt.InvalidTokenError as e:
logger.warning(f"Invalid token: {str(e)}")
return None
def require_auth(f):
"""Decorator to require authentication for API endpoints."""
@wraps(f)
def decorated_function(*args, **kwargs):
auth_header = request.headers.get('Authorization')
if not auth_header:
return jsonify({
'success': False,
'error': 'Authorization header required',
'code': 'AUTH_REQUIRED'
}), 401
try:
# Expected format: "Bearer <token>"
token = auth_header.split(' ')[1]
except IndexError:
return jsonify({
'success': False,
'error': 'Invalid authorization header format',
'code': 'INVALID_AUTH_FORMAT'
}), 401
payload = verify_jwt_token(token)
if not payload:
return jsonify({
'success': False,
'error': 'Invalid or expired token',
'code': 'INVALID_TOKEN'
}), 401
# Add user info to request context
request.current_user = payload
return f(*args, **kwargs)
return decorated_function
# Auth endpoints
@simple_auth_bp.route('/auth/login', methods=['POST'])
def login() -> Tuple[Any, int]:
"""
Authenticate with master password and receive JWT token.
Request Body:
{
"password": "master_password"
}
Response:
{
"success": true,
"message": "Login successful",
"data": {
"token": "jwt_token_here",
"expires_at": "2025-01-01T00:00:00Z",
"user": "master"
}
}
"""
try:
data = request.get_json()
if not data:
return jsonify({
'success': False,
'error': 'JSON body required',
'code': 'MISSING_JSON'
}), 400
password = data.get('password')
if not password:
return jsonify({
'success': False,
'error': 'Password required',
'code': 'MISSING_PASSWORD'
}), 400
# Verify master password
if not verify_master_password(password):
logger.warning(f"Failed login attempt from IP: {request.remote_addr}")
return jsonify({
'success': False,
'error': 'Invalid master password',
'code': 'INVALID_CREDENTIALS'
}), 401
# Generate JWT token
token = generate_jwt_token()
expires_at = datetime.utcnow() + timedelta(hours=TOKEN_EXPIRY_HOURS)
logger.info(f"Successful login from IP: {request.remote_addr}")
return jsonify({
'success': True,
'message': 'Login successful',
'data': {
'token': token,
'expires_at': expires_at.isoformat() + 'Z',
'user': 'master',
'token_type': 'Bearer'
}
}), 200
except Exception as e:
logger.error(f"Login error: {str(e)}")
return jsonify({
'success': False,
'error': 'Internal server error',
'code': 'SERVER_ERROR'
}), 500
@simple_auth_bp.route('/auth/verify', methods=['GET'])
@require_auth
def verify_token() -> Tuple[Any, int]:
"""
Verify if the current JWT token is valid.
Headers:
Authorization: Bearer <token>
Response:
{
"success": true,
"message": "Token is valid",
"data": {
"user": "master",
"expires_at": "2025-01-01T00:00:00Z",
"issued_at": "2025-01-01T00:00:00Z"
}
}
"""
try:
payload = request.current_user
return jsonify({
'success': True,
'message': 'Token is valid',
'data': {
'user': payload.get('user'),
'expires_at': datetime.utcfromtimestamp(payload.get('exp')).isoformat() + 'Z',
'issued_at': datetime.utcfromtimestamp(payload.get('iat')).isoformat() + 'Z',
'issuer': payload.get('iss')
}
}), 200
except Exception as e:
logger.error(f"Token verification error: {str(e)}")
return jsonify({
'success': False,
'error': 'Internal server error',
'code': 'SERVER_ERROR'
}), 500
@simple_auth_bp.route('/auth/logout', methods=['POST'])
@require_auth
def logout() -> Tuple[Any, int]:
"""
Logout (client-side token clearing).
Since JWT tokens are stateless, logout is handled client-side
    by removing the token. This endpoint simply confirms the logout action.
Headers:
Authorization: Bearer <token>
Response:
{
"success": true,
"message": "Logout successful"
}
"""
try:
logger.info(f"User logged out from IP: {request.remote_addr}")
return jsonify({
'success': True,
            'message': 'Logout successful. Please remove the token on the client side.',
'data': {
'action': 'clear_token'
}
}), 200
except Exception as e:
logger.error(f"Logout error: {str(e)}")
return jsonify({
'success': False,
'error': 'Internal server error',
'code': 'SERVER_ERROR'
}), 500
@simple_auth_bp.route('/auth/status', methods=['GET'])
def auth_status() -> Tuple[Any, int]:
"""
Check authentication system status.
Response:
{
"success": true,
"message": "Authentication system status",
"data": {
"auth_type": "master_password",
"jwt_enabled": true,
"password_configured": true
}
}
"""
try:
password_configured = bool(MASTER_PASSWORD_HASH or os.getenv('MASTER_PASSWORD'))
return jsonify({
'success': True,
'message': 'Authentication system status',
'data': {
'auth_type': 'master_password',
'jwt_enabled': True,
'password_configured': password_configured,
'token_expiry_hours': TOKEN_EXPIRY_HOURS
}
}), 200
except Exception as e:
logger.error(f"Auth status error: {str(e)}")
return jsonify({
'success': False,
'error': 'Internal server error',
'code': 'SERVER_ERROR'
}), 500
# Utility function to set master password hash
def set_master_password(password: str) -> str:
"""
Generate hash for master password.
This should be used to set MASTER_PASSWORD_HASH in environment.
Args:
password: The master password to hash
Returns:
The hashed password that should be stored in environment
"""
return hash_password(password)
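# Illustrative one-time setup (module path and shell syntax are assumptions):
#   $ python -c "from simple_auth import set_master_password; print(set_master_password('my-secret'))"
#   $ export MASTER_PASSWORD_HASH=<printed hash>
# Afterwards verify_master_password() above checks logins against that hash.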
# Health check endpoint
@simple_auth_bp.route('/auth/health', methods=['GET'])
def health_check() -> Tuple[Any, int]:
"""Health check for auth system."""
return jsonify({
'success': True,
'message': 'Auth system is healthy',
'timestamp': datetime.utcnow().isoformat() + 'Z'
}), 200
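A minimal sketch of the full authentication flow against these endpoints, assuming the blueprint is registered without an additional URL prefix and the server listens on localhost:8080; the password value is a placeholder.

import requests

BASE = "http://localhost:8080"  # assumed server address

# 1. Log in with the master password to obtain a JWT token
login = requests.post(f"{BASE}/auth/login", json={"password": "my-secret"})
token = login.json()["data"]["token"]

# 2. Use the token on protected endpoints
headers = {"Authorization": f"Bearer {token}"}
print(requests.get(f"{BASE}/auth/verify", headers=headers).json())

# 3. Logout is confirmed server-side, but the token is simply dropped client-side
requests.post(f"{BASE}/auth/logout", headers=headers)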

View File

@@ -1,661 +0,0 @@
"""
Storage Management API Endpoints
This module provides REST API endpoints for storage management operations,
including storage monitoring, location management, and disk usage tracking.
"""
from flask import Blueprint, request
from typing import Dict, List, Any, Optional
import os
import shutil
from datetime import datetime
from ...shared.auth_decorators import require_auth, optional_auth
from ...shared.error_handlers import handle_api_errors, APIException, NotFoundError, ValidationError
from ...shared.validators import validate_json_input, validate_id_parameter, validate_pagination_params
from ...shared.response_helpers import (
create_success_response, create_paginated_response, extract_pagination_params
)
# Import storage components (these imports would need to be adjusted based on actual structure)
try:
from database_manager import storage_manager, database_manager, StorageLocation
except ImportError:
# Fallback for development/testing
storage_manager = None
database_manager = None
StorageLocation = None
# Blueprint for storage management endpoints
storage_bp = Blueprint('storage', __name__, url_prefix='/api/v1/storage')
@storage_bp.route('/summary', methods=['GET'])
@handle_api_errors
@optional_auth
def get_storage_summary() -> Dict[str, Any]:
"""
Get overall storage usage summary.
Returns:
Storage summary with usage statistics
"""
if not storage_manager:
raise APIException("Storage manager not available", 503)
try:
summary = storage_manager.get_storage_summary()
return create_success_response(
data={
'total_storage_gb': round(summary.get('total_bytes', 0) / (1024**3), 2),
'used_storage_gb': round(summary.get('used_bytes', 0) / (1024**3), 2),
'free_storage_gb': round(summary.get('free_bytes', 0) / (1024**3), 2),
'usage_percentage': summary.get('usage_percentage', 0),
'anime_storage_gb': round(summary.get('anime_bytes', 0) / (1024**3), 2),
'backup_storage_gb': round(summary.get('backup_bytes', 0) / (1024**3), 2),
'cache_storage_gb': round(summary.get('cache_bytes', 0) / (1024**3), 2),
'temp_storage_gb': round(summary.get('temp_bytes', 0) / (1024**3), 2),
'location_count': summary.get('location_count', 0),
'active_locations': summary.get('active_locations', 0),
'last_updated': summary.get('last_updated', datetime.utcnow()).isoformat()
}
)
except Exception as e:
raise APIException(f"Failed to get storage summary: {str(e)}", 500)
@storage_bp.route('/locations', methods=['GET'])
@handle_api_errors
@validate_pagination_params
@optional_auth
def get_storage_locations() -> Dict[str, Any]:
"""
Get all storage locations with optional filtering.
Query Parameters:
- location_type: Filter by location type (primary, backup, cache, temp)
- anime_id: Filter by anime ID
- status: Filter by status (active, inactive, error)
- min_free_gb: Minimum free space in GB
- max_usage_percent: Maximum usage percentage
- page: Page number (default: 1)
- per_page: Items per page (default: 50, max: 1000)
Returns:
Paginated list of storage locations
"""
if not storage_manager or not database_manager:
raise APIException("Storage manager not available", 503)
# Extract filters
location_type_filter = request.args.get('location_type')
anime_id = request.args.get('anime_id')
status_filter = request.args.get('status')
min_free_gb = request.args.get('min_free_gb')
max_usage_percent = request.args.get('max_usage_percent')
# Validate filters
valid_types = ['primary', 'backup', 'cache', 'temp']
if location_type_filter and location_type_filter not in valid_types:
raise ValidationError(f"location_type must be one of: {', '.join(valid_types)}")
if anime_id:
try:
anime_id = int(anime_id)
except ValueError:
raise ValidationError("anime_id must be a valid integer")
valid_statuses = ['active', 'inactive', 'error']
if status_filter and status_filter not in valid_statuses:
raise ValidationError(f"status must be one of: {', '.join(valid_statuses)}")
if min_free_gb:
try:
min_free_gb = float(min_free_gb)
if min_free_gb < 0:
raise ValueError()
except ValueError:
raise ValidationError("min_free_gb must be a non-negative number")
if max_usage_percent:
try:
max_usage_percent = float(max_usage_percent)
if not 0 <= max_usage_percent <= 100:
raise ValueError()
except ValueError:
raise ValidationError("max_usage_percent must be between 0 and 100")
# Get pagination parameters
page, per_page = extract_pagination_params()
try:
# Query storage locations
query = """
SELECT sl.*, am.name as anime_name
FROM storage_locations sl
LEFT JOIN anime_metadata am ON sl.anime_id = am.anime_id
WHERE 1=1
"""
params = []
if location_type_filter:
query += " AND sl.location_type = ?"
params.append(location_type_filter)
if anime_id:
query += " AND sl.anime_id = ?"
params.append(anime_id)
if status_filter:
query += " AND sl.status = ?"
params.append(status_filter)
query += " ORDER BY sl.location_type, sl.path"
results = database_manager.execute_query(query, params)
# Format and filter results
locations = []
for row in results:
free_space_gb = (row['free_space_bytes'] / (1024**3)) if row['free_space_bytes'] else None
total_space_gb = (row['total_space_bytes'] / (1024**3)) if row['total_space_bytes'] else None
usage_percent = None
if row['total_space_bytes'] and row['free_space_bytes']:
usage_percent = ((row['total_space_bytes'] - row['free_space_bytes']) / row['total_space_bytes'] * 100)
# Apply additional filters
if min_free_gb and (free_space_gb is None or free_space_gb < min_free_gb):
continue
if max_usage_percent and (usage_percent is None or usage_percent > max_usage_percent):
continue
location_data = {
'location_id': row['location_id'],
'anime_id': row['anime_id'],
'anime_name': row['anime_name'],
'path': row['path'],
'location_type': row['location_type'],
'status': row['status'],
'free_space_gb': free_space_gb,
'total_space_gb': total_space_gb,
'used_space_gb': (total_space_gb - free_space_gb) if (total_space_gb and free_space_gb) else None,
'usage_percent': usage_percent,
'last_checked': row['last_checked'],
'created_at': row['created_at'],
'is_active': row['is_active'],
'mount_point': row.get('mount_point'),
'filesystem': row.get('filesystem')
}
locations.append(location_data)
# Apply pagination
total = len(locations)
start_idx = (page - 1) * per_page
end_idx = start_idx + per_page
paginated_locations = locations[start_idx:end_idx]
return create_paginated_response(
data=paginated_locations,
page=page,
per_page=per_page,
total=total,
endpoint='storage.get_storage_locations'
)
except Exception as e:
raise APIException(f"Failed to get storage locations: {str(e)}", 500)
@storage_bp.route('/locations', methods=['POST'])
@handle_api_errors
@validate_json_input(
required_fields=['path', 'location_type'],
optional_fields=['anime_id', 'description', 'mount_point', 'auto_create'],
field_types={
'path': str,
'location_type': str,
'anime_id': int,
'description': str,
'mount_point': str,
'auto_create': bool
}
)
@require_auth
def add_storage_location() -> Dict[str, Any]:
"""
Add a new storage location.
Required Fields:
- path: Storage path
- location_type: Type of storage (primary, backup, cache, temp)
Optional Fields:
- anime_id: Associated anime ID (for anime-specific storage)
- description: Location description
- mount_point: Mount point information
- auto_create: Automatically create directory if it doesn't exist
Returns:
Created storage location information
"""
if not storage_manager:
raise APIException("Storage manager not available", 503)
data = request.get_json()
path = data['path']
location_type = data['location_type']
anime_id = data.get('anime_id')
description = data.get('description')
mount_point = data.get('mount_point')
auto_create = data.get('auto_create', False)
# Validate location type
valid_types = ['primary', 'backup', 'cache', 'temp']
if location_type not in valid_types:
raise ValidationError(f"location_type must be one of: {', '.join(valid_types)}")
# Validate path
if not path or not isinstance(path, str):
raise ValidationError("path must be a valid string")
# Normalize path
path = os.path.abspath(path)
# Check if path already exists as a storage location
existing_location = storage_manager.get_location_by_path(path)
if existing_location:
raise ValidationError("Storage location with this path already exists")
# Check if directory exists or create it
if not os.path.exists(path):
if auto_create:
try:
os.makedirs(path, exist_ok=True)
except Exception as e:
raise ValidationError(f"Failed to create directory: {str(e)}")
else:
raise ValidationError("Directory does not exist. Set auto_create=true to create it.")
# Check if it's a directory
if not os.path.isdir(path):
raise ValidationError("Path must be a directory")
# Check if it's writable
if not os.access(path, os.W_OK):
raise ValidationError("Directory is not writable")
try:
location_id = storage_manager.add_storage_location(
path=path,
location_type=location_type,
anime_id=anime_id,
description=description,
mount_point=mount_point
)
# Get the created location details
location = storage_manager.get_location_by_id(location_id)
location_data = {
'location_id': location.location_id,
'path': location.path,
'location_type': location.location_type,
'anime_id': location.anime_id,
'description': location.description,
'mount_point': location.mount_point,
'status': location.status,
'created_at': location.created_at.isoformat(),
'is_active': location.is_active
}
return create_success_response(
data=location_data,
message="Storage location added successfully",
status_code=201
)
except Exception as e:
raise APIException(f"Failed to add storage location: {str(e)}", 500)
@storage_bp.route('/locations/<int:location_id>', methods=['GET'])
@handle_api_errors
@validate_id_parameter('location_id')
@optional_auth
def get_storage_location(location_id: int) -> Dict[str, Any]:
"""
Get detailed information about a specific storage location.
Args:
location_id: Unique identifier for the storage location
Returns:
Detailed storage location information
"""
if not storage_manager:
raise APIException("Storage manager not available", 503)
location = storage_manager.get_location_by_id(location_id)
if not location:
raise NotFoundError("Storage location not found")
try:
# Get detailed storage statistics
stats = storage_manager.get_location_stats(location_id)
location_data = {
'location_id': location.location_id,
'path': location.path,
'location_type': location.location_type,
'anime_id': location.anime_id,
'description': location.description,
'mount_point': location.mount_point,
'status': location.status,
'created_at': location.created_at.isoformat(),
'last_checked': location.last_checked.isoformat() if location.last_checked else None,
'is_active': location.is_active,
'free_space_gb': round(stats.get('free_bytes', 0) / (1024**3), 2),
'total_space_gb': round(stats.get('total_bytes', 0) / (1024**3), 2),
'used_space_gb': round(stats.get('used_bytes', 0) / (1024**3), 2),
'usage_percent': stats.get('usage_percentage', 0),
'file_count': stats.get('file_count', 0),
'directory_count': stats.get('directory_count', 0),
'largest_file_mb': round(stats.get('largest_file_bytes', 0) / (1024**2), 2),
'filesystem': stats.get('filesystem'),
'mount_options': stats.get('mount_options'),
'health_status': stats.get('health_status', 'unknown')
}
return create_success_response(location_data)
except Exception as e:
raise APIException(f"Failed to get storage location: {str(e)}", 500)
@storage_bp.route('/locations/<int:location_id>', methods=['PUT'])
@handle_api_errors
@validate_id_parameter('location_id')
@validate_json_input(
optional_fields=['description', 'location_type', 'is_active', 'mount_point'],
field_types={
'description': str,
'location_type': str,
'is_active': bool,
'mount_point': str
}
)
@require_auth
def update_storage_location(location_id: int) -> Dict[str, Any]:
"""
Update a storage location.
Args:
location_id: Unique identifier for the storage location
Optional Fields:
- description: Updated description
- location_type: Updated location type
- is_active: Active status
- mount_point: Mount point information
Returns:
Updated storage location information
"""
if not storage_manager:
raise APIException("Storage manager not available", 503)
data = request.get_json()
# Check if location exists
location = storage_manager.get_location_by_id(location_id)
if not location:
raise NotFoundError("Storage location not found")
# Validate location type if provided
if 'location_type' in data:
valid_types = ['primary', 'backup', 'cache', 'temp']
if data['location_type'] not in valid_types:
raise ValidationError(f"location_type must be one of: {', '.join(valid_types)}")
try:
# Update location
success = storage_manager.update_location(location_id, data)
if not success:
raise APIException("Failed to update storage location", 500)
# Get updated location
updated_location = storage_manager.get_location_by_id(location_id)
location_data = {
'location_id': updated_location.location_id,
'path': updated_location.path,
'location_type': updated_location.location_type,
'anime_id': updated_location.anime_id,
'description': updated_location.description,
'mount_point': updated_location.mount_point,
'status': updated_location.status,
'is_active': updated_location.is_active,
'updated_at': datetime.utcnow().isoformat()
}
        return create_success_response(
            data=location_data,
            message="Storage location updated successfully"
        )
    except APIException:
        # Preserve the status and message of errors raised above (e.g. the 500
        # from a failed update) instead of re-wrapping them
        raise
    except Exception as e:
        raise APIException(f"Failed to update storage location: {str(e)}", 500)
@storage_bp.route('/locations/<int:location_id>', methods=['DELETE'])
@handle_api_errors
@validate_id_parameter('location_id')
@require_auth
def delete_storage_location(location_id: int) -> Dict[str, Any]:
"""
Delete a storage location.
Args:
location_id: Unique identifier for the storage location
Query Parameters:
- force: Force deletion even if location contains files
- delete_files: Also delete files in the location
Returns:
Deletion confirmation
"""
if not storage_manager:
raise APIException("Storage manager not available", 503)
# Check if location exists
location = storage_manager.get_location_by_id(location_id)
if not location:
raise NotFoundError("Storage location not found")
force = request.args.get('force', 'false').lower() == 'true'
delete_files = request.args.get('delete_files', 'false').lower() == 'true'
try:
# Check if location has files (unless force is used)
if not force:
stats = storage_manager.get_location_stats(location_id)
if stats.get('file_count', 0) > 0:
raise ValidationError(
f"Storage location contains {stats['file_count']} files. "
"Use force=true to delete anyway."
)
# Delete location
success = storage_manager.delete_location(location_id, delete_files=delete_files)
if not success:
raise APIException("Failed to delete storage location", 500)
message = f"Storage location deleted successfully"
if delete_files:
message += " (including all files)"
return create_success_response(message=message)
except Exception as e:
raise APIException(f"Failed to delete storage location: {str(e)}", 500)
@storage_bp.route('/locations/<int:location_id>/refresh', methods=['POST'])
@handle_api_errors
@validate_id_parameter('location_id')
@require_auth
def refresh_storage_location(location_id: int) -> Dict[str, Any]:
"""
Refresh storage statistics for a location.
Args:
location_id: Unique identifier for the storage location
Returns:
Updated storage statistics
"""
if not storage_manager:
raise APIException("Storage manager not available", 503)
# Check if location exists
location = storage_manager.get_location_by_id(location_id)
if not location:
raise NotFoundError("Storage location not found")
try:
# Update storage statistics
stats = storage_manager.update_location_stats(location_id)
return create_success_response(
data={
'location_id': location_id,
'free_space_gb': round(stats.get('free_bytes', 0) / (1024**3), 2),
'total_space_gb': round(stats.get('total_bytes', 0) / (1024**3), 2),
'used_space_gb': round(stats.get('used_bytes', 0) / (1024**3), 2),
'usage_percent': stats.get('usage_percentage', 0),
'file_count': stats.get('file_count', 0),
'directory_count': stats.get('directory_count', 0),
'last_updated': datetime.utcnow().isoformat()
},
message="Storage statistics updated successfully"
)
except Exception as e:
raise APIException(f"Failed to refresh storage location: {str(e)}", 500)
@storage_bp.route('/cleanup', methods=['POST'])
@handle_api_errors
@validate_json_input(
optional_fields=['location_type', 'target_usage_percent', 'cleanup_temp', 'cleanup_cache', 'dry_run'],
field_types={
'location_type': str,
'target_usage_percent': float,
'cleanup_temp': bool,
'cleanup_cache': bool,
'dry_run': bool
}
)
@require_auth
def cleanup_storage() -> Dict[str, Any]:
"""
Perform storage cleanup operations.
Optional Fields:
- location_type: Type of locations to clean (temp, cache, backup)
- target_usage_percent: Target usage percentage after cleanup
- cleanup_temp: Clean temporary files
- cleanup_cache: Clean cache files
- dry_run: Preview what would be cleaned without actually doing it
Returns:
Cleanup results
"""
if not storage_manager:
raise APIException("Storage manager not available", 503)
data = request.get_json() or {}
location_type = data.get('location_type', 'temp')
target_usage_percent = data.get('target_usage_percent', 80.0)
cleanup_temp = data.get('cleanup_temp', True)
cleanup_cache = data.get('cleanup_cache', False)
dry_run = data.get('dry_run', False)
# Validate parameters
valid_types = ['temp', 'cache', 'backup']
if location_type not in valid_types:
raise ValidationError(f"location_type must be one of: {', '.join(valid_types)}")
if not 0 <= target_usage_percent <= 100:
raise ValidationError("target_usage_percent must be between 0 and 100")
try:
cleanup_result = storage_manager.cleanup_storage(
location_type=location_type,
target_usage_percent=target_usage_percent,
cleanup_temp=cleanup_temp,
cleanup_cache=cleanup_cache,
dry_run=dry_run
)
return create_success_response(
data={
'dry_run': dry_run,
'location_type': location_type,
'files_deleted': cleanup_result.get('files_deleted', 0),
'directories_deleted': cleanup_result.get('directories_deleted', 0),
'space_freed_gb': round(cleanup_result.get('space_freed_bytes', 0) / (1024**3), 2),
'cleanup_summary': cleanup_result.get('summary', {}),
'target_usage_percent': target_usage_percent,
'final_usage_percent': cleanup_result.get('final_usage_percent')
},
message=f"Storage cleanup {'simulated' if dry_run else 'completed'}"
)
except Exception as e:
raise APIException(f"Failed to cleanup storage: {str(e)}", 500)
@storage_bp.route('/health', methods=['GET'])
@handle_api_errors
@optional_auth
def get_storage_health() -> Dict[str, Any]:
"""
Get storage health status across all locations.
Returns:
Storage health information
"""
if not storage_manager:
raise APIException("Storage manager not available", 503)
try:
health_status = storage_manager.get_storage_health()
return create_success_response(
data={
'overall_status': health_status.get('overall_status', 'unknown'),
'total_locations': health_status.get('total_locations', 0),
'healthy_locations': health_status.get('healthy_locations', 0),
'warning_locations': health_status.get('warning_locations', 0),
'error_locations': health_status.get('error_locations', 0),
'average_usage_percent': health_status.get('average_usage_percent', 0),
'locations_near_full': health_status.get('locations_near_full', []),
'locations_with_errors': health_status.get('locations_with_errors', []),
'recommendations': health_status.get('recommendations', []),
'last_check': health_status.get('last_check', datetime.utcnow()).isoformat()
}
)
except Exception as e:
raise APIException(f"Failed to get storage health: {str(e)}", 500)

View File

@@ -1,352 +0,0 @@
"""
Base controller with common functionality for all controllers.
This module provides a base controller class that reduces duplication across
controller modules by providing standardized error handling, validation, and
response formatting.
"""
from abc import ABC
from typing import Any, Dict, Optional, List, Union, Tuple, Callable
try:
from flask import jsonify, request
from werkzeug.exceptions import HTTPException
except ImportError:
# Fallback for environments without Flask
def jsonify(data):
import json
return json.dumps(data)
class HTTPException(Exception):
def __init__(self, status_code, detail):
self.status_code = status_code
self.detail = detail
super().__init__(detail)
class request:
is_json = False
@staticmethod
def get_json():
return {}
headers = {}
args = {}
form = {}
try:
from pydantic import BaseModel
except ImportError:
# Fallback BaseModel
class BaseModel:
pass
import logging
import functools
class BaseController(ABC):
"""Base controller with common functionality for all controllers."""
def __init__(self):
self.logger = logging.getLogger(self.__class__.__name__)
def handle_error(self, error: Exception, status_code: int = 500) -> HTTPException:
"""
Standardized error handling across all controllers.
Args:
error: The exception that occurred
status_code: HTTP status code to return
Returns:
HTTPException with standardized format
"""
self.logger.error(f"Controller error: {str(error)}", exc_info=True)
return HTTPException(status_code, str(error))
def validate_request(self, data: BaseModel) -> bool:
"""
Common validation logic for request data.
Args:
data: Pydantic model to validate
Returns:
True if validation passes
Raises:
ValidationError if validation fails
"""
try:
# Pydantic models automatically validate on instantiation
return True
except Exception as e:
self.logger.warning(f"Validation failed: {str(e)}")
raise
def format_response(self, data: Any, message: str = "Success") -> Dict[str, Any]:
"""
Standardized response format for successful operations.
Args:
data: Data to include in response
message: Success message
Returns:
Standardized success response dictionary
"""
return {
"status": "success",
"message": message,
"data": data
}
def format_error_response(self, message: str, status_code: int = 400, details: Any = None) -> Tuple[Dict[str, Any], int]:
"""
Standardized error response format.
Args:
message: Error message
status_code: HTTP status code
details: Additional error details
Returns:
Tuple of (error_response_dict, status_code)
"""
response = {
"status": "error",
"message": message,
"error_code": status_code
}
if details:
response["details"] = details
return response, status_code
def create_success_response(
self,
data: Any = None,
message: str = "Operation successful",
status_code: int = 200,
pagination: Optional[Dict[str, Any]] = None,
meta: Optional[Dict[str, Any]] = None
) -> Tuple[Dict[str, Any], int]:
"""
Create a standardized success response.
Args:
data: Data to include in response
message: Success message
status_code: HTTP status code
pagination: Pagination information
meta: Additional metadata
Returns:
Tuple of (response_dict, status_code)
"""
response = {
'status': 'success',
'message': message
}
if data is not None:
response['data'] = data
if pagination:
response['pagination'] = pagination
if meta:
response['meta'] = meta
return response, status_code
def create_error_response(
self,
message: str,
status_code: int = 400,
details: Any = None,
error_code: Optional[str] = None
) -> Tuple[Dict[str, Any], int]:
"""
Create a standardized error response.
Args:
message: Error message
status_code: HTTP status code
details: Additional error details
error_code: Specific error code
Returns:
Tuple of (response_dict, status_code)
"""
response = {
'status': 'error',
'message': message,
'error_code': error_code or status_code
}
if details:
response['details'] = details
return response, status_code
def handle_api_errors(f: Callable) -> Callable:
"""
Decorator for standardized API error handling.
This decorator should be used on all API endpoints to ensure
consistent error handling and response formatting.
"""
@functools.wraps(f)
def decorated_function(*args, **kwargs):
try:
return f(*args, **kwargs)
except HTTPException:
# Re-raise HTTP exceptions as they are already properly formatted
raise
except ValueError as e:
# Handle validation errors
return jsonify({
'status': 'error',
'message': 'Invalid input data',
'details': str(e),
'error_code': 400
}), 400
except PermissionError as e:
# Handle authorization errors
return jsonify({
'status': 'error',
'message': 'Access denied',
'details': str(e),
'error_code': 403
}), 403
except FileNotFoundError as e:
# Handle not found errors
return jsonify({
'status': 'error',
'message': 'Resource not found',
'details': str(e),
'error_code': 404
}), 404
except Exception as e:
# Handle all other errors
logging.getLogger(__name__).error(f"Unhandled error in {f.__name__}: {str(e)}", exc_info=True)
return jsonify({
'status': 'error',
'message': 'Internal server error',
'details': str(e) if logging.getLogger().isEnabledFor(logging.DEBUG) else 'An unexpected error occurred',
'error_code': 500
}), 500
return decorated_function
def require_auth(f: Callable) -> Callable:
"""
Decorator to require authentication for API endpoints.
This decorator should be applied to endpoints that require
user authentication.
"""
@functools.wraps(f)
def decorated_function(*args, **kwargs):
# Implementation would depend on your authentication system
# For now, this is a placeholder that should be implemented
# based on your specific authentication requirements
# Example implementation:
# auth_header = request.headers.get('Authorization')
# if not auth_header or not validate_auth_token(auth_header):
# return jsonify({
# 'status': 'error',
# 'message': 'Authentication required',
# 'error_code': 401
# }), 401
return f(*args, **kwargs)
return decorated_function
def optional_auth(f: Callable) -> Callable:
"""
Decorator for optional authentication.
This decorator allows endpoints to work with or without authentication,
but provides additional functionality when authenticated.
"""
@functools.wraps(f)
def decorated_function(*args, **kwargs):
# Implementation would depend on your authentication system
# This would set user context if authenticated, but not fail if not
return f(*args, **kwargs)
return decorated_function
def validate_json_input(
required_fields: Optional[List[str]] = None,
optional_fields: Optional[List[str]] = None,
**field_validators
) -> Callable:
"""
Decorator for JSON input validation.
Args:
required_fields: List of required field names
optional_fields: List of optional field names
**field_validators: Field-specific validation functions
Returns:
Decorator function
"""
def decorator(f: Callable) -> Callable:
@functools.wraps(f)
def decorated_function(*args, **kwargs):
if not request.is_json:
return jsonify({
'status': 'error',
'message': 'Request must contain JSON data',
'error_code': 400
}), 400
data = request.get_json()
if not data:
return jsonify({
'status': 'error',
'message': 'Invalid JSON data',
'error_code': 400
}), 400
# Check required fields
if required_fields:
missing_fields = [field for field in required_fields if field not in data]
if missing_fields:
return jsonify({
'status': 'error',
'message': f'Missing required fields: {", ".join(missing_fields)}',
'error_code': 400
}), 400
# Apply field validators
for field, validator in field_validators.items():
if field in data:
try:
if not validator(data[field]):
return jsonify({
'status': 'error',
'message': f'Invalid value for field: {field}',
'error_code': 400
}), 400
except Exception as e:
return jsonify({
'status': 'error',
'message': f'Validation error for field {field}: {str(e)}',
'error_code': 400
}), 400
return f(*args, **kwargs)
return decorated_function
return decorator
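To show how the base class is intended to be reused, here is a small hypothetical subclass; the AnimeController name and its injected repository are illustrative, not part of the original code.

class AnimeController(BaseController):
    """Hypothetical controller built on top of BaseController."""

    def __init__(self, repository):
        super().__init__()
        self.repository = repository  # assumed data-access object

    @handle_api_errors
    def get_anime(self, anime_id: int):
        anime = self.repository.get_by_id(anime_id)
        if anime is None:
            # handle_api_errors maps FileNotFoundError to a 404 response
            raise FileNotFoundError(f"Anime {anime_id} not found")
        return self.format_response(anime, message="Anime retrieved")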

View File

@@ -1 +0,0 @@
"""Shared utilities and helpers for web controllers."""

View File

@@ -1,150 +0,0 @@
"""
Authentication decorators and utilities for API endpoints.
This module provides authentication decorators that can be used across
all controller modules for consistent authentication handling.
"""
import logging
from functools import wraps
from typing import Optional, Dict, Any, Callable
from flask import session, request, jsonify, redirect, url_for
# Import session manager from auth controller
from ..auth_controller import session_manager
def require_auth(f: Callable) -> Callable:
"""
Decorator to require authentication for Flask routes.
Args:
f: The function to decorate
Returns:
Decorated function that requires authentication
Usage:
@require_auth
def protected_endpoint():
return "This requires authentication"
"""
@wraps(f)
def decorated_function(*args, **kwargs):
if not session_manager.is_authenticated():
# Check if this is an AJAX request (JSON, XMLHttpRequest, or fetch API request)
is_ajax = (
request.is_json or
request.headers.get('X-Requested-With') == 'XMLHttpRequest' or
request.headers.get('Accept', '').startswith('application/json') or
'/api/' in request.path # API endpoints should return JSON
)
if is_ajax:
return jsonify({
'status': 'error',
'message': 'Authentication required',
'code': 'AUTH_REQUIRED'
}), 401
else:
return redirect(url_for('auth.login'))
return f(*args, **kwargs)
return decorated_function
def optional_auth(f: Callable) -> Callable:
"""
Decorator that checks auth but doesn't require it.
This decorator will only require authentication if a master password
has been configured in the system.
Args:
f: The function to decorate
Returns:
Decorated function that optionally requires authentication
Usage:
@optional_auth
def maybe_protected_endpoint():
return "This may require authentication"
"""
@wraps(f)
def decorated_function(*args, **kwargs):
# Import config here to avoid circular imports
from config import config
# Check if master password is configured
if config.has_master_password():
# If configured, require authentication
if not session_manager.is_authenticated():
# Check if this is an AJAX request (JSON, XMLHttpRequest, or fetch API request)
is_ajax = (
request.is_json or
request.headers.get('X-Requested-With') == 'XMLHttpRequest' or
request.headers.get('Accept', '').startswith('application/json') or
'/api/' in request.path # API endpoints should return JSON
)
if is_ajax:
return jsonify({
'status': 'error',
'message': 'Authentication required',
'code': 'AUTH_REQUIRED'
}), 401
else:
return redirect(url_for('auth.login'))
return f(*args, **kwargs)
return decorated_function
def get_current_user() -> Optional[Dict[str, Any]]:
"""
Get current authenticated user information.
Returns:
Dictionary containing user information if authenticated, None otherwise
"""
if session_manager.is_authenticated():
return session_manager.get_session_info()
return None
def get_client_ip() -> str:
"""
Get client IP address with proxy support.
Returns:
Client IP address as string
"""
# Check for forwarded IP (in case of reverse proxy)
forwarded_ip = request.headers.get('X-Forwarded-For')
if forwarded_ip:
return forwarded_ip.split(',')[0].strip()
real_ip = request.headers.get('X-Real-IP')
if real_ip:
return real_ip
return request.remote_addr or 'unknown'
def is_authenticated() -> bool:
"""
Check if current request is from an authenticated user.
Returns:
True if authenticated, False otherwise
"""
return session_manager.is_authenticated()
def logout_current_user() -> bool:
"""
Logout the current user.
Returns:
True if logout was successful, False otherwise
"""
return session_manager.logout()
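Typical usage of these decorators on a Flask blueprint might look like the following sketch; the blueprint name and routes are assumptions.

from flask import Blueprint, jsonify

example_bp = Blueprint('example', __name__, url_prefix='/api/v1/example')

@example_bp.route('/secure', methods=['GET'])
@require_auth
def secure_endpoint():
    # Only reached when session_manager reports an authenticated session
    return jsonify({'user': get_current_user(), 'ip': get_client_ip()})

@example_bp.route('/public', methods=['GET'])
@optional_auth
def public_endpoint():
    # Requires authentication only when a master password is configured
    return jsonify({'authenticated': is_authenticated()})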

View File

@@ -1,286 +0,0 @@
"""
Error handling decorators and utilities for API endpoints.
This module provides standardized error handling decorators and utilities
that can be used across all controller modules for consistent error responses.
"""
import logging
import traceback
from functools import wraps
from typing import Dict, Any, Callable, Tuple, Optional, Union
from flask import jsonify, request
logger = logging.getLogger(__name__)
def handle_api_errors(f: Callable) -> Callable:
"""
Decorator to handle API errors consistently across all endpoints.
This decorator catches exceptions and returns standardized error responses
with appropriate HTTP status codes.
Args:
f: The function to decorate
Returns:
Decorated function with error handling
Usage:
@handle_api_errors
def my_endpoint():
# This will automatically handle any exceptions
return {"data": "success"}
"""
@wraps(f)
def decorated_function(*args, **kwargs):
try:
result = f(*args, **kwargs)
# If result is already a Response object, return it
if hasattr(result, 'status_code'):
return result
# If result is a tuple (data, status_code), handle it
if isinstance(result, tuple) and len(result) == 2:
data, status_code = result
if isinstance(data, dict) and 'status' not in data:
data['status'] = 'success' if 200 <= status_code < 300 else 'error'
return jsonify(data), status_code
# If result is a dict, wrap it in success response
if isinstance(result, dict):
if 'status' not in result:
result['status'] = 'success'
return jsonify(result)
# For other types, wrap in success response
return jsonify({
'status': 'success',
'data': result
})
except ValueError as e:
logger.warning(f"Validation error in {f.__name__}: {str(e)}")
return create_error_response(
message=str(e),
status_code=400,
error_code='VALIDATION_ERROR'
)
except PermissionError as e:
logger.warning(f"Permission error in {f.__name__}: {str(e)}")
return create_error_response(
message="Access denied",
status_code=403,
error_code='ACCESS_DENIED'
)
        except FileNotFoundError as e:
            logger.warning(f"File not found in {f.__name__}: {str(e)}")
            return create_error_response(
                message="Resource not found",
                status_code=404,
                error_code='NOT_FOUND'
            )
        except APIException as e:
            # Surface custom API exceptions (ValidationError, NotFoundError, ...)
            # with their declared status codes instead of a generic 500
            logger.warning(f"API error in {f.__name__}: {e.message}")
            return create_error_response(e.message, e.status_code, e.error_code, e.errors)
        except Exception as e:
            logger.error(f"Unexpected error in {f.__name__}: {str(e)}")
logger.error(f"Traceback: {traceback.format_exc()}")
# Don't expose internal errors in production
return create_error_response(
message="Internal server error",
status_code=500,
error_code='INTERNAL_ERROR'
)
return decorated_function
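# --- Illustrative usage (an assumption, not part of the original module) -----
# A throwaway blueprint showing how the decorator normalizes return values:
# a raised ValueError becomes a 400 VALIDATION_ERROR payload, and a
# (dict, status_code) tuple is wrapped as a success response with that code.
from flask import Blueprint

_error_demo_bp = Blueprint('error_handler_demo', __name__)

@_error_demo_bp.route('/demo/items', methods=['POST'])
@handle_api_errors
def _create_demo_item():
    payload = request.get_json(silent=True) or {}
    if 'name' not in payload:
        raise ValueError("name is required")  # handled above as VALIDATION_ERROR
    return {'created': payload['name']}, 201  # wrapped as a success response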
def handle_database_errors(f: Callable) -> Callable:
"""
Decorator specifically for database-related operations.
Args:
f: The function to decorate
Returns:
Decorated function with database error handling
"""
@wraps(f)
def decorated_function(*args, **kwargs):
try:
return f(*args, **kwargs)
except Exception as e:
logger.error(f"Database error in {f.__name__}: {str(e)}")
return create_error_response(
message="Database operation failed",
status_code=500,
error_code='DATABASE_ERROR'
)
return decorated_function
def handle_file_operations(f: Callable) -> Callable:
"""
Decorator for file operation error handling.
Args:
f: The function to decorate
Returns:
Decorated function with file operation error handling
"""
@wraps(f)
def decorated_function(*args, **kwargs):
try:
return f(*args, **kwargs)
except FileNotFoundError as e:
logger.warning(f"File not found in {f.__name__}: {str(e)}")
return create_error_response(
message="File not found",
status_code=404,
error_code='FILE_NOT_FOUND'
)
except PermissionError as e:
logger.warning(f"File permission error in {f.__name__}: {str(e)}")
return create_error_response(
message="Permission denied",
status_code=403,
error_code='PERMISSION_DENIED'
)
except OSError as e:
logger.error(f"File system error in {f.__name__}: {str(e)}")
return create_error_response(
message="File system error",
status_code=500,
error_code='FILE_SYSTEM_ERROR'
)
return decorated_function
def create_error_response(
message: str,
status_code: int = 400,
error_code: Optional[str] = None,
errors: Optional[list] = None,
data: Optional[Dict[str, Any]] = None
) -> Tuple[Dict[str, Any], int]:
"""
Create a standardized error response.
Args:
message: Error message to display
status_code: HTTP status code
error_code: Optional error code for client handling
errors: Optional list of detailed errors
data: Optional additional data
Returns:
Tuple of (response_dict, status_code)
"""
response = {
'status': 'error',
'message': message
}
if error_code:
response['error_code'] = error_code
if errors:
response['errors'] = errors
if data:
response['data'] = data
return response, status_code
def create_success_response(
data: Any = None,
message: str = "Operation successful",
status_code: int = 200
) -> Tuple[Dict[str, Any], int]:
"""
Create a standardized success response.
Args:
data: Data to include in response
message: Success message
status_code: HTTP status code
Returns:
Tuple of (response_dict, status_code)
"""
response = {
'status': 'success',
'message': message
}
if data is not None:
response['data'] = data
return response, status_code
def log_request_info():
"""Log request information for debugging."""
logger.info(f"Request: {request.method} {request.path}")
if request.is_json:
logger.debug(f"Request JSON: {request.get_json()}")
if request.args:
logger.debug(f"Request args: {dict(request.args)}")
class APIException(Exception):
"""Custom exception for API errors."""
def __init__(
self,
message: str,
status_code: int = 400,
error_code: Optional[str] = None,
errors: Optional[list] = None
):
self.message = message
self.status_code = status_code
self.error_code = error_code
self.errors = errors
super().__init__(message)
class ValidationError(APIException):
"""Exception for validation errors."""
def __init__(self, message: str, errors: Optional[list] = None):
super().__init__(
message=message,
status_code=400,
error_code='VALIDATION_ERROR',
errors=errors
)
class NotFoundError(APIException):
"""Exception for not found errors."""
def __init__(self, message: str = "Resource not found"):
super().__init__(
message=message,
status_code=404,
error_code='NOT_FOUND'
)
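# Note: the class below reuses the name of Python's built-in PermissionError;
# within this module the `except PermissionError` clauses in the decorators
# above therefore refer to this custom exception rather than the built-in one.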
class PermissionError(APIException):
"""Exception for permission errors."""
def __init__(self, message: str = "Access denied"):
super().__init__(
message=message,
status_code=403,
error_code='ACCESS_DENIED'
)
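A minimal wiring sketch (an assumption, not part of the original file): registering a Flask error handler so that raising APIException subclasses from any view produces the same standardized payload as create_error_response. The function name is hypothetical.
from flask import Flask, jsonify

def register_api_exception_handler(app: Flask) -> None:
    """Convert APIException (and its subclasses) into standardized JSON responses."""
    @app.errorhandler(APIException)
    def _handle_api_exception(exc: APIException):
        body, status_code = create_error_response(
            message=exc.message,
            status_code=exc.status_code,
            error_code=exc.error_code,
            errors=exc.errors,
        )
        return jsonify(body), status_code
After calling register_api_exception_handler(app) once at startup, a view can simply raise NotFoundError("Anime not found") and the client receives a 404 with the usual status and error_code fields.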

View File

@@ -1,406 +0,0 @@
"""
Response formatting utilities for API endpoints.
This module provides utilities for creating consistent response formats
across all controller modules.
"""
from typing import Any, Dict, List, Optional, Union, Tuple
from flask import jsonify, url_for, request
import math
def create_success_response(
data: Any = None,
message: str = "Operation successful",
status_code: int = 200,
pagination: Optional[Dict[str, Any]] = None,
meta: Optional[Dict[str, Any]] = None
) -> Tuple[Dict[str, Any], int]:
"""
Create a standardized success response.
Args:
data: Data to include in response
message: Success message
status_code: HTTP status code
pagination: Pagination information
meta: Additional metadata
Returns:
Tuple of (response_dict, status_code)
"""
response = {
'status': 'success',
'message': message
}
if data is not None:
response['data'] = data
if pagination:
response['pagination'] = pagination
if meta:
response['meta'] = meta
return response, status_code
def create_error_response(
message: str,
status_code: int = 400,
error_code: Optional[str] = None,
errors: Optional[List[str]] = None,
data: Optional[Dict[str, Any]] = None
) -> Tuple[Dict[str, Any], int]:
"""
Create a standardized error response.
Args:
message: Error message to display
status_code: HTTP status code
error_code: Optional error code for client handling
errors: Optional list of detailed errors
data: Optional additional data
Returns:
Tuple of (response_dict, status_code)
"""
response = {
'status': 'error',
'message': message
}
if error_code:
response['error_code'] = error_code
if errors:
response['errors'] = errors
if data:
response['data'] = data
return response, status_code
def create_paginated_response(
data: List[Any],
page: int,
per_page: int,
total: int,
endpoint: Optional[str] = None,
**kwargs
) -> Dict[str, Any]:
"""
Create a paginated response with navigation links.
Args:
data: List of data items for current page
page: Current page number (1-based)
per_page: Items per page
total: Total number of items
endpoint: Flask endpoint name for pagination links
**kwargs: Additional parameters for pagination links
Returns:
Dictionary containing paginated response
"""
total_pages = math.ceil(total / per_page) if per_page > 0 else 1
pagination_info = {
'page': page,
'per_page': per_page,
'total': total,
'total_pages': total_pages,
'has_next': page < total_pages,
'has_prev': page > 1
}
# Add navigation links if endpoint is provided
if endpoint:
base_url = request.url_root.rstrip('/')
# Current page
pagination_info['current_url'] = url_for(endpoint, page=page, per_page=per_page, **kwargs)
# First page
pagination_info['first_url'] = url_for(endpoint, page=1, per_page=per_page, **kwargs)
# Last page
pagination_info['last_url'] = url_for(endpoint, page=total_pages, per_page=per_page, **kwargs)
# Previous page
if pagination_info['has_prev']:
pagination_info['prev_url'] = url_for(endpoint, page=page-1, per_page=per_page, **kwargs)
# Next page
if pagination_info['has_next']:
pagination_info['next_url'] = url_for(endpoint, page=page+1, per_page=per_page, **kwargs)
return {
'status': 'success',
'data': data,
'pagination': pagination_info
}
def paginate_query_results(
items: List[Any],
page: Optional[int] = None,
per_page: Optional[int] = None,
default_per_page: int = 50,
max_per_page: int = 1000
) -> Tuple[List[Any], int, int, int]:
"""
Paginate a list of items based on query parameters.
Args:
items: List of items to paginate
page: Page number (from query params)
per_page: Items per page (from query params)
default_per_page: Default items per page
max_per_page: Maximum allowed items per page
Returns:
Tuple of (paginated_items, page, per_page, total)
"""
total = len(items)
# Parse pagination parameters
if page is None:
page = int(request.args.get('page', 1))
if per_page is None:
per_page = int(request.args.get('per_page', default_per_page))
# Validate parameters
page = max(1, page)
per_page = min(max(1, per_page), max_per_page)
# Calculate offset
offset = (page - 1) * per_page
# Slice the items
paginated_items = items[offset:offset + per_page]
return paginated_items, page, per_page, total
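# --- Illustrative usage (an assumption, not part of the original module) -----
# The two pagination helpers are typically chained inside a view: slice the
# full result list with paginate_query_results, then wrap the page with
# create_paginated_response. The blueprint, route and sample data are made up.
from flask import Blueprint

_pagination_demo_bp = Blueprint('pagination_demo', __name__)

@_pagination_demo_bp.route('/demo/numbers')
def _list_demo_numbers():
    all_items = list(range(1, 251))  # stand-in for a real query result
    items, page, per_page, total = paginate_query_results(all_items)
    return jsonify(create_paginated_response(items, page, per_page, total))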
def format_anime_response(anime_data: Dict[str, Any]) -> Dict[str, Any]:
"""
Format anime data for API response.
Args:
anime_data: Raw anime data from database
Returns:
Formatted anime data
"""
formatted = {
'id': anime_data.get('id'),
'name': anime_data.get('name'),
'url': anime_data.get('url'),
'description': anime_data.get('description'),
'episodes': anime_data.get('episodes'),
'status': anime_data.get('status', 'planned'),
'created_at': anime_data.get('created_at'),
'updated_at': anime_data.get('updated_at')
}
# Remove None values
return {k: v for k, v in formatted.items() if v is not None}
def format_episode_response(episode_data: Dict[str, Any]) -> Dict[str, Any]:
"""
Format episode data for API response.
Args:
episode_data: Raw episode data from database
Returns:
Formatted episode data
"""
formatted = {
'id': episode_data.get('id'),
'anime_id': episode_data.get('anime_id'),
'episode_number': episode_data.get('episode_number'),
'title': episode_data.get('title'),
'url': episode_data.get('url'),
'status': episode_data.get('status', 'available'),
'download_path': episode_data.get('download_path'),
'file_size': episode_data.get('file_size'),
'created_at': episode_data.get('created_at'),
'updated_at': episode_data.get('updated_at')
}
# Remove None values
return {k: v for k, v in formatted.items() if v is not None}
def format_download_response(download_data: Dict[str, Any]) -> Dict[str, Any]:
"""
Format download data for API response.
Args:
download_data: Raw download data
Returns:
Formatted download data
"""
formatted = {
'id': download_data.get('id'),
'anime_id': download_data.get('anime_id'),
'episode_id': download_data.get('episode_id'),
'status': download_data.get('status', 'pending'),
'progress': download_data.get('progress', 0),
'speed': download_data.get('speed'),
'eta': download_data.get('eta'),
'error_message': download_data.get('error_message'),
'started_at': download_data.get('started_at'),
'completed_at': download_data.get('completed_at')
}
# Remove None values
return {k: v for k, v in formatted.items() if v is not None}
def format_bulk_operation_response(operation_data: Dict[str, Any]) -> Dict[str, Any]:
"""
Format bulk operation data for API response.
Args:
operation_data: Raw bulk operation data
Returns:
Formatted bulk operation data
"""
formatted = {
'id': operation_data.get('id'),
'type': operation_data.get('type'),
'status': operation_data.get('status', 'pending'),
'total_items': operation_data.get('total_items', 0),
'completed_items': operation_data.get('completed_items', 0),
'failed_items': operation_data.get('failed_items', 0),
'progress_percentage': operation_data.get('progress_percentage', 0),
'started_at': operation_data.get('started_at'),
'completed_at': operation_data.get('completed_at'),
'error_message': operation_data.get('error_message')
}
# Remove None values
return {k: v for k, v in formatted.items() if v is not None}
def format_health_response(health_data: Dict[str, Any]) -> Dict[str, Any]:
"""
Format health check data for API response.
Args:
health_data: Raw health check data
Returns:
Formatted health data
"""
formatted = {
'status': health_data.get('status', 'unknown'),
'uptime': health_data.get('uptime'),
'version': health_data.get('version'),
'components': health_data.get('components', {}),
'timestamp': health_data.get('timestamp')
}
# Remove None values
return {k: v for k, v in formatted.items() if v is not None}
def add_resource_links(data: Dict[str, Any], resource_type: str, resource_id: Any) -> Dict[str, Any]:
"""
Add HATEOAS-style links to a resource response.
Args:
data: Resource data
resource_type: Type of resource (anime, episode, etc.)
resource_id: Resource identifier
Returns:
Data with added links
"""
if '_links' not in data:
data['_links'] = {}
# Self link
data['_links']['self'] = url_for(f'api.get_{resource_type}', id=resource_id)
# Collection link
data['_links']['collection'] = url_for(f'api.list_{resource_type}s')
return data
def create_batch_response(
successful_items: List[Dict[str, Any]],
failed_items: List[Dict[str, Any]],
message: Optional[str] = None
) -> Dict[str, Any]:
"""
Create response for batch operations.
Args:
successful_items: List of successfully processed items
failed_items: List of failed items with errors
message: Optional message
Returns:
Batch operation response
"""
total_items = len(successful_items) + len(failed_items)
success_count = len(successful_items)
failure_count = len(failed_items)
response = {
'status': 'success' if failure_count == 0 else 'partial_success',
'message': message or f"Processed {success_count}/{total_items} items successfully",
'summary': {
'total': total_items,
'successful': success_count,
'failed': failure_count
},
'data': {
'successful': successful_items,
'failed': failed_items
}
}
return response
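# --- Illustrative usage (an assumption, not part of the original module) -----
# A typical caller splits its work into per-item outcome dicts before handing
# them to create_batch_response; `process_item` is passed in here because the
# real worker function is application-specific.
def _demo_batch_payload(requested_items, process_item):
    successful, failed = [], []
    for item in requested_items:
        try:
            result = process_item(item)
            successful.append({'id': item.get('id'), 'result': result})
        except Exception as exc:
            failed.append({'id': item.get('id'), 'error': str(exc)})
    return create_batch_response(successful, failed)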
def extract_pagination_params(
default_page: int = 1,
default_per_page: int = 50,
max_per_page: int = 1000
) -> Tuple[int, int]:
"""
Extract and validate pagination parameters from request.
Args:
default_page: Default page number
default_per_page: Default items per page
max_per_page: Maximum allowed items per page
Returns:
Tuple of (page, per_page)
"""
try:
page = int(request.args.get('page', default_page))
page = max(1, page)
except (ValueError, TypeError):
page = default_page
try:
per_page = int(request.args.get('per_page', default_per_page))
per_page = min(max(1, per_page), max_per_page)
except (ValueError, TypeError):
per_page = default_per_page
return page, per_page
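A hedged end-to-end sketch (not part of the original file) showing how the formatting and pagination helpers above are meant to combine inside a list endpoint; the blueprint, route and in-memory sample rows are assumptions.
from flask import Blueprint, jsonify

formatting_demo_bp = Blueprint('formatting_demo', __name__)

@formatting_demo_bp.route('/demo/anime')
def demo_list_anime():
    # Stand-in data; a real endpoint would query the database instead.
    rows = [{'id': i, 'name': f'Series {i}', 'status': 'planned'} for i in range(1, 101)]
    page, per_page = extract_pagination_params()
    start = (page - 1) * per_page
    page_rows = [format_anime_response(row) for row in rows[start:start + per_page]]
    return jsonify(create_paginated_response(page_rows, page, per_page, len(rows)))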

View File

@@ -1,446 +0,0 @@
"""
Input validation utilities for API endpoints.
This module provides validation functions and decorators for consistent
input validation across all controller modules.
"""
import re
import os
from typing import Any, Dict, List, Optional, Union, Callable, Tuple
from functools import wraps
from flask import request, jsonify
from .error_handlers import ValidationError, create_error_response
def validate_json_input(required_fields: Optional[List[str]] = None,
optional_fields: Optional[List[str]] = None,
field_types: Optional[Dict[str, type]] = None) -> Callable:
"""
Decorator to validate JSON input for API endpoints.
Args:
required_fields: List of required field names
optional_fields: List of optional field names
field_types: Dictionary mapping field names to expected types
Returns:
Decorator function
Usage:
@validate_json_input(
required_fields=['name', 'url'],
optional_fields=['description'],
field_types={'name': str, 'url': str, 'episodes': int}
)
def create_anime():
data = request.get_json()
# data is now validated
"""
def decorator(f: Callable) -> Callable:
@wraps(f)
def decorated_function(*args, **kwargs):
if not request.is_json:
return create_error_response(
message="Request must be JSON",
status_code=400,
error_code='INVALID_CONTENT_TYPE'
)
try:
data = request.get_json()
except Exception:
return create_error_response(
message="Invalid JSON format",
status_code=400,
error_code='INVALID_JSON'
)
if data is None:
return create_error_response(
message="Request body cannot be empty",
status_code=400,
error_code='EMPTY_BODY'
)
# Validate required fields
if required_fields:
missing_fields = []
for field in required_fields:
if field not in data or data[field] is None:
missing_fields.append(field)
if missing_fields:
return create_error_response(
message=f"Missing required fields: {', '.join(missing_fields)}",
status_code=400,
error_code='MISSING_FIELDS',
errors=missing_fields
)
# Validate field types
if field_types:
type_errors = []
for field, expected_type in field_types.items():
if field in data and data[field] is not None:
if not isinstance(data[field], expected_type):
type_errors.append(f"{field} must be of type {expected_type.__name__}")
if type_errors:
return create_error_response(
message="Type validation failed",
status_code=400,
error_code='TYPE_ERROR',
errors=type_errors
)
# Check for unexpected fields
all_allowed = (required_fields or []) + (optional_fields or [])
if all_allowed:
unexpected_fields = [field for field in data.keys() if field not in all_allowed]
if unexpected_fields:
return create_error_response(
message=f"Unexpected fields: {', '.join(unexpected_fields)}",
status_code=400,
error_code='UNEXPECTED_FIELDS',
errors=unexpected_fields
)
return f(*args, **kwargs)
return decorated_function
return decorator
def validate_query_params(allowed_params: Optional[List[str]] = None,
required_params: Optional[List[str]] = None,
param_types: Optional[Dict[str, type]] = None) -> Callable:
"""
Decorator to validate query parameters.
Args:
allowed_params: List of allowed parameter names
required_params: List of required parameter names
param_types: Dictionary mapping parameter names to expected types
Returns:
Decorator function
"""
def decorator(f: Callable) -> Callable:
@wraps(f)
def decorated_function(*args, **kwargs):
# Check required parameters
if required_params:
missing_params = []
for param in required_params:
if param not in request.args:
missing_params.append(param)
if missing_params:
return create_error_response(
message=f"Missing required parameters: {', '.join(missing_params)}",
status_code=400,
error_code='MISSING_PARAMS'
)
# Check allowed parameters
if allowed_params:
unexpected_params = [param for param in request.args.keys() if param not in allowed_params]
if unexpected_params:
return create_error_response(
message=f"Unexpected parameters: {', '.join(unexpected_params)}",
status_code=400,
error_code='UNEXPECTED_PARAMS'
)
# Validate parameter types
if param_types:
type_errors = []
for param, expected_type in param_types.items():
if param in request.args:
value = request.args.get(param)
try:
if expected_type == int:
int(value)
elif expected_type == float:
float(value)
elif expected_type == bool:
if value.lower() not in ['true', 'false', '1', '0']:
raise ValueError()
except ValueError:
type_errors.append(f"{param} must be of type {expected_type.__name__}")
if type_errors:
return create_error_response(
message="Parameter type validation failed",
status_code=400,
error_code='PARAM_TYPE_ERROR',
errors=type_errors
)
return f(*args, **kwargs)
return decorated_function
return decorator
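# --- Illustrative usage (an assumption, not part of the original module) -----
# Typical decoration of a list endpoint: unknown query parameters are rejected
# with UNEXPECTED_PARAMS and non-integer page values with PARAM_TYPE_ERROR.
# The blueprint and route are hypothetical.
from flask import Blueprint

_query_demo_bp = Blueprint('query_param_demo', __name__)

@_query_demo_bp.route('/demo/anime')
@validate_query_params(
    allowed_params=['page', 'per_page', 'status', 'search'],
    param_types={'page': int, 'per_page': int},
)
def _demo_filtered_list():
    # Reaching this body means the query string passed validation.
    return jsonify({'status': 'success', 'filters': request.args.to_dict()})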
def validate_pagination_params(f: Callable) -> Callable:
"""
Decorator to validate pagination parameters (page, per_page, limit, offset).
Args:
f: The function to decorate
Returns:
Decorated function with pagination validation
"""
@wraps(f)
def decorated_function(*args, **kwargs):
errors = []
# Validate page parameter
page = request.args.get('page')
if page is not None:
try:
page_int = int(page)
if page_int < 1:
errors.append("page must be greater than 0")
except ValueError:
errors.append("page must be an integer")
# Validate per_page parameter
per_page = request.args.get('per_page')
if per_page is not None:
try:
per_page_int = int(per_page)
if per_page_int < 1:
errors.append("per_page must be greater than 0")
elif per_page_int > 1000:
errors.append("per_page cannot exceed 1000")
except ValueError:
errors.append("per_page must be an integer")
# Validate limit parameter
limit = request.args.get('limit')
if limit is not None:
try:
limit_int = int(limit)
if limit_int < 1:
errors.append("limit must be greater than 0")
elif limit_int > 1000:
errors.append("limit cannot exceed 1000")
except ValueError:
errors.append("limit must be an integer")
# Validate offset parameter
offset = request.args.get('offset')
if offset is not None:
try:
offset_int = int(offset)
if offset_int < 0:
errors.append("offset must be greater than or equal to 0")
except ValueError:
errors.append("offset must be an integer")
if errors:
return create_error_response(
message="Pagination parameter validation failed",
status_code=400,
error_code='PAGINATION_ERROR',
errors=errors
)
return f(*args, **kwargs)
return decorated_function
def validate_anime_data(data: Dict[str, Any]) -> List[str]:
"""
Validate anime data structure.
Args:
data: Dictionary containing anime data
Returns:
List of validation errors (empty if valid)
"""
errors = []
# Required fields
required_fields = ['name', 'url']
for field in required_fields:
if field not in data or not data[field]:
errors.append(f"Missing required field: {field}")
# Validate name
if 'name' in data:
name = data['name']
if not isinstance(name, str):
errors.append("name must be a string")
elif len(name.strip()) == 0:
errors.append("name cannot be empty")
elif len(name) > 500:
errors.append("name cannot exceed 500 characters")
# Validate URL
if 'url' in data:
url = data['url']
if not isinstance(url, str):
errors.append("url must be a string")
elif not is_valid_url(url):
errors.append("url must be a valid URL")
# Validate optional fields
if 'description' in data and data['description'] is not None:
if not isinstance(data['description'], str):
errors.append("description must be a string")
elif len(data['description']) > 2000:
errors.append("description cannot exceed 2000 characters")
if 'episodes' in data and data['episodes'] is not None:
if not isinstance(data['episodes'], int):
errors.append("episodes must be an integer")
elif data['episodes'] < 0:
errors.append("episodes must be non-negative")
if 'status' in data and data['status'] is not None:
valid_statuses = ['ongoing', 'completed', 'planned', 'dropped', 'paused']
if data['status'] not in valid_statuses:
errors.append(f"status must be one of: {', '.join(valid_statuses)}")
return errors
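# --- Illustrative usage (an assumption, not part of the original module) -----
# validate_anime_data returns a plain error list, so the caller decides how to
# report failures; a typical create handler turns a non-empty list into a 400.
def _demo_validate_anime_payload(payload):
    errors = validate_anime_data(payload)
    if errors:
        # (dict, status_code) tuple ready to be jsonify-ed by the caller
        return create_error_response(
            message="Anime validation failed",
            status_code=400,
            error_code='VALIDATION_ERROR',
            errors=errors,
        )
    return None  # caller may proceed with creating the record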
def validate_file_upload(file, allowed_extensions: Optional[List[str]] = None,
max_size_mb: Optional[int] = None) -> List[str]:
"""
Validate file upload.
Args:
file: Uploaded file object
allowed_extensions: List of allowed file extensions
max_size_mb: Maximum file size in MB
Returns:
List of validation errors (empty if valid)
"""
errors = []
if not file:
errors.append("No file provided")
return errors
if file.filename == '':
errors.append("No file selected")
return errors
# Check file extension
if allowed_extensions:
file_ext = os.path.splitext(file.filename)[1].lower()
if file_ext not in [f".{ext.lower()}" for ext in allowed_extensions]:
errors.append(f"File type not allowed. Allowed: {', '.join(allowed_extensions)}")
# Check file size (if we can determine it)
if max_size_mb and hasattr(file, 'content_length') and file.content_length:
max_size_bytes = max_size_mb * 1024 * 1024
if file.content_length > max_size_bytes:
errors.append(f"File size exceeds maximum of {max_size_mb}MB")
return errors
def is_valid_url(url: str) -> bool:
"""
Check if a string is a valid URL.
Args:
url: URL string to validate
Returns:
True if valid URL, False otherwise
"""
url_pattern = re.compile(
r'^https?://' # http:// or https://
r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+[A-Z]{2,6}\.?|' # domain...
r'localhost|' # localhost...
r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})' # ...or ip
r'(?::\d+)?' # optional port
r'(?:/?|[/?]\S+)$', re.IGNORECASE)
return url_pattern.match(url) is not None
def is_valid_email(email: str) -> bool:
"""
Check if a string is a valid email address.
Args:
email: Email string to validate
Returns:
True if valid email, False otherwise
"""
email_pattern = re.compile(
r'^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$'
)
return email_pattern.match(email) is not None
def sanitize_string(value: str, max_length: Optional[int] = None) -> str:
"""
Sanitize string input by removing dangerous characters.
Args:
value: String to sanitize
max_length: Maximum allowed length
Returns:
Sanitized string
"""
if not isinstance(value, str):
return str(value)
# Remove null bytes and control characters
sanitized = ''.join(char for char in value if ord(char) >= 32 or char in '\t\n\r')
# Trim whitespace
sanitized = sanitized.strip()
# Truncate if necessary
if max_length and len(sanitized) > max_length:
sanitized = sanitized[:max_length]
return sanitized
def validate_id_parameter(param_name: str = 'id') -> Callable:
"""
Decorator to validate ID parameters in URLs.
Args:
param_name: Name of the ID parameter
Returns:
Decorator function
"""
def decorator(f: Callable) -> Callable:
@wraps(f)
def decorated_function(*args, **kwargs):
if param_name in kwargs:
try:
id_value = int(kwargs[param_name])
if id_value <= 0:
return create_error_response(
message=f"{param_name} must be a positive integer",
status_code=400,
error_code='INVALID_ID'
)
kwargs[param_name] = id_value
except ValueError:
return create_error_response(
message=f"{param_name} must be an integer",
status_code=400,
error_code='INVALID_ID'
)
return f(*args, **kwargs)
return decorated_function
return decorator
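A hedged sketch (not part of the original file) of how the JSON-body decorator and the field-level validator above are meant to stack on a write endpoint; the blueprint and route are assumptions for illustration.
from flask import Blueprint, jsonify, request

validation_demo_bp = Blueprint('validation_demo', __name__)

@validation_demo_bp.route('/demo/anime', methods=['POST'])
@validate_json_input(
    required_fields=['name', 'url'],
    optional_fields=['description', 'episodes', 'status'],
    field_types={'name': str, 'url': str, 'episodes': int},
)
def demo_create_anime():
    data = request.get_json()
    errors = validate_anime_data(data)
    if errors:
        body, status_code = create_error_response(
            message="Anime validation failed",
            status_code=400,
            error_code='VALIDATION_ERROR',
            errors=errors,
        )
        return jsonify(body), status_code
    # A real endpoint would persist the validated payload here.
    return jsonify({'status': 'success', 'data': data}), 201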

File diff suppressed because it is too large

File diff suppressed because it is too large

View File

@@ -1,236 +1,236 @@
/**
* Localization support for AniWorld Manager
* Implements resource-based text management for easy translation
*/
class Localization {
constructor() {
this.currentLanguage = 'en';
this.fallbackLanguage = 'en';
this.translations = {};
this.loadTranslations();
}
loadTranslations() {
// English (default)
this.translations.en = {
// Header
'config-title': 'Configuration',
'toggle-theme': 'Toggle theme',
'rescan': 'Rescan',
// Search
'search-placeholder': 'Search for anime...',
'search-results': 'Search Results',
'no-results': 'No results found',
'add': 'Add',
// Series
'series-collection': 'Series Collection',
'select-all': 'Select All',
'deselect-all': 'Deselect All',
'download-selected': 'Download Selected',
'missing-episodes': 'missing episodes',
// Configuration
'anime-directory': 'Anime Directory',
'series-count': 'Series Count',
'connection-status': 'Connection Status',
'connected': 'Connected',
'disconnected': 'Disconnected',
// Download controls
'pause': 'Pause',
'resume': 'Resume',
'cancel': 'Cancel',
'downloading': 'Downloading',
'paused': 'Paused',
// Download queue
'download-queue': 'Download Queue',
'currently-downloading': 'Currently Downloading',
'queued-series': 'Queued Series',
// Status messages
'connected-server': 'Connected to server',
'disconnected-server': 'Disconnected from server',
'scan-started': 'Scan started',
'scan-completed': 'Scan completed successfully',
'download-started': 'Download started',
'download-completed': 'Download completed successfully',
'series-added': 'Series added successfully',
// Error messages
'search-failed': 'Search failed',
'download-failed': 'Download failed',
'scan-failed': 'Scan failed',
'connection-failed': 'Connection failed',
// General
'loading': 'Loading...',
'close': 'Close',
'ok': 'OK',
'cancel-action': 'Cancel'
};
// German
this.translations.de = {
// Header
'config-title': 'Konfiguration',
'toggle-theme': 'Design wechseln',
'rescan': 'Neu scannen',
// Search
'search-placeholder': 'Nach Anime suchen...',
'search-results': 'Suchergebnisse',
'no-results': 'Keine Ergebnisse gefunden',
'add': 'Hinzufügen',
// Series
'series-collection': 'Serien-Sammlung',
'select-all': 'Alle auswählen',
'deselect-all': 'Alle abwählen',
'download-selected': 'Ausgewählte herunterladen',
'missing-episodes': 'fehlende Episoden',
// Configuration
'anime-directory': 'Anime-Verzeichnis',
'series-count': 'Anzahl Serien',
'connection-status': 'Verbindungsstatus',
'connected': 'Verbunden',
'disconnected': 'Getrennt',
// Download controls
'pause': 'Pausieren',
'resume': 'Fortsetzen',
'cancel': 'Abbrechen',
'downloading': 'Herunterladen',
'paused': 'Pausiert',
// Download queue
'download-queue': 'Download-Warteschlange',
'currently-downloading': 'Wird heruntergeladen',
'queued-series': 'Warteschlange',
// Status messages
'connected-server': 'Mit Server verbunden',
'disconnected-server': 'Verbindung zum Server getrennt',
'scan-started': 'Scan gestartet',
'scan-completed': 'Scan erfolgreich abgeschlossen',
'download-started': 'Download gestartet',
'download-completed': 'Download erfolgreich abgeschlossen',
'series-added': 'Serie erfolgreich hinzugefügt',
// Error messages
'search-failed': 'Suche fehlgeschlagen',
'download-failed': 'Download fehlgeschlagen',
'scan-failed': 'Scan fehlgeschlagen',
'connection-failed': 'Verbindung fehlgeschlagen',
// General
'loading': 'Wird geladen...',
'close': 'Schließen',
'ok': 'OK',
'cancel-action': 'Abbrechen'
};
// Load saved language preference
const savedLanguage = localStorage.getItem('language') || this.detectLanguage();
this.setLanguage(savedLanguage);
}
detectLanguage() {
const browserLang = navigator.language || navigator.userLanguage;
const langCode = browserLang.split('-')[0];
return this.translations[langCode] ? langCode : this.fallbackLanguage;
}
setLanguage(langCode) {
if (this.translations[langCode]) {
this.currentLanguage = langCode;
localStorage.setItem('language', langCode);
this.updatePageTexts();
}
}
getText(key, fallback = key) {
const translation = this.translations[this.currentLanguage];
if (translation && translation[key]) {
return translation[key];
}
// Try fallback language
const fallbackTranslation = this.translations[this.fallbackLanguage];
if (fallbackTranslation && fallbackTranslation[key]) {
return fallbackTranslation[key];
}
return fallback;
}
updatePageTexts() {
// Update all elements with data-text attributes
document.querySelectorAll('[data-text]').forEach(element => {
const key = element.getAttribute('data-text');
const text = this.getText(key);
if (element.tagName === 'INPUT' && element.type === 'text') {
element.placeholder = text;
} else {
element.textContent = text;
}
});
// Update specific elements that need special handling
this.updateSearchPlaceholder();
this.updateDynamicTexts();
}
updateSearchPlaceholder() {
const searchInput = document.getElementById('search-input');
if (searchInput) {
searchInput.placeholder = this.getText('search-placeholder');
}
}
updateDynamicTexts() {
// Update any dynamically generated content
const selectAllBtn = document.getElementById('select-all');
if (selectAllBtn && window.app) {
const selectedCount = window.app.selectedSeries ? window.app.selectedSeries.size : 0;
const totalCount = window.app.seriesData ? window.app.seriesData.length : 0;
if (selectedCount === totalCount && totalCount > 0) {
selectAllBtn.innerHTML = `<i class="fas fa-times"></i><span>${this.getText('deselect-all')}</span>`;
} else {
selectAllBtn.innerHTML = `<i class="fas fa-check-double"></i><span>${this.getText('select-all')}</span>`;
}
}
}
getAvailableLanguages() {
return Object.keys(this.translations).map(code => ({
code: code,
name: this.getLanguageName(code)
}));
}
getLanguageName(code) {
const names = {
'en': 'English',
'de': 'Deutsch'
};
return names[code] || code.toUpperCase();
}
formatMessage(key, ...args) {
let message = this.getText(key);
args.forEach((arg, index) => {
message = message.replace(`{${index}}`, arg);
});
return message;
}
}
// Export for use in other modules
window.Localization = Localization;

File diff suppressed because it is too large Load Diff

View File

@@ -1,480 +1,480 @@
<!DOCTYPE html>
<html lang="en" data-theme="light">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>AniWorld Manager</title>
<link rel="stylesheet" href="/static/css/styles.css">
<link href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.0.0/css/all.min.css" rel="stylesheet">
<!-- UX Enhancement and Mobile & Accessibility CSS -->
<link rel="stylesheet" href="/static/css/ux_features.css">
</head>
<body>
<div class="app-container">
<!-- Header -->
<header class="header">
<div class="header-content">
<div class="header-title">
<i class="fas fa-play-circle"></i>
<h1>AniWorld Manager</h1>
</div>
<div class="header-actions">
<!-- Process Status Indicators -->
<div class="process-status" id="process-status">
<div class="status-indicator" id="rescan-status" title="Scan is idle">
<i class="fas fa-sync-alt"></i>
<div class="status-dot idle"></div>
</div>
<div class="status-indicator" id="download-status" title="Download is idle">
<i class="fas fa-download"></i>
<div class="status-dot idle"></div>
</div>
</div>
<a href="/queue" class="btn btn-secondary" title="Download Queue">
<i class="fas fa-list-alt"></i>
<span data-text="queue">Queue</span>
</a>
<button id="logout-btn" class="btn btn-secondary" title="Logout" style="display: none;">
<i class="fas fa-sign-out-alt"></i>
<span data-text="logout">Logout</span>
</button>
<button id="config-btn" class="btn btn-secondary" title="Show configuration">
<i class="fas fa-cog"></i>
<span data-text="config-title">Config</span>
</button>
<button id="theme-toggle" class="btn btn-icon" title="Toggle theme" data-title="toggle-theme">
<i class="fas fa-moon"></i>
</button>
<button id="rescan-btn" class="btn btn-primary">
<i class="fas fa-sync-alt"></i>
<span data-text="rescan">Rescan</span>
</button>
</div>
</div>
</header>
<!-- Main content -->
<main class="main-content">
<!-- Search section -->
<section class="search-section">
<div class="search-container">
<div class="search-input-group">
<input type="text" id="search-input" data-text="search-placeholder"
placeholder="Search for anime..." class="search-input">
<button id="search-btn" class="btn btn-primary">
<i class="fas fa-search"></i>
</button>
<button id="clear-search" class="btn btn-secondary">
<i class="fas fa-times"></i>
</button>
</div>
</div>
<!-- Search results -->
<div id="search-results" class="search-results hidden">
<h3>Search Results</h3>
<div id="search-results-list" class="search-results-list"></div>
</div>
</section>
<!-- Download Queue Section -->
<section id="download-queue-section" class="download-queue-section hidden">
<div class="queue-header">
<h2>
<i class="fas fa-download"></i>
<span data-text="download-queue">Download Queue</span>
</h2>
<div class="queue-stats">
<span id="queue-progress" class="queue-progress">0/0 series</span>
</div>
</div>
<!-- Current Download -->
<div id="current-download" class="current-download hidden">
<div class="current-download-header">
<h3 data-text="currently-downloading">Currently Downloading</h3>
</div>
<div class="current-download-item">
<div class="download-info">
<div id="current-serie-name" class="serie-name">-</div>
<div id="current-episode" class="episode-info">-</div>
</div>
<div class="download-progress">
<div class="progress-bar-mini">
<div id="current-progress-fill" class="progress-fill-mini"></div>
</div>
<div id="current-progress-text" class="progress-text-mini">0%</div>
</div>
</div>
</div>
<!-- Queue List -->
<div id="queue-list-container" class="queue-list-container">
<h3 data-text="queued-series">Queued Series</h3>
<div id="queue-list" class="queue-list">
<!-- Queue items will be populated here -->
</div>
</div>
</section>
<!-- Series management section -->
<section class="series-section">
<div class="series-header">
<h2 data-text="series-collection">Series Collection</h2>
<div class="series-filters">
<button id="show-missing-only" class="btn btn-secondary" data-active="false">
<i class="fas fa-filter"></i>
<span data-text="show-missing-only">Missing Episodes Only</span>
</button>
<button id="sort-alphabetical" class="btn btn-secondary" data-active="false">
<i class="fas fa-sort-alpha-down"></i>
<span data-text="sort-alphabetical">A-Z Sort</span>
</button>
</div>
<div class="series-actions">
<button id="select-all" class="btn btn-secondary">
<i class="fas fa-check-double"></i>
<span data-text="select-all">Select All</span>
</button>
<button id="download-selected" class="btn btn-success" disabled>
<i class="fas fa-download"></i>
<span data-text="download-selected">Download Selected</span>
</button>
</div>
</div>
<!-- Series grid -->
<div id="series-grid" class="series-grid">
<!-- Series cards will be populated here -->
</div>
</section>
</main>
<!-- Status panel -->
<div id="status-panel" class="status-panel hidden">
<div class="status-header">
<h3 id="status-title">Status</h3>
<button id="close-status" class="btn btn-icon">
<i class="fas fa-times"></i>
</button>
</div>
<div class="status-content">
<div id="status-message" class="status-message"></div>
<div id="progress-container" class="progress-container hidden">
<div class="progress-bar">
<div id="progress-fill" class="progress-fill"></div>
</div>
<div id="progress-text" class="progress-text">0%</div>
</div>
<div id="download-controls" class="download-controls hidden">
<button id="pause-download" class="btn btn-secondary btn-small">
<i class="fas fa-pause"></i>
<span data-text="pause">Pause</span>
</button>
<button id="resume-download" class="btn btn-primary btn-small hidden">
<i class="fas fa-play"></i>
<span data-text="resume">Resume</span>
</button>
<button id="cancel-download" class="btn btn-small"
style="background-color: var(--color-error); color: white;">
<i class="fas fa-stop"></i>
<span data-text="cancel">Cancel</span>
</button>
</div>
</div>
</div>
<!-- Configuration Modal -->
<div id="config-modal" class="modal hidden">
<div class="modal-overlay"></div>
<div class="modal-content">
<div class="modal-header">
<h3 data-text="config-title">Configuration</h3>
<button id="close-config" class="btn btn-icon">
<i class="fas fa-times"></i>
</button>
</div>
<div class="modal-body">
<div class="config-item">
<label for="anime-directory-input" data-text="anime-directory">Anime Directory:</label>
<div class="input-group">
<input type="text" id="anime-directory-input" class="input-field"
placeholder="Enter anime directory path...">
<button id="browse-directory" class="btn btn-secondary">
<i class="fas fa-folder"></i>
</button>
</div>
</div>
<div class="config-item">
<label for="series-count-input" data-text="series-count">Series Count:</label>
<input type="number" id="series-count-input" class="input-field" readonly
title="This value is automatically calculated">
</div>
<div class="config-item">
<label data-text="connection-status">Connection Status:</label>
<div id="connection-status-display" class="config-value">
<span class="status-indicator"></span>
<span class="status-text">Disconnected</span>
</div>
<button id="test-connection" class="btn btn-secondary">
<i class="fas fa-network-wired"></i>
<span data-text="test-connection">Test Connection</span>
</button>
</div>
<!-- Main Configuration Actions -->
<div class="config-actions">
<button id="save-main-config" class="btn btn-primary">
<i class="fas fa-save"></i>
<span data-text="save-main-config">Save Configuration</span>
</button>
<button id="reset-main-config" class="btn btn-secondary">
<i class="fas fa-undo"></i>
<span data-text="reset-main-config">Reset</span>
</button>
</div>
<!-- Scheduler Configuration -->
<div class="config-section">
<h4 data-text="scheduler-config">Scheduled Operations</h4>
<div class="config-item">
<label class="checkbox-label">
<input type="checkbox" id="scheduled-rescan-enabled">
<span class="checkbox-custom"></span>
<span data-text="enable-scheduled-rescan">Enable Daily Rescan</span>
</label>
</div>
<div class="config-item" id="rescan-time-config">
<label for="scheduled-rescan-time" data-text="rescan-time">Rescan Time (24h format):</label>
<input type="time" id="scheduled-rescan-time" value="03:00" class="input-field">
</div>
<div class="config-item">
<label class="checkbox-label">
<input type="checkbox" id="auto-download-after-rescan">
<span class="checkbox-custom"></span>
<span data-text="auto-download-after-rescan">Auto-download missing episodes after
rescan</span>
</label>
</div>
<div class="config-item scheduler-status" id="scheduler-status">
<div class="scheduler-info">
<div class="info-row">
<span data-text="next-rescan">Next Scheduled Rescan:</span>
<span id="next-rescan-time" class="info-value">-</span>
</div>
<div class="info-row">
<span data-text="last-rescan">Last Scheduled Rescan:</span>
<span id="last-rescan-time" class="info-value">-</span>
</div>
<div class="info-row">
<span data-text="scheduler-running">Scheduler Status:</span>
<span id="scheduler-running-status" class="info-value status-badge">Stopped</span>
</div>
</div>
</div>
<div class="config-actions">
<button id="save-scheduler-config" class="btn btn-primary">
<i class="fas fa-save"></i>
<span data-text="save-config">Save Configuration</span>
</button>
<button id="test-scheduled-rescan" class="btn btn-secondary">
<i class="fas fa-play"></i>
<span data-text="test-rescan">Test Rescan Now</span>
</button>
</div>
</div>
<!-- Logging Configuration -->
<div class="config-section">
<h4 data-text="logging-config">Logging Configuration</h4>
<div class="config-item">
<label for="log-level" data-text="log-level">Log Level:</label>
<select id="log-level" class="input-field">
<option value="DEBUG">DEBUG</option>
<option value="INFO">INFO</option>
<option value="WARNING">WARNING</option>
<option value="ERROR">ERROR</option>
<option value="CRITICAL">CRITICAL</option>
</select>
</div>
<div class="config-item">
<div class="checkbox-container">
<input type="checkbox" id="enable-console-logging">
<label for="enable-console-logging">
<span data-text="enable-console-logging">Enable Console Logging</span>
</label>
</div>
</div>
<div class="config-item">
<div class="checkbox-container">
<input type="checkbox" id="enable-console-progress">
<label for="enable-console-progress">
<span data-text="enable-console-progress">Show Progress Bars in Console</span>
</label>
</div>
</div>
<div class="config-item">
<div class="checkbox-container">
<input type="checkbox" id="enable-fail2ban-logging">
<label for="enable-fail2ban-logging">
<span data-text="enable-fail2ban-logging">Enable Fail2Ban Logging</span>
</label>
</div>
</div>
<div class="config-item">
<h5 data-text="log-files">Log Files</h5>
<div id="log-files-list" class="log-files-container">
<!-- Log files will be populated here -->
</div>
</div>
<div class="config-actions">
<button id="save-logging-config" class="btn btn-primary">
<i class="fas fa-save"></i>
<span data-text="save-logging-config">Save Logging Config</span>
</button>
<button id="test-logging" class="btn btn-secondary">
<i class="fas fa-bug"></i>
<span data-text="test-logging">Test Logging</span>
</button>
<button id="refresh-log-files" class="btn btn-secondary">
<i class="fas fa-sync-alt"></i>
<span data-text="refresh-logs">Refresh Log Files</span>
</button>
<button id="cleanup-logs" class="btn btn-warning">
<i class="fas fa-trash"></i>
<span data-text="cleanup-logs">Cleanup Old Logs</span>
</button>
</div>
</div>
<!-- Configuration Management -->
<div class="config-section">
<h4 data-text="config-management">Configuration Management</h4>
<div class="config-item">
<h5 data-text="config-backup-restore">Backup & Restore</h5>
<p class="config-description" data-text="backup-description">
Create backups of your configuration or restore from previous backups.
</p>
<div class="config-actions">
<button id="create-config-backup" class="btn btn-secondary">
<i class="fas fa-save"></i>
<span data-text="create-backup">Create Backup</span>
</button>
<button id="view-config-backups" class="btn btn-secondary">
<i class="fas fa-history"></i>
<span data-text="view-backups">View Backups</span>
</button>
<button id="export-config" class="btn btn-secondary">
<i class="fas fa-download"></i>
<span data-text="export-config">Export Config</span>
</button>
</div>
</div>
<div class="config-item">
<h5 data-text="config-validation">Configuration Validation</h5>
<p class="config-description" data-text="validation-description">
Validate your current configuration for errors and warnings.
</p>
<div id="validation-results" class="validation-results hidden">
<!-- Validation results will be displayed here -->
</div>
<div class="config-actions">
<button id="validate-config" class="btn btn-primary">
<i class="fas fa-check"></i>
<span data-text="validate-config">Validate Configuration</span>
</button>
<button id="reset-config" class="btn btn-warning">
<i class="fas fa-undo"></i>
<span data-text="reset-config">Reset to Defaults</span>
</button>
</div>
</div>
<div class="config-item">
<h5 data-text="advanced-config">Advanced Settings</h5>
<label for="max-concurrent-downloads" data-text="max-downloads">Max Concurrent
Downloads:</label>
<input type="number" id="max-concurrent-downloads" min="1" max="20" value="3"
class="input-field">
<label for="provider-timeout" data-text="provider-timeout">Provider Timeout
(seconds):</label>
<input type="number" id="provider-timeout" min="5" max="300" value="30" class="input-field">
<div class="checkbox-container">
<input type="checkbox" id="enable-debug-mode">
<label for="enable-debug-mode">
<span data-text="enable-debug">Enable Debug Mode</span>
</label>
</div>
<div class="config-actions">
<button id="save-advanced-config" class="btn btn-primary">
<i class="fas fa-save"></i>
<span data-text="save-advanced">Save Advanced Settings</span>
</button>
</div>
</div>
</div>
</div>
</div>
</div>
<!-- Toast notifications -->
<div id="toast-container" class="toast-container"></div>
</div>
<!-- Loading overlay -->
<div id="loading-overlay" class="loading-overlay hidden">
<div class="loading-spinner">
<i class="fas fa-spinner fa-spin"></i>
<p>Loading...</p>
</div>
</div>
<!-- Scripts -->
<script src="https://cdnjs.cloudflare.com/ajax/libs/socket.io/4.0.1/socket.io.js"></script>
<script src="/static/js/localization.js"></script>
<!-- UX Enhancement Scripts -->
<script src="/static/js/keyboard_shortcuts.js"></script>
<script src="/static/js/drag_drop.js"></script>
<script src="/static/js/bulk_operations.js"></script>
<script src="/static/js/user_preferences.js"></script>
<script src="/static/js/advanced_search.js"></script>
<script src="/static/js/undo_redo.js"></script>
<!-- Mobile & Accessibility Scripts -->
<script src="/static/js/mobile_responsive.js"></script>
<script src="/static/js/touch_gestures.js"></script>
<script src="/static/js/accessibility_features.js"></script>
<script src="/static/js/screen_reader_support.js"></script>
<script src="/static/js/color_contrast_compliance.js"></script>
<script src="/static/js/multi_screen_support.js"></script>
<script src="/static/js/app.js"></script>
</body>
</html>
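Note on the template above: every translatable string is tagged with a data-text attribute (e.g. data-text="rescan"), and /static/js/localization.js is loaded before the other scripts. That loader is not part of this diff; the following is only a minimal sketch of how such a helper could resolve those attributes against a translations map. The map contents, the aniworld-language storage key, and the applyTranslations name are assumptions, not the shipped implementation.

// Minimal sketch (assumed, not the actual localization.js):
// swap the text of every [data-text] element for a translated string.
const translations = {
    en: { 'rescan': 'Rescan', 'download-queue': 'Download Queue' },
    de: { 'rescan': 'Neu scannen', 'download-queue': 'Download-Warteschlange' },
};

function applyTranslations(lang) {
    const table = translations[lang] || translations.en;
    document.querySelectorAll('[data-text]').forEach((el) => {
        const key = el.getAttribute('data-text');
        if (!(key in table)) return; // keep the hard-coded fallback text
        // Inputs carry their string in the placeholder, not in textContent
        // (see the search input with data-text="search-placeholder").
        if (el instanceof HTMLInputElement) {
            el.placeholder = table[key];
        } else {
            el.textContent = table[key];
        }
    });
}

// Hypothetical usage: restore the last chosen language on page load.
applyTranslations(localStorage.getItem('aniworld-language') || 'en');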

View File

@@ -1,380 +1,380 @@
<!DOCTYPE html>
<html lang="en" data-theme="light">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>AniWorld Manager - Login</title>
<link rel="stylesheet" href="/static/css/styles.css">
<link href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.0.0/css/all.min.css" rel="stylesheet">
<style>
.login-container {
min-height: 100vh;
display: flex;
align-items: center;
justify-content: center;
background: linear-gradient(135deg, var(--color-primary-light) 0%, var(--color-primary) 100%);
padding: 1rem;
}
.login-card {
background: var(--color-surface);
border-radius: 16px;
padding: 2rem;
box-shadow: 0 8px 32px rgba(0, 0, 0, 0.1);
width: 100%;
max-width: 400px;
border: 1px solid var(--color-border);
}
.login-header {
text-align: center;
margin-bottom: 2rem;
}
.login-header .logo {
font-size: 3rem;
color: var(--color-primary);
margin-bottom: 0.5rem;
}
.login-header h1 {
margin: 0;
color: var(--color-text);
font-size: 1.5rem;
font-weight: 600;
}
.login-header p {
margin: 0.5rem 0 0 0;
color: var(--color-text-secondary);
font-size: 0.9rem;
}
.login-form {
display: flex;
flex-direction: column;
gap: 1.5rem;
}
.form-group {
display: flex;
flex-direction: column;
gap: 0.5rem;
}
.form-label {
font-weight: 500;
color: var(--color-text);
font-size: 0.9rem;
}
.password-input-group {
position: relative;
}
.password-input {
width: 100%;
padding: 0.75rem 3rem 0.75rem 1rem;
border: 2px solid var(--color-border);
border-radius: 8px;
font-size: 1rem;
background: var(--color-background);
color: var(--color-text);
transition: all 0.2s ease;
box-sizing: border-box;
}
.password-input:focus {
outline: none;
border-color: var(--color-primary);
box-shadow: 0 0 0 3px rgba(var(--color-primary-rgb), 0.1);
}
.password-toggle {
position: absolute;
right: 0.75rem;
top: 50%;
transform: translateY(-50%);
background: none;
border: none;
color: var(--color-text-secondary);
cursor: pointer;
padding: 0.25rem;
border-radius: 4px;
transition: color 0.2s ease;
}
.password-toggle:hover {
color: var(--color-primary);
}
.login-button {
width: 100%;
padding: 0.75rem;
background: var(--color-primary);
color: white;
border: none;
border-radius: 8px;
font-size: 1rem;
font-weight: 500;
cursor: pointer;
transition: all 0.2s ease;
display: flex;
align-items: center;
justify-content: center;
gap: 0.5rem;
}
.login-button:hover:not(:disabled) {
background: var(--color-primary-dark);
transform: translateY(-1px);
box-shadow: 0 4px 12px rgba(var(--color-primary-rgb), 0.3);
}
.login-button:disabled {
opacity: 0.6;
cursor: not-allowed;
transform: none;
box-shadow: none;
}
.error-message {
background: var(--color-error-light);
color: var(--color-error);
padding: 0.75rem;
border-radius: 8px;
border: 1px solid var(--color-error);
font-size: 0.9rem;
text-align: center;
}
.success-message {
background: var(--color-success-light);
color: var(--color-success);
padding: 0.75rem;
border-radius: 8px;
border: 1px solid var(--color-success);
font-size: 0.9rem;
text-align: center;
}
.theme-toggle {
position: absolute;
top: 1rem;
right: 1rem;
background: rgba(255, 255, 255, 0.1);
border: 1px solid rgba(255, 255, 255, 0.2);
color: white;
padding: 0.5rem;
border-radius: 50%;
cursor: pointer;
transition: all 0.2s ease;
width: 2.5rem;
height: 2.5rem;
display: flex;
align-items: center;
justify-content: center;
}
.theme-toggle:hover {
background: rgba(255, 255, 255, 0.2);
transform: scale(1.1);
}
.security-info {
margin-top: 1.5rem;
padding: 1rem;
background: var(--color-info-light);
border: 1px solid var(--color-info);
border-radius: 8px;
font-size: 0.8rem;
color: var(--color-text-secondary);
text-align: center;
}
.loading-spinner {
width: 1rem;
height: 1rem;
border: 2px solid transparent;
border-top: 2px solid currentColor;
border-radius: 50%;
animation: spin 1s linear infinite;
}
@keyframes spin {
to {
transform: rotate(360deg);
}
}
</style>
</head>
<body>
<div class="login-container">
<button class="theme-toggle" id="theme-toggle" title="Toggle theme">
<i class="fas fa-moon"></i>
</button>
<div class="login-card">
<div class="login-header">
<div class="logo">
<i class="fas fa-play-circle"></i>
</div>
<h1>AniWorld Manager</h1>
<p>Please enter your master password to continue</p>
</div>
<form class="login-form" id="login-form">
<div class="form-group">
<label for="password" class="form-label">Master Password</label>
<div class="password-input-group">
<input
type="password"
id="password"
name="password"
class="password-input"
placeholder="Enter your password"
required
autocomplete="current-password"
autofocus>
<button type="button" class="password-toggle" id="password-toggle" tabindex="-1">
<i class="fas fa-eye"></i>
</button>
</div>
</div>
<div id="message-container"></div>
<button type="submit" class="login-button" id="login-button">
<i class="fas fa-sign-in-alt"></i>
<span>Login</span>
</button>
</form>
<div class="security-info">
<i class="fas fa-shield-alt"></i>
Your session will expire after {{ session_timeout }} hours of inactivity.
<br>
After {{ max_attempts }} failed attempts, this IP will be locked for {{ lockout_duration }} minutes.
</div>
</div>
</div>
<script>
// Theme toggle functionality
const themeToggle = document.getElementById('theme-toggle');
const htmlElement = document.documentElement;
// Load saved theme
const savedTheme = localStorage.getItem('theme') || 'light';
htmlElement.setAttribute('data-theme', savedTheme);
updateThemeIcon(savedTheme);
themeToggle.addEventListener('click', () => {
const currentTheme = htmlElement.getAttribute('data-theme');
const newTheme = currentTheme === 'dark' ? 'light' : 'dark';
htmlElement.setAttribute('data-theme', newTheme);
localStorage.setItem('theme', newTheme);
updateThemeIcon(newTheme);
});
function updateThemeIcon(theme) {
const icon = themeToggle.querySelector('i');
icon.className = theme === 'dark' ? 'fas fa-sun' : 'fas fa-moon';
}
// Password visibility toggle
const passwordToggle = document.getElementById('password-toggle');
const passwordInput = document.getElementById('password');
passwordToggle.addEventListener('click', () => {
const type = passwordInput.getAttribute('type');
const newType = type === 'password' ? 'text' : 'password';
const icon = passwordToggle.querySelector('i');
passwordInput.setAttribute('type', newType);
icon.className = newType === 'password' ? 'fas fa-eye' : 'fas fa-eye-slash';
});
// Form submission
const loginForm = document.getElementById('login-form');
const loginButton = document.getElementById('login-button');
const messageContainer = document.getElementById('message-container');
loginForm.addEventListener('submit', async (e) => {
e.preventDefault();
const password = passwordInput.value.trim();
if (!password) {
showMessage('Please enter your password', 'error');
return;
}
setLoading(true);
try {
const response = await fetch('/api/auth/login', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({ password })
});
const data = await response.json();
if (data.status === 'success') {
showMessage(data.message, 'success');
setTimeout(() => {
window.location.href = '/';
}, 1000);
} else {
showMessage(data.message, 'error');
passwordInput.value = '';
passwordInput.focus();
}
} catch (error) {
showMessage('Connection error. Please try again.', 'error');
console.error('Login error:', error);
} finally {
setLoading(false);
}
});
function showMessage(message, type) {
messageContainer.innerHTML = `
<div class="${type}-message">
${message}
</div>
`;
}
function setLoading(loading) {
loginButton.disabled = loading;
const buttonText = loginButton.querySelector('span');
const buttonIcon = loginButton.querySelector('i');
if (loading) {
buttonIcon.className = 'loading-spinner';
buttonText.textContent = 'Logging in...';
} else {
buttonIcon.className = 'fas fa-sign-in-alt';
buttonText.textContent = 'Login';
}
}
// Clear message on input
passwordInput.addEventListener('input', () => {
messageContainer.innerHTML = '';
});
// Enter key on password toggle
passwordToggle.addEventListener('keydown', (e) => {
if (e.key === 'Enter' || e.key === ' ') {
e.preventDefault();
passwordToggle.click();
}
});
</script>
</body>
</html>
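The inline script above fixes the contract the login endpoint must honor: a POST to /api/auth/login with a JSON body containing the password, answered by JSON carrying status and message fields. The sketch below restates that contract as a standalone helper; the behavior is inferred from the form handler above, and the login() helper name is an assumption.

// Sketch of the request/response contract used by the form handler above.
// Inferred from the inline script; not an authoritative API definition.
async function login(password) {
    const response = await fetch('/api/auth/login', {
        method: 'POST',
        headers: { 'Content-Type': 'application/json' },
        body: JSON.stringify({ password }),
    });
    // The handler above expects: { status: 'success' | 'error', message: string }
    const data = await response.json();
    if (data.status !== 'success') {
        throw new Error(data.message || 'Login failed');
    }
    return data.message; // shown briefly before redirecting to '/'
}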

View File

@@ -1,252 +1,252 @@
<!DOCTYPE html>
<html lang="en" data-theme="light">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Download Queue - AniWorld Manager</title>
<link rel="stylesheet" href="/static/css/styles.css">
<link href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.0.0/css/all.min.css" rel="stylesheet">
</head>
<body>
<div class="app-container">
<!-- Header -->
<header class="header">
<div class="header-content">
<div class="header-title">
<i class="fas fa-download"></i>
<h1>Download Queue</h1>
</div>
<div class="header-actions">
<a href="/" class="btn btn-secondary">
<i class="fas fa-arrow-left"></i>
<span>Back to Main</span>
</a>
<button id="theme-toggle" class="btn btn-icon" title="Toggle theme">
<i class="fas fa-moon"></i>
</button>
<button id="logout-btn" class="btn btn-secondary" title="Logout" style="display: none;">
<i class="fas fa-sign-out-alt"></i>
<span>Logout</span>
</button>
</div>
</div>
</header>
<!-- Main content -->
<main class="main-content">
<!-- Queue Statistics -->
<section class="queue-stats-section">
<div class="stats-grid">
<div class="stat-card">
<div class="stat-icon">
<i class="fas fa-download text-primary"></i>
</div>
<div class="stat-info">
<div class="stat-value" id="total-items">0</div>
<div class="stat-label">Total Items</div>
</div>
</div>
<div class="stat-card">
<div class="stat-icon">
<i class="fas fa-clock text-warning"></i>
</div>
<div class="stat-info">
<div class="stat-value" id="pending-items">0</div>
<div class="stat-label">In Queue</div>
</div>
</div>
<div class="stat-card">
<div class="stat-icon">
<i class="fas fa-check-circle text-success"></i>
</div>
<div class="stat-info">
<div class="stat-value" id="completed-items">0</div>
<div class="stat-label">Completed</div>
</div>
</div>
<div class="stat-card">
<div class="stat-icon">
<i class="fas fa-exclamation-triangle text-error"></i>
</div>
<div class="stat-info">
<div class="stat-value" id="failed-items">0</div>
<div class="stat-label">Failed</div>
</div>
</div>
</div>
<!-- Speed and ETA -->
<div class="speed-eta-section">
<div class="speed-info">
<div class="speed-current">
<span class="label">Current Speed:</span>
<span class="value" id="current-speed">0 MB/s</span>
</div>
<div class="speed-average">
<span class="label">Average Speed:</span>
<span class="value" id="average-speed">0 MB/s</span>
</div>
</div>
<div class="eta-info">
<span class="label">Estimated Time Remaining:</span>
<span class="value" id="eta-time">--:--</span>
</div>
</div>
</section>
<!-- Active Downloads -->
<section class="active-downloads-section">
<div class="section-header">
<h2>
<i class="fas fa-play-circle"></i>
Active Downloads
</h2>
<div class="section-actions">
<button id="pause-all-btn" class="btn btn-secondary" disabled>
<i class="fas fa-pause"></i>
Pause All
</button>
<button id="resume-all-btn" class="btn btn-primary" disabled style="display: none;">
<i class="fas fa-play"></i>
Resume All
</button>
</div>
</div>
<div class="active-downloads-list" id="active-downloads">
<div class="empty-state">
<i class="fas fa-pause-circle"></i>
<p>No active downloads</p>
</div>
</div>
</section>
<!-- Pending Queue -->
<section class="pending-queue-section">
<div class="section-header">
<h2>
<i class="fas fa-clock"></i>
Download Queue
</h2>
<div class="section-actions">
<button id="start-queue-btn" class="btn btn-primary" disabled>
<i class="fas fa-play"></i>
Start Downloads
</button>
<button id="stop-queue-btn" class="btn btn-secondary" disabled style="display: none;">
<i class="fas fa-stop"></i>
Stop Downloads
</button>
<button id="clear-queue-btn" class="btn btn-secondary" disabled>
<i class="fas fa-trash"></i>
Clear Queue
</button>
<button id="reorder-queue-btn" class="btn btn-secondary" disabled>
<i class="fas fa-sort"></i>
Reorder
</button>
</div>
</div>
<div class="pending-queue-list" id="pending-queue">
<div class="empty-state">
<i class="fas fa-list"></i>
<p>No items in queue</p>
</div>
</div>
</section>
<!-- Completed Downloads -->
<section class="completed-downloads-section">
<div class="section-header">
<h2>
<i class="fas fa-check-circle"></i>
Recent Completed
</h2>
<div class="section-actions">
<button id="clear-completed-btn" class="btn btn-secondary">
<i class="fas fa-broom"></i>
Clear Completed
</button>
</div>
</div>
<div class="completed-downloads-list" id="completed-downloads">
<div class="empty-state">
<i class="fas fa-check-circle"></i>
<p>No completed downloads</p>
</div>
</div>
</section>
<!-- Failed Downloads -->
<section class="failed-downloads-section">
<div class="section-header">
<h2>
<i class="fas fa-exclamation-triangle"></i>
Failed Downloads
</h2>
<div class="section-actions">
<button id="retry-all-btn" class="btn btn-warning" disabled>
<i class="fas fa-redo"></i>
Retry All
</button>
<button id="clear-failed-btn" class="btn btn-secondary">
<i class="fas fa-trash"></i>
Clear Failed
</button>
</div>
</div>
<div class="failed-downloads-list" id="failed-downloads">
<div class="empty-state">
<i class="fas fa-check-circle text-success"></i>
<p>No failed downloads</p>
</div>
</div>
</section>
</main>
<!-- Toast notifications -->
<div id="toast-container" class="toast-container"></div>
</div>
<!-- Loading overlay -->
<div id="loading-overlay" class="loading-overlay hidden">
<div class="loading-spinner">
<i class="fas fa-spinner fa-spin"></i>
<p>Loading...</p>
</div>
</div>
<!-- Confirmation Modal -->
<div id="confirm-modal" class="modal hidden">
<div class="modal-overlay"></div>
<div class="modal-content">
<div class="modal-header">
<h3 id="confirm-title">Confirm Action</h3>
<button id="close-confirm" class="btn btn-icon">
<i class="fas fa-times"></i>
</button>
</div>
<div class="modal-body">
<p id="confirm-message">Are you sure you want to perform this action?</p>
</div>
<div class="modal-footer">
<button id="confirm-cancel" class="btn btn-secondary">Cancel</button>
<button id="confirm-ok" class="btn btn-primary">Confirm</button>
</div>
</div>
</div>
<!-- Scripts -->
<script src="https://cdnjs.cloudflare.com/ajax/libs/socket.io/4.0.1/socket.io.js"></script>
<script src="/static/js/queue.js"></script>
</body>
</html>
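The queue page above is static scaffolding: every counter (total-items, pending-items, completed-items, failed-items) and list container starts empty and is populated by /static/js/queue.js, which is not part of this diff. The sketch below shows one plausible wiring over the Socket.IO client loaded above; the queue_update event name and its payload shape are assumptions.

// Hypothetical sketch of how queue.js could keep the stat cards current.
// The 'queue_update' event and payload fields are assumed, not confirmed.
const socket = io(); // Socket.IO client is loaded from the CDN above

socket.on('queue_update', (stats) => {
    // Assumed payload shape: { total, pending, completed, failed }
    document.getElementById('total-items').textContent = stats.total;
    document.getElementById('pending-items').textContent = stats.pending;
    document.getElementById('completed-items').textContent = stats.completed;
    document.getElementById('failed-items').textContent = stats.failed;

    // Enable queue controls only when there is something to act on.
    document.getElementById('start-queue-btn').disabled = stats.pending === 0;
    document.getElementById('retry-all-btn').disabled = stats.failed === 0;
});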

File diff suppressed because it is too large