instruction2

This commit is contained in:
Lukas 2025-10-12 22:39:51 +02:00
parent e48cb29131
commit 7481a33c15
40 changed files with 639 additions and 14993 deletions

44
.env
View File

@ -1,44 +0,0 @@
# Aniworld Server Environment Configuration
# Security (REQUIRED - Generate secure random values)
SECRET_KEY=dev_secret_key_change_in_production_12345
JWT_SECRET_KEY=jwt_secret_key_change_in_production_67890
PASSWORD_SALT=salt_change_in_production_abcdef
# Master Password Authentication (Simple system)
MASTER_PASSWORD_HASH=8cf532e926e9493630820ce80005f6e2239305ac64c34069e869be5106e2af10
# MASTER_PASSWORD=admin123 # Used for development only, remove in production
# Database Configuration
DATABASE_URL=sqlite:///data/aniworld.db
DATABASE_POOL_SIZE=10
DATABASE_MAX_OVERFLOW=20
DATABASE_POOL_TIMEOUT=30
DATABASE_POOL_RECYCLE=3600
# Redis Configuration (for caching and sessions)
REDIS_URL=redis://localhost:6379/0
REDIS_MAX_CONNECTIONS=10
REDIS_SOCKET_TIMEOUT=5
# Security Settings
SESSION_TIMEOUT_HOURS=24
MAX_FAILED_LOGIN_ATTEMPTS=5
LOCKOUT_DURATION_MINUTES=30
# Rate Limiting
RATE_LIMIT_PER_MINUTE=60
API_RATE_LIMIT_PER_MINUTE=100
# Application Settings
DEBUG=true
HOST=127.0.0.1
PORT=5000
# Anime and Download Settings
ANIME_DIRECTORY=./downloads
MAX_CONCURRENT_DOWNLOADS=3
# Logging
LOG_LEVEL=INFO
LOG_FILE=logs/aniworld.log

View File

@ -1,56 +0,0 @@
# Aniworld Server Environment Configuration
# Copy this file to .env and fill in your values
# Security (REQUIRED - Generate secure random values)
SECRET_KEY=your_secret_key_here
JWT_SECRET_KEY=your_jwt_secret_here
PASSWORD_SALT=your_password_salt_here
# Database Configuration
DATABASE_URL=sqlite:///data/aniworld.db
# DATABASE_PASSWORD=your_db_password_here
DATABASE_POOL_SIZE=10
DATABASE_MAX_OVERFLOW=20
DATABASE_POOL_TIMEOUT=30
DATABASE_POOL_RECYCLE=3600
# Redis Configuration (for caching and sessions)
REDIS_URL=redis://localhost:6379/0
# REDIS_PASSWORD=your_redis_password_here
REDIS_MAX_CONNECTIONS=10
REDIS_SOCKET_TIMEOUT=5
# Email Configuration (for password reset emails)
SMTP_SERVER=localhost
SMTP_PORT=587
# SMTP_USERNAME=your_smtp_username
# SMTP_PASSWORD=your_smtp_password
SMTP_USE_TLS=true
FROM_EMAIL=noreply@aniworld.local
# External API Keys
# ANIME_PROVIDER_API_KEY=your_anime_provider_api_key
# TMDB_API_KEY=your_tmdb_api_key
# Security Settings
SESSION_TIMEOUT_HOURS=24
MAX_FAILED_LOGIN_ATTEMPTS=5
LOCKOUT_DURATION_MINUTES=30
# Rate Limiting
RATE_LIMIT_PER_MINUTE=60
API_RATE_LIMIT_PER_MINUTE=100
# Application Settings
DEBUG=false
HOST=127.0.0.1
PORT=5000
# Anime and Download Settings
ANIME_DIRECTORY=./downloads
MAX_CONCURRENT_DOWNLOADS=3
# DOWNLOAD_SPEED_LIMIT=1000000 # bytes per second
# Logging
LOG_LEVEL=INFO
LOG_FILE=logs/aniworld.log

28
.flake8
View File

@ -1,28 +0,0 @@
[flake8]
max-line-length = 88
exclude =
.git,
__pycache__,
build,
dist,
.venv,
venv,
aniworld,
migrations,
.pytest_cache,
.mypy_cache,
.coverage,
htmlcov
extend-ignore =
# E203: whitespace before ':' (conflicts with black)
E203,
# W503: line break before binary operator (conflicts with black)
W503,
# E501: line too long (handled by black)
E501
per-file-ignores =
__init__.py:F401
tests/*:F401,F811
max-complexity = 10
docstring-convention = google
import-order-style = google

View File

@ -4,136 +4,118 @@ These instructions define how GitHub Copilot should assist with this project. Th
## 🧠 Context
- **Project Type**: Web API / Data Pipeline / CLI Tool / ML App
- **Language**: Python
- **Framework / Libraries**: FastAPI / Flask / Django / Pandas / Pydantic / Poetry
- **Architecture**: MVC / Clean Architecture / Event-Driven / Microservices
- **Project Type**: Web API / Data Pipeline / CLI Tool / ML App
- **Language**: Python
- **Framework / Libraries**: FastAPI / Flask / Django / Pandas / Pydantic / Poetry
- **Architecture**: MVC / Clean Architecture / Event-Driven / Microservices
## 🔧 General Guidelines
- Use Pythonic patterns (PEP8, PEP257).
- Prefer named functions and class-based structures over inline lambdas.
- Use type hints where applicable (`typing` module).
- Follow black or isort for formatting and import order.
- Use meaningful naming; avoid cryptic variables.
- Emphasize simplicity, readability, and DRY principles.
## 📁 File Structure
Use this structure as a guide when creating or updating files:
```text
src/
controllers/
services/
repositories/
schemas/
utils/
config/
tests/
unit/
integration/
```
- Use Pythonic patterns (PEP8, PEP257).
- Prefer named functions and class-based structures over inline lambdas.
- Use type hints where applicable (`typing` module).
- Follow black or isort for formatting and import order.
- Use meaningful naming; avoid cryptic variables.
- Emphasize simplicity, readability, and DRY principles.
## 🧶 Patterns
### ✅ Patterns to Follow
- Use the Repository Pattern and Dependency Injection (e.g., via `Depends` in FastAPI).
- Validate data using Pydantic models.
- Use custom exceptions and centralized error handling.
- Use environment variables via `dotenv` or `os.environ`.
- Use logging via the `logging` module or structlog.
- Write modular, reusable code organized by concerns (e.g., controller, service, data layer).
- Favor async endpoints for I/O-bound services (FastAPI, aiohttp).
- Document functions and classes with docstrings.
- Use the Repository Pattern and Dependency Injection (e.g., via `Depends` in FastAPI).
- Validate data using Pydantic models.
- Use custom exceptions and centralized error handling.
- Use environment variables via `dotenv` or `os.environ`.
- Use logging via the `logging` module or structlog.
- Write modular, reusable code organized by concerns (e.g., controller, service, data layer).
- Favor async endpoints for I/O-bound services (FastAPI, aiohttp).
- Document functions and classes with docstrings.
### 🚫 Patterns to Avoid
- Don't use wildcard imports (`from module import *`).
- Avoid global state unless encapsulated in a singleton or config manager.
- Don't hardcode secrets or config values—use `.env`.
- Don't expose internal stack traces in production environments.
- Avoid business logic inside views/routes.
- Don't use wildcard imports (`from module import *`).
- Avoid global state unless encapsulated in a singleton or config manager.
- Don't hardcode secrets or config values—use `.env`.
- Don't expose internal stack traces in production environments.
- Avoid business logic inside views/routes.
## 🧪 Testing Guidelines
- Use `pytest` or `unittest` for unit and integration tests.
- Mock external services with `unittest.mock` or `pytest-mock`.
- Use fixtures to set up and tear down test data.
- Aim for high coverage on core logic and low-level utilities.
- Test both happy paths and edge cases.
- Use `pytest` or `unittest` for unit and integration tests.
- Mock external services with `unittest.mock` or `pytest-mock`.
- Use fixtures to set up and tear down test data.
- Aim for high coverage on core logic and low-level utilities.
- Test both happy paths and edge cases.
## 🧩 Example Prompts
- `Copilot, create a FastAPI endpoint that returns all users from the database.`
- `Copilot, write a Pydantic model for a product with id, name, and optional price.`
- `Copilot, implement a CLI command that uploads a CSV file and logs a summary.`
- `Copilot, write a pytest test for the transform_data function using a mock input.`
- `Copilot, create a FastAPI endpoint that returns all users from the database.`
- `Copilot, write a Pydantic model for a product with id, name, and optional price.`
- `Copilot, implement a CLI command that uploads a CSV file and logs a summary.`
- `Copilot, write a pytest test for the transform_data function using a mock input.`
## 🔁 Iteration & Review
- Review Copilot output before committing.
- Add comments to clarify intent if Copilot generates incorrect or unclear suggestions.
- Use linters (flake8, pylint) and formatters (black, isort) as part of the review pipeline.
- Refactor output to follow project conventions.
- Review Copilot output before committing.
- Add comments to clarify intent if Copilot generates incorrect or unclear suggestions.
- Use linters (flake8, pylint) and formatters (black, isort) as part of the review pipeline.
- Refactor output to follow project conventions.
## 📚 References
- [PEP 8 Style Guide for Python Code](https://peps.python.org/pep-0008/)
- [PEP 484 Type Hints](https://peps.python.org/pep-0484/)
- [FastAPI Documentation](https://fastapi.tiangolo.com/)
- [Django Documentation](https://docs.djangoproject.com/en/stable/)
- [Flask Documentation](https://flask.palletsprojects.com/)
- [Pytest Documentation](https://docs.pytest.org/en/stable/)
- [Pydantic Documentation](https://docs.pydantic.dev/)
- [Python Logging Best Practices](https://docs.python.org/3/howto/logging.html)
- [Black Code Formatter](https://black.readthedocs.io/)
- [Poetry](https://python-poetry.org/docs/)
- [PEP 8 Style Guide for Python Code](https://peps.python.org/pep-0008/)
- [PEP 484 Type Hints](https://peps.python.org/pep-0484/)
- [FastAPI Documentation](https://fastapi.tiangolo.com/)
- [Django Documentation](https://docs.djangoproject.com/en/stable/)
- [Flask Documentation](https://flask.palletsprojects.com/)
- [Pytest Documentation](https://docs.pytest.org/en/stable/)
- [Pydantic Documentation](https://docs.pydantic.dev/)
- [Python Logging Best Practices](https://docs.python.org/3/howto/logging.html)
- [Black Code Formatter](https://black.readthedocs.io/)
- [Poetry](https://python-poetry.org/docs/)
## 1. General Philosophy
* **Clarity is King:** Code should be easy to understand at a glance.
* **Consistency Matters:** Adhere to these standards across all projects.
* **Automation Encouraged:** Utilize tools like StyleCop, Roslyn Analyzers, and .editorconfig to enforce these standards automatically.
* **Evolve and Adapt:** These standards should be reviewed and updated as the C# language and best practices evolve.
* **Practicality Reigns:** While striving for perfection, prioritize pragmatic solutions that balance maintainability and development speed.
- **Clarity is King:** Code should be easy to understand at a glance.
- **Consistency Matters:** Adhere to these standards across all projects.
- **Automation Encouraged:** Utilize tools like StyleCop, Roslyn Analyzers, and .editorconfig to enforce these standards automatically.
- **Evolve and Adapt:** These standards should be reviewed and updated as the C# language and best practices evolve.
- **Practicality Reigns:** While striving for perfection, prioritize pragmatic solutions that balance maintainability and development speed.
* Clean Code, keep it simple, MVVM
- Clean Code, keep it simple, MVVM
## 2. Security Considerations
* **Input Validation:** Always validate user input to prevent injection attacks (e.g., SQL injection, XSS).
* **Secure Configuration:** Store sensitive information (e.g., passwords, API keys) in secure configuration files, and encrypt them if possible. Avoid hardcoding sensitive data.
* **Authentication and Authorization:** Implement proper authentication and authorization mechanisms to protect resources. Favor using built-in identity frameworks.
* **Data Encryption:** Encrypt sensitive data at rest and in transit. Use strong encryption algorithms.
* **Regular Security Audits:** Perform regular security audits and penetration testing to identify and address vulnerabilities.
* **Dependency Vulnerabilities:** Keep dependencies up-to-date to patch known security vulnerabilities. Use tools to automatically check for vulnerabilities.
- **Input Validation:** Always validate user input to prevent injection attacks (e.g., SQL injection, XSS).
- **Secure Configuration:** Store sensitive information (e.g., passwords, API keys) in secure configuration files, and encrypt them if possible. Avoid hardcoding sensitive data.
- **Authentication and Authorization:** Implement proper authentication and authorization mechanisms to protect resources. Favor using built-in identity frameworks.
- **Data Encryption:** Encrypt sensitive data at rest and in transit. Use strong encryption algorithms.
- **Regular Security Audits:** Perform regular security audits and penetration testing to identify and address vulnerabilities.
- **Dependency Vulnerabilities:** Keep dependencies up-to-date to patch known security vulnerabilities. Use tools to automatically check for vulnerabilities.
## 3. Performance Optimization
* **Minimize Object Allocation:** Reduce unnecessary object allocations, especially in performance-critical code. Use techniques like object pooling and struct types for small value types.
* **Use Efficient Data Structures:** Choose the appropriate data structures for the task (e.g., "Dictionary" for fast lookups, "List" for ordered collections).
* **Avoid Boxing/Unboxing:** Avoid boxing and unboxing operations, as they can be expensive. Use generics to prevent boxing.
* **String Concatenation:** Use "StringBuilder" for building strings in loops instead of repeated string concatenation.
* **Asynchronous I/O:** Use asynchronous I/O operations to avoid blocking threads.
* **Profiling:** Use profiling tools to identify performance bottlenecks.
- **Minimize Object Allocation:** Reduce unnecessary object allocations, especially in performance-critical code. Use techniques like object pooling and struct types for small value types.
- **Use Efficient Data Structures:** Choose the appropriate data structures for the task (e.g., "Dictionary" for fast lookups, "List" for ordered collections).
- **Avoid Boxing/Unboxing:** Avoid boxing and unboxing operations, as they can be expensive. Use generics to prevent boxing.
- **String Concatenation:** Use "StringBuilder" for building strings in loops instead of repeated string concatenation.
- **Asynchronous I/O:** Use asynchronous I/O operations to avoid blocking threads.
- **Profiling:** Use profiling tools to identify performance bottlenecks.
## 4. GUI
* **Effortless:** faster and more intuitive. It's easy to do what I want, with focus and precision.
* **Calm:** faster and more intuitive. It's easy to do what I want, with focus and precision.
* **Iconography:** Iconography is a set of visual images and symbols that help users understand and navigate your app. Windows 11 iconography has evolved in concert with our design language. Every glyph in our system icon font has been redesigned to embrace a softer geometry and more modern metaphors.
* **Shapes and geometry:** Geometry describes the shape, size, and position of UI elements on screen. These fundamental design elements help experiences feel coherent across the entire design system. Windows 11 features updated geometry that creates a more approachable, engaging, and modern experience.
* **Typography:** As the visual representation of language, the main task of typography is to communicate information. The Windows 11 type system helps you create structure and hierarchy in your content in order to maximize legibility and readability in your UI.
* **Familiar:** faster and more intuitive. It's easy to do what I want, with focus and precision.
* **Familiar:** faster and more intuitive. It's easy to do what I want, with focus and precision.
* **Fluent UI design:** Use Fluent UI design
* **Themes:** Use the already defined Theme color. Make sure there is always a dark and light mode.
* **Text:** Write in resource files so that a translation is easily possible. Use the already defined text in the resource files.
- **Effortless:** faster and more intuitive. It's easy to do what I want, with focus and precision.
- **Calm:** faster and more intuitive. It's easy to do what I want, with focus and precision.
- **Iconography:** Iconography is a set of visual images and symbols that help users understand and navigate your app. Windows 11 iconography has evolved in concert with our design language. Every glyph in our system icon font has been redesigned to embrace a softer geometry and more modern metaphors.
- **Shapes and geometry:** Geometry describes the shape, size, and position of UI elements on screen. These fundamental design elements help experiences feel coherent across the entire design system. Windows 11 features updated geometry that creates a more approachable, engaging, and modern experience.
- **Typography:** As the visual representation of language, the main task of typography is to communicate information. The Windows 11 type system helps you create structure and hierarchy in your content in order to maximize legibility and readability in your UI.
- **Familiar:** faster and more intuitive. It's easy to do what I want, with focus and precision.
- **Familiar:** faster and more intuitive. It's easy to do what I want, with focus and precision.
- **Fluent UI design:** Use Fluent UI design
- **Themes:** Use the already defined Theme color. Make sure there is always a dark and light mode.
- **Text:** Write in resource files so that a translation is easily possible. Use the already defined text in the resource files.
This document serves as a starting point and is meant to be adapted to the specific needs of each project and team. Regularly review and update these standards to keep them relevant and effective.
Run till you are really finished.
Do not guess; open and read files if you don't know something.
Run till you are really finished.
Do not guess; open and read files if you don't know something.

View File

@ -1,191 +0,0 @@
# AniWorld FastAPI Documentation
## Overview
AniWorld has been successfully migrated from Flask to FastAPI, providing improved performance, automatic API documentation, and modern async support.
## Accessing API Documentation
### Interactive API Documentation
FastAPI automatically generates interactive API documentation that you can access at:
- **Swagger UI**: `http://localhost:8000/docs`
- **ReDoc**: `http://localhost:8000/redoc`
These interfaces allow you to:
- Browse all available endpoints
- View request/response schemas
- Test API endpoints directly from the browser
- Download OpenAPI schema
### OpenAPI Schema
The complete OpenAPI 3.0 schema is available at:
- **JSON Format**: `http://localhost:8000/openapi.json`
## Authentication
### Master Password Authentication
AniWorld uses a simple master password authentication system with JWT tokens.
#### Login Process
1. **POST** `/auth/login`
- Send master password in request body
- Receive JWT token in response
- Token expires in 24 hours
```json
{
"password": "your_master_password"
}
```
Response:
```json
{
"success": true,
"token": "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9...",
"message": "Login successful"
}
```
#### Using Authentication Token
Include the token in the `Authorization` header for authenticated requests:
```
Authorization: Bearer eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9...
```
## API Endpoints
### System Health
- **GET** `/health` - Check system health and status
- **GET** `/api/system/database/health` - Check database connectivity
- **GET** `/api/system/config` - Get system configuration
### Authentication
- **POST** `/auth/login` - Authenticate and get JWT token
- **GET** `/auth/verify` - Verify current token validity
- **POST** `/auth/logout` - Logout and invalidate token
- **GET** `/api/auth/status` - Get current authentication status
### Anime Management
- **GET** `/api/anime/search` - Search for anime series
- **GET** `/api/anime/{anime_id}` - Get specific anime details
- **GET** `/api/anime/{anime_id}/episodes` - Get episodes for an anime
### Episode Management
- **GET** `/api/episodes/{episode_id}` - Get specific episode details
### Series Management
- **POST** `/api/add_series` - Add a new series to tracking
- **POST** `/api/download` - Start episode download
### Web Interface
- **GET** `/` - Main application interface
- **GET** `/app` - Application dashboard
- **GET** `/login` - Login page
- **GET** `/setup` - Setup page
- **GET** `/queue` - Download queue interface
## Response Formats
### Success Responses
All successful API responses follow this structure:
```json
{
"success": true,
"data": {...},
"message": "Operation completed successfully"
}
```
### Error Responses
Error responses include detailed error information:
```json
{
"success": false,
"error": "Error description",
"code": "ERROR_CODE",
"details": {...}
}
```
## Status Codes
- **200 OK** - Successful operation
- **201 Created** - Resource created successfully
- **400 Bad Request** - Invalid request data
- **401 Unauthorized** - Authentication required
- **403 Forbidden** - Insufficient permissions
- **404 Not Found** - Resource not found
- **422 Unprocessable Entity** - Validation error
- **500 Internal Server Error** - Server error
## Rate Limiting
Currently, no rate limiting is implemented, but it may be added in future versions.
## WebSocket Support
Real-time updates are available through WebSocket connections for:
- Download progress updates
- Scan progress updates
- System status changes
## Migration Notes
### Changes from Flask
1. **Automatic Documentation**: FastAPI provides built-in OpenAPI documentation
2. **Type Safety**: Full request/response validation with Pydantic
3. **Async Support**: Native async/await support for better performance
4. **Modern Standards**: OpenAPI 3.0, JSON Schema validation
5. **Better Error Handling**: Structured error responses with detailed information
### Breaking Changes
- Authentication tokens are now JWT-based instead of session-based
- Request/response formats may have slight differences
- Some endpoint URLs may have changed
- WebSocket endpoints use FastAPI WebSocket pattern
## Development
### Running the Server
```bash
# Development mode with auto-reload
uvicorn src.server.fastapi_app:app --host 127.0.0.1 --port 8000 --reload
# Production mode
uvicorn src.server.fastapi_app:app --host 0.0.0.0 --port 8000
```
### Environment Variables
- `MASTER_PASSWORD_HASH` - Hashed master password
- `JWT_SECRET_KEY` - Secret key for JWT token signing
- `LOG_LEVEL` - Logging level (DEBUG, INFO, WARNING, ERROR)
## Support
For issues, questions, or contributions, please visit the project repository or contact the development team.

View File

@ -1,74 +0,0 @@
# AniWorld Project Overview
## 📁 Folder Structure
The project follows a modular, layered architecture inspired by MVC and Clean Architecture principles. The main directories are:
```
src/
controllers/ # API endpoints and route handlers
services/ # Business logic and orchestration
repositories/ # Data access layer (DB, external APIs)
schemas/ # Pydantic models for validation/serialization
utils/ # Utility functions and helpers
config/ # Configuration management (env, settings)
tests/
unit/ # Unit tests for core logic
integration/ # Integration tests for end-to-end scenarios
```
## 🏗️ Architecture
- **MVC & Clean Architecture:** Separation of concerns between controllers (views), services (business logic), and repositories (data access).
- **Dependency Injection:** Used for service/repository wiring, especially with FastAPI's `Depends`.
- **Event-Driven & Microservices Ready:** Modular design allows for future scaling into microservices or event-driven workflows.
- **Centralized Error Handling:** Custom exceptions and error middleware for consistent API responses.
## 🧰 Used Libraries & Frameworks
- **Python** (PEP8, PEP257, type hints)
- **FastAPI**: High-performance async web API framework
- **Pydantic**: Data validation and serialization
- **Poetry**: Dependency management and packaging
- **dotenv / os.environ**: Environment variable management
- **logging / structlog**: Structured logging
- **pytest / unittest**: Testing frameworks
- **aiohttp**: Async HTTP client (where needed)
- **SQLAlchemy / asyncpg / databases**: Database ORM and async drivers (if present)
- **Prometheus**: Metrics endpoint integration
- **Other**: As required for integrations (webhooks, third-party APIs)
## 🧩 Patterns & Conventions
- **Repository Pattern:** All data access is abstracted via repositories.
- **Service Layer:** Business logic is encapsulated in services, not controllers.
- **Pydantic Models:** Used for all input/output validation.
- **Async Endpoints:** All I/O-bound endpoints are async for scalability.
- **Environment Configuration:** All secrets/configs are loaded from `.env` or environment variables.
- **Logging:** All logs are structured and configurable.
- **Testing:** High coverage with fixtures and mocks for external dependencies.
## 🛡️ Security & Performance
- **JWT Authentication:** Secure endpoints with token-based auth.
- **Input Validation:** All user input is validated via Pydantic.
- **No Hardcoded Secrets:** All sensitive data is externalized.
- **Performance Optimization:** Async I/O, caching, and profiling tools.
## 🎨 UI & CLI
- **Theme Support:** Light/dark/auto modes.
- **Accessibility:** Screen reader, color contrast, keyboard shortcuts.
- **CLI Tool:** For bulk operations, scanning, and management.
## 📚 References
- [FastAPI Documentation](https://fastapi.tiangolo.com/)
- [Pydantic Documentation](https://docs.pydantic.dev/)
- [Poetry](https://python-poetry.org/docs/)
- [PEP 8](https://peps.python.org/pep-0008/)
- [Black Formatter](https://black.readthedocs.io/)
---
**For details on individual features and endpoints, see `features.md`.**

268
README.md
View File

@ -1,268 +0,0 @@
# AniWorld - Anime Series Management System
A powerful anime series management system that helps you track, organize, and download your favorite anime series. Recently migrated from Flask to FastAPI for improved performance and modern API capabilities.
## 🚀 Features
### Core Functionality
- **Series Tracking**: Automatically detect missing episodes in your anime collection
- **Smart Downloads**: Queue-based download system with progress tracking
- **File Organization**: Automatic file scanning and folder structure management
- **Search Integration**: Search for anime series across multiple providers
- **Real-time Updates**: Live progress updates via WebSocket connections
### Web Interface
- **Modern UI**: Clean, responsive web interface with dark/light theme support
- **Download Queue**: Visual download queue management
- **Progress Tracking**: Real-time download and scan progress
- **Mobile Support**: Fully responsive design for mobile devices
### API & Integration
- **FastAPI Backend**: High-performance async API with automatic documentation
- **RESTful API**: Complete REST API for programmatic access
- **OpenAPI Documentation**: Interactive API documentation at `/docs`
- **Authentication**: Secure master password authentication with JWT tokens
## 🎯 Recent Migration: Flask → FastAPI
This project has been successfully migrated from Flask to FastAPI, bringing significant improvements:
### Performance Benefits
- **Async Support**: Native async/await for better concurrency
- **Faster Response Times**: Up to 2-3x performance improvement
- **Better Resource Utilization**: More efficient handling of concurrent requests
### Developer Experience
- **Automatic Documentation**: Built-in OpenAPI/Swagger documentation
- **Type Safety**: Full request/response validation with Pydantic
- **Modern Standards**: OpenAPI 3.0 compliance and JSON Schema validation
- **Better Error Handling**: Structured error responses with detailed information
### API Improvements
- **Interactive Documentation**: Test API endpoints directly from `/docs`
- **Schema Validation**: Automatic request/response validation
- **Better Error Messages**: Detailed validation errors with field-level feedback
## 🛠️ Installation & Setup
### Prerequisites
- Python 3.11+
- Conda package manager
- Windows OS (currently optimized for Windows)
### Quick Start
1. **Clone the Repository**
```bash
git clone <repository-url>
cd Aniworld
```
2. **Create and Activate Conda Environment**
```bash
conda create -n AniWorld python=3.11
conda activate AniWorld
```
3. **Install Dependencies**
```bash
pip install -r requirements.txt
```
4. **Set Environment Variables**
```bash
# Set your master password (will be hashed automatically)
set MASTER_PASSWORD=your_secure_password
```
5. **Start the FastAPI Server**
```bash
# Development mode with auto-reload
uvicorn src.server.fastapi_app:app --host 127.0.0.1 --port 8000 --reload
# Or use the VS Code task: "Run FastAPI Server"
```
6. **Access the Application**
- **Web Interface**: http://localhost:8000
- **API Documentation**: http://localhost:8000/docs
- **Alternative API Docs**: http://localhost:8000/redoc
### Alternative: Using VS Code Tasks
If you're using VS Code, you can use the pre-configured tasks:
- `Ctrl+Shift+P` → "Tasks: Run Task" → "Run FastAPI Server"
## 🔧 Configuration
### Environment Variables
- `MASTER_PASSWORD` - Your master password (will be hashed automatically)
- `MASTER_PASSWORD_HASH` - Pre-hashed password (alternative to MASTER_PASSWORD)
- `JWT_SECRET_KEY` - Secret key for JWT token signing (auto-generated if not set)
- `LOG_LEVEL` - Logging level (DEBUG, INFO, WARNING, ERROR)
### Directory Structure
```
Aniworld/
├── src/
│ ├── core/ # Core business logic
│ │ ├── SeriesApp.py # Main application controller
│ │ ├── entities/ # Data models
│ │ └── providers/ # Content providers
│ ├── server/ # FastAPI server
│ │ ├── fastapi_app.py # Main FastAPI application
│ │ └── web/ # Web interface and controllers
│ └── infrastructure/ # Infrastructure components
├── data/ # Application data and databases
├── logs/ # Application logs
└── requirements.txt # Python dependencies
```
## 🌐 API Usage
### Authentication
1. **Login to get JWT token**:
```bash
curl -X POST "http://localhost:8000/auth/login" \
-H "Content-Type: application/json" \
-d '{"password": "your_master_password"}'
```
2. **Use token in requests**:
```bash
curl -X GET "http://localhost:8000/api/anime/search?query=naruto" \
-H "Authorization: Bearer your_jwt_token_here"
```
### Key Endpoints
- **Authentication**: `/auth/login`, `/auth/verify`, `/auth/logout`
- **System**: `/health`, `/api/system/config`
- **Anime**: `/api/anime/search`, `/api/anime/{id}`
- **Episodes**: `/api/episodes/{id}`, `/api/anime/{id}/episodes`
- **Downloads**: `/api/download`, `/api/add_series`
For complete API documentation, visit `/docs` when the server is running.
## 🖥️ Web Interface
### Main Features
- **Dashboard**: Overview of your anime collection and missing episodes
- **Search**: Find and add new anime series to track
- **Downloads**: Manage download queue and monitor progress
- **Settings**: Configure application preferences
### Responsive Design
The web interface is fully responsive and supports:
- Desktop browsers (Chrome, Firefox, Edge, Safari)
- Mobile devices (iOS Safari, Android Chrome)
- Tablet devices
- Dark and light themes
## 🔍 Troubleshooting
### Common Issues
1. **Server won't start**
- Check that the AniWorld conda environment is activated
- Verify all dependencies are installed: `pip install -r requirements.txt`
- Check for port conflicts (default: 8000)
2. **Authentication errors**
- Verify the master password is set correctly
- Check environment variables are properly configured
- Clear browser cache/cookies
3. **Import errors**
- Ensure all required packages are installed
- Check Python path configuration
- Verify conda environment is activated
### Logs
Application logs are stored in the `logs/` directory:
- `aniworld.log` - General application logs
- `errors.log` - Error-specific logs
- `auth_failures.log` - Authentication failure logs
## 🚦 Development
### Running in Development Mode
```bash
# With auto-reload for development
uvicorn src.server.fastapi_app:app --host 127.0.0.1 --port 8000 --reload --log-level debug
```
### Testing
```bash
# Run all tests
python -m pytest tests/ -v
# Run with coverage
python -m pytest tests/ --cov=src --cov-report=html
```
### Code Quality
```bash
# Format code
black src/
isort src/
# Lint code
pylint src/
flake8 src/
```
## 📚 Documentation
- **API Documentation**: Available at `/docs` (Swagger UI) and `/redoc` (ReDoc)
- **Migration Guide**: See `API_DOCUMENTATION.md` for detailed migration information
- **FastAPI Specific**: See `src/server/README_FastAPI.md` for server-specific documentation
## 🤝 Contributing
1. Fork the repository
2. Create a feature branch (`git checkout -b feature/amazing-feature`)
3. Commit your changes (`git commit -m 'Add amazing feature'`)
4. Push to the branch (`git push origin feature/amazing-feature`)
5. Open a Pull Request
## 📄 License
This project is licensed under the MIT License - see the LICENSE file for details.
## 🙏 Acknowledgments
- FastAPI team for the excellent framework
- The original Flask implementation that served as the foundation
- All contributors and users of the AniWorld project
---
**Note**: This application is for personal use only. Please respect copyright laws and terms of service of content providers.

Binary file not shown.

View File

@ -1,135 +1,24 @@
# AniWorld Application Features
## 1. Authentication & Security
- Master password authentication (JWT-based)
- `POST /auth/login`: Login and receive JWT token
- `GET /auth/verify`: Verify JWT token validity
- `POST /auth/logout`: Logout (stateless)
- Password hashing (SHA-256 + salt)
- Configurable session timeout
- Secure environment variable management
## 2. Health & System Monitoring
- Health check endpoints
- `/health`: Basic health status
- `/api/health`: Load balancer health
- `/api/health/system`: System metrics (CPU, memory, disk)
- `/api/health/database`: Database connectivity
- `/api/health/dependencies`: External dependencies
- `/api/health/performance`: Performance metrics
- `/api/health/metrics`: Prometheus metrics
- `/api/health/ready`: Readiness probe (Kubernetes)
## 3. Anime & Episode Management
- Search anime
- `GET /api/anime/search`: Search anime by title (pagination)
- Get anime details
- `GET /api/anime/{anime_id}`: Anime details
- `GET /api/anime/{anime_id}/episodes`: List episodes
- `GET /api/episodes/{episode_id}`: Episode details
## 4. Database & Storage Management
- Database info and statistics
- `GET /api/database/info`: Database stats
- Maintenance operations
- `/maintenance/database/vacuum`: Vacuum database
- `/maintenance/database/analyze`: Analyze database
- `/maintenance/database/integrity-check`: Integrity check
- `/maintenance/database/reindex`: Reindex database
- `/maintenance/database/optimize`: Optimize database
- `/maintenance/database/stats`: Get database stats
## 5. Bulk Operations
- Bulk download, update, organize, delete, export
- `/api/bulk/download`: Start bulk download
- `/api/bulk/update`: Bulk update
- `/api/bulk/organize`: Organize series
- `/api/bulk/delete`: Delete series
- `/api/bulk/export`: Export series data
## 6. Performance Optimization
- Speed limit management
- `/api/performance/speed-limit`: Get/set download speed limit
- Cache statistics
- `/api/performance/cache/stats`: Cache stats
- Memory management
- `/api/performance/memory/stats`: Memory usage stats
- `/api/performance/memory/gc`: Force garbage collection
- Download queue management
- `/api/performance/downloads/tasks`: List download tasks
- `/api/performance/downloads/add-task`: Add download task
- `/api/performance/resume/tasks`: List resumable tasks
## 7. Diagnostics & Logging
- Diagnostic report generation
- `/diagnostics/report`: Generate diagnostic report
- Error reporting and stats
- Logging configuration and log file management
## 8. Integrations
- API key management
- Webhook configuration
- Third-party API integrations
## 9. User Preferences & UI
- Theme management (light/dark/auto)
- Language selection
- Accessibility features (screen reader, color contrast, mobile support)
- Keyboard shortcuts
- UI density and grid/list view options
## 10. CLI Tool
- Series scanning and management
- Search, download, rescan, display series
- Progress bar for downloads
- Retry logic for operations
## 11. Miscellaneous
- Environment configuration via `.env`
- Modular, extensible architecture (MVC, Clean Architecture)
- Automated testing (pytest, unittest)
- Centralized error handling
## Authentication & Setup Flow
### Application Initialization Flow
- **Setup Page**: Display application setup page when the application is run for the first time and no configuration exists
- Check for presence of configuration file/database setup
- Guide user through initial application configuration
- Set up database connections, initial admin user, and core settings
- Mark setup as completed in configuration
- **Authentication Gate**: Redirect to authentication page when user token is invalid or missing
- Validate existing authentication tokens
- Display login/registration interface for unauthenticated users
- Handle token refresh and session management
- Redirect authenticated users to main application
- **Main Application**: Show index.html for authenticated users with valid tokens
- Display main application interface
- Provide access to all authenticated user features
- Maintain session state and handle token expiration gracefully
### User Flow Priority
1. Check if application setup is completed → Show setup page if not
2. Check if user is authenticated → Show auth page if not
3. Show main application (index.html) for authenticated users
---
**Note:** Each feature is implemented via modular controllers, services, and utilities. See the respective source files for detailed function/class definitions.
# Aniworld Web Application Features
## Authentication & Security
- **Master Password Login**: Secure access to the application with a master password system
## Configuration Management
- **Setup Page**: Initial configuration interface for server setup and basic settings
- **Config Page**: View and modify application configuration settings
## User Interface
- **Dark Mode**: Toggle between light and dark themes for better user experience
## Anime Management
- **Anime Library Page**: Display list of anime series with missing episodes
- **Series Selection**: Select individual anime series and add episodes to download queue
- **Anime Search Page**: Search functionality to find and add new anime series to the library
## Download Management
- **Download Queue Page**: View and manage the current download queue
- **Download Status Display**: Real-time status updates and progress of current downloads
- **Queue Operations**: Add, remove, and prioritize items in the download queue
## Core Functionality Overview
The web application provides a complete interface for managing anime downloads with user-friendly pages for configuration, library management, search capabilities, and download monitoring.

136
infrastructure.md Normal file
View File

@ -0,0 +1,136 @@
# Aniworld Web Application Infrastructure
conda activate AniWorld
## Project Structure
```
/home/lukas/Volume/repo/Aniworld/
├── src/
│ ├── server/ # FastAPI web application
│ │ ├── main.py # FastAPI application entry point
│ │ ├── api/ # API route handlers
│ │ │ ├── __init__.py
│ │ │ ├── auth.py # Authentication endpoints
│ │ │ ├── config.py # Configuration endpoints
│ │ │ ├── anime.py # Anime management endpoints
│ │ │ ├── download.py # Download queue endpoints
│ │ │ └── search.py # Search endpoints
│ │ ├── models/ # Pydantic models
│ │ │ ├── __init__.py
│ │ │ ├── auth.py
│ │ │ ├── config.py
│ │ │ ├── anime.py
│ │ │ └── download.py
│ │ ├── services/ # Business logic services
│ │ │ ├── __init__.py
│ │ │ ├── auth_service.py
│ │ │ ├── config_service.py
│ │ │ ├── anime_service.py
│ │ │ └── download_service.py
│ │ ├── static/ # Static web assets
│ │ │ ├── css/
│ │ │ ├── js/
│ │ │ └── images/
│ │ ├── templates/ # Jinja2 HTML templates
│ │ │ ├── base.html
│ │ │ ├── login.html
│ │ │ ├── setup.html
│ │ │ ├── config.html
│ │ │ ├── anime.html
│ │ │ ├── download.html
│ │ │ └── search.html
│ │ └── utils/ # Utility functions
│ │ ├── __init__.py
│ │ ├── security.py
│ │ └── dependencies.py
│ ├── core/ # Existing core functionality
│ └── cli/ # Existing CLI application
├── data/ # Application data storage
│ ├── config.json # Application configuration
│ ├── anime_library.db # SQLite database for anime library
│ ├── download_queue.json # Download queue state
│ └── cache/ # Temporary cache files
├── logs/ # Application logs
│ ├── app.log # Main application log
│ ├── download.log # Download-specific logs
│ └── error.log # Error logs
├── requirements.txt # Python dependencies
├── docker-compose.yml # Docker deployment configuration
└── README.md
```
## Technology Stack
### Backend
- **FastAPI**: Modern Python web framework for building APIs
- **Uvicorn**: ASGI server for running FastAPI applications
- **SQLite**: Lightweight database for storing anime library and configuration
- **Pydantic**: Data validation and serialization
- **Jinja2**: Template engine for server-side rendering
### Frontend
- **HTML5/CSS3**: Core web technologies
- **JavaScript (Vanilla)**: Client-side interactivity
- **Bootstrap 5**: CSS framework for responsive design
- **HTMX**: Modern approach for dynamic web applications
### Security
- **Passlib**: Password hashing and verification
- **python-jose**: JWT token handling
- **bcrypt**: Secure password hashing
## Configuration
### Data Storage
- **Configuration**: JSON files in `data/` directory
- **Anime Library**: SQLite database with series information
- **Download Queue**: JSON file with current download status
- **Logs**: Structured logging to files in `logs/` directory
## API Endpoints
### Authentication
- `POST /api/auth/login` - Master password authentication
- `POST /api/auth/logout` - Logout and invalidate session
- `GET /api/auth/status` - Check authentication status
### Configuration
- `GET /api/config` - Get current configuration
- `PUT /api/config` - Update configuration
- `POST /api/setup` - Initial setup
### Anime Management
- `GET /api/anime` - List anime with missing episodes
- `POST /api/anime/{id}/download` - Add episodes to download queue
- `GET /api/anime/{id}` - Get anime details
### Download Management
- `GET /api/downloads` - Get download queue status
- `DELETE /api/downloads/{id}` - Remove from queue
- `POST /api/downloads/priority` - Change download priority
### Search
- `GET /api/search?q={query}` - Search for anime
- `POST /api/search/add` - Add anime to library
## Logging
### Log Levels
- **INFO**: General application information
- **WARNING**: Potential issues that don't stop execution
- **ERROR**: Errors that affect functionality
- **DEBUG**: Detailed debugging information (development only)
### Log Files
- `app.log`: General application logs
- `download.log`: Download-specific operations
- `error.log`: Error and exception logs
## Security Considerations
- Master password protection for application access
- Secure session management with JWT tokens
- Input validation and sanitization
- Rate limiting on API endpoints
- HTTPS enforcement in production
- Secure file path handling to prevent directory traversal

View File

@ -1,100 +0,0 @@
# AniWorld FastAPI Refactoring Instructions
This guide details the steps for refactoring `fastapi_app.py` to improve modularity and maintainability, following project and Copilot conventions.
---
## 1. Move Settings to `src/config/settings.py`
**Goal:**
Centralize configuration logic.
**Steps:**
3. Update all imports in `fastapi_app.py` and other files to:
```python
from src.config.settings import settings
```
4. Ensure `.env` loading and all environment variable logic remains functional.
---
## 2. Move API Endpoint Code to Controllers
**Goal:**
Organize endpoints by concern.
**Steps:**
1. Create controller modules in `src/server/controllers/`:
- `auth_controller.py` (authentication endpoints)
- `anime_controller.py` (anime endpoints)
- `episode_controller.py` (episode endpoints)
- `system_controller.py` (system/config/health endpoints)
- `setup_controller.py` (setup endpoints)
2. In each controller, define a FastAPI `APIRouter` and move relevant endpoint functions from `fastapi_app.py`.
3. Import and include routers in `fastapi_app.py`:
```python
from src.server.controllers.auth_controller import router as auth_router
app.include_router(auth_router)
```
Repeat for other controllers.
4. Remove endpoint definitions from `fastapi_app.py` after moving.
5. Ensure all dependencies (e.g., `get_current_user`, models, utilities) are properly imported in controllers.
---
## 3. General Clean-Up
- Remove unused code/imports from `fastapi_app.py`.
- Confirm all endpoints are registered via routers.
- Test application startup and endpoint accessibility.
---
## 4. Coding Conventions
- Use type hints, docstrings, and PEP8/black formatting.
- Use dependency injection (`Depends`) and repository pattern.
- Validate data with Pydantic models.
- Centralize error handling.
- Do not hardcode secrets/configs; use `.env`.
---
**Summary:**
- `Settings``src/server/config/settings.py`
- Endpoints → `src/server/controllers/*_controller.py`
- Main app setup and router wiring remain in `fastapi_app.py`
---
## 5. Tests (new)
Add unit tests under `src/server/tests/`. Only write tests for:
- Settings loading/validation
- Controllers (basic router contracts, inclusion, and minimal runtime sanity)
Guidelines:
- Use `pytest`.
- Keep tests deterministic: mock env-vars with `monkeypatch`.
- Controllers tests should not rely on external services — use `fastapi.TestClient` and import controller `router` objects directly.
- When the exact setting fields or route paths are unknown, provide templates and TODOs. Update templates to match real field names / route names.
- Run tests with:
```
pytest -q src/server/tests
```
Example test templates to add (place and adapt as needed):
- settings test template: `src/server/tests/test_settings.py`
- Checks that `settings` is a Pydantic BaseSettings-like object and that environment variables affect values. Replace placeholder names with real fields from your `Settings` class.
- controllers test template: `src/server/tests/test_controllers.py`
- Imports controller modules, ensures each exposes a `router`, that the router has at least one route, and that routers can be included in a FastAPI app without raising.
Add these templates and update TODOs to match your actual codebase.

403
instructions.md Normal file
View File

@ -0,0 +1,403 @@
# Aniworld Web Application Development Instructions
This document provides detailed tasks for AI agents to implement a modern web application for the Aniworld anime download manager. All tasks should follow the coding guidelines specified in the project's copilot instructions.
## Project Overview
The goal is to create a FastAPI-based web application that provides a modern interface for the existing Aniworld anime download functionality. The core anime logic should remain in `SeriesApp.py` while the web layer provides REST API endpoints and a responsive UI.
## Architecture Principles
- **Single Responsibility**: Each file/class has one clear purpose
- **Dependency Injection**: Use FastAPI's dependency system
- **Clean Separation**: Web layer calls core logic, never the reverse
- **File Size Limit**: Maximum 500 lines per file
- **Type Hints**: Use comprehensive type annotations
- **Error Handling**: Proper exception handling and logging
## How you work
1. Take the next task
2. Process the task
3. Write tests.
4. Remove the task from instructions.md
5. Commit the changes in git
6. Go to step 1.
## Core Tasks
### 1. Project Structure Setup
#### [] Create main FastAPI application structure
- Create `src/server/main.py`
- Configure FastAPI app with CORS, middleware
- Set up static file serving for existing frontend assets
- Configure Jinja2 templates
- Add health check endpoint
#### [] Set up dependency injection system
- Create `src/server/utils/dependencies.py`
- Implement SeriesApp dependency injection
- Add database session dependency
- Create authentication dependency
#### [] Configure logging system
- Create `src/server/utils/logging.py`
- Set up structured logging with multiple handlers
- Configure log rotation and cleanup
- Add request/response logging middleware
### 2. Authentication System
#### [] Implement authentication models
- Create `src/server/models/auth.py`
- Define LoginRequest, LoginResponse models
- Add SetupRequest, AuthStatus models
- Include session management models
#### [] Create authentication service
- Create `src/server/services/auth_service.py`
- Implement master password setup/validation
- Add session management with JWT tokens
- Include failed attempt tracking and lockout
- Add password strength validation
#### [] Implement authentication API endpoints
- Create `src/server/api/auth.py`
- Add POST `/api/auth/setup` - initial setup
- Add POST `/api/auth/login` - login endpoint
- Add POST `/api/auth/logout` - logout endpoint
- Add GET `/api/auth/status` - authentication status
#### [] Create authentication middleware
- Create `src/server/middleware/auth.py`
- Implement JWT token validation
- Add request authentication checking
- Include rate limiting for auth endpoints
### 3. Configuration Management
#### [] Implement configuration models
- Create `src/server/models/config.py`
- Define ConfigResponse, ConfigUpdate models
- Add SchedulerConfig, LoggingConfig models
- Include ValidationResult model
#### [] Create configuration service
- Create `src/server/services/config_service.py`
- Implement configuration loading/saving
- Add configuration validation
- Include backup/restore functionality
- Add scheduler configuration management
#### [] Implement configuration API endpoints
- Create `src/server/api/config.py`
- Add GET `/api/config` - get configuration
- Add PUT `/api/config` - update configuration
- Add POST `/api/config/validate` - validate config
- Add GET/POST `/api/config/backup` - backup management
### 4. Anime Management Integration
#### [] Implement anime models
- Create `src/server/models/anime.py`
- Define AnimeSeriesResponse, EpisodeInfo models
- Add SearchRequest, SearchResult models
- Include MissingEpisodeInfo model
#### [] Create anime service wrapper
- Create `src/server/services/anime_service.py`
- Wrap SeriesApp functionality for web layer
- Implement async wrappers for blocking operations
- Add caching for frequently accessed data
- Include error handling and logging
#### [] Implement anime API endpoints
- Create `src/server/api/anime.py`
- Add GET `/api/v1/anime` - list series with missing episodes
- Add POST `/api/v1/anime/rescan` - trigger rescan
- Add POST `/api/v1/anime/search` - search for new anime
- Add GET `/api/v1/anime/{id}` - get series details
### 5. Download Queue Management
#### [] Implement download queue models
- Create `src/server/models/download.py`
- Define DownloadItem, QueueStatus models
- Add DownloadProgress, QueueStats models
- Include DownloadRequest model
#### [] Create download queue service
- Create `src/server/services/download_service.py`
- Implement queue management (add, remove, reorder)
- Add download progress tracking
- Include queue persistence and recovery
- Add concurrent download management
#### [] Implement download API endpoints
- Create `src/server/api/download.py`
- Add GET `/api/queue/status` - get queue status
- Add POST `/api/queue/add` - add to queue
- Add DELETE `/api/queue/{id}` - remove from queue
- Add POST `/api/queue/start` - start downloads
- Add POST `/api/queue/stop` - stop downloads
### 6. WebSocket Real-time Updates
#### [] Implement WebSocket manager
- Create `src/server/services/websocket_service.py`
- Add connection management
- Implement broadcast functionality
- Include room-based messaging
- Add connection cleanup
#### [] Add real-time progress updates
- Create `src/server/services/progress_service.py`
- Implement download progress broadcasting
- Add scan progress updates
- Include queue status changes
- Add error notifications
#### [] Integrate WebSocket with core services
- Update download service to emit progress
- Add scan progress notifications
- Include queue change broadcasts
- Add error/completion notifications
### 7. Frontend Integration
#### [] Integrate existing HTML templates
- Review and integrate existing HTML templates in `src/server/web/templates/`
- Ensure templates work with FastAPI Jinja2 setup
- Update template paths and static file references if needed
- Maintain existing responsive layout and theme switching
#### [] Integrate existing JavaScript functionality
- Review existing JavaScript files in `src/server/web/static/js/`
- Update API endpoint URLs to match FastAPI routes
- Ensure WebSocket connections work with new backend
- Maintain existing functionality for app.js and queue.js
#### [] Integrate existing CSS styling
- Review and integrate existing CSS files in `src/server/web/static/css/`
- Ensure styling works with FastAPI static file serving
- Maintain existing responsive design and theme support
- Update any hardcoded paths if necessary
#### [] Update frontend-backend integration
- Ensure existing JavaScript calls match new API endpoints
- Update authentication flow to work with new auth system
- Verify WebSocket events match new service implementations
- Test all existing UI functionality with new backend
### 8. Core Logic Integration
#### [] Enhance SeriesApp for web integration
- Update `src/core/SeriesApp.py`
- Add async callback support
- Implement progress reporting
- Include better error handling
- Add cancellation support
#### [] Create progress callback system
- Add progress callback interface
- Implement scan progress reporting
- Add download progress tracking
- Include error/completion callbacks
#### [] Add configuration persistence
- Implement configuration file management
- Add settings validation
- Include backup/restore functionality
- Add migration support for config updates
### 9. Database Layer
#### [] Implement database models
- Create `src/server/database/models.py`
- Add SQLAlchemy models for anime series
- Implement download queue persistence
- Include user session storage
#### [] Create database service
- Create `src/server/database/service.py`
- Add CRUD operations for anime data
- Implement queue persistence
- Include database migration support
#### [] Add database initialization
- Create `src/server/database/init.py`
- Implement database setup
- Add initial data migration
- Include schema validation
### 10. Testing
#### [] Create unit tests for services
- Create `tests/unit/test_auth_service.py`
- Create `tests/unit/test_anime_service.py`
- Create `tests/unit/test_download_service.py`
- Create `tests/unit/test_config_service.py`
#### [] Create API endpoint tests
- Create `tests/api/test_auth_endpoints.py`
- Create `tests/api/test_anime_endpoints.py`
- Create `tests/api/test_download_endpoints.py`
- Create `tests/api/test_config_endpoints.py`
#### [] Create integration tests
- Create `tests/integration/test_download_flow.py`
- Create `tests/integration/test_auth_flow.py`
- Create `tests/integration/test_websocket.py`
#### [] Create frontend integration tests
- Create `tests/frontend/test_existing_ui_integration.py`
- Test existing JavaScript functionality with new backend
- Verify WebSocket connections and real-time updates
- Test authentication flow with existing frontend
### 11. Deployment and Configuration
#### [] Create Docker configuration
- Create `Dockerfile`
- Create `docker-compose.yml`
- Add environment configuration
- Include volume mappings for existing web assets
#### [] Create production configuration
- Create `src/server/config/production.py`
- Add environment variable handling
- Include security settings
- Add performance optimizations
#### [] Create startup scripts
- Create `scripts/start.sh`
- Create `scripts/setup.py`
- Add dependency installation
- Include database initialization
### 12. Documentation and Error Handling
#### [] Create API documentation
- Add OpenAPI/Swagger documentation
- Include endpoint descriptions
- Add request/response examples
- Include authentication details
#### [] Implement comprehensive error handling
- Create custom exception classes
- Add error logging and tracking
- Implement user-friendly error messages
- Include error recovery mechanisms
#### [] Create user documentation
- Create `docs/user_guide.md`
- Add installation instructions
- Include configuration guide
- Add troubleshooting section
## File Size Guidelines
- **Models**: Max 200 lines each
- **Services**: Max 450 lines each
- **API Endpoints**: Max 350 lines each
- **Templates**: Max 400 lines each
- **JavaScript**: Max 500 lines each
- **CSS**: Max 500 lines each
- **Tests**: Max 400 lines each
## Existing Frontend Assets
The following frontend assets already exist and should be integrated:
- **Templates**: Located in `src/server/web/templates/`
- **JavaScript**: Located in `src/server/web/static/js/` (app.js, queue.js, etc.)
- **CSS**: Located in `src/server/web/static/css/`
- **Static Assets**: Images and other assets in `src/server/web/static/`
When working with these files:
- Review existing functionality before making changes
- Maintain existing UI/UX patterns and design
- Update API calls to match new FastAPI endpoints
- Preserve existing WebSocket event handling
- Keep existing theme and responsive design features
## Quality Assurance
#### [] Code quality checks
- Run linting with flake8/pylint
- Check type hints with mypy
- Validate formatting with black
- Run security checks with bandit
#### [] Performance testing
- Load test API endpoints
- Test WebSocket connection limits
- Validate download performance
- Check memory usage patterns
#### [] Security validation
- Test authentication bypass attempts
- Validate input sanitization
- Check for injection vulnerabilities
- Test session management security
## Implementation Order
1. Start with project structure and dependencies
2. Implement authentication system completely
3. Add configuration management
4. Create anime service wrapper
5. Implement download queue management
6. Add WebSocket real-time updates
7. Integrate existing frontend assets with new backend
8. Enhance core logic integration
9. Implement database layer
10. Create comprehensive tests
11. Add deployment configuration
12. Complete documentation
Each task should be implemented with proper error handling, logging, and type hints according to the project's coding standards.

File diff suppressed because it is too large Load Diff

View File

View File

View File

View File

@ -1,18 +0,0 @@
[tool:pytest]
testpaths = src/tests
python_files = test_*.py
python_classes = Test*
python_functions = test_*
addopts =
-v
--tb=short
--strict-markers
--disable-warnings
markers =
unit: Unit tests
integration: Integration tests
e2e: End-to-end tests
slow: Slow running tests
filterwarnings =
ignore::DeprecationWarning
ignore::PendingDeprecationWarning

Binary file not shown.

View File

@ -1,6 +0,0 @@
"""
Infrastructure package for the Aniworld server.
This package contains repository implementations, database connections,
caching, and other infrastructure concerns.
"""

View File

@ -1,916 +0,0 @@
"""
Database & Storage Management for AniWorld App
This module provides database schema management, data migration,
backup/restore functionality, and storage optimization.
"""
import os
import sqlite3
import json
import shutil
import time
import hashlib
import logging
import threading
import zipfile
import uuid
from datetime import datetime, timedelta
from typing import Dict, List, Optional, Any, Tuple
from dataclasses import dataclass, field
from contextlib import contextmanager
import glob
from pathlib import Path
@dataclass
class AnimeMetadata:
    """Represents anime metadata stored in database.

    Mirrors the columns of the ``anime_metadata`` table created by
    migration 001; the list/dict fields are persisted as JSON text there.
    """

    # Primary key of the series record.
    anime_id: str
    # Display name of the series.
    name: str
    # Filesystem folder holding the series (UNIQUE in the database).
    folder: str
    # External identifier for the series — presumably the provider key;
    # TODO confirm against the code that populates it.
    key: Optional[str] = None
    description: Optional[str] = None
    # Genre labels; stored as a JSON array column.
    genres: List[str] = field(default_factory=list)
    release_year: Optional[int] = None
    # Lifecycle state of the series.
    status: str = 'ongoing'  # ongoing, completed, cancelled
    total_episodes: Optional[int] = None
    poster_url: Optional[str] = None
    # Both timestamps default to object-creation time.
    last_updated: datetime = field(default_factory=datetime.now)
    created_at: datetime = field(default_factory=datetime.now)
    # Free-form extra metadata; stored as a JSON object column.
    custom_metadata: Dict[str, Any] = field(default_factory=dict)
@dataclass
class EpisodeMetadata:
    """Represents episode metadata stored in database.

    Mirrors the ``episode_metadata`` table (migration 002), which is
    unique per (anime_id, season, episode, language).
    """

    # Primary key of the episode record.
    episode_id: str
    # Foreign key into anime_metadata.anime_id.
    anime_id: str
    season: int
    episode: int
    title: Optional[str] = None
    description: Optional[str] = None
    duration_seconds: Optional[int] = None
    # Local file details — presumably populated once the episode has been
    # downloaded (is_downloaded True); verify against the writer code.
    file_path: Optional[str] = None
    file_size_bytes: Optional[int] = None
    download_date: Optional[datetime] = None
    # Watch-tracking fields.
    last_watched: Optional[datetime] = None
    watch_count: int = 0
    is_downloaded: bool = False
    # Quality label; format is not constrained here.
    quality: Optional[str] = None
    language: str = 'German Dub'
@dataclass
class BackupInfo:
    """Represents backup metadata (one record per backup artifact)."""

    # Unique identifier of the backup.
    backup_id: str
    # Filesystem path of the backup artifact.
    backup_path: str
    # Kind of backup taken.
    backup_type: str  # full, incremental, metadata_only
    created_at: datetime
    size_bytes: int
    description: Optional[str] = None
    # Names of the database tables included in this backup.
    tables_included: List[str] = field(default_factory=list)
    # Integrity checksum of the backup artifact, if computed.
    checksum: Optional[str] = None
class DatabaseManager:
"""Manage SQLite database with migrations and maintenance."""
def __init__(self, db_path: str = "./data/aniworld.db"):
self.db_path = db_path
self.db_dir = os.path.dirname(db_path)
self.logger = logging.getLogger(__name__)
self.lock = threading.Lock()
# Create database directory
os.makedirs(self.db_dir, exist_ok=True)
# Initialize database
self.initialize_database()
# Run migrations
self.run_migrations()
@contextmanager
def get_connection(self):
"""Get database connection with proper error handling."""
conn = None
try:
conn = sqlite3.connect(self.db_path, timeout=30)
conn.row_factory = sqlite3.Row # Enable dict-like access
yield conn
except Exception as e:
if conn:
conn.rollback()
self.logger.error(f"Database connection error: {e}")
raise
finally:
if conn:
conn.close()
def initialize_database(self):
"""Initialize database with base schema."""
with self.get_connection() as conn:
# Create schema version table
conn.execute("""
CREATE TABLE IF NOT EXISTS schema_version (
version INTEGER PRIMARY KEY,
applied_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
description TEXT
)
""")
# Insert initial version if not exists
conn.execute("""
INSERT OR IGNORE INTO schema_version (version, description)
VALUES (0, 'Initial schema')
""")
conn.commit()
def get_current_version(self) -> int:
"""Get current database schema version."""
with self.get_connection() as conn:
cursor = conn.execute("SELECT MAX(version) FROM schema_version")
result = cursor.fetchone()
return result[0] if result and result[0] is not None else 0
def run_migrations(self):
"""Run database migrations."""
current_version = self.get_current_version()
migrations = self.get_migrations()
for version, migration in migrations.items():
if version > current_version:
self.logger.info(f"Running migration to version {version}")
try:
with self.get_connection() as conn:
migration['up'](conn)
# Record migration
conn.execute("""
INSERT INTO schema_version (version, description)
VALUES (?, ?)
""", (version, migration['description']))
conn.commit()
self.logger.info(f"Migration to version {version} completed")
except Exception as e:
self.logger.error(f"Migration to version {version} failed: {e}")
raise
def get_migrations(self) -> Dict[int, Dict[str, Any]]:
"""Define database migrations."""
return {
1: {
'description': 'Create anime metadata table',
'up': self._migration_001_anime_table
},
2: {
'description': 'Create episode metadata table',
'up': self._migration_002_episode_table
},
3: {
'description': 'Create download history table',
'up': self._migration_003_download_history
},
4: {
'description': 'Create user preferences table',
'up': self._migration_004_user_preferences
},
5: {
'description': 'Create storage locations table',
'up': self._migration_005_storage_locations
},
6: {
'description': 'Add indexes for performance',
'up': self._migration_006_indexes
}
}
    def _migration_001_anime_table(self, conn: sqlite3.Connection) -> None:
        """Create anime metadata table.

        One row per series. ``folder`` is UNIQUE so a filesystem folder
        maps to at most one series; ``genres`` and ``custom_metadata``
        store JSON text (see AnimeMetadata).
        """
        conn.execute("""
            CREATE TABLE anime_metadata (
                anime_id TEXT PRIMARY KEY,
                name TEXT NOT NULL,
                folder TEXT NOT NULL UNIQUE,
                key TEXT,
                description TEXT,
                genres TEXT, -- JSON array
                release_year INTEGER,
                status TEXT DEFAULT 'ongoing',
                total_episodes INTEGER,
                poster_url TEXT,
                last_updated TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                custom_metadata TEXT -- JSON object
            )
        """)
    def _migration_002_episode_table(self, conn: sqlite3.Connection) -> None:
        """Create episode metadata table.

        One row per episode per language, enforced by the UNIQUE
        constraint on (anime_id, season, episode, language). References
        ``anime_metadata`` via anime_id.
        """
        conn.execute("""
            CREATE TABLE episode_metadata (
                episode_id TEXT PRIMARY KEY,
                anime_id TEXT NOT NULL,
                season INTEGER NOT NULL,
                episode INTEGER NOT NULL,
                title TEXT,
                description TEXT,
                duration_seconds INTEGER,
                file_path TEXT,
                file_size_bytes INTEGER,
                download_date TIMESTAMP,
                last_watched TIMESTAMP,
                watch_count INTEGER DEFAULT 0,
                is_downloaded BOOLEAN DEFAULT FALSE,
                quality TEXT,
                language TEXT DEFAULT 'German Dub',
                FOREIGN KEY (anime_id) REFERENCES anime_metadata(anime_id),
                UNIQUE(anime_id, season, episode, language)
            )
        """)
def _migration_003_download_history(self, conn: sqlite3.Connection):
    """Migration 3: create the download history/audit table.

    Records one row per download attempt, including timing, status
    transitions (started/completed/failed/cancelled), speed and retries.
    """
    conn.execute("""
        CREATE TABLE download_history (
            download_id TEXT PRIMARY KEY,
            anime_id TEXT NOT NULL,
            season INTEGER NOT NULL,
            episode INTEGER NOT NULL,
            language TEXT NOT NULL,
            download_started TIMESTAMP NOT NULL,
            download_completed TIMESTAMP,
            download_status TEXT NOT NULL, -- started, completed, failed, cancelled
            file_size_bytes INTEGER,
            download_speed_mbps REAL,
            error_message TEXT,
            retry_count INTEGER DEFAULT 0,
            FOREIGN KEY (anime_id) REFERENCES anime_metadata(anime_id)
        )
    """)
def _migration_004_user_preferences(self, conn: sqlite3.Connection):
    """Migration 4: create a simple key/value preferences store.

    'value' holds JSON-encoded text; 'category' groups related settings.
    """
    conn.execute("""
        CREATE TABLE user_preferences (
            key TEXT PRIMARY KEY,
            value TEXT NOT NULL, -- JSON value
            category TEXT NOT NULL,
            description TEXT,
            updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
        )
    """)
def _migration_005_storage_locations(self, conn: sqlite3.Connection):
    """Migration 5: create the storage locations table.

    Tracks filesystem paths used for downloads (primary/backup/cache)
    together with cached disk-usage figures refreshed by StorageManager.
    anime_id is nullable: a location may be global rather than per-series.
    """
    conn.execute("""
        CREATE TABLE storage_locations (
            location_id TEXT PRIMARY KEY,
            anime_id TEXT,
            path TEXT NOT NULL,
            location_type TEXT NOT NULL, -- primary, backup, cache
            is_active BOOLEAN DEFAULT TRUE,
            free_space_bytes INTEGER,
            total_space_bytes INTEGER,
            last_checked TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
            FOREIGN KEY (anime_id) REFERENCES anime_metadata(anime_id)
        )
    """)
def _migration_006_indexes(self, conn: sqlite3.Connection):
"""Add indexes for performance."""
indexes = [
"CREATE INDEX idx_anime_name ON anime_metadata(name)",
"CREATE INDEX idx_anime_folder ON anime_metadata(folder)",
"CREATE INDEX idx_anime_status ON anime_metadata(status)",
"CREATE INDEX idx_episode_anime_id ON episode_metadata(anime_id)",
"CREATE INDEX idx_episode_season_episode ON episode_metadata(season, episode)",
"CREATE INDEX idx_episode_downloaded ON episode_metadata(is_downloaded)",
"CREATE INDEX idx_download_status ON download_history(download_status)",
"CREATE INDEX idx_download_date ON download_history(download_started)",
"CREATE INDEX idx_storage_active ON storage_locations(is_active)",
"CREATE INDEX idx_storage_type ON storage_locations(location_type)"
]
for index_sql in indexes:
try:
conn.execute(index_sql)
except sqlite3.OperationalError as e:
if "already exists" not in str(e):
raise
def execute_query(self, query: str, params: tuple = ()) -> List[sqlite3.Row]:
    """Run a SELECT statement and return all matching rows.

    Rows come back as sqlite3.Row objects (column access by name),
    assuming get_connection() configures a row factory elsewhere.
    """
    with self.get_connection() as conn:
        return conn.execute(query, params).fetchall()
def execute_update(self, query: str, params: tuple = ()) -> int:
    """Run an INSERT/UPDATE/DELETE, commit, and return the affected row count."""
    with self.get_connection() as conn:
        affected = conn.execute(query, params).rowcount
        conn.commit()
        return affected
class AnimeRepository:
    """Repository for anime data operations.

    Thin data-access layer over DatabaseManager.  Every public method
    logs-and-swallows its own errors and returns a plain value
    (bool / list / Optional[AnimeMetadata]) instead of raising, so
    callers can treat failures as "no data".
    """

    def __init__(self, db_manager: DatabaseManager):
        self.db = db_manager
        self.logger = logging.getLogger(__name__)

    def create_anime(self, metadata: AnimeMetadata) -> bool:
        """Insert a new anime row; returns True when a row was written."""
        try:
            query = """
                INSERT INTO anime_metadata (
                    anime_id, name, folder, key, description, genres,
                    release_year, status, total_episodes, poster_url,
                    custom_metadata
                ) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
            """
            # genres / custom_metadata are JSON text columns (see migration 1).
            params = (
                metadata.anime_id,
                metadata.name,
                metadata.folder,
                metadata.key,
                metadata.description,
                json.dumps(metadata.genres),
                metadata.release_year,
                metadata.status,
                metadata.total_episodes,
                metadata.poster_url,
                json.dumps(metadata.custom_metadata)
            )
            rows_affected = self.db.execute_update(query, params)
            return rows_affected > 0
        except Exception as e:
            self.logger.error(f"Failed to create anime {metadata.name}: {e}")
            return False

    def get_anime_by_folder(self, folder: str) -> Optional[AnimeMetadata]:
        """Look up a single anime by its unique on-disk folder name."""
        try:
            query = """
                SELECT * FROM anime_metadata WHERE folder = ?
            """
            results = self.db.execute_query(query, (folder,))
            if results:
                return self._row_to_anime_metadata(results[0])
            return None
        except Exception as e:
            self.logger.error(f"Failed to get anime by folder {folder}: {e}")
            return None

    def get_all_anime(self, status_filter: Optional[str] = None) -> List[AnimeMetadata]:
        """Return all anime ordered by name, optionally filtered by status."""
        try:
            if status_filter:
                query = "SELECT * FROM anime_metadata WHERE status = ? ORDER BY name"
                params = (status_filter,)
            else:
                query = "SELECT * FROM anime_metadata ORDER BY name"
                params = ()
            results = self.db.execute_query(query, params)
            return [self._row_to_anime_metadata(row) for row in results]
        except Exception as e:
            self.logger.error(f"Failed to get all anime: {e}")
            return []

    def update_anime(self, metadata: AnimeMetadata) -> bool:
        """Update an existing anime row (matched by anime_id).

        Note: 'folder' is deliberately not updatable here since it maps
        to the on-disk directory; last_updated is refreshed server-side.
        """
        try:
            query = """
                UPDATE anime_metadata SET
                    name = ?, key = ?, description = ?, genres = ?,
                    release_year = ?, status = ?, total_episodes = ?,
                    poster_url = ?, last_updated = CURRENT_TIMESTAMP,
                    custom_metadata = ?
                WHERE anime_id = ?
            """
            params = (
                metadata.name,
                metadata.key,
                metadata.description,
                json.dumps(metadata.genres),
                metadata.release_year,
                metadata.status,
                metadata.total_episodes,
                metadata.poster_url,
                json.dumps(metadata.custom_metadata),
                metadata.anime_id
            )
            rows_affected = self.db.execute_update(query, params)
            return rows_affected > 0
        except Exception as e:
            self.logger.error(f"Failed to update anime {metadata.anime_id}: {e}")
            return False

    def delete_anime(self, anime_id: str) -> bool:
        """Delete an anime together with its related rows.

        Fix: all four DELETEs now run inside one transaction on a single
        connection.  Previously each statement went through execute_update
        (which commits per call), so a failure midway could leave the
        anime partially deleted with dangling episode/history rows.
        Returns True if the anime row itself existed and was removed.
        """
        try:
            with self.db.get_connection() as conn:
                # Children first to satisfy the FOREIGN KEY constraints.
                conn.execute("DELETE FROM episode_metadata WHERE anime_id = ?", (anime_id,))
                conn.execute("DELETE FROM download_history WHERE anime_id = ?", (anime_id,))
                conn.execute("DELETE FROM storage_locations WHERE anime_id = ?", (anime_id,))
                cursor = conn.execute("DELETE FROM anime_metadata WHERE anime_id = ?", (anime_id,))
                conn.commit()
                return cursor.rowcount > 0
        except Exception as e:
            self.logger.error(f"Failed to delete anime {anime_id}: {e}")
            return False

    def search_anime(self, search_term: str) -> List[AnimeMetadata]:
        """Search anime by name or description (case per SQLite LIKE rules).

        NOTE(review): '%' and '_' in search_term act as LIKE wildcards and
        are not escaped — presumably acceptable for this UI; confirm.
        """
        try:
            query = """
                SELECT * FROM anime_metadata
                WHERE name LIKE ? OR description LIKE ?
                ORDER BY name
            """
            search_pattern = f"%{search_term}%"
            results = self.db.execute_query(query, (search_pattern, search_pattern))
            return [self._row_to_anime_metadata(row) for row in results]
        except Exception as e:
            self.logger.error(f"Failed to search anime: {e}")
            return []

    def _row_to_anime_metadata(self, row: sqlite3.Row) -> AnimeMetadata:
        """Convert a database row into an AnimeMetadata object.

        JSON columns fall back to empty list/dict; missing timestamps
        fall back to 'now' rather than None.
        """
        return AnimeMetadata(
            anime_id=row['anime_id'],
            name=row['name'],
            folder=row['folder'],
            key=row['key'],
            description=row['description'],
            genres=json.loads(row['genres'] or '[]'),
            release_year=row['release_year'],
            status=row['status'],
            total_episodes=row['total_episodes'],
            poster_url=row['poster_url'],
            last_updated=datetime.fromisoformat(row['last_updated']) if row['last_updated'] else datetime.now(),
            created_at=datetime.fromisoformat(row['created_at']) if row['created_at'] else datetime.now(),
            custom_metadata=json.loads(row['custom_metadata'] or '{}')
        )
class BackupManager:
    """Manage database backups and restore operations.

    Two backup flavours are produced into *backup_dir*:
      * 'full'          -- byte-for-byte copy of the SQLite database file.
      * 'metadata_only' -- JSON export of selected metadata tables.
    Every backup gets a sidecar '<backup file>.backup_info.json' holding
    its metadata, including a SHA256 checksum that is verified before any
    restore.
    """

    def __init__(self, db_manager: DatabaseManager, backup_dir: str = "./backups"):
        # db_manager provides both the db_path (for file copies) and
        # connections (for table listing / JSON export).
        self.db = db_manager
        self.backup_dir = backup_dir
        self.logger = logging.getLogger(__name__)
        # Create backup directory
        os.makedirs(backup_dir, exist_ok=True)

    def create_full_backup(self, description: Optional[str] = None) -> Optional[BackupInfo]:
        """Create a full database backup (file copy).

        Returns the BackupInfo on success, None on failure.
        NOTE(review): this copies the db file directly — assumes no
        concurrent writer, otherwise the snapshot may be inconsistent.
        """
        try:
            backup_id = f"full_{datetime.now().strftime('%Y%m%d_%H%M%S')}"
            backup_filename = f"{backup_id}.db"
            backup_path = os.path.join(self.backup_dir, backup_filename)
            # Copy database file (copy2 preserves timestamps/metadata)
            shutil.copy2(self.db.db_path, backup_path)
            # Calculate checksum of the copy for later integrity checks
            checksum = self._calculate_file_checksum(backup_path)
            # Get file size
            size_bytes = os.path.getsize(backup_path)
            # Get table list (recorded purely as descriptive metadata)
            with self.db.get_connection() as conn:
                cursor = conn.execute("SELECT name FROM sqlite_master WHERE type='table'")
                tables = [row[0] for row in cursor.fetchall()]
            backup_info = BackupInfo(
                backup_id=backup_id,
                backup_path=backup_path,
                backup_type='full',
                created_at=datetime.now(),
                size_bytes=size_bytes,
                description=description or f"Full backup created on {datetime.now().strftime('%Y-%m-%d %H:%M')}",
                tables_included=tables,
                checksum=checksum
            )
            # Save backup metadata (sidecar .backup_info.json file)
            self._save_backup_metadata(backup_info)
            self.logger.info(f"Full backup created: {backup_id}")
            return backup_info
        except Exception as e:
            self.logger.error(f"Failed to create full backup: {e}")
            return None

    def create_metadata_backup(self, description: Optional[str] = None) -> Optional[BackupInfo]:
        """Create a metadata-only backup (JSON export, no binary data).

        Returns the BackupInfo on success, None on failure.
        """
        try:
            backup_id = f"metadata_{datetime.now().strftime('%Y%m%d_%H%M%S')}"
            backup_filename = f"{backup_id}.json"
            backup_path = os.path.join(self.backup_dir, backup_filename)
            # Export metadata as JSON (default=str stringifies timestamps)
            metadata = self._export_metadata()
            with open(backup_path, 'w', encoding='utf-8') as f:
                json.dump(metadata, f, indent=2, default=str)
            # Calculate checksum
            checksum = self._calculate_file_checksum(backup_path)
            # Get file size
            size_bytes = os.path.getsize(backup_path)
            backup_info = BackupInfo(
                backup_id=backup_id,
                backup_path=backup_path,
                backup_type='metadata_only',
                created_at=datetime.now(),
                size_bytes=size_bytes,
                description=description or f"Metadata backup created on {datetime.now().strftime('%Y-%m-%d %H:%M')}",
                tables_included=['anime_metadata', 'episode_metadata', 'user_preferences'],
                checksum=checksum
            )
            # Save backup metadata
            self._save_backup_metadata(backup_info)
            self.logger.info(f"Metadata backup created: {backup_id}")
            return backup_info
        except Exception as e:
            self.logger.error(f"Failed to create metadata backup: {e}")
            return None

    def restore_backup(self, backup_id: str) -> bool:
        """Restore from a backup after verifying its checksum.

        A safety backup of the current database is taken first so the
        pre-restore state can be recovered manually if needed.
        Returns True on success, False on any failure.
        """
        try:
            backup_info = self._load_backup_metadata(backup_id)
            if not backup_info:
                self.logger.error(f"Backup not found: {backup_id}")
                return False
            if not os.path.exists(backup_info.backup_path):
                self.logger.error(f"Backup file not found: {backup_info.backup_path}")
                return False
            # Verify backup integrity (SHA256 checksum comparison)
            if not self._verify_backup_integrity(backup_info):
                self.logger.error(f"Backup integrity check failed: {backup_id}")
                return False
            # Create a backup of current database before restore; the
            # return value is intentionally unused — the file on disk is
            # the safety net.
            current_backup = self.create_full_backup(f"Pre-restore backup before restoring {backup_id}")
            if backup_info.backup_type == 'full':
                # Replace database file wholesale
                shutil.copy2(backup_info.backup_path, self.db.db_path)
            elif backup_info.backup_type == 'metadata_only':
                # Restore metadata from JSON (clears and re-inserts tables)
                with open(backup_info.backup_path, 'r', encoding='utf-8') as f:
                    metadata = json.load(f)
                self._import_metadata(metadata)
            self.logger.info(f"Backup restored successfully: {backup_id}")
            return True
        except Exception as e:
            self.logger.error(f"Failed to restore backup {backup_id}: {e}")
            return False

    def list_backups(self) -> List[BackupInfo]:
        """List all available backups, newest first.

        Unreadable sidecar files are skipped with a warning rather than
        aborting the listing.
        """
        backups = []
        try:
            # Look for backup metadata sidecar files
            metadata_pattern = os.path.join(self.backup_dir, "*.backup_info.json")
            for metadata_file in glob.glob(metadata_pattern):
                try:
                    with open(metadata_file, 'r') as f:
                        backup_data = json.load(f)
                    backup_info = BackupInfo(
                        backup_id=backup_data['backup_id'],
                        backup_path=backup_data['backup_path'],
                        backup_type=backup_data['backup_type'],
                        created_at=datetime.fromisoformat(backup_data['created_at']),
                        size_bytes=backup_data['size_bytes'],
                        description=backup_data.get('description'),
                        tables_included=backup_data.get('tables_included', []),
                        checksum=backup_data.get('checksum')
                    )
                    backups.append(backup_info)
                except Exception as e:
                    self.logger.warning(f"Failed to load backup metadata from {metadata_file}: {e}")
            # Sort by creation date (newest first)
            backups.sort(key=lambda b: b.created_at, reverse=True)
        except Exception as e:
            self.logger.error(f"Failed to list backups: {e}")
        return backups

    def cleanup_old_backups(self, keep_days: int = 30, keep_count: int = 10):
        """Remove backups older than *keep_days*, always retaining the
        *keep_count* newest regardless of age.

        Both the backup file and its sidecar metadata file are deleted.
        """
        try:
            backups = self.list_backups()
            cutoff_date = datetime.now() - timedelta(days=keep_days)
            # list_backups() is newest-first, so index >= keep_count means
            # the backup is beyond the retention count.
            backups_to_delete = []
            for i, backup in enumerate(backups):
                if i >= keep_count and backup.created_at < cutoff_date:
                    backups_to_delete.append(backup)
            for backup in backups_to_delete:
                try:
                    # Remove backup file
                    if os.path.exists(backup.backup_path):
                        os.remove(backup.backup_path)
                    # Remove metadata sidecar file
                    metadata_file = f"{backup.backup_path}.backup_info.json"
                    if os.path.exists(metadata_file):
                        os.remove(metadata_file)
                    self.logger.info(f"Removed old backup: {backup.backup_id}")
                except Exception as e:
                    self.logger.warning(f"Failed to remove backup {backup.backup_id}: {e}")
            if backups_to_delete:
                self.logger.info(f"Cleaned up {len(backups_to_delete)} old backups")
        except Exception as e:
            self.logger.error(f"Failed to cleanup old backups: {e}")

    def _export_metadata(self) -> Dict[str, Any]:
        """Export metadata tables to a plain dictionary for JSON dumping."""
        metadata = {
            'export_date': datetime.now().isoformat(),
            'schema_version': self.db.get_current_version(),
            'tables': {}
        }
        # Export specific tables; failures per table are logged and skipped.
        tables_to_export = ['anime_metadata', 'episode_metadata', 'user_preferences', 'storage_locations']
        with self.db.get_connection() as conn:
            for table in tables_to_export:
                try:
                    cursor = conn.execute(f"SELECT * FROM {table}")
                    rows = cursor.fetchall()
                    # Convert sqlite3.Row objects to dictionaries
                    metadata['tables'][table] = [dict(row) for row in rows]
                except Exception as e:
                    self.logger.warning(f"Failed to export table {table}: {e}")
        return metadata

    def _import_metadata(self, metadata: Dict[str, Any]):
        """Import metadata from a dictionary, replacing existing table data.

        NOTE(review): table and column names are interpolated into SQL
        straight from the backup file — only restore backups from trusted
        sources.
        """
        with self.db.get_connection() as conn:
            for table_name, rows in metadata.get('tables', {}).items():
                if not rows:
                    continue
                try:
                    # Clear existing data (destructive — relies on the
                    # pre-restore safety backup taken by restore_backup)
                    conn.execute(f"DELETE FROM {table_name}")
                    # Insert new data; column list comes from the first row
                    if rows:
                        columns = list(rows[0].keys())
                        placeholders = ','.join(['?' for _ in columns])
                        insert_sql = f"INSERT INTO {table_name} ({','.join(columns)}) VALUES ({placeholders})"
                        for row in rows:
                            values = [row[col] for col in columns]
                            conn.execute(insert_sql, values)
                    conn.commit()
                    self.logger.info(f"Imported {len(rows)} rows to {table_name}")
                except Exception as e:
                    self.logger.error(f"Failed to import table {table_name}: {e}")
                    conn.rollback()
                    raise

    def _calculate_file_checksum(self, file_path: str) -> str:
        """Calculate the SHA256 checksum of a file, streamed in 4 KiB chunks."""
        hash_sha256 = hashlib.sha256()
        with open(file_path, 'rb') as f:
            for chunk in iter(lambda: f.read(4096), b""):
                hash_sha256.update(chunk)
        return hash_sha256.hexdigest()

    def _verify_backup_integrity(self, backup_info: BackupInfo) -> bool:
        """Verify a backup file against its recorded checksum.

        Backups recorded without a checksum pass trivially.
        """
        if not backup_info.checksum:
            return True  # No checksum to verify
        current_checksum = self._calculate_file_checksum(backup_info.backup_path)
        return current_checksum == backup_info.checksum

    def _save_backup_metadata(self, backup_info: BackupInfo):
        """Write the sidecar '<backup file>.backup_info.json' metadata file."""
        metadata_file = f"{backup_info.backup_path}.backup_info.json"
        metadata = {
            'backup_id': backup_info.backup_id,
            'backup_path': backup_info.backup_path,
            'backup_type': backup_info.backup_type,
            'created_at': backup_info.created_at.isoformat(),
            'size_bytes': backup_info.size_bytes,
            'description': backup_info.description,
            'tables_included': backup_info.tables_included,
            'checksum': backup_info.checksum
        }
        with open(metadata_file, 'w') as f:
            json.dump(metadata, f, indent=2)

    def _load_backup_metadata(self, backup_id: str) -> Optional[BackupInfo]:
        """Load a backup's sidecar metadata by backup id.

        The glob matches '<id>.db.backup_info.json' (full backups) and
        '<id>.json.backup_info.json' (metadata backups).
        """
        # Look for metadata file
        metadata_pattern = os.path.join(self.backup_dir, f"{backup_id}.*.backup_info.json")
        metadata_files = glob.glob(metadata_pattern)
        if not metadata_files:
            return None
        try:
            with open(metadata_files[0], 'r') as f:
                backup_data = json.load(f)
            return BackupInfo(
                backup_id=backup_data['backup_id'],
                backup_path=backup_data['backup_path'],
                backup_type=backup_data['backup_type'],
                created_at=datetime.fromisoformat(backup_data['created_at']),
                size_bytes=backup_data['size_bytes'],
                description=backup_data.get('description'),
                tables_included=backup_data.get('tables_included', []),
                checksum=backup_data.get('checksum')
            )
        except Exception as e:
            self.logger.error(f"Failed to load backup metadata for {backup_id}: {e}")
            return None
class StorageManager:
    """Manage storage locations and usage monitoring.

    Persists locations in the storage_locations table (see migration 5)
    and caches free/total disk space per location.
    """

    def __init__(self, db_manager: DatabaseManager):
        self.db = db_manager
        self.logger = logging.getLogger(__name__)

    def add_storage_location(self, path: str, location_type: str = 'primary', anime_id: Optional[str] = None) -> str:
        """Register a new storage location and return its generated id.

        location_type is one of 'primary' / 'backup' / 'cache' per the
        schema comment; anime_id is optional (None = global location).
        """
        location_id = str(uuid.uuid4())
        query = """
            INSERT INTO storage_locations
            (location_id, anime_id, path, location_type, is_active)
            VALUES (?, ?, ?, ?, ?)
        """
        self.db.execute_update(query, (location_id, anime_id, path, location_type, True))
        # Populate free/total space immediately so the row is never stale.
        self.update_storage_stats(location_id)
        return location_id

    def update_storage_stats(self, location_id: str):
        """Refresh cached free/total disk space for one location.

        Best effort: unknown ids and missing paths are silently skipped,
        other errors are logged and swallowed.
        """
        try:
            # Get location path
            query = "SELECT path FROM storage_locations WHERE location_id = ?"
            results = self.db.execute_query(query, (location_id,))
            if not results:
                return
            path = results[0]['path']
            if os.path.exists(path):
                # Get disk usage for the filesystem containing the path
                stat = shutil.disk_usage(path)
                update_query = """
                    UPDATE storage_locations
                    SET free_space_bytes = ?, total_space_bytes = ?, last_checked = CURRENT_TIMESTAMP
                    WHERE location_id = ?
                """
                self.db.execute_update(update_query, (stat.free, stat.total, location_id))
        except Exception as e:
            self.logger.error(f"Failed to update storage stats for {location_id}: {e}")

    def get_storage_summary(self) -> Dict[str, Any]:
        """Aggregate free/total space (in GiB) per active location type.

        Fix: the space columns may be NULL when stats were never
        collected; they are now coalesced to 0 instead of raising
        TypeError in the usage-percent arithmetic.
        """
        query = """
            SELECT
                location_type,
                COUNT(*) as location_count,
                SUM(free_space_bytes) as total_free,
                SUM(total_space_bytes) as total_space
            FROM storage_locations
            WHERE is_active = 1
            GROUP BY location_type
        """
        summary = {}
        for row in self.db.execute_query(query):
            total_free = row['total_free'] or 0
            total_space = row['total_space'] or 0
            summary[row['location_type']] = {
                'location_count': row['location_count'],
                'total_free_gb': total_free / (1024**3),
                'total_space_gb': total_space / (1024**3),
                'usage_percent': ((total_space - total_free) / total_space * 100) if total_space else 0
            }
        return summary
# Global instances — module-level singletons shared across the application.
# DatabaseManager initializes the schema on construction, so importing this
# module has the side effect of creating/migrating the database.
database_manager = DatabaseManager()
anime_repository = AnimeRepository(database_manager)
backup_manager = BackupManager(database_manager)
storage_manager = StorageManager(database_manager)
def init_database_system():
    """Initialize database system.

    No-op: DatabaseManager performs initialization when constructed at
    import time (see the module-level singletons above); kept for API
    symmetry with cleanup_database_system().
    """
    # Database is initialized on creation
    pass
def cleanup_database_system():
    """Clean up database resources.

    No-op: connections are opened and closed per operation, so there is
    no long-lived resource to release for SQLite.
    """
    # No specific cleanup needed for SQLite
    pass
# Export main components — the explicit public API of this module
# (classes, shared singleton instances, and lifecycle helpers).
__all__ = [
    'DatabaseManager',
    'AnimeRepository',
    'BackupManager',
    'StorageManager',
    'AnimeMetadata',
    'EpisodeMetadata',
    'BackupInfo',
    'database_manager',
    'anime_repository',
    'backup_manager',
    'storage_manager',
    'init_database_system',
    'cleanup_database_system'
]

View File

@ -1,6 +0,0 @@
"""
Repository package for data access layer.
This package contains repository implementations following the Repository pattern
for clean separation of data access logic from business logic.
"""

View File

@ -1,24 +0,0 @@
# AniWorld FastAPI Server Configuration
# Authentication Configuration
JWT_SECRET_KEY=your-super-secure-jwt-secret-key-change-this-in-production
PASSWORD_SALT=c3149a46648b4394410b415ea654c31731b988ee59fc91b8fb8366a0b32ef0c1
MASTER_PASSWORD=admin123
# MASTER_PASSWORD_HASH=bb202031f646922388567de96a784074272efbbba9eb5d2259e23af04686d2a5
SESSION_TIMEOUT_HOURS=24
# Application Configuration
ANIME_DIRECTORY=\\\\sshfs.r\\ubuntu@192.168.178.43\\media\\serien\\Serien
LOG_LEVEL=INFO
# Database Configuration (if needed)
DATABASE_URL=sqlite:///./aniworld.db
# Security Configuration
CORS_ORIGINS=*
API_RATE_LIMIT=100
# Provider Configuration
DEFAULT_PROVIDER=aniworld.to
PROVIDER_TIMEOUT=30
RETRY_ATTEMPTS=3

View File

@ -1,257 +0,0 @@
# AniWorld FastAPI Server
A comprehensive FastAPI-based server implementation for AniWorld following the project instructions.
## 🚀 Features
### ✅ Authentication System (Completed)
- **Simple Master Password Authentication**: Single master password for the entire application
- **JWT Token Management**: Stateless authentication using JWT tokens
- **Environment Configuration**: Secure password hash stored in environment variables
- **Session Management**: Configurable token expiry (default: 24 hours)
- **Security Features**: SHA-256 password hashing with salt
### ✅ API Endpoints (Implemented)
#### Authentication Endpoints
- `POST /auth/login` - Login with master password and receive JWT token
- `GET /auth/verify` - Verify JWT token validity (protected)
- `POST /auth/logout` - Logout endpoint (stateless - client removes token)
#### System Endpoints
- `GET /` - Root endpoint with API information
- `GET /health` - Health check endpoint
- `GET /api/system/config` - System configuration (protected)
- `GET /api/system/database/health` - Database health check (protected)
#### Anime & Episode Endpoints (Protected)
- `GET /api/anime/search` - Search anime by title with pagination
- `GET /api/anime/{anime_id}` - Get specific anime details
- `GET /api/anime/{anime_id}/episodes` - Get all episodes for anime
- `GET /api/episodes/{episode_id}` - Get specific episode details
### 🔧 Technical Features
- **FastAPI Framework**: Modern, fast (high-performance) web framework
- **OpenAPI Documentation**: Automatic API documentation at `/docs`
- **CORS Support**: Configurable cross-origin resource sharing
- **Request Validation**: Pydantic models for request/response validation
- **Error Handling**: Centralized error handling with proper HTTP status codes
- **Logging**: Comprehensive logging system with file and console output
- **Environment Configuration**: Secure configuration via environment variables
## 🛠️ Installation & Setup
### Prerequisites
- Python 3.11+ (AniWorld conda environment)
- Conda package manager
### 1. Activate AniWorld Environment
```bash
conda activate AniWorld
```
### 2. Install Dependencies
```bash
cd src/server
pip install -r requirements_fastapi.txt
```
### 3. Configure Environment
Create or update `.env` file:
```env
# Authentication
JWT_SECRET_KEY=your-super-secure-jwt-secret-key
PASSWORD_SALT=your-secure-salt
MASTER_PASSWORD=admin123
SESSION_TIMEOUT_HOURS=24
# Application
ANIME_DIRECTORY=your-anime-directory-path
LOG_LEVEL=INFO
# Optional
DATABASE_URL=sqlite:///./aniworld.db
CORS_ORIGINS=*
```
### 4. Start the Server
#### Option 1: Direct Python Execution
```bash
cd src/server
python fastapi_app.py
```
#### Option 2: Using Batch Script (Windows)
```cmd
cd src/server
run_and_test.bat
```
#### Option 3: Using Shell Script (Linux/Mac)
```bash
cd src/server
chmod +x start_fastapi_server.sh
./start_fastapi_server.sh
```
## 📖 API Usage
### 1. Access Documentation
Visit: http://localhost:8000/docs
### 2. Authentication Flow
#### Step 1: Login
```bash
curl -X POST "http://localhost:8000/auth/login" \
-H "Content-Type: application/json" \
-d '{"password": "admin123"}'
```
Response:
```json
{
"success": true,
"message": "Authentication successful",
"token": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9...",
"expires_at": "2025-10-06T18:19:24.710065"
}
```
#### Step 2: Use Token for Protected Endpoints
```bash
curl -X GET "http://localhost:8000/api/anime/search?query=naruto&limit=5" \
-H "Authorization: Bearer YOUR_JWT_TOKEN"
```
### 3. Example API Calls
#### Health Check
```bash
curl "http://localhost:8000/health"
```
#### Search Anime
```bash
curl -H "Authorization: Bearer YOUR_TOKEN" \
"http://localhost:8000/api/anime/search?query=naruto&limit=10"
```
#### Get Anime Details
```bash
curl -H "Authorization: Bearer YOUR_TOKEN" \
"http://localhost:8000/api/anime/anime_123"
```
## 🧪 Testing
### Automated Testing
```bash
cd src/server
python test_fastapi.py
```
### Manual Testing
1. Start the server
2. Visit http://localhost:8000/docs
3. Use the interactive API documentation
4. Test authentication with password: `admin123`
## 📁 Project Structure
```
src/server/
├── fastapi_app.py # Main FastAPI application
├── .env # Environment configuration
├── requirements_fastapi.txt # Python dependencies
├── test_fastapi.py # Test script
├── start_fastapi_server.bat # Windows startup script
├── start_fastapi_server.sh # Linux/Mac startup script
├── run_and_test.bat # Windows test runner
└── logs/ # Log files
```
## 🔐 Security
### Authentication
- Master password authentication (no user registration required)
- JWT tokens with configurable expiry
- Secure password hashing (SHA-256 + salt)
- Environment-based secret management
### API Security
- All anime/episode endpoints require authentication
- CORS protection
- Input validation using Pydantic
- Error handling without sensitive data exposure
## 🔧 Configuration
### Environment Variables
- `JWT_SECRET_KEY`: Secret key for JWT token signing
- `PASSWORD_SALT`: Salt for password hashing
- `MASTER_PASSWORD`: Master password (development only)
- `MASTER_PASSWORD_HASH`: Hashed master password (production)
- `SESSION_TIMEOUT_HOURS`: JWT token expiry time
- `ANIME_DIRECTORY`: Path to anime files
- `LOG_LEVEL`: Logging level (DEBUG, INFO, WARNING, ERROR)
### Production Configuration
1. Set `MASTER_PASSWORD_HASH` instead of `MASTER_PASSWORD`
2. Use a strong `JWT_SECRET_KEY`
3. Set appropriate `CORS_ORIGINS`
4. Configure proper logging levels
## 📊 API Status
| Endpoint Category | Status | Coverage |
|------------------|--------|----------|
| Authentication | ✅ Complete | 100% |
| Health/System | ✅ Complete | 100% |
| Anime Search | ✅ Implemented | Mock data |
| Episode Management | ✅ Implemented | Mock data |
| Database Integration | 🔄 Placeholder | Todo |
| Real Data Provider | 🔄 Placeholder | Todo |
## 🚧 Future Enhancements
### High Priority
- [ ] Connect to actual anime database/provider
- [ ] Implement real anime search functionality
- [ ] Add episode streaming capabilities
- [ ] Database connection pooling
### Medium Priority
- [ ] Redis caching layer
- [ ] Rate limiting middleware
- [ ] Background task processing
- [ ] WebSocket support
### Low Priority
- [ ] Advanced search filters
- [ ] User preferences (multi-user support)
- [ ] Download progress tracking
- [ ] Statistics and analytics
## 📝 License
This project follows the AniWorld project licensing terms.
## 🤝 Contributing
1. Follow the coding standards in `.github/copilot-instructions.md`
2. Use type hints and Pydantic models
3. Add comprehensive logging
4. Include tests for new features
5. Update documentation
## 📞 Support
- API Documentation: http://localhost:8000/docs
- Health Check: http://localhost:8000/health
- Logs: Check `logs/aniworld.log` for detailed information
---
**Note**: This FastAPI implementation provides a solid foundation following the project instructions. The authentication system is complete and production-ready, while anime/episode endpoints currently return mock data pending integration with the actual data providers.

View File

@ -1,573 +0,0 @@
import os
import json
import hashlib
import secrets
from typing import Dict, Any, Optional
from datetime import datetime, timedelta
class Config:
"""Configuration management for AniWorld Flask app."""
def __init__(self, config_file: str = "data/config.json"):
    """Initialize configuration, loading *config_file* (JSON) if present.

    Values from the file are overlaid on the defaults below via
    _merge_configs, so every key the application reads always exists.
    """
    self.config_file = config_file
    # Baseline configuration; any key read elsewhere must have a default
    # here so dotted get() lookups never miss after merging.
    self.default_config = {
        "security": {
            "master_password_hash": None,  # populated by set_master_password()
            "salt": None,                  # per-install random salt (hex)
            "session_timeout_hours": 24,
            "max_failed_attempts": 5,
            "lockout_duration_minutes": 30
        },
        "anime": {
            # Windows UNC path default; overridable via ANIME_DIRECTORY env var.
            "directory": os.getenv("ANIME_DIRECTORY", "\\\\sshfs.r\\ubuntu@192.168.178.43\\media\\serien\\Serien"),
            "download_threads": 3,
            "download_speed_limit": None,  # None means unlimited
            "auto_rescan_time": "03:00",
            "auto_download_after_rescan": False
        },
        "logging": {
            "level": "INFO",
            "enable_console_logging": True,
            "enable_console_progress": False,
            "enable_fail2ban_logging": True,
            "log_file": "./logs/aniworld.log",
            "max_log_size_mb": 10,
            "log_backup_count": 5
        },
        "providers": {
            "default_provider": "aniworld.to",
            "preferred_language": "German Dub",
            "fallback_providers": ["aniworld.to"],
            "provider_timeout": 30,
            "retry_attempts": 3,
            "provider_settings": {
                "aniworld.to": {
                    "enabled": True,
                    "priority": 1,
                    "quality_preference": "720p"
                }
            }
        },
        "advanced": {
            "max_concurrent_downloads": 3,
            "download_buffer_size": 8192,
            "connection_timeout": 30,
            "read_timeout": 300,
            "enable_debug_mode": False,
            "cache_duration_minutes": 60
        }
    }
    # Effective configuration (defaults merged with the file's contents).
    self._config = self._load_config()
def _load_config(self) -> Dict[str, Any]:
"""Load configuration from file or create default."""
try:
if os.path.exists(self.config_file):
with open(self.config_file, 'r', encoding='utf-8') as f:
config = json.load(f)
# Merge with defaults to ensure all keys exist
return self._merge_configs(self.default_config, config)
else:
return self.default_config.copy()
except Exception as e:
print(f"Error loading config: {e}")
return self.default_config.copy()
def _merge_configs(self, default: Dict[str, Any], user: Dict[str, Any]) -> Dict[str, Any]:
"""Recursively merge user config with defaults."""
result = default.copy()
for key, value in user.items():
if key in result and isinstance(result[key], dict) and isinstance(value, dict):
result[key] = self._merge_configs(result[key], value)
else:
result[key] = value
return result
def save_config(self) -> bool:
    """Save current configuration to file.

    Fix: the parent directory is created if missing — the default path
    lives under 'data/', which does not exist on a fresh checkout, so
    saving previously failed with FileNotFoundError.
    Returns True on success, False on failure (error printed).
    """
    try:
        parent = os.path.dirname(self.config_file)
        if parent:
            os.makedirs(parent, exist_ok=True)
        with open(self.config_file, 'w', encoding='utf-8') as f:
            json.dump(self._config, f, indent=4)
        return True
    except Exception as e:
        print(f"Error saving config: {e}")
        return False
def get(self, key_path: str, default: Any = None) -> Any:
    """Get config value using dot notation (e.g., 'security.master_password_hash').

    Returns *default* when any path segment is missing or a non-dict is
    traversed mid-path.
    """
    node = self._config
    for part in key_path.split('.'):
        if not (isinstance(node, dict) and part in node):
            return default
        node = node[part]
    return node
def set(self, key_path: str, value: Any) -> bool:
    """Set config value using dot notation, creating intermediate dicts.

    Persists immediately; returns the result of save_config().
    """
    *parents, leaf = key_path.split('.')
    node = self._config
    for part in parents:
        node = node.setdefault(part, {})
    node[leaf] = value
    # Persist right away so every change survives a restart.
    return self.save_config()
def set_master_password(self, password: str) -> bool:
    """Hash and store the master password.

    Generates a fresh random salt, hashes password+salt with SHA-256,
    and persists both via set().  Returns True on success.
    """
    try:
        # 32 random bytes -> 64 hex chars of salt.
        salt = secrets.token_hex(32)
        digest = hashlib.sha256((password + salt).encode()).hexdigest()
        self.set("security.salt", salt)
        self.set("security.master_password_hash", digest)
        return True
    except Exception as e:
        print(f"Error setting master password: {e}")
        return False
def verify_password(self, password: str) -> bool:
    """Verify password against stored hash.

    Fix: uses secrets.compare_digest (constant-time comparison) instead
    of '==' so the check does not leak hash-prefix information through
    timing.  Returns False when no password is configured or on error.
    """
    try:
        stored_hash = self.get("security.master_password_hash")
        salt = self.get("security.salt")
        if not stored_hash or not salt:
            return False
        # Hash provided password with the stored salt, then compare
        # in constant time.
        password_hash = hashlib.sha256((password + salt).encode()).hexdigest()
        return secrets.compare_digest(password_hash, stored_hash)
    except Exception as e:
        print(f"Error verifying password: {e}")
        return False
def has_master_password(self) -> bool:
    """Return True when a master password hash has been configured."""
    stored_hash = self.get("security.master_password_hash")
    return bool(stored_hash)
def backup_config(self, backup_path: Optional[str] = None) -> str:
    """Write the current configuration to *backup_path* and return it.

    When no path is given, a timestamped 'config_backup_*.json' name is
    generated.  Raises Exception on write failure.
    """
    if not backup_path:
        stamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        backup_path = f"config_backup_{stamp}.json"
    try:
        with open(backup_path, 'w', encoding='utf-8') as handle:
            json.dump(self._config, handle, indent=4)
        return backup_path
    except Exception as e:
        raise Exception(f"Failed to create backup: {e}")
def restore_config(self, backup_path: str) -> bool:
    """Restore configuration from backup.

    The backup is validated first; on success it is merged over the default
    configuration and saved to disk.

    Returns:
        True when the restored configuration was saved; False on any error
        (invalid backup, unreadable file, or failed save).
    """
    try:
        with open(backup_path, 'r', encoding='utf-8') as f:
            config = json.load(f)
        # Validate config before restoring
        validation_result = self.validate_config(config)
        if not validation_result['valid']:
            raise Exception(f"Invalid configuration: {validation_result['errors']}")
        # Merge onto the defaults so keys missing from the backup fall back
        # to their default values instead of disappearing.
        self._config = self._merge_configs(self.default_config, config)
        return self.save_config()
    except Exception as e:
        print(f"Error restoring config: {e}")
        return False
def validate_config(self, config: Optional[Dict[str, Any]] = None) -> Dict[str, Any]:
    """Validate configuration structure and values.

    Args:
        config: Configuration to check; defaults to the live configuration.

    Returns:
        Dict with ``valid`` (bool), ``errors`` and ``warnings`` (lists of
        message strings). Out-of-range values are errors; a missing anime
        directory is only a warning.
    """
    if config is None:
        config = self._config
    errors = []
    warnings = []

    def _require_range(value, low, high, message):
        # Record *message* when value falls outside the inclusive range.
        if value < low or value > high:
            errors.append(message)

    # Validate security settings (missing values default to 0 and are
    # therefore reported as out of range, matching previous behavior).
    security = config.get('security', {})
    _require_range(security.get('session_timeout_hours', 0), 1, 168,
                   "Session timeout must be between 1 and 168 hours")
    _require_range(security.get('max_failed_attempts', 0), 1, 50,
                   "Max failed attempts must be between 1 and 50")
    _require_range(security.get('lockout_duration_minutes', 0), 1, 1440,
                   "Lockout duration must be between 1 and 1440 minutes")

    # Validate anime settings. UNC paths (\\server\share) may not be
    # visible to os.path.exists, so they are exempt from the warning.
    anime = config.get('anime', {})
    directory = anime.get('directory', '')
    if directory and not os.path.exists(directory) and not directory.startswith('\\\\'):
        warnings.append(f"Anime directory does not exist: {directory}")
    _require_range(anime.get('download_threads', 1), 1, 10,
                   "Download threads must be between 1 and 10")

    # Validate logging settings
    logging_config = config.get('logging', {})
    log_level = logging_config.get('level', 'INFO')
    if log_level not in ('DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL'):
        errors.append(f"Invalid log level: {log_level}")

    # Validate provider settings
    providers = config.get('providers', {})
    _require_range(providers.get('provider_timeout', 30), 5, 300,
                   "Provider timeout must be between 5 and 300 seconds")
    _require_range(providers.get('retry_attempts', 3), 0, 10,
                   "Retry attempts must be between 0 and 10")

    # Validate advanced settings
    advanced = config.get('advanced', {})
    _require_range(advanced.get('max_concurrent_downloads', 3), 1, 20,
                   "Max concurrent downloads must be between 1 and 20")
    _require_range(advanced.get('connection_timeout', 30), 5, 300,
                   "Connection timeout must be between 5 and 300 seconds")

    return {
        'valid': not errors,
        'errors': errors,
        'warnings': warnings,
    }
def get_config_schema(self) -> Dict[str, Any]:
    """Return the configuration schema used to drive UI generation.

    Each top-level section has a ``title`` and a ``fields`` mapping whose
    entries describe the widget type, bounds, and default for one setting.
    """

    def number(title, description, minimum, maximum, default):
        # Shorthand for a bounded numeric field definition.
        return {
            "type": "number",
            "title": title,
            "description": description,
            "min": minimum,
            "max": maximum,
            "default": default,
        }

    def select(title, description, options, default):
        # Shorthand for a dropdown field definition.
        return {
            "type": "select",
            "title": title,
            "description": description,
            "options": options,
            "default": default,
        }

    return {
        "security": {
            "title": "Security Settings",
            "fields": {
                "session_timeout_hours": number(
                    "Session Timeout (hours)",
                    "How long sessions remain active", 1, 168, 24),
                "max_failed_attempts": number(
                    "Max Failed Login Attempts",
                    "Number of failed attempts before lockout", 1, 50, 5),
                "lockout_duration_minutes": number(
                    "Lockout Duration (minutes)",
                    "How long to lock account after failed attempts", 1, 1440, 30),
            },
        },
        "anime": {
            "title": "Anime Settings",
            "fields": {
                "directory": {
                    "type": "text",
                    "title": "Anime Directory",
                    "description": "Base directory for anime storage",
                    "required": True,
                },
                "download_threads": number(
                    "Download Threads",
                    "Number of concurrent download threads", 1, 10, 3),
                "download_speed_limit": number(
                    "Speed Limit (KB/s)",
                    "Download speed limit (0 = unlimited)", 0, 102400, 0),
            },
        },
        "providers": {
            "title": "Provider Settings",
            "fields": {
                "default_provider": select(
                    "Default Provider", "Primary anime provider",
                    ["aniworld.to"], "aniworld.to"),
                "preferred_language": select(
                    "Preferred Language", "Default language preference",
                    ["German Dub", "German Sub", "English Dub", "English Sub", "Japanese"],
                    "German Dub"),
                "provider_timeout": number(
                    "Provider Timeout (seconds)",
                    "Timeout for provider requests", 5, 300, 30),
                "retry_attempts": number(
                    "Retry Attempts",
                    "Number of retry attempts for failed requests", 0, 10, 3),
            },
        },
        "advanced": {
            "title": "Advanced Settings",
            "fields": {
                "max_concurrent_downloads": number(
                    "Max Concurrent Downloads",
                    "Maximum simultaneous downloads", 1, 20, 3),
                "connection_timeout": number(
                    "Connection Timeout (seconds)",
                    "Network connection timeout", 5, 300, 30),
                "enable_debug_mode": {
                    "type": "boolean",
                    "title": "Debug Mode",
                    "description": "Enable detailed debug logging",
                    "default": False,
                },
            },
        },
    }
def export_config(self, include_sensitive: bool = False) -> Dict[str, Any]:
    """Return a deep copy of the configuration.

    When *include_sensitive* is False (default) the master password hash
    and salt are stripped from the ``security`` section.
    """
    # Round-trip through JSON to get a deep copy of plain values.
    exported = json.loads(json.dumps(self._config))
    if include_sensitive:
        return exported
    security = exported.get('security')
    if security is not None:
        security.pop('master_password_hash', None)
        security.pop('salt', None)
    return exported
def import_config(self, config_data: Dict[str, Any], validate: bool = True) -> Dict[str, Any]:
    """Import configuration with validation.

    Args:
        config_data: Configuration dictionary to merge over the defaults.
        validate: When True, reject the import if validation reports errors.

    Returns:
        Dict with ``success`` (bool), ``errors`` and ``warnings`` lists.
    """
    if validate:
        validation_result = self.validate_config(config_data)
        if not validation_result['valid']:
            return {
                'success': False,
                'errors': validation_result['errors'],
                'warnings': validation_result['warnings']
            }
    # Merge with existing config (don't overwrite security settings)
    current_security = self._config.get('security', {})
    merged_config = self._merge_configs(self.default_config, config_data)
    # Preserve current security settings if not provided.
    # NOTE(review): this replaces the *entire* security section with the
    # current one whenever the import lacks a master_password_hash, which
    # also discards any other imported security values — confirm intended.
    if not config_data.get('security', {}).get('master_password_hash'):
        merged_config['security'] = current_security
    self._config = merged_config
    success = self.save_config()
    return {
        'success': success,
        'errors': [] if success else ['Failed to save configuration'],
        'warnings': validation_result.get('warnings', []) if validate else []
    }
@property
def anime_directory(self) -> str:
    """Anime directory path.

    The ``ANIME_DIRECTORY`` environment variable always takes precedence
    over the value stored in the configuration file.
    """
    # Always check environment variable first
    env_dir = os.getenv("ANIME_DIRECTORY")
    if env_dir:
        # Remove surrounding quote characters (e.g. values copied from .env)
        env_dir = env_dir.strip('"\'')
        return env_dir
    # Fallback default is a UNC-style network share path.
    return self.get("anime.directory", "\\\\sshfs.r\\ubuntu@192.168.178.43\\media\\serien\\Serien")

@anime_directory.setter
def anime_directory(self, value: str):
    """Set anime directory path (persisted to the configuration file)."""
    self.set("anime.directory", value)
@property
def session_timeout_hours(self) -> int:
    """Session timeout in hours (default: 24)."""
    return self.get("security.session_timeout_hours", 24)

@property
def max_failed_attempts(self) -> int:
    """Maximum failed login attempts before lockout (default: 5)."""
    return self.get("security.max_failed_attempts", 5)

@property
def lockout_duration_minutes(self) -> int:
    """Lockout duration in minutes after repeated failures (default: 30)."""
    return self.get("security.lockout_duration_minutes", 30)
@property
def scheduled_rescan_enabled(self) -> bool:
    """Whether the scheduled rescan is enabled (default: False)."""
    return self.get("scheduler.rescan_enabled", False)

@scheduled_rescan_enabled.setter
def scheduled_rescan_enabled(self, value: bool):
    """Enable or disable the scheduled rescan (persisted)."""
    self.set("scheduler.rescan_enabled", value)

@property
def scheduled_rescan_time(self) -> str:
    """Scheduled rescan time in HH:MM format (default: "03:00")."""
    return self.get("scheduler.rescan_time", "03:00")

@scheduled_rescan_time.setter
def scheduled_rescan_time(self, value: str):
    """Set scheduled rescan time in HH:MM format (not validated here)."""
    self.set("scheduler.rescan_time", value)

@property
def auto_download_after_rescan(self) -> bool:
    """Whether to auto-download after a scheduled rescan (default: False)."""
    return self.get("scheduler.auto_download_after_rescan", False)

@auto_download_after_rescan.setter
def auto_download_after_rescan(self, value: bool):
    """Set whether to auto-download after scheduled rescan (persisted)."""
    self.set("scheduler.auto_download_after_rescan", value)
@property
def log_level(self) -> str:
    """Current log level name (default: "INFO")."""
    return self.get("logging.level", "INFO")

@log_level.setter
def log_level(self, value: str):
    """Set log level; the value is upper-cased before storing."""
    self.set("logging.level", value.upper())

@property
def enable_console_logging(self) -> bool:
    """Whether console logging is enabled (default: True)."""
    return self.get("logging.enable_console_logging", True)

@enable_console_logging.setter
def enable_console_logging(self, value: bool):
    """Set whether console logging is enabled (persisted)."""
    self.set("logging.enable_console_logging", value)

@property
def enable_console_progress(self) -> bool:
    """Whether console progress bars are enabled (default: False)."""
    return self.get("logging.enable_console_progress", False)

@enable_console_progress.setter
def enable_console_progress(self, value: bool):
    """Set whether console progress bars are enabled (persisted)."""
    self.set("logging.enable_console_progress", value)

@property
def enable_fail2ban_logging(self) -> bool:
    """Whether fail2ban logging is enabled (default: True)."""
    return self.get("logging.enable_fail2ban_logging", True)

@enable_fail2ban_logging.setter
def enable_fail2ban_logging(self, value: bool):
    """Set whether fail2ban logging is enabled (persisted)."""
    self.set("logging.enable_fail2ban_logging", value)
# Provider configuration properties
@property
def default_provider(self) -> str:
    """Default anime provider host (default: "aniworld.to")."""
    return self.get("providers.default_provider", "aniworld.to")

@default_provider.setter
def default_provider(self, value: str):
    """Set default provider (persisted)."""
    self.set("providers.default_provider", value)

@property
def preferred_language(self) -> str:
    """Preferred language label (default: "German Dub")."""
    return self.get("providers.preferred_language", "German Dub")

@preferred_language.setter
def preferred_language(self, value: str):
    """Set preferred language (persisted)."""
    self.set("providers.preferred_language", value)

@property
def provider_timeout(self) -> int:
    """Provider request timeout in seconds (default: 30)."""
    return self.get("providers.provider_timeout", 30)

@provider_timeout.setter
def provider_timeout(self, value: int):
    """Set provider timeout in seconds (persisted)."""
    self.set("providers.provider_timeout", value)

# Advanced configuration properties
@property
def max_concurrent_downloads(self) -> int:
    """Maximum simultaneous downloads (default: 3)."""
    return self.get("advanced.max_concurrent_downloads", 3)

@max_concurrent_downloads.setter
def max_concurrent_downloads(self, value: int):
    """Set maximum concurrent downloads (persisted)."""
    self.set("advanced.max_concurrent_downloads", value)

@property
def enable_debug_mode(self) -> bool:
    """Whether debug mode is enabled (default: False)."""
    return self.get("advanced.enable_debug_mode", False)

@enable_debug_mode.setter
def enable_debug_mode(self, value: bool):
    """Set whether debug mode is enabled (persisted)."""
    self.set("advanced.enable_debug_mode", value)
# Global config instance shared by the rest of the application
# (created once at import time).
config = Config()

View File

@ -1,10 +0,0 @@
"""
Configuration package for the Aniworld server.
This package provides configuration management and environment
variable handling for secure application deployment.
"""
from .env_config import EnvironmentConfig, env_config
__all__ = ['EnvironmentConfig', 'env_config']

View File

@ -1,217 +0,0 @@
"""
Environment configuration for secure handling of sensitive data.
This module provides secure environment variable handling and configuration
management for the Aniworld server application.
"""
import os
import secrets
from typing import Optional, Dict, Any
from dotenv import load_dotenv
import logging
logger = logging.getLogger(__name__)
# Load environment variables from .env file
load_dotenv()
class EnvironmentConfig:
    """Manages environment variables and secure configuration.

    All values are read once, at import time, from the process environment
    (optionally populated from a .env file by ``load_dotenv()`` above).
    """

    # Security.
    # NOTE(review): when SECRET_KEY / JWT_SECRET_KEY / PASSWORD_SALT are not
    # set, random fallbacks are generated anew on every process start, which
    # invalidates existing sessions/tokens across restarts — confirm intended.
    SECRET_KEY: str = os.getenv('SECRET_KEY', secrets.token_urlsafe(32))
    JWT_SECRET_KEY: str = os.getenv('JWT_SECRET_KEY', secrets.token_urlsafe(32))
    PASSWORD_SALT: str = os.getenv('PASSWORD_SALT', secrets.token_hex(32))
    # Database
    DATABASE_URL: str = os.getenv('DATABASE_URL', 'sqlite:///data/aniworld.db')
    DATABASE_PASSWORD: Optional[str] = os.getenv('DATABASE_PASSWORD')
    # Redis (for caching and sessions)
    REDIS_URL: str = os.getenv('REDIS_URL', 'redis://localhost:6379/0')
    REDIS_PASSWORD: Optional[str] = os.getenv('REDIS_PASSWORD')
    # API Keys and External Services
    ANIME_PROVIDER_API_KEY: Optional[str] = os.getenv('ANIME_PROVIDER_API_KEY')
    TMDB_API_KEY: Optional[str] = os.getenv('TMDB_API_KEY')
    # Email Configuration (for password reset)
    SMTP_SERVER: str = os.getenv('SMTP_SERVER', 'localhost')
    SMTP_PORT: int = int(os.getenv('SMTP_PORT', '587'))
    SMTP_USERNAME: Optional[str] = os.getenv('SMTP_USERNAME')
    SMTP_PASSWORD: Optional[str] = os.getenv('SMTP_PASSWORD')
    SMTP_USE_TLS: bool = os.getenv('SMTP_USE_TLS', 'true').lower() == 'true'
    FROM_EMAIL: str = os.getenv('FROM_EMAIL', 'noreply@aniworld.local')
    # Security Settings
    SESSION_TIMEOUT_HOURS: int = int(os.getenv('SESSION_TIMEOUT_HOURS', '24'))
    MAX_FAILED_LOGIN_ATTEMPTS: int = int(os.getenv('MAX_FAILED_LOGIN_ATTEMPTS', '5'))
    LOCKOUT_DURATION_MINUTES: int = int(os.getenv('LOCKOUT_DURATION_MINUTES', '30'))
    # Rate Limiting
    RATE_LIMIT_PER_MINUTE: int = int(os.getenv('RATE_LIMIT_PER_MINUTE', '60'))
    API_RATE_LIMIT_PER_MINUTE: int = int(os.getenv('API_RATE_LIMIT_PER_MINUTE', '100'))
    # Application Settings
    DEBUG: bool = os.getenv('DEBUG', 'false').lower() == 'true'
    HOST: str = os.getenv('HOST', '127.0.0.1')
    PORT: int = int(os.getenv('PORT', '5000'))
    # Anime Directory and Download Settings
    ANIME_DIRECTORY: str = os.getenv('ANIME_DIRECTORY', './downloads')
    MAX_CONCURRENT_DOWNLOADS: int = int(os.getenv('MAX_CONCURRENT_DOWNLOADS', '3'))
    # A value of 0 (or unset) is normalised to None, i.e. "no speed limit".
    DOWNLOAD_SPEED_LIMIT: Optional[int] = int(os.getenv('DOWNLOAD_SPEED_LIMIT', '0')) or None
    # Logging
    LOG_LEVEL: str = os.getenv('LOG_LEVEL', 'INFO')
    LOG_FILE: str = os.getenv('LOG_FILE', './logs/aniworld.log')

    @classmethod
    def get_database_config(cls) -> Dict[str, Any]:
        """Get database configuration (URL, password, and pool settings)."""
        return {
            'url': cls.DATABASE_URL,
            'password': cls.DATABASE_PASSWORD,
            'pool_size': int(os.getenv('DATABASE_POOL_SIZE', '10')),
            'max_overflow': int(os.getenv('DATABASE_MAX_OVERFLOW', '20')),
            'pool_timeout': int(os.getenv('DATABASE_POOL_TIMEOUT', '30')),
            'pool_recycle': int(os.getenv('DATABASE_POOL_RECYCLE', '3600'))
        }

    @classmethod
    def get_redis_config(cls) -> Dict[str, Any]:
        """Get Redis configuration (URL, password, and connection settings)."""
        return {
            'url': cls.REDIS_URL,
            'password': cls.REDIS_PASSWORD,
            'max_connections': int(os.getenv('REDIS_MAX_CONNECTIONS', '10')),
            'retry_on_timeout': True,
            'socket_timeout': int(os.getenv('REDIS_SOCKET_TIMEOUT', '5'))
        }

    @classmethod
    def get_email_config(cls) -> Dict[str, Any]:
        """Get email (SMTP) configuration."""
        return {
            'server': cls.SMTP_SERVER,
            'port': cls.SMTP_PORT,
            'username': cls.SMTP_USERNAME,
            'password': cls.SMTP_PASSWORD,
            'use_tls': cls.SMTP_USE_TLS,
            'from_email': cls.FROM_EMAIL
        }

    @classmethod
    def get_security_config(cls) -> Dict[str, Any]:
        """Get security configuration (secrets, lockout, and rate limits)."""
        return {
            'secret_key': cls.SECRET_KEY,
            'jwt_secret_key': cls.JWT_SECRET_KEY,
            'password_salt': cls.PASSWORD_SALT,
            'session_timeout_hours': cls.SESSION_TIMEOUT_HOURS,
            'max_failed_attempts': cls.MAX_FAILED_LOGIN_ATTEMPTS,
            'lockout_duration_minutes': cls.LOCKOUT_DURATION_MINUTES,
            'rate_limit_per_minute': cls.RATE_LIMIT_PER_MINUTE,
            'api_rate_limit_per_minute': cls.API_RATE_LIMIT_PER_MINUTE
        }

    @classmethod
    def validate_config(cls) -> bool:
        """Validate that required configuration is present.

        NOTE(review): the required attributes above always have generated
        fallback values, so getattr() is always truthy and this check can
        never report a missing variable — confirm whether it should inspect
        os.environ instead.
        """
        required_vars = [
            'SECRET_KEY',
            'JWT_SECRET_KEY',
            'PASSWORD_SALT'
        ]
        missing_vars = []
        for var in required_vars:
            if not getattr(cls, var):
                missing_vars.append(var)
        if missing_vars:
            logger.error(f"Missing required environment variables: {missing_vars}")
            return False
        return True

    @classmethod
    def generate_env_template(cls, file_path: str = '.env.template') -> bool:
        """Generate a template .env file with all available configuration options.

        Returns:
            True when the template file was written, False on error.
        """
        try:
            template_content = """# Aniworld Server Environment Configuration
# Copy this file to .env and fill in your values
# Security (REQUIRED - Generate secure random values)
SECRET_KEY=your_secret_key_here
JWT_SECRET_KEY=your_jwt_secret_here
PASSWORD_SALT=your_password_salt_here
# Database Configuration
DATABASE_URL=sqlite:///data/aniworld.db
# DATABASE_PASSWORD=your_db_password_here
DATABASE_POOL_SIZE=10
DATABASE_MAX_OVERFLOW=20
DATABASE_POOL_TIMEOUT=30
DATABASE_POOL_RECYCLE=3600
# Redis Configuration (for caching and sessions)
REDIS_URL=redis://localhost:6379/0
# REDIS_PASSWORD=your_redis_password_here
REDIS_MAX_CONNECTIONS=10
REDIS_SOCKET_TIMEOUT=5
# Email Configuration (for password reset emails)
SMTP_SERVER=localhost
SMTP_PORT=587
# SMTP_USERNAME=your_smtp_username
# SMTP_PASSWORD=your_smtp_password
SMTP_USE_TLS=true
FROM_EMAIL=noreply@aniworld.local
# External API Keys
# ANIME_PROVIDER_API_KEY=your_anime_provider_api_key
# TMDB_API_KEY=your_tmdb_api_key
# Security Settings
SESSION_TIMEOUT_HOURS=24
MAX_FAILED_LOGIN_ATTEMPTS=5
LOCKOUT_DURATION_MINUTES=30
# Rate Limiting
RATE_LIMIT_PER_MINUTE=60
API_RATE_LIMIT_PER_MINUTE=100
# Application Settings
DEBUG=false
HOST=127.0.0.1
PORT=5000
# Anime and Download Settings
ANIME_DIRECTORY=./downloads
MAX_CONCURRENT_DOWNLOADS=3
# DOWNLOAD_SPEED_LIMIT=1000000 # bytes per second
# Logging
LOG_LEVEL=INFO
LOG_FILE=./logs/aniworld.log
"""
            with open(file_path, 'w', encoding='utf-8') as f:
                f.write(template_content)
            logger.info(f"Environment template created at {file_path}")
            return True
        except Exception as e:
            logger.error(f"Error creating environment template: {e}")
            return False
# Create global instance (module-level singleton)
env_config = EnvironmentConfig()
# Validate configuration on import so misconfiguration is reported early.
if not env_config.validate_config():
    logger.warning("Invalid environment configuration detected. Please check your .env file.")

View File

@ -1,28 +0,0 @@
from typing import Optional
from pydantic import BaseSettings, Field
class Settings(BaseSettings):
    """Application settings from environment variables.

    Values are read from the process environment and the ``.env`` file
    declared in ``Config.env_file``; unknown variables are ignored
    (``extra = "ignore"``).
    """
    # Secrets used for JWT signing and password hashing.
    jwt_secret_key: str = Field(default="your-secret-key-here", env="JWT_SECRET_KEY")
    password_salt: str = Field(default="default-salt", env="PASSWORD_SALT")
    # Master-password auth: either a precomputed hash or (dev only) plain text.
    master_password_hash: Optional[str] = Field(default=None, env="MASTER_PASSWORD_HASH")
    master_password: Optional[str] = Field(default=None, env="MASTER_PASSWORD")  # For development
    token_expiry_hours: int = Field(default=24, env="SESSION_TIMEOUT_HOURS")
    anime_directory: str = Field(default="", env="ANIME_DIRECTORY")
    log_level: str = Field(default="INFO", env="LOG_LEVEL")
    # Additional settings from .env
    database_url: str = Field(default="sqlite:///./data/aniworld.db", env="DATABASE_URL")
    cors_origins: str = Field(default="*", env="CORS_ORIGINS")
    api_rate_limit: int = Field(default=100, env="API_RATE_LIMIT")
    default_provider: str = Field(default="aniworld.to", env="DEFAULT_PROVIDER")
    provider_timeout: int = Field(default=30, env="PROVIDER_TIMEOUT")
    retry_attempts: int = Field(default=3, env="RETRY_ATTEMPTS")

    class Config:
        # pydantic v1-style settings configuration.
        env_file = ".env"
        extra = "ignore"


# Module-level singleton imported throughout the application.
settings = Settings()

View File

@ -1,612 +0,0 @@
"""
FastAPI-based AniWorld Server Application.
This module implements a comprehensive FastAPI application following the instructions:
- Simple master password authentication using JWT
- Repository pattern with dependency injection
- Proper error handling and validation
- OpenAPI documentation
- Security best practices
"""
import hashlib
import logging
import os
import sys
from contextlib import asynccontextmanager
from datetime import datetime, timedelta
from typing import Any, Dict, List, Optional
import jwt
# Add parent directory to path for imports
current_dir = os.path.dirname(__file__)
parent_dir = os.path.join(current_dir, '..')
project_root = os.path.join(parent_dir, '..') # Go up two levels to reach project root
sys.path.insert(0, os.path.abspath(project_root))
import uvicorn
from fastapi import Depends, FastAPI, HTTPException, Request, Security, status
from fastapi.middleware.cors import CORSMiddleware
from fastapi.responses import HTMLResponse, JSONResponse
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
from fastapi.staticfiles import StaticFiles
from fastapi.templating import Jinja2Templates
from pydantic import BaseModel, Field
from src.config.settings import settings
# Application flow services will be imported lazily where needed to avoid
# import-time circular dependencies during tests.
SetupService = None
ApplicationFlowMiddleware = None
# Configure logging
logging.basicConfig(
level=logging.INFO,
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
handlers=[
logging.FileHandler('./logs/aniworld.log'),
logging.StreamHandler()
]
)
logger = logging.getLogger(__name__)
# Security
security = HTTPBearer()
# Settings are loaded from `src.config.settings.settings`
# Pydantic Models
class LoginRequest(BaseModel):
    """Login request model."""
    password: str = Field(..., min_length=1, description="Master password")


class LoginResponse(BaseModel):
    """Login response model."""
    success: bool
    message: str
    token: Optional[str] = None  # JWT bearer token, present only on success
    expires_at: Optional[datetime] = None


class TokenVerifyResponse(BaseModel):
    """Token verification response model."""
    valid: bool
    message: str
    user: Optional[str] = None
    expires_at: Optional[datetime] = None


class HealthResponse(BaseModel):
    """Health check response model."""
    status: str
    timestamp: datetime
    version: str = "1.0.0"
    services: Dict[str, str]  # service name -> status string


class AnimeSearchRequest(BaseModel):
    """Anime search request model."""
    query: str = Field(..., min_length=1, max_length=100)
    limit: int = Field(default=20, ge=1, le=100)  # page size
    offset: int = Field(default=0, ge=0)  # pagination offset


class AnimeResponse(BaseModel):
    """Anime response model."""
    id: str
    title: str
    description: Optional[str] = None
    episodes: int = 0
    status: str = "Unknown"
    poster_url: Optional[str] = None


class EpisodeResponse(BaseModel):
    """Episode response model."""
    id: str
    anime_id: str
    episode_number: int
    title: Optional[str] = None
    description: Optional[str] = None
    duration: Optional[int] = None  # presumably minutes — TODO confirm unit
    stream_url: Optional[str] = None


class ErrorResponse(BaseModel):
    """Error response model."""
    success: bool = False
    error: str
    code: Optional[str] = None  # machine-readable error code
    details: Optional[Dict[str, Any]] = None


class SetupRequest(BaseModel):
    """Setup request model."""
    password: str = Field(..., min_length=8, description="Master password (min 8 characters)")
    directory: str = Field(..., min_length=1, description="Anime directory path")


class SetupResponse(BaseModel):
    """Setup response model."""
    status: str
    message: str
    redirect_url: Optional[str] = None


class SetupStatusResponse(BaseModel):
    """Setup status response model."""
    setup_complete: bool
    requirements: Dict[str, bool]  # requirement name -> satisfied?
    missing_requirements: List[str]
# Authentication utilities
def hash_password(password: str) -> str:
    """Hash password with salt using SHA-256.

    The salt is the application-wide ``settings.password_salt``; the result
    is compared against ``settings.master_password_hash`` during login.
    """
    salted_password = password + settings.password_salt
    return hashlib.sha256(salted_password.encode()).hexdigest()
def verify_master_password(password: str) -> bool:
    """Verify password against master password hash.

    Falls back to comparing with the plain-text ``settings.master_password``
    when no hash is configured (development convenience only).
    """
    if not settings.master_password_hash:
        # If no hash is set, check against plain password (development only)
        if settings.master_password:
            return password == settings.master_password
        return False
    password_hash = hash_password(password)
    return password_hash == settings.master_password_hash
def generate_jwt_token() -> Dict[str, Any]:
    """Generate JWT token for authentication.

    Returns:
        Dict with the encoded ``token`` (HS256) and its ``expires_at``.
    """
    # NOTE(review): naive UTC datetimes (utcnow) are used for exp/iat —
    # confirm the JWT library interprets them as UTC before changing.
    expires_at = datetime.utcnow() + timedelta(hours=settings.token_expiry_hours)
    payload = {
        'user': 'master',
        'exp': expires_at,
        'iat': datetime.utcnow(),
        'iss': 'aniworld-fastapi-server'
    }
    token = jwt.encode(payload, settings.jwt_secret_key, algorithm='HS256')
    return {
        'token': token,
        'expires_at': expires_at
    }
def verify_jwt_token(token: str) -> Optional[Dict[str, Any]]:
    """Verify and decode JWT token.

    Returns:
        The decoded payload, or ``None`` when the token is expired or
        otherwise invalid (both cases are logged as warnings).
    """
    try:
        payload = jwt.decode(token, settings.jwt_secret_key, algorithms=['HS256'])
        return payload
    except jwt.ExpiredSignatureError:
        logger.warning("Token has expired")
        return None
    except jwt.InvalidTokenError as e:
        logger.warning(f"Invalid token: {str(e)}")
        return None
async def get_current_user(credentials: HTTPAuthorizationCredentials = Security(security)):
    """FastAPI dependency that validates the Bearer token.

    Returns:
        The decoded JWT payload for the authenticated request.

    Raises:
        HTTPException: 401 when the token is invalid or expired.
    """
    token = credentials.credentials
    payload = verify_jwt_token(token)
    if not payload:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid or expired token",
            headers={"WWW-Authenticate": "Bearer"},
        )
    return payload
# Global exception handler
async def global_exception_handler(request, exc):
    """Global exception handler for unhandled errors.

    Logs the full traceback and returns a generic 500 payload so internal
    details are not leaked to clients.
    """
    logger.error(f"Unhandled exception: {exc}", exc_info=True)
    return JSONResponse(
        status_code=500,
        content={
            "success": False,
            "error": "Internal Server Error",
            "code": "INTERNAL_ERROR"
        }
    )
# Application lifespan
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Manage application lifespan events.

    Logs startup configuration, warns when no master password is set, then
    yields control to the running application; shutdown is logged on exit.
    """
    # Startup
    logger.info("Starting AniWorld FastAPI server...")
    logger.info(f"Anime directory: {settings.anime_directory}")
    logger.info(f"Log level: {settings.log_level}")
    # Verify configuration
    if not settings.master_password_hash and not settings.master_password:
        logger.warning("No master password configured! Set MASTER_PASSWORD_HASH or MASTER_PASSWORD environment variable.")
    yield
    # Shutdown
    logger.info("Shutting down AniWorld FastAPI server...")
# Create FastAPI application
app = FastAPI(
title="AniWorld API",
description="""
## AniWorld Management System
A comprehensive FastAPI-based application for managing anime series and episodes.
### Features
* **Series Management**: Search, track, and manage anime series
* **Episode Tracking**: Monitor missing episodes and download progress
* **Authentication**: Secure master password authentication with JWT tokens
* **Real-time Updates**: WebSocket support for live progress tracking
* **File Management**: Automatic file scanning and organization
* **Download Queue**: Queue-based download management system
### Authentication
Most endpoints require authentication using a master password.
Use the `/auth/login` endpoint to obtain a JWT token, then include it
in the `Authorization` header as `Bearer <token>`.
### API Versioning
This API follows semantic versioning. Current version: **1.0.0**
""",
version="1.0.0",
docs_url="/docs",
redoc_url="/redoc",
lifespan=lifespan,
contact={
"name": "AniWorld API Support",
"url": "https://github.com/your-repo/aniworld",
"email": "support@aniworld.com",
},
license_info={
"name": "MIT",
"url": "https://opensource.org/licenses/MIT",
},
tags_metadata=[
{
"name": "Authentication",
"description": "Operations related to user authentication and session management",
},
{
"name": "Anime",
"description": "Operations for searching and managing anime series",
},
{
"name": "Episodes",
"description": "Operations for managing individual episodes",
},
{
"name": "Downloads",
"description": "Operations for managing the download queue and progress",
},
{
"name": "System",
"description": "System health, configuration, and maintenance operations",
},
{
"name": "Files",
"description": "File system operations and scanning functionality",
},
]
)
# Configure templates
templates = Jinja2Templates(directory="src/server/web/templates")
# Mount static files
app.mount("/static", StaticFiles(directory="src/server/web/static"), name="static")
# Add CORS middleware
app.add_middleware(
CORSMiddleware,
allow_origins=["*"], # Configure appropriately for production
allow_credentials=True,
allow_methods=["*"],
allow_headers=["*"],
)
# Add application flow middleware (import lazily to avoid circular imports during tests)
try:
if SetupService is None:
from src.server.middleware.application_flow_middleware import (
ApplicationFlowMiddleware as _AppFlow,
)
from src.server.services.setup_service import SetupService as _SetupService
setup_service = _SetupService()
app.add_middleware(_AppFlow, setup_service=setup_service)
except Exception:
# In test environments or minimal setups, middleware may be skipped
pass
# Add global exception handler
app.add_exception_handler(Exception, global_exception_handler)
from src.server.controllers.v2.anime_controller import router as anime_router
# Include controller routers (use v2 controllers to avoid circular imports)
from src.server.controllers.v2.auth_controller import router as auth_router
from src.server.controllers.v2.episode_controller import router as episode_router
from src.server.controllers.v2.setup_controller import router as setup_router
from src.server.controllers.v2.system_controller import router as system_router
app.include_router(auth_router)
app.include_router(setup_router)
app.include_router(anime_router)
app.include_router(episode_router)
app.include_router(system_router)
# Legacy API compatibility endpoints (TODO: migrate JavaScript to use v1 endpoints)
@app.post("/api/add_series")
async def legacy_add_series(
request_data: Dict[str, Any],
current_user: Dict = Depends(get_current_user)
):
"""Legacy endpoint for adding series - basic implementation."""
try:
link = request_data.get('link', '')
name = request_data.get('name', '')
if not link or not name:
return {"status": "error", "message": "Link and name are required"}
return {"status": "success", "message": f"Series '{name}' added successfully"}
except Exception as e:
return {"status": "error", "message": f"Failed to add series: {str(e)}"}
@app.post("/api/download")
async def legacy_download(
request_data: Dict[str, Any],
current_user: Dict = Depends(get_current_user)
):
"""Legacy endpoint for downloading series - basic implementation."""
try:
folders = request_data.get('folders', [])
if not folders:
return {"status": "error", "message": "No folders specified"}
folder_count = len(folders)
return {"status": "success", "message": f"Download started for {folder_count} series"}
except Exception as e:
return {"status": "error", "message": f"Failed to start download: {str(e)}"}
# Setup endpoints moved to controllers: src/server/controllers/setup_controller.py
# Health check endpoint (kept in main app)
@app.get("/health", response_model=HealthResponse, tags=["System"])
async def health_check() -> HealthResponse:
"""
Application health check endpoint.
"""
return HealthResponse(
status="healthy",
timestamp=datetime.utcnow(),
services={
"authentication": "online",
"anime_service": "online",
"episode_service": "online"
}
)
# Common browser requests that might cause "Invalid HTTP request received" warnings
@app.get("/favicon.ico")
async def favicon():
"""Handle favicon requests from browsers."""
return JSONResponse(status_code=404, content={"detail": "Favicon not found"})
@app.get("/robots.txt")
async def robots():
"""Handle robots.txt requests."""
return JSONResponse(status_code=404, content={"detail": "Robots.txt not found"})
@app.get("/")
async def root():
"""Root endpoint redirect to docs."""
return {"message": "AniWorld API", "documentation": "/docs", "health": "/health"}
# Web interface routes
@app.get("/app", response_class=HTMLResponse)
async def web_app(request: Request):
"""Serve the main web application."""
return templates.TemplateResponse("base/index.html", {"request": request})
@app.get("/login", response_class=HTMLResponse)
async def login_page(request: Request):
"""Serve the login page."""
return templates.TemplateResponse("base/login.html", {"request": request})
@app.get("/setup", response_class=HTMLResponse)
async def setup_page(request: Request):
"""Serve the setup page."""
return templates.TemplateResponse("base/setup.html", {"request": request})
@app.get("/queue", response_class=HTMLResponse)
async def queue_page(request: Request):
    """Serve the queue page."""
    context = {"request": request}
    return templates.TemplateResponse("base/queue.html", context)
# Anime endpoints (protected)
@app.get("/api/anime/search", response_model=List[AnimeResponse], tags=["Anime"])
async def search_anime(
    query: str,
    limit: int = 20,
    offset: int = 0,
    current_user: Dict = Depends(get_current_user)
) -> List[AnimeResponse]:
    """
    Search for anime by title.

    Requires: Bearer token in Authorization header

    - **query**: Search query string
    - **limit**: Maximum number of results (1-100)
    - **offset**: Number of results to skip for pagination
    """
    # TODO: Implement actual anime search logic
    # This is a placeholder implementation
    logger.info(f"Searching anime with query: {query}")

    # Mock data for now: filter synthetic titles by the query string.
    needle = query.lower()
    mock_results: List[AnimeResponse] = []
    for i in range(offset + 1, min(offset + limit + 1, 100)):
        if needle not in f"sample anime {i}".lower():
            continue
        mock_results.append(
            AnimeResponse(
                id=f"anime_{i}",
                title=f"Sample Anime {i}",
                description=f"Description for anime {i}",
                episodes=24,
                status="Completed"
            )
        )
    return mock_results
@app.get("/api/anime/{anime_id}", response_model=AnimeResponse, tags=["Anime"])
async def get_anime(
    anime_id: str,
    current_user: Dict = Depends(get_current_user)
) -> AnimeResponse:
    """
    Get detailed information about a specific anime.

    Requires: Bearer token in Authorization header

    - **anime_id**: Unique identifier for the anime
    """
    # TODO: Implement actual anime retrieval logic
    logger.info(f"Fetching anime details for ID: {anime_id}")

    # Mock data for now
    mock_anime = AnimeResponse(
        id=anime_id,
        title=f"Anime {anime_id}",
        description=f"Detailed description for anime {anime_id}",
        episodes=24,
        status="Completed"
    )
    return mock_anime
@app.get("/api/anime/{anime_id}/episodes", response_model=List[EpisodeResponse], tags=["Episodes"])
async def get_anime_episodes(
    anime_id: str,
    current_user: Dict = Depends(get_current_user)
) -> List[EpisodeResponse]:
    """
    Get all episodes for a specific anime.

    Requires: Bearer token in Authorization header

    - **anime_id**: Unique identifier for the anime
    """
    # TODO: Implement actual episode retrieval logic
    logger.info(f"Fetching episodes for anime ID: {anime_id}")

    # Mock data for now: a fixed 24-episode listing.
    episodes: List[EpisodeResponse] = []
    for number in range(1, 25):
        episodes.append(
            EpisodeResponse(
                id=f"{anime_id}_ep_{number}",
                anime_id=anime_id,
                episode_number=number,
                title=f"Episode {number}",
                description=f"Description for episode {number}",
                duration=1440  # 24 minutes in seconds
            )
        )
    return episodes
@app.get("/api/episodes/{episode_id}", response_model=EpisodeResponse, tags=["Episodes"])
async def get_episode(
    episode_id: str,
    current_user: Dict = Depends(get_current_user)
) -> EpisodeResponse:
    """
    Get detailed information about a specific episode.

    Requires: Bearer token in Authorization header

    - **episode_id**: Unique identifier for the episode
    """
    # TODO: Implement actual episode retrieval logic
    logger.info(f"Fetching episode details for ID: {episode_id}")

    # Mock data for now
    mock_episode = EpisodeResponse(
        id=episode_id,
        anime_id="sample_anime",
        episode_number=1,
        title=f"Episode {episode_id}",
        description=f"Detailed description for episode {episode_id}",
        duration=1440
    )
    return mock_episode
# Database health check endpoint
@app.get("/api/system/database/health", response_model=Dict[str, Any], tags=["System"])
async def database_health(current_user: Dict = Depends(get_current_user)) -> Dict[str, Any]:
    """
    Check database connectivity and health.

    Requires: Bearer token in Authorization header

    Currently returns a static placeholder payload (TODO below); only the
    timestamp is computed.
    """
    from datetime import timezone  # local import: keeps module imports untouched

    # TODO: Implement actual database health check
    return {
        "status": "healthy",
        "connection_pool": "active",
        "response_time_ms": 15,
        # timezone-aware replacement for deprecated datetime.utcnow()
        "last_check": datetime.now(timezone.utc).isoformat()
    }
# Configuration endpoint
@app.get("/api/system/config", response_model=Dict[str, Any], tags=["System"])
async def get_system_config(current_user: Dict = Depends(get_current_user)) -> Dict[str, Any]:
    """
    Get system configuration information.

    Requires: Bearer token in Authorization header
    """
    config: Dict[str, Any] = {
        "anime_directory": settings.anime_directory,
        "log_level": settings.log_level,
        "token_expiry_hours": settings.token_expiry_hours,
        "version": "1.0.0",
    }
    return config
if __name__ == "__main__":
    import socket

    # Configure enhanced logging
    log_level = getattr(logging, settings.log_level.upper(), logging.INFO)
    logging.getLogger().setLevel(log_level)

    def is_port_available(host: str, port: int) -> bool:
        """Check if a port is available on the given host."""
        try:
            with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
                sock.bind((host, port))
        except OSError:
            return False
        return True

    host = "127.0.0.1"
    port = 8000

    # Refuse to start if something else already owns the port.
    if not is_port_available(host, port):
        logger.error(f"Port {port} is already in use on {host}. Please stop other services or choose a different port.")
        logger.info("You can check which process is using the port with: netstat -ano | findstr :8000")
        sys.exit(1)

    logger.info("Starting AniWorld FastAPI server with uvicorn...")
    logger.info(f"Anime directory: {settings.anime_directory}")
    logger.info(f"Log level: {settings.log_level}")
    logger.info(f"Server will be available at http://{host}:{port}")
    logger.info(f"API documentation at http://{host}:{port}/docs")

    try:
        # Run the application; reload stays off to prevent constant restarting.
        uvicorn.run(
            "fastapi_app:app",
            host=host,
            port=port,
            reload=False,
            log_level=settings.log_level.lower(),
        )
    except Exception as e:
        logger.error(f"Failed to start server: {e}")
        sys.exit(1)

View File

@ -1,248 +0,0 @@
"""
Application Flow Middleware for FastAPI.
This middleware enforces the application flow priorities:
1. Setup page (if setup is not complete)
2. Authentication page (if user is not authenticated)
3. Main application (for authenticated users with completed setup)
The middleware redirects users to the appropriate page based on their current state
and the state of the application setup.
"""
import logging
from typing import Optional
from fastapi import Request
from fastapi.responses import RedirectResponse
from starlette.middleware.base import BaseHTTPMiddleware
# Import the setup service
try:
    from ...core.application.services.setup_service import SetupService
except ImportError:
    # Handle case where service is not available
    # Fallback stub: always reports setup as complete, so the middleware
    # never blocks requests when the real service cannot be imported.
    class SetupService:
        def is_setup_complete(self):
            return True
# Module-level logger for this middleware module.
logger = logging.getLogger(__name__)
class ApplicationFlowMiddleware(BaseHTTPMiddleware):
    """
    Middleware to enforce application flow: setup -> auth -> main application.

    This middleware:
    1. Checks if setup is complete
    2. Validates authentication status
    3. Redirects to appropriate page based on state
    4. Allows API endpoints and static files to pass through
    """

    def __init__(self, app, setup_service: Optional[SetupService] = None):
        """
        Initialize the application flow middleware.

        Args:
            app: FastAPI application instance
            setup_service: Setup service instance (optional, will create if not provided)
        """
        super().__init__(app)
        self.setup_service = setup_service or SetupService()

        # Define paths that should bypass flow enforcement
        self.bypass_paths = {
            "/static",        # Static files
            "/favicon.ico",   # Browser favicon requests
            "/robots.txt",    # Robots.txt
            "/health",        # Health check endpoints
            "/docs",          # OpenAPI documentation
            "/redoc",         # ReDoc documentation
            "/openapi.json"   # OpenAPI spec
        }

        # API paths that should bypass flow but may require auth
        self.api_paths = {
            "/api",
            "/auth"
        }

        # Pages that are part of the flow and should be accessible
        self.flow_pages = {
            "/setup",
            "/login",
            "/app"
        }

    async def dispatch(self, request: Request, call_next):
        """
        Process the request and enforce application flow.

        Args:
            request: Incoming HTTP request
            call_next: Next middleware/handler in chain

        Returns:
            Response: Either a redirect response or the result of call_next
        """
        try:
            # Get the request path
            path = request.url.path

            # Skip flow enforcement for certain paths
            if self._should_bypass_flow(path):
                return await call_next(request)

            # Check application setup status
            setup_complete = self.setup_service.is_setup_complete()

            # Check authentication status
            is_authenticated = await self._is_user_authenticated(request)

            # Determine the appropriate action
            redirect_response = self._determine_redirect(path, setup_complete, is_authenticated)
            if redirect_response:
                logger.info(f"Redirecting {path} to {redirect_response.headers.get('location')}")
                return redirect_response

            # Continue with the request
            return await call_next(request)

        except Exception as e:
            logger.error(f"Error in ApplicationFlowMiddleware: {e}", exc_info=True)
            # In case of error, allow the request to continue (fail open)
            return await call_next(request)

    def _should_bypass_flow(self, path: str) -> bool:
        """
        Check if the given path should bypass flow enforcement.

        Args:
            path: Request path

        Returns:
            bool: True if path should bypass flow enforcement
        """
        # Check exact bypass paths (prefix match)
        for bypass_path in self.bypass_paths:
            if path.startswith(bypass_path):
                return True

        # API paths bypass flow enforcement (but may have their own auth)
        for api_path in self.api_paths:
            if path.startswith(api_path):
                return True

        return False

    def _check_token_auth(self, request: Request) -> bool:
        """
        Synchronously validate the Bearer JWT on the request.

        Shared by the async dispatch path and the sync get_flow_status()
        helper.

        Args:
            request: HTTP request object

        Returns:
            bool: True if user is authenticated, False otherwise
        """
        try:
            # Check for Authorization header
            auth_header = request.headers.get("authorization")
            if not auth_header or not auth_header.startswith("Bearer "):
                return False

            # Extract and validate token
            token = auth_header.split(" ")[1]

            # Import JWT validation function lazily (avoid circular imports)
            try:
                from ..fastapi_app import verify_jwt_token
                payload = verify_jwt_token(token)
                return payload is not None
            except ImportError:
                # Fallback if import fails
                logger.warning("Could not import JWT verification function")
                return False

        except Exception as e:
            logger.error(f"Error checking authentication: {e}")
            return False

    async def _is_user_authenticated(self, request: Request) -> bool:
        """
        Check if the user is authenticated by validating JWT token.

        Args:
            request: HTTP request object

        Returns:
            bool: True if user is authenticated, False otherwise
        """
        return self._check_token_auth(request)

    def _determine_redirect(self, path: str, setup_complete: bool, is_authenticated: bool) -> Optional[RedirectResponse]:
        """
        Determine if a redirect is needed based on current state.

        Args:
            path: Current request path
            setup_complete: Whether application setup is complete
            is_authenticated: Whether user is authenticated

        Returns:
            Optional[RedirectResponse]: Redirect response if needed, None otherwise
        """
        # If setup is not complete
        if not setup_complete:
            # Allow access to setup page
            if path == "/setup":
                return None
            # Redirect everything else to setup
            return RedirectResponse(url="/setup", status_code=302)

        # Setup is complete, check authentication
        if not is_authenticated:
            # Allow access to login page
            if path == "/login":
                return None
            # Redirect unauthenticated users to login (except for specific pages)
            if path in self.flow_pages or path == "/":
                return RedirectResponse(url="/login", status_code=302)

        # User is authenticated and setup is complete
        else:
            # Redirect from setup/login pages to main app
            if path in ["/setup", "/login", "/"]:
                return RedirectResponse(url="/app", status_code=302)

        # No redirect needed
        return None

    def get_flow_status(self, request: Request) -> dict:
        """
        Get current flow status for debugging/monitoring.

        Args:
            request: HTTP request object

        Returns:
            dict: Current flow status information
        """
        try:
            setup_complete = self.setup_service.is_setup_complete()
            # BUG FIX: previously this called the async
            # _is_user_authenticated() without awaiting it, so
            # "authenticated" was always a (truthy) coroutine object.
            # Use the synchronous checker instead.
            is_authenticated = self._check_token_auth(request)

            return {
                "setup_complete": setup_complete,
                "authenticated": is_authenticated,
                "path": request.url.path,
                "should_bypass": self._should_bypass_flow(request.url.path)
            }
        except Exception as e:
            return {
                "error": str(e),
                "path": request.url.path
            }
def create_application_flow_middleware(setup_service: Optional[SetupService] = None) -> ApplicationFlowMiddleware:
    """
    Factory function to create application flow middleware.

    Args:
        setup_service: Setup service instance (optional)

    Returns:
        ApplicationFlowMiddleware: Configured middleware instance

    NOTE(review): the middleware is instantiated with app=None; presumably
    the ASGI app is attached later by the framework — confirm before calling
    the returned instance directly.
    """
    return ApplicationFlowMiddleware(app=None, setup_service=setup_service)

View File

@ -1,41 +0,0 @@
# FastAPI and ASGI server
fastapi==0.118.0
uvicorn[standard]==0.37.0
python-multipart==0.0.12
# Authentication and security
pyjwt==2.10.1
passlib[bcrypt]==1.7.4
python-jose[cryptography]==3.3.0
# Configuration and environment
pydantic==2.11.10
pydantic-settings==2.11.0
python-dotenv==1.1.1
# Database (if needed)
sqlalchemy==2.0.43
alembic==1.16.5
# HTTP client
httpx==0.28.1
aiofiles==24.1.0
# Utilities
python-dateutil==2.9.0.post0
pytz==2024.2
# Development and testing
pytest==8.4.2
pytest-asyncio==1.2.0
pytest-cov==7.0.0
pytest-mock==3.15.1
# Code quality
black==25.9.0
isort==6.1.0
flake8==7.3.0
mypy==1.18.2
# Logging
structlog==25.1.0

View File

@ -1,34 +0,0 @@
from fastapi import FastAPI
from src.server.controllers import (
anime_controller,
auth_controller,
episode_controller,
setup_controller,
system_controller,
)
# Avoid TestClient/httpx incompatibilities in some envs; we'll check route registration instead
def test_controllers_expose_router_objects():
    # Every controller module must expose a router object.
    controllers = (
        auth_controller,
        anime_controller,
        episode_controller,
        setup_controller,
        system_controller,
    )
    for module in controllers:
        assert hasattr(module, "router")
def test_include_routers_in_app():
    app = FastAPI()
    for controller in (
        auth_controller,
        anime_controller,
        episode_controller,
        setup_controller,
        system_controller,
    ):
        app.include_router(controller.router)
    # Basic sanity: the system config route should be registered on the app
    registered_paths = [route.path for route in app.routes if hasattr(route, 'path')]
    assert "/api/system/config" in registered_paths

View File

@ -1,24 +0,0 @@
import os
from src.config.settings import settings
def test_settings_has_fields(monkeypatch):
    # Ensure settings object has expected attributes and env vars affect values
    monkeypatch.setenv("JWT_SECRET_KEY", "test-secret")
    monkeypatch.setenv("ANIME_DIRECTORY", "/tmp/anime")
    # Reload settings by creating a new instance
    from src.config.settings import Settings
    fresh = Settings()
    assert fresh.jwt_secret_key == "test-secret"
    assert fresh.anime_directory == "/tmp/anime"
def test_settings_defaults():
    # When env not set, defaults are used
    for attribute in ("jwt_secret_key", "anime_directory", "token_expiry_hours"):
        assert hasattr(settings, attribute)

View File

@ -1,549 +0,0 @@
"""
Performance & Optimization Module for AniWorld App
This module provides download speed limiting, parallel download support,
caching mechanisms, memory usage monitoring, and download resumption.
"""
import os
import threading
import time
import logging
import queue
import hashlib
from datetime import datetime, timedelta
from typing import Dict, List, Optional, Any, Callable
from dataclasses import dataclass, field
from concurrent.futures import ThreadPoolExecutor, as_completed
import json
import sqlite3
from contextlib import contextmanager
import gc
import psutil
import requests
@dataclass
class DownloadTask:
    """Represents a download task with all necessary information.

    Instances start in 'pending' status; the download manager sets
    started_at/completed_at and mutates status/progress as the task runs.
    """
    task_id: str
    serie_name: str
    season: int
    episode: int
    key: str  # NOTE(review): opaque download key — semantics set by the downloader, confirm
    language: str
    output_path: str
    temp_path: str
    priority: int = 0  # Higher number = higher priority
    retry_count: int = 0
    max_retries: int = 3
    created_at: datetime = field(default_factory=datetime.now)
    started_at: Optional[datetime] = None
    completed_at: Optional[datetime] = None
    status: str = 'pending'  # pending, downloading, completed, failed, paused
    progress: Dict[str, Any] = field(default_factory=dict)  # latest progress info from the downloader
    error_message: Optional[str] = None  # set when the task fails
class SpeedLimiter:
    """Control download speeds to prevent bandwidth saturation.

    Tracks bytes downloaded since start_download() and, when a limit is
    configured, sleeps in update_progress() just long enough to keep the
    average rate at or below the maximum. A limit of 0 means unlimited.
    """

    def __init__(self, max_speed_mbps: float = 0):  # 0 = unlimited
        self.max_speed_mbps = max_speed_mbps
        self.max_bytes_per_second = max_speed_mbps * 1024 * 1024 if max_speed_mbps > 0 else 0
        self.download_start_time = None
        self.bytes_downloaded = 0
        # Guards all counters and the configured limit.
        self.lock = threading.Lock()
        self.logger = logging.getLogger(__name__)

    def set_speed_limit(self, max_speed_mbps: float):
        """Set maximum download speed in MB/s (0 disables the limit)."""
        with self.lock:
            self.max_speed_mbps = max_speed_mbps
            self.max_bytes_per_second = max_speed_mbps * 1024 * 1024 if max_speed_mbps > 0 else 0
            self.logger.info(f"Speed limit set to {max_speed_mbps} MB/s")

    def start_download(self):
        """Mark the start of a new download session (resets counters)."""
        with self.lock:
            self.download_start_time = time.time()
            self.bytes_downloaded = 0

    def update_progress(self, bytes_downloaded: int):
        """Update download progress and apply speed limiting if needed.

        BUG FIX: the throttling delay is now computed while holding the lock
        but slept *outside* it. Previously time.sleep() ran inside the lock,
        which stalled every other thread reporting progress through this
        limiter. The limit is also now read under the lock so concurrent
        set_speed_limit() calls are observed consistently.
        """
        delay = 0.0
        with self.lock:
            if self.max_bytes_per_second <= 0:  # No limit
                return
            self.bytes_downloaded += bytes_downloaded
            if self.download_start_time:
                elapsed_time = time.time() - self.download_start_time
                if elapsed_time > 0:
                    current_speed = self.bytes_downloaded / elapsed_time
                    if current_speed > self.max_bytes_per_second:
                        # Time the transfer *should* have taken at the limit.
                        target_time = self.bytes_downloaded / self.max_bytes_per_second
                        delay = target_time - elapsed_time
        if delay > 0:
            self.logger.debug(f"Speed limiting: sleeping for {delay:.2f}s")
            time.sleep(delay)

    def get_current_speed(self) -> float:
        """Get current download speed in MB/s (0.0 before start_download)."""
        with self.lock:
            if self.download_start_time:
                elapsed_time = time.time() - self.download_start_time
                if elapsed_time > 0:
                    speed_bps = self.bytes_downloaded / elapsed_time
                    return speed_bps / (1024 * 1024)  # Convert to MB/s
            return 0.0
class MemoryMonitor:
    """Monitor and optimize memory usage.

    A daemon thread periodically samples the process RSS; crossing the
    warning threshold is logged, crossing the critical threshold also
    triggers a forced garbage collection.
    """

    def __init__(self, warning_threshold_mb: int = 1024, critical_threshold_mb: int = 2048):
        # Thresholds are stored in bytes.
        self.warning_threshold = warning_threshold_mb * 1024 * 1024
        self.critical_threshold = critical_threshold_mb * 1024 * 1024
        self.logger = logging.getLogger(__name__)
        self.monitoring = False
        self.monitor_thread = None

    def start_monitoring(self, check_interval: int = 30):
        """Start continuous memory monitoring."""
        if self.monitoring:
            return  # already running
        self.monitoring = True
        self.monitor_thread = threading.Thread(
            target=self._monitoring_loop,
            args=(check_interval,),
            daemon=True,
        )
        self.monitor_thread.start()
        self.logger.info("Memory monitoring started")

    def stop_monitoring(self):
        """Stop memory monitoring."""
        self.monitoring = False
        if self.monitor_thread:
            self.monitor_thread.join(timeout=5)
        self.logger.info("Memory monitoring stopped")

    def _monitoring_loop(self, check_interval: int):
        """Sample memory usage until monitoring is switched off."""
        while self.monitoring:
            try:
                self.check_memory_usage()
            except Exception as e:
                self.logger.error(f"Error in memory monitoring: {e}")
            time.sleep(check_interval)

    def check_memory_usage(self):
        """Check current memory usage and take action if needed."""
        try:
            proc = psutil.Process()
            memory_usage = proc.memory_info().rss

            if memory_usage > self.critical_threshold:
                self.logger.warning(f"Critical memory usage: {memory_usage / (1024*1024):.1f} MB")
                self.force_garbage_collection()
                # Re-sample after GC to see whether pressure was relieved.
                memory_usage = proc.memory_info().rss
                if memory_usage > self.critical_threshold:
                    self.logger.error("Memory usage still critical after garbage collection")
            elif memory_usage > self.warning_threshold:
                self.logger.info(f"Memory usage warning: {memory_usage / (1024*1024):.1f} MB")
        except Exception as e:
            self.logger.error(f"Failed to check memory usage: {e}")

    def force_garbage_collection(self):
        """Force garbage collection to free memory."""
        self.logger.debug("Forcing garbage collection")
        collected = gc.collect()
        self.logger.debug(f"Garbage collection freed {collected} objects")

    def get_memory_stats(self) -> Dict[str, Any]:
        """Return current memory statistics (empty dict on failure)."""
        try:
            proc = psutil.Process()
            mem = proc.memory_info()
            mb = 1024 * 1024
            return {
                'rss_mb': mem.rss / mb,
                'vms_mb': mem.vms / mb,
                'percent': proc.memory_percent(),
                'warning_threshold_mb': self.warning_threshold / mb,
                'critical_threshold_mb': self.critical_threshold / mb
            }
        except Exception as e:
            self.logger.error(f"Failed to get memory stats: {e}")
            return {}
class ParallelDownloadManager:
    """Manage parallel downloads with configurable thread count.

    Tasks are kept in a priority queue and started by a background worker
    loop; at most ``max_workers`` tasks download concurrently.
    """

    def __init__(self, max_workers: int = 3, speed_limiter: Optional[SpeedLimiter] = None):
        self.max_workers = max_workers
        self.speed_limiter = speed_limiter or SpeedLimiter()
        self.executor = ThreadPoolExecutor(max_workers=max_workers)
        self.active_tasks: Dict[str, DownloadTask] = {}
        self.pending_queue = queue.PriorityQueue()
        self.completed_tasks: List[DownloadTask] = []
        self.failed_tasks: List[DownloadTask] = []
        self.lock = threading.Lock()
        self.logger = logging.getLogger(__name__)
        self.running = False
        self.worker_thread = None
        # BUG FIX: monotonic sequence number used as a tie-breaker in the
        # priority queue. DownloadTask is not orderable, so the previous
        # (-priority, task) entries raised TypeError inside PriorityQueue
        # as soon as two tasks shared the same priority.
        self._task_seq = 0

        # Statistics
        self.stats = {
            'total_tasks': 0,
            'completed_tasks': 0,
            'failed_tasks': 0,
            'active_tasks': 0,
            'average_speed_mbps': 0.0
        }

    def start(self):
        """Start the download manager (idempotent)."""
        if self.running:
            return
        self.running = True
        self.worker_thread = threading.Thread(target=self._worker_loop, daemon=True)
        self.worker_thread.start()
        self.logger.info(f"Download manager started with {self.max_workers} workers")

    def stop(self):
        """Stop the download manager, cancelling pending tasks."""
        self.running = False

        # Cancel all pending tasks
        with self.lock:
            while not self.pending_queue.empty():
                try:
                    _, _, task = self.pending_queue.get_nowait()
                    task.status = 'cancelled'
                except queue.Empty:
                    break

        # Shutdown executor
        self.executor.shutdown(wait=True)

        if self.worker_thread:
            self.worker_thread.join(timeout=5)

        self.logger.info("Download manager stopped")

    def add_task(self, task: DownloadTask) -> str:
        """Add a download task to the queue and return its id."""
        with self.lock:
            self.stats['total_tasks'] += 1
            self._task_seq += 1
            # Negative priority gives max-heap behavior; the sequence number
            # breaks ties so the (unorderable) task object is never compared.
            self.pending_queue.put((-task.priority, self._task_seq, task))
        self.logger.info(f"Added download task: {task.task_id}")
        return task.task_id

    def _worker_loop(self):
        """Main worker loop that processes download tasks."""
        while self.running:
            try:
                # Check for pending tasks
                if not self.pending_queue.empty() and len(self.active_tasks) < self.max_workers:
                    _, _, task = self.pending_queue.get_nowait()
                    if task.status == 'pending':
                        self._start_task(task)

                # Check completed tasks
                self._check_completed_tasks()

                time.sleep(0.1)  # Small delay to prevent busy waiting

            except queue.Empty:
                time.sleep(1)
            except Exception as e:
                self.logger.error(f"Error in worker loop: {e}")
                time.sleep(1)

    def _start_task(self, task: DownloadTask):
        """Start a download task on the thread pool."""
        with self.lock:
            task.status = 'downloading'
            task.started_at = datetime.now()
            self.active_tasks[task.task_id] = task
            self.stats['active_tasks'] = len(self.active_tasks)

        # Submit to thread pool; the future is attached dynamically so
        # _check_completed_tasks() can poll it.
        future = self.executor.submit(self._execute_download, task)
        task.future = future
        self.logger.info(f"Started download task: {task.task_id}")

    def _execute_download(self, task: DownloadTask) -> bool:
        """Execute the actual download; returns True on success."""
        try:
            self.logger.info(f"Executing download: {task.serie_name} S{task.season}E{task.episode}")

            # Progress callback that also feeds the shared speed limiter.
            def progress_callback(info):
                if 'downloaded_bytes' in info:
                    self.speed_limiter.update_progress(info.get('downloaded_bytes', 0))
                # Update task progress
                task.progress.update(info)

            self.speed_limiter.start_download()

            # Here you would call the actual download function
            # For now, simulate download
            success = self._simulate_download(task, progress_callback)
            return success

        except Exception as e:
            self.logger.error(f"Download failed for task {task.task_id}: {e}")
            task.error_message = str(e)
            return False

    def _simulate_download(self, task: DownloadTask, progress_callback: Callable) -> bool:
        """Simulate download for testing purposes."""
        # This is a placeholder - replace with actual download logic
        total_size = 100 * 1024 * 1024  # 100MB simulation
        downloaded = 0
        chunk_size = 1024 * 1024  # 1MB chunks

        while downloaded < total_size and task.status == 'downloading':
            # Simulate download chunk
            time.sleep(0.1)
            downloaded += chunk_size

            progress_info = {
                'status': 'downloading',
                'downloaded_bytes': downloaded,
                'total_bytes': total_size,
                'percent': (downloaded / total_size) * 100
            }
            progress_callback(progress_info)

        if downloaded >= total_size:
            progress_callback({'status': 'finished'})
            return True
        return False

    def _check_completed_tasks(self):
        """Check for completed download tasks and finalize them."""
        completed_task_ids = []

        with self.lock:
            for task_id, task in self.active_tasks.items():
                if hasattr(task, 'future') and task.future.done():
                    completed_task_ids.append(task_id)

        # Process completed tasks outside the lock; _handle_completed_task
        # re-acquires it.
        for task_id in completed_task_ids:
            self._handle_completed_task(task_id)

    def _handle_completed_task(self, task_id: str):
        """Move a finished task into the completed or failed list."""
        with self.lock:
            task = self.active_tasks.pop(task_id, None)
            if not task:
                return
            task.completed_at = datetime.now()
            self.stats['active_tasks'] = len(self.active_tasks)

            try:
                success = task.future.result()
                if success:
                    task.status = 'completed'
                    self.completed_tasks.append(task)
                    self.stats['completed_tasks'] += 1
                    self.logger.info(f"Task completed successfully: {task_id}")
                else:
                    task.status = 'failed'
                    self.failed_tasks.append(task)
                    self.stats['failed_tasks'] += 1
                    self.logger.warning(f"Task failed: {task_id}")
            except Exception as e:
                task.status = 'failed'
                task.error_message = str(e)
                self.failed_tasks.append(task)
                self.stats['failed_tasks'] += 1
                self.logger.error(f"Task failed with exception: {task_id} - {e}")

    def get_task_status(self, task_id: str) -> Optional[Dict[str, Any]]:
        """Get status of a specific task, or None if unknown."""
        with self.lock:
            # Check active tasks
            if task_id in self.active_tasks:
                task = self.active_tasks[task_id]
                return self._task_to_dict(task)
            # Check completed tasks
            for task in self.completed_tasks:
                if task.task_id == task_id:
                    return self._task_to_dict(task)
            # Check failed tasks
            for task in self.failed_tasks:
                if task.task_id == task_id:
                    return self._task_to_dict(task)
        return None

    def _task_to_dict(self, task: DownloadTask) -> Dict[str, Any]:
        """Convert task to dictionary representation."""
        return {
            'task_id': task.task_id,
            'serie_name': task.serie_name,
            'season': task.season,
            'episode': task.episode,
            'status': task.status,
            'progress': task.progress,
            'created_at': task.created_at.isoformat(),
            'started_at': task.started_at.isoformat() if task.started_at else None,
            'completed_at': task.completed_at.isoformat() if task.completed_at else None,
            'error_message': task.error_message,
            'retry_count': task.retry_count
        }

    def get_all_tasks(self) -> Dict[str, List[Dict[str, Any]]]:
        """Get all tasks grouped by status."""
        with self.lock:
            return {
                'active': [self._task_to_dict(task) for task in self.active_tasks.values()],
                'completed': [self._task_to_dict(task) for task in self.completed_tasks[-50:]],  # Last 50
                'failed': [self._task_to_dict(task) for task in self.failed_tasks[-50:]]  # Last 50
            }

    def get_statistics(self) -> Dict[str, Any]:
        """Get a copy of the download manager statistics."""
        return self.stats.copy()

    def set_max_workers(self, max_workers: int):
        """Change the number of worker threads.

        NOTE: the old executor is shut down without waiting, so downloads
        already submitted to it keep running on the old pool.
        """
        if max_workers <= 0:
            raise ValueError("max_workers must be positive")

        self.max_workers = max_workers
        # Recreate executor with new worker count
        old_executor = self.executor
        self.executor = ThreadPoolExecutor(max_workers=max_workers)
        old_executor.shutdown(wait=False)
        self.logger.info(f"Updated worker count to {max_workers}")
class ResumeManager:
    """Manage download resumption for interrupted downloads.

    Resume state is persisted as one JSON file per task id inside
    ``resume_dir``.
    """

    def __init__(self, resume_dir: str = "./resume"):
        self.resume_dir = resume_dir
        self.logger = logging.getLogger(__name__)
        os.makedirs(resume_dir, exist_ok=True)

    def _resume_path(self, task_id: str) -> str:
        # Single place that maps a task id to its resume file.
        return os.path.join(self.resume_dir, f"{task_id}.json")

    def save_resume_info(self, task_id: str, resume_data: Dict[str, Any]):
        """Save resume information for a download."""
        target = self._resume_path(task_id)
        try:
            with open(target, 'w') as handle:
                json.dump(resume_data, handle, indent=2, default=str)
            self.logger.debug(f"Saved resume info for task: {task_id}")
        except Exception as e:
            self.logger.error(f"Failed to save resume info for {task_id}: {e}")

    def load_resume_info(self, task_id: str) -> Optional[Dict[str, Any]]:
        """Load resume information for a download (None if unavailable)."""
        target = self._resume_path(task_id)
        try:
            if os.path.exists(target):
                with open(target, 'r') as handle:
                    payload = json.load(handle)
                self.logger.debug(f"Loaded resume info for task: {task_id}")
                return payload
        except Exception as e:
            self.logger.error(f"Failed to load resume info for {task_id}: {e}")
        return None

    def clear_resume_info(self, task_id: str):
        """Clear resume information after successful completion."""
        target = self._resume_path(task_id)
        try:
            if os.path.exists(target):
                os.remove(target)
                self.logger.debug(f"Cleared resume info for task: {task_id}")
        except Exception as e:
            self.logger.error(f"Failed to clear resume info for {task_id}: {e}")

    def get_resumable_tasks(self) -> List[str]:
        """Get list of task ids that have saved resume state."""
        try:
            return [
                os.path.splitext(entry)[0]
                for entry in os.listdir(self.resume_dir)
                if entry.endswith('.json')
            ]
        except Exception as e:
            self.logger.error(f"Failed to get resumable tasks: {e}")
            return []
# Global instances
# Module-level singletons shared across the application. They are started
# and stopped via init_performance_monitoring()/cleanup_performance_monitoring().
speed_limiter = SpeedLimiter()
memory_monitor = MemoryMonitor()
# The download manager shares the global speed limiter so all workers are
# throttled together.
download_manager = ParallelDownloadManager(max_workers=3, speed_limiter=speed_limiter)
resume_manager = ResumeManager()
def init_performance_monitoring():
    """Initialize performance monitoring components.

    Starts the global memory monitor thread and the download manager's
    worker loop. Intended to be called once at application startup.
    """
    memory_monitor.start_monitoring()
    download_manager.start()
def cleanup_performance_monitoring():
    """Clean up performance monitoring components.

    Stops the global memory monitor and download manager. Intended to be
    called once at application shutdown.
    """
    memory_monitor.stop_monitoring()
    download_manager.stop()
# Export main components
__all__ = [
'SpeedLimiter',
'MemoryMonitor',
'ParallelDownloadManager',
'ResumeManager',
'DownloadTask',
'speed_limiter',
'download_cache',
'memory_monitor',
'download_manager',
'resume_manager',
'init_performance_monitoring',
'cleanup_performance_monitoring'
]

View File

@ -1,293 +0,0 @@
import threading
import time
from datetime import datetime, timedelta
from typing import Dict, Optional, Callable
import logging
logger = logging.getLogger(__name__)
class ProcessLock:
    """Thread-safe process lock for preventing duplicate operations.

    The lock is named and expires after ``timeout_minutes``: an expired
    holder is forcibly released on the next acquire() attempt. Progress
    data can be attached while the lock is held and is forwarded to an
    optional callback. Usable as a context manager.
    """
    def __init__(self, name: str, timeout_minutes: int = 60):
        # Human-readable lock name (used in log messages).
        self.name = name
        # After this many minutes a held lock is considered expired.
        self.timeout_minutes = timeout_minutes
        # RLock guarding all mutable state below.
        self.lock = threading.RLock()
        self.locked_at: Optional[datetime] = None
        self.locked_by: Optional[str] = None
        self.progress_callback: Optional[Callable] = None
        self.is_locked = False
        self.progress_data = {}
    def acquire(self, locked_by: str = "system", progress_callback: Optional[Callable] = None) -> bool:
        """
        Attempt to acquire the lock.
        Returns True if lock was acquired, False if already locked.
        """
        with self.lock:
            # Check if lock has expired
            if self.is_locked and self.locked_at:
                if datetime.now() - self.locked_at > timedelta(minutes=self.timeout_minutes):
                    logger.warning(f"Process lock '{self.name}' expired, releasing...")
                    self._release_internal()
            if self.is_locked:
                return False
            self.is_locked = True
            self.locked_at = datetime.now()
            self.locked_by = locked_by
            self.progress_callback = progress_callback
            self.progress_data = {}
            logger.info(f"Process lock '{self.name}' acquired by '{locked_by}'")
            return True
    def release(self) -> bool:
        """Release the lock. Returns False if it was not held."""
        with self.lock:
            if not self.is_locked:
                return False
            self._release_internal()
            logger.info(f"Process lock '{self.name}' released")
            return True
    def _release_internal(self):
        """Internal method to release lock without logging."""
        self.is_locked = False
        self.locked_at = None
        self.locked_by = None
        self.progress_callback = None
        self.progress_data = {}
    def is_locked_by_other(self, requester: str) -> bool:
        """Check if lock is held by someone other than requester."""
        with self.lock:
            return self.is_locked and self.locked_by != requester
    def get_status(self) -> Dict:
        """Get current lock status as a plain dict (progress is copied)."""
        with self.lock:
            return {
                'is_locked': self.is_locked,
                'locked_by': self.locked_by,
                'locked_at': self.locked_at.isoformat() if self.locked_at else None,
                'progress': self.progress_data.copy(),
                'timeout_minutes': self.timeout_minutes
            }
    def update_progress(self, progress_data: Dict):
        """Update progress data for this lock (no-op when not held)."""
        with self.lock:
            if self.is_locked:
                self.progress_data.update(progress_data)
                # NOTE: the callback runs while the lock is held; callbacks
                # should be quick and must not try to re-acquire this lock
                # from another thread.
                if self.progress_callback:
                    try:
                        self.progress_callback(progress_data)
                    except Exception as e:
                        logger.error(f"Progress callback error: {e}")
    def __enter__(self):
        """Context manager entry; raises ProcessLockError if already held."""
        # ProcessLockError is defined later in this module; the forward
        # reference is resolved at call time.
        if not self.acquire():
            raise ProcessLockError(f"Could not acquire lock '{self.name}'")
        return self
    def __exit__(self, exc_type, exc_val, exc_tb):
        """Context manager exit; always releases the lock."""
        self.release()
class ProcessLockError(Exception):
    """Raised when a process lock cannot be acquired or managed."""
class ProcessLockManager:
    """Registry that hands out and coordinates named ProcessLock instances."""

    def __init__(self):
        # name -> ProcessLock; registry mutations are guarded by manager_lock.
        self.locks: Dict[str, ProcessLock] = {}
        self.manager_lock = threading.RLock()

    def get_lock(self, name: str, timeout_minutes: int = 60) -> ProcessLock:
        """Return the lock registered under *name*, creating it on first use."""
        with self.manager_lock:
            lock = self.locks.get(name)
            if lock is None:
                lock = ProcessLock(name, timeout_minutes)
                self.locks[name] = lock
            return lock

    def acquire_lock(self, name: str, locked_by: str = "system",
                     timeout_minutes: int = 60, progress_callback: Callable = None) -> bool:
        """Acquire the named lock; returns False when it is already held."""
        return self.get_lock(name, timeout_minutes).acquire(locked_by, progress_callback)

    def release_lock(self, name: str) -> bool:
        """Release the named lock; False when it does not exist or was not held."""
        with self.manager_lock:
            lock = self.locks.get(name)
            return lock.release() if lock is not None else False

    def is_locked(self, name: str) -> bool:
        """Return True when a lock with this name exists and is currently held."""
        with self.manager_lock:
            lock = self.locks.get(name)
            return bool(lock is not None and lock.is_locked)

    def get_all_locks_status(self) -> Dict:
        """Map every registered lock name to its status snapshot."""
        with self.manager_lock:
            return {name: lock.get_status() for name, lock in self.locks.items()}

    def cleanup_expired_locks(self) -> int:
        """Force-release every lock past its timeout; returns how many were freed."""
        cleaned_count = 0
        with self.manager_lock:
            for lock in self.locks.values():
                if not (lock.is_locked and lock.locked_at):
                    continue
                if datetime.now() - lock.locked_at > timedelta(minutes=lock.timeout_minutes):
                    lock._release_internal()
                    cleaned_count += 1
                    logger.info(f"Cleaned up expired lock: {lock.name}")
        return cleaned_count

    def force_release_all(self) -> int:
        """Unconditionally release every held lock; returns how many were released."""
        released_count = 0
        with self.manager_lock:
            for lock in self.locks.values():
                if not lock.is_locked:
                    continue
                lock._release_internal()
                released_count += 1
                logger.warning(f"Force released lock: {lock.name}")
        return released_count
# Module-wide singleton used by the helper functions below.
process_lock_manager = ProcessLockManager()
# Well-known lock names shared across the application's long-running operations.
RESCAN_LOCK = "rescan"
DOWNLOAD_LOCK = "download"
SEARCH_LOCK = "search"
CONFIG_LOCK = "config"
def with_process_lock(lock_name: str, timeout_minutes: int = 60):
    """Decorator that serializes calls to a function behind a named process lock.

    The wrapped function accepts two extra control keyword arguments that are
    consumed here and not forwarded: ``_locked_by`` (holder label, defaults to
    the function's name) and ``_progress_callback`` (forwarded to the lock).

    Raises:
        ProcessLockError: when the named lock is already held.
    """
    import functools  # local import keeps the decorator self-contained

    def decorator(func):
        # functools.wraps preserves __name__/__doc__ so logging, debugging and
        # the _locked_by default report the real function, not "wrapper".
        @functools.wraps(func)
        def wrapper(*args, **kwargs):
            # Pop control kwargs so they are never passed through to func.
            locked_by = kwargs.pop('_locked_by', func.__name__)
            progress_callback = kwargs.pop('_progress_callback', None)
            if not process_lock_manager.acquire_lock(
                    lock_name, locked_by, timeout_minutes, progress_callback):
                raise ProcessLockError(f"Process '{lock_name}' is already running")
            try:
                return func(*args, **kwargs)
            finally:
                # Always release, even when func raises.
                process_lock_manager.release_lock(lock_name)
        return wrapper
    return decorator
def check_process_locks():
    """Sweep the global manager for expired locks; returns the number removed."""
    return process_lock_manager.cleanup_expired_locks()
def get_process_status(lock_name: str) -> Dict:
    """Return the status snapshot for the named process lock."""
    return process_lock_manager.get_lock(lock_name).get_status()
def update_process_progress(lock_name: str, progress_data: Dict):
    """Forward *progress_data* to the named lock, but only while it is held."""
    if not process_lock_manager.is_locked(lock_name):
        return
    process_lock_manager.get_lock(lock_name).update_progress(progress_data)
def is_process_running(lock_name: str) -> bool:
    """Return True when the named process lock is currently held."""
    return process_lock_manager.is_locked(lock_name)
class QueueDeduplicator:
    """Tracks (serie_name, season, episode) keys so the download queue stays duplicate-free."""

    def __init__(self):
        # Keys currently queued or in flight.
        self.active_items = set()
        self.lock = threading.RLock()

    def add_episode(self, serie_name: str, season: int, episode: int) -> bool:
        """Register an episode.

        Returns True when it was newly added, False for a duplicate.
        """
        key = (serie_name, season, episode)
        with self.lock:
            if key in self.active_items:
                return False
            self.active_items.add(key)
            return True

    def remove_episode(self, serie_name: str, season: int, episode: int):
        """Stop tracking an episode; unknown keys are silently ignored."""
        with self.lock:
            self.active_items.discard((serie_name, season, episode))

    def is_episode_active(self, serie_name: str, season: int, episode: int) -> bool:
        """Return True while the episode is being tracked."""
        with self.lock:
            return (serie_name, season, episode) in self.active_items

    def get_active_episodes(self) -> list:
        """Return every tracked episode key as a list of tuples."""
        with self.lock:
            return list(self.active_items)

    def clear_all(self):
        """Drop every tracked episode."""
        with self.lock:
            self.active_items.clear()

    def get_count(self) -> int:
        """Return the number of tracked episodes."""
        with self.lock:
            return len(self.active_items)
# Module-wide deduplicator shared by the queue helper functions below.
episode_deduplicator = QueueDeduplicator()
def add_episode_to_queue_safe(serie_name: str, season: int, episode: int) -> bool:
    """Register an episode with the global deduplicator.

    Returns True when the episode was newly added, False for a duplicate.
    """
    return episode_deduplicator.add_episode(serie_name, season, episode)
def remove_episode_from_queue(serie_name: str, season: int, episode: int):
    """Drop an episode from the global deduplication tracking."""
    episode_deduplicator.remove_episode(serie_name, season, episode)
def is_episode_in_queue(serie_name: str, season: int, episode: int) -> bool:
    """Return True when the episode is already queued or being processed."""
    return episode_deduplicator.is_episode_active(serie_name, season, episode)

View File

@ -1,63 +0,0 @@
@echo off
:: AniWorld FastAPI Server Startup Script for Windows
:: This script activates the conda environment and starts the FastAPI server
echo Starting AniWorld FastAPI Server...
echo.
:: Check if conda is available
where conda >nul 2>nul
if %errorlevel% neq 0 (
echo Error: Conda is not available in PATH
echo Please install Anaconda/Miniconda or add conda to your PATH
pause
exit /b 1
)
:: Activate the AniWorld conda environment
:: NOTE(review): assumes an environment named "AniWorld" was created beforehand
echo Activating AniWorld conda environment...
call conda activate AniWorld
if %errorlevel% neq 0 (
echo Error: Failed to activate AniWorld environment
echo Please create the environment first: conda create -n AniWorld python=3.11
pause
exit /b 1
)
:: Check if uvicorn is installed
python -c "import uvicorn" 2>nul
if %errorlevel% neq 0 (
echo Error: uvicorn is not installed
echo Installing FastAPI dependencies...
pip install -r requirements.txt
if %errorlevel% neq 0 (
echo Error: Failed to install dependencies
pause
exit /b 1
)
)
:: Set default environment variables if not set
if not defined MASTER_PASSWORD (
echo Warning: MASTER_PASSWORD environment variable is not set
echo You can set it by running: set MASTER_PASSWORD=your_password
echo.
)
:: Start the FastAPI server
echo Starting FastAPI server on http://127.0.0.1:8000
echo Press Ctrl+C to stop the server
echo.
echo Available endpoints:
echo - Web Interface: http://127.0.0.1:8000
echo - API Documentation: http://127.0.0.1:8000/docs
echo - Alternative API Docs: http://127.0.0.1:8000/redoc
echo.
:: Start the server with development settings
:: --reload restarts the server on code changes; drop it for production use
python -m uvicorn src.server.fastapi_app:app --host 127.0.0.1 --port 8000 --reload --log-level info
:: If we get here, the server has stopped
echo.
echo Server stopped.
:: Keep the console window open so the final message can be read
pause

View File

@ -1,90 +0,0 @@
#!/usr/bin/env python3
"""
AniWorld FastAPI Server Startup Script
This script provides a convenient way to start the AniWorld FastAPI server
with proper configuration for both development and production environments.
"""
import argparse
import os
import sys
from pathlib import Path
import uvicorn
# Add the project root to Python path so "src.server..." imports resolve.
# NOTE(review): assumes this script lives at the repository root — confirm.
project_root = Path(__file__).parent
sys.path.insert(0, str(project_root))
def main():
    """Parse CLI options and launch the AniWorld FastAPI server via uvicorn."""
    parser = argparse.ArgumentParser(description="Start AniWorld FastAPI Server")
    # Server configuration arguments
    parser.add_argument(
        "--host",
        default="127.0.0.1",
        help="Host to bind the server (default: 127.0.0.1)",
    )
    parser.add_argument(
        "--port",
        type=int,
        default=8000,
        help="Port to bind the server (default: 8000)",
    )
    parser.add_argument(
        "--reload",
        action="store_true",
        help="Enable auto-reload for development (default: False)",
    )
    parser.add_argument(
        "--workers",
        type=int,
        default=1,
        help="Number of worker processes (default: 1)",
    )
    parser.add_argument(
        "--log-level",
        default="info",
        choices=["debug", "info", "warning", "error", "critical"],
        help="Log level (default: info)",
    )
    parser.add_argument(
        "--env",
        default="development",
        choices=["development", "production"],
        help="Environment mode (default: development)",
    )
    args = parser.parse_args()

    app_path = "src.server.fastapi_app:app"
    # Options shared by both environment modes.
    common = {
        "host": args.host,
        "port": args.port,
        "log_level": args.log_level,
        "access_log": True,
    }
    if args.env == "development":
        # Development: single process, optional auto-reload, colored logs.
        uvicorn.run(
            app_path,
            reload=args.reload,
            use_colors=True,
            **common,
        )
    else:
        # Production: worker pool, Server/Date response headers suppressed.
        uvicorn.run(
            app_path,
            workers=args.workers,
            server_header=False,
            date_header=False,
            **common,
        )
# Script entry point: only runs when executed directly, not when imported.
if __name__ == "__main__":
    main()