Compare commits
6 Commits: 6d0c3fdf26 ... 6a695966bf
| Author | SHA1 | Date |
|---|---|---|
|  | 6a695966bf |  |
|  | 7481a33c15 |  |
|  | e48cb29131 |  |
|  | 7b933b6cdb |  |
|  | 7a71715183 |  |
|  | 57d49bcf78 |  |
.env (deleted, 44 lines)
@@ -1,44 +0,0 @@

# Aniworld Server Environment Configuration

# Security (REQUIRED - Generate secure random values)
SECRET_KEY=dev_secret_key_change_in_production_12345
JWT_SECRET_KEY=jwt_secret_key_change_in_production_67890
PASSWORD_SALT=salt_change_in_production_abcdef

# Master Password Authentication (Simple system)
MASTER_PASSWORD_HASH=8cf532e926e9493630820ce80005f6e2239305ac64c34069e869be5106e2af10
# MASTER_PASSWORD=admin123 # Used for development only, remove in production

# Database Configuration
DATABASE_URL=sqlite:///data/aniworld.db
DATABASE_POOL_SIZE=10
DATABASE_MAX_OVERFLOW=20
DATABASE_POOL_TIMEOUT=30
DATABASE_POOL_RECYCLE=3600

# Redis Configuration (for caching and sessions)
REDIS_URL=redis://localhost:6379/0
REDIS_MAX_CONNECTIONS=10
REDIS_SOCKET_TIMEOUT=5

# Security Settings
SESSION_TIMEOUT_HOURS=24
MAX_FAILED_LOGIN_ATTEMPTS=5
LOCKOUT_DURATION_MINUTES=30

# Rate Limiting
RATE_LIMIT_PER_MINUTE=60
API_RATE_LIMIT_PER_MINUTE=100

# Application Settings
DEBUG=true
HOST=127.0.0.1
PORT=5000

# Anime and Download Settings
ANIME_DIRECTORY=./downloads
MAX_CONCURRENT_DOWNLOADS=3

# Logging
LOG_LEVEL=INFO
LOG_FILE=logs/aniworld.log
@@ -1,56 +0,0 @@

# Aniworld Server Environment Configuration
# Copy this file to .env and fill in your values

# Security (REQUIRED - Generate secure random values)
SECRET_KEY=your_secret_key_here
JWT_SECRET_KEY=your_jwt_secret_here
PASSWORD_SALT=your_password_salt_here

# Database Configuration
DATABASE_URL=sqlite:///data/aniworld.db
# DATABASE_PASSWORD=your_db_password_here
DATABASE_POOL_SIZE=10
DATABASE_MAX_OVERFLOW=20
DATABASE_POOL_TIMEOUT=30
DATABASE_POOL_RECYCLE=3600

# Redis Configuration (for caching and sessions)
REDIS_URL=redis://localhost:6379/0
# REDIS_PASSWORD=your_redis_password_here
REDIS_MAX_CONNECTIONS=10
REDIS_SOCKET_TIMEOUT=5

# Email Configuration (for password reset emails)
SMTP_SERVER=localhost
SMTP_PORT=587
# SMTP_USERNAME=your_smtp_username
# SMTP_PASSWORD=your_smtp_password
SMTP_USE_TLS=true
FROM_EMAIL=noreply@aniworld.local

# External API Keys
# ANIME_PROVIDER_API_KEY=your_anime_provider_api_key
# TMDB_API_KEY=your_tmdb_api_key

# Security Settings
SESSION_TIMEOUT_HOURS=24
MAX_FAILED_LOGIN_ATTEMPTS=5
LOCKOUT_DURATION_MINUTES=30

# Rate Limiting
RATE_LIMIT_PER_MINUTE=60
API_RATE_LIMIT_PER_MINUTE=100

# Application Settings
DEBUG=false
HOST=127.0.0.1
PORT=5000

# Anime and Download Settings
ANIME_DIRECTORY=./downloads
MAX_CONCURRENT_DOWNLOADS=3
# DOWNLOAD_SPEED_LIMIT=1000000 # bytes per second

# Logging
LOG_LEVEL=INFO
LOG_FILE=logs/aniworld.log
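For reference, a minimal sketch of how these variables might be consumed from Python, assuming `python-dotenv` is installed; the variable names come from the template above, while the `get_settings` helper is hypothetical:

```python
import os

from dotenv import load_dotenv  # pip install python-dotenv


def get_settings() -> dict:
    """Load the .env file and return the handful of settings this sketch assumes."""
    load_dotenv()  # reads .env from the working directory into os.environ
    return {
        "secret_key": os.environ["SECRET_KEY"],  # required, no default
        "database_url": os.environ.get("DATABASE_URL", "sqlite:///data/aniworld.db"),
        "debug": os.environ.get("DEBUG", "false").lower() == "true",
        "port": int(os.environ.get("PORT", "5000")),
    }
```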
.flake8 (deleted, 28 lines)
@@ -1,28 +0,0 @@

[flake8]
max-line-length = 88
exclude =
    .git,
    __pycache__,
    build,
    dist,
    .venv,
    venv,
    aniworld,
    migrations,
    .pytest_cache,
    .mypy_cache,
    .coverage,
    htmlcov
extend-ignore =
    # E203: whitespace before ':' (conflicts with black)
    E203,
    # W503: line break before binary operator (conflicts with black)
    W503,
    # E501: line too long (handled by black)
    E501
per-file-ignores =
    __init__.py:F401
    tests/*:F401,F811
max-complexity = 10
docstring-convention = google
import-order-style = google
.github/copilot-instructions.md (vendored, 166 lines changed)
@@ -4,136 +4,118 @@ These instructions define how GitHub Copilot should assist with this project. Th

## 🧠 Context

- **Project Type**: Web API / Data Pipeline / CLI Tool / ML App
- **Language**: Python
- **Framework / Libraries**: FastAPI / Flask / Django / Pandas / Pydantic / Poetry
- **Architecture**: MVC / Clean Architecture / Event-Driven / Microservices

## 🔧 General Guidelines

- Use Pythonic patterns (PEP8, PEP257).
- Prefer named functions and class-based structures over inline lambdas.
- Use type hints where applicable (`typing` module).
- Follow black or isort for formatting and import order.
- Use meaningful naming; avoid cryptic variables.
- Emphasize simplicity, readability, and DRY principles.

## 📁 File Structure

Use this structure as a guide when creating or updating files:

```text
src/
  controllers/
  services/
  repositories/
  schemas/
  utils/
  config/
tests/
  unit/
  integration/
```

## 🧶 Patterns

### ✅ Patterns to Follow

- Use the Repository Pattern and Dependency Injection (e.g., via `Depends` in FastAPI); see the sketch after this list.
- Validate data using Pydantic models.
- Use custom exceptions and centralized error handling.
- Use environment variables via `dotenv` or `os.environ`.
- Use logging via the `logging` module or structlog.
- Write modular, reusable code organized by concerns (e.g., controller, service, data layer).
- Favor async endpoints for I/O-bound services (FastAPI, aiohttp).
- Document functions and classes with docstrings.
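A minimal sketch of the first three patterns working together, using FastAPI's real `Depends` mechanism; the `Anime` model and `AnimeRepository` are illustrative names, not the project's actual classes:

```python
from fastapi import Depends, FastAPI, HTTPException
from pydantic import BaseModel

app = FastAPI()


class Anime(BaseModel):
    """Pydantic model that validates the response schema."""
    id: int
    title: str


class AnimeRepository:
    """Repository hides the data source behind a narrow interface."""
    _rows = {1: Anime(id=1, title="Example Series")}

    def get(self, anime_id: int) -> Anime | None:
        return self._rows.get(anime_id)


def get_repository() -> AnimeRepository:
    """Dependency provider; a real app might inject a DB session here."""
    return AnimeRepository()


@app.get("/api/anime/{anime_id}", response_model=Anime)
async def read_anime(
    anime_id: int, repo: AnimeRepository = Depends(get_repository)
) -> Anime:
    anime = repo.get(anime_id)
    if anime is None:
        raise HTTPException(status_code=404, detail="Anime not found")
    return anime
```

Swapping the in-memory repository for a database-backed one only changes `get_repository`; the route itself stays untouched, which is the point of the pattern.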
### 🚫 Patterns to Avoid

- Don't use wildcard imports (`from module import *`).
- Avoid global state unless encapsulated in a singleton or config manager.
- Don't hardcode secrets or config values—use `.env`.
- Don't expose internal stack traces in production environments.
- Avoid business logic inside views/routes.

## 🧪 Testing Guidelines

- Use `pytest` or `unittest` for unit and integration tests.
- Mock external services with `unittest.mock` or `pytest-mock`.
- Use fixtures to set up and tear down test data (see the example after this list).
- Aim for high coverage on core logic and low-level utilities.
- Test both happy paths and edge cases.
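A short illustration of the fixture-and-mock guidance above; `fetch_episode_count` and its `provider` argument are invented for the example:

```python
from unittest.mock import Mock

import pytest


def fetch_episode_count(provider) -> int:
    """Toy function under test; a real one would call an external service."""
    return len(provider.list_episodes())


@pytest.fixture
def fake_provider() -> Mock:
    """Fixture builds a fresh mocked external provider for each test."""
    provider = Mock()
    provider.list_episodes.return_value = ["ep1", "ep2", "ep3"]
    return provider


def test_fetch_episode_count(fake_provider: Mock) -> None:
    assert fetch_episode_count(fake_provider) == 3
    fake_provider.list_episodes.assert_called_once()
```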
## 🧩 Example Prompts

- `Copilot, create a FastAPI endpoint that returns all users from the database.`
- `Copilot, write a Pydantic model for a product with id, name, and optional price.`
- `Copilot, implement a CLI command that uploads a CSV file and logs a summary.`
- `Copilot, write a pytest test for the transform_data function using a mock input.`

## 🔁 Iteration & Review

- Review Copilot output before committing.
- Add comments to clarify intent if Copilot generates incorrect or unclear suggestions.
- Use linters (flake8, pylint) and formatters (black, isort) as part of the review pipeline.
- Refactor output to follow project conventions.

## 📚 References

- [PEP 8 – Style Guide for Python Code](https://peps.python.org/pep-0008/)
- [PEP 484 – Type Hints](https://peps.python.org/pep-0484/)
- [FastAPI Documentation](https://fastapi.tiangolo.com/)
- [Django Documentation](https://docs.djangoproject.com/en/stable/)
- [Flask Documentation](https://flask.palletsprojects.com/)
- [Pytest Documentation](https://docs.pytest.org/en/stable/)
- [Pydantic Documentation](https://docs.pydantic.dev/)
- [Python Logging Best Practices](https://docs.python.org/3/howto/logging.html)
- [Black Code Formatter](https://black.readthedocs.io/)
- [Poetry](https://python-poetry.org/docs/)

## 1. General Philosophy

- **Clarity is King:** Code should be easy to understand at a glance.
- **Consistency Matters:** Adhere to these standards across all projects.
- **Automation Encouraged:** Utilize tools like StyleCop, Roslyn Analyzers, and .editorconfig to enforce these standards automatically.
- **Evolve and Adapt:** These standards should be reviewed and updated as the C# language and best practices evolve.
- **Practicality Reigns:** While striving for perfection, prioritize pragmatic solutions that balance maintainability and development speed.
- Clean Code, keep it simple, MVVM

## 2. Security Considerations

- **Input Validation:** Always validate user input to prevent injection attacks (e.g., SQL injection, XSS).
- **Secure Configuration:** Store sensitive information (e.g., passwords, API keys) in secure configuration files, and encrypt them if possible. Avoid hardcoding sensitive data.
- **Authentication and Authorization:** Implement proper authentication and authorization mechanisms to protect resources. Favor using built-in identity frameworks.
- **Data Encryption:** Encrypt sensitive data at rest and in transit. Use strong encryption algorithms.
- **Regular Security Audits:** Perform regular security audits and penetration testing to identify and address vulnerabilities.
- **Dependency Vulnerabilities:** Keep dependencies up to date to patch known security vulnerabilities. Use tools to automatically check for vulnerabilities.

## 3. Performance Optimization

- **Minimize Object Allocation:** Reduce unnecessary object allocations, especially in performance-critical code. Use techniques like object pooling and struct types for small value types.
- **Use Efficient Data Structures:** Choose the appropriate data structure for the task (e.g., `Dictionary` for fast lookups, `List` for ordered collections).
- **Avoid Boxing/Unboxing:** Avoid boxing and unboxing operations, as they can be expensive. Use generics to prevent boxing.
- **String Concatenation:** Use `StringBuilder` for building strings in loops instead of repeated string concatenation.
- **Asynchronous I/O:** Use asynchronous I/O operations to avoid blocking threads.
- **Profiling:** Use profiling tools to identify performance bottlenecks.

## 4. GUI

- **Effortless:** Faster and more intuitive. It's easy to do what I want, with focus and precision.
- **Calm:** Faster and more intuitive. It's easy to do what I want, with focus and precision.
- **Iconography:** Iconography is a set of visual images and symbols that help users understand and navigate your app. Windows 11 iconography has evolved in concert with our design language. Every glyph in our system icon font has been redesigned to embrace a softer geometry and more modern metaphors.
- **Shapes and geometry:** Geometry describes the shape, size, and position of UI elements on screen. These fundamental design elements help experiences feel coherent across the entire design system. Windows 11 features updated geometry that creates a more approachable, engaging, and modern experience.
- **Typography:** As the visual representation of language, the main task of typography is to communicate information. The Windows 11 type system helps you create structure and hierarchy in your content in order to maximize legibility and readability in your UI.
- **Familiar:** Faster and more intuitive. It's easy to do what I want, with focus and precision.
- **Fluent UI design:** Use Fluent UI design.
- **Themes:** Use the already defined theme colors. Make sure there is always a dark and a light mode.
- **Text:** Write text in resource files so that translation is easily possible. Use the already defined text in the resource files.

This document serves as a starting point and is meant to be adapted to the specific needs of each project and team. Regularly review and update these standards to keep them relevant and effective.

Run until you are really finished.
Do not guess; open and read files if you don't know something.
@@ -1,191 +0,0 @@

# AniWorld FastAPI Documentation

## Overview

AniWorld has been successfully migrated from Flask to FastAPI, providing improved performance, automatic API documentation, and modern async support.

## Accessing API Documentation

### Interactive API Documentation

FastAPI automatically generates interactive API documentation that you can access at:

- **Swagger UI**: `http://localhost:8000/docs`
- **ReDoc**: `http://localhost:8000/redoc`

These interfaces allow you to:

- Browse all available endpoints
- View request/response schemas
- Test API endpoints directly from the browser
- Download the OpenAPI schema

### OpenAPI Schema

The complete OpenAPI 3.0 schema is available at:

- **JSON Format**: `http://localhost:8000/openapi.json`

## Authentication

### Master Password Authentication

AniWorld uses a simple master password authentication system with JWT tokens.

#### Login Process

1. **POST** `/auth/login`
   - Send the master password in the request body
   - Receive a JWT token in the response
   - The token expires in 24 hours

```json
{
  "password": "your_master_password"
}
```

Response:

```json
{
  "success": true,
  "token": "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9...",
  "message": "Login successful"
}
```

#### Using the Authentication Token

Include the token in the `Authorization` header for authenticated requests:

```text
Authorization: Bearer eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9...
```
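The same flow from Python, as a small sketch using the third-party `requests` library (an assumption; any HTTP client works) against the endpoints documented below:

```python
import requests

BASE_URL = "http://localhost:8000"

# 1. Log in with the master password to obtain a JWT token.
login = requests.post(f"{BASE_URL}/auth/login", json={"password": "your_master_password"})
login.raise_for_status()
token = login.json()["token"]

# 2. Send the token in the Authorization header on subsequent requests.
headers = {"Authorization": f"Bearer {token}"}
response = requests.get(
    f"{BASE_URL}/api/anime/search", params={"query": "naruto"}, headers=headers
)
print(response.json())
```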
## API Endpoints

### System Health

- **GET** `/health` - Check system health and status
- **GET** `/api/system/database/health` - Check database connectivity
- **GET** `/api/system/config` - Get system configuration

### Authentication

- **POST** `/auth/login` - Authenticate and get a JWT token
- **GET** `/auth/verify` - Verify current token validity
- **POST** `/auth/logout` - Log out and invalidate the token
- **GET** `/api/auth/status` - Get current authentication status

### Anime Management

- **GET** `/api/anime/search` - Search for anime series
- **GET** `/api/anime/{anime_id}` - Get specific anime details
- **GET** `/api/anime/{anime_id}/episodes` - Get episodes for an anime

### Episode Management

- **GET** `/api/episodes/{episode_id}` - Get specific episode details

### Series Management

- **POST** `/api/add_series` - Add a new series to tracking
- **POST** `/api/download` - Start an episode download

### Web Interface

- **GET** `/` - Main application interface
- **GET** `/app` - Application dashboard
- **GET** `/login` - Login page
- **GET** `/setup` - Setup page
- **GET** `/queue` - Download queue interface

## Response Formats

### Success Responses

All successful API responses follow this structure:

```json
{
  "success": true,
  "data": {...},
  "message": "Operation completed successfully"
}
```

### Error Responses

Error responses include detailed error information:

```json
{
  "success": false,
  "error": "Error description",
  "code": "ERROR_CODE",
  "details": {...}
}
```
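A sketch of how these envelopes could be mirrored as Pydantic models on either side of the API; the class names are invented for illustration:

```python
from typing import Any, Optional

from pydantic import BaseModel


class SuccessResponse(BaseModel):
    """Mirrors the documented success envelope."""
    success: bool = True
    data: Optional[dict[str, Any]] = None
    message: str = ""


class ErrorResponse(BaseModel):
    """Mirrors the documented error envelope."""
    success: bool = False
    error: str
    code: str
    details: Optional[dict[str, Any]] = None
```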
## Status Codes

- **200 OK** - Successful operation
- **201 Created** - Resource created successfully
- **400 Bad Request** - Invalid request data
- **401 Unauthorized** - Authentication required
- **403 Forbidden** - Insufficient permissions
- **404 Not Found** - Resource not found
- **422 Unprocessable Entity** - Validation error
- **500 Internal Server Error** - Server error

## Rate Limiting

Currently, no rate limiting is implemented, but it may be added in future versions.

## WebSocket Support

Real-time updates are available through WebSocket connections (see the client sketch after this list) for:

- Download progress updates
- Scan progress updates
- System status changes
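The exact WebSocket paths are not listed here, so the following client sketch assumes a hypothetical `/ws/downloads` endpoint purely for illustration; it uses the third-party `websockets` package:

```python
import asyncio

import websockets  # pip install websockets


async def watch_downloads() -> None:
    # NOTE: /ws/downloads is an assumed path, not a documented endpoint.
    async with websockets.connect("ws://localhost:8000/ws/downloads") as ws:
        async for message in ws:  # each message is one progress update
            print("progress update:", message)


asyncio.run(watch_downloads())
```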
## Migration Notes

### Changes from Flask

1. **Automatic Documentation**: FastAPI provides built-in OpenAPI documentation
2. **Type Safety**: Full request/response validation with Pydantic
3. **Async Support**: Native async/await support for better performance
4. **Modern Standards**: OpenAPI 3.0, JSON Schema validation
5. **Better Error Handling**: Structured error responses with detailed information

### Breaking Changes

- Authentication tokens are now JWT-based instead of session-based
- Request/response formats may have slight differences
- Some endpoint URLs may have changed
- WebSocket endpoints use the FastAPI WebSocket pattern

## Development

### Running the Server

```bash
# Development mode with auto-reload
uvicorn src.server.fastapi_app:app --host 127.0.0.1 --port 8000 --reload

# Production mode
uvicorn src.server.fastapi_app:app --host 0.0.0.0 --port 8000
```

### Environment Variables

- `MASTER_PASSWORD_HASH` - Hashed master password
- `JWT_SECRET_KEY` - Secret key for JWT token signing
- `LOG_LEVEL` - Logging level (DEBUG, INFO, WARNING, ERROR)

## Support

For issues, questions, or contributions, please visit the project repository or contact the development team.
Overview.md (deleted, 74 lines)
@@ -1,74 +0,0 @@

# AniWorld Project Overview

## 📁 Folder Structure

The project follows a modular, layered architecture inspired by MVC and Clean Architecture principles. The main directories are:

```
src/
  controllers/    # API endpoints and route handlers
  services/       # Business logic and orchestration
  repositories/   # Data access layer (DB, external APIs)
  schemas/        # Pydantic models for validation/serialization
  utils/          # Utility functions and helpers
  config/         # Configuration management (env, settings)
tests/
  unit/           # Unit tests for core logic
  integration/    # Integration tests for end-to-end scenarios
```

## 🏗️ Architecture

- **MVC & Clean Architecture:** Separation of concerns between controllers (views), services (business logic), and repositories (data access).
- **Dependency Injection:** Used for service/repository wiring, especially with FastAPI's `Depends`.
- **Event-Driven & Microservices Ready:** Modular design allows for future scaling into microservices or event-driven workflows.
- **Centralized Error Handling:** Custom exceptions and error middleware for consistent API responses (see the sketch after this list).
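A compact sketch of the centralized error-handling pattern using FastAPI's real exception-handler hook; `AniWorldError` is an illustrative exception name, not necessarily the project's own:

```python
from fastapi import FastAPI, Request
from fastapi.responses import JSONResponse

app = FastAPI()


class AniWorldError(Exception):
    """Illustrative base class for domain errors."""

    def __init__(self, message: str, code: str = "ANIWORLD_ERROR") -> None:
        super().__init__(message)
        self.message = message
        self.code = code


@app.exception_handler(AniWorldError)
async def aniworld_error_handler(request: Request, exc: AniWorldError) -> JSONResponse:
    # Matches the error envelope from the API documentation above.
    return JSONResponse(
        status_code=400,
        content={"success": False, "error": exc.message, "code": exc.code, "details": {}},
    )
```

Any service or repository can then raise `AniWorldError` subclasses and rely on the handler to produce a consistent API response.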
## 🧰 Used Libraries & Frameworks

- **Python** (PEP8, PEP257, type hints)
- **FastAPI**: High-performance async web API framework
- **Pydantic**: Data validation and serialization
- **Poetry**: Dependency management and packaging
- **dotenv / os.environ**: Environment variable management
- **logging / structlog**: Structured logging
- **pytest / unittest**: Testing frameworks
- **aiohttp**: Async HTTP client (where needed)
- **SQLAlchemy / asyncpg / databases**: Database ORM and async drivers (if present)
- **Prometheus**: Metrics endpoint integration
- **Other**: As required for integrations (webhooks, third-party APIs)

## 🧩 Patterns & Conventions

- **Repository Pattern:** All data access is abstracted via repositories.
- **Service Layer:** Business logic is encapsulated in services, not controllers.
- **Pydantic Models:** Used for all input/output validation.
- **Async Endpoints:** All I/O-bound endpoints are async for scalability.
- **Environment Configuration:** All secrets/configs are loaded from `.env` or environment variables.
- **Logging:** All logs are structured and configurable.
- **Testing:** High coverage with fixtures and mocks for external dependencies.

## 🛡️ Security & Performance

- **JWT Authentication:** Secure endpoints with token-based auth.
- **Input Validation:** All user input is validated via Pydantic.
- **No Hardcoded Secrets:** All sensitive data is externalized.
- **Performance Optimization:** Async I/O, caching, and profiling tools.

## 🎨 UI & CLI

- **Theme Support:** Light/dark/auto modes.
- **Accessibility:** Screen reader, color contrast, keyboard shortcuts.
- **CLI Tool:** For bulk operations, scanning, and management.

## 📚 References

- [FastAPI Documentation](https://fastapi.tiangolo.com/)
- [Pydantic Documentation](https://docs.pydantic.dev/)
- [Poetry](https://python-poetry.org/docs/)
- [PEP 8](https://peps.python.org/pep-0008/)
- [Black Formatter](https://black.readthedocs.io/)

---

**For details on individual features and endpoints, see `features.md`.**
README.md (deleted, 268 lines)
@@ -1,268 +0,0 @@

# AniWorld - Anime Series Management System

A powerful anime series management system that helps you track, organize, and download your favorite anime series. Recently migrated from Flask to FastAPI for improved performance and modern API capabilities.

## 🚀 Features

### Core Functionality

- **Series Tracking**: Automatically detect missing episodes in your anime collection
- **Smart Downloads**: Queue-based download system with progress tracking
- **File Organization**: Automatic file scanning and folder structure management
- **Search Integration**: Search for anime series across multiple providers
- **Real-time Updates**: Live progress updates via WebSocket connections

### Web Interface

- **Modern UI**: Clean, responsive web interface with dark/light theme support
- **Download Queue**: Visual download queue management
- **Progress Tracking**: Real-time download and scan progress
- **Mobile Support**: Fully responsive design for mobile devices

### API & Integration

- **FastAPI Backend**: High-performance async API with automatic documentation
- **RESTful API**: Complete REST API for programmatic access
- **OpenAPI Documentation**: Interactive API documentation at `/docs`
- **Authentication**: Secure master password authentication with JWT tokens

## 🎯 Recent Migration: Flask → FastAPI

This project has been successfully migrated from Flask to FastAPI, bringing significant improvements:

### Performance Benefits

- **Async Support**: Native async/await for better concurrency
- **Faster Response Times**: Up to 2-3x performance improvement
- **Better Resource Utilization**: More efficient handling of concurrent requests

### Developer Experience

- **Automatic Documentation**: Built-in OpenAPI/Swagger documentation
- **Type Safety**: Full request/response validation with Pydantic
- **Modern Standards**: OpenAPI 3.0 compliance and JSON Schema validation
- **Better Error Handling**: Structured error responses with detailed information

### API Improvements

- **Interactive Documentation**: Test API endpoints directly from `/docs`
- **Schema Validation**: Automatic request/response validation
- **Better Error Messages**: Detailed validation errors with field-level feedback

## 🛠️ Installation & Setup

### Prerequisites

- Python 3.11+
- Conda package manager
- Windows OS (currently optimized for Windows)

### Quick Start

1. **Clone the Repository**

   ```bash
   git clone <repository-url>
   cd Aniworld
   ```

2. **Create and Activate Conda Environment**

   ```bash
   conda create -n AniWorld python=3.11
   conda activate AniWorld
   ```

3. **Install Dependencies**

   ```bash
   pip install -r requirements.txt
   ```

4. **Set Environment Variables**

   ```bash
   # Set your master password (will be hashed automatically)
   set MASTER_PASSWORD=your_secure_password
   ```

5. **Start the FastAPI Server**

   ```bash
   # Development mode with auto-reload
   uvicorn src.server.fastapi_app:app --host 127.0.0.1 --port 8000 --reload

   # Or use the VS Code task: "Run FastAPI Server"
   ```

6. **Access the Application**
   - **Web Interface**: http://localhost:8000
   - **API Documentation**: http://localhost:8000/docs
   - **Alternative API Docs**: http://localhost:8000/redoc

### Alternative: Using VS Code Tasks

If you're using VS Code, you can use the pre-configured tasks:

- `Ctrl+Shift+P` → "Tasks: Run Task" → "Run FastAPI Server"

## 🔧 Configuration

### Environment Variables

- `MASTER_PASSWORD` - Your master password (will be hashed automatically)
- `MASTER_PASSWORD_HASH` - Pre-hashed password (alternative to MASTER_PASSWORD)
- `JWT_SECRET_KEY` - Secret key for JWT token signing (auto-generated if not set)
- `LOG_LEVEL` - Logging level (DEBUG, INFO, WARNING, ERROR)

### Directory Structure

```
Aniworld/
├── src/
│   ├── core/                # Core business logic
│   │   ├── SeriesApp.py     # Main application controller
│   │   ├── entities/        # Data models
│   │   └── providers/       # Content providers
│   ├── server/              # FastAPI server
│   │   ├── fastapi_app.py   # Main FastAPI application
│   │   └── web/             # Web interface and controllers
│   └── infrastructure/      # Infrastructure components
├── data/                    # Application data and databases
├── logs/                    # Application logs
└── requirements.txt         # Python dependencies
```

## 🌐 API Usage

### Authentication

1. **Log in to get a JWT token**:

   ```bash
   curl -X POST "http://localhost:8000/auth/login" \
     -H "Content-Type: application/json" \
     -d '{"password": "your_master_password"}'
   ```

2. **Use the token in requests**:

   ```bash
   curl -X GET "http://localhost:8000/api/anime/search?query=naruto" \
     -H "Authorization: Bearer your_jwt_token_here"
   ```

### Key Endpoints

- **Authentication**: `/auth/login`, `/auth/verify`, `/auth/logout`
- **System**: `/health`, `/api/system/config`
- **Anime**: `/api/anime/search`, `/api/anime/{id}`
- **Episodes**: `/api/episodes/{id}`, `/api/anime/{id}/episodes`
- **Downloads**: `/api/download`, `/api/add_series`

For complete API documentation, visit `/docs` when the server is running.

## 🖥️ Web Interface

### Main Features

- **Dashboard**: Overview of your anime collection and missing episodes
- **Search**: Find and add new anime series to track
- **Downloads**: Manage the download queue and monitor progress
- **Settings**: Configure application preferences

### Responsive Design

The web interface is fully responsive and supports:

- Desktop browsers (Chrome, Firefox, Edge, Safari)
- Mobile devices (iOS Safari, Android Chrome)
- Tablet devices
- Dark and light themes

## 🔍 Troubleshooting

### Common Issues

1. **Server won't start**

   - Check that the AniWorld conda environment is activated
   - Verify all dependencies are installed: `pip install -r requirements.txt`
   - Check for port conflicts (default: 8000)

2. **Authentication errors**

   - Verify the master password is set correctly
   - Check that environment variables are properly configured
   - Clear browser cache/cookies

3. **Import errors**

   - Ensure all required packages are installed
   - Check Python path configuration
   - Verify the conda environment is activated

### Logs

Application logs are stored in the `logs/` directory:

- `aniworld.log` - General application logs
- `errors.log` - Error-specific logs
- `auth_failures.log` - Authentication failure logs

## 🚦 Development

### Running in Development Mode

```bash
# With auto-reload for development
uvicorn src.server.fastapi_app:app --host 127.0.0.1 --port 8000 --reload --log-level debug
```

### Testing

```bash
# Run all tests
python -m pytest tests/ -v

# Run with coverage
python -m pytest tests/ --cov=src --cov-report=html
```

### Code Quality

```bash
# Format code
black src/
isort src/

# Lint code
pylint src/
flake8 src/
```

## 📚 Documentation

- **API Documentation**: Available at `/docs` (Swagger UI) and `/redoc` (ReDoc)
- **Migration Guide**: See `API_DOCUMENTATION.md` for detailed migration information
- **FastAPI Specific**: See `src/server/README_FastAPI.md` for server-specific documentation

## 🤝 Contributing

1. Fork the repository
2. Create a feature branch (`git checkout -b feature/amazing-feature`)
3. Commit your changes (`git commit -m 'Add amazing feature'`)
4. Push to the branch (`git push origin feature/amazing-feature`)
5. Open a Pull Request

## 📄 License

This project is licensed under the MIT License - see the LICENSE file for details.

## 🙏 Acknowledgments

- FastAPI team for the excellent framework
- The original Flask implementation that served as the foundation
- All contributors and users of the AniWorld project

---

**Note**: This application is for personal use only. Please respect copyright laws and terms of service of content providers.
ServerTodo.md (deleted, 227 lines)
@@ -1,227 +0,0 @@

# Web Migration TODO: Flask to FastAPI

This document contains tasks for migrating the web application from Flask to FastAPI. Each task should be marked as completed with [x] when finished.

## 📋 Project Analysis and Setup

### Initial Assessment

- [x] Review current Flask application structure in the `/src/web/` directory
- [x] Identify all Flask routes and their HTTP methods
- [x] Document current template engine usage (Jinja2)
- [x] List all static file serving requirements
- [x] Inventory all middleware and extensions currently used
- [x] Document current error handling patterns
- [x] Review authentication/authorization mechanisms

### FastAPI Setup

- [x] Install FastAPI dependencies: `pip install fastapi uvicorn jinja2 python-multipart`
- [x] Update `requirements.txt` or `pyproject.toml` with new dependencies
- [x] Remove Flask dependencies: `flask`, `flask-*` packages
- [x] Create new FastAPI application entry point

## 🔧 Core Application Migration

### Main Application Structure

- [x] Create new `main.py` or update the existing app entry point with a FastAPI app instance
- [x] Migrate Flask app configuration to FastAPI settings using Pydantic BaseSettings (see the sketch after this list)
- [x] Convert Flask blueprints to FastAPI routers
- [x] Update CORS configuration from Flask-CORS to FastAPI CORS middleware
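A minimal sketch of the BaseSettings migration, assuming pydantic v1 (in pydantic v2 the class moved to the separate `pydantic-settings` package); the field names follow the `.env` file shown earlier:

```python
from pydantic import BaseSettings  # pydantic v1; v2: from pydantic_settings import BaseSettings


class Settings(BaseSettings):
    """Typed application settings loaded from the environment."""
    secret_key: str = "change_me"
    database_url: str = "sqlite:///data/aniworld.db"
    debug: bool = False
    port: int = 5000

    class Config:
        env_file = ".env"  # values from .env override the defaults above


settings = Settings()  # environment variable names are matched case-insensitively
```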
### Route Conversion

- [x] Convert all `@app.route()` decorators to FastAPI route decorators (`@app.get()`, `@app.post()`, etc.); see the before/after sketch after this list
- [x] Update route parameter syntax from `<int:id>` to `{id: int}` format
- [x] Convert Flask request object usage (`request.form`, `request.json`) to FastAPI request models
- [x] Update response handling from Flask `jsonify()` to FastAPI's automatic JSON serialization
- [x] Convert Flask `redirect()` and `url_for()` to FastAPI equivalents
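For illustration, the same toy endpoint before and after the conversion; `/api/episodes/{episode_id}` mirrors an endpoint from the API documentation:

```python
# Before (Flask): converter-style parameter plus explicit jsonify()
# @app.route("/api/episodes/<int:episode_id>")
# def get_episode(episode_id):
#     return jsonify({"id": episode_id})

# After (FastAPI): typed path parameter, automatic JSON serialization
from fastapi import FastAPI

app = FastAPI()


@app.get("/api/episodes/{episode_id}")
async def get_episode(episode_id: int) -> dict:
    # Returned dicts are serialized to JSON automatically; no jsonify() needed.
    return {"id": episode_id}
```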
### Request/Response Models

- [x] Create Pydantic models for request bodies (replace Flask request parsing)
- [x] Create Pydantic models for response schemas
- [x] Update form handling to use FastAPI Form dependencies
- [x] Convert file upload handling to FastAPI UploadFile

## 🎨 Template and Static Files Migration

### Template Engine Setup

- [x] Configure Jinja2Templates in the FastAPI application
- [x] Set up template directory structure
- [x] Create templates directory configuration in the FastAPI app

### HTML Template Migration

- [x] Review all `.html` files in the templates directory
- [x] Update template rendering from Flask `render_template()` to FastAPI `templates.TemplateResponse()`
- [x] Verify Jinja2 syntax compatibility (should be mostly unchanged)
- [x] Update template context passing to match the FastAPI pattern
- [x] Test that all template variables and filters still work correctly

### Static Files Configuration

- [x] Configure a StaticFiles mount in FastAPI for CSS, JS, and images (see the sketch after this list)
- [x] Update static file URL generation in templates
- [x] Verify all CSS file references work correctly
- [x] Verify all JavaScript file references work correctly
- [x] Test image and other asset serving
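A small sketch combining the Jinja2Templates and StaticFiles setup named in the two sections above; the `templates/` and `static/` directory names are assumptions:

```python
from fastapi import FastAPI, Request
from fastapi.staticfiles import StaticFiles
from fastapi.templating import Jinja2Templates

app = FastAPI()

# Serve CSS/JS/images from ./static under the /static URL prefix.
app.mount("/static", StaticFiles(directory="static"), name="static")

# Jinja2 templates live in ./templates.
templates = Jinja2Templates(directory="templates")


@app.get("/")
async def index(request: Request):
    # FastAPI's replacement for Flask's render_template(); the request
    # object must be passed into the template context.
    return templates.TemplateResponse("index.html", {"request": request})
```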
## 💻 JavaScript and Frontend Migration

### Inline JavaScript Review

- [x] Scan all HTML templates for inline `<script>` tags
- [x] Review JavaScript code for Flask-specific URL generation (e.g., `{{ url_for() }}`)
- [x] Update AJAX endpoints to match the new FastAPI route structure
- [x] Convert Flask CSRF token handling to FastAPI security patterns

### External JavaScript Files

- [x] Review all `.js` files in the static directory
- [x] Update API endpoint URLs to match FastAPI routing
- [x] Verify fetch() or XMLHttpRequest calls use correct endpoints
- [x] Update any Flask-specific JavaScript patterns
- [x] Test all JavaScript functionality after migration

### CSS Files Review

- [x] Verify all `.css` files are served correctly
- [x] Check for any Flask-specific CSS patterns or URL references
- [x] Test responsive design and styling after migration

## 🔐 Security and Middleware Migration

### Authentication/Authorization

- [x] Convert Flask-Login or similar to FastAPI security dependencies (see the sketch after this list)
- [x] Update session management (FastAPI doesn't have built-in sessions)
- [x] Migrate password hashing and verification
- [x] Convert authentication decorators to FastAPI dependencies
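What a decorator-to-dependency conversion can look like, as a sketch using FastAPI's real `HTTPBearer` scheme; `verify_jwt` is a hypothetical helper standing in for the project's actual token check:

```python
from fastapi import Depends, FastAPI, HTTPException
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer

app = FastAPI()
bearer_scheme = HTTPBearer()


def verify_jwt(token: str) -> bool:
    """Hypothetical stand-in for the real JWT validation logic."""
    return token.count(".") == 2  # placeholder structural check only


def require_auth(
    credentials: HTTPAuthorizationCredentials = Depends(bearer_scheme),
) -> str:
    """Dependency that replaces a Flask @login_required decorator."""
    if not verify_jwt(credentials.credentials):
        raise HTTPException(status_code=401, detail="Invalid or expired token")
    return credentials.credentials


@app.get("/api/auth/status")
async def auth_status(token: str = Depends(require_auth)) -> dict:
    return {"success": True, "message": "Token is valid"}
```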
### Middleware Migration

- [x] Convert Flask middleware to FastAPI middleware
- [x] Update error handling from Flask error handlers to FastAPI exception handlers
- [x] Migrate request/response interceptors
- [x] Update logging middleware if used

## 🚀 Application Flow & Setup Features

### Setup and Authentication Flow

- [x] Implement application setup detection middleware
- [x] Create setup page template and route for first-time configuration
- [x] Implement configuration file/database setup validation
- [x] Create authentication token validation middleware
- [x] Implement auth page template and routes for login/registration
- [x] Create main application route with authentication dependency
- [x] Implement setup completion tracking in configuration
- [x] Add redirect logic for setup → auth → main application flow
- [x] Create Pydantic models for setup and authentication requests
- [x] Implement session management for authenticated users
- [x] Add token refresh and expiration handling
- [x] Create middleware to enforce application flow priorities

## 🧪 Testing and Validation

### Functional Testing

- [x] Test all web routes return correct responses
- [x] Verify all HTML pages render correctly
- [x] Test all forms submit and process data correctly
- [x] Verify file uploads work (if applicable)
- [x] Test authentication flows (login/logout/registration)

### Frontend Testing

- [x] Test all JavaScript functionality
- [x] Verify AJAX calls work correctly
- [x] Test dynamic content loading
- [x] Verify CSS styling is applied correctly
- [x] Test responsive design on different screen sizes

### Integration Testing

- [x] Test database connectivity and operations
- [x] Verify API endpoints return correct data
- [x] Test error handling and user feedback
- [x] Verify security features work correctly

## 📚 Documentation and Cleanup

### Code Documentation

- [x] Update API documentation to reflect FastAPI changes
- [x] Add OpenAPI/Swagger documentation (automatic with FastAPI)
- [x] Update README with new setup instructions
- [x] Document any breaking changes or new patterns

### Code Cleanup

- [x] Remove unused Flask imports and dependencies
- [x] Clean up any Flask-specific code patterns
- [x] Update imports to use FastAPI equivalents
- [x] Remove deprecated or unused template files
- [x] Clean up static files that are no longer needed

## 🚀 Deployment and Configuration

### Server Configuration

- [x] Update server startup to use `uvicorn` instead of the Flask development server
- [x] Configure production ASGI server (uvicorn, gunicorn with uvicorn workers)
- [x] Update any reverse proxy configuration (nginx, Apache)
- [x] Test application startup and shutdown

### Environment Configuration

- [x] Update environment variables for FastAPI
- [x] Configure logging for the FastAPI application
- [x] Update any deployment scripts or Docker configurations
- [x] Test application in different environments (dev, staging, prod)

## ✅ Final Verification

### Complete System Test

- [x] Perform end-to-end testing of all user workflows
- [x] Verify performance is acceptable or improved
- [x] Test error scenarios and edge cases
- [x] Confirm all original functionality is preserved
- [x] Validate security measures are in place and working

### Monitoring and Observability

- [x] Set up health check endpoints
- [x] Configure metrics collection (if used)
- [x] Set up error monitoring and alerting
- [x] Test logging and debugging capabilities

---

## 📝 Migration Notes

### Important FastAPI Concepts to Remember:

- FastAPI uses async/await by default (but sync functions work too)
- Automatic request/response validation with Pydantic
- Built-in OpenAPI documentation
- Dependency injection system
- Type hints are crucial for FastAPI functionality

### Common Gotchas:

- FastAPI doesn't have built-in session support (use an external library if needed)
- Template responses need an explicit media_type for HTML
- Static file mounting needs to be configured explicitly
- The request object structure is different from Flask

### Performance Considerations:

- FastAPI is generally faster than Flask
- Consider using async functions for I/O operations
- Use background tasks for long-running operations
- Implement proper caching strategies
TestsTodo.md (deleted, 180 lines)
@@ -1,180 +0,0 @@

# AniWorld Test Generation Checklist

This file instructs the AI agent on how to generate tests for the AniWorld application. All tests must be saved under `src/tests/` and follow the conventions in `.github/copilot-instructions.md`. Use `[ ]` for each task so the agent can check off completed items.

---

## 📁 Test File Structure

- [x] Place all tests under `src/tests/`
- [x] `src/tests/unit/` for component/unit tests
- [x] `src/tests/integration/` for API/integration tests
- [x] `src/tests/e2e/` for end-to-end tests

---

## 🧪 Test Types

- [x] Component/Unit Tests: Test individual functions, classes, and modules.
- [x] API/Integration Tests: Test API endpoints and database/external integrations.
- [x] End-to-End (E2E) Tests: Simulate real user flows through the system.

---

## 📝 Test Case Checklist

### 1. Authentication & Security

- [x] Unit: Password hashing (SHA-256 + salt)
- [x] Unit: JWT creation/validation
- [x] Unit: Session timeout logic
- [x] API: `POST /auth/login` (valid/invalid credentials; see the sketch after this list)
- [x] API: `GET /auth/verify` (valid/expired token)
- [x] API: `POST /auth/logout`
- [x] Unit: Secure environment variable management
- [x] E2E: Full login/logout flow
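A sketch of the login-endpoint tests using FastAPI's real `TestClient`; the app import path follows the README's uvicorn command, while the 401 status for bad credentials and the timing of when the app reads `MASTER_PASSWORD` are assumptions:

```python
from fastapi.testclient import TestClient

from src.server.fastapi_app import app  # module path taken from the README

client = TestClient(app)


def test_login_with_invalid_password_is_rejected() -> None:
    """Invalid credentials must not yield a token (assumes a 401 response)."""
    response = client.post("/auth/login", json={"password": "wrong_password"})
    assert response.status_code == 401


def test_login_with_valid_password_returns_token(monkeypatch) -> None:
    """Valid credentials return the documented success envelope."""
    # Assumes the app reads MASTER_PASSWORD at request time, not import time.
    monkeypatch.setenv("MASTER_PASSWORD", "admin123")  # dev-only value from .env
    response = client.post("/auth/login", json={"password": "admin123"})
    assert response.status_code == 200
    body = response.json()
    assert body["success"] is True
    assert "token" in body
```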
### 2. Health & System Monitoring
|
|
||||||
|
|
||||||
- [x] API: `/health` endpoint
|
|
||||||
- [x] API: `/api/health` endpoint
|
|
||||||
- [x] API: `/api/health/system` (CPU, memory, disk)
|
|
||||||
- [x] API: `/api/health/database`
|
|
||||||
- [x] API: `/api/health/dependencies`
|
|
||||||
- [x] API: `/api/health/performance`
|
|
||||||
- [x] API: `/api/health/metrics`
|
|
||||||
- [x] API: `/api/health/ready`
|
|
||||||
- [x] Unit: System metrics gathering
|
|
||||||
|
|
||||||
### 3. Anime & Episode Management
|
|
||||||
|
|
||||||
- [x] API: `GET /api/anime/search` (pagination, valid/invalid query)
|
|
||||||
- [x] API: `GET /api/anime/{anime_id}` (valid/invalid ID)
|
|
||||||
- [x] API: `GET /api/anime/{anime_id}/episodes`
|
|
||||||
- [x] API: `GET /api/episodes/{episode_id}`
|
|
||||||
- [x] Unit: Search/filter logic
|
|
||||||
|
|
||||||
### 4. Database & Storage Management
|
|
||||||
|
|
||||||
- [x] API: `GET /api/database/info`
|
|
||||||
- [x] API: `/maintenance/database/vacuum`
|
|
||||||
- [x] API: `/maintenance/database/analyze`
|
|
||||||
- [x] API: `/maintenance/database/integrity-check`
|
|
||||||
- [x] API: `/maintenance/database/reindex`
|
|
||||||
- [x] API: `/maintenance/database/optimize`
|
|
||||||
- [x] API: `/maintenance/database/stats`
|
|
||||||
- [x] Unit: Maintenance operation logic
|
|
||||||
|
|
||||||
### 5. Bulk Operations
|
|
||||||
|
|
||||||
- [x] API: `/api/bulk/download`
|
|
||||||
- [x] API: `/api/bulk/update`
|
|
||||||
- [x] API: `/api/bulk/organize`
|
|
||||||
- [x] API: `/api/bulk/delete`
|
|
||||||
- [x] API: `/api/bulk/export`
|
|
||||||
- [x] E2E: Bulk download and export flows
|
|
||||||
|
|
||||||
### 6. Performance Optimization
|
|
||||||
|
|
||||||
- [x] API: `/api/performance/speed-limit`
|
|
||||||
- [x] API: `/api/performance/cache/stats`
|
|
||||||
- [x] API: `/api/performance/memory/stats`
|
|
||||||
- [x] API: `/api/performance/memory/gc`
|
|
||||||
- [x] API: `/api/performance/downloads/tasks`
|
|
||||||
- [x] API: `/api/performance/downloads/add-task`
|
|
||||||
- [x] API: `/api/performance/resume/tasks`
|
|
||||||
- [x] Unit: Cache and memory management logic
|
|
||||||
|
|
||||||
### 7. Diagnostics & Logging

- [x] API: `/diagnostics/report`
- [x] Unit: Error reporting and stats
- [x] Unit: Logging configuration and log file management

### 8. Integrations

- [x] API: API key management endpoints
- [x] API: Webhook configuration endpoints
- [x] API: Third-party API integrations
- [x] Unit: Integration logic and error handling

### 9. User Preferences & UI

- [x] API: Theme management endpoints
- [x] API: Language selection endpoints
- [x] API: Accessibility endpoints
- [x] API: Keyboard shortcuts endpoints
- [x] API: UI density/grid/list view endpoints
- [x] E2E: Change preferences and verify UI responses

### 10. CLI Tool

- [x] Unit: CLI commands (scan, search, download, rescan, display series)
- [x] E2E: CLI flows (progress bar, retry logic)

### 11. Miscellaneous

- [x] Unit: Environment configuration loading
- [x] Unit: Modular architecture components
- [x] Unit: Centralized error handling
- [x] API: Error handling for invalid requests

---

## 🛠️ Additional Guidelines

- [x] Use `pytest` for all Python tests.
- [x] Use `pytest-mock` or `unittest.mock` for mocking.
- [x] Use fixtures for setup/teardown.
- [x] Test both happy paths and edge cases.
- [x] Mock external services and database connections.
- [x] Use parameterized tests for edge cases.
- [x] Document each test with a brief description.

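A short sketch that combines several of these guidelines at once (a fixture, a mocked external service, and parameterized edge cases); `DownloadService` and its provider interface are hypothetical placeholders, not the project's actual classes:

```python
"""Sketch applying the guidelines above: fixtures, mocks, parametrize."""
from unittest.mock import MagicMock

import pytest


class DownloadService:
    """Hypothetical service that delegates fetching to an injected provider."""

    def __init__(self, provider) -> None:
        self.provider = provider

    def download(self, episode_id: str) -> bool:
        return self.provider.fetch(episode_id) is not None


@pytest.fixture
def service() -> DownloadService:
    # Mock the external provider so no network access happens in tests.
    provider = MagicMock()
    provider.fetch.return_value = b"video-bytes"
    return DownloadService(provider)


@pytest.mark.parametrize("episode_id", ["s01e01", "s02e12", ""])
def test_download_success(service: DownloadService, episode_id: str) -> None:
    """Download should succeed whenever the provider returns data."""
    assert service.download(episode_id) is True
```
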
---

# Test TODO

## Application Flow & Setup Tests

### Setup Page Tests

- [x] Test setup page is displayed when configuration is missing
- [x] Test setup page form submission creates valid configuration
- [x] Test setup page redirects to auth page after successful setup
- [x] Test setup page validation for required fields
- [x] Test setup page handles database connection errors gracefully
- [x] Test setup completion flag is properly set in configuration

### Authentication Flow Tests

- [x] Test auth page is displayed when authentication token is invalid
- [x] Test auth page is displayed when authentication token is missing
- [x] Test successful login creates valid authentication token
- [x] Test failed login shows appropriate error messages
- [x] Test auth page redirects to main application after successful authentication
- [x] Test token validation middleware correctly identifies valid/invalid tokens
- [x] Test token refresh functionality
- [x] Test session expiration handling

### Main Application Access Tests

- [x] Test index.html is served when authentication is valid
- [x] Test unauthenticated users are redirected to auth page
- [x] Test users without completed setup are redirected to setup page
- [x] Test middleware enforces correct flow priority (setup → auth → main)
- [x] Test authenticated user session persistence
- [x] Test graceful handling of token expiration during active session

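The flow-priority behavior these tests target could be implemented roughly like the following sketch; the two helper functions are placeholders for the project's actual setup and token checks:

```python
"""Sketch of setup → auth → main flow-priority middleware."""
from fastapi import FastAPI, Request
from fastapi.responses import RedirectResponse

app = FastAPI()


def setup_completed() -> bool:
    return True  # placeholder: the real check reads the config flag


def token_valid(request: Request) -> bool:
    return "token" in request.cookies  # placeholder validation


@app.middleware("http")
async def flow_priority(request: Request, call_next):
    # Setup wins over auth, auth wins over the main application.
    if not setup_completed():
        return RedirectResponse("/setup")
    if not token_valid(request):
        return RedirectResponse("/auth")
    return await call_next(request)  # serve the main application
```
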
### Integration Flow Tests

- [x] Test complete user journey: setup → auth → main application
- [x] Test application behavior when setup is completed but user is not authenticated
- [x] Test application behavior when configuration exists but is corrupted
- [x] Test concurrent user sessions and authentication state management
- [x] Test application restart preserves setup and authentication state appropriately

---

**Instruction to AI Agent:**

Generate and check off each test case above as you complete it. Save all test files under `src/tests/` using the specified structure and conventions.

BIN	data/aniworld.db
Binary file not shown.
BIN	data/cache.db
Binary file not shown.
@ -1,49 +0,0 @@
{
    "security": {
        "master_password_hash": "1353f6d9db7090c302864c2d6437dc11cc96cd66d59d7737d1b345603fdbdfda",
        "salt": "a25e23440d681cef2d75c0adb6de0913359a1d8b9f98f9747fc75f53c79c4bd4",
        "session_timeout_hours": 24,
        "max_failed_attempts": 5,
        "lockout_duration_minutes": 30
    },
    "anime": {
        "directory": "\\\\sshfs.r\\ubuntu@192.168.178.43\\media\\serien\\Serien",
        "download_threads": 3,
        "download_speed_limit": null,
        "auto_rescan_time": "03:00",
        "auto_download_after_rescan": false
    },
    "logging": {
        "level": "INFO",
        "enable_console_logging": true,
        "enable_console_progress": false,
        "enable_fail2ban_logging": true,
        "log_file": "aniworld.log",
        "max_log_size_mb": 10,
        "log_backup_count": 5
    },
    "providers": {
        "default_provider": "aniworld.to",
        "preferred_language": "German Dub",
        "fallback_providers": [
            "aniworld.to"
        ],
        "provider_timeout": 30,
        "retry_attempts": 3,
        "provider_settings": {
            "aniworld.to": {
                "enabled": true,
                "priority": 1,
                "quality_preference": "720p"
            }
        }
    },
    "advanced": {
        "max_concurrent_downloads": 3,
        "download_buffer_size": 8192,
        "connection_timeout": 30,
        "read_timeout": 300,
        "enable_debug_mode": false,
        "cache_duration_minutes": 60
    }
}

@ -1,48 +0,0 @@
{
    "ui": {
        "theme": "auto",
        "density": "comfortable",
        "language": "en",
        "animations_enabled": true,
        "sidebar_collapsed": false,
        "grid_view": true,
        "items_per_page": 20
    },
    "downloads": {
        "auto_download": false,
        "download_quality": "best",
        "concurrent_downloads": 3,
        "retry_failed": true,
        "notification_sound": true,
        "auto_organize": true
    },
    "notifications": {
        "browser_notifications": true,
        "email_notifications": false,
        "webhook_notifications": false,
        "notification_types": {
            "download_complete": true,
            "download_error": true,
            "series_updated": false,
            "system_alerts": true
        }
    },
    "keyboard_shortcuts": {
        "enabled": true,
        "shortcuts": {
            "search": "ctrl+f",
            "download": "ctrl+d",
            "refresh": "f5",
            "select_all": "ctrl+a",
            "help": "f1",
            "settings": "ctrl+comma"
        }
    },
    "advanced": {
        "debug_mode": false,
        "performance_mode": false,
        "cache_enabled": true,
        "auto_backup": true,
        "log_level": "info"
    }
}

147	features.md
@ -1,135 +1,24 @@
-# AniWorld Application Features
+# Aniworld Web Application Features

-## 1. Authentication & Security
+## Authentication & Security

+- **Master Password Login**: Secure access to the application with a master password system

-- Master password authentication (JWT-based)
+## Configuration Management
-  - `POST /auth/login`: Login and receive JWT token
+- **Setup Page**: Initial configuration interface for server setup and basic settings
-  - `GET /auth/verify`: Verify JWT token validity
+- **Config Page**: View and modify application configuration settings
-  - `POST /auth/logout`: Logout (stateless)
-- Password hashing (SHA-256 + salt)
-- Configurable session timeout
-- Secure environment variable management

-## 2. Health & System Monitoring
+## User Interface

+- **Dark Mode**: Toggle between light and dark themes for better user experience

-- Health check endpoints
+## Anime Management
-  - `/health`: Basic health status
+- **Anime Library Page**: Display list of anime series with missing episodes
-  - `/api/health`: Load balancer health
+- **Series Selection**: Select individual anime series and add episodes to download queue
-  - `/api/health/system`: System metrics (CPU, memory, disk)
+- **Anime Search Page**: Search functionality to find and add new anime series to the library
-  - `/api/health/database`: Database connectivity
-  - `/api/health/dependencies`: External dependencies
-  - `/api/health/performance`: Performance metrics
-  - `/api/health/metrics`: Prometheus metrics
-  - `/api/health/ready`: Readiness probe (Kubernetes)

-## 3. Anime & Episode Management
+## Download Management

+- **Download Queue Page**: View and manage the current download queue
+- **Download Status Display**: Real-time status updates and progress of current downloads
+- **Queue Operations**: Add, remove, and prioritize items in the download queue

-- Search anime
+## Core Functionality Overview
-  - `GET /api/anime/search`: Search anime by title (pagination)
+The web application provides a complete interface for managing anime downloads with user-friendly pages for configuration, library management, search capabilities, and download monitoring.
-- Get anime details
-  - `GET /api/anime/{anime_id}`: Anime details
-  - `GET /api/anime/{anime_id}/episodes`: List episodes
-  - `GET /api/episodes/{episode_id}`: Episode details

-## 4. Database & Storage Management

-- Database info and statistics
-  - `GET /api/database/info`: Database stats
-- Maintenance operations
-  - `/maintenance/database/vacuum`: Vacuum database
-  - `/maintenance/database/analyze`: Analyze database
-  - `/maintenance/database/integrity-check`: Integrity check
-  - `/maintenance/database/reindex`: Reindex database
-  - `/maintenance/database/optimize`: Optimize database
-  - `/maintenance/database/stats`: Get database stats

-## 5. Bulk Operations

-- Bulk download, update, organize, delete, export
-  - `/api/bulk/download`: Start bulk download
-  - `/api/bulk/update`: Bulk update
-  - `/api/bulk/organize`: Organize series
-  - `/api/bulk/delete`: Delete series
-  - `/api/bulk/export`: Export series data

-## 6. Performance Optimization

-- Speed limit management
-  - `/api/performance/speed-limit`: Get/set download speed limit
-- Cache statistics
-  - `/api/performance/cache/stats`: Cache stats
-- Memory management
-  - `/api/performance/memory/stats`: Memory usage stats
-  - `/api/performance/memory/gc`: Force garbage collection
-- Download queue management
-  - `/api/performance/downloads/tasks`: List download tasks
-  - `/api/performance/downloads/add-task`: Add download task
-  - `/api/performance/resume/tasks`: List resumable tasks

-## 7. Diagnostics & Logging

-- Diagnostic report generation
-  - `/diagnostics/report`: Generate diagnostic report
-- Error reporting and stats
-- Logging configuration and log file management

-## 8. Integrations

-- API key management
-- Webhook configuration
-- Third-party API integrations

-## 9. User Preferences & UI

-- Theme management (light/dark/auto)
-- Language selection
-- Accessibility features (screen reader, color contrast, mobile support)
-- Keyboard shortcuts
-- UI density and grid/list view options

-## 10. CLI Tool

-- Series scanning and management
-- Search, download, rescan, display series
-- Progress bar for downloads
-- Retry logic for operations

-## 11. Miscellaneous

-- Environment configuration via `.env`
-- Modular, extensible architecture (MVC, Clean Architecture)
-- Automated testing (pytest, unittest)
-- Centralized error handling

-## Authentication & Setup Flow

-### Application Initialization Flow

-- **Setup Page**: Display application setup page when the application is run for the first time and no configuration exists
-  - Check for presence of configuration file/database setup
-  - Guide user through initial application configuration
-  - Set up database connections, initial admin user, and core settings
-  - Mark setup as completed in configuration

-- **Authentication Gate**: Redirect to authentication page when user token is invalid or missing
-  - Validate existing authentication tokens
-  - Display login/registration interface for unauthenticated users
-  - Handle token refresh and session management
-  - Redirect authenticated users to main application

-- **Main Application**: Show index.html for authenticated users with valid tokens
-  - Display main application interface
-  - Provide access to all authenticated user features
-  - Maintain session state and handle token expiration gracefully

-### User Flow Priority

-1. Check if application setup is completed → Show setup page if not
-2. Check if user is authenticated → Show auth page if not
-3. Show main application (index.html) for authenticated users

----

-**Note:** Each feature is implemented via modular controllers, services, and utilities. See the respective source files for detailed function/class definitions.

136	infrastructure.md	Normal file
@ -0,0 +1,136 @@
# Aniworld Web Application Infrastructure

Activate the development environment with `conda activate AniWorld`.

## Project Structure

```
/home/lukas/Volume/repo/Aniworld/
├── src/
│   ├── server/                  # FastAPI web application
│   │   ├── main.py              # FastAPI application entry point
│   │   ├── api/                 # API route handlers
│   │   │   ├── __init__.py
│   │   │   ├── auth.py          # Authentication endpoints
│   │   │   ├── config.py        # Configuration endpoints
│   │   │   ├── anime.py         # Anime management endpoints
│   │   │   ├── download.py      # Download queue endpoints
│   │   │   └── search.py        # Search endpoints
│   │   ├── models/              # Pydantic models
│   │   │   ├── __init__.py
│   │   │   ├── auth.py
│   │   │   ├── config.py
│   │   │   ├── anime.py
│   │   │   └── download.py
│   │   ├── services/            # Business logic services
│   │   │   ├── __init__.py
│   │   │   ├── auth_service.py
│   │   │   ├── config_service.py
│   │   │   ├── anime_service.py
│   │   │   └── download_service.py
│   │   ├── static/              # Static web assets
│   │   │   ├── css/
│   │   │   ├── js/
│   │   │   └── images/
│   │   ├── templates/           # Jinja2 HTML templates
│   │   │   ├── base.html
│   │   │   ├── login.html
│   │   │   ├── setup.html
│   │   │   ├── config.html
│   │   │   ├── anime.html
│   │   │   ├── download.html
│   │   │   └── search.html
│   │   └── utils/               # Utility functions
│   │       ├── __init__.py
│   │       ├── security.py
│   │       └── dependencies.py
│   ├── core/                    # Existing core functionality
│   └── cli/                     # Existing CLI application
├── data/                        # Application data storage
│   ├── config.json              # Application configuration
│   ├── anime_library.db         # SQLite database for anime library
│   ├── download_queue.json      # Download queue state
│   └── cache/                   # Temporary cache files
├── logs/                        # Application logs
│   ├── app.log                  # Main application log
│   ├── download.log             # Download-specific logs
│   └── error.log                # Error logs
├── requirements.txt             # Python dependencies
├── docker-compose.yml           # Docker deployment configuration
└── README.md
```

## Technology Stack

### Backend

- **FastAPI**: Modern Python web framework for building APIs
- **Uvicorn**: ASGI server for running FastAPI applications
- **SQLite**: Lightweight database for storing anime library and configuration
- **Pydantic**: Data validation and serialization
- **Jinja2**: Template engine for server-side rendering

### Frontend

- **HTML5/CSS3**: Core web technologies
- **JavaScript (Vanilla)**: Client-side interactivity
- **Bootstrap 5**: CSS framework for responsive design
- **HTMX**: Modern approach for dynamic web applications

### Security

- **Passlib**: Password hashing and verification
- **python-jose**: JWT token handling
- **bcrypt**: Secure password hashing

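As a rough illustration of how the backend pieces of this stack fit together — module paths, template name, and port here are assumptions, not the project's actual code:

```python
"""Minimal sketch: FastAPI served by Uvicorn, Pydantic model, Jinja2 page."""
from fastapi import FastAPI, Request
from fastapi.templating import Jinja2Templates
from pydantic import BaseModel

app = FastAPI(title="Aniworld")
templates = Jinja2Templates(directory="src/server/templates")


class HealthStatus(BaseModel):
    status: str


@app.get("/health", response_model=HealthStatus)
async def health() -> HealthStatus:
    # Pydantic validates and serializes the response body.
    return HealthStatus(status="ok")


@app.get("/")
async def index(request: Request):
    # Server-side rendering via Jinja2.
    return templates.TemplateResponse("base.html", {"request": request})


if __name__ == "__main__":
    import uvicorn

    uvicorn.run(app, host="127.0.0.1", port=8000)
```
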
## Configuration

### Data Storage

- **Configuration**: JSON files in `data/` directory
- **Anime Library**: SQLite database with series information
- **Download Queue**: JSON file with current download status
- **Logs**: Structured logging to files in `logs/` directory

## API Endpoints

### Authentication

- `POST /api/auth/login` - Master password authentication
- `POST /api/auth/logout` - Logout and invalidate session
- `GET /api/auth/status` - Check authentication status

### Configuration

- `GET /api/config` - Get current configuration
- `PUT /api/config` - Update configuration
- `POST /api/setup` - Initial setup

### Anime Management

- `GET /api/anime` - List anime with missing episodes
- `POST /api/anime/{id}/download` - Add episodes to download queue
- `GET /api/anime/{id}` - Get anime details

### Download Management

- `GET /api/downloads` - Get download queue status
- `DELETE /api/downloads/{id}` - Remove from queue
- `POST /api/downloads/priority` - Change download priority

### Search

- `GET /api/search?q={query}` - Search for anime
- `POST /api/search/add` - Add anime to library

## Logging

### Log Levels

- **INFO**: General application information
- **WARNING**: Potential issues that don't stop execution
- **ERROR**: Errors that affect functionality
- **DEBUG**: Detailed debugging information (development only)

### Log Files

- `app.log`: General application logs
- `download.log`: Download-specific operations
- `error.log`: Error and exception logs

## Security Considerations

- Master password protection for application access
- Secure session management with JWT tokens
- Input validation and sanitization
- Rate limiting on API endpoints
- HTTPS enforcement in production
- Secure file path handling to prevent directory traversal
405	instructions.md	Normal file
@ -0,0 +1,405 @@
# Aniworld Web Application Development Instructions

This document provides detailed tasks for AI agents to implement a modern web application for the Aniworld anime download manager. All tasks should follow the coding guidelines specified in the project's copilot instructions.

## Project Overview

The goal is to create a FastAPI-based web application that provides a modern interface for the existing Aniworld anime download functionality. The core anime logic should remain in `SeriesApp.py` while the web layer provides REST API endpoints and a responsive UI.

## Architecture Principles

- **Single Responsibility**: Each file/class has one clear purpose
- **Dependency Injection**: Use FastAPI's dependency system
- **Clean Separation**: Web layer calls core logic, never the reverse
- **File Size Limit**: Maximum 500 lines per file
- **Type Hints**: Use comprehensive type annotations
- **Error Handling**: Proper exception handling and logging

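A minimal sketch of the dependency-injection and clean-separation principles together — the `SeriesApp` import path and its `list_missing_episodes` method are assumptions for illustration:

```python
"""Sketch: web layer receives the core SeriesApp via FastAPI's Depends."""
from functools import lru_cache

from fastapi import Depends, FastAPI

from src.core.SeriesApp import SeriesApp  # assumed import path

app = FastAPI()


@lru_cache
def get_series_app() -> SeriesApp:
    # One shared core instance for the whole web layer.
    return SeriesApp()


@app.get("/api/v1/anime")
async def list_anime(series_app: SeriesApp = Depends(get_series_app)):
    # The web layer delegates to core logic and adds no anime logic itself.
    return series_app.list_missing_episodes()  # hypothetical method
```
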
## How You Work

1. Take the next task.
2. Process the task.
3. Write tests.
4. Remove the task from instructions.md.
5. Commit in git.
6. Go to step 1.

## Implementation Order

The tasks should be completed in the following order to ensure proper dependencies and logical progression:

1. **Project Structure Setup** - Foundation and dependencies
2. **Authentication System** - Security layer implementation
3. **Configuration Management** - Settings and config handling
4. **Anime Management Integration** - Core functionality wrapper
5. **Download Queue Management** - Queue handling and persistence
6. **WebSocket Real-time Updates** - Real-time communication
7. **Frontend Integration** - Integrate existing frontend assets
8. **Core Logic Integration** - Enhance existing core functionality
9. **Database Layer** - Data persistence and management
10. **Testing** - Comprehensive test coverage
11. **Deployment and Configuration** - Production setup
12. **Documentation and Error Handling** - Final documentation and error handling

## Core Tasks

### 1. Project Structure Setup

#### [ ] Create main FastAPI application structure

- Create `src/server/main.py`
- Configure FastAPI app with CORS, middleware
- Set up static file serving for existing frontend assets
- Configure Jinja2 templates
- Add health check endpoint

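A hedged sketch of what this `main.py` task might produce; directory paths and CORS settings are illustrative only:

```python
"""Sketch of the main application structure task."""
from fastapi import FastAPI
from fastapi.middleware.cors import CORSMiddleware
from fastapi.staticfiles import StaticFiles
from fastapi.templating import Jinja2Templates

app = FastAPI(title="Aniworld Web")

app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # tighten for production
    allow_methods=["*"],
    allow_headers=["*"],
)

# Serve the existing frontend assets and templates.
app.mount("/static", StaticFiles(directory="src/server/web/static"), name="static")
templates = Jinja2Templates(directory="src/server/web/templates")


@app.get("/health")
async def health() -> dict[str, str]:
    return {"status": "ok"}
```
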
#### [ ] Set up dependency injection system

- Create `src/server/utils/dependencies.py`
- Implement SeriesApp dependency injection
- Add database session dependency
- Create authentication dependency

#### [ ] Configure logging system

- Create `src/server/utils/logging.py`
- Set up structured logging with multiple handlers
- Configure log rotation and cleanup
- Add request/response logging middleware

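One plausible shape for the logging task, assuming the log files named in infrastructure.md; rotation sizes and the format string are illustrative:

```python
"""Sketch: rotating file handlers for app.log and error.log."""
import logging
import os
from logging.handlers import RotatingFileHandler


def configure_logging(log_dir: str = "logs", level: int = logging.INFO) -> None:
    """Attach rotating file handlers to the root logger."""
    os.makedirs(log_dir, exist_ok=True)
    root = logging.getLogger()
    root.setLevel(level)
    fmt = logging.Formatter("%(asctime)s %(name)s %(levelname)s %(message)s")

    app_handler = RotatingFileHandler(
        os.path.join(log_dir, "app.log"),
        maxBytes=10 * 1024 * 1024,  # rotate at 10 MB
        backupCount=5,
    )
    app_handler.setFormatter(fmt)
    root.addHandler(app_handler)

    error_handler = RotatingFileHandler(
        os.path.join(log_dir, "error.log"), maxBytes=10 * 1024 * 1024, backupCount=5
    )
    error_handler.setLevel(logging.ERROR)  # errors only in error.log
    error_handler.setFormatter(fmt)
    root.addHandler(error_handler)
```
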
### 2. Authentication System

#### [ ] Implement authentication models

- Create `src/server/models/auth.py`
- Define LoginRequest, LoginResponse models
- Add SetupRequest, AuthStatus models
- Include session management models

#### [ ] Create authentication service

- Create `src/server/services/auth_service.py`
- Implement master password setup/validation
- Add session management with JWT tokens
- Include failed attempt tracking and lockout
- Add password strength validation

#### [ ] Implement authentication API endpoints

- Create `src/server/api/auth.py`
- Add POST `/api/auth/setup` - initial setup
- Add POST `/api/auth/login` - login endpoint
- Add POST `/api/auth/logout` - logout endpoint
- Add GET `/api/auth/status` - authentication status

#### [ ] Create authentication middleware

- Create `src/server/middleware/auth.py`
- Implement JWT token validation
- Add request authentication checking
- Include rate limiting for auth endpoints

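The auth models task might start from a sketch like this; field names beyond those listed above are assumptions:

```python
"""Sketch of Pydantic auth models for src/server/models/auth.py."""
from typing import Optional

from pydantic import BaseModel


class LoginRequest(BaseModel):
    password: str  # master-password system: no username needed


class LoginResponse(BaseModel):
    token: str
    expires_in_hours: int = 24


class AuthStatus(BaseModel):
    authenticated: bool
    setup_completed: bool
    error: Optional[str] = None
```
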
### 3. Configuration Management

#### [ ] Implement configuration models

- Create `src/server/models/config.py`
- Define ConfigResponse, ConfigUpdate models
- Add SchedulerConfig, LoggingConfig models
- Include ValidationResult model

#### [ ] Create configuration service

- Create `src/server/services/config_service.py`
- Implement configuration loading/saving
- Add configuration validation
- Include backup/restore functionality
- Add scheduler configuration management

#### [ ] Implement configuration API endpoints

- Create `src/server/api/config.py`
- Add GET `/api/config` - get configuration
- Add PUT `/api/config` - update configuration
- Add POST `/api/config/validate` - validate config
- Add GET/POST `/api/config/backup` - backup management

### 4. Anime Management Integration

#### [ ] Implement anime models

- Create `src/server/models/anime.py`
- Define AnimeSeriesResponse, EpisodeInfo models
- Add SearchRequest, SearchResult models
- Include MissingEpisodeInfo model

#### [ ] Create anime service wrapper

- Create `src/server/services/anime_service.py`
- Wrap SeriesApp functionality for web layer
- Implement async wrappers for blocking operations
- Add caching for frequently accessed data
- Include error handling and logging

#### [ ] Implement anime API endpoints

- Create `src/server/api/anime.py`
- Add GET `/api/v1/anime` - list series with missing episodes
- Add POST `/api/v1/anime/rescan` - trigger rescan
- Add POST `/api/v1/anime/search` - search for new anime
- Add GET `/api/v1/anime/{id}` - get series details

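The async-wrapper bullet could be satisfied with `asyncio.to_thread`, roughly as sketched here; `SeriesApp.search` is a hypothetical blocking method:

```python
"""Sketch: async wrapper keeping blocking SeriesApp calls off the loop."""
import asyncio
from typing import Any


class AnimeService:
    def __init__(self, series_app: Any) -> None:
        self._app = series_app

    async def search(self, query: str) -> list[dict]:
        # asyncio.to_thread runs the blocking call in a worker thread,
        # so the FastAPI event loop stays responsive.
        return await asyncio.to_thread(self._app.search, query)
```
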
### 5. Download Queue Management

#### [ ] Implement download queue models

- Create `src/server/models/download.py`
- Define DownloadItem, QueueStatus models
- Add DownloadProgress, QueueStats models
- Include DownloadRequest model

#### [ ] Create download queue service

- Create `src/server/services/download_service.py`
- Implement queue management (add, remove, reorder)
- Add download progress tracking
- Include queue persistence and recovery
- Add concurrent download management

#### [ ] Implement download API endpoints

- Create `src/server/api/download.py`
- Add GET `/api/queue/status` - get queue status
- Add POST `/api/queue/add` - add to queue
- Add DELETE `/api/queue/{id}` - remove from queue
- Add POST `/api/queue/start` - start downloads
- Add POST `/api/queue/stop` - stop downloads

### 6. WebSocket Real-time Updates

#### [ ] Implement WebSocket manager

- Create `src/server/services/websocket_service.py`
- Add connection management
- Implement broadcast functionality
- Include room-based messaging
- Add connection cleanup

#### [ ] Add real-time progress updates

- Create `src/server/services/progress_service.py`
- Implement download progress broadcasting
- Add scan progress updates
- Include queue status changes
- Add error notifications

#### [ ] Integrate WebSocket with core services

- Update download service to emit progress
- Add scan progress notifications
- Include queue change broadcasts
- Add error/completion notifications

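A minimal connection-manager sketch for the WebSocket task; class and method names are assumptions:

```python
"""Sketch of a WebSocket connection manager with broadcast support."""
from fastapi import WebSocket


class ConnectionManager:
    def __init__(self) -> None:
        self.active: list[WebSocket] = []

    async def connect(self, websocket: WebSocket) -> None:
        await websocket.accept()
        self.active.append(websocket)

    def disconnect(self, websocket: WebSocket) -> None:
        # Connection cleanup: drop closed sockets from the pool.
        if websocket in self.active:
            self.active.remove(websocket)

    async def broadcast(self, message: dict) -> None:
        # Push progress/queue events to every connected client.
        for ws in list(self.active):
            await ws.send_json(message)
```
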
### 7. Frontend Integration

#### [ ] Integrate existing HTML templates

- Review and integrate existing HTML templates in `src/server/web/templates/`
- Ensure templates work with FastAPI Jinja2 setup
- Update template paths and static file references if needed
- Maintain existing responsive layout and theme switching

#### [ ] Integrate existing JavaScript functionality

- Review existing JavaScript files in `src/server/web/static/js/`
- Update API endpoint URLs to match FastAPI routes
- Ensure WebSocket connections work with new backend
- Maintain existing functionality for app.js and queue.js

#### [ ] Integrate existing CSS styling

- Review and integrate existing CSS files in `src/server/web/static/css/`
- Ensure styling works with FastAPI static file serving
- Maintain existing responsive design and theme support
- Update any hardcoded paths if necessary

#### [ ] Update frontend-backend integration

- Ensure existing JavaScript calls match new API endpoints
- Update authentication flow to work with new auth system
- Verify WebSocket events match new service implementations
- Test all existing UI functionality with new backend

### 8. Core Logic Integration

#### [ ] Enhance SeriesApp for web integration

- Update `src/core/SeriesApp.py`
- Add async callback support
- Implement progress reporting
- Include better error handling
- Add cancellation support

#### [ ] Create progress callback system

- Add progress callback interface
- Implement scan progress reporting
- Add download progress tracking
- Include error/completion callbacks

#### [ ] Add configuration persistence

- Implement configuration file management
- Add settings validation
- Include backup/restore functionality
- Add migration support for config updates

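The progress-callback interface might be expressed as a `Protocol`, for example; all names here are illustrative:

```python
"""Sketch of a progress-callback interface SeriesApp could accept."""
from typing import Protocol


class ProgressCallback(Protocol):
    def on_progress(self, current: int, total: int, message: str) -> None: ...
    def on_error(self, error: Exception) -> None: ...
    def on_complete(self) -> None: ...


class LoggingProgress:
    """Minimal implementation that just prints progress updates."""

    def on_progress(self, current: int, total: int, message: str) -> None:
        print(f"[{current}/{total}] {message}")

    def on_error(self, error: Exception) -> None:
        print(f"error: {error}")

    def on_complete(self) -> None:
        print("done")
```
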
### 9. Database Layer

#### [ ] Implement database models

- Create `src/server/database/models.py`
- Add SQLAlchemy models for anime series
- Implement download queue persistence
- Include user session storage

#### [ ] Create database service

- Create `src/server/database/service.py`
- Add CRUD operations for anime data
- Implement queue persistence
- Include database migration support

#### [ ] Add database initialization

- Create `src/server/database/init.py`
- Implement database setup
- Add initial data migration
- Include schema validation

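A small sketch of the SQLAlchemy-models task in 2.0 declarative style; table and column names are assumptions:

```python
"""Sketch of an anime-series SQLAlchemy model."""
from sqlalchemy import String
from sqlalchemy.orm import DeclarativeBase, Mapped, mapped_column


class Base(DeclarativeBase):
    pass


class AnimeSeries(Base):
    __tablename__ = "anime_series"

    id: Mapped[int] = mapped_column(primary_key=True)
    name: Mapped[str] = mapped_column(String(255), nullable=False)
    folder: Mapped[str] = mapped_column(String(512), unique=True)
    status: Mapped[str] = mapped_column(String(32), default="ongoing")
```
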
### 10. Testing

#### [ ] Create unit tests for services

- Create `tests/unit/test_auth_service.py`
- Create `tests/unit/test_anime_service.py`
- Create `tests/unit/test_download_service.py`
- Create `tests/unit/test_config_service.py`

#### [ ] Create API endpoint tests

- Create `tests/api/test_auth_endpoints.py`
- Create `tests/api/test_anime_endpoints.py`
- Create `tests/api/test_download_endpoints.py`
- Create `tests/api/test_config_endpoints.py`

#### [ ] Create integration tests

- Create `tests/integration/test_download_flow.py`
- Create `tests/integration/test_auth_flow.py`
- Create `tests/integration/test_websocket.py`

#### [ ] Create frontend integration tests

- Create `tests/frontend/test_existing_ui_integration.py`
- Test existing JavaScript functionality with new backend
- Verify WebSocket connections and real-time updates
- Test authentication flow with existing frontend

### 11. Deployment and Configuration

#### [ ] Create Docker configuration

- Create `Dockerfile`
- Create `docker-compose.yml`
- Add environment configuration
- Include volume mappings for existing web assets

#### [ ] Create production configuration

- Create `src/server/config/production.py`
- Add environment variable handling
- Include security settings
- Add performance optimizations

#### [ ] Create startup scripts

- Create `scripts/start.sh`
- Create `scripts/setup.py`
- Add dependency installation
- Include database initialization

### 12. Documentation and Error Handling

#### [ ] Create API documentation

- Add OpenAPI/Swagger documentation
- Include endpoint descriptions
- Add request/response examples
- Include authentication details

#### [ ] Implement comprehensive error handling

- Create custom exception classes
- Add error logging and tracking
- Implement user-friendly error messages
- Include error recovery mechanisms

#### [ ] Create user documentation

- Create `docs/user_guide.md`
- Add installation instructions
- Include configuration guide
- Add troubleshooting section

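The custom-exception bullet could start from a tiny hierarchy like this sketch, which an error handler can map onto HTTP responses; names and status codes are assumptions:

```python
"""Sketch of custom exception classes for centralized error handling."""


class AniworldError(Exception):
    """Base class for application errors."""

    status_code = 500


class NotFoundError(AniworldError):
    status_code = 404


class AuthenticationError(AniworldError):
    status_code = 401
```
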
## File Size Guidelines

- **Models**: Max 200 lines each
- **Services**: Max 450 lines each
- **API Endpoints**: Max 350 lines each
- **Templates**: Max 400 lines each
- **JavaScript**: Max 500 lines each
- **CSS**: Max 500 lines each
- **Tests**: Max 400 lines each

## Existing Frontend Assets

The following frontend assets already exist and should be integrated:

- **Templates**: Located in `src/server/web/templates/`
- **JavaScript**: Located in `src/server/web/static/js/` (app.js, queue.js, etc.)
- **CSS**: Located in `src/server/web/static/css/`
- **Static Assets**: Images and other assets in `src/server/web/static/`

When working with these files:

- Review existing functionality before making changes
- Maintain existing UI/UX patterns and design
- Update API calls to match new FastAPI endpoints
- Preserve existing WebSocket event handling
- Keep existing theme and responsive design features

## Quality Assurance

#### [ ] Code quality checks

- Run linting with flake8/pylint
- Check type hints with mypy
- Validate formatting with black
- Run security checks with bandit

#### [ ] Performance testing

- Load test API endpoints
- Test WebSocket connection limits
- Validate download performance
- Check memory usage patterns

#### [ ] Security validation

- Test authentication bypass attempts
- Validate input sanitization
- Check for injection vulnerabilities
- Test session management security

Each task should be implemented with proper error handling, logging, and type hints according to the project's coding standards.
9915	logs/aniworld.log
File diff suppressed because it is too large
18	pytest.ini
@ -1,18 +0,0 @@
[tool:pytest]
testpaths = src/tests
python_files = test_*.py
python_classes = Test*
python_functions = test_*
addopts =
    -v
    --tb=short
    --strict-markers
    --disable-warnings
markers =
    unit: Unit tests
    integration: Integration tests
    e2e: End-to-end tests
    slow: Slow running tests
filterwarnings =
    ignore::DeprecationWarning
    ignore::PendingDeprecationWarning
BIN	requirements.txt
Binary file not shown.
30	src/config/settings.py	Normal file
@ -0,0 +1,30 @@
from typing import Optional

from pydantic import Field
from pydantic_settings import BaseSettings


class Settings(BaseSettings):
    """Application settings from environment variables."""

    jwt_secret_key: str = Field(default="your-secret-key-here", env="JWT_SECRET_KEY")
    password_salt: str = Field(default="default-salt", env="PASSWORD_SALT")
    master_password_hash: Optional[str] = Field(default=None, env="MASTER_PASSWORD_HASH")
    master_password: Optional[str] = Field(default=None, env="MASTER_PASSWORD")  # For development
    token_expiry_hours: int = Field(default=24, env="SESSION_TIMEOUT_HOURS")
    anime_directory: str = Field(default="", env="ANIME_DIRECTORY")
    log_level: str = Field(default="INFO", env="LOG_LEVEL")

    # Additional settings from .env
    database_url: str = Field(default="sqlite:///./data/aniworld.db", env="DATABASE_URL")
    cors_origins: str = Field(default="*", env="CORS_ORIGINS")
    api_rate_limit: int = Field(default=100, env="API_RATE_LIMIT")
    default_provider: str = Field(default="aniworld.to", env="DEFAULT_PROVIDER")
    provider_timeout: int = Field(default=30, env="PROVIDER_TIMEOUT")
    retry_attempts: int = Field(default=3, env="RETRY_ATTEMPTS")

    class Config:
        env_file = ".env"
        extra = "ignore"


settings = Settings()

@ -1,10 +0,0 @@
"""
Configuration package for the Aniworld server.

This package provides configuration management and environment
variable handling for secure application deployment.
"""

from .env_config import EnvironmentConfig, env_config

__all__ = ['EnvironmentConfig', 'env_config']

@ -1,217 +0,0 @@
"""
Environment configuration for secure handling of sensitive data.

This module provides secure environment variable handling and configuration
management for the Aniworld server application.
"""

import os
import secrets
from typing import Optional, Dict, Any
from dotenv import load_dotenv
import logging

logger = logging.getLogger(__name__)

# Load environment variables from .env file
load_dotenv()


class EnvironmentConfig:
    """Manages environment variables and secure configuration."""

    # Security
    SECRET_KEY: str = os.getenv('SECRET_KEY', secrets.token_urlsafe(32))
    JWT_SECRET_KEY: str = os.getenv('JWT_SECRET_KEY', secrets.token_urlsafe(32))
    PASSWORD_SALT: str = os.getenv('PASSWORD_SALT', secrets.token_hex(32))

    # Database
    DATABASE_URL: str = os.getenv('DATABASE_URL', 'sqlite:///data/aniworld.db')
    DATABASE_PASSWORD: Optional[str] = os.getenv('DATABASE_PASSWORD')

    # Redis (for caching and sessions)
    REDIS_URL: str = os.getenv('REDIS_URL', 'redis://localhost:6379/0')
    REDIS_PASSWORD: Optional[str] = os.getenv('REDIS_PASSWORD')

    # API Keys and External Services
    ANIME_PROVIDER_API_KEY: Optional[str] = os.getenv('ANIME_PROVIDER_API_KEY')
    TMDB_API_KEY: Optional[str] = os.getenv('TMDB_API_KEY')

    # Email Configuration (for password reset)
    SMTP_SERVER: str = os.getenv('SMTP_SERVER', 'localhost')
    SMTP_PORT: int = int(os.getenv('SMTP_PORT', '587'))
    SMTP_USERNAME: Optional[str] = os.getenv('SMTP_USERNAME')
    SMTP_PASSWORD: Optional[str] = os.getenv('SMTP_PASSWORD')
    SMTP_USE_TLS: bool = os.getenv('SMTP_USE_TLS', 'true').lower() == 'true'
    FROM_EMAIL: str = os.getenv('FROM_EMAIL', 'noreply@aniworld.local')

    # Security Settings
    SESSION_TIMEOUT_HOURS: int = int(os.getenv('SESSION_TIMEOUT_HOURS', '24'))
    MAX_FAILED_LOGIN_ATTEMPTS: int = int(os.getenv('MAX_FAILED_LOGIN_ATTEMPTS', '5'))
    LOCKOUT_DURATION_MINUTES: int = int(os.getenv('LOCKOUT_DURATION_MINUTES', '30'))

    # Rate Limiting
    RATE_LIMIT_PER_MINUTE: int = int(os.getenv('RATE_LIMIT_PER_MINUTE', '60'))
    API_RATE_LIMIT_PER_MINUTE: int = int(os.getenv('API_RATE_LIMIT_PER_MINUTE', '100'))

    # Application Settings
    DEBUG: bool = os.getenv('DEBUG', 'false').lower() == 'true'
    HOST: str = os.getenv('HOST', '127.0.0.1')
    PORT: int = int(os.getenv('PORT', '5000'))

    # Anime Directory and Download Settings
    ANIME_DIRECTORY: str = os.getenv('ANIME_DIRECTORY', './downloads')
    MAX_CONCURRENT_DOWNLOADS: int = int(os.getenv('MAX_CONCURRENT_DOWNLOADS', '3'))
    DOWNLOAD_SPEED_LIMIT: Optional[int] = int(os.getenv('DOWNLOAD_SPEED_LIMIT', '0')) or None

    # Logging
    LOG_LEVEL: str = os.getenv('LOG_LEVEL', 'INFO')
    LOG_FILE: str = os.getenv('LOG_FILE', './logs/aniworld.log')

    @classmethod
    def get_database_config(cls) -> Dict[str, Any]:
        """Get database configuration."""
        return {
            'url': cls.DATABASE_URL,
            'password': cls.DATABASE_PASSWORD,
            'pool_size': int(os.getenv('DATABASE_POOL_SIZE', '10')),
            'max_overflow': int(os.getenv('DATABASE_MAX_OVERFLOW', '20')),
            'pool_timeout': int(os.getenv('DATABASE_POOL_TIMEOUT', '30')),
            'pool_recycle': int(os.getenv('DATABASE_POOL_RECYCLE', '3600'))
        }

    @classmethod
    def get_redis_config(cls) -> Dict[str, Any]:
        """Get Redis configuration."""
        return {
            'url': cls.REDIS_URL,
            'password': cls.REDIS_PASSWORD,
            'max_connections': int(os.getenv('REDIS_MAX_CONNECTIONS', '10')),
            'retry_on_timeout': True,
            'socket_timeout': int(os.getenv('REDIS_SOCKET_TIMEOUT', '5'))
        }

    @classmethod
    def get_email_config(cls) -> Dict[str, Any]:
        """Get email configuration."""
        return {
            'server': cls.SMTP_SERVER,
            'port': cls.SMTP_PORT,
            'username': cls.SMTP_USERNAME,
            'password': cls.SMTP_PASSWORD,
            'use_tls': cls.SMTP_USE_TLS,
            'from_email': cls.FROM_EMAIL
        }

    @classmethod
    def get_security_config(cls) -> Dict[str, Any]:
        """Get security configuration."""
        return {
            'secret_key': cls.SECRET_KEY,
            'jwt_secret_key': cls.JWT_SECRET_KEY,
            'password_salt': cls.PASSWORD_SALT,
            'session_timeout_hours': cls.SESSION_TIMEOUT_HOURS,
            'max_failed_attempts': cls.MAX_FAILED_LOGIN_ATTEMPTS,
            'lockout_duration_minutes': cls.LOCKOUT_DURATION_MINUTES,
            'rate_limit_per_minute': cls.RATE_LIMIT_PER_MINUTE,
            'api_rate_limit_per_minute': cls.API_RATE_LIMIT_PER_MINUTE
        }

    @classmethod
    def validate_config(cls) -> bool:
        """Validate that required configuration is present."""
        required_vars = [
            'SECRET_KEY',
            'JWT_SECRET_KEY',
            'PASSWORD_SALT'
        ]

        missing_vars = []
        for var in required_vars:
            if not getattr(cls, var):
                missing_vars.append(var)

        if missing_vars:
            logger.error(f"Missing required environment variables: {missing_vars}")
            return False

        return True

    @classmethod
    def generate_env_template(cls, file_path: str = '.env.template') -> bool:
        """Generate a template .env file with all available configuration options."""
        try:
            template_content = """# Aniworld Server Environment Configuration
# Copy this file to .env and fill in your values

# Security (REQUIRED - Generate secure random values)
SECRET_KEY=your_secret_key_here
JWT_SECRET_KEY=your_jwt_secret_here
PASSWORD_SALT=your_password_salt_here

# Database Configuration
DATABASE_URL=sqlite:///data/aniworld.db
# DATABASE_PASSWORD=your_db_password_here
DATABASE_POOL_SIZE=10
DATABASE_MAX_OVERFLOW=20
DATABASE_POOL_TIMEOUT=30
DATABASE_POOL_RECYCLE=3600

# Redis Configuration (for caching and sessions)
REDIS_URL=redis://localhost:6379/0
# REDIS_PASSWORD=your_redis_password_here
REDIS_MAX_CONNECTIONS=10
REDIS_SOCKET_TIMEOUT=5

# Email Configuration (for password reset emails)
SMTP_SERVER=localhost
SMTP_PORT=587
# SMTP_USERNAME=your_smtp_username
# SMTP_PASSWORD=your_smtp_password
SMTP_USE_TLS=true
FROM_EMAIL=noreply@aniworld.local

# External API Keys
# ANIME_PROVIDER_API_KEY=your_anime_provider_api_key
# TMDB_API_KEY=your_tmdb_api_key

# Security Settings
SESSION_TIMEOUT_HOURS=24
MAX_FAILED_LOGIN_ATTEMPTS=5
LOCKOUT_DURATION_MINUTES=30

# Rate Limiting
RATE_LIMIT_PER_MINUTE=60
API_RATE_LIMIT_PER_MINUTE=100

# Application Settings
DEBUG=false
HOST=127.0.0.1
PORT=5000

# Anime and Download Settings
ANIME_DIRECTORY=./downloads
MAX_CONCURRENT_DOWNLOADS=3
# DOWNLOAD_SPEED_LIMIT=1000000 # bytes per second

# Logging
LOG_LEVEL=INFO
LOG_FILE=./logs/aniworld.log
"""

            with open(file_path, 'w', encoding='utf-8') as f:
                f.write(template_content)

            logger.info(f"Environment template created at {file_path}")
            return True
        except Exception as e:
            logger.error(f"Error creating environment template: {e}")
            return False


# Create global instance
env_config = EnvironmentConfig()

# Validate configuration on import
if not env_config.validate_config():
    logger.warning("Invalid environment configuration detected. Please check your .env file.")

@ -1,6 +0,0 @@
"""
Infrastructure package for the Aniworld server.

This package contains repository implementations, database connections,
caching, and other infrastructure concerns.
"""

@ -1,916 +0,0 @@
"""
Database & Storage Management for AniWorld App

This module provides database schema management, data migration,
backup/restore functionality, and storage optimization.
"""

import os
import sqlite3
import json
import shutil
import time
import hashlib
import logging
import threading
import zipfile
import uuid
from datetime import datetime, timedelta
from typing import Dict, List, Optional, Any, Tuple
from dataclasses import dataclass, field
from contextlib import contextmanager
import glob
from pathlib import Path


@dataclass
class AnimeMetadata:
    """Represents anime metadata stored in database."""
    anime_id: str
    name: str
    folder: str
    key: Optional[str] = None
    description: Optional[str] = None
    genres: List[str] = field(default_factory=list)
    release_year: Optional[int] = None
    status: str = 'ongoing'  # ongoing, completed, cancelled
    total_episodes: Optional[int] = None
    poster_url: Optional[str] = None
    last_updated: datetime = field(default_factory=datetime.now)
    created_at: datetime = field(default_factory=datetime.now)
    custom_metadata: Dict[str, Any] = field(default_factory=dict)


@dataclass
class EpisodeMetadata:
    """Represents episode metadata stored in database."""
    episode_id: str
    anime_id: str
    season: int
    episode: int
    title: Optional[str] = None
    description: Optional[str] = None
    duration_seconds: Optional[int] = None
    file_path: Optional[str] = None
    file_size_bytes: Optional[int] = None
    download_date: Optional[datetime] = None
    last_watched: Optional[datetime] = None
    watch_count: int = 0
    is_downloaded: bool = False
    quality: Optional[str] = None
    language: str = 'German Dub'


@dataclass
class BackupInfo:
    """Represents backup metadata."""
    backup_id: str
    backup_path: str
    backup_type: str  # full, incremental, metadata_only
    created_at: datetime
    size_bytes: int
    description: Optional[str] = None
    tables_included: List[str] = field(default_factory=list)
    checksum: Optional[str] = None


class DatabaseManager:
    """Manage SQLite database with migrations and maintenance."""

    def __init__(self, db_path: str = "./data/aniworld.db"):
        self.db_path = db_path
        self.db_dir = os.path.dirname(db_path)
        self.logger = logging.getLogger(__name__)
        self.lock = threading.Lock()

        # Create database directory
        os.makedirs(self.db_dir, exist_ok=True)

        # Initialize database
        self.initialize_database()

        # Run migrations
        self.run_migrations()

    @contextmanager
    def get_connection(self):
        """Get database connection with proper error handling."""
        conn = None
        try:
            conn = sqlite3.connect(self.db_path, timeout=30)
            conn.row_factory = sqlite3.Row  # Enable dict-like access
            yield conn
        except Exception as e:
            if conn:
                conn.rollback()
            self.logger.error(f"Database connection error: {e}")
            raise
        finally:
            if conn:
                conn.close()

    def initialize_database(self):
        """Initialize database with base schema."""
        with self.get_connection() as conn:
            # Create schema version table
            conn.execute("""
                CREATE TABLE IF NOT EXISTS schema_version (
                    version INTEGER PRIMARY KEY,
                    applied_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                    description TEXT
                )
            """)

            # Insert initial version if not exists
            conn.execute("""
                INSERT OR IGNORE INTO schema_version (version, description)
                VALUES (0, 'Initial schema')
            """)

            conn.commit()

    def get_current_version(self) -> int:
        """Get current database schema version."""
        with self.get_connection() as conn:
            cursor = conn.execute("SELECT MAX(version) FROM schema_version")
            result = cursor.fetchone()
            return result[0] if result and result[0] is not None else 0

    def run_migrations(self):
        """Run database migrations."""
        current_version = self.get_current_version()
        migrations = self.get_migrations()

        for version, migration in migrations.items():
            if version > current_version:
                self.logger.info(f"Running migration to version {version}")
                try:
                    with self.get_connection() as conn:
                        migration['up'](conn)

                        # Record migration
                        conn.execute("""
                            INSERT INTO schema_version (version, description)
                            VALUES (?, ?)
                        """, (version, migration['description']))

                        conn.commit()
                        self.logger.info(f"Migration to version {version} completed")

                except Exception as e:
                    self.logger.error(f"Migration to version {version} failed: {e}")
                    raise

    def get_migrations(self) -> Dict[int, Dict[str, Any]]:
        """Define database migrations."""
        return {
            1: {
                'description': 'Create anime metadata table',
                'up': self._migration_001_anime_table
            },
            2: {
                'description': 'Create episode metadata table',
                'up': self._migration_002_episode_table
            },
            3: {
                'description': 'Create download history table',
                'up': self._migration_003_download_history
            },
            4: {
                'description': 'Create user preferences table',
                'up': self._migration_004_user_preferences
            },
            5: {
                'description': 'Create storage locations table',
                'up': self._migration_005_storage_locations
            },
            6: {
                'description': 'Add indexes for performance',
                'up': self._migration_006_indexes
            }
        }

    def _migration_001_anime_table(self, conn: sqlite3.Connection):
        """Create anime metadata table."""
        conn.execute("""
            CREATE TABLE anime_metadata (
                anime_id TEXT PRIMARY KEY,
                name TEXT NOT NULL,
                folder TEXT NOT NULL UNIQUE,
                key TEXT,
                description TEXT,
                genres TEXT, -- JSON array
                release_year INTEGER,
                status TEXT DEFAULT 'ongoing',
                total_episodes INTEGER,
                poster_url TEXT,
                last_updated TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                custom_metadata TEXT -- JSON object
            )
        """)

    def _migration_002_episode_table(self, conn: sqlite3.Connection):
        """Create episode metadata table."""
        conn.execute("""
            CREATE TABLE episode_metadata (
                episode_id TEXT PRIMARY KEY,
                anime_id TEXT NOT NULL,
                season INTEGER NOT NULL,
                episode INTEGER NOT NULL,
                title TEXT,
                description TEXT,
                duration_seconds INTEGER,
                file_path TEXT,
                file_size_bytes INTEGER,
                download_date TIMESTAMP,
                last_watched TIMESTAMP,
                watch_count INTEGER DEFAULT 0,
                is_downloaded BOOLEAN DEFAULT FALSE,
                quality TEXT,
                language TEXT DEFAULT 'German Dub',
                FOREIGN KEY (anime_id) REFERENCES anime_metadata(anime_id),
                UNIQUE(anime_id, season, episode, language)
            )
        """)

    def _migration_003_download_history(self, conn: sqlite3.Connection):
        """Create download history table."""
        conn.execute("""
            CREATE TABLE download_history (
                download_id TEXT PRIMARY KEY,
                anime_id TEXT NOT NULL,
                season INTEGER NOT NULL,
                episode INTEGER NOT NULL,
                language TEXT NOT NULL,
                download_started TIMESTAMP NOT NULL,
                download_completed TIMESTAMP,
                download_status TEXT NOT NULL, -- started, completed, failed, cancelled
                file_size_bytes INTEGER,
                download_speed_mbps REAL,
                error_message TEXT,
                retry_count INTEGER DEFAULT 0,
                FOREIGN KEY (anime_id) REFERENCES anime_metadata(anime_id)
            )
        """)

    def _migration_004_user_preferences(self, conn: sqlite3.Connection):
        """Create user preferences table."""
        conn.execute("""
            CREATE TABLE user_preferences (
                key TEXT PRIMARY KEY,
                value TEXT NOT NULL, -- JSON value
                category TEXT NOT NULL,
                description TEXT,
                updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
            )
        """)

    def _migration_005_storage_locations(self, conn: sqlite3.Connection):
        """Create storage locations table."""
        conn.execute("""
            CREATE TABLE storage_locations (
                location_id TEXT PRIMARY KEY,
                anime_id TEXT,
                path TEXT NOT NULL,
                location_type TEXT NOT NULL, -- primary, backup, cache
                is_active BOOLEAN DEFAULT TRUE,
                free_space_bytes INTEGER,
                total_space_bytes INTEGER,
                last_checked TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
                FOREIGN KEY (anime_id) REFERENCES anime_metadata(anime_id)
            )
        """)

    def _migration_006_indexes(self, conn: sqlite3.Connection):
        """Add indexes for performance."""
        indexes = [
            "CREATE INDEX idx_anime_name ON anime_metadata(name)",
            "CREATE INDEX idx_anime_folder ON anime_metadata(folder)",
            "CREATE INDEX idx_anime_status ON anime_metadata(status)",
            "CREATE INDEX idx_episode_anime_id ON episode_metadata(anime_id)",
|
|
||||||
"CREATE INDEX idx_episode_season_episode ON episode_metadata(season, episode)",
|
|
||||||
"CREATE INDEX idx_episode_downloaded ON episode_metadata(is_downloaded)",
|
|
||||||
"CREATE INDEX idx_download_status ON download_history(download_status)",
|
|
||||||
"CREATE INDEX idx_download_date ON download_history(download_started)",
|
|
||||||
"CREATE INDEX idx_storage_active ON storage_locations(is_active)",
|
|
||||||
"CREATE INDEX idx_storage_type ON storage_locations(location_type)"
|
|
||||||
]
|
|
||||||
|
|
||||||
for index_sql in indexes:
|
|
||||||
try:
|
|
||||||
conn.execute(index_sql)
|
|
||||||
except sqlite3.OperationalError as e:
|
|
||||||
if "already exists" not in str(e):
|
|
||||||
raise
|
|
||||||
|
|
||||||
def execute_query(self, query: str, params: tuple = ()) -> List[sqlite3.Row]:
|
|
||||||
"""Execute a SELECT query and return results."""
|
|
||||||
with self.get_connection() as conn:
|
|
||||||
cursor = conn.execute(query, params)
|
|
||||||
return cursor.fetchall()
|
|
||||||
|
|
||||||
def execute_update(self, query: str, params: tuple = ()) -> int:
|
|
||||||
"""Execute an UPDATE/INSERT/DELETE query and return affected rows."""
|
|
||||||
with self.get_connection() as conn:
|
|
||||||
cursor = conn.execute(query, params)
|
|
||||||
conn.commit()
|
|
||||||
return cursor.rowcount
|
|
||||||
|
|
||||||
|
|
||||||
class AnimeRepository:
|
|
||||||
"""Repository for anime data operations."""
|
|
||||||
|
|
||||||
def __init__(self, db_manager: DatabaseManager):
|
|
||||||
self.db = db_manager
|
|
||||||
self.logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
def create_anime(self, metadata: AnimeMetadata) -> bool:
|
|
||||||
"""Create new anime record."""
|
|
||||||
try:
|
|
||||||
query = """
|
|
||||||
INSERT INTO anime_metadata (
|
|
||||||
anime_id, name, folder, key, description, genres,
|
|
||||||
release_year, status, total_episodes, poster_url,
|
|
||||||
custom_metadata
|
|
||||||
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
|
||||||
"""
|
|
||||||
|
|
||||||
params = (
|
|
||||||
metadata.anime_id,
|
|
||||||
metadata.name,
|
|
||||||
metadata.folder,
|
|
||||||
metadata.key,
|
|
||||||
metadata.description,
|
|
||||||
json.dumps(metadata.genres),
|
|
||||||
metadata.release_year,
|
|
||||||
metadata.status,
|
|
||||||
metadata.total_episodes,
|
|
||||||
metadata.poster_url,
|
|
||||||
json.dumps(metadata.custom_metadata)
|
|
||||||
)
|
|
||||||
|
|
||||||
rows_affected = self.db.execute_update(query, params)
|
|
||||||
return rows_affected > 0
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
self.logger.error(f"Failed to create anime {metadata.name}: {e}")
|
|
||||||
return False
|
|
||||||
|
|
||||||
def get_anime_by_folder(self, folder: str) -> Optional[AnimeMetadata]:
|
|
||||||
"""Get anime by folder name."""
|
|
||||||
try:
|
|
||||||
query = """
|
|
||||||
SELECT * FROM anime_metadata WHERE folder = ?
|
|
||||||
"""
|
|
||||||
|
|
||||||
results = self.db.execute_query(query, (folder,))
|
|
||||||
|
|
||||||
if results:
|
|
||||||
row = results[0]
|
|
||||||
return self._row_to_anime_metadata(row)
|
|
||||||
|
|
||||||
return None
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
self.logger.error(f"Failed to get anime by folder {folder}: {e}")
|
|
||||||
return None
|
|
||||||
|
|
||||||
def get_all_anime(self, status_filter: Optional[str] = None) -> List[AnimeMetadata]:
|
|
||||||
"""Get all anime, optionally filtered by status."""
|
|
||||||
try:
|
|
||||||
if status_filter:
|
|
||||||
query = "SELECT * FROM anime_metadata WHERE status = ? ORDER BY name"
|
|
||||||
params = (status_filter,)
|
|
||||||
else:
|
|
||||||
query = "SELECT * FROM anime_metadata ORDER BY name"
|
|
||||||
params = ()
|
|
||||||
|
|
||||||
results = self.db.execute_query(query, params)
|
|
||||||
|
|
||||||
return [self._row_to_anime_metadata(row) for row in results]
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
self.logger.error(f"Failed to get all anime: {e}")
|
|
||||||
return []
|
|
||||||
|
|
||||||
def update_anime(self, metadata: AnimeMetadata) -> bool:
|
|
||||||
"""Update anime metadata."""
|
|
||||||
try:
|
|
||||||
query = """
|
|
||||||
UPDATE anime_metadata SET
|
|
||||||
name = ?, key = ?, description = ?, genres = ?,
|
|
||||||
release_year = ?, status = ?, total_episodes = ?,
|
|
||||||
poster_url = ?, last_updated = CURRENT_TIMESTAMP,
|
|
||||||
custom_metadata = ?
|
|
||||||
WHERE anime_id = ?
|
|
||||||
"""
|
|
||||||
|
|
||||||
params = (
|
|
||||||
metadata.name,
|
|
||||||
metadata.key,
|
|
||||||
metadata.description,
|
|
||||||
json.dumps(metadata.genres),
|
|
||||||
metadata.release_year,
|
|
||||||
metadata.status,
|
|
||||||
metadata.total_episodes,
|
|
||||||
metadata.poster_url,
|
|
||||||
json.dumps(metadata.custom_metadata),
|
|
||||||
metadata.anime_id
|
|
||||||
)
|
|
||||||
|
|
||||||
rows_affected = self.db.execute_update(query, params)
|
|
||||||
return rows_affected > 0
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
self.logger.error(f"Failed to update anime {metadata.anime_id}: {e}")
|
|
||||||
return False
|
|
||||||
|
|
||||||
def delete_anime(self, anime_id: str) -> bool:
|
|
||||||
"""Delete anime and related data."""
|
|
||||||
try:
|
|
||||||
# Delete episodes first (foreign key constraint)
|
|
||||||
self.db.execute_update("DELETE FROM episode_metadata WHERE anime_id = ?", (anime_id,))
|
|
||||||
self.db.execute_update("DELETE FROM download_history WHERE anime_id = ?", (anime_id,))
|
|
||||||
self.db.execute_update("DELETE FROM storage_locations WHERE anime_id = ?", (anime_id,))
|
|
||||||
|
|
||||||
# Delete anime
|
|
||||||
rows_affected = self.db.execute_update("DELETE FROM anime_metadata WHERE anime_id = ?", (anime_id,))
|
|
||||||
|
|
||||||
return rows_affected > 0
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
self.logger.error(f"Failed to delete anime {anime_id}: {e}")
|
|
||||||
return False
|
|
||||||
|
|
||||||
def search_anime(self, search_term: str) -> List[AnimeMetadata]:
|
|
||||||
"""Search anime by name or description."""
|
|
||||||
try:
|
|
||||||
query = """
|
|
||||||
SELECT * FROM anime_metadata
|
|
||||||
WHERE name LIKE ? OR description LIKE ?
|
|
||||||
ORDER BY name
|
|
||||||
"""
|
|
||||||
|
|
||||||
search_pattern = f"%{search_term}%"
|
|
||||||
results = self.db.execute_query(query, (search_pattern, search_pattern))
|
|
||||||
|
|
||||||
return [self._row_to_anime_metadata(row) for row in results]
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
self.logger.error(f"Failed to search anime: {e}")
|
|
||||||
return []
|
|
||||||
|
|
||||||
def _row_to_anime_metadata(self, row: sqlite3.Row) -> AnimeMetadata:
|
|
||||||
"""Convert database row to AnimeMetadata object."""
|
|
||||||
return AnimeMetadata(
|
|
||||||
anime_id=row['anime_id'],
|
|
||||||
name=row['name'],
|
|
||||||
folder=row['folder'],
|
|
||||||
key=row['key'],
|
|
||||||
description=row['description'],
|
|
||||||
genres=json.loads(row['genres'] or '[]'),
|
|
||||||
release_year=row['release_year'],
|
|
||||||
status=row['status'],
|
|
||||||
total_episodes=row['total_episodes'],
|
|
||||||
poster_url=row['poster_url'],
|
|
||||||
last_updated=datetime.fromisoformat(row['last_updated']) if row['last_updated'] else datetime.now(),
|
|
||||||
created_at=datetime.fromisoformat(row['created_at']) if row['created_at'] else datetime.now(),
|
|
||||||
custom_metadata=json.loads(row['custom_metadata'] or '{}')
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
class BackupManager:
|
|
||||||
"""Manage database backups and restore operations."""
|
|
||||||
|
|
||||||
def __init__(self, db_manager: DatabaseManager, backup_dir: str = "./backups"):
|
|
||||||
self.db = db_manager
|
|
||||||
self.backup_dir = backup_dir
|
|
||||||
self.logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
# Create backup directory
|
|
||||||
os.makedirs(backup_dir, exist_ok=True)
|
|
||||||
|
|
||||||
def create_full_backup(self, description: str = None) -> Optional[BackupInfo]:
|
|
||||||
"""Create a full database backup."""
|
|
||||||
try:
|
|
||||||
backup_id = f"full_{datetime.now().strftime('%Y%m%d_%H%M%S')}"
|
|
||||||
backup_filename = f"{backup_id}.db"
|
|
||||||
backup_path = os.path.join(self.backup_dir, backup_filename)
|
|
||||||
|
|
||||||
# Copy database file
|
|
||||||
shutil.copy2(self.db.db_path, backup_path)
|
|
||||||
|
|
||||||
# Calculate checksum
|
|
||||||
checksum = self._calculate_file_checksum(backup_path)
|
|
||||||
|
|
||||||
# Get file size
|
|
||||||
size_bytes = os.path.getsize(backup_path)
|
|
||||||
|
|
||||||
# Get table list
|
|
||||||
with self.db.get_connection() as conn:
|
|
||||||
cursor = conn.execute("SELECT name FROM sqlite_master WHERE type='table'")
|
|
||||||
tables = [row[0] for row in cursor.fetchall()]
|
|
||||||
|
|
||||||
backup_info = BackupInfo(
|
|
||||||
backup_id=backup_id,
|
|
||||||
backup_path=backup_path,
|
|
||||||
backup_type='full',
|
|
||||||
created_at=datetime.now(),
|
|
||||||
size_bytes=size_bytes,
|
|
||||||
description=description or f"Full backup created on {datetime.now().strftime('%Y-%m-%d %H:%M')}",
|
|
||||||
tables_included=tables,
|
|
||||||
checksum=checksum
|
|
||||||
)
|
|
||||||
|
|
||||||
# Save backup metadata
|
|
||||||
self._save_backup_metadata(backup_info)
|
|
||||||
|
|
||||||
self.logger.info(f"Full backup created: {backup_id}")
|
|
||||||
return backup_info
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
self.logger.error(f"Failed to create full backup: {e}")
|
|
||||||
return None
|
|
||||||
|
|
||||||
def create_metadata_backup(self, description: str = None) -> Optional[BackupInfo]:
|
|
||||||
"""Create a metadata-only backup (excluding large binary data)."""
|
|
||||||
try:
|
|
||||||
backup_id = f"metadata_{datetime.now().strftime('%Y%m%d_%H%M%S')}"
|
|
||||||
backup_filename = f"{backup_id}.json"
|
|
||||||
backup_path = os.path.join(self.backup_dir, backup_filename)
|
|
||||||
|
|
||||||
# Export metadata as JSON
|
|
||||||
metadata = self._export_metadata()
|
|
||||||
|
|
||||||
with open(backup_path, 'w', encoding='utf-8') as f:
|
|
||||||
json.dump(metadata, f, indent=2, default=str)
|
|
||||||
|
|
||||||
# Calculate checksum
|
|
||||||
checksum = self._calculate_file_checksum(backup_path)
|
|
||||||
|
|
||||||
# Get file size
|
|
||||||
size_bytes = os.path.getsize(backup_path)
|
|
||||||
|
|
||||||
backup_info = BackupInfo(
|
|
||||||
backup_id=backup_id,
|
|
||||||
backup_path=backup_path,
|
|
||||||
backup_type='metadata_only',
|
|
||||||
created_at=datetime.now(),
|
|
||||||
size_bytes=size_bytes,
|
|
||||||
description=description or f"Metadata backup created on {datetime.now().strftime('%Y-%m-%d %H:%M')}",
|
|
||||||
tables_included=['anime_metadata', 'episode_metadata', 'user_preferences'],
|
|
||||||
checksum=checksum
|
|
||||||
)
|
|
||||||
|
|
||||||
# Save backup metadata
|
|
||||||
self._save_backup_metadata(backup_info)
|
|
||||||
|
|
||||||
self.logger.info(f"Metadata backup created: {backup_id}")
|
|
||||||
return backup_info
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
self.logger.error(f"Failed to create metadata backup: {e}")
|
|
||||||
return None
|
|
||||||
|
|
||||||
def restore_backup(self, backup_id: str) -> bool:
|
|
||||||
"""Restore from a backup."""
|
|
||||||
try:
|
|
||||||
backup_info = self._load_backup_metadata(backup_id)
|
|
||||||
if not backup_info:
|
|
||||||
self.logger.error(f"Backup not found: {backup_id}")
|
|
||||||
return False
|
|
||||||
|
|
||||||
if not os.path.exists(backup_info.backup_path):
|
|
||||||
self.logger.error(f"Backup file not found: {backup_info.backup_path}")
|
|
||||||
return False
|
|
||||||
|
|
||||||
# Verify backup integrity
|
|
||||||
if not self._verify_backup_integrity(backup_info):
|
|
||||||
self.logger.error(f"Backup integrity check failed: {backup_id}")
|
|
||||||
return False
|
|
||||||
|
|
||||||
# Create a backup of current database before restore
|
|
||||||
current_backup = self.create_full_backup(f"Pre-restore backup before restoring {backup_id}")
|
|
||||||
|
|
||||||
if backup_info.backup_type == 'full':
|
|
||||||
# Replace database file
|
|
||||||
shutil.copy2(backup_info.backup_path, self.db.db_path)
|
|
||||||
|
|
||||||
elif backup_info.backup_type == 'metadata_only':
|
|
||||||
# Restore metadata from JSON
|
|
||||||
with open(backup_info.backup_path, 'r', encoding='utf-8') as f:
|
|
||||||
metadata = json.load(f)
|
|
||||||
|
|
||||||
self._import_metadata(metadata)
|
|
||||||
|
|
||||||
self.logger.info(f"Backup restored successfully: {backup_id}")
|
|
||||||
return True
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
self.logger.error(f"Failed to restore backup {backup_id}: {e}")
|
|
||||||
return False
|
|
||||||
|
|
||||||
def list_backups(self) -> List[BackupInfo]:
|
|
||||||
"""List all available backups."""
|
|
||||||
backups = []
|
|
||||||
|
|
||||||
try:
|
|
||||||
# Look for backup metadata files
|
|
||||||
metadata_pattern = os.path.join(self.backup_dir, "*.backup_info.json")
|
|
||||||
|
|
||||||
for metadata_file in glob.glob(metadata_pattern):
|
|
||||||
try:
|
|
||||||
with open(metadata_file, 'r') as f:
|
|
||||||
backup_data = json.load(f)
|
|
||||||
|
|
||||||
backup_info = BackupInfo(
|
|
||||||
backup_id=backup_data['backup_id'],
|
|
||||||
backup_path=backup_data['backup_path'],
|
|
||||||
backup_type=backup_data['backup_type'],
|
|
||||||
created_at=datetime.fromisoformat(backup_data['created_at']),
|
|
||||||
size_bytes=backup_data['size_bytes'],
|
|
||||||
description=backup_data.get('description'),
|
|
||||||
tables_included=backup_data.get('tables_included', []),
|
|
||||||
checksum=backup_data.get('checksum')
|
|
||||||
)
|
|
||||||
|
|
||||||
backups.append(backup_info)
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
self.logger.warning(f"Failed to load backup metadata from {metadata_file}: {e}")
|
|
||||||
|
|
||||||
# Sort by creation date (newest first)
|
|
||||||
backups.sort(key=lambda b: b.created_at, reverse=True)
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
self.logger.error(f"Failed to list backups: {e}")
|
|
||||||
|
|
||||||
return backups
|
|
||||||
|
|
||||||
def cleanup_old_backups(self, keep_days: int = 30, keep_count: int = 10):
|
|
||||||
"""Clean up old backup files."""
|
|
||||||
try:
|
|
||||||
backups = self.list_backups()
|
|
||||||
cutoff_date = datetime.now() - timedelta(days=keep_days)
|
|
||||||
|
|
||||||
# Keep at least keep_count backups regardless of age
|
|
||||||
backups_to_delete = []
|
|
||||||
|
|
||||||
for i, backup in enumerate(backups):
|
|
||||||
if i >= keep_count and backup.created_at < cutoff_date:
|
|
||||||
backups_to_delete.append(backup)
|
|
||||||
|
|
||||||
for backup in backups_to_delete:
|
|
||||||
try:
|
|
||||||
# Remove backup file
|
|
||||||
if os.path.exists(backup.backup_path):
|
|
||||||
os.remove(backup.backup_path)
|
|
||||||
|
|
||||||
# Remove metadata file
|
|
||||||
metadata_file = f"{backup.backup_path}.backup_info.json"
|
|
||||||
if os.path.exists(metadata_file):
|
|
||||||
os.remove(metadata_file)
|
|
||||||
|
|
||||||
self.logger.info(f"Removed old backup: {backup.backup_id}")
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
self.logger.warning(f"Failed to remove backup {backup.backup_id}: {e}")
|
|
||||||
|
|
||||||
if backups_to_delete:
|
|
||||||
self.logger.info(f"Cleaned up {len(backups_to_delete)} old backups")
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
self.logger.error(f"Failed to cleanup old backups: {e}")
|
|
||||||
|
|
||||||
def _export_metadata(self) -> Dict[str, Any]:
|
|
||||||
"""Export database metadata to dictionary."""
|
|
||||||
metadata = {
|
|
||||||
'export_date': datetime.now().isoformat(),
|
|
||||||
'schema_version': self.db.get_current_version(),
|
|
||||||
'tables': {}
|
|
||||||
}
|
|
||||||
|
|
||||||
# Export specific tables
|
|
||||||
tables_to_export = ['anime_metadata', 'episode_metadata', 'user_preferences', 'storage_locations']
|
|
||||||
|
|
||||||
with self.db.get_connection() as conn:
|
|
||||||
for table in tables_to_export:
|
|
||||||
try:
|
|
||||||
cursor = conn.execute(f"SELECT * FROM {table}")
|
|
||||||
rows = cursor.fetchall()
|
|
||||||
|
|
||||||
# Convert rows to dictionaries
|
|
||||||
metadata['tables'][table] = [dict(row) for row in rows]
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
self.logger.warning(f"Failed to export table {table}: {e}")
|
|
||||||
|
|
||||||
return metadata
|
|
||||||
|
|
||||||
def _import_metadata(self, metadata: Dict[str, Any]):
|
|
||||||
"""Import metadata from dictionary to database."""
|
|
||||||
with self.db.get_connection() as conn:
|
|
||||||
for table_name, rows in metadata.get('tables', {}).items():
|
|
||||||
if not rows:
|
|
||||||
continue
|
|
||||||
|
|
||||||
try:
|
|
||||||
# Clear existing data (be careful!)
|
|
||||||
conn.execute(f"DELETE FROM {table_name}")
|
|
||||||
|
|
||||||
# Insert new data
|
|
||||||
if rows:
|
|
||||||
columns = list(rows[0].keys())
|
|
||||||
placeholders = ','.join(['?' for _ in columns])
|
|
||||||
insert_sql = f"INSERT INTO {table_name} ({','.join(columns)}) VALUES ({placeholders})"
|
|
||||||
|
|
||||||
for row in rows:
|
|
||||||
values = [row[col] for col in columns]
|
|
||||||
conn.execute(insert_sql, values)
|
|
||||||
|
|
||||||
conn.commit()
|
|
||||||
self.logger.info(f"Imported {len(rows)} rows to {table_name}")
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
self.logger.error(f"Failed to import table {table_name}: {e}")
|
|
||||||
conn.rollback()
|
|
||||||
raise
|
|
||||||
|
|
||||||
def _calculate_file_checksum(self, file_path: str) -> str:
|
|
||||||
"""Calculate SHA256 checksum of file."""
|
|
||||||
hash_sha256 = hashlib.sha256()
|
|
||||||
with open(file_path, 'rb') as f:
|
|
||||||
for chunk in iter(lambda: f.read(4096), b""):
|
|
||||||
hash_sha256.update(chunk)
|
|
||||||
return hash_sha256.hexdigest()
|
|
||||||
|
|
||||||
def _verify_backup_integrity(self, backup_info: BackupInfo) -> bool:
|
|
||||||
"""Verify backup file integrity using checksum."""
|
|
||||||
if not backup_info.checksum:
|
|
||||||
return True # No checksum to verify
|
|
||||||
|
|
||||||
current_checksum = self._calculate_file_checksum(backup_info.backup_path)
|
|
||||||
return current_checksum == backup_info.checksum
|
|
||||||
|
|
||||||
def _save_backup_metadata(self, backup_info: BackupInfo):
|
|
||||||
"""Save backup metadata to file."""
|
|
||||||
metadata_file = f"{backup_info.backup_path}.backup_info.json"
|
|
||||||
|
|
||||||
metadata = {
|
|
||||||
'backup_id': backup_info.backup_id,
|
|
||||||
'backup_path': backup_info.backup_path,
|
|
||||||
'backup_type': backup_info.backup_type,
|
|
||||||
'created_at': backup_info.created_at.isoformat(),
|
|
||||||
'size_bytes': backup_info.size_bytes,
|
|
||||||
'description': backup_info.description,
|
|
||||||
'tables_included': backup_info.tables_included,
|
|
||||||
'checksum': backup_info.checksum
|
|
||||||
}
|
|
||||||
|
|
||||||
with open(metadata_file, 'w') as f:
|
|
||||||
json.dump(metadata, f, indent=2)
|
|
||||||
|
|
||||||
def _load_backup_metadata(self, backup_id: str) -> Optional[BackupInfo]:
|
|
||||||
"""Load backup metadata from file."""
|
|
||||||
# Look for metadata file
|
|
||||||
metadata_pattern = os.path.join(self.backup_dir, f"{backup_id}.*.backup_info.json")
|
|
||||||
metadata_files = glob.glob(metadata_pattern)
|
|
||||||
|
|
||||||
if not metadata_files:
|
|
||||||
return None
|
|
||||||
|
|
||||||
try:
|
|
||||||
with open(metadata_files[0], 'r') as f:
|
|
||||||
backup_data = json.load(f)
|
|
||||||
|
|
||||||
return BackupInfo(
|
|
||||||
backup_id=backup_data['backup_id'],
|
|
||||||
backup_path=backup_data['backup_path'],
|
|
||||||
backup_type=backup_data['backup_type'],
|
|
||||||
created_at=datetime.fromisoformat(backup_data['created_at']),
|
|
||||||
size_bytes=backup_data['size_bytes'],
|
|
||||||
description=backup_data.get('description'),
|
|
||||||
tables_included=backup_data.get('tables_included', []),
|
|
||||||
checksum=backup_data.get('checksum')
|
|
||||||
)
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
self.logger.error(f"Failed to load backup metadata for {backup_id}: {e}")
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
class StorageManager:
|
|
||||||
"""Manage storage locations and usage monitoring."""
|
|
||||||
|
|
||||||
def __init__(self, db_manager: DatabaseManager):
|
|
||||||
self.db = db_manager
|
|
||||||
self.logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
def add_storage_location(self, path: str, location_type: str = 'primary', anime_id: str = None) -> str:
|
|
||||||
"""Add a new storage location."""
|
|
||||||
location_id = str(uuid.uuid4())
|
|
||||||
|
|
||||||
query = """
|
|
||||||
INSERT INTO storage_locations
|
|
||||||
(location_id, anime_id, path, location_type, is_active)
|
|
||||||
VALUES (?, ?, ?, ?, ?)
|
|
||||||
"""
|
|
||||||
|
|
||||||
self.db.execute_update(query, (location_id, anime_id, path, location_type, True))
|
|
||||||
|
|
||||||
# Update storage stats
|
|
||||||
self.update_storage_stats(location_id)
|
|
||||||
|
|
||||||
return location_id
|
|
||||||
|
|
||||||
def update_storage_stats(self, location_id: str):
|
|
||||||
"""Update storage statistics for a location."""
|
|
||||||
try:
|
|
||||||
# Get location path
|
|
||||||
query = "SELECT path FROM storage_locations WHERE location_id = ?"
|
|
||||||
results = self.db.execute_query(query, (location_id,))
|
|
||||||
|
|
||||||
if not results:
|
|
||||||
return
|
|
||||||
|
|
||||||
path = results[0]['path']
|
|
||||||
|
|
||||||
if os.path.exists(path):
|
|
||||||
# Get disk usage
|
|
||||||
stat = shutil.disk_usage(path)
|
|
||||||
|
|
||||||
# Update database
|
|
||||||
update_query = """
|
|
||||||
UPDATE storage_locations
|
|
||||||
SET free_space_bytes = ?, total_space_bytes = ?, last_checked = CURRENT_TIMESTAMP
|
|
||||||
WHERE location_id = ?
|
|
||||||
"""
|
|
||||||
|
|
||||||
self.db.execute_update(update_query, (stat.free, stat.total, location_id))
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
self.logger.error(f"Failed to update storage stats for {location_id}: {e}")
|
|
||||||
|
|
||||||
def get_storage_summary(self) -> Dict[str, Any]:
|
|
||||||
"""Get storage usage summary."""
|
|
||||||
query = """
|
|
||||||
SELECT
|
|
||||||
location_type,
|
|
||||||
COUNT(*) as location_count,
|
|
||||||
SUM(free_space_bytes) as total_free,
|
|
||||||
SUM(total_space_bytes) as total_space
|
|
||||||
FROM storage_locations
|
|
||||||
WHERE is_active = 1
|
|
||||||
GROUP BY location_type
|
|
||||||
"""
|
|
||||||
|
|
||||||
results = self.db.execute_query(query)
|
|
||||||
|
|
||||||
summary = {}
|
|
||||||
for row in results:
|
|
||||||
summary[row['location_type']] = {
|
|
||||||
'location_count': row['location_count'],
|
|
||||||
'total_free_gb': (row['total_free'] or 0) / (1024**3),
|
|
||||||
'total_space_gb': (row['total_space'] or 0) / (1024**3),
|
|
||||||
'usage_percent': ((row['total_space'] - row['total_free']) / row['total_space'] * 100) if row['total_space'] else 0
|
|
||||||
}
|
|
||||||
|
|
||||||
return summary
|
|
||||||
|
|
||||||
|
|
||||||
# Global instances
|
|
||||||
database_manager = DatabaseManager()
|
|
||||||
anime_repository = AnimeRepository(database_manager)
|
|
||||||
backup_manager = BackupManager(database_manager)
|
|
||||||
storage_manager = StorageManager(database_manager)
|
|
||||||
|
|
||||||
|
|
||||||
def init_database_system():
|
|
||||||
"""Initialize database system."""
|
|
||||||
# Database is initialized on creation
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
def cleanup_database_system():
|
|
||||||
"""Clean up database resources."""
|
|
||||||
# No specific cleanup needed for SQLite
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
# Export main components
|
|
||||||
__all__ = [
|
|
||||||
'DatabaseManager',
|
|
||||||
'AnimeRepository',
|
|
||||||
'BackupManager',
|
|
||||||
'StorageManager',
|
|
||||||
'AnimeMetadata',
|
|
||||||
'EpisodeMetadata',
|
|
||||||
'BackupInfo',
|
|
||||||
'database_manager',
|
|
||||||
'anime_repository',
|
|
||||||
'backup_manager',
|
|
||||||
'storage_manager',
|
|
||||||
'init_database_system',
|
|
||||||
'cleanup_database_system'
|
|
||||||
]
|
|
||||||
@ -1,6 +0,0 @@
"""
Repository package for data access layer.

This package contains repository implementations following the Repository pattern
for clean separation of data access logic from business logic.
"""
@ -1,24 +0,0 @@
# AniWorld FastAPI Server Configuration

# Authentication Configuration
JWT_SECRET_KEY=your-super-secure-jwt-secret-key-change-this-in-production
PASSWORD_SALT=c3149a46648b4394410b415ea654c31731b988ee59fc91b8fb8366a0b32ef0c1
MASTER_PASSWORD=admin123
# MASTER_PASSWORD_HASH=bb202031f646922388567de96a784074272efbbba9eb5d2259e23af04686d2a5
SESSION_TIMEOUT_HOURS=24

# Application Configuration
ANIME_DIRECTORY=\\\\sshfs.r\\ubuntu@192.168.178.43\\media\\serien\\Serien
LOG_LEVEL=INFO

# Database Configuration (if needed)
DATABASE_URL=sqlite:///./aniworld.db

# Security Configuration
CORS_ORIGINS=*
API_RATE_LIMIT=100

# Provider Configuration
DEFAULT_PROVIDER=aniworld.to
PROVIDER_TIMEOUT=30
RETRY_ATTEMPTS=3
@ -1,257 +0,0 @@
# AniWorld FastAPI Server

A comprehensive FastAPI-based server implementation for AniWorld, following the project instructions.

## 🚀 Features

### ✅ Authentication System (Completed)
- **Simple Master Password Authentication**: Single master password for the entire application
- **JWT Token Management**: Stateless authentication using JWT tokens (see the sketch below)
- **Environment Configuration**: Secure password hash stored in environment variables
- **Session Management**: Configurable token expiry (default: 24 hours)
- **Security Features**: SHA-256 password hashing with salt
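
A minimal sketch of this stateless token lifecycle, assuming the PyJWT library; the helper names below are illustrative, not taken from `fastapi_app.py`:

```python
# Minimal sketch of the stateless JWT lifecycle, assuming the PyJWT library.
# Helper names are illustrative; the real logic lives in fastapi_app.py.
from datetime import datetime, timedelta, timezone

import jwt  # PyJWT

JWT_SECRET_KEY = "change-me"      # read from the environment in practice
SESSION_TIMEOUT_HOURS = 24

def create_token() -> str:
    """Issue a token that expires after the configured session timeout."""
    payload = {"exp": datetime.now(timezone.utc) + timedelta(hours=SESSION_TIMEOUT_HOURS)}
    return jwt.encode(payload, JWT_SECRET_KEY, algorithm="HS256")

def verify_token(token: str) -> bool:
    """Return True if the token is well-formed, signed by us, and not expired."""
    try:
        jwt.decode(token, JWT_SECRET_KEY, algorithms=["HS256"])
        return True
    except jwt.InvalidTokenError:  # also covers ExpiredSignatureError
        return False
```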

### ✅ API Endpoints (Implemented)

#### Authentication Endpoints
- `POST /auth/login` - Login with master password and receive JWT token
- `GET /auth/verify` - Verify JWT token validity (protected)
- `POST /auth/logout` - Logout endpoint (stateless - client removes token)

#### System Endpoints
- `GET /` - Root endpoint with API information
- `GET /health` - Health check endpoint
- `GET /api/system/config` - System configuration (protected)
- `GET /api/system/database/health` - Database health check (protected)

#### Anime & Episode Endpoints (Protected)
- `GET /api/anime/search` - Search anime by title with pagination
- `GET /api/anime/{anime_id}` - Get specific anime details
- `GET /api/anime/{anime_id}/episodes` - Get all episodes for anime
- `GET /api/episodes/{episode_id}` - Get specific episode details

### 🔧 Technical Features
- **FastAPI Framework**: Modern, fast (high-performance) web framework
- **OpenAPI Documentation**: Automatic API documentation at `/docs`
- **CORS Support**: Configurable cross-origin resource sharing
- **Request Validation**: Pydantic models for request/response validation (see the sketch below)
- **Error Handling**: Centralized error handling with proper HTTP status codes
- **Logging**: Comprehensive logging system with file and console output
- **Environment Configuration**: Secure configuration via environment variables
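
As a rough illustration of the request/response validation point, the login exchange documented under "API Usage" below maps onto Pydantic models like these (the class names are assumptions; the field names mirror the JSON examples in this README):

```python
# Hypothetical Pydantic models for the /auth/login exchange.
# Field names mirror the JSON shown under "API Usage"; class names are assumed.
from typing import Optional

from pydantic import BaseModel

class LoginRequest(BaseModel):
    password: str

class LoginResponse(BaseModel):
    success: bool
    message: str
    token: Optional[str] = None
    expires_at: Optional[str] = None
```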

## 🛠️ Installation & Setup

### Prerequisites
- Python 3.11+ (AniWorld conda environment)
- Conda package manager

### 1. Activate AniWorld Environment
```bash
conda activate AniWorld
```

### 2. Install Dependencies
```bash
cd src/server
pip install -r requirements_fastapi.txt
```

### 3. Configure Environment
Create or update the `.env` file:
```env
# Authentication
JWT_SECRET_KEY=your-super-secure-jwt-secret-key
PASSWORD_SALT=your-secure-salt
MASTER_PASSWORD=admin123
SESSION_TIMEOUT_HOURS=24

# Application
ANIME_DIRECTORY=your-anime-directory-path
LOG_LEVEL=INFO

# Optional
DATABASE_URL=sqlite:///./aniworld.db
CORS_ORIGINS=*
```

### 4. Start the Server

#### Option 1: Direct Python Execution
```bash
cd src/server
C:\Users\lukas\anaconda3\envs\AniWorld\python.exe fastapi_app.py
```

#### Option 2: Using Batch Script (Windows)
```cmd
cd src/server
run_and_test.bat
```

#### Option 3: Using Shell Script (Linux/Mac)
```bash
cd src/server
chmod +x start_fastapi_server.sh
./start_fastapi_server.sh
```

## 📖 API Usage

### 1. Access Documentation
Visit: http://localhost:8000/docs

### 2. Authentication Flow

#### Step 1: Login
```bash
curl -X POST "http://localhost:8000/auth/login" \
  -H "Content-Type: application/json" \
  -d '{"password": "admin123"}'
```

Response:
```json
{
  "success": true,
  "message": "Authentication successful",
  "token": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9...",
  "expires_at": "2025-10-06T18:19:24.710065"
}
```

#### Step 2: Use Token for Protected Endpoints
```bash
curl -X GET "http://localhost:8000/api/anime/search?query=naruto&limit=5" \
  -H "Authorization: Bearer YOUR_JWT_TOKEN"
```

### 3. Example API Calls

#### Health Check
```bash
curl "http://localhost:8000/health"
```

#### Search Anime
```bash
curl -H "Authorization: Bearer YOUR_TOKEN" \
  "http://localhost:8000/api/anime/search?query=naruto&limit=10"
```

#### Get Anime Details
```bash
curl -H "Authorization: Bearer YOUR_TOKEN" \
  "http://localhost:8000/api/anime/anime_123"
```

## 🧪 Testing

### Automated Testing
```bash
cd src/server
C:\Users\lukas\anaconda3\envs\AniWorld\python.exe test_fastapi.py
```

### Manual Testing
1. Start the server
2. Visit http://localhost:8000/docs
3. Use the interactive API documentation
4. Test authentication with password: `admin123`

## 📁 Project Structure

```
src/server/
├── fastapi_app.py             # Main FastAPI application
├── .env                       # Environment configuration
├── requirements_fastapi.txt   # Python dependencies
├── test_fastapi.py            # Test script
├── start_fastapi_server.bat   # Windows startup script
├── start_fastapi_server.sh    # Linux/Mac startup script
├── run_and_test.bat           # Windows test runner
└── logs/                      # Log files
```

## 🔐 Security

### Authentication
- Master password authentication (no user registration required)
- JWT tokens with configurable expiry
- Secure password hashing (SHA-256 + salt)
- Environment-based secret management

### API Security
- All anime/episode endpoints require authentication
- CORS protection
- Input validation using Pydantic
- Error handling without sensitive data exposure

## 🔧 Configuration

### Environment Variables
- `JWT_SECRET_KEY`: Secret key for JWT token signing
- `PASSWORD_SALT`: Salt for password hashing
- `MASTER_PASSWORD`: Master password (development only)
- `MASTER_PASSWORD_HASH`: Hashed master password (production)
- `SESSION_TIMEOUT_HOURS`: JWT token expiry time
- `ANIME_DIRECTORY`: Path to anime files
- `LOG_LEVEL`: Logging level (DEBUG, INFO, WARNING, ERROR)

### Production Configuration
1. Set `MASTER_PASSWORD_HASH` instead of `MASTER_PASSWORD` (see the sketch below)
2. Use a strong `JWT_SECRET_KEY`
3. Set appropriate `CORS_ORIGINS`
4. Configure proper logging levels
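
A one-off way to derive the production hash, assuming the server verifies with the same `sha256(password + salt)` scheme used by `Config.verify_password` elsewhere in this changeset (both values below are placeholders for your real secrets):

```python
# One-off helper to derive MASTER_PASSWORD_HASH for production use.
# Assumes the sha256(password + salt) scheme of Config.verify_password;
# both values below are placeholders for your real secrets.
import hashlib

password = "your-master-password"   # the password you will log in with
salt = "your-secure-salt"           # must match PASSWORD_SALT in .env
print(hashlib.sha256((password + salt).encode()).hexdigest())
```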

## 📊 API Status

| Endpoint Category | Status | Coverage |
|-------------------|--------|----------|
| Authentication | ✅ Complete | 100% |
| Health/System | ✅ Complete | 100% |
| Anime Search | ✅ Implemented | Mock data |
| Episode Management | ✅ Implemented | Mock data |
| Database Integration | 🔄 Placeholder | Todo |
| Real Data Provider | 🔄 Placeholder | Todo |

## 🚧 Future Enhancements

### High Priority
- [ ] Connect to actual anime database/provider
- [ ] Implement real anime search functionality
- [ ] Add episode streaming capabilities
- [ ] Database connection pooling

### Medium Priority
- [ ] Redis caching layer
- [ ] Rate limiting middleware
- [ ] Background task processing
- [ ] WebSocket support

### Low Priority
- [ ] Advanced search filters
- [ ] User preferences (multi-user support)
- [ ] Download progress tracking
- [ ] Statistics and analytics

## 📝 License

This project follows the AniWorld project licensing terms.

## 🤝 Contributing

1. Follow the coding standards in `.github/copilot-instructions.md`
2. Use type hints and Pydantic models
3. Add comprehensive logging
4. Include tests for new features
5. Update documentation

## 📞 Support

- API Documentation: http://localhost:8000/docs
- Health Check: http://localhost:8000/health
- Logs: check `logs/aniworld.log` for detailed information

---

**Note**: This FastAPI implementation provides a solid foundation that follows the project instructions. The authentication system is complete and production-ready, while the anime/episode endpoints currently return mock data pending integration with the actual data providers.
@ -1,573 +0,0 @@
import os
import json
import hashlib
import secrets
from typing import Dict, Any, Optional
from datetime import datetime, timedelta


class Config:
    """Configuration management for the AniWorld Flask app."""

    def __init__(self, config_file: str = "data/config.json"):
        self.config_file = config_file
        self.default_config = {
            "security": {
                "master_password_hash": None,
                "salt": None,
                "session_timeout_hours": 24,
                "max_failed_attempts": 5,
                "lockout_duration_minutes": 30
            },
            "anime": {
                "directory": os.getenv("ANIME_DIRECTORY", "\\\\sshfs.r\\ubuntu@192.168.178.43\\media\\serien\\Serien"),
                "download_threads": 3,
                "download_speed_limit": None,
                "auto_rescan_time": "03:00",
                "auto_download_after_rescan": False
            },
            "logging": {
                "level": "INFO",
                "enable_console_logging": True,
                "enable_console_progress": False,
                "enable_fail2ban_logging": True,
                "log_file": "./logs/aniworld.log",
                "max_log_size_mb": 10,
                "log_backup_count": 5
            },
            "providers": {
                "default_provider": "aniworld.to",
                "preferred_language": "German Dub",
                "fallback_providers": ["aniworld.to"],
                "provider_timeout": 30,
                "retry_attempts": 3,
                "provider_settings": {
                    "aniworld.to": {
                        "enabled": True,
                        "priority": 1,
                        "quality_preference": "720p"
                    }
                }
            },
            "advanced": {
                "max_concurrent_downloads": 3,
                "download_buffer_size": 8192,
                "connection_timeout": 30,
                "read_timeout": 300,
                "enable_debug_mode": False,
                "cache_duration_minutes": 60
            }
        }
        self._config = self._load_config()

    def _load_config(self) -> Dict[str, Any]:
        """Load configuration from file or create default."""
        try:
            if os.path.exists(self.config_file):
                with open(self.config_file, 'r', encoding='utf-8') as f:
                    config = json.load(f)
                # Merge with defaults to ensure all keys exist
                return self._merge_configs(self.default_config, config)
            else:
                return self.default_config.copy()
        except Exception as e:
            print(f"Error loading config: {e}")
            return self.default_config.copy()

    def _merge_configs(self, default: Dict[str, Any], user: Dict[str, Any]) -> Dict[str, Any]:
        """Recursively merge user config with defaults."""
        result = default.copy()
        for key, value in user.items():
            if key in result and isinstance(result[key], dict) and isinstance(value, dict):
                result[key] = self._merge_configs(result[key], value)
            else:
                result[key] = value
        return result

    def save_config(self) -> bool:
        """Save current configuration to file."""
        try:
            with open(self.config_file, 'w', encoding='utf-8') as f:
                json.dump(self._config, f, indent=4)
            return True
        except Exception as e:
            print(f"Error saving config: {e}")
            return False

    def get(self, key_path: str, default: Any = None) -> Any:
        """Get config value using dot notation (e.g., 'security.master_password_hash')."""
        keys = key_path.split('.')
        value = self._config

        for key in keys:
            if isinstance(value, dict) and key in value:
                value = value[key]
            else:
                return default

        return value

    def set(self, key_path: str, value: Any) -> bool:
        """Set config value using dot notation."""
        keys = key_path.split('.')
        config = self._config

        # Navigate to parent
        for key in keys[:-1]:
            if key not in config:
                config[key] = {}
            config = config[key]

        # Set final value
        config[keys[-1]] = value
        return self.save_config()

    def set_master_password(self, password: str) -> bool:
        """Set master password with secure hashing."""
        try:
            # Generate salt
            salt = secrets.token_hex(32)

            # Hash password with salt
            password_hash = hashlib.sha256((password + salt).encode()).hexdigest()

            # Save to config
            self.set("security.salt", salt)
            self.set("security.master_password_hash", password_hash)

            return True
        except Exception as e:
            print(f"Error setting master password: {e}")
            return False

    def verify_password(self, password: str) -> bool:
        """Verify password against stored hash."""
        try:
            stored_hash = self.get("security.master_password_hash")
            salt = self.get("security.salt")

            if not stored_hash or not salt:
                return False

            # Hash provided password with stored salt
            password_hash = hashlib.sha256((password + salt).encode()).hexdigest()

            return password_hash == stored_hash
        except Exception as e:
            print(f"Error verifying password: {e}")
            return False

    def has_master_password(self) -> bool:
        """Check if master password is configured."""
        return bool(self.get("security.master_password_hash"))

    def backup_config(self, backup_path: Optional[str] = None) -> str:
        """Create backup of current configuration."""
        if not backup_path:
            timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
            backup_path = f"config_backup_{timestamp}.json"

        try:
            with open(backup_path, 'w', encoding='utf-8') as f:
                json.dump(self._config, f, indent=4)
            return backup_path
        except Exception as e:
            raise Exception(f"Failed to create backup: {e}")

    def restore_config(self, backup_path: str) -> bool:
        """Restore configuration from backup."""
        try:
            with open(backup_path, 'r', encoding='utf-8') as f:
                config = json.load(f)

            # Validate config before restoring
            validation_result = self.validate_config(config)
            if not validation_result['valid']:
                raise Exception(f"Invalid configuration: {validation_result['errors']}")

            self._config = self._merge_configs(self.default_config, config)
            return self.save_config()
        except Exception as e:
            print(f"Error restoring config: {e}")
            return False

    def validate_config(self, config: Dict[str, Any] = None) -> Dict[str, Any]:
        """Validate configuration structure and values."""
        if config is None:
            config = self._config

        errors = []
        warnings = []

        # Validate security settings
        security = config.get('security', {})
        if security.get('session_timeout_hours', 0) < 1 or security.get('session_timeout_hours', 0) > 168:
            errors.append("Session timeout must be between 1 and 168 hours")

        if security.get('max_failed_attempts', 0) < 1 or security.get('max_failed_attempts', 0) > 50:
            errors.append("Max failed attempts must be between 1 and 50")

        if security.get('lockout_duration_minutes', 0) < 1 or security.get('lockout_duration_minutes', 0) > 1440:
            errors.append("Lockout duration must be between 1 and 1440 minutes")

        # Validate anime settings
        anime = config.get('anime', {})
        directory = anime.get('directory', '')
        if directory and not os.path.exists(directory) and not directory.startswith('\\\\'):
            warnings.append(f"Anime directory does not exist: {directory}")

        download_threads = anime.get('download_threads', 1)
        if download_threads < 1 or download_threads > 10:
            errors.append("Download threads must be between 1 and 10")

        # Validate logging settings
        logging_config = config.get('logging', {})
        log_level = logging_config.get('level', 'INFO')
        if log_level not in ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']:
            errors.append(f"Invalid log level: {log_level}")

        # Validate provider settings
        providers = config.get('providers', {})
        provider_timeout = providers.get('provider_timeout', 30)
        if provider_timeout < 5 or provider_timeout > 300:
            errors.append("Provider timeout must be between 5 and 300 seconds")

        retry_attempts = providers.get('retry_attempts', 3)
        if retry_attempts < 0 or retry_attempts > 10:
            errors.append("Retry attempts must be between 0 and 10")

        # Validate advanced settings
        advanced = config.get('advanced', {})
        max_concurrent = advanced.get('max_concurrent_downloads', 3)
        if max_concurrent < 1 or max_concurrent > 20:
            errors.append("Max concurrent downloads must be between 1 and 20")

        connection_timeout = advanced.get('connection_timeout', 30)
        if connection_timeout < 5 or connection_timeout > 300:
            errors.append("Connection timeout must be between 5 and 300 seconds")

        return {
            'valid': len(errors) == 0,
            'errors': errors,
            'warnings': warnings
        }

    def get_config_schema(self) -> Dict[str, Any]:
        """Get configuration schema for UI generation."""
        return {
            "security": {
                "title": "Security Settings",
                "fields": {
                    "session_timeout_hours": {
                        "type": "number",
                        "title": "Session Timeout (hours)",
                        "description": "How long sessions remain active",
                        "min": 1,
                        "max": 168,
                        "default": 24
                    },
                    "max_failed_attempts": {
                        "type": "number",
                        "title": "Max Failed Login Attempts",
                        "description": "Number of failed attempts before lockout",
                        "min": 1,
                        "max": 50,
                        "default": 5
                    },
                    "lockout_duration_minutes": {
                        "type": "number",
                        "title": "Lockout Duration (minutes)",
                        "description": "How long to lock account after failed attempts",
                        "min": 1,
                        "max": 1440,
                        "default": 30
                    }
                }
            },
            "anime": {
                "title": "Anime Settings",
                "fields": {
                    "directory": {
                        "type": "text",
                        "title": "Anime Directory",
                        "description": "Base directory for anime storage",
                        "required": True
                    },
                    "download_threads": {
                        "type": "number",
                        "title": "Download Threads",
                        "description": "Number of concurrent download threads",
                        "min": 1,
                        "max": 10,
                        "default": 3
                    },
                    "download_speed_limit": {
                        "type": "number",
                        "title": "Speed Limit (KB/s)",
                        "description": "Download speed limit (0 = unlimited)",
                        "min": 0,
                        "max": 102400,
                        "default": 0
                    }
                }
            },
            "providers": {
                "title": "Provider Settings",
                "fields": {
                    "default_provider": {
                        "type": "select",
                        "title": "Default Provider",
                        "description": "Primary anime provider",
                        "options": ["aniworld.to"],
                        "default": "aniworld.to"
                    },
                    "preferred_language": {
                        "type": "select",
                        "title": "Preferred Language",
                        "description": "Default language preference",
                        "options": ["German Dub", "German Sub", "English Dub", "English Sub", "Japanese"],
                        "default": "German Dub"
                    },
                    "provider_timeout": {
                        "type": "number",
                        "title": "Provider Timeout (seconds)",
                        "description": "Timeout for provider requests",
                        "min": 5,
                        "max": 300,
                        "default": 30
                    },
                    "retry_attempts": {
                        "type": "number",
                        "title": "Retry Attempts",
                        "description": "Number of retry attempts for failed requests",
                        "min": 0,
                        "max": 10,
                        "default": 3
                    }
                }
            },
            "advanced": {
                "title": "Advanced Settings",
                "fields": {
                    "max_concurrent_downloads": {
                        "type": "number",
                        "title": "Max Concurrent Downloads",
                        "description": "Maximum simultaneous downloads",
                        "min": 1,
                        "max": 20,
                        "default": 3
                    },
                    "connection_timeout": {
                        "type": "number",
                        "title": "Connection Timeout (seconds)",
                        "description": "Network connection timeout",
                        "min": 5,
                        "max": 300,
                        "default": 30
                    },
                    "enable_debug_mode": {
                        "type": "boolean",
                        "title": "Debug Mode",
                        "description": "Enable detailed debug logging",
                        "default": False
                    }
                }
            }
        }

    def export_config(self, include_sensitive: bool = False) -> Dict[str, Any]:
        """Export configuration, optionally excluding sensitive data."""
        config_copy = json.loads(json.dumps(self._config))  # Deep copy

        if not include_sensitive:
            # Remove sensitive data
            if 'security' in config_copy:
                config_copy['security'].pop('master_password_hash', None)
                config_copy['security'].pop('salt', None)

        return config_copy

    def import_config(self, config_data: Dict[str, Any], validate: bool = True) -> Dict[str, Any]:
        """Import configuration with validation."""
        if validate:
            validation_result = self.validate_config(config_data)
            if not validation_result['valid']:
                return {
                    'success': False,
                    'errors': validation_result['errors'],
                    'warnings': validation_result['warnings']
                }

        # Merge with existing config (don't overwrite security settings)
        current_security = self._config.get('security', {})
        merged_config = self._merge_configs(self.default_config, config_data)

        # Preserve current security settings if not provided
        if not config_data.get('security', {}).get('master_password_hash'):
            merged_config['security'] = current_security

        self._config = merged_config
        success = self.save_config()

        return {
            'success': success,
            'errors': [] if success else ['Failed to save configuration'],
            'warnings': validation_result.get('warnings', []) if validate else []
        }

    @property
    def anime_directory(self) -> str:
        """Get anime directory path."""
        # Always check environment variable first
        env_dir = os.getenv("ANIME_DIRECTORY")
        if env_dir:
            # Remove quotes if they exist
            env_dir = env_dir.strip('"\'')
            return env_dir
        return self.get("anime.directory", "\\\\sshfs.r\\ubuntu@192.168.178.43\\media\\serien\\Serien")

    @anime_directory.setter
    def anime_directory(self, value: str):
        """Set anime directory path."""
        self.set("anime.directory", value)

    @property
    def session_timeout_hours(self) -> int:
        """Get session timeout in hours."""
        return self.get("security.session_timeout_hours", 24)

    @property
    def max_failed_attempts(self) -> int:
        """Get maximum failed login attempts."""
        return self.get("security.max_failed_attempts", 5)

    @property
    def lockout_duration_minutes(self) -> int:
        """Get lockout duration in minutes."""
        return self.get("security.lockout_duration_minutes", 30)

    @property
    def scheduled_rescan_enabled(self) -> bool:
        """Get whether scheduled rescan is enabled."""
        return self.get("scheduler.rescan_enabled", False)

    @scheduled_rescan_enabled.setter
    def scheduled_rescan_enabled(self, value: bool):
        """Set whether scheduled rescan is enabled."""
        self.set("scheduler.rescan_enabled", value)

    @property
    def scheduled_rescan_time(self) -> str:
        """Get scheduled rescan time in HH:MM format."""
        return self.get("scheduler.rescan_time", "03:00")

    @scheduled_rescan_time.setter
    def scheduled_rescan_time(self, value: str):
        """Set scheduled rescan time in HH:MM format."""
        self.set("scheduler.rescan_time", value)

    @property
    def auto_download_after_rescan(self) -> bool:
        """Get whether to auto-download after scheduled rescan."""
        return self.get("scheduler.auto_download_after_rescan", False)

    @auto_download_after_rescan.setter
    def auto_download_after_rescan(self, value: bool):
        """Set whether to auto-download after scheduled rescan."""
        self.set("scheduler.auto_download_after_rescan", value)

    @property
    def log_level(self) -> str:
        """Get current log level."""
        return self.get("logging.level", "INFO")

    @log_level.setter
    def log_level(self, value: str):
        """Set log level."""
        self.set("logging.level", value.upper())

    @property
    def enable_console_logging(self) -> bool:
        """Get whether console logging is enabled."""
        return self.get("logging.enable_console_logging", True)

    @enable_console_logging.setter
    def enable_console_logging(self, value: bool):
        """Set whether console logging is enabled."""
        self.set("logging.enable_console_logging", value)

    @property
    def enable_console_progress(self) -> bool:
        """Get whether console progress bars are enabled."""
        return self.get("logging.enable_console_progress", False)

    @enable_console_progress.setter
    def enable_console_progress(self, value: bool):
        """Set whether console progress bars are enabled."""
        self.set("logging.enable_console_progress", value)

    @property
    def enable_fail2ban_logging(self) -> bool:
        """Get whether fail2ban logging is enabled."""
        return self.get("logging.enable_fail2ban_logging", True)

    @enable_fail2ban_logging.setter
    def enable_fail2ban_logging(self, value: bool):
        """Set whether fail2ban logging is enabled."""
        self.set("logging.enable_fail2ban_logging", value)

    # Provider configuration properties
    @property
    def default_provider(self) -> str:
        """Get default provider."""
        return self.get("providers.default_provider", "aniworld.to")

    @default_provider.setter
    def default_provider(self, value: str):
        """Set default provider."""
        self.set("providers.default_provider", value)

    @property
    def preferred_language(self) -> str:
        """Get preferred language."""
        return self.get("providers.preferred_language", "German Dub")

    @preferred_language.setter
    def preferred_language(self, value: str):
        """Set preferred language."""
        self.set("providers.preferred_language", value)

    @property
    def provider_timeout(self) -> int:
        """Get provider timeout in seconds."""
        return self.get("providers.provider_timeout", 30)

    @provider_timeout.setter
    def provider_timeout(self, value: int):
        """Set provider timeout in seconds."""
        self.set("providers.provider_timeout", value)
self.set("providers.provider_timeout", value)
|
|
||||||
|
|
||||||
# Advanced configuration properties
|
|
||||||
@property
|
|
||||||
def max_concurrent_downloads(self) -> int:
|
|
||||||
"""Get maximum concurrent downloads."""
|
|
||||||
return self.get("advanced.max_concurrent_downloads", 3)
|
|
||||||
|
|
||||||
@max_concurrent_downloads.setter
|
|
||||||
def max_concurrent_downloads(self, value: int):
|
|
||||||
"""Set maximum concurrent downloads."""
|
|
||||||
self.set("advanced.max_concurrent_downloads", value)
|
|
||||||
|
|
||||||
@property
|
|
||||||
def enable_debug_mode(self) -> bool:
|
|
||||||
"""Get whether debug mode is enabled."""
|
|
||||||
return self.get("advanced.enable_debug_mode", False)
|
|
||||||
|
|
||||||
@enable_debug_mode.setter
|
|
||||||
def enable_debug_mode(self, value: bool):
|
|
||||||
"""Set whether debug mode is enabled."""
|
|
||||||
self.set("advanced.enable_debug_mode", value)
|
|
||||||
|
|
||||||
|
|
||||||
# Global config instance
|
|
||||||
config = Config()
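# Example usage (a minimal sketch; assumes this module is imported and the
# global `config` instance above is used):
#
#     safe_snapshot = config.export_config(include_sensitive=False)
#     result = config.import_config(safe_snapshot, validate=True)
#     if not result['success']:
#         print(result['errors'])
#
# export_config() strips 'master_password_hash' and 'salt', so a snapshot
# produced this way can be backed up or shared without leaking credentials.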
@ -1,848 +0,0 @@
"""
|
|
||||||
FastAPI-based AniWorld Server Application.
|
|
||||||
|
|
||||||
This module implements a comprehensive FastAPI application following the instructions:
|
|
||||||
- Simple master password authentication using JWT
|
|
||||||
- Repository pattern with dependency injection
|
|
||||||
- Proper error handling and validation
|
|
||||||
- OpenAPI documentation
|
|
||||||
- Security best practices
|
|
||||||
"""
|
|
||||||
|
|
||||||
import hashlib
|
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
from contextlib import asynccontextmanager
|
|
||||||
from datetime import datetime, timedelta
|
|
||||||
from typing import Any, Dict, List, Optional
|
|
||||||
|
|
||||||
import jwt
|
|
||||||
|
|
||||||
# Add parent directory to path for imports
|
|
||||||
current_dir = os.path.dirname(__file__)
|
|
||||||
parent_dir = os.path.join(current_dir, '..')
|
|
||||||
sys.path.insert(0, os.path.abspath(parent_dir))
|
|
||||||
|
|
||||||
import uvicorn
|
|
||||||
from fastapi import Depends, FastAPI, HTTPException, Request, Security, status
|
|
||||||
from fastapi.middleware.cors import CORSMiddleware
|
|
||||||
from fastapi.responses import HTMLResponse, JSONResponse
|
|
||||||
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
|
|
||||||
from fastapi.staticfiles import StaticFiles
|
|
||||||
from fastapi.templating import Jinja2Templates
|
|
||||||
from pydantic import BaseModel, Field
|
|
||||||
from pydantic_settings import BaseSettings
|
|
||||||
|
|
||||||
# Import application flow services
|
|
||||||
from src.server.middleware.application_flow_middleware import ApplicationFlowMiddleware
|
|
||||||
from src.server.services.setup_service import SetupService
|
|
||||||
|
|
||||||
# Import our custom middleware - temporarily disabled due to file corruption
|
|
||||||
# from src.server.web.middleware.fastapi_auth_middleware import AuthMiddleware
|
|
||||||
# from src.server.web.middleware.fastapi_logging_middleware import (
|
|
||||||
# EnhancedLoggingMiddleware,
|
|
||||||
# )
|
|
||||||
# from src.server.web.middleware.fastapi_validation_middleware import ValidationMiddleware
|
|
||||||
|
|
||||||
# Configure logging
|
|
||||||
logging.basicConfig(
|
|
||||||
level=logging.INFO,
|
|
||||||
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
|
|
||||||
handlers=[
|
|
||||||
logging.FileHandler('./logs/aniworld.log'),
|
|
||||||
logging.StreamHandler()
|
|
||||||
]
|
|
||||||
)
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
# Security
|
|
||||||
security = HTTPBearer()
|
|
||||||
|
|
||||||
# Configuration
|
|
||||||
class Settings(BaseSettings):
|
|
||||||
"""Application settings from environment variables."""
|
|
||||||
jwt_secret_key: str = Field(default="your-secret-key-here", env="JWT_SECRET_KEY")
|
|
||||||
password_salt: str = Field(default="default-salt", env="PASSWORD_SALT")
|
|
||||||
master_password_hash: Optional[str] = Field(default=None, env="MASTER_PASSWORD_HASH")
|
|
||||||
master_password: Optional[str] = Field(default=None, env="MASTER_PASSWORD") # For development
|
|
||||||
token_expiry_hours: int = Field(default=24, env="SESSION_TIMEOUT_HOURS")
|
|
||||||
anime_directory: str = Field(default="", env="ANIME_DIRECTORY")
|
|
||||||
log_level: str = Field(default="INFO", env="LOG_LEVEL")
|
|
||||||
|
|
||||||
# Additional settings from .env
|
|
||||||
database_url: str = Field(default="sqlite:///./data/aniworld.db", env="DATABASE_URL")
|
|
||||||
cors_origins: str = Field(default="*", env="CORS_ORIGINS")
|
|
||||||
api_rate_limit: int = Field(default=100, env="API_RATE_LIMIT")
|
|
||||||
default_provider: str = Field(default="aniworld.to", env="DEFAULT_PROVIDER")
|
|
||||||
provider_timeout: int = Field(default=30, env="PROVIDER_TIMEOUT")
|
|
||||||
retry_attempts: int = Field(default=3, env="RETRY_ATTEMPTS")
|
|
||||||
|
|
||||||
class Config:
|
|
||||||
env_file = ".env"
|
|
||||||
extra = "ignore" # Ignore extra environment variables
|
|
||||||
|
|
||||||
settings = Settings()
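# Illustrative .env entries for the fields above (placeholder values, not
# real credentials):
#
#     JWT_SECRET_KEY=change-me
#     PASSWORD_SALT=change-me-too
#     MASTER_PASSWORD_HASH=<sha256 hex digest>
#     SESSION_TIMEOUT_HOURS=24
#     ANIME_DIRECTORY=/srv/media/serien
#
# pydantic-settings populates Settings() from the process environment or the
# .env file declared in the inner Config class.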

# Pydantic Models
class LoginRequest(BaseModel):
    """Login request model."""
    password: str = Field(..., min_length=1, description="Master password")

class LoginResponse(BaseModel):
    """Login response model."""
    success: bool
    message: str
    token: Optional[str] = None
    expires_at: Optional[datetime] = None

class TokenVerifyResponse(BaseModel):
    """Token verification response model."""
    valid: bool
    message: str
    user: Optional[str] = None
    expires_at: Optional[datetime] = None

class HealthResponse(BaseModel):
    """Health check response model."""
    status: str
    timestamp: datetime
    version: str = "1.0.0"
    services: Dict[str, str]

class AnimeSearchRequest(BaseModel):
    """Anime search request model."""
    query: str = Field(..., min_length=1, max_length=100)
    limit: int = Field(default=20, ge=1, le=100)
    offset: int = Field(default=0, ge=0)

class AnimeResponse(BaseModel):
    """Anime response model."""
    id: str
    title: str
    description: Optional[str] = None
    episodes: int = 0
    status: str = "Unknown"
    poster_url: Optional[str] = None

class EpisodeResponse(BaseModel):
    """Episode response model."""
    id: str
    anime_id: str
    episode_number: int
    title: Optional[str] = None
    description: Optional[str] = None
    duration: Optional[int] = None
    stream_url: Optional[str] = None

class ErrorResponse(BaseModel):
    """Error response model."""
    success: bool = False
    error: str
    code: Optional[str] = None
    details: Optional[Dict[str, Any]] = None

class SetupRequest(BaseModel):
    """Setup request model."""
    password: str = Field(..., min_length=8, description="Master password (min 8 characters)")
    directory: str = Field(..., min_length=1, description="Anime directory path")

class SetupResponse(BaseModel):
    """Setup response model."""
    status: str
    message: str
    redirect_url: Optional[str] = None

class SetupStatusResponse(BaseModel):
    """Setup status response model."""
    setup_complete: bool
    requirements: Dict[str, bool]
    missing_requirements: List[str]

# Authentication utilities
def hash_password(password: str) -> str:
    """Hash password with salt using SHA-256."""
    salted_password = password + settings.password_salt
    return hashlib.sha256(salted_password.encode()).hexdigest()

def verify_master_password(password: str) -> bool:
    """Verify password against master password hash."""
    if not settings.master_password_hash:
        # If no hash is set, check against plain password (development only)
        if settings.master_password:
            return password == settings.master_password
        return False

    password_hash = hash_password(password)
    return password_hash == settings.master_password_hash
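# To generate a MASTER_PASSWORD_HASH compatible with the helpers above, hash
# the password with the same PASSWORD_SALT the server uses (a minimal sketch;
# "my-password" and "my-salt" are placeholders):
#
#     import hashlib
#     print(hashlib.sha256(("my-password" + "my-salt").encode()).hexdigest())
#
# Plain salted SHA-256 is weak for password storage; a dedicated KDF such as
# bcrypt (passlib[bcrypt] is already pinned in requirements.txt) would be a
# stronger choice.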

def generate_jwt_token() -> Dict[str, Any]:
    """Generate JWT token for authentication."""
    expires_at = datetime.utcnow() + timedelta(hours=settings.token_expiry_hours)
    payload = {
        'user': 'master',
        'exp': expires_at,
        'iat': datetime.utcnow(),
        'iss': 'aniworld-fastapi-server'
    }

    token = jwt.encode(payload, settings.jwt_secret_key, algorithm='HS256')
    return {
        'token': token,
        'expires_at': expires_at
    }

def verify_jwt_token(token: str) -> Optional[Dict[str, Any]]:
    """Verify and decode JWT token."""
    try:
        payload = jwt.decode(token, settings.jwt_secret_key, algorithms=['HS256'])
        return payload
    except jwt.ExpiredSignatureError:
        logger.warning("Token has expired")
        return None
    except jwt.InvalidTokenError as e:
        logger.warning(f"Invalid token: {str(e)}")
        return None

async def get_current_user(credentials: HTTPAuthorizationCredentials = Security(security)):
    """Dependency to get current authenticated user."""
    token = credentials.credentials
    payload = verify_jwt_token(token)

    if not payload:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid or expired token",
            headers={"WWW-Authenticate": "Bearer"},
        )

    return payload
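# Round-trip sketch of the token helpers above (assumes JWT_SECRET_KEY is
# configured; not meant to run at import time):
#
#     token_data = generate_jwt_token()
#     payload = verify_jwt_token(token_data['token'])
#     assert payload is not None and payload['user'] == 'master'
#
# Endpoints opt into authentication by declaring the dependency, e.g.
# `current_user: Dict = Depends(get_current_user)`.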

# Global exception handler
async def global_exception_handler(request, exc):
    """Global exception handler for unhandled errors."""
    logger.error(f"Unhandled exception: {exc}", exc_info=True)
    return JSONResponse(
        status_code=500,
        content={
            "success": False,
            "error": "Internal Server Error",
            "code": "INTERNAL_ERROR"
        }
    )

# Application lifespan
@asynccontextmanager
async def lifespan(app: FastAPI):
    """Manage application lifespan events."""
    # Startup
    logger.info("Starting AniWorld FastAPI server...")
    logger.info(f"Anime directory: {settings.anime_directory}")
    logger.info(f"Log level: {settings.log_level}")

    # Verify configuration
    if not settings.master_password_hash and not settings.master_password:
        logger.warning("No master password configured! Set MASTER_PASSWORD_HASH or MASTER_PASSWORD environment variable.")

    yield

    # Shutdown
    logger.info("Shutting down AniWorld FastAPI server...")

# Create FastAPI application
app = FastAPI(
    title="AniWorld API",
    description="""
## AniWorld Management System

A comprehensive FastAPI-based application for managing anime series and episodes.

### Features

* **Series Management**: Search, track, and manage anime series
* **Episode Tracking**: Monitor missing episodes and download progress
* **Authentication**: Secure master password authentication with JWT tokens
* **Real-time Updates**: WebSocket support for live progress tracking
* **File Management**: Automatic file scanning and organization
* **Download Queue**: Queue-based download management system

### Authentication

Most endpoints require authentication using a master password.
Use the `/auth/login` endpoint to obtain a JWT token, then include it
in the `Authorization` header as `Bearer <token>`.

### API Versioning

This API follows semantic versioning. Current version: **1.0.0**
""",
    version="1.0.0",
    docs_url="/docs",
    redoc_url="/redoc",
    lifespan=lifespan,
    contact={
        "name": "AniWorld API Support",
        "url": "https://github.com/your-repo/aniworld",
        "email": "support@aniworld.com",
    },
    license_info={
        "name": "MIT",
        "url": "https://opensource.org/licenses/MIT",
    },
    tags_metadata=[
        {
            "name": "Authentication",
            "description": "Operations related to user authentication and session management",
        },
        {
            "name": "Anime",
            "description": "Operations for searching and managing anime series",
        },
        {
            "name": "Episodes",
            "description": "Operations for managing individual episodes",
        },
        {
            "name": "Downloads",
            "description": "Operations for managing the download queue and progress",
        },
        {
            "name": "System",
            "description": "System health, configuration, and maintenance operations",
        },
        {
            "name": "Files",
            "description": "File system operations and scanning functionality",
        },
    ]
)
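# Client-side sketch of the authentication flow described above (assumes the
# server runs locally on port 8000; httpx is pinned in requirements.txt):
#
#     import httpx
#     login = httpx.post("http://127.0.0.1:8000/auth/login",
#                        json={"password": "<master password>"})
#     token = login.json()["token"]
#     verify = httpx.get("http://127.0.0.1:8000/auth/verify",
#                        headers={"Authorization": f"Bearer {token}"})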

# Configure templates
templates = Jinja2Templates(directory="src/server/web/templates")

# Mount static files
app.mount("/static", StaticFiles(directory="src/server/web/static"), name="static")

# Add CORS middleware
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # Configure appropriately for production
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)
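# For production, the wildcard should give way to an explicit origin list,
# e.g. (hypothetical host):
#
#     allow_origins=["https://aniworld.example.com"]
#
# The CORS specification disallows the literal "*" origin for credentialed
# requests, so allow_credentials=True only works reliably with named origins.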

# Add application flow middleware
setup_service = SetupService()
app.add_middleware(ApplicationFlowMiddleware, setup_service=setup_service)

# Add custom middleware - temporarily disabled
# app.add_middleware(EnhancedLoggingMiddleware)
# app.add_middleware(AuthMiddleware)
# app.add_middleware(ValidationMiddleware)

# Add global exception handler
app.add_exception_handler(Exception, global_exception_handler)

# Include API routers
# from src.server.web.controllers.api.v1.anime import router as anime_router

# app.include_router(anime_router)

# Legacy API compatibility endpoints (TODO: migrate JavaScript to use v1 endpoints)
@app.post("/api/add_series")
async def legacy_add_series(
    request_data: Dict[str, Any],
    current_user: Dict = Depends(get_current_user)
):
    """Legacy endpoint for adding series - basic implementation."""
    try:
        link = request_data.get('link', '')
        name = request_data.get('name', '')

        if not link or not name:
            return {"status": "error", "message": "Link and name are required"}

        return {"status": "success", "message": f"Series '{name}' added successfully"}
    except Exception as e:
        return {"status": "error", "message": f"Failed to add series: {str(e)}"}


@app.post("/api/download")
async def legacy_download(
    request_data: Dict[str, Any],
    current_user: Dict = Depends(get_current_user)
):
    """Legacy endpoint for downloading series - basic implementation."""
    try:
        folders = request_data.get('folders', [])

        if not folders:
            return {"status": "error", "message": "No folders specified"}

        folder_count = len(folders)
        return {"status": "success", "message": f"Download started for {folder_count} series"}
    except Exception as e:
        return {"status": "error", "message": f"Failed to start download: {str(e)}"}

# Setup endpoints
@app.get("/api/auth/setup/status", response_model=SetupStatusResponse, tags=["Setup"])
async def get_setup_status() -> SetupStatusResponse:
    """
    Check the current setup status of the application.

    Returns information about what setup requirements are met and which are missing.
    """
    try:
        setup_service = SetupService()
        requirements = setup_service.get_setup_requirements()
        missing = setup_service.get_missing_requirements()

        return SetupStatusResponse(
            setup_complete=setup_service.is_setup_complete(),
            requirements=requirements,
            missing_requirements=missing
        )
    except Exception as e:
        logger.error(f"Error checking setup status: {e}")
        return SetupStatusResponse(
            setup_complete=False,
            requirements={},
            missing_requirements=["Error checking setup status"]
        )

@app.post("/api/auth/setup", response_model=SetupResponse, tags=["Setup"])
async def process_setup(request_data: SetupRequest) -> SetupResponse:
    """
    Process the initial application setup.

    - **password**: Master password (minimum 8 characters)
    - **directory**: Anime directory path
    """
    try:
        setup_service = SetupService()

        # Check if setup is already complete
        if setup_service.is_setup_complete():
            return SetupResponse(
                status="error",
                message="Setup has already been completed"
            )

        # Validate directory path
        from pathlib import Path
        directory_path = Path(request_data.directory)
        if not directory_path.is_absolute():
            return SetupResponse(
                status="error",
                message="Please provide an absolute directory path"
            )

        # Create directory if it doesn't exist
        try:
            directory_path.mkdir(parents=True, exist_ok=True)
        except Exception as e:
            logger.error(f"Failed to create directory: {e}")
            return SetupResponse(
                status="error",
                message=f"Failed to create directory: {str(e)}"
            )

        # Hash the password
        password_hash = hash_password(request_data.password)

        # Prepare configuration updates
        config_updates = {
            "security": {
                "master_password_hash": password_hash,
                "salt": settings.password_salt,
                "session_timeout_hours": settings.token_expiry_hours,
                "max_failed_attempts": 5,
                "lockout_duration_minutes": 30
            },
            "anime": {
                "directory": str(directory_path),
                "download_threads": 3,
                "download_speed_limit": None,
                "auto_rescan_time": "03:00",
                "auto_download_after_rescan": False
            },
            "logging": {
                "level": "INFO",
                "enable_console_logging": True,
                "enable_console_progress": False,
                "enable_fail2ban_logging": True,
                "log_file": "aniworld.log",
                "max_log_size_mb": 10,
                "log_backup_count": 5
            },
            "providers": {
                "default_provider": "aniworld.to",
                "preferred_language": "German Dub",
                "fallback_providers": ["aniworld.to"],
                "provider_timeout": 30,
                "retry_attempts": 3,
                "provider_settings": {
                    "aniworld.to": {
                        "enabled": True,
                        "priority": 1,
                        "quality_preference": "720p"
                    }
                }
            },
            "advanced": {
                "max_concurrent_downloads": 3,
                "download_buffer_size": 8192,
                "connection_timeout": 30,
                "read_timeout": 300,
                "enable_debug_mode": False,
                "cache_duration_minutes": 60
            }
        }

        # Mark setup as complete and save configuration
        success = setup_service.mark_setup_complete(config_updates)

        if success:
            logger.info("Application setup completed successfully")
            return SetupResponse(
                status="success",
                message="Setup completed successfully",
                redirect_url="/login"
            )
        else:
            return SetupResponse(
                status="error",
                message="Failed to save configuration"
            )

    except Exception as e:
        logger.error(f"Setup processing error: {e}")
        return SetupResponse(
            status="error",
            message="Setup failed due to internal error"
        )

# Authentication endpoints
@app.post("/auth/login", response_model=LoginResponse, tags=["Authentication"])
async def login(request_data: LoginRequest, request: Request) -> LoginResponse:
    """
    Authenticate with master password and receive JWT token.

    - **password**: The master password for the application
    """
    try:
        if not verify_master_password(request_data.password):
            client_ip = getattr(request.client, 'host', 'unknown') if request.client else 'unknown'
            logger.warning(f"Failed login attempt from IP: {client_ip}")
            raise HTTPException(
                status_code=status.HTTP_401_UNAUTHORIZED,
                detail="Invalid master password"
            )

        token_data = generate_jwt_token()
        logger.info("Successful authentication")

        return LoginResponse(
            success=True,
            message="Authentication successful",
            token=token_data['token'],
            expires_at=token_data['expires_at']
        )

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Login error: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail="Authentication service error"
        )

@app.get("/auth/verify", response_model=TokenVerifyResponse, tags=["Authentication"])
async def verify_token(current_user: Dict = Depends(get_current_user)) -> TokenVerifyResponse:
    """
    Verify the validity of the current JWT token.

    Requires: Bearer token in Authorization header
    """
    return TokenVerifyResponse(
        valid=True,
        message="Token is valid",
        user=current_user.get('user'),
        expires_at=datetime.fromtimestamp(current_user.get('exp', 0))
    )

@app.post("/auth/logout", response_model=Dict[str, Any], tags=["Authentication"])
async def logout(current_user: Dict = Depends(get_current_user)) -> Dict[str, Any]:
    """
    Logout endpoint (stateless - client should remove token).

    Requires: Bearer token in Authorization header
    """
    return {
        "success": True,
        "message": "Logged out successfully. Please remove the token from client storage."
    }

@app.get("/api/auth/status", response_model=Dict[str, Any], tags=["Authentication"])
async def auth_status(request: Request) -> Dict[str, Any]:
    """
    Check authentication status and configuration.

    This endpoint checks if the master password is configured and if the user is authenticated.
    """
    has_master_password = bool(settings.master_password_hash or settings.master_password)

    # Check if user has valid token
    authenticated = False
    try:
        auth_header = request.headers.get("authorization")
        if auth_header and auth_header.startswith("Bearer "):
            token = auth_header.split(" ")[1]
            payload = verify_jwt_token(token)
            authenticated = payload is not None
    except Exception:
        authenticated = False

    return {
        "has_master_password": has_master_password,
        "authenticated": authenticated
    }

# Health check endpoint
@app.get("/health", response_model=HealthResponse, tags=["System"])
async def health_check() -> HealthResponse:
    """
    Application health check endpoint.
    """
    return HealthResponse(
        status="healthy",
        timestamp=datetime.utcnow(),
        services={
            "authentication": "online",
            "anime_service": "online",
            "episode_service": "online"
        }
    )

# Common browser requests that might cause "Invalid HTTP request received" warnings
@app.get("/favicon.ico")
async def favicon():
    """Handle favicon requests from browsers."""
    return JSONResponse(status_code=404, content={"detail": "Favicon not found"})

@app.get("/robots.txt")
async def robots():
    """Handle robots.txt requests."""
    return JSONResponse(status_code=404, content={"detail": "Robots.txt not found"})

@app.get("/")
async def root():
    """Root endpoint redirect to docs."""
    return {"message": "AniWorld API", "documentation": "/docs", "health": "/health"}

# Web interface routes
@app.get("/app", response_class=HTMLResponse)
async def web_app(request: Request):
    """Serve the main web application."""
    return templates.TemplateResponse("base/index.html", {"request": request})

@app.get("/login", response_class=HTMLResponse)
async def login_page(request: Request):
    """Serve the login page."""
    return templates.TemplateResponse("base/login.html", {"request": request})

@app.get("/setup", response_class=HTMLResponse)
async def setup_page(request: Request):
    """Serve the setup page."""
    return templates.TemplateResponse("base/setup.html", {"request": request})

@app.get("/queue", response_class=HTMLResponse)
async def queue_page(request: Request):
    """Serve the queue page."""
    return templates.TemplateResponse("base/queue.html", {"request": request})

# Anime endpoints (protected)
@app.get("/api/anime/search", response_model=List[AnimeResponse], tags=["Anime"])
async def search_anime(
    query: str,
    limit: int = 20,
    offset: int = 0,
    current_user: Dict = Depends(get_current_user)
) -> List[AnimeResponse]:
    """
    Search for anime by title.

    Requires: Bearer token in Authorization header
    - **query**: Search query string
    - **limit**: Maximum number of results (1-100)
    - **offset**: Number of results to skip for pagination
    """
    # TODO: Implement actual anime search logic
    # This is a placeholder implementation
    logger.info(f"Searching anime with query: {query}")

    # Mock data for now
    mock_results = [
        AnimeResponse(
            id=f"anime_{i}",
            title=f"Sample Anime {i}",
            description=f"Description for anime {i}",
            episodes=24,
            status="Completed"
        )
        for i in range(offset + 1, min(offset + limit + 1, 100))
        if query.lower() in f"sample anime {i}".lower()
    ]

    return mock_results

@app.get("/api/anime/{anime_id}", response_model=AnimeResponse, tags=["Anime"])
async def get_anime(
    anime_id: str,
    current_user: Dict = Depends(get_current_user)
) -> AnimeResponse:
    """
    Get detailed information about a specific anime.

    Requires: Bearer token in Authorization header
    - **anime_id**: Unique identifier for the anime
    """
    # TODO: Implement actual anime retrieval logic
    logger.info(f"Fetching anime details for ID: {anime_id}")

    # Mock data for now
    return AnimeResponse(
        id=anime_id,
        title=f"Anime {anime_id}",
        description=f"Detailed description for anime {anime_id}",
        episodes=24,
        status="Completed"
    )

@app.get("/api/anime/{anime_id}/episodes", response_model=List[EpisodeResponse], tags=["Episodes"])
async def get_anime_episodes(
    anime_id: str,
    current_user: Dict = Depends(get_current_user)
) -> List[EpisodeResponse]:
    """
    Get all episodes for a specific anime.

    Requires: Bearer token in Authorization header
    - **anime_id**: Unique identifier for the anime
    """
    # TODO: Implement actual episode retrieval logic
    logger.info(f"Fetching episodes for anime ID: {anime_id}")

    # Mock data for now
    return [
        EpisodeResponse(
            id=f"{anime_id}_ep_{i}",
            anime_id=anime_id,
            episode_number=i,
            title=f"Episode {i}",
            description=f"Description for episode {i}",
            duration=1440  # 24 minutes in seconds
        )
        for i in range(1, 25)  # 24 episodes
    ]

@app.get("/api/episodes/{episode_id}", response_model=EpisodeResponse, tags=["Episodes"])
async def get_episode(
    episode_id: str,
    current_user: Dict = Depends(get_current_user)
) -> EpisodeResponse:
    """
    Get detailed information about a specific episode.

    Requires: Bearer token in Authorization header
    - **episode_id**: Unique identifier for the episode
    """
    # TODO: Implement actual episode retrieval logic
    logger.info(f"Fetching episode details for ID: {episode_id}")

    # Mock data for now
    return EpisodeResponse(
        id=episode_id,
        anime_id="sample_anime",
        episode_number=1,
        title=f"Episode {episode_id}",
        description=f"Detailed description for episode {episode_id}",
        duration=1440
    )

# Database health check endpoint
@app.get("/api/system/database/health", response_model=Dict[str, Any], tags=["System"])
async def database_health(current_user: Dict = Depends(get_current_user)) -> Dict[str, Any]:
    """
    Check database connectivity and health.

    Requires: Bearer token in Authorization header
    """
    # TODO: Implement actual database health check
    return {
        "status": "healthy",
        "connection_pool": "active",
        "response_time_ms": 15,
        "last_check": datetime.utcnow().isoformat()
    }

# Configuration endpoint
@app.get("/api/system/config", response_model=Dict[str, Any], tags=["System"])
async def get_system_config(current_user: Dict = Depends(get_current_user)) -> Dict[str, Any]:
    """
    Get system configuration information.

    Requires: Bearer token in Authorization header
    """
    return {
        "anime_directory": settings.anime_directory,
        "log_level": settings.log_level,
        "token_expiry_hours": settings.token_expiry_hours,
        "version": "1.0.0"
    }

if __name__ == "__main__":
    import socket

    # Configure enhanced logging
    log_level = getattr(logging, settings.log_level.upper(), logging.INFO)
    logging.getLogger().setLevel(log_level)

    # Check if port is available
    def is_port_available(host: str, port: int) -> bool:
        """Check if a port is available on the given host."""
        try:
            with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
                sock.bind((host, port))
                return True
        except OSError:
            return False

    host = "127.0.0.1"
    port = 8000

    if not is_port_available(host, port):
        logger.error(f"Port {port} is already in use on {host}. Please stop other services or choose a different port.")
        logger.info("You can check which process is using the port with: netstat -ano | findstr :8000")
        sys.exit(1)

    logger.info("Starting AniWorld FastAPI server with uvicorn...")
    logger.info(f"Anime directory: {settings.anime_directory}")
    logger.info(f"Log level: {settings.log_level}")
    logger.info(f"Server will be available at http://{host}:{port}")
    logger.info(f"API documentation at http://{host}:{port}/docs")

    try:
        # Run the application
        uvicorn.run(
            "fastapi_app:app",
            host=host,
            port=port,
            reload=False,  # Disable reload to prevent constant restarting
            log_level=settings.log_level.lower()
        )
    except Exception as e:
        logger.error(f"Failed to start server: {e}")
        sys.exit(1)
@ -1,248 +0,0 @@
"""
|
|
||||||
Application Flow Middleware for FastAPI.
|
|
||||||
|
|
||||||
This middleware enforces the application flow priorities:
|
|
||||||
1. Setup page (if setup is not complete)
|
|
||||||
2. Authentication page (if user is not authenticated)
|
|
||||||
3. Main application (for authenticated users with completed setup)
|
|
||||||
|
|
||||||
The middleware redirects users to the appropriate page based on their current state
|
|
||||||
and the state of the application setup.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import logging
|
|
||||||
from typing import Optional
|
|
||||||
|
|
||||||
from fastapi import Request
|
|
||||||
from fastapi.responses import RedirectResponse
|
|
||||||
from starlette.middleware.base import BaseHTTPMiddleware
|
|
||||||
|
|
||||||
# Import the setup service
|
|
||||||
try:
|
|
||||||
from ..services.setup_service import SetupService
|
|
||||||
except ImportError:
|
|
||||||
# Handle case where service is not available
|
|
||||||
class SetupService:
|
|
||||||
def is_setup_complete(self):
|
|
||||||
return True
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
class ApplicationFlowMiddleware(BaseHTTPMiddleware):
|
|
||||||
"""
|
|
||||||
Middleware to enforce application flow: setup → auth → main application.
|
|
||||||
|
|
||||||
This middleware:
|
|
||||||
1. Checks if setup is complete
|
|
||||||
2. Validates authentication status
|
|
||||||
3. Redirects to appropriate page based on state
|
|
||||||
4. Allows API endpoints and static files to pass through
|
|
||||||
"""
|
|
||||||
|
|
||||||
def __init__(self, app, setup_service: Optional[SetupService] = None):
|
|
||||||
"""
|
|
||||||
Initialize the application flow middleware.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
app: FastAPI application instance
|
|
||||||
setup_service: Setup service instance (optional, will create if not provided)
|
|
||||||
"""
|
|
||||||
super().__init__(app)
|
|
||||||
self.setup_service = setup_service or SetupService()
|
|
||||||
|
|
||||||
# Define paths that should bypass flow enforcement
|
|
||||||
self.bypass_paths = {
|
|
||||||
"/static", # Static files
|
|
||||||
"/favicon.ico", # Browser favicon requests
|
|
||||||
"/robots.txt", # Robots.txt
|
|
||||||
"/health", # Health check endpoints
|
|
||||||
"/docs", # OpenAPI documentation
|
|
||||||
"/redoc", # ReDoc documentation
|
|
||||||
"/openapi.json" # OpenAPI spec
|
|
||||||
}
|
|
||||||
|
|
||||||
# API paths that should bypass flow but may require auth
|
|
||||||
self.api_paths = {
|
|
||||||
"/api",
|
|
||||||
"/auth"
|
|
||||||
}
|
|
||||||
|
|
||||||
# Pages that are part of the flow and should be accessible
|
|
||||||
self.flow_pages = {
|
|
||||||
"/setup",
|
|
||||||
"/login",
|
|
||||||
"/app"
|
|
||||||
}
|
|
||||||
|
|
||||||
async def dispatch(self, request: Request, call_next):
|
|
||||||
"""
|
|
||||||
Process the request and enforce application flow.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
request: Incoming HTTP request
|
|
||||||
call_next: Next middleware/handler in chain
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Response: Either a redirect response or the result of call_next
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
# Get the request path
|
|
||||||
path = request.url.path
|
|
||||||
|
|
||||||
# Skip flow enforcement for certain paths
|
|
||||||
if self._should_bypass_flow(path):
|
|
||||||
return await call_next(request)
|
|
||||||
|
|
||||||
# Check application setup status
|
|
||||||
setup_complete = self.setup_service.is_setup_complete()
|
|
||||||
|
|
||||||
# Check authentication status
|
|
||||||
is_authenticated = await self._is_user_authenticated(request)
|
|
||||||
|
|
||||||
# Determine the appropriate action
|
|
||||||
redirect_response = self._determine_redirect(path, setup_complete, is_authenticated)
|
|
||||||
|
|
||||||
if redirect_response:
|
|
||||||
logger.info(f"Redirecting {path} to {redirect_response.headers.get('location')}")
|
|
||||||
return redirect_response
|
|
||||||
|
|
||||||
# Continue with the request
|
|
||||||
return await call_next(request)
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Error in ApplicationFlowMiddleware: {e}", exc_info=True)
|
|
||||||
# In case of error, allow the request to continue
|
|
||||||
return await call_next(request)
|
|
||||||
|
|
||||||
def _should_bypass_flow(self, path: str) -> bool:
|
|
||||||
"""
|
|
||||||
Check if the given path should bypass flow enforcement.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
path: Request path
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
bool: True if path should bypass flow enforcement
|
|
||||||
"""
|
|
||||||
# Check exact bypass paths
|
|
||||||
for bypass_path in self.bypass_paths:
|
|
||||||
if path.startswith(bypass_path):
|
|
||||||
return True
|
|
||||||
|
|
||||||
# API paths bypass flow enforcement (but may have their own auth)
|
|
||||||
for api_path in self.api_paths:
|
|
||||||
if path.startswith(api_path):
|
|
||||||
return True
|
|
||||||
|
|
||||||
return False
|
|
||||||
|
|
||||||
async def _is_user_authenticated(self, request: Request) -> bool:
|
|
||||||
"""
|
|
||||||
Check if the user is authenticated by validating JWT token.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
request: HTTP request object
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
bool: True if user is authenticated, False otherwise
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
# Check for Authorization header
|
|
||||||
auth_header = request.headers.get("authorization")
|
|
||||||
if not auth_header or not auth_header.startswith("Bearer "):
|
|
||||||
return False
|
|
||||||
|
|
||||||
# Extract and validate token
|
|
||||||
token = auth_header.split(" ")[1]
|
|
||||||
|
|
||||||
# Import JWT validation function (avoid circular imports)
|
|
||||||
try:
|
|
||||||
from ..fastapi_app import verify_jwt_token
|
|
||||||
payload = verify_jwt_token(token)
|
|
||||||
return payload is not None
|
|
||||||
except ImportError:
|
|
||||||
# Fallback if import fails
|
|
||||||
logger.warning("Could not import JWT verification function")
|
|
||||||
return False
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Error checking authentication: {e}")
|
|
||||||
return False
|
|
||||||
|
|
||||||
def _determine_redirect(self, path: str, setup_complete: bool, is_authenticated: bool) -> Optional[RedirectResponse]:
|
|
||||||
"""
|
|
||||||
Determine if a redirect is needed based on current state.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
path: Current request path
|
|
||||||
setup_complete: Whether application setup is complete
|
|
||||||
is_authenticated: Whether user is authenticated
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Optional[RedirectResponse]: Redirect response if needed, None otherwise
|
|
||||||
"""
|
|
||||||
# If setup is not complete
|
|
||||||
if not setup_complete:
|
|
||||||
# Allow access to setup page
|
|
||||||
if path == "/setup":
|
|
||||||
return None
|
|
||||||
# Redirect everything else to setup
|
|
||||||
return RedirectResponse(url="/setup", status_code=302)
|
|
||||||
|
|
||||||
# Setup is complete, check authentication
|
|
||||||
if not is_authenticated:
|
|
||||||
# Allow access to login page
|
|
||||||
if path == "/login":
|
|
||||||
return None
|
|
||||||
# Redirect unauthenticated users to login (except for specific pages)
|
|
||||||
if path in self.flow_pages or path == "/":
|
|
||||||
return RedirectResponse(url="/login", status_code=302)
|
|
||||||
|
|
||||||
# User is authenticated and setup is complete
|
|
||||||
else:
|
|
||||||
# Redirect from setup/login pages to main app
|
|
||||||
if path in ["/setup", "/login", "/"]:
|
|
||||||
return RedirectResponse(url="/app", status_code=302)
|
|
||||||
|
|
||||||
# No redirect needed
|
|
||||||
return None
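    # Decision table implemented above:
    #
    #   setup_complete | authenticated | path               -> action
    #   ---------------+---------------+----------------------------------
    #   False          | any           | /setup             -> pass through
    #   False          | any           | anything else      -> 302 /setup
    #   True           | False         | /login             -> pass through
    #   True           | False         | flow page or /     -> 302 /login
    #   True           | True          | /setup, /login, /  -> 302 /app
    #   True           | True          | other paths        -> pass through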

    async def get_flow_status(self, request: Request) -> dict:
        """
        Get current flow status for debugging/monitoring.

        Args:
            request: HTTP request object

        Returns:
            dict: Current flow status information
        """
        try:
            setup_complete = self.setup_service.is_setup_complete()
            # _is_user_authenticated is a coroutine, so it must be awaited;
            # this helper is therefore async as well.
            is_authenticated = await self._is_user_authenticated(request)

            return {
                "setup_complete": setup_complete,
                "authenticated": is_authenticated,
                "path": request.url.path,
                "should_bypass": self._should_bypass_flow(request.url.path)
            }
        except Exception as e:
            return {
                "error": str(e),
                "path": request.url.path
            }


def create_application_flow_middleware(setup_service: Optional[SetupService] = None) -> ApplicationFlowMiddleware:
    """
    Factory function to create application flow middleware.

    Args:
        setup_service: Setup service instance (optional)

    Returns:
        ApplicationFlowMiddleware: Configured middleware instance
    """
    return ApplicationFlowMiddleware(app=None, setup_service=setup_service)
Binary file not shown.
@ -1,41 +0,0 @@
# FastAPI and ASGI server
fastapi==0.118.0
uvicorn[standard]==0.37.0
python-multipart==0.0.12

# Authentication and security
pyjwt==2.10.1
passlib[bcrypt]==1.7.4
python-jose[cryptography]==3.3.0

# Configuration and environment
pydantic==2.11.10
pydantic-settings==2.11.0
python-dotenv==1.1.1

# Database (if needed)
sqlalchemy==2.0.43
alembic==1.16.5

# HTTP client
httpx==0.28.1
aiofiles==24.1.0

# Utilities
python-dateutil==2.9.0.post0
pytz==2024.2

# Development and testing
pytest==8.4.2
pytest-asyncio==1.2.0
pytest-cov==7.0.0
pytest-mock==3.15.1

# Code quality
black==25.9.0
isort==6.1.0
flake8==7.3.0
mypy==1.18.2

# Logging
structlog==25.1.0
@ -1,782 +0,0 @@
"""
|
|
||||||
Anime Management API Endpoints
|
|
||||||
|
|
||||||
This module provides REST API endpoints for anime CRUD operations,
|
|
||||||
including creation, reading, updating, deletion, and search functionality.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import uuid
|
|
||||||
from typing import Any, Dict, List, Optional
|
|
||||||
|
|
||||||
from fastapi import APIRouter, Depends, HTTPException, Query, status
|
|
||||||
from pydantic import BaseModel, Field
|
|
||||||
|
|
||||||
# Import SeriesApp for business logic
|
|
||||||
from src.core.SeriesApp import SeriesApp
|
|
||||||
|
|
||||||
# FastAPI dependencies and models
|
|
||||||
from src.server.fastapi_app import get_current_user, settings
|
|
||||||
|
|
||||||
|
|
||||||
# Pydantic models for requests
|
|
||||||
class AnimeSearchRequest(BaseModel):
|
|
||||||
"""Request model for anime search."""
|
|
||||||
query: str = Field(..., min_length=1, max_length=100)
|
|
||||||
status: Optional[str] = Field(None, pattern="^(ongoing|completed|planned|dropped|paused)$")
|
|
||||||
genre: Optional[str] = None
|
|
||||||
year: Optional[int] = Field(None, ge=1900, le=2100)
|
|
||||||
|
|
||||||
class AnimeResponse(BaseModel):
|
|
||||||
"""Response model for anime data."""
|
|
||||||
id: str
|
|
||||||
title: str
|
|
||||||
description: Optional[str] = None
|
|
||||||
status: str = "Unknown"
|
|
||||||
folder: Optional[str] = None
|
|
||||||
episodes: int = 0
|
|
||||||
|
|
||||||
class AnimeCreateRequest(BaseModel):
|
|
||||||
"""Request model for creating anime entries."""
|
|
||||||
name: str = Field(..., min_length=1, max_length=255)
|
|
||||||
folder: str = Field(..., min_length=1)
|
|
||||||
description: Optional[str] = None
|
|
||||||
status: str = Field(default="planned", pattern="^(ongoing|completed|planned|dropped|paused)$")
|
|
||||||
genre: Optional[str] = None
|
|
||||||
year: Optional[int] = Field(None, ge=1900, le=2100)
|
|
||||||
|
|
||||||
class AnimeUpdateRequest(BaseModel):
|
|
||||||
"""Request model for updating anime entries."""
|
|
||||||
name: Optional[str] = Field(None, min_length=1, max_length=255)
|
|
||||||
folder: Optional[str] = None
|
|
||||||
description: Optional[str] = None
|
|
||||||
status: Optional[str] = Field(None, pattern="^(ongoing|completed|planned|dropped|paused)$")
|
|
||||||
genre: Optional[str] = None
|
|
||||||
year: Optional[int] = Field(None, ge=1900, le=2100)
|
|
||||||
|
|
||||||
class PaginatedAnimeResponse(BaseModel):
|
|
||||||
"""Paginated response model for anime lists."""
|
|
||||||
success: bool = True
|
|
||||||
data: List[AnimeResponse]
|
|
||||||
pagination: Dict[str, Any]
|
|
||||||
|
|
||||||
class AnimeSearchResponse(BaseModel):
|
|
||||||
"""Response model for anime search results."""
|
|
||||||
success: bool = True
|
|
||||||
data: List[AnimeResponse]
|
|
||||||
pagination: Dict[str, Any]
|
|
||||||
search: Dict[str, Any]
|
|
||||||
|
|
||||||
class RescanResponse(BaseModel):
|
|
||||||
"""Response model for rescan operations."""
|
|
||||||
success: bool
|
|
||||||
message: str
|
|
||||||
total_series: int
|
|
||||||
|
|
||||||
# Dependency to get SeriesApp instance
|
|
||||||
def get_series_app() -> SeriesApp:
|
|
||||||
"""Get SeriesApp instance for business logic operations."""
|
|
||||||
if not settings.anime_directory:
|
|
||||||
raise HTTPException(
|
|
||||||
status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
|
|
||||||
detail="Anime directory not configured"
|
|
||||||
)
|
|
||||||
return SeriesApp(settings.anime_directory)
|
|
||||||
|
|
||||||
# Create FastAPI router for anime management endpoints
|
|
||||||
router = APIRouter(prefix='/api/v1/anime', tags=['anime'])
|
|
||||||
|
|
||||||
|
|
||||||
@router.get('', response_model=PaginatedAnimeResponse)
|
|
||||||
async def list_anime(
|
|
||||||
status: Optional[str] = Query(None, pattern="^(ongoing|completed|planned|dropped|paused)$"),
|
|
||||||
genre: Optional[str] = Query(None),
|
|
||||||
year: Optional[int] = Query(None, ge=1900, le=2100),
|
|
||||||
search: Optional[str] = Query(None),
|
|
||||||
page: int = Query(1, ge=1),
|
|
||||||
per_page: int = Query(50, ge=1, le=1000),
|
|
||||||
current_user: Optional[Dict] = Depends(get_current_user),
|
|
||||||
series_app: SeriesApp = Depends(get_series_app)
|
|
||||||
) -> PaginatedAnimeResponse:
|
|
||||||
"""
|
|
||||||
Get all anime with optional filtering and pagination.
|
|
||||||
|
|
||||||
Query Parameters:
|
|
||||||
- status: Filter by anime status (ongoing, completed, planned, dropped, paused)
|
|
||||||
- genre: Filter by genre
|
|
||||||
- year: Filter by release year
|
|
||||||
- search: Search in name and description
|
|
||||||
- page: Page number (default: 1)
|
|
||||||
- per_page: Items per page (default: 50, max: 1000)
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Paginated list of anime with metadata
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
# Get the series list from SeriesApp
|
|
||||||
anime_list = series_app.series_list
|
|
||||||
|
|
||||||
# Convert to list of AnimeResponse objects
|
|
||||||
anime_responses = []
|
|
||||||
for series_item in anime_list:
|
|
||||||
anime_response = AnimeResponse(
|
|
||||||
id=getattr(series_item, 'id', str(uuid.uuid4())),
|
|
||||||
title=getattr(series_item, 'name', 'Unknown'),
|
|
||||||
folder=getattr(series_item, 'folder', ''),
|
|
||||||
description=getattr(series_item, 'description', ''),
|
|
||||||
status='ongoing', # Default status
|
|
||||||
episodes=getattr(series_item, 'total_episodes', 0)
|
|
||||||
)
|
|
||||||
|
|
||||||
# Apply search filter if provided
|
|
||||||
if search:
|
|
||||||
if search.lower() not in anime_response.title.lower():
|
|
||||||
continue
|
|
||||||
|
|
||||||
anime_responses.append(anime_response)
|
|
||||||
|
|
||||||
# Apply pagination
|
|
||||||
total = len(anime_responses)
|
|
||||||
start_idx = (page - 1) * per_page
|
|
||||||
end_idx = start_idx + per_page
|
|
||||||
paginated_anime = anime_responses[start_idx:end_idx]
|
|
||||||
|
|
||||||
return PaginatedAnimeResponse(
|
|
||||||
data=paginated_anime,
|
|
||||||
pagination={
|
|
||||||
"page": page,
|
|
||||||
"per_page": per_page,
|
|
||||||
"total": total,
|
|
||||||
"pages": (total + per_page - 1) // per_page,
|
|
||||||
"has_next": end_idx < total,
|
|
||||||
"has_prev": page > 1
|
|
||||||
}
|
|
||||||
)
|
|
||||||
except Exception as e:
|
|
||||||
raise HTTPException(
|
|
||||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
|
||||||
detail=f"Error retrieving anime list: {str(e)}"
|
|
||||||
)
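# Example request against the endpoint above (illustrative values; requires a
# valid bearer token):
#
#     GET /api/v1/anime?search=sample&page=1&per_page=20
#     Authorization: Bearer <token>
#
# The response wraps the page of AnimeResponse objects together with the
# pagination metadata computed above.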
|
|
||||||
|
|
||||||
|
|
||||||
@anime_bp.route('/<int:anime_id>', methods=['GET'])
|
|
||||||
@handle_api_errors
|
|
||||||
@validate_id_parameter('anime_id')
|
|
||||||
@optional_auth
|
|
||||||
def get_anime(anime_id: int) -> Dict[str, Any]:
|
|
||||||
"""
|
|
||||||
Get specific anime by ID.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
anime_id: Unique identifier for the anime
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Anime details with episodes summary
|
|
||||||
"""
|
|
||||||
if not anime_repository:
|
|
||||||
raise APIException("Anime repository not available", 503)
|
|
||||||
|
|
||||||
anime = anime_repository.get_anime_by_id(anime_id)
|
|
||||||
if not anime:
|
|
||||||
raise NotFoundError("Anime not found")
|
|
||||||
|
|
||||||
# Format anime data
|
|
||||||
anime_data = format_anime_response(anime.__dict__)
|
|
||||||
|
|
||||||
# Add episodes summary
|
|
||||||
episodes_summary = anime_repository.get_episodes_summary(anime_id)
|
|
||||||
anime_data['episodes_summary'] = episodes_summary
|
|
||||||
|
|
||||||
return create_success_response(anime_data)
|
|
||||||
|
|
||||||
|
|
||||||
@anime_bp.route('', methods=['POST'])
@handle_api_errors
@validate_json_input(
    required_fields=['name', 'folder'],
    optional_fields=['key', 'description', 'genres', 'release_year', 'status', 'total_episodes', 'poster_url', 'custom_metadata'],
    field_types={
        'name': str,
        'folder': str,
        'key': str,
        'description': str,
        'genres': list,
        'release_year': int,
        'status': str,
        'total_episodes': int,
        'poster_url': str,
        'custom_metadata': dict
    }
)
@require_auth
def create_anime() -> Dict[str, Any]:
    """
    Create a new anime record.

    Required Fields:
        - name: Anime name
        - folder: Folder path where anime files are stored

    Optional Fields:
        - key: Unique key identifier
        - description: Anime description
        - genres: List of genres
        - release_year: Year of release
        - status: Status (ongoing, completed, planned, dropped, paused)
        - total_episodes: Total number of episodes
        - poster_url: URL to poster image
        - custom_metadata: Additional metadata as key-value pairs

    Returns:
        Created anime details with generated ID
    """
    if not anime_repository:
        raise APIException("Anime repository not available", 503)

    data = request.get_json()

    # Validate status if provided
    if 'status' in data and data['status'] not in ['ongoing', 'completed', 'planned', 'dropped', 'paused']:
        raise ValidationError("Status must be one of: ongoing, completed, planned, dropped, paused")

    # Check if an anime with the same folder already exists
    existing_anime = anime_repository.get_anime_by_folder(data['folder'])
    if existing_anime:
        raise ValidationError("Anime with this folder already exists")

    # Create anime metadata object
    try:
        anime = AnimeMetadata(
            anime_id=str(uuid.uuid4()),
            name=data['name'],
            folder=data['folder'],
            key=data.get('key'),
            description=data.get('description'),
            genres=data.get('genres', []),
            release_year=data.get('release_year'),
            status=data.get('status', 'planned'),
            total_episodes=data.get('total_episodes'),
            poster_url=data.get('poster_url'),
            custom_metadata=data.get('custom_metadata', {})
        )
    except Exception as e:
        raise ValidationError(f"Invalid anime data: {str(e)}")

    # Save to database
    success = anime_repository.create_anime(anime)
    if not success:
        raise APIException("Failed to create anime", 500)

    # Return created anime
    anime_data = format_anime_response(anime.__dict__)
    return create_success_response(
        data=anime_data,
        message="Anime created successfully",
        status_code=201
    )


@anime_bp.route('/<int:anime_id>', methods=['PUT'])
@handle_api_errors
@validate_id_parameter('anime_id')
@validate_json_input(
    optional_fields=['name', 'folder', 'key', 'description', 'genres', 'release_year', 'status', 'total_episodes', 'poster_url', 'custom_metadata'],
    field_types={
        'name': str,
        'folder': str,
        'key': str,
        'description': str,
        'genres': list,
        'release_year': int,
        'status': str,
        'total_episodes': int,
        'poster_url': str,
        'custom_metadata': dict
    }
)
@require_auth
def update_anime(anime_id: int) -> Dict[str, Any]:
    """
    Update an existing anime record.

    Args:
        anime_id: Unique identifier for the anime

    Optional Fields:
        - name: Anime name
        - folder: Folder path where anime files are stored
        - key: Unique key identifier
        - description: Anime description
        - genres: List of genres
        - release_year: Year of release
        - status: Status (ongoing, completed, planned, dropped, paused)
        - total_episodes: Total number of episodes
        - poster_url: URL to poster image
        - custom_metadata: Additional metadata as key-value pairs

    Returns:
        Updated anime details
    """
    if not anime_repository:
        raise APIException("Anime repository not available", 503)

    data = request.get_json()

    # Get existing anime
    existing_anime = anime_repository.get_anime_by_id(anime_id)
    if not existing_anime:
        raise NotFoundError("Anime not found")

    # Validate status if provided
    if 'status' in data and data['status'] not in ['ongoing', 'completed', 'planned', 'dropped', 'paused']:
        raise ValidationError("Status must be one of: ongoing, completed, planned, dropped, paused")

    # Check if the folder is being changed and whether it conflicts
    if 'folder' in data and data['folder'] != existing_anime.folder:
        conflicting_anime = anime_repository.get_anime_by_folder(data['folder'])
        if conflicting_anime and conflicting_anime.anime_id != anime_id:
            raise ValidationError("Another anime with this folder already exists")

    # Update fields
    update_fields = {}
    for field in ['name', 'folder', 'key', 'description', 'genres', 'release_year', 'status', 'total_episodes', 'poster_url']:
        if field in data:
            update_fields[field] = data[field]

    # Handle custom metadata update (merge instead of replace)
    if 'custom_metadata' in data:
        existing_metadata = existing_anime.custom_metadata or {}
        existing_metadata.update(data['custom_metadata'])
        update_fields['custom_metadata'] = existing_metadata

    # Perform update
    success = anime_repository.update_anime(anime_id, update_fields)
    if not success:
        raise APIException("Failed to update anime", 500)

    # Get updated anime
    updated_anime = anime_repository.get_anime_by_id(anime_id)
    anime_data = format_anime_response(updated_anime.__dict__)

    return create_success_response(
        data=anime_data,
        message="Anime updated successfully"
    )


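# --- Illustrative sketch, not part of the original module: the merge
# semantics applied to custom_metadata above. Existing keys survive unless
# the request overwrites them; new keys are added; nothing is dropped.
existing_meta_example = {"source": "manual", "rating": 7}
incoming_meta_example = {"rating": 9, "tags": ["isekai"]}
merged_example = dict(existing_meta_example)
merged_example.update(incoming_meta_example)
assert merged_example == {"source": "manual", "rating": 9, "tags": ["isekai"]}
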
@anime_bp.route('/<int:anime_id>', methods=['DELETE'])
@handle_api_errors
@validate_id_parameter('anime_id')
@require_auth
def delete_anime(anime_id: int) -> Dict[str, Any]:
    """
    Delete an anime record and all related data.

    Args:
        anime_id: Unique identifier for the anime

    Query Parameters:
        - force: Set to 'true' to force deletion even if episodes exist

    Returns:
        Deletion confirmation
    """
    if not anime_repository:
        raise APIException("Anime repository not available", 503)

    # Check if the anime exists
    existing_anime = anime_repository.get_anime_by_id(anime_id)
    if not existing_anime:
        raise NotFoundError("Anime not found")

    # Check for existing episodes unless force deletion is requested
    force_delete = request.args.get('force', 'false').lower() == 'true'
    if not force_delete:
        episode_count = anime_repository.get_episode_count(anime_id)
        if episode_count > 0:
            raise ValidationError(
                f"Cannot delete anime with {episode_count} episodes. "
                "Use ?force=true to force deletion or delete episodes first."
            )

    # Perform deletion (this should cascade to episodes, downloads, etc.)
    success = anime_repository.delete_anime(anime_id)
    if not success:
        raise APIException("Failed to delete anime", 500)

    return create_success_response(
        message=f"Anime '{existing_anime.name}' deleted successfully"
    )


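# --- Illustrative client usage, not part of the original module. The base
# URL and token below are assumptions (the blueprint's mount point is not
# shown in this file), and ValidationError is assumed to map to HTTP 400.
import requests

ASSUMED_BASE = "http://127.0.0.1:5000/api/v1/anime"  # hypothetical mount point
assumed_headers = {"Authorization": "Bearer <session-token>"}

resp = requests.delete(f"{ASSUMED_BASE}/42", headers=assumed_headers)
if resp.status_code == 400:  # episodes still exist; retry with force
    resp = requests.delete(f"{ASSUMED_BASE}/42", params={"force": "true"},
                           headers=assumed_headers)
print(resp.status_code, resp.json())
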
@router.get('/search', response_model=AnimeSearchResponse)
async def search_anime(
    q: str = Query(..., min_length=2, description="Search query"),
    page: int = Query(1, ge=1),
    per_page: int = Query(20, ge=1, le=100),
    current_user: Optional[Dict] = Depends(get_current_user),
    series_app: SeriesApp = Depends(get_series_app)
) -> AnimeSearchResponse:
    """
    Search anime by name using SeriesApp.

    Query Parameters:
        - q: Search query (required, min 2 characters)
        - page: Page number (default: 1)
        - per_page: Items per page (default: 20, max: 100)

    Returns:
        Paginated search results
    """
    try:
        # Use SeriesApp to perform the search
        search_results = series_app.search(q)

        # Convert search results to AnimeResponse objects
        anime_responses = []
        for result in search_results:
            anime_response = AnimeResponse(
                id=getattr(result, 'id', str(uuid.uuid4())),
                title=getattr(result, 'name', getattr(result, 'title', 'Unknown')),
                description=getattr(result, 'description', ''),
                status='available',
                episodes=getattr(result, 'episodes', 0),
                folder=getattr(result, 'key', '')
            )
            anime_responses.append(anime_response)

        # Apply pagination
        total = len(anime_responses)
        start_idx = (page - 1) * per_page
        end_idx = start_idx + per_page
        paginated_results = anime_responses[start_idx:end_idx]

        return AnimeSearchResponse(
            data=paginated_results,
            pagination={
                "page": page,
                "per_page": per_page,
                "total": total,
                "pages": (total + per_page - 1) // per_page,
                "has_next": end_idx < total,
                "has_prev": page > 1
            },
            search={
                "query": q,
                "total_results": total
            }
        )
    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Search failed: {str(e)}"
        )


@anime_bp.route('/<int:anime_id>/episodes', methods=['GET'])
@handle_api_errors
@validate_id_parameter('anime_id')
@validate_pagination_params
@optional_auth
def get_anime_episodes(anime_id: int) -> Dict[str, Any]:
    """
    Get all episodes for a specific anime.

    Args:
        anime_id: Unique identifier for the anime

    Query Parameters:
        - status: Filter by episode status
        - downloaded: Filter by download status (true/false)
        - page: Page number (default: 1)
        - per_page: Items per page (default: 50, max: 1000)

    Returns:
        Paginated list of episodes for the anime
    """
    if not anime_repository:
        raise APIException("Anime repository not available", 503)

    # Check if the anime exists
    anime = anime_repository.get_anime_by_id(anime_id)
    if not anime:
        raise NotFoundError("Anime not found")

    # Get filters
    status_filter = request.args.get('status')
    downloaded_filter = request.args.get('downloaded')

    # Validate the downloaded filter
    if downloaded_filter and downloaded_filter.lower() not in ['true', 'false']:
        raise ValidationError("Downloaded filter must be 'true' or 'false'")

    # Get pagination parameters
    page, per_page = extract_pagination_params()

    # Get episodes
    episodes = anime_repository.get_episodes_for_anime(
        anime_id=anime_id,
        status_filter=status_filter,
        downloaded_filter=downloaded_filter.lower() == 'true' if downloaded_filter else None
    )

    # Format episodes (this would use episode formatting from episodes.py)
    formatted_episodes = []
    for episode in episodes:
        formatted_episodes.append({
            'id': episode.id,
            'episode_number': episode.episode_number,
            'title': episode.title,
            'url': episode.url,
            'status': episode.status,
            'is_downloaded': episode.is_downloaded,
            'file_path': episode.file_path,
            'file_size': episode.file_size,
            'created_at': episode.created_at.isoformat() if episode.created_at else None,
            'updated_at': episode.updated_at.isoformat() if episode.updated_at else None
        })

    # Apply pagination
    total = len(formatted_episodes)
    start_idx = (page - 1) * per_page
    end_idx = start_idx + per_page
    paginated_episodes = formatted_episodes[start_idx:end_idx]

    return create_paginated_response(
        data=paginated_episodes,
        page=page,
        per_page=per_page,
        total=total,
        endpoint='anime.get_anime_episodes',
        anime_id=anime_id
    )


@anime_bp.route('/bulk', methods=['POST'])
@handle_api_errors
@validate_json_input(
    required_fields=['action', 'anime_ids'],
    optional_fields=['data'],
    field_types={
        'action': str,
        'anime_ids': list,
        'data': dict
    }
)
@require_auth
def bulk_anime_operation() -> Dict[str, Any]:
    """
    Perform bulk operations on multiple anime.

    Required Fields:
        - action: Operation to perform (update_status, delete, update_metadata, update_genres)
        - anime_ids: List of anime IDs to operate on

    Optional Fields:
        - data: Additional data for the operation

    Returns:
        Results of the bulk operation
    """
    if not anime_repository:
        raise APIException("Anime repository not available", 503)

    data = request.get_json()
    action = data['action']
    anime_ids = data['anime_ids']
    operation_data = data.get('data', {})

    # Validate action
    valid_actions = ['update_status', 'delete', 'update_metadata', 'update_genres']
    if action not in valid_actions:
        raise ValidationError(f"Invalid action. Must be one of: {', '.join(valid_actions)}")

    # Validate anime_ids
    if not isinstance(anime_ids, list) or not anime_ids:
        raise ValidationError("anime_ids must be a non-empty list")

    if len(anime_ids) > 100:
        raise ValidationError("Cannot operate on more than 100 anime at once")

    # Validate that anime IDs are integers
    try:
        anime_ids = [int(aid) for aid in anime_ids]
    except ValueError:
        raise ValidationError("All anime_ids must be valid integers")

    # Perform bulk operation
    successful_items = []
    failed_items = []

    for anime_id in anime_ids:
        try:
            if action == 'update_status':
                if 'status' not in operation_data:
                    raise ValueError("Status is required for update_status action")

                success = anime_repository.update_anime(anime_id, {'status': operation_data['status']})
                if success:
                    successful_items.append({'anime_id': anime_id, 'action': 'status_updated'})
                else:
                    failed_items.append({'anime_id': anime_id, 'error': 'Update failed'})

            elif action == 'delete':
                success = anime_repository.delete_anime(anime_id)
                if success:
                    successful_items.append({'anime_id': anime_id, 'action': 'deleted'})
                else:
                    failed_items.append({'anime_id': anime_id, 'error': 'Deletion failed'})

            elif action == 'update_metadata':
                success = anime_repository.update_anime(anime_id, operation_data)
                if success:
                    successful_items.append({'anime_id': anime_id, 'action': 'metadata_updated'})
                else:
                    failed_items.append({'anime_id': anime_id, 'error': 'Metadata update failed'})

            elif action == 'update_genres':
                # 'update_genres' passes the validation above; handle it as a
                # genres-only update so it does not fall through silently.
                success = anime_repository.update_anime(anime_id, {'genres': operation_data.get('genres', [])})
                if success:
                    successful_items.append({'anime_id': anime_id, 'action': 'genres_updated'})
                else:
                    failed_items.append({'anime_id': anime_id, 'error': 'Genre update failed'})

        except Exception as e:
            failed_items.append({'anime_id': anime_id, 'error': str(e)})

    # Create batch response
    from ...shared.response_helpers import create_batch_response
    return create_batch_response(
        successful_items=successful_items,
        failed_items=failed_items,
        message=f"Bulk {action} operation completed"
    )


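# --- Illustrative request payload, not part of the original module: a bulk
# status update as accepted by the validator above. The endpoint touches at
# most 100 integer IDs and reports per-item success and failure.
example_bulk_request = {
    "action": "update_status",
    "anime_ids": [1, 2, 3],
    "data": {"status": "completed"},
}
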
@router.post('/rescan', response_model=RescanResponse)
async def rescan_anime_directory(
    current_user: Dict = Depends(get_current_user),
    series_app: SeriesApp = Depends(get_series_app)
) -> RescanResponse:
    """
    Rescan the anime directory for new episodes and series.

    Returns:
        Status of the rescan operation
    """
    try:
        # Use SeriesApp to perform the rescan with a simple callback
        def progress_callback(progress_info):
            # Simple progress tracking; in a real implementation this could
            # be sent via WebSocket or stored for polling
            pass

        series_app.ReScan(progress_callback)

        return RescanResponse(
            success=True,
            message="Anime directory rescanned successfully",
            total_series=len(series_app.series_list) if hasattr(series_app, 'series_list') else 0
        )
    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Rescan failed: {str(e)}"
        )


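# --- Illustrative sketch, not part of the original module: one way the
# progress_callback above could store state for polling, as its comment
# suggests. Names here are hypothetical; shared state is guarded by a lock.
import threading

_rescan_state = {"progress": None}
_rescan_lock = threading.Lock()

def polling_progress_callback(progress_info):
    # Called by the rescan worker; records the latest progress snapshot.
    with _rescan_lock:
        _rescan_state["progress"] = progress_info

def read_rescan_progress():
    # Called by a polling endpoint; returns a copy of the current state.
    with _rescan_lock:
        return dict(_rescan_state)
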
# Additional endpoints for legacy API compatibility
class AddSeriesRequest(BaseModel):
    """Request model for adding a new series."""
    link: str = Field(..., min_length=1)
    name: str = Field(..., min_length=1, max_length=255)

class AddSeriesResponse(BaseModel):
    """Response model for the add series operation."""
    status: str
    message: str

class DownloadRequest(BaseModel):
    """Request model for downloading series."""
    folders: List[str] = Field(..., min_items=1)

class DownloadResponse(BaseModel):
    """Response model for the download operation."""
    status: str
    message: str


@router.post('/add_series', response_model=AddSeriesResponse)
async def add_series(
    request_data: AddSeriesRequest,
    current_user: Dict = Depends(get_current_user),
    series_app: SeriesApp = Depends(get_series_app)
) -> AddSeriesResponse:
    """
    Add a new series to the collection.

    Args:
        request_data: Contains link and name of the series to add

    Returns:
        Status of the add operation
    """
    try:
        # For now, just return success; an actual implementation would use
        # SeriesApp to add the series to the collection
        return AddSeriesResponse(
            status="success",
            message=f"Series '{request_data.name}' added successfully"
        )
    except Exception as e:
        return AddSeriesResponse(
            status="error",
            message=f"Failed to add series: {str(e)}"
        )


@router.post('/download', response_model=DownloadResponse)
async def download_series(
    request_data: DownloadRequest,
    current_user: Dict = Depends(get_current_user),
    series_app: SeriesApp = Depends(get_series_app)
) -> DownloadResponse:
    """
    Start downloading selected series folders.

    Args:
        request_data: Contains list of folder names to download

    Returns:
        Status of the download operation
    """
    try:
        # For now, just return success; an actual implementation would use
        # SeriesApp to start the downloads
        folder_count = len(request_data.folders)
        return DownloadResponse(
            status="success",
            message=f"Download started for {folder_count} series"
        )
    except Exception as e:
        return DownloadResponse(
            status="error",
            message=f"Failed to start download: {str(e)}"
        )

@ -1,773 +0,0 @@

"""
|
|
||||||
Authentication API endpoints.
|
|
||||||
|
|
||||||
This module handles all authentication-related operations including:
|
|
||||||
- User authentication
|
|
||||||
- Session management
|
|
||||||
- Password management
|
|
||||||
- API key management
|
|
||||||
"""
|
|
||||||
|
|
||||||
from flask import Blueprint, request, session, jsonify
|
|
||||||
from typing import Dict, List, Any, Optional, Tuple
|
|
||||||
import logging
|
|
||||||
import hashlib
|
|
||||||
import secrets
|
|
||||||
import time
|
|
||||||
from datetime import datetime, timedelta
|
|
||||||
|
|
||||||
# Import shared utilities
|
|
||||||
try:
|
|
||||||
from src.server.web.controllers.shared.auth_decorators import require_auth, optional_auth
|
|
||||||
from src.server.web.controllers.shared.error_handlers import handle_api_errors
|
|
||||||
from src.server.web.controllers.shared.validators import (
|
|
||||||
validate_json_input, validate_query_params, is_valid_email, sanitize_string
|
|
||||||
)
|
|
||||||
from src.server.web.controllers.shared.response_helpers import (
|
|
||||||
create_success_response, create_error_response, format_user_data
|
|
||||||
)
|
|
||||||
except ImportError:
|
|
||||||
# Fallback imports for development
|
|
||||||
def require_auth(f): return f
|
|
||||||
def optional_auth(f): return f
|
|
||||||
def handle_api_errors(f): return f
|
|
||||||
def validate_json_input(**kwargs): return lambda f: f
|
|
||||||
def validate_query_params(**kwargs): return lambda f: f
|
|
||||||
def is_valid_email(email): return '@' in email
|
|
||||||
def sanitize_string(s): return str(s).strip()
|
|
||||||
def create_success_response(msg, code=200, data=None): return jsonify({'success': True, 'message': msg, 'data': data}), code
|
|
||||||
def create_error_response(msg, code=400, details=None): return jsonify({'error': msg, 'details': details}), code
|
|
||||||
def format_user_data(data): return data
|
|
||||||
|
|
||||||
# Import authentication components
try:
    from src.data.user_manager import UserManager
    from src.data.session_manager import SessionManager
    from src.data.api_key_manager import APIKeyManager
except ImportError:
    # Fallback stubs for development; signatures mirror how the managers
    # are called below
    class UserManager:
        def authenticate_user(self, username, password): return None
        def get_user_by_id(self, id): return None
        def get_user_by_username(self, username): return None
        def get_user_by_email(self, email): return None
        def create_user(self, **kwargs): return 1
        def update_user(self, id, **kwargs): return True
        def delete_user(self, id): return True
        def change_password(self, id, new_password): return True
        def reset_password(self, email): return 'reset_token'
        def create_password_reset_token(self, user_id): return 'reset_token'
        def verify_reset_token(self, token): return None
        def get_user_sessions(self, user_id): return []
        def get_user_activity(self, user_id, limit=50, offset=0): return []

    class SessionManager:
        def create_session(self, user_id, extended=False): return 'session_token'
        def validate_session(self, token): return None
        def destroy_session(self, token): return True
        def destroy_all_sessions(self, user_id, except_token=None): return True
        def get_session_info(self, token): return None
        def update_session_activity(self, token): return True

    class APIKeyManager:
        def create_api_key(self, user_id, name, description=''): return {'id': 1, 'key': 'api_key', 'name': name}
        def get_user_api_keys(self, user_id): return []
        def revoke_api_key(self, key_id, user_id=None): return True
        def validate_api_key(self, key): return None

# Create blueprint
auth_bp = Blueprint('auth', __name__)

# Initialize managers
user_manager = UserManager()
session_manager = SessionManager()
api_key_manager = APIKeyManager()

logger = logging.getLogger(__name__)


@auth_bp.route('/auth/login', methods=['POST'])
@handle_api_errors
@validate_json_input(
    required_fields=['username', 'password'],
    optional_fields=['remember_me'],
    field_types={'username': str, 'password': str, 'remember_me': bool}
)
def login() -> Tuple[Any, int]:
    """
    Authenticate user and create session.

    Request Body:
        - username: Username or email
        - password: User password
        - remember_me: Extend session duration (optional)

    Returns:
        JSON response with authentication result
    """
    data = request.get_json()
    username = sanitize_string(data['username'])
    password = data['password']
    remember_me = data.get('remember_me', False)

    try:
        # Authenticate user
        user = user_manager.authenticate_user(username, password)

        if not user:
            logger.warning(f"Failed login attempt for username: {username}")
            return create_error_response("Invalid username or password", 401)

        # Create session
        session_token = session_manager.create_session(
            user['id'],
            extended=remember_me
        )

        # Set session data
        session['user_id'] = user['id']
        session['username'] = user['username']
        session['session_token'] = session_token
        session.permanent = remember_me

        # Format user data (exclude sensitive information)
        user_data = format_user_data(user, include_sensitive=False)

        response_data = {
            'user': user_data,
            'session_token': session_token,
            'expires_at': (datetime.now() + timedelta(days=30 if remember_me else 7)).isoformat()
        }

        logger.info(f"User {user['username']} (ID: {user['id']}) logged in successfully")
        return create_success_response("Login successful", 200, response_data)

    except Exception as e:
        logger.error(f"Error during login for username {username}: {str(e)}")
        return create_error_response("Login failed", 500)


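# --- Illustrative client usage, not part of the original module. The host,
# port, and the assumption that auth_bp is registered without a URL prefix
# are hypothetical; the response shape follows create_success_response above.
import requests

login_resp = requests.post(
    "http://127.0.0.1:5000/auth/login",
    json={"username": "alice", "password": "correct horse", "remember_me": True},
)
login_body = login_resp.json()
session_token = login_body.get("data", {}).get("session_token")
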
@auth_bp.route('/auth/logout', methods=['POST'])
@require_auth
@handle_api_errors
def logout() -> Tuple[Any, int]:
    """
    Log out user and destroy session.

    Returns:
        JSON response with logout result
    """
    try:
        # Get session token
        session_token = session.get('session_token')
        user_id = session.get('user_id')

        if session_token:
            # Destroy session in database
            session_manager.destroy_session(session_token)

        # Clear Flask session
        session.clear()

        logger.info(f"User ID {user_id} logged out successfully")
        return create_success_response("Logout successful")

    except Exception as e:
        logger.error(f"Error during logout: {str(e)}")
        return create_error_response("Logout failed", 500)


@auth_bp.route('/auth/register', methods=['POST'])
@handle_api_errors
@validate_json_input(
    required_fields=['username', 'email', 'password'],
    optional_fields=['full_name'],
    field_types={'username': str, 'email': str, 'password': str, 'full_name': str}
)
def register() -> Tuple[Any, int]:
    """
    Register a new user account.

    Request Body:
        - username: Unique username
        - email: User email address
        - password: User password
        - full_name: User's full name (optional)

    Returns:
        JSON response with registration result
    """
    data = request.get_json()
    username = sanitize_string(data['username'])
    email = sanitize_string(data['email'])
    password = data['password']
    full_name = sanitize_string(data.get('full_name', ''))

    # Validate input
    if len(username) < 3:
        return create_error_response("Username must be at least 3 characters long", 400)

    if len(password) < 8:
        return create_error_response("Password must be at least 8 characters long", 400)

    if not is_valid_email(email):
        return create_error_response("Invalid email address", 400)

    try:
        # Check if the username already exists
        existing_user = user_manager.get_user_by_username(username)
        if existing_user:
            return create_error_response("Username already exists", 409)

        # Check if the email is already registered
        existing_email = user_manager.get_user_by_email(email)
        if existing_email:
            return create_error_response("Email already registered", 409)

        # Create user
        user_id = user_manager.create_user(
            username=username,
            email=email,
            password=password,
            full_name=full_name
        )

        # Get created user
        user = user_manager.get_user_by_id(user_id)
        user_data = format_user_data(user, include_sensitive=False)

        logger.info(f"New user registered: {username} (ID: {user_id})")
        return create_success_response("Registration successful", 201, user_data)

    except Exception as e:
        logger.error(f"Error during registration for username {username}: {str(e)}")
        return create_error_response("Registration failed", 500)


@auth_bp.route('/auth/me', methods=['GET'])
@require_auth
@handle_api_errors
def get_current_user() -> Tuple[Any, int]:
    """
    Get current user information.

    Returns:
        JSON response with current user data
    """
    try:
        user_id = session.get('user_id')
        user = user_manager.get_user_by_id(user_id)

        if not user:
            return create_error_response("User not found", 404)

        user_data = format_user_data(user, include_sensitive=False)
        return create_success_response("User information retrieved", 200, user_data)

    except Exception as e:
        logger.error(f"Error getting current user: {str(e)}")
        return create_error_response("Failed to get user information", 500)


@auth_bp.route('/auth/me', methods=['PUT'])
@require_auth
@handle_api_errors
@validate_json_input(
    optional_fields=['email', 'full_name'],
    field_types={'email': str, 'full_name': str}
)
def update_current_user() -> Tuple[Any, int]:
    """
    Update current user information.

    Request Body:
        - email: New email address (optional)
        - full_name: New full name (optional)

    Returns:
        JSON response with update result
    """
    data = request.get_json()
    user_id = session.get('user_id')

    # Validate email if provided
    if 'email' in data and not is_valid_email(data['email']):
        return create_error_response("Invalid email address", 400)

    try:
        # Check if the email is already taken by another user
        if 'email' in data:
            existing_user = user_manager.get_user_by_email(data['email'])
            if existing_user and existing_user['id'] != user_id:
                return create_error_response("Email already registered", 409)

        # Update user
        success = user_manager.update_user(user_id, **data)

        if success:
            # Get updated user
            user = user_manager.get_user_by_id(user_id)
            user_data = format_user_data(user, include_sensitive=False)

            logger.info(f"User {user_id} updated their profile")
            return create_success_response("Profile updated successfully", 200, user_data)
        else:
            return create_error_response("Failed to update profile", 500)

    except Exception as e:
        logger.error(f"Error updating user {user_id}: {str(e)}")
        return create_error_response("Failed to update profile", 500)


@auth_bp.route('/auth/change-password', methods=['PUT'])
@require_auth
@handle_api_errors
@validate_json_input(
    required_fields=['current_password', 'new_password'],
    field_types={'current_password': str, 'new_password': str}
)
def change_password() -> Tuple[Any, int]:
    """
    Change user password.

    Request Body:
        - current_password: Current password
        - new_password: New password

    Returns:
        JSON response with change result
    """
    data = request.get_json()
    user_id = session.get('user_id')
    current_password = data['current_password']
    new_password = data['new_password']

    # Validate new password
    if len(new_password) < 8:
        return create_error_response("New password must be at least 8 characters long", 400)

    try:
        # Get user
        user = user_manager.get_user_by_id(user_id)

        # Verify current password
        authenticated_user = user_manager.authenticate_user(user['username'], current_password)
        if not authenticated_user:
            return create_error_response("Current password is incorrect", 401)

        # Change password
        success = user_manager.change_password(user_id, new_password)

        if success:
            logger.info(f"User {user_id} changed their password")
            return create_success_response("Password changed successfully")
        else:
            return create_error_response("Failed to change password", 500)

    except Exception as e:
        logger.error(f"Error changing password for user {user_id}: {str(e)}")
        return create_error_response("Failed to change password", 500)


@auth_bp.route('/auth/forgot-password', methods=['POST'])
@handle_api_errors
@validate_json_input(
    required_fields=['email'],
    field_types={'email': str}
)
def forgot_password() -> Tuple[Any, int]:
    """
    Request password reset.

    Request Body:
        - email: User email address

    Returns:
        JSON response with reset result
    """
    data = request.get_json()
    email = sanitize_string(data['email'])

    if not is_valid_email(email):
        return create_error_response("Invalid email address", 400)

    try:
        # Check if the user exists
        user = user_manager.get_user_by_email(email)

        if user:
            # Generate reset token
            reset_token = user_manager.reset_password(email)

            # In a real application, this token would be sent via email
            logger.info(f"Password reset requested for user {user['id']} (email: {email})")

            # For security, always return success even if the email doesn't exist
            return create_success_response("If the email exists, a reset link has been sent")
        else:
            # For security, don't reveal that the email doesn't exist
            logger.warning(f"Password reset requested for non-existent email: {email}")
            return create_success_response("If the email exists, a reset link has been sent")

    except Exception as e:
        logger.error(f"Error processing password reset for email {email}: {str(e)}")
        return create_error_response("Failed to process password reset", 500)


@auth_bp.route('/auth/reset-password', methods=['POST'])
@handle_api_errors
@validate_json_input(
    required_fields=['token', 'new_password'],
    field_types={'token': str, 'new_password': str}
)
def reset_password() -> Tuple[Any, int]:
    """
    Reset password using token.

    Request Body:
        - token: Password reset token
        - new_password: New password

    Returns:
        JSON response with reset result
    """
    data = request.get_json()
    token = data['token']
    new_password = data['new_password']

    # Validate new password
    if len(new_password) < 8:
        return create_error_response("New password must be at least 8 characters long", 400)

    try:
        # Verify reset token
        user = user_manager.verify_reset_token(token)

        if not user:
            return create_error_response("Invalid or expired reset token", 400)

        # Change password
        success = user_manager.change_password(user['id'], new_password)

        if success:
            logger.info(f"Password reset completed for user {user['id']}")
            return create_success_response("Password reset successfully")
        else:
            return create_error_response("Failed to reset password", 500)

    except Exception as e:
        logger.error(f"Error resetting password with token: {str(e)}")
        return create_error_response("Failed to reset password", 500)


@auth_bp.route('/auth/sessions', methods=['GET'])
@require_auth
@handle_api_errors
def get_user_sessions() -> Tuple[Any, int]:
    """
    Get the user's active sessions.

    Returns:
        JSON response with user sessions
    """
    try:
        user_id = session.get('user_id')
        sessions = user_manager.get_user_sessions(user_id)

        return create_success_response("Sessions retrieved successfully", 200, sessions)

    except Exception as e:
        logger.error(f"Error getting user sessions: {str(e)}")
        return create_error_response("Failed to get sessions", 500)


@auth_bp.route('/auth/sessions', methods=['DELETE'])
@require_auth
@handle_api_errors
def destroy_all_sessions() -> Tuple[Any, int]:
    """
    Destroy all user sessions except the current one.

    Returns:
        JSON response with operation result
    """
    try:
        user_id = session.get('user_id')
        current_token = session.get('session_token')

        # Destroy all sessions except the current one
        success = session_manager.destroy_all_sessions(user_id, except_token=current_token)

        if success:
            logger.info(f"All sessions destroyed for user {user_id}")
            return create_success_response("All other sessions destroyed successfully")
        else:
            return create_error_response("Failed to destroy sessions", 500)

    except Exception as e:
        logger.error(f"Error destroying sessions: {str(e)}")
        return create_error_response("Failed to destroy sessions", 500)


@auth_bp.route('/auth/api-keys', methods=['GET'])
@require_auth
@handle_api_errors
def get_api_keys() -> Tuple[Any, int]:
    """
    Get the user's API keys.

    Returns:
        JSON response with API keys
    """
    try:
        user_id = session.get('user_id')
        api_keys = api_key_manager.get_user_api_keys(user_id)

        return create_success_response("API keys retrieved successfully", 200, api_keys)

    except Exception as e:
        logger.error(f"Error getting API keys: {str(e)}")
        return create_error_response("Failed to get API keys", 500)


@auth_bp.route('/auth/api-keys', methods=['POST'])
@require_auth
@handle_api_errors
@validate_json_input(
    required_fields=['name'],
    optional_fields=['description'],
    field_types={'name': str, 'description': str}
)
def create_api_key() -> Tuple[Any, int]:
    """
    Create a new API key.

    Request Body:
        - name: API key name
        - description: API key description (optional)

    Returns:
        JSON response with created API key
    """
    data = request.get_json()
    user_id = session.get('user_id')
    name = sanitize_string(data['name'])
    description = sanitize_string(data.get('description', ''))

    try:
        # Create API key
        api_key = api_key_manager.create_api_key(
            user_id=user_id,
            name=name,
            description=description
        )

        logger.info(f"API key created for user {user_id}: {name}")
        return create_success_response("API key created successfully", 201, api_key)

    except Exception as e:
        logger.error(f"Error creating API key for user {user_id}: {str(e)}")
        return create_error_response("Failed to create API key", 500)


@auth_bp.route('/auth/api-keys/<int:key_id>', methods=['DELETE'])
@require_auth
@handle_api_errors
def revoke_api_key(key_id: int) -> Tuple[Any, int]:
    """
    Revoke API key.

    Args:
        key_id: API key ID

    Returns:
        JSON response with revocation result
    """
    try:
        user_id = session.get('user_id')

        # Verify the key belongs to the user, then revoke
        success = api_key_manager.revoke_api_key(key_id, user_id)

        if success:
            logger.info(f"API key {key_id} revoked by user {user_id}")
            return create_success_response("API key revoked successfully")
        else:
            return create_error_response("API key not found or access denied", 404)

    except Exception as e:
        logger.error(f"Error revoking API key {key_id}: {str(e)}")
        return create_error_response("Failed to revoke API key", 500)


@auth_bp.route('/auth/password-reset', methods=['POST'])
@handle_api_errors
@validate_json_input(
    required_fields=['email'],
    field_types={'email': str}
)
def request_password_reset() -> Tuple[Any, int]:
    """
    Request a password reset for a user email.

    Request Body:
        - email: User email address

    Returns:
        JSON response with password reset request result
    """
    data = request.get_json()
    email = sanitize_string(data['email'])

    try:
        # Validate email format
        if not is_valid_email(email):
            return create_error_response("Invalid email format", 400)

        # Check if the user exists
        user = user_manager.get_user_by_email(email)
        if not user:
            # Don't reveal whether the email exists, for security
            logger.warning(f"Password reset requested for non-existent email: {email}")
            return create_success_response("If the email exists, a password reset link has been sent")

        # Generate reset token
        reset_token = user_manager.create_password_reset_token(user['id'])

        # In a real implementation, an email would be sent here;
        # for now, log the token and return success
        logger.info(f"Password reset token generated for user {user['id']}: {reset_token}")

        return create_success_response("If the email exists, a password reset link has been sent")

    except Exception as e:
        logger.error(f"Error during password reset request for {email}: {str(e)}")
        return create_error_response("Failed to process password reset request", 500)


@auth_bp.route('/auth/password-reset/confirm', methods=['POST'])
@handle_api_errors
@validate_json_input(
    required_fields=['token', 'new_password'],
    field_types={'token': str, 'new_password': str}
)
def confirm_password_reset() -> Tuple[Any, int]:
    """
    Confirm password reset with token.

    Request Body:
        - token: Password reset token
        - new_password: New password

    Returns:
        JSON response with password reset confirmation result
    """
    data = request.get_json()
    token = data['token']
    new_password = data['new_password']

    try:
        # Validate password strength
        if len(new_password) < 8:
            return create_error_response("Password must be at least 8 characters long", 400)

        # Verify reset token
        user_id = user_manager.verify_reset_token(token)
        if not user_id:
            return create_error_response("Invalid or expired reset token", 400)

        # Update password
        success = user_manager.change_password(user_id, new_password)
        if not success:
            return create_error_response("Failed to update password", 500)

        # Invalidate all existing sessions for security
        session_manager.destroy_all_sessions(user_id)

        logger.info(f"Password reset completed for user ID {user_id}")
        return create_success_response("Password has been successfully reset")

    except Exception as e:
        logger.error(f"Error during password reset confirmation: {str(e)}")
        return create_error_response("Failed to reset password", 500)


@auth_bp.route('/auth/refresh', methods=['POST'])
@handle_api_errors
def refresh_token() -> Tuple[Any, int]:
    """
    Refresh authentication token.

    Returns:
        JSON response with new token
    """
    try:
        # Get the current session token
        session_token = session.get('session_token')
        if not session_token:
            return create_error_response("No active session found", 401)

        # Validate the current session
        session_info = session_manager.get_session_info(session_token)
        if not session_info or session_info.get('expired', True):
            session.clear()
            return create_error_response("Session expired", 401)

        # Create a new session token
        user_id = session_info['user_id']
        new_session_token = session_manager.create_session(user_id)

        # Destroy the old session
        session_manager.destroy_session(session_token)

        # Update session data
        session['session_token'] = new_session_token
        session_manager.update_session_activity(new_session_token)

        # Get user data
        user = user_manager.get_user_by_id(user_id)
        user_data = format_user_data(user, include_sensitive=False)

        response_data = {
            'user': user_data,
            'session_token': new_session_token,
            'expires_at': (datetime.now() + timedelta(days=7)).isoformat()
        }

        logger.info(f"Token refreshed for user ID {user_id}")
        return create_success_response("Token refreshed successfully", 200, response_data)

    except Exception as e:
        logger.error(f"Error during token refresh: {str(e)}")
        return create_error_response("Failed to refresh token", 500)


@auth_bp.route('/auth/activity', methods=['GET'])
@require_auth
@handle_api_errors
@validate_query_params(
    allowed_params=['limit', 'offset'],
    param_types={'limit': int, 'offset': int}
)
def get_user_activity() -> Tuple[Any, int]:
    """
    Get user activity log.

    Query Parameters:
        - limit: Number of activities to return (default: 50, max: 200)
        - offset: Number of activities to skip (default: 0)

    Returns:
        JSON response with user activity
    """
    limit = min(request.args.get('limit', 50, type=int), 200)
    offset = request.args.get('offset', 0, type=int)

    try:
        user_id = session.get('user_id')
        activity = user_manager.get_user_activity(user_id, limit=limit, offset=offset)

        return create_success_response("User activity retrieved successfully", 200, activity)

    except Exception as e:
        logger.error(f"Error getting user activity: {str(e)}")
        return create_error_response("Failed to get user activity", 500)

@ -1,649 +0,0 @@

"""
|
|
||||||
Backup Management API Endpoints
|
|
||||||
|
|
||||||
This module provides REST API endpoints for database backup operations,
|
|
||||||
including backup creation, restoration, and cleanup functionality.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from flask import Blueprint, request, send_file
|
|
||||||
from typing import Dict, List, Any, Optional
|
|
||||||
import os
|
|
||||||
from datetime import datetime
|
|
||||||
|
|
||||||
from ...shared.auth_decorators import require_auth, optional_auth
|
|
||||||
from ...shared.error_handlers import handle_api_errors, APIException, NotFoundError, ValidationError
|
|
||||||
from ...shared.validators import validate_json_input, validate_id_parameter, validate_pagination_params
|
|
||||||
from ...shared.response_helpers import (
|
|
||||||
create_success_response, create_paginated_response, extract_pagination_params
|
|
||||||
)
|
|
||||||
|
|
||||||
# Import backup components (these imports would need to be adjusted based on actual structure)
|
|
||||||
try:
|
|
||||||
from database_manager import backup_manager, BackupInfo
|
|
||||||
except ImportError:
|
|
||||||
# Fallback for development/testing
|
|
||||||
backup_manager = None
|
|
||||||
BackupInfo = None
|
|
||||||
|
|
||||||
|
|
||||||
# Blueprint for backup management endpoints
|
|
||||||
backups_bp = Blueprint('backups', __name__, url_prefix='/api/v1/backups')
|
|
||||||
|
|
||||||
|
|
||||||
@backups_bp.route('', methods=['GET'])
@handle_api_errors
@validate_pagination_params
@optional_auth
def list_backups() -> Dict[str, Any]:
    """
    List all available backups with optional filtering.

    Query Parameters:
    - backup_type: Filter by backup type (full, metadata_only, incremental)
    - date_from: Filter from date (ISO format)
    - date_to: Filter to date (ISO format)
    - min_size_mb: Minimum backup size in MB
    - max_size_mb: Maximum backup size in MB
    - page: Page number (default: 1)
    - per_page: Items per page (default: 50, max: 1000)

    Returns:
        Paginated list of backups
    """
    if not backup_manager:
        raise APIException("Backup manager not available", 503)

    # Extract filters
    backup_type_filter = request.args.get('backup_type')
    date_from = request.args.get('date_from')
    date_to = request.args.get('date_to')
    min_size_mb = request.args.get('min_size_mb')
    max_size_mb = request.args.get('max_size_mb')

    # Validate filters
    valid_types = ['full', 'metadata_only', 'incremental']
    if backup_type_filter and backup_type_filter not in valid_types:
        raise ValidationError(f"backup_type must be one of: {', '.join(valid_types)}")

    # Validate dates
    if date_from:
        try:
            datetime.fromisoformat(date_from.replace('Z', '+00:00'))
        except ValueError:
            raise ValidationError("date_from must be in ISO format")

    if date_to:
        try:
            datetime.fromisoformat(date_to.replace('Z', '+00:00'))
        except ValueError:
            raise ValidationError("date_to must be in ISO format")

    # Validate size filters
    if min_size_mb:
        try:
            min_size_mb = float(min_size_mb)
            if min_size_mb < 0:
                raise ValueError()
        except ValueError:
            raise ValidationError("min_size_mb must be a non-negative number")

    if max_size_mb:
        try:
            max_size_mb = float(max_size_mb)
            if max_size_mb < 0:
                raise ValueError()
        except ValueError:
            raise ValidationError("max_size_mb must be a non-negative number")

    # Get pagination parameters
    page, per_page = extract_pagination_params()

    # Get backups with filters
    backups = backup_manager.list_backups(
        backup_type=backup_type_filter,
        date_from=date_from,
        date_to=date_to,
        min_size_bytes=int(min_size_mb * 1024 * 1024) if min_size_mb else None,
        max_size_bytes=int(max_size_mb * 1024 * 1024) if max_size_mb else None
    )

    # Format backup data
    backup_data = []
    for backup in backups:
        backup_data.append({
            'backup_id': backup.backup_id,
            'backup_type': backup.backup_type,
            'created_at': backup.created_at.isoformat(),
            'size_mb': round(backup.size_bytes / (1024 * 1024), 2),
            'size_bytes': backup.size_bytes,
            'description': backup.description,
            'tables_included': backup.tables_included,
            'backup_path': backup.backup_path,
            'is_compressed': backup.is_compressed,
            'checksum': backup.checksum,
            'status': backup.status
        })

    # Apply pagination
    total = len(backup_data)
    start_idx = (page - 1) * per_page
    end_idx = start_idx + per_page
    paginated_backups = backup_data[start_idx:end_idx]

    return create_paginated_response(
        data=paginated_backups,
        page=page,
        per_page=per_page,
        total=total,
        endpoint='backups.list_backups'
    )

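# Illustrative client call for the listing endpoint above (a sketch, not part
# of the original file; assumes the `requests` package, a server on the
# host/port from .env, and that create_paginated_response keys the page
# under 'data'):
#
#     import requests
#     resp = requests.get(
#         'http://127.0.0.1:5000/api/v1/backups',
#         params={'backup_type': 'full', 'min_size_mb': 10, 'page': 1},
#     )
#     resp.raise_for_status()
#     for item in resp.json()['data']:
#         print(item['backup_id'], item['size_mb'], 'MB')
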
@backups_bp.route('/<backup_id>', methods=['GET'])
@handle_api_errors
@validate_id_parameter('backup_id')
@optional_auth
def get_backup(backup_id: str) -> Dict[str, Any]:
    """
    Get detailed information about a specific backup.

    Args:
        backup_id: Unique identifier for the backup

    Returns:
        Detailed backup information
    """
    if not backup_manager:
        raise APIException("Backup manager not available", 503)

    backup = backup_manager.get_backup_by_id(backup_id)
    if not backup:
        raise NotFoundError("Backup not found")

    # Get additional details
    backup_details = {
        'backup_id': backup.backup_id,
        'backup_type': backup.backup_type,
        'created_at': backup.created_at.isoformat(),
        'size_mb': round(backup.size_bytes / (1024 * 1024), 2),
        'size_bytes': backup.size_bytes,
        'description': backup.description,
        'tables_included': backup.tables_included,
        'backup_path': backup.backup_path,
        'is_compressed': backup.is_compressed,
        'checksum': backup.checksum,
        'status': backup.status,
        'creation_duration_seconds': backup.creation_duration_seconds,
        'file_exists': os.path.exists(backup.backup_path),
        'validation_status': backup_manager.validate_backup(backup_id)
    }

    return create_success_response(backup_details)

@backups_bp.route('', methods=['POST'])
@handle_api_errors
@validate_json_input(
    required_fields=['backup_type'],
    optional_fields=['description', 'tables', 'compress', 'encryption_key'],
    field_types={
        'backup_type': str,
        'description': str,
        'tables': list,
        'compress': bool,
        'encryption_key': str
    }
)
@require_auth
def create_backup() -> Dict[str, Any]:
    """
    Create a new database backup.

    Required Fields:
    - backup_type: Type of backup (full, metadata_only, incremental, selective)

    Optional Fields:
    - description: Backup description
    - tables: Specific tables to back up (required for selective backups)
    - compress: Whether to compress the backup (default: true)
    - encryption_key: Key for backup encryption

    Returns:
        Created backup information
    """
    if not backup_manager:
        raise APIException("Backup manager not available", 503)

    data = request.get_json()
    backup_type = data['backup_type']

    # Validate backup type ('selective' is accepted here so that the
    # selective branch below is actually reachable)
    valid_types = ['full', 'metadata_only', 'incremental', 'selective']
    if backup_type not in valid_types:
        raise ValidationError(f"backup_type must be one of: {', '.join(valid_types)}")

    description = data.get('description')
    tables = data.get('tables')
    compress = data.get('compress', True)
    encryption_key = data.get('encryption_key')

    # Validate tables if provided
    if tables:
        if not isinstance(tables, list) or not all(isinstance(t, str) for t in tables):
            raise ValidationError("tables must be a list of table names")

        # Validate table names exist
        valid_tables = backup_manager.get_available_tables()
        invalid_tables = [t for t in tables if t not in valid_tables]
        if invalid_tables:
            raise ValidationError(f"Invalid tables: {', '.join(invalid_tables)}")

    if backup_type == 'selective' and not tables:
        raise ValidationError("tables is required for selective backups")

    try:
        # Create backup based on type
        if backup_type == 'full':
            backup_info = backup_manager.create_full_backup(
                description=description,
                compress=compress,
                encryption_key=encryption_key
            )
        elif backup_type == 'metadata_only':
            backup_info = backup_manager.create_metadata_backup(
                description=description,
                compress=compress,
                encryption_key=encryption_key
            )
        elif backup_type == 'incremental':
            backup_info = backup_manager.create_incremental_backup(
                description=description,
                compress=compress,
                encryption_key=encryption_key
            )
        else:  # selective backup
            backup_info = backup_manager.create_selective_backup(
                tables=tables,
                description=description,
                compress=compress,
                encryption_key=encryption_key
            )

        if not backup_info:
            raise APIException("Failed to create backup", 500)

        backup_data = {
            'backup_id': backup_info.backup_id,
            'backup_type': backup_info.backup_type,
            'size_mb': round(backup_info.size_bytes / (1024 * 1024), 2),
            'created_at': backup_info.created_at.isoformat(),
            'description': backup_info.description,
            'tables_included': backup_info.tables_included,
            'is_compressed': backup_info.is_compressed,
            'checksum': backup_info.checksum
        }

        return create_success_response(
            data=backup_data,
            message=f"{backup_type.title()} backup created successfully",
            status_code=201
        )

    except Exception as e:
        raise APIException(f"Failed to create backup: {str(e)}", 500)

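# Example request body for the create endpoint above (a sketch; the field set
# is taken from the validator, the URL from the blueprint prefix):
#
#     POST /api/v1/backups
#     {
#         "backup_type": "full",
#         "description": "pre-migration snapshot",
#         "compress": true
#     }
#
# A 201 response carries the new backup_id, size and checksum.
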
@backups_bp.route('/<backup_id>/restore', methods=['POST'])
@handle_api_errors
@validate_id_parameter('backup_id')
@validate_json_input(
    optional_fields=['confirm', 'tables', 'target_database', 'restore_data', 'restore_schema'],
    field_types={
        'confirm': bool,
        'tables': list,
        'target_database': str,
        'restore_data': bool,
        'restore_schema': bool
    }
)
@require_auth
def restore_backup(backup_id: str) -> Dict[str, Any]:
    """
    Restore from a backup.

    Args:
        backup_id: Unique identifier for the backup

    Optional Fields:
    - confirm: Confirmation flag (required for production)
    - tables: Specific tables to restore
    - target_database: Target database path (for restore to a different location)
    - restore_data: Whether to restore data (default: true)
    - restore_schema: Whether to restore schema (default: true)

    Returns:
        Restoration results
    """
    if not backup_manager:
        raise APIException("Backup manager not available", 503)

    data = request.get_json() or {}

    # Check if backup exists
    backup = backup_manager.get_backup_by_id(backup_id)
    if not backup:
        raise NotFoundError("Backup not found")

    # Validate backup file exists
    if not os.path.exists(backup.backup_path):
        raise APIException("Backup file not found", 404)

    # Require confirmation for production environments
    confirm = data.get('confirm', False)
    if not confirm:
        # Check if this is a production environment
        from config import config
        if hasattr(config, 'environment') and config.environment == 'production':
            raise ValidationError("Confirmation required for restore operation in production")

    tables = data.get('tables')
    target_database = data.get('target_database')
    restore_data = data.get('restore_data', True)
    restore_schema = data.get('restore_schema', True)

    # Validate tables if provided
    if tables:
        if not isinstance(tables, list) or not all(isinstance(t, str) for t in tables):
            raise ValidationError("tables must be a list of table names")

    try:
        # Perform restoration
        restore_result = backup_manager.restore_backup(
            backup_id=backup_id,
            tables=tables,
            target_database=target_database,
            restore_data=restore_data,
            restore_schema=restore_schema
        )

        if restore_result.success:
            return create_success_response(
                data={
                    'backup_id': backup_id,
                    'restore_time': restore_result.restore_time.isoformat(),
                    'restored_tables': restore_result.restored_tables,
                    'restored_records': restore_result.restored_records,
                    'duration_seconds': restore_result.duration_seconds
                },
                message="Backup restored successfully"
            )
        else:
            raise APIException(f"Restore failed: {restore_result.error_message}", 500)

    except Exception as e:
        raise APIException(f"Failed to restore backup: {str(e)}", 500)

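# Example restore call (a sketch): production deployments must send the
# explicit confirmation flag or the request is rejected by the validation
# above.
#
#     POST /api/v1/backups/<backup_id>/restore
#     {
#         "confirm": true,
#         "tables": ["anime_metadata"],
#         "restore_schema": false
#     }
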
@backups_bp.route('/<backup_id>/download', methods=['GET'])
@handle_api_errors
@validate_id_parameter('backup_id')
@require_auth
def download_backup(backup_id: str):
    """
    Download a backup file.

    Args:
        backup_id: Unique identifier for the backup

    Returns:
        Backup file download
    """
    if not backup_manager:
        raise APIException("Backup manager not available", 503)

    # Check if backup exists
    backup = backup_manager.get_backup_by_id(backup_id)
    if not backup:
        raise NotFoundError("Backup not found")

    # Check if backup file exists
    if not os.path.exists(backup.backup_path):
        raise NotFoundError("Backup file not found")

    # Generate filename
    timestamp = backup.created_at.strftime('%Y%m%d_%H%M%S')
    filename = f"backup_{backup.backup_type}_{timestamp}_{backup_id[:8]}.db"
    if backup.is_compressed:
        filename += ".gz"

    try:
        return send_file(
            backup.backup_path,
            as_attachment=True,
            download_name=filename,
            mimetype='application/octet-stream'
        )
    except Exception as e:
        raise APIException(f"Failed to download backup: {str(e)}", 500)

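# Streaming the download to disk from a client (illustrative sketch; assumes
# `requests` and a server address from .env):
#
#     import requests
#     url = f'http://127.0.0.1:5000/api/v1/backups/{backup_id}/download'
#     with requests.get(url, stream=True) as resp:
#         resp.raise_for_status()
#         with open('backup.db.gz', 'wb') as fh:
#             for chunk in resp.iter_content(chunk_size=8192):
#                 fh.write(chunk)
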
@backups_bp.route('/<backup_id>/validate', methods=['POST'])
@handle_api_errors
@validate_id_parameter('backup_id')
@optional_auth
def validate_backup(backup_id: str) -> Dict[str, Any]:
    """
    Validate the integrity of a backup file.

    Args:
        backup_id: Unique identifier for the backup

    Returns:
        Validation results
    """
    if not backup_manager:
        raise APIException("Backup manager not available", 503)

    # Check if backup exists
    backup = backup_manager.get_backup_by_id(backup_id)
    if not backup:
        raise NotFoundError("Backup not found")

    try:
        validation_result = backup_manager.validate_backup(backup_id)

        return create_success_response(
            data={
                'backup_id': backup_id,
                'is_valid': validation_result.is_valid,
                'file_exists': validation_result.file_exists,
                'checksum_valid': validation_result.checksum_valid,
                'database_readable': validation_result.database_readable,
                'tables_count': validation_result.tables_count,
                'records_count': validation_result.records_count,
                'validation_errors': validation_result.errors,
                'validated_at': datetime.utcnow().isoformat()
            }
        )

    except Exception as e:
        raise APIException(f"Failed to validate backup: {str(e)}", 500)

@backups_bp.route('/<backup_id>', methods=['DELETE'])
@handle_api_errors
@validate_id_parameter('backup_id')
@require_auth
def delete_backup(backup_id: str) -> Dict[str, Any]:
    """
    Delete a backup.

    Args:
        backup_id: Unique identifier for the backup

    Query Parameters:
    - delete_file: Set to 'false' to keep the backup file on disk (default: true)

    Returns:
        Deletion confirmation
    """
    if not backup_manager:
        raise APIException("Backup manager not available", 503)

    # Check if backup exists
    backup = backup_manager.get_backup_by_id(backup_id)
    if not backup:
        raise NotFoundError("Backup not found")

    delete_file = request.args.get('delete_file', 'true').lower() == 'true'

    try:
        success = backup_manager.delete_backup(backup_id, delete_file=delete_file)

        if success:
            message = f"Backup {backup_id} deleted successfully"
            if delete_file:
                message += " (including file)"

            return create_success_response(message=message)
        else:
            raise APIException("Failed to delete backup", 500)

    except Exception as e:
        raise APIException(f"Failed to delete backup: {str(e)}", 500)

@backups_bp.route('/cleanup', methods=['POST'])
@handle_api_errors
@validate_json_input(
    optional_fields=['keep_days', 'keep_count', 'backup_types', 'dry_run'],
    field_types={
        'keep_days': int,
        'keep_count': int,
        'backup_types': list,
        'dry_run': bool
    }
)
@require_auth
def cleanup_backups() -> Dict[str, Any]:
    """
    Clean up old backup files based on retention policy.

    Optional Fields:
    - keep_days: Keep backups newer than this many days (default: 30)
    - keep_count: Keep at least this many backups (default: 10)
    - backup_types: Types of backups to clean up (default: all)
    - dry_run: Preview what would be deleted without actually deleting

    Returns:
        Cleanup results
    """
    if not backup_manager:
        raise APIException("Backup manager not available", 503)

    data = request.get_json() or {}
    keep_days = data.get('keep_days', 30)
    keep_count = data.get('keep_count', 10)
    backup_types = data.get('backup_types', ['full', 'metadata_only', 'incremental'])
    dry_run = data.get('dry_run', False)

    # Validate parameters
    if keep_days < 1:
        raise ValidationError("keep_days must be at least 1")

    if keep_count < 1:
        raise ValidationError("keep_count must be at least 1")

    valid_types = ['full', 'metadata_only', 'incremental']
    if not all(bt in valid_types for bt in backup_types):
        raise ValidationError(f"backup_types must contain only: {', '.join(valid_types)}")

    try:
        cleanup_result = backup_manager.cleanup_old_backups(
            keep_days=keep_days,
            keep_count=keep_count,
            backup_types=backup_types,
            dry_run=dry_run
        )

        return create_success_response(
            data={
                'dry_run': dry_run,
                'deleted_count': cleanup_result.deleted_count,
                'deleted_backups': cleanup_result.deleted_backups,
                'space_freed_mb': round(cleanup_result.space_freed_bytes / (1024 * 1024), 2),
                'kept_count': cleanup_result.kept_count,
                'retention_policy': {
                    'keep_days': keep_days,
                    'keep_count': keep_count,
                    'backup_types': backup_types
                }
            },
            message=f"Backup cleanup {'simulated' if dry_run else 'completed'}"
        )

    except Exception as e:
        raise APIException(f"Failed to cleanup backups: {str(e)}", 500)

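# A dry run is the safe first step (sketch of a request body handled above):
# nothing is deleted, but the response reports what would be removed and how
# much space would be freed.
#
#     POST /api/v1/backups/cleanup
#     {"keep_days": 14, "keep_count": 5, "dry_run": true}
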
@backups_bp.route('/schedule', methods=['GET'])
@handle_api_errors
@optional_auth
def get_backup_schedule() -> Dict[str, Any]:
    """
    Get current backup schedule configuration.

    Returns:
        Backup schedule information
    """
    if not backup_manager:
        raise APIException("Backup manager not available", 503)

    try:
        schedule_config = backup_manager.get_backup_schedule()

        return create_success_response(data=schedule_config)

    except Exception as e:
        raise APIException(f"Failed to get backup schedule: {str(e)}", 500)

@backups_bp.route('/schedule', methods=['PUT'])
@handle_api_errors
@validate_json_input(
    optional_fields=['enabled', 'full_backup_interval', 'incremental_interval', 'retention_days', 'cleanup_enabled'],
    field_types={
        'enabled': bool,
        'full_backup_interval': str,
        'incremental_interval': str,
        'retention_days': int,
        'cleanup_enabled': bool
    }
)
@require_auth
def update_backup_schedule() -> Dict[str, Any]:
    """
    Update backup schedule configuration.

    Optional Fields:
    - enabled: Enable/disable automatic backups
    - full_backup_interval: Cron expression for full backups
    - incremental_interval: Cron expression for incremental backups
    - retention_days: Number of days to keep backups
    - cleanup_enabled: Enable/disable automatic cleanup

    Returns:
        Updated schedule configuration
    """
    if not backup_manager:
        raise APIException("Backup manager not available", 503)

    data = request.get_json()

    try:
        updated_config = backup_manager.update_backup_schedule(data)

        return create_success_response(
            data=updated_config,
            message="Backup schedule updated successfully"
        )

    except Exception as e:
        raise APIException(f"Failed to update backup schedule: {str(e)}", 500)
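
# Example schedule payload (a sketch; the cron strings are placeholders, the
# accepted field names come from the validator above):
#
#     PUT /api/v1/backups/schedule
#     {
#         "enabled": true,
#         "full_backup_interval": "0 3 * * 0",
#         "incremental_interval": "0 3 * * 1-6",
#         "retention_days": 30,
#         "cleanup_enabled": true
#     }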
@ -1,341 +0,0 @@
"""
|
|
||||||
Bulk Operations API endpoints
|
|
||||||
Provides REST API for bulk series management operations.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from flask import Blueprint, request, jsonify, send_file
|
|
||||||
import asyncio
|
|
||||||
import threading
|
|
||||||
from typing import Dict, Any
|
|
||||||
import uuid
|
|
||||||
import io
|
|
||||||
from bulk_operations import bulk_operations_manager
|
|
||||||
|
|
||||||
bulk_api_bp = Blueprint('bulk_api', __name__, url_prefix='/api/bulk')
|
|
||||||
|
|
||||||
# Store active operations
|
|
||||||
active_operations = {}
|
|
||||||
|
|
||||||
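# Every endpoint below repeats the same "fresh event loop on a worker thread"
# pattern. A possible consolidation is sketched here (illustrative only, not
# part of the original module; the endpoints keep their inline copies):

def _run_in_background(task_id: str, coro_factory):
    """Run an async bulk operation in a new event loop on a daemon thread,
    recording the outcome in active_operations[task_id]."""
    def runner():
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        try:
            result = loop.run_until_complete(coro_factory())
            active_operations[task_id]['status'] = 'completed'
            active_operations[task_id]['result'] = result
        except Exception as e:
            active_operations[task_id]['status'] = 'failed'
            active_operations[task_id]['error'] = str(e)
        finally:
            loop.close()
    threading.Thread(target=runner, daemon=True).start()
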
@bulk_api_bp.route('/download', methods=['POST'])
def bulk_download():
    """Start bulk download operation."""
    try:
        data = request.get_json()
        operation_id = data.get('operation_id')
        series_ids = data.get('series_ids', [])

        if not series_ids:
            return jsonify({'success': False, 'error': 'No series IDs provided'}), 400

        # Create task ID
        task_id = str(uuid.uuid4())

        # Store operation info
        active_operations[task_id] = {
            'id': operation_id,
            'type': 'download',
            'status': 'running',
            'progress': {
                'completed': 0,
                'total': len(series_ids),
                'message': 'Starting download...'
            }
        }

        # Start async operation
        def run_bulk_download():
            loop = asyncio.new_event_loop()
            asyncio.set_event_loop(loop)
            try:
                result = loop.run_until_complete(
                    bulk_operations_manager.bulk_download(series_ids, operation_id)
                )
                active_operations[task_id]['status'] = 'completed'
                active_operations[task_id]['result'] = result
            except Exception as e:
                active_operations[task_id]['status'] = 'failed'
                active_operations[task_id]['error'] = str(e)
            finally:
                loop.close()

        thread = threading.Thread(target=run_bulk_download)
        thread.start()

        return jsonify({'success': True, 'task_id': task_id})

    except Exception as e:
        return jsonify({'success': False, 'error': str(e)}), 500

@bulk_api_bp.route('/update', methods=['POST'])
def bulk_update():
    """Start bulk update operation."""
    try:
        data = request.get_json()
        operation_id = data.get('operation_id')
        series_ids = data.get('series_ids', [])

        if not series_ids:
            return jsonify({'success': False, 'error': 'No series IDs provided'}), 400

        task_id = str(uuid.uuid4())

        active_operations[task_id] = {
            'id': operation_id,
            'type': 'update',
            'status': 'running',
            'progress': {
                'completed': 0,
                'total': len(series_ids),
                'message': 'Starting update...'
            }
        }

        def run_bulk_update():
            loop = asyncio.new_event_loop()
            asyncio.set_event_loop(loop)
            try:
                result = loop.run_until_complete(
                    bulk_operations_manager.bulk_update(series_ids, operation_id)
                )
                active_operations[task_id]['status'] = 'completed'
                active_operations[task_id]['result'] = result
            except Exception as e:
                active_operations[task_id]['status'] = 'failed'
                active_operations[task_id]['error'] = str(e)
            finally:
                loop.close()

        thread = threading.Thread(target=run_bulk_update)
        thread.start()

        return jsonify({'success': True, 'task_id': task_id})

    except Exception as e:
        return jsonify({'success': False, 'error': str(e)}), 500

@bulk_api_bp.route('/organize', methods=['POST'])
def bulk_organize():
    """Start bulk organize operation."""
    try:
        data = request.get_json()
        operation_id = data.get('operation_id')
        series_ids = data.get('series_ids', [])
        options = data.get('options', {})

        if not series_ids:
            return jsonify({'success': False, 'error': 'No series IDs provided'}), 400

        task_id = str(uuid.uuid4())

        active_operations[task_id] = {
            'id': operation_id,
            'type': 'organize',
            'status': 'running',
            'progress': {
                'completed': 0,
                'total': len(series_ids),
                'message': 'Starting organization...'
            }
        }

        def run_bulk_organize():
            loop = asyncio.new_event_loop()
            asyncio.set_event_loop(loop)
            try:
                result = loop.run_until_complete(
                    bulk_operations_manager.bulk_organize(series_ids, options, operation_id)
                )
                active_operations[task_id]['status'] = 'completed'
                active_operations[task_id]['result'] = result
            except Exception as e:
                active_operations[task_id]['status'] = 'failed'
                active_operations[task_id]['error'] = str(e)
            finally:
                loop.close()

        thread = threading.Thread(target=run_bulk_organize)
        thread.start()

        return jsonify({'success': True, 'task_id': task_id})

    except Exception as e:
        return jsonify({'success': False, 'error': str(e)}), 500

@bulk_api_bp.route('/delete', methods=['DELETE'])
def bulk_delete():
    """Start bulk delete operation."""
    try:
        data = request.get_json()
        operation_id = data.get('operation_id')
        series_ids = data.get('series_ids', [])

        if not series_ids:
            return jsonify({'success': False, 'error': 'No series IDs provided'}), 400

        task_id = str(uuid.uuid4())

        active_operations[task_id] = {
            'id': operation_id,
            'type': 'delete',
            'status': 'running',
            'progress': {
                'completed': 0,
                'total': len(series_ids),
                'message': 'Starting deletion...'
            }
        }

        def run_bulk_delete():
            loop = asyncio.new_event_loop()
            asyncio.set_event_loop(loop)
            try:
                result = loop.run_until_complete(
                    bulk_operations_manager.bulk_delete(series_ids, operation_id)
                )
                active_operations[task_id]['status'] = 'completed'
                active_operations[task_id]['result'] = result
            except Exception as e:
                active_operations[task_id]['status'] = 'failed'
                active_operations[task_id]['error'] = str(e)
            finally:
                loop.close()

        thread = threading.Thread(target=run_bulk_delete)
        thread.start()

        return jsonify({'success': True, 'task_id': task_id})

    except Exception as e:
        return jsonify({'success': False, 'error': str(e)}), 500

@bulk_api_bp.route('/export', methods=['POST'])
def bulk_export():
    """Export series data."""
    try:
        data = request.get_json()
        series_ids = data.get('series_ids', [])
        format_type = data.get('format', 'json')

        if not series_ids:
            return jsonify({'success': False, 'error': 'No series IDs provided'}), 400

        # Generate export data
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        try:
            export_data = loop.run_until_complete(
                bulk_operations_manager.export_series_data(series_ids, format_type)
            )
        finally:
            loop.close()

        # Determine content type and filename
        content_types = {
            'json': 'application/json',
            'csv': 'text/csv',
            'xml': 'application/xml'
        }

        content_type = content_types.get(format_type, 'application/octet-stream')
        filename = f'series_export_{len(series_ids)}_items.{format_type}'

        return send_file(
            io.BytesIO(export_data),
            mimetype=content_type,
            as_attachment=True,
            download_name=filename
        )

    except Exception as e:
        return jsonify({'success': False, 'error': str(e)}), 500

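# Saving an export from a client (a sketch; assumes `requests` and the
# /api/bulk prefix configured on the blueprint above):
#
#     import requests
#     resp = requests.post(
#         'http://127.0.0.1:5000/api/bulk/export',
#         json={'series_ids': ['abc', 'def'], 'format': 'csv'},
#     )
#     with open('series_export.csv', 'wb') as fh:
#         fh.write(resp.content)
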
@bulk_api_bp.route('/status/<task_id>', methods=['GET'])
def get_operation_status(task_id):
    """Get operation status and progress."""
    try:
        if task_id not in active_operations:
            return jsonify({'error': 'Task not found'}), 404

        operation = active_operations[task_id]

        response = {
            'complete': operation['status'] in ['completed', 'failed'],
            'success': operation['status'] == 'completed',
            'status': operation['status']
        }

        if 'progress' in operation:
            response.update(operation['progress'])

        if 'error' in operation:
            response['error'] = operation['error']

        if 'result' in operation:
            response['result'] = operation['result']

        return jsonify(response)

    except Exception as e:
        return jsonify({'error': str(e)}), 500

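# Typical client flow (a sketch; assumes `requests`): start an operation,
# then poll this endpoint until 'complete' is true.
#
#     import time, requests
#     base = 'http://127.0.0.1:5000/api/bulk'
#     task = requests.post(f'{base}/update',
#                          json={'series_ids': ids}).json()['task_id']
#     while True:
#         state = requests.get(f'{base}/status/{task}').json()
#         if state['complete']:
#             break
#         time.sleep(1)
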
@bulk_api_bp.route('/cancel/<task_id>', methods=['POST'])
def cancel_operation(task_id):
    """Cancel a running operation."""
    try:
        if task_id not in active_operations:
            return jsonify({'error': 'Task not found'}), 404

        # Mark operation as cancelled (the background thread itself is not
        # interrupted; it simply finishes and its result is discarded)
        active_operations[task_id]['status'] = 'cancelled'

        return jsonify({'success': True, 'message': 'Operation cancelled'})

    except Exception as e:
        return jsonify({'error': str(e)}), 500

@bulk_api_bp.route('/history', methods=['GET'])
def get_operation_history():
    """Get history of bulk operations."""
    try:
        # Return completed/failed operations
        history = []
        for task_id, operation in active_operations.items():
            if operation['status'] in ['completed', 'failed', 'cancelled']:
                history.append({
                    'task_id': task_id,
                    'operation_id': operation['id'],
                    'type': operation['type'],
                    'status': operation['status'],
                    'progress': operation.get('progress', {}),
                    'error': operation.get('error'),
                    'result': operation.get('result')
                })

        # Sort by completed-item count, highest first (no timestamps are
        # recorded, so true recency ordering is not available)
        history.sort(key=lambda x: x.get('progress', {}).get('completed', 0), reverse=True)

        return jsonify({'history': history})

    except Exception as e:
        return jsonify({'error': str(e)}), 500

@bulk_api_bp.route('/cleanup', methods=['POST'])
def cleanup_completed_operations():
    """Clean up completed/failed operations."""
    try:
        to_remove = []
        for task_id, operation in active_operations.items():
            if operation['status'] in ['completed', 'failed', 'cancelled']:
                to_remove.append(task_id)

        for task_id in to_remove:
            del active_operations[task_id]

        return jsonify({
            'success': True,
            'cleaned_up': len(to_remove),
            'message': f'Cleaned up {len(to_remove)} completed operations'
        })

    except Exception as e:
        return jsonify({'error': str(e)}), 500
@ -1,454 +0,0 @@
"""
|
|
||||||
API endpoints for configuration management.
|
|
||||||
Provides comprehensive configuration management with validation, backup, and restore functionality.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import json
|
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
from datetime import datetime
|
|
||||||
from typing import Any, Dict, Optional
|
|
||||||
|
|
||||||
from fastapi import APIRouter, Depends, File, Form, HTTPException, UploadFile, status
|
|
||||||
from fastapi.responses import FileResponse
|
|
||||||
from pydantic import BaseModel
|
|
||||||
|
|
||||||
# Import SeriesApp for business logic
|
|
||||||
from src.core.SeriesApp import SeriesApp
|
|
||||||
|
|
||||||
# FastAPI dependencies and models
|
|
||||||
from src.server.fastapi_app import get_current_user, settings
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
# Create FastAPI router for config management endpoints
|
|
||||||
router = APIRouter(prefix='/api/v1/config', tags=['config'])
|
|
||||||
|
|
||||||
# Pydantic models for requests and responses
|
|
||||||
class ConfigResponse(BaseModel):
|
|
||||||
"""Response model for configuration data."""
|
|
||||||
success: bool = True
|
|
||||||
config: Dict[str, Any]
|
|
||||||
schema: Optional[Dict[str, Any]] = None
|
|
||||||
|
|
||||||
class ConfigUpdateRequest(BaseModel):
|
|
||||||
"""Request model for configuration updates."""
|
|
||||||
config: Dict[str, Any]
|
|
||||||
validate: bool = True
|
|
||||||
|
|
||||||
class ConfigImportResponse(BaseModel):
|
|
||||||
"""Response model for configuration import operations."""
|
|
||||||
success: bool
|
|
||||||
message: str
|
|
||||||
imported_keys: Optional[list] = None
|
|
||||||
skipped_keys: Optional[list] = None
|
|
||||||
|
|
||||||
# Dependency to get SeriesApp instance
|
|
||||||
def get_series_app() -> SeriesApp:
|
|
||||||
"""Get SeriesApp instance for business logic operations."""
|
|
||||||
if not settings.anime_directory:
|
|
||||||
raise HTTPException(
|
|
||||||
status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
|
|
||||||
detail="Anime directory not configured"
|
|
||||||
)
|
|
||||||
return SeriesApp(settings.anime_directory)
|
|
||||||
|
|
||||||
|
|
||||||
@router.get('/', response_model=ConfigResponse)
async def get_full_config(
    current_user: Optional[Dict] = Depends(get_current_user)
) -> ConfigResponse:
    """Get the complete configuration (without sensitive data)."""
    try:
        # For now, return a basic config structure
        # TODO: Replace with actual config management logic
        config_data = {
            "anime_directory": settings.anime_directory if hasattr(settings, 'anime_directory') else None,
            "download_settings": {},
            "display_settings": {},
            "security_settings": {}
        }

        schema = {
            "anime_directory": {"type": "string", "required": True},
            "download_settings": {"type": "object"},
            "display_settings": {"type": "object"},
            "security_settings": {"type": "object"}
        }

        return ConfigResponse(
            success=True,
            config=config_data,
            schema=schema
        )
    except Exception as e:
        logger.error(f"Error getting configuration: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=str(e)
        )


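# Example call (a sketch; host and port are assumptions, the trailing slash
# matches the router registration above):
#
#     curl -s http://127.0.0.1:5000/api/v1/config/ | python -m json.tool
#
# The response mirrors ConfigResponse:
#     {"success": true, "config": {...}, "schema": {...}}
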
@router.post('/', response_model=ConfigImportResponse)
async def update_config(
    config_update: ConfigUpdateRequest,
    current_user: Optional[Dict] = Depends(get_current_user)
) -> ConfigImportResponse:
    """Update configuration with validation."""
    try:
        # For now, just return success
        # TODO: Replace with actual config management logic
        logger.info("Configuration updated successfully")
        return ConfigImportResponse(
            success=True,
            message="Configuration updated successfully",
            imported_keys=list(config_update.config.keys()),
            skipped_keys=[]
        )

    except Exception as e:
        logger.error(f"Error updating configuration: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=str(e)
        )

@config_bp.route('/validate', methods=['POST'])
@require_auth
def validate_config():
    """Validate configuration without saving."""
    try:
        data = request.get_json() or {}

        validation_result = config.validate_config(data)

        return jsonify({
            'success': True,
            'validation': validation_result
        })
    except Exception as e:
        logger.error(f"Error validating configuration: {e}")
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500

@config_bp.route('/section/<section_name>', methods=['GET'])
@require_auth
def get_config_section(section_name):
    """Get a specific configuration section."""
    try:
        section_data = config.get(section_name, {})

        return jsonify({
            'success': True,
            'section': section_name,
            'config': section_data
        })
    except Exception as e:
        logger.error(f"Error getting config section {section_name}: {e}")
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500

@config_bp.route('/section/<section_name>', methods=['POST'])
@require_auth
def update_config_section(section_name):
    """Update a specific configuration section."""
    try:
        data = request.get_json() or {}

        # Get current config
        current_config = config.export_config(include_sensitive=True)

        # Update the specific section
        current_config[section_name] = data

        # Validate and save
        result = config.import_config(current_config, validate=True)

        if result['success']:
            logger.info(f"Configuration section '{section_name}' updated successfully")
            return jsonify({
                'success': True,
                'message': f'Configuration section "{section_name}" updated successfully',
                'warnings': result.get('warnings', [])
            })
        else:
            return jsonify({
                'success': False,
                'error': 'Configuration validation failed',
                'errors': result['errors'],
                'warnings': result.get('warnings', [])
            }), 400

    except Exception as e:
        logger.error(f"Error updating config section {section_name}: {e}")
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500

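# Example section update (a sketch; the section name and keys below are
# illustrative, not taken from the real config schema). Note that the handler
# replaces the named section wholesale, so clients must send the full section
# body, not a partial patch:
#
#     POST /api/v1/config/section/download_settings
#     {"max_concurrent_downloads": 3, "anime_directory": "./downloads"}
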
@config_bp.route('/backup', methods=['POST'])
@require_auth
def create_backup():
    """Create a configuration backup."""
    try:
        data = request.get_json() or {}
        backup_name = data.get('name', '')

        # Generate backup filename
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        if backup_name:
            # Sanitize backup name
            backup_name = secure_filename(backup_name)
            filename = f"config_backup_{backup_name}_{timestamp}.json"
        else:
            filename = f"config_backup_{timestamp}.json"

        backup_path = config.backup_config(filename)

        logger.info(f"Configuration backup created: {backup_path}")

        return jsonify({
            'success': True,
            'message': 'Backup created successfully',
            'backup_path': backup_path,
            'filename': filename
        })
    except Exception as e:
        logger.error(f"Error creating backup: {e}")
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500

@config_bp.route('/backups', methods=['GET'])
@require_auth
def list_backups():
    """List available configuration backups."""
    try:
        backups = []

        # Scan current directory for backup files
        for filename in os.listdir('.'):
            if filename.startswith('config_backup_') and filename.endswith('.json'):
                file_path = os.path.abspath(filename)
                file_size = os.path.getsize(filename)
                file_modified = datetime.fromtimestamp(os.path.getmtime(filename))

                backups.append({
                    'filename': filename,
                    'path': file_path,
                    'size': file_size,
                    'size_kb': round(file_size / 1024, 2),
                    'modified': file_modified.isoformat(),
                    'modified_display': file_modified.strftime('%Y-%m-%d %H:%M:%S')
                })

        # Sort by modification date (newest first)
        backups.sort(key=lambda x: x['modified'], reverse=True)

        return jsonify({
            'success': True,
            'backups': backups
        })
    except Exception as e:
        logger.error(f"Error listing backups: {e}")
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500

@config_bp.route('/backup/<filename>/restore', methods=['POST'])
@require_auth
def restore_backup(filename):
    """Restore configuration from a backup."""
    try:
        # Security: Only allow config backup files
        if not filename.startswith('config_backup_') or not filename.endswith('.json'):
            return jsonify({
                'success': False,
                'error': 'Invalid backup file'
            }), 400

        # Security: Check if file exists
        if not os.path.exists(filename):
            return jsonify({
                'success': False,
                'error': 'Backup file not found'
            }), 404

        success = config.restore_config(filename)

        if success:
            logger.info(f"Configuration restored from backup: {filename}")
            return jsonify({
                'success': True,
                'message': 'Configuration restored successfully'
            })
        else:
            return jsonify({
                'success': False,
                'error': 'Failed to restore configuration'
            }), 500

    except Exception as e:
        logger.error(f"Error restoring backup {filename}: {e}")
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500

@config_bp.route('/backup/<filename>/download', methods=['GET'])
@require_auth
def download_backup(filename):
    """Download a configuration backup file."""
    try:
        # Security: Only allow config backup files
        if not filename.startswith('config_backup_') or not filename.endswith('.json'):
            return jsonify({
                'success': False,
                'error': 'Invalid backup file'
            }), 400

        # Security: Check if file exists
        if not os.path.exists(filename):
            return jsonify({
                'success': False,
                'error': 'Backup file not found'
            }), 404

        return send_file(
            filename,
            as_attachment=True,
            download_name=filename
        )
    except Exception as e:
        logger.error(f"Error downloading backup {filename}: {e}")
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500

@config_bp.route('/export', methods=['POST'])
@require_auth
def export_config():
    """Export the current configuration to JSON."""
    try:
        data = request.get_json() or {}
        include_sensitive = data.get('include_sensitive', False)

        config_data = config.export_config(include_sensitive=include_sensitive)

        # Create filename with timestamp
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        filename = f"aniworld_config_export_{timestamp}.json"

        # Write to temporary file
        with open(filename, 'w', encoding='utf-8') as f:
            json.dump(config_data, f, indent=4)

        return send_file(
            filename,
            as_attachment=True,
            download_name=filename,
            mimetype='application/json'
        )

    except Exception as e:
        logger.error(f"Error exporting configuration: {e}")
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500


@router.post('/import', response_model=ConfigImportResponse)
async def import_config(
    config_file: UploadFile = File(...),
    current_user: Optional[Dict] = Depends(get_current_user)
) -> ConfigImportResponse:
    """Import configuration from an uploaded JSON file."""
    try:
        # Validate file type
        if not config_file.filename:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="No file selected"
            )

        if not config_file.filename.endswith('.json'):
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="Invalid file type. Only JSON files are allowed."
            )

        # Read and parse JSON
        try:
            content = await config_file.read()
            config_data = json.loads(content.decode('utf-8'))
        except json.JSONDecodeError as e:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=f"Invalid JSON format: {e}"
            )

        # For now, just return success with the keys that would be imported
        # TODO: Replace with actual config management logic
        logger.info(f"Configuration imported from file: {config_file.filename}")
        return ConfigImportResponse(
            success=True,
            message="Configuration imported successfully",
            imported_keys=list(config_data.keys()) if isinstance(config_data, dict) else [],
            skipped_keys=[]
        )

    except HTTPException:
        raise
    except Exception as e:
        logger.error(f"Error importing configuration: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=str(e)
        )

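# Uploading a config file from a client (a sketch; assumes `requests` and a
# server address from .env; the multipart field name matches the UploadFile
# parameter above):
#
#     import requests
#     with open('aniworld_config_export.json', 'rb') as fh:
#         resp = requests.post(
#             'http://127.0.0.1:5000/api/v1/config/import',
#             files={'config_file': ('config.json', fh, 'application/json')},
#         )
#     print(resp.json()['imported_keys'])
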
@config_bp.route('/reset', methods=['POST'])
@require_auth
def reset_config():
    """Reset configuration to defaults (preserves security settings)."""
    try:
        data = request.get_json() or {}
        preserve_security = data.get('preserve_security', True)

        # Get current security settings
        current_security = config.get('security', {}) if preserve_security else {}

        # Reset to defaults
        config._config = config.default_config.copy()

        # Restore security settings if requested
        if preserve_security and current_security:
            config._config['security'] = current_security

        success = config.save_config()

        if success:
            logger.info("Configuration reset to defaults")
            return jsonify({
                'success': True,
                'message': 'Configuration reset to defaults'
            })
        else:
            return jsonify({
                'success': False,
                'error': 'Failed to save configuration'
            }), 500

    except Exception as e:
        logger.error(f"Error resetting configuration: {e}")
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500
@ -1,649 +0,0 @@
"""
|
|
||||||
Database & Storage Management API Endpoints
|
|
||||||
|
|
||||||
This module provides REST API endpoints for database operations,
|
|
||||||
backup management, and storage monitoring.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from flask import Blueprint, request, jsonify, send_file
|
|
||||||
from auth import require_auth, optional_auth
|
|
||||||
from error_handler import handle_api_errors, RetryableError, NonRetryableError
|
|
||||||
from database_manager import (
|
|
||||||
database_manager, anime_repository, backup_manager, storage_manager,
|
|
||||||
AnimeMetadata
|
|
||||||
)
|
|
||||||
import uuid
|
|
||||||
from datetime import datetime
|
|
||||||
import os
|
|
||||||
|
|
||||||
|
|
||||||
# Blueprint for database management endpoints
|
|
||||||
database_bp = Blueprint('database', __name__)
|
|
||||||
|
|
||||||
|
|
||||||
# Database Information Endpoints
@database_bp.route('/api/database/info')
@handle_api_errors
@optional_auth
def get_database_info():
    """Get database information and statistics."""
    try:
        # Get schema version
        schema_version = database_manager.get_current_version()

        # Get table statistics
        stats_query = """
            SELECT
                (SELECT COUNT(*) FROM anime_metadata) as anime_count,
                (SELECT COUNT(*) FROM episode_metadata) as episode_count,
                (SELECT COUNT(*) FROM episode_metadata WHERE is_downloaded = 1) as downloaded_count,
                (SELECT COUNT(*) FROM download_history) as download_history_count
        """

        results = database_manager.execute_query(stats_query)
        stats = dict(results[0]) if results else {}

        # Get database file size
        db_size = os.path.getsize(database_manager.db_path) if os.path.exists(database_manager.db_path) else 0

        return jsonify({
            'status': 'success',
            'data': {
                'schema_version': schema_version,
                'database_path': database_manager.db_path,
                'database_size_mb': round(db_size / (1024 * 1024), 2),
                'statistics': {
                    'anime_count': stats.get('anime_count', 0),
                    'episode_count': stats.get('episode_count', 0),
                    'downloaded_count': stats.get('downloaded_count', 0),
                    'download_history_count': stats.get('download_history_count', 0)
                }
            }
        })
    except Exception as e:
        raise RetryableError(f"Failed to get database info: {e}")


# Anime Metadata Endpoints
@database_bp.route('/api/database/anime')
@handle_api_errors
@optional_auth
def get_all_anime():
    """Get all anime from the database."""
    try:
        status_filter = request.args.get('status')
        anime_list = anime_repository.get_all_anime(status_filter)

        # Convert to serializable format
        anime_data = []
        for anime in anime_list:
            anime_data.append({
                'anime_id': anime.anime_id,
                'name': anime.name,
                'folder': anime.folder,
                'key': anime.key,
                'description': anime.description,
                'genres': anime.genres,
                'release_year': anime.release_year,
                'status': anime.status,
                'total_episodes': anime.total_episodes,
                'poster_url': anime.poster_url,
                'last_updated': anime.last_updated.isoformat(),
                'created_at': anime.created_at.isoformat(),
                'custom_metadata': anime.custom_metadata
            })

        return jsonify({
            'status': 'success',
            'data': {
                'anime': anime_data,
                'count': len(anime_data)
            }
        })
    except Exception as e:
        raise RetryableError(f"Failed to get anime list: {e}")


@database_bp.route('/api/database/anime/<anime_id>')
@handle_api_errors
@optional_auth
def get_anime_by_id(anime_id):
    """Get a specific anime by ID."""
    try:
        query = "SELECT * FROM anime_metadata WHERE anime_id = ?"
        results = database_manager.execute_query(query, (anime_id,))

        if not results:
            return jsonify({
                'status': 'error',
                'message': 'Anime not found'
            }), 404

        row = results[0]
        anime_data = {
            'anime_id': row['anime_id'],
            'name': row['name'],
            'folder': row['folder'],
            'key': row['key'],
            'description': row['description'],
            'genres': row['genres'],
            'release_year': row['release_year'],
            'status': row['status'],
            'total_episodes': row['total_episodes'],
            'poster_url': row['poster_url'],
            'last_updated': row['last_updated'],
            'created_at': row['created_at'],
            'custom_metadata': row['custom_metadata']
        }

        return jsonify({
            'status': 'success',
            'data': anime_data
        })
    except Exception as e:
        raise RetryableError(f"Failed to get anime: {e}")


@database_bp.route('/api/database/anime', methods=['POST'])
@handle_api_errors
@require_auth
def create_anime():
    """Create a new anime record."""
    try:
        data = request.get_json()

        # Validate required fields
        required_fields = ['name', 'folder']
        for field in required_fields:
            if field not in data:
                return jsonify({
                    'status': 'error',
                    'message': f'Missing required field: {field}'
                }), 400

        # Create anime metadata
        anime = AnimeMetadata(
            anime_id=str(uuid.uuid4()),
            name=data['name'],
            folder=data['folder'],
            key=data.get('key'),
            description=data.get('description'),
            genres=data.get('genres', []),
            release_year=data.get('release_year'),
            status=data.get('status', 'ongoing'),
            total_episodes=data.get('total_episodes'),
            poster_url=data.get('poster_url'),
            custom_metadata=data.get('custom_metadata', {})
        )

        success = anime_repository.create_anime(anime)

        if success:
            return jsonify({
                'status': 'success',
                'message': 'Anime created successfully',
                'data': {
                    'anime_id': anime.anime_id
                }
            }), 201
        else:
            return jsonify({
                'status': 'error',
                'message': 'Failed to create anime'
            }), 500

    except Exception as e:
        raise RetryableError(f"Failed to create anime: {e}")


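# Example create request (a sketch; only 'name' and 'folder' are required by
# the validation above, everything else falls back to defaults):
#
#     POST /api/database/anime
#     {
#         "name": "Example Show",
#         "folder": "example-show",
#         "status": "ongoing",
#         "genres": ["action"]
#     }
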
@database_bp.route('/api/database/anime/<anime_id>', methods=['PUT'])
@handle_api_errors
@require_auth
def update_anime(anime_id):
    """Update anime metadata."""
    try:
        data = request.get_json()

        # Get existing anime
        existing = anime_repository.get_anime_by_folder(data.get('folder', ''))
        if not existing or existing.anime_id != anime_id:
            return jsonify({
                'status': 'error',
                'message': 'Anime not found'
            }), 404

        # Update fields
        if 'name' in data:
            existing.name = data['name']
        if 'key' in data:
            existing.key = data['key']
        if 'description' in data:
            existing.description = data['description']
        if 'genres' in data:
            existing.genres = data['genres']
        if 'release_year' in data:
            existing.release_year = data['release_year']
        if 'status' in data:
            existing.status = data['status']
        if 'total_episodes' in data:
            existing.total_episodes = data['total_episodes']
        if 'poster_url' in data:
            existing.poster_url = data['poster_url']
        if 'custom_metadata' in data:
            existing.custom_metadata.update(data['custom_metadata'])

        success = anime_repository.update_anime(existing)

        if success:
            return jsonify({
                'status': 'success',
                'message': 'Anime updated successfully'
            })
        else:
            return jsonify({
                'status': 'error',
                'message': 'Failed to update anime'
            }), 500

    except Exception as e:
        raise RetryableError(f"Failed to update anime: {e}")

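# Note on the handler above: the existing record is looked up via
# get_anime_by_folder(data.get('folder', '')) rather than by anime_id, so a
# PUT payload that omits 'folder' always yields a 404 even for a valid
# anime_id. A sketch of a payload that satisfies the lookup (values are
# illustrative):
#
#     update_payload = {
#         "folder": "example-show",   # required by the folder-based lookup
#         "description": "Updated description",
#         "total_episodes": 24,
#     }
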
@database_bp.route('/api/database/anime/<anime_id>', methods=['DELETE'])
@handle_api_errors
@require_auth
def delete_anime(anime_id):
    """Delete anime and related data."""
    try:
        success = anime_repository.delete_anime(anime_id)

        if success:
            return jsonify({
                'status': 'success',
                'message': 'Anime deleted successfully'
            })
        else:
            return jsonify({
                'status': 'error',
                'message': 'Anime not found'
            }), 404

    except Exception as e:
        raise RetryableError(f"Failed to delete anime: {e}")

@database_bp.route('/api/database/anime/search')
@handle_api_errors
@optional_auth
def search_anime():
    """Search anime by name or description."""
    try:
        search_term = request.args.get('q', '').strip()

        if not search_term:
            return jsonify({
                'status': 'error',
                'message': 'Search term is required'
            }), 400

        results = anime_repository.search_anime(search_term)

        # Convert to serializable format
        anime_data = []
        for anime in results:
            anime_data.append({
                'anime_id': anime.anime_id,
                'name': anime.name,
                'folder': anime.folder,
                'key': anime.key,
                'description': anime.description,
                'genres': anime.genres,
                'release_year': anime.release_year,
                'status': anime.status
            })

        return jsonify({
            'status': 'success',
            'data': {
                'results': anime_data,
                'count': len(anime_data),
                'search_term': search_term
            }
        })
    except Exception as e:
        raise RetryableError(f"Failed to search anime: {e}")

# Backup Management Endpoints
@database_bp.route('/api/database/backups')
@handle_api_errors
@optional_auth
def list_backups():
    """List all available backups."""
    try:
        backups = backup_manager.list_backups()

        backup_data = []
        for backup in backups:
            backup_data.append({
                'backup_id': backup.backup_id,
                'backup_type': backup.backup_type,
                'created_at': backup.created_at.isoformat(),
                'size_mb': round(backup.size_bytes / (1024 * 1024), 2),
                'description': backup.description,
                'tables_included': backup.tables_included
            })

        return jsonify({
            'status': 'success',
            'data': {
                'backups': backup_data,
                'count': len(backup_data)
            }
        })
    except Exception as e:
        raise RetryableError(f"Failed to list backups: {e}")

@database_bp.route('/api/database/backups/create', methods=['POST'])
@handle_api_errors
@require_auth
def create_backup():
    """Create a new database backup."""
    try:
        data = request.get_json() or {}
        backup_type = data.get('backup_type', 'full')
        description = data.get('description')

        if backup_type not in ['full', 'metadata_only']:
            return jsonify({
                'status': 'error',
                'message': 'Backup type must be "full" or "metadata_only"'
            }), 400

        if backup_type == 'full':
            backup_info = backup_manager.create_full_backup(description)
        else:
            backup_info = backup_manager.create_metadata_backup(description)

        if backup_info:
            return jsonify({
                'status': 'success',
                'message': f'{backup_type.title()} backup created successfully',
                'data': {
                    'backup_id': backup_info.backup_id,
                    'backup_type': backup_info.backup_type,
                    'size_mb': round(backup_info.size_bytes / (1024 * 1024), 2),
                    'created_at': backup_info.created_at.isoformat()
                }
            }), 201
        else:
            return jsonify({
                'status': 'error',
                'message': 'Failed to create backup'
            }), 500

    except Exception as e:
        raise RetryableError(f"Failed to create backup: {e}")

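# Hedged sketch of requesting a metadata-only backup from the endpoint above;
# the URL prefix and auth handling are assumptions, the payload keys match the
# handler exactly.
#
#     import requests
#
#     resp = requests.post(
#         "http://127.0.0.1:5000/api/database/backups/create",
#         json={"backup_type": "metadata_only",
#               "description": "pre-upgrade snapshot"},
#         timeout=30,
#     )
#     print(resp.json()["data"]["backup_id"])
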
@database_bp.route('/api/database/backups/<backup_id>/restore', methods=['POST'])
@handle_api_errors
@require_auth
def restore_backup(backup_id):
    """Restore from a backup."""
    try:
        success = backup_manager.restore_backup(backup_id)

        if success:
            return jsonify({
                'status': 'success',
                'message': 'Backup restored successfully'
            })
        else:
            return jsonify({
                'status': 'error',
                'message': 'Failed to restore backup'
            }), 500

    except Exception as e:
        raise RetryableError(f"Failed to restore backup: {e}")

@database_bp.route('/api/database/backups/<backup_id>/download')
@handle_api_errors
@require_auth
def download_backup(backup_id):
    """Download a backup file."""
    try:
        backups = backup_manager.list_backups()
        target_backup = None

        for backup in backups:
            if backup.backup_id == backup_id:
                target_backup = backup
                break

        if not target_backup:
            return jsonify({
                'status': 'error',
                'message': 'Backup not found'
            }), 404

        if not os.path.exists(target_backup.backup_path):
            return jsonify({
                'status': 'error',
                'message': 'Backup file not found'
            }), 404

        filename = os.path.basename(target_backup.backup_path)
        return send_file(target_backup.backup_path, as_attachment=True, download_name=filename)

    except Exception as e:
        raise RetryableError(f"Failed to download backup: {e}")

@database_bp.route('/api/database/backups/cleanup', methods=['POST'])
@handle_api_errors
@require_auth
def cleanup_backups():
    """Clean up old backup files."""
    try:
        data = request.get_json() or {}
        keep_days = data.get('keep_days', 30)
        keep_count = data.get('keep_count', 10)

        if keep_days < 1 or keep_count < 1:
            return jsonify({
                'status': 'error',
                'message': 'keep_days and keep_count must be positive integers'
            }), 400

        backup_manager.cleanup_old_backups(keep_days, keep_count)

        return jsonify({
            'status': 'success',
            'message': f'Backup cleanup completed (keeping {keep_count} backups, max {keep_days} days old)'
        })

    except Exception as e:
        raise RetryableError(f"Failed to cleanup backups: {e}")

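# Sketch of a retention call against the endpoint above: keep at most 10
# backups and nothing older than 14 days. Both values must be positive
# integers or the handler returns 400. The endpoint URL is an assumption.
#
#     import requests
#
#     requests.post("http://127.0.0.1:5000/api/database/backups/cleanup",
#                   json={"keep_days": 14, "keep_count": 10}, timeout=30)
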
# Storage Management Endpoints
@database_bp.route('/api/database/storage/summary')
@handle_api_errors
@optional_auth
def get_storage_summary():
    """Get storage usage summary."""
    try:
        summary = storage_manager.get_storage_summary()

        return jsonify({
            'status': 'success',
            'data': summary
        })
    except Exception as e:
        raise RetryableError(f"Failed to get storage summary: {e}")

@database_bp.route('/api/database/storage/locations')
@handle_api_errors
@optional_auth
def get_storage_locations():
    """Get all storage locations."""
    try:
        query = """
            SELECT sl.*, am.name as anime_name
            FROM storage_locations sl
            LEFT JOIN anime_metadata am ON sl.anime_id = am.anime_id
            WHERE sl.is_active = 1
            ORDER BY sl.location_type, sl.path
        """

        results = database_manager.execute_query(query)

        locations = []
        for row in results:
            locations.append({
                'location_id': row['location_id'],
                'anime_id': row['anime_id'],
                'anime_name': row['anime_name'],
                'path': row['path'],
                'location_type': row['location_type'],
                'free_space_gb': (row['free_space_bytes'] / (1024**3)) if row['free_space_bytes'] else None,
                'total_space_gb': (row['total_space_bytes'] / (1024**3)) if row['total_space_bytes'] else None,
                'usage_percent': ((row['total_space_bytes'] - row['free_space_bytes']) / row['total_space_bytes'] * 100) if row['total_space_bytes'] and row['free_space_bytes'] else None,
                'last_checked': row['last_checked']
            })

        return jsonify({
            'status': 'success',
            'data': {
                'locations': locations,
                'count': len(locations)
            }
        })
    except Exception as e:
        raise RetryableError(f"Failed to get storage locations: {e}")

@database_bp.route('/api/database/storage/locations', methods=['POST'])
@handle_api_errors
@require_auth
def add_storage_location():
    """Add a new storage location."""
    try:
        data = request.get_json()

        path = data.get('path')
        location_type = data.get('location_type', 'primary')
        anime_id = data.get('anime_id')

        if not path:
            return jsonify({
                'status': 'error',
                'message': 'Path is required'
            }), 400

        if location_type not in ['primary', 'backup', 'cache']:
            return jsonify({
                'status': 'error',
                'message': 'Location type must be primary, backup, or cache'
            }), 400

        location_id = storage_manager.add_storage_location(path, location_type, anime_id)

        return jsonify({
            'status': 'success',
            'message': 'Storage location added successfully',
            'data': {
                'location_id': location_id
            }
        }), 201

    except Exception as e:
        raise RetryableError(f"Failed to add storage location: {e}")

@database_bp.route('/api/database/storage/locations/<location_id>/update', methods=['POST'])
@handle_api_errors
@require_auth
def update_storage_location(location_id):
    """Update storage location statistics."""
    try:
        storage_manager.update_storage_stats(location_id)

        return jsonify({
            'status': 'success',
            'message': 'Storage statistics updated successfully'
        })

    except Exception as e:
        raise RetryableError(f"Failed to update storage location: {e}")

# Database Maintenance Endpoints
@database_bp.route('/api/database/maintenance/vacuum', methods=['POST'])
@handle_api_errors
@require_auth
def vacuum_database():
    """Perform database VACUUM operation to reclaim space."""
    try:
        with database_manager.get_connection() as conn:
            conn.execute("VACUUM")

        return jsonify({
            'status': 'success',
            'message': 'Database vacuum completed successfully'
        })

    except Exception as e:
        raise RetryableError(f"Failed to vacuum database: {e}")


@database_bp.route('/api/database/maintenance/analyze', methods=['POST'])
@handle_api_errors
@require_auth
def analyze_database():
    """Perform database ANALYZE operation to update statistics."""
    try:
        with database_manager.get_connection() as conn:
            conn.execute("ANALYZE")

        return jsonify({
            'status': 'success',
            'message': 'Database analysis completed successfully'
        })

    except Exception as e:
        raise RetryableError(f"Failed to analyze database: {e}")

@database_bp.route('/api/database/maintenance/integrity-check', methods=['POST'])
@handle_api_errors
@require_auth
def integrity_check():
    """Perform database integrity check."""
    try:
        with database_manager.get_connection() as conn:
            cursor = conn.execute("PRAGMA integrity_check")
            results = cursor.fetchall()

        # Check if database is OK
        is_ok = len(results) == 1 and results[0][0] == 'ok'

        return jsonify({
            'status': 'success',
            'data': {
                'integrity_ok': is_ok,
                'results': [row[0] for row in results]
            }
        })

    except Exception as e:
        raise RetryableError(f"Failed to check database integrity: {e}")


# Export the blueprint
__all__ = ['database_bp']
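# Self-contained sketch of what the integrity-check endpoint runs under the
# hood: SQLite's PRAGMA integrity_check returns a single row containing 'ok'
# when the database is healthy, or one row per detected problem otherwise,
# which is exactly what the is_ok test above relies on.

import sqlite3

conn = sqlite3.connect(":memory:")
rows = conn.execute("PRAGMA integrity_check").fetchall()
assert rows == [("ok",)]  # a healthy database yields exactly one 'ok' row
conn.close()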
@ -1,581 +0,0 @@
"""
Diagnostics API endpoints.

This module handles all diagnostic and monitoring operations including:
- System health checks
- Performance monitoring
- Error reporting
- Network diagnostics
"""

from flask import Blueprint, request, jsonify
from typing import Dict, List, Any, Optional, Tuple
import logging
import psutil
import socket
import requests
import time
import platform
import sys
import os
from datetime import datetime, timedelta

# Import shared utilities
try:
    from src.server.web.controllers.shared.auth_decorators import require_auth, optional_auth
    from src.server.web.controllers.shared.error_handlers import handle_api_errors
    from src.server.web.controllers.shared.validators import validate_query_params
    from src.server.web.controllers.shared.response_helpers import (
        create_success_response, create_error_response, format_datetime, format_file_size
    )
except ImportError:
    # Fallback imports for development
    def require_auth(f): return f
    def optional_auth(f): return f
    def handle_api_errors(f): return f
    def validate_query_params(**kwargs): return lambda f: f
    def create_success_response(msg, code=200, data=None): return jsonify({'success': True, 'message': msg, 'data': data}), code
    def create_error_response(msg, code=400, details=None): return jsonify({'error': msg, 'details': details}), code
    def format_datetime(dt): return str(dt) if dt else None
    def format_file_size(size): return f"{size} bytes"

# Import diagnostic components
try:
    from src.server.data.error_manager import ErrorManager
    from src.server.data.performance_manager import PerformanceManager
    from src.server.data.system_manager import SystemManager
except ImportError:
    # Fallback for development
    class ErrorManager:
        def get_recent_errors(self, **kwargs): return []
        def get_error_stats(self): return {}
        def clear_errors(self): return True
        def report_error(self, **kwargs): return 1

    class PerformanceManager:
        def get_performance_metrics(self): return {}
        def get_performance_history(self, **kwargs): return []
        def record_metric(self, **kwargs): return True

    class SystemManager:
        def get_system_info(self): return {}
        def get_disk_usage(self): return {}
        def get_network_status(self): return {}
        def test_network_connectivity(self, url): return {'success': True, 'response_time': 0.1}

# Create blueprint
diagnostics_bp = Blueprint('diagnostics', __name__)

# Initialize managers
error_manager = ErrorManager()
performance_manager = PerformanceManager()
system_manager = SystemManager()

logger = logging.getLogger(__name__)

@diagnostics_bp.route('/diagnostics/health', methods=['GET'])
@optional_auth
@handle_api_errors
def health_check() -> Tuple[Any, int]:
    """
    Perform comprehensive system health check.

    Returns:
        JSON response with system health status
    """
    try:
        health_status = {
            'status': 'healthy',
            'timestamp': datetime.now().isoformat(),
            'checks': {},
            'overall_score': 100
        }

        # System resource checks
        cpu_percent = psutil.cpu_percent(interval=1)
        memory = psutil.virtual_memory()
        disk = psutil.disk_usage('/')

        # CPU check
        health_status['checks']['cpu'] = {
            'status': 'healthy' if cpu_percent < 80 else 'warning' if cpu_percent < 95 else 'critical',
            'usage_percent': cpu_percent,
            'details': f"CPU usage: {cpu_percent}%"
        }

        # Memory check
        memory_percent = memory.percent
        health_status['checks']['memory'] = {
            'status': 'healthy' if memory_percent < 80 else 'warning' if memory_percent < 95 else 'critical',
            'usage_percent': memory_percent,
            'total': format_file_size(memory.total),
            'available': format_file_size(memory.available),
            'details': f"Memory usage: {memory_percent}%"
        }

        # Disk check
        disk_percent = disk.percent
        health_status['checks']['disk'] = {
            'status': 'healthy' if disk_percent < 80 else 'warning' if disk_percent < 95 else 'critical',
            'usage_percent': disk_percent,
            'total': format_file_size(disk.total),
            'free': format_file_size(disk.free),
            'details': f"Disk usage: {disk_percent}%"
        }

        # Database connectivity check
        try:
            # This would test actual database connection
            health_status['checks']['database'] = {
                'status': 'healthy',
                'details': 'Database connection successful'
            }
        except Exception as e:
            health_status['checks']['database'] = {
                'status': 'critical',
                'details': f'Database connection failed: {str(e)}'
            }

        # Network connectivity check
        try:
            response = requests.get('https://httpbin.org/status/200', timeout=5)
            if response.status_code == 200:
                health_status['checks']['network'] = {
                    'status': 'healthy',
                    'details': 'Internet connectivity available'
                }
            else:
                health_status['checks']['network'] = {
                    'status': 'warning',
                    'details': f'Network response: {response.status_code}'
                }
        except Exception as e:
            health_status['checks']['network'] = {
                'status': 'warning',
                'details': f'Network connectivity issues: {str(e)}'
            }

        # Calculate overall health score
        check_statuses = [check['status'] for check in health_status['checks'].values()]
        critical_count = check_statuses.count('critical')
        warning_count = check_statuses.count('warning')

        if critical_count > 0:
            health_status['status'] = 'critical'
            health_status['overall_score'] = max(0, 100 - (critical_count * 30) - (warning_count * 10))
        elif warning_count > 0:
            health_status['status'] = 'warning'
            health_status['overall_score'] = max(50, 100 - (warning_count * 15))

        return create_success_response("Health check completed", 200, health_status)

    except Exception as e:
        logger.error(f"Error during health check: {str(e)}")
        return create_error_response("Health check failed", 500)

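# The CPU/memory/disk checks above all use the same three-way banding. A pure
# helper like this (a sketch, not part of the original module) makes the
# thresholds testable in isolation:

def resource_status(usage_percent: float,
                    warn_at: float = 80.0,
                    critical_at: float = 95.0) -> str:
    """Map a usage percentage to the status bands used by health_check."""
    if usage_percent < warn_at:
        return 'healthy'
    if usage_percent < critical_at:
        return 'warning'
    return 'critical'

assert resource_status(42.0) == 'healthy'
assert resource_status(85.0) == 'warning'
assert resource_status(97.5) == 'critical'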
@diagnostics_bp.route('/diagnostics/system', methods=['GET'])
@require_auth
@handle_api_errors
def get_system_info() -> Tuple[Any, int]:
    """
    Get detailed system information.

    Returns:
        JSON response with system information
    """
    try:
        system_info = {
            'platform': {
                'system': platform.system(),
                'release': platform.release(),
                'version': platform.version(),
                'machine': platform.machine(),
                'processor': platform.processor(),
                'architecture': platform.architecture()
            },
            'python': {
                'version': sys.version,
                'executable': sys.executable,
                'path': sys.path[:5]  # First 5 paths only
            },
            'resources': {
                'cpu': {
                    'count_logical': psutil.cpu_count(logical=True),
                    'count_physical': psutil.cpu_count(logical=False),
                    'frequency': psutil.cpu_freq()._asdict() if psutil.cpu_freq() else None,
                    'usage_percent': psutil.cpu_percent(interval=1),
                    'usage_per_cpu': psutil.cpu_percent(interval=1, percpu=True)
                },
                'memory': {
                    **psutil.virtual_memory()._asdict(),
                    'swap': psutil.swap_memory()._asdict()
                },
                'disk': {
                    'usage': psutil.disk_usage('/')._asdict(),
                    'io_counters': psutil.disk_io_counters()._asdict() if psutil.disk_io_counters() else None
                },
                'network': {
                    'io_counters': psutil.net_io_counters()._asdict(),
                    'connections': len(psutil.net_connections()),
                    'interfaces': {name: [addr._asdict() for addr in addrs] for name, addrs in psutil.net_if_addrs().items()}
                }
            },
            'process': {
                'pid': os.getpid(),
                'memory_info': psutil.Process().memory_info()._asdict(),
                'cpu_percent': psutil.Process().cpu_percent(),
                'num_threads': psutil.Process().num_threads(),
                'create_time': format_datetime(datetime.fromtimestamp(psutil.Process().create_time())),
                'open_files': len(psutil.Process().open_files())
            },
            'uptime': {
                'boot_time': format_datetime(datetime.fromtimestamp(psutil.boot_time())),
                'uptime_seconds': time.time() - psutil.boot_time()
            }
        }

        return create_success_response("System information retrieved", 200, system_info)

    except Exception as e:
        logger.error(f"Error getting system info: {str(e)}")
        return create_error_response("Failed to get system information", 500)

@diagnostics_bp.route('/diagnostics/performance', methods=['GET'])
@require_auth
@handle_api_errors
@validate_query_params(
    allowed_params=['hours', 'metric'],
    param_types={'hours': int}
)
def get_performance_metrics() -> Tuple[Any, int]:
    """
    Get performance metrics and history.

    Query Parameters:
    - hours: Hours of history to retrieve (default: 24, max: 168)
    - metric: Specific metric to retrieve (optional)

    Returns:
        JSON response with performance metrics
    """
    hours = min(request.args.get('hours', 24, type=int), 168)  # Max 1 week
    metric = request.args.get('metric')

    try:
        # Current performance metrics
        current_metrics = {
            'timestamp': datetime.now().isoformat(),
            'cpu': {
                'usage_percent': psutil.cpu_percent(interval=1),
                'load_average': os.getloadavg() if hasattr(os, 'getloadavg') else None
            },
            'memory': {
                'usage_percent': psutil.virtual_memory().percent,
                'available_gb': psutil.virtual_memory().available / (1024**3)
            },
            'disk': {
                'usage_percent': psutil.disk_usage('/').percent,
                'free_gb': psutil.disk_usage('/').free / (1024**3)
            },
            'network': {
                'bytes_sent': psutil.net_io_counters().bytes_sent,
                'bytes_recv': psutil.net_io_counters().bytes_recv,
                'packets_sent': psutil.net_io_counters().packets_sent,
                'packets_recv': psutil.net_io_counters().packets_recv
            }
        }

        # Historical data
        historical_data = performance_manager.get_performance_history(
            hours=hours,
            metric=metric
        )

        response_data = {
            'current': current_metrics,
            'history': historical_data,
            'summary': {
                'period_hours': hours,
                'data_points': len(historical_data),
                'metric_filter': metric
            }
        }

        return create_success_response("Performance metrics retrieved", 200, response_data)

    except Exception as e:
        logger.error(f"Error getting performance metrics: {str(e)}")
        return create_error_response("Failed to get performance metrics", 500)

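# Design note: the 'network' block above calls psutil.net_io_counters() four
# times, each returning a fresh snapshot, so the four numbers are not taken at
# the same instant. A sketch of snapshotting once and reusing it:

import psutil  # already imported at module level; repeated here so the sketch stands alone

net = psutil.net_io_counters()
network_metrics = {
    'bytes_sent': net.bytes_sent,
    'bytes_recv': net.bytes_recv,
    'packets_sent': net.packets_sent,
    'packets_recv': net.packets_recv,
}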
@diagnostics_bp.route('/diagnostics/errors', methods=['GET'])
@require_auth
@handle_api_errors
@validate_query_params(
    allowed_params=['hours', 'level', 'limit'],
    param_types={'hours': int, 'limit': int}
)
def get_recent_errors() -> Tuple[Any, int]:
    """
    Get recent errors and error statistics.

    Query Parameters:
    - hours: Hours of errors to retrieve (default: 24, max: 168)
    - level: Error level filter (error, warning, critical)
    - limit: Maximum number of errors to return (default: 100, max: 1000)

    Returns:
        JSON response with recent errors
    """
    hours = min(request.args.get('hours', 24, type=int), 168)
    level = request.args.get('level')
    limit = min(request.args.get('limit', 100, type=int), 1000)

    try:
        # Get recent errors
        errors = error_manager.get_recent_errors(
            hours=hours,
            level=level,
            limit=limit
        )

        # Get error statistics
        error_stats = error_manager.get_error_stats()

        response_data = {
            'errors': errors,
            'statistics': error_stats,
            'summary': {
                'period_hours': hours,
                'level_filter': level,
                'total_returned': len(errors),
                'limit': limit
            }
        }

        return create_success_response("Recent errors retrieved", 200, response_data)

    except Exception as e:
        logger.error(f"Error getting recent errors: {str(e)}")
        return create_error_response("Failed to get recent errors", 500)

@diagnostics_bp.route('/diagnostics/errors', methods=['DELETE'])
@require_auth
@handle_api_errors
def clear_errors() -> Tuple[Any, int]:
    """
    Clear error log.

    Returns:
        JSON response with clear operation result
    """
    try:
        success = error_manager.clear_errors()

        if success:
            logger.info("Error log cleared")
            return create_success_response("Error log cleared successfully")
        else:
            return create_error_response("Failed to clear error log", 500)

    except Exception as e:
        logger.error(f"Error clearing error log: {str(e)}")
        return create_error_response("Failed to clear error log", 500)

@diagnostics_bp.route('/diagnostics/network', methods=['GET'])
@require_auth
@handle_api_errors
def test_network_connectivity() -> Tuple[Any, int]:
    """
    Test network connectivity to various services.

    Returns:
        JSON response with network connectivity results
    """
    try:
        test_urls = [
            'https://google.com',
            'https://github.com',
            'https://pypi.org',
            'https://httpbin.org/status/200'
        ]

        results = []

        for url in test_urls:
            try:
                start_time = time.time()
                response = requests.get(url, timeout=10)
                response_time = time.time() - start_time

                results.append({
                    'url': url,
                    'status': 'success',
                    'status_code': response.status_code,
                    'response_time_ms': round(response_time * 1000, 2),
                    'accessible': response.status_code == 200
                })

            except requests.exceptions.Timeout:
                results.append({
                    'url': url,
                    'status': 'timeout',
                    'error': 'Request timed out',
                    'accessible': False
                })
            except Exception as e:
                results.append({
                    'url': url,
                    'status': 'error',
                    'error': str(e),
                    'accessible': False
                })

        # Network interface information
        interfaces = {}
        for interface, addresses in psutil.net_if_addrs().items():
            interfaces[interface] = [addr._asdict() for addr in addresses]

        # Network I/O statistics
        net_io = psutil.net_io_counters()._asdict()

        response_data = {
            'connectivity_tests': results,
            'interfaces': interfaces,
            'io_statistics': net_io,
            'summary': {
                'total_tests': len(results),
                'successful': len([r for r in results if r['accessible']]),
                'failed': len([r for r in results if not r['accessible']])
            }
        }

        return create_success_response("Network connectivity test completed", 200, response_data)

    except Exception as e:
        logger.error(f"Error testing network connectivity: {str(e)}")
        return create_error_response("Failed to test network connectivity", 500)

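# Design note: each requests.get() above opens a fresh TCP/TLS connection.
# When the same hosts are probed repeatedly, a shared requests.Session reuses
# connections and gives more stable latency numbers. A minimal sketch:

import time
import requests

session = requests.Session()
start = time.time()
response = session.get('https://github.com', timeout=10)
print(round((time.time() - start) * 1000, 2), 'ms', response.status_code)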
@diagnostics_bp.route('/diagnostics/logs', methods=['GET'])
@require_auth
@handle_api_errors
@validate_query_params(
    allowed_params=['lines', 'level', 'component'],
    param_types={'lines': int}
)
def get_application_logs() -> Tuple[Any, int]:
    """
    Get recent application logs.

    Query Parameters:
    - lines: Number of log lines to retrieve (default: 100, max: 1000)
    - level: Log level filter (debug, info, warning, error, critical)
    - component: Component filter (optional)

    Returns:
        JSON response with application logs
    """
    lines = min(request.args.get('lines', 100, type=int), 1000)
    level = request.args.get('level')
    component = request.args.get('component')

    try:
        # This would read from actual log files
        log_entries = []

        # For demonstration, return sample log structure
        response_data = {
            'logs': log_entries,
            'summary': {
                'lines_requested': lines,
                'level_filter': level,
                'component_filter': component,
                'total_returned': len(log_entries)
            }
        }

        return create_success_response("Application logs retrieved", 200, response_data)

    except Exception as e:
        logger.error(f"Error getting application logs: {str(e)}")
        return create_error_response("Failed to get application logs", 500)

@diagnostics_bp.route('/diagnostics/report', methods=['POST'])
@require_auth
@handle_api_errors
def generate_diagnostic_report() -> Tuple[Any, int]:
    """
    Generate comprehensive diagnostic report.

    Returns:
        JSON response with diagnostic report
    """
    try:
        report = {
            'generated_at': datetime.now().isoformat(),
            'report_id': f"diag_{int(time.time())}",
            'sections': {}
        }

        # System information
        report['sections']['system'] = {
            'platform': platform.platform(),
            'python_version': sys.version,
            'cpu_count': psutil.cpu_count(),
            'memory_total_gb': round(psutil.virtual_memory().total / (1024**3), 2),
            'disk_total_gb': round(psutil.disk_usage('/').total / (1024**3), 2)
        }

        # Current resource usage
        report['sections']['resources'] = {
            'cpu_percent': psutil.cpu_percent(interval=1),
            'memory_percent': psutil.virtual_memory().percent,
            'disk_percent': psutil.disk_usage('/').percent,
            'load_average': os.getloadavg() if hasattr(os, 'getloadavg') else None
        }

        # Error summary
        error_stats = error_manager.get_error_stats()
        report['sections']['errors'] = error_stats

        # Performance summary
        performance_metrics = performance_manager.get_performance_metrics()
        report['sections']['performance'] = performance_metrics

        # Network status
        report['sections']['network'] = {
            'interfaces_count': len(psutil.net_if_addrs()),
            'connections_count': len(psutil.net_connections()),
            'bytes_sent': psutil.net_io_counters().bytes_sent,
            'bytes_recv': psutil.net_io_counters().bytes_recv
        }

        logger.info(f"Diagnostic report generated: {report['report_id']}")
        return create_success_response("Diagnostic report generated", 200, report)

    except Exception as e:
        logger.error(f"Error generating diagnostic report: {str(e)}")
        return create_error_response("Failed to generate diagnostic report", 500)

@diagnostics_bp.route('/diagnostics/ping', methods=['GET'])
@optional_auth
@handle_api_errors
def ping() -> Tuple[Any, int]:
    """
    Simple ping endpoint for health monitoring.

    Returns:
        JSON response with ping result
    """
    return create_success_response("pong", 200, {
        'timestamp': datetime.now().isoformat(),
        'status': 'alive'
    })
@ -1,640 +0,0 @@
"""
Download Management API Endpoints

This module provides REST API endpoints for download operations,
including queue management, progress tracking, and download history.
"""

from flask import Blueprint, request
from typing import Dict, List, Any, Optional
import uuid
from datetime import datetime

from ...shared.auth_decorators import require_auth, optional_auth
from ...shared.error_handlers import handle_api_errors, APIException, NotFoundError, ValidationError
from ...shared.validators import validate_json_input, validate_id_parameter, validate_pagination_params
from ...shared.response_helpers import (
    create_success_response, create_paginated_response, format_download_response,
    extract_pagination_params, create_batch_response
)

# Import download components (these imports would need to be adjusted based on actual structure)
try:
    from download_manager import download_queue, download_manager, DownloadItem
    from database_manager import episode_repository, anime_repository
except ImportError:
    # Fallback for development/testing
    download_queue = None
    download_manager = None
    DownloadItem = None
    episode_repository = None
    anime_repository = None


# Blueprint for download management endpoints
downloads_bp = Blueprint('downloads', __name__, url_prefix='/api/v1/downloads')

@downloads_bp.route('', methods=['GET'])
@handle_api_errors
@validate_pagination_params
@optional_auth
def list_downloads() -> Dict[str, Any]:
    """
    Get all downloads with optional filtering and pagination.

    Query Parameters:
    - status: Filter by download status (pending, downloading, completed, failed, paused)
    - anime_id: Filter by anime ID
    - episode_id: Filter by episode ID
    - active_only: Show only active downloads (true/false)
    - page: Page number (default: 1)
    - per_page: Items per page (default: 50, max: 1000)

    Returns:
        Paginated list of downloads
    """
    if not download_manager:
        raise APIException("Download manager not available", 503)

    # Extract filters
    status_filter = request.args.get('status')
    anime_id = request.args.get('anime_id')
    episode_id = request.args.get('episode_id')
    active_only = request.args.get('active_only', 'false').lower() == 'true'

    # Validate filters
    valid_statuses = ['pending', 'downloading', 'completed', 'failed', 'paused', 'cancelled']
    if status_filter and status_filter not in valid_statuses:
        raise ValidationError(f"Status must be one of: {', '.join(valid_statuses)}")

    if anime_id:
        try:
            anime_id = int(anime_id)
        except ValueError:
            raise ValidationError("anime_id must be a valid integer")

    if episode_id:
        try:
            episode_id = int(episode_id)
        except ValueError:
            raise ValidationError("episode_id must be a valid integer")

    # Get pagination parameters
    page, per_page = extract_pagination_params()

    # Get downloads with filters
    downloads = download_manager.get_downloads(
        status_filter=status_filter,
        anime_id=anime_id,
        episode_id=episode_id,
        active_only=active_only
    )

    # Format download data
    formatted_downloads = [format_download_response(download.__dict__) for download in downloads]

    # Apply pagination
    total = len(formatted_downloads)
    start_idx = (page - 1) * per_page
    end_idx = start_idx + per_page
    paginated_downloads = formatted_downloads[start_idx:end_idx]

    return create_paginated_response(
        data=paginated_downloads,
        page=page,
        per_page=per_page,
        total=total,
        endpoint='downloads.list_downloads'
    )

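# Note on the pagination above: every matching download is fetched and
# formatted before the page slice is taken, so memory cost grows with the full
# result set rather than the page size. The slice math itself, for reference:
#
#     page, per_page = 2, 50
#     start_idx = (page - 1) * per_page  # 50
#     end_idx = start_idx + per_page     # 100; list slicing clamps past the end
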
@downloads_bp.route('/<int:download_id>', methods=['GET'])
@handle_api_errors
@validate_id_parameter('download_id')
@optional_auth
def get_download(download_id: int) -> Dict[str, Any]:
    """
    Get specific download by ID.

    Args:
        download_id: Unique identifier for the download

    Returns:
        Download details with progress information
    """
    if not download_manager:
        raise APIException("Download manager not available", 503)

    download = download_manager.get_download_by_id(download_id)
    if not download:
        raise NotFoundError("Download not found")

    # Format download data
    download_data = format_download_response(download.__dict__)

    # Add detailed progress information
    progress_info = download_manager.get_download_progress(download_id)
    if progress_info:
        download_data['progress_details'] = progress_info

    return create_success_response(download_data)

@downloads_bp.route('', methods=['POST'])
@handle_api_errors
@validate_json_input(
    required_fields=['episode_id'],
    optional_fields=['priority', 'quality', 'subtitle_language', 'download_path'],
    field_types={
        'episode_id': int,
        'priority': int,
        'quality': str,
        'subtitle_language': str,
        'download_path': str
    }
)
@require_auth
def create_download() -> Dict[str, Any]:
    """
    Create a new download request.

    Required Fields:
    - episode_id: ID of the episode to download

    Optional Fields:
    - priority: Download priority (1-10, higher is more priority)
    - quality: Preferred quality (720p, 1080p, etc.)
    - subtitle_language: Preferred subtitle language
    - download_path: Custom download path

    Returns:
        Created download details
    """
    if not download_manager or not episode_repository:
        raise APIException("Download manager not available", 503)

    data = request.get_json()
    episode_id = data['episode_id']

    # Validate episode exists
    episode = episode_repository.get_episode_by_id(episode_id)
    if not episode:
        raise ValidationError("Episode not found")

    # Check if episode is already downloaded
    if episode.status == 'downloaded':
        raise ValidationError("Episode is already downloaded")

    # Check if download already exists for this episode
    existing_download = download_manager.get_download_by_episode(episode_id)
    if existing_download and existing_download.status in ['pending', 'downloading']:
        raise ValidationError("Download already in progress for this episode")

    # Validate priority
    priority = data.get('priority', 5)
    if not 1 <= priority <= 10:
        raise ValidationError("Priority must be between 1 and 10")

    # Create download item
    try:
        download_item = DownloadItem(
            download_id=str(uuid.uuid4()),
            episode_id=episode_id,
            anime_id=episode.anime_id,
            priority=priority,
            quality=data.get('quality'),
            subtitle_language=data.get('subtitle_language'),
            download_path=data.get('download_path'),
            status='pending',
            created_at=datetime.utcnow()
        )
    except Exception as e:
        raise ValidationError(f"Invalid download data: {str(e)}")

    # Add to download queue
    success = download_queue.add_download(download_item)
    if not success:
        raise APIException("Failed to create download", 500)

    # Return created download
    download_data = format_download_response(download_item.__dict__)
    return create_success_response(
        data=download_data,
        message="Download queued successfully",
        status_code=201
    )

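# Note on the handler above: the fetch/pause/resume/cancel/retry routes all
# declare <int:download_id>, but create_download assigns
# download_id=str(uuid.uuid4()), a string. Unless DownloadItem or the queue
# maps UUIDs to integer IDs internally, downloads created here would not be
# addressable through the integer-typed routes; worth verifying against the
# actual DownloadItem model.
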
@downloads_bp.route('/<int:download_id>/pause', methods=['POST'])
@handle_api_errors
@validate_id_parameter('download_id')
@require_auth
def pause_download(download_id: int) -> Dict[str, Any]:
    """
    Pause a download.

    Args:
        download_id: Unique identifier for the download

    Returns:
        Updated download status
    """
    if not download_manager:
        raise APIException("Download manager not available", 503)

    download = download_manager.get_download_by_id(download_id)
    if not download:
        raise NotFoundError("Download not found")

    if download.status not in ['pending', 'downloading']:
        raise ValidationError(f"Cannot pause download with status '{download.status}'")

    success = download_manager.pause_download(download_id)
    if not success:
        raise APIException("Failed to pause download", 500)

    # Get updated download
    updated_download = download_manager.get_download_by_id(download_id)
    download_data = format_download_response(updated_download.__dict__)

    return create_success_response(
        data=download_data,
        message="Download paused successfully"
    )


@downloads_bp.route('/<int:download_id>/resume', methods=['POST'])
@handle_api_errors
@validate_id_parameter('download_id')
@require_auth
def resume_download(download_id: int) -> Dict[str, Any]:
    """
    Resume a paused download.

    Args:
        download_id: Unique identifier for the download

    Returns:
        Updated download status
    """
    if not download_manager:
        raise APIException("Download manager not available", 503)

    download = download_manager.get_download_by_id(download_id)
    if not download:
        raise NotFoundError("Download not found")

    if download.status != 'paused':
        raise ValidationError(f"Cannot resume download with status '{download.status}'")

    success = download_manager.resume_download(download_id)
    if not success:
        raise APIException("Failed to resume download", 500)

    # Get updated download
    updated_download = download_manager.get_download_by_id(download_id)
    download_data = format_download_response(updated_download.__dict__)

    return create_success_response(
        data=download_data,
        message="Download resumed successfully"
    )

@downloads_bp.route('/<int:download_id>/cancel', methods=['POST'])
@handle_api_errors
@validate_id_parameter('download_id')
@require_auth
def cancel_download(download_id: int) -> Dict[str, Any]:
    """
    Cancel a download.

    Args:
        download_id: Unique identifier for the download

    Query Parameters:
    - delete_partial: Set to 'true' to delete partially downloaded files

    Returns:
        Cancellation confirmation
    """
    if not download_manager:
        raise APIException("Download manager not available", 503)

    download = download_manager.get_download_by_id(download_id)
    if not download:
        raise NotFoundError("Download not found")

    if download.status in ['completed', 'cancelled']:
        raise ValidationError(f"Cannot cancel download with status '{download.status}'")

    delete_partial = request.args.get('delete_partial', 'false').lower() == 'true'

    success = download_manager.cancel_download(download_id, delete_partial=delete_partial)
    if not success:
        raise APIException("Failed to cancel download", 500)

    message = "Download cancelled successfully"
    if delete_partial:
        message += " (partial files deleted)"

    return create_success_response(message=message)

@downloads_bp.route('/<int:download_id>/retry', methods=['POST'])
@handle_api_errors
@validate_id_parameter('download_id')
@require_auth
def retry_download(download_id: int) -> Dict[str, Any]:
    """
    Retry a failed download.

    Args:
        download_id: Unique identifier for the download

    Returns:
        Updated download status
    """
    if not download_manager:
        raise APIException("Download manager not available", 503)

    download = download_manager.get_download_by_id(download_id)
    if not download:
        raise NotFoundError("Download not found")

    if download.status != 'failed':
        raise ValidationError(f"Cannot retry download with status '{download.status}'")

    success = download_manager.retry_download(download_id)
    if not success:
        raise APIException("Failed to retry download", 500)

    # Get updated download
    updated_download = download_manager.get_download_by_id(download_id)
    download_data = format_download_response(updated_download.__dict__)

    return create_success_response(
        data=download_data,
        message="Download queued for retry"
    )

@downloads_bp.route('/bulk', methods=['POST'])
@handle_api_errors
@validate_json_input(
    required_fields=['action', 'download_ids'],
    optional_fields=['delete_partial'],
    field_types={
        'action': str,
        'download_ids': list,
        'delete_partial': bool
    }
)
@require_auth
def bulk_download_operation() -> Dict[str, Any]:
    """
    Perform bulk operations on multiple downloads.

    Required Fields:
    - action: Operation to perform (pause, resume, cancel, retry)
    - download_ids: List of download IDs to operate on

    Optional Fields:
    - delete_partial: For cancel action, whether to delete partial files

    Returns:
        Results of the bulk operation
    """
    if not download_manager:
        raise APIException("Download manager not available", 503)

    data = request.get_json()
    action = data['action']
    download_ids = data['download_ids']
    delete_partial = data.get('delete_partial', False)

    # Validate action
    valid_actions = ['pause', 'resume', 'cancel', 'retry']
    if action not in valid_actions:
        raise ValidationError(f"Invalid action. Must be one of: {', '.join(valid_actions)}")

    # Validate download_ids
    if not isinstance(download_ids, list) or not download_ids:
        raise ValidationError("download_ids must be a non-empty list")

    if len(download_ids) > 50:
        raise ValidationError("Cannot operate on more than 50 downloads at once")

    # Validate download IDs are integers
    try:
        download_ids = [int(did) for did in download_ids]
    except ValueError:
        raise ValidationError("All download_ids must be valid integers")

    # Perform bulk operation
    successful_items = []
    failed_items = []

    for download_id in download_ids:
        try:
            if action == 'pause':
                success = download_manager.pause_download(download_id)
            elif action == 'resume':
                success = download_manager.resume_download(download_id)
            elif action == 'cancel':
                success = download_manager.cancel_download(download_id, delete_partial=delete_partial)
            elif action == 'retry':
                success = download_manager.retry_download(download_id)

            if success:
                successful_items.append({'download_id': download_id, 'action': action})
            else:
                failed_items.append({'download_id': download_id, 'error': 'Operation failed'})

        except Exception as e:
            failed_items.append({'download_id': download_id, 'error': str(e)})

    return create_batch_response(
        successful_items=successful_items,
        failed_items=failed_items,
        message=f"Bulk {action} operation completed"
    )

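# Hedged sketch of a bulk pause request against the endpoint above; at most
# 50 IDs are accepted per call. URL, IDs, and auth handling are assumptions.
#
#     import requests
#
#     resp = requests.post(
#         "http://127.0.0.1:5000/api/v1/downloads/bulk",
#         json={"action": "pause", "download_ids": [1, 2, 3]},
#         timeout=30,
#     )
#     print(resp.json())
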
@downloads_bp.route('/queue', methods=['GET'])
@handle_api_errors
@optional_auth
def get_download_queue() -> Dict[str, Any]:
    """
    Get current download queue status.

    Returns:
        Download queue information including active downloads and queue statistics
    """
    if not download_queue:
        raise APIException("Download queue not available", 503)

    queue_info = download_queue.get_queue_status()

    return create_success_response(
        data={
            'queue_size': queue_info.get('queue_size', 0),
            'active_downloads': queue_info.get('active_downloads', 0),
            'max_concurrent': queue_info.get('max_concurrent', 0),
            'paused_downloads': queue_info.get('paused_downloads', 0),
            'failed_downloads': queue_info.get('failed_downloads', 0),
            'completed_today': queue_info.get('completed_today', 0),
            'queue_items': queue_info.get('queue_items', [])
        }
    )

@downloads_bp.route('/queue/pause', methods=['POST'])
@handle_api_errors
@require_auth
def pause_download_queue() -> Dict[str, Any]:
    """
    Pause the entire download queue.

    Returns:
        Queue pause confirmation
    """
    if not download_queue:
        raise APIException("Download queue not available", 503)

    success = download_queue.pause_queue()
    if not success:
        raise APIException("Failed to pause download queue", 500)

    return create_success_response(message="Download queue paused")


@downloads_bp.route('/queue/resume', methods=['POST'])
@handle_api_errors
@require_auth
def resume_download_queue() -> Dict[str, Any]:
    """
    Resume the download queue.

    Returns:
        Queue resume confirmation
    """
    if not download_queue:
        raise APIException("Download queue not available", 503)

    success = download_queue.resume_queue()
    if not success:
        raise APIException("Failed to resume download queue", 500)

    return create_success_response(message="Download queue resumed")

@downloads_bp.route('/queue/clear', methods=['POST'])
@handle_api_errors
@require_auth
def clear_download_queue() -> Dict[str, Any]:
    """
    Clear completed and failed downloads from the queue.

    Query Parameters:
        - include_failed: Set to 'true' to also clear failed downloads

    Returns:
        Queue clear confirmation
    """
    if not download_queue:
        raise APIException("Download queue not available", 503)

    include_failed = request.args.get('include_failed', 'false').lower() == 'true'

    cleared_count = download_queue.clear_completed(include_failed=include_failed)

    message = f"Cleared {cleared_count} completed downloads"
    if include_failed:
        message += " and failed downloads"

    return create_success_response(
        data={'cleared_count': cleared_count},
        message=message
    )


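# Illustrative call (a sketch; host and mount point are assumptions):
#
#   curl -X POST -H "Authorization: Bearer <token>" \
#       "http://127.0.0.1:5000/api/v1/downloads/queue/clear?include_failed=true"
#
# This clears completed downloads and, because include_failed=true, failed
# ones as well, returning cleared_count in the response body.
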
@downloads_bp.route('/history', methods=['GET'])
@handle_api_errors
@validate_pagination_params
@optional_auth
def get_download_history() -> Dict[str, Any]:
    """
    Get download history with optional filtering.

    Query Parameters:
        - status: Filter by status (completed, failed)
        - anime_id: Filter by anime ID
        - date_from: Filter from date (ISO format)
        - date_to: Filter to date (ISO format)
        - page: Page number (default: 1)
        - per_page: Items per page (default: 50, max: 1000)

    Returns:
        Paginated download history
    """
    if not download_manager:
        raise APIException("Download manager not available", 503)

    # Extract filters
    status_filter = request.args.get('status')
    anime_id = request.args.get('anime_id')
    date_from = request.args.get('date_from')
    date_to = request.args.get('date_to')

    # Validate filters
    if status_filter and status_filter not in ['completed', 'failed']:
        raise ValidationError("Status filter must be 'completed' or 'failed'")

    if anime_id:
        try:
            anime_id = int(anime_id)
        except ValueError:
            raise ValidationError("anime_id must be a valid integer")

    # Validate dates
    if date_from:
        try:
            datetime.fromisoformat(date_from.replace('Z', '+00:00'))
        except ValueError:
            raise ValidationError("date_from must be in ISO format")

    if date_to:
        try:
            datetime.fromisoformat(date_to.replace('Z', '+00:00'))
        except ValueError:
            raise ValidationError("date_to must be in ISO format")

    # Get pagination parameters
    page, per_page = extract_pagination_params()

    # Get download history
    history = download_manager.get_download_history(
        status_filter=status_filter,
        anime_id=anime_id,
        date_from=date_from,
        date_to=date_to
    )

    # Format history data
    formatted_history = [format_download_response(download.__dict__) for download in history]

    # Apply pagination
    total = len(formatted_history)
    start_idx = (page - 1) * per_page
    end_idx = start_idx + per_page
    paginated_history = formatted_history[start_idx:end_idx]

    return create_paginated_response(
        data=paginated_history,
        page=page,
        per_page=per_page,
        total=total,
        endpoint='downloads.get_download_history'
    )


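# A note on the pagination used above: filtering happens in the repository,
# but paging is done in memory by slicing the formatted list. For example,
# page=3 with per_page=50 yields start_idx = (3 - 1) * 50 = 100 and
# end_idx = 150, i.e. items 100..149 of the filtered result.
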
@@ -1,584 +0,0 @@
"""
|
|
||||||
Episode Management API Endpoints
|
|
||||||
|
|
||||||
This module provides REST API endpoints for episode CRUD operations,
|
|
||||||
including episode status management and metadata operations.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from flask import Blueprint, request
|
|
||||||
from typing import Dict, List, Any, Optional
|
|
||||||
import uuid
|
|
||||||
|
|
||||||
from ...shared.auth_decorators import require_auth, optional_auth
|
|
||||||
from ...shared.error_handlers import handle_api_errors, APIException, NotFoundError, ValidationError
|
|
||||||
from ...shared.validators import validate_json_input, validate_id_parameter, validate_pagination_params
|
|
||||||
from ...shared.response_helpers import (
|
|
||||||
create_success_response, create_paginated_response, format_episode_response,
|
|
||||||
extract_pagination_params, create_batch_response
|
|
||||||
)
|
|
||||||
|
|
||||||
# Import database components (these imports would need to be adjusted based on actual structure)
|
|
||||||
try:
|
|
||||||
from database_manager import episode_repository, anime_repository, EpisodeMetadata
|
|
||||||
except ImportError:
|
|
||||||
# Fallback for development/testing
|
|
||||||
episode_repository = None
|
|
||||||
anime_repository = None
|
|
||||||
EpisodeMetadata = None
|
|
||||||
|
|
||||||
|
|
||||||
# Blueprint for episode management endpoints
|
|
||||||
episodes_bp = Blueprint('episodes', __name__, url_prefix='/api/v1/episodes')
|
|
||||||
|
|
||||||
|
|
||||||
@episodes_bp.route('', methods=['GET'])
|
|
||||||
@handle_api_errors
|
|
||||||
@validate_pagination_params
|
|
||||||
@optional_auth
|
|
||||||
def list_episodes() -> Dict[str, Any]:
|
|
||||||
"""
|
|
||||||
Get all episodes with optional filtering and pagination.
|
|
||||||
|
|
||||||
Query Parameters:
|
|
||||||
- anime_id: Filter by anime ID
|
|
||||||
- status: Filter by episode status
|
|
||||||
- downloaded: Filter by download status (true/false)
|
|
||||||
- episode_number: Filter by episode number
|
|
||||||
- search: Search in episode title
|
|
||||||
- page: Page number (default: 1)
|
|
||||||
- per_page: Items per page (default: 50, max: 1000)
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Paginated list of episodes
|
|
||||||
"""
|
|
||||||
if not episode_repository:
|
|
||||||
raise APIException("Episode repository not available", 503)
|
|
||||||
|
|
||||||
# Extract filters
|
|
||||||
anime_id = request.args.get('anime_id')
|
|
||||||
status_filter = request.args.get('status')
|
|
||||||
downloaded_filter = request.args.get('downloaded')
|
|
||||||
episode_number = request.args.get('episode_number')
|
|
||||||
search_term = request.args.get('search', '').strip()
|
|
||||||
|
|
||||||
# Validate filters
|
|
||||||
if anime_id:
|
|
||||||
try:
|
|
||||||
anime_id = int(anime_id)
|
|
||||||
except ValueError:
|
|
||||||
raise ValidationError("anime_id must be a valid integer")
|
|
||||||
|
|
||||||
if downloaded_filter and downloaded_filter.lower() not in ['true', 'false']:
|
|
||||||
raise ValidationError("downloaded filter must be 'true' or 'false'")
|
|
||||||
|
|
||||||
if episode_number:
|
|
||||||
try:
|
|
||||||
episode_number = int(episode_number)
|
|
||||||
if episode_number < 1:
|
|
||||||
raise ValidationError("episode_number must be positive")
|
|
||||||
except ValueError:
|
|
||||||
raise ValidationError("episode_number must be a valid integer")
|
|
||||||
|
|
||||||
# Get pagination parameters
|
|
||||||
page, per_page = extract_pagination_params()
|
|
||||||
|
|
||||||
# Get episodes with filters
|
|
||||||
episodes = episode_repository.get_all_episodes(
|
|
||||||
anime_id=anime_id,
|
|
||||||
status_filter=status_filter,
|
|
||||||
downloaded_filter=downloaded_filter.lower() == 'true' if downloaded_filter else None,
|
|
||||||
episode_number=episode_number,
|
|
||||||
search_term=search_term
|
|
||||||
)
|
|
||||||
|
|
||||||
# Format episode data
|
|
||||||
formatted_episodes = [format_episode_response(episode.__dict__) for episode in episodes]
|
|
||||||
|
|
||||||
# Apply pagination
|
|
||||||
total = len(formatted_episodes)
|
|
||||||
start_idx = (page - 1) * per_page
|
|
||||||
end_idx = start_idx + per_page
|
|
||||||
paginated_episodes = formatted_episodes[start_idx:end_idx]
|
|
||||||
|
|
||||||
return create_paginated_response(
|
|
||||||
data=paginated_episodes,
|
|
||||||
page=page,
|
|
||||||
per_page=per_page,
|
|
||||||
total=total,
|
|
||||||
endpoint='episodes.list_episodes'
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
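# Illustrative query (a sketch; the path follows the blueprint's url_prefix):
# list the first page of downloaded episodes of anime 42 whose titles contain
# "finale":
#
#   GET /api/v1/episodes?anime_id=42&downloaded=true&search=finale&page=1&per_page=50
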
@episodes_bp.route('/<int:episode_id>', methods=['GET'])
@handle_api_errors
@validate_id_parameter('episode_id')
@optional_auth
def get_episode(episode_id: int) -> Dict[str, Any]:
    """
    Get specific episode by ID.

    Args:
        episode_id: Unique identifier for the episode

    Returns:
        Episode details with download information
    """
    if not episode_repository:
        raise APIException("Episode repository not available", 503)

    episode = episode_repository.get_episode_by_id(episode_id)
    if not episode:
        raise NotFoundError("Episode not found")

    # Format episode data
    episode_data = format_episode_response(episode.__dict__)

    # Add download information if available
    download_info = episode_repository.get_download_info(episode_id)
    if download_info:
        episode_data['download_info'] = download_info

    return create_success_response(episode_data)


@episodes_bp.route('', methods=['POST'])
@handle_api_errors
@validate_json_input(
    required_fields=['anime_id', 'episode_number', 'title', 'url'],
    optional_fields=['description', 'status', 'duration', 'air_date', 'custom_metadata'],
    field_types={
        'anime_id': int,
        'episode_number': int,
        'title': str,
        'url': str,
        'description': str,
        'status': str,
        'duration': int,
        'air_date': str,
        'custom_metadata': dict
    }
)
@require_auth
def create_episode() -> Dict[str, Any]:
    """
    Create a new episode record.

    Required Fields:
        - anime_id: ID of the anime this episode belongs to
        - episode_number: Episode number
        - title: Episode title
        - url: Episode URL

    Optional Fields:
        - description: Episode description
        - status: Episode status (available, unavailable, coming_soon, downloaded)
        - duration: Episode duration in minutes
        - air_date: Air date in ISO format
        - custom_metadata: Additional metadata as key-value pairs

    Returns:
        Created episode details
    """
    if not episode_repository or not anime_repository:
        raise APIException("Episode repository not available", 503)

    data = request.get_json()

    # Validate anime exists
    anime = anime_repository.get_anime_by_id(data['anime_id'])
    if not anime:
        raise ValidationError("Anime not found")

    # Validate status if provided
    valid_statuses = ['available', 'unavailable', 'coming_soon', 'downloaded']
    if 'status' in data and data['status'] not in valid_statuses:
        raise ValidationError(f"Status must be one of: {', '.join(valid_statuses)}")

    # Check if episode already exists for this anime
    existing_episode = episode_repository.get_episode_by_anime_and_number(
        data['anime_id'], data['episode_number']
    )
    if existing_episode:
        raise ValidationError(f"Episode {data['episode_number']} already exists for this anime")

    # Validate episode number
    if data['episode_number'] < 1:
        raise ValidationError("Episode number must be positive")

    # Create episode metadata object
    try:
        episode = EpisodeMetadata(
            episode_id=str(uuid.uuid4()),
            anime_id=data['anime_id'],
            episode_number=data['episode_number'],
            title=data['title'],
            url=data['url'],
            description=data.get('description'),
            status=data.get('status', 'available'),
            duration=data.get('duration'),
            air_date=data.get('air_date'),
            custom_metadata=data.get('custom_metadata', {})
        )
    except Exception as e:
        raise ValidationError(f"Invalid episode data: {str(e)}")

    # Save to database
    success = episode_repository.create_episode(episode)
    if not success:
        raise APIException("Failed to create episode", 500)

    # Return created episode
    episode_data = format_episode_response(episode.__dict__)
    return create_success_response(
        data=episode_data,
        message="Episode created successfully",
        status_code=201
    )


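# Illustrative request body for this endpoint (a sketch):
#
#   POST /api/v1/episodes
#   {
#       "anime_id": 42,
#       "episode_number": 13,
#       "title": "The Finale",
#       "url": "https://example.com/anime/42/episode-13",
#       "status": "available",
#       "duration": 24,
#       "custom_metadata": {"source": "manual"}
#   }
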
@episodes_bp.route('/<int:episode_id>', methods=['PUT'])
@handle_api_errors
@validate_id_parameter('episode_id')
@validate_json_input(
    optional_fields=['title', 'url', 'description', 'status', 'duration', 'air_date', 'custom_metadata'],
    field_types={
        'title': str,
        'url': str,
        'description': str,
        'status': str,
        'duration': int,
        'air_date': str,
        'custom_metadata': dict
    }
)
@require_auth
def update_episode(episode_id: int) -> Dict[str, Any]:
    """
    Update an existing episode record.

    Args:
        episode_id: Unique identifier for the episode

    Optional Fields:
        - title: Episode title
        - url: Episode URL
        - description: Episode description
        - status: Episode status (available, unavailable, coming_soon, downloaded)
        - duration: Episode duration in minutes
        - air_date: Air date in ISO format
        - custom_metadata: Additional metadata as key-value pairs

    Returns:
        Updated episode details
    """
    if not episode_repository:
        raise APIException("Episode repository not available", 503)

    data = request.get_json()

    # Get existing episode
    existing_episode = episode_repository.get_episode_by_id(episode_id)
    if not existing_episode:
        raise NotFoundError("Episode not found")

    # Validate status if provided
    valid_statuses = ['available', 'unavailable', 'coming_soon', 'downloaded']
    if 'status' in data and data['status'] not in valid_statuses:
        raise ValidationError(f"Status must be one of: {', '.join(valid_statuses)}")

    # Update fields
    update_fields = {}
    for field in ['title', 'url', 'description', 'status', 'duration', 'air_date']:
        if field in data:
            update_fields[field] = data[field]

    # Handle custom metadata update (merge instead of replace)
    if 'custom_metadata' in data:
        existing_metadata = existing_episode.custom_metadata or {}
        existing_metadata.update(data['custom_metadata'])
        update_fields['custom_metadata'] = existing_metadata

    # Perform update
    success = episode_repository.update_episode(episode_id, update_fields)
    if not success:
        raise APIException("Failed to update episode", 500)

    # Get updated episode
    updated_episode = episode_repository.get_episode_by_id(episode_id)
    episode_data = format_episode_response(updated_episode.__dict__)

    return create_success_response(
        data=episode_data,
        message="Episode updated successfully"
    )


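# Note on the custom_metadata semantics above: the handler merges rather than
# replaces. For example, if the stored metadata is {"source": "scan"} and the
# request sends {"quality": "1080p"}, the result is
# {"source": "scan", "quality": "1080p"}; sending an existing key overwrites
# only that key.
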
@episodes_bp.route('/<int:episode_id>', methods=['DELETE'])
@handle_api_errors
@validate_id_parameter('episode_id')
@require_auth
def delete_episode(episode_id: int) -> Dict[str, Any]:
    """
    Delete an episode record.

    Args:
        episode_id: Unique identifier for the episode

    Query Parameters:
        - delete_file: Set to 'true' to also delete the downloaded file

    Returns:
        Deletion confirmation
    """
    if not episode_repository:
        raise APIException("Episode repository not available", 503)

    # Check if episode exists
    existing_episode = episode_repository.get_episode_by_id(episode_id)
    if not existing_episode:
        raise NotFoundError("Episode not found")

    # Check if we should also delete the file
    delete_file = request.args.get('delete_file', 'false').lower() == 'true'

    # Perform deletion
    success = episode_repository.delete_episode(episode_id, delete_file=delete_file)
    if not success:
        raise APIException("Failed to delete episode", 500)

    message = f"Episode {existing_episode.episode_number} deleted successfully"
    if delete_file:
        message += " (including downloaded file)"

    return create_success_response(message=message)


@episodes_bp.route('/bulk/status', methods=['PUT'])
@handle_api_errors
@validate_json_input(
    required_fields=['episode_ids', 'status'],
    field_types={
        'episode_ids': list,
        'status': str
    }
)
@require_auth
def bulk_update_status() -> Dict[str, Any]:
    """
    Update status for multiple episodes.

    Required Fields:
        - episode_ids: List of episode IDs to update
        - status: New status for all episodes

    Returns:
        Results of the bulk operation
    """
    if not episode_repository:
        raise APIException("Episode repository not available", 503)

    data = request.get_json()
    episode_ids = data['episode_ids']
    new_status = data['status']

    # Validate status
    valid_statuses = ['available', 'unavailable', 'coming_soon', 'downloaded']
    if new_status not in valid_statuses:
        raise ValidationError(f"Status must be one of: {', '.join(valid_statuses)}")

    # Validate episode_ids
    if not isinstance(episode_ids, list) or not episode_ids:
        raise ValidationError("episode_ids must be a non-empty list")

    if len(episode_ids) > 100:
        raise ValidationError("Cannot operate on more than 100 episodes at once")

    # Validate episode IDs are integers
    try:
        episode_ids = [int(eid) for eid in episode_ids]
    except ValueError:
        raise ValidationError("All episode_ids must be valid integers")

    # Perform bulk update
    successful_items = []
    failed_items = []

    for episode_id in episode_ids:
        try:
            success = episode_repository.update_episode(episode_id, {'status': new_status})
            if success:
                successful_items.append({'episode_id': episode_id, 'new_status': new_status})
            else:
                failed_items.append({'episode_id': episode_id, 'error': 'Episode not found'})
        except Exception as e:
            failed_items.append({'episode_id': episode_id, 'error': str(e)})

    return create_batch_response(
        successful_items=successful_items,
        failed_items=failed_items,
        message=f"Bulk status update to '{new_status}' completed"
    )


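# Illustrative request (a sketch): mark three episodes as downloaded.
#
#   PUT /api/v1/episodes/bulk/status
#   {"episode_ids": [101, 102, 103], "status": "downloaded"}
#
# The batch response separates successful_items from failed_items, so partial
# failures do not abort the whole operation.
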
@episodes_bp.route('/anime/<int:anime_id>/sync', methods=['POST'])
@handle_api_errors
@validate_id_parameter('anime_id')
@require_auth
def sync_anime_episodes(anime_id: int) -> Dict[str, Any]:
    """
    Synchronize episodes for an anime by scanning the source.

    Args:
        anime_id: Unique identifier for the anime

    Returns:
        Synchronization results
    """
    if not episode_repository or not anime_repository:
        raise APIException("Episode repository not available", 503)

    # Check if anime exists
    anime = anime_repository.get_anime_by_id(anime_id)
    if not anime:
        raise NotFoundError("Anime not found")

    # This would trigger the episode scanning/syncing process
    try:
        sync_result = episode_repository.sync_episodes_for_anime(anime_id)

        return create_success_response(
            data={
                'anime_id': anime_id,
                'episodes_found': sync_result.get('episodes_found', 0),
                'episodes_added': sync_result.get('episodes_added', 0),
                'episodes_updated': sync_result.get('episodes_updated', 0),
                'episodes_removed': sync_result.get('episodes_removed', 0)
            },
            message=f"Episode sync completed for '{anime.name}'"
        )
    except Exception as e:
        raise APIException(f"Failed to sync episodes: {str(e)}", 500)


@episodes_bp.route('/<int:episode_id>/download', methods=['POST'])
@handle_api_errors
@validate_id_parameter('episode_id')
@require_auth
def queue_episode_download(episode_id: int) -> Dict[str, Any]:
    """
    Queue an episode for download.

    Args:
        episode_id: Unique identifier for the episode

    Returns:
        Download queue confirmation
    """
    if not episode_repository:
        raise APIException("Episode repository not available", 503)

    # Check if episode exists
    episode = episode_repository.get_episode_by_id(episode_id)
    if not episode:
        raise NotFoundError("Episode not found")

    # Check if episode is already downloaded
    if episode.status == 'downloaded':
        raise ValidationError("Episode is already downloaded")

    # Check if episode is available for download
    if episode.status not in ['available']:
        raise ValidationError(f"Episode status '{episode.status}' is not available for download")

    # Queue for download (this would integrate with the download system)
    try:
        from ...download_manager import download_queue
        download_id = download_queue.add_episode_download(episode_id)

        return create_success_response(
            data={'download_id': download_id},
            message=f"Episode {episode.episode_number} queued for download"
        )
    except Exception as e:
        raise APIException(f"Failed to queue download: {str(e)}", 500)


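# Design note: download_queue is imported inside the handler rather than at
# module level. A plausible reason (an assumption, not stated in the code) is
# to avoid a circular import between the episode controllers and the download
# manager, and to let this endpoint fail per-request instead of at import time
# when the download subsystem is unavailable.
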
@episodes_bp.route('/search', methods=['GET'])
@handle_api_errors
@validate_pagination_params
@optional_auth
def search_episodes() -> Dict[str, Any]:
    """
    Search episodes by title or other criteria.

    Query Parameters:
        - q: Search query (required)
        - anime_id: Limit search to specific anime
        - status: Filter by episode status
        - page: Page number (default: 1)
        - per_page: Items per page (default: 50, max: 1000)

    Returns:
        Paginated search results
    """
    if not episode_repository:
        raise APIException("Episode repository not available", 503)

    search_term = request.args.get('q', '').strip()
    if not search_term:
        raise ValidationError("Search term 'q' is required")

    if len(search_term) < 2:
        raise ValidationError("Search term must be at least 2 characters long")

    # Get additional filters
    anime_id = request.args.get('anime_id')
    status_filter = request.args.get('status')

    # Validate anime_id if provided
    if anime_id:
        try:
            anime_id = int(anime_id)
        except ValueError:
            raise ValidationError("anime_id must be a valid integer")

    # Get pagination parameters
    page, per_page = extract_pagination_params()

    # Perform search
    search_results = episode_repository.search_episodes(
        search_term=search_term,
        anime_id=anime_id,
        status_filter=status_filter
    )

    # Format results
    formatted_results = [format_episode_response(episode.__dict__) for episode in search_results]

    # Apply pagination
    total = len(formatted_results)
    start_idx = (page - 1) * per_page
    end_idx = start_idx + per_page
    paginated_results = formatted_results[start_idx:end_idx]

    # Create response with search metadata
    response = create_paginated_response(
        data=paginated_results,
        page=page,
        per_page=per_page,
        total=total,
        endpoint='episodes.search_episodes',
        q=search_term
    )

    # Add search metadata
    response['search'] = {
        'query': search_term,
        'total_results': total,
        'filters': {
            'anime_id': anime_id,
            'status': status_filter
        }
    }

    return response


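# Illustrative search (a sketch): find at most 10 available episodes of
# anime 42 whose titles match "ova":
#
#   GET /api/v1/episodes/search?q=ova&anime_id=42&status=available&per_page=10
#
# The response carries the usual pagination envelope plus a 'search' object
# echoing the query and active filters.
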
@@ -1,436 +0,0 @@
"""
|
|
||||||
Health Check Endpoints
|
|
||||||
|
|
||||||
This module provides basic health check endpoints for monitoring
|
|
||||||
the AniWorld application's status.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from flask import Blueprint, jsonify
|
|
||||||
import time
|
|
||||||
import os
|
|
||||||
import psutil
|
|
||||||
from datetime import datetime
|
|
||||||
|
|
||||||
# Blueprint for health check endpoints
|
|
||||||
health_bp = Blueprint('health_check', __name__, url_prefix='/api/health')
|
|
||||||
|
|
||||||
|
|
||||||
@health_bp.route('/status')
|
|
||||||
def get_basic_health():
|
|
||||||
"""Get basic application health status."""
|
|
||||||
try:
|
|
||||||
# Basic system metrics
|
|
||||||
memory = psutil.virtual_memory()
|
|
||||||
disk = psutil.disk_usage('/')
|
|
||||||
|
|
||||||
return jsonify({
|
|
||||||
'status': 'healthy',
|
|
||||||
'timestamp': datetime.now().isoformat(),
|
|
||||||
'system': {
|
|
||||||
'memory_usage_percent': memory.percent,
|
|
||||||
'disk_usage_percent': disk.percent,
|
|
||||||
'uptime': time.time()
|
|
||||||
},
|
|
||||||
'application': {
|
|
||||||
'status': 'running',
|
|
||||||
'version': '1.0.0'
|
|
||||||
}
|
|
||||||
})
|
|
||||||
except Exception as e:
|
|
||||||
return jsonify({
|
|
||||||
'status': 'error',
|
|
||||||
'message': str(e),
|
|
||||||
'timestamp': datetime.now().isoformat()
|
|
||||||
}), 500
|
|
||||||
|
|
||||||
|
|
||||||
@health_bp.route('/ping')
|
|
||||||
def ping():
|
|
||||||
"""Simple ping endpoint."""
|
|
||||||
return jsonify({
|
|
||||||
'status': 'ok',
|
|
||||||
'timestamp': datetime.now().isoformat()
|
|
||||||
})
|
|
||||||
@health_bp.route('/api/health')
def basic_health():
    """Basic health check endpoint for load balancers."""
    return jsonify({
        'status': 'healthy',
        'timestamp': datetime.utcnow().isoformat(),
        'service': 'aniworld-web'
    })


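# get_cached_health_data(key, producer, ttl=...) is used by the checks below
# but is not defined in this excerpt. A minimal sketch of what such a helper
# could look like (an assumption; the real implementation may differ):
#
#   _health_cache: dict = {}  # key -> (expires_at, data)
#
#   def get_cached_health_data(key, producer, ttl=30):
#       """Return cached health data for key, recomputing via producer()
#       once the ttl (in seconds) has expired."""
#       now = time.time()
#       cached = _health_cache.get(key)
#       if cached and cached[0] > now:
#           return cached[1]
#       data = producer()
#       _health_cache[key] = (now + ttl, data)
#       return data
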
@health_bp.route('/api/health/system')
def system_health():
    """Comprehensive system health check."""
    def check_system_health():
        try:
            # System metrics
            cpu_percent = psutil.cpu_percent(interval=1)
            memory = psutil.virtual_memory()
            disk = psutil.disk_usage('/')

            # Process metrics
            process = psutil.Process()
            process_memory = process.memory_info()

            return {
                'status': 'healthy',
                'timestamp': datetime.utcnow().isoformat(),
                'system': {
                    'cpu_percent': cpu_percent,
                    'memory': {
                        'total_mb': memory.total / 1024 / 1024,
                        'available_mb': memory.available / 1024 / 1024,
                        'percent': memory.percent
                    },
                    'disk': {
                        'total_gb': disk.total / 1024 / 1024 / 1024,
                        'free_gb': disk.free / 1024 / 1024 / 1024,
                        'percent': (disk.used / disk.total) * 100
                    }
                },
                'process': {
                    'memory_mb': process_memory.rss / 1024 / 1024,
                    'threads': process.num_threads(),
                    'cpu_percent': process.cpu_percent()
                }
            }
        except Exception as e:
            return {
                'status': 'unhealthy',
                'error': str(e),
                'timestamp': datetime.utcnow().isoformat()
            }

    return jsonify(get_cached_health_data('system', check_system_health))


@health_bp.route('/api/health/database')
def database_health():
    """Database connectivity and health check."""
    def check_database_health():
        try:
            # Test database connection
            start_time = time.time()
            with database_manager.get_connection() as conn:
                cursor = conn.execute("SELECT 1")
                result = cursor.fetchone()
            connection_time = (time.time() - start_time) * 1000  # ms

            # Get database size and basic stats
            db_size = os.path.getsize(database_manager.db_path) if os.path.exists(database_manager.db_path) else 0

            # Check schema version
            schema_version = database_manager.get_current_version()

            # Get table counts
            with database_manager.get_connection() as conn:
                anime_count = conn.execute("SELECT COUNT(*) FROM anime_metadata").fetchone()[0]
                episode_count = conn.execute("SELECT COUNT(*) FROM episode_metadata").fetchone()[0]

            return {
                'status': 'healthy',
                'timestamp': datetime.utcnow().isoformat(),
                'database': {
                    'connected': True,
                    'connection_time_ms': connection_time,
                    'size_mb': db_size / 1024 / 1024,
                    'schema_version': schema_version,
                    'tables': {
                        'anime_count': anime_count,
                        'episode_count': episode_count
                    }
                }
            }
        except Exception as e:
            return {
                'status': 'unhealthy',
                'timestamp': datetime.utcnow().isoformat(),
                'database': {
                    'connected': False,
                    'error': str(e)
                }
            }

    return jsonify(get_cached_health_data('database', check_database_health, ttl=60))


@health_bp.route('/api/health/dependencies')
def dependencies_health():
    """Check health of external dependencies."""
    def check_dependencies():
        dependencies = {
            'status': 'healthy',
            'timestamp': datetime.utcnow().isoformat(),
            'dependencies': {}
        }

        # Check filesystem access
        try:
            anime_directory = getattr(config, 'anime_directory', '/app/data')
            if os.path.exists(anime_directory):
                # Test read/write access
                test_file = os.path.join(anime_directory, '.health_check')
                with open(test_file, 'w') as f:
                    f.write('test')
                os.remove(test_file)
                dependencies['dependencies']['filesystem'] = {
                    'status': 'healthy',
                    'path': anime_directory,
                    'accessible': True
                }
            else:
                dependencies['dependencies']['filesystem'] = {
                    'status': 'unhealthy',
                    'path': anime_directory,
                    'accessible': False,
                    'error': 'Directory does not exist'
                }
                dependencies['status'] = 'degraded'
        except Exception as e:
            dependencies['dependencies']['filesystem'] = {
                'status': 'unhealthy',
                'error': str(e)
            }
            dependencies['status'] = 'degraded'

        # Check network connectivity (basic)
        try:
            import socket
            socket.create_connection(("8.8.8.8", 53), timeout=3)
            dependencies['dependencies']['network'] = {
                'status': 'healthy',
                'connectivity': True
            }
        except Exception as e:
            dependencies['dependencies']['network'] = {
                'status': 'unhealthy',
                'connectivity': False,
                'error': str(e)
            }
            dependencies['status'] = 'degraded'

        return dependencies

    return jsonify(get_cached_health_data('dependencies', check_dependencies, ttl=120))


@health_bp.route('/api/health/performance')
def performance_health():
    """Performance metrics and health indicators."""
    def check_performance():
        try:
            # Memory usage
            memory_usage = memory_monitor.get_current_memory_usage() if memory_monitor else 0
            is_memory_high = memory_monitor.is_memory_usage_high() if memory_monitor else False

            # Thread count
            process = psutil.Process()
            thread_count = process.num_threads()

            # Load average (if available)
            load_avg = None
            try:
                load_avg = os.getloadavg()
            except (AttributeError, OSError):
                # Not available on all platforms
                pass

            # Check if performance is within acceptable limits
            performance_status = 'healthy'
            warnings = []

            if is_memory_high:
                performance_status = 'degraded'
                warnings.append('High memory usage detected')

            if thread_count > 100:  # Arbitrary threshold
                performance_status = 'degraded'
                warnings.append(f'High thread count: {thread_count}')

            if load_avg and load_avg[0] > 4:  # Load average > 4
                performance_status = 'degraded'
                warnings.append(f'High system load: {load_avg[0]:.2f}')

            return {
                'status': performance_status,
                'timestamp': datetime.utcnow().isoformat(),
                'performance': {
                    'memory_usage_mb': memory_usage,
                    'memory_high': is_memory_high,
                    'thread_count': thread_count,
                    'load_average': load_avg,
                    'warnings': warnings
                }
            }
        except Exception as e:
            return {
                'status': 'error',
                'timestamp': datetime.utcnow().isoformat(),
                'error': str(e)
            }

    return jsonify(get_cached_health_data('performance', check_performance, ttl=10))


@health_bp.route('/api/health/detailed')
def detailed_health():
    """Comprehensive health check combining all metrics."""
    def check_detailed_health():
        try:
            # Get all health checks
            system = get_cached_health_data('system', lambda: system_health().json)
            database = get_cached_health_data('database', lambda: database_health().json)
            dependencies = get_cached_health_data('dependencies', lambda: dependencies_health().json)
            performance = get_cached_health_data('performance', lambda: performance_health().json)

            # Determine overall status
            statuses = [
                system.get('status', 'unknown'),
                database.get('status', 'unknown'),
                dependencies.get('status', 'unknown'),
                performance.get('status', 'unknown')
            ]

            if 'unhealthy' in statuses or 'error' in statuses:
                overall_status = 'unhealthy'
            elif 'degraded' in statuses:
                overall_status = 'degraded'
            else:
                overall_status = 'healthy'

            return {
                'status': overall_status,
                'timestamp': datetime.utcnow().isoformat(),
                'components': {
                    'system': system,
                    'database': database,
                    'dependencies': dependencies,
                    'performance': performance
                }
            }
        except Exception as e:
            return {
                'status': 'error',
                'timestamp': datetime.utcnow().isoformat(),
                'error': str(e)
            }

    # Don't cache detailed health - always get fresh data
    return jsonify(check_detailed_health())


@health_bp.route('/api/health/ready')
def readiness_probe():
    """Kubernetes readiness probe endpoint."""
    try:
        # Check critical dependencies
        with database_manager.get_connection() as conn:
            conn.execute("SELECT 1")

        # Check if anime directory is accessible
        anime_directory = getattr(config, 'anime_directory', '/app/data')
        if not os.path.exists(anime_directory):
            raise Exception(f"Anime directory not accessible: {anime_directory}")

        return jsonify({
            'status': 'ready',
            'timestamp': datetime.utcnow().isoformat()
        })

    except Exception as e:
        return jsonify({
            'status': 'not_ready',
            'timestamp': datetime.utcnow().isoformat(),
            'error': str(e)
        }), 503


@health_bp.route('/api/health/live')
def liveness_probe():
    """Kubernetes liveness probe endpoint."""
    try:
        # Basic liveness check - just verify the application is responding
        return jsonify({
            'status': 'alive',
            'timestamp': datetime.utcnow().isoformat(),
            'uptime_seconds': time.time() - psutil.Process().create_time()
        })

    except Exception as e:
        return jsonify({
            'status': 'dead',
            'timestamp': datetime.utcnow().isoformat(),
            'error': str(e)
        }), 503


@health_bp.route('/api/health/metrics')
def prometheus_metrics():
    """Prometheus-compatible metrics endpoint."""
    try:
        # Generate Prometheus-format metrics
        metrics = []

        # System metrics
        cpu_percent = psutil.cpu_percent()
        memory = psutil.virtual_memory()
        disk = psutil.disk_usage('/')

        metrics.extend([
            "# HELP aniworld_cpu_usage_percent CPU usage percentage",
            "# TYPE aniworld_cpu_usage_percent gauge",
            f"aniworld_cpu_usage_percent {cpu_percent}",
            "",
            "# HELP aniworld_memory_usage_percent Memory usage percentage",
            "# TYPE aniworld_memory_usage_percent gauge",
            f"aniworld_memory_usage_percent {memory.percent}",
            "",
            "# HELP aniworld_disk_usage_percent Disk usage percentage",
            "# TYPE aniworld_disk_usage_percent gauge",
            f"aniworld_disk_usage_percent {(disk.used / disk.total) * 100}",
            "",
        ])

        # Database metrics
        try:
            with database_manager.get_connection() as conn:
                anime_count = conn.execute("SELECT COUNT(*) FROM anime_metadata").fetchone()[0]
                episode_count = conn.execute("SELECT COUNT(*) FROM episode_metadata").fetchone()[0]

            metrics.extend([
                "# HELP aniworld_anime_total Total number of anime in database",
                "# TYPE aniworld_anime_total counter",
                f"aniworld_anime_total {anime_count}",
                "",
                "# HELP aniworld_episodes_total Total number of episodes in database",
                "# TYPE aniworld_episodes_total counter",
                f"aniworld_episodes_total {episode_count}",
                "",
            ])
        except Exception:
            pass

        # Process metrics
        process = psutil.Process()
        metrics.extend([
            "# HELP aniworld_process_threads Number of threads in process",
            "# TYPE aniworld_process_threads gauge",
            f"aniworld_process_threads {process.num_threads()}",
            "",
            "# HELP aniworld_process_memory_bytes Memory usage in bytes",
            "# TYPE aniworld_process_memory_bytes gauge",
            f"aniworld_process_memory_bytes {process.memory_info().rss}",
            "",
        ])

        return "\n".join(metrics), 200, {'Content-Type': 'text/plain; charset=utf-8'}

    except Exception as e:
        return f"# Error generating metrics: {e}", 500, {'Content-Type': 'text/plain'}


# Export the blueprint
__all__ = ['health_bp']

@@ -1,701 +0,0 @@
"""
|
|
||||||
Integrations API endpoints.
|
|
||||||
|
|
||||||
This module handles all external integration operations including:
|
|
||||||
- API key management
|
|
||||||
- Webhook configuration
|
|
||||||
- External service integrations
|
|
||||||
- Third-party API management
|
|
||||||
"""
|
|
||||||
|
|
||||||
from flask import Blueprint, request, jsonify
|
|
||||||
from typing import Dict, List, Any, Optional, Tuple
|
|
||||||
import logging
|
|
||||||
import requests
|
|
||||||
import json
|
|
||||||
import hmac
|
|
||||||
import hashlib
|
|
||||||
import time
|
|
||||||
from datetime import datetime, timedelta
|
|
||||||
|
|
||||||
# Import shared utilities
|
|
||||||
try:
|
|
||||||
from src.server.web.controllers.shared.auth_decorators import require_auth, optional_auth
|
|
||||||
from src.server.web.controllers.shared.error_handlers import handle_api_errors
|
|
||||||
from src.server.web.controllers.shared.validators import (
|
|
||||||
validate_json_input, validate_query_params, validate_pagination_params,
|
|
||||||
validate_id_parameter, is_valid_url
|
|
||||||
)
|
|
||||||
from src.server.web.controllers.shared.response_helpers import (
|
|
||||||
create_success_response, create_error_response, create_paginated_response
|
|
||||||
)
|
|
||||||
except ImportError:
|
|
||||||
# Fallback imports for development
|
|
||||||
def require_auth(f): return f
|
|
||||||
def optional_auth(f): return f
|
|
||||||
def handle_api_errors(f): return f
|
|
||||||
def validate_json_input(**kwargs): return lambda f: f
|
|
||||||
def validate_query_params(**kwargs): return lambda f: f
|
|
||||||
def validate_pagination_params(f): return f
|
|
||||||
def validate_id_parameter(param): return lambda f: f
|
|
||||||
def is_valid_url(url): return url.startswith(('http://', 'https://'))
|
|
||||||
def create_success_response(msg, code=200, data=None): return jsonify({'success': True, 'message': msg, 'data': data}), code
|
|
||||||
def create_error_response(msg, code=400, details=None): return jsonify({'error': msg, 'details': details}), code
|
|
||||||
def create_paginated_response(items, page, per_page, total, endpoint=None): return jsonify({'data': items, 'pagination': {'page': page, 'per_page': per_page, 'total': total}}), 200
|
|
||||||
|
|
||||||
# Import integration components
|
|
||||||
try:
|
|
||||||
from src.server.data.integration_manager import IntegrationManager
|
|
||||||
from src.server.data.webhook_manager import WebhookManager
|
|
||||||
from src.data.api_key_manager import APIKeyManager
|
|
||||||
except ImportError:
|
|
||||||
# Fallback for development
|
|
||||||
class IntegrationManager:
|
|
||||||
def get_all_integrations(self, **kwargs): return []
|
|
||||||
def get_integrations_count(self, **kwargs): return 0
|
|
||||||
def get_integration_by_id(self, id): return None
|
|
||||||
def create_integration(self, **kwargs): return 1
|
|
||||||
def update_integration(self, id, **kwargs): return True
|
|
||||||
def delete_integration(self, id): return True
|
|
||||||
def test_integration(self, id): return {'success': True, 'response_time': 0.1}
|
|
||||||
def get_integration_logs(self, id, **kwargs): return []
|
|
||||||
def trigger_integration(self, id, data): return {'success': True}
|
|
||||||
|
|
||||||
class WebhookManager:
|
|
||||||
def get_all_webhooks(self, **kwargs): return []
|
|
||||||
def get_webhooks_count(self, **kwargs): return 0
|
|
||||||
def get_webhook_by_id(self, id): return None
|
|
||||||
def create_webhook(self, **kwargs): return 1
|
|
||||||
def update_webhook(self, id, **kwargs): return True
|
|
||||||
def delete_webhook(self, id): return True
|
|
||||||
def test_webhook(self, id): return {'success': True, 'response_time': 0.1}
|
|
||||||
def get_webhook_deliveries(self, id, **kwargs): return []
|
|
||||||
def redeliver_webhook(self, delivery_id): return True
|
|
||||||
def trigger_webhook(self, event, data): return True
|
|
||||||
|
|
||||||
class APIKeyManager:
|
|
||||||
def get_external_api_keys(self, **kwargs): return []
|
|
||||||
def get_external_api_key_by_id(self, id): return None
|
|
||||||
def create_external_api_key(self, **kwargs): return 1
|
|
||||||
def update_external_api_key(self, id, **kwargs): return True
|
|
||||||
def delete_external_api_key(self, id): return True
|
|
||||||
def test_external_api_key(self, id): return {'success': True}
|
|
||||||
def rotate_external_api_key(self, id): return {'new_key': 'new_api_key'}
|
|
||||||
|
|
||||||
# Create blueprint
|
|
||||||
integrations_bp = Blueprint('integrations', __name__)
|
|
||||||
|
|
||||||
# Initialize managers
|
|
||||||
integration_manager = IntegrationManager()
|
|
||||||
webhook_manager = WebhookManager()
|
|
||||||
api_key_manager = APIKeyManager()
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
@integrations_bp.route('/integrations', methods=['GET'])
@require_auth
@handle_api_errors
@validate_query_params(
    allowed_params=['page', 'per_page', 'type', 'status', 'sort_by', 'sort_order'],
    param_types={'page': int, 'per_page': int}
)
@validate_pagination_params
def list_integrations() -> Tuple[Any, int]:
    """
    List integrations with pagination and filtering.

    Query Parameters:
        - page: Page number (default: 1)
        - per_page: Items per page (default: 20, max: 100)
        - type: Filter by integration type
        - status: Filter by integration status
        - sort_by: Sort field (default: created_at)
        - sort_order: Sort order (asc/desc, default: desc)

    Returns:
        JSON response with paginated integration list
    """
    page = request.args.get('page', 1, type=int)
    per_page = min(request.args.get('per_page', 20, type=int), 100)
    integration_type = request.args.get('type')
    status = request.args.get('status')
    sort_by = request.args.get('sort_by', 'created_at')
    sort_order = request.args.get('sort_order', 'desc')

    offset = (page - 1) * per_page

    # Get integrations
    integrations = integration_manager.get_all_integrations(
        offset=offset,
        limit=per_page,
        integration_type=integration_type,
        status=status,
        sort_by=sort_by,
        sort_order=sort_order
    )

    # Get total count
    total = integration_manager.get_integrations_count(
        integration_type=integration_type,
        status=status
    )

    return create_paginated_response(
        integrations,
        page,
        per_page,
        total,
        endpoint='/api/v1/integrations'
    )


@integrations_bp.route('/integrations/<int:integration_id>', methods=['GET'])
@require_auth
@handle_api_errors
@validate_id_parameter('integration_id')
def get_integration(integration_id: int) -> Tuple[Any, int]:
    """
    Get specific integration by ID.

    Args:
        integration_id: Integration ID

    Returns:
        JSON response with integration data
    """
    integration = integration_manager.get_integration_by_id(integration_id)

    if not integration:
        return create_error_response("Integration not found", 404)

    return create_success_response("Integration retrieved successfully", 200, integration)


@integrations_bp.route('/integrations', methods=['POST'])
@require_auth
@handle_api_errors
@validate_json_input(
    required_fields=['name', 'type', 'config'],
    optional_fields=['description', 'enabled'],
    field_types={'name': str, 'type': str, 'config': dict, 'description': str, 'enabled': bool}
)
def create_integration() -> Tuple[Any, int]:
    """
    Create a new integration.

    Request Body:
        - name: Integration name (required)
        - type: Integration type (required)
        - config: Integration configuration (required)
        - description: Integration description (optional)
        - enabled: Whether integration is enabled (optional, default: true)

    Returns:
        JSON response with created integration data
    """
    data = request.get_json()

    # Validate integration type
    allowed_types = ['webhook', 'api', 'discord', 'slack', 'email', 'custom']
    if data['type'] not in allowed_types:
        return create_error_response(f"Invalid integration type. Must be one of: {', '.join(allowed_types)}", 400)

    # Validate configuration based on type
    config_errors = _validate_integration_config(data['type'], data['config'])
    if config_errors:
        return create_error_response("Configuration validation failed", 400, config_errors)

    try:
        # Create integration
        integration_id = integration_manager.create_integration(
            name=data['name'],
            integration_type=data['type'],
            config=data['config'],
            description=data.get('description', ''),
            enabled=data.get('enabled', True)
        )

        # Get created integration
        integration = integration_manager.get_integration_by_id(integration_id)

        logger.info(f"Created integration {integration_id}: {data['name']} ({data['type']})")
        return create_success_response("Integration created successfully", 201, integration)

    except Exception as e:
        logger.error(f"Error creating integration: {str(e)}")
        return create_error_response("Failed to create integration", 500)


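# Illustrative payload (a sketch; the exact keys accepted inside 'config' are
# enforced by _validate_integration_config, which is defined elsewhere):
#
#   POST /api/v1/integrations
#   {
#       "name": "New-episode webhook",
#       "type": "webhook",
#       "config": {"url": "https://example.com/hooks/aniworld"},
#       "description": "Notify on new episodes",
#       "enabled": true
#   }
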
@integrations_bp.route('/integrations/<int:integration_id>', methods=['PUT'])
@require_auth
@handle_api_errors
@validate_id_parameter('integration_id')
@validate_json_input(
    optional_fields=['name', 'config', 'description', 'enabled'],
    field_types={'name': str, 'config': dict, 'description': str, 'enabled': bool}
)
def update_integration(integration_id: int) -> Tuple[Any, int]:
    """
    Update an integration.

    Args:
        integration_id: Integration ID

    Request Body:
        - name: Integration name (optional)
        - config: Integration configuration (optional)
        - description: Integration description (optional)
        - enabled: Whether integration is enabled (optional)

    Returns:
        JSON response with update result
    """
    integration = integration_manager.get_integration_by_id(integration_id)

    if not integration:
        return create_error_response("Integration not found", 404)

    data = request.get_json()

    # Validate configuration if provided
    if 'config' in data:
        config_errors = _validate_integration_config(integration['type'], data['config'])
        if config_errors:
            return create_error_response("Configuration validation failed", 400, config_errors)

    try:
        # Update integration
        success = integration_manager.update_integration(integration_id, **data)

        if success:
            # Get updated integration
            updated_integration = integration_manager.get_integration_by_id(integration_id)

            logger.info(f"Updated integration {integration_id}")
            return create_success_response("Integration updated successfully", 200, updated_integration)
        else:
            return create_error_response("Failed to update integration", 500)

    except Exception as e:
        logger.error(f"Error updating integration {integration_id}: {str(e)}")
        return create_error_response("Failed to update integration", 500)


@integrations_bp.route('/integrations/<int:integration_id>', methods=['DELETE'])
@require_auth
@handle_api_errors
@validate_id_parameter('integration_id')
def delete_integration(integration_id: int) -> Tuple[Any, int]:
    """
    Delete an integration.

    Args:
        integration_id: Integration ID

    Returns:
        JSON response with deletion result
    """
    integration = integration_manager.get_integration_by_id(integration_id)

    if not integration:
        return create_error_response("Integration not found", 404)

    try:
        success = integration_manager.delete_integration(integration_id)

        if success:
            logger.info(f"Deleted integration {integration_id}: {integration['name']}")
            return create_success_response("Integration deleted successfully")
        else:
            return create_error_response("Failed to delete integration", 500)

    except Exception as e:
        logger.error(f"Error deleting integration {integration_id}: {str(e)}")
        return create_error_response("Failed to delete integration", 500)


@integrations_bp.route('/integrations/<int:integration_id>/test', methods=['POST'])
|
|
||||||
@require_auth
|
|
||||||
@handle_api_errors
|
|
||||||
@validate_id_parameter('integration_id')
|
|
||||||
def test_integration(integration_id: int) -> Tuple[Any, int]:
|
|
||||||
"""
|
|
||||||
Test an integration.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
integration_id: Integration ID
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
JSON response with test result
|
|
||||||
"""
|
|
||||||
integration = integration_manager.get_integration_by_id(integration_id)
|
|
||||||
|
|
||||||
if not integration:
|
|
||||||
return create_error_response("Integration not found", 404)
|
|
||||||
|
|
||||||
try:
|
|
||||||
test_result = integration_manager.test_integration(integration_id)
|
|
||||||
|
|
||||||
logger.info(f"Tested integration {integration_id}: {test_result}")
|
|
||||||
return create_success_response("Integration test completed", 200, test_result)
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Error testing integration {integration_id}: {str(e)}")
|
|
||||||
return create_error_response("Failed to test integration", 500)
|
|
||||||
|
|
||||||
|
|
||||||
@integrations_bp.route('/integrations/<int:integration_id>/trigger', methods=['POST'])
|
|
||||||
@require_auth
|
|
||||||
@handle_api_errors
|
|
||||||
@validate_id_parameter('integration_id')
|
|
||||||
@validate_json_input(
|
|
||||||
optional_fields=['data'],
|
|
||||||
field_types={'data': dict}
|
|
||||||
)
|
|
||||||
def trigger_integration(integration_id: int) -> Tuple[Any, int]:
|
|
||||||
"""
|
|
||||||
Manually trigger an integration.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
integration_id: Integration ID
|
|
||||||
|
|
||||||
Request Body:
|
|
||||||
- data: Custom data to send with trigger (optional)
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
JSON response with trigger result
|
|
||||||
"""
|
|
||||||
integration = integration_manager.get_integration_by_id(integration_id)
|
|
||||||
|
|
||||||
if not integration:
|
|
||||||
return create_error_response("Integration not found", 404)
|
|
||||||
|
|
||||||
if not integration['enabled']:
|
|
||||||
return create_error_response("Integration is disabled", 400)
|
|
||||||
|
|
||||||
data = request.get_json() or {}
|
|
||||||
trigger_data = data.get('data', {})
|
|
||||||
|
|
||||||
try:
|
|
||||||
result = integration_manager.trigger_integration(integration_id, trigger_data)
|
|
||||||
|
|
||||||
logger.info(f"Triggered integration {integration_id}")
|
|
||||||
return create_success_response("Integration triggered successfully", 200, result)
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Error triggering integration {integration_id}: {str(e)}")
|
|
||||||
return create_error_response("Failed to trigger integration", 500)
|
|
||||||
|
|
||||||
|
|
||||||
@integrations_bp.route('/integrations/<int:integration_id>/logs', methods=['GET'])
@require_auth
@handle_api_errors
@validate_id_parameter('integration_id')
@validate_query_params(
    allowed_params=['page', 'per_page', 'level'],
    param_types={'page': int, 'per_page': int}
)
@validate_pagination_params
def get_integration_logs(integration_id: int) -> Tuple[Any, int]:
    """
    Get integration execution logs.

    Args:
        integration_id: Integration ID

    Query Parameters:
        - page: Page number (default: 1)
        - per_page: Items per page (default: 50, max: 200)
        - level: Log level filter (optional)

    Returns:
        JSON response with integration logs
    """
    integration = integration_manager.get_integration_by_id(integration_id)

    if not integration:
        return create_error_response("Integration not found", 404)

    page = request.args.get('page', 1, type=int)
    per_page = min(request.args.get('per_page', 50, type=int), 200)
    level = request.args.get('level')

    offset = (page - 1) * per_page

    try:
        logs = integration_manager.get_integration_logs(
            integration_id,
            offset=offset,
            limit=per_page,
            level=level
        )

        # Simplified: len(logs) only counts the rows on the current page; a
        # real total needs a count query (see the sketch after this function)
        total = len(logs)

        return create_paginated_response(
            logs,
            page,
            per_page,
            total,
            endpoint=f'/api/v1/integrations/{integration_id}/logs'
        )

    except Exception as e:
        logger.error(f"Error getting integration logs for {integration_id}: {str(e)}")
        return create_error_response("Failed to get integration logs", 500)


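# `total = len(logs)` above under-reports the true total. A sketch of the
# missing count query, assuming the logs live in a SQLite table named
# `integration_logs` (the table and column names are assumptions):
import sqlite3

def count_integration_logs(db_path: str, integration_id: int, level: str = None) -> int:
    """Count log rows for one integration, optionally filtered by level."""
    query = "SELECT COUNT(*) FROM integration_logs WHERE integration_id = ?"
    params = [integration_id]
    if level:
        query += " AND level = ?"
        params.append(level)
    with sqlite3.connect(db_path) as conn:
        return conn.execute(query, params).fetchone()[0]

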
@integrations_bp.route('/webhooks', methods=['GET'])
@require_auth
@handle_api_errors
@validate_query_params(
    allowed_params=['page', 'per_page', 'event', 'status'],
    param_types={'page': int, 'per_page': int}
)
@validate_pagination_params
def list_webhooks() -> Tuple[Any, int]:
    """
    List webhooks with pagination and filtering.

    Query Parameters:
        - page: Page number (default: 1)
        - per_page: Items per page (default: 20, max: 100)
        - event: Filter by event type
        - status: Filter by webhook status

    Returns:
        JSON response with paginated webhook list
    """
    page = request.args.get('page', 1, type=int)
    per_page = min(request.args.get('per_page', 20, type=int), 100)
    event = request.args.get('event')
    status = request.args.get('status')

    offset = (page - 1) * per_page

    # Get webhooks
    webhooks = webhook_manager.get_all_webhooks(
        offset=offset,
        limit=per_page,
        event=event,
        status=status
    )

    # Get total count
    total = webhook_manager.get_webhooks_count(
        event=event,
        status=status
    )

    return create_paginated_response(
        webhooks,
        page,
        per_page,
        total,
        endpoint='/api/v1/webhooks'
    )


@integrations_bp.route('/webhooks', methods=['POST'])
@require_auth
@handle_api_errors
@validate_json_input(
    required_fields=['url', 'events'],
    optional_fields=['name', 'secret', 'enabled', 'retry_config'],
    field_types={'url': str, 'events': list, 'name': str, 'secret': str, 'enabled': bool, 'retry_config': dict}
)
def create_webhook() -> Tuple[Any, int]:
    """
    Create a new webhook.

    Request Body:
        - url: Webhook URL (required)
        - events: List of events to subscribe to (required)
        - name: Webhook name (optional)
        - secret: Webhook secret for signature verification (optional; see the signing sketch after this function)
        - enabled: Whether webhook is enabled (optional, default: true)
        - retry_config: Retry configuration (optional)

    Returns:
        JSON response with created webhook data
    """
    data = request.get_json()

    # Validate URL
    if not is_valid_url(data['url']):
        return create_error_response("Invalid webhook URL", 400)

    # Validate events
    allowed_events = [
        'anime.created', 'anime.updated', 'anime.deleted',
        'episode.created', 'episode.updated', 'episode.deleted',
        'download.started', 'download.completed', 'download.failed',
        'backup.created', 'backup.restored', 'system.error'
    ]

    invalid_events = [event for event in data['events'] if event not in allowed_events]
    if invalid_events:
        return create_error_response(f"Invalid events: {', '.join(invalid_events)}", 400)

    try:
        # Create webhook
        webhook_id = webhook_manager.create_webhook(
            url=data['url'],
            events=data['events'],
            name=data.get('name', ''),
            secret=data.get('secret', ''),
            enabled=data.get('enabled', True),
            retry_config=data.get('retry_config', {})
        )

        # Get created webhook
        webhook = webhook_manager.get_webhook_by_id(webhook_id)

        logger.info(f"Created webhook {webhook_id}: {data['url']}")
        return create_success_response("Webhook created successfully", 201, webhook)

    except Exception as e:
        logger.error(f"Error creating webhook: {str(e)}")
        return create_error_response("Failed to create webhook", 500)


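# The `secret` field above enables signature verification, but the deleted
# module does not show the scheme. A common pattern, assumed here rather than
# confirmed by the project, is an HMAC-SHA256 digest of the raw request body
# sent in a delivery header:
import hashlib
import hmac

def sign_payload(secret: str, body: bytes) -> str:
    """Hex HMAC-SHA256 signature for an outgoing webhook delivery."""
    return hmac.new(secret.encode("utf-8"), body, hashlib.sha256).hexdigest()

def verify_signature(secret: str, body: bytes, received_signature: str) -> bool:
    """Constant-time comparison on the receiving side to resist timing attacks."""
    return hmac.compare_digest(sign_payload(secret, body), received_signature)

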
@integrations_bp.route('/webhooks/<int:webhook_id>/test', methods=['POST'])
@require_auth
@handle_api_errors
@validate_id_parameter('webhook_id')
def test_webhook(webhook_id: int) -> Tuple[Any, int]:
    """
    Test a webhook.

    Args:
        webhook_id: Webhook ID

    Returns:
        JSON response with test result
    """
    webhook = webhook_manager.get_webhook_by_id(webhook_id)

    if not webhook:
        return create_error_response("Webhook not found", 404)

    try:
        test_result = webhook_manager.test_webhook(webhook_id)

        logger.info(f"Tested webhook {webhook_id}: {test_result}")
        return create_success_response("Webhook test completed", 200, test_result)

    except Exception as e:
        logger.error(f"Error testing webhook {webhook_id}: {str(e)}")
        return create_error_response("Failed to test webhook", 500)


@integrations_bp.route('/api-keys/external', methods=['GET'])
@require_auth
@handle_api_errors
@validate_pagination_params
def list_external_api_keys() -> Tuple[Any, int]:
    """
    List external API keys.

    Returns:
        JSON response with external API keys
    """
    try:
        api_keys = api_key_manager.get_external_api_keys()

        return create_success_response("External API keys retrieved successfully", 200, api_keys)

    except Exception as e:
        logger.error(f"Error getting external API keys: {str(e)}")
        return create_error_response("Failed to get external API keys", 500)


@integrations_bp.route('/api-keys/external', methods=['POST'])
@require_auth
@handle_api_errors
@validate_json_input(
    required_fields=['service', 'key'],
    optional_fields=['name', 'description'],
    field_types={'service': str, 'key': str, 'name': str, 'description': str}
)
def create_external_api_key() -> Tuple[Any, int]:
    """
    Store an external API key.

    Request Body:
        - service: Service name (required)
        - key: API key value (required)
        - name: Key name (optional)
        - description: Key description (optional)

    Returns:
        JSON response with created API key data
    """
    data = request.get_json()

    try:
        # Create external API key
        key_id = api_key_manager.create_external_api_key(
            service=data['service'],
            key=data['key'],
            name=data.get('name', ''),
            description=data.get('description', '')
        )

        # Get created key (without exposing the actual key)
        api_key = api_key_manager.get_external_api_key_by_id(key_id)

        logger.info(f"Created external API key {key_id} for service: {data['service']}")
        return create_success_response("External API key created successfully", 201, api_key)

    except Exception as e:
        logger.error(f"Error creating external API key: {str(e)}")
        return create_error_response("Failed to create external API key", 500)


def _validate_integration_config(integration_type: str, config: Dict[str, Any]) -> List[str]:
    """
    Validate integration configuration based on type.

    Args:
        integration_type: Type of integration
        config: Configuration dictionary

    Returns:
        List of validation errors (empty if valid)
    """
    errors = []

    if integration_type == 'webhook':
        if 'url' not in config:
            errors.append("Webhook URL is required")
        elif not is_valid_url(config['url']):
            errors.append("Invalid webhook URL")

    elif integration_type == 'discord':
        if 'webhook_url' not in config:
            errors.append("Discord webhook URL is required")
        elif not config['webhook_url'].startswith('https://discord.com/api/webhooks/'):
            errors.append("Invalid Discord webhook URL")

    elif integration_type == 'slack':
        if 'webhook_url' not in config:
            errors.append("Slack webhook URL is required")
        elif not config['webhook_url'].startswith('https://hooks.slack.com/'):
            errors.append("Invalid Slack webhook URL")

    elif integration_type == 'email':
        required_fields = ['smtp_host', 'smtp_port', 'from_email']
        for field in required_fields:
            if field not in config:
                errors.append(f"{field} is required for email integration")

    elif integration_type == 'api':
        if 'base_url' not in config:
            errors.append("Base URL is required for API integration")
        elif not is_valid_url(config['base_url']):
            errors.append("Invalid API base URL")

    return errors

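# Usage sketch for the validator above; the expected results follow directly
# from its branches:
#
#   _validate_integration_config('discord', {})
#       -> ["Discord webhook URL is required"]
#   _validate_integration_config('slack', {'webhook_url': 'https://hooks.slack.com/services/T000/B000/XXX'})
#       -> []
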
@ -1,268 +0,0 @@
"""
|
|
||||||
API endpoints for logging configuration and management.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from flask import Blueprint, jsonify, request, send_file
|
|
||||||
from web.controllers.auth_controller import require_auth
|
|
||||||
from config import config
|
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
from datetime import datetime
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
logging_bp = Blueprint('logging', __name__, url_prefix='/api/logging')
|
|
||||||
|
|
||||||
@logging_bp.route('/config', methods=['GET'])
|
|
||||||
@require_auth
|
|
||||||
def get_logging_config():
|
|
||||||
"""Get current logging configuration."""
|
|
||||||
try:
|
|
||||||
# Import here to avoid circular imports
|
|
||||||
from src.infrastructure.logging.GlobalLogger import error_logger
|
|
||||||
|
|
||||||
config_data = {
|
|
||||||
'log_level': config.log_level,
|
|
||||||
'enable_console_logging': config.enable_console_logging,
|
|
||||||
'enable_console_progress': config.enable_console_progress,
|
|
||||||
'enable_fail2ban_logging': config.enable_fail2ban_logging,
|
|
||||||
'log_files': [
|
|
||||||
'./logs/aniworld.log',
|
|
||||||
'./logs/auth_failures.log',
|
|
||||||
'./logs/downloads.log'
|
|
||||||
]
|
|
||||||
}
|
|
||||||
|
|
||||||
return jsonify({
|
|
||||||
'success': True,
|
|
||||||
'config': config_data
|
|
||||||
})
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Error getting logging config: {e}")
|
|
||||||
return jsonify({
|
|
||||||
'success': False,
|
|
||||||
'error': str(e)
|
|
||||||
}), 500
|
|
||||||
|
|
||||||
@logging_bp.route('/config', methods=['POST'])
|
|
||||||
@require_auth
|
|
||||||
def update_logging_config():
|
|
||||||
"""Update logging configuration."""
|
|
||||||
try:
|
|
||||||
data = request.get_json() or {}
|
|
||||||
|
|
||||||
# Update log level
|
|
||||||
log_level = data.get('log_level', config.log_level)
|
|
||||||
if log_level in ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']:
|
|
||||||
config.log_level = log_level
|
|
||||||
|
|
||||||
# Update console logging settings
|
|
||||||
if 'enable_console_logging' in data:
|
|
||||||
config.enable_console_logging = bool(data['enable_console_logging'])
|
|
||||||
|
|
||||||
if 'enable_console_progress' in data:
|
|
||||||
config.enable_console_progress = bool(data['enable_console_progress'])
|
|
||||||
|
|
||||||
if 'enable_fail2ban_logging' in data:
|
|
||||||
config.enable_fail2ban_logging = bool(data['enable_fail2ban_logging'])
|
|
||||||
|
|
||||||
# Save configuration
|
|
||||||
config.save_config()
|
|
||||||
|
|
||||||
# Update runtime logging level
|
|
||||||
try:
|
|
||||||
from src.infrastructure.logging.GlobalLogger import error_logger
|
|
||||||
# Use standard logging level update
|
|
||||||
numeric_level = getattr(logging, config.log_level.upper(), logging.INFO)
|
|
||||||
logging.getLogger().setLevel(numeric_level)
|
|
||||||
except ImportError:
|
|
||||||
# Fallback for basic logging
|
|
||||||
numeric_level = getattr(logging, config.log_level.upper(), logging.INFO)
|
|
||||||
logging.getLogger().setLevel(numeric_level)
|
|
||||||
|
|
||||||
logger.info(f"Logging configuration updated: level={config.log_level}, console={config.enable_console_logging}")
|
|
||||||
|
|
||||||
return jsonify({
|
|
||||||
'success': True,
|
|
||||||
'message': 'Logging configuration updated successfully',
|
|
||||||
'config': {
|
|
||||||
'log_level': config.log_level,
|
|
||||||
'enable_console_logging': config.enable_console_logging,
|
|
||||||
'enable_console_progress': config.enable_console_progress,
|
|
||||||
'enable_fail2ban_logging': config.enable_fail2ban_logging
|
|
||||||
}
|
|
||||||
})
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Error updating logging config: {e}")
|
|
||||||
return jsonify({
|
|
||||||
'success': False,
|
|
||||||
'error': str(e)
|
|
||||||
}), 500
|
|
||||||
|
|
||||||
@logging_bp.route('/files', methods=['GET'])
|
|
||||||
@require_auth
|
|
||||||
def list_log_files():
|
|
||||||
"""Get list of available log files."""
|
|
||||||
try:
|
|
||||||
from src.infrastructure.logging.GlobalLogger import error_logger
|
|
||||||
# Return basic log files
|
|
||||||
log_files = [
|
|
||||||
'./logs/aniworld.log',
|
|
||||||
'./logs/auth_failures.log',
|
|
||||||
'./logs/downloads.log'
|
|
||||||
]
|
|
||||||
return jsonify({
|
|
||||||
'success': True,
|
|
||||||
'files': log_files
|
|
||||||
})
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Error listing log files: {e}")
|
|
||||||
return jsonify({
|
|
||||||
'success': False,
|
|
||||||
'error': str(e)
|
|
||||||
}), 500
|
|
||||||
|
|
||||||
@logging_bp.route('/files/<filename>/download', methods=['GET'])
@require_auth
def download_log_file(filename):
    """Download a specific log file."""
    try:
        # Security: Only allow log files
        if not filename.endswith('.log'):
            return jsonify({
                'success': False,
                'error': 'Invalid file type'
            }), 400

        log_directory = "logs"
        file_path = os.path.join(log_directory, filename)

        # Security: Check that the file exists and resolves inside the log
        # directory; the trailing separator stops prefix matches such as a
        # sibling "logs_old" directory passing the check
        log_root = os.path.abspath(log_directory) + os.sep
        if not os.path.exists(file_path) or not os.path.abspath(file_path).startswith(log_root):
            return jsonify({
                'success': False,
                'error': 'File not found'
            }), 404

        # Keep the .log extension and put the timestamp before it
        base_name, ext = os.path.splitext(filename)
        return send_file(
            file_path,
            as_attachment=True,
            download_name=f"{base_name}_{datetime.now().strftime('%Y%m%d_%H%M%S')}{ext}"
        )
    except Exception as e:
        logger.error(f"Error downloading log file {filename}: {e}")
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500


@logging_bp.route('/files/<filename>/tail', methods=['GET'])
@require_auth
def tail_log_file(filename):
    """Get the last N lines from a log file."""
    try:
        # Security: Only allow log files
        if not filename.endswith('.log'):
            return jsonify({
                'success': False,
                'error': 'Invalid file type'
            }), 400

        lines = request.args.get('lines', 100, type=int)
        lines = min(lines, 1000)  # Limit to 1000 lines max

        log_directory = "logs"
        file_path = os.path.join(log_directory, filename)

        # Security: Check that the file exists and resolves inside the log directory
        log_root = os.path.abspath(log_directory) + os.sep
        if not os.path.exists(file_path) or not os.path.abspath(file_path).startswith(log_root):
            return jsonify({
                'success': False,
                'error': 'File not found'
            }), 404

        # Read last N lines (loads the whole file into memory; see the
        # leaner alternative sketched after this function)
        with open(file_path, 'r', encoding='utf-8') as f:
            all_lines = f.readlines()
            tail_lines = all_lines[-lines:] if len(all_lines) > lines else all_lines

        return jsonify({
            'success': True,
            'lines': [line.rstrip('\n\r') for line in tail_lines],
            'total_lines': len(all_lines),
            'showing_lines': len(tail_lines)
        })
    except Exception as e:
        logger.error(f"Error tailing log file {filename}: {e}")
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500


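# A leaner tail, sketched as an alternative to readlines() above: deque keeps
# only the last n lines while streaming, so large logs are never fully held in
# memory (the total line count would still need a separate pass):
from collections import deque

def tail_lines(path: str, n: int) -> list:
    """Return the last n lines of a text file, newlines included."""
    with open(path, 'r', encoding='utf-8') as f:
        return list(deque(f, maxlen=n))

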
@logging_bp.route('/cleanup', methods=['POST'])
@require_auth
def cleanup_logs():
    """Clean up old log files."""
    try:
        data = request.get_json() or {}
        days = int(data.get('days', 30))
        days = max(1, min(days, 365))  # Limit between 1 and 365 days

        # log_config.cleanup_old_logs() does not exist yet, so the cleanup is
        # simulated; the actual deletion logic would go here
        cleaned_files = []

        logger.info(f"Cleaned up {len(cleaned_files)} old log files (older than {days} days)")

        return jsonify({
            'success': True,
            'message': f'Cleaned up {len(cleaned_files)} log files',
            'cleaned_files': cleaned_files
        })
    except Exception as e:
        logger.error(f"Error cleaning up logs: {e}")
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500


@logging_bp.route('/test', methods=['POST'])
@require_auth
def test_logging():
    """Test logging at different levels."""
    try:
        test_message = "Test log message from web interface"

        # Test different log levels
        logger.debug(f"DEBUG: {test_message}")
        logger.info(f"INFO: {test_message}")
        logger.warning(f"WARNING: {test_message}")
        logger.error(f"ERROR: {test_message}")

        # Test fail2ban logging (log_auth_failure would be implemented here)
        try:
            from src.infrastructure.logging.GlobalLogger import error_logger  # noqa: F401
        except ImportError:
            pass

        # Test download progress logging (log_download_progress would be implemented here)
        try:
            from src.infrastructure.logging.GlobalLogger import error_logger  # noqa: F401
        except ImportError:
            pass

        return jsonify({
            'success': True,
            'message': 'Test messages logged successfully'
        })
    except Exception as e:
        logger.error(f"Error testing logging: {e}")
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500

@ -1,656 +0,0 @@
"""
|
|
||||||
Maintenance API endpoints.
|
|
||||||
|
|
||||||
This module handles all system maintenance operations including:
|
|
||||||
- Database maintenance
|
|
||||||
- System optimization
|
|
||||||
- Cleanup operations
|
|
||||||
- Scheduled maintenance tasks
|
|
||||||
"""
|
|
||||||
|
|
||||||
from flask import Blueprint, request, jsonify
|
|
||||||
from typing import Dict, List, Any, Optional, Tuple
|
|
||||||
import logging
|
|
||||||
import os
|
|
||||||
import time
|
|
||||||
import sqlite3
|
|
||||||
from datetime import datetime, timedelta
|
|
||||||
|
|
||||||
# Import shared utilities
|
|
||||||
try:
|
|
||||||
from src.server.web.controllers.shared.auth_decorators import require_auth
|
|
||||||
from src.server.web.controllers.shared.error_handlers import handle_api_errors
|
|
||||||
from src.server.web.controllers.shared.validators import validate_json_input, validate_query_params
|
|
||||||
from src.server.web.controllers.shared.response_helpers import (
|
|
||||||
create_success_response, create_error_response, format_file_size, format_datetime
|
|
||||||
)
|
|
||||||
except ImportError:
|
|
||||||
# Fallback imports for development
|
|
||||||
def require_auth(f): return f
|
|
||||||
def handle_api_errors(f): return f
|
|
||||||
def validate_json_input(**kwargs): return lambda f: f
|
|
||||||
def validate_query_params(**kwargs): return lambda f: f
|
|
||||||
def create_success_response(msg, code=200, data=None): return jsonify({'success': True, 'message': msg, 'data': data}), code
|
|
||||||
def create_error_response(msg, code=400, details=None): return jsonify({'error': msg, 'details': details}), code
|
|
||||||
def format_file_size(size): return f"{size} bytes"
|
|
||||||
def format_datetime(dt): return str(dt) if dt else None
|
|
||||||
|
|
||||||
# Import maintenance components
|
|
||||||
try:
|
|
||||||
from src.server.data.database_manager import DatabaseManager
|
|
||||||
from src.server.data.cleanup_manager import CleanupManager
|
|
||||||
from src.server.data.scheduler_manager import SchedulerManager
|
|
||||||
except ImportError:
|
|
||||||
# Fallback for development
|
|
||||||
class DatabaseManager:
|
|
||||||
def vacuum_database(self): return {'size_before': 1000000, 'size_after': 800000, 'time_taken': 5.2}
|
|
||||||
def analyze_database(self): return {'tables_analyzed': 10, 'time_taken': 2.1}
|
|
||||||
def integrity_check(self): return {'status': 'ok', 'errors': [], 'warnings': []}
|
|
||||||
def reindex_database(self): return {'indexes_rebuilt': 15, 'time_taken': 3.5}
|
|
||||||
def get_database_stats(self): return {'size': 10000000, 'tables': 10, 'indexes': 15}
|
|
||||||
def optimize_database(self): return {'optimizations': ['vacuum', 'analyze', 'reindex'], 'time_taken': 10.7}
|
|
||||||
def backup_database(self, path): return {'backup_file': path, 'size': 5000000}
|
|
||||||
def get_slow_queries(self, **kwargs): return []
|
|
||||||
|
|
||||||
class CleanupManager:
|
|
||||||
def cleanup_temp_files(self): return {'files_deleted': 50, 'space_freed': 1048576}
|
|
||||||
def cleanup_logs(self, **kwargs): return {'logs_deleted': 100, 'space_freed': 2097152}
|
|
||||||
def cleanup_downloads(self, **kwargs): return {'downloads_cleaned': 25, 'space_freed': 5242880}
|
|
||||||
def cleanup_cache(self): return {'cache_cleared': True, 'space_freed': 10485760}
|
|
||||||
def cleanup_old_backups(self, **kwargs): return {'backups_deleted': 5, 'space_freed': 52428800}
|
|
||||||
def get_cleanup_stats(self): return {'temp_files': 100, 'log_files': 200, 'cache_size': 50000000}
|
|
||||||
|
|
||||||
class SchedulerManager:
|
|
||||||
def get_scheduled_tasks(self): return []
|
|
||||||
def create_scheduled_task(self, **kwargs): return 1
|
|
||||||
def update_scheduled_task(self, id, **kwargs): return True
|
|
||||||
def delete_scheduled_task(self, id): return True
|
|
||||||
def get_task_history(self, **kwargs): return []
|
|
||||||
|
|
||||||
# Create blueprint
|
|
||||||
maintenance_bp = Blueprint('maintenance', __name__)
|
|
||||||
|
|
||||||
# Initialize managers
|
|
||||||
database_manager = DatabaseManager()
|
|
||||||
cleanup_manager = CleanupManager()
|
|
||||||
scheduler_manager = SchedulerManager()
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
@maintenance_bp.route('/maintenance/database/vacuum', methods=['POST'])
@require_auth
@handle_api_errors
def vacuum_database() -> Tuple[Any, int]:
    """
    Vacuum the database to reclaim space and optimize performance.

    Returns:
        JSON response with vacuum operation results
    """
    try:
        logger.info("Starting database vacuum operation")
        start_time = time.time()

        result = database_manager.vacuum_database()

        operation_time = time.time() - start_time
        result['operation_time'] = round(operation_time, 2)

        space_saved = result.get('size_before', 0) - result.get('size_after', 0)
        result['space_saved'] = format_file_size(space_saved)

        logger.info(f"Database vacuum completed in {operation_time:.2f} seconds, saved {space_saved} bytes")
        return create_success_response("Database vacuum completed successfully", 200, result)

    except Exception as e:
        logger.error(f"Error during database vacuum: {str(e)}")
        return create_error_response("Database vacuum failed", 500)


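# A sketch of what a concrete vacuum_database() boils down to for SQLite; the
# development stub above returns canned numbers, and the database path here is
# an assumption:
import os
import sqlite3

def vacuum_sqlite(db_path: str = "data/aniworld.db") -> dict:
    """Run VACUUM and report the size change in bytes."""
    size_before = os.path.getsize(db_path)
    with sqlite3.connect(db_path) as conn:
        conn.execute("VACUUM")  # rewrites the file, reclaiming free pages
    size_after = os.path.getsize(db_path)
    return {"size_before": size_before, "size_after": size_after}

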
@maintenance_bp.route('/maintenance/database/analyze', methods=['POST'])
@require_auth
@handle_api_errors
def analyze_database() -> Tuple[Any, int]:
    """
    Analyze the database to update query planner statistics.

    Returns:
        JSON response with analyze operation results
    """
    try:
        logger.info("Starting database analyze operation")
        start_time = time.time()

        result = database_manager.analyze_database()

        operation_time = time.time() - start_time
        result['operation_time'] = round(operation_time, 2)

        logger.info(f"Database analyze completed in {operation_time:.2f} seconds")
        return create_success_response("Database analyze completed successfully", 200, result)

    except Exception as e:
        logger.error(f"Error during database analyze: {str(e)}")
        return create_error_response("Database analyze failed", 500)


@maintenance_bp.route('/maintenance/database/integrity-check', methods=['POST'])
@require_auth
@handle_api_errors
def integrity_check() -> Tuple[Any, int]:
    """
    Perform database integrity check.

    Returns:
        JSON response with integrity check results
    """
    try:
        logger.info("Starting database integrity check")
        start_time = time.time()

        result = database_manager.integrity_check()

        operation_time = time.time() - start_time
        result['operation_time'] = round(operation_time, 2)
        result['timestamp'] = datetime.now().isoformat()

        if result['status'] == 'ok':
            logger.info(f"Database integrity check passed in {operation_time:.2f} seconds")
            return create_success_response("Database integrity check passed", 200, result)
        else:
            logger.warning(f"Database integrity check found issues: {result['errors']}")
            return create_success_response("Database integrity check completed with issues", 200, result)

    except Exception as e:
        logger.error(f"Error during database integrity check: {str(e)}")
        return create_error_response("Database integrity check failed", 500)


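# A sketch of the underlying SQLite check the endpoint above wraps (the
# development stub simply returns {'status': 'ok'}; the path is an assumption):
import sqlite3

def sqlite_integrity_check(db_path: str = "data/aniworld.db") -> dict:
    """Run PRAGMA integrity_check and normalise the result."""
    with sqlite3.connect(db_path) as conn:
        rows = [row[0] for row in conn.execute("PRAGMA integrity_check")]
    # SQLite returns a single row containing 'ok' when the database is healthy
    if rows == ["ok"]:
        return {"status": "ok", "errors": []}
    return {"status": "corrupt", "errors": rows}

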
@maintenance_bp.route('/maintenance/database/reindex', methods=['POST'])
@require_auth
@handle_api_errors
def reindex_database() -> Tuple[Any, int]:
    """
    Rebuild database indexes for optimal performance.

    Returns:
        JSON response with reindex operation results
    """
    try:
        logger.info("Starting database reindex operation")
        start_time = time.time()

        result = database_manager.reindex_database()

        operation_time = time.time() - start_time
        result['operation_time'] = round(operation_time, 2)

        logger.info(f"Database reindex completed in {operation_time:.2f} seconds, rebuilt {result.get('indexes_rebuilt', 0)} indexes")
        return create_success_response("Database reindex completed successfully", 200, result)

    except Exception as e:
        logger.error(f"Error during database reindex: {str(e)}")
        return create_error_response("Database reindex failed", 500)


@maintenance_bp.route('/maintenance/database/optimize', methods=['POST'])
@require_auth
@handle_api_errors
@validate_json_input(
    optional_fields=['operations', 'force'],
    field_types={'operations': list, 'force': bool}
)
def optimize_database() -> Tuple[Any, int]:
    """
    Perform comprehensive database optimization.

    Request Body:
        - operations: List of operations to perform (optional, default: all)
        - force: Force optimization even if recently performed (optional, default: false)

    Returns:
        JSON response with optimization results
    """
    data = request.get_json() or {}
    operations = data.get('operations', ['vacuum', 'analyze', 'reindex'])
    force = data.get('force', False)

    # Validate operations
    allowed_operations = ['vacuum', 'analyze', 'reindex', 'integrity_check']
    invalid_operations = [op for op in operations if op not in allowed_operations]
    if invalid_operations:
        return create_error_response(f"Invalid operations: {', '.join(invalid_operations)}", 400)

    try:
        logger.info(f"Starting database optimization with operations: {operations}")
        start_time = time.time()

        result = database_manager.optimize_database(
            operations=operations,
            force=force
        )

        operation_time = time.time() - start_time
        result['operation_time'] = round(operation_time, 2)
        result['timestamp'] = datetime.now().isoformat()

        logger.info(f"Database optimization completed in {operation_time:.2f} seconds")
        return create_success_response("Database optimization completed successfully", 200, result)

    except Exception as e:
        logger.error(f"Error during database optimization: {str(e)}")
        return create_error_response("Database optimization failed", 500)


@maintenance_bp.route('/maintenance/database/stats', methods=['GET'])
@require_auth
@handle_api_errors
def get_database_stats() -> Tuple[Any, int]:
    """
    Get database statistics and health information.

    Returns:
        JSON response with database statistics
    """
    try:
        stats = database_manager.get_database_stats()

        # Add formatted values
        if 'size' in stats:
            stats['size_formatted'] = format_file_size(stats['size'])

        # Add slow queries
        slow_queries = database_manager.get_slow_queries(limit=10)
        stats['slow_queries'] = slow_queries

        return create_success_response("Database statistics retrieved successfully", 200, stats)

    except Exception as e:
        logger.error(f"Error getting database stats: {str(e)}")
        return create_error_response("Failed to get database statistics", 500)


@maintenance_bp.route('/maintenance/cleanup/temp-files', methods=['POST'])
@require_auth
@handle_api_errors
def cleanup_temp_files() -> Tuple[Any, int]:
    """
    Clean up temporary files.

    Returns:
        JSON response with cleanup results
    """
    try:
        logger.info("Starting temporary files cleanup")

        result = cleanup_manager.cleanup_temp_files()
        result['space_freed_formatted'] = format_file_size(result.get('space_freed', 0))
        result['timestamp'] = datetime.now().isoformat()

        logger.info(f"Temporary files cleanup completed: {result['files_deleted']} files deleted, {result['space_freed']} bytes freed")
        return create_success_response("Temporary files cleanup completed", 200, result)

    except Exception as e:
        logger.error(f"Error during temp files cleanup: {str(e)}")
        return create_error_response("Temporary files cleanup failed", 500)


@maintenance_bp.route('/maintenance/cleanup/logs', methods=['POST'])
@require_auth
@handle_api_errors
@validate_json_input(
    optional_fields=['older_than_days', 'keep_recent'],
    field_types={'older_than_days': int, 'keep_recent': int}
)
def cleanup_logs() -> Tuple[Any, int]:
    """
    Clean up old log files.

    Request Body:
        - older_than_days: Delete logs older than this many days (optional, default: 30)
        - keep_recent: Number of recent log files to keep (optional, default: 10)

    Returns:
        JSON response with cleanup results
    """
    data = request.get_json() or {}
    older_than_days = data.get('older_than_days', 30)
    keep_recent = data.get('keep_recent', 10)

    try:
        logger.info(f"Starting log cleanup: older than {older_than_days} days, keep {keep_recent} recent")

        result = cleanup_manager.cleanup_logs(
            older_than_days=older_than_days,
            keep_recent=keep_recent
        )

        result['space_freed_formatted'] = format_file_size(result.get('space_freed', 0))
        result['timestamp'] = datetime.now().isoformat()

        logger.info(f"Log cleanup completed: {result['logs_deleted']} logs deleted, {result['space_freed']} bytes freed")
        return create_success_response("Log cleanup completed", 200, result)

    except Exception as e:
        logger.error(f"Error during log cleanup: {str(e)}")
        return create_error_response("Log cleanup failed", 500)


@maintenance_bp.route('/maintenance/cleanup/downloads', methods=['POST'])
@require_auth
@handle_api_errors
@validate_json_input(
    optional_fields=['remove_failed', 'remove_incomplete', 'older_than_days'],
    field_types={'remove_failed': bool, 'remove_incomplete': bool, 'older_than_days': int}
)
def cleanup_downloads() -> Tuple[Any, int]:
    """
    Clean up download files and records.

    Request Body:
        - remove_failed: Remove failed downloads (optional, default: true)
        - remove_incomplete: Remove incomplete downloads (optional, default: false)
        - older_than_days: Remove downloads older than this many days (optional)

    Returns:
        JSON response with cleanup results
    """
    data = request.get_json() or {}
    remove_failed = data.get('remove_failed', True)
    remove_incomplete = data.get('remove_incomplete', False)
    older_than_days = data.get('older_than_days')

    try:
        logger.info(f"Starting download cleanup: failed={remove_failed}, incomplete={remove_incomplete}, older_than={older_than_days}")

        result = cleanup_manager.cleanup_downloads(
            remove_failed=remove_failed,
            remove_incomplete=remove_incomplete,
            older_than_days=older_than_days
        )

        result['space_freed_formatted'] = format_file_size(result.get('space_freed', 0))
        result['timestamp'] = datetime.now().isoformat()

        logger.info(f"Download cleanup completed: {result['downloads_cleaned']} downloads cleaned, {result['space_freed']} bytes freed")
        return create_success_response("Download cleanup completed", 200, result)

    except Exception as e:
        logger.error(f"Error during download cleanup: {str(e)}")
        return create_error_response("Download cleanup failed", 500)


@maintenance_bp.route('/maintenance/cleanup/cache', methods=['POST'])
@require_auth
@handle_api_errors
def cleanup_cache() -> Tuple[Any, int]:
    """
    Clear application cache.

    Returns:
        JSON response with cleanup results
    """
    try:
        logger.info("Starting cache cleanup")

        result = cleanup_manager.cleanup_cache()
        result['space_freed_formatted'] = format_file_size(result.get('space_freed', 0))
        result['timestamp'] = datetime.now().isoformat()

        logger.info(f"Cache cleanup completed: {result['space_freed']} bytes freed")
        return create_success_response("Cache cleanup completed", 200, result)

    except Exception as e:
        logger.error(f"Error during cache cleanup: {str(e)}")
        return create_error_response("Cache cleanup failed", 500)


@maintenance_bp.route('/maintenance/cleanup/backups', methods=['POST'])
@require_auth
@handle_api_errors
@validate_json_input(
    optional_fields=['keep_count', 'older_than_days'],
    field_types={'keep_count': int, 'older_than_days': int}
)
def cleanup_old_backups() -> Tuple[Any, int]:
    """
    Clean up old backup files.

    Request Body:
        - keep_count: Number of recent backups to keep (optional, default: 10)
        - older_than_days: Delete backups older than this many days (optional, default: 90)

    Returns:
        JSON response with cleanup results
    """
    data = request.get_json() or {}
    keep_count = data.get('keep_count', 10)
    older_than_days = data.get('older_than_days', 90)

    try:
        logger.info(f"Starting backup cleanup: keep {keep_count} backups, older than {older_than_days} days")

        result = cleanup_manager.cleanup_old_backups(
            keep_count=keep_count,
            older_than_days=older_than_days
        )

        result['space_freed_formatted'] = format_file_size(result.get('space_freed', 0))
        result['timestamp'] = datetime.now().isoformat()

        logger.info(f"Backup cleanup completed: {result['backups_deleted']} backups deleted, {result['space_freed']} bytes freed")
        return create_success_response("Backup cleanup completed", 200, result)

    except Exception as e:
        logger.error(f"Error during backup cleanup: {str(e)}")
        return create_error_response("Backup cleanup failed", 500)


@maintenance_bp.route('/maintenance/cleanup/stats', methods=['GET'])
@require_auth
@handle_api_errors
def get_cleanup_stats() -> Tuple[Any, int]:
    """
    Get cleanup statistics and recommendations.

    Returns:
        JSON response with cleanup statistics
    """
    try:
        stats = cleanup_manager.get_cleanup_stats()

        # Add formatted sizes
        for key in ['temp_files_size', 'log_files_size', 'cache_size', 'old_backups_size']:
            if key in stats:
                stats[f"{key}_formatted"] = format_file_size(stats[key])

        # Add recommendations
        recommendations = []
        if stats.get('temp_files', 0) > 100:
            recommendations.append("Consider cleaning temporary files")
        if stats.get('log_files_size', 0) > 100 * 1024 * 1024:  # 100 MB
            recommendations.append("Consider cleaning old log files")
        if stats.get('cache_size', 0) > 500 * 1024 * 1024:  # 500 MB
            recommendations.append("Consider clearing cache")

        stats['recommendations'] = recommendations

        return create_success_response("Cleanup statistics retrieved successfully", 200, stats)

    except Exception as e:
        logger.error(f"Error getting cleanup stats: {str(e)}")
        return create_error_response("Failed to get cleanup statistics", 500)


@maintenance_bp.route('/maintenance/scheduled-tasks', methods=['GET'])
@require_auth
@handle_api_errors
def get_scheduled_tasks() -> Tuple[Any, int]:
    """
    Get scheduled maintenance tasks.

    Returns:
        JSON response with scheduled tasks
    """
    try:
        tasks = scheduler_manager.get_scheduled_tasks()

        return create_success_response("Scheduled tasks retrieved successfully", 200, tasks)

    except Exception as e:
        logger.error(f"Error getting scheduled tasks: {str(e)}")
        return create_error_response("Failed to get scheduled tasks", 500)


@maintenance_bp.route('/maintenance/scheduled-tasks', methods=['POST'])
@require_auth
@handle_api_errors
@validate_json_input(
    required_fields=['name', 'task_type', 'schedule'],
    optional_fields=['config', 'enabled'],
    field_types={'name': str, 'task_type': str, 'schedule': str, 'config': dict, 'enabled': bool}
)
def create_scheduled_task() -> Tuple[Any, int]:
    """
    Create a new scheduled maintenance task.

    Request Body:
        - name: Task name (required)
        - task_type: Type of task (required)
        - schedule: Cron-style schedule (required)
        - config: Task configuration (optional)
        - enabled: Whether task is enabled (optional, default: true)

    Returns:
        JSON response with created task
    """
    data = request.get_json()

    # Validate task type (the schedule string itself is not validated here;
    # see the sketch after this function)
    allowed_task_types = [
        'database_vacuum', 'database_analyze', 'cleanup_temp_files',
        'cleanup_logs', 'cleanup_downloads', 'cleanup_cache', 'backup_database'
    ]

    if data['task_type'] not in allowed_task_types:
        return create_error_response(f"Invalid task type. Must be one of: {', '.join(allowed_task_types)}", 400)

    try:
        task_id = scheduler_manager.create_scheduled_task(
            name=data['name'],
            task_type=data['task_type'],
            schedule=data['schedule'],
            config=data.get('config', {}),
            enabled=data.get('enabled', True)
        )

        logger.info(f"Created scheduled task {task_id}: {data['name']} ({data['task_type']})")
        return create_success_response("Scheduled task created successfully", 201, {'id': task_id})

    except Exception as e:
        logger.error(f"Error creating scheduled task: {str(e)}")
        return create_error_response("Failed to create scheduled task", 500)


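# The endpoint above accepts any string as a cron-style schedule. A hedged
# sketch of up-front validation with the third-party croniter package (an
# assumption: croniter is not imported anywhere in this module):
from croniter import croniter

def is_valid_cron(expression: str) -> bool:
    """True if croniter can parse the cron expression."""
    try:
        croniter(expression)
        return True
    except ValueError:
        return False

# e.g. is_valid_cron("0 3 * * *") -> True; is_valid_cron("every day") -> False

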
@maintenance_bp.route('/maintenance/scheduled-tasks/<int:task_id>', methods=['PUT'])
@require_auth
@handle_api_errors
@validate_json_input(
    optional_fields=['name', 'schedule', 'config', 'enabled'],
    field_types={'name': str, 'schedule': str, 'config': dict, 'enabled': bool}
)
def update_scheduled_task(task_id: int) -> Tuple[Any, int]:
    """
    Update a scheduled maintenance task.

    Args:
        task_id: Task ID

    Request Body:
        - name: Task name (optional)
        - schedule: Cron-style schedule (optional)
        - config: Task configuration (optional)
        - enabled: Whether task is enabled (optional)

    Returns:
        JSON response with update result
    """
    data = request.get_json()

    try:
        success = scheduler_manager.update_scheduled_task(task_id, **data)

        if success:
            logger.info(f"Updated scheduled task {task_id}")
            return create_success_response("Scheduled task updated successfully")
        else:
            return create_error_response("Scheduled task not found", 404)

    except Exception as e:
        logger.error(f"Error updating scheduled task {task_id}: {str(e)}")
        return create_error_response("Failed to update scheduled task", 500)


@maintenance_bp.route('/maintenance/scheduled-tasks/<int:task_id>', methods=['DELETE'])
@require_auth
@handle_api_errors
def delete_scheduled_task(task_id: int) -> Tuple[Any, int]:
    """
    Delete a scheduled maintenance task.

    Args:
        task_id: Task ID

    Returns:
        JSON response with deletion result
    """
    try:
        success = scheduler_manager.delete_scheduled_task(task_id)

        if success:
            logger.info(f"Deleted scheduled task {task_id}")
            return create_success_response("Scheduled task deleted successfully")
        else:
            return create_error_response("Scheduled task not found", 404)

    except Exception as e:
        logger.error(f"Error deleting scheduled task {task_id}: {str(e)}")
        return create_error_response("Failed to delete scheduled task", 500)


@maintenance_bp.route('/maintenance/history', methods=['GET'])
@require_auth
@handle_api_errors
@validate_query_params(
    allowed_params=['task_type', 'days', 'limit'],
    param_types={'days': int, 'limit': int}
)
def get_maintenance_history() -> Tuple[Any, int]:
    """
    Get maintenance task execution history.

    Query Parameters:
        - task_type: Filter by task type (optional)
        - days: Number of days of history (optional, default: 30)
        - limit: Maximum number of records (optional, default: 100)

    Returns:
        JSON response with maintenance history
    """
    task_type = request.args.get('task_type')
    days = request.args.get('days', 30, type=int)
    limit = request.args.get('limit', 100, type=int)

    try:
        history = scheduler_manager.get_task_history(
            task_type=task_type,
            days=days,
            limit=limit
        )

        return create_success_response("Maintenance history retrieved successfully", 200, history)

    except Exception as e:
        logger.error(f"Error getting maintenance history: {str(e)}")
        return create_error_response("Failed to get maintenance history", 500)

@ -1,406 +0,0 @@
"""
|
|
||||||
Performance Optimization API Endpoints
|
|
||||||
|
|
||||||
This module provides REST API endpoints for performance monitoring
|
|
||||||
and optimization features.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from flask import Blueprint, request, jsonify
|
|
||||||
from auth import require_auth, optional_auth
|
|
||||||
from error_handler import handle_api_errors, RetryableError
|
|
||||||
from performance_optimizer import (
|
|
||||||
speed_limiter, download_cache, memory_monitor,
|
|
||||||
download_manager, resume_manager, DownloadTask
|
|
||||||
)
|
|
||||||
import uuid
|
|
||||||
from datetime import datetime
|
|
||||||
|
|
||||||
|
|
||||||
# Blueprint for performance optimization endpoints
|
|
||||||
performance_bp = Blueprint('performance', __name__)
|
|
||||||
|
|
||||||
|
|
||||||
@performance_bp.route('/api/performance/speed-limit', methods=['GET'])
|
|
||||||
@handle_api_errors
|
|
||||||
@optional_auth
|
|
||||||
def get_speed_limit():
|
|
||||||
"""Get current download speed limit."""
|
|
||||||
try:
|
|
||||||
return jsonify({
|
|
||||||
'status': 'success',
|
|
||||||
'data': {
|
|
||||||
'speed_limit_mbps': speed_limiter.max_speed_mbps,
|
|
||||||
'current_speed_mbps': speed_limiter.get_current_speed()
|
|
||||||
}
|
|
||||||
})
|
|
||||||
except Exception as e:
|
|
||||||
raise RetryableError(f"Failed to get speed limit: {e}")
|
|
||||||
|
|
||||||
|
|
||||||
@performance_bp.route('/api/performance/speed-limit', methods=['POST'])
@handle_api_errors
@require_auth
def set_speed_limit():
    """Set download speed limit."""
    try:
        data = request.get_json() or {}
        speed_mbps = data.get('speed_mbps', 0)

        if speed_mbps < 0:
            return jsonify({
                'status': 'error',
                'message': 'Speed limit must be non-negative (0 = unlimited)'
            }), 400

        speed_limiter.set_speed_limit(speed_mbps)

        return jsonify({
            'status': 'success',
            'message': f'Speed limit set to {speed_mbps} MB/s' if speed_mbps > 0 else 'Speed limit removed',
            'data': {
                'speed_limit_mbps': speed_mbps
            }
        })
    except Exception as e:
        raise RetryableError(f"Failed to set speed limit: {e}")


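# The performance_optimizer module is not shown in this diff, so the
# speed_limiter interface used above is only known from its call sites. A
# token-bucket sketch with that surface (max_speed_mbps, set_speed_limit),
# offered as an assumption about how such a limiter is commonly built:
import threading
import time

class TokenBucketLimiter:
    """Throttle byte consumption to max_speed_mbps; 0 means unlimited."""

    def __init__(self, max_speed_mbps: float = 0):
        self.max_speed_mbps = max_speed_mbps
        self._tokens = 0.0
        self._last = time.monotonic()
        self._lock = threading.Lock()

    def set_speed_limit(self, mbps: float) -> None:
        self.max_speed_mbps = mbps

    def throttle(self, nbytes: int) -> None:
        """Block until nbytes may be consumed under the configured rate."""
        if self.max_speed_mbps <= 0:
            return
        rate = self.max_speed_mbps * 1024 * 1024  # bytes per second
        with self._lock:
            now = time.monotonic()
            # refill the bucket, capped at one second's worth of tokens
            self._tokens = min(rate, self._tokens + (now - self._last) * rate)
            self._last = now
            if nbytes > self._tokens:
                time.sleep((nbytes - self._tokens) / rate)
                self._tokens = 0.0
            else:
                self._tokens -= nbytes

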
@performance_bp.route('/api/performance/cache/stats')
@handle_api_errors
@optional_auth
def get_cache_stats():
    """Get cache statistics."""
    try:
        stats = download_cache.get_stats()
        return jsonify({
            'status': 'success',
            'data': stats
        })
    except Exception as e:
        raise RetryableError(f"Failed to get cache stats: {e}")


@performance_bp.route('/api/performance/cache/clear', methods=['POST'])
@handle_api_errors
@require_auth
def clear_cache():
    """Clear download cache."""
    try:
        download_cache.clear()
        return jsonify({
            'status': 'success',
            'message': 'Cache cleared successfully'
        })
    except Exception as e:
        raise RetryableError(f"Failed to clear cache: {e}")


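# download_cache is likewise defined in the unshown performance_optimizer
# module; only get_stats() and clear() appear above. A minimal thread-safe
# sketch with that surface (hypothetical, not the project's implementation):
import threading

class SimpleDownloadCache:
    """Dict-backed cache tracking hit and miss counts."""

    def __init__(self):
        self._data = {}
        self._hits = 0
        self._misses = 0
        self._lock = threading.Lock()

    def get(self, key):
        with self._lock:
            if key in self._data:
                self._hits += 1
                return self._data[key]
            self._misses += 1
            return None

    def put(self, key, value):
        with self._lock:
            self._data[key] = value

    def clear(self):
        with self._lock:
            self._data.clear()

    def get_stats(self) -> dict:
        with self._lock:
            return {'entries': len(self._data), 'hits': self._hits, 'misses': self._misses}

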
@performance_bp.route('/api/performance/memory/stats')
@handle_api_errors
@optional_auth
def get_memory_stats():
    """Get memory usage statistics."""
    try:
        stats = memory_monitor.get_memory_stats()
        return jsonify({
            'status': 'success',
            'data': stats
        })
    except Exception as e:
        raise RetryableError(f"Failed to get memory stats: {e}")


@performance_bp.route('/api/performance/memory/gc', methods=['POST'])
@handle_api_errors
@require_auth
def force_garbage_collection():
    """Force garbage collection to free memory."""
    try:
        memory_monitor.force_garbage_collection()
        stats = memory_monitor.get_memory_stats()

        return jsonify({
            'status': 'success',
            'message': 'Garbage collection completed',
            'data': stats
        })
    except Exception as e:
        raise RetryableError(f"Failed to force garbage collection: {e}")


@performance_bp.route('/api/performance/downloads/workers', methods=['GET'])
@handle_api_errors
@optional_auth
def get_worker_count():
    """Get current number of download workers."""
    try:
        return jsonify({
            'status': 'success',
            'data': {
                'max_workers': download_manager.max_workers,
                'active_tasks': len(download_manager.active_tasks)
            }
        })
    except Exception as e:
        raise RetryableError(f"Failed to get worker count: {e}")


@performance_bp.route('/api/performance/downloads/workers', methods=['POST'])
|
|
||||||
@handle_api_errors
|
|
||||||
@require_auth
|
|
||||||
def set_worker_count():
|
|
||||||
"""Set number of download workers."""
|
|
||||||
try:
|
|
||||||
data = request.get_json()
|
|
||||||
max_workers = data.get('max_workers', 3)
|
|
||||||
|
|
||||||
if not isinstance(max_workers, int) or max_workers < 1 or max_workers > 10:
|
|
||||||
return jsonify({
|
|
||||||
'status': 'error',
|
|
||||||
'message': 'Worker count must be between 1 and 10'
|
|
||||||
}), 400
|
|
||||||
|
|
||||||
download_manager.set_max_workers(max_workers)
|
|
||||||
|
|
||||||
return jsonify({
|
|
||||||
'status': 'success',
|
|
||||||
'message': f'Worker count set to {max_workers}',
|
|
||||||
'data': {
|
|
||||||
'max_workers': max_workers
|
|
||||||
}
|
|
||||||
})
|
|
||||||
except Exception as e:
|
|
||||||
raise RetryableError(f"Failed to set worker count: {e}")
|
|
||||||
|
|
||||||
|
|
||||||
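# Usage sketch for the worker endpoints above (same assumptions as earlier:
# base URL and token are placeholders, not values from this module):
#
#     import requests
#
#     resp = requests.post(
#         'http://127.0.0.1:5000/api/performance/downloads/workers',
#         headers={'Authorization': 'Bearer <token>'},
#         json={'max_workers': 5},  # must be an int between 1 and 10
#     )
#     print(resp.json())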
@performance_bp.route('/api/performance/downloads/stats')
@handle_api_errors
@optional_auth
def get_download_stats():
    """Get download manager statistics."""
    try:
        stats = download_manager.get_statistics()
        return jsonify({
            'status': 'success',
            'data': stats
        })
    except Exception as e:
        raise RetryableError(f"Failed to get download stats: {e}")


@performance_bp.route('/api/performance/downloads/tasks')
@handle_api_errors
@optional_auth
def get_all_download_tasks():
    """Get all download tasks."""
    try:
        tasks = download_manager.get_all_tasks()
        return jsonify({
            'status': 'success',
            'data': tasks
        })
    except Exception as e:
        raise RetryableError(f"Failed to get download tasks: {e}")


@performance_bp.route('/api/performance/downloads/tasks/<task_id>')
@handle_api_errors
@optional_auth
def get_download_task(task_id):
    """Get specific download task status."""
    try:
        task_status = download_manager.get_task_status(task_id)

        if not task_status:
            return jsonify({
                'status': 'error',
                'message': 'Task not found'
            }), 404

        return jsonify({
            'status': 'success',
            'data': task_status
        })
    except Exception as e:
        raise RetryableError(f"Failed to get task status: {e}")


@performance_bp.route('/api/performance/downloads/add-task', methods=['POST'])
@handle_api_errors
@require_auth
def add_download_task():
    """Add a new download task to the queue."""
    try:
        data = request.get_json()

        required_fields = ['serie_name', 'season', 'episode', 'key', 'output_path', 'temp_path']
        for field in required_fields:
            if field not in data:
                return jsonify({
                    'status': 'error',
                    'message': f'Missing required field: {field}'
                }), 400

        # Create download task
        task = DownloadTask(
            task_id=str(uuid.uuid4()),
            serie_name=data['serie_name'],
            season=int(data['season']),
            episode=int(data['episode']),
            key=data['key'],
            language=data.get('language', 'German Dub'),
            output_path=data['output_path'],
            temp_path=data['temp_path'],
            priority=data.get('priority', 0)
        )

        task_id = download_manager.add_task(task)

        return jsonify({
            'status': 'success',
            'message': 'Download task added successfully',
            'data': {
                'task_id': task_id
            }
        })
    except Exception as e:
        raise RetryableError(f"Failed to add download task: {e}")
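# Example request body for the add-task endpoint above. The required keys come
# from `required_fields`; all values shown are illustrative placeholders.
#
#     import requests
#
#     payload = {
#         'serie_name': 'example-series',
#         'season': 1,
#         'episode': 3,
#         'key': 'example-provider-key',
#         'output_path': '/downloads/example-series/S01E03.mp4',
#         'temp_path': '/tmp/example-series/S01E03.part',
#         'language': 'German Dub',   # optional; this is the default
#         'priority': 0,              # optional
#     }
#     resp = requests.post(
#         'http://127.0.0.1:5000/api/performance/downloads/add-task',
#         headers={'Authorization': 'Bearer <token>'}, json=payload)
#     print(resp.json()['data']['task_id'])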
@performance_bp.route('/api/performance/resume/tasks')
@handle_api_errors
@optional_auth
def get_resumable_tasks():
    """Get list of tasks that can be resumed."""
    try:
        resumable_tasks = resume_manager.get_resumable_tasks()

        # Get detailed info for each resumable task
        tasks_info = []
        for task_id in resumable_tasks:
            resume_info = resume_manager.load_resume_info(task_id)
            if resume_info:
                tasks_info.append({
                    'task_id': task_id,
                    'resume_info': resume_info
                })

        return jsonify({
            'status': 'success',
            'data': {
                'resumable_tasks': tasks_info,
                'count': len(tasks_info)
            }
        })
    except Exception as e:
        raise RetryableError(f"Failed to get resumable tasks: {e}")


@performance_bp.route('/api/performance/resume/clear/<task_id>', methods=['POST'])
@handle_api_errors
@require_auth
def clear_resume_info(task_id):
    """Clear resume information for a specific task."""
    try:
        resume_manager.clear_resume_info(task_id)

        return jsonify({
            'status': 'success',
            'message': f'Resume information cleared for task: {task_id}'
        })
    except Exception as e:
        raise RetryableError(f"Failed to clear resume info: {e}")


@performance_bp.route('/api/performance/system/optimize', methods=['POST'])
@handle_api_errors
@require_auth
def optimize_system():
    """Perform system optimization tasks."""
    try:
        optimization_results = {}

        # Force garbage collection
        memory_monitor.force_garbage_collection()
        memory_stats = memory_monitor.get_memory_stats()
        optimization_results['memory_gc'] = {
            'completed': True,
            'memory_mb': memory_stats.get('rss_mb', 0)
        }

        # Clean up expired cache entries
        download_cache._cleanup_expired()
        cache_stats = download_cache.get_stats()
        optimization_results['cache_cleanup'] = {
            'completed': True,
            'entries': cache_stats.get('entry_count', 0),
            'size_mb': cache_stats.get('total_size_mb', 0)
        }

        # Clean up old resume files (older than 7 days)
        import os
        import time
        resume_dir = resume_manager.resume_dir
        cleaned_files = 0

        try:
            for filename in os.listdir(resume_dir):
                file_path = os.path.join(resume_dir, filename)
                if os.path.isfile(file_path):
                    file_age = time.time() - os.path.getmtime(file_path)
                    if file_age > 7 * 24 * 3600:  # 7 days in seconds
                        os.remove(file_path)
                        cleaned_files += 1
        except Exception:
            pass  # Ignore errors in cleanup

        optimization_results['resume_cleanup'] = {
            'completed': True,
            'files_removed': cleaned_files
        }

        return jsonify({
            'status': 'success',
            'message': 'System optimization completed',
            'data': optimization_results
        })
    except Exception as e:
        raise RetryableError(f"System optimization failed: {e}")


@performance_bp.route('/api/performance/config')
@handle_api_errors
@optional_auth
def get_performance_config():
    """Get current performance configuration."""
    try:
        config = {
            'speed_limit': {
                'current_mbps': speed_limiter.max_speed_mbps,
                'unlimited': speed_limiter.max_speed_mbps == 0
            },
            'downloads': {
                'max_workers': download_manager.max_workers,
                'active_tasks': len(download_manager.active_tasks)
            },
            'cache': {
                'max_size_mb': download_cache.max_size_bytes / (1024 * 1024),
                **download_cache.get_stats()
            },
            'memory': {
                'warning_threshold_mb': memory_monitor.warning_threshold / (1024 * 1024),
                'critical_threshold_mb': memory_monitor.critical_threshold / (1024 * 1024),
                **memory_monitor.get_memory_stats()
            }
        }

        return jsonify({
            'status': 'success',
            'data': config
        })
    except Exception as e:
        raise RetryableError(f"Failed to get performance config: {e}")


# Export the blueprint
__all__ = ['performance_bp']
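# End-to-end usage sketch for this module's endpoints. Everything here is a
# hedged illustration: the base URL, port, and credentials are placeholders,
# and the login route is assumed to be the one from the simple auth
# controller further below.
#
#     import requests
#
#     base = 'http://127.0.0.1:5000'
#     token = requests.post(f'{base}/auth/login',
#                           json={'password': '<master password>'}
#                           ).json()['data']['token']
#     headers = {'Authorization': f'Bearer {token}'}
#
#     print(requests.get(f'{base}/api/performance/config').json())
#     requests.post(f'{base}/api/performance/system/optimize', headers=headers)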
@ -1,280 +0,0 @@

from flask import Blueprint, jsonify, request
from web.controllers.auth_controller import require_auth
from shared.utils.process_utils import (
    process_lock_manager,
    RESCAN_LOCK,
    DOWNLOAD_LOCK,
    SEARCH_LOCK,
    check_process_locks,
    get_process_status,
    update_process_progress,
    is_process_running,
    episode_deduplicator,
    ProcessLockError
)
import logging

logger = logging.getLogger(__name__)

process_bp = Blueprint('process', __name__, url_prefix='/api/process')


@process_bp.route('/locks/status', methods=['GET'])
@require_auth
def get_all_locks_status():
    """Get status of all process locks."""
    try:
        # Clean up expired locks first
        cleaned = check_process_locks()
        if cleaned > 0:
            logger.info(f"Cleaned up {cleaned} expired locks")

        status = process_lock_manager.get_all_locks_status()

        # Add queue deduplication info
        status['queue_info'] = {
            'active_episodes': episode_deduplicator.get_count(),
            'episodes': episode_deduplicator.get_active_episodes()
        }

        return jsonify({
            'success': True,
            'locks': status
        })
    except Exception as e:
        logger.error(f"Error getting locks status: {e}")
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500


@process_bp.route('/locks/<lock_name>/status', methods=['GET'])
@require_auth
def get_lock_status(lock_name):
    """Get status of a specific process lock."""
    try:
        if lock_name not in [RESCAN_LOCK, DOWNLOAD_LOCK, SEARCH_LOCK]:
            return jsonify({
                'success': False,
                'error': 'Invalid lock name'
            }), 400

        status = get_process_status(lock_name)
        return jsonify({
            'success': True,
            'status': status
        })
    except Exception as e:
        logger.error(f"Error getting lock status for {lock_name}: {e}")
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500


@process_bp.route('/locks/<lock_name>/acquire', methods=['POST'])
@require_auth
def acquire_lock(lock_name):
    """Manually acquire a process lock."""
    try:
        if lock_name not in [RESCAN_LOCK, DOWNLOAD_LOCK, SEARCH_LOCK]:
            return jsonify({
                'success': False,
                'error': 'Invalid lock name'
            }), 400

        data = request.get_json() or {}
        locked_by = data.get('locked_by', 'manual')
        timeout_minutes = data.get('timeout_minutes', 60)

        success = process_lock_manager.acquire_lock(lock_name, locked_by, timeout_minutes)

        if success:
            return jsonify({
                'success': True,
                'message': f'Lock {lock_name} acquired successfully'
            })
        else:
            return jsonify({
                'success': False,
                'error': f'Lock {lock_name} is already held'
            }), 409

    except Exception as e:
        logger.error(f"Error acquiring lock {lock_name}: {e}")
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500
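# Usage sketch for the acquire endpoint above. The concrete lock-name strings
# are defined in shared.utils.process_utils (RESCAN_LOCK and friends); the
# 'download' value below is a placeholder, as are the host and token.
#
#     import requests
#
#     resp = requests.post(
#         'http://127.0.0.1:5000/api/process/locks/download/acquire',
#         headers={'Authorization': 'Bearer <token>'},
#         json={'locked_by': 'maintenance-script', 'timeout_minutes': 15},
#     )
#     print(resp.status_code, resp.json())  # 409 means the lock is held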
@process_bp.route('/locks/<lock_name>/release', methods=['POST'])
@require_auth
def release_lock(lock_name):
    """Manually release a process lock."""
    try:
        if lock_name not in [RESCAN_LOCK, DOWNLOAD_LOCK, SEARCH_LOCK]:
            return jsonify({
                'success': False,
                'error': 'Invalid lock name'
            }), 400

        success = process_lock_manager.release_lock(lock_name)

        if success:
            return jsonify({
                'success': True,
                'message': f'Lock {lock_name} released successfully'
            })
        else:
            return jsonify({
                'success': False,
                'error': f'Lock {lock_name} was not held'
            }), 404

    except Exception as e:
        logger.error(f"Error releasing lock {lock_name}: {e}")
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500


@process_bp.route('/locks/cleanup', methods=['POST'])
@require_auth
def cleanup_expired_locks():
    """Manually clean up expired locks."""
    try:
        cleaned = check_process_locks()
        return jsonify({
            'success': True,
            'cleaned_count': cleaned,
            'message': f'Cleaned up {cleaned} expired locks'
        })
    except Exception as e:
        logger.error(f"Error cleaning up locks: {e}")
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500


@process_bp.route('/locks/force-release-all', methods=['POST'])
@require_auth
def force_release_all_locks():
    """Force release all process locks (emergency use)."""
    try:
        data = request.get_json() or {}
        confirm = data.get('confirm', False)

        if not confirm:
            return jsonify({
                'success': False,
                'error': 'Confirmation required for force release'
            }), 400

        released = process_lock_manager.force_release_all()

        # Also clear queue deduplication
        episode_deduplicator.clear_all()

        return jsonify({
            'success': True,
            'released_count': released,
            'message': f'Force released {released} locks and cleared queue deduplication'
        })
    except Exception as e:
        logger.error(f"Error force releasing locks: {e}")
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500
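# The force-release endpoint above is destructive, so it demands an explicit
# confirmation flag in the body. Illustrative call (placeholders as before):
#
#     import requests
#
#     requests.post('http://127.0.0.1:5000/api/process/locks/force-release-all',
#                   headers={'Authorization': 'Bearer <token>'},
#                   json={'confirm': True})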
@process_bp.route('/locks/<lock_name>/progress', methods=['POST'])
@require_auth
def update_lock_progress(lock_name):
    """Update progress for a running process."""
    try:
        if lock_name not in [RESCAN_LOCK, DOWNLOAD_LOCK, SEARCH_LOCK]:
            return jsonify({
                'success': False,
                'error': 'Invalid lock name'
            }), 400

        if not is_process_running(lock_name):
            return jsonify({
                'success': False,
                'error': f'Process {lock_name} is not running'
            }), 404

        data = request.get_json() or {}
        progress_data = data.get('progress', {})

        update_process_progress(lock_name, progress_data)

        return jsonify({
            'success': True,
            'message': 'Progress updated successfully'
        })
    except Exception as e:
        logger.error(f"Error updating progress for {lock_name}: {e}")
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500


@process_bp.route('/queue/deduplication', methods=['GET'])
@require_auth
def get_queue_deduplication():
    """Get current queue deduplication status."""
    try:
        return jsonify({
            'success': True,
            'deduplication': {
                'active_count': episode_deduplicator.get_count(),
                'active_episodes': episode_deduplicator.get_active_episodes()
            }
        })
    except Exception as e:
        logger.error(f"Error getting queue deduplication: {e}")
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500


@process_bp.route('/queue/deduplication/clear', methods=['POST'])
@require_auth
def clear_queue_deduplication():
    """Clear all queue deduplication entries."""
    try:
        episode_deduplicator.clear_all()
        return jsonify({
            'success': True,
            'message': 'Queue deduplication cleared successfully'
        })
    except Exception as e:
        logger.error(f"Error clearing queue deduplication: {e}")
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500


@process_bp.route('/is-running/<process_name>', methods=['GET'])
@require_auth
def check_if_process_running(process_name):
    """Quick check if a specific process is running."""
    try:
        if process_name not in [RESCAN_LOCK, DOWNLOAD_LOCK, SEARCH_LOCK]:
            return jsonify({
                'success': False,
                'error': 'Invalid process name'
            }), 400

        is_running = is_process_running(process_name)

        return jsonify({
            'success': True,
            'is_running': is_running,
            'process_name': process_name
        })
    except Exception as e:
        logger.error(f"Error checking if process {process_name} is running: {e}")
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500
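# Quick liveness-check sketch for the is-running endpoint above ('rescan' is a
# placeholder for one of the valid lock-name strings):
#
#     import requests
#
#     r = requests.get('http://127.0.0.1:5000/api/process/is-running/rescan',
#                      headers={'Authorization': 'Bearer <token>'})
#     print(r.json()['is_running'])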
@ -1,187 +0,0 @@

from flask import Blueprint, jsonify, request
from web.controllers.auth_controller import require_auth
from application.services.scheduler_service import get_scheduler
import logging

logger = logging.getLogger(__name__)

scheduler_bp = Blueprint('scheduler', __name__, url_prefix='/api/scheduler')


@scheduler_bp.route('/config', methods=['GET'])
@require_auth
def get_scheduler_config():
    """Get current scheduler configuration."""
    try:
        scheduler = get_scheduler()
        if not scheduler:
            return jsonify({
                'success': False,
                'error': 'Scheduler not initialized'
            }), 500

        config = scheduler.get_scheduled_rescan_config()

        return jsonify({
            'success': True,
            'config': config
        })
    except Exception as e:
        logger.error(f"Error getting scheduler config: {e}")
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500


@scheduler_bp.route('/config', methods=['POST'])
@require_auth
def update_scheduler_config():
    """Update scheduler configuration."""
    try:
        data = request.get_json() or {}

        enabled = data.get('enabled', False)
        time_str = data.get('time', '03:00')
        auto_download = data.get('auto_download_after_rescan', False)

        # Validate inputs
        if enabled and not time_str:
            return jsonify({
                'success': False,
                'error': 'Time is required when scheduling is enabled'
            }), 400

        scheduler = get_scheduler()
        if not scheduler:
            return jsonify({
                'success': False,
                'error': 'Scheduler not initialized'
            }), 500

        # Update configuration
        scheduler.update_scheduled_rescan_config(enabled, time_str, auto_download)

        # Get updated config
        updated_config = scheduler.get_scheduled_rescan_config()

        return jsonify({
            'success': True,
            'message': 'Scheduler configuration updated successfully',
            'config': updated_config
        })

    except ValueError as e:
        return jsonify({
            'success': False,
            'error': str(e)
        }), 400
    except Exception as e:
        logger.error(f"Error updating scheduler config: {e}")
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500
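# Illustrative request for the config endpoint above; the body mirrors the
# three keys the handler reads (host, token, and values are placeholders):
#
#     import requests
#
#     requests.post('http://127.0.0.1:5000/api/scheduler/config',
#                   headers={'Authorization': 'Bearer <token>'},
#                   json={'enabled': True,
#                         'time': '03:00',
#                         'auto_download_after_rescan': False})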
@scheduler_bp.route('/status', methods=['GET'])
@require_auth
def get_scheduler_status():
    """Get current scheduler status and next jobs."""
    try:
        scheduler = get_scheduler()
        if not scheduler:
            return jsonify({
                'success': False,
                'error': 'Scheduler not initialized'
            }), 500

        config = scheduler.get_scheduled_rescan_config()
        jobs = scheduler.get_next_scheduled_jobs()

        return jsonify({
            'success': True,
            'status': {
                'running': config['is_running'],
                'config': config,
                'scheduled_jobs': jobs
            }
        })
    except Exception as e:
        logger.error(f"Error getting scheduler status: {e}")
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500


@scheduler_bp.route('/start', methods=['POST'])
@require_auth
def start_scheduler():
    """Start the scheduler."""
    try:
        scheduler = get_scheduler()
        if not scheduler:
            return jsonify({
                'success': False,
                'error': 'Scheduler not initialized'
            }), 500

        scheduler.start_scheduler()

        return jsonify({
            'success': True,
            'message': 'Scheduler started successfully'
        })
    except Exception as e:
        logger.error(f"Error starting scheduler: {e}")
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500


@scheduler_bp.route('/stop', methods=['POST'])
@require_auth
def stop_scheduler():
    """Stop the scheduler."""
    try:
        scheduler = get_scheduler()
        if not scheduler:
            return jsonify({
                'success': False,
                'error': 'Scheduler not initialized'
            }), 500

        scheduler.stop_scheduler()

        return jsonify({
            'success': True,
            'message': 'Scheduler stopped successfully'
        })
    except Exception as e:
        logger.error(f"Error stopping scheduler: {e}")
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500


@scheduler_bp.route('/trigger-rescan', methods=['POST'])
@require_auth
def trigger_manual_rescan():
    """Manually trigger a scheduled rescan for testing."""
    try:
        scheduler = get_scheduler()
        if not scheduler:
            return jsonify({
                'success': False,
                'error': 'Scheduler not initialized'
            }), 500

        scheduler.trigger_manual_scheduled_rescan()

        return jsonify({
            'success': True,
            'message': 'Manual scheduled rescan triggered'
        })
    except Exception as e:
        logger.error(f"Error triggering manual rescan: {e}")
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500
@ -1,637 +0,0 @@

"""
Search API Endpoints

This module provides REST API endpoints for advanced search functionality
across anime, episodes, and other content.
"""

from flask import Blueprint, request
from typing import Dict, List, Any, Optional
import re

from ...shared.auth_decorators import require_auth, optional_auth
from ...shared.error_handlers import handle_api_errors, APIException, ValidationError
from ...shared.validators import validate_pagination_params
from ...shared.response_helpers import (
    create_success_response, create_paginated_response, format_anime_response,
    format_episode_response, extract_pagination_params
)

# Import search components (these imports would need to be adjusted based on actual structure)
try:
    from search_manager import search_engine, SearchResult
    from database_manager import anime_repository, episode_repository
except ImportError:
    # Fallback for development/testing
    search_engine = None
    SearchResult = None
    anime_repository = None
    episode_repository = None


# Blueprint for search endpoints
search_bp = Blueprint('search', __name__, url_prefix='/api/v1/search')


@search_bp.route('', methods=['GET'])
@handle_api_errors
@validate_pagination_params
@optional_auth
def global_search() -> Dict[str, Any]:
    """
    Perform a global search across all content types.

    Query Parameters:
    - q: Search query (required)
    - types: Comma-separated list of content types (anime,episodes,all)
    - categories: Comma-separated list of categories to search
    - min_score: Minimum relevance score (0.0-1.0)
    - page: Page number (default: 1)
    - per_page: Items per page (default: 50, max: 1000)

    Returns:
        Paginated search results grouped by content type
    """
    if not search_engine:
        raise APIException("Search engine not available", 503)

    search_query = request.args.get('q', '').strip()
    if not search_query:
        raise ValidationError("Search query 'q' is required")

    if len(search_query) < 2:
        raise ValidationError("Search query must be at least 2 characters long")

    # Parse search types
    search_types = request.args.get('types', 'all').split(',')
    valid_types = ['anime', 'episodes', 'all']
    search_types = [t.strip() for t in search_types if t.strip() in valid_types]

    if not search_types or 'all' in search_types:
        search_types = ['anime', 'episodes']

    # Parse categories
    categories = request.args.get('categories', '').split(',')
    categories = [c.strip() for c in categories if c.strip()]

    # Parse minimum score
    min_score = request.args.get('min_score', '0.0')
    try:
        min_score = float(min_score)
        if not 0.0 <= min_score <= 1.0:
            raise ValueError()
    except ValueError:
        raise ValidationError("min_score must be a number between 0.0 and 1.0")

    # Get pagination parameters
    page, per_page = extract_pagination_params()

    # Perform search
    search_results = search_engine.search_all(
        query=search_query,
        content_types=search_types,
        categories=categories,
        min_score=min_score
    )

    # Group results by type
    grouped_results = {
        'anime': [],
        'episodes': [],
        'total_results': 0
    }

    for result in search_results:
        if result.content_type == 'anime':
            grouped_results['anime'].append({
                'id': result.content_id,
                'type': 'anime',
                'title': result.title,
                'description': result.description,
                'score': result.relevance_score,
                'data': format_anime_response(result.content_data)
            })
        elif result.content_type == 'episode':
            grouped_results['episodes'].append({
                'id': result.content_id,
                'type': 'episode',
                'title': result.title,
                'description': result.description,
                'score': result.relevance_score,
                'data': format_episode_response(result.content_data)
            })

        grouped_results['total_results'] += 1

    # Apply pagination to combined results
    all_results = []
    for result_type in ['anime', 'episodes']:
        all_results.extend(grouped_results[result_type])

    # Sort by relevance score
    all_results.sort(key=lambda x: x['score'], reverse=True)

    total = len(all_results)
    start_idx = (page - 1) * per_page
    end_idx = start_idx + per_page
    paginated_results = all_results[start_idx:end_idx]

    response = create_paginated_response(
        data=paginated_results,
        page=page,
        per_page=per_page,
        total=total,
        endpoint='search.global_search',
        q=search_query
    )

    # Add search metadata
    response['search'] = {
        'query': search_query,
        'types': search_types,
        'categories': categories,
        'min_score': min_score,
        'results_by_type': {
            'anime': len(grouped_results['anime']),
            'episodes': len(grouped_results['episodes'])
        }
    }

    return response
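# Query sketch for the global search endpoint above (host and query values are
# placeholders):
#
#     import requests
#
#     r = requests.get('http://127.0.0.1:5000/api/v1/search',
#                      params={'q': 'fullmetal', 'types': 'anime',
#                              'min_score': 0.5, 'page': 1, 'per_page': 20})
#     print(r.json()['search']['results_by_type'])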
@search_bp.route('/anime', methods=['GET'])
@handle_api_errors
@validate_pagination_params
@optional_auth
def search_anime() -> Dict[str, Any]:
    """
    Search anime with advanced filters.

    Query Parameters:
    - q: Search query (required)
    - genres: Comma-separated list of genres
    - status: Anime status filter
    - year_from: Starting year filter
    - year_to: Ending year filter
    - min_episodes: Minimum episode count
    - max_episodes: Maximum episode count
    - sort_by: Sort field (name, year, episodes, relevance)
    - sort_order: Sort order (asc, desc)
    - page: Page number (default: 1)
    - per_page: Items per page (default: 50, max: 1000)

    Returns:
        Paginated anime search results
    """
    if not anime_repository:
        raise APIException("Anime repository not available", 503)

    search_query = request.args.get('q', '').strip()
    if not search_query:
        raise ValidationError("Search query 'q' is required")

    # Parse filters
    genres = request.args.get('genres', '').split(',')
    genres = [g.strip() for g in genres if g.strip()]

    status_filter = request.args.get('status')

    # Parse year filters
    year_from = request.args.get('year_from')
    year_to = request.args.get('year_to')

    if year_from:
        try:
            year_from = int(year_from)
            if year_from < 1900 or year_from > 2100:
                raise ValueError()
        except ValueError:
            raise ValidationError("year_from must be a valid year between 1900 and 2100")

    if year_to:
        try:
            year_to = int(year_to)
            if year_to < 1900 or year_to > 2100:
                raise ValueError()
        except ValueError:
            raise ValidationError("year_to must be a valid year between 1900 and 2100")

    # Parse episode count filters
    min_episodes = request.args.get('min_episodes')
    max_episodes = request.args.get('max_episodes')

    if min_episodes:
        try:
            min_episodes = int(min_episodes)
            if min_episodes < 0:
                raise ValueError()
        except ValueError:
            raise ValidationError("min_episodes must be a non-negative integer")

    if max_episodes:
        try:
            max_episodes = int(max_episodes)
            if max_episodes < 0:
                raise ValueError()
        except ValueError:
            raise ValidationError("max_episodes must be a non-negative integer")

    # Parse sorting
    sort_by = request.args.get('sort_by', 'relevance')
    sort_order = request.args.get('sort_order', 'desc')

    valid_sort_fields = ['name', 'year', 'episodes', 'relevance', 'created_at']
    if sort_by not in valid_sort_fields:
        raise ValidationError(f"sort_by must be one of: {', '.join(valid_sort_fields)}")

    if sort_order not in ['asc', 'desc']:
        raise ValidationError("sort_order must be 'asc' or 'desc'")

    # Get pagination parameters
    page, per_page = extract_pagination_params()

    # Perform advanced search
    search_results = anime_repository.advanced_search(
        query=search_query,
        genres=genres,
        status=status_filter,
        year_from=year_from,
        year_to=year_to,
        min_episodes=min_episodes,
        max_episodes=max_episodes,
        sort_by=sort_by,
        sort_order=sort_order
    )

    # Format results
    formatted_results = []
    for anime in search_results:
        anime_data = format_anime_response(anime.__dict__)
        # Add search relevance score if available
        if hasattr(anime, 'relevance_score'):
            anime_data['relevance_score'] = anime.relevance_score
        formatted_results.append(anime_data)

    # Apply pagination
    total = len(formatted_results)
    start_idx = (page - 1) * per_page
    end_idx = start_idx + per_page
    paginated_results = formatted_results[start_idx:end_idx]

    response = create_paginated_response(
        data=paginated_results,
        page=page,
        per_page=per_page,
        total=total,
        endpoint='search.search_anime',
        q=search_query
    )

    # Add search metadata
    response['search'] = {
        'query': search_query,
        'filters': {
            'genres': genres,
            'status': status_filter,
            'year_from': year_from,
            'year_to': year_to,
            'min_episodes': min_episodes,
            'max_episodes': max_episodes
        },
        'sorting': {
            'sort_by': sort_by,
            'sort_order': sort_order
        }
    }

    return response
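# Filtered anime search sketch for the endpoint above (placeholders as before):
#
#     import requests
#
#     r = requests.get('http://127.0.0.1:5000/api/v1/search/anime',
#                      params={'q': 'gate', 'genres': 'action,fantasy',
#                              'year_from': 2010, 'sort_by': 'year',
#                              'sort_order': 'asc'})
#     print(r.json()['search']['filters'])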
@search_bp.route('/episodes', methods=['GET'])
@handle_api_errors
@validate_pagination_params
@optional_auth
def search_episodes() -> Dict[str, Any]:
    """
    Search episodes with advanced filters.

    Query Parameters:
    - q: Search query (required)
    - anime_id: Filter by anime ID
    - status: Episode status filter
    - downloaded: Filter by download status (true/false)
    - episode_range: Episode range filter (e.g., "1-10", "5+")
    - duration_min: Minimum duration in minutes
    - duration_max: Maximum duration in minutes
    - sort_by: Sort field (episode_number, title, duration, relevance)
    - sort_order: Sort order (asc, desc)
    - page: Page number (default: 1)
    - per_page: Items per page (default: 50, max: 1000)

    Returns:
        Paginated episode search results
    """
    if not episode_repository:
        raise APIException("Episode repository not available", 503)

    search_query = request.args.get('q', '').strip()
    if not search_query:
        raise ValidationError("Search query 'q' is required")

    # Parse filters
    anime_id = request.args.get('anime_id')
    if anime_id:
        try:
            anime_id = int(anime_id)
        except ValueError:
            raise ValidationError("anime_id must be a valid integer")

    status_filter = request.args.get('status')
    downloaded_filter = request.args.get('downloaded')

    if downloaded_filter and downloaded_filter.lower() not in ['true', 'false']:
        raise ValidationError("downloaded filter must be 'true' or 'false'")

    # Parse episode range
    episode_range = request.args.get('episode_range')
    episode_min = None
    episode_max = None

    if episode_range:
        range_pattern = r'^(\d+)(?:-(\d+)|\+)?$'
        match = re.match(range_pattern, episode_range)
        if not match:
            raise ValidationError("episode_range must be in format 'N', 'N-M', or 'N+'")

        episode_min = int(match.group(1))
        if match.group(2):
            episode_max = int(match.group(2))
        elif episode_range.endswith('+'):
            episode_max = None  # No upper limit
        else:
            episode_max = episode_min  # Single episode

    # Parse duration filters
    duration_min = request.args.get('duration_min')
    duration_max = request.args.get('duration_max')

    if duration_min:
        try:
            duration_min = int(duration_min)
            if duration_min < 0:
                raise ValueError()
        except ValueError:
            raise ValidationError("duration_min must be a non-negative integer")

    if duration_max:
        try:
            duration_max = int(duration_max)
            if duration_max < 0:
                raise ValueError()
        except ValueError:
            raise ValidationError("duration_max must be a non-negative integer")

    # Parse sorting
    sort_by = request.args.get('sort_by', 'relevance')
    sort_order = request.args.get('sort_order', 'desc')

    valid_sort_fields = ['episode_number', 'title', 'duration', 'relevance', 'created_at']
    if sort_by not in valid_sort_fields:
        raise ValidationError(f"sort_by must be one of: {', '.join(valid_sort_fields)}")

    if sort_order not in ['asc', 'desc']:
        raise ValidationError("sort_order must be 'asc' or 'desc'")

    # Get pagination parameters
    page, per_page = extract_pagination_params()

    # Perform advanced search
    search_results = episode_repository.advanced_search(
        query=search_query,
        anime_id=anime_id,
        status=status_filter,
        downloaded=downloaded_filter.lower() == 'true' if downloaded_filter else None,
        episode_min=episode_min,
        episode_max=episode_max,
        duration_min=duration_min,
        duration_max=duration_max,
        sort_by=sort_by,
        sort_order=sort_order
    )

    # Format results
    formatted_results = []
    for episode in search_results:
        episode_data = format_episode_response(episode.__dict__)
        # Add search relevance score if available
        if hasattr(episode, 'relevance_score'):
            episode_data['relevance_score'] = episode.relevance_score
        formatted_results.append(episode_data)

    # Apply pagination
    total = len(formatted_results)
    start_idx = (page - 1) * per_page
    end_idx = start_idx + per_page
    paginated_results = formatted_results[start_idx:end_idx]

    response = create_paginated_response(
        data=paginated_results,
        page=page,
        per_page=per_page,
        total=total,
        endpoint='search.search_episodes',
        q=search_query
    )

    # Add search metadata
    response['search'] = {
        'query': search_query,
        'filters': {
            'anime_id': anime_id,
            'status': status_filter,
            'downloaded': downloaded_filter,
            'episode_range': episode_range,
            'duration_min': duration_min,
            'duration_max': duration_max
        },
        'sorting': {
            'sort_by': sort_by,
            'sort_order': sort_order
        }
    }

    return response
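# The episode_range grammar accepted above is 'N', 'N-M', or 'N+'. A small
# standalone check of the same pattern:
#
#     import re
#     pattern = r'^(\d+)(?:-(\d+)|\+)?$'
#     for value in ('7', '1-10', '5+', 'x-3'):
#         print(value, bool(re.match(pattern, value)))
#     # '7', '1-10', and '5+' match; 'x-3' is rejected with a 400 response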
@search_bp.route('/suggestions', methods=['GET'])
@handle_api_errors
@optional_auth
def get_search_suggestions() -> Dict[str, Any]:
    """
    Get search suggestions based on partial query.

    Query Parameters:
    - q: Partial search query (required)
    - type: Content type (anime, episodes, all)
    - limit: Maximum suggestions to return (default: 10, max: 50)

    Returns:
        List of search suggestions
    """
    if not search_engine:
        raise APIException("Search engine not available", 503)

    query = request.args.get('q', '').strip()
    if not query:
        raise ValidationError("Query 'q' is required")

    if len(query) < 1:
        return create_success_response(data=[])

    content_type = request.args.get('type', 'all')
    if content_type not in ['anime', 'episodes', 'all']:
        raise ValidationError("type must be 'anime', 'episodes', or 'all'")

    limit = request.args.get('limit', '10')
    try:
        limit = int(limit)
        if limit < 1 or limit > 50:
            raise ValueError()
    except ValueError:
        raise ValidationError("limit must be an integer between 1 and 50")

    # Get suggestions
    suggestions = search_engine.get_suggestions(
        query=query,
        content_type=content_type,
        limit=limit
    )

    return create_success_response(
        data={
            'suggestions': suggestions,
            'query': query,
            'count': len(suggestions)
        }
    )


@search_bp.route('/autocomplete', methods=['GET'])
@handle_api_errors
@optional_auth
def autocomplete() -> Dict[str, Any]:
    """
    Get autocomplete suggestions for search fields.

    Query Parameters:
    - field: Field to autocomplete (name, genre, status, year)
    - q: Partial value
    - limit: Maximum suggestions (default: 10, max: 20)

    Returns:
        List of autocomplete suggestions
    """
    field = request.args.get('field', '').strip()
    query = request.args.get('q', '').strip()

    if not field:
        raise ValidationError("Field parameter is required")

    if field not in ['name', 'genre', 'status', 'year']:
        raise ValidationError("field must be one of: name, genre, status, year")

    limit = request.args.get('limit', '10')
    try:
        limit = int(limit)
        if limit < 1 or limit > 20:
            raise ValueError()
    except ValueError:
        raise ValidationError("limit must be an integer between 1 and 20")

    # Get autocomplete suggestions based on field
    suggestions = []

    if field == 'name':
        # Get anime/episode name suggestions
        if anime_repository:
            anime_names = anime_repository.get_name_suggestions(query, limit)
            suggestions.extend(anime_names)

    elif field == 'genre':
        # Get genre suggestions
        if anime_repository:
            genres = anime_repository.get_genre_suggestions(query, limit)
            suggestions.extend(genres)

    elif field == 'status':
        # Get status suggestions
        valid_statuses = ['ongoing', 'completed', 'planned', 'dropped', 'paused']
        suggestions = [s for s in valid_statuses if query.lower() in s.lower()][:limit]

    elif field == 'year':
        # Get year suggestions
        if anime_repository:
            years = anime_repository.get_year_suggestions(query, limit)
            suggestions.extend(years)

    return create_success_response(
        data={
            'suggestions': suggestions,
            'field': field,
            'query': query,
            'count': len(suggestions)
        }
    )


@search_bp.route('/trending', methods=['GET'])
@handle_api_errors
@optional_auth
def get_trending_searches() -> Dict[str, Any]:
    """
    Get trending search queries.

    Query Parameters:
    - period: Time period (day, week, month)
    - type: Content type (anime, episodes, all)
    - limit: Maximum results (default: 10, max: 50)

    Returns:
        List of trending search queries
    """
    if not search_engine:
        raise APIException("Search engine not available", 503)

    period = request.args.get('period', 'week')
    content_type = request.args.get('type', 'all')

    if period not in ['day', 'week', 'month']:
        raise ValidationError("period must be 'day', 'week', or 'month'")

    if content_type not in ['anime', 'episodes', 'all']:
        raise ValidationError("type must be 'anime', 'episodes', or 'all'")

    limit = request.args.get('limit', '10')
    try:
        limit = int(limit)
        if limit < 1 or limit > 50:
            raise ValueError()
    except ValueError:
        raise ValidationError("limit must be an integer between 1 and 50")

    # Get trending searches
    trending = search_engine.get_trending_searches(
        period=period,
        content_type=content_type,
        limit=limit
    )

    return create_success_response(
        data={
            'trending': trending,
            'period': period,
            'type': content_type,
            'count': len(trending)
        }
    )
@ -1,332 +0,0 @@

"""
Simple Master Password Authentication Controller for AniWorld.

This module implements a simple authentication system using:
- Single master password (no user registration)
- JWT tokens for session management
- Environment-based configuration
- No email system required
"""

import os
import hashlib
import jwt
from datetime import datetime, timedelta
from flask import Blueprint, request, jsonify
from functools import wraps
import logging
from typing import Dict, Any, Optional, Tuple

# Configure logging
logger = logging.getLogger(__name__)

# Create blueprint
simple_auth_bp = Blueprint('simple_auth', __name__)

# Configuration from environment
JWT_SECRET_KEY = os.getenv('JWT_SECRET_KEY', 'default_jwt_secret')
PASSWORD_SALT = os.getenv('PASSWORD_SALT', 'default_salt')
MASTER_PASSWORD_HASH = os.getenv('MASTER_PASSWORD_HASH')
TOKEN_EXPIRY_HOURS = int(os.getenv('SESSION_TIMEOUT_HOURS', '24'))


def hash_password(password: str) -> str:
    """Hash password with salt using SHA-256."""
    salted_password = password + PASSWORD_SALT
    return hashlib.sha256(salted_password.encode()).hexdigest()


def verify_master_password(password: str) -> bool:
    """Verify password against master password hash."""
    if not MASTER_PASSWORD_HASH:
        # If no hash is set, check against environment variable (development only)
        dev_password = os.getenv('MASTER_PASSWORD')
        if dev_password:
            return password == dev_password
        return False

    password_hash = hash_password(password)
    return password_hash == MASTER_PASSWORD_HASH
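# A one-off sketch for producing a MASTER_PASSWORD_HASH value that matches
# hash_password() above. The password and salt shown are placeholders, and the
# salt must equal the PASSWORD_SALT the server is started with:
#
#     import hashlib
#     print(hashlib.sha256(('my-master-password' + 'my-salt').encode()).hexdigest())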
def generate_jwt_token() -> str:
    """Generate JWT token for authentication."""
    payload = {
        'user': 'master',
        'exp': datetime.utcnow() + timedelta(hours=TOKEN_EXPIRY_HOURS),
        'iat': datetime.utcnow(),
        'iss': 'aniworld-server'
    }

    return jwt.encode(payload, JWT_SECRET_KEY, algorithm='HS256')


def verify_jwt_token(token: str) -> Optional[Dict[str, Any]]:
    """Verify and decode JWT token."""
    try:
        payload = jwt.decode(token, JWT_SECRET_KEY, algorithms=['HS256'])
        return payload
    except jwt.ExpiredSignatureError:
        logger.warning("Token has expired")
        return None
    except jwt.InvalidTokenError as e:
        logger.warning(f"Invalid token: {str(e)}")
        return None


def require_auth(f):
    """Decorator to require authentication for API endpoints."""
    @wraps(f)
    def decorated_function(*args, **kwargs):
        auth_header = request.headers.get('Authorization')
        if not auth_header:
            return jsonify({
                'success': False,
                'error': 'Authorization header required',
                'code': 'AUTH_REQUIRED'
            }), 401

        try:
            # Expected format: "Bearer <token>"
            token = auth_header.split(' ')[1]
        except IndexError:
            return jsonify({
                'success': False,
                'error': 'Invalid authorization header format',
                'code': 'INVALID_AUTH_FORMAT'
            }), 401

        payload = verify_jwt_token(token)
        if not payload:
            return jsonify({
                'success': False,
                'error': 'Invalid or expired token',
                'code': 'INVALID_TOKEN'
            }), 401

        # Add user info to request context
        request.current_user = payload
        return f(*args, **kwargs)

    return decorated_function


# Auth endpoints

@simple_auth_bp.route('/auth/login', methods=['POST'])
def login() -> Tuple[Any, int]:
    """
    Authenticate with master password and receive JWT token.

    Request Body:
    {
        "password": "master_password"
    }

    Response:
    {
        "success": true,
        "message": "Login successful",
        "data": {
            "token": "jwt_token_here",
            "expires_at": "2025-01-01T00:00:00Z",
            "user": "master"
        }
    }
    """
    try:
        data = request.get_json()
        if not data:
            return jsonify({
                'success': False,
                'error': 'JSON body required',
                'code': 'MISSING_JSON'
            }), 400

        password = data.get('password')
        if not password:
            return jsonify({
                'success': False,
                'error': 'Password required',
                'code': 'MISSING_PASSWORD'
            }), 400

        # Verify master password
        if not verify_master_password(password):
            logger.warning(f"Failed login attempt from IP: {request.remote_addr}")
            return jsonify({
                'success': False,
                'error': 'Invalid master password',
                'code': 'INVALID_CREDENTIALS'
            }), 401

        # Generate JWT token
        token = generate_jwt_token()
        expires_at = datetime.utcnow() + timedelta(hours=TOKEN_EXPIRY_HOURS)

        logger.info(f"Successful login from IP: {request.remote_addr}")

        return jsonify({
            'success': True,
            'message': 'Login successful',
            'data': {
                'token': token,
                'expires_at': expires_at.isoformat() + 'Z',
                'user': 'master',
                'token_type': 'Bearer'
            }
        }), 200

    except Exception as e:
        logger.error(f"Login error: {str(e)}")
        return jsonify({
            'success': False,
            'error': 'Internal server error',
            'code': 'SERVER_ERROR'
        }), 500
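# Login flow sketch matching the endpoint above (host and password are
# placeholders):
#
#     import requests
#
#     r = requests.post('http://127.0.0.1:5000/auth/login',
#                       json={'password': '<master password>'})
#     token = r.json()['data']['token']
#     me = requests.get('http://127.0.0.1:5000/auth/verify',
#                       headers={'Authorization': f'Bearer {token}'})
#     print(me.json())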
@simple_auth_bp.route('/auth/verify', methods=['GET'])
|
|
||||||
@require_auth
|
|
||||||
def verify_token() -> Tuple[Any, int]:
|
|
||||||
"""
|
|
||||||
Verify if the current JWT token is valid.
|
|
||||||
|
|
||||||
Headers:
|
|
||||||
Authorization: Bearer <token>
|
|
||||||
|
|
||||||
Response:
|
|
||||||
{
|
|
||||||
"success": true,
|
|
||||||
"message": "Token is valid",
|
|
||||||
"data": {
|
|
||||||
"user": "master",
|
|
||||||
"expires_at": "2025-01-01T00:00:00Z",
|
|
||||||
"issued_at": "2025-01-01T00:00:00Z"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
payload = request.current_user
|
|
||||||
|
|
||||||
return jsonify({
|
|
||||||
'success': True,
|
|
||||||
'message': 'Token is valid',
|
|
||||||
'data': {
|
|
||||||
'user': payload.get('user'),
|
|
||||||
'expires_at': datetime.utcfromtimestamp(payload.get('exp')).isoformat() + 'Z',
|
|
||||||
'issued_at': datetime.utcfromtimestamp(payload.get('iat')).isoformat() + 'Z',
|
|
||||||
'issuer': payload.get('iss')
|
|
||||||
}
|
|
||||||
}), 200
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Token verification error: {str(e)}")
|
|
||||||
return jsonify({
|
|
||||||
'success': False,
|
|
||||||
'error': 'Internal server error',
|
|
||||||
'code': 'SERVER_ERROR'
|
|
||||||
}), 500
|
|
||||||
|
|
||||||
|
|
||||||
@simple_auth_bp.route('/auth/logout', methods=['POST'])
|
|
||||||
@require_auth
|
|
||||||
def logout() -> Tuple[Any, int]:
|
|
||||||
"""
|
|
||||||
Logout (client-side token clearing).
|
|
||||||
|
|
||||||
Since JWT tokens are stateless, logout is handled client-side
|
|
||||||
by removing the token. This endpoint confirms logout action.
|
|
||||||
|
|
||||||
Headers:
|
|
||||||
Authorization: Bearer <token>
|
|
||||||
|
|
||||||
Response:
|
|
||||||
{
|
|
||||||
"success": true,
|
|
||||||
"message": "Logout successful"
|
|
||||||
}
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
logger.info(f"User logged out from IP: {request.remote_addr}")
|
|
||||||
|
|
||||||
return jsonify({
|
|
||||||
'success': True,
|
|
||||||
'message': 'Logout successful. Please remove the token on client side.',
|
|
||||||
'data': {
|
|
||||||
'action': 'clear_token'
|
|
||||||
}
|
|
||||||
}), 200
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Logout error: {str(e)}")
|
|
||||||
return jsonify({
|
|
||||||
'success': False,
|
|
||||||
'error': 'Internal server error',
|
|
||||||
'code': 'SERVER_ERROR'
|
|
||||||
}), 500
|
|
||||||
|
|
||||||
|
|
||||||
@simple_auth_bp.route('/auth/status', methods=['GET'])
|
|
||||||
def auth_status() -> Tuple[Any, int]:
|
|
||||||
"""
|
|
||||||
Check authentication system status.
|
|
||||||
|
|
||||||
Response:
|
|
||||||
{
|
|
||||||
"success": true,
|
|
||||||
"message": "Authentication system status",
|
|
||||||
"data": {
|
|
||||||
"auth_type": "master_password",
|
|
||||||
"jwt_enabled": true,
|
|
||||||
"password_configured": true
|
|
||||||
}
|
|
||||||
}
|
|
||||||
"""
|
|
||||||
try:
|
|
||||||
password_configured = bool(MASTER_PASSWORD_HASH or os.getenv('MASTER_PASSWORD'))
|
|
||||||
|
|
||||||
return jsonify({
|
|
||||||
'success': True,
|
|
||||||
'message': 'Authentication system status',
|
|
||||||
'data': {
|
|
||||||
'auth_type': 'master_password',
|
|
||||||
'jwt_enabled': True,
|
|
||||||
'password_configured': password_configured,
|
|
||||||
'token_expiry_hours': TOKEN_EXPIRY_HOURS
|
|
||||||
}
|
|
||||||
}), 200
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Auth status error: {str(e)}")
|
|
||||||
return jsonify({
|
|
||||||
'success': False,
|
|
||||||
'error': 'Internal server error',
|
|
||||||
'code': 'SERVER_ERROR'
|
|
||||||
}), 500
|
|
||||||
|
|
||||||
|
|
||||||
# Utility function to set master password hash
|
|
||||||
def set_master_password(password: str) -> str:
|
|
||||||
"""
|
|
||||||
Generate hash for master password.
|
|
||||||
This should be used to set MASTER_PASSWORD_HASH in environment.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
password: The master password to hash
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
The hashed password that should be stored in environment
|
|
||||||
"""
|
|
||||||
return hash_password(password)
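
A one-off way to produce the value for MASTER_PASSWORD_HASH (a sketch; the import path `simple_auth` depends on where this module lives and is an assumption here):

# generate_hash.py -- hypothetical helper script
import getpass

from simple_auth import set_master_password  # assumed module name

if __name__ == '__main__':
    password = getpass.getpass('New master password: ')
    # Paste the printed line into .env
    print('MASTER_PASSWORD_HASH=' + set_master_password(password))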


# Health check endpoint
@simple_auth_bp.route('/auth/health', methods=['GET'])
def health_check() -> Tuple[Any, int]:
    """Health check for the auth system."""
    return jsonify({
        'success': True,
        'message': 'Auth system is healthy',
        'timestamp': datetime.utcnow().isoformat() + 'Z'
    }), 200
@ -1,661 +0,0 @@
"""
Storage Management API Endpoints

This module provides REST API endpoints for storage management operations,
including storage monitoring, location management, and disk usage tracking.
"""

from flask import Blueprint, request
from typing import Dict, List, Any, Optional
import os
import shutil
from datetime import datetime

from ...shared.auth_decorators import require_auth, optional_auth
from ...shared.error_handlers import handle_api_errors, APIException, NotFoundError, ValidationError
from ...shared.validators import validate_json_input, validate_id_parameter, validate_pagination_params
from ...shared.response_helpers import (
    create_success_response, create_paginated_response, extract_pagination_params
)

# Import storage components (these imports would need to be adjusted based on the actual structure)
try:
    from database_manager import storage_manager, database_manager, StorageLocation
except ImportError:
    # Fallback for development/testing
    storage_manager = None
    database_manager = None
    StorageLocation = None


# Blueprint for storage management endpoints
storage_bp = Blueprint('storage', __name__, url_prefix='/api/v1/storage')


@storage_bp.route('/summary', methods=['GET'])
@handle_api_errors
@optional_auth
def get_storage_summary() -> Dict[str, Any]:
    """
    Get overall storage usage summary.

    Returns:
        Storage summary with usage statistics
    """
    if not storage_manager:
        raise APIException("Storage manager not available", 503)

    try:
        summary = storage_manager.get_storage_summary()

        return create_success_response(
            data={
                'total_storage_gb': round(summary.get('total_bytes', 0) / (1024**3), 2),
                'used_storage_gb': round(summary.get('used_bytes', 0) / (1024**3), 2),
                'free_storage_gb': round(summary.get('free_bytes', 0) / (1024**3), 2),
                'usage_percentage': summary.get('usage_percentage', 0),
                'anime_storage_gb': round(summary.get('anime_bytes', 0) / (1024**3), 2),
                'backup_storage_gb': round(summary.get('backup_bytes', 0) / (1024**3), 2),
                'cache_storage_gb': round(summary.get('cache_bytes', 0) / (1024**3), 2),
                'temp_storage_gb': round(summary.get('temp_bytes', 0) / (1024**3), 2),
                'location_count': summary.get('location_count', 0),
                'active_locations': summary.get('active_locations', 0),
                'last_updated': summary.get('last_updated', datetime.utcnow()).isoformat()
            }
        )

    except Exception as e:
        raise APIException(f"Failed to get storage summary: {str(e)}", 500)


@storage_bp.route('/locations', methods=['GET'])
@handle_api_errors
@validate_pagination_params
@optional_auth
def get_storage_locations() -> Dict[str, Any]:
    """
    Get all storage locations with optional filtering.

    Query Parameters:
        - location_type: Filter by location type (primary, backup, cache, temp)
        - anime_id: Filter by anime ID
        - status: Filter by status (active, inactive, error)
        - min_free_gb: Minimum free space in GB
        - max_usage_percent: Maximum usage percentage
        - page: Page number (default: 1)
        - per_page: Items per page (default: 50, max: 1000)

    Returns:
        Paginated list of storage locations
    """
    if not storage_manager or not database_manager:
        raise APIException("Storage manager not available", 503)

    # Extract filters
    location_type_filter = request.args.get('location_type')
    anime_id = request.args.get('anime_id')
    status_filter = request.args.get('status')
    min_free_gb = request.args.get('min_free_gb')
    max_usage_percent = request.args.get('max_usage_percent')

    # Validate filters
    valid_types = ['primary', 'backup', 'cache', 'temp']
    if location_type_filter and location_type_filter not in valid_types:
        raise ValidationError(f"location_type must be one of: {', '.join(valid_types)}")

    if anime_id:
        try:
            anime_id = int(anime_id)
        except ValueError:
            raise ValidationError("anime_id must be a valid integer")

    valid_statuses = ['active', 'inactive', 'error']
    if status_filter and status_filter not in valid_statuses:
        raise ValidationError(f"status must be one of: {', '.join(valid_statuses)}")

    if min_free_gb is not None:
        try:
            min_free_gb = float(min_free_gb)
            if min_free_gb < 0:
                raise ValueError()
        except ValueError:
            raise ValidationError("min_free_gb must be a non-negative number")

    if max_usage_percent is not None:
        try:
            max_usage_percent = float(max_usage_percent)
            if not 0 <= max_usage_percent <= 100:
                raise ValueError()
        except ValueError:
            raise ValidationError("max_usage_percent must be between 0 and 100")

    # Get pagination parameters
    page, per_page = extract_pagination_params()

    try:
        # Query storage locations
        query = """
            SELECT sl.*, am.name as anime_name
            FROM storage_locations sl
            LEFT JOIN anime_metadata am ON sl.anime_id = am.anime_id
            WHERE 1=1
        """
        params = []

        if location_type_filter:
            query += " AND sl.location_type = ?"
            params.append(location_type_filter)

        if anime_id:
            query += " AND sl.anime_id = ?"
            params.append(anime_id)

        if status_filter:
            query += " AND sl.status = ?"
            params.append(status_filter)

        query += " ORDER BY sl.location_type, sl.path"

        results = database_manager.execute_query(query, params)

        # Format and filter results
        locations = []
        for row in results:
            free_space_gb = (row['free_space_bytes'] / (1024**3)) if row['free_space_bytes'] else None
            total_space_gb = (row['total_space_bytes'] / (1024**3)) if row['total_space_bytes'] else None
            usage_percent = None

            if row['total_space_bytes'] and row['free_space_bytes']:
                usage_percent = ((row['total_space_bytes'] - row['free_space_bytes']) / row['total_space_bytes'] * 100)

            # Apply additional filters (explicit None checks so a legitimate
            # value of 0 is not silently ignored)
            if min_free_gb is not None and (free_space_gb is None or free_space_gb < min_free_gb):
                continue

            if max_usage_percent is not None and (usage_percent is None or usage_percent > max_usage_percent):
                continue

            location_data = {
                'location_id': row['location_id'],
                'anime_id': row['anime_id'],
                'anime_name': row['anime_name'],
                'path': row['path'],
                'location_type': row['location_type'],
                'status': row['status'],
                'free_space_gb': free_space_gb,
                'total_space_gb': total_space_gb,
                'used_space_gb': (total_space_gb - free_space_gb) if (total_space_gb and free_space_gb) else None,
                'usage_percent': usage_percent,
                'last_checked': row['last_checked'],
                'created_at': row['created_at'],
                'is_active': row['is_active'],
                'mount_point': row.get('mount_point'),
                'filesystem': row.get('filesystem')
            }

            locations.append(location_data)

        # Apply pagination
        total = len(locations)
        start_idx = (page - 1) * per_page
        end_idx = start_idx + per_page
        paginated_locations = locations[start_idx:end_idx]

        return create_paginated_response(
            data=paginated_locations,
            page=page,
            per_page=per_page,
            total=total,
            endpoint='storage.get_storage_locations'
        )

    except Exception as e:
        raise APIException(f"Failed to get storage locations: {str(e)}", 500)


@storage_bp.route('/locations', methods=['POST'])
@handle_api_errors
@validate_json_input(
    required_fields=['path', 'location_type'],
    optional_fields=['anime_id', 'description', 'mount_point', 'auto_create'],
    field_types={
        'path': str,
        'location_type': str,
        'anime_id': int,
        'description': str,
        'mount_point': str,
        'auto_create': bool
    }
)
@require_auth
def add_storage_location() -> Dict[str, Any]:
    """
    Add a new storage location.

    Required Fields:
        - path: Storage path
        - location_type: Type of storage (primary, backup, cache, temp)

    Optional Fields:
        - anime_id: Associated anime ID (for anime-specific storage)
        - description: Location description
        - mount_point: Mount point information
        - auto_create: Automatically create the directory if it doesn't exist

    Returns:
        Created storage location information
    """
    if not storage_manager:
        raise APIException("Storage manager not available", 503)

    data = request.get_json()
    path = data['path']
    location_type = data['location_type']
    anime_id = data.get('anime_id')
    description = data.get('description')
    mount_point = data.get('mount_point')
    auto_create = data.get('auto_create', False)

    # Validate location type
    valid_types = ['primary', 'backup', 'cache', 'temp']
    if location_type not in valid_types:
        raise ValidationError(f"location_type must be one of: {', '.join(valid_types)}")

    # Validate path
    if not path or not isinstance(path, str):
        raise ValidationError("path must be a valid string")

    # Normalize path
    path = os.path.abspath(path)

    # Check if the path already exists as a storage location
    existing_location = storage_manager.get_location_by_path(path)
    if existing_location:
        raise ValidationError("Storage location with this path already exists")

    # Check if the directory exists, or create it
    if not os.path.exists(path):
        if auto_create:
            try:
                os.makedirs(path, exist_ok=True)
            except Exception as e:
                raise ValidationError(f"Failed to create directory: {str(e)}")
        else:
            raise ValidationError("Directory does not exist. Set auto_create=true to create it.")

    # Check that it's a directory
    if not os.path.isdir(path):
        raise ValidationError("Path must be a directory")

    # Check that it's writable
    if not os.access(path, os.W_OK):
        raise ValidationError("Directory is not writable")

    try:
        location_id = storage_manager.add_storage_location(
            path=path,
            location_type=location_type,
            anime_id=anime_id,
            description=description,
            mount_point=mount_point
        )

        # Get the created location details
        location = storage_manager.get_location_by_id(location_id)

        location_data = {
            'location_id': location.location_id,
            'path': location.path,
            'location_type': location.location_type,
            'anime_id': location.anime_id,
            'description': location.description,
            'mount_point': location.mount_point,
            'status': location.status,
            'created_at': location.created_at.isoformat(),
            'is_active': location.is_active
        }

        return create_success_response(
            data=location_data,
            message="Storage location added successfully",
            status_code=201
        )

    except Exception as e:
        raise APIException(f"Failed to add storage location: {str(e)}", 500)


@storage_bp.route('/locations/<int:location_id>', methods=['GET'])
@handle_api_errors
@validate_id_parameter('location_id')
@optional_auth
def get_storage_location(location_id: int) -> Dict[str, Any]:
    """
    Get detailed information about a specific storage location.

    Args:
        location_id: Unique identifier for the storage location

    Returns:
        Detailed storage location information
    """
    if not storage_manager:
        raise APIException("Storage manager not available", 503)

    location = storage_manager.get_location_by_id(location_id)
    if not location:
        raise NotFoundError("Storage location not found")

    try:
        # Get detailed storage statistics
        stats = storage_manager.get_location_stats(location_id)

        location_data = {
            'location_id': location.location_id,
            'path': location.path,
            'location_type': location.location_type,
            'anime_id': location.anime_id,
            'description': location.description,
            'mount_point': location.mount_point,
            'status': location.status,
            'created_at': location.created_at.isoformat(),
            'last_checked': location.last_checked.isoformat() if location.last_checked else None,
            'is_active': location.is_active,
            'free_space_gb': round(stats.get('free_bytes', 0) / (1024**3), 2),
            'total_space_gb': round(stats.get('total_bytes', 0) / (1024**3), 2),
            'used_space_gb': round(stats.get('used_bytes', 0) / (1024**3), 2),
            'usage_percent': stats.get('usage_percentage', 0),
            'file_count': stats.get('file_count', 0),
            'directory_count': stats.get('directory_count', 0),
            'largest_file_mb': round(stats.get('largest_file_bytes', 0) / (1024**2), 2),
            'filesystem': stats.get('filesystem'),
            'mount_options': stats.get('mount_options'),
            'health_status': stats.get('health_status', 'unknown')
        }

        return create_success_response(location_data)

    except Exception as e:
        raise APIException(f"Failed to get storage location: {str(e)}", 500)


@storage_bp.route('/locations/<int:location_id>', methods=['PUT'])
@handle_api_errors
@validate_id_parameter('location_id')
@validate_json_input(
    optional_fields=['description', 'location_type', 'is_active', 'mount_point'],
    field_types={
        'description': str,
        'location_type': str,
        'is_active': bool,
        'mount_point': str
    }
)
@require_auth
def update_storage_location(location_id: int) -> Dict[str, Any]:
    """
    Update a storage location.

    Args:
        location_id: Unique identifier for the storage location

    Optional Fields:
        - description: Updated description
        - location_type: Updated location type
        - is_active: Active status
        - mount_point: Mount point information

    Returns:
        Updated storage location information
    """
    if not storage_manager:
        raise APIException("Storage manager not available", 503)

    data = request.get_json()

    # Check that the location exists
    location = storage_manager.get_location_by_id(location_id)
    if not location:
        raise NotFoundError("Storage location not found")

    # Validate the location type if provided
    if 'location_type' in data:
        valid_types = ['primary', 'backup', 'cache', 'temp']
        if data['location_type'] not in valid_types:
            raise ValidationError(f"location_type must be one of: {', '.join(valid_types)}")

    try:
        # Update the location
        success = storage_manager.update_location(location_id, data)

        if not success:
            raise APIException("Failed to update storage location", 500)

        # Get the updated location
        updated_location = storage_manager.get_location_by_id(location_id)

        location_data = {
            'location_id': updated_location.location_id,
            'path': updated_location.path,
            'location_type': updated_location.location_type,
            'anime_id': updated_location.anime_id,
            'description': updated_location.description,
            'mount_point': updated_location.mount_point,
            'status': updated_location.status,
            'is_active': updated_location.is_active,
            'updated_at': datetime.utcnow().isoformat()
        }

        return create_success_response(
            data=location_data,
            message="Storage location updated successfully"
        )

    except APIException:
        # Preserve deliberate API errors instead of re-wrapping them as 500s
        raise
    except Exception as e:
        raise APIException(f"Failed to update storage location: {str(e)}", 500)


@storage_bp.route('/locations/<int:location_id>', methods=['DELETE'])
@handle_api_errors
@validate_id_parameter('location_id')
@require_auth
def delete_storage_location(location_id: int) -> Dict[str, Any]:
    """
    Delete a storage location.

    Args:
        location_id: Unique identifier for the storage location

    Query Parameters:
        - force: Force deletion even if the location contains files
        - delete_files: Also delete the files in the location

    Returns:
        Deletion confirmation
    """
    if not storage_manager:
        raise APIException("Storage manager not available", 503)

    # Check that the location exists
    location = storage_manager.get_location_by_id(location_id)
    if not location:
        raise NotFoundError("Storage location not found")

    force = request.args.get('force', 'false').lower() == 'true'
    delete_files = request.args.get('delete_files', 'false').lower() == 'true'

    try:
        # Check whether the location has files (unless force is used)
        if not force:
            stats = storage_manager.get_location_stats(location_id)
            if stats.get('file_count', 0) > 0:
                raise ValidationError(
                    f"Storage location contains {stats['file_count']} files. "
                    "Use force=true to delete anyway."
                )

        # Delete the location
        success = storage_manager.delete_location(location_id, delete_files=delete_files)

        if not success:
            raise APIException("Failed to delete storage location", 500)

        message = "Storage location deleted successfully"
        if delete_files:
            message += " (including all files)"

        return create_success_response(message=message)

    except APIException:
        # Let the ValidationError above surface as a 400 rather than a 500
        raise
    except Exception as e:
        raise APIException(f"Failed to delete storage location: {str(e)}", 500)


@storage_bp.route('/locations/<int:location_id>/refresh', methods=['POST'])
@handle_api_errors
@validate_id_parameter('location_id')
@require_auth
def refresh_storage_location(location_id: int) -> Dict[str, Any]:
    """
    Refresh storage statistics for a location.

    Args:
        location_id: Unique identifier for the storage location

    Returns:
        Updated storage statistics
    """
    if not storage_manager:
        raise APIException("Storage manager not available", 503)

    # Check that the location exists
    location = storage_manager.get_location_by_id(location_id)
    if not location:
        raise NotFoundError("Storage location not found")

    try:
        # Update storage statistics
        stats = storage_manager.update_location_stats(location_id)

        return create_success_response(
            data={
                'location_id': location_id,
                'free_space_gb': round(stats.get('free_bytes', 0) / (1024**3), 2),
                'total_space_gb': round(stats.get('total_bytes', 0) / (1024**3), 2),
                'used_space_gb': round(stats.get('used_bytes', 0) / (1024**3), 2),
                'usage_percent': stats.get('usage_percentage', 0),
                'file_count': stats.get('file_count', 0),
                'directory_count': stats.get('directory_count', 0),
                'last_updated': datetime.utcnow().isoformat()
            },
            message="Storage statistics updated successfully"
        )

    except Exception as e:
        raise APIException(f"Failed to refresh storage location: {str(e)}", 500)


@storage_bp.route('/cleanup', methods=['POST'])
@handle_api_errors
@validate_json_input(
    optional_fields=['location_type', 'target_usage_percent', 'cleanup_temp', 'cleanup_cache', 'dry_run'],
    field_types={
        'location_type': str,
        'target_usage_percent': float,
        'cleanup_temp': bool,
        'cleanup_cache': bool,
        'dry_run': bool
    }
)
@require_auth
def cleanup_storage() -> Dict[str, Any]:
    """
    Perform storage cleanup operations.

    Optional Fields:
        - location_type: Type of locations to clean (temp, cache, backup)
        - target_usage_percent: Target usage percentage after cleanup
        - cleanup_temp: Clean temporary files
        - cleanup_cache: Clean cache files
        - dry_run: Preview what would be cleaned without actually doing it

    Returns:
        Cleanup results
    """
    if not storage_manager:
        raise APIException("Storage manager not available", 503)

    data = request.get_json() or {}
    location_type = data.get('location_type', 'temp')
    target_usage_percent = data.get('target_usage_percent', 80.0)
    cleanup_temp = data.get('cleanup_temp', True)
    cleanup_cache = data.get('cleanup_cache', False)
    dry_run = data.get('dry_run', False)

    # Validate parameters
    valid_types = ['temp', 'cache', 'backup']
    if location_type not in valid_types:
        raise ValidationError(f"location_type must be one of: {', '.join(valid_types)}")

    if not 0 <= target_usage_percent <= 100:
        raise ValidationError("target_usage_percent must be between 0 and 100")

    try:
        cleanup_result = storage_manager.cleanup_storage(
            location_type=location_type,
            target_usage_percent=target_usage_percent,
            cleanup_temp=cleanup_temp,
            cleanup_cache=cleanup_cache,
            dry_run=dry_run
        )

        return create_success_response(
            data={
                'dry_run': dry_run,
                'location_type': location_type,
                'files_deleted': cleanup_result.get('files_deleted', 0),
                'directories_deleted': cleanup_result.get('directories_deleted', 0),
                'space_freed_gb': round(cleanup_result.get('space_freed_bytes', 0) / (1024**3), 2),
                'cleanup_summary': cleanup_result.get('summary', {}),
                'target_usage_percent': target_usage_percent,
                'final_usage_percent': cleanup_result.get('final_usage_percent')
            },
            message=f"Storage cleanup {'simulated' if dry_run else 'completed'}"
        )

    except Exception as e:
        raise APIException(f"Failed to cleanup storage: {str(e)}", 500)


@storage_bp.route('/health', methods=['GET'])
@handle_api_errors
@optional_auth
def get_storage_health() -> Dict[str, Any]:
    """
    Get storage health status across all locations.

    Returns:
        Storage health information
    """
    if not storage_manager:
        raise APIException("Storage manager not available", 503)

    try:
        health_status = storage_manager.get_storage_health()

        return create_success_response(
            data={
                'overall_status': health_status.get('overall_status', 'unknown'),
                'total_locations': health_status.get('total_locations', 0),
                'healthy_locations': health_status.get('healthy_locations', 0),
                'warning_locations': health_status.get('warning_locations', 0),
                'error_locations': health_status.get('error_locations', 0),
                'average_usage_percent': health_status.get('average_usage_percent', 0),
                'locations_near_full': health_status.get('locations_near_full', []),
                'locations_with_errors': health_status.get('locations_with_errors', []),
                'recommendations': health_status.get('recommendations', []),
                'last_check': health_status.get('last_check', datetime.utcnow()).isoformat()
            }
        )

    except Exception as e:
        raise APIException(f"Failed to get storage health: {str(e)}", 500)
@ -1,352 +0,0 @@
"""
Base controller with common functionality for all controllers.

This module provides a base controller class that eliminates common duplication
across different controller modules by providing standardized error handling,
validation, and response formatting.
"""

from abc import ABC
from typing import Any, Dict, Optional, List, Union, Tuple, Callable

try:
    from flask import jsonify, request
    from werkzeug.exceptions import HTTPException
except ImportError:
    # Fallback for environments without Flask
    def jsonify(data):
        import json
        return json.dumps(data)

    class HTTPException(Exception):
        def __init__(self, status_code, detail):
            self.status_code = status_code
            self.detail = detail
            super().__init__(detail)

    class request:
        is_json = False

        @staticmethod
        def get_json():
            return {}

        headers = {}
        args = {}
        form = {}

try:
    from pydantic import BaseModel
except ImportError:
    # Fallback BaseModel
    class BaseModel:
        pass

import logging
import functools


class BaseController(ABC):
    """Base controller with common functionality for all controllers."""

    def __init__(self):
        self.logger = logging.getLogger(self.__class__.__name__)

    def handle_error(self, error: Exception, status_code: int = 500) -> HTTPException:
        """
        Standardized error handling across all controllers.

        Args:
            error: The exception that occurred
            status_code: HTTP status code to return

        Returns:
            HTTPException with a standardized format
        """
        self.logger.error(f"Controller error: {str(error)}", exc_info=True)
        # NOTE: this call matches the fallback HTTPException signature above;
        # werkzeug's HTTPException takes (description, response) instead.
        return HTTPException(status_code, str(error))

    def validate_request(self, data: BaseModel) -> bool:
        """
        Common validation logic for request data.

        Args:
            data: Pydantic model to validate

        Returns:
            True if validation passes

        Raises:
            ValidationError if validation fails
        """
        try:
            # Pydantic models automatically validate on instantiation
            return True
        except Exception as e:
            self.logger.warning(f"Validation failed: {str(e)}")
            raise

    def format_response(self, data: Any, message: str = "Success") -> Dict[str, Any]:
        """
        Standardized response format for successful operations.

        Args:
            data: Data to include in the response
            message: Success message

        Returns:
            Standardized success response dictionary
        """
        return {
            "status": "success",
            "message": message,
            "data": data
        }

    def format_error_response(self, message: str, status_code: int = 400, details: Any = None) -> Tuple[Dict[str, Any], int]:
        """
        Standardized error response format.

        Args:
            message: Error message
            status_code: HTTP status code
            details: Additional error details

        Returns:
            Tuple of (error_response_dict, status_code)
        """
        response = {
            "status": "error",
            "message": message,
            "error_code": status_code
        }

        if details:
            response["details"] = details

        return response, status_code

    def create_success_response(
        self,
        data: Any = None,
        message: str = "Operation successful",
        status_code: int = 200,
        pagination: Optional[Dict[str, Any]] = None,
        meta: Optional[Dict[str, Any]] = None
    ) -> Tuple[Dict[str, Any], int]:
        """
        Create a standardized success response.

        Args:
            data: Data to include in the response
            message: Success message
            status_code: HTTP status code
            pagination: Pagination information
            meta: Additional metadata

        Returns:
            Tuple of (response_dict, status_code)
        """
        response = {
            'status': 'success',
            'message': message
        }

        if data is not None:
            response['data'] = data

        if pagination:
            response['pagination'] = pagination

        if meta:
            response['meta'] = meta

        return response, status_code

    def create_error_response(
        self,
        message: str,
        status_code: int = 400,
        details: Any = None,
        error_code: Optional[str] = None
    ) -> Tuple[Dict[str, Any], int]:
        """
        Create a standardized error response.

        Args:
            message: Error message
            status_code: HTTP status code
            details: Additional error details
            error_code: Specific error code

        Returns:
            Tuple of (response_dict, status_code)
        """
        response = {
            'status': 'error',
            'message': message,
            'error_code': error_code or status_code
        }

        if details:
            response['details'] = details

        return response, status_code
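
A minimal sketch of a concrete controller built on this class (the name and the stand-in lookup are illustrative, not part of the codebase):

class AnimeController(BaseController):
    """Hypothetical controller demonstrating the base class hooks."""

    def get_anime(self, anime_id: int) -> Dict[str, Any]:
        try:
            anime = {'id': anime_id, 'name': 'Example'}  # stand-in for a real lookup
            return self.format_response(anime, message="Anime found")
        except Exception as e:
            # handle_error logs with traceback and returns an HTTPException to raise
            raise self.handle_error(e, status_code=500)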


def handle_api_errors(f: Callable) -> Callable:
    """
    Decorator for standardized API error handling.

    This decorator should be used on all API endpoints to ensure
    consistent error handling and response formatting.
    """
    @functools.wraps(f)
    def decorated_function(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        except HTTPException:
            # Re-raise HTTP exceptions as they are already properly formatted
            raise
        except ValueError as e:
            # Handle validation errors
            return jsonify({
                'status': 'error',
                'message': 'Invalid input data',
                'details': str(e),
                'error_code': 400
            }), 400
        except PermissionError as e:
            # Handle authorization errors
            return jsonify({
                'status': 'error',
                'message': 'Access denied',
                'details': str(e),
                'error_code': 403
            }), 403
        except FileNotFoundError as e:
            # Handle not-found errors
            return jsonify({
                'status': 'error',
                'message': 'Resource not found',
                'details': str(e),
                'error_code': 404
            }), 404
        except Exception as e:
            # Handle all other errors
            logging.getLogger(__name__).error(f"Unhandled error in {f.__name__}: {str(e)}", exc_info=True)
            return jsonify({
                'status': 'error',
                'message': 'Internal server error',
                'details': str(e) if logging.getLogger().isEnabledFor(logging.DEBUG) else 'An unexpected error occurred',
                'error_code': 500
            }), 500

    return decorated_function


def require_auth(f: Callable) -> Callable:
    """
    Decorator to require authentication for API endpoints.

    This decorator should be applied to endpoints that require
    user authentication.
    """
    @functools.wraps(f)
    def decorated_function(*args, **kwargs):
        # Implementation would depend on your authentication system.
        # For now, this is a placeholder that should be implemented
        # based on your specific authentication requirements.

        # Example implementation:
        # auth_header = request.headers.get('Authorization')
        # if not auth_header or not validate_auth_token(auth_header):
        #     return jsonify({
        #         'status': 'error',
        #         'message': 'Authentication required',
        #         'error_code': 401
        #     }), 401

        return f(*args, **kwargs)

    return decorated_function


def optional_auth(f: Callable) -> Callable:
    """
    Decorator for optional authentication.

    This decorator allows endpoints to work with or without authentication,
    but provides additional functionality when authenticated.
    """
    @functools.wraps(f)
    def decorated_function(*args, **kwargs):
        # Implementation would depend on your authentication system.
        # This would set the user context if authenticated, but not fail if not.
        return f(*args, **kwargs)

    return decorated_function


def validate_json_input(
    required_fields: Optional[List[str]] = None,
    optional_fields: Optional[List[str]] = None,
    **field_validators
) -> Callable:
    """
    Decorator for JSON input validation.

    Args:
        required_fields: List of required field names
        optional_fields: List of optional field names
        **field_validators: Field-specific validation functions

    Returns:
        Decorator function
    """
    def decorator(f: Callable) -> Callable:
        @functools.wraps(f)
        def decorated_function(*args, **kwargs):
            if not request.is_json:
                return jsonify({
                    'status': 'error',
                    'message': 'Request must contain JSON data',
                    'error_code': 400
                }), 400

            data = request.get_json()
            if not data:
                return jsonify({
                    'status': 'error',
                    'message': 'Invalid JSON data',
                    'error_code': 400
                }), 400

            # Check required fields
            if required_fields:
                missing_fields = [field for field in required_fields if field not in data]
                if missing_fields:
                    return jsonify({
                        'status': 'error',
                        'message': f'Missing required fields: {", ".join(missing_fields)}',
                        'error_code': 400
                    }), 400

            # Apply field validators
            for field, validator in field_validators.items():
                if field in data:
                    try:
                        if not validator(data[field]):
                            return jsonify({
                                'status': 'error',
                                'message': f'Invalid value for field: {field}',
                                'error_code': 400
                            }), 400
                    except Exception as e:
                        return jsonify({
                            'status': 'error',
                            'message': f'Validation error for field {field}: {str(e)}',
                            'error_code': 400
                        }), 400

            return f(*args, **kwargs)

        return decorated_function
    return decorator
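
A usage sketch for the keyword-validator form (the endpoint name and rules are illustrative):

@handle_api_errors
@validate_json_input(
    required_fields=['name'],
    # Each keyword maps a field to a callable returning True when the value is valid.
    name=lambda v: isinstance(v, str) and 0 < len(v) <= 100,
    year=lambda v: isinstance(v, int) and 1950 <= v <= 2100,
)
def create_entry():
    data = request.get_json()
    return {'data': data, 'message': 'created'}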
@ -1 +0,0 @@
"""Shared utilities and helpers for web controllers."""
@ -1,150 +0,0 @@
"""
Authentication decorators and utilities for API endpoints.

This module provides authentication decorators that can be used across
all controller modules for consistent authentication handling.
"""

import logging
from functools import wraps
from typing import Optional, Dict, Any, Callable
from flask import session, request, jsonify, redirect, url_for

# Import the session manager from the auth controller
from ..auth_controller import session_manager


def require_auth(f: Callable) -> Callable:
    """
    Decorator to require authentication for Flask routes.

    Args:
        f: The function to decorate

    Returns:
        Decorated function that requires authentication

    Usage:
        @require_auth
        def protected_endpoint():
            return "This requires authentication"
    """
    @wraps(f)
    def decorated_function(*args, **kwargs):
        if not session_manager.is_authenticated():
            # Check if this is an AJAX request (JSON, XMLHttpRequest, or fetch API request)
            is_ajax = (
                request.is_json or
                request.headers.get('X-Requested-With') == 'XMLHttpRequest' or
                request.headers.get('Accept', '').startswith('application/json') or
                '/api/' in request.path  # API endpoints should return JSON
            )

            if is_ajax:
                return jsonify({
                    'status': 'error',
                    'message': 'Authentication required',
                    'code': 'AUTH_REQUIRED'
                }), 401
            else:
                return redirect(url_for('auth.login'))
        return f(*args, **kwargs)
    return decorated_function


def optional_auth(f: Callable) -> Callable:
    """
    Decorator that checks auth but doesn't require it.

    This decorator will only require authentication if a master password
    has been configured in the system.

    Args:
        f: The function to decorate

    Returns:
        Decorated function that optionally requires authentication

    Usage:
        @optional_auth
        def maybe_protected_endpoint():
            return "This may require authentication"
    """
    @wraps(f)
    def decorated_function(*args, **kwargs):
        # Import config here to avoid circular imports
        from config import config

        # Check if a master password is configured
        if config.has_master_password():
            # If configured, require authentication
            if not session_manager.is_authenticated():
                # Check if this is an AJAX request (JSON, XMLHttpRequest, or fetch API request)
                is_ajax = (
                    request.is_json or
                    request.headers.get('X-Requested-With') == 'XMLHttpRequest' or
                    request.headers.get('Accept', '').startswith('application/json') or
                    '/api/' in request.path  # API endpoints should return JSON
                )

                if is_ajax:
                    return jsonify({
                        'status': 'error',
                        'message': 'Authentication required',
                        'code': 'AUTH_REQUIRED'
                    }), 401
                else:
                    return redirect(url_for('auth.login'))
        return f(*args, **kwargs)
    return decorated_function


def get_current_user() -> Optional[Dict[str, Any]]:
    """
    Get current authenticated user information.

    Returns:
        Dictionary containing user information if authenticated, None otherwise
    """
    if session_manager.is_authenticated():
        return session_manager.get_session_info()
    return None


def get_client_ip() -> str:
    """
    Get the client IP address with proxy support.

    Returns:
        Client IP address as a string
    """
    # Check for a forwarded IP (in case of a reverse proxy)
    forwarded_ip = request.headers.get('X-Forwarded-For')
    if forwarded_ip:
        return forwarded_ip.split(',')[0].strip()

    real_ip = request.headers.get('X-Real-IP')
    if real_ip:
        return real_ip

    return request.remote_addr or 'unknown'


def is_authenticated() -> bool:
    """
    Check if the current request is from an authenticated user.

    Returns:
        True if authenticated, False otherwise
    """
    return session_manager.is_authenticated()


def logout_current_user() -> bool:
    """
    Logout the current user.

    Returns:
        True if logout was successful, False otherwise
    """
    return session_manager.logout()
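
A sketch of these helpers combined in a route (the blueprint and endpoint are illustrative):

from flask import Blueprint

admin_bp = Blueprint('admin', __name__)  # hypothetical blueprint

@admin_bp.route('/api/admin/whoami')
@require_auth
def whoami():
    # Only reached with an authenticated session; AJAX callers get a 401 JSON payload instead.
    return jsonify({
        'user': get_current_user(),
        'client_ip': get_client_ip()
    })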
@ -1,286 +0,0 @@
|
|||||||
"""
|
|
||||||
Error handling decorators and utilities for API endpoints.
|
|
||||||
|
|
||||||
This module provides standardized error handling decorators and utilities
|
|
||||||
that can be used across all controller modules for consistent error responses.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import logging
|
|
||||||
import traceback
|
|
||||||
from functools import wraps
|
|
||||||
from typing import Dict, Any, Callable, Tuple, Optional, Union
|
|
||||||
from flask import jsonify, request
|
|
||||||
|
|
||||||
|
|
||||||
logger = logging.getLogger(__name__)
|
|
||||||
|
|
||||||
|
|
||||||
def handle_api_errors(f: Callable) -> Callable:
|
|
||||||
"""
|
|
||||||
Decorator to handle API errors consistently across all endpoints.
|
|
||||||
|
|
||||||
This decorator catches exceptions and returns standardized error responses
|
|
||||||
with appropriate HTTP status codes.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
f: The function to decorate
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Decorated function with error handling
|
|
||||||
|
|
||||||
Usage:
|
|
||||||
@handle_api_errors
|
|
||||||
def my_endpoint():
|
|
||||||
# This will automatically handle any exceptions
|
|
||||||
return {"data": "success"}
|
|
||||||
"""
|
|
||||||
@wraps(f)
|
|
||||||
def decorated_function(*args, **kwargs):
|
|
||||||
try:
|
|
||||||
result = f(*args, **kwargs)
|
|
||||||
|
|
||||||
# If result is already a Response object, return it
|
|
||||||
if hasattr(result, 'status_code'):
|
|
||||||
return result
|
|
||||||
|
|
||||||
# If result is a tuple (data, status_code), handle it
|
|
||||||
if isinstance(result, tuple) and len(result) == 2:
|
|
||||||
data, status_code = result
|
|
||||||
if isinstance(data, dict) and 'status' not in data:
|
|
||||||
data['status'] = 'success' if 200 <= status_code < 300 else 'error'
|
|
||||||
return jsonify(data), status_code
|
|
||||||
|
|
||||||
# If result is a dict, wrap it in success response
|
|
||||||
if isinstance(result, dict):
|
|
||||||
if 'status' not in result:
|
|
||||||
result['status'] = 'success'
|
|
||||||
return jsonify(result)
|
|
||||||
|
|
||||||
# For other types, wrap in success response
|
|
||||||
return jsonify({
|
|
||||||
'status': 'success',
|
|
||||||
'data': result
|
|
||||||
})
|
|
||||||
|
|
||||||
except ValueError as e:
|
|
||||||
logger.warning(f"Validation error in {f.__name__}: {str(e)}")
|
|
||||||
return create_error_response(
|
|
||||||
message=str(e),
|
|
||||||
status_code=400,
|
|
||||||
error_code='VALIDATION_ERROR'
|
|
||||||
)
|
|
||||||
|
|
||||||
except PermissionError as e:
|
|
||||||
logger.warning(f"Permission error in {f.__name__}: {str(e)}")
|
|
||||||
return create_error_response(
|
|
||||||
message="Access denied",
|
|
||||||
status_code=403,
|
|
||||||
error_code='ACCESS_DENIED'
|
|
||||||
)
|
|
||||||
|
|
||||||
except FileNotFoundError as e:
|
|
||||||
logger.warning(f"File not found in {f.__name__}: {str(e)}")
|
|
||||||
return create_error_response(
|
|
||||||
message="Resource not found",
|
|
||||||
status_code=404,
|
|
||||||
error_code='NOT_FOUND'
|
|
||||||
)
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Unexpected error in {f.__name__}: {str(e)}")
|
|
||||||
logger.error(f"Traceback: {traceback.format_exc()}")
|
|
||||||
|
|
||||||
# Don't expose internal errors in production
|
|
||||||
return create_error_response(
|
|
||||||
message="Internal server error",
|
|
||||||
status_code=500,
|
|
||||||
error_code='INTERNAL_ERROR'
|
|
||||||
)
|
|
||||||
|
|
||||||
return decorated_function
|
|
||||||
|
|
||||||
|
|
||||||
def handle_database_errors(f: Callable) -> Callable:
|
|
||||||
"""
|
|
||||||
Decorator specifically for database-related operations.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
f: The function to decorate
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Decorated function with database error handling
|
|
||||||
"""
|
|
||||||
@wraps(f)
|
|
||||||
def decorated_function(*args, **kwargs):
|
|
||||||
try:
|
|
||||||
return f(*args, **kwargs)
|
|
||||||
except Exception as e:
|
|
||||||
logger.error(f"Database error in {f.__name__}: {str(e)}")
|
|
||||||
return create_error_response(
|
|
||||||
message="Database operation failed",
|
|
||||||
status_code=500,
|
|
||||||
error_code='DATABASE_ERROR'
|
|
||||||
)
|
|
||||||
return decorated_function
|
|
||||||
|
|
||||||
|
|
||||||
def handle_file_operations(f: Callable) -> Callable:
|
|
||||||
"""
|
|
||||||
Decorator for file operation error handling.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
f: The function to decorate
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Decorated function with file operation error handling
|
|
||||||
"""
|
|
||||||
@wraps(f)
|
|
||||||
def decorated_function(*args, **kwargs):
|
|
||||||
try:
|
|
||||||
return f(*args, **kwargs)
|
|
||||||
except FileNotFoundError as e:
|
|
||||||
logger.warning(f"File not found in {f.__name__}: {str(e)}")
|
|
||||||
return create_error_response(
|
|
||||||
message="File not found",
|
|
||||||
status_code=404,
|
|
||||||
error_code='FILE_NOT_FOUND'
|
|
||||||
)
|
|
||||||
except PermissionError as e:
|
|
||||||
logger.warning(f"File permission error in {f.__name__}: {str(e)}")
|
|
||||||
return create_error_response(
|
|
||||||
message="Permission denied",
|
|
||||||
status_code=403,
|
|
||||||
error_code='PERMISSION_DENIED'
|
|
||||||
)
|
|
||||||
except OSError as e:
|
|
||||||
logger.error(f"File system error in {f.__name__}: {str(e)}")
|
|
||||||
return create_error_response(
|
|
||||||
message="File system error",
|
|
||||||
status_code=500,
|
|
||||||
error_code='FILE_SYSTEM_ERROR'
|
|
||||||
)
|
|
||||||
return decorated_function
|
|
||||||
|
|
||||||
|
|
||||||
def create_error_response(
|
|
||||||
message: str,
|
|
||||||
status_code: int = 400,
|
|
||||||
error_code: Optional[str] = None,
|
|
||||||
errors: Optional[list] = None,
|
|
||||||
data: Optional[Dict[str, Any]] = None
|
|
||||||
) -> Tuple[Dict[str, Any], int]:
|
|
||||||
"""
|
|
||||||
Create a standardized error response.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
message: Error message to display
|
|
||||||
status_code: HTTP status code
|
|
||||||
error_code: Optional error code for client handling
|
|
||||||
errors: Optional list of detailed errors
|
|
||||||
data: Optional additional data
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Tuple of (response_dict, status_code)
|
|
||||||
"""
|
|
||||||
response = {
|
|
||||||
'status': 'error',
|
|
||||||
'message': message
|
|
||||||
}
|
|
||||||
|
|
||||||
if error_code:
|
|
||||||
response['error_code'] = error_code
|
|
||||||
|
|
||||||
if errors:
|
|
||||||
response['errors'] = errors
|
|
||||||
|
|
||||||
if data:
|
|
||||||
response['data'] = data
|
|
||||||
|
|
||||||
return response, status_code
|
|
||||||
|
|
||||||
|
|
||||||
def create_success_response(
|
|
||||||
data: Any = None,
|
|
||||||
message: str = "Operation successful",
|
|
||||||
status_code: int = 200
|
|
||||||
) -> Tuple[Dict[str, Any], int]:
|
|
||||||
"""
|
|
||||||
Create a standardized success response.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
data: Data to include in response
|
|
||||||
message: Success message
|
|
||||||
status_code: HTTP status code
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Tuple of (response_dict, status_code)
|
|
||||||
"""
|
|
||||||
response = {
|
|
||||||
'status': 'success',
|
|
||||||
'message': message
|
|
||||||
}
|
|
||||||
|
|
||||||
if data is not None:
|
|
||||||
response['data'] = data
|
|
||||||
|
|
||||||
return response, status_code
|
|
||||||
|
|
||||||
|
|
||||||


def log_request_info():
    """Log request information for debugging."""
    logger.info(f"Request: {request.method} {request.path}")
    if request.is_json:
        logger.debug(f"Request JSON: {request.get_json()}")
    if request.args:
        logger.debug(f"Request args: {dict(request.args)}")


class APIException(Exception):
    """Custom exception for API errors."""

    def __init__(
        self,
        message: str,
        status_code: int = 400,
        error_code: Optional[str] = None,
        errors: Optional[list] = None
    ):
        self.message = message
        self.status_code = status_code
        self.error_code = error_code
        self.errors = errors
        super().__init__(message)


class ValidationError(APIException):
    """Exception for validation errors."""

    def __init__(self, message: str, errors: Optional[list] = None):
        super().__init__(
            message=message,
            status_code=400,
            error_code='VALIDATION_ERROR',
            errors=errors
        )


class NotFoundError(APIException):
    """Exception for not found errors."""

    def __init__(self, message: str = "Resource not found"):
        super().__init__(
            message=message,
            status_code=404,
            error_code='NOT_FOUND'
        )


class PermissionError(APIException):
    """Exception for permission errors.

    Note: this class shadows the built-in PermissionError within this
    module; callers importing it elsewhere should do so explicitly.
    """

    def __init__(self, message: str = "Access denied"):
        super().__init__(
            message=message,
            status_code=403,
            error_code='ACCESS_DENIED'
        )
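
# Annotation (not in the original diff): a sketch of turning APIException and
# its subclasses into JSON responses; the Flask app object is assumed to be
# created elsewhere.
def _register_api_exception_handler(app):
    from flask import jsonify

    @app.errorhandler(APIException)
    def handle_api_exception(exc):
        body, status = create_error_response(
            message=exc.message,
            status_code=exc.status_code,
            error_code=exc.error_code,
            errors=exc.errors,
        )
        return jsonify(body), status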
@@ -1,406 +0,0 @@
"""
|
|
||||||
Response formatting utilities for API endpoints.
|
|
||||||
|
|
||||||
This module provides utilities for creating consistent response formats
|
|
||||||
across all controller modules.
|
|
||||||
"""
|
|
||||||
|
|
||||||
from typing import Any, Dict, List, Optional, Union, Tuple
|
|
||||||
from flask import jsonify, url_for, request
|
|
||||||
import math
|
|
||||||
|
|
||||||
|
|
||||||
def create_success_response(
|
|
||||||
data: Any = None,
|
|
||||||
message: str = "Operation successful",
|
|
||||||
status_code: int = 200,
|
|
||||||
pagination: Optional[Dict[str, Any]] = None,
|
|
||||||
meta: Optional[Dict[str, Any]] = None
|
|
||||||
) -> Tuple[Dict[str, Any], int]:
|
|
||||||
"""
|
|
||||||
Create a standardized success response.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
data: Data to include in response
|
|
||||||
message: Success message
|
|
||||||
status_code: HTTP status code
|
|
||||||
pagination: Pagination information
|
|
||||||
meta: Additional metadata
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Tuple of (response_dict, status_code)
|
|
||||||
"""
|
|
||||||
response = {
|
|
||||||
'status': 'success',
|
|
||||||
'message': message
|
|
||||||
}
|
|
||||||
|
|
||||||
if data is not None:
|
|
||||||
response['data'] = data
|
|
||||||
|
|
||||||
if pagination:
|
|
||||||
response['pagination'] = pagination
|
|
||||||
|
|
||||||
if meta:
|
|
||||||
response['meta'] = meta
|
|
||||||
|
|
||||||
return response, status_code
|
|
||||||
|
|
||||||
|
|
||||||
def create_error_response(
|
|
||||||
message: str,
|
|
||||||
status_code: int = 400,
|
|
||||||
error_code: Optional[str] = None,
|
|
||||||
errors: Optional[List[str]] = None,
|
|
||||||
data: Optional[Dict[str, Any]] = None
|
|
||||||
) -> Tuple[Dict[str, Any], int]:
|
|
||||||
"""
|
|
||||||
Create a standardized error response.
|
|
||||||
|
|
||||||
Args:
|
|
||||||
message: Error message to display
|
|
||||||
status_code: HTTP status code
|
|
||||||
error_code: Optional error code for client handling
|
|
||||||
errors: Optional list of detailed errors
|
|
||||||
data: Optional additional data
|
|
||||||
|
|
||||||
Returns:
|
|
||||||
Tuple of (response_dict, status_code)
|
|
||||||
"""
|
|
||||||
response = {
|
|
||||||
'status': 'error',
|
|
||||||
'message': message
|
|
||||||
}
|
|
||||||
|
|
||||||
if error_code:
|
|
||||||
response['error_code'] = error_code
|
|
||||||
|
|
||||||
if errors:
|
|
||||||
response['errors'] = errors
|
|
||||||
|
|
||||||
if data:
|
|
||||||
response['data'] = data
|
|
||||||
|
|
||||||
return response, status_code
|
|
||||||
|
|
||||||
|
|
||||||


def create_paginated_response(
    data: List[Any],
    page: int,
    per_page: int,
    total: int,
    endpoint: Optional[str] = None,
    **kwargs
) -> Dict[str, Any]:
    """
    Create a paginated response with navigation links.

    Args:
        data: List of data items for current page
        page: Current page number (1-based)
        per_page: Items per page
        total: Total number of items
        endpoint: Flask endpoint name for pagination links
        **kwargs: Additional parameters for pagination links

    Returns:
        Dictionary containing paginated response
    """
    total_pages = math.ceil(total / per_page) if per_page > 0 else 1

    pagination_info = {
        'page': page,
        'per_page': per_page,
        'total': total,
        'total_pages': total_pages,
        'has_next': page < total_pages,
        'has_prev': page > 1
    }

    # Add navigation links if endpoint is provided
    if endpoint:
        base_url = request.url_root.rstrip('/')  # note: computed but unused below

        # Current page
        pagination_info['current_url'] = url_for(endpoint, page=page, per_page=per_page, **kwargs)

        # First page
        pagination_info['first_url'] = url_for(endpoint, page=1, per_page=per_page, **kwargs)

        # Last page
        pagination_info['last_url'] = url_for(endpoint, page=total_pages, per_page=per_page, **kwargs)

        # Previous page
        if pagination_info['has_prev']:
            pagination_info['prev_url'] = url_for(endpoint, page=page - 1, per_page=per_page, **kwargs)

        # Next page
        if pagination_info['has_next']:
            pagination_info['next_url'] = url_for(endpoint, page=page + 1, per_page=per_page, **kwargs)

    return {
        'status': 'success',
        'data': data,
        'pagination': pagination_info
    }
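
# Annotation (not in the original diff): worked example of the pagination
# block for page=2, per_page=50, total=120:
#
#   total_pages = ceil(120 / 50) = 3
#   {'page': 2, 'per_page': 50, 'total': 120, 'total_pages': 3,
#    'has_next': True, 'has_prev': True}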


def paginate_query_results(
    items: List[Any],
    page: Optional[int] = None,
    per_page: Optional[int] = None,
    default_per_page: int = 50,
    max_per_page: int = 1000
) -> Tuple[List[Any], int, int, int]:
    """
    Paginate a list of items based on query parameters.

    Args:
        items: List of items to paginate
        page: Page number (from query params)
        per_page: Items per page (from query params)
        default_per_page: Default items per page
        max_per_page: Maximum allowed items per page

    Returns:
        Tuple of (paginated_items, page, per_page, total)
    """
    total = len(items)

    # Parse pagination parameters
    if page is None:
        page = int(request.args.get('page', 1))
    if per_page is None:
        per_page = int(request.args.get('per_page', default_per_page))

    # Validate parameters
    page = max(1, page)
    per_page = min(max(1, per_page), max_per_page)

    # Calculate offset
    offset = (page - 1) * per_page

    # Slice the items
    paginated_items = items[offset:offset + per_page]

    return paginated_items, page, per_page, total
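
# Annotation (not in the original diff): a runnable illustration of the
# slicing behaviour, using explicit arguments so no request context is
# needed.
def _example_paginate():
    items = list(range(120))
    page_items, page, per_page, total = paginate_query_results(
        items, page=3, per_page=50)
    # offset = (3 - 1) * 50 = 100, so the final partial page comes back
    assert page_items == list(range(100, 120))
    assert (page, per_page, total) == (3, 50, 120)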


def format_anime_response(anime_data: Dict[str, Any]) -> Dict[str, Any]:
    """
    Format anime data for API response.

    Args:
        anime_data: Raw anime data from database

    Returns:
        Formatted anime data
    """
    formatted = {
        'id': anime_data.get('id'),
        'name': anime_data.get('name'),
        'url': anime_data.get('url'),
        'description': anime_data.get('description'),
        'episodes': anime_data.get('episodes'),
        'status': anime_data.get('status', 'planned'),
        'created_at': anime_data.get('created_at'),
        'updated_at': anime_data.get('updated_at')
    }

    # Remove None values
    return {k: v for k, v in formatted.items() if v is not None}


def format_episode_response(episode_data: Dict[str, Any]) -> Dict[str, Any]:
    """
    Format episode data for API response.

    Args:
        episode_data: Raw episode data from database

    Returns:
        Formatted episode data
    """
    formatted = {
        'id': episode_data.get('id'),
        'anime_id': episode_data.get('anime_id'),
        'episode_number': episode_data.get('episode_number'),
        'title': episode_data.get('title'),
        'url': episode_data.get('url'),
        'status': episode_data.get('status', 'available'),
        'download_path': episode_data.get('download_path'),
        'file_size': episode_data.get('file_size'),
        'created_at': episode_data.get('created_at'),
        'updated_at': episode_data.get('updated_at')
    }

    # Remove None values
    return {k: v for k, v in formatted.items() if v is not None}


def format_download_response(download_data: Dict[str, Any]) -> Dict[str, Any]:
    """
    Format download data for API response.

    Args:
        download_data: Raw download data

    Returns:
        Formatted download data
    """
    formatted = {
        'id': download_data.get('id'),
        'anime_id': download_data.get('anime_id'),
        'episode_id': download_data.get('episode_id'),
        'status': download_data.get('status', 'pending'),
        'progress': download_data.get('progress', 0),
        'speed': download_data.get('speed'),
        'eta': download_data.get('eta'),
        'error_message': download_data.get('error_message'),
        'started_at': download_data.get('started_at'),
        'completed_at': download_data.get('completed_at')
    }

    # Remove None values
    return {k: v for k, v in formatted.items() if v is not None}


def format_bulk_operation_response(operation_data: Dict[str, Any]) -> Dict[str, Any]:
    """
    Format bulk operation data for API response.

    Args:
        operation_data: Raw bulk operation data

    Returns:
        Formatted bulk operation data
    """
    formatted = {
        'id': operation_data.get('id'),
        'type': operation_data.get('type'),
        'status': operation_data.get('status', 'pending'),
        'total_items': operation_data.get('total_items', 0),
        'completed_items': operation_data.get('completed_items', 0),
        'failed_items': operation_data.get('failed_items', 0),
        'progress_percentage': operation_data.get('progress_percentage', 0),
        'started_at': operation_data.get('started_at'),
        'completed_at': operation_data.get('completed_at'),
        'error_message': operation_data.get('error_message')
    }

    # Remove None values
    return {k: v for k, v in formatted.items() if v is not None}


def format_health_response(health_data: Dict[str, Any]) -> Dict[str, Any]:
    """
    Format health check data for API response.

    Args:
        health_data: Raw health check data

    Returns:
        Formatted health data
    """
    formatted = {
        'status': health_data.get('status', 'unknown'),
        'uptime': health_data.get('uptime'),
        'version': health_data.get('version'),
        'components': health_data.get('components', {}),
        'timestamp': health_data.get('timestamp')
    }

    # Remove None values
    return {k: v for k, v in formatted.items() if v is not None}


def add_resource_links(data: Dict[str, Any], resource_type: str, resource_id: Any) -> Dict[str, Any]:
    """
    Add HATEOAS-style links to a resource response.

    Args:
        data: Resource data
        resource_type: Type of resource (anime, episode, etc.)
        resource_id: Resource identifier

    Returns:
        Data with added links
    """
    if '_links' not in data:
        data['_links'] = {}

    # Self link
    data['_links']['self'] = url_for(f'api.get_{resource_type}', id=resource_id)

    # Collection link
    data['_links']['collection'] = url_for(f'api.list_{resource_type}s')

    return data
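
# Annotation (not in the original diff): for resource_type='anime' and
# resource_id=7, the calls above resolve the endpoints 'api.get_anime' and
# 'api.list_animes' (note the naive "+ 's'" pluralization), yielding links
# such as {'self': '/api/anime/7', 'collection': '/api/anime'} depending on
# how those routes are registered.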


def create_batch_response(
    successful_items: List[Dict[str, Any]],
    failed_items: List[Dict[str, Any]],
    message: Optional[str] = None
) -> Dict[str, Any]:
    """
    Create response for batch operations.

    Args:
        successful_items: List of successfully processed items
        failed_items: List of failed items with errors
        message: Optional message

    Returns:
        Batch operation response
    """
    total_items = len(successful_items) + len(failed_items)
    success_count = len(successful_items)
    failure_count = len(failed_items)

    response = {
        'status': 'success' if failure_count == 0 else 'partial_success',
        'message': message or f"Processed {success_count}/{total_items} items successfully",
        'summary': {
            'total': total_items,
            'successful': success_count,
            'failed': failure_count
        },
        'data': {
            'successful': successful_items,
            'failed': failed_items
        }
    }

    return response
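
# Annotation (not in the original diff): example outcome for two successes
# and one failure:
#
#   create_batch_response([{'id': 1}, {'id': 2}], [{'id': 3, 'error': 'x'}])
#
# yields status 'partial_success', message 'Processed 2/3 items successfully'
# and a summary of {'total': 3, 'successful': 2, 'failed': 1}.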


def extract_pagination_params(
    default_page: int = 1,
    default_per_page: int = 50,
    max_per_page: int = 1000
) -> Tuple[int, int]:
    """
    Extract and validate pagination parameters from request.

    Args:
        default_page: Default page number
        default_per_page: Default items per page
        max_per_page: Maximum allowed items per page

    Returns:
        Tuple of (page, per_page)
    """
    try:
        page = int(request.args.get('page', default_page))
        page = max(1, page)
    except (ValueError, TypeError):
        page = default_page

    try:
        per_page = int(request.args.get('per_page', default_per_page))
        per_page = min(max(1, per_page), max_per_page)
    except (ValueError, TypeError):
        per_page = default_per_page

    return page, per_page
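
# Annotation (not in the original diff): a sketch of how the helpers in this
# module compose inside a list endpoint. It needs an active Flask request
# context; the data is inlined so nothing else is assumed.
def _example_list_route():
    from flask import jsonify
    all_items = [{'id': i, 'name': f'Series {i}'} for i in range(1, 8)]
    page, per_page = extract_pagination_params(default_per_page=5)
    page_items, page, per_page, total = paginate_query_results(
        all_items, page=page, per_page=per_page)
    body = create_paginated_response(
        [format_anime_response(item) for item in page_items],
        page=page, per_page=per_page, total=total)
    return jsonify(body)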
@@ -1,446 +0,0 @@
"""
|
|
||||||
Input validation utilities for API endpoints.
|
|
||||||
|
|
||||||
This module provides validation functions and decorators for consistent
|
|
||||||
input validation across all controller modules.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import re
|
|
||||||
import os
|
|
||||||
from typing import Any, Dict, List, Optional, Union, Callable, Tuple
|
|
||||||
from functools import wraps
|
|
||||||
from flask import request, jsonify
|
|
||||||
from .error_handlers import ValidationError, create_error_response
|
|
||||||
|
|
||||||
|
|
||||||


def validate_json_input(required_fields: Optional[List[str]] = None,
                        optional_fields: Optional[List[str]] = None,
                        field_types: Optional[Dict[str, type]] = None) -> Callable:
    """
    Decorator to validate JSON input for API endpoints.

    Args:
        required_fields: List of required field names
        optional_fields: List of optional field names
        field_types: Dictionary mapping field names to expected types

    Returns:
        Decorator function

    Usage:
        @validate_json_input(
            required_fields=['name', 'url'],
            optional_fields=['description'],
            field_types={'name': str, 'url': str, 'episodes': int}
        )
        def create_anime():
            data = request.get_json()
            # data is now validated
    """
    def decorator(f: Callable) -> Callable:
        @wraps(f)
        def decorated_function(*args, **kwargs):
            if not request.is_json:
                return create_error_response(
                    message="Request must be JSON",
                    status_code=400,
                    error_code='INVALID_CONTENT_TYPE'
                )

            try:
                data = request.get_json()
            except Exception:
                return create_error_response(
                    message="Invalid JSON format",
                    status_code=400,
                    error_code='INVALID_JSON'
                )

            if data is None:
                return create_error_response(
                    message="Request body cannot be empty",
                    status_code=400,
                    error_code='EMPTY_BODY'
                )

            # Validate required fields
            if required_fields:
                missing_fields = []
                for field in required_fields:
                    if field not in data or data[field] is None:
                        missing_fields.append(field)

                if missing_fields:
                    return create_error_response(
                        message=f"Missing required fields: {', '.join(missing_fields)}",
                        status_code=400,
                        error_code='MISSING_FIELDS',
                        errors=missing_fields
                    )

            # Validate field types
            if field_types:
                type_errors = []
                for field, expected_type in field_types.items():
                    if field in data and data[field] is not None:
                        if not isinstance(data[field], expected_type):
                            type_errors.append(f"{field} must be of type {expected_type.__name__}")

                if type_errors:
                    return create_error_response(
                        message="Type validation failed",
                        status_code=400,
                        error_code='TYPE_ERROR',
                        errors=type_errors
                    )

            # Check for unexpected fields
            all_allowed = (required_fields or []) + (optional_fields or [])
            if all_allowed:
                unexpected_fields = [field for field in data.keys() if field not in all_allowed]
                if unexpected_fields:
                    return create_error_response(
                        message=f"Unexpected fields: {', '.join(unexpected_fields)}",
                        status_code=400,
                        error_code='UNEXPECTED_FIELDS',
                        errors=unexpected_fields
                    )

            return f(*args, **kwargs)
        return decorated_function
    return decorator


def validate_query_params(allowed_params: Optional[List[str]] = None,
                          required_params: Optional[List[str]] = None,
                          param_types: Optional[Dict[str, type]] = None) -> Callable:
    """
    Decorator to validate query parameters.

    Args:
        allowed_params: List of allowed parameter names
        required_params: List of required parameter names
        param_types: Dictionary mapping parameter names to expected types

    Returns:
        Decorator function
    """
    def decorator(f: Callable) -> Callable:
        @wraps(f)
        def decorated_function(*args, **kwargs):
            # Check required parameters
            if required_params:
                missing_params = []
                for param in required_params:
                    if param not in request.args:
                        missing_params.append(param)

                if missing_params:
                    return create_error_response(
                        message=f"Missing required parameters: {', '.join(missing_params)}",
                        status_code=400,
                        error_code='MISSING_PARAMS'
                    )

            # Check allowed parameters
            if allowed_params:
                unexpected_params = [param for param in request.args.keys() if param not in allowed_params]
                if unexpected_params:
                    return create_error_response(
                        message=f"Unexpected parameters: {', '.join(unexpected_params)}",
                        status_code=400,
                        error_code='UNEXPECTED_PARAMS'
                    )

            # Validate parameter types
            if param_types:
                type_errors = []
                for param, expected_type in param_types.items():
                    if param in request.args:
                        value = request.args.get(param)
                        try:
                            if expected_type == int:
                                int(value)
                            elif expected_type == float:
                                float(value)
                            elif expected_type == bool:
                                if value.lower() not in ['true', 'false', '1', '0']:
                                    raise ValueError()
                        except ValueError:
                            type_errors.append(f"{param} must be of type {expected_type.__name__}")

                if type_errors:
                    return create_error_response(
                        message="Parameter type validation failed",
                        status_code=400,
                        error_code='PARAM_TYPE_ERROR',
                        errors=type_errors
                    )

            return f(*args, **kwargs)
        return decorated_function
    return decorator


def validate_pagination_params(f: Callable) -> Callable:
    """
    Decorator to validate pagination parameters (page, per_page, limit, offset).

    Args:
        f: The function to decorate

    Returns:
        Decorated function with pagination validation
    """
    @wraps(f)
    def decorated_function(*args, **kwargs):
        errors = []

        # Validate page parameter
        page = request.args.get('page')
        if page is not None:
            try:
                page_int = int(page)
                if page_int < 1:
                    errors.append("page must be greater than 0")
            except ValueError:
                errors.append("page must be an integer")

        # Validate per_page parameter
        per_page = request.args.get('per_page')
        if per_page is not None:
            try:
                per_page_int = int(per_page)
                if per_page_int < 1:
                    errors.append("per_page must be greater than 0")
                elif per_page_int > 1000:
                    errors.append("per_page cannot exceed 1000")
            except ValueError:
                errors.append("per_page must be an integer")

        # Validate limit parameter
        limit = request.args.get('limit')
        if limit is not None:
            try:
                limit_int = int(limit)
                if limit_int < 1:
                    errors.append("limit must be greater than 0")
                elif limit_int > 1000:
                    errors.append("limit cannot exceed 1000")
            except ValueError:
                errors.append("limit must be an integer")

        # Validate offset parameter
        offset = request.args.get('offset')
        if offset is not None:
            try:
                offset_int = int(offset)
                if offset_int < 0:
                    errors.append("offset must be greater than or equal to 0")
            except ValueError:
                errors.append("offset must be an integer")

        if errors:
            return create_error_response(
                message="Pagination parameter validation failed",
                status_code=400,
                error_code='PAGINATION_ERROR',
                errors=errors
            )

        return f(*args, **kwargs)
    return decorated_function


def validate_anime_data(data: Dict[str, Any]) -> List[str]:
    """
    Validate anime data structure.

    Args:
        data: Dictionary containing anime data

    Returns:
        List of validation errors (empty if valid)
    """
    errors = []

    # Required fields
    required_fields = ['name', 'url']
    for field in required_fields:
        if field not in data or not data[field]:
            errors.append(f"Missing required field: {field}")

    # Validate name
    if 'name' in data:
        name = data['name']
        if not isinstance(name, str):
            errors.append("name must be a string")
        elif len(name.strip()) == 0:
            errors.append("name cannot be empty")
        elif len(name) > 500:
            errors.append("name cannot exceed 500 characters")

    # Validate URL
    if 'url' in data:
        url = data['url']
        if not isinstance(url, str):
            errors.append("url must be a string")
        elif not is_valid_url(url):
            errors.append("url must be a valid URL")

    # Validate optional fields
    if 'description' in data and data['description'] is not None:
        if not isinstance(data['description'], str):
            errors.append("description must be a string")
        elif len(data['description']) > 2000:
            errors.append("description cannot exceed 2000 characters")

    if 'episodes' in data and data['episodes'] is not None:
        if not isinstance(data['episodes'], int):
            errors.append("episodes must be an integer")
        elif data['episodes'] < 0:
            errors.append("episodes must be non-negative")

    if 'status' in data and data['status'] is not None:
        valid_statuses = ['ongoing', 'completed', 'planned', 'dropped', 'paused']
        if data['status'] not in valid_statuses:
            errors.append(f"status must be one of: {', '.join(valid_statuses)}")

    return errors
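
# Annotation (not in the original diff): this validator returns a plain error
# list, which pairs naturally with the ValidationError imported above.
def _example_validate_anime(payload):
    errors = validate_anime_data(payload)
    if errors:
        raise ValidationError("Anime validation failed", errors=errors)
    return payload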


def validate_file_upload(file, allowed_extensions: Optional[List[str]] = None,
                         max_size_mb: Optional[int] = None) -> List[str]:
    """
    Validate file upload.

    Args:
        file: Uploaded file object
        allowed_extensions: List of allowed file extensions
        max_size_mb: Maximum file size in MB

    Returns:
        List of validation errors (empty if valid)
    """
    errors = []

    if not file:
        errors.append("No file provided")
        return errors

    if file.filename == '':
        errors.append("No file selected")
        return errors

    # Check file extension
    if allowed_extensions:
        file_ext = os.path.splitext(file.filename)[1].lower()
        if file_ext not in [f".{ext.lower()}" for ext in allowed_extensions]:
            errors.append(f"File type not allowed. Allowed: {', '.join(allowed_extensions)}")

    # Check file size (if we can determine it)
    if max_size_mb and hasattr(file, 'content_length') and file.content_length:
        max_size_bytes = max_size_mb * 1024 * 1024
        if file.content_length > max_size_bytes:
            errors.append(f"File size exceeds maximum of {max_size_mb}MB")

    return errors


def is_valid_url(url: str) -> bool:
    """
    Check if a string is a valid URL.

    Args:
        url: URL string to validate

    Returns:
        True if valid URL, False otherwise
    """
    url_pattern = re.compile(
        r'^https?://'  # http:// or https://
        r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+[A-Z]{2,6}\.?|'  # domain...
        r'localhost|'  # localhost...
        r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})'  # ...or ip
        r'(?::\d+)?'  # optional port
        r'(?:/?|[/?]\S+)$', re.IGNORECASE)

    return url_pattern.match(url) is not None


def is_valid_email(email: str) -> bool:
    """
    Check if a string is a valid email address.

    Args:
        email: Email string to validate

    Returns:
        True if valid email, False otherwise
    """
    email_pattern = re.compile(
        r'^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$'
    )
    return email_pattern.match(email) is not None


def sanitize_string(value: str, max_length: Optional[int] = None) -> str:
    """
    Sanitize string input by removing dangerous characters.

    Args:
        value: String to sanitize
        max_length: Maximum allowed length

    Returns:
        Sanitized string
    """
    if not isinstance(value, str):
        return str(value)

    # Remove null bytes and control characters
    sanitized = ''.join(char for char in value if ord(char) >= 32 or char in '\t\n\r')

    # Trim whitespace
    sanitized = sanitized.strip()

    # Truncate if necessary
    if max_length and len(sanitized) > max_length:
        sanitized = sanitized[:max_length]

    return sanitized
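
# Annotation (not in the original diff): quick, runnable sanity checks for
# the three helpers above.
def _example_validators():
    assert is_valid_url('https://example.com/path?q=1')
    assert not is_valid_url('ftp://example.com')  # only http(s) is accepted
    assert is_valid_email('user@example.com')
    assert sanitize_string('  abc\x00def  ') == 'abcdef'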


def validate_id_parameter(param_name: str = 'id') -> Callable:
    """
    Decorator to validate ID parameters in URLs.

    Args:
        param_name: Name of the ID parameter

    Returns:
        Decorator function
    """
    def decorator(f: Callable) -> Callable:
        @wraps(f)
        def decorated_function(*args, **kwargs):
            if param_name in kwargs:
                try:
                    id_value = int(kwargs[param_name])
                    if id_value <= 0:
                        return create_error_response(
                            message=f"{param_name} must be a positive integer",
                            status_code=400,
                            error_code='INVALID_ID'
                        )
                    kwargs[param_name] = id_value
                except ValueError:
                    return create_error_response(
                        message=f"{param_name} must be an integer",
                        status_code=400,
                        error_code='INVALID_ID'
                    )

            return f(*args, **kwargs)
        return decorated_function
    return decorator
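
# Annotation (not in the original diff): a sketch of stacking these
# decorators on a route; the blueprint and view body are hypothetical.
#
#   @api.route('/anime/<int:anime_id>/refresh', methods=['POST'])
#   @validate_id_parameter('anime_id')
#   @validate_json_input(required_fields=['language'],
#                        field_types={'language': str})
#   def refresh_anime(anime_id):
#       ...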
@@ -513,7 +513,9 @@
             if (data.status === 'success') {
                 showMessage('Setup completed successfully! Redirecting...', 'success');
                 setTimeout(() => {
-                    window.location.href = '/';
+                    // Use redirect_url from API response, fallback to /login
+                    const redirectUrl = data.redirect_url || '/login';
+                    window.location.href = redirectUrl;
                 }, 2000);
             } else {
                 showMessage(data.message, 'error');
@@ -1,549 +0,0 @@
"""
|
|
||||||
Performance & Optimization Module for AniWorld App
|
|
||||||
|
|
||||||
This module provides download speed limiting, parallel download support,
|
|
||||||
caching mechanisms, memory usage monitoring, and download resumption.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import os
|
|
||||||
import threading
|
|
||||||
import time
|
|
||||||
import logging
|
|
||||||
import queue
|
|
||||||
import hashlib
|
|
||||||
from datetime import datetime, timedelta
|
|
||||||
from typing import Dict, List, Optional, Any, Callable
|
|
||||||
from dataclasses import dataclass, field
|
|
||||||
from concurrent.futures import ThreadPoolExecutor, as_completed
|
|
||||||
import json
|
|
||||||
import sqlite3
|
|
||||||
from contextlib import contextmanager
|
|
||||||
import gc
|
|
||||||
import psutil
|
|
||||||
import requests
|
|
||||||
|
|
||||||
|
|
||||||


@dataclass
class DownloadTask:
    """Represents a download task with all necessary information."""
    task_id: str
    serie_name: str
    season: int
    episode: int
    key: str
    language: str
    output_path: str
    temp_path: str
    priority: int = 0  # Higher number = higher priority
    retry_count: int = 0
    max_retries: int = 3
    created_at: datetime = field(default_factory=datetime.now)
    started_at: Optional[datetime] = None
    completed_at: Optional[datetime] = None
    status: str = 'pending'  # pending, downloading, completed, failed, paused
    progress: Dict[str, Any] = field(default_factory=dict)
    error_message: Optional[str] = None


class SpeedLimiter:
    """Control download speeds to prevent bandwidth saturation."""

    def __init__(self, max_speed_mbps: float = 0):  # 0 = unlimited
        # Note: despite the "mbps" name, the unit used throughout is
        # megabytes per second (MB/s), as the conversions below show.
        self.max_speed_mbps = max_speed_mbps
        self.max_bytes_per_second = max_speed_mbps * 1024 * 1024 if max_speed_mbps > 0 else 0
        self.download_start_time = None
        self.bytes_downloaded = 0
        self.lock = threading.Lock()
        self.logger = logging.getLogger(__name__)

    def set_speed_limit(self, max_speed_mbps: float):
        """Set maximum download speed in MB/s."""
        with self.lock:
            self.max_speed_mbps = max_speed_mbps
            self.max_bytes_per_second = max_speed_mbps * 1024 * 1024 if max_speed_mbps > 0 else 0
            self.logger.info(f"Speed limit set to {max_speed_mbps} MB/s")

    def start_download(self):
        """Mark the start of a new download session."""
        with self.lock:
            self.download_start_time = time.time()
            self.bytes_downloaded = 0

    def update_progress(self, bytes_downloaded: int):
        """Update download progress and apply speed limiting if needed."""
        if self.max_bytes_per_second <= 0:  # No limit
            return

        with self.lock:
            self.bytes_downloaded += bytes_downloaded

            if self.download_start_time:
                elapsed_time = time.time() - self.download_start_time
                if elapsed_time > 0:
                    current_speed = self.bytes_downloaded / elapsed_time

                    if current_speed > self.max_bytes_per_second:
                        # Calculate required delay
                        target_time = self.bytes_downloaded / self.max_bytes_per_second
                        delay = target_time - elapsed_time

                        if delay > 0:
                            self.logger.debug(f"Speed limiting: sleeping for {delay:.2f}s")
                            # Note: sleeping while holding self.lock also
                            # throttles other threads sharing this limiter.
                            time.sleep(delay)

    def get_current_speed(self) -> float:
        """Get current download speed in MB/s."""
        with self.lock:
            if self.download_start_time:
                elapsed_time = time.time() - self.download_start_time
                if elapsed_time > 0:
                    speed_bps = self.bytes_downloaded / elapsed_time
                    return speed_bps / (1024 * 1024)  # Convert to MB/s
            return 0.0
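
# Annotation (not in the original diff): worked example of the throttle math
# above with a 2 MB/s cap. After 10 MB in 4 s the observed speed is 2.5 MB/s;
# target_time = 10 MB / (2 MB/s) = 5 s, so update_progress() sleeps
# 5 - 4 = 1 s, which brings the session average back to the cap.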


class MemoryMonitor:
    """Monitor and optimize memory usage."""

    def __init__(self, warning_threshold_mb: int = 1024, critical_threshold_mb: int = 2048):
        self.warning_threshold = warning_threshold_mb * 1024 * 1024
        self.critical_threshold = critical_threshold_mb * 1024 * 1024
        self.logger = logging.getLogger(__name__)
        self.monitoring = False
        self.monitor_thread = None

    def start_monitoring(self, check_interval: int = 30):
        """Start continuous memory monitoring."""
        if self.monitoring:
            return

        self.monitoring = True
        self.monitor_thread = threading.Thread(
            target=self._monitoring_loop,
            args=(check_interval,),
            daemon=True
        )
        self.monitor_thread.start()
        self.logger.info("Memory monitoring started")

    def stop_monitoring(self):
        """Stop memory monitoring."""
        self.monitoring = False
        if self.monitor_thread:
            self.monitor_thread.join(timeout=5)
        self.logger.info("Memory monitoring stopped")

    def _monitoring_loop(self, check_interval: int):
        """Main monitoring loop."""
        while self.monitoring:
            try:
                self.check_memory_usage()
                time.sleep(check_interval)
            except Exception as e:
                self.logger.error(f"Error in memory monitoring: {e}")
                time.sleep(check_interval)

    def check_memory_usage(self):
        """Check current memory usage and take action if needed."""
        try:
            process = psutil.Process()
            memory_info = process.memory_info()
            memory_usage = memory_info.rss

            if memory_usage > self.critical_threshold:
                self.logger.warning(f"Critical memory usage: {memory_usage / (1024*1024):.1f} MB")
                self.force_garbage_collection()

                # Check again after GC
                memory_info = process.memory_info()
                memory_usage = memory_info.rss

                if memory_usage > self.critical_threshold:
                    self.logger.error("Memory usage still critical after garbage collection")

            elif memory_usage > self.warning_threshold:
                self.logger.info(f"Memory usage warning: {memory_usage / (1024*1024):.1f} MB")

        except Exception as e:
            self.logger.error(f"Failed to check memory usage: {e}")

    def force_garbage_collection(self):
        """Force garbage collection to free memory."""
        self.logger.debug("Forcing garbage collection")
        collected = gc.collect()
        self.logger.debug(f"Garbage collection freed {collected} objects")

    def get_memory_stats(self) -> Dict[str, Any]:
        """Get current memory statistics."""
        try:
            process = psutil.Process()
            memory_info = process.memory_info()

            return {
                'rss_mb': memory_info.rss / (1024 * 1024),
                'vms_mb': memory_info.vms / (1024 * 1024),
                'percent': process.memory_percent(),
                'warning_threshold_mb': self.warning_threshold / (1024 * 1024),
                'critical_threshold_mb': self.critical_threshold / (1024 * 1024)
            }
        except Exception as e:
            self.logger.error(f"Failed to get memory stats: {e}")
            return {}


class ParallelDownloadManager:
    """Manage parallel downloads with configurable thread count."""

    def __init__(self, max_workers: int = 3, speed_limiter: Optional[SpeedLimiter] = None):
        self.max_workers = max_workers
        self.speed_limiter = speed_limiter or SpeedLimiter()
        self.executor = ThreadPoolExecutor(max_workers=max_workers)
        self.active_tasks: Dict[str, DownloadTask] = {}
        self.pending_queue = queue.PriorityQueue()
        # Monotonic tie-breaker: without it, two tasks with equal priority
        # would make PriorityQueue compare DownloadTask objects directly,
        # which raises TypeError (dataclass without order=True).
        self._task_counter = itertools.count()
        self.completed_tasks: List[DownloadTask] = []
        self.failed_tasks: List[DownloadTask] = []
        self.lock = threading.Lock()
        self.logger = logging.getLogger(__name__)
        self.running = False
        self.worker_thread = None

        # Statistics
        self.stats = {
            'total_tasks': 0,
            'completed_tasks': 0,
            'failed_tasks': 0,
            'active_tasks': 0,
            'average_speed_mbps': 0.0
        }

    def start(self):
        """Start the download manager."""
        if self.running:
            return

        self.running = True
        self.worker_thread = threading.Thread(target=self._worker_loop, daemon=True)
        self.worker_thread.start()
        self.logger.info(f"Download manager started with {self.max_workers} workers")

    def stop(self):
        """Stop the download manager."""
        self.running = False

        # Cancel all pending tasks
        with self.lock:
            while not self.pending_queue.empty():
                try:
                    _, _, task = self.pending_queue.get_nowait()
                    task.status = 'cancelled'
                except queue.Empty:
                    break

        # Shutdown executor
        self.executor.shutdown(wait=True)

        if self.worker_thread:
            self.worker_thread.join(timeout=5)

        self.logger.info("Download manager stopped")

    def add_task(self, task: DownloadTask) -> str:
        """Add a download task to the queue."""
        with self.lock:
            self.stats['total_tasks'] += 1
            # Priority queue uses negative priority for max-heap behavior;
            # the counter breaks ties so tasks are never compared directly.
            self.pending_queue.put((-task.priority, next(self._task_counter), task))
            self.logger.info(f"Added download task: {task.task_id}")
            return task.task_id

    def _worker_loop(self):
        """Main worker loop that processes download tasks."""
        while self.running:
            try:
                # Check for pending tasks
                if not self.pending_queue.empty() and len(self.active_tasks) < self.max_workers:
                    _, _, task = self.pending_queue.get_nowait()

                    if task.status == 'pending':
                        self._start_task(task)

                # Check completed tasks
                self._check_completed_tasks()

                time.sleep(0.1)  # Small delay to prevent busy waiting

            except queue.Empty:
                time.sleep(1)
            except Exception as e:
                self.logger.error(f"Error in worker loop: {e}")
                time.sleep(1)

    def _start_task(self, task: DownloadTask):
        """Start a download task."""
        with self.lock:
            task.status = 'downloading'
            task.started_at = datetime.now()
            self.active_tasks[task.task_id] = task
            self.stats['active_tasks'] = len(self.active_tasks)

        # Submit to thread pool (task.future is a dynamic attribute;
        # DownloadTask is not slotted, so this is allowed)
        future = self.executor.submit(self._execute_download, task)
        task.future = future

        self.logger.info(f"Started download task: {task.task_id}")

    def _execute_download(self, task: DownloadTask) -> bool:
        """Execute the actual download."""
        try:
            self.logger.info(f"Executing download: {task.serie_name} S{task.season}E{task.episode}")

            # Create progress callback that respects speed limiting
            def progress_callback(info):
                if 'downloaded_bytes' in info:
                    self.speed_limiter.update_progress(info.get('downloaded_bytes', 0))

                # Update task progress
                task.progress.update(info)

            self.speed_limiter.start_download()

            # Here you would call the actual download function
            # For now, simulate download
            success = self._simulate_download(task, progress_callback)

            return success

        except Exception as e:
            self.logger.error(f"Download failed for task {task.task_id}: {e}")
            task.error_message = str(e)
            return False

    def _simulate_download(self, task: DownloadTask, progress_callback: Callable) -> bool:
        """Simulate download for testing purposes."""
        # This is a placeholder - replace with actual download logic
        total_size = 100 * 1024 * 1024  # 100MB simulation
        downloaded = 0
        chunk_size = 1024 * 1024  # 1MB chunks

        while downloaded < total_size and task.status == 'downloading':
            # Simulate download chunk
            time.sleep(0.1)
            downloaded += chunk_size

            progress_info = {
                'status': 'downloading',
                'downloaded_bytes': downloaded,
                'total_bytes': total_size,
                'percent': (downloaded / total_size) * 100
            }

            progress_callback(progress_info)

        if downloaded >= total_size:
            progress_callback({'status': 'finished'})
            return True

        return False

    def _check_completed_tasks(self):
        """Check for completed download tasks."""
        completed_task_ids = []

        with self.lock:
            for task_id, task in self.active_tasks.items():
                if hasattr(task, 'future') and task.future.done():
                    completed_task_ids.append(task_id)

        # Process completed tasks
        for task_id in completed_task_ids:
            self._handle_completed_task(task_id)

    def _handle_completed_task(self, task_id: str):
        """Handle a completed download task."""
        with self.lock:
            task = self.active_tasks.pop(task_id, None)
            if not task:
                return

            task.completed_at = datetime.now()
            self.stats['active_tasks'] = len(self.active_tasks)

        try:
            success = task.future.result()

            if success:
                task.status = 'completed'
                self.completed_tasks.append(task)
                self.stats['completed_tasks'] += 1
                self.logger.info(f"Task completed successfully: {task_id}")
            else:
                task.status = 'failed'
                self.failed_tasks.append(task)
                self.stats['failed_tasks'] += 1
                self.logger.warning(f"Task failed: {task_id}")

        except Exception as e:
            task.status = 'failed'
            task.error_message = str(e)
            self.failed_tasks.append(task)
            self.stats['failed_tasks'] += 1
            self.logger.error(f"Task failed with exception: {task_id} - {e}")

    def get_task_status(self, task_id: str) -> Optional[Dict[str, Any]]:
        """Get status of a specific task."""
        with self.lock:
            # Check active tasks
            if task_id in self.active_tasks:
                task = self.active_tasks[task_id]
                return self._task_to_dict(task)

            # Check completed tasks
            for task in self.completed_tasks:
                if task.task_id == task_id:
                    return self._task_to_dict(task)

            # Check failed tasks
            for task in self.failed_tasks:
                if task.task_id == task_id:
                    return self._task_to_dict(task)

        return None

    def _task_to_dict(self, task: DownloadTask) -> Dict[str, Any]:
        """Convert task to dictionary representation."""
        return {
            'task_id': task.task_id,
            'serie_name': task.serie_name,
            'season': task.season,
            'episode': task.episode,
            'status': task.status,
            'progress': task.progress,
            'created_at': task.created_at.isoformat(),
            'started_at': task.started_at.isoformat() if task.started_at else None,
            'completed_at': task.completed_at.isoformat() if task.completed_at else None,
            'error_message': task.error_message,
            'retry_count': task.retry_count
        }

    def get_all_tasks(self) -> Dict[str, List[Dict[str, Any]]]:
        """Get all tasks grouped by status."""
        with self.lock:
            return {
                'active': [self._task_to_dict(task) for task in self.active_tasks.values()],
                'completed': [self._task_to_dict(task) for task in self.completed_tasks[-50:]],  # Last 50
                'failed': [self._task_to_dict(task) for task in self.failed_tasks[-50:]]  # Last 50
            }

    def get_statistics(self) -> Dict[str, Any]:
        """Get download manager statistics."""
        return self.stats.copy()

    def set_max_workers(self, max_workers: int):
        """Change the number of worker threads."""
        if max_workers <= 0:
            raise ValueError("max_workers must be positive")

        self.max_workers = max_workers

        # Recreate executor with new worker count
        old_executor = self.executor
        self.executor = ThreadPoolExecutor(max_workers=max_workers)
        old_executor.shutdown(wait=False)

        self.logger.info(f"Updated worker count to {max_workers}")
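
# Annotation (not in the original diff): a minimal end-to-end sketch against
# the simulated backend; names and paths are illustrative. Note that stop()
# waits for in-flight (simulated) downloads to finish.
def _example_manager_usage():
    manager = ParallelDownloadManager(max_workers=2)
    manager.start()
    task = DownloadTask(
        task_id='demo-1', serie_name='Demo', season=1, episode=1,
        key='demo-key', language='German Dub',
        output_path='/tmp/demo.mp4', temp_path='/tmp/demo.part')
    manager.add_task(task)
    time.sleep(1)  # give the worker loop a moment to pick the task up
    print(manager.get_task_status('demo-1'))
    manager.stop()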


class ResumeManager:
    """Manage download resumption for interrupted downloads."""

    def __init__(self, resume_dir: str = "./resume"):
        self.resume_dir = resume_dir
        self.logger = logging.getLogger(__name__)
        os.makedirs(resume_dir, exist_ok=True)

    def save_resume_info(self, task_id: str, resume_data: Dict[str, Any]):
        """Save resume information for a download."""
        try:
            resume_file = os.path.join(self.resume_dir, f"{task_id}.json")
            with open(resume_file, 'w') as f:
                json.dump(resume_data, f, indent=2, default=str)

            self.logger.debug(f"Saved resume info for task: {task_id}")

        except Exception as e:
            self.logger.error(f"Failed to save resume info for {task_id}: {e}")

    def load_resume_info(self, task_id: str) -> Optional[Dict[str, Any]]:
        """Load resume information for a download."""
        try:
            resume_file = os.path.join(self.resume_dir, f"{task_id}.json")

            if os.path.exists(resume_file):
                with open(resume_file, 'r') as f:
                    resume_data = json.load(f)

                self.logger.debug(f"Loaded resume info for task: {task_id}")
                return resume_data

        except Exception as e:
            self.logger.error(f"Failed to load resume info for {task_id}: {e}")

        return None

    def clear_resume_info(self, task_id: str):
        """Clear resume information after successful completion."""
        try:
            resume_file = os.path.join(self.resume_dir, f"{task_id}.json")

            if os.path.exists(resume_file):
                os.remove(resume_file)
                self.logger.debug(f"Cleared resume info for task: {task_id}")

        except Exception as e:
            self.logger.error(f"Failed to clear resume info for {task_id}: {e}")

    def get_resumable_tasks(self) -> List[str]:
        """Get list of tasks that can be resumed."""
        try:
            resume_files = [f for f in os.listdir(self.resume_dir) if f.endswith('.json')]
            task_ids = [os.path.splitext(f)[0] for f in resume_files]
            return task_ids

        except Exception as e:
            self.logger.error(f"Failed to get resumable tasks: {e}")
            return []


# Global instances
speed_limiter = SpeedLimiter()
memory_monitor = MemoryMonitor()
download_manager = ParallelDownloadManager(max_workers=3, speed_limiter=speed_limiter)
resume_manager = ResumeManager()


def init_performance_monitoring():
    """Initialize performance monitoring components."""
    memory_monitor.start_monitoring()
    download_manager.start()


def cleanup_performance_monitoring():
    """Clean up performance monitoring components."""
    memory_monitor.stop_monitoring()
    download_manager.stop()


# Export main components
__all__ = [
    'SpeedLimiter',
    'MemoryMonitor',
    'ParallelDownloadManager',
    'ResumeManager',
    'DownloadTask',
    'speed_limiter',
    # 'download_cache' was listed here, but no such object is defined in
    # this module; it is dropped so `from ... import *` cannot fail.
    'memory_monitor',
    'download_manager',
    'resume_manager',
    'init_performance_monitoring',
    'cleanup_performance_monitoring'
]
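
# Annotation (not in the original diff): one reasonable way to wire the
# lifecycle hooks from the app entry point (atexit is an assumption, not the
# original author's documented approach):
#
#   import atexit
#   init_performance_monitoring()
#   atexit.register(cleanup_performance_monitoring)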
@@ -1,293 +0,0 @@
import threading
import time
from datetime import datetime, timedelta
from functools import wraps  # used by with_process_lock below
from typing import Dict, Optional, Callable
import logging

logger = logging.getLogger(__name__)


class ProcessLock:
    """Thread-safe process lock for preventing duplicate operations."""

    def __init__(self, name: str, timeout_minutes: int = 60):
        self.name = name
        self.timeout_minutes = timeout_minutes
        self.lock = threading.RLock()
        self.locked_at: Optional[datetime] = None
        self.locked_by: Optional[str] = None
        self.progress_callback: Optional[Callable] = None
        self.is_locked = False
        self.progress_data = {}

    def acquire(self, locked_by: str = "system", progress_callback: Callable = None) -> bool:
        """
        Attempt to acquire the lock.
        Returns True if lock was acquired, False if already locked.
        """
        with self.lock:
            # Check if lock has expired
            if self.is_locked and self.locked_at:
                if datetime.now() - self.locked_at > timedelta(minutes=self.timeout_minutes):
                    logger.warning(f"Process lock '{self.name}' expired, releasing...")
                    self._release_internal()

            if self.is_locked:
                return False

            self.is_locked = True
            self.locked_at = datetime.now()
            self.locked_by = locked_by
            self.progress_callback = progress_callback
            self.progress_data = {}

            logger.info(f"Process lock '{self.name}' acquired by '{locked_by}'")
            return True

    def release(self) -> bool:
        """Release the lock."""
        with self.lock:
            if not self.is_locked:
                return False

            self._release_internal()
            logger.info(f"Process lock '{self.name}' released")
            return True

    def _release_internal(self):
        """Internal method to release lock without logging."""
        self.is_locked = False
        self.locked_at = None
        self.locked_by = None
        self.progress_callback = None
        self.progress_data = {}

    def is_locked_by_other(self, requester: str) -> bool:
        """Check if lock is held by someone other than requester."""
        with self.lock:
            return self.is_locked and self.locked_by != requester

    def get_status(self) -> Dict:
        """Get current lock status."""
        with self.lock:
            return {
                'is_locked': self.is_locked,
                'locked_by': self.locked_by,
                'locked_at': self.locked_at.isoformat() if self.locked_at else None,
                'progress': self.progress_data.copy(),
                'timeout_minutes': self.timeout_minutes
            }

    def update_progress(self, progress_data: Dict):
        """Update progress data for this lock."""
        with self.lock:
            if self.is_locked:
                self.progress_data.update(progress_data)
                if self.progress_callback:
                    try:
                        self.progress_callback(progress_data)
                    except Exception as e:
                        logger.error(f"Progress callback error: {e}")

    def __enter__(self):
        """Context manager entry."""
        if not self.acquire():
            raise ProcessLockError(f"Could not acquire lock '{self.name}'")
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Context manager exit."""
        self.release()


class ProcessLockError(Exception):
    """Exception raised when process lock operations fail."""
    pass


class ProcessLockManager:
    """Global manager for all process locks."""

    def __init__(self):
        self.locks: Dict[str, ProcessLock] = {}
        self.manager_lock = threading.RLock()

    def get_lock(self, name: str, timeout_minutes: int = 60) -> ProcessLock:
        """Get or create a process lock."""
        with self.manager_lock:
            if name not in self.locks:
                self.locks[name] = ProcessLock(name, timeout_minutes)
            return self.locks[name]

    def acquire_lock(self, name: str, locked_by: str = "system",
                     timeout_minutes: int = 60, progress_callback: Callable = None) -> bool:
        """Acquire a named lock."""
        lock = self.get_lock(name, timeout_minutes)
        return lock.acquire(locked_by, progress_callback)

    def release_lock(self, name: str) -> bool:
        """Release a named lock."""
        with self.manager_lock:
            if name in self.locks:
                return self.locks[name].release()
            return False

    def is_locked(self, name: str) -> bool:
        """Check if a named lock is currently held."""
        with self.manager_lock:
            if name in self.locks:
                return self.locks[name].is_locked
            return False

    def get_all_locks_status(self) -> Dict:
        """Get status of all locks."""
        with self.manager_lock:
            return {
                name: lock.get_status()
                for name, lock in self.locks.items()
            }

    def cleanup_expired_locks(self) -> int:
        """Clean up any expired locks. Returns number of locks cleaned up."""
        cleaned_count = 0
        with self.manager_lock:
            for lock in self.locks.values():
                if lock.is_locked and lock.locked_at:
                    if datetime.now() - lock.locked_at > timedelta(minutes=lock.timeout_minutes):
                        lock._release_internal()
                        cleaned_count += 1
                        logger.info(f"Cleaned up expired lock: {lock.name}")

        return cleaned_count

    def force_release_all(self) -> int:
        """Force release all locks. Returns number of locks released."""
        released_count = 0
        with self.manager_lock:
            for lock in self.locks.values():
                if lock.is_locked:
                    lock._release_internal()
                    released_count += 1
                    logger.warning(f"Force released lock: {lock.name}")

        return released_count
# Global instance
|
|
||||||
process_lock_manager = ProcessLockManager()
|
|
||||||
|
|
||||||
# Predefined lock names for common operations
|
|
||||||
RESCAN_LOCK = "rescan"
|
|
||||||
DOWNLOAD_LOCK = "download"
|
|
||||||
SEARCH_LOCK = "search"
|
|
||||||
CONFIG_LOCK = "config"
|
|
||||||
|
|
||||||
def with_process_lock(lock_name: str, timeout_minutes: int = 60):
|
|
||||||
"""Decorator to protect functions with process locks."""
|
|
||||||
def decorator(func):
|
|
||||||
def wrapper(*args, **kwargs):
|
|
||||||
locked_by = kwargs.pop('_locked_by', func.__name__)
|
|
||||||
progress_callback = kwargs.pop('_progress_callback', None)
|
|
||||||
|
|
||||||
if not process_lock_manager.acquire_lock(lock_name, locked_by, timeout_minutes, progress_callback):
|
|
||||||
raise ProcessLockError(f"Process '{lock_name}' is already running")
|
|
||||||
|
|
||||||
try:
|
|
||||||
return func(*args, **kwargs)
|
|
||||||
finally:
|
|
||||||
process_lock_manager.release_lock(lock_name)
|
|
||||||
|
|
||||||
return wrapper
|
|
||||||
return decorator
|
|
||||||
|
|
||||||
|
|
||||||
def check_process_locks():
|
|
||||||
"""Check and clean up any expired process locks."""
|
|
||||||
return process_lock_manager.cleanup_expired_locks()
|
|
||||||
|
|
||||||
|
|
||||||
def get_process_status(lock_name: str) -> Dict:
|
|
||||||
"""Get status of a specific process lock."""
|
|
||||||
lock = process_lock_manager.get_lock(lock_name)
|
|
||||||
return lock.get_status()
|
|
||||||
|
|
||||||
|
|
||||||
def update_process_progress(lock_name: str, progress_data: Dict):
|
|
||||||
"""Update progress for a specific process."""
|
|
||||||
if process_lock_manager.is_locked(lock_name):
|
|
||||||
lock = process_lock_manager.get_lock(lock_name)
|
|
||||||
lock.update_progress(progress_data)
|
|
||||||
|
|
||||||
|
|
||||||
def is_process_running(lock_name: str) -> bool:
|
|
||||||
"""Check if a specific process is currently running."""
|
|
||||||
return process_lock_manager.is_locked(lock_name)
|
|
||||||
|
|
||||||
|
|
||||||
class QueueDeduplicator:
|
|
||||||
"""Prevent duplicate episodes in download queue."""
|
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
self.active_items = set() # Set of (serie_name, season, episode) tuples
|
|
||||||
self.lock = threading.RLock()
|
|
||||||
|
|
||||||
def add_episode(self, serie_name: str, season: int, episode: int) -> bool:
|
|
||||||
"""
|
|
||||||
Add episode to active set if not already present.
|
|
||||||
Returns True if added, False if duplicate.
|
|
||||||
"""
|
|
||||||
with self.lock:
|
|
||||||
episode_key = (serie_name, season, episode)
|
|
||||||
if episode_key in self.active_items:
|
|
||||||
return False
|
|
||||||
|
|
||||||
self.active_items.add(episode_key)
|
|
||||||
return True
|
|
||||||
|
|
||||||
def remove_episode(self, serie_name: str, season: int, episode: int):
|
|
||||||
"""Remove episode from active set."""
|
|
||||||
with self.lock:
|
|
||||||
episode_key = (serie_name, season, episode)
|
|
||||||
self.active_items.discard(episode_key)
|
|
||||||
|
|
||||||
def is_episode_active(self, serie_name: str, season: int, episode: int) -> bool:
|
|
||||||
"""Check if episode is currently being processed."""
|
|
||||||
with self.lock:
|
|
||||||
episode_key = (serie_name, season, episode)
|
|
||||||
return episode_key in self.active_items
|
|
||||||
|
|
||||||
def get_active_episodes(self) -> list:
|
|
||||||
"""Get list of all active episodes."""
|
|
||||||
with self.lock:
|
|
||||||
return list(self.active_items)
|
|
||||||
|
|
||||||
def clear_all(self):
|
|
||||||
"""Clear all active episodes."""
|
|
||||||
with self.lock:
|
|
||||||
self.active_items.clear()
|
|
||||||
|
|
||||||
def get_count(self) -> int:
|
|
||||||
"""Get number of active episodes."""
|
|
||||||
with self.lock:
|
|
||||||
return len(self.active_items)
|
|
||||||
|
|
||||||
|
|
||||||
# Global deduplicator instance
|
|
||||||
episode_deduplicator = QueueDeduplicator()
|
|
||||||
|
|
||||||
|
|
||||||
def add_episode_to_queue_safe(serie_name: str, season: int, episode: int) -> bool:
|
|
||||||
"""
|
|
||||||
Safely add episode to queue with deduplication.
|
|
||||||
Returns True if added, False if duplicate.
|
|
||||||
"""
|
|
||||||
return episode_deduplicator.add_episode(serie_name, season, episode)
|
|
||||||
|
|
||||||
|
|
||||||
def remove_episode_from_queue(serie_name: str, season: int, episode: int):
|
|
||||||
"""Remove episode from deduplication tracking."""
|
|
||||||
episode_deduplicator.remove_episode(serie_name, season, episode)
|
|
||||||
|
|
||||||
|
|
||||||
def is_episode_in_queue(serie_name: str, season: int, episode: int) -> bool:
|
|
||||||
"""Check if episode is already in queue/being processed."""
|
|
||||||
return episode_deduplicator.is_episode_active(serie_name, season, episode)
|
|
||||||
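Usage note: a minimal sketch of how the lock API above is meant to be consumed; `do_rescan` and the progress payloads are hypothetical examples, while the decorator, lock names, and manager come from the module above.

# Sketch only: `do_rescan` and the progress payload are hypothetical.
@with_process_lock(RESCAN_LOCK, timeout_minutes=30)
def do_rescan():
    # While the lock is held, progress is visible via get_process_status(RESCAN_LOCK).
    update_process_progress(RESCAN_LOCK, {"step": "scanning", "percent": 50})

do_rescan(_locked_by="scheduler")  # a second concurrent call raises ProcessLockError

# Context-manager form: the lock is released even if the body raises.
with process_lock_manager.get_lock(DOWNLOAD_LOCK) as lock:
    lock.update_progress({"downloaded": 3, "total": 10})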
@ -1,146 +0,0 @@
"""
Pytest configuration file for AniWorld application tests.
"""

import os
import sys
from unittest.mock import Mock

import pytest

# Add source directory to path
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))


@pytest.fixture(scope="session")
def test_config():
    """Test configuration settings."""
    return {
        "jwt_secret_key": "test-secret-key",
        "password_salt": "test-salt",
        "master_password": "test_password",
        "master_password_hash": "hashed_test_password",
        "token_expiry_hours": 1,
        "database_url": "sqlite:///:memory:",
        "anime_directory": "./test_data",
        "log_level": "DEBUG"
    }


@pytest.fixture
def mock_settings(test_config):
    """Mock settings for testing."""
    settings = Mock()
    for key, value in test_config.items():
        setattr(settings, key, value)
    return settings


@pytest.fixture
def mock_database():
    """Mock database connection."""
    return Mock()


@pytest.fixture
def mock_logger():
    """Mock logger for testing."""
    return Mock()


@pytest.fixture
def sample_anime_data():
    """Sample anime data for testing."""
    return {
        "id": 1,
        "title": "Test Anime",
        "genre": "Action",
        "year": 2023,
        "episodes": [
            {"id": 1, "title": "Episode 1", "season": 1, "episode": 1},
            {"id": 2, "title": "Episode 2", "season": 1, "episode": 2}
        ]
    }


@pytest.fixture
def sample_episode_data():
    """Sample episode data for testing."""
    return {
        "id": 1,
        "title": "Test Episode",
        "season": 1,
        "episode": 1,
        "anime_id": 1,
        "download_url": "https://example.com/episode1.mp4"
    }


@pytest.fixture
def valid_jwt_token():
    """Valid JWT token for testing."""
    from datetime import datetime, timedelta

    import jwt

    payload = {
        "user": "test_user",
        "exp": datetime.utcnow() + timedelta(hours=1)
    }
    return jwt.encode(payload, "test-secret-key", algorithm="HS256")


@pytest.fixture
def expired_jwt_token():
    """Expired JWT token for testing."""
    from datetime import datetime, timedelta

    import jwt

    payload = {
        "user": "test_user",
        "exp": datetime.utcnow() - timedelta(hours=1)
    }
    return jwt.encode(payload, "test-secret-key", algorithm="HS256")


@pytest.fixture
def mock_request():
    """Mock FastAPI request object."""
    request = Mock()
    request.headers = {}
    request.client = Mock()
    request.client.host = "127.0.0.1"
    return request


@pytest.fixture
def mock_file_system():
    """Mock file system operations."""
    return Mock()


# Pytest configuration
def pytest_configure(config):
    """Configure pytest with custom markers."""
    config.addinivalue_line(
        "markers", "unit: marks tests as unit tests"
    )
    config.addinivalue_line(
        "markers", "integration: marks tests as integration tests"
    )
    config.addinivalue_line(
        "markers", "e2e: marks tests as end-to-end tests"
    )
    config.addinivalue_line(
        "markers", "slow: marks tests as slow running"
    )


# Test collection configuration
collect_ignore = [
    "test_auth.ps1",
    "test_auth_flow.ps1",
    "test_database.ps1"
]
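Usage note: a minimal sketch (not part of the suite) of how the shared fixtures above compose in a test function; the test name and assertions are illustrative only.

# Illustrative only; relies solely on the fixtures defined above.
import pytest

@pytest.mark.unit
def test_sample_anime_shape(sample_anime_data, mock_settings):
    assert sample_anime_data["title"] == "Test Anime"
    assert len(sample_anime_data["episodes"]) == 2
    # mock_settings mirrors test_config, so attribute access returns the test values.
    assert mock_settings.jwt_secret_key == "test-secret-key"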
@ -1,232 +0,0 @@
"""
End-to-end tests for authentication flow.

Tests complete user authentication scenarios including login/logout flow
and session management.
"""

import os
import sys
from unittest.mock import patch

import pytest
from fastapi.testclient import TestClient

# Add source directory to path
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..'))

# Import after path setup
from src.server.fastapi_app import app  # noqa: E402


@pytest.fixture
def client():
    """Test client for E2E authentication tests."""
    return TestClient(app)


@pytest.mark.e2e
class TestAuthenticationE2E:
    """End-to-end authentication tests."""

    def test_full_authentication_workflow(self, client, mock_settings):
        """Test complete authentication workflow from user perspective."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            # Scenario: User wants to access protected resource

            # Step 1: Try to access protected endpoint without authentication
            protected_response = client.get("/api/anime/search?query=test")
            assert protected_response.status_code in [401, 403]  # Should be unauthorized

            # Step 2: User logs in with correct password
            login_response = client.post(
                "/auth/login",
                json={"password": "test_password"}
            )
            assert login_response.status_code == 200

            login_data = login_response.json()
            assert login_data["success"] is True
            token = login_data["token"]

            # Step 3: Verify token is working
            verify_response = client.get(
                "/auth/verify",
                headers={"Authorization": f"Bearer {token}"}
            )
            assert verify_response.status_code == 200
            assert verify_response.json()["valid"] is True

            # Step 4: Access protected resource with token
            # Note: This test assumes anime search endpoint exists and requires auth
            protected_response_with_auth = client.get(
                "/api/anime/search?query=test",
                headers={"Authorization": f"Bearer {token}"}
            )
            # Should not be 401/403 (actual response depends on implementation)
            assert protected_response_with_auth.status_code != 403

            # Step 5: User logs out
            logout_response = client.post(
                "/auth/logout",
                headers={"Authorization": f"Bearer {token}"}
            )
            assert logout_response.status_code == 200
            assert logout_response.json()["success"] is True

            # Step 6: Verify token behavior after logout
            # Note: This depends on implementation - some systems invalidate tokens,
            # others rely on expiry
            # Just verify the logout endpoint worked
            assert logout_response.json()["success"] is True

    def test_authentication_with_wrong_password_flow(self, client, mock_settings):
        """Test authentication flow with wrong password."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            # Step 1: User tries to login with wrong password
            login_response = client.post(
                "/auth/login",
                json={"password": "wrong_password"}
            )
            assert login_response.status_code == 401

            login_data = login_response.json()
            assert login_data["success"] is False
            assert "token" not in login_data

            # Step 2: User tries to access protected resource without valid token
            protected_response = client.get("/api/anime/search?query=test")
            assert protected_response.status_code in [401, 403]

            # Step 3: User tries again with correct password
            correct_login_response = client.post(
                "/auth/login",
                json={"password": "test_password"}
            )
            assert correct_login_response.status_code == 200
            assert correct_login_response.json()["success"] is True

    def test_session_expiry_simulation(self, client, mock_settings):
        """Test session expiry behavior."""
        # Set very short token expiry for testing
        mock_settings.token_expiry_hours = 0.001  # About 3.6 seconds

        with patch('src.server.fastapi_app.settings', mock_settings):
            # Login to get token
            login_response = client.post(
                "/auth/login",
                json={"password": "test_password"}
            )
            assert login_response.status_code == 200
            token = login_response.json()["token"]

            # Token should be valid immediately
            verify_response = client.get(
                "/auth/verify",
                headers={"Authorization": f"Bearer {token}"}
            )
            assert verify_response.status_code == 200

            # Wait for token to expire (in real implementation)
            # For testing, we'll just verify the token structure is correct
            import jwt
            payload = jwt.decode(token, options={"verify_signature": False})
            assert "exp" in payload
            assert payload["exp"] > 0

    def test_multiple_session_management(self, client, mock_settings):
        """Test managing multiple concurrent sessions."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            # Create multiple sessions (simulate multiple browser tabs/devices)
            sessions = []

            for i in range(3):
                login_response = client.post(
                    "/auth/login",
                    json={"password": "test_password"}
                )
                assert login_response.status_code == 200
                sessions.append(login_response.json()["token"])

            # All sessions should be valid
            for token in sessions:
                verify_response = client.get(
                    "/auth/verify",
                    headers={"Authorization": f"Bearer {token}"}
                )
                assert verify_response.status_code == 200

            # Logout from one session
            logout_response = client.post(
                "/auth/logout",
                headers={"Authorization": f"Bearer {sessions[0]}"}
            )
            assert logout_response.status_code == 200

            # Other sessions should still be valid (depending on implementation)
            for token in sessions[1:]:
                verify_response = client.get(
                    "/auth/verify",
                    headers={"Authorization": f"Bearer {token}"}
                )
                # Should still be valid unless implementation invalidates all sessions
                assert verify_response.status_code == 200

    def test_authentication_error_handling(self, client, mock_settings):
        """Test error handling in authentication flow."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            # Test various error scenarios

            # Invalid JSON
            invalid_json_response = client.post(
                "/auth/login",
                data="invalid json",
                headers={"Content-Type": "application/json"}
            )
            assert invalid_json_response.status_code == 422

            # Missing password field
            missing_field_response = client.post(
                "/auth/login",
                json={}
            )
            assert missing_field_response.status_code == 422

            # Empty password
            empty_password_response = client.post(
                "/auth/login",
                json={"password": ""}
            )
            assert empty_password_response.status_code == 422

            # Malformed authorization header
            malformed_auth_response = client.get(
                "/auth/verify",
                headers={"Authorization": "InvalidFormat"}
            )
            assert malformed_auth_response.status_code == 403

    def test_security_headers_and_responses(self, client, mock_settings):
        """Test security-related headers and response formats."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            # Test login response format
            login_response = client.post(
                "/auth/login",
                json={"password": "test_password"}
            )

            # Check response doesn't leak sensitive information
            login_data = login_response.json()
            assert "password" not in str(login_data)
            assert "secret" not in str(login_data).lower()

            # Test error responses don't leak sensitive information
            error_response = client.post(
                "/auth/login",
                json={"password": "wrong_password"}
            )

            error_data = error_response.json()
            assert "password" not in str(error_data)
            assert "hash" not in str(error_data).lower()
            assert "secret" not in str(error_data).lower()
@ -1,440 +0,0 @@
"""
End-to-End tests for bulk download and export flows.

This module tests complete user workflows for bulk operations including
download flows, export processes, and error handling scenarios.
"""

import asyncio
import time
from unittest.mock import AsyncMock, patch

import pytest
from fastapi.testclient import TestClient

from src.server.fastapi_app import app


@pytest.fixture
def client():
    """Create a test client for the FastAPI application."""
    return TestClient(app)


@pytest.fixture
def auth_headers(client):
    """Provide authentication headers for protected endpoints."""
    # Login to get token
    login_data = {"password": "testpassword"}

    with patch('src.server.fastapi_app.settings.master_password_hash') as mock_hash:
        mock_hash.return_value = "5e884898da28047151d0e56f8dc6292773603d0d6aabbdd62a11ef721d1542d8"  # 'password' hash
        response = client.post("/auth/login", json=login_data)

    if response.status_code == 200:
        token = response.json()["access_token"]
        return {"Authorization": f"Bearer {token}"}
    return {}


class TestBulkDownloadFlow:
    """End-to-end tests for bulk download workflows."""

    @patch('src.server.fastapi_app.get_current_user')
    def test_complete_bulk_download_workflow(self, mock_user, client):
        """Test complete bulk download workflow from search to completion."""
        mock_user.return_value = {"user_id": "test_user"}

        # Step 1: Search for anime to download
        search_response = client.get("/api/anime/search?q=test&limit=5")
        if search_response.status_code == 200:
            anime_list = search_response.json()
            anime_ids = [anime["id"] for anime in anime_list[:3]]  # Select first 3
        else:
            # Mock anime IDs if search endpoint not working
            anime_ids = ["anime1", "anime2", "anime3"]

        # Step 2: Initiate bulk download
        download_request = {
            "anime_ids": anime_ids,
            "quality": "1080p",
            "format": "mp4",
            "include_subtitles": True,
            "organize_by": "series"
        }

        download_response = client.post("/api/bulk/download", json=download_request)
        # Expected 404 since bulk endpoints not implemented yet
        assert download_response.status_code in [200, 202, 404]

        if download_response.status_code in [200, 202]:
            download_data = download_response.json()
            task_id = download_data.get("task_id")

            # Step 3: Monitor download progress
            if task_id:
                progress_response = client.get(f"/api/bulk/download/{task_id}/status")
                assert progress_response.status_code in [200, 404]

                if progress_response.status_code == 200:
                    progress_data = progress_response.json()
                    assert "status" in progress_data
                    assert "progress_percent" in progress_data

    @patch('src.server.fastapi_app.get_current_user')
    def test_bulk_download_with_retry_logic(self, mock_user, client):
        """Test bulk download with retry logic for failed items."""
        mock_user.return_value = {"user_id": "test_user"}

        # Start bulk download
        download_request = {
            "anime_ids": ["anime1", "anime2", "anime3"],
            "quality": "720p",
            "retry_failed": True,
            "max_retries": 3
        }

        download_response = client.post("/api/bulk/download", json=download_request)
        assert download_response.status_code in [200, 202, 404]

        if download_response.status_code in [200, 202]:
            task_id = download_response.json().get("task_id")

            # Simulate checking for failed items and retrying
            if task_id:
                failed_response = client.get(f"/api/bulk/download/{task_id}/failed")
                assert failed_response.status_code in [200, 404]

                if failed_response.status_code == 200:
                    failed_data = failed_response.json()
                    if failed_data.get("failed_items"):
                        # Retry failed items
                        retry_response = client.post(f"/api/bulk/download/{task_id}/retry")
                        assert retry_response.status_code in [200, 404]

    @patch('src.server.fastapi_app.get_current_user')
    def test_bulk_download_cancellation(self, mock_user, client):
        """Test cancelling an ongoing bulk download."""
        mock_user.return_value = {"user_id": "test_user"}

        # Start bulk download
        download_request = {
            "anime_ids": ["anime1", "anime2", "anime3", "anime4", "anime5"],
            "quality": "1080p"
        }

        download_response = client.post("/api/bulk/download", json=download_request)
        assert download_response.status_code in [200, 202, 404]

        if download_response.status_code in [200, 202]:
            task_id = download_response.json().get("task_id")

            if task_id:
                # Cancel the download
                cancel_response = client.post(f"/api/bulk/download/{task_id}/cancel")
                assert cancel_response.status_code in [200, 404]

                if cancel_response.status_code == 200:
                    cancel_data = cancel_response.json()
                    assert cancel_data.get("status") == "cancelled"

    @patch('src.server.fastapi_app.get_current_user')
    def test_bulk_download_with_insufficient_space(self, mock_user, client):
        """Test bulk download when there's insufficient disk space."""
        mock_user.return_value = {"user_id": "test_user"}

        # Try to download large amount of content
        download_request = {
            "anime_ids": [f"anime{i}" for i in range(100)],  # Large number
            "quality": "1080p",
            "check_disk_space": True
        }

        download_response = client.post("/api/bulk/download", json=download_request)
        # Should either work or return appropriate error
        assert download_response.status_code in [200, 202, 400, 404, 507]  # 507 = Insufficient Storage


class TestBulkExportFlow:
    """End-to-end tests for bulk export workflows."""

    @patch('src.server.fastapi_app.get_current_user')
    def test_complete_bulk_export_workflow(self, mock_user, client):
        """Test complete bulk export workflow."""
        mock_user.return_value = {"user_id": "test_user"}

        # Step 1: Get list of available anime for export
        anime_response = client.get("/api/anime/search?limit=10")
        if anime_response.status_code == 200:
            anime_list = anime_response.json()
            anime_ids = [anime["id"] for anime in anime_list[:5]]
        else:
            anime_ids = ["anime1", "anime2", "anime3"]

        # Step 2: Request bulk export
        export_request = {
            "anime_ids": anime_ids,
            "format": "json",
            "include_metadata": True,
            "include_episode_info": True,
            "include_download_history": False
        }

        export_response = client.post("/api/bulk/export", json=export_request)
        assert export_response.status_code in [200, 202, 404]

        if export_response.status_code in [200, 202]:
            export_data = export_response.json()

            # Step 3: Check export status or get download URL
            if "export_id" in export_data:
                export_id = export_data["export_id"]
                status_response = client.get(f"/api/bulk/export/{export_id}/status")
                assert status_response.status_code in [200, 404]
            elif "download_url" in export_data:
                # Direct download available
                download_url = export_data["download_url"]
                assert download_url.startswith("http")

    @patch('src.server.fastapi_app.get_current_user')
    def test_bulk_export_csv_format(self, mock_user, client):
        """Test bulk export in CSV format."""
        mock_user.return_value = {"user_id": "test_user"}

        export_request = {
            "anime_ids": ["anime1", "anime2"],
            "format": "csv",
            "include_metadata": True,
            "csv_options": {
                "delimiter": ",",
                "include_headers": True,
                "encoding": "utf-8"
            }
        }

        export_response = client.post("/api/bulk/export", json=export_request)
        assert export_response.status_code in [200, 202, 404]

        if export_response.status_code == 200:
            # Check if response is CSV content or redirect
            content_type = export_response.headers.get("content-type", "")
            assert "csv" in content_type or "json" in content_type

    @patch('src.server.fastapi_app.get_current_user')
    def test_bulk_export_with_filters(self, mock_user, client):
        """Test bulk export with filtering options."""
        mock_user.return_value = {"user_id": "test_user"}

        export_request = {
            "anime_ids": ["anime1", "anime2", "anime3"],
            "format": "json",
            "filters": {
                "completed_only": True,
                "include_watched": False,
                "min_rating": 7.0,
                "genres": ["Action", "Adventure"]
            },
            "include_metadata": True
        }

        export_response = client.post("/api/bulk/export", json=export_request)
        assert export_response.status_code in [200, 202, 404]


class TestBulkOrganizeFlow:
    """End-to-end tests for bulk organize workflows."""

    @patch('src.server.fastapi_app.get_current_user')
    def test_bulk_organize_by_genre(self, mock_user, client):
        """Test bulk organizing anime by genre."""
        mock_user.return_value = {"user_id": "test_user"}

        organize_request = {
            "anime_ids": ["anime1", "anime2", "anime3"],
            "organize_by": "genre",
            "create_subdirectories": True,
            "move_files": True,
            "update_database": True
        }

        organize_response = client.post("/api/bulk/organize", json=organize_request)
        assert organize_response.status_code in [200, 202, 404]

        if organize_response.status_code in [200, 202]:
            organize_data = organize_response.json()

            if "task_id" in organize_data:
                task_id = organize_data["task_id"]
                # Monitor organization progress
                status_response = client.get(f"/api/bulk/organize/{task_id}/status")
                assert status_response.status_code in [200, 404]

    @patch('src.server.fastapi_app.get_current_user')
    def test_bulk_organize_by_year(self, mock_user, client):
        """Test bulk organizing anime by release year."""
        mock_user.return_value = {"user_id": "test_user"}

        organize_request = {
            "anime_ids": ["anime1", "anime2"],
            "organize_by": "year",
            "year_format": "YYYY",
            "create_subdirectories": True,
            "dry_run": True  # Test without actually moving files
        }

        organize_response = client.post("/api/bulk/organize", json=organize_request)
        assert organize_response.status_code in [200, 404]

        if organize_response.status_code == 200:
            organize_data = organize_response.json()
            # Dry run should return what would be moved
            assert "preview" in organize_data or "operations" in organize_data


class TestBulkDeleteFlow:
    """End-to-end tests for bulk delete workflows."""

    @patch('src.server.fastapi_app.get_current_user')
    def test_bulk_delete_with_confirmation(self, mock_user, client):
        """Test bulk delete with proper confirmation flow."""
        mock_user.return_value = {"user_id": "test_user"}

        # Step 1: Request deletion (should require confirmation)
        delete_request = {
            "anime_ids": ["anime_to_delete1", "anime_to_delete2"],
            "delete_files": True,
            "confirm": False  # First request without confirmation
        }

        delete_response = client.delete("/api/bulk/delete", json=delete_request)
        # Should require confirmation
        assert delete_response.status_code in [400, 404, 422]

        # Step 2: Confirm deletion
        delete_request["confirm"] = True
        confirmed_response = client.delete("/api/bulk/delete", json=delete_request)
        assert confirmed_response.status_code in [200, 404]

    @patch('src.server.fastapi_app.get_current_user')
    def test_bulk_delete_database_only(self, mock_user, client):
        """Test bulk delete from database only (keep files)."""
        mock_user.return_value = {"user_id": "test_user"}

        delete_request = {
            "anime_ids": ["anime1", "anime2"],
            "delete_files": False,  # Keep files, remove from database only
            "confirm": True
        }

        delete_response = client.delete("/api/bulk/delete", json=delete_request)
        assert delete_response.status_code in [200, 404]


class TestBulkOperationsErrorHandling:
    """End-to-end tests for error handling in bulk operations."""

    @patch('src.server.fastapi_app.get_current_user')
    def test_bulk_operation_with_mixed_results(self, mock_user, client):
        """Test bulk operation where some items succeed and others fail."""
        mock_user.return_value = {"user_id": "test_user"}

        # Mix of valid and invalid anime IDs
        download_request = {
            "anime_ids": ["valid_anime1", "invalid_anime", "valid_anime2"],
            "quality": "1080p",
            "continue_on_error": True
        }

        download_response = client.post("/api/bulk/download", json=download_request)
        assert download_response.status_code in [200, 202, 404]

        if download_response.status_code in [200, 202]:
            result_data = download_response.json()
            # Should have information about successes and failures
            if "partial_success" in result_data:
                assert "successful" in result_data
                assert "failed" in result_data

    @patch('src.server.fastapi_app.get_current_user')
    def test_bulk_operation_timeout_handling(self, mock_user, client):
        """Test handling of bulk operation timeouts."""
        mock_user.return_value = {"user_id": "test_user"}

        # Large operation that might timeout
        large_request = {
            "anime_ids": [f"anime{i}" for i in range(50)],
            "quality": "1080p",
            "timeout_seconds": 30
        }

        download_response = client.post("/api/bulk/download", json=large_request)
        # Should either succeed, be accepted for background processing, or timeout
        assert download_response.status_code in [200, 202, 404, 408, 504]

    @patch('src.server.fastapi_app.get_current_user')
    def test_concurrent_bulk_operations(self, mock_user, client):
        """Test handling of concurrent bulk operations."""
        mock_user.return_value = {"user_id": "test_user"}

        # Start first operation
        first_request = {
            "anime_ids": ["anime1", "anime2"],
            "quality": "1080p"
        }

        first_response = client.post("/api/bulk/download", json=first_request)

        # Start second operation while first is running
        second_request = {
            "anime_ids": ["anime3", "anime4"],
            "quality": "720p"
        }

        second_response = client.post("/api/bulk/download", json=second_request)

        # Both operations should be handled appropriately
        assert first_response.status_code in [200, 202, 404]
        assert second_response.status_code in [200, 202, 404, 429]  # 429 = Too Many Requests


class TestBulkOperationsPerformance:
    """Performance tests for bulk operations."""

    @patch('src.server.fastapi_app.get_current_user')
    def test_bulk_operation_response_time(self, mock_user, client):
        """Test that bulk operations respond within reasonable time."""
        mock_user.return_value = {"user_id": "test_user"}

        start_time = time.time()

        download_request = {
            "anime_ids": ["anime1", "anime2", "anime3"],
            "quality": "1080p"
        }

        response = client.post("/api/bulk/download", json=download_request)

        response_time = time.time() - start_time

        # Response should be quick (< 5 seconds) even if processing is background
        assert response_time < 5.0
        assert response.status_code in [200, 202, 404]

    @patch('src.server.fastapi_app.get_current_user')
    def test_bulk_operation_memory_usage(self, mock_user, client):
        """Test bulk operations don't cause excessive memory usage."""
        mock_user.return_value = {"user_id": "test_user"}

        # Large bulk operation
        large_request = {
            "anime_ids": [f"anime{i}" for i in range(100)],
            "quality": "1080p"
        }

        # This test would need actual memory monitoring in real implementation
        response = client.post("/api/bulk/download", json=large_request)
        assert response.status_code in [200, 202, 404, 413]  # 413 = Payload Too Large


if __name__ == "__main__":
    pytest.main([__file__, "-v"])
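Usage note: the `auth_headers` fixture above hard-codes a SHA-256 digest; a short sketch showing where that literal comes from, under the assumption that the server stores the master password as a plain SHA-256 hex digest.

# The fixture's literal is the SHA-256 of the string "password".
# Assumption: the server compares plain SHA-256 hex digests.
import hashlib

digest = hashlib.sha256(b"password").hexdigest()
assert digest == "5e884898da28047151d0e56f8dc6292773603d0d6aabbdd62a11ef721d1542d8"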
@ -1,407 +0,0 @@
"""
End-to-end tests for CLI flows.

Tests complete CLI workflows including progress bar functionality,
retry logic, user interactions, and error scenarios.
"""

import os
import sys
import tempfile
from unittest.mock import Mock, patch

import pytest

# Add source directory to path
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..'))

# Import after path setup
from src.cli.Main import SeriesApp  # noqa: E402


@pytest.fixture
def temp_directory():
    """Create a temporary directory for testing."""
    with tempfile.TemporaryDirectory() as temp_dir:
        yield temp_dir


@pytest.mark.e2e
class TestCLICompleteWorkflows:
    """Test complete CLI workflows from user perspective."""

    def test_search_and_download_workflow(self, temp_directory):
        """Test complete search -> select -> download workflow."""
        with patch('src.cli.Main.Loaders'), \
                patch('src.cli.Main.SerieScanner'), \
                patch('src.cli.Main.SerieList'):

            app = SeriesApp(temp_directory)

            # Mock search results
            mock_search_results = [
                {"name": "Test Anime", "link": "test_link"}
            ]

            # Mock series for download
            mock_episode_dict = {1: [1, 2, 3], 2: [1, 2]}
            mock_series = Mock(
                episodeDict=mock_episode_dict,
                folder="test_anime",
                key="test_key"
            )
            app.series_list = [mock_series]

            # Mock loader
            mock_loader = Mock()
            mock_loader.Search.return_value = mock_search_results
            mock_loader.IsLanguage.return_value = True
            mock_loader.Download.return_value = None
            app.Loaders.GetLoader.return_value = mock_loader

            # Test search workflow
            with patch('builtins.input', side_effect=['test query', '1']), \
                    patch('builtins.print'):

                app.search_mode()

                # Should have called search and add
                mock_loader.Search.assert_called_with('test query')
                app.List.add.assert_called_once()

            # Test download workflow
            with patch('rich.progress.Progress') as mock_progress_class, \
                    patch('time.sleep'), \
                    patch('builtins.input', return_value='1'):

                mock_progress = Mock()
                mock_progress_class.return_value = mock_progress

                selected_series = app.get_user_selection()
                assert selected_series is not None

                app.download_series(selected_series)

                # Should have set up progress tracking
                mock_progress.start.assert_called_once()
                mock_progress.stop.assert_called_once()

                # Should have attempted downloads for all episodes
                expected_downloads = sum(len(episodes) for episodes in mock_episode_dict.values())
                assert mock_loader.Download.call_count == expected_downloads

    def test_init_and_rescan_workflow(self, temp_directory):
        """Test initialization and rescanning workflow."""
        with patch('src.cli.Main.Loaders'), \
                patch('src.cli.Main.SerieScanner') as mock_scanner_class, \
                patch('src.cli.Main.SerieList') as mock_list_class:

            mock_scanner = Mock()
            mock_scanner_class.return_value = mock_scanner
            mock_list = Mock()
            mock_list_class.return_value = mock_list

            app = SeriesApp(temp_directory)
            app.SerieScanner = mock_scanner

            # Test rescan workflow
            with patch('rich.progress.Progress') as mock_progress_class, \
                    patch('builtins.print'):

                mock_progress = Mock()
                mock_progress_class.return_value = mock_progress

                # Simulate init action
                app.progress = mock_progress
                app.task1 = "task1_id"

                # Call reinit workflow
                app.SerieScanner.Reinit()
                app.SerieScanner.Scan(app.updateFromReinit)

                # Should have called scanner methods
                mock_scanner.Reinit.assert_called_once()
                mock_scanner.Scan.assert_called_once()

    def test_error_recovery_workflow(self, temp_directory):
        """Test error recovery in CLI workflows."""
        with patch('src.cli.Main.Loaders'), \
                patch('src.cli.Main.SerieScanner'), \
                patch('src.cli.Main.SerieList'):

            app = SeriesApp(temp_directory)

            # Test retry mechanism with eventual success
            mock_func = Mock(side_effect=[
                Exception("First failure"),
                Exception("Second failure"),
                None  # Success on third try
            ])

            with patch('time.sleep'), patch('builtins.print'):
                result = app.retry(mock_func, max_retries=3, delay=0)

            assert result is True
            assert mock_func.call_count == 3

            # Test retry mechanism with persistent failure
            mock_func_fail = Mock(side_effect=Exception("Persistent error"))

            with patch('time.sleep'), patch('builtins.print'):
                result = app.retry(mock_func_fail, max_retries=2, delay=0)

            assert result is False
            assert mock_func_fail.call_count == 2


@pytest.mark.e2e
class TestCLIUserInteractionFlows:
    """Test CLI user interaction flows."""

    def test_user_selection_validation_flow(self, temp_directory):
        """Test user selection with various invalid inputs before success."""
        with patch('src.cli.Main.Loaders'), \
                patch('src.cli.Main.SerieScanner'), \
                patch('src.cli.Main.SerieList'):

            app = SeriesApp(temp_directory)
            app.series_list = [
                Mock(name="Anime 1", folder="anime1"),
                Mock(name="Anime 2", folder="anime2")
            ]

            # Test sequence: invalid text -> invalid number -> valid selection
            input_sequence = ['invalid_text', '999', '1']

            with patch('builtins.input', side_effect=input_sequence), \
                    patch('builtins.print'):

                selected = app.get_user_selection()

            assert selected is not None
            assert len(selected) == 1
            assert selected[0].name == "Anime 1"

    def test_search_interaction_flow(self, temp_directory):
        """Test search interaction with various user inputs."""
        with patch('src.cli.Main.Loaders'), \
                patch('src.cli.Main.SerieScanner'), \
                patch('src.cli.Main.SerieList'):

            app = SeriesApp(temp_directory)

            mock_search_results = [
                {"name": "Result 1", "link": "link1"},
                {"name": "Result 2", "link": "link2"}
            ]

            mock_loader = Mock()
            mock_loader.Search.return_value = mock_search_results
            app.Loaders.GetLoader.return_value = mock_loader

            # Test sequence: search -> invalid selection -> valid selection
            with patch('builtins.input', side_effect=['test search', '999', '1']), \
                    patch('builtins.print'):

                app.search_mode()

            # Should have added the selected item
            app.List.add.assert_called_once()

    def test_main_loop_interaction_flow(self, temp_directory):
        """Test main application loop with user interactions."""
        with patch('src.cli.Main.Loaders'), \
                patch('src.cli.Main.SerieScanner'), \
                patch('src.cli.Main.SerieList'):

            app = SeriesApp(temp_directory)
            app.series_list = [Mock(name="Test Anime", folder="test")]

            # Mock various components
            with patch.object(app, 'search_mode') as mock_search, \
                    patch.object(app, 'get_user_selection', return_value=[Mock()]), \
                    patch.object(app, 'download_series') as mock_download, \
                    patch('rich.progress.Progress'), \
                    patch('builtins.print'):

                # Test sequence: search -> download -> exit
                with patch('builtins.input', side_effect=['s', 'd', KeyboardInterrupt()]):
                    try:
                        app.run()
                    except KeyboardInterrupt:
                        pass

                mock_search.assert_called_once()
                mock_download.assert_called_once()


@pytest.mark.e2e
class TestCLIProgressAndFeedback:
    """Test CLI progress indicators and user feedback."""

    def test_download_progress_flow(self, temp_directory):
        """Test download progress tracking throughout workflow."""
        with patch('src.cli.Main.Loaders'), \
                patch('src.cli.Main.SerieScanner'), \
                patch('src.cli.Main.SerieList'):

            app = SeriesApp(temp_directory)

            # Mock series with episodes
            mock_series = [
                Mock(
                    episodeDict={1: [1, 2], 2: [1]},
                    folder="anime1",
                    key="key1"
                )
            ]

            # Mock loader
            mock_loader = Mock()
            mock_loader.IsLanguage.return_value = True
            mock_loader.Download.return_value = None
            app.Loaders.GetLoader.return_value = mock_loader

            with patch('rich.progress.Progress') as mock_progress_class, \
                    patch('time.sleep'):

                mock_progress = Mock()
                mock_progress_class.return_value = mock_progress

                app.download_series(mock_series)

                # Verify progress setup
                assert mock_progress.add_task.call_count >= 3  # At least 3 tasks
                mock_progress.start.assert_called_once()
                mock_progress.stop.assert_called_once()

                # Verify progress updates
                assert mock_progress.update.call_count > 0

    def test_progress_callback_integration(self, temp_directory):
        """Test progress callback integration with download system."""
        with patch('src.cli.Main.Loaders'), \
                patch('src.cli.Main.SerieScanner'), \
                patch('src.cli.Main.SerieList'):

            app = SeriesApp(temp_directory)
            app.progress = Mock()
            app.task3 = "download_task"

            # Test various progress states
            progress_states = [
                {
                    'status': 'downloading',
                    'total_bytes': 1000000,
                    'downloaded_bytes': 250000
                },
                {
                    'status': 'downloading',
                    'total_bytes': 1000000,
                    'downloaded_bytes': 750000
                },
                {
                    'status': 'finished'
                }
            ]

            for state in progress_states:
                app.print_Download_Progress(state)

            # Should have updated progress for each state
            assert app.progress.update.call_count == len(progress_states)

            # Last call should indicate completion
            last_call = app.progress.update.call_args_list[-1]
            assert last_call[1].get('completed') == 100

    def test_scan_progress_integration(self, temp_directory):
        """Test scanning progress integration."""
        with patch('src.cli.Main.Loaders'), \
                patch('src.cli.Main.SerieScanner'), \
                patch('src.cli.Main.SerieList'):

            app = SeriesApp(temp_directory)
            app.progress = Mock()
            app.task1 = "scan_task"

            # Simulate scan progress updates
            for i in range(5):
                app.updateFromReinit("folder", i)

            # Should have updated progress for each folder
            assert app.progress.update.call_count == 5

            # Each call should advance by 1
            for call in app.progress.update.call_args_list:
                assert call[1].get('advance') == 1


@pytest.mark.e2e
class TestCLIErrorScenarios:
    """Test CLI error scenarios and recovery."""

    def test_network_error_recovery(self, temp_directory):
        """Test recovery from network errors during operations."""
        with patch('src.cli.Main.Loaders'), \
                patch('src.cli.Main.SerieScanner'), \
                patch('src.cli.Main.SerieList'):

            app = SeriesApp(temp_directory)

            # Mock network failures
            network_error = Exception("Network connection failed")
            mock_func = Mock(side_effect=[network_error, network_error, None])

            with patch('time.sleep'), patch('builtins.print'):
                result = app.retry(mock_func, max_retries=3, delay=0)

            assert result is True
            assert mock_func.call_count == 3

    def test_invalid_directory_handling(self):
        """Test handling of invalid directory paths."""
        invalid_directory = "/nonexistent/path/that/does/not/exist"

        with patch('src.cli.Main.Loaders'), \
                patch('src.cli.Main.SerieScanner'), \
                patch('src.cli.Main.SerieList'):

            # Should not raise exception during initialization
            app = SeriesApp(invalid_directory)
            assert app.directory_to_search == invalid_directory

    def test_empty_search_results_handling(self, temp_directory):
        """Test handling of empty search results."""
        with patch('src.cli.Main.Loaders'), \
                patch('src.cli.Main.SerieScanner'), \
                patch('src.cli.Main.SerieList'):

            app = SeriesApp(temp_directory)

            # Mock empty search results
            mock_loader = Mock()
            mock_loader.Search.return_value = []
            app.Loaders.GetLoader.return_value = mock_loader

            with patch('builtins.input', return_value='nonexistent anime'), \
                    patch('builtins.print') as mock_print:

                app.search_mode()

            # Should print "No results found" message
            print_calls = [call[0][0] for call in mock_print.call_args_list]
            assert any("No results found" in call for call in print_calls)

    def test_keyboard_interrupt_handling(self, temp_directory):
        """Test graceful handling of keyboard interrupts."""
        with patch('src.cli.Main.Loaders'), \
                patch('src.cli.Main.SerieScanner'), \
                patch('src.cli.Main.SerieList'):

            app = SeriesApp(temp_directory)

            # Test that KeyboardInterrupt propagates correctly
            with patch('builtins.input', side_effect=KeyboardInterrupt()):
                with pytest.raises(KeyboardInterrupt):
                    app.run()
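Usage note: the tests above pin down `SeriesApp.retry` only through its observable contract (True on eventual success, False once retries are exhausted, sleeping between attempts); below is a minimal sketch consistent with that contract, not the actual implementation.

# Hypothetical stand-in for SeriesApp.retry, matching only the asserted behavior.
import time

def retry(func, max_retries: int = 3, delay: float = 1.0) -> bool:
    for attempt in range(1, max_retries + 1):
        try:
            func()
            return True
        except Exception as exc:
            print(f"Attempt {attempt}/{max_retries} failed: {exc}")
            time.sleep(delay)
    return False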
@ -1,550 +0,0 @@
"""
End-to-End tests for user preferences workflows and UI response verification.

This module tests complete user workflows for changing preferences and verifying
that the UI responds appropriately to preference changes.
"""

import time
from unittest.mock import patch

import pytest
from fastapi.testclient import TestClient

from src.server.fastapi_app import app


@pytest.fixture
def client():
    """Create a test client for the FastAPI application."""
    return TestClient(app)


@pytest.fixture
def auth_headers(client):
    """Provide authentication headers for protected endpoints."""
    # Login to get token
    login_data = {"password": "testpassword"}

    with patch('src.server.fastapi_app.settings.master_password_hash') as mock_hash:
        mock_hash.return_value = "5e884898da28047151d0e56f8dc6292773603d0d6aabbdd62a11ef721d1542d8"  # SHA-256 of 'password'
        response = client.post("/auth/login", json=login_data)

    if response.status_code == 200:
        token = response.json()["access_token"]
        return {"Authorization": f"Bearer {token}"}
    return {}
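
# Note: the digest hard-coded above is SHA-256 of the string "password",
# while the fixture logs in with "testpassword"; the two only line up if the
# server derives the hash differently (e.g. with a salt). A hedged sketch of
# how such a fixture value can be computed, assuming plain unsalted SHA-256:
#
#     import hashlib
#     MASTER_PASSWORD_HASH = hashlib.sha256(b"password").hexdigest()
#     # -> "5e884898da28047151d0e56f8dc6292773603d0d6aabbdd62a11ef721d1542d8"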

class TestThemeChangeWorkflow:
    """End-to-end tests for theme changing workflows."""

    @patch('src.server.fastapi_app.get_current_user')
    def test_complete_theme_change_workflow(self, mock_user, client):
        """Test complete workflow of changing theme and verifying UI updates."""
        mock_user.return_value = {"user_id": "test_user"}

        # Step 1: Get current theme
        current_theme_response = client.get("/api/preferences/themes/current")
        initial_theme = None
        if current_theme_response.status_code == 200:
            initial_theme = current_theme_response.json().get("theme", {}).get("name")

        # Step 2: Get available themes
        themes_response = client.get("/api/preferences/themes")
        available_themes = []
        if themes_response.status_code == 200:
            available_themes = [theme["name"] for theme in themes_response.json().get("themes", [])]

        # Step 3: Change to different theme
        new_theme = "dark" if initial_theme != "dark" else "light"
        if not available_themes:
            available_themes = ["light", "dark"]  # Default themes

        if new_theme in available_themes:
            theme_change_data = {"theme_name": new_theme}
            change_response = client.post("/api/preferences/themes/set", json=theme_change_data)

            if change_response.status_code == 200:
                # Step 4: Verify theme was changed
                updated_theme_response = client.get("/api/preferences/themes/current")
                if updated_theme_response.status_code == 200:
                    updated_theme = updated_theme_response.json().get("theme", {}).get("name")
                    assert updated_theme == new_theme

                # Step 5: Verify UI reflects theme change (mock check)
                ui_response = client.get("/api/preferences/ui")
                if ui_response.status_code == 200:
                    ui_data = ui_response.json()
                    # UI should reflect the theme change
                    assert "theme" in str(ui_data).lower() or "current" in str(ui_data).lower()

        # Test passes if endpoints respond appropriately (200 or 404)
        assert themes_response.status_code in [200, 404]

    @patch('src.server.fastapi_app.get_current_user')
    def test_custom_theme_creation_and_application(self, mock_user, client):
        """Test creating custom theme and applying it."""
        mock_user.return_value = {"user_id": "test_user"}

        # Step 1: Create custom theme
        custom_theme_data = {
            "name": "my_test_theme",
            "display_name": "My Test Theme",
            "colors": {
                "primary": "#007acc",
                "secondary": "#6c757d",
                "background": "#ffffff",
                "text": "#333333",
                "accent": "#28a745"
            },
            "is_dark": False
        }

        create_response = client.post("/api/preferences/themes/custom", json=custom_theme_data)

        if create_response.status_code == 201:
            theme_data = create_response.json()
            theme_id = theme_data.get("theme_id")

            # Step 2: Apply the custom theme
            apply_data = {"theme_name": "my_test_theme"}
            apply_response = client.post("/api/preferences/themes/set", json=apply_data)

            if apply_response.status_code == 200:
                # Step 3: Verify custom theme is active
                current_response = client.get("/api/preferences/themes/current")
                if current_response.status_code == 200:
                    current_theme = current_response.json().get("theme", {})
                    assert current_theme.get("name") == "my_test_theme"

        # Test endpoints exist and respond appropriately
        assert create_response.status_code in [201, 404]

    @patch('src.server.fastapi_app.get_current_user')
    def test_theme_persistence_across_sessions(self, mock_user, client):
        """Test that theme preference persists across sessions."""
        mock_user.return_value = {"user_id": "test_user"}

        # Set theme
        theme_data = {"theme_name": "dark"}
        set_response = client.post("/api/preferences/themes/set", json=theme_data)

        if set_response.status_code == 200:
            # Simulate new session by getting current theme
            current_response = client.get("/api/preferences/themes/current")
            if current_response.status_code == 200:
                current_theme = current_response.json().get("theme", {}).get("name")
                assert current_theme == "dark"

        assert set_response.status_code in [200, 404]
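
# A hedged sketch of the theme routes the workflow above assumes
# (hypothetical names and shapes, inferred only from the requests these
# tests make; the real application may differ):
#
#     from fastapi import APIRouter
#     from pydantic import BaseModel
#
#     router = APIRouter(prefix="/api/preferences/themes")
#     _store = {"theme": "light"}  # hypothetical in-memory store
#
#     class ThemeRequest(BaseModel):
#         theme_name: str
#
#     @router.get("/current")
#     def current_theme():
#         return {"theme": {"name": _store["theme"]}}
#
#     @router.post("/set")
#     def set_theme(req: ThemeRequest):
#         _store["theme"] = req.theme_name
#         return {"success": True}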

class TestLanguageChangeWorkflow:
    """End-to-end tests for language changing workflows."""

    @patch('src.server.fastapi_app.get_current_user')
    def test_complete_language_change_workflow(self, mock_user, client):
        """Test complete workflow of changing language and verifying UI updates."""
        mock_user.return_value = {"user_id": "test_user"}

        # Step 1: Get available languages
        languages_response = client.get("/api/preferences/languages")
        available_languages = []
        if languages_response.status_code == 200:
            available_languages = [lang["code"] for lang in languages_response.json().get("languages", [])]

        # Step 2: Get current language
        current_response = client.get("/api/preferences/languages/current")
        current_language = None
        if current_response.status_code == 200:
            current_language = current_response.json().get("language", {}).get("code")

        # Step 3: Change to different language
        new_language = "de" if current_language != "de" else "en"
        if not available_languages:
            available_languages = ["en", "de", "fr", "es"]  # Default languages

        if new_language in available_languages:
            language_data = {"language_code": new_language}
            change_response = client.post("/api/preferences/languages/set", json=language_data)

            if change_response.status_code == 200:
                # Step 4: Verify language was changed
                updated_response = client.get("/api/preferences/languages/current")
                if updated_response.status_code == 200:
                    updated_language = updated_response.json().get("language", {}).get("code")
                    assert updated_language == new_language

                # Step 5: Verify UI text reflects language change (mock check)
                # In real implementation, this would check translated text
                ui_response = client.get("/")  # Main page
                assert ui_response.status_code in [200, 404]

        assert languages_response.status_code in [200, 404]

    @patch('src.server.fastapi_app.get_current_user')
    def test_language_fallback_behavior(self, mock_user, client):
        """Test language fallback when preferred language is unavailable."""
        mock_user.return_value = {"user_id": "test_user"}

        # Try to set unsupported language
        unsupported_language_data = {"language_code": "xyz"}  # Non-existent language
        change_response = client.post("/api/preferences/languages/set", json=unsupported_language_data)

        # Should either reject or fallback to default
        assert change_response.status_code in [400, 404, 422]

        # Verify fallback to default language
        current_response = client.get("/api/preferences/languages/current")
        if current_response.status_code == 200:
            current_language = current_response.json().get("language", {}).get("code")
            # Should be a valid language code (en, de, etc.)
            if current_language:
                assert len(current_language) >= 2

class TestAccessibilityWorkflow:
    """End-to-end tests for accessibility settings workflows."""

    @patch('src.server.fastapi_app.get_current_user')
    def test_accessibility_settings_workflow(self, mock_user, client):
        """Test complete accessibility settings workflow."""
        mock_user.return_value = {"user_id": "test_user"}

        # Step 1: Get current accessibility settings
        current_response = client.get("/api/preferences/accessibility")
        initial_settings = {}
        if current_response.status_code == 200:
            initial_settings = current_response.json()

        # Step 2: Update accessibility settings
        new_settings = {
            "high_contrast": True,
            "large_text": True,
            "reduced_motion": False,
            "screen_reader_support": True,
            "keyboard_navigation": True,
            "font_size_multiplier": 1.5
        }

        update_response = client.put("/api/preferences/accessibility", json=new_settings)

        if update_response.status_code == 200:
            # Step 3: Verify settings were updated
            updated_response = client.get("/api/preferences/accessibility")
            if updated_response.status_code == 200:
                updated_settings = updated_response.json()

                # Check that key settings were updated
                for key, value in new_settings.items():
                    if key in updated_settings:
                        assert updated_settings[key] == value

            # Step 4: Verify UI reflects accessibility changes
            # Check main page with accessibility features
            main_page_response = client.get("/app")
            if main_page_response.status_code == 200:
                # In real implementation, would check for accessibility features
                assert main_page_response.status_code == 200

        assert current_response.status_code in [200, 404]

    @patch('src.server.fastapi_app.get_current_user')
    def test_high_contrast_mode_workflow(self, mock_user, client):
        """Test high contrast mode workflow."""
        mock_user.return_value = {"user_id": "test_user"}

        # Enable high contrast mode
        accessibility_data = {
            "high_contrast": True,
            "large_text": True
        }

        update_response = client.put("/api/preferences/accessibility", json=accessibility_data)

        if update_response.status_code == 200:
            # Verify theme reflects high contrast
            theme_response = client.get("/api/preferences/themes/current")
            if theme_response.status_code == 200:
                theme_data = theme_response.json()
                # High contrast should influence theme colors
                assert "theme" in theme_data

        assert update_response.status_code in [200, 404]

class TestUISettingsWorkflow:
    """End-to-end tests for UI settings workflows."""

    @patch('src.server.fastapi_app.get_current_user')
    def test_view_mode_change_workflow(self, mock_user, client):
        """Test changing view mode from grid to list and back."""
        mock_user.return_value = {"user_id": "test_user"}

        # Step 1: Get current UI settings
        ui_response = client.get("/api/preferences/ui")
        current_view_mode = None
        if ui_response.status_code == 200:
            current_view_mode = ui_response.json().get("view_mode")

        # Step 2: Change view mode
        new_view_mode = "list" if current_view_mode != "list" else "grid"
        view_data = {
            "view_mode": new_view_mode,
            "show_thumbnails": new_view_mode == "grid"
        }

        if new_view_mode == "grid":
            view_data["grid_columns"] = 4

        change_response = client.post("/api/preferences/ui/view-mode", json=view_data)

        if change_response.status_code == 200:
            # Step 3: Verify view mode changed
            updated_response = client.get("/api/preferences/ui")
            if updated_response.status_code == 200:
                updated_ui = updated_response.json()
                assert updated_ui.get("view_mode") == new_view_mode

            # Step 4: Verify anime list reflects view mode
            anime_response = client.get("/api/anime/search?limit=5")
            if anime_response.status_code == 200:
                # In real implementation, response format might differ based on view mode
                assert anime_response.status_code == 200

        assert ui_response.status_code in [200, 404]

    @patch('src.server.fastapi_app.get_current_user')
    def test_ui_density_change_workflow(self, mock_user, client):
        """Test changing UI density settings."""
        mock_user.return_value = {"user_id": "test_user"}

        # Test different density settings
        density_options = ["compact", "comfortable", "spacious"]

        for density in density_options:
            density_data = {
                "density": density,
                "compact_mode": density == "compact"
            }

            density_response = client.post("/api/preferences/ui/density", json=density_data)

            if density_response.status_code == 200:
                # Verify density was set
                ui_response = client.get("/api/preferences/ui")
                if ui_response.status_code == 200:
                    ui_data = ui_response.json()
                    assert ui_data.get("density") == density

            # All density changes should be valid
            assert density_response.status_code in [200, 404]

class TestKeyboardShortcutsWorkflow:
    """End-to-end tests for keyboard shortcuts workflows."""

    @patch('src.server.fastapi_app.get_current_user')
    def test_keyboard_shortcuts_customization(self, mock_user, client):
        """Test customizing keyboard shortcuts."""
        mock_user.return_value = {"user_id": "test_user"}

        # Step 1: Get current shortcuts
        shortcuts_response = client.get("/api/preferences/shortcuts")
        if shortcuts_response.status_code == 200:
            current_shortcuts = shortcuts_response.json().get("shortcuts", {})

        # Step 2: Update a shortcut
        shortcut_data = {
            "action": "search",
            "shortcut": "Ctrl+Shift+F",
            "description": "Global search"
        }

        update_response = client.put("/api/preferences/shortcuts", json=shortcut_data)

        if update_response.status_code == 200:
            # Step 3: Verify shortcut was updated
            updated_response = client.get("/api/preferences/shortcuts")
            if updated_response.status_code == 200:
                updated_shortcuts = updated_response.json().get("shortcuts", {})
                if "search" in updated_shortcuts:
                    assert updated_shortcuts["search"]["shortcut"] == "Ctrl+Shift+F"

        assert shortcuts_response.status_code in [200, 404]

    @patch('src.server.fastapi_app.get_current_user')
    def test_shortcuts_reset_workflow(self, mock_user, client):
        """Test resetting shortcuts to defaults."""
        mock_user.return_value = {"user_id": "test_user"}

        # Step 1: Modify some shortcuts
        custom_shortcut = {
            "action": "download",
            "shortcut": "Ctrl+Alt+D"
        }

        modify_response = client.put("/api/preferences/shortcuts", json=custom_shortcut)

        # Step 2: Reset to defaults
        reset_response = client.post("/api/preferences/shortcuts/reset")

        if reset_response.status_code == 200:
            # Step 3: Verify shortcuts were reset
            shortcuts_response = client.get("/api/preferences/shortcuts")
            if shortcuts_response.status_code == 200:
                shortcuts = shortcuts_response.json().get("shortcuts", {})
                # Should have default shortcuts
                assert len(shortcuts) > 0

        assert reset_response.status_code in [200, 404]

class TestPreferencesIntegrationWorkflow:
    """End-to-end tests for integrated preferences workflows."""

    @patch('src.server.fastapi_app.get_current_user')
    def test_complete_preferences_setup_workflow(self, mock_user, client):
        """Test complete new user preferences setup workflow."""
        mock_user.return_value = {"user_id": "test_user"}

        # Step 1: Set theme
        theme_data = {"theme_name": "dark"}
        theme_response = client.post("/api/preferences/themes/set", json=theme_data)

        # Step 2: Set language
        language_data = {"language_code": "en"}
        language_response = client.post("/api/preferences/languages/set", json=language_data)

        # Step 3: Configure accessibility
        accessibility_data = {
            "high_contrast": False,
            "large_text": False,
            "reduced_motion": True
        }
        accessibility_response = client.put("/api/preferences/accessibility", json=accessibility_data)

        # Step 4: Set UI preferences
        ui_data = {
            "view_mode": "grid",
            "grid_columns": 4,
            "show_thumbnails": True
        }
        ui_response = client.post("/api/preferences/ui/view-mode", json=ui_data)

        # Step 5: Verify all preferences were set
        all_prefs_response = client.get("/api/preferences")
        if all_prefs_response.status_code == 200:
            prefs_data = all_prefs_response.json()
            # Should contain all preference sections
            expected_sections = ["theme", "language", "accessibility", "ui_settings"]
            for section in expected_sections:
                if section in prefs_data:
                    assert prefs_data[section] is not None

        # All steps should complete successfully or return 404 (not implemented)
        responses = [theme_response, language_response, accessibility_response, ui_response]
        for response in responses:
            assert response.status_code in [200, 404]

    @patch('src.server.fastapi_app.get_current_user')
    def test_preferences_export_import_workflow(self, mock_user, client):
        """Test exporting and importing preferences."""
        mock_user.return_value = {"user_id": "test_user"}

        # Step 1: Set some preferences
        preferences_data = {
            "theme": {"name": "dark"},
            "language": {"code": "de"},
            "ui_settings": {"view_mode": "list", "density": "compact"}
        }

        bulk_response = client.put("/api/preferences", json=preferences_data)

        if bulk_response.status_code == 200:
            # Step 2: Export preferences
            export_response = client.get("/api/preferences/export")

            if export_response.status_code == 200:
                exported_data = export_response.json()

                # Step 3: Reset preferences
                reset_response = client.post("/api/preferences/reset")

                if reset_response.status_code == 200:
                    # Step 4: Import preferences back
                    import_response = client.post("/api/preferences/import", json=exported_data)

                    if import_response.status_code == 200:
                        # Step 5: Verify preferences were restored
                        final_response = client.get("/api/preferences")
                        if final_response.status_code == 200:
                            final_prefs = final_response.json()
                            # Should match original preferences
                            assert final_prefs is not None

        # Test that export/import endpoints exist
        export_test_response = client.get("/api/preferences/export")
        assert export_test_response.status_code in [200, 404]
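
# A hedged sketch of the export/import round trip the workflow above
# exercises (hypothetical handlers; only the URLs and payload shapes are
# taken from the test itself):
#
#     @app.get("/api/preferences/export")
#     def export_preferences():
#         return preferences_store  # full preferences document as JSON
#
#     @app.post("/api/preferences/import")
#     def import_preferences(prefs: dict):
#         preferences_store.clear()
#         preferences_store.update(prefs)
#         return {"success": True}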

class TestPreferencesPerformance:
    """Performance tests for preferences workflows."""

    @patch('src.server.fastapi_app.get_current_user')
    def test_preferences_response_time(self, mock_user, client):
        """Test that preference changes respond quickly."""
        mock_user.return_value = {"user_id": "test_user"}

        start_time = time.time()

        # Quick preference change
        theme_data = {"theme_name": "light"}
        response = client.post("/api/preferences/themes/set", json=theme_data)

        response_time = time.time() - start_time

        # Should respond quickly (< 2 seconds)
        assert response_time < 2.0
        assert response.status_code in [200, 404]
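
    # Wall-clock thresholds like the 2-second bound above can be flaky on a
    # loaded CI machine. A hedged alternative sketch using a monotonic clock,
    # the usual choice for measuring elapsed time:
    #
    #     start = time.perf_counter()
    #     response = client.post("/api/preferences/themes/set", json=theme_data)
    #     elapsed = time.perf_counter() - start
    #     assert elapsed < 2.0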

    @patch('src.server.fastapi_app.get_current_user')
    def test_bulk_preferences_update_performance(self, mock_user, client):
        """Test performance of bulk preferences update."""
        mock_user.return_value = {"user_id": "test_user"}

        start_time = time.time()

        # Large preferences update
        bulk_data = {
            "theme": {"name": "dark", "custom_colors": {"primary": "#007acc"}},
            "language": {"code": "en"},
            "accessibility": {
                "high_contrast": True,
                "large_text": True,
                "reduced_motion": False,
                "font_size_multiplier": 1.2
            },
            "ui_settings": {
                "view_mode": "grid",
                "grid_columns": 6,
                "density": "comfortable",
                "show_thumbnails": True
            },
            "shortcuts": {
                "search": {"shortcut": "Ctrl+K"},
                "download": {"shortcut": "Ctrl+D"}
            }
        }

        response = client.put("/api/preferences", json=bulk_data)

        response_time = time.time() - start_time

        # Should handle bulk update efficiently (< 3 seconds)
        assert response_time < 3.0
        assert response.status_code in [200, 404]


if __name__ == "__main__":
    pytest.main([__file__, "-v"])
@ -1,402 +0,0 @@
"""
Integration tests for anime and episode management API endpoints.

Tests anime search, anime details, episode retrieval with pagination,
valid/invalid IDs, and search filtering functionality.
"""

import os
import sys
from unittest.mock import patch

import pytest
from fastapi.testclient import TestClient

# Add source directory to path
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..'))

# Import after path setup
from src.server.fastapi_app import app  # noqa: E402


@pytest.fixture
def client():
    """Test client for anime API tests."""
    return TestClient(app)


@pytest.mark.integration
class TestAnimeSearchEndpoint:
    """Test anime search API endpoint."""

    def test_anime_search_requires_auth(self, client):
        """Test anime search endpoint requires authentication."""
        response = client.get("/api/anime/search?query=test")

        assert response.status_code == 403  # Should require authentication

    def test_anime_search_with_auth(self, client, mock_settings, valid_jwt_token):
        """Test anime search with valid authentication."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            response = client.get(
                "/api/anime/search?query=sample",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )

            assert response.status_code == 200
            data = response.json()

            assert isinstance(data, list)
            for anime in data:
                assert "id" in anime
                assert "title" in anime
                assert "description" in anime
                assert "episodes" in anime
                assert "status" in anime
                assert "sample" in anime["title"].lower()

    def test_anime_search_pagination(self, client, mock_settings, valid_jwt_token):
        """Test anime search with pagination parameters."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            # Test with limit and offset
            response = client.get(
                "/api/anime/search?query=anime&limit=5&offset=0",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )

            assert response.status_code == 200
            data = response.json()

            assert isinstance(data, list)
            assert len(data) <= 5  # Should respect limit

    def test_anime_search_invalid_params(self, client, mock_settings, valid_jwt_token):
        """Test anime search with invalid parameters."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            # Test missing query parameter
            response = client.get(
                "/api/anime/search",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )

            assert response.status_code == 422  # Validation error

            # Test invalid limit (too high)
            response = client.get(
                "/api/anime/search?query=test&limit=200",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )

            assert response.status_code == 422

            # Test negative offset
            response = client.get(
                "/api/anime/search?query=test&offset=-1",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )

            assert response.status_code == 422

    def test_anime_search_empty_query(self, client, mock_settings, valid_jwt_token):
        """Test anime search with empty query."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            response = client.get(
                "/api/anime/search?query=",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )

            # Empty query should be rejected due to min_length validation
            assert response.status_code == 422

    def test_anime_search_no_results(self, client, mock_settings, valid_jwt_token):
        """Test anime search with query that returns no results."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            response = client.get(
                "/api/anime/search?query=nonexistent_anime_title_xyz",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )

            assert response.status_code == 200
            data = response.json()

            assert isinstance(data, list)
            assert len(data) == 0  # Should return empty list
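
# The 422 outcomes above are consistent with a FastAPI signature along these
# lines (a sketch inferred from the validation tests; the actual route may
# differ):
#
#     from fastapi import Query
#
#     @app.get("/api/anime/search")
#     async def search_anime(
#         query: str = Query(..., min_length=1),
#         limit: int = Query(20, ge=1, le=100),
#         offset: int = Query(0, ge=0),
#     ):
#         ...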

@pytest.mark.integration
class TestAnimeDetailsEndpoint:
    """Test anime details API endpoint."""

    def test_get_anime_requires_auth(self, client):
        """Test anime details endpoint requires authentication."""
        response = client.get("/api/anime/test_anime_id")

        assert response.status_code == 403

    def test_get_anime_with_auth(self, client, mock_settings, valid_jwt_token):
        """Test anime details with valid authentication."""
        anime_id = "test_anime_123"

        with patch('src.server.fastapi_app.settings', mock_settings):
            response = client.get(
                f"/api/anime/{anime_id}",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )

            assert response.status_code == 200
            data = response.json()

            assert data["id"] == anime_id
            assert "title" in data
            assert "description" in data
            assert "episodes" in data
            assert "status" in data
            assert isinstance(data["episodes"], int)

    def test_get_anime_invalid_id(self, client, mock_settings, valid_jwt_token):
        """Test anime details with various ID formats."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            # Test with special characters in ID
            response = client.get(
                "/api/anime/anime@#$%",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )

            # Should still return 200 since it's just an ID string
            assert response.status_code == 200

    def test_get_anime_empty_id(self, client, mock_settings, valid_jwt_token):
        """Test anime details with empty ID."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            # Empty ID should result in 404 or 405
            response = client.get(
                "/api/anime/",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )

            assert response.status_code in [404, 405]  # Not found or method not allowed

@pytest.mark.integration
class TestEpisodeEndpoints:
    """Test episode-related API endpoints."""

    def test_get_anime_episodes_requires_auth(self, client):
        """Test anime episodes endpoint requires authentication."""
        response = client.get("/api/anime/test_anime/episodes")

        assert response.status_code == 403

    def test_get_anime_episodes_with_auth(self, client, mock_settings, valid_jwt_token):
        """Test anime episodes with valid authentication."""
        anime_id = "test_anime_456"

        with patch('src.server.fastapi_app.settings', mock_settings):
            response = client.get(
                f"/api/anime/{anime_id}/episodes",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )

            assert response.status_code == 200
            data = response.json()

            assert isinstance(data, list)

            for episode in data:
                assert "id" in episode
                assert "anime_id" in episode
                assert "episode_number" in episode
                assert "title" in episode
                assert "description" in episode
                assert "duration" in episode
                assert episode["anime_id"] == anime_id
                assert isinstance(episode["episode_number"], int)
                assert episode["episode_number"] > 0

    def test_get_episode_details_requires_auth(self, client):
        """Test episode details endpoint requires authentication."""
        response = client.get("/api/episodes/test_episode_id")

        assert response.status_code == 403

    def test_get_episode_details_with_auth(self, client, mock_settings, valid_jwt_token):
        """Test episode details with valid authentication."""
        episode_id = "test_episode_789"

        with patch('src.server.fastapi_app.settings', mock_settings):
            response = client.get(
                f"/api/episodes/{episode_id}",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )

            assert response.status_code == 200
            data = response.json()

            assert data["id"] == episode_id
            assert "anime_id" in data
            assert "episode_number" in data
            assert "title" in data
            assert "description" in data
            assert "duration" in data
            assert isinstance(data["episode_number"], int)
            assert isinstance(data["duration"], int)

    def test_episode_endpoints_with_invalid_auth(self, client):
        """Test episode endpoints with invalid authentication."""
        invalid_token = "invalid.token.here"

        endpoints = [
            "/api/anime/test/episodes",
            "/api/episodes/test_episode"
        ]

        for endpoint in endpoints:
            response = client.get(
                endpoint,
                headers={"Authorization": f"Bearer {invalid_token}"}
            )

            assert response.status_code == 401

@pytest.mark.integration
class TestAnimeAPIErrorHandling:
    """Test error handling in anime API endpoints."""

    def test_anime_endpoints_malformed_auth(self, client):
        """Test anime endpoints with malformed authorization headers."""
        malformed_headers = [
            {"Authorization": "Bearer"},  # Missing token
            {"Authorization": "Basic token"},  # Wrong type
            {"Authorization": "token"},  # Missing Bearer
        ]

        endpoints = [
            "/api/anime/search?query=test",
            "/api/anime/test_id",
            "/api/anime/test_id/episodes",
            "/api/episodes/test_id"
        ]

        for headers in malformed_headers:
            for endpoint in endpoints:
                response = client.get(endpoint, headers=headers)
                assert response.status_code in [401, 403]

    def test_anime_search_parameter_validation(self, client, mock_settings, valid_jwt_token):
        """Test anime search parameter validation."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            # Test various invalid parameter combinations
            invalid_params = [
                "query=test&limit=0",  # limit too low
                "query=test&limit=101",  # limit too high
                "query=test&offset=-5",  # negative offset
                "query=&limit=10",  # empty query
            ]

            for params in invalid_params:
                response = client.get(
                    f"/api/anime/search?{params}",
                    headers={"Authorization": f"Bearer {valid_jwt_token}"}
                )

                assert response.status_code == 422

    def test_anime_endpoints_content_type_handling(self, client, mock_settings, valid_jwt_token):
        """Test anime endpoints with different content types."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            # Test with different Accept headers
            accept_headers = [
                "application/json",
                "application/xml",
                "text/plain",
                "*/*"
            ]

            for accept_header in accept_headers:
                response = client.get(
                    "/api/anime/search?query=test",
                    headers={
                        "Authorization": f"Bearer {valid_jwt_token}",
                        "Accept": accept_header
                    }
                )

                # Should always return JSON regardless of Accept header
                assert response.status_code == 200
                assert response.headers.get("content-type", "").startswith("application/json")

@pytest.mark.integration
class TestAnimeAPIDataIntegrity:
    """Test data integrity and consistency in anime API responses."""

    def test_anime_search_response_structure(self, client, mock_settings, valid_jwt_token):
        """Test anime search response has consistent structure."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            response = client.get(
                "/api/anime/search?query=anime",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )

            assert response.status_code == 200
            data = response.json()

            required_fields = ["id", "title", "description", "episodes", "status"]

            for anime in data:
                for field in required_fields:
                    assert field in anime, f"Missing field {field} in anime response"

                # Validate field types
                assert isinstance(anime["id"], str)
                assert isinstance(anime["title"], str)
                assert isinstance(anime["episodes"], int)
                assert isinstance(anime["status"], str)
                assert anime["episodes"] >= 0

    def test_episode_response_structure(self, client, mock_settings, valid_jwt_token):
        """Test episode response has consistent structure."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            response = client.get(
                "/api/anime/test_anime/episodes",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )

            assert response.status_code == 200
            data = response.json()

            required_fields = ["id", "anime_id", "episode_number", "title", "description", "duration"]

            for episode in data:
                for field in required_fields:
                    assert field in episode, f"Missing field {field} in episode response"

                # Validate field types and ranges
                assert isinstance(episode["id"], str)
                assert isinstance(episode["anime_id"], str)
                assert isinstance(episode["episode_number"], int)
                assert isinstance(episode["title"], str)
                assert isinstance(episode["duration"], int)
                assert episode["episode_number"] > 0
                assert episode["duration"] > 0

    def test_episode_numbering_consistency(self, client, mock_settings, valid_jwt_token):
        """Test episode numbering is consistent and sequential."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            response = client.get(
                "/api/anime/test_anime/episodes",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )

            assert response.status_code == 200
            episodes = response.json()

            if len(episodes) > 1:
                # Check that episode numbers are sequential
                episode_numbers = [ep["episode_number"] for ep in episodes]
                episode_numbers.sort()

                for i in range(len(episode_numbers) - 1):
                    assert episode_numbers[i + 1] == episode_numbers[i] + 1, \
                        "Episode numbers should be sequential"
@ -1,314 +0,0 @@
"""
Integration tests for authentication API endpoints.

Tests POST /auth/login, GET /auth/verify, POST /auth/logout endpoints
with valid/invalid credentials and tokens.
"""

import os
import sys
from unittest.mock import Mock, patch

import pytest
from fastapi.testclient import TestClient

# Add source directory to path
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..'))

# Import after path setup
from src.server.fastapi_app import app  # noqa: E402


@pytest.fixture
def client():
    """Test client for FastAPI app."""
    return TestClient(app)


@pytest.fixture
def mock_auth_settings():
    """Mock settings for authentication tests."""
    settings = Mock()
    settings.jwt_secret_key = "test-secret-key"
    settings.password_salt = "test-salt"
    settings.master_password = "test_password"
    settings.master_password_hash = None
    settings.token_expiry_hours = 1
    return settings
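
# A hedged sketch of the token lifecycle these tests exercise, written with
# PyJWT (an assumption; the application may use a different JWT library):
#
#     import datetime
#     import jwt
#
#     def issue_token(secret: str, hours: int) -> str:
#         exp = datetime.datetime.now(datetime.timezone.utc) + datetime.timedelta(hours=hours)
#         return jwt.encode({"sub": "test_user", "exp": exp}, secret, algorithm="HS256")
#
#     # jwt.decode(token, secret, algorithms=["HS256"]) raises
#     # jwt.ExpiredSignatureError once `exp` has passed, which is the
#     # behavior the expired-token cases below depend on.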

@pytest.mark.integration
class TestAuthLogin:
    """Test authentication login endpoint."""

    def test_login_valid_credentials(self, client, mock_auth_settings):
        """Test login with valid credentials."""
        with patch('src.server.fastapi_app.settings', mock_auth_settings):
            response = client.post(
                "/auth/login",
                json={"password": "test_password"}
            )

            assert response.status_code == 200
            data = response.json()

            assert data["success"] is True
            assert "token" in data
            assert "expires_at" in data
            assert data["message"] == "Login successful"

    def test_login_invalid_credentials(self, client, mock_auth_settings):
        """Test login with invalid credentials."""
        with patch('src.server.fastapi_app.settings', mock_auth_settings):
            response = client.post(
                "/auth/login",
                json={"password": "wrong_password"}
            )

            assert response.status_code == 401
            data = response.json()

            assert data["success"] is False
            assert "token" not in data
            assert "Invalid password" in data["message"]

    def test_login_missing_password(self, client):
        """Test login with missing password field."""
        response = client.post(
            "/auth/login",
            json={}
        )

        assert response.status_code == 422  # Validation error

    def test_login_empty_password(self, client, mock_auth_settings):
        """Test login with empty password."""
        with patch('src.server.fastapi_app.settings', mock_auth_settings):
            response = client.post(
                "/auth/login",
                json={"password": ""}
            )

            assert response.status_code == 422  # Validation error (min_length=1)

    def test_login_invalid_json(self, client):
        """Test login with invalid JSON payload."""
        response = client.post(
            "/auth/login",
            data="invalid json",
            headers={"Content-Type": "application/json"}
        )

        assert response.status_code == 422

    def test_login_wrong_content_type(self, client):
        """Test login with wrong content type."""
        response = client.post(
            "/auth/login",
            data="password=test_password"
        )

        assert response.status_code == 422

@pytest.mark.integration
class TestAuthVerify:
    """Test authentication token verification endpoint."""

    def test_verify_valid_token(self, client, mock_auth_settings, valid_jwt_token):
        """Test token verification with valid token."""
        with patch('src.server.fastapi_app.settings', mock_auth_settings):
            response = client.get(
                "/auth/verify",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )

            assert response.status_code == 200
            data = response.json()

            assert data["valid"] is True
            assert data["user"] == "test_user"
            assert "expires_at" in data

    def test_verify_expired_token(self, client, mock_auth_settings, expired_jwt_token):
        """Test token verification with expired token."""
        with patch('src.server.fastapi_app.settings', mock_auth_settings):
            response = client.get(
                "/auth/verify",
                headers={"Authorization": f"Bearer {expired_jwt_token}"}
            )

            assert response.status_code == 401
            data = response.json()

            assert data["valid"] is False
            assert "expired" in data["message"].lower()

    def test_verify_invalid_token(self, client, mock_auth_settings):
        """Test token verification with invalid token."""
        with patch('src.server.fastapi_app.settings', mock_auth_settings):
            response = client.get(
                "/auth/verify",
                headers={"Authorization": "Bearer invalid.token.here"}
            )

            assert response.status_code == 401
            data = response.json()

            assert data["valid"] is False

    def test_verify_missing_token(self, client):
        """Test token verification without token."""
        response = client.get("/auth/verify")

        assert response.status_code == 403  # Forbidden - no credentials

    def test_verify_malformed_header(self, client):
        """Test token verification with malformed authorization header."""
        response = client.get(
            "/auth/verify",
            headers={"Authorization": "InvalidFormat token"}
        )

        assert response.status_code == 403

    def test_verify_empty_token(self, client):
        """Test token verification with empty token."""
        response = client.get(
            "/auth/verify",
            headers={"Authorization": "Bearer "}
        )

        assert response.status_code == 401

@pytest.mark.integration
class TestAuthLogout:
    """Test authentication logout endpoint."""

    def test_logout_valid_token(self, client, mock_auth_settings, valid_jwt_token):
        """Test logout with valid token."""
        with patch('src.server.fastapi_app.settings', mock_auth_settings):
            response = client.post(
                "/auth/logout",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )

            assert response.status_code == 200
            data = response.json()

            assert data["success"] is True
            assert "logged out" in data["message"].lower()

    def test_logout_invalid_token(self, client, mock_auth_settings):
        """Test logout with invalid token."""
        with patch('src.server.fastapi_app.settings', mock_auth_settings):
            response = client.post(
                "/auth/logout",
                headers={"Authorization": "Bearer invalid.token"}
            )

            assert response.status_code == 401

    def test_logout_missing_token(self, client):
        """Test logout without token."""
        response = client.post("/auth/logout")

        assert response.status_code == 403

    def test_logout_expired_token(self, client, mock_auth_settings, expired_jwt_token):
        """Test logout with expired token."""
        with patch('src.server.fastapi_app.settings', mock_auth_settings):
            response = client.post(
                "/auth/logout",
                headers={"Authorization": f"Bearer {expired_jwt_token}"}
            )

            assert response.status_code == 401

@pytest.mark.integration
class TestAuthFlow:
    """Test complete authentication flow."""

    def test_complete_login_verify_logout_flow(self, client, mock_auth_settings):
        """Test complete authentication flow: login -> verify -> logout."""
        with patch('src.server.fastapi_app.settings', mock_auth_settings):
            # Step 1: Login
            login_response = client.post(
                "/auth/login",
                json={"password": "test_password"}
            )

            assert login_response.status_code == 200
            login_data = login_response.json()
            token = login_data["token"]

            # Step 2: Verify token
            verify_response = client.get(
                "/auth/verify",
                headers={"Authorization": f"Bearer {token}"}
            )

            assert verify_response.status_code == 200
            verify_data = verify_response.json()
            assert verify_data["valid"] is True

            # Step 3: Logout
            logout_response = client.post(
                "/auth/logout",
                headers={"Authorization": f"Bearer {token}"}
            )

            assert logout_response.status_code == 200
            logout_data = logout_response.json()
            assert logout_data["success"] is True

    def test_multiple_login_attempts(self, client, mock_auth_settings):
        """Test multiple login attempts with rate limiting consideration."""
        with patch('src.server.fastapi_app.settings', mock_auth_settings):
            # Multiple successful logins should work
            for _ in range(3):
                response = client.post(
                    "/auth/login",
                    json={"password": "test_password"}
                )
                assert response.status_code == 200

            # Failed login attempts
            for _ in range(3):
                response = client.post(
                    "/auth/login",
                    json={"password": "wrong_password"}
                )
                assert response.status_code == 401

    def test_concurrent_sessions(self, client, mock_auth_settings):
        """Test that multiple valid tokens can exist simultaneously."""
        with patch('src.server.fastapi_app.settings', mock_auth_settings):
            # Get first token
            response1 = client.post(
                "/auth/login",
                json={"password": "test_password"}
            )
            token1 = response1.json()["token"]

            # Get second token
            response2 = client.post(
                "/auth/login",
                json={"password": "test_password"}
            )
            token2 = response2.json()["token"]

            # Both tokens should be valid
            verify1 = client.get(
                "/auth/verify",
                headers={"Authorization": f"Bearer {token1}"}
            )
            verify2 = client.get(
                "/auth/verify",
                headers={"Authorization": f"Bearer {token2}"}
            )

            assert verify1.status_code == 200
            assert verify2.status_code == 200
@ -1,277 +0,0 @@
"""
Integration tests for bulk operations API endpoints.

This module tests the bulk operation endpoints for download, update, organize, delete, and export.
Tests include authentication, validation, and error handling.
"""

import json
from unittest.mock import Mock, patch

import pytest
from fastapi.testclient import TestClient

from src.server.fastapi_app import app


@pytest.fixture
def client():
    """Create a test client for the FastAPI application."""
    return TestClient(app)


@pytest.fixture
def auth_headers(client):
    """Provide authentication headers for protected endpoints."""
    # Login to get token
    login_data = {"password": "testpassword"}

    with patch('src.server.fastapi_app.settings.master_password_hash') as mock_hash:
        mock_hash.return_value = "5e884898da28047151d0e56f8dc6292773603d0d6aabbdd62a11ef721d1542d8"  # SHA-256 of 'password'
        response = client.post("/auth/login", json=login_data)

    if response.status_code == 200:
        token = response.json()["access_token"]
        return {"Authorization": f"Bearer {token}"}
    return {}


class TestBulkDownloadEndpoint:
    """Test cases for /api/bulk/download endpoint."""

    def test_bulk_download_requires_auth(self, client):
        """Test that bulk download requires authentication."""
        response = client.post("/api/bulk/download", json={"anime_ids": ["1", "2"]})
        assert response.status_code == 401

    @patch('src.server.fastapi_app.get_current_user')
    def test_bulk_download_valid_request(self, mock_user, client):
        """Test bulk download with valid request."""
        mock_user.return_value = {"user_id": "test_user"}

        download_data = {
            "anime_ids": ["anime1", "anime2"],
            "quality": "1080p",
            "format": "mp4"
        }

        with patch('src.server.fastapi_app.bulk_download_service') as mock_service:
            mock_service.start_bulk_download.return_value = {
                "task_id": "bulk_task_123",
                "status": "started",
                "anime_count": 2
            }

            response = client.post("/api/bulk/download", json=download_data)

            # Note: This test assumes the endpoint will be implemented
            # Currently returns 404 since endpoint doesn't exist
            assert response.status_code in [200, 404]

    def test_bulk_download_invalid_data(self, client, auth_headers):
        """Test bulk download with invalid data."""
        invalid_data = {"anime_ids": []}  # Empty list

        response = client.post("/api/bulk/download", json=invalid_data, headers=auth_headers)
        # Expected 404 since endpoint not implemented yet
        assert response.status_code in [400, 404, 422]

    def test_bulk_download_missing_anime_ids(self, client, auth_headers):
        """Test bulk download without anime_ids field."""
        invalid_data = {"quality": "1080p"}

        response = client.post("/api/bulk/download", json=invalid_data, headers=auth_headers)
        assert response.status_code in [400, 404, 422]
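
# A hedged sketch of the request model the validation cases above imply
# (hypothetical, Pydantic v2 style; the endpoints are not implemented yet,
# hence the 404s the tests tolerate):
#
#     from pydantic import BaseModel, Field
#
#     class BulkDownloadRequest(BaseModel):
#         anime_ids: list[str] = Field(..., min_length=1)  # reject empty lists
#         quality: str = "1080p"
#         format: str = "mp4"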

class TestBulkUpdateEndpoint:
    """Test cases for /api/bulk/update endpoint."""

    def test_bulk_update_requires_auth(self, client):
        """Test that bulk update requires authentication."""
        response = client.post("/api/bulk/update", json={"anime_ids": ["1", "2"]})
        assert response.status_code == 401

    @patch('src.server.fastapi_app.get_current_user')
    def test_bulk_update_metadata(self, mock_user, client):
        """Test bulk metadata update."""
        mock_user.return_value = {"user_id": "test_user"}

        update_data = {
            "anime_ids": ["anime1", "anime2"],
            "operation": "update_metadata"
        }

        response = client.post("/api/bulk/update", json=update_data)
        # Expected 404 since endpoint not implemented yet
        assert response.status_code in [200, 404]

    def test_bulk_update_invalid_operation(self, client, auth_headers):
        """Test bulk update with invalid operation."""
        invalid_data = {
            "anime_ids": ["anime1"],
            "operation": "invalid_operation"
        }

        response = client.post("/api/bulk/update", json=invalid_data, headers=auth_headers)
        assert response.status_code in [400, 404, 422]


class TestBulkOrganizeEndpoint:
    """Test cases for /api/bulk/organize endpoint."""

    def test_bulk_organize_requires_auth(self, client):
        """Test that bulk organize requires authentication."""
        response = client.post("/api/bulk/organize", json={"anime_ids": ["1", "2"]})
        assert response.status_code == 401

    @patch('src.server.fastapi_app.get_current_user')
    def test_bulk_organize_by_genre(self, mock_user, client):
        """Test bulk organize by genre."""
        mock_user.return_value = {"user_id": "test_user"}

        organize_data = {
            "anime_ids": ["anime1", "anime2"],
            "organize_by": "genre",
            "create_subdirectories": True
        }

        response = client.post("/api/bulk/organize", json=organize_data)
        # Expected 404 since endpoint not implemented yet
        assert response.status_code in [200, 404]

    def test_bulk_organize_by_year(self, client, auth_headers):
        """Test bulk organize by year."""
        organize_data = {
            "anime_ids": ["anime1", "anime2"],
            "organize_by": "year",
            "create_subdirectories": False
        }

        response = client.post("/api/bulk/organize", json=organize_data, headers=auth_headers)
        assert response.status_code in [200, 404]
class TestBulkDeleteEndpoint:
|
|
||||||
"""Test cases for /api/bulk/delete endpoint."""
|
|
||||||
|
|
||||||
def test_bulk_delete_requires_auth(self, client):
|
|
||||||
"""Test that bulk delete requires authentication."""
|
|
||||||
response = client.delete("/api/bulk/delete", json={"anime_ids": ["1", "2"]})
|
|
||||||
assert response.status_code == 401
|
|
||||||
|
|
||||||
@patch('src.server.fastapi_app.get_current_user')
|
|
||||||
def test_bulk_delete_with_confirmation(self, mock_user, client):
|
|
||||||
"""Test bulk delete with confirmation."""
|
|
||||||
mock_user.return_value = {"user_id": "test_user"}
|
|
||||||
|
|
||||||
delete_data = {
|
|
||||||
"anime_ids": ["anime1", "anime2"],
|
|
||||||
"confirm": True,
|
|
||||||
"delete_files": True
|
|
||||||
}
|
|
||||||
|
|
||||||
response = client.delete("/api/bulk/delete", json=delete_data)
|
|
||||||
# Expected 404 since endpoint not implemented yet
|
|
||||||
assert response.status_code in [200, 404]
|
|
||||||
|
|
||||||
def test_bulk_delete_without_confirmation(self, client, auth_headers):
|
|
||||||
"""Test bulk delete without confirmation should fail."""
|
|
||||||
delete_data = {
|
|
||||||
"anime_ids": ["anime1", "anime2"],
|
|
||||||
"confirm": False
|
|
||||||
}
|
|
||||||
|
|
||||||
response = client.delete("/api/bulk/delete", json=delete_data, headers=auth_headers)
|
|
||||||
assert response.status_code in [400, 404, 422]
|
|
||||||
|
|
||||||
|
|
||||||
class TestBulkExportEndpoint:
    """Test cases for /api/bulk/export endpoint."""

    def test_bulk_export_requires_auth(self, client):
        """Test that bulk export requires authentication."""
        response = client.post("/api/bulk/export", json={"anime_ids": ["1", "2"]})
        assert response.status_code == 401

    @patch('src.server.fastapi_app.get_current_user')
    def test_bulk_export_to_json(self, mock_user, client):
        """Test bulk export to JSON format."""
        mock_user.return_value = {"user_id": "test_user"}

        export_data = {
            "anime_ids": ["anime1", "anime2"],
            "format": "json",
            "include_metadata": True
        }

        response = client.post("/api/bulk/export", json=export_data)
        # Expected 404 since endpoint not implemented yet
        assert response.status_code in [200, 404]

    def test_bulk_export_to_csv(self, client, auth_headers):
        """Test bulk export to CSV format."""
        export_data = {
            "anime_ids": ["anime1", "anime2"],
            "format": "csv",
            "include_metadata": False
        }

        response = client.post("/api/bulk/export", json=export_data, headers=auth_headers)
        assert response.status_code in [200, 404]

    def test_bulk_export_invalid_format(self, client, auth_headers):
        """Test bulk export with invalid format."""
        export_data = {
            "anime_ids": ["anime1"],
            "format": "invalid_format"
        }

        response = client.post("/api/bulk/export", json=export_data, headers=auth_headers)
        assert response.status_code in [400, 404, 422]

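# A minimal sketch of the export serialization the format tests above probe:
# "json" and "csv" succeed, anything else is rejected before any work is
# done. The helper name is an assumption; only the accepted formats come
# from the tests.
import csv
import io
import json


def serialize_export(records: list, fmt: str) -> str:
    if fmt == "json":
        return json.dumps(records)
    if fmt == "csv":
        fieldnames = sorted({key for record in records for key in record})
        buffer = io.StringIO()
        writer = csv.DictWriter(buffer, fieldnames=fieldnames)
        writer.writeheader()
        writer.writerows(records)
        return buffer.getvalue()
    # The API layer would map this to a 400/422 response.
    raise ValueError(f"unsupported export format: {fmt!r}")
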
class TestBulkOperationsEdgeCases:
    """Test edge cases for bulk operations."""

    def test_empty_anime_ids_list(self, client, auth_headers):
        """Test bulk operations with empty anime_ids list."""
        empty_data = {"anime_ids": []}

        # /api/bulk/delete is covered separately above, since DELETE
        # requests need client.request() to carry a JSON body.
        endpoints = [
            "/api/bulk/download",
            "/api/bulk/update",
            "/api/bulk/organize",
            "/api/bulk/export"
        ]

        for endpoint in endpoints:
            response = client.post(endpoint, json=empty_data, headers=auth_headers)
            assert response.status_code in [400, 404, 422]

    def test_large_anime_ids_list(self, client, auth_headers):
        """Test bulk operations with large anime_ids list."""
        large_data = {"anime_ids": [f"anime_{i}" for i in range(1000)]}

        response = client.post("/api/bulk/download", json=large_data, headers=auth_headers)
        # Endpoint should handle large requests or return an appropriate error
        assert response.status_code in [200, 400, 404, 413]

    @patch('src.server.fastapi_app.get_current_user')
    def test_bulk_operations_concurrent_requests(self, mock_user, client):
        """Test multiple concurrent bulk operations."""
        mock_user.return_value = {"user_id": "test_user"}

        # This test would need an actual implementation to exercise
        # concurrency; for now, just verify the endpoint exists.
        data = {"anime_ids": ["anime1"]}

        response = client.post("/api/bulk/download", json=data)
        assert response.status_code in [200, 404]


if __name__ == "__main__":
    pytest.main([__file__, "-v"])
@ -1,350 +0,0 @@
"""
|
|
||||||
Integration tests for database and storage management API endpoints.
|
|
||||||
|
|
||||||
Tests database info, maintenance operations (vacuum, analyze, integrity-check,
|
|
||||||
reindex, optimize, stats), and storage management functionality.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
from unittest.mock import patch
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
from fastapi.testclient import TestClient
|
|
||||||
|
|
||||||
# Add source directory to path
|
|
||||||
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..'))
|
|
||||||
|
|
||||||
# Import after path setup
|
|
||||||
from src.server.fastapi_app import app # noqa: E402
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
def client():
|
|
||||||
"""Test client for database API tests."""
|
|
||||||
return TestClient(app)
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.integration
class TestDatabaseInfoEndpoints:
    """Test database information endpoints."""

    def test_database_health_requires_auth(self, client):
        """Test database health endpoint requires authentication."""
        response = client.get("/api/system/database/health")

        assert response.status_code == 403

    def test_database_health_with_auth(self, client, mock_settings, valid_jwt_token):
        """Test database health with valid authentication."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            response = client.get(
                "/api/system/database/health",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )

            assert response.status_code == 200
            data = response.json()

            assert "status" in data
            assert "connection_pool" in data
            assert "response_time_ms" in data
            assert "last_check" in data

            assert data["status"] == "healthy"
            assert isinstance(data["response_time_ms"], (int, float))
            assert data["response_time_ms"] > 0

    def test_database_info_endpoint(self, client, mock_settings, valid_jwt_token):
        """Test /api/database/info endpoint (to be implemented)."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            response = client.get(
                "/api/database/info",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )

            # Endpoint may not be implemented yet
            assert response.status_code in [200, 404]

            if response.status_code == 200:
                data = response.json()
                expected_fields = ["database_type", "version", "size", "tables"]
                for field in expected_fields:
                    if field in data:
                        assert isinstance(data[field], (str, int, float, dict, list))

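# A minimal sketch of a payload satisfying the health assertions above. Only
# the field names come from the tests; the timing approach and pool numbers
# are assumptions.
import time
from datetime import datetime, timezone


def database_health_payload(run_probe_query) -> dict:
    start = time.perf_counter()
    run_probe_query()  # e.g. a "SELECT 1" issued through the pool (assumed)
    elapsed_ms = (time.perf_counter() - start) * 1000
    return {
        "status": "healthy",
        "connection_pool": {"size": 10, "in_use": 0},  # illustrative values
        "response_time_ms": max(elapsed_ms, 0.001),  # keeps the "> 0" assertion true
        "last_check": datetime.now(timezone.utc).isoformat(),
    }
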
@pytest.mark.integration
class TestDatabaseMaintenanceEndpoints:
    """Test database maintenance operation endpoints."""

    def test_database_vacuum_endpoint(self, client, mock_settings, valid_jwt_token):
        """Test /maintenance/database/vacuum endpoint."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            response = client.post(
                "/maintenance/database/vacuum",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )

            # Endpoint may not be implemented yet
            assert response.status_code in [200, 404]

            if response.status_code == 200:
                data = response.json()
                assert "success" in data or "status" in data

    def test_database_analyze_endpoint(self, client, mock_settings, valid_jwt_token):
        """Test /maintenance/database/analyze endpoint."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            response = client.post(
                "/maintenance/database/analyze",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )

            assert response.status_code in [200, 404]

            if response.status_code == 200:
                data = response.json()
                expected_fields = ["tables_analyzed", "statistics_updated", "duration_ms"]
                # Check if any expected fields are present
                assert any(field in data for field in expected_fields)

    def test_database_integrity_check_endpoint(self, client, mock_settings, valid_jwt_token):
        """Test /maintenance/database/integrity-check endpoint."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            response = client.post(
                "/maintenance/database/integrity-check",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )

            assert response.status_code in [200, 404]

            if response.status_code == 200:
                data = response.json()
                assert "integrity_status" in data or "status" in data
                if "integrity_status" in data:
                    assert data["integrity_status"] in ["ok", "error", "warning"]

    def test_database_reindex_endpoint(self, client, mock_settings, valid_jwt_token):
        """Test /maintenance/database/reindex endpoint."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            response = client.post(
                "/maintenance/database/reindex",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )

            assert response.status_code in [200, 404]

            if response.status_code == 200:
                data = response.json()
                expected_fields = ["indexes_rebuilt", "duration_ms", "status"]
                assert any(field in data for field in expected_fields)

    def test_database_optimize_endpoint(self, client, mock_settings, valid_jwt_token):
        """Test /maintenance/database/optimize endpoint."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            response = client.post(
                "/maintenance/database/optimize",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )

            assert response.status_code in [200, 404]

            if response.status_code == 200:
                data = response.json()
                assert "optimization_status" in data or "status" in data

    def test_database_stats_endpoint(self, client, mock_settings, valid_jwt_token):
        """Test /maintenance/database/stats endpoint."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            response = client.get(
                "/maintenance/database/stats",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )

            assert response.status_code in [200, 404]

            if response.status_code == 200:
                data = response.json()
                expected_stats = ["table_count", "record_count", "database_size", "index_size"]
                # At least some stats should be present
                assert any(stat in data for stat in expected_stats)

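# A minimal sketch of what these maintenance endpoints could run against the
# configured SQLite database. The statements themselves (VACUUM, ANALYZE,
# REINDEX, PRAGMA integrity_check/optimize) are standard SQLite; the wrapper
# is an assumption about the eventual implementation.
import sqlite3
import time


def run_maintenance(db_path: str, operation: str) -> dict:
    statements = {
        "vacuum": "VACUUM",
        "analyze": "ANALYZE",
        "reindex": "REINDEX",
        "optimize": "PRAGMA optimize",
        "integrity-check": "PRAGMA integrity_check",
    }
    start = time.perf_counter()
    connection = sqlite3.connect(db_path)
    try:
        rows = connection.execute(statements[operation]).fetchall()
    finally:
        connection.close()
    duration_ms = (time.perf_counter() - start) * 1000
    if operation == "integrity-check":
        # PRAGMA integrity_check returns a single "ok" row when healthy.
        status = "ok" if rows == [("ok",)] else "error"
        return {"integrity_status": status, "duration_ms": duration_ms}
    return {"status": "completed", "duration_ms": duration_ms}
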
@pytest.mark.integration
class TestDatabaseEndpointAuthentication:
    """Test authentication requirements for database endpoints."""

    def test_database_endpoints_require_auth(self, client):
        """Test that database endpoints require authentication."""
        database_endpoints = [
            "/api/database/info",
            "/api/system/database/health",
            "/maintenance/database/vacuum",
            "/maintenance/database/analyze",
            "/maintenance/database/integrity-check",
            "/maintenance/database/reindex",
            "/maintenance/database/optimize",
            "/maintenance/database/stats"
        ]

        for endpoint in database_endpoints:
            # Try GET for info endpoints
            if "info" in endpoint or "health" in endpoint or "stats" in endpoint:
                response = client.get(endpoint)
            else:
                # Try POST for maintenance endpoints
                response = client.post(endpoint)

            # Should require authentication (403) or not be found (404)
            assert response.status_code in [403, 404]

    def test_database_endpoints_with_invalid_auth(self, client):
        """Test database endpoints with invalid authentication."""
        invalid_token = "invalid.token.here"

        database_endpoints = [
            ("/api/system/database/health", "GET"),
            ("/maintenance/database/vacuum", "POST"),
            ("/maintenance/database/analyze", "POST")
        ]

        for endpoint, method in database_endpoints:
            if method == "GET":
                response = client.get(
                    endpoint,
                    headers={"Authorization": f"Bearer {invalid_token}"}
                )
            else:
                response = client.post(
                    endpoint,
                    headers={"Authorization": f"Bearer {invalid_token}"}
                )

            # Should be unauthorized (401) or not found (404)
            assert response.status_code in [401, 404]

@pytest.mark.integration
class TestDatabaseMaintenanceOperations:
    """Test database maintenance operation workflows."""

    def test_maintenance_operation_sequence(self, client, mock_settings, valid_jwt_token):
        """Test sequence of maintenance operations."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            # Test sequence: analyze -> vacuum -> reindex -> optimize
            maintenance_sequence = [
                "/maintenance/database/analyze",
                "/maintenance/database/vacuum",
                "/maintenance/database/reindex",
                "/maintenance/database/optimize"
            ]

            for endpoint in maintenance_sequence:
                response = client.post(
                    endpoint,
                    headers={"Authorization": f"Bearer {valid_jwt_token}"}
                )

                # Should either work (200) or not be implemented (404)
                assert response.status_code in [200, 404]

                if response.status_code == 200:
                    data = response.json()
                    # Should return some kind of status or success indication
                    assert isinstance(data, dict)

    def test_maintenance_operation_parameters(self, client, mock_settings, valid_jwt_token):
        """Test maintenance operations with parameters."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            # Test vacuum with parameters
            response = client.post(
                "/maintenance/database/vacuum?full=true",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )

            assert response.status_code in [200, 404, 422]

            # Test analyze with table parameter
            response = client.post(
                "/maintenance/database/analyze?tables=anime,episodes",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )

            assert response.status_code in [200, 404, 422]

    def test_concurrent_maintenance_operations(self, client, mock_settings, valid_jwt_token):
        """Test behavior of concurrent maintenance operations."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            # Simulate starting multiple operations;
            # a real implementation should handle this properly.

            # Start first operation
            response1 = client.post(
                "/maintenance/database/vacuum",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )

            # Try to start second operation while first might be running
            response2 = client.post(
                "/maintenance/database/analyze",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )

            # Both should either work or not be implemented
            assert response1.status_code in [200, 404, 409]  # 409 for conflict
            assert response2.status_code in [200, 404, 409]

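# A minimal sketch of one way to serialize maintenance work so a second
# request gets the 409 the concurrency test above tolerates. A per-process
# lock is an assumption, not the project's confirmed design.
import threading

from fastapi import HTTPException

_maintenance_lock = threading.Lock()


def run_exclusive(operation):
    # Non-blocking acquire: refuse the second operation instead of queueing it.
    if not _maintenance_lock.acquire(blocking=False):
        raise HTTPException(status_code=409, detail="maintenance already running")
    try:
        return operation()
    finally:
        _maintenance_lock.release()
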
@pytest.mark.integration
class TestDatabaseErrorHandling:
    """Test error handling in database operations."""

    def test_database_connection_errors(self, client, mock_settings, valid_jwt_token):
        """Test handling of database connection errors."""
        # Mock database connection failure
        with patch('src.server.fastapi_app.settings', mock_settings):
            response = client.get(
                "/api/system/database/health",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )

            # Health check should still return a response even if DB is down
            assert response.status_code in [200, 503]  # 503 for service unavailable

            if response.status_code == 503:
                data = response.json()
                assert "error" in data or "status" in data

    def test_maintenance_operation_errors(self, client, mock_settings, valid_jwt_token):
        """Test error handling in maintenance operations."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            # Test with malformed requests
            malformed_requests = [
                ("/maintenance/database/vacuum", {"invalid": "data"}),
                ("/maintenance/database/analyze", {"tables": ""}),
            ]

            for endpoint, json_data in malformed_requests:
                response = client.post(
                    endpoint,
                    json=json_data,
                    headers={"Authorization": f"Bearer {valid_jwt_token}"}
                )

                # Should handle gracefully
                assert response.status_code in [200, 400, 404, 422]

    def test_database_timeout_handling(self, client, mock_settings, valid_jwt_token):
        """Test handling of database operation timeouts."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            # Test long-running operation (like full vacuum)
            response = client.post(
                "/maintenance/database/vacuum?full=true",
                headers={"Authorization": f"Bearer {valid_jwt_token}"},
                timeout=1  # Very short timeout to simulate a timeout
            )

            # Should either complete quickly or handle the timeout gracefully.
            # Note: this test depends on implementation details.
            assert response.status_code in [200, 404, 408, 504]  # 408/504 for timeout
@ -1,336 +0,0 @@
"""
|
|
||||||
Integration tests for diagnostics API endpoints.
|
|
||||||
|
|
||||||
This module tests the diagnostics endpoints for error reporting and system diagnostics.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import os
|
|
||||||
import tempfile
|
|
||||||
from unittest.mock import Mock, patch
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
from fastapi.testclient import TestClient
|
|
||||||
|
|
||||||
from src.server.fastapi_app import app
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
def client():
|
|
||||||
"""Create a test client for the FastAPI application."""
|
|
||||||
return TestClient(app)
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
def auth_headers(client):
    """Provide authentication headers for protected endpoints."""
    # Login with the password matching the patched hash below.
    login_data = {"password": "password"}

    # master_password_hash is a plain attribute, so patch it with new=
    # instead of configuring a Mock's return_value.
    password_hash = "5e884898da28047151d0e56f8dc6292773603d0d6aabbdd62a11ef721d1542d8"  # sha256('password')
    with patch('src.server.fastapi_app.settings.master_password_hash', new=password_hash):
        response = client.post("/auth/login", json=login_data)

    if response.status_code == 200:
        token = response.json()["access_token"]
        return {"Authorization": f"Bearer {token}"}
    return {}

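# The fixture above relies on the stored hash being sha256("password"). A
# minimal sketch of the verification that implies (the function name is an
# assumption, not the app's confirmed API):
import hashlib
import hmac


def verify_master_password(candidate: str, stored_hash: str) -> bool:
    digest = hashlib.sha256(candidate.encode("utf-8")).hexdigest()
    # compare_digest avoids leaking information through comparison timing.
    return hmac.compare_digest(digest, stored_hash)


# verify_master_password(
#     "password",
#     "5e884898da28047151d0e56f8dc6292773603d0d6aabbdd62a11ef721d1542d8",
# ) evaluates to True.
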
class TestDiagnosticsReportEndpoint:
    """Test cases for /diagnostics/report endpoint."""

    def test_diagnostics_report_requires_auth(self, client):
        """Test that diagnostics report requires authentication."""
        response = client.get("/diagnostics/report")
        assert response.status_code == 401

    @patch('src.server.fastapi_app.get_current_user')
    def test_get_diagnostics_report(self, mock_user, client):
        """Test getting diagnostics report."""
        mock_user.return_value = {"user_id": "test_user"}

        response = client.get("/diagnostics/report")
        # Expected 404 since endpoint not implemented yet
        assert response.status_code in [200, 404]

        if response.status_code == 200:
            data = response.json()
            expected_fields = [
                "system_info", "memory_usage", "disk_usage",
                "error_summary", "performance_metrics", "timestamp"
            ]
            for field in expected_fields:
                assert field in data

    @patch('src.server.fastapi_app.get_current_user')
    def test_get_diagnostics_report_with_filters(self, mock_user, client):
        """Test getting diagnostics report with time filters."""
        mock_user.return_value = {"user_id": "test_user"}

        # Test with time range
        response = client.get("/diagnostics/report?since=2023-01-01&until=2023-12-31")
        assert response.status_code in [200, 404]

        # Test with severity filter
        response = client.get("/diagnostics/report?severity=error")
        assert response.status_code in [200, 404]

    @patch('src.server.fastapi_app.get_current_user')
    def test_generate_diagnostics_report(self, mock_user, client):
        """Test generating new diagnostics report."""
        mock_user.return_value = {"user_id": "test_user"}

        report_options = {
            "include_logs": True,
            "include_system_info": True,
            "include_performance": True,
            "time_range_hours": 24
        }

        response = client.post("/diagnostics/report", json=report_options)
        # Expected 404 since endpoint not implemented yet
        assert response.status_code in [200, 404]

        if response.status_code == 200:
            data = response.json()
            assert "report_id" in data
            assert "status" in data

    def test_diagnostics_report_invalid_params(self, client, auth_headers):
        """Test diagnostics report with invalid parameters."""
        invalid_params = [
            "?since=invalid-date",
            "?severity=invalid-severity",
            "?time_range_hours=-1"
        ]

        for param in invalid_params:
            response = client.get(f"/diagnostics/report{param}", headers=auth_headers)
            assert response.status_code in [400, 404, 422]

class TestDiagnosticsErrorReporting:
    """Test cases for error reporting functionality."""

    @patch('src.server.fastapi_app.get_current_user')
    def test_get_error_statistics(self, mock_user, client):
        """Test getting error statistics."""
        mock_user.return_value = {"user_id": "test_user"}

        response = client.get("/diagnostics/errors/stats")
        # Expected 404 since endpoint not implemented yet
        assert response.status_code in [200, 404]

        if response.status_code == 200:
            data = response.json()
            expected_fields = [
                "total_errors", "errors_by_type", "errors_by_severity",
                "recent_errors", "error_trends"
            ]
            for field in expected_fields:
                assert field in data

    @patch('src.server.fastapi_app.get_current_user')
    def test_get_recent_errors(self, mock_user, client):
        """Test getting recent errors."""
        mock_user.return_value = {"user_id": "test_user"}

        response = client.get("/diagnostics/errors/recent")
        assert response.status_code in [200, 404]

        if response.status_code == 200:
            data = response.json()
            assert "errors" in data
            assert isinstance(data["errors"], list)

    @patch('src.server.fastapi_app.get_current_user')
    def test_clear_error_logs(self, mock_user, client):
        """Test clearing error logs."""
        mock_user.return_value = {"user_id": "test_user"}

        response = client.delete("/diagnostics/errors/clear")
        assert response.status_code in [200, 404]

        if response.status_code == 200:
            data = response.json()
            assert "cleared_count" in data

class TestDiagnosticsSystemHealth:
    """Test cases for system health diagnostics."""

    @patch('src.server.fastapi_app.get_current_user')
    def test_get_system_health_overview(self, mock_user, client):
        """Test getting system health overview."""
        mock_user.return_value = {"user_id": "test_user"}

        response = client.get("/diagnostics/system/health")
        assert response.status_code in [200, 404]

        if response.status_code == 200:
            data = response.json()
            expected_fields = [
                "overall_status", "cpu_usage", "memory_usage",
                "disk_usage", "network_status", "service_status"
            ]
            for field in expected_fields:
                assert field in data

    @patch('src.server.fastapi_app.get_current_user')
    def test_run_system_diagnostics(self, mock_user, client):
        """Test running system diagnostics."""
        mock_user.return_value = {"user_id": "test_user"}

        diagnostic_options = {
            "check_disk": True,
            "check_memory": True,
            "check_network": True,
            "check_database": True
        }

        response = client.post("/diagnostics/system/run", json=diagnostic_options)
        assert response.status_code in [200, 404]

        if response.status_code == 200:
            data = response.json()
            assert "diagnostic_id" in data
            assert "status" in data

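# A minimal sketch of how the health fields asserted above could be gathered
# with psutil. Whether this project actually depends on psutil is an
# assumption; the placeholder statuses are illustrative only.
import psutil


def system_health_snapshot() -> dict:
    return {
        "overall_status": "healthy",
        "cpu_usage": psutil.cpu_percent(interval=0.1),
        "memory_usage": psutil.virtual_memory().percent,
        "disk_usage": psutil.disk_usage("/").percent,
        "network_status": "up",  # placeholder; a real check would probe connectivity
        "service_status": {"api": "online"},  # placeholder service map
    }
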
class TestDiagnosticsLogManagement:
    """Test cases for log management diagnostics."""

    @patch('src.server.fastapi_app.get_current_user')
    def test_get_log_file_info(self, mock_user, client):
        """Test getting log file information."""
        mock_user.return_value = {"user_id": "test_user"}

        response = client.get("/diagnostics/logs/info")
        assert response.status_code in [200, 404]

        if response.status_code == 200:
            data = response.json()
            expected_fields = [
                "log_files", "total_size_bytes", "oldest_entry",
                "newest_entry", "rotation_status"
            ]
            for field in expected_fields:
                assert field in data

    @patch('src.server.fastapi_app.get_current_user')
    def test_get_log_entries(self, mock_user, client):
        """Test getting log entries."""
        mock_user.return_value = {"user_id": "test_user"}

        response = client.get("/diagnostics/logs/entries")
        assert response.status_code in [200, 404]

        # Test with filters
        response = client.get("/diagnostics/logs/entries?level=ERROR&limit=100")
        assert response.status_code in [200, 404]

    @patch('src.server.fastapi_app.get_current_user')
    def test_export_logs(self, mock_user, client):
        """Test exporting logs."""
        mock_user.return_value = {"user_id": "test_user"}

        export_options = {
            "format": "json",
            "include_levels": ["ERROR", "WARNING", "INFO"],
            "time_range_hours": 24
        }

        response = client.post("/diagnostics/logs/export", json=export_options)
        assert response.status_code in [200, 404]

    @patch('src.server.fastapi_app.get_current_user')
    def test_rotate_logs(self, mock_user, client):
        """Test log rotation."""
        mock_user.return_value = {"user_id": "test_user"}

        response = client.post("/diagnostics/logs/rotate")
        assert response.status_code in [200, 404]

        if response.status_code == 200:
            data = response.json()
            assert "rotated_files" in data
            assert "status" in data

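# A minimal sketch of the filtering that the ?level=ERROR&limit=100 query in
# the tests above implies. Reading the whole file is fine for a sketch; the
# log path and line format are assumptions.
from pathlib import Path
from typing import List, Optional


def recent_log_entries(log_path: str, level: Optional[str] = None,
                       limit: int = 100) -> List[str]:
    lines = Path(log_path).read_text(encoding="utf-8").splitlines()
    if level:
        # Assumes the level name appears as a standalone token in each line.
        lines = [line for line in lines if f" {level.upper()} " in line]
    return lines[-limit:]
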
class TestDiagnosticsIntegration:
    """Integration tests for diagnostics functionality."""

    @patch('src.server.fastapi_app.get_current_user')
    def test_diagnostics_workflow(self, mock_user, client):
        """Test typical diagnostics workflow."""
        mock_user.return_value = {"user_id": "test_user"}

        # 1. Get system health overview
        response = client.get("/diagnostics/system/health")
        assert response.status_code in [200, 404]

        # 2. Get error statistics
        response = client.get("/diagnostics/errors/stats")
        assert response.status_code in [200, 404]

        # 3. Generate full diagnostics report
        response = client.get("/diagnostics/report")
        assert response.status_code in [200, 404]

        # 4. Check log file status
        response = client.get("/diagnostics/logs/info")
        assert response.status_code in [200, 404]

    def test_diagnostics_error_handling(self, client, auth_headers):
        """Test error handling across diagnostics endpoints."""
        endpoints = [
            "/diagnostics/report",
            "/diagnostics/errors/stats",
            "/diagnostics/system/health",
            "/diagnostics/logs/info"
        ]

        for endpoint in endpoints:
            response = client.get(endpoint, headers=auth_headers)
            assert response.status_code in [200, 404]

    @patch('src.server.fastapi_app.get_current_user')
    def test_diagnostics_concurrent_requests(self, mock_user, client):
        """Test handling of concurrent diagnostics requests."""
        mock_user.return_value = {"user_id": "test_user"}

        # Multiple simultaneous requests should be handled gracefully
        response = client.get("/diagnostics/report")
        assert response.status_code in [200, 404]

class TestDiagnosticsEdgeCases:
    """Test edge cases for diagnostics functionality."""

    def test_diagnostics_with_missing_log_files(self, client, auth_headers):
        """Test diagnostics when log files are missing."""
        response = client.get("/diagnostics/logs/info", headers=auth_headers)
        # Should handle missing log files gracefully
        assert response.status_code in [200, 404, 500]

    def test_diagnostics_with_large_log_files(self, client, auth_headers):
        """Test diagnostics with very large log files."""
        # Test with limit parameter for large files
        response = client.get("/diagnostics/logs/entries?limit=10", headers=auth_headers)
        assert response.status_code in [200, 404]

    @patch('src.server.fastapi_app.get_current_user')
    def test_diagnostics_export_formats(self, mock_user, client):
        """Test different export formats for diagnostics."""
        mock_user.return_value = {"user_id": "test_user"}

        export_formats = ["json", "csv", "txt"]

        for format_type in export_formats:
            export_data = {"format": format_type}
            response = client.post("/diagnostics/logs/export", json=export_data)
            assert response.status_code in [200, 400, 404]


if __name__ == "__main__":
    pytest.main([__file__, "-v"])
@ -1,286 +0,0 @@
"""
|
|
||||||
Integration tests for health and system monitoring API endpoints.
|
|
||||||
|
|
||||||
Tests /health, /api/health/* endpoints including system metrics,
|
|
||||||
database health, dependencies, performance, and monitoring.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
from datetime import datetime
|
|
||||||
from unittest.mock import patch
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
from fastapi.testclient import TestClient
|
|
||||||
|
|
||||||
# Add source directory to path
|
|
||||||
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..'))
|
|
||||||
|
|
||||||
# Import after path setup
|
|
||||||
from src.server.fastapi_app import app # noqa: E402
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
def client():
|
|
||||||
"""Test client for health API tests."""
|
|
||||||
return TestClient(app)
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.integration
class TestBasicHealthEndpoints:
    """Test basic health check endpoints."""

    def test_health_endpoint_structure(self, client):
        """Test basic health endpoint returns correct structure."""
        response = client.get("/health")

        assert response.status_code == 200
        data = response.json()

        assert "status" in data
        assert "timestamp" in data
        assert "version" in data
        assert "services" in data

        assert data["status"] == "healthy"
        assert data["version"] == "1.0.0"
        assert isinstance(data["services"], dict)

    def test_health_endpoint_services(self, client):
        """Test health endpoint returns service status."""
        response = client.get("/health")

        assert response.status_code == 200
        data = response.json()

        services = data["services"]
        expected_services = ["authentication", "anime_service", "episode_service"]

        for service in expected_services:
            assert service in services
            assert services[service] == "online"

    def test_health_endpoint_timestamp_format(self, client):
        """Test health endpoint timestamp is valid."""
        response = client.get("/health")

        assert response.status_code == 200
        data = response.json()

        # Should be able to parse timestamp
        timestamp_str = data["timestamp"]
        parsed_timestamp = datetime.fromisoformat(timestamp_str.replace('Z', '+00:00'))
        assert isinstance(parsed_timestamp, datetime)

    def test_database_health_requires_auth(self, client):
        """Test database health endpoint requires authentication."""
        response = client.get("/api/system/database/health")

        assert response.status_code == 403  # Should require authentication

    def test_database_health_with_auth(self, client, mock_settings, valid_jwt_token):
        """Test database health endpoint with authentication."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            response = client.get(
                "/api/system/database/health",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )

            assert response.status_code == 200
            data = response.json()

            assert "status" in data
            assert "connection_pool" in data
            assert "response_time_ms" in data
            assert "last_check" in data

            assert data["status"] == "healthy"

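# A minimal sketch of a /health route satisfying every assertion in the class
# above. Field names and values come straight from the tests; the route body
# itself is an assumption about the implementation.
from datetime import timezone

from fastapi import FastAPI

sketch_app = FastAPI()


@sketch_app.get("/health")
async def health() -> dict:
    return {
        "status": "healthy",
        "timestamp": datetime.now(timezone.utc).isoformat(),
        "version": "1.0.0",
        "services": {
            "authentication": "online",
            "anime_service": "online",
            "episode_service": "online",
        },
    }
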
@pytest.mark.integration
class TestSystemHealthEndpoints:
    """Test system health monitoring endpoints (to be implemented)."""

    def test_api_health_endpoint(self, client, mock_settings, valid_jwt_token):
        """Test /api/health endpoint."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            # This endpoint might not exist yet, so we test expected behavior
            response = client.get(
                "/api/health",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )

            # If not implemented, should return 404
            # If implemented, should return 200 with health data
            assert response.status_code in [200, 404]

            if response.status_code == 200:
                data = response.json()
                assert "status" in data

    def test_system_health_endpoint(self, client, mock_settings, valid_jwt_token):
        """Test /api/health/system endpoint for CPU, memory, disk metrics."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            response = client.get(
                "/api/health/system",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )

            # Endpoint may not be implemented yet
            assert response.status_code in [200, 404]

            if response.status_code == 200:
                data = response.json()
                expected_metrics = ["cpu_usage", "memory_usage", "disk_usage"]
                for metric in expected_metrics:
                    assert metric in data

    def test_dependencies_health_endpoint(self, client, mock_settings, valid_jwt_token):
        """Test /api/health/dependencies endpoint."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            response = client.get(
                "/api/health/dependencies",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )

            assert response.status_code in [200, 404]

            if response.status_code == 200:
                data = response.json()
                assert isinstance(data, dict)

    def test_performance_health_endpoint(self, client, mock_settings, valid_jwt_token):
        """Test /api/health/performance endpoint."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            response = client.get(
                "/api/health/performance",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )

            assert response.status_code in [200, 404]

            if response.status_code == 200:
                data = response.json()
                performance_metrics = ["response_time", "throughput", "error_rate"]
                # At least some performance metrics should be present
                assert any(metric in data for metric in performance_metrics)

    def test_metrics_health_endpoint(self, client, mock_settings, valid_jwt_token):
        """Test /api/health/metrics endpoint."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            response = client.get(
                "/api/health/metrics",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )

            assert response.status_code in [200, 404]

            if response.status_code == 200:
                data = response.json()
                assert isinstance(data, (dict, list))

    def test_ready_health_endpoint(self, client, mock_settings, valid_jwt_token):
        """Test /api/health/ready endpoint for readiness probe."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            response = client.get(
                "/api/health/ready",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )

            assert response.status_code in [200, 404, 503]

            if response.status_code in [200, 503]:
                data = response.json()
                assert "ready" in data or "status" in data

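# A minimal sketch of the readiness probe the last test above allows for:
# 200 when all dependencies respond, 503 otherwise. The individual checks
# are stubs and an assumption about the eventual implementation.
from fastapi import Response

probe_app = FastAPI()


def _dependency_checks() -> dict:
    # Each value would be a real probe (database ping, cache ping, ...).
    return {"database": True, "cache": True}


@probe_app.get("/api/health/ready")
async def ready(response: Response) -> dict:
    results = _dependency_checks()
    all_ok = all(results.values())
    response.status_code = 200 if all_ok else 503
    return {"ready": all_ok, "checks": results}
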
@pytest.mark.integration
class TestHealthEndpointAuthentication:
    """Test authentication requirements for health endpoints."""

    def test_health_endpoints_without_auth(self, client):
        """Test which health endpoints require authentication."""
        # Basic health should be public
        response = client.get("/health")
        assert response.status_code == 200

        # System endpoints should require auth
        protected_endpoints = [
            "/api/health",
            "/api/health/system",
            "/api/health/database",
            "/api/health/dependencies",
            "/api/health/performance",
            "/api/health/metrics",
            "/api/health/ready"
        ]

        for endpoint in protected_endpoints:
            response = client.get(endpoint)
            # Should either be not found (404) or require auth (403)
            assert response.status_code in [403, 404]

    def test_health_endpoints_with_invalid_auth(self, client):
        """Test health endpoints with invalid authentication."""
        invalid_token = "invalid.token.here"

        protected_endpoints = [
            "/api/health",
            "/api/health/system",
            "/api/health/database",
            "/api/health/dependencies",
            "/api/health/performance",
            "/api/health/metrics",
            "/api/health/ready"
        ]

        for endpoint in protected_endpoints:
            response = client.get(
                endpoint,
                headers={"Authorization": f"Bearer {invalid_token}"}
            )
            # Should either be not found (404) or unauthorized (401)
            assert response.status_code in [401, 404]

@pytest.mark.integration
class TestHealthEndpointErrorHandling:
    """Test error handling in health endpoints."""

    def test_health_endpoint_resilience(self, client):
        """Test health endpoint handles errors gracefully."""
        # Test with various malformed requests
        malformed_requests = [
            ("/health", {"Content-Type": "application/xml"}),
            ("/health", {"Accept": "text/plain"}),
        ]

        for endpoint, headers in malformed_requests:
            response = client.get(endpoint, headers=headers)
            # Should still return 200 for basic health
            assert response.status_code == 200

    def test_database_health_error_handling(self, client, mock_settings):
        """Test database health endpoint error handling."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            # Test with expired token
            expired_token = "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9.eyJ1c2VyIjoidGVzdCIsImV4cCI6MH0"

            response = client.get(
                "/api/system/database/health",
                headers={"Authorization": f"Bearer {expired_token}"}
            )

            assert response.status_code == 401

    def test_health_endpoint_malformed_auth_header(self, client):
        """Test health endpoints with malformed authorization headers."""
        malformed_headers = [
            {"Authorization": "Bearer"},  # Missing token
            {"Authorization": "Basic token"},  # Wrong type
            {"Authorization": "token"},  # Missing Bearer
        ]

        for headers in malformed_headers:
            response = client.get("/api/system/database/health", headers=headers)
            assert response.status_code in [401, 403]
@ -1,440 +0,0 @@
"""
|
|
||||||
Integration tests for API key management, webhooks, and third-party integrations.
|
|
||||||
|
|
||||||
This module tests the integration endpoints for managing API keys, webhook configurations,
|
|
||||||
and third-party service integrations.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import json
|
|
||||||
import uuid
|
|
||||||
from unittest.mock import Mock, patch
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
from fastapi.testclient import TestClient
|
|
||||||
|
|
||||||
from src.server.fastapi_app import app
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
|
||||||
def client():
|
|
||||||
"""Create a test client for the FastAPI application."""
|
|
||||||
return TestClient(app)
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
def auth_headers(client):
    """Provide authentication headers for protected endpoints."""
    # Login with the password matching the patched hash below.
    login_data = {"password": "password"}

    # master_password_hash is a plain attribute, so patch it with new=
    # instead of configuring a Mock's return_value.
    password_hash = "5e884898da28047151d0e56f8dc6292773603d0d6aabbdd62a11ef721d1542d8"  # sha256('password')
    with patch('src.server.fastapi_app.settings.master_password_hash', new=password_hash):
        response = client.post("/auth/login", json=login_data)

    if response.status_code == 200:
        token = response.json()["access_token"]
        return {"Authorization": f"Bearer {token}"}
    return {}

class TestAPIKeyManagement:
    """Test cases for API key management endpoints."""

    def test_list_api_keys_requires_auth(self, client):
        """Test that listing API keys requires authentication."""
        response = client.get("/api/integrations/api-keys")
        assert response.status_code == 401

    def test_create_api_key_requires_auth(self, client):
        """Test that creating API keys requires authentication."""
        response = client.post("/api/integrations/api-keys", json={"name": "test_key"})
        assert response.status_code == 401

    @patch('src.server.fastapi_app.get_current_user')
    def test_list_api_keys(self, mock_user, client):
        """Test listing API keys."""
        mock_user.return_value = {"user_id": "test_user"}

        response = client.get("/api/integrations/api-keys")
        # Expected 404 since endpoint not implemented yet
        assert response.status_code in [200, 404]

        if response.status_code == 200:
            data = response.json()
            assert "api_keys" in data
            assert isinstance(data["api_keys"], list)

    @patch('src.server.fastapi_app.get_current_user')
    def test_create_api_key(self, mock_user, client):
        """Test creating new API key."""
        mock_user.return_value = {"user_id": "test_user"}

        key_data = {
            "name": "test_integration_key",
            "description": "Key for testing integrations",
            "permissions": ["read", "write"],
            "expires_at": "2024-12-31T23:59:59Z"
        }

        response = client.post("/api/integrations/api-keys", json=key_data)
        # Expected 404 since endpoint not implemented yet
        assert response.status_code in [201, 404]

        if response.status_code == 201:
            data = response.json()
            assert "api_key_id" in data
            assert "api_key" in data
            assert "created_at" in data

    @patch('src.server.fastapi_app.get_current_user')
    def test_get_api_key_details(self, mock_user, client):
        """Test getting API key details."""
        mock_user.return_value = {"user_id": "test_user"}

        key_id = "test_key_123"
        response = client.get(f"/api/integrations/api-keys/{key_id}")
        assert response.status_code in [200, 404]

        if response.status_code == 200:
            data = response.json()
            assert "api_key_id" in data
            assert "name" in data
            assert "permissions" in data
            assert "created_at" in data

    @patch('src.server.fastapi_app.get_current_user')
    def test_revoke_api_key(self, mock_user, client):
        """Test revoking API key."""
        mock_user.return_value = {"user_id": "test_user"}

        key_id = "test_key_123"
        response = client.delete(f"/api/integrations/api-keys/{key_id}")
        assert response.status_code in [200, 404]

        if response.status_code == 200:
            data = response.json()
            assert "status" in data
            assert data["status"] == "revoked"

    def test_create_api_key_invalid_data(self, client, auth_headers):
        """Test creating API key with invalid data."""
        invalid_data_sets = [
            {},  # Empty data
            {"name": ""},  # Empty name
            {"name": "test", "permissions": []},  # Empty permissions
            {"name": "test", "expires_at": "invalid_date"},  # Invalid date
        ]

        for invalid_data in invalid_data_sets:
            response = client.post("/api/integrations/api-keys", json=invalid_data, headers=auth_headers)
            assert response.status_code in [400, 404, 422]

    @patch('src.server.fastapi_app.get_current_user')
    def test_update_api_key_permissions(self, mock_user, client):
        """Test updating API key permissions."""
        mock_user.return_value = {"user_id": "test_user"}

        key_id = "test_key_123"
        update_data = {
            "permissions": ["read"],
            "description": "Updated description"
        }

        response = client.patch(f"/api/integrations/api-keys/{key_id}", json=update_data)
        assert response.status_code in [200, 404]

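# A minimal sketch of API-key issuance matching the response shape asserted
# above: the raw key is returned exactly once and only its hash is stored.
# secrets/hashlib/uuid are stdlib; the in-memory dict stands in for a real
# table and is an assumption.
import hashlib
import secrets
import uuid
from datetime import datetime, timezone

_api_keys = {}  # key_id -> record (illustrative in-memory store)


def create_api_key(name: str, permissions: list) -> dict:
    raw_key = secrets.token_urlsafe(32)
    key_id = str(uuid.uuid4())
    created_at = datetime.now(timezone.utc).isoformat()
    _api_keys[key_id] = {
        "name": name,
        "permissions": permissions,
        "key_hash": hashlib.sha256(raw_key.encode("utf-8")).hexdigest(),
        "created_at": created_at,
    }
    # The raw key appears only in this response; it cannot be recovered later.
    return {"api_key_id": key_id, "api_key": raw_key, "created_at": created_at}
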
class TestWebhookManagement:
    """Test cases for webhook configuration endpoints."""

    def test_list_webhooks_requires_auth(self, client):
        """Test that listing webhooks requires authentication."""
        response = client.get("/api/integrations/webhooks")
        assert response.status_code == 401

    @patch('src.server.fastapi_app.get_current_user')
    def test_list_webhooks(self, mock_user, client):
        """Test listing configured webhooks."""
        mock_user.return_value = {"user_id": "test_user"}

        response = client.get("/api/integrations/webhooks")
        # Expected 404 since endpoint not implemented yet
        assert response.status_code in [200, 404]

        if response.status_code == 200:
            data = response.json()
            assert "webhooks" in data
            assert isinstance(data["webhooks"], list)

    @patch('src.server.fastapi_app.get_current_user')
    def test_create_webhook(self, mock_user, client):
        """Test creating new webhook."""
        mock_user.return_value = {"user_id": "test_user"}

        webhook_data = {
            "name": "download_complete_webhook",
            "url": "https://example.com/webhook",
            "events": ["download_complete", "download_failed"],
            "secret": "webhook_secret_123",
            "active": True
        }

        response = client.post("/api/integrations/webhooks", json=webhook_data)
        # Expected 404 since endpoint not implemented yet
        assert response.status_code in [201, 404]

        if response.status_code == 201:
            data = response.json()
            assert "webhook_id" in data
            assert "created_at" in data

    @patch('src.server.fastapi_app.get_current_user')
    def test_test_webhook(self, mock_user, client):
        """Test triggering a webhook test delivery."""
        mock_user.return_value = {"user_id": "test_user"}

        webhook_id = "webhook_123"
        test_data = {
            "event_type": "test",
            "test_payload": {"message": "test webhook"}
        }

        response = client.post(f"/api/integrations/webhooks/{webhook_id}/test", json=test_data)
        assert response.status_code in [200, 404]

        if response.status_code == 200:
            data = response.json()
            assert "status" in data
            assert "response_time_ms" in data

    @patch('src.server.fastapi_app.get_current_user')
    def test_update_webhook(self, mock_user, client):
        """Test updating webhook configuration."""
        mock_user.return_value = {"user_id": "test_user"}

        webhook_id = "webhook_123"
        update_data = {
            "active": False,
            "events": ["download_complete"]
        }

        response = client.patch(f"/api/integrations/webhooks/{webhook_id}", json=update_data)
        assert response.status_code in [200, 404]

    @patch('src.server.fastapi_app.get_current_user')
    def test_delete_webhook(self, mock_user, client):
        """Test deleting webhook."""
        mock_user.return_value = {"user_id": "test_user"}

        webhook_id = "webhook_123"
        response = client.delete(f"/api/integrations/webhooks/{webhook_id}")
        assert response.status_code in [200, 404]

    def test_create_webhook_invalid_url(self, client, auth_headers):
        """Test creating webhook with invalid URL."""
        invalid_webhook_data = {
            "name": "invalid_webhook",
            "url": "not_a_valid_url",
            "events": ["download_complete"]
        }

        response = client.post("/api/integrations/webhooks", json=invalid_webhook_data, headers=auth_headers)
        assert response.status_code in [400, 404, 422]

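# A minimal sketch of how the webhook "secret" above is conventionally used:
# sign the payload with HMAC-SHA256 so receivers can verify authenticity.
# The header name is a common convention, not this project's confirmed API.
import hmac
import json


def signed_webhook_request(secret: str, event: dict):
    body = json.dumps(event, separators=(",", ":")).encode("utf-8")
    signature = hmac.new(secret.encode("utf-8"), body, hashlib.sha256).hexdigest()
    headers = {
        "X-Webhook-Signature": f"sha256={signature}",
        "Content-Type": "application/json",
    }
    return body, headers
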
class TestThirdPartyIntegrations:
    """Test cases for third-party service integrations."""

    def test_list_integrations_requires_auth(self, client):
        """Test that listing integrations requires authentication."""
        response = client.get("/api/integrations/services")
        assert response.status_code == 401

    @patch('src.server.fastapi_app.get_current_user')
    def test_list_available_integrations(self, mock_user, client):
        """Test listing available third-party integrations."""
        mock_user.return_value = {"user_id": "test_user"}

        response = client.get("/api/integrations/services")
        # Expected 404 since endpoint not implemented yet
        assert response.status_code in [200, 404]

        if response.status_code == 200:
            data = response.json()
            assert "services" in data
            assert isinstance(data["services"], list)

    @patch('src.server.fastapi_app.get_current_user')
    def test_configure_integration(self, mock_user, client):
        """Test configuring third-party integration."""
        mock_user.return_value = {"user_id": "test_user"}

        service_name = "discord"
        config_data = {
            "webhook_url": "https://discord.com/api/webhooks/...",
            "notifications": ["download_complete", "series_added"],
            "enabled": True
        }

        response = client.post(f"/api/integrations/services/{service_name}/configure", json=config_data)
        assert response.status_code in [200, 404]

    @patch('src.server.fastapi_app.get_current_user')
    def test_test_integration(self, mock_user, client):
        """Test sending a test notification through a third-party integration."""
        mock_user.return_value = {"user_id": "test_user"}

        service_name = "discord"
        test_data = {
            "message": "Test notification from AniWorld"
        }

        response = client.post(f"/api/integrations/services/{service_name}/test", json=test_data)
        assert response.status_code in [200, 404]

        if response.status_code == 200:
            data = response.json()
            assert "status" in data
            assert "response" in data

    @patch('src.server.fastapi_app.get_current_user')
    def test_get_integration_status(self, mock_user, client):
        """Test getting integration status."""
        mock_user.return_value = {"user_id": "test_user"}

        service_name = "discord"
        response = client.get(f"/api/integrations/services/{service_name}/status")
        assert response.status_code in [200, 404]

        if response.status_code == 200:
            data = response.json()
            assert "service" in data
            assert "status" in data
            assert "last_tested" in data

    @patch('src.server.fastapi_app.get_current_user')
    def test_disable_integration(self, mock_user, client):
        """Test disabling integration."""
        mock_user.return_value = {"user_id": "test_user"}

        service_name = "discord"
        response = client.post(f"/api/integrations/services/{service_name}/disable")
        assert response.status_code in [200, 404]

class TestIntegrationEvents:
|
|
||||||
"""Test cases for integration event handling."""
|
|
||||||
|
|
||||||
@patch('src.server.fastapi_app.get_current_user')
|
|
||||||
def test_list_integration_events(self, mock_user, client):
|
|
||||||
"""Test listing integration events."""
|
|
||||||
mock_user.return_value = {"user_id": "test_user"}
|
|
||||||
|
|
||||||
response = client.get("/api/integrations/events")
|
|
||||||
assert response.status_code in [200, 404]
|
|
||||||
|
|
||||||
if response.status_code == 200:
|
|
||||||
data = response.json()
|
|
||||||
assert "events" in data
|
|
||||||
assert isinstance(data["events"], list)
|
|
||||||
|
|
||||||
@patch('src.server.fastapi_app.get_current_user')
|
|
||||||
def test_trigger_test_event(self, mock_user, client):
|
|
||||||
"""Test triggering test integration event."""
|
|
||||||
mock_user.return_value = {"user_id": "test_user"}
|
|
||||||
|
|
||||||
event_data = {
|
|
||||||
"event_type": "download_complete",
|
|
||||||
"payload": {
|
|
||||||
"anime_id": "test_anime",
|
|
||||||
"episode_count": 12,
|
|
||||||
"download_time": "2023-01-01T12:00:00Z"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
response = client.post("/api/integrations/events/trigger", json=event_data)
|
|
||||||
assert response.status_code in [200, 404]
|
|
||||||
|
|
||||||
@patch('src.server.fastapi_app.get_current_user')
|
|
||||||
def test_get_event_history(self, mock_user, client):
|
|
||||||
"""Test getting integration event history."""
|
|
||||||
mock_user.return_value = {"user_id": "test_user"}
|
|
||||||
|
|
||||||
response = client.get("/api/integrations/events/history")
|
|
||||||
assert response.status_code in [200, 404]
|
|
||||||
|
|
||||||
if response.status_code == 200:
|
|
||||||
data = response.json()
|
|
||||||
assert "events" in data
|
|
||||||
assert "pagination" in data
|
|
||||||
|
|
||||||
|
|
||||||
class TestIntegrationSecurity:
|
|
||||||
"""Test cases for integration security features."""
|
|
||||||
|
|
||||||
@patch('src.server.fastapi_app.get_current_user')
|
|
||||||
def test_api_key_validation(self, mock_user, client):
|
|
||||||
"""Test API key validation."""
|
|
||||||
mock_user.return_value = {"user_id": "test_user"}
|
|
||||||
|
|
||||||
# Test with valid API key format
|
|
||||||
validation_data = {
|
|
||||||
"api_key": "ak_test_" + str(uuid.uuid4()).replace("-", "")
|
|
||||||
}
|
|
||||||
|
|
||||||
response = client.post("/api/integrations/validate-key", json=validation_data)
|
|
||||||
assert response.status_code in [200, 404]
|
|
||||||
|
|
||||||
@patch('src.server.fastapi_app.get_current_user')
|
|
||||||
def test_webhook_signature_validation(self, mock_user, client):
|
|
||||||
"""Test webhook signature validation."""
|
|
||||||
mock_user.return_value = {"user_id": "test_user"}
|
|
||||||
|
|
||||||
signature_data = {
|
|
||||||
"payload": {"test": "data"},
|
|
||||||
"signature": "sha256=test_signature",
|
|
||||||
"secret": "webhook_secret"
|
|
||||||
}
|
|
||||||
|
|
||||||
response = client.post("/api/integrations/validate-signature", json=signature_data)
|
|
||||||
assert response.status_code in [200, 404]
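
The validate-signature endpoint is not implemented yet, so the exact scheme is an assumption; the "sha256=..." format used above matches the HMAC-SHA256 convention most webhook providers (GitHub among them) follow. A minimal sketch of that convention, with hypothetical function names that are not part of this codebase:

import hashlib
import hmac
import json

def sign_payload(payload: dict, secret: str) -> str:
    """Produce a 'sha256=<hex>' signature over the canonical JSON body."""
    body = json.dumps(payload, separators=(",", ":")).encode()
    digest = hmac.new(secret.encode(), body, hashlib.sha256).hexdigest()
    return f"sha256={digest}"

def verify_signature(payload: dict, signature: str, secret: str) -> bool:
    """compare_digest is constant-time, guarding against timing attacks."""
    return hmac.compare_digest(sign_payload(payload, secret), signature)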

    def test_integration_rate_limiting(self, client, auth_headers):
        """Test rate limiting for integration endpoints."""
        # Make multiple rapid requests to test rate limiting
        for i in range(10):
            response = client.get("/api/integrations/api-keys", headers=auth_headers)
            # Should either work or be rate limited
            assert response.status_code in [200, 404, 429]
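
The 429 branch above implies some server-side limiter once these endpoints exist. Purely as a sketch (the project's actual mechanism, if any, is unknown), a fixed-window counter is the simplest way to produce such responses:

import time

class FixedWindowLimiter:
    """Allow at most max_requests per window; callers map False to HTTP 429."""

    def __init__(self, max_requests: int, window_seconds: float) -> None:
        self.max_requests = max_requests
        self.window_seconds = window_seconds
        self.window_start = time.monotonic()
        self.count = 0

    def allow(self) -> bool:
        now = time.monotonic()
        if now - self.window_start >= self.window_seconds:
            # Start a fresh window and reset the counter
            self.window_start = now
            self.count = 0
        self.count += 1
        return self.count <= self.max_requests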


class TestIntegrationErrorHandling:
    """Test cases for integration error handling."""

    def test_invalid_service_name(self, client, auth_headers):
        """Test handling of invalid service names."""
        response = client.get("/api/integrations/services/invalid_service/status", headers=auth_headers)
        assert response.status_code in [400, 404]

    def test_malformed_webhook_payload(self, client, auth_headers):
        """Test handling of malformed webhook payloads."""
        malformed_data = {
            "url": "https://example.com",
            "events": "not_a_list"  # Should be a list
        }

        response = client.post("/api/integrations/webhooks", json=malformed_data, headers=auth_headers)
        assert response.status_code in [400, 404, 422]

    @patch('src.server.fastapi_app.get_current_user')
    def test_integration_service_unavailable(self, mock_user, client):
        """Test handling when integration service is unavailable."""
        mock_user.return_value = {"user_id": "test_user"}

        # This would test actual service connectivity in real implementation
        response = client.post("/api/integrations/services/discord/test", json={"message": "test"})
        assert response.status_code in [200, 404, 503]


if __name__ == "__main__":
    pytest.main([__file__, "-v"])
@ -1,522 +0,0 @@
"""
Integration tests for miscellaneous components.

Tests configuration system integration, error handling pipelines,
and modular architecture component interactions.
"""

import json
import os
import sys
import tempfile
from pathlib import Path
from unittest.mock import Mock

import pytest

# Add source directory to path
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..'))


@pytest.mark.integration
class TestConfigurationIntegration:
    """Test configuration system integration."""

    def test_config_loading_chain(self):
        """Test complete configuration loading chain."""
        # Create temporary config files
        with tempfile.TemporaryDirectory() as temp_dir:
            # Create default config
            default_config = {
                "anime_directory": "/default/path",
                "log_level": "INFO",
                "provider_timeout": 30
            }

            # Create user config that overrides some values
            user_config = {
                "anime_directory": "/user/path",
                "log_level": "DEBUG"
            }

            default_file = Path(temp_dir) / "default.json"
            user_file = Path(temp_dir) / "user.json"

            with open(default_file, 'w') as f:
                json.dump(default_config, f)

            with open(user_file, 'w') as f:
                json.dump(user_config, f)

            # Mock configuration loader
            def load_configuration(default_path, user_path):
                """Load configuration with precedence."""
                config = {}

                # Load default config
                if os.path.exists(default_path):
                    with open(default_path, 'r') as f:
                        config.update(json.load(f))

                # Load user config (overrides defaults)
                if os.path.exists(user_path):
                    with open(user_path, 'r') as f:
                        config.update(json.load(f))

                return config

            # Test configuration loading
            config = load_configuration(str(default_file), str(user_file))

            # Verify precedence
            assert config["anime_directory"] == "/user/path"  # User override
            assert config["log_level"] == "DEBUG"  # User override
            assert config["provider_timeout"] == 30  # Default value

    def test_config_validation_integration(self):
        """Test configuration validation integration."""
        def validate_config(config):
            """Validate configuration values."""
            errors = []

            # Validate required fields
            required_fields = ["anime_directory", "log_level"]
            for field in required_fields:
                if field not in config:
                    errors.append(f"Missing required field: {field}")

            # Validate specific values
            if "log_level" in config:
                valid_levels = ["DEBUG", "INFO", "WARNING", "ERROR", "FATAL"]
                if config["log_level"] not in valid_levels:
                    errors.append(f"Invalid log level: {config['log_level']}")

            if "provider_timeout" in config:
                if config["provider_timeout"] <= 0:
                    errors.append("Provider timeout must be positive")

            return errors

        # Test valid configuration
        valid_config = {
            "anime_directory": "/valid/path",
            "log_level": "INFO",
            "provider_timeout": 30
        }

        errors = validate_config(valid_config)
        assert len(errors) == 0

        # Test invalid configuration
        invalid_config = {
            "log_level": "INVALID",
            "provider_timeout": -5
        }

        errors = validate_config(invalid_config)
        assert len(errors) == 3  # Missing anime_directory, invalid log level, negative timeout
        assert "Missing required field: anime_directory" in errors
        assert "Invalid log level: INVALID" in errors
        assert "Provider timeout must be positive" in errors

    def test_config_change_propagation(self):
        """Test configuration change propagation to components."""
        class ConfigurableComponent:
            def __init__(self, config_manager):
                self.config_manager = config_manager
                self.current_config = {}
                self.config_manager.add_observer(self.on_config_change)

            def on_config_change(self, key, old_value, new_value):
                self.current_config[key] = new_value

                # React to specific config changes
                if key == "log_level":
                    self.update_log_level(new_value)
                elif key == "provider_timeout":
                    self.update_timeout(new_value)

            def update_log_level(self, level):
                self.log_level_changed = level

            def update_timeout(self, timeout):
                self.timeout_changed = timeout

        # Mock config manager
        class ConfigManager:
            def __init__(self):
                self.config = {}
                self.observers = []

            def add_observer(self, observer):
                self.observers.append(observer)

            def set(self, key, value):
                old_value = self.config.get(key)
                self.config[key] = value

                for observer in self.observers:
                    observer(key, old_value, value)

        # Test configuration change propagation
        config_manager = ConfigManager()
        component = ConfigurableComponent(config_manager)

        # Change configuration
        config_manager.set("log_level", "DEBUG")
        config_manager.set("provider_timeout", 60)

        # Verify changes propagated
        assert component.current_config["log_level"] == "DEBUG"
        assert component.current_config["provider_timeout"] == 60
        assert component.log_level_changed == "DEBUG"
        assert component.timeout_changed == 60


@pytest.mark.integration
class TestErrorHandlingIntegration:
    """Test error handling system integration."""

    def test_error_propagation_chain(self):
        """Test error propagation through component layers."""
        class DataLayer:
            def fetch_data(self, raise_error=False):
                if raise_error:
                    raise ConnectionError("Database connection failed")
                return {"data": "test"}

        class ServiceLayer:
            def __init__(self, data_layer, error_handler):
                self.data_layer = data_layer
                self.error_handler = error_handler

            def get_data(self, raise_error=False):
                try:
                    return self.data_layer.fetch_data(raise_error)
                except Exception as e:
                    return self.error_handler.handle_error(e, context="service_layer")

        class ApiLayer:
            def __init__(self, service_layer, error_handler):
                self.service_layer = service_layer
                self.error_handler = error_handler

            def api_get_data(self, raise_error=False):
                try:
                    result = self.service_layer.get_data(raise_error)
                    if result.get("error"):
                        return {"success": False, "error": result["error"]}
                    return {"success": True, "data": result}
                except Exception as e:
                    error_response = self.error_handler.handle_error(e, context="api_layer")
                    return {"success": False, "error": error_response["error"]}

        # Mock error handler
        class ErrorHandler:
            def __init__(self):
                self.handled_errors = []

            def handle_error(self, error, context=None):
                error_info = {
                    "error_type": type(error).__name__,
                    "error": str(error),
                    "context": context,
                    "handled": True
                }
                self.handled_errors.append(error_info)
                return error_info

        # Set up components
        error_handler = ErrorHandler()
        data_layer = DataLayer()
        service_layer = ServiceLayer(data_layer, error_handler)
        api_layer = ApiLayer(service_layer, error_handler)

        # Test successful execution
        result = api_layer.api_get_data(raise_error=False)
        assert result["success"] is True
        assert result["data"]["data"] == "test"

        # Test error propagation
        result = api_layer.api_get_data(raise_error=True)
        assert result["success"] is False
        assert "Database connection failed" in result["error"]

        # Verify error was handled at service layer
        assert len(error_handler.handled_errors) == 1
        assert error_handler.handled_errors[0]["context"] == "service_layer"
        assert error_handler.handled_errors[0]["error_type"] == "ConnectionError"

    def test_error_recovery_integration(self):
        """Test error recovery integration across components."""
        class RetryableService:
            def __init__(self, max_retries=3):
                self.max_retries = max_retries
                self.attempt_count = 0

            def unreliable_operation(self):
                self.attempt_count += 1
                if self.attempt_count < 3:
                    raise ConnectionError(f"Attempt {self.attempt_count} failed")
                return f"Success on attempt {self.attempt_count}"

        def execute_with_retry(service, operation_name, max_retries=3):
            """Execute operation with retry logic."""
            last_error = None

            for attempt in range(max_retries):
                try:
                    operation = getattr(service, operation_name)
                    return operation()
                except Exception as e:
                    last_error = e
                    if attempt == max_retries - 1:
                        raise e

            raise last_error

        # Test successful retry
        service = RetryableService()
        result = execute_with_retry(service, "unreliable_operation")
        assert "Success on attempt 3" in result

        # Test failure after max retries: the operation needs three attempts,
        # but the executor below only allows two
        service = RetryableService(max_retries=10)
        with pytest.raises(ConnectionError):
            execute_with_retry(service, "unreliable_operation", max_retries=2)
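
The helper retries immediately, which keeps the test fast; a real integration would normally wait between attempts. A hypothetical variant with exponential backoff, not part of the tested code:

import time

def execute_with_backoff(service, operation_name, max_retries=3, base_delay=0.5):
    """Like execute_with_retry, but sleeps 0.5s, 1s, 2s, ... between attempts."""
    for attempt in range(max_retries):
        try:
            return getattr(service, operation_name)()
        except Exception:
            if attempt == max_retries - 1:
                raise
            time.sleep(base_delay * (2 ** attempt))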


@pytest.mark.integration
class TestModularArchitectureIntegration:
    """Test modular architecture integration."""

    def test_provider_system_integration(self):
        """Test complete provider system integration."""
        # Mock provider implementations
        class BaseProvider:
            def search(self, query):
                raise NotImplementedError

        class AniworldProvider(BaseProvider):
            def search(self, query):
                return [{"title": f"Aniworld: {query}", "source": "aniworld"}]

        class BackupProvider(BaseProvider):
            def search(self, query):
                return [{"title": f"Backup: {query}", "source": "backup"}]

        # Provider factory
        class ProviderFactory:
            def __init__(self):
                self.providers = {}

            def register(self, name, provider_class):
                self.providers[name] = provider_class

            def create(self, name):
                if name not in self.providers:
                    raise ValueError(f"Provider {name} not found")
                return self.providers[name]()

        # Provider service with fallback
        class ProviderService:
            def __init__(self, factory, primary_provider, fallback_providers=None):
                self.factory = factory
                self.primary_provider = primary_provider
                self.fallback_providers = fallback_providers or []

            def search(self, query):
                # Try primary provider
                try:
                    provider = self.factory.create(self.primary_provider)
                    return provider.search(query)
                except Exception:
                    # Try fallback providers
                    for fallback_name in self.fallback_providers:
                        try:
                            provider = self.factory.create(fallback_name)
                            return provider.search(query)
                        except Exception:
                            continue

                    raise Exception("All providers failed")

        # Set up provider system
        factory = ProviderFactory()
        factory.register("aniworld", AniworldProvider)
        factory.register("backup", BackupProvider)

        service = ProviderService(
            factory,
            primary_provider="aniworld",
            fallback_providers=["backup"]
        )

        # Test primary provider success
        results = service.search("test anime")
        assert len(results) == 1
        assert results[0]["source"] == "aniworld"

        # Test fallback when primary fails
        factory.register("failing", lambda: None)  # None has no search(), so it fails
        service_with_failing_primary = ProviderService(
            factory,
            primary_provider="failing",
            fallback_providers=["backup"]
        )

        results = service_with_failing_primary.search("test anime")
        assert len(results) == 1
        assert results[0]["source"] == "backup"

    def test_repository_service_integration(self):
        """Test repository and service layer integration."""
        # Mock repository
        class AnimeRepository:
            def __init__(self):
                self.data = {}
                self.next_id = 1

            def save(self, anime):
                anime_id = self.next_id
                self.next_id += 1
                anime_data = {**anime, "id": anime_id}
                self.data[anime_id] = anime_data
                return anime_data

            def find_by_id(self, anime_id):
                return self.data.get(anime_id)

            def find_all(self):
                return list(self.data.values())

            def find_by_title(self, title):
                return [anime for anime in self.data.values() if title.lower() in anime["title"].lower()]

        # Service layer
        class AnimeService:
            def __init__(self, repository, provider_service):
                self.repository = repository
                self.provider_service = provider_service

            def search_and_cache(self, query):
                # Check cache first
                cached = self.repository.find_by_title(query)
                if cached:
                    return {"source": "cache", "results": cached}

                # Search using provider
                results = self.provider_service.search(query)

                # Cache results
                cached_results = []
                for result in results:
                    saved = self.repository.save(result)
                    cached_results.append(saved)

                return {"source": "provider", "results": cached_results}

        # Mock provider service
        mock_provider = Mock()
        mock_provider.search.return_value = [
            {"title": "Test Anime", "genre": "Action"}
        ]

        # Set up service
        repository = AnimeRepository()
        service = AnimeService(repository, mock_provider)

        # First search should use provider
        result1 = service.search_and_cache("Test")
        assert result1["source"] == "provider"
        assert len(result1["results"]) == 1
        assert result1["results"][0]["id"] == 1

        # Second search should use cache
        result2 = service.search_and_cache("Test")
        assert result2["source"] == "cache"
        assert len(result2["results"]) == 1
        assert result2["results"][0]["id"] == 1

        # Verify provider was only called once
        mock_provider.search.assert_called_once_with("Test")

    def test_event_driven_integration(self):
        """Test event-driven component integration."""
        # Event bus
        class EventBus:
            def __init__(self):
                self.subscribers = {}

            def subscribe(self, event_type, handler):
                if event_type not in self.subscribers:
                    self.subscribers[event_type] = []
                self.subscribers[event_type].append(handler)

            def publish(self, event_type, data):
                if event_type in self.subscribers:
                    for handler in self.subscribers[event_type]:
                        handler(data)

        # Components that publish/subscribe to events
        class DownloadService:
            def __init__(self, event_bus):
                self.event_bus = event_bus

            def download_anime(self, anime_id):
                # Simulate download
                self.event_bus.publish("download_started", {"anime_id": anime_id})

                # Simulate completion
                self.event_bus.publish("download_completed", {
                    "anime_id": anime_id,
                    "status": "success"
                })

        class NotificationService:
            def __init__(self, event_bus):
                self.event_bus = event_bus
                self.notifications = []

                # Subscribe to events
                self.event_bus.subscribe("download_started", self.on_download_started)
                self.event_bus.subscribe("download_completed", self.on_download_completed)

            def on_download_started(self, data):
                self.notifications.append(f"Download started for anime {data['anime_id']}")

            def on_download_completed(self, data):
                self.notifications.append(f"Download completed for anime {data['anime_id']}")

        class StatisticsService:
            def __init__(self, event_bus):
                self.event_bus = event_bus
                self.download_count = 0
                self.completed_count = 0

                # Subscribe to events
                self.event_bus.subscribe("download_started", self.on_download_started)
                self.event_bus.subscribe("download_completed", self.on_download_completed)

            def on_download_started(self, data):
                self.download_count += 1

            def on_download_completed(self, data):
                self.completed_count += 1

        # Set up event-driven system
        event_bus = EventBus()
        download_service = DownloadService(event_bus)
        notification_service = NotificationService(event_bus)
        stats_service = StatisticsService(event_bus)

        # Trigger download
        download_service.download_anime(123)

        # Verify events were handled
        assert len(notification_service.notifications) == 2
        assert "Download started for anime 123" in notification_service.notifications
        assert "Download completed for anime 123" in notification_service.notifications

        assert stats_service.download_count == 1
        assert stats_service.completed_count == 1
@ -1,332 +0,0 @@
"""
Integration tests for performance optimization API endpoints.

This module tests the performance-related endpoints for speed limiting, cache management,
memory management, and download task handling.
"""

import time
from unittest.mock import Mock, patch

import pytest
from fastapi.testclient import TestClient

from src.server.fastapi_app import app


@pytest.fixture
def client():
    """Create a test client for the FastAPI application."""
    return TestClient(app)


@pytest.fixture
def auth_headers(client):
    """Provide authentication headers for protected endpoints."""
    # Login to get token
    login_data = {"password": "testpassword"}

    with patch('src.server.fastapi_app.settings.master_password_hash') as mock_hash:
        mock_hash.return_value = "5e884898da28047151d0e56f8dc6292773603d0d6aabbdd62a11ef721d1542d8"  # 'password' hash
        response = client.post("/auth/login", json=login_data)

    if response.status_code == 200:
        token = response.json()["access_token"]
        return {"Authorization": f"Bearer {token}"}
    return {}
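
The hash literal in the fixture is simply the SHA-256 digest of the string "password"; it can be verified, or regenerated if the test password ever changes, in one line:

import hashlib

assert hashlib.sha256(b"password").hexdigest() == (
    "5e884898da28047151d0e56f8dc6292773603d0d6aabbdd62a11ef721d1542d8"
)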


class TestSpeedLimitEndpoint:
    """Test cases for /api/performance/speed-limit endpoint."""

    def test_get_speed_limit_requires_auth(self, client):
        """Test that getting speed limit requires authentication."""
        response = client.get("/api/performance/speed-limit")
        assert response.status_code == 401

    def test_set_speed_limit_requires_auth(self, client):
        """Test that setting speed limit requires authentication."""
        response = client.post("/api/performance/speed-limit", json={"limit_mbps": 10})
        assert response.status_code == 401

    @patch('src.server.fastapi_app.get_current_user')
    def test_get_current_speed_limit(self, mock_user, client):
        """Test getting current speed limit."""
        mock_user.return_value = {"user_id": "test_user"}

        response = client.get("/api/performance/speed-limit")
        # Expected 404 since endpoint not implemented yet
        assert response.status_code in [200, 404]

        if response.status_code == 200:
            data = response.json()
            assert "limit_mbps" in data
            assert "current_usage_mbps" in data

    @patch('src.server.fastapi_app.get_current_user')
    def test_set_speed_limit_valid(self, mock_user, client):
        """Test setting valid speed limit."""
        mock_user.return_value = {"user_id": "test_user"}

        limit_data = {"limit_mbps": 50}
        response = client.post("/api/performance/speed-limit", json=limit_data)
        # Expected 404 since endpoint not implemented yet
        assert response.status_code in [200, 404]

    def test_set_speed_limit_invalid(self, client, auth_headers):
        """Test setting invalid speed limits."""
        invalid_limits = [
            {"limit_mbps": -1},  # Negative
            {"limit_mbps": 0},  # Zero
            {"limit_mbps": "invalid"},  # Non-numeric
        ]

        for limit_data in invalid_limits:
            response = client.post("/api/performance/speed-limit", json=limit_data, headers=auth_headers)
            assert response.status_code in [400, 404, 422]


class TestCacheStatsEndpoint:
    """Test cases for /api/performance/cache/stats endpoint."""

    def test_cache_stats_requires_auth(self, client):
        """Test that cache stats require authentication."""
        response = client.get("/api/performance/cache/stats")
        assert response.status_code == 401

    @patch('src.server.fastapi_app.get_current_user')
    def test_get_cache_stats(self, mock_user, client):
        """Test getting cache statistics."""
        mock_user.return_value = {"user_id": "test_user"}

        response = client.get("/api/performance/cache/stats")
        # Expected 404 since endpoint not implemented yet
        assert response.status_code in [200, 404]

        if response.status_code == 200:
            data = response.json()
            expected_fields = ["hit_rate", "miss_rate", "size_bytes", "entries_count", "evictions"]
            for field in expected_fields:
                assert field in data

    @patch('src.server.fastapi_app.get_current_user')
    def test_clear_cache(self, mock_user, client):
        """Test clearing cache."""
        mock_user.return_value = {"user_id": "test_user"}

        response = client.delete("/api/performance/cache/stats")
        # Expected 404 since endpoint not implemented yet
        assert response.status_code in [200, 404]


class TestMemoryStatsEndpoint:
    """Test cases for /api/performance/memory/stats endpoint."""

    def test_memory_stats_requires_auth(self, client):
        """Test that memory stats require authentication."""
        response = client.get("/api/performance/memory/stats")
        assert response.status_code == 401

    @patch('src.server.fastapi_app.get_current_user')
    def test_get_memory_stats(self, mock_user, client):
        """Test getting memory statistics."""
        mock_user.return_value = {"user_id": "test_user"}

        response = client.get("/api/performance/memory/stats")
        # Expected 404 since endpoint not implemented yet
        assert response.status_code in [200, 404]

        if response.status_code == 200:
            data = response.json()
            expected_fields = ["used_bytes", "available_bytes", "percent_used", "process_memory"]
            for field in expected_fields:
                assert field in data


class TestMemoryGCEndpoint:
    """Test cases for /api/performance/memory/gc endpoint."""

    def test_memory_gc_requires_auth(self, client):
        """Test that memory garbage collection requires authentication."""
        response = client.post("/api/performance/memory/gc")
        assert response.status_code == 401

    @patch('src.server.fastapi_app.get_current_user')
    def test_trigger_garbage_collection(self, mock_user, client):
        """Test triggering garbage collection."""
        mock_user.return_value = {"user_id": "test_user"}

        response = client.post("/api/performance/memory/gc")
        # Expected 404 since endpoint not implemented yet
        assert response.status_code in [200, 404]

        if response.status_code == 200:
            data = response.json()
            assert "collected_objects" in data
            assert "memory_freed_bytes" in data


class TestDownloadTasksEndpoint:
    """Test cases for /api/performance/downloads/tasks endpoint."""

    def test_download_tasks_requires_auth(self, client):
        """Test that listing download tasks requires authentication."""
        response = client.get("/api/performance/downloads/tasks")
        assert response.status_code == 401

    @patch('src.server.fastapi_app.get_current_user')
    def test_get_download_tasks(self, mock_user, client):
        """Test getting download tasks."""
        mock_user.return_value = {"user_id": "test_user"}

        response = client.get("/api/performance/downloads/tasks")
        # Expected 404 since endpoint not implemented yet
        assert response.status_code in [200, 404]

        if response.status_code == 200:
            data = response.json()
            assert "tasks" in data
            assert isinstance(data["tasks"], list)

    @patch('src.server.fastapi_app.get_current_user')
    def test_get_download_tasks_with_status_filter(self, mock_user, client):
        """Test getting download tasks with a status filter."""
        mock_user.return_value = {"user_id": "test_user"}

        response = client.get("/api/performance/downloads/tasks?status=active")
        assert response.status_code in [200, 404]

        response = client.get("/api/performance/downloads/tasks?status=completed")
        assert response.status_code in [200, 404]


class TestAddDownloadTaskEndpoint:
    """Test cases for /api/performance/downloads/add-task endpoint."""

    def test_add_download_task_requires_auth(self, client):
        """Test that adding a download task requires authentication."""
        response = client.post("/api/performance/downloads/add-task", json={"anime_id": "test"})
        assert response.status_code == 401

    @patch('src.server.fastapi_app.get_current_user')
    def test_add_download_task_valid(self, mock_user, client):
        """Test adding a valid download task."""
        mock_user.return_value = {"user_id": "test_user"}

        task_data = {
            "anime_id": "anime123",
            "episode_range": {"start": 1, "end": 12},
            "quality": "1080p",
            "priority": "normal"
        }

        response = client.post("/api/performance/downloads/add-task", json=task_data)
        # Expected 404 since endpoint not implemented yet
        assert response.status_code in [200, 404]

        if response.status_code == 200:
            data = response.json()
            assert "task_id" in data
            assert "status" in data

    def test_add_download_task_invalid(self, client, auth_headers):
        """Test adding invalid download tasks."""
        invalid_tasks = [
            {},  # Empty data
            {"anime_id": ""},  # Empty anime_id
            {"anime_id": "test", "episode_range": {"start": 5, "end": 2}},  # Invalid range
        ]

        for task_data in invalid_tasks:
            response = client.post("/api/performance/downloads/add-task", json=task_data, headers=auth_headers)
            assert response.status_code in [400, 404, 422]


class TestResumeTasksEndpoint:
    """Test cases for /api/performance/resume/tasks endpoint."""

    def test_resume_tasks_requires_auth(self, client):
        """Test that resuming tasks requires authentication."""
        response = client.post("/api/performance/resume/tasks")
        assert response.status_code == 401

    @patch('src.server.fastapi_app.get_current_user')
    def test_resume_all_tasks(self, mock_user, client):
        """Test resuming all paused tasks."""
        mock_user.return_value = {"user_id": "test_user"}

        response = client.post("/api/performance/resume/tasks")
        # Expected 404 since endpoint not implemented yet
        assert response.status_code in [200, 404]

        if response.status_code == 200:
            data = response.json()
            assert "resumed_count" in data

    @patch('src.server.fastapi_app.get_current_user')
    def test_resume_specific_task(self, mock_user, client):
        """Test resuming a specific task."""
        mock_user.return_value = {"user_id": "test_user"}

        task_data = {"task_id": "task123"}
        response = client.post("/api/performance/resume/tasks", json=task_data)
        assert response.status_code in [200, 404]


class TestPerformanceEndpointsIntegration:
    """Integration tests for performance endpoints."""

    @patch('src.server.fastapi_app.get_current_user')
    def test_performance_workflow(self, mock_user, client):
        """Test typical performance monitoring workflow."""
        mock_user.return_value = {"user_id": "test_user"}

        # 1. Check current memory stats
        response = client.get("/api/performance/memory/stats")
        assert response.status_code in [200, 404]

        # 2. Check cache stats
        response = client.get("/api/performance/cache/stats")
        assert response.status_code in [200, 404]

        # 3. Check download tasks
        response = client.get("/api/performance/downloads/tasks")
        assert response.status_code in [200, 404]

        # 4. If needed, trigger garbage collection
        response = client.post("/api/performance/memory/gc")
        assert response.status_code in [200, 404]

    def test_performance_endpoints_error_handling(self, client, auth_headers):
        """Test error handling across performance endpoints."""
        # Test various endpoints with malformed requests
        endpoints_methods = [
            ("GET", "/api/performance/memory/stats"),
            ("GET", "/api/performance/cache/stats"),
            ("GET", "/api/performance/downloads/tasks"),
            ("POST", "/api/performance/memory/gc"),
            ("POST", "/api/performance/resume/tasks"),
        ]

        for method, endpoint in endpoints_methods:
            if method == "GET":
                response = client.get(endpoint, headers=auth_headers)
            else:
                response = client.post(endpoint, headers=auth_headers)

            # Should either work (200) or not be implemented yet (404)
            assert response.status_code in [200, 404]

    @patch('src.server.fastapi_app.get_current_user')
    def test_concurrent_performance_requests(self, mock_user, client):
        """Test handling of concurrent performance requests."""
        mock_user.return_value = {"user_id": "test_user"}

        # This would test actual concurrency in a real implementation
        # For now, just verify endpoints are accessible
        response = client.get("/api/performance/memory/stats")
        assert response.status_code in [200, 404]


if __name__ == "__main__":
    pytest.main([__file__, "-v"])
@ -1,514 +0,0 @@
"""
Integration tests for user preferences and UI settings API endpoints.

This module tests the user preferences endpoints for theme management, language selection,
accessibility settings, keyboard shortcuts, and UI density configurations.
"""

from unittest.mock import patch

import pytest
from fastapi.testclient import TestClient

from src.server.fastapi_app import app


@pytest.fixture
def client():
    """Create a test client for the FastAPI application."""
    return TestClient(app)


@pytest.fixture
def auth_headers(client):
    """Provide authentication headers for protected endpoints."""
    # Login to get token
    login_data = {"password": "testpassword"}

    with patch('src.server.fastapi_app.settings.master_password_hash') as mock_hash:
        mock_hash.return_value = "5e884898da28047151d0e56f8dc6292773603d0d6aabbdd62a11ef721d1542d8"  # 'password' hash
        response = client.post("/auth/login", json=login_data)

    if response.status_code == 200:
        token = response.json()["access_token"]
        return {"Authorization": f"Bearer {token}"}
    return {}


class TestThemeManagement:
    """Test cases for theme management endpoints."""

    def test_get_themes_requires_auth(self, client):
        """Test that getting themes requires authentication."""
        response = client.get("/api/preferences/themes")
        assert response.status_code == 401

    @patch('src.server.fastapi_app.get_current_user')
    def test_get_available_themes(self, mock_user, client):
        """Test getting available themes."""
        mock_user.return_value = {"user_id": "test_user"}

        response = client.get("/api/preferences/themes")
        # Expected 404 since endpoint not implemented yet
        assert response.status_code in [200, 404]

        if response.status_code == 200:
            data = response.json()
            assert "themes" in data
            assert isinstance(data["themes"], list)
            # Should include at least light and dark themes
            theme_names = [theme["name"] for theme in data["themes"]]
            assert "light" in theme_names or "dark" in theme_names

    @patch('src.server.fastapi_app.get_current_user')
    def test_get_current_theme(self, mock_user, client):
        """Test getting current theme."""
        mock_user.return_value = {"user_id": "test_user"}

        response = client.get("/api/preferences/themes/current")
        assert response.status_code in [200, 404]

        if response.status_code == 200:
            data = response.json()
            assert "theme" in data
            assert "name" in data["theme"]
            assert "colors" in data["theme"]

    @patch('src.server.fastapi_app.get_current_user')
    def test_set_theme(self, mock_user, client):
        """Test setting user theme."""
        mock_user.return_value = {"user_id": "test_user"}

        theme_data = {
            "theme_name": "dark",
            "custom_colors": {
                "primary": "#007acc",
                "secondary": "#6c757d",
                "background": "#1a1a1a"
            }
        }

        response = client.post("/api/preferences/themes/set", json=theme_data)
        assert response.status_code in [200, 404]

        if response.status_code == 200:
            data = response.json()
            assert "status" in data
            assert data["status"] == "success"

    @patch('src.server.fastapi_app.get_current_user')
    def test_create_custom_theme(self, mock_user, client):
        """Test creating custom theme."""
        mock_user.return_value = {"user_id": "test_user"}

        custom_theme = {
            "name": "my_custom_theme",
            "display_name": "My Custom Theme",
            "colors": {
                "primary": "#ff6b6b",
                "secondary": "#4ecdc4",
                "background": "#2c3e50",
                "text": "#ecf0f1",
                "accent": "#e74c3c"
            },
            "is_dark": True
        }

        response = client.post("/api/preferences/themes/custom", json=custom_theme)
        assert response.status_code in [201, 404]

        if response.status_code == 201:
            data = response.json()
            assert "theme_id" in data
            assert "name" in data

    def test_set_invalid_theme(self, client, auth_headers):
        """Test setting invalid theme."""
        invalid_data = {"theme_name": "nonexistent_theme"}

        response = client.post("/api/preferences/themes/set", json=invalid_data, headers=auth_headers)
        assert response.status_code in [400, 404, 422]


class TestLanguageSelection:
    """Test cases for language selection endpoints."""

    def test_get_languages_requires_auth(self, client):
        """Test that getting languages requires authentication."""
        response = client.get("/api/preferences/languages")
        assert response.status_code == 401

    @patch('src.server.fastapi_app.get_current_user')
    def test_get_available_languages(self, mock_user, client):
        """Test getting available languages."""
        mock_user.return_value = {"user_id": "test_user"}

        response = client.get("/api/preferences/languages")
        assert response.status_code in [200, 404]

        if response.status_code == 200:
            data = response.json()
            assert "languages" in data
            assert isinstance(data["languages"], list)
            # Should include at least English
            language_codes = [lang["code"] for lang in data["languages"]]
            assert "en" in language_codes

    @patch('src.server.fastapi_app.get_current_user')
    def test_get_current_language(self, mock_user, client):
        """Test getting current language."""
        mock_user.return_value = {"user_id": "test_user"}

        response = client.get("/api/preferences/languages/current")
        assert response.status_code in [200, 404]

        if response.status_code == 200:
            data = response.json()
            assert "language" in data
            assert "code" in data["language"]
            assert "name" in data["language"]

    @patch('src.server.fastapi_app.get_current_user')
    def test_set_language(self, mock_user, client):
        """Test setting user language."""
        mock_user.return_value = {"user_id": "test_user"}

        language_data = {"language_code": "de"}

        response = client.post("/api/preferences/languages/set", json=language_data)
        assert response.status_code in [200, 404]

        if response.status_code == 200:
            data = response.json()
            assert "status" in data
            assert "language" in data

    def test_set_invalid_language(self, client, auth_headers):
        """Test setting invalid language."""
        invalid_data = {"language_code": "invalid_lang"}

        response = client.post("/api/preferences/languages/set", json=invalid_data, headers=auth_headers)
        assert response.status_code in [400, 404, 422]


class TestAccessibilitySettings:
    """Test cases for accessibility settings endpoints."""

    def test_get_accessibility_requires_auth(self, client):
        """Test that getting accessibility settings requires authentication."""
        response = client.get("/api/preferences/accessibility")
        assert response.status_code == 401

    @patch('src.server.fastapi_app.get_current_user')
    def test_get_accessibility_settings(self, mock_user, client):
        """Test getting accessibility settings."""
        mock_user.return_value = {"user_id": "test_user"}

        response = client.get("/api/preferences/accessibility")
        assert response.status_code in [200, 404]

        if response.status_code == 200:
            data = response.json()
            expected_fields = [
                "high_contrast", "large_text", "reduced_motion",
                "screen_reader_support", "keyboard_navigation"
            ]
            for field in expected_fields:
                assert field in data

    @patch('src.server.fastapi_app.get_current_user')
    def test_update_accessibility_settings(self, mock_user, client):
        """Test updating accessibility settings."""
        mock_user.return_value = {"user_id": "test_user"}

        accessibility_data = {
            "high_contrast": True,
            "large_text": True,
            "reduced_motion": False,
            "screen_reader_support": True,
            "keyboard_navigation": True,
            "font_size_multiplier": 1.2
        }

        response = client.put("/api/preferences/accessibility", json=accessibility_data)
        assert response.status_code in [200, 404]

        if response.status_code == 200:
            data = response.json()
            assert "status" in data
            assert "updated_settings" in data

    @patch('src.server.fastapi_app.get_current_user')
    def test_reset_accessibility_settings(self, mock_user, client):
        """Test resetting accessibility settings to defaults."""
        mock_user.return_value = {"user_id": "test_user"}

        response = client.post("/api/preferences/accessibility/reset")
        assert response.status_code in [200, 404]

        if response.status_code == 200:
            data = response.json()
            assert "status" in data
            assert data["status"] == "reset"


class TestKeyboardShortcuts:
    """Test cases for keyboard shortcuts endpoints."""

    def test_get_shortcuts_requires_auth(self, client):
        """Test that getting shortcuts requires authentication."""
        response = client.get("/api/preferences/shortcuts")
        assert response.status_code == 401

    @patch('src.server.fastapi_app.get_current_user')
    def test_get_keyboard_shortcuts(self, mock_user, client):
        """Test getting keyboard shortcuts."""
        mock_user.return_value = {"user_id": "test_user"}

        response = client.get("/api/preferences/shortcuts")
        assert response.status_code in [200, 404]

        if response.status_code == 200:
            data = response.json()
            assert "shortcuts" in data
            assert isinstance(data["shortcuts"], dict)

    @patch('src.server.fastapi_app.get_current_user')
    def test_update_keyboard_shortcut(self, mock_user, client):
        """Test updating keyboard shortcut."""
        mock_user.return_value = {"user_id": "test_user"}

        shortcut_data = {
            "action": "search",
            "shortcut": "Ctrl+K",
            "description": "Open search"
        }

        response = client.put("/api/preferences/shortcuts", json=shortcut_data)
        assert response.status_code in [200, 404]

        if response.status_code == 200:
            data = response.json()
            assert "status" in data
            assert "shortcut" in data

    @patch('src.server.fastapi_app.get_current_user')
    def test_reset_shortcuts_to_default(self, mock_user, client):
        """Test resetting shortcuts to default."""
        mock_user.return_value = {"user_id": "test_user"}

        response = client.post("/api/preferences/shortcuts/reset")
        assert response.status_code in [200, 404]

    def test_invalid_shortcut_format(self, client, auth_headers):
        """Test updating shortcut with invalid format."""
        invalid_data = {
            "action": "search",
            "shortcut": "InvalidKey++"
        }

        response = client.put("/api/preferences/shortcuts", json=invalid_data, headers=auth_headers)
        assert response.status_code in [400, 404, 422]


class TestUIDensitySettings:
    """Test cases for UI density and view settings endpoints."""

    def test_get_ui_settings_requires_auth(self, client):
        """Test that getting UI settings requires authentication."""
        response = client.get("/api/preferences/ui")
        assert response.status_code == 401

    @patch('src.server.fastapi_app.get_current_user')
    def test_get_ui_density_settings(self, mock_user, client):
        """Test getting UI density settings."""
        mock_user.return_value = {"user_id": "test_user"}

        response = client.get("/api/preferences/ui")
        assert response.status_code in [200, 404]

        if response.status_code == 200:
            data = response.json()
            expected_fields = [
                "density", "view_mode", "grid_columns",
                "show_thumbnails", "compact_mode"
            ]
            for field in expected_fields:
                assert field in data

    @patch('src.server.fastapi_app.get_current_user')
    def test_set_view_mode(self, mock_user, client):
        """Test setting view mode (grid/list)."""
        mock_user.return_value = {"user_id": "test_user"}

        view_data = {
            "view_mode": "grid",
            "grid_columns": 4,
            "show_thumbnails": True
        }

        response = client.post("/api/preferences/ui/view-mode", json=view_data)
        assert response.status_code in [200, 404]

        if response.status_code == 200:
            data = response.json()
            assert "status" in data
            assert "view_mode" in data

    @patch('src.server.fastapi_app.get_current_user')
    def test_set_ui_density(self, mock_user, client):
        """Test setting UI density."""
        mock_user.return_value = {"user_id": "test_user"}

        density_data = {
            "density": "comfortable",  # compact, comfortable, spacious
            "compact_mode": False
        }

        response = client.post("/api/preferences/ui/density", json=density_data)
        assert response.status_code in [200, 404]

    @patch('src.server.fastapi_app.get_current_user')
    def test_update_grid_settings(self, mock_user, client):
        """Test updating grid view settings."""
        mock_user.return_value = {"user_id": "test_user"}

        grid_data = {
            "columns": 6,
            "thumbnail_size": "medium",
            "show_titles": True,
            "show_episode_count": True
        }

        response = client.put("/api/preferences/ui/grid", json=grid_data)
        assert response.status_code in [200, 404]

    def test_invalid_view_mode(self, client, auth_headers):
        """Test setting invalid view mode."""
        invalid_data = {"view_mode": "invalid_mode"}

        response = client.post("/api/preferences/ui/view-mode", json=invalid_data, headers=auth_headers)
        assert response.status_code in [400, 404, 422]


class TestPreferencesIntegration:
    """Integration tests for preferences functionality."""

    @patch('src.server.fastapi_app.get_current_user')
    def test_get_all_preferences(self, mock_user, client):
        """Test getting all user preferences."""
        mock_user.return_value = {"user_id": "test_user"}

        response = client.get("/api/preferences")
        assert response.status_code in [200, 404]

        if response.status_code == 200:
            data = response.json()
            expected_sections = [
                "theme", "language", "accessibility",
                "shortcuts", "ui_settings"
            ]
            for section in expected_sections:
                assert section in data

    @patch('src.server.fastapi_app.get_current_user')
    def test_bulk_update_preferences(self, mock_user, client):
        """Test bulk updating multiple preferences."""
        mock_user.return_value = {"user_id": "test_user"}
|
|
||||||
|
|
||||||
bulk_data = {
|
|
||||||
"theme": {"name": "dark"},
|
|
||||||
"language": {"code": "en"},
|
|
||||||
"accessibility": {"high_contrast": True},
|
|
||||||
"ui_settings": {"view_mode": "list", "density": "compact"}
|
|
||||||
}
|
|
||||||
|
|
||||||
response = client.put("/api/preferences", json=bulk_data)
|
|
||||||
assert response.status_code in [200, 404]
|
|
||||||
|
|
||||||
if response.status_code == 200:
|
|
||||||
data = response.json()
|
|
||||||
assert "status" in data
|
|
||||||
assert "updated_sections" in data
|
|
||||||
|
|
||||||
@patch('src.server.fastapi_app.get_current_user')
|
|
||||||
def test_export_preferences(self, mock_user, client):
|
|
||||||
"""Test exporting user preferences."""
|
|
||||||
mock_user.return_value = {"user_id": "test_user"}
|
|
||||||
|
|
||||||
response = client.get("/api/preferences/export")
|
|
||||||
assert response.status_code in [200, 404]
|
|
||||||
|
|
||||||
if response.status_code == 200:
|
|
||||||
# Should return JSON or file download
|
|
||||||
assert response.headers.get("content-type") in [
|
|
||||||
"application/json",
|
|
||||||
"application/octet-stream"
|
|
||||||
]
|
|
||||||
|
|
||||||
@patch('src.server.fastapi_app.get_current_user')
|
|
||||||
def test_import_preferences(self, mock_user, client):
|
|
||||||
"""Test importing user preferences."""
|
|
||||||
mock_user.return_value = {"user_id": "test_user"}
|
|
||||||
|
|
||||||
import_data = {
|
|
||||||
"theme": {"name": "light"},
|
|
||||||
"language": {"code": "de"},
|
|
||||||
"ui_settings": {"view_mode": "grid"}
|
|
||||||
}
|
|
||||||
|
|
||||||
response = client.post("/api/preferences/import", json=import_data)
|
|
||||||
assert response.status_code in [200, 404]
|
|
||||||
|
|
||||||
@patch('src.server.fastapi_app.get_current_user')
|
|
||||||
def test_reset_all_preferences(self, mock_user, client):
|
|
||||||
"""Test resetting all preferences to defaults."""
|
|
||||||
mock_user.return_value = {"user_id": "test_user"}
|
|
||||||
|
|
||||||
response = client.post("/api/preferences/reset")
|
|
||||||
assert response.status_code in [200, 404]
|
|
||||||
|
|
||||||
if response.status_code == 200:
|
|
||||||
data = response.json()
|
|
||||||
assert "status" in data
|
|
||||||
assert data["status"] == "reset"
|
|
||||||
|
|
||||||
|
|
||||||
class TestPreferencesValidation:
|
|
||||||
"""Test cases for preferences validation."""
|
|
||||||
|
|
||||||
def test_theme_validation(self, client, auth_headers):
|
|
||||||
"""Test theme data validation."""
|
|
||||||
invalid_theme_data = {
|
|
||||||
"colors": {
|
|
||||||
"primary": "not_a_color", # Invalid color format
|
|
||||||
"background": "#xyz" # Invalid hex color
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
response = client.post("/api/preferences/themes/custom", json=invalid_theme_data, headers=auth_headers)
|
|
||||||
assert response.status_code in [400, 404, 422]
|
|
||||||
|
|
||||||
def test_accessibility_validation(self, client, auth_headers):
|
|
||||||
"""Test accessibility settings validation."""
|
|
||||||
invalid_accessibility_data = {
|
|
||||||
"font_size_multiplier": -1, # Invalid value
|
|
||||||
"high_contrast": "not_boolean" # Invalid type
|
|
||||||
}
|
|
||||||
|
|
||||||
response = client.put("/api/preferences/accessibility", json=invalid_accessibility_data, headers=auth_headers)
|
|
||||||
assert response.status_code in [400, 404, 422]
|
|
||||||
|
|
||||||
def test_ui_settings_validation(self, client, auth_headers):
|
|
||||||
"""Test UI settings validation."""
|
|
||||||
invalid_ui_data = {
|
|
||||||
"grid_columns": 0, # Invalid value
|
|
||||||
"density": "invalid_density" # Invalid enum value
|
|
||||||
}
|
|
||||||
|
|
||||||
response = client.post("/api/preferences/ui/density", json=invalid_ui_data, headers=auth_headers)
|
|
||||||
assert response.status_code in [400, 404, 422]
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
pytest.main([__file__, "-v"])
|
|
||||||
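The tests above lean on `client` and `auth_headers` fixtures that are defined elsewhere, presumably in a shared conftest.py that is not part of this diff. A minimal sketch of what such fixtures might look like, assuming FastAPI's TestClient; the fixture names match the tests, everything else is illustrative:

# conftest.py (sketch, not part of this diff)
import pytest
from fastapi.testclient import TestClient

from src.server.fastapi_app import app


@pytest.fixture
def client():
    # One client per test; requests run against the app in-process.
    return TestClient(app)


@pytest.fixture
def auth_headers():
    # Hypothetical helper: a real conftest would log in or mint a JWT here.
    return {"Authorization": "Bearer test_token"}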
@ -1,3 +0,0 @@
from src.server.web.middleware.fastapi_auth_middleware_new import AuthMiddleware

print("Success importing AuthMiddleware")
@ -1,378 +0,0 @@
"""
Test application flow and setup functionality.

Tests for the application flow enforcement: setup → auth → main application.
"""

import os
import sys
from unittest.mock import patch

import pytest
from fastapi.testclient import TestClient

# Add parent directories to path for imports
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '../..'))

from src.server.fastapi_app import app
from src.server.services.setup_service import SetupService


class TestApplicationFlow:
    """Test cases for application flow enforcement."""

    def setup_method(self):
        """Set up the test environment before each test."""
        self.client = TestClient(app, follow_redirects=False)
        self.test_config_path = "test_config.json"
        self.test_db_path = "test_db.db"

    def teardown_method(self):
        """Clean up after each test."""
        # Remove test files
        for path in [self.test_config_path, self.test_db_path]:
            if os.path.exists(path):
                os.unlink(path)

    def test_setup_page_displayed_when_configuration_missing(self):
        """Test that the setup page is displayed when configuration is missing."""
        with patch.object(SetupService, 'is_setup_complete', return_value=False):
            response = self.client.get("/")
            assert response.status_code == 302
            assert response.headers["location"] == "/setup"

    def test_setup_page_form_submission_creates_valid_configuration(self):
        """Test that setup form submission creates a valid configuration."""
        setup_data = {
            "password": "test_password_123",
            "directory": "/test/anime/directory"
        }

        with patch.object(SetupService, 'is_setup_complete', return_value=False), \
             patch.object(SetupService, 'mark_setup_complete', return_value=True), \
             patch('pathlib.Path.mkdir'), \
             patch('pathlib.Path.is_absolute', return_value=True):

            response = self.client.post("/api/auth/setup", json=setup_data)
            assert response.status_code == 200

            data = response.json()
            assert data["status"] == "success"
            assert data["message"] == "Setup completed successfully"
            assert data["redirect_url"] == "/login"

    def test_setup_page_redirects_to_auth_after_successful_setup(self):
        """Test that the setup page redirects to the auth page after successful setup."""
        setup_data = {
            "password": "test_password_123",
            "directory": "/test/anime/directory"
        }

        with patch.object(SetupService, 'is_setup_complete', return_value=False), \
             patch.object(SetupService, 'mark_setup_complete', return_value=True), \
             patch('pathlib.Path.mkdir'), \
             patch('pathlib.Path.is_absolute', return_value=True):

            response = self.client.post("/api/auth/setup", json=setup_data)
            data = response.json()
            assert data["redirect_url"] == "/login"

    def test_setup_page_validation_for_required_fields(self):
        """Test that the setup endpoint validates required fields."""
        # Test missing password
        response = self.client.post("/api/auth/setup", json={"directory": "/test"})
        assert response.status_code == 422  # Validation error

        # Test missing directory
        response = self.client.post("/api/auth/setup", json={"password": "test123"})
        assert response.status_code == 422  # Validation error

        # Test password too short
        response = self.client.post("/api/auth/setup", json={
            "password": "short",
            "directory": "/test"
        })
        assert response.status_code == 422  # Validation error

    def test_setup_page_handles_database_connection_errors_gracefully(self):
        """Test that the setup endpoint handles database connection errors gracefully."""
        setup_data = {
            "password": "test_password_123",
            "directory": "/test/anime/directory"
        }

        with patch.object(SetupService, 'is_setup_complete', return_value=False), \
             patch.object(SetupService, 'mark_setup_complete', return_value=False), \
             patch('pathlib.Path.mkdir'), \
             patch('pathlib.Path.is_absolute', return_value=True):

            response = self.client.post("/api/auth/setup", json=setup_data)
            assert response.status_code == 200

            data = response.json()
            assert data["status"] == "error"
            assert "Failed to save configuration" in data["message"]

    def test_setup_completion_flag_properly_set(self):
        """Test that the setup completion flag is properly set in the configuration."""
        service = SetupService("test_config.json", "test_db.db")

        # Create mock config data
        config_data = {"test": "data"}

        with patch.object(service, 'get_config', return_value=config_data), \
             patch.object(service, '_save_config', return_value=True) as mock_save:

            result = service.mark_setup_complete()
            assert result is True

            # Verify save was called with setup completion data
            mock_save.assert_called_once()
            saved_config = mock_save.call_args[0][0]
            assert saved_config["setup"]["completed"] is True
            assert "completed_at" in saved_config["setup"]


class TestAuthenticationFlow:
    """Test cases for the authentication flow."""

    def setup_method(self):
        """Set up the test environment before each test."""
        self.client = TestClient(app, follow_redirects=False)

    def test_auth_page_displayed_when_token_invalid(self):
        """Test that the auth page is displayed when the authentication token is invalid."""
        with patch.object(SetupService, 'is_setup_complete', return_value=True):
            # Request with an invalid token
            headers = {"Authorization": "Bearer invalid_token"}
            response = self.client.get("/app", headers=headers)
            # Should redirect to login due to the invalid token
            assert response.status_code == 302
            assert response.headers["location"] == "/login"

    def test_auth_page_displayed_when_token_missing(self):
        """Test that the auth page is displayed when the authentication token is missing."""
        with patch.object(SetupService, 'is_setup_complete', return_value=True):
            response = self.client.get("/app")
            # Should redirect to login due to the missing token
            assert response.status_code == 302
            assert response.headers["location"] == "/login"

    def test_successful_login_creates_valid_token(self):
        """Test that a successful login creates a valid authentication token."""
        login_data = {"password": "test_password"}

        with patch('src.server.fastapi_app.verify_master_password', return_value=True):
            response = self.client.post("/auth/login", json=login_data)
            assert response.status_code == 200

            data = response.json()
            assert data["success"] is True
            assert "token" in data
            assert data["token"] is not None
            assert "expires_at" in data

    def test_failed_login_shows_error_message(self):
        """Test that a failed login returns an appropriate error message."""
        login_data = {"password": "wrong_password"}

        with patch('src.server.fastapi_app.verify_master_password', return_value=False):
            response = self.client.post("/auth/login", json=login_data)
            assert response.status_code == 401

            data = response.json()
            assert "Invalid master password" in data["detail"]

    def test_auth_page_redirects_to_main_after_authentication(self):
        """Test that the auth page redirects to the main application after successful authentication."""
        with patch.object(SetupService, 'is_setup_complete', return_value=True):
            # Simulate an authenticated request
            with patch('src.server.fastapi_app.verify_jwt_token') as mock_verify:
                mock_verify.return_value = {"user": "master", "exp": 9999999999}

                response = self.client.get("/login", headers={"Authorization": "Bearer valid_token"})
                assert response.status_code == 302
                assert response.headers["location"] == "/app"

    def test_token_validation_middleware_correctly_identifies_tokens(self):
        """Test that the token validation middleware correctly identifies valid and invalid tokens."""
        # Test a valid token
        with patch('src.server.fastapi_app.verify_jwt_token') as mock_verify:
            mock_verify.return_value = {"user": "master", "exp": 9999999999}

            response = self.client.get("/auth/verify", headers={"Authorization": "Bearer valid_token"})
            assert response.status_code == 200

            data = response.json()
            assert data["valid"] is True
            assert data["user"] == "master"

        # Test an invalid token
        with patch('src.server.fastapi_app.verify_jwt_token') as mock_verify:
            mock_verify.return_value = None

            response = self.client.get("/auth/verify", headers={"Authorization": "Bearer invalid_token"})
            assert response.status_code == 401

    def test_token_refresh_functionality(self):
        """Test token refresh functionality."""
        # This would test automatic token refresh if implemented.
        # For now, just test that tokens carry an expiration.
        login_data = {"password": "test_password"}

        with patch('src.server.fastapi_app.verify_master_password', return_value=True):
            response = self.client.post("/auth/login", json=login_data)
            data = response.json()

            assert "expires_at" in data
            assert data["expires_at"] is not None

    def test_session_expiration_handling(self):
        """Test session expiration handling."""
        # Test with an expired token
        with patch('src.server.fastapi_app.verify_jwt_token') as mock_verify:
            mock_verify.return_value = None  # Simulates an expired token

            response = self.client.get("/auth/verify", headers={"Authorization": "Bearer expired_token"})
            assert response.status_code == 401


class TestMainApplicationAccess:
    """Test cases for main application access."""

    def setup_method(self):
        """Set up the test environment before each test."""
        self.client = TestClient(app, follow_redirects=False)

    def test_index_served_when_authentication_valid(self):
        """Test that index.html is served when authentication is valid."""
        with patch.object(SetupService, 'is_setup_complete', return_value=True), \
             patch('src.server.fastapi_app.verify_jwt_token') as mock_verify:

            mock_verify.return_value = {"user": "master", "exp": 9999999999}

            response = self.client.get("/app", headers={"Authorization": "Bearer valid_token"})
            assert response.status_code == 200
            assert "text/html" in response.headers.get("content-type", "")

    def test_unauthenticated_users_redirected_to_auth(self):
        """Test that unauthenticated users are redirected to the auth page."""
        with patch.object(SetupService, 'is_setup_complete', return_value=True):
            response = self.client.get("/app")
            assert response.status_code == 302
            assert response.headers["location"] == "/login"

    def test_users_without_setup_redirected_to_setup(self):
        """Test that users without completed setup are redirected to the setup page."""
        with patch.object(SetupService, 'is_setup_complete', return_value=False):
            response = self.client.get("/app")
            assert response.status_code == 302
            assert response.headers["location"] == "/setup"

    def test_middleware_enforces_correct_flow_priority(self):
        """Test that the middleware enforces the correct flow priority (setup → auth → main)."""
        # Setup takes priority over auth
        with patch.object(SetupService, 'is_setup_complete', return_value=False):
            response = self.client.get("/app", headers={"Authorization": "Bearer valid_token"})
            assert response.status_code == 302
            assert response.headers["location"] == "/setup"

        # Auth is required when setup is complete but the user is not authenticated
        with patch.object(SetupService, 'is_setup_complete', return_value=True):
            response = self.client.get("/app")
            assert response.status_code == 302
            assert response.headers["location"] == "/login"

    def test_authenticated_user_session_persistence(self):
        """Test authenticated user session persistence."""
        with patch.object(SetupService, 'is_setup_complete', return_value=True), \
             patch('src.server.fastapi_app.verify_jwt_token') as mock_verify:

            mock_verify.return_value = {"user": "master", "exp": 9999999999}

            # Multiple requests with the same token should work
            headers = {"Authorization": "Bearer valid_token"}

            response1 = self.client.get("/app", headers=headers)
            assert response1.status_code == 200

            response2 = self.client.get("/app", headers=headers)
            assert response2.status_code == 200

    def test_graceful_token_expiration_during_session(self):
        """Test graceful handling of token expiration during an active session."""
        with patch.object(SetupService, 'is_setup_complete', return_value=True), \
             patch('src.server.fastapi_app.verify_jwt_token') as mock_verify:

            # First request with a valid token
            mock_verify.return_value = {"user": "master", "exp": 9999999999}
            response1 = self.client.get("/app", headers={"Authorization": "Bearer valid_token"})
            assert response1.status_code == 200

            # Second request with an expired token
            mock_verify.return_value = None
            response2 = self.client.get("/app", headers={"Authorization": "Bearer expired_token"})
            assert response2.status_code == 302
            assert response2.headers["location"] == "/login"


class TestSetupStatusAPI:
    """Test cases for the setup status API."""

    def setup_method(self):
        """Set up the test environment before each test."""
        self.client = TestClient(app, follow_redirects=False)

    def test_setup_status_api_returns_correct_status(self):
        """Test that the setup status API returns correct status information."""
        with patch.object(SetupService, 'is_setup_complete', return_value=True), \
             patch.object(SetupService, 'get_setup_requirements') as mock_requirements, \
             patch.object(SetupService, 'get_missing_requirements') as mock_missing:

            mock_requirements.return_value = {
                "config_file_exists": True,
                "config_file_valid": True,
                "database_exists": True,
                "database_accessible": True,
                "master_password_configured": True,
                "setup_marked_complete": True
            }
            mock_missing.return_value = []

            response = self.client.get("/api/auth/setup/status")
            assert response.status_code == 200

            data = response.json()
            assert data["setup_complete"] is True
            assert data["requirements"]["config_file_exists"] is True
            assert len(data["missing_requirements"]) == 0

    def test_setup_status_shows_missing_requirements(self):
        """Test that the setup status shows missing requirements correctly."""
        with patch.object(SetupService, 'is_setup_complete', return_value=False), \
             patch.object(SetupService, 'get_setup_requirements') as mock_requirements, \
             patch.object(SetupService, 'get_missing_requirements') as mock_missing:

            mock_requirements.return_value = {
                "config_file_exists": False,
                "master_password_configured": False
            }
            mock_missing.return_value = [
                "Configuration file is missing",
                "Master password is not configured"
            ]

            response = self.client.get("/api/auth/setup/status")
            assert response.status_code == 200

            data = response.json()
            assert data["setup_complete"] is False
            assert "Configuration file is missing" in data["missing_requirements"]
            assert "Master password is not configured" in data["missing_requirements"]


if __name__ == "__main__":
    pytest.main([__file__, "-v"])
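These flow tests assume a middleware that checks setup state first and token validity second, redirecting accordingly. The application's real middleware is not part of this diff; the following is only a sketch of that enforcement order under those assumptions, using Starlette's BaseHTTPMiddleware. The class name, the global setup_service, and the verify_jwt_token helper are illustrative stand-ins:

# Sketch only — the actual middleware lives in the app, not in this diff.
from starlette.middleware.base import BaseHTTPMiddleware
from starlette.requests import Request
from starlette.responses import RedirectResponse


class FlowEnforcementMiddleware(BaseHTTPMiddleware):
    """Redirect order: setup → auth → main (hypothetical reconstruction)."""

    async def dispatch(self, request: Request, call_next):
        # Setup state wins over everything, matching
        # test_middleware_enforces_correct_flow_priority.
        if not setup_service.is_setup_complete():  # assumed global service
            if request.url.path not in ("/setup", "/api/auth/setup"):
                return RedirectResponse("/setup", status_code=302)
        elif request.url.path == "/app":
            auth = request.headers.get("Authorization", "")
            token = auth.removeprefix("Bearer ").strip()
            if not token or verify_jwt_token(token) is None:  # assumed helper
                return RedirectResponse("/login", status_code=302)
        return await call_next(request)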
@ -1,14 +0,0 @@
$loginResponse = Invoke-WebRequest -Uri "http://127.0.0.1:8000/auth/login" -Method POST -ContentType "application/json" -Body '{"password": "admin123"}'
$loginData = $loginResponse.Content | ConvertFrom-Json
$token = $loginData.token
Write-Host "Token: $token"

# Test the anime search with authentication
$headers = @{
    "Authorization" = "Bearer $token"
    "Content-Type" = "application/json"
}

$searchResponse = Invoke-WebRequest -Uri "http://127.0.0.1:8000/api/anime/search?query=naruto" -Headers $headers
Write-Host "Search Response:"
Write-Host $searchResponse.Content
@ -1,35 +0,0 @@
# Test complete authentication flow

# Step 1: Login
Write-Host "=== Testing Login ==="
$loginResponse = Invoke-WebRequest -Uri "http://127.0.0.1:8000/auth/login" -Method POST -ContentType "application/json" -Body '{"password": "admin123"}'
$loginData = $loginResponse.Content | ConvertFrom-Json
$token = $loginData.token
Write-Host "Login successful. Token received: $($token.Substring(0,20))..."

# Step 2: Verify token
Write-Host "`n=== Testing Token Verification ==="
$headers = @{ "Authorization" = "Bearer $token" }
$verifyResponse = Invoke-WebRequest -Uri "http://127.0.0.1:8000/auth/verify" -Headers $headers
Write-Host "Token verification response: $($verifyResponse.Content)"

# Step 3: Test protected endpoint
Write-Host "`n=== Testing Protected Endpoint ==="
$authStatusResponse = Invoke-WebRequest -Uri "http://127.0.0.1:8000/api/auth/status" -Headers $headers
Write-Host "Auth status response: $($authStatusResponse.Content)"

# Step 4: Logout
Write-Host "`n=== Testing Logout ==="
$logoutResponse = Invoke-WebRequest -Uri "http://127.0.0.1:8000/auth/logout" -Method POST -Headers $headers
Write-Host "Logout response: $($logoutResponse.Content)"

# Step 5: Test expired/invalid token
Write-Host "`n=== Testing Invalid Token ==="
try {
    $invalidResponse = Invoke-WebRequest -Uri "http://127.0.0.1:8000/auth/verify" -Headers @{ "Authorization" = "Bearer invalid_token" }
    Write-Host "Invalid token response: $($invalidResponse.Content)"
} catch {
    Write-Host "Invalid token correctly rejected: $($_.Exception.Message)"
}

Write-Host "`n=== Authentication Flow Test Complete ==="
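A note on step 5 above: the try/catch relies on Invoke-WebRequest raising a terminating error for non-2xx responses, which is its default behavior both in Windows PowerShell and in PowerShell 7+ (unless -SkipHttpErrorCheck is passed in 7+). The 401 from /auth/verify therefore lands in the catch block rather than in $invalidResponse.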
@ -1,17 +0,0 @@
# Test database connectivity

# Get token
$loginResponse = Invoke-WebRequest -Uri "http://127.0.0.1:8000/auth/login" -Method POST -ContentType "application/json" -Body '{"password": "admin123"}'
$loginData = $loginResponse.Content | ConvertFrom-Json
$token = $loginData.token

# Test database health
$headers = @{ "Authorization" = "Bearer $token" }
$dbHealthResponse = Invoke-WebRequest -Uri "http://127.0.0.1:8000/api/system/database/health" -Headers $headers
Write-Host "Database Health Response:"
Write-Host $dbHealthResponse.Content

# Test system config
$configResponse = Invoke-WebRequest -Uri "http://127.0.0.1:8000/api/system/config" -Headers $headers
Write-Host "`nSystem Config Response:"
Write-Host $configResponse.Content
@ -1,15 +0,0 @@
import os
import sys

# Add parent directory to path
sys.path.insert(0, os.path.abspath('.'))

try:
    from src.server.fastapi_app import app
    print("✓ FastAPI app imported successfully")
except Exception as e:
    print(f"✗ Error importing FastAPI app: {e}")
    import traceback
    traceback.print_exc()

print("Test completed.")
@ -1,22 +0,0 @@
#!/usr/bin/env python3
try:
    from src.server.web.middleware.fastapi_auth_middleware import AuthMiddleware
    print("Auth middleware imported successfully")
except Exception as e:
    print(f"Error importing auth middleware: {e}")

try:
    from src.server.web.middleware.fastapi_logging_middleware import (
        EnhancedLoggingMiddleware,
    )
    print("Logging middleware imported successfully")
except Exception as e:
    print(f"Error importing logging middleware: {e}")

try:
    from src.server.web.middleware.fastapi_validation_middleware import (
        ValidationMiddleware,
    )
    print("Validation middleware imported successfully")
except Exception as e:
    print(f"Error importing validation middleware: {e}")
@ -1,423 +0,0 @@
"""
Unit tests for anime search and filtering logic.

Tests search algorithms, filtering functions, sorting mechanisms,
and data processing for anime and episode management.
"""

import os
import sys

import pytest

# Add source directory to path
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..'))


@pytest.mark.unit
class TestAnimeSearchLogic:
    """Test anime search and filtering functionality."""

    def test_basic_text_search(self):
        """Test basic text search functionality."""
        def search_anime_by_title(anime_list, query):
            """Simple title search function."""
            if not query:
                return []

            query_lower = query.lower()
            return [
                anime for anime in anime_list
                if query_lower in anime.get("title", "").lower()
            ]

        # Test data
        anime_list = [
            {"id": "1", "title": "Attack on Titan", "genre": "Action"},
            {"id": "2", "title": "My Hero Academia", "genre": "Action"},
            {"id": "3", "title": "Demon Slayer", "genre": "Action"},
            {"id": "4", "title": "One Piece", "genre": "Adventure"}
        ]

        # Test exact match
        results = search_anime_by_title(anime_list, "Attack on Titan")
        assert len(results) == 1
        assert results[0]["title"] == "Attack on Titan"

        # Test partial match
        results = search_anime_by_title(anime_list, "attack")
        assert len(results) == 1

        # Test case insensitivity
        results = search_anime_by_title(anime_list, "ATTACK")
        assert len(results) == 1

        # Test multiple matches
        results = search_anime_by_title(anime_list, "a")
        assert len(results) >= 2  # Matches every title containing an "a"

        # Test no matches
        results = search_anime_by_title(anime_list, "Nonexistent")
        assert len(results) == 0

        # Test empty query
        results = search_anime_by_title(anime_list, "")
        assert len(results) == 0

    def test_advanced_search_with_filters(self):
        """Test advanced search with multiple filters."""
        def advanced_anime_search(anime_list, query="", genre=None, year=None, status=None):
            """Advanced search with multiple filters."""
            results = anime_list.copy()

            # Text search
            if query:
                query_lower = query.lower()
                results = [
                    anime for anime in results
                    if (query_lower in anime.get("title", "").lower() or
                        query_lower in anime.get("description", "").lower())
                ]

            # Genre filter
            if genre:
                results = [
                    anime for anime in results
                    if anime.get("genre", "").lower() == genre.lower()
                ]

            # Year filter
            if year:
                results = [
                    anime for anime in results
                    if anime.get("year") == year
                ]

            # Status filter
            if status:
                results = [
                    anime for anime in results
                    if anime.get("status", "").lower() == status.lower()
                ]

            return results

        # Test data
        anime_list = [
            {
                "id": "1",
                "title": "Attack on Titan",
                "description": "Humanity fights giants",
                "genre": "Action",
                "year": 2013,
                "status": "Completed"
            },
            {
                "id": "2",
                "title": "My Hero Academia",
                "description": "Superheroes in training",
                "genre": "Action",
                "year": 2016,
                "status": "Ongoing"
            },
            {
                "id": "3",
                "title": "Your Name",
                "description": "Body swapping romance",
                "genre": "Romance",
                "year": 2016,
                "status": "Completed"
            }
        ]

        # Test genre filter
        results = advanced_anime_search(anime_list, genre="Action")
        assert len(results) == 2

        # Test year filter
        results = advanced_anime_search(anime_list, year=2016)
        assert len(results) == 2

        # Test status filter
        results = advanced_anime_search(anime_list, status="Completed")
        assert len(results) == 2

        # Test combined filters
        results = advanced_anime_search(anime_list, genre="Action", status="Ongoing")
        assert len(results) == 1
        assert results[0]["title"] == "My Hero Academia"

        # Test text search in description
        results = advanced_anime_search(anime_list, query="giants")
        assert len(results) == 1
        assert results[0]["title"] == "Attack on Titan"

    def test_search_pagination(self):
        """Test search result pagination."""
        def paginate_results(results, limit=20, offset=0):
            """Paginate search results."""
            if limit <= 0:
                return []

            start = max(0, offset)
            end = start + limit

            return results[start:end]

        # Test data
        results = [{"id": str(i), "title": f"Anime {i}"} for i in range(100)]

        # Test normal pagination
        page_1 = paginate_results(results, limit=10, offset=0)
        assert len(page_1) == 10
        assert page_1[0]["id"] == "0"

        page_2 = paginate_results(results, limit=10, offset=10)
        assert len(page_2) == 10
        assert page_2[0]["id"] == "10"

        # Test edge cases
        last_page = paginate_results(results, limit=10, offset=95)
        assert len(last_page) == 5  # Only 5 items left

        beyond_results = paginate_results(results, limit=10, offset=200)
        assert len(beyond_results) == 0

        # Test invalid parameters
        invalid_limit = paginate_results(results, limit=0, offset=0)
        assert len(invalid_limit) == 0

        negative_offset = paginate_results(results, limit=10, offset=-5)
        assert len(negative_offset) == 10  # Should start from 0

    def test_search_sorting(self):
        """Test search result sorting."""
        def sort_anime_results(anime_list, sort_by="title", sort_order="asc"):
            """Sort anime results by different criteria."""
            if not anime_list:
                return []

            reverse = sort_order.lower() == "desc"

            if sort_by == "title":
                return sorted(anime_list, key=lambda x: x.get("title", "").lower(), reverse=reverse)
            elif sort_by == "year":
                return sorted(anime_list, key=lambda x: x.get("year", 0), reverse=reverse)
            elif sort_by == "episodes":
                return sorted(anime_list, key=lambda x: x.get("episodes", 0), reverse=reverse)
            elif sort_by == "rating":
                return sorted(anime_list, key=lambda x: x.get("rating", 0), reverse=reverse)
            else:
                return anime_list

        # Test data
        anime_list = [
            {"title": "Zorro", "year": 2020, "episodes": 12, "rating": 8.5},
            {"title": "Alpha", "year": 2018, "episodes": 24, "rating": 9.0},
            {"title": "Beta", "year": 2022, "episodes": 6, "rating": 7.5}
        ]

        # Test title sorting ascending
        sorted_results = sort_anime_results(anime_list, "title", "asc")
        titles = [anime["title"] for anime in sorted_results]
        assert titles == ["Alpha", "Beta", "Zorro"]

        # Test title sorting descending
        sorted_results = sort_anime_results(anime_list, "title", "desc")
        titles = [anime["title"] for anime in sorted_results]
        assert titles == ["Zorro", "Beta", "Alpha"]

        # Test year sorting
        sorted_results = sort_anime_results(anime_list, "year", "asc")
        years = [anime["year"] for anime in sorted_results]
        assert years == [2018, 2020, 2022]

        # Test episodes sorting
        sorted_results = sort_anime_results(anime_list, "episodes", "desc")
        episodes = [anime["episodes"] for anime in sorted_results]
        assert episodes == [24, 12, 6]

        # Test rating sorting
        sorted_results = sort_anime_results(anime_list, "rating", "desc")
        ratings = [anime["rating"] for anime in sorted_results]
        assert ratings == [9.0, 8.5, 7.5]


@pytest.mark.unit
class TestEpisodeFilteringLogic:
    """Test episode filtering and management logic."""

    def test_episode_filtering_by_status(self):
        """Test filtering episodes by watch status."""
        def filter_episodes_by_status(episodes, status):
            """Filter episodes by watch status."""
            if not status:
                return episodes

            return [ep for ep in episodes if ep.get("watch_status", "").lower() == status.lower()]

        episodes = [
            {"id": "1", "title": "Episode 1", "watch_status": "watched"},
            {"id": "2", "title": "Episode 2", "watch_status": "unwatched"},
            {"id": "3", "title": "Episode 3", "watch_status": "watching"},
            {"id": "4", "title": "Episode 4", "watch_status": "watched"}
        ]

        watched = filter_episodes_by_status(episodes, "watched")
        assert len(watched) == 2

        unwatched = filter_episodes_by_status(episodes, "unwatched")
        assert len(unwatched) == 1

        watching = filter_episodes_by_status(episodes, "watching")
        assert len(watching) == 1

    def test_episode_range_filtering(self):
        """Test filtering episodes by number range."""
        def filter_episodes_by_range(episodes, start_ep=None, end_ep=None):
            """Filter episodes by episode number range."""
            results = episodes.copy()

            if start_ep is not None:
                results = [ep for ep in results if ep.get("episode_number", 0) >= start_ep]

            if end_ep is not None:
                results = [ep for ep in results if ep.get("episode_number", 0) <= end_ep]

            return results

        episodes = [
            {"id": "1", "episode_number": 1, "title": "Episode 1"},
            {"id": "2", "episode_number": 5, "title": "Episode 5"},
            {"id": "3", "episode_number": 10, "title": "Episode 10"},
            {"id": "4", "episode_number": 15, "title": "Episode 15"},
            {"id": "5", "episode_number": 20, "title": "Episode 20"}
        ]

        # Test start of range
        results = filter_episodes_by_range(episodes, start_ep=10)
        assert len(results) == 3
        assert all(ep["episode_number"] >= 10 for ep in results)

        # Test end of range
        results = filter_episodes_by_range(episodes, end_ep=10)
        assert len(results) == 3
        assert all(ep["episode_number"] <= 10 for ep in results)

        # Test both start and end
        results = filter_episodes_by_range(episodes, start_ep=5, end_ep=15)
        assert len(results) == 3
        assert all(5 <= ep["episode_number"] <= 15 for ep in results)

    def test_missing_episodes_detection(self):
        """Test detection of missing episodes in a series."""
        def find_missing_episodes(episodes, expected_total):
            """Find missing episode numbers in a series."""
            episode_numbers = {ep.get("episode_number") for ep in episodes if ep.get("episode_number")}
            expected_numbers = set(range(1, expected_total + 1))
            missing = expected_numbers - episode_numbers
            return sorted(missing)

        # Test with some missing episodes
        episodes = [
            {"episode_number": 1}, {"episode_number": 3},
            {"episode_number": 5}, {"episode_number": 7}
        ]

        missing = find_missing_episodes(episodes, 10)
        assert missing == [2, 4, 6, 8, 9, 10]

        # Test with no missing episodes
        complete_episodes = [{"episode_number": i} for i in range(1, 6)]
        missing = find_missing_episodes(complete_episodes, 5)
        assert missing == []

        # Test with all episodes missing
        missing = find_missing_episodes([], 3)
        assert missing == [1, 2, 3]


@pytest.mark.unit
class TestSearchPerformance:
    """Test search performance and optimization."""

    def test_search_index_creation(self):
        """Test search index creation for performance."""
        def create_search_index(anime_list):
            """Create a search index for faster lookups."""
            index = {
                "by_title": {},
                "by_genre": {},
                "by_year": {}
            }

            for anime in anime_list:
                title = anime.get("title", "").lower()
                genre = anime.get("genre", "").lower()
                year = anime.get("year")

                # Index by title keywords
                for word in title.split():
                    if word not in index["by_title"]:
                        index["by_title"][word] = []
                    index["by_title"][word].append(anime)

                # Index by genre
                if genre:
                    if genre not in index["by_genre"]:
                        index["by_genre"][genre] = []
                    index["by_genre"][genre].append(anime)

                # Index by year
                if year:
                    if year not in index["by_year"]:
                        index["by_year"][year] = []
                    index["by_year"][year].append(anime)

            return index

        anime_list = [
            {"title": "Attack on Titan", "genre": "Action", "year": 2013},
            {"title": "My Hero Academia", "genre": "Action", "year": 2016},
            {"title": "Your Name", "genre": "Romance", "year": 2016}
        ]

        index = create_search_index(anime_list)

        # Test title index
        assert "attack" in index["by_title"]
        assert len(index["by_title"]["attack"]) == 1

        # Test genre index
        assert "action" in index["by_genre"]
        assert len(index["by_genre"]["action"]) == 2

        # Test year index
        assert 2016 in index["by_year"]
        assert len(index["by_year"][2016]) == 2

    def test_search_result_caching(self):
        """Test the search result caching mechanism."""
        def cached_search(query, cache={}):  # Mutable default used deliberately as a per-function cache
            """Simple search with caching."""
            if query in cache:
                return cache[query], True  # Cached result and cache-hit flag

            # Simulate an expensive search operation
            result = [{"id": "1", "title": f"Result for {query}"}]
            cache[query] = result
            return result, False  # New result and cache-miss flag

        # Test cache miss
        result, cache_hit = cached_search("test_query")
        assert not cache_hit
        assert len(result) == 1

        # Test cache hit
        result, cache_hit = cached_search("test_query")
        assert cache_hit
        assert len(result) == 1

        # Test a different query
        result, cache_hit = cached_search("another_query")
        assert not cache_hit
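The mutable default argument in cached_search above is the classic hand-rolled memoization trick: the default dict is created once and persists across calls. In production code, functools.lru_cache is the more idiomatic way to get the same effect with bounded memory. A minimal sketch; the function name and result shape are illustrative, not from this repo:

from functools import lru_cache


@lru_cache(maxsize=128)  # Evicts least-recently-used entries beyond 128 distinct queries
def search(query: str) -> tuple:
    # Simulated expensive lookup. Arguments must be hashable for lru_cache;
    # returning an immutable tuple also guards against callers mutating
    # the cached result in place.
    return ({"id": "1", "title": f"Result for {query}"},)


search("naruto")            # cache miss: computed
search("naruto")            # cache hit: served from the cache
print(search.cache_info())  # e.g. CacheInfo(hits=1, misses=1, maxsize=128, currsize=1)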
@ -1,270 +0,0 @@
|
|||||||
"""
|
|
||||||
Unit tests for authentication and security functionality.
|
|
||||||
|
|
||||||
Tests password hashing, JWT creation/validation, session timeout logic,
|
|
||||||
and secure environment variable management.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import hashlib
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
from datetime import datetime, timedelta
|
|
||||||
from unittest.mock import patch
|
|
||||||
|
|
||||||
import jwt
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
# Add source directory to path
|
|
||||||
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..'))
|
|
||||||
|
|
||||||
from src.server.fastapi_app import (
|
|
||||||
Settings,
|
|
||||||
generate_jwt_token,
|
|
||||||
hash_password,
|
|
||||||
verify_jwt_token,
|
|
||||||
verify_master_password,
|
|
||||||
)
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.unit
|
|
||||||
class TestPasswordHashing:
|
|
||||||
"""Test password hashing functionality."""
|
|
||||||
|
|
||||||
def test_hash_password_with_salt(self, mock_settings):
|
|
||||||
"""Test password hashing with salt."""
|
|
||||||
with patch('src.server.fastapi_app.settings', mock_settings):
|
|
||||||
password = "test_password"
|
|
||||||
expected_hash = hashlib.sha256(
|
|
||||||
(password + mock_settings.password_salt).encode()
|
|
||||||
).hexdigest()
|
|
||||||
|
|
||||||
result = hash_password(password)
|
|
||||||
|
|
||||||
assert result == expected_hash
|
|
||||||
assert len(result) == 64 # SHA-256 produces 64 character hex string
|
|
||||||
|
|
||||||
def test_hash_password_different_inputs(self, mock_settings):
|
|
||||||
"""Test that different passwords produce different hashes."""
|
|
||||||
with patch('src.server.fastapi_app.settings', mock_settings):
|
|
||||||
hash1 = hash_password("password1")
|
|
||||||
hash2 = hash_password("password2")
|
|
||||||
|
|
||||||
assert hash1 != hash2
|
|
||||||
|
|
||||||
def test_hash_password_consistent(self, mock_settings):
|
|
||||||
"""Test that same password always produces same hash."""
|
|
||||||
with patch('src.server.fastapi_app.settings', mock_settings):
|
|
||||||
password = "consistent_password"
|
|
||||||
hash1 = hash_password(password)
|
|
||||||
hash2 = hash_password(password)
|
|
||||||
|
|
||||||
assert hash1 == hash2
|
|
||||||
|
|
||||||
def test_hash_password_empty_string(self, mock_settings):
|
|
||||||
"""Test hashing empty string."""
|
|
||||||
with patch('src.server.fastapi_app.settings', mock_settings):
|
|
||||||
result = hash_password("")
|
|
||||||
|
|
||||||
assert isinstance(result, str)
|
|
||||||
assert len(result) == 64
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.unit
|
|
||||||
class TestMasterPasswordVerification:
|
|
||||||
"""Test master password verification functionality."""
|
|
||||||
|
|
||||||
def test_verify_master_password_with_hash(self, mock_settings):
|
|
||||||
"""Test password verification using stored hash."""
|
|
||||||
password = "test_password"
|
|
||||||
mock_settings.master_password_hash = hash_password(password)
|
|
||||||
mock_settings.master_password = None
|
|
||||||
|
|
||||||
with patch('src.server.fastapi_app.settings', mock_settings):
|
|
||||||
assert verify_master_password(password) is True
|
|
||||||
assert verify_master_password("wrong_password") is False
|
|
||||||
|
|
||||||
def test_verify_master_password_with_plain_text(self, mock_settings):
|
|
||||||
"""Test password verification using plain text (development mode)."""
|
|
||||||
password = "test_password"
|
|
||||||
mock_settings.master_password_hash = None
|
|
||||||
mock_settings.master_password = password
|
|
||||||
|
|
||||||
with patch('src.server.fastapi_app.settings', mock_settings):
|
|
||||||
assert verify_master_password(password) is True
|
|
||||||
assert verify_master_password("wrong_password") is False
|
|
||||||
|
|
||||||
def test_verify_master_password_no_config(self, mock_settings):
|
|
||||||
"""Test password verification when no password is configured."""
|
|
||||||
mock_settings.master_password_hash = None
|
|
||||||
mock_settings.master_password = None
|
|
||||||
|
|
||||||
with patch('src.server.fastapi_app.settings', mock_settings):
|
|
||||||
assert verify_master_password("any_password") is False
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.unit
|
|
||||||
class TestJWTGeneration:
|
|
||||||
"""Test JWT token generation functionality."""
|
|
||||||
|
|
||||||
def test_generate_jwt_token_structure(self, mock_settings):
|
|
||||||
"""Test JWT token generation returns correct structure."""
|
|
||||||
with patch('src.server.fastapi_app.settings', mock_settings):
|
|
||||||
result = generate_jwt_token()
|
|
||||||
|
|
||||||
assert "token" in result
|
|
||||||
assert "expires_at" in result
|
|
||||||
assert isinstance(result["token"], str)
|
|
||||||
assert isinstance(result["expires_at"], datetime)
|
|
||||||
|
|
||||||
def test_generate_jwt_token_expiry(self, mock_settings):
|
|
||||||
"""Test JWT token has correct expiry time."""
|
|
||||||
with patch('src.server.fastapi_app.settings', mock_settings):
|
|
||||||
before_generation = datetime.utcnow()
|
|
||||||
result = generate_jwt_token()
|
|
||||||
after_generation = datetime.utcnow()
|
|
||||||
|
|
||||||
expected_expiry_min = before_generation + timedelta(
|
|
||||||
hours=mock_settings.token_expiry_hours
|
|
||||||
)
|
|
||||||
expected_expiry_max = after_generation + timedelta(
|
|
||||||
hours=mock_settings.token_expiry_hours
|
|
||||||
)
|
|
||||||
|
|
||||||
assert expected_expiry_min <= result["expires_at"] <= expected_expiry_max
|
|
||||||
|
|
||||||
def test_generate_jwt_token_payload(self, mock_settings):
|
|
||||||
"""Test JWT token contains correct payload."""
|
|
||||||
with patch('src.server.fastapi_app.settings', mock_settings):
|
|
||||||
result = generate_jwt_token()
|
|
||||||
|
|
||||||
# Decode without verification to check payload
|
|
||||||
payload = jwt.decode(
|
|
||||||
result["token"],
|
|
||||||
options={"verify_signature": False}
|
|
||||||
)
|
|
||||||
|
|
||||||
        assert payload["user"] == "master"
        assert payload["iss"] == "aniworld-fastapi-server"
        assert "exp" in payload
        assert "iat" in payload


@pytest.mark.unit
class TestJWTVerification:
    """Test JWT token verification functionality."""

    def test_verify_valid_jwt_token(self, mock_settings, valid_jwt_token):
        """Test verification of a valid JWT token."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            result = verify_jwt_token(valid_jwt_token)

        assert result is not None
        assert result["user"] == "test_user"
        assert "exp" in result

    def test_verify_expired_jwt_token(self, mock_settings, expired_jwt_token):
        """Test verification of an expired JWT token."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            result = verify_jwt_token(expired_jwt_token)

        assert result is None

    def test_verify_invalid_jwt_token(self, mock_settings):
        """Test verification of an invalid JWT token."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            result = verify_jwt_token("invalid.token.here")

        assert result is None

    def test_verify_jwt_token_wrong_secret(self, mock_settings):
        """Test verification with the wrong secret key."""
        # Generate a token signed with a different secret
        payload = {
            "user": "test_user",
            "exp": datetime.utcnow() + timedelta(hours=1)
        }
        wrong_token = jwt.encode(payload, "wrong-secret", algorithm="HS256")

        with patch('src.server.fastapi_app.settings', mock_settings):
            result = verify_jwt_token(wrong_token)

        assert result is None


@pytest.mark.unit
class TestSessionTimeout:
    """Test session timeout logic."""

    def test_token_expiry_calculation(self, mock_settings):
        """Test that token expiry is calculated correctly."""
        mock_settings.token_expiry_hours = 24

        with patch('src.server.fastapi_app.settings', mock_settings):
            before = datetime.utcnow()
            result = generate_jwt_token()
            after = datetime.utcnow()

        # Check that expiry is approximately 24 hours from now
        expected_min = before + timedelta(hours=24)
        expected_max = after + timedelta(hours=24)

        assert expected_min <= result["expires_at"] <= expected_max

    def test_custom_expiry_hours(self, mock_settings):
        """Test token generation with custom expiry hours."""
        mock_settings.token_expiry_hours = 1

        with patch('src.server.fastapi_app.settings', mock_settings):
            result = generate_jwt_token()

        # Decode the token to check its expiry
        payload = jwt.decode(
            result["token"],
            mock_settings.jwt_secret_key,
            algorithms=["HS256"]
        )

        # utcfromtimestamp keeps the comparison in UTC regardless of the host
        # timezone; "expires_at" is derived from datetime.utcnow() above.
        token_exp = datetime.utcfromtimestamp(payload["exp"])
        expected_exp = result["expires_at"]

        # Should be approximately the same (within 1 second)
        assert abs((token_exp - expected_exp).total_seconds()) < 1


@pytest.mark.unit
class TestSecurityConfiguration:
    """Test secure environment variable management."""

    def test_settings_defaults(self):
        """Test that settings have secure defaults."""
        settings = Settings()

        # Should have default values
        assert settings.jwt_secret_key is not None
        assert settings.password_salt is not None
        assert settings.token_expiry_hours > 0

    def test_settings_from_env(self):
        """Test settings loading from environment variables."""
        with patch.dict(os.environ, {
            'JWT_SECRET_KEY': 'test-secret',
            'PASSWORD_SALT': 'test-salt',
            'SESSION_TIMEOUT_HOURS': '12'
        }):
            settings = Settings()

        assert settings.jwt_secret_key == 'test-secret'
        assert settings.password_salt == 'test-salt'
        assert settings.token_expiry_hours == 12

    def test_sensitive_data_not_logged(self, mock_settings, caplog):
        """Test that sensitive data is not logged."""
        password = "sensitive_password"

        with patch('src.server.fastapi_app.settings', mock_settings):
            hash_password(password)
            verify_master_password(password)

        # The password must not appear in any log record
        for record in caplog.records:
            assert password not in record.message
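# A minimal sketch of the verify_jwt_token helper these tests exercise. The real
# implementation lives in src.server.fastapi_app and is not shown in this diff, so
# this version is an assumption reconstructed only from the behaviour asserted
# above (None on expiry, None on a bad signature, decoded payload otherwise),
# using the PyJWT API already imported in this file.
def verify_jwt_token_sketch(token, secret_key):
    """Return the decoded payload, or None if the token is invalid or expired."""
    try:
        return jwt.decode(token, secret_key, algorithms=["HS256"])
    except jwt.ExpiredSignatureError:
        return None  # matches test_verify_expired_jwt_token
    except jwt.InvalidTokenError:
        return None  # malformed tokens and wrong secrets land here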
@ -1,429 +0,0 @@
"""
Unit tests for CLI commands and functionality.

Tests CLI commands (scan, search, download, rescan, display series),
user input handling, and command-line interface logic.
"""

import os
import sys
from unittest.mock import Mock, patch

import pytest

# Add source directory to path
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..'))

# Import after path setup
from src.cli.Main import (  # noqa: E402
    MatchNotFoundError,
    NoKeyFoundException,
    SeriesApp,
)


@pytest.fixture
def mock_series_app():
    """Create a mock SeriesApp instance for testing."""
    def named_series(name, folder):
        # 'name' is consumed by the Mock constructor itself (it names the mock),
        # so it must be assigned afterwards for `series.name` to be the string.
        series = Mock(folder=folder)
        series.name = name
        return series

    with patch('src.cli.Main.Loaders'), \
            patch('src.cli.Main.SerieScanner'), \
            patch('src.cli.Main.SerieList'):
        app = SeriesApp("/test/directory")
        app.series_list = [
            named_series("Test Anime 1", "test_anime_1"),
            named_series("Test Anime 2", "test_anime_2"),
            named_series(None, "unknown_anime"),
        ]
        return app


@pytest.mark.unit
class TestCLICommands:
    """Test CLI command functionality."""

    def test_display_series_with_names(self, mock_series_app, capsys):
        """Test displaying series with proper names."""
        mock_series_app.display_series()

        captured = capsys.readouterr()
        output = captured.out

        assert "Current result:" in output
        assert "1. Test Anime 1" in output
        assert "2. Test Anime 2" in output
        assert "3. unknown_anime" in output  # Falls back to the folder name when name is None

    def test_search_command(self, mock_series_app):
        """Test search command functionality."""
        mock_loader = Mock()
        mock_loader.Search.return_value = [
            {"name": "Found Anime 1", "link": "link1"},
            {"name": "Found Anime 2", "link": "link2"}
        ]
        mock_series_app.Loaders.GetLoader.return_value = mock_loader

        results = mock_series_app.search("test query")

        assert len(results) == 2
        assert results[0]["name"] == "Found Anime 1"
        mock_loader.Search.assert_called_once_with("test query")

    def test_search_no_results(self, mock_series_app):
        """Test search command with no results."""
        mock_loader = Mock()
        mock_loader.Search.return_value = []
        mock_series_app.Loaders.GetLoader.return_value = mock_loader

        results = mock_series_app.search("nonexistent")

        assert len(results) == 0

    def test_user_selection_single(self, mock_series_app):
        """Test user selection with a single series."""
        with patch('builtins.input', return_value='1'):
            selected = mock_series_app.get_user_selection()

        assert selected is not None
        assert len(selected) == 1
        assert selected[0].name == "Test Anime 1"

    def test_user_selection_multiple(self, mock_series_app):
        """Test user selection with multiple series."""
        with patch('builtins.input', return_value='1,2'):
            selected = mock_series_app.get_user_selection()

        assert selected is not None
        assert len(selected) == 2
        assert selected[0].name == "Test Anime 1"
        assert selected[1].name == "Test Anime 2"

    def test_user_selection_all(self, mock_series_app):
        """Test user selection with the 'all' option."""
        with patch('builtins.input', return_value='all'):
            selected = mock_series_app.get_user_selection()

        assert selected is not None
        assert len(selected) == 3  # All series

    def test_user_selection_exit(self, mock_series_app):
        """Test user selection with the exit command."""
        with patch('builtins.input', return_value='exit'):
            selected = mock_series_app.get_user_selection()

        assert selected is None

    def test_user_selection_invalid_input(self, mock_series_app):
        """Test user selection with invalid input followed by valid input."""
        with patch('builtins.input', side_effect=['invalid', 'abc', '1']):
            with patch('builtins.print'):  # Suppress print output
                selected = mock_series_app.get_user_selection()

        assert selected is not None
        assert len(selected) == 1

    def test_retry_mechanism_success(self, mock_series_app):
        """Test the retry mechanism with a successful operation."""
        mock_func = Mock()

        result = mock_series_app.retry(mock_func, "arg1", max_retries=3, delay=0, key="value")

        assert result is True
        mock_func.assert_called_once_with("arg1", key="value")

    def test_retry_mechanism_eventual_success(self, mock_series_app):
        """Test the retry mechanism with failures followed by success."""
        mock_func = Mock(side_effect=[Exception("Error"), Exception("Error"), None])

        with patch('time.sleep'):  # Speed up the test
            result = mock_series_app.retry(mock_func, max_retries=3, delay=0)

        assert result is True
        assert mock_func.call_count == 3

    def test_retry_mechanism_failure(self, mock_series_app):
        """Test the retry mechanism with a persistent failure."""
        mock_func = Mock(side_effect=Exception("Persistent error"))

        with patch('time.sleep'), patch('builtins.print'):  # Speed up the test, suppress error output
            result = mock_series_app.retry(mock_func, max_retries=3, delay=0)

        assert result is False
        assert mock_func.call_count == 3
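# A minimal sketch of the retry helper exercised above, assuming only the semantics
# the tests assert: True on the first successful call, retry on any exception up to
# max_retries attempts, False once every attempt has failed. The real
# SeriesApp.retry may differ in detail; this is illustration, not the implementation.
import time


def retry_sketch(func, *args, max_retries=3, delay=1.0, **kwargs):
    for attempt in range(max_retries):
        try:
            func(*args, **kwargs)
            return True
        except Exception as exc:  # the tests retry on any exception type
            print(f"Attempt {attempt + 1}/{max_retries} failed: {exc}")
            time.sleep(delay)
    return False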
@pytest.mark.unit
class TestCLISearchMode:
    """Test CLI search mode functionality."""

    def test_search_mode_with_results(self, mock_series_app):
        """Test search mode with search results."""
        mock_results = [
            {"name": "Anime 1", "link": "link1"},
            {"name": "Anime 2", "link": "link2"}
        ]

        with patch('builtins.input', side_effect=['test search', '1']), \
                patch.object(mock_series_app, 'search', return_value=mock_results), \
                patch.object(mock_series_app.List, 'add') as mock_add, \
                patch('builtins.print'):

            mock_series_app.search_mode()

            mock_add.assert_called_once()

    def test_search_mode_no_results(self, mock_series_app):
        """Test search mode with no results."""
        with patch('builtins.input', return_value='nonexistent'), \
                patch.object(mock_series_app, 'search', return_value=[]), \
                patch('builtins.print') as mock_print:

            mock_series_app.search_mode()

            # Should print "No results found"
            print_calls = [call[0][0] for call in mock_print.call_args_list]
            assert any("No results found" in call for call in print_calls)

    def test_search_mode_empty_selection(self, mock_series_app):
        """Test search mode with an empty selection (return)."""
        mock_results = [{"name": "Anime 1", "link": "link1"}]

        with patch('builtins.input', side_effect=['test', '']), \
                patch.object(mock_series_app, 'search', return_value=mock_results), \
                patch('builtins.print'):

            # Should return without error
            mock_series_app.search_mode()

    def test_search_mode_invalid_selection(self, mock_series_app):
        """Test search mode with an invalid then a valid selection."""
        mock_results = [{"name": "Anime 1", "link": "link1"}]

        with patch('builtins.input', side_effect=['test', '999', '1']), \
                patch.object(mock_series_app, 'search', return_value=mock_results), \
                patch.object(mock_series_app.List, 'add'), \
                patch('builtins.print'):

            mock_series_app.search_mode()


@pytest.mark.unit
class TestCLIDownloadFunctionality:
    """Test CLI download functionality."""

    def test_download_series_setup(self, mock_series_app):
        """Test download series initialization."""
        mock_series = [
            Mock(episodeDict={1: [1, 2], 2: [1]}, folder="anime1", key="key1"),
            Mock(episodeDict={1: [1]}, folder="anime2", key="key2")
        ]

        with patch('rich.progress.Progress') as mock_progress_class, \
                patch('time.sleep'):

            mock_progress = Mock()
            mock_progress_class.return_value = mock_progress

            mock_series_app.download_series(mock_series)

            # Should create progress tracking
            mock_progress.add_task.assert_called()
            mock_progress.start.assert_called_once()
            mock_progress.stop.assert_called_once()

    def test_download_progress_callback(self, mock_series_app):
        """Test download progress callback functionality."""
        mock_series_app.progress = Mock()
        mock_series_app.task3 = "task3_id"

        # Test downloading status
        download_data = {
            'status': 'downloading',
            'total_bytes': 1000,
            'downloaded_bytes': 500
        }

        mock_series_app.print_Download_Progress(download_data)

        mock_series_app.progress.update.assert_called()

        # Test finished status
        download_data['status'] = 'finished'
        mock_series_app.print_Download_Progress(download_data)

        # Should update progress to 100%
        update_calls = mock_series_app.progress.update.call_args_list
        assert any(call[1].get('completed') == 100 for call in update_calls)

    def test_download_progress_no_total(self, mock_series_app):
        """Test download progress with no total bytes."""
        mock_series_app.progress = Mock()
        mock_series_app.task3 = "task3_id"

        download_data = {
            'status': 'downloading',
            'downloaded_bytes': 5242880  # 5 MiB
        }

        mock_series_app.print_Download_Progress(download_data)

        # Should handle the case where total_bytes is not available
        mock_series_app.progress.update.assert_called()
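# A sketch of the progress-callback behaviour the two tests above rely on, assuming
# a yt-dlp style status dict ('status', 'total_bytes', 'downloaded_bytes'). The real
# SeriesApp.print_Download_Progress is not shown in this diff; in particular the
# fallback for a missing total_bytes is an assumption.
def print_download_progress_sketch(progress, task_id, data):
    if data.get('status') == 'finished':
        progress.update(task_id, completed=100)
    elif data.get('status') == 'downloading':
        total = data.get('total_bytes')
        done = data.get('downloaded_bytes', 0)
        if total:
            progress.update(task_id, completed=done * 100 / total)
        else:
            # No total size known: report raw megabytes instead of a percentage.
            progress.update(task_id, description=f"{done / 1048576:.1f} MiB downloaded")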
@pytest.mark.unit
class TestCLIMainLoop:
    """Test the CLI main application loop."""

    def test_main_loop_search_action(self, mock_series_app):
        """Test the main loop with the search action."""
        with patch('builtins.input', side_effect=['s', KeyboardInterrupt()]), \
                patch.object(mock_series_app, 'search_mode') as mock_search:

            try:
                mock_series_app.run()
            except KeyboardInterrupt:
                pass

        mock_search.assert_called_once()

    def test_main_loop_init_action(self, mock_series_app):
        """Test the main loop with the init action."""
        with patch('builtins.input', side_effect=['i', KeyboardInterrupt()]), \
                patch('rich.progress.Progress'), \
                patch('builtins.print'):

            mock_series_app.SerieScanner = Mock()
            mock_series_app.List = Mock()

            try:
                mock_series_app.run()
            except KeyboardInterrupt:
                pass

        mock_series_app.SerieScanner.Reinit.assert_called_once()
        mock_series_app.SerieScanner.Scan.assert_called_once()

    def test_main_loop_download_action(self, mock_series_app):
        """Test the main loop with the download action."""
        mock_selected = [Mock()]

        with patch('builtins.input', side_effect=['d', KeyboardInterrupt()]), \
                patch.object(mock_series_app, 'get_user_selection', return_value=mock_selected), \
                patch.object(mock_series_app, 'download_series') as mock_download:

            try:
                mock_series_app.run()
            except KeyboardInterrupt:
                pass

        mock_download.assert_called_once_with(mock_selected)

    def test_main_loop_download_action_no_selection(self, mock_series_app):
        """Test the main loop with the download action but no series selected."""
        with patch('builtins.input', side_effect=['d', KeyboardInterrupt()]), \
                patch.object(mock_series_app, 'get_user_selection', return_value=None), \
                patch.object(mock_series_app, 'download_series') as mock_download:

            try:
                mock_series_app.run()
            except KeyboardInterrupt:
                pass

        mock_download.assert_not_called()
@pytest.mark.unit
class TestCLIExceptions:
    """Test CLI exception handling."""

    def test_no_key_found_exception(self):
        """Test NoKeyFoundException creation and usage."""
        exception = NoKeyFoundException("Test message")
        assert str(exception) == "Test message"
        assert isinstance(exception, Exception)

    def test_match_not_found_error(self):
        """Test MatchNotFoundError creation and usage."""
        error = MatchNotFoundError("No match found")
        assert str(error) == "No match found"
        assert isinstance(error, Exception)

    def test_exception_handling_in_retry(self, mock_series_app):
        """Test exception handling in the retry mechanism."""
        def failing_function():
            raise NoKeyFoundException("Key not found")

        with patch('time.sleep'), patch('builtins.print'):
            result = mock_series_app.retry(failing_function, max_retries=2, delay=0)

        assert result is False


@pytest.mark.unit
class TestCLIInitialization:
    """Test CLI application initialization."""

    def test_series_app_initialization(self):
        """Test SeriesApp initialization."""
        with patch('src.cli.Main.Loaders'), \
                patch('src.cli.Main.SerieScanner'), \
                patch('src.cli.Main.SerieList'), \
                patch('builtins.print'):

            app = SeriesApp("/test/directory")

        assert app.directory_to_search == "/test/directory"
        assert app.progress is None
        assert hasattr(app, 'Loaders')
        assert hasattr(app, 'SerieScanner')
        assert hasattr(app, 'List')

    def test_initialization_count_tracking(self):
        """Test that the initialization count is tracked properly."""
        initial_count = SeriesApp._initialization_count

        with patch('src.cli.Main.Loaders'), \
                patch('src.cli.Main.SerieScanner'), \
                patch('src.cli.Main.SerieList'), \
                patch('builtins.print'):

            SeriesApp("/test1")
            SeriesApp("/test2")

        assert SeriesApp._initialization_count == initial_count + 2

    def test_init_list_method(self, mock_series_app):
        """Test the __InitList__ method."""
        mock_missing_episodes = [Mock(), Mock()]
        mock_series_app.List.GetMissingEpisode.return_value = mock_missing_episodes

        mock_series_app._SeriesApp__InitList__()

        assert mock_series_app.series_list == mock_missing_episodes
        mock_series_app.List.GetMissingEpisode.assert_called_once()


@pytest.mark.unit
class TestCLIEnvironmentVariables:
    """Test CLI environment variable handling."""

    def test_default_anime_directory(self):
        """Test the default ANIME_DIRECTORY handling."""
        with patch.dict(os.environ, {}, clear=True), \
                patch('src.cli.Main.SeriesApp'):

            # Import and run the main module simulation
            import src.cli.Main  # noqa: F401

            # The default should be the hardcoded path
            default_path = "\\\\sshfs.r\\ubuntu@192.168.178.43\\media\\serien\\Serien"
            result = os.getenv("ANIME_DIRECTORY", default_path)
            assert result == default_path

    def test_custom_anime_directory(self):
        """Test a custom ANIME_DIRECTORY from the environment."""
        custom_path = "/custom/anime/directory"

        with patch.dict(os.environ, {'ANIME_DIRECTORY': custom_path}):
            result = os.getenv("ANIME_DIRECTORY", "default")
            assert result == custom_path
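# A hedged sketch of the entry point implied by the two environment tests above:
# the scan directory comes from ANIME_DIRECTORY, with the hardcoded SSHFS path as
# the fallback. The fallback value is taken from test_default_anime_directory; the
# main() wiring itself is an assumption, not the actual src.cli.Main code.
def main_sketch():
    directory = os.getenv(
        "ANIME_DIRECTORY",
        "\\\\sshfs.r\\ubuntu@192.168.178.43\\media\\serien\\Serien",
    )
    SeriesApp(directory).run()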
@ -1,485 +0,0 @@
"""
Unit tests for database maintenance operation logic.

Tests database maintenance functions, storage optimization,
integrity checking, and database management utilities.
"""

import os
import sys
from unittest.mock import Mock

import pytest

# Add source directory to path
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..'))


@pytest.mark.unit
class TestDatabaseMaintenanceLogic:
    """Test database maintenance operation logic."""

    def test_database_vacuum_operation(self):
        """Test database vacuum operation logic."""
        def perform_vacuum(connection, full_vacuum=False):
            """Perform a database vacuum operation."""
            try:
                cursor = connection.cursor()
                if full_vacuum:
                    cursor.execute("VACUUM")
                else:
                    cursor.execute("PRAGMA incremental_vacuum")

                # Get the database size before and after (simulated)
                cursor.execute("PRAGMA page_count")
                page_count = cursor.fetchone()[0]

                cursor.execute("PRAGMA page_size")
                page_size = cursor.fetchone()[0]

                return {
                    "success": True,
                    "operation": "vacuum",
                    "type": "full" if full_vacuum else "incremental",
                    "pages_freed": 0,  # Would be calculated in a real implementation
                    "space_saved_bytes": 0,
                    "final_size_bytes": page_count * page_size
                }
            except Exception as e:
                return {
                    "success": False,
                    "operation": "vacuum",
                    "error": str(e)
                }

        # Test with a mock connection. Each vacuum call reads page_count and
        # page_size once, so one pair of values is needed per expected call;
        # an exhausted side_effect would raise and wrongly hit the error path.
        mock_connection = Mock()
        mock_cursor = Mock()
        mock_connection.cursor.return_value = mock_cursor
        mock_cursor.fetchone.side_effect = [[1000], [4096], [1000], [4096]]

        # Test incremental vacuum
        result = perform_vacuum(mock_connection, full_vacuum=False)
        assert result["success"] is True
        assert result["type"] == "incremental"
        assert "final_size_bytes" in result

        # Test full vacuum
        result = perform_vacuum(mock_connection, full_vacuum=True)
        assert result["success"] is True
        assert result["type"] == "full"

        # Test error handling
        mock_cursor.execute.side_effect = Exception("Database locked")
        result = perform_vacuum(mock_connection)
        assert result["success"] is False
        assert "error" in result
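# Usage sketch: the helper above only needs a DB-API cursor, so the same PRAGMA
# sequence also runs against a real in-memory SQLite database from the standard
# library. This block is illustrative and not part of the test file's assertions.
import sqlite3


def vacuum_size_demo():
    conn = sqlite3.connect(":memory:")
    conn.execute("CREATE TABLE t (x INTEGER)")
    cursor = conn.cursor()
    # PRAGMA incremental_vacuum only frees pages when auto_vacuum is enabled,
    # but the statement itself executes without error on any database.
    cursor.execute("PRAGMA incremental_vacuum")
    cursor.execute("PRAGMA page_count")
    page_count = cursor.fetchone()[0]
    cursor.execute("PRAGMA page_size")
    page_size = cursor.fetchone()[0]
    conn.close()
    return page_count * page_size  # database size in bytes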
    def test_database_analyze_operation(self):
        """Test database analyze operation logic."""
        def perform_analyze(connection, tables=None):
            """Perform a database analyze operation."""
            try:
                cursor = connection.cursor()
                tables_analyzed = []

                if tables:
                    # Analyze specific tables
                    for table in tables:
                        cursor.execute(f"ANALYZE {table}")
                        tables_analyzed.append(table)
                else:
                    # Analyze all tables
                    cursor.execute("SELECT name FROM sqlite_master WHERE type='table'")
                    all_tables = [row[0] for row in cursor.fetchall()]

                    for table in all_tables:
                        cursor.execute(f"ANALYZE {table}")
                        tables_analyzed.append(table)

                return {
                    "success": True,
                    "operation": "analyze",
                    "tables_analyzed": tables_analyzed,
                    "statistics_updated": len(tables_analyzed)
                }
            except Exception as e:
                return {
                    "success": False,
                    "operation": "analyze",
                    "error": str(e)
                }

        # Test with a mock connection
        mock_connection = Mock()
        mock_cursor = Mock()
        mock_connection.cursor.return_value = mock_cursor
        mock_cursor.fetchall.return_value = [("anime",), ("episodes",), ("users",)]

        # Test analyzing all tables
        result = perform_analyze(mock_connection)
        assert result["success"] is True
        assert result["statistics_updated"] == 3
        assert "anime" in result["tables_analyzed"]

        # Test analyzing specific tables
        result = perform_analyze(mock_connection, tables=["anime", "episodes"])
        assert result["success"] is True
        assert result["statistics_updated"] == 2
        assert set(result["tables_analyzed"]) == {"anime", "episodes"}

    def test_database_integrity_check(self):
        """Test database integrity check logic."""
        def check_database_integrity(connection):
            """Check database integrity."""
            try:
                cursor = connection.cursor()

                # Run the integrity check
                cursor.execute("PRAGMA integrity_check")
                integrity_results = cursor.fetchall()

                # Run the foreign key check
                cursor.execute("PRAGMA foreign_key_check")
                foreign_key_results = cursor.fetchall()

                # Determine the status
                integrity_ok = len(integrity_results) == 1 and integrity_results[0][0] == "ok"
                foreign_keys_ok = len(foreign_key_results) == 0

                if integrity_ok and foreign_keys_ok:
                    status = "ok"
                elif integrity_ok and not foreign_keys_ok:
                    status = "warning"  # Foreign key violations
                else:
                    status = "error"  # Integrity issues

                return {
                    "success": True,
                    "integrity_status": status,
                    "integrity_issues": [] if integrity_ok else integrity_results,
                    "foreign_key_issues": foreign_key_results,
                    "total_issues": len(foreign_key_results) + (0 if integrity_ok else len(integrity_results))
                }
            except Exception as e:
                return {
                    "success": False,
                    "error": str(e)
                }

        # Test a healthy database
        mock_connection = Mock()
        mock_cursor = Mock()
        mock_connection.cursor.return_value = mock_cursor
        mock_cursor.fetchall.side_effect = [[("ok",)], []]  # integrity ok, no FK issues

        result = check_database_integrity(mock_connection)
        assert result["success"] is True
        assert result["integrity_status"] == "ok"
        assert result["total_issues"] == 0

        # Test a database with foreign key issues
        mock_cursor.fetchall.side_effect = [[("ok",)], [("table", "row", "issue")]]

        result = check_database_integrity(mock_connection)
        assert result["success"] is True
        assert result["integrity_status"] == "warning"
        assert result["total_issues"] == 1

        # Test a database with integrity issues
        mock_cursor.fetchall.side_effect = [[("error in table",)], []]

        result = check_database_integrity(mock_connection)
        assert result["success"] is True
        assert result["integrity_status"] == "error"
        assert result["total_issues"] == 1

    def test_database_reindex_operation(self):
        """Test database reindex operation logic."""
        def perform_reindex(connection, indexes=None):
            """Perform a database reindex operation."""
            try:
                cursor = connection.cursor()
                indexes_rebuilt = []

                if indexes:
                    # Reindex specific indexes
                    for index in indexes:
                        cursor.execute(f"REINDEX {index}")
                        indexes_rebuilt.append(index)
                else:
                    # Reindex all indexes
                    cursor.execute("SELECT name FROM sqlite_master WHERE type='index' AND name NOT LIKE 'sqlite_%'")
                    all_indexes = [row[0] for row in cursor.fetchall()]

                    for index in all_indexes:
                        cursor.execute(f"REINDEX {index}")
                        indexes_rebuilt.append(index)

                return {
                    "success": True,
                    "operation": "reindex",
                    "indexes_rebuilt": indexes_rebuilt,
                    "count": len(indexes_rebuilt)
                }
            except Exception as e:
                return {
                    "success": False,
                    "operation": "reindex",
                    "error": str(e)
                }

        # Test with a mock connection
        mock_connection = Mock()
        mock_cursor = Mock()
        mock_connection.cursor.return_value = mock_cursor
        mock_cursor.fetchall.return_value = [("idx_anime_title",), ("idx_episode_number",)]

        # Test reindexing everything
        result = perform_reindex(mock_connection)
        assert result["success"] is True
        assert result["count"] == 2
        assert "idx_anime_title" in result["indexes_rebuilt"]

        # Test reindexing specific indexes
        result = perform_reindex(mock_connection, indexes=["idx_anime_title"])
        assert result["success"] is True
        assert result["count"] == 1
        assert result["indexes_rebuilt"] == ["idx_anime_title"]
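# Usage sketch: PRAGMA integrity_check and PRAGMA foreign_key_check behave against
# a real SQLite connection exactly as the mocks above simulate — a healthy database
# yields a single ("ok",) row and an empty foreign-key result. Illustration only.
import sqlite3


def integrity_demo():
    conn = sqlite3.connect(":memory:")
    cursor = conn.cursor()
    cursor.execute("PRAGMA integrity_check")
    assert cursor.fetchall() == [("ok",)]
    cursor.execute("PRAGMA foreign_key_check")
    assert cursor.fetchall() == []
    conn.close()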
@pytest.mark.unit
class TestDatabaseStatistics:
    """Test database statistics collection."""

    def test_collect_database_stats(self):
        """Test database statistics collection."""
        def collect_database_stats(connection):
            """Collect comprehensive database statistics."""
            try:
                cursor = connection.cursor()
                stats = {}

                # Get the table count
                cursor.execute("SELECT COUNT(*) FROM sqlite_master WHERE type='table'")
                stats["table_count"] = cursor.fetchone()[0]

                # Get the database size
                cursor.execute("PRAGMA page_count")
                page_count = cursor.fetchone()[0]
                cursor.execute("PRAGMA page_size")
                page_size = cursor.fetchone()[0]
                stats["database_size_bytes"] = page_count * page_size

                # Get the free pages
                cursor.execute("PRAGMA freelist_count")
                free_pages = cursor.fetchone()[0]
                stats["free_space_bytes"] = free_pages * page_size

                # Get the index count
                cursor.execute("SELECT COUNT(*) FROM sqlite_master WHERE type='index' AND name NOT LIKE 'sqlite_%'")
                stats["index_count"] = cursor.fetchone()[0]

                # Calculate utilization
                used_space = stats["database_size_bytes"] - stats["free_space_bytes"]
                if stats["database_size_bytes"] > 0:
                    stats["space_utilization_percent"] = (used_space / stats["database_size_bytes"]) * 100
                else:
                    stats["space_utilization_percent"] = 0

                return {
                    "success": True,
                    "stats": stats
                }
            except Exception as e:
                return {
                    "success": False,
                    "error": str(e)
                }

        # Test with a mock connection
        mock_connection = Mock()
        mock_cursor = Mock()
        mock_connection.cursor.return_value = mock_cursor
        mock_cursor.fetchone.side_effect = [
            (5,),     # table_count
            (1000,),  # page_count
            (4096,),  # page_size
            (50,),    # freelist_count
            (3,)      # index_count
        ]

        result = collect_database_stats(mock_connection)
        assert result["success"] is True

        stats = result["stats"]
        assert stats["table_count"] == 5
        assert stats["database_size_bytes"] == 1000 * 4096
        assert stats["free_space_bytes"] == 50 * 4096
        assert stats["index_count"] == 3
        assert 0 <= stats["space_utilization_percent"] <= 100

    def test_table_specific_stats(self):
        """Test collection of table-specific statistics."""
        def collect_table_stats(connection, table_name):
            """Collect statistics for a specific table."""
            try:
                cursor = connection.cursor()

                # Get the row count
                cursor.execute(f"SELECT COUNT(*) FROM {table_name}")
                row_count = cursor.fetchone()[0]

                # Get the table info
                cursor.execute(f"PRAGMA table_info({table_name})")
                columns = cursor.fetchall()
                column_count = len(columns)

                # Get the table size (approximate)
                cursor.execute(f"SELECT name FROM sqlite_master WHERE type='table' AND name='{table_name}'")
                if cursor.fetchone():
                    # The table exists; calculate an approximate size.
                    # This is simplified — a real implementation would be more involved.
                    estimated_size = row_count * column_count * 100  # Rough estimate
                else:
                    estimated_size = 0

                return {
                    "table_name": table_name,
                    "row_count": row_count,
                    "column_count": column_count,
                    "estimated_size_bytes": estimated_size,
                    "columns": [col[1] for col in columns]  # Column names
                }
            except Exception as e:
                return {
                    "error": str(e),
                    "table_name": table_name
                }

        # Test with a mock connection
        mock_connection = Mock()
        mock_cursor = Mock()
        mock_connection.cursor.return_value = mock_cursor
        mock_cursor.fetchone.side_effect = [
            (1000,),    # row count
            ("anime",)  # table exists
        ]
        mock_cursor.fetchall.return_value = [
            (0, "id", "INTEGER", 0, None, 1),
            (1, "title", "TEXT", 0, None, 0),
            (2, "genre", "TEXT", 0, None, 0)
        ]

        result = collect_table_stats(mock_connection, "anime")
        assert result["table_name"] == "anime"
        assert result["row_count"] == 1000
        assert result["column_count"] == 3
        assert "columns" in result
        assert "id" in result["columns"]
@pytest.mark.unit
class TestDatabaseOptimization:
    """Test database optimization logic."""

    def test_optimization_recommendations(self):
        """Test generation of database optimization recommendations."""
        def generate_optimization_recommendations(stats):
            """Generate optimization recommendations based on stats."""
            recommendations = []

            # Check space utilization
            if stats.get("space_utilization_percent", 100) < 70:
                recommendations.append({
                    "type": "vacuum",
                    "priority": "medium",
                    "description": "Database has significant free space, consider running VACUUM",
                    "estimated_benefit": "Reduce database file size"
                })

            # Check whether analyze is needed (simplified check)
            if stats.get("table_count", 0) > 0:
                recommendations.append({
                    "type": "analyze",
                    "priority": "low",
                    "description": "Update table statistics for better query planning",
                    "estimated_benefit": "Improve query performance"
                })

            # Check the index count vs table count ratio
            table_count = stats.get("table_count", 0)
            index_count = stats.get("index_count", 0)

            if table_count > 0 and (index_count / table_count) < 1:
                recommendations.append({
                    "type": "indexing",
                    "priority": "medium",
                    "description": "Some tables may benefit from additional indexes",
                    "estimated_benefit": "Faster query execution"
                })

            return recommendations

        # Test with stats indicating the need for a vacuum
        stats = {
            "table_count": 5,
            "index_count": 3,
            "space_utilization_percent": 60
        }

        recommendations = generate_optimization_recommendations(stats)
        assert len(recommendations) >= 2

        vacuum_rec = next((r for r in recommendations if r["type"] == "vacuum"), None)
        assert vacuum_rec is not None
        assert vacuum_rec["priority"] == "medium"

        # Test with a well-optimized database
        optimized_stats = {
            "table_count": 5,
            "index_count": 8,
            "space_utilization_percent": 95
        }

        recommendations = generate_optimization_recommendations(optimized_stats)
        # Should still recommend analyze, but not vacuum
        vacuum_rec = next((r for r in recommendations if r["type"] == "vacuum"), None)
        assert vacuum_rec is None

    def test_maintenance_scheduler(self):
        """Test maintenance operation scheduling logic."""
        def should_run_maintenance(operation_type, last_run_timestamp, current_timestamp):
            """Determine whether a maintenance operation should run."""
            intervals = {
                "analyze": 24 * 3600,               # 24 hours
                "vacuum": 7 * 24 * 3600,            # 7 days
                "integrity_check": 30 * 24 * 3600,  # 30 days
                "reindex": 30 * 24 * 3600           # 30 days
            }

            if operation_type not in intervals:
                return False

            if last_run_timestamp is None:
                return True  # Never run before

            time_since_last = current_timestamp - last_run_timestamp
            return time_since_last >= intervals[operation_type]

        current_time = 1000000

        # Test: never run before
        assert should_run_maintenance("analyze", None, current_time) is True

        # Test: recent run
        recent_run = current_time - (12 * 3600)  # 12 hours ago
        assert should_run_maintenance("analyze", recent_run, current_time) is False

        # Test: old run
        old_run = current_time - (25 * 3600)  # 25 hours ago
        assert should_run_maintenance("analyze", old_run, current_time) is True

        # Test vacuum timing
        week_ago = current_time - (8 * 24 * 3600)  # 8 days ago
        assert should_run_maintenance("vacuum", week_ago, current_time) is True

        day_ago = current_time - (24 * 3600)  # 1 day ago
        assert should_run_maintenance("vacuum", day_ago, current_time) is False
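# A sketch wiring the scheduling rule above to wall-clock time. How the production
# code persists last-run timestamps is not visible in this diff, so the plain dict
# here is an assumption; `should_run` stands in for the predicate tested above.
import time

_last_runs = {}


def run_due_maintenance(connection, should_run, operations):
    """Run each due maintenance operation and record when it ran."""
    now = time.time()
    for name, operation in operations.items():
        if should_run(name, _last_runs.get(name), now):
            operation(connection)
            _last_runs[name] = now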
@ -1,404 +0,0 @@
"""
Unit tests for logging functionality.

This module tests the logging configuration, log file management,
and error reporting components.
"""

import logging
import os
import tempfile
from unittest.mock import mock_open, patch

import pytest

# Import logging components
try:
    from src.infrastructure.logging.GlobalLogger import GlobalLogger
except ImportError:
    # Fall back to a mock GlobalLogger if the real one is not available
    class MockGlobalLogger:
        def __init__(self):
            self.logger = logging.getLogger(__name__)

        def get_logger(self, name):
            return logging.getLogger(name)

        def configure_logging(self, level=logging.INFO):
            logging.basicConfig(level=level)

    GlobalLogger = MockGlobalLogger


class TestGlobalLoggerConfiguration:
    """Test cases for global logger configuration."""

    def test_logger_initialization(self):
        """Test logger initialization."""
        global_logger = GlobalLogger()
        assert global_logger is not None

        logger = global_logger.get_logger("test_logger")
        assert logger is not None
        assert logger.name == "test_logger"

    def test_logger_level_configuration(self):
        """Test logger level configuration."""
        global_logger = GlobalLogger()

        # Test different log levels
        levels = [logging.DEBUG, logging.INFO, logging.WARNING, logging.ERROR]

        for level in levels:
            global_logger.configure_logging(level)
            logger = global_logger.get_logger("test_level")
            assert logger.level <= level or logger.parent.level <= level

    def test_multiple_logger_instances(self):
        """Test multiple logger instances."""
        global_logger = GlobalLogger()

        logger1 = global_logger.get_logger("logger1")
        logger2 = global_logger.get_logger("logger2")
        logger3 = global_logger.get_logger("logger1")  # Same name as logger1

        assert logger1 != logger2
        assert logger1 is logger3  # Should return the same instance

    @patch('logging.basicConfig')
    def test_logging_configuration_calls(self, mock_basic_config):
        """Test that logging configuration is called correctly."""
        global_logger = GlobalLogger()
        global_logger.configure_logging(logging.DEBUG)

        mock_basic_config.assert_called()


class TestLogFileManagement:
    """Test cases for log file management."""

    # pytest invokes setup_method/teardown_method on plain (non-unittest) test
    # classes; the camelCase setUp/tearDown names would never be called here.
    def setup_method(self, method):
        """Set up the test environment."""
        self.temp_dir = tempfile.mkdtemp()
        self.log_file = os.path.join(self.temp_dir, "test.log")

    def teardown_method(self, method):
        """Clean up the test environment."""
        if os.path.exists(self.temp_dir):
            import shutil
            shutil.rmtree(self.temp_dir)

    def test_log_file_creation(self):
        """Test log file creation."""
        # Configure a logger to write to the test file
        logger = logging.getLogger("test_file")
        handler = logging.FileHandler(self.log_file)
        logger.addHandler(handler)
        logger.setLevel(logging.INFO)

        # Write a log message
        logger.info("Test log message")
        handler.close()

        # Verify the file was created and contains the message
        assert os.path.exists(self.log_file)
        with open(self.log_file, 'r') as f:
            content = f.read()
            assert "Test log message" in content

    def test_log_file_rotation(self):
        """Test log file rotation functionality."""
        from logging.handlers import RotatingFileHandler

        # Create a rotating file handler
        handler = RotatingFileHandler(
            self.log_file,
            maxBytes=100,  # Small size for testing
            backupCount=3
        )

        logger = logging.getLogger("test_rotation")
        logger.addHandler(handler)
        logger.setLevel(logging.INFO)

        # Write enough messages to trigger rotation
        for i in range(10):
            logger.info(f"Long test message {i} that should trigger rotation when we write enough of them")

        handler.close()

        # Check that rotation occurred
        assert os.path.exists(self.log_file)

    # new_callable=mock_open (rather than passing a ready-made mock_open() object
    # as the replacement) is what makes patch hand the mock to the test argument.
    @patch('builtins.open', new_callable=mock_open, read_data="log content")
    def test_log_file_reading(self, mock_file):
        """Test reading log files."""
        # Mock reading a log file
        with open("test.log", 'r') as f:
            content = f.read()

        assert content == "log content"
        mock_file.assert_called_once_with("test.log", 'r')

    def test_log_file_permissions(self):
        """Test log file permissions."""
        # Create the log file
        with open(self.log_file, 'w') as f:
            f.write("test")

        # Check that the file exists and is readable and writable
        assert os.path.exists(self.log_file)
        assert os.access(self.log_file, os.R_OK)
        assert os.access(self.log_file, os.W_OK)
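# A sketch of a production-style logger setup combining the pieces tested in this
# file: a size-based rotating handler plus the asctime/name/level format used by
# the formatter tests below. The path and size limits are illustrative assumptions,
# not values taken from the application code.
from logging.handlers import RotatingFileHandler


def build_app_logger(log_file="logs/aniworld.log"):
    logger = logging.getLogger("aniworld")
    logger.setLevel(logging.INFO)
    handler = RotatingFileHandler(log_file, maxBytes=5 * 1024 * 1024, backupCount=3)
    handler.setFormatter(logging.Formatter(
        '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
    ))
    logger.addHandler(handler)
    return logger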
class TestErrorReporting:
    """Test cases for error reporting functionality."""

    def test_error_logging(self):
        """Test error message logging."""
        logger = logging.getLogger("test_errors")

        with patch.object(logger, 'error') as mock_error:
            logger.error("Test error message")
            mock_error.assert_called_once_with("Test error message")

    def test_exception_logging(self):
        """Test exception logging with a traceback."""
        logger = logging.getLogger("test_exceptions")

        with patch.object(logger, 'exception') as mock_exception:
            try:
                raise ValueError("Test exception")
            except ValueError:
                logger.exception("An error occurred")

            mock_exception.assert_called_once_with("An error occurred")

    def test_warning_logging(self):
        """Test warning message logging."""
        logger = logging.getLogger("test_warnings")

        with patch.object(logger, 'warning') as mock_warning:
            logger.warning("Test warning message")
            mock_warning.assert_called_once_with("Test warning message")

    def test_info_logging(self):
        """Test info message logging."""
        logger = logging.getLogger("test_info")

        with patch.object(logger, 'info') as mock_info:
            logger.info("Test info message")
            mock_info.assert_called_once_with("Test info message")

    def test_debug_logging(self):
        """Test debug message logging."""
        logger = logging.getLogger("test_debug")
        logger.setLevel(logging.DEBUG)

        with patch.object(logger, 'debug') as mock_debug:
            logger.debug("Test debug message")
            mock_debug.assert_called_once_with("Test debug message")


class TestLogFormatter:
    """Test cases for log formatting."""

    def test_log_format_structure(self):
        """Test the log message format structure."""
        formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
        )

        # Create a log record
        record = logging.LogRecord(
            name="test_logger",
            level=logging.INFO,
            pathname="test.py",
            lineno=1,
            msg="Test message",
            args=(),
            exc_info=None
        )

        formatted = formatter.format(record)

        # Check the format components
        assert "test_logger" in formatted
        assert "INFO" in formatted
        assert "Test message" in formatted

    def test_log_format_with_exception(self):
        """Test log formatting with exception information."""
        formatter = logging.Formatter(
            '%(asctime)s - %(name)s - %(levelname)s - %(message)s'
        )

        try:
            raise ValueError("Test exception")
        except ValueError:
            import sys
            exc_info = sys.exc_info()

        record = logging.LogRecord(
            name="test_logger",
            level=logging.ERROR,
            pathname="test.py",
            lineno=1,
            msg="Error occurred",
            args=(),
            exc_info=exc_info
        )

        formatted = formatter.format(record)

        assert "ERROR" in formatted
        assert "Error occurred" in formatted
        # Exception info should be included
        assert "ValueError" in formatted or "Traceback" in formatted

    def test_custom_log_format(self):
        """Test a custom log format."""
        custom_formatter = logging.Formatter(
            '[%(levelname)s] %(name)s: %(message)s'
        )

        record = logging.LogRecord(
            name="custom_logger",
            level=logging.WARNING,
            pathname="test.py",
            lineno=1,
            msg="Custom message",
            args=(),
            exc_info=None
        )

        formatted = custom_formatter.format(record)

        assert formatted.startswith("[WARNING]")
        assert "custom_logger:" in formatted
        assert "Custom message" in formatted


class TestLoggerIntegration:
    """Integration tests for logging functionality."""

    def test_logger_with_multiple_handlers(self):
        """Test a logger with multiple handlers."""
        logger = logging.getLogger("multi_handler_test")
        logger.setLevel(logging.INFO)

        # Clear any existing handlers
        logger.handlers = []

        # Add a console handler
        console_handler = logging.StreamHandler()
        console_handler.setLevel(logging.INFO)
        logger.addHandler(console_handler)

        # Add a file handler
        with tempfile.NamedTemporaryFile(mode='w', delete=False) as temp_file:
            file_handler = logging.FileHandler(temp_file.name)
            file_handler.setLevel(logging.WARNING)
            logger.addHandler(file_handler)

        # Log messages at different levels
        with patch.object(console_handler, 'emit') as mock_console:
            with patch.object(file_handler, 'emit') as mock_file:
                logger.info("Info message")        # Should go to the console only
                logger.warning("Warning message")  # Should go to both
                logger.error("Error message")      # Should go to both

                # The console handler should receive all messages
                assert mock_console.call_count == 3

                # The file handler should receive only warning and error
                assert mock_file.call_count == 2

        file_handler.close()
        os.unlink(temp_file.name)

    def test_logger_hierarchy(self):
        """Test logger hierarchy and inheritance."""
        parent_logger = logging.getLogger("parent")
        child_logger = logging.getLogger("parent.child")
        grandchild_logger = logging.getLogger("parent.child.grandchild")

        # Set the level on the parent
        parent_logger.setLevel(logging.WARNING)

        # Child loggers should inherit the level through the hierarchy
        assert child_logger.parent == parent_logger
        assert grandchild_logger.parent == child_logger

    def test_logger_configuration_persistence(self):
        """Test that logger configuration persists."""
        logger_name = "persistent_test"

        # Configure the logger
        logger1 = logging.getLogger(logger_name)
        logger1.setLevel(logging.DEBUG)

        # Get the same logger instance
        logger2 = logging.getLogger(logger_name)

        # Should be the same instance with the same configuration
        assert logger1 is logger2
        assert logger2.level == logging.DEBUG


class TestLoggerErrorHandling:
    """Test error handling in logging functionality."""

    def test_logging_with_invalid_level(self):
        """Test logging with an invalid level."""
        logger = logging.getLogger("invalid_level_test")

        # Setting an invalid level should not crash the test
        try:
            logger.setLevel("INVALID_LEVEL")
        except (ValueError, TypeError):
            # Expected to raise an exception
            pass

    def test_logging_to_readonly_file(self):
        """Test logging to a read-only file."""
        with tempfile.NamedTemporaryFile(delete=False) as temp_file:
            temp_file.write(b"existing content")
            temp_file_path = temp_file.name

        try:
            # Make the file read-only
            os.chmod(temp_file_path, 0o444)

            # Trying to create a file handler should be handled gracefully
            try:
                handler = logging.FileHandler(temp_file_path)
                handler.close()
            except PermissionError:
                # Expected behavior
                pass
        finally:
            # Clean up
            try:
                os.chmod(temp_file_path, 0o666)
                os.unlink(temp_file_path)
            except OSError:
                pass

    def test_logging_with_missing_directory(self):
        """Test logging to a file in a non-existent directory."""
        non_existent_path = "/non/existent/directory/test.log"

        # Should handle the missing directory gracefully
        try:
            handler = logging.FileHandler(non_existent_path)
            handler.close()
        except (FileNotFoundError, OSError):
            # Expected behavior
            pass


if __name__ == "__main__":
    pytest.main([__file__, "-v"])
@ -1,551 +0,0 @@
|
|||||||
"""
|
|
||||||
Unit tests for environment configuration, error handling, and modular architecture.
|
|
||||||
|
|
||||||
Tests configuration loading, centralized error handling, module structure,
|
|
||||||
and architectural component integration.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import json
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
import tempfile
|
|
||||||
from unittest.mock import Mock, patch
|
|
||||||
|
|
||||||
import pytest
|
|
||||||
|
|
||||||
# Add source directory to path
|
|
||||||
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..'))
|
|
||||||
|
|
||||||
# Import after path setup
|
|
||||||
from src.server.fastapi_app import Settings # noqa: E402
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.unit
|
|
||||||
class TestEnvironmentConfiguration:
|
|
||||||
"""Test environment configuration loading and validation."""
|
|
||||||
|
|
||||||
def test_settings_default_values(self):
|
|
||||||
"""Test that settings have appropriate default values."""
|
|
||||||
settings = Settings()
|
|
||||||
|
|
||||||
# Test that defaults are set
|
|
||||||
assert settings.jwt_secret_key is not None
|
|
||||||
assert settings.password_salt is not None
|
|
||||||
assert settings.token_expiry_hours > 0
|
|
||||||
assert settings.database_url is not None
|
|
||||||
assert settings.log_level in ["DEBUG", "INFO", "WARNING", "ERROR", "FATAL"]
|
|
||||||
|
|
||||||
def test_settings_from_environment(self):
|
|
||||||
"""Test loading settings from environment variables."""
|
|
||||||
env_vars = {
|
|
||||||
'JWT_SECRET_KEY': 'test-jwt-secret',
|
|
||||||
'PASSWORD_SALT': 'test-salt',
|
|
||||||
'MASTER_PASSWORD_HASH': 'test-hash',
|
|
||||||
'SESSION_TIMEOUT_HOURS': '12',
|
|
||||||
'ANIME_DIRECTORY': '/test/anime',
|
|
||||||
'LOG_LEVEL': 'DEBUG',
|
|
||||||
'DATABASE_URL': 'sqlite:///test.db',
|
|
||||||
'CORS_ORIGINS': 'localhost:3000',
|
|
||||||
'API_RATE_LIMIT': '50',
|
|
||||||
'DEFAULT_PROVIDER': 'test.provider',
|
|
||||||
'PROVIDER_TIMEOUT': '15',
|
|
||||||
'RETRY_ATTEMPTS': '5'
|
|
||||||
}
|
|
||||||
|
|
||||||
with patch.dict(os.environ, env_vars):
|
|
||||||
settings = Settings()
|
|
||||||
|
|
||||||
assert settings.jwt_secret_key == 'test-jwt-secret'
|
|
||||||
assert settings.password_salt == 'test-salt'
|
|
||||||
assert settings.master_password_hash == 'test-hash'
|
|
||||||
assert settings.token_expiry_hours == 12
|
|
||||||
assert settings.anime_directory == '/test/anime'
|
|
||||||
assert settings.log_level == 'DEBUG'
|
|
||||||
assert settings.database_url == 'sqlite:///test.db'
|
|
||||||
assert settings.cors_origins == 'localhost:3000'
|
|
||||||
assert settings.api_rate_limit == 50
|
|
||||||
assert settings.default_provider == 'test.provider'
|
|
||||||
assert settings.provider_timeout == 15
|
|
||||||
assert settings.retry_attempts == 5
|
|
||||||
|
|
||||||
def test_settings_validation(self):
|
|
||||||
"""Test settings validation for invalid values."""
|
|
||||||
# Test with invalid timeout hours
|
|
||||||
with patch.dict(os.environ, {'SESSION_TIMEOUT_HOURS': '-1'}):
|
|
||||||
settings = Settings()
|
|
||||||
# Should handle invalid values gracefully or use defaults
|
|
||||||
assert settings.token_expiry_hours >= 0
|
|
||||||
|
|
||||||
# Test with invalid retry attempts
|
|
||||||
with patch.dict(os.environ, {'RETRY_ATTEMPTS': '0'}):
|
|
||||||
settings = Settings()
|
|
||||||
# Should ensure minimum retry attempts
|
|
||||||
assert settings.retry_attempts >= 0

    def test_configuration_file_loading(self):
        """Test loading configuration from file."""
        config_data = {
            "jwt_secret_key": "file-secret",
            "anime_directory": "/file/anime/path",
            "log_level": "INFO"
        }

        # delete=False so the file can be reopened by path below; on Windows a
        # NamedTemporaryFile cannot be reopened while it is still held open.
        with tempfile.NamedTemporaryFile(mode='w', suffix='.json', delete=False) as f:
            json.dump(config_data, f)
            config_file = f.name

        try:
            def load_config_from_file(file_path):
                """Mock function to load config from file."""
                if os.path.exists(file_path):
                    with open(file_path, 'r') as f:
                        return json.load(f)
                return {}

            loaded_config = load_config_from_file(config_file)

            assert loaded_config['jwt_secret_key'] == 'file-secret'
            assert loaded_config['anime_directory'] == '/file/anime/path'
            assert loaded_config['log_level'] == 'INFO'
        finally:
            os.unlink(config_file)

    def test_configuration_precedence(self):
        """Test configuration precedence (env vars override defaults)."""
        # Environment variable should override default
        with patch.dict(os.environ, {'JWT_SECRET_KEY': 'env-override'}):
            settings = Settings()
            assert settings.jwt_secret_key == 'env-override'

        # Default should be used when env var is not set
        with patch.dict(os.environ, {}, clear=True):
            settings = Settings()
            assert settings.jwt_secret_key == "your-secret-key-here"  # Default value
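
        # The precedence assumed here is the common chain: explicit env vars win
        # over hard-coded defaults. If file-based config (see
        # test_configuration_file_loading) were wired in, it would typically sit
        # between the two: env > config file > defaults.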


@pytest.mark.unit
class TestErrorHandling:
    """Test centralized error handling functionality."""

    def test_custom_exception_creation(self):
        """Test creation and usage of custom exceptions."""
        # Import custom exceptions if they exist
        try:
            from src.core.exceptions.Exceptions import (  # noqa: F401
                MatchNotFoundError,
                NoKeyFoundException,
            )

            # Test exception creation
            key_error = NoKeyFoundException("Key not found")
            assert str(key_error) == "Key not found"
            assert isinstance(key_error, Exception)

            match_error = MatchNotFoundError("No match found")
            assert str(match_error) == "No match found"
            assert isinstance(match_error, Exception)

        except ImportError:
            # If custom exceptions don't exist, test generic exception handling
            class CustomError(Exception):
                pass

            error = CustomError("Test error")
            assert str(error) == "Test error"

    def test_error_logging_and_reporting(self):
        """Test error logging and reporting functionality."""
        def log_error(error, context=None):
            """Mock error logging function."""
            return {
                "error_type": type(error).__name__,
                "error_message": str(error),
                "context": context or {},
                "logged": True
            }

        # Test basic error logging
        test_error = ValueError("Test value error")
        result = log_error(test_error)

        assert result["error_type"] == "ValueError"
        assert result["error_message"] == "Test value error"
        assert result["logged"] is True

        # Test error logging with context
        context = {"user": "test_user", "action": "download"}
        result = log_error(test_error, context)

        assert result["context"] == context

    def test_error_response_formatting(self):
        """Test error response formatting for APIs."""
        def format_error_response(error, status_code=500):
            """Format error for API response."""
            return {
                "success": False,
                "error": str(error),
                "code": type(error).__name__,
                "status_code": status_code
            }

        # Test various error types
        errors = [
            (ValueError("Invalid input"), 400),
            (FileNotFoundError("File not found"), 404),
            (PermissionError("Access denied"), 403),
            (Exception("Server error"), 500)
        ]

        for error, expected_code in errors:
            response = format_error_response(error, expected_code)

            assert response["success"] is False
            assert response["error"] == str(error)
            assert response["code"] == type(error).__name__
            assert response["status_code"] == expected_code

    def test_error_recovery_strategies(self):
        """Test error recovery strategy implementation."""
        def execute_with_recovery(func, recovery_strategies=None):
            """Execute function with recovery strategies."""
            if recovery_strategies is None:
                recovery_strategies = []

            try:
                return func()
            except Exception as e:
                # Try each strategy in order; the first one that does not
                # itself raise supplies the result.
                for strategy in recovery_strategies:
                    try:
                        return strategy(e)
                    except Exception:
                        continue
                # No strategy could handle the error; re-raise the original.
                raise e

        # Test successful execution
        def success_func():
            return "success"

        result = execute_with_recovery(success_func)
        assert result == "success"

        # Test with recovery strategy
        def failing_func():
            raise ValueError("Test error")

        def recovery_strategy(error):
            return f"recovered from {type(error).__name__}"

        result = execute_with_recovery(failing_func, [recovery_strategy])
        assert result == "recovered from ValueError"
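
        # Note: strategies run strictly in list order, so more specific handlers
        # should be registered before catch-all ones; a production variant might
        # also filter strategies by exception type rather than trying them all.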


@pytest.mark.unit
class TestModularArchitecture:
    """Test modular architecture components and integration."""

    def test_provider_factory_pattern(self):
        """Test provider factory pattern implementation."""
        def create_provider_factory():
            """Mock provider factory implementation."""
            class ProviderFactory:
                def __init__(self):
                    self.providers = {}

                def register_provider(self, name, provider_class):
                    self.providers[name] = provider_class

                def get_provider(self, name):
                    if name not in self.providers:
                        raise ValueError(f"Provider {name} not found")
                    # A fresh instance is created on every lookup
                    return self.providers[name]()

                def list_providers(self):
                    return list(self.providers.keys())

            return ProviderFactory()

        # Test factory functionality
        factory = create_provider_factory()

        # Mock provider classes
        class TestProvider:
            def get_name(self):
                return "test_provider"

        class AnotherProvider:
            def get_name(self):
                return "another_provider"

        # Register providers
        factory.register_provider("test", TestProvider)
        factory.register_provider("another", AnotherProvider)

        # Test provider retrieval
        provider = factory.get_provider("test")
        assert provider.get_name() == "test_provider"

        # Test provider listing
        providers = factory.list_providers()
        assert "test" in providers
        assert "another" in providers

        # Test error for unknown provider
        with pytest.raises(ValueError):
            factory.get_provider("unknown")
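
        # In the real application the factory would presumably be populated once
        # at startup (e.g. registering the DEFAULT_PROVIDER from settings); the
        # classes registered here are purely illustrative stand-ins.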

    def test_dependency_injection_pattern(self):
        """Test dependency injection pattern implementation."""
        class ServiceContainer:
            def __init__(self):
                self.services = {}
                self.singletons = {}

            def register(self, name, service_class, singleton=False):
                self.services[name] = {
                    'class': service_class,
                    'singleton': singleton
                }

            def get(self, name):
                if name not in self.services:
                    raise ValueError(f"Service {name} not registered")

                service_info = self.services[name]

                if service_info['singleton']:
                    # Lazily instantiate and cache the single shared instance
                    if name not in self.singletons:
                        self.singletons[name] = service_info['class']()
                    return self.singletons[name]
                else:
                    return service_info['class']()

        # Test container functionality
        container = ServiceContainer()

        # Mock services
        class DatabaseService:
            def connect(self):
                return "connected"

        class LoggingService:
            def log(self, message):
                return f"logged: {message}"

        # Register services
        container.register("database", DatabaseService, singleton=True)
        container.register("logging", LoggingService, singleton=False)

        # Test singleton behavior
        db1 = container.get("database")
        db2 = container.get("database")
        assert db1 is db2  # Same instance

        # Test non-singleton behavior
        log1 = container.get("logging")
        log2 = container.get("logging")
        assert log1 is not log2  # Different instances
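
        # Caveat: this minimal container is not thread-safe; two threads could
        # race in get() and construct the "singleton" twice. A real container
        # would guard the lazy initialization with a lock.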

    def test_repository_pattern(self):
        """Test repository pattern implementation."""
        class BaseRepository:
            def __init__(self, data_source):
                self.data_source = data_source

            def find_all(self):
                return self.data_source.get_all()

            def find_by_id(self, entity_id):
                return self.data_source.get_by_id(entity_id)

            def save(self, entity):
                return self.data_source.save(entity)

            def delete(self, entity_id):
                return self.data_source.delete(entity_id)

        class AnimeRepository(BaseRepository):
            def find_by_genre(self, genre):
                all_anime = self.find_all()
                return [anime for anime in all_anime if anime.get('genre') == genre]

        # Mock data source
        class MockDataSource:
            def __init__(self):
                self.data = {
                    1: {"id": 1, "title": "Anime 1", "genre": "Action"},
                    2: {"id": 2, "title": "Anime 2", "genre": "Romance"}
                }

            def get_all(self):
                return list(self.data.values())

            def get_by_id(self, entity_id):
                return self.data.get(entity_id)

            def save(self, entity):
                entity_id = len(self.data) + 1
                entity["id"] = entity_id
                self.data[entity_id] = entity
                return entity

            def delete(self, entity_id):
                return self.data.pop(entity_id, None)

        # Test repository functionality
        data_source = MockDataSource()
        repo = AnimeRepository(data_source)

        # Test find operations
        all_anime = repo.find_all()
        assert len(all_anime) == 2

        anime = repo.find_by_id(1)
        assert anime["title"] == "Anime 1"

        action_anime = repo.find_by_genre("Action")
        assert len(action_anime) == 1
        assert action_anime[0]["title"] == "Anime 1"

        # Test save operation
        new_anime = {"title": "New Anime", "genre": "Comedy"}
        saved = repo.save(new_anime)
        assert saved["id"] == 3

        # Test delete operation
        deleted = repo.delete(1)
        assert deleted["title"] == "Anime 1"
        assert repo.find_by_id(1) is None
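
        # Caveat: MockDataSource derives new ids from len(self.data) + 1, which
        # collides once entries are deleted (after delete(1) the next save would
        # reuse id 3). Fine for this fixed sequence; a real data source would
        # use an autoincrement counter.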


@pytest.mark.unit
class TestModuleIntegration:
    """Test integration between different modules."""

    def test_service_layer_integration(self):
        """Test integration between service layer components."""
        class AnimeService:
            def __init__(self, repository, provider):
                self.repository = repository
                self.provider = provider

            def search_and_save(self, query):
                # Search using provider
                results = self.provider.search(query)

                # Save results using repository
                saved_results = []
                for result in results:
                    saved = self.repository.save(result)
                    saved_results.append(saved)

                return saved_results

        # Mock dependencies: save() echoes the entity back with a fixed id
        mock_repository = Mock()
        mock_repository.save.side_effect = lambda x: {**x, "id": 1}

        mock_provider = Mock()
        mock_provider.search.return_value = [
            {"title": "Found Anime", "genre": "Action"}
        ]

        # Test service integration
        service = AnimeService(mock_repository, mock_provider)
        results = service.search_and_save("test query")

        assert len(results) == 1
        assert results[0]["title"] == "Found Anime"
        assert results[0]["id"] == 1

        mock_provider.search.assert_called_once_with("test query")
        mock_repository.save.assert_called_once()

    def test_cross_module_event_handling(self):
        """Test event handling across modules."""
        class EventBus:
            def __init__(self):
                self.listeners = {}

            def subscribe(self, event_type, listener):
                if event_type not in self.listeners:
                    self.listeners[event_type] = []
                self.listeners[event_type].append(listener)

            def publish(self, event_type, data):
                if event_type in self.listeners:
                    for listener in self.listeners[event_type]:
                        listener(data)

        # Test event bus functionality
        event_bus = EventBus()

        # Mock event listeners
        listener1_calls = []
        listener2_calls = []

        def listener1(data):
            listener1_calls.append(data)

        def listener2(data):
            listener2_calls.append(data)

        # Subscribe to events
        event_bus.subscribe("anime_downloaded", listener1)
        event_bus.subscribe("anime_downloaded", listener2)

        # Publish event
        event_data = {"anime_id": 123, "status": "completed"}
        event_bus.publish("anime_downloaded", event_data)

        # Verify listeners were called
        assert len(listener1_calls) == 1
        assert len(listener2_calls) == 1
        assert listener1_calls[0] == event_data
        assert listener2_calls[0] == event_data
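
        # Note: publish() dispatches synchronously and does not isolate
        # listeners, so one raising listener would abort delivery to the rest.
        # An async or queued bus would be the likelier production choice.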

    def test_configuration_module_integration(self):
        """Test integration with configuration module."""
        class ConfigManager:
            def __init__(self):
                self.config = {}
                self.observers = []

            def set(self, key, value):
                old_value = self.config.get(key)
                self.config[key] = value

                # Notify observers of change
                for observer in self.observers:
                    observer(key, old_value, value)

            def get(self, key, default=None):
                return self.config.get(key, default)

            def add_observer(self, observer):
                self.observers.append(observer)

        # Test configuration management
        config_manager = ConfigManager()

        # Mock observer
        config_changes = []

        def config_observer(key, old_value, new_value):
            config_changes.append({
                "key": key,
                "old": old_value,
                "new": new_value
            })

        config_manager.add_observer(config_observer)

        # Test configuration changes
        config_manager.set("anime_directory", "/old/path")
        config_manager.set("anime_directory", "/new/path")

        assert len(config_changes) == 2
        assert config_changes[0]["key"] == "anime_directory"
        assert config_changes[0]["old"] is None
        assert config_changes[0]["new"] == "/old/path"

        assert config_changes[1]["old"] == "/old/path"
        assert config_changes[1]["new"] == "/new/path"
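
        # Observers fire on every set(), including the first assignment (with
        # old_value None) and even when the value is unchanged; a real
        # ConfigManager might skip notification for no-op writes.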