Compare commits

6 Commits: 6d0c3fdf26 ... 6a695966bf

| Author | SHA1 | Date |
|---|---|---|
|  | 6a695966bf |  |
|  | 7481a33c15 |  |
|  | e48cb29131 |  |
|  | 7b933b6cdb |  |
|  | 7a71715183 |  |
|  | 57d49bcf78 |  |
@@ -1,46 +1,46 @@
# EditorConfig is awesome: https://EditorConfig.org

# top-most EditorConfig file
root = true

# Unix-style newlines with a newline ending every file
[*]
charset = utf-8
end_of_line = lf
insert_final_newline = true
trim_trailing_whitespace = true
indent_style = space
indent_size = 4

# Python files
[*.py]
max_line_length = 88
indent_size = 4

# Web files
[*.{html,css,js,json,yaml,yml}]
indent_size = 2

# Markdown files
[*.md]
trim_trailing_whitespace = false

# Configuration files
[*.{ini,cfg,conf,toml}]
indent_size = 4

# Docker files
[{Dockerfile*,*.dockerfile}]
indent_size = 4

# Shell scripts
[*.{sh,bat}]
indent_size = 4

# SQL files
[*.sql]
indent_size = 2

# Template files
[*.{j2,jinja,jinja2}]
indent_size = 2
44  .env
@@ -1,44 +0,0 @@
# Aniworld Server Environment Configuration

# Security (REQUIRED - Generate secure random values)
SECRET_KEY=dev_secret_key_change_in_production_12345
JWT_SECRET_KEY=jwt_secret_key_change_in_production_67890
PASSWORD_SALT=salt_change_in_production_abcdef

# Master Password Authentication (Simple system)
MASTER_PASSWORD_HASH=8cf532e926e9493630820ce80005f6e2239305ac64c34069e869be5106e2af10
# MASTER_PASSWORD=admin123 # Used for development only, remove in production

# Database Configuration
DATABASE_URL=sqlite:///data/aniworld.db
DATABASE_POOL_SIZE=10
DATABASE_MAX_OVERFLOW=20
DATABASE_POOL_TIMEOUT=30
DATABASE_POOL_RECYCLE=3600

# Redis Configuration (for caching and sessions)
REDIS_URL=redis://localhost:6379/0
REDIS_MAX_CONNECTIONS=10
REDIS_SOCKET_TIMEOUT=5

# Security Settings
SESSION_TIMEOUT_HOURS=24
MAX_FAILED_LOGIN_ATTEMPTS=5
LOCKOUT_DURATION_MINUTES=30

# Rate Limiting
RATE_LIMIT_PER_MINUTE=60
API_RATE_LIMIT_PER_MINUTE=100

# Application Settings
DEBUG=true
HOST=127.0.0.1
PORT=5000

# Anime and Download Settings
ANIME_DIRECTORY=./downloads
MAX_CONCURRENT_DOWNLOADS=3

# Logging
LOG_LEVEL=INFO
LOG_FILE=logs/aniworld.log
@@ -1,56 +0,0 @@
# Aniworld Server Environment Configuration
# Copy this file to .env and fill in your values

# Security (REQUIRED - Generate secure random values)
SECRET_KEY=your_secret_key_here
JWT_SECRET_KEY=your_jwt_secret_here
PASSWORD_SALT=your_password_salt_here

# Database Configuration
DATABASE_URL=sqlite:///data/aniworld.db
# DATABASE_PASSWORD=your_db_password_here
DATABASE_POOL_SIZE=10
DATABASE_MAX_OVERFLOW=20
DATABASE_POOL_TIMEOUT=30
DATABASE_POOL_RECYCLE=3600

# Redis Configuration (for caching and sessions)
REDIS_URL=redis://localhost:6379/0
# REDIS_PASSWORD=your_redis_password_here
REDIS_MAX_CONNECTIONS=10
REDIS_SOCKET_TIMEOUT=5

# Email Configuration (for password reset emails)
SMTP_SERVER=localhost
SMTP_PORT=587
# SMTP_USERNAME=your_smtp_username
# SMTP_PASSWORD=your_smtp_password
SMTP_USE_TLS=true
FROM_EMAIL=noreply@aniworld.local

# External API Keys
# ANIME_PROVIDER_API_KEY=your_anime_provider_api_key
# TMDB_API_KEY=your_tmdb_api_key

# Security Settings
SESSION_TIMEOUT_HOURS=24
MAX_FAILED_LOGIN_ATTEMPTS=5
LOCKOUT_DURATION_MINUTES=30

# Rate Limiting
RATE_LIMIT_PER_MINUTE=60
API_RATE_LIMIT_PER_MINUTE=100

# Application Settings
DEBUG=false
HOST=127.0.0.1
PORT=5000

# Anime and Download Settings
ANIME_DIRECTORY=./downloads
MAX_CONCURRENT_DOWNLOADS=3
# DOWNLOAD_SPEED_LIMIT=1000000 # bytes per second

# Logging
LOG_LEVEL=INFO
LOG_FILE=logs/aniworld.log
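For orientation only: a minimal sketch of how values like these might be read at application startup, assuming the `python-dotenv` package mentioned in the project guidelines is used. The variable choices and defaults below are illustrative, not the project's actual configuration module.

```python
# Illustrative only: loads the .env values documented above with python-dotenv.
import os

from dotenv import load_dotenv  # assumes python-dotenv is installed

load_dotenv()  # reads .env from the current working directory

DATABASE_URL = os.getenv("DATABASE_URL", "sqlite:///data/aniworld.db")
MAX_CONCURRENT_DOWNLOADS = int(os.getenv("MAX_CONCURRENT_DOWNLOADS", "3"))
DEBUG = os.getenv("DEBUG", "false").lower() == "true"
JWT_SECRET_KEY = os.environ["JWT_SECRET_KEY"]  # required, so no default is given
```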
28  .flake8
@@ -1,28 +0,0 @@
[flake8]
max-line-length = 88
exclude =
    .git,
    __pycache__,
    build,
    dist,
    .venv,
    venv,
    aniworld,
    migrations,
    .pytest_cache,
    .mypy_cache,
    .coverage,
    htmlcov
extend-ignore =
    # E203: whitespace before ':' (conflicts with black)
    E203,
    # W503: line break before binary operator (conflicts with black)
    W503,
    # E501: line too long (handled by black)
    E501
per-file-ignores =
    __init__.py:F401
    tests/*:F401,F811
max-complexity = 10
docstring-convention = google
import-order-style = google
86  .github/PULL_REQUEST_TEMPLATE.md vendored
@@ -1,44 +1,44 @@
# Pull Request Template

## Description
Brief description of the changes in this PR.

## Type of Change
- [ ] Bug fix (non-breaking change which fixes an issue)
- [ ] New feature (non-breaking change which adds functionality)
- [ ] Breaking change (fix or feature that would cause existing functionality to not work as expected)
- [ ] Documentation update
- [ ] Code refactoring
- [ ] Performance improvement
- [ ] Test improvement

## Changes Made
- List the main changes
- Include any new files added
- Include any files removed or renamed

## Testing
- [ ] Unit tests pass
- [ ] Integration tests pass
- [ ] Manual testing completed
- [ ] Performance testing (if applicable)

## Screenshots (if applicable)
Add screenshots of UI changes or new features.

## Checklist
- [ ] My code follows the project's coding standards
- [ ] I have performed a self-review of my own code
- [ ] I have commented my code, particularly in hard-to-understand areas
- [ ] I have made corresponding changes to the documentation
- [ ] My changes generate no new warnings
- [ ] I have added tests that prove my fix is effective or that my feature works
- [ ] New and existing unit tests pass locally with my changes
- [ ] Any dependent changes have been merged and published

## Related Issues
Fixes #(issue number)
Related to #(issue number)

## Additional Notes
Any additional information, deployment notes, or context for reviewers.
260  .github/copilot-instructions.md vendored
@@ -1,139 +1,121 @@
# GitHub Copilot Instructions

These instructions define how GitHub Copilot should assist with this project. The goal is to ensure consistent, high-quality code generation aligned with our conventions, stack, and best practices.

## 🧠 Context

- **Project Type**: Web API / Data Pipeline / CLI Tool / ML App
- **Language**: Python
- **Framework / Libraries**: FastAPI / Flask / Django / Pandas / Pydantic / Poetry
- **Architecture**: MVC / Clean Architecture / Event-Driven / Microservices

## 🔧 General Guidelines

- Use Pythonic patterns (PEP8, PEP257).
- Prefer named functions and class-based structures over inline lambdas.
- Use type hints where applicable (`typing` module).
- Follow black or isort for formatting and import order.
- Use meaningful naming; avoid cryptic variables.
- Emphasize simplicity, readability, and DRY principles.

## 📁 File Structure

Use this structure as a guide when creating or updating files:

```text
src/
  controllers/
  services/
  repositories/
  schemas/
  utils/
  config/
tests/
  unit/
  integration/
```

## 🧶 Patterns

### ✅ Patterns to Follow

- Use the Repository Pattern and Dependency Injection (e.g., via `Depends` in FastAPI).
- Validate data using Pydantic models.
- Use custom exceptions and centralized error handling.
- Use environment variables via `dotenv` or `os.environ`.
- Use logging via the `logging` module or structlog.
- Write modular, reusable code organized by concerns (e.g., controller, service, data layer).
- Favor async endpoints for I/O-bound services (FastAPI, aiohttp).
- Document functions and classes with docstrings.

### 🚫 Patterns to Avoid

- Don’t use wildcard imports (`from module import *`).
- Avoid global state unless encapsulated in a singleton or config manager.
- Don’t hardcode secrets or config values—use `.env`.
- Don’t expose internal stack traces in production environments.
- Avoid business logic inside views/routes.

## 🧪 Testing Guidelines

- Use `pytest` or `unittest` for unit and integration tests.
- Mock external services with `unittest.mock` or `pytest-mock`.
- Use fixtures to set up and tear down test data.
- Aim for high coverage on core logic and low-level utilities.
- Test both happy paths and edge cases.

## 🧩 Example Prompts

- `Copilot, create a FastAPI endpoint that returns all users from the database.`
- `Copilot, write a Pydantic model for a product with id, name, and optional price.`
- `Copilot, implement a CLI command that uploads a CSV file and logs a summary.`
- `Copilot, write a pytest test for the transform_data function using a mock input.`

## 🔁 Iteration & Review

- Review Copilot output before committing.
- Add comments to clarify intent if Copilot generates incorrect or unclear suggestions.
- Use linters (flake8, pylint) and formatters (black, isort) as part of the review pipeline.
- Refactor output to follow project conventions.

## 📚 References

- [PEP 8 – Style Guide for Python Code](https://peps.python.org/pep-0008/)
- [PEP 484 – Type Hints](https://peps.python.org/pep-0484/)
- [FastAPI Documentation](https://fastapi.tiangolo.com/)
- [Django Documentation](https://docs.djangoproject.com/en/stable/)
- [Flask Documentation](https://flask.palletsprojects.com/)
- [Pytest Documentation](https://docs.pytest.org/en/stable/)
- [Pydantic Documentation](https://docs.pydantic.dev/)
- [Python Logging Best Practices](https://docs.python.org/3/howto/logging.html)
- [Black Code Formatter](https://black.readthedocs.io/)
- [Poetry](https://python-poetry.org/docs/)

## 1. General Philosophy

* **Clarity is King:** Code should be easy to understand at a glance.
* **Consistency Matters:** Adhere to these standards across all projects.
* **Automation Encouraged:** Utilize tools like StyleCop, Roslyn Analyzers, and .editorconfig to enforce these standards automatically.
* **Evolve and Adapt:** These standards should be reviewed and updated as the C# language and best practices evolve.
* **Practicality Reigns:** While striving for perfection, prioritize pragmatic solutions that balance maintainability and development speed.

* CleanCode, Keep it simple, MVVM

## 2. Security Considerations

* **Input Validation:** Always validate user input to prevent injection attacks (e.g., SQL injection, XSS).
* **Secure Configuration:** Store sensitive information (e.g., passwords, API keys) in secure configuration files, and encrypt them if possible. Avoid hardcoding sensitive data.
* **Authentication and Authorization:** Implement proper authentication and authorization mechanisms to protect resources. Favor using built-in identity frameworks.
* **Data Encryption:** Encrypt sensitive data at rest and in transit. Use strong encryption algorithms.
* **Regular Security Audits:** Perform regular security audits and penetration testing to identify and address vulnerabilities.
* **Dependency Vulnerabilities:** Keep dependencies up-to-date to patch known security vulnerabilities. Use tools to automatically check for vulnerabilities.

## 3. Performance Optimization

* **Minimize Object Allocation:** Reduce unnecessary object allocations, especially in performance-critical code. Use techniques like object pooling and struct types for small value types.
* **Use Efficient Data Structures:** Choose the appropriate data structures for the task (e.g., "Dictionary" for fast lookups, "List" for ordered collections).
* **Avoid Boxing/Unboxing:** Avoid boxing and unboxing operations, as they can be expensive. Use generics to prevent boxing.
* **String Concatenation:** Use "StringBuilder" for building strings in loops instead of repeated string concatenation.
* **Asynchronous I/O:** Use asynchronous I/O operations to avoid blocking threads.
* **Profiling:** Use profiling tools to identify performance bottlenecks.

## 4. GUI

* **Effortless:** faster and more intuitive. It's easy to do what I want, with focus and precision.
* **Calm:** faster and more intuitive. It's easy to do what I want, with focus and precision.
* **Iconography:** Iconography is a set of visual images and symbols that help users understand and navigate your app. Windows 11 iconography has evolved in concert with our design language. Every glyph in our system icon font has been redesigned to embrace a softer geometry and more modern metaphors.
* **Shapes and geometry:** Geometry describes the shape, size, and position of UI elements on screen. These fundamental design elements help experiences feel coherent across the entire design system. Windows 11 features updated geometry that creates a more approachable, engaging, and modern experience.
* **Typography:** As the visual representation of language, the main task of typography is to communicate information. The Windows 11 type system helps you create structure and hierarchy in your content in order to maximize legibility and readability in your UI.
* **Familiar:** faster and more intuitive. It's easy to do what I want, with focus and precision.
* **Familiar:** faster and more intuitive. It's easy to do what I want, with focus and precision.
* **Fluent UI design:** Use Fluent UI design
* **Themes:** Use the already defined Theme color. Make sure there is always a dark and light mode.
* **Text:** Write in resource files so that a translation is easily possible. Use the already defined text in the resource files.

This document serves as a starting point and is meant to be adapted to the specific needs of each project and team. Regularly review and update these standards to keep them relevant and effective.

Run till you are really finished.
Do not guess, open and read files if you don't know something.

# GitHub Copilot Instructions

These instructions define how GitHub Copilot should assist with this project. The goal is to ensure consistent, high-quality code generation aligned with our conventions, stack, and best practices.

## 🧠 Context

- **Project Type**: Web API / Data Pipeline / CLI Tool / ML App
- **Language**: Python
- **Framework / Libraries**: FastAPI / Flask / Django / Pandas / Pydantic / Poetry
- **Architecture**: MVC / Clean Architecture / Event-Driven / Microservices

## 🔧 General Guidelines

- Use Pythonic patterns (PEP8, PEP257).
- Prefer named functions and class-based structures over inline lambdas.
- Use type hints where applicable (`typing` module).
- Follow black or isort for formatting and import order.
- Use meaningful naming; avoid cryptic variables.
- Emphasize simplicity, readability, and DRY principles.

## 🧶 Patterns

### ✅ Patterns to Follow

- Use the Repository Pattern and Dependency Injection (e.g., via `Depends` in FastAPI).
- Validate data using Pydantic models.
- Use custom exceptions and centralized error handling.
- Use environment variables via `dotenv` or `os.environ`.
- Use logging via the `logging` module or structlog.
- Write modular, reusable code organized by concerns (e.g., controller, service, data layer).
- Favor async endpoints for I/O-bound services (FastAPI, aiohttp).
- Document functions and classes with docstrings.
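A minimal sketch of the repository pattern, `Depends` injection, and Pydantic validation listed above. All names here (`Anime`, `AnimeRepository`, the `/anime` route) are hypothetical placeholders, not actual project code.

```python
# Hypothetical illustration of the patterns above (names are not from the project).
from fastapi import Depends, FastAPI
from pydantic import BaseModel

app = FastAPI()


class Anime(BaseModel):
    """Response schema validated and serialized by Pydantic."""

    id: int
    title: str


class AnimeRepository:
    """Data access is isolated behind a repository."""

    async def list_all(self) -> list[Anime]:
        # Replace with real database access.
        return [Anime(id=1, title="Example Series")]


def get_repository() -> AnimeRepository:
    """Dependency provider wired in via FastAPI's Depends."""
    return AnimeRepository()


@app.get("/anime", response_model=list[Anime])
async def list_anime(repo: AnimeRepository = Depends(get_repository)) -> list[Anime]:
    """Async endpoint that delegates to the repository; no business logic in the route."""
    return await repo.list_all()
```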
### 🚫 Patterns to Avoid

- Don’t use wildcard imports (`from module import *`).
- Avoid global state unless encapsulated in a singleton or config manager.
- Don’t hardcode secrets or config values—use `.env`.
- Don’t expose internal stack traces in production environments.
- Avoid business logic inside views/routes.

## 🧪 Testing Guidelines

- Use `pytest` or `unittest` for unit and integration tests.
- Mock external services with `unittest.mock` or `pytest-mock`.
- Use fixtures to set up and tear down test data.
- Aim for high coverage on core logic and low-level utilities.
- Test both happy paths and edge cases.

## 🧩 Example Prompts

- `Copilot, create a FastAPI endpoint that returns all users from the database.`
- `Copilot, write a Pydantic model for a product with id, name, and optional price.`
- `Copilot, implement a CLI command that uploads a CSV file and logs a summary.`
- `Copilot, write a pytest test for the transform_data function using a mock input.`

## 🔁 Iteration & Review

- Review Copilot output before committing.
- Add comments to clarify intent if Copilot generates incorrect or unclear suggestions.
- Use linters (flake8, pylint) and formatters (black, isort) as part of the review pipeline.
- Refactor output to follow project conventions.

## 📚 References

- [PEP 8 – Style Guide for Python Code](https://peps.python.org/pep-0008/)
- [PEP 484 – Type Hints](https://peps.python.org/pep-0484/)
- [FastAPI Documentation](https://fastapi.tiangolo.com/)
- [Django Documentation](https://docs.djangoproject.com/en/stable/)
- [Flask Documentation](https://flask.palletsprojects.com/)
- [Pytest Documentation](https://docs.pytest.org/en/stable/)
- [Pydantic Documentation](https://docs.pydantic.dev/)
- [Python Logging Best Practices](https://docs.python.org/3/howto/logging.html)
- [Black Code Formatter](https://black.readthedocs.io/)
- [Poetry](https://python-poetry.org/docs/)

## 1. General Philosophy

- **Clarity is King:** Code should be easy to understand at a glance.
- **Consistency Matters:** Adhere to these standards across all projects.
- **Automation Encouraged:** Utilize tools like StyleCop, Roslyn Analyzers, and .editorconfig to enforce these standards automatically.
- **Evolve and Adapt:** These standards should be reviewed and updated as the C# language and best practices evolve.
- **Practicality Reigns:** While striving for perfection, prioritize pragmatic solutions that balance maintainability and development speed.

- CleanCode, Keep it simple, MVVM

## 2. Security Considerations

- **Input Validation:** Always validate user input to prevent injection attacks (e.g., SQL injection, XSS).
- **Secure Configuration:** Store sensitive information (e.g., passwords, API keys) in secure configuration files, and encrypt them if possible. Avoid hardcoding sensitive data.
- **Authentication and Authorization:** Implement proper authentication and authorization mechanisms to protect resources. Favor using built-in identity frameworks.
- **Data Encryption:** Encrypt sensitive data at rest and in transit. Use strong encryption algorithms.
- **Regular Security Audits:** Perform regular security audits and penetration testing to identify and address vulnerabilities.
- **Dependency Vulnerabilities:** Keep dependencies up-to-date to patch known security vulnerabilities. Use tools to automatically check for vulnerabilities.

## 3. Performance Optimization

- **Minimize Object Allocation:** Reduce unnecessary object allocations, especially in performance-critical code. Use techniques like object pooling and struct types for small value types.
- **Use Efficient Data Structures:** Choose the appropriate data structures for the task (e.g., "Dictionary" for fast lookups, "List" for ordered collections).
- **Avoid Boxing/Unboxing:** Avoid boxing and unboxing operations, as they can be expensive. Use generics to prevent boxing.
- **String Concatenation:** Use "StringBuilder" for building strings in loops instead of repeated string concatenation.
- **Asynchronous I/O:** Use asynchronous I/O operations to avoid blocking threads.
- **Profiling:** Use profiling tools to identify performance bottlenecks.

## 4. GUI

- **Effortless:** faster and more intuitive. It's easy to do what I want, with focus and precision.
- **Calm:** faster and more intuitive. It's easy to do what I want, with focus and precision.
- **Iconography:** Iconography is a set of visual images and symbols that help users understand and navigate your app. Windows 11 iconography has evolved in concert with our design language. Every glyph in our system icon font has been redesigned to embrace a softer geometry and more modern metaphors.
- **Shapes and geometry:** Geometry describes the shape, size, and position of UI elements on screen. These fundamental design elements help experiences feel coherent across the entire design system. Windows 11 features updated geometry that creates a more approachable, engaging, and modern experience.
- **Typography:** As the visual representation of language, the main task of typography is to communicate information. The Windows 11 type system helps you create structure and hierarchy in your content in order to maximize legibility and readability in your UI.
- **Familiar:** faster and more intuitive. It's easy to do what I want, with focus and precision.
- **Familiar:** faster and more intuitive. It's easy to do what I want, with focus and precision.
- **Fluent UI design:** Use Fluent UI design
- **Themes:** Use the already defined Theme color. Make sure there is always a dark and light mode.
- **Text:** Write in resource files so that a translation is easily possible. Use the already defined text in the resource files.

This document serves as a starting point and is meant to be adapted to the specific needs of each project and team. Regularly review and update these standards to keep them relevant and effective.

Run till you are really finished.
Do not guess, open and read files if you don't know something.
40  .gitignore vendored
@@ -1,20 +1,20 @@
/.idea/*
/aniworld/bin/*
/aniworld/lib/*
/src/__pycache__/*
/src/__pycache__/
/.vs/*
/src/Temp/*
/src/Loaders/__pycache__/*
/src/Loaders/provider/__pycache__/*
/src/Loaders/__pycache__/*
/src/Loaders/__pycache__/AniWorldLoader.cpython-310.pyc
/src/Loaders/__pycache__/Loader.cpython-310.pyc
/src/Loaders/__pycache__/Loaders.cpython-310.pyc
/src/Loaders/__pycache__/Providers.cpython-310.pyc
/src/Loaders/provider/__pycache__/voe.cpython-310.pyc
/src/noGerFound.log
/src/errors.log
/src/server/__pycache__/*
/src/NoKeyFound.log
/download_errors.log
42  .vscode/extensions.json vendored
@@ -1,22 +1,22 @@
{
    "recommendations": [
        "ms-python.python",
        "ms-python.debugpy",
        "ms-python.flake8",
        "ms-python.black-formatter",
        "ms-python.isort",
        "ms-vscode.vscode-json",
        "bradlc.vscode-tailwindcss",
        "ms-vscode.vscode-docker",
        "ms-python.pylint",
        "ms-python.mypy-type-checker",
        "charliermarsh.ruff",
        "ms-vscode.test-adapter-converter",
        "littlefoxteam.vscode-python-test-adapter",
        "formulahendry.auto-rename-tag",
        "esbenp.prettier-vscode",
        "PKief.material-icon-theme",
        "GitHub.copilot",
        "GitHub.copilot-chat"
    ]
}
352  .vscode/launch.json vendored
@@ -1,177 +1,177 @@
{
    "version": "0.2.0",
    "configurations": [
        {
            "name": "Debug FastAPI App",
            "type": "debugpy",
            "request": "launch",
            "program": "${workspaceFolder}/src/server/fastapi_app.py",
            "console": "integratedTerminal",
            "justMyCode": true,
            "python": "C:\\Users\\lukas\\anaconda3\\envs\\AniWorld\\python.exe",
            "env": {
                "PYTHONPATH": "${workspaceFolder}/src:${workspaceFolder}",
                "JWT_SECRET_KEY": "your-secret-key-here-debug",
                "PASSWORD_SALT": "default-salt-debug",
                "MASTER_PASSWORD": "admin123",
                "LOG_LEVEL": "DEBUG",
                "ANIME_DIRECTORY": "${workspaceFolder}/data/anime",
                "DATABASE_URL": "sqlite:///${workspaceFolder}/data/aniworld.db"
            },
            "cwd": "${workspaceFolder}",
            "args": [],
            "stopOnEntry": false,
            "autoReload": {
                "enable": true
            }
        },
        {
            "name": "Debug FastAPI with Uvicorn",
            "type": "debugpy",
            "request": "launch",
            "module": "uvicorn",
            "python": "C:\\Users\\lukas\\anaconda3\\envs\\AniWorld\\python.exe",
            "args": [
                "src.server.fastapi_app:app",
                "--host",
                "127.0.0.1",
                "--port",
                "8000",
                "--reload",
                "--log-level",
                "debug"
            ],
            "console": "integratedTerminal",
            "justMyCode": true,
            "env": {
                "PYTHONPATH": "${workspaceFolder}/src:${workspaceFolder}",
                "JWT_SECRET_KEY": "your-secret-key-here-debug",
                "PASSWORD_SALT": "default-salt-debug",
                "MASTER_PASSWORD": "admin123",
                "LOG_LEVEL": "DEBUG",
                "ANIME_DIRECTORY": "${workspaceFolder}/data/anime",
                "DATABASE_URL": "sqlite:///${workspaceFolder}/data/aniworld.db"
            },
            "cwd": "${workspaceFolder}"
        },
        {
            "name": "Debug CLI App",
            "type": "debugpy",
            "request": "launch",
            "program": "${workspaceFolder}/src/cli/Main.py",
            "console": "integratedTerminal",
            "justMyCode": true,
            "python": "C:\\Users\\lukas\\anaconda3\\envs\\AniWorld\\python.exe",
            "env": {
                "PYTHONPATH": "${workspaceFolder}/src:${workspaceFolder}",
                "LOG_LEVEL": "DEBUG",
                "ANIME_DIRECTORY": "${workspaceFolder}/data/anime"
            },
            "cwd": "${workspaceFolder}",
            "args": [
                // Add arguments as needed for CLI testing
                // Example: "${workspaceFolder}/test_data"
            ],
            "stopOnEntry": false
        },
        {
            "name": "Debug Tests",
            "type": "debugpy",
            "request": "launch",
            "module": "pytest",
            "python": "C:\\Users\\lukas\\anaconda3\\envs\\AniWorld\\python.exe",
            "args": [
                "${workspaceFolder}/tests",
                "-v",
                "--tb=short",
                "--no-header",
                "--disable-warnings"
            ],
            "console": "integratedTerminal",
            "justMyCode": true,
            "env": {
                "PYTHONPATH": "${workspaceFolder}/src:${workspaceFolder}",
                "JWT_SECRET_KEY": "test-secret-key",
                "PASSWORD_SALT": "test-salt",
                "MASTER_PASSWORD": "admin123",
                "LOG_LEVEL": "DEBUG",
                "ANIME_DIRECTORY": "${workspaceFolder}/test_data/anime",
                "DATABASE_URL": "sqlite:///${workspaceFolder}/test_data/test_aniworld.db"
            },
            "cwd": "${workspaceFolder}"
        },
        {
            "name": "Debug Unit Tests Only",
            "type": "debugpy",
            "request": "launch",
            "module": "pytest",
            "python": "C:\\Users\\lukas\\anaconda3\\envs\\AniWorld\\python.exe",
            "args": [
                "${workspaceFolder}/tests/unit",
                "-v",
                "--tb=short"
            ],
            "console": "integratedTerminal",
            "justMyCode": true,
            "env": {
                "PYTHONPATH": "${workspaceFolder}/src:${workspaceFolder}",
                "JWT_SECRET_KEY": "test-secret-key",
                "PASSWORD_SALT": "test-salt",
                "LOG_LEVEL": "DEBUG"
            },
            "cwd": "${workspaceFolder}"
        },
        {
            "name": "Debug Integration Tests Only",
            "type": "debugpy",
            "request": "launch",
            "module": "pytest",
            "python": "C:\\Users\\lukas\\anaconda3\\envs\\AniWorld\\python.exe",
            "args": [
                "${workspaceFolder}/tests/integration",
                "-v",
                "--tb=short"
            ],
            "console": "integratedTerminal",
            "justMyCode": true,
            "env": {
                "PYTHONPATH": "${workspaceFolder}/src:${workspaceFolder}",
                "JWT_SECRET_KEY": "test-secret-key",
                "PASSWORD_SALT": "test-salt",
                "MASTER_PASSWORD": "admin123",
                "LOG_LEVEL": "DEBUG",
                "ANIME_DIRECTORY": "${workspaceFolder}/test_data/anime",
                "DATABASE_URL": "sqlite:///${workspaceFolder}/test_data/test_aniworld.db"
            },
            "cwd": "${workspaceFolder}"
        },
        {
            "name": "Debug FastAPI Production Mode",
            "type": "debugpy",
            "request": "launch",
            "module": "uvicorn",
            "python": "C:\\Users\\lukas\\anaconda3\\envs\\AniWorld\\python.exe",
            "args": [
                "src.server.fastapi_app:app",
                "--host",
                "0.0.0.0",
                "--port",
                "8000",
                "--workers",
                "1"
            ],
            "console": "integratedTerminal",
            "justMyCode": true,
            "env": {
                "PYTHONPATH": "${workspaceFolder}/src:${workspaceFolder}",
                "JWT_SECRET_KEY": "production-secret-key-change-me",
                "PASSWORD_SALT": "production-salt-change-me",
                "MASTER_PASSWORD": "admin123",
                "LOG_LEVEL": "INFO",
                "ANIME_DIRECTORY": "${workspaceFolder}/data/anime",
                "DATABASE_URL": "sqlite:///${workspaceFolder}/data/aniworld.db"
            },
            "cwd": "${workspaceFolder}"
        }
    ]
}
72  .vscode/settings.json vendored
@@ -1,37 +1,37 @@
{
    "python.defaultInterpreterPath": "C:\\Users\\lukas\\anaconda3\\envs\\AniWorld\\python.exe",
    "python.terminal.activateEnvironment": true,
    "python.condaPath": "C:\\Users\\lukas\\anaconda3\\Scripts\\conda.exe",
    "python.terminal.activateEnvInCurrentTerminal": true,
    "python.linting.enabled": true,
    "python.linting.flake8Enabled": true,
    "python.linting.pylintEnabled": true,
    "python.formatting.provider": "black",
    "python.formatting.blackArgs": [
        "--line-length",
        "88"
    ],
    "python.sortImports.args": [
        "--profile",
        "black"
    ],
    "editor.formatOnSave": true,
    "editor.codeActionsOnSave": {
        "source.organizeImports": "explicit"
    },
    "files.exclude": {
        "**/__pycache__": true,
        "**/*.pyc": true,
        "**/node_modules": true,
        "**/.pytest_cache": true,
        "**/data/temp/**": true,
        "**/data/cache/**": true,
        "**/data/logs/**": true
    },
    "python.testing.pytestEnabled": true,
    "python.testing.pytestArgs": [
        "tests"
    ],
    "python.testing.unittestEnabled": false,
    "python.testing.autoTestDiscoverOnSaveEnabled": true
}
330  .vscode/tasks.json vendored
@@ -1,166 +1,166 @@
{
    "version": "2.0.0",
    "tasks": [
        {
            "label": "Run FastAPI Server",
            "type": "shell",
            "command": "conda",
            "args": [
                "run",
                "-n",
                "AniWorld",
                "python",
                "-m",
                "uvicorn",
                "src.server.fastapi_app:app",
                "--host",
                "127.0.0.1",
                "--port",
                "8000",
                "--reload"
            ],
            "group": {
                "kind": "build",
                "isDefault": true
            },
            "presentation": {
                "echo": true,
                "reveal": "always",
                "focus": false,
                "panel": "new"
            },
            "options": {
                "cwd": "${workspaceFolder}"
            },
            "problemMatcher": [],
            "isBackground": true
        },
        {
            "label": "Run CLI Application",
            "type": "shell",
            "command": "conda",
            "args": [
                "run",
                "-n",
                "AniWorld",
                "python",
                "src/cli/Main.py"
            ],
            "group": "build",
            "presentation": {
                "echo": true,
                "reveal": "always",
                "focus": false,
                "panel": "new"
            },
            "options": {
                "cwd": "${workspaceFolder}"
            },
            "problemMatcher": []
        },
        {
            "label": "Run All Tests",
            "type": "shell",
            "command": "conda",
            "args": [
                "run",
                "-n",
                "AniWorld",
                "python",
                "-m",
                "pytest",
                "tests/",
                "-v",
                "--tb=short"
            ],
            "group": "test",
            "presentation": {
                "echo": true,
                "reveal": "always",
                "focus": false,
                "panel": "new"
            },
            "options": {
                "cwd": "${workspaceFolder}"
            },
            "problemMatcher": []
        },
        {
            "label": "Run Unit Tests",
            "type": "shell",
            "command": "conda",
            "args": [
                "run",
                "-n",
                "AniWorld",
                "python",
                "-m",
                "pytest",
                "tests/unit/",
                "-v"
            ],
            "group": "test",
            "presentation": {
                "echo": true,
                "reveal": "always",
                "focus": false,
                "panel": "new"
            },
            "options": {
                "cwd": "${workspaceFolder}"
            },
            "problemMatcher": []
        },
        {
            "label": "Run Integration Tests",
            "type": "shell",
            "command": "conda",
            "args": [
                "run",
                "-n",
                "AniWorld",
                "python",
                "-m",
                "pytest",
                "tests/integration/",
                "-v"
            ],
            "group": "test",
            "presentation": {
                "echo": true,
                "reveal": "always",
                "focus": false,
                "panel": "new"
            },
            "options": {
                "cwd": "${workspaceFolder}"
            },
            "problemMatcher": []
        },
        {
            "label": "Install Dependencies",
            "type": "shell",
            "command": "conda",
            "args": [
                "run",
                "-n",
                "AniWorld",
                "pip",
                "install",
                "-r",
                "requirements.txt"
            ],
            "group": "build",
            "presentation": {
                "echo": true,
                "reveal": "always",
                "focus": false,
                "panel": "new"
            },
            "options": {
                "cwd": "${workspaceFolder}"
            },
            "problemMatcher": []
        }
    ]
}
@@ -1,191 +0,0 @@
# AniWorld FastAPI Documentation

## Overview

AniWorld has been successfully migrated from Flask to FastAPI, providing improved performance, automatic API documentation, and modern async support.

## Accessing API Documentation

### Interactive API Documentation

FastAPI automatically generates interactive API documentation that you can access at:

- **Swagger UI**: `http://localhost:8000/docs`
- **ReDoc**: `http://localhost:8000/redoc`

These interfaces allow you to:

- Browse all available endpoints
- View request/response schemas
- Test API endpoints directly from the browser
- Download OpenAPI schema

### OpenAPI Schema

The complete OpenAPI 3.0 schema is available at:

- **JSON Format**: `http://localhost:8000/openapi.json`

## Authentication

### Master Password Authentication

AniWorld uses a simple master password authentication system with JWT tokens.

#### Login Process

1. **POST** `/auth/login`
   - Send master password in request body
   - Receive JWT token in response
   - Token expires in 24 hours

```json
{
  "password": "your_master_password"
}
```

Response:

```json
{
  "success": true,
  "token": "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9...",
  "message": "Login successful"
}
```

#### Using Authentication Token

Include the token in the `Authorization` header for authenticated requests:

```
Authorization: Bearer eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9...
```
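As a rough illustration (not part of the original documentation), the login flow above can be exercised with `requests`; the base URL assumes a local server on port 8000.

```python
# Illustrative client for the documented login flow; assumes a local server on port 8000.
import requests

BASE_URL = "http://localhost:8000"

# 1. Log in with the master password to obtain a JWT token.
login = requests.post(f"{BASE_URL}/auth/login", json={"password": "your_master_password"})
login.raise_for_status()
token = login.json()["token"]

# 2. Send the token in the Authorization header on subsequent requests.
headers = {"Authorization": f"Bearer {token}"}
verify = requests.get(f"{BASE_URL}/auth/verify", headers=headers)
print(verify.status_code, verify.json())
```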
## API Endpoints

### System Health

- **GET** `/health` - Check system health and status
- **GET** `/api/system/database/health` - Check database connectivity
- **GET** `/api/system/config` - Get system configuration

### Authentication

- **POST** `/auth/login` - Authenticate and get JWT token
- **GET** `/auth/verify` - Verify current token validity
- **POST** `/auth/logout` - Logout and invalidate token
- **GET** `/api/auth/status` - Get current authentication status

### Anime Management

- **GET** `/api/anime/search` - Search for anime series
- **GET** `/api/anime/{anime_id}` - Get specific anime details
- **GET** `/api/anime/{anime_id}/episodes` - Get episodes for an anime

### Episode Management

- **GET** `/api/episodes/{episode_id}` - Get specific episode details

### Series Management

- **POST** `/api/add_series` - Add a new series to tracking
- **POST** `/api/download` - Start episode download

### Web Interface

- **GET** `/` - Main application interface
- **GET** `/app` - Application dashboard
- **GET** `/login` - Login page
- **GET** `/setup` - Setup page
- **GET** `/queue` - Download queue interface

## Response Formats

### Success Responses

All successful API responses follow this structure:

```json
{
  "success": true,
  "data": {...},
  "message": "Operation completed successfully"
}
```

### Error Responses

Error responses include detailed error information:

```json
{
  "success": false,
  "error": "Error description",
  "code": "ERROR_CODE",
  "details": {...}
}
```
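One possible way to model these envelopes with Pydantic, mirroring the documented JSON shapes; the class names are assumptions, not the project's actual schema definitions.

```python
# Sketch of the documented response envelopes as Pydantic models (class names assumed).
from typing import Any, Optional

from pydantic import BaseModel


class SuccessResponse(BaseModel):
    success: bool = True
    data: Optional[dict[str, Any]] = None
    message: Optional[str] = None


class ErrorResponse(BaseModel):
    success: bool = False
    error: str
    code: Optional[str] = None
    details: Optional[dict[str, Any]] = None
```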
## Status Codes

- **200 OK** - Successful operation
- **201 Created** - Resource created successfully
- **400 Bad Request** - Invalid request data
- **401 Unauthorized** - Authentication required
- **403 Forbidden** - Insufficient permissions
- **404 Not Found** - Resource not found
- **422 Unprocessable Entity** - Validation error
- **500 Internal Server Error** - Server error

## Rate Limiting

Currently, no rate limiting is implemented, but it may be added in future versions.

## WebSocket Support

Real-time updates are available through WebSocket connections for:

- Download progress updates
- Scan progress updates
- System status changes
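The documentation does not list the WebSocket URLs, so the sketch below only shows the generic client pattern using the third-party `websockets` package; the `/ws/downloads` path is a placeholder, not a documented endpoint.

```python
# Generic client pattern only; the path below is a placeholder, not a documented endpoint.
import asyncio

import websockets  # third-party "websockets" package


async def watch_progress() -> None:
    # Placeholder URL: substitute the real WebSocket endpoint exposed by the server.
    async with websockets.connect("ws://localhost:8000/ws/downloads") as ws:
        async for message in ws:
            print("update:", message)


if __name__ == "__main__":
    asyncio.run(watch_progress())
```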
## Migration Notes

### Changes from Flask

1. **Automatic Documentation**: FastAPI provides built-in OpenAPI documentation
2. **Type Safety**: Full request/response validation with Pydantic
3. **Async Support**: Native async/await support for better performance
4. **Modern Standards**: OpenAPI 3.0, JSON Schema validation
5. **Better Error Handling**: Structured error responses with detailed information

### Breaking Changes

- Authentication tokens are now JWT-based instead of session-based
- Request/response formats may have slight differences
- Some endpoint URLs may have changed
- WebSocket endpoints use the FastAPI WebSocket pattern

## Development

### Running the Server

```bash
# Development mode with auto-reload
uvicorn src.server.fastapi_app:app --host 127.0.0.1 --port 8000 --reload

# Production mode
uvicorn src.server.fastapi_app:app --host 0.0.0.0 --port 8000
```

### Environment Variables

- `MASTER_PASSWORD_HASH` - Hashed master password
- `JWT_SECRET_KEY` - Secret key for JWT token signing
- `LOG_LEVEL` - Logging level (DEBUG, INFO, WARNING, ERROR)

## Support

For issues, questions, or contributions, please visit the project repository or contact the development team.
74
Overview.md
74
Overview.md
@ -1,74 +0,0 @@

# AniWorld Project Overview

## 📁 Folder Structure

The project follows a modular, layered architecture inspired by MVC and Clean Architecture principles. The main directories are:

```
src/
    controllers/    # API endpoints and route handlers
    services/       # Business logic and orchestration
    repositories/   # Data access layer (DB, external APIs)
    schemas/        # Pydantic models for validation/serialization
    utils/          # Utility functions and helpers
    config/         # Configuration management (env, settings)
tests/
    unit/           # Unit tests for core logic
    integration/    # Integration tests for end-to-end scenarios
```

## 🏗️ Architecture

- **MVC & Clean Architecture:** Separation of concerns between controllers (views), services (business logic), and repositories (data access).
- **Dependency Injection:** Used for service/repository wiring, especially with FastAPI's `Depends` (a minimal sketch follows this list).
- **Event-Driven & Microservices Ready:** Modular design allows for future scaling into microservices or event-driven workflows.
- **Centralized Error Handling:** Custom exceptions and error middleware for consistent API responses.

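The wiring described above, sketched with FastAPI's `Depends` (class and route names such as `SeriesRepository` and `/api/series` are placeholders, not the project's actual identifiers):

```python
# Hypothetical sketch of repository -> service -> endpoint wiring via Depends.
from fastapi import Depends, FastAPI

app = FastAPI()


class SeriesRepository:  # placeholder name
    def list_titles(self) -> list[str]:
        return ["Example Series"]


class SeriesService:  # placeholder name
    def __init__(self, repo: SeriesRepository) -> None:
        self.repo = repo

    def titles(self) -> list[str]:
        return self.repo.list_titles()


def get_repository() -> SeriesRepository:
    return SeriesRepository()


def get_service(repo: SeriesRepository = Depends(get_repository)) -> SeriesService:
    return SeriesService(repo)


@app.get("/api/series")
def list_series(service: SeriesService = Depends(get_service)) -> list[str]:
    return service.titles()
```
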

## 🧰 Used Libraries & Frameworks

- **Python** (PEP8, PEP257, type hints)
- **FastAPI**: High-performance async web API framework
- **Pydantic**: Data validation and serialization
- **Poetry**: Dependency management and packaging
- **dotenv / os.environ**: Environment variable management
- **logging / structlog**: Structured logging
- **pytest / unittest**: Testing frameworks
- **aiohttp**: Async HTTP client (where needed)
- **SQLAlchemy / asyncpg / databases**: Database ORM and async drivers (if present)
- **Prometheus**: Metrics endpoint integration
- **Other**: As required for integrations (webhooks, third-party APIs)

## 🧩 Patterns & Conventions

- **Repository Pattern:** All data access is abstracted via repositories.
- **Service Layer:** Business logic is encapsulated in services, not controllers.
- **Pydantic Models:** Used for all input/output validation.
- **Async Endpoints:** All I/O-bound endpoints are async for scalability.
- **Environment Configuration:** All secrets/configs are loaded from `.env` or environment variables.
- **Logging:** All logs are structured and configurable.
- **Testing:** High coverage with fixtures and mocks for external dependencies.

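A compact illustration of the Pydantic-model and async-endpoint conventions (the `EpisodeOut` model and route path are invented for the example):

```python
# Hypothetical sketch: response-model validation on an async endpoint.
from fastapi import FastAPI
from pydantic import BaseModel

app = FastAPI()


class EpisodeOut(BaseModel):  # invented model name
    series: str
    season: int
    episode: int
    missing: bool


@app.get("/api/example/episodes", response_model=list[EpisodeOut])
async def list_missing_episodes() -> list[EpisodeOut]:
    # Static data keeps the sketch self-contained; real data would come from a
    # repository/service layer.
    return [EpisodeOut(series="Example Series", season=1, episode=3, missing=True)]
```
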
## 🛡️ Security & Performance

- **JWT Authentication:** Secure endpoints with token-based auth.
- **Input Validation:** All user input is validated via Pydantic.
- **No Hardcoded Secrets:** All sensitive data is externalized.
- **Performance Optimization:** Async I/O, caching, and profiling tools.

## 🎨 UI & CLI

- **Theme Support:** Light/dark/auto modes.
- **Accessibility:** Screen reader, color contrast, keyboard shortcuts.
- **CLI Tool:** For bulk operations, scanning, and management.

## 📚 References

- [FastAPI Documentation](https://fastapi.tiangolo.com/)
- [Pydantic Documentation](https://docs.pydantic.dev/)
- [Poetry](https://python-poetry.org/docs/)
- [PEP 8](https://peps.python.org/pep-0008/)
- [Black Formatter](https://black.readthedocs.io/)

---

**For details on individual features and endpoints, see `features.md`.**

268
README.md
@ -1,268 +0,0 @@
|
||||
# AniWorld - Anime Series Management System
|
||||
|
||||
A powerful anime series management system that helps you track, organize, and download your favorite anime series. Recently migrated from Flask to FastAPI for improved performance and modern API capabilities.
|
||||
|
||||
## 🚀 Features
|
||||
|
||||
### Core Functionality
|
||||
|
||||
- **Series Tracking**: Automatically detect missing episodes in your anime collection
|
||||
- **Smart Downloads**: Queue-based download system with progress tracking
|
||||
- **File Organization**: Automatic file scanning and folder structure management
|
||||
- **Search Integration**: Search for anime series across multiple providers
|
||||
- **Real-time Updates**: Live progress updates via WebSocket connections
|
||||
|
||||
### Web Interface
|
||||
|
||||
- **Modern UI**: Clean, responsive web interface with dark/light theme support
|
||||
- **Download Queue**: Visual download queue management
|
||||
- **Progress Tracking**: Real-time download and scan progress
|
||||
- **Mobile Support**: Fully responsive design for mobile devices
|
||||
|
||||
### API & Integration
|
||||
|
||||
- **FastAPI Backend**: High-performance async API with automatic documentation
|
||||
- **RESTful API**: Complete REST API for programmatic access
|
||||
- **OpenAPI Documentation**: Interactive API documentation at `/docs`
|
||||
- **Authentication**: Secure master password authentication with JWT tokens
|
||||
|
||||
## 🎯 Recent Migration: Flask → FastAPI
|
||||
|
||||
This project has been successfully migrated from Flask to FastAPI, bringing significant improvements:
|
||||
|
||||
### Performance Benefits
|
||||
|
||||
- **Async Support**: Native async/await for better concurrency
|
||||
- **Faster Response Times**: Up to 2-3x performance improvement
|
||||
- **Better Resource Utilization**: More efficient handling of concurrent requests
|
||||
|
||||
### Developer Experience
|
||||
|
||||
- **Automatic Documentation**: Built-in OpenAPI/Swagger documentation
|
||||
- **Type Safety**: Full request/response validation with Pydantic
|
||||
- **Modern Standards**: OpenAPI 3.0 compliance and JSON Schema validation
|
||||
- **Better Error Handling**: Structured error responses with detailed information
|
||||
|
||||
### API Improvements
|
||||
|
||||
- **Interactive Documentation**: Test API endpoints directly from `/docs`
|
||||
- **Schema Validation**: Automatic request/response validation
|
||||
- **Better Error Messages**: Detailed validation errors with field-level feedback
|
||||
|
||||
## 🛠️ Installation & Setup
|
||||
|
||||
### Prerequisites
|
||||
|
||||
- Python 3.11+
|
||||
- Conda package manager
|
||||
- Windows OS (currently optimized for Windows)
|
||||
|
||||
### Quick Start
|
||||
|
||||
1. **Clone the Repository**
|
||||
|
||||
```bash
|
||||
git clone <repository-url>
|
||||
cd Aniworld
|
||||
```
|
||||
|
||||
2. **Create and Activate Conda Environment**
|
||||
|
||||
```bash
|
||||
conda create -n AniWorld python=3.11
|
||||
conda activate AniWorld
|
||||
```
|
||||
|
||||
3. **Install Dependencies**
|
||||
|
||||
```bash
|
||||
pip install -r requirements.txt
|
||||
```
|
||||
|
||||
4. **Set Environment Variables**
|
||||
|
||||
```bash
|
||||
# Set your master password (will be hashed automatically)
|
||||
set MASTER_PASSWORD=your_secure_password
|
||||
```
|
||||
|
||||
5. **Start the FastAPI Server**
|
||||
|
||||
```bash
|
||||
# Development mode with auto-reload
|
||||
uvicorn src.server.fastapi_app:app --host 127.0.0.1 --port 8000 --reload
|
||||
|
||||
# Or use the VS Code task: "Run FastAPI Server"
|
||||
```
|
||||
|
||||
6. **Access the Application**
|
||||
- **Web Interface**: http://localhost:8000
|
||||
- **API Documentation**: http://localhost:8000/docs
|
||||
- **Alternative API Docs**: http://localhost:8000/redoc
|
||||
|
||||
### Alternative: Using VS Code Tasks
|
||||
|
||||
If you're using VS Code, you can use the pre-configured tasks:
|
||||
|
||||
- `Ctrl+Shift+P` → "Tasks: Run Task" → "Run FastAPI Server"
|
||||
|
||||
## 🔧 Configuration
|
||||
|
||||
### Environment Variables
|
||||
|
||||
- `MASTER_PASSWORD` - Your master password (will be hashed automatically)
|
||||
- `MASTER_PASSWORD_HASH` - Pre-hashed password (alternative to MASTER_PASSWORD)
|
||||
- `JWT_SECRET_KEY` - Secret key for JWT token signing (auto-generated if not set)
|
||||
- `LOG_LEVEL` - Logging level (DEBUG, INFO, WARNING, ERROR)
|
||||
|
||||
### Directory Structure
|
||||
|
||||
```
|
||||
Aniworld/
|
||||
├── src/
|
||||
│ ├── core/ # Core business logic
|
||||
│ │ ├── SeriesApp.py # Main application controller
|
||||
│ │ ├── entities/ # Data models
|
||||
│ │ └── providers/ # Content providers
|
||||
│ ├── server/ # FastAPI server
|
||||
│ │ ├── fastapi_app.py # Main FastAPI application
|
||||
│ │ └── web/ # Web interface and controllers
|
||||
│ └── infrastructure/ # Infrastructure components
|
||||
├── data/ # Application data and databases
|
||||
├── logs/ # Application logs
|
||||
└── requirements.txt # Python dependencies
|
||||
```
|
||||
|
||||
## 🌐 API Usage
|
||||
|
||||
### Authentication
|
||||
|
||||
1. **Login to get JWT token**:
|
||||
|
||||
```bash
|
||||
curl -X POST "http://localhost:8000/auth/login" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d '{"password": "your_master_password"}'
|
||||
```
|
||||
|
||||
2. **Use token in requests**:
|
||||
```bash
|
||||
curl -X GET "http://localhost:8000/api/anime/search?query=naruto" \
|
||||
-H "Authorization: Bearer your_jwt_token_here"
|
||||
```
|
||||
|
||||
### Key Endpoints
|
||||
|
||||
- **Authentication**: `/auth/login`, `/auth/verify`, `/auth/logout`
|
||||
- **System**: `/health`, `/api/system/config`
|
||||
- **Anime**: `/api/anime/search`, `/api/anime/{id}`
|
||||
- **Episodes**: `/api/episodes/{id}`, `/api/anime/{id}/episodes`
|
||||
- **Downloads**: `/api/download`, `/api/add_series`
|
||||
|
||||
For complete API documentation, visit `/docs` when the server is running.
|
||||
|
||||
## 🖥️ Web Interface
|
||||
|
||||
### Main Features
|
||||
|
||||
- **Dashboard**: Overview of your anime collection and missing episodes
|
||||
- **Search**: Find and add new anime series to track
|
||||
- **Downloads**: Manage download queue and monitor progress
|
||||
- **Settings**: Configure application preferences
|
||||
|
||||
### Responsive Design
|
||||
|
||||
The web interface is fully responsive and supports:
|
||||
|
||||
- Desktop browsers (Chrome, Firefox, Edge, Safari)
|
||||
- Mobile devices (iOS Safari, Android Chrome)
|
||||
- Tablet devices
|
||||
- Dark and light themes
|
||||
|
||||
## 🔍 Troubleshooting
|
||||
|
||||
### Common Issues
|
||||
|
||||
1. **Server won't start**
|
||||
|
||||
- Check that the AniWorld conda environment is activated
|
||||
- Verify all dependencies are installed: `pip install -r requirements.txt`
|
||||
- Check for port conflicts (default: 8000)
|
||||
|
||||
2. **Authentication errors**
|
||||
|
||||
- Verify the master password is set correctly
|
||||
- Check environment variables are properly configured
|
||||
- Clear browser cache/cookies
|
||||
|
||||
3. **Import errors**
|
||||
- Ensure all required packages are installed
|
||||
- Check Python path configuration
|
||||
- Verify conda environment is activated
|
||||
|
||||
### Logs
|
||||
|
||||
Application logs are stored in the `logs/` directory:
|
||||
|
||||
- `aniworld.log` - General application logs
|
||||
- `errors.log` - Error-specific logs
|
||||
- `auth_failures.log` - Authentication failure logs
|
||||
|
||||
## 🚦 Development
|
||||
|
||||
### Running in Development Mode
|
||||
|
||||
```bash
|
||||
# With auto-reload for development
|
||||
uvicorn src.server.fastapi_app:app --host 127.0.0.1 --port 8000 --reload --log-level debug
|
||||
```
|
||||
|
||||
### Testing
|
||||
|
||||
```bash
|
||||
# Run all tests
|
||||
python -m pytest tests/ -v
|
||||
|
||||
# Run with coverage
|
||||
python -m pytest tests/ --cov=src --cov-report=html
|
||||
```
|
||||
|
||||
### Code Quality
|
||||
|
||||
```bash
|
||||
# Format code
|
||||
black src/
|
||||
isort src/
|
||||
|
||||
# Lint code
|
||||
pylint src/
|
||||
flake8 src/
|
||||
```
|
||||
|
||||
## 📚 Documentation
|
||||
|
||||
- **API Documentation**: Available at `/docs` (Swagger UI) and `/redoc` (ReDoc)
|
||||
- **Migration Guide**: See `API_DOCUMENTATION.md` for detailed migration information
|
||||
- **FastAPI Specific**: See `src/server/README_FastAPI.md` for server-specific documentation
|
||||
|
||||
## 🤝 Contributing
|
||||
|
||||
1. Fork the repository
|
||||
2. Create a feature branch (`git checkout -b feature/amazing-feature`)
|
||||
3. Commit your changes (`git commit -m 'Add amazing feature'`)
|
||||
4. Push to the branch (`git push origin feature/amazing-feature`)
|
||||
5. Open a Pull Request
|
||||
|
||||
## 📄 License
|
||||
|
||||
This project is licensed under the MIT License - see the LICENSE file for details.
|
||||
|
||||
## 🙏 Acknowledgments
|
||||
|
||||
- FastAPI team for the excellent framework
|
||||
- The original Flask implementation that served as the foundation
|
||||
- All contributors and users of the AniWorld project
|
||||
|
||||
---
|
||||
|
||||
**Note**: This application is for personal use only. Please respect copyright laws and terms of service of content providers.
|
||||
227
ServerTodo.md
@ -1,227 +0,0 @@
|
||||
# Web Migration TODO: Flask to FastAPI
|
||||
|
||||
This document contains tasks for migrating the web application from Flask to FastAPI. Each task should be marked as completed with [x] when finished.
|
||||
|
||||
## 📋 Project Analysis and Setup
|
||||
|
||||
### Initial Assessment
|
||||
|
||||
- [x] Review current Flask application structure in `/src/web/` directory
|
||||
- [x] Identify all Flask routes and their HTTP methods
|
||||
- [x] Document current template engine usage (Jinja2)
|
||||
- [x] List all static file serving requirements
|
||||
- [x] Inventory all middleware and extensions currently used
|
||||
- [x] Document current error handling patterns
|
||||
- [x] Review authentication/authorization mechanisms
|
||||
|
||||
### FastAPI Setup
|
||||
|
||||
- [x] Install FastAPI dependencies: `pip install fastapi uvicorn jinja2 python-multipart`
|
||||
- [x] Update `requirements.txt` or `pyproject.toml` with new dependencies
|
||||
- [x] Remove Flask dependencies: `flask`, `flask-*` packages
|
||||
- [x] Create new FastAPI application entry point
|
||||
|
||||
## 🔧 Core Application Migration
|
||||
|
||||
### Main Application Structure
|
||||
|
||||
- [x] Create new `main.py` or update existing app entry point with FastAPI app instance
|
||||
- [x] Migrate Flask app configuration to FastAPI settings using Pydantic BaseSettings
|
||||
- [x] Convert Flask blueprints to FastAPI routers
|
||||
- [x] Update CORS configuration from Flask-CORS to FastAPI CORS middleware
|
||||
|
||||
### Route Conversion
|
||||
|
||||
- [x] Convert all `@app.route()` decorators to FastAPI route decorators (`@app.get()`, `@app.post()`, etc.)
|
||||
- [x] Update route parameter syntax from `<int:id>` to `{id: int}` format
|
||||
- [x] Convert Flask request object usage (`request.form`, `request.json`) to FastAPI request models
|
||||
- [x] Update response handling from Flask `jsonify()` to FastAPI automatic JSON serialization
|
||||
- [x] Convert Flask `redirect()` and `url_for()` to FastAPI equivalents
|
||||
|
||||
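For reference, a minimal before/after sketch of the route-parameter conversion covered above (the endpoint itself is illustrative):

```python
# Flask (before):
#   @app.route("/api/anime/<int:anime_id>", methods=["GET"])
#   def get_anime(anime_id): ...
#
# FastAPI (after) - path parameters are declared with type hints:
from fastapi import FastAPI

app = FastAPI()


@app.get("/api/anime/{anime_id}")
async def get_anime(anime_id: int) -> dict:
    # FastAPI validates and converts anime_id to int automatically.
    return {"id": anime_id}
```
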
### Request/Response Models
|
||||
|
||||
- [x] Create Pydantic models for request bodies (replace Flask request parsing)
|
||||
- [x] Create Pydantic models for response schemas
|
||||
- [x] Update form handling to use FastAPI Form dependencies
|
||||
- [x] Convert file upload handling to FastAPI UploadFile
|
||||
|
||||
## 🎨 Template and Static Files Migration
|
||||
|
||||
### Template Engine Setup
|
||||
|
||||
- [x] Configure Jinja2Templates in FastAPI application
|
||||
- [x] Set up template directory structure
|
||||
- [x] Create templates directory configuration in FastAPI app
|
||||
|
||||
### HTML Template Migration
|
||||
|
||||
- [x] Review all `.html` files in templates directory
|
||||
- [x] Update template rendering from Flask `render_template()` to FastAPI `templates.TemplateResponse()`
|
||||
- [x] Verify Jinja2 syntax compatibility (should be mostly unchanged)
|
||||
- [x] Update template context passing to match FastAPI pattern
|
||||
- [x] Test all template variables and filters still work correctly
|
||||
|
||||
### Static Files Configuration
|
||||
|
||||
- [x] Configure StaticFiles mount in FastAPI for CSS, JS, images
|
||||
- [x] Update static file URL generation in templates
|
||||
- [x] Verify all CSS file references work correctly
|
||||
- [x] Verify all JavaScript file references work correctly
|
||||
- [x] Test image and other asset serving
|
||||
|
||||
## 💻 JavaScript and Frontend Migration
|
||||
|
||||
### Inline JavaScript Review
|
||||
|
||||
- [x] Scan all HTML templates for inline `<script>` tags
|
||||
- [x] Review JavaScript code for Flask-specific URL generation (e.g., `{{ url_for() }}`)
|
||||
- [x] Update AJAX endpoints to match new FastAPI route structure
|
||||
- [x] Convert Flask CSRF token handling to FastAPI security patterns
|
||||
|
||||
### External JavaScript Files
|
||||
|
||||
- [x] Review all `.js` files in static directory
|
||||
- [x] Update API endpoint URLs to match FastAPI routing
|
||||
- [x] Verify fetch() or XMLHttpRequest calls use correct endpoints
|
||||
- [x] Update any Flask-specific JavaScript patterns
|
||||
- [x] Test all JavaScript functionality after migration
|
||||
|
||||
### CSS Files Review
|
||||
|
||||
- [x] Verify all `.css` files are served correctly
|
||||
- [x] Check for any Flask-specific CSS patterns or URL references
|
||||
- [x] Test responsive design and styling after migration
|
||||
|
||||
## 🔐 Security and Middleware Migration
|
||||
|
||||
### Authentication/Authorization
|
||||
|
||||
- [x] Convert Flask-Login or similar to FastAPI security dependencies
|
||||
- [x] Update session management (FastAPI doesn't have built-in sessions)
|
||||
- [x] Migrate password hashing and verification
|
||||
- [x] Convert authentication decorators to FastAPI dependencies
|
||||
|
||||
### Middleware Migration
|
||||
|
||||
- [x] Convert Flask middleware to FastAPI middleware
|
||||
- [x] Update error handling from Flask error handlers to FastAPI exception handlers
|
||||
- [x] Migrate request/response interceptors
|
||||
- [x] Update logging middleware if used
|
||||
|
||||
## 🚀 Application Flow & Setup Features
|
||||
|
||||
### Setup and Authentication Flow
|
||||
|
||||
- [x] Implement application setup detection middleware
|
||||
- [x] Create setup page template and route for first-time configuration
|
||||
- [x] Implement configuration file/database setup validation
|
||||
- [x] Create authentication token validation middleware
|
||||
- [x] Implement auth page template and routes for login/registration
|
||||
- [x] Create main application route with authentication dependency
|
||||
- [x] Implement setup completion tracking in configuration
|
||||
- [x] Add redirect logic for setup → auth → main application flow
|
||||
- [x] Create Pydantic models for setup and authentication requests
|
||||
- [x] Implement session management for authenticated users
|
||||
- [x] Add token refresh and expiration handling
|
||||
- [x] Create middleware to enforce application flow priorities
|
||||
|
||||
## 🧪 Testing and Validation
|
||||
|
||||
### Functional Testing
|
||||
|
||||
- [x] Test all web routes return correct responses
|
||||
- [x] Verify all HTML pages render correctly
|
||||
- [x] Test all forms submit and process data correctly
|
||||
- [x] Verify file uploads work (if applicable)
|
||||
- [x] Test authentication flows (login/logout/registration)
|
||||
|
||||
### Frontend Testing
|
||||
|
||||
- [x] Test all JavaScript functionality
|
||||
- [x] Verify AJAX calls work correctly
|
||||
- [x] Test dynamic content loading
|
||||
- [x] Verify CSS styling is applied correctly
|
||||
- [x] Test responsive design on different screen sizes
|
||||
|
||||
### Integration Testing
|
||||
|
||||
- [x] Test database connectivity and operations
|
||||
- [x] Verify API endpoints return correct data
|
||||
- [x] Test error handling and user feedback
|
||||
- [x] Verify security features work correctly
|
||||
|
||||
## 📚 Documentation and Cleanup
|
||||
|
||||
### Code Documentation
|
||||
|
||||
- [x] Update API documentation to reflect FastAPI changes
|
||||
- [x] Add OpenAPI/Swagger documentation (automatic with FastAPI)
|
||||
- [x] Update README with new setup instructions
|
||||
- [x] Document any breaking changes or new patterns
|
||||
|
||||
### Code Cleanup
|
||||
|
||||
- [x] Remove unused Flask imports and dependencies
|
||||
- [x] Clean up any Flask-specific code patterns
|
||||
- [x] Update imports to use FastAPI equivalents
|
||||
- [x] Remove deprecated or unused template files
|
||||
- [x] Clean up static files that are no longer needed
|
||||
|
||||
## 🚀 Deployment and Configuration
|
||||
|
||||
### Server Configuration
|
||||
|
||||
- [x] Update server startup to use `uvicorn` instead of Flask development server
|
||||
- [x] Configure production ASGI server (uvicorn, gunicorn with uvicorn workers)
|
||||
- [x] Update any reverse proxy configuration (nginx, Apache)
|
||||
- [x] Test application startup and shutdown
|
||||
|
||||
### Environment Configuration
|
||||
|
||||
- [x] Update environment variables for FastAPI
|
||||
- [x] Configure logging for FastAPI application
|
||||
- [x] Update any deployment scripts or Docker configurations
|
||||
- [x] Test application in different environments (dev, staging, prod)
|
||||
|
||||
## ✅ Final Verification
|
||||
|
||||
### Complete System Test
|
||||
|
||||
- [x] Perform end-to-end testing of all user workflows
|
||||
- [x] Verify performance is acceptable or improved
|
||||
- [x] Test error scenarios and edge cases
|
||||
- [x] Confirm all original functionality is preserved
|
||||
- [x] Validate security measures are in place and working
|
||||
|
||||
### Monitoring and Observability
|
||||
|
||||
- [x] Set up health check endpoints
|
||||
- [x] Configure metrics collection (if used)
|
||||
- [x] Set up error monitoring and alerting
|
||||
- [x] Test logging and debugging capabilities
|
||||
|
||||
---
|
||||
|
||||
## 📝 Migration Notes
|
||||
|
||||
### Important FastAPI Concepts to Remember:
|
||||
|
||||
- FastAPI uses async/await by default (but sync functions work too)
|
||||
- Automatic request/response validation with Pydantic
|
||||
- Built-in OpenAPI documentation
|
||||
- Dependency injection system
|
||||
- Type hints are crucial for FastAPI functionality
|
||||
|
||||
### Common Gotchas:
|
||||
|
||||
- FastAPI doesn't have built-in session support (use external library if needed)
|
||||
- Template responses need explicit media_type for HTML
|
||||
- Static file mounting needs to be configured explicitly
|
||||
- Request object structure is different from Flask
|
||||
|
||||
### Performance Considerations:
|
||||
|
||||
- FastAPI is generally faster than Flask
|
||||
- Consider using async functions for I/O operations
|
||||
- Use background tasks for long-running operations
|
||||
- Implement proper caching strategies
|
||||
180
TestsTodo.md
@ -1,180 +0,0 @@
|
||||
# AniWorld Test Generation Checklist
|
||||
|
||||
This file instructs the AI agent on how to generate tests for the AniWorld application. All tests must be saved under `src/tests/` and follow the conventions in `.github/copilot-instructions.md`. Use `[ ]` for each task so the agent can check off completed items.
|
||||
|
||||
---
|
||||
|
||||
## 📁 Test File Structure
|
||||
|
||||
- [x] Place all tests under `src/tests/`
|
||||
- [x] `src/tests/unit/` for component/unit tests
|
||||
- [x] `src/tests/integration/` for API/integration tests
|
||||
- [x] `src/tests/e2e/` for end-to-end tests
|
||||
|
||||
---
|
||||
|
||||
## 🧪 Test Types
|
||||
|
||||
- [x] Component/Unit Tests: Test individual functions, classes, and modules.
|
||||
- [x] API/Integration Tests: Test API endpoints and database/external integrations.
|
||||
- [x] End-to-End (E2E) Tests: Simulate real user flows through the system.
|
||||
|
||||
---
|
||||
|
||||
## 📝 Test Case Checklist
|
||||
|
||||
### 1. Authentication & Security
|
||||
|
||||
- [x] Unit: Password hashing (SHA-256 + salt)
|
||||
- [x] Unit: JWT creation/validation
|
||||
- [x] Unit: Session timeout logic
|
||||
- [x] API: `POST /auth/login` (valid/invalid credentials)
|
||||
- [x] API: `GET /auth/verify` (valid/expired token)
|
||||
- [x] API: `POST /auth/logout`
|
||||
- [x] Unit: Secure environment variable management
|
||||
- [x] E2E: Full login/logout flow
|
||||
|
||||
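As an illustration of the hashing unit test listed above, a self-contained pytest sketch (the local `hash_password` helper stands in for the project's real implementation):

```python
# Hypothetical test sketch for SHA-256 + salt hashing.
import hashlib

import pytest


def hash_password(password: str, salt: str) -> str:
    return hashlib.sha256((salt + password).encode("utf-8")).hexdigest()


@pytest.mark.parametrize("password", ["admin123", "correct horse battery staple"])
def test_same_password_and_salt_give_same_hash(password: str) -> None:
    assert hash_password(password, "salt") == hash_password(password, "salt")


def test_different_salts_give_different_hashes() -> None:
    assert hash_password("admin123", "salt-a") != hash_password("admin123", "salt-b")
```
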
### 2. Health & System Monitoring
|
||||
|
||||
- [x] API: `/health` endpoint
|
||||
- [x] API: `/api/health` endpoint
|
||||
- [x] API: `/api/health/system` (CPU, memory, disk)
|
||||
- [x] API: `/api/health/database`
|
||||
- [x] API: `/api/health/dependencies`
|
||||
- [x] API: `/api/health/performance`
|
||||
- [x] API: `/api/health/metrics`
|
||||
- [x] API: `/api/health/ready`
|
||||
- [x] Unit: System metrics gathering
|
||||
|
||||
### 3. Anime & Episode Management
|
||||
|
||||
- [x] API: `GET /api/anime/search` (pagination, valid/invalid query)
|
||||
- [x] API: `GET /api/anime/{anime_id}` (valid/invalid ID)
|
||||
- [x] API: `GET /api/anime/{anime_id}/episodes`
|
||||
- [x] API: `GET /api/episodes/{episode_id}`
|
||||
- [x] Unit: Search/filter logic
|
||||
|
||||
### 4. Database & Storage Management
|
||||
|
||||
- [x] API: `GET /api/database/info`
|
||||
- [x] API: `/maintenance/database/vacuum`
|
||||
- [x] API: `/maintenance/database/analyze`
|
||||
- [x] API: `/maintenance/database/integrity-check`
|
||||
- [x] API: `/maintenance/database/reindex`
|
||||
- [x] API: `/maintenance/database/optimize`
|
||||
- [x] API: `/maintenance/database/stats`
|
||||
- [x] Unit: Maintenance operation logic
|
||||
|
||||
### 5. Bulk Operations
|
||||
|
||||
- [x] API: `/api/bulk/download`
|
||||
- [x] API: `/api/bulk/update`
|
||||
- [x] API: `/api/bulk/organize`
|
||||
- [x] API: `/api/bulk/delete`
|
||||
- [x] API: `/api/bulk/export`
|
||||
- [x] E2E: Bulk download and export flows
|
||||
|
||||
### 6. Performance Optimization
|
||||
|
||||
- [x] API: `/api/performance/speed-limit`
|
||||
- [x] API: `/api/performance/cache/stats`
|
||||
- [x] API: `/api/performance/memory/stats`
|
||||
- [x] API: `/api/performance/memory/gc`
|
||||
- [x] API: `/api/performance/downloads/tasks`
|
||||
- [x] API: `/api/performance/downloads/add-task`
|
||||
- [x] API: `/api/performance/resume/tasks`
|
||||
- [x] Unit: Cache and memory management logic
|
||||
|
||||
### 7. Diagnostics & Logging
|
||||
|
||||
- [x] API: `/diagnostics/report`
|
||||
- [x] Unit: Error reporting and stats
|
||||
- [x] Unit: Logging configuration and log file management
|
||||
|
||||
### 8. Integrations
|
||||
|
||||
- [x] API: API key management endpoints
|
||||
- [x] API: Webhook configuration endpoints
|
||||
- [x] API: Third-party API integrations
|
||||
- [x] Unit: Integration logic and error handling
|
||||
|
||||
### 9. User Preferences & UI
|
||||
|
||||
- [x] API: Theme management endpoints
|
||||
- [x] API: Language selection endpoints
|
||||
- [x] API: Accessibility endpoints
|
||||
- [x] API: Keyboard shortcuts endpoints
|
||||
- [x] API: UI density/grid/list view endpoints
|
||||
- [x] E2E: Change preferences and verify UI responses
|
||||
|
||||
### 10. CLI Tool
|
||||
|
||||
- [x] Unit: CLI commands (scan, search, download, rescan, display series)
|
||||
- [x] E2E: CLI flows (progress bar, retry logic)
|
||||
|
||||
### 11. Miscellaneous
|
||||
|
||||
- [x] Unit: Environment configuration loading
|
||||
- [x] Unit: Modular architecture components
|
||||
- [x] Unit: Centralized error handling
|
||||
- [x] API: Error handling for invalid requests
|
||||
|
||||
---
|
||||
|
||||
## 🛠️ Additional Guidelines
|
||||
|
||||
- [x] Use `pytest` for all Python tests.
|
||||
- [x] Use `pytest-mock` or `unittest.mock` for mocking.
|
||||
- [x] Use fixtures for setup/teardown.
|
||||
- [x] Test both happy paths and edge cases.
|
||||
- [x] Mock external services and database connections.
|
||||
- [x] Use parameterized tests for edge cases.
|
||||
- [x] Document each test with a brief description.
|
||||
|
||||
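A small sketch combining several of these guidelines (fixture, mocking, docstrings); the `fetch_episode_count` function and the provider client are invented for the example:

```python
# Hypothetical sketch showing a pytest fixture plus a mocked external client.
from unittest.mock import MagicMock

import pytest


def fetch_episode_count(client, series_id: int) -> int:
    """Return the number of episodes reported by the provider client."""
    return len(client.list_episodes(series_id))


@pytest.fixture
def fake_client() -> MagicMock:
    client = MagicMock()
    client.list_episodes.return_value = ["ep1", "ep2", "ep3"]
    return client


def test_fetch_episode_count_uses_provider_client(fake_client: MagicMock) -> None:
    """The count should match what the (mocked) provider returns."""
    assert fetch_episode_count(fake_client, series_id=1) == 3
    fake_client.list_episodes.assert_called_once_with(1)
```
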
---
|
||||
|
||||
# Test TODO
|
||||
|
||||
## Application Flow & Setup Tests
|
||||
|
||||
### Setup Page Tests
|
||||
|
||||
- [x] Test setup page is displayed when configuration is missing
|
||||
- [x] Test setup page form submission creates valid configuration
|
||||
- [x] Test setup page redirects to auth page after successful setup
|
||||
- [x] Test setup page validation for required fields
|
||||
- [x] Test setup page handles database connection errors gracefully
|
||||
- [x] Test setup completion flag is properly set in configuration
|
||||
|
||||
### Authentication Flow Tests
|
||||
|
||||
- [x] Test auth page is displayed when authentication token is invalid
|
||||
- [x] Test auth page is displayed when authentication token is missing
|
||||
- [x] Test successful login creates valid authentication token
|
||||
- [x] Test failed login shows appropriate error messages
|
||||
- [x] Test auth page redirects to main application after successful authentication
|
||||
- [x] Test token validation middleware correctly identifies valid/invalid tokens
|
||||
- [x] Test token refresh functionality
|
||||
- [x] Test session expiration handling
|
||||
|
||||
### Main Application Access Tests
|
||||
|
||||
- [x] Test index.html is served when authentication is valid
|
||||
- [x] Test unauthenticated users are redirected to auth page
|
||||
- [x] Test users without completed setup are redirected to setup page
|
||||
- [x] Test middleware enforces correct flow priority (setup → auth → main)
|
||||
- [x] Test authenticated user session persistence
|
||||
- [x] Test graceful handling of token expiration during active session
|
||||
|
||||
### Integration Flow Tests
|
||||
|
||||
- [x] Test complete user journey: setup → auth → main application
|
||||
- [x] Test application behavior when setup is completed but user is not authenticated
|
||||
- [x] Test application behavior when configuration exists but is corrupted
|
||||
- [x] Test concurrent user sessions and authentication state management
|
||||
- [x] Test application restart preserves setup and authentication state appropriately
|
||||
|
||||
---
|
||||
|
||||
**Instruction to AI Agent:**
|
||||
Generate and check off each test case above as you complete it. Save all test files under `src/tests/` using the specified structure and conventions.
|
||||
BIN
data/aniworld.db
Binary file not shown.
BIN
data/cache.db
Binary file not shown.
@ -1,49 +0,0 @@
|
||||
{
|
||||
"security": {
|
||||
"master_password_hash": "1353f6d9db7090c302864c2d6437dc11cc96cd66d59d7737d1b345603fdbdfda",
|
||||
"salt": "a25e23440d681cef2d75c0adb6de0913359a1d8b9f98f9747fc75f53c79c4bd4",
|
||||
"session_timeout_hours": 24,
|
||||
"max_failed_attempts": 5,
|
||||
"lockout_duration_minutes": 30
|
||||
},
|
||||
"anime": {
|
||||
"directory": "\\\\sshfs.r\\ubuntu@192.168.178.43\\media\\serien\\Serien",
|
||||
"download_threads": 3,
|
||||
"download_speed_limit": null,
|
||||
"auto_rescan_time": "03:00",
|
||||
"auto_download_after_rescan": false
|
||||
},
|
||||
"logging": {
|
||||
"level": "INFO",
|
||||
"enable_console_logging": true,
|
||||
"enable_console_progress": false,
|
||||
"enable_fail2ban_logging": true,
|
||||
"log_file": "aniworld.log",
|
||||
"max_log_size_mb": 10,
|
||||
"log_backup_count": 5
|
||||
},
|
||||
"providers": {
|
||||
"default_provider": "aniworld.to",
|
||||
"preferred_language": "German Dub",
|
||||
"fallback_providers": [
|
||||
"aniworld.to"
|
||||
],
|
||||
"provider_timeout": 30,
|
||||
"retry_attempts": 3,
|
||||
"provider_settings": {
|
||||
"aniworld.to": {
|
||||
"enabled": true,
|
||||
"priority": 1,
|
||||
"quality_preference": "720p"
|
||||
}
|
||||
}
|
||||
},
|
||||
"advanced": {
|
||||
"max_concurrent_downloads": 3,
|
||||
"download_buffer_size": 8192,
|
||||
"connection_timeout": 30,
|
||||
"read_timeout": 300,
|
||||
"enable_debug_mode": false,
|
||||
"cache_duration_minutes": 60
|
||||
}
|
||||
}
|
||||
@ -1,48 +0,0 @@
|
||||
{
|
||||
"ui": {
|
||||
"theme": "auto",
|
||||
"density": "comfortable",
|
||||
"language": "en",
|
||||
"animations_enabled": true,
|
||||
"sidebar_collapsed": false,
|
||||
"grid_view": true,
|
||||
"items_per_page": 20
|
||||
},
|
||||
"downloads": {
|
||||
"auto_download": false,
|
||||
"download_quality": "best",
|
||||
"concurrent_downloads": 3,
|
||||
"retry_failed": true,
|
||||
"notification_sound": true,
|
||||
"auto_organize": true
|
||||
},
|
||||
"notifications": {
|
||||
"browser_notifications": true,
|
||||
"email_notifications": false,
|
||||
"webhook_notifications": false,
|
||||
"notification_types": {
|
||||
"download_complete": true,
|
||||
"download_error": true,
|
||||
"series_updated": false,
|
||||
"system_alerts": true
|
||||
}
|
||||
},
|
||||
"keyboard_shortcuts": {
|
||||
"enabled": true,
|
||||
"shortcuts": {
|
||||
"search": "ctrl+f",
|
||||
"download": "ctrl+d",
|
||||
"refresh": "f5",
|
||||
"select_all": "ctrl+a",
|
||||
"help": "f1",
|
||||
"settings": "ctrl+comma"
|
||||
}
|
||||
},
|
||||
"advanced": {
|
||||
"debug_mode": false,
|
||||
"performance_mode": false,
|
||||
"cache_enabled": true,
|
||||
"auto_backup": true,
|
||||
"log_level": "info"
|
||||
}
|
||||
}
|
||||
147
features.md
@ -1,135 +1,24 @@
|
||||
# AniWorld Application Features
|
||||
# Aniworld Web Application Features
|
||||
|
||||
## 1. Authentication & Security
|
||||
## Authentication & Security
|
||||
- **Master Password Login**: Secure access to the application with a master password system
|
||||
|
||||
- Master password authentication (JWT-based)
|
||||
- `POST /auth/login`: Login and receive JWT token
|
||||
- `GET /auth/verify`: Verify JWT token validity
|
||||
- `POST /auth/logout`: Logout (stateless)
|
||||
- Password hashing (SHA-256 + salt)
|
||||
- Configurable session timeout
|
||||
- Secure environment variable management
|
||||
## Configuration Management
|
||||
- **Setup Page**: Initial configuration interface for server setup and basic settings
|
||||
- **Config Page**: View and modify application configuration settings
|
||||
|
||||
## 2. Health & System Monitoring
|
||||
## User Interface
|
||||
- **Dark Mode**: Toggle between light and dark themes for better user experience
|
||||
|
||||
- Health check endpoints
|
||||
- `/health`: Basic health status
|
||||
- `/api/health`: Load balancer health
|
||||
- `/api/health/system`: System metrics (CPU, memory, disk)
|
||||
- `/api/health/database`: Database connectivity
|
||||
- `/api/health/dependencies`: External dependencies
|
||||
- `/api/health/performance`: Performance metrics
|
||||
- `/api/health/metrics`: Prometheus metrics
|
||||
- `/api/health/ready`: Readiness probe (Kubernetes)
|
||||
## Anime Management
|
||||
- **Anime Library Page**: Display list of anime series with missing episodes
|
||||
- **Series Selection**: Select individual anime series and add episodes to download queue
|
||||
- **Anime Search Page**: Search functionality to find and add new anime series to the library
|
||||
|
||||
## 3. Anime & Episode Management
|
||||
## Download Management
|
||||
- **Download Queue Page**: View and manage the current download queue
|
||||
- **Download Status Display**: Real-time status updates and progress of current downloads
|
||||
- **Queue Operations**: Add, remove, and prioritize items in the download queue
|
||||
|
||||
- Search anime
|
||||
- `GET /api/anime/search`: Search anime by title (pagination)
|
||||
- Get anime details
|
||||
- `GET /api/anime/{anime_id}`: Anime details
|
||||
- `GET /api/anime/{anime_id}/episodes`: List episodes
|
||||
- `GET /api/episodes/{episode_id}`: Episode details
|
||||
|
||||
## 4. Database & Storage Management
|
||||
|
||||
- Database info and statistics
|
||||
- `GET /api/database/info`: Database stats
|
||||
- Maintenance operations
|
||||
- `/maintenance/database/vacuum`: Vacuum database
|
||||
- `/maintenance/database/analyze`: Analyze database
|
||||
- `/maintenance/database/integrity-check`: Integrity check
|
||||
- `/maintenance/database/reindex`: Reindex database
|
||||
- `/maintenance/database/optimize`: Optimize database
|
||||
- `/maintenance/database/stats`: Get database stats
|
||||
|
||||
## 5. Bulk Operations
|
||||
|
||||
- Bulk download, update, organize, delete, export
|
||||
- `/api/bulk/download`: Start bulk download
|
||||
- `/api/bulk/update`: Bulk update
|
||||
- `/api/bulk/organize`: Organize series
|
||||
- `/api/bulk/delete`: Delete series
|
||||
- `/api/bulk/export`: Export series data
|
||||
|
||||
## 6. Performance Optimization
|
||||
|
||||
- Speed limit management
|
||||
- `/api/performance/speed-limit`: Get/set download speed limit
|
||||
- Cache statistics
|
||||
- `/api/performance/cache/stats`: Cache stats
|
||||
- Memory management
|
||||
- `/api/performance/memory/stats`: Memory usage stats
|
||||
- `/api/performance/memory/gc`: Force garbage collection
|
||||
- Download queue management
|
||||
- `/api/performance/downloads/tasks`: List download tasks
|
||||
- `/api/performance/downloads/add-task`: Add download task
|
||||
- `/api/performance/resume/tasks`: List resumable tasks
|
||||
|
||||
## 7. Diagnostics & Logging
|
||||
|
||||
- Diagnostic report generation
|
||||
- `/diagnostics/report`: Generate diagnostic report
|
||||
- Error reporting and stats
|
||||
- Logging configuration and log file management
|
||||
|
||||
## 8. Integrations
|
||||
|
||||
- API key management
|
||||
- Webhook configuration
|
||||
- Third-party API integrations
|
||||
|
||||
## 9. User Preferences & UI
|
||||
|
||||
- Theme management (light/dark/auto)
|
||||
- Language selection
|
||||
- Accessibility features (screen reader, color contrast, mobile support)
|
||||
- Keyboard shortcuts
|
||||
- UI density and grid/list view options
|
||||
|
||||
## 10. CLI Tool
|
||||
|
||||
- Series scanning and management
|
||||
- Search, download, rescan, display series
|
||||
- Progress bar for downloads
|
||||
- Retry logic for operations
|
||||
|
||||
## 11. Miscellaneous
|
||||
|
||||
- Environment configuration via `.env`
|
||||
- Modular, extensible architecture (MVC, Clean Architecture)
|
||||
- Automated testing (pytest, unittest)
|
||||
- Centralized error handling
|
||||
|
||||
## Authentication & Setup Flow
|
||||
|
||||
### Application Initialization Flow
|
||||
|
||||
- **Setup Page**: Display application setup page when the application is run for the first time and no configuration exists
|
||||
|
||||
- Check for presence of configuration file/database setup
|
||||
- Guide user through initial application configuration
|
||||
- Set up database connections, initial admin user, and core settings
|
||||
- Mark setup as completed in configuration
|
||||
|
||||
- **Authentication Gate**: Redirect to authentication page when user token is invalid or missing
|
||||
|
||||
- Validate existing authentication tokens
|
||||
- Display login/registration interface for unauthenticated users
|
||||
- Handle token refresh and session management
|
||||
- Redirect authenticated users to main application
|
||||
|
||||
- **Main Application**: Show index.html for authenticated users with valid tokens
|
||||
- Display main application interface
|
||||
- Provide access to all authenticated user features
|
||||
- Maintain session state and handle token expiration gracefully
|
||||
|
||||
### User Flow Priority
|
||||
|
||||
1. Check if application setup is completed → Show setup page if not
|
||||
2. Check if user is authenticated → Show auth page if not
|
||||
3. Show main application (index.html) for authenticated users
|
||||
|
||||
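A minimal sketch of how this priority could be enforced in FastAPI (the `is_setup_complete` and `is_token_valid` helpers are placeholders, not the project's actual functions):

```python
# Hypothetical sketch of the setup -> auth -> main priority check.
from fastapi import FastAPI, Request
from fastapi.responses import HTMLResponse, RedirectResponse

app = FastAPI()


def is_setup_complete() -> bool:  # placeholder for the real configuration check
    return True


def is_token_valid(request: Request) -> bool:  # placeholder for the real JWT check
    return request.headers.get("Authorization", "").startswith("Bearer ")


@app.get("/")
async def index(request: Request):
    if not is_setup_complete():
        return RedirectResponse(url="/setup")  # 1. setup first
    if not is_token_valid(request):
        return RedirectResponse(url="/auth")   # 2. then authentication
    return HTMLResponse("<p>index.html would be served here</p>")  # 3. main app
```
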
---
|
||||
|
||||
**Note:** Each feature is implemented via modular controllers, services, and utilities. See the respective source files for detailed function/class definitions.
|
||||
## Core Functionality Overview
|
||||
The web application provides a complete interface for managing anime downloads with user-friendly pages for configuration, library management, search capabilities, and download monitoring.
|
||||
136
infrastructure.md
Normal file
@ -0,0 +1,136 @@
|
||||
# Aniworld Web Application Infrastructure
|
||||
Activate the project's conda environment before running any of the commands below: `conda activate AniWorld`.
|
||||
|
||||
## Project Structure
|
||||
|
||||
```
|
||||
/home/lukas/Volume/repo/Aniworld/
|
||||
├── src/
|
||||
│ ├── server/ # FastAPI web application
|
||||
│ │ ├── main.py # FastAPI application entry point
|
||||
│ │ ├── api/ # API route handlers
|
||||
│ │ │ ├── __init__.py
|
||||
│ │ │ ├── auth.py # Authentication endpoints
|
||||
│ │ │ ├── config.py # Configuration endpoints
|
||||
│ │ │ ├── anime.py # Anime management endpoints
|
||||
│ │ │ ├── download.py # Download queue endpoints
|
||||
│ │ │ └── search.py # Search endpoints
|
||||
│ │ ├── models/ # Pydantic models
|
||||
│ │ │ ├── __init__.py
|
||||
│ │ │ ├── auth.py
|
||||
│ │ │ ├── config.py
|
||||
│ │ │ ├── anime.py
|
||||
│ │ │ └── download.py
|
||||
│ │ ├── services/ # Business logic services
|
||||
│ │ │ ├── __init__.py
|
||||
│ │ │ ├── auth_service.py
|
||||
│ │ │ ├── config_service.py
|
||||
│ │ │ ├── anime_service.py
|
||||
│ │ │ └── download_service.py
|
||||
│ │ ├── static/ # Static web assets
|
||||
│ │ │ ├── css/
|
||||
│ │ │ ├── js/
|
||||
│ │ │ └── images/
|
||||
│ │ ├── templates/ # Jinja2 HTML templates
|
||||
│ │ │ ├── base.html
|
||||
│ │ │ ├── login.html
|
||||
│ │ │ ├── setup.html
|
||||
│ │ │ ├── config.html
|
||||
│ │ │ ├── anime.html
|
||||
│ │ │ ├── download.html
|
||||
│ │ │ └── search.html
|
||||
│ │ └── utils/ # Utility functions
|
||||
│ │ ├── __init__.py
|
||||
│ │ ├── security.py
|
||||
│ │ └── dependencies.py
|
||||
│ ├── core/ # Existing core functionality
|
||||
│ └── cli/ # Existing CLI application
|
||||
├── data/ # Application data storage
|
||||
│ ├── config.json # Application configuration
|
||||
│ ├── anime_library.db # SQLite database for anime library
|
||||
│ ├── download_queue.json # Download queue state
|
||||
│ └── cache/ # Temporary cache files
|
||||
├── logs/ # Application logs
|
||||
│ ├── app.log # Main application log
|
||||
│ ├── download.log # Download-specific logs
|
||||
│ └── error.log # Error logs
|
||||
├── requirements.txt # Python dependencies
|
||||
├── docker-compose.yml # Docker deployment configuration
|
||||
└── README.md
|
||||
```
|
||||
|
||||
## Technology Stack
|
||||
|
||||
### Backend
|
||||
- **FastAPI**: Modern Python web framework for building APIs
|
||||
- **Uvicorn**: ASGI server for running FastAPI applications
|
||||
- **SQLite**: Lightweight database for storing anime library and configuration
|
||||
- **Pydantic**: Data validation and serialization
|
||||
- **Jinja2**: Template engine for server-side rendering
|
||||
|
||||
### Frontend
|
||||
- **HTML5/CSS3**: Core web technologies
|
||||
- **JavaScript (Vanilla)**: Client-side interactivity
|
||||
- **Bootstrap 5**: CSS framework for responsive design
|
||||
- **HTMX**: Modern approach for dynamic web applications
|
||||
|
||||
### Security
|
||||
- **Passlib**: Password hashing and verification
|
||||
- **python-jose**: JWT token handling
|
||||
- **bcrypt**: Secure password hashing
|
||||
|
||||
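An illustrative sketch of how these libraries are commonly combined (the key, algorithm, and token lifetime below are assumptions, not project settings):

```python
# Hypothetical password-hashing and JWT helpers using Passlib and python-jose.
from datetime import datetime, timedelta, timezone

from jose import jwt
from passlib.context import CryptContext

pwd_context = CryptContext(schemes=["bcrypt"], deprecated="auto")
SECRET_KEY = "change-me"  # would come from configuration in practice
ALGORITHM = "HS256"


def hash_password(password: str) -> str:
    return pwd_context.hash(password)


def verify_password(password: str, hashed: str) -> bool:
    return pwd_context.verify(password, hashed)


def create_access_token(subject: str, minutes: int = 60) -> str:
    expires = datetime.now(timezone.utc) + timedelta(minutes=minutes)
    return jwt.encode({"sub": subject, "exp": expires}, SECRET_KEY, algorithm=ALGORITHM)
```
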
## Configuration
|
||||
|
||||
### Data Storage
|
||||
- **Configuration**: JSON files in `data/` directory
|
||||
- **Anime Library**: SQLite database with series information
|
||||
- **Download Queue**: JSON file with current download status
|
||||
- **Logs**: Structured logging to files in `logs/` directory
|
||||
|
||||
## API Endpoints
|
||||
|
||||
### Authentication
|
||||
- `POST /api/auth/login` - Master password authentication
|
||||
- `POST /api/auth/logout` - Logout and invalidate session
|
||||
- `GET /api/auth/status` - Check authentication status
|
||||
|
||||
### Configuration
|
||||
- `GET /api/config` - Get current configuration
|
||||
- `PUT /api/config` - Update configuration
|
||||
- `POST /api/setup` - Initial setup
|
||||
|
||||
### Anime Management
|
||||
- `GET /api/anime` - List anime with missing episodes
|
||||
- `POST /api/anime/{id}/download` - Add episodes to download queue
|
||||
- `GET /api/anime/{id}` - Get anime details
|
||||
|
||||
### Download Management
|
||||
- `GET /api/downloads` - Get download queue status
|
||||
- `DELETE /api/downloads/{id}` - Remove from queue
|
||||
- `POST /api/downloads/priority` - Change download priority
|
||||
|
||||
### Search
|
||||
- `GET /api/search?q={query}` - Search for anime
|
||||
- `POST /api/search/add` - Add anime to library
|
||||
|
||||
## Logging
|
||||
|
||||
### Log Levels
|
||||
- **INFO**: General application information
|
||||
- **WARNING**: Potential issues that don't stop execution
|
||||
- **ERROR**: Errors that affect functionality
|
||||
- **DEBUG**: Detailed debugging information (development only)
|
||||
|
||||
### Log Files
|
||||
- `app.log`: General application logs
|
||||
- `download.log`: Download-specific operations
|
||||
- `error.log`: Error and exception logs
|
||||
|
||||
## Security Considerations
|
||||
|
||||
- Master password protection for application access
|
||||
- Secure session management with JWT tokens
|
||||
- Input validation and sanitization
|
||||
- Rate limiting on API endpoints
|
||||
- HTTPS enforcement in production
|
||||
- Secure file path handling to prevent directory traversal
|
||||
405
instructions.md
Normal file
@ -0,0 +1,405 @@
|
||||
# Aniworld Web Application Development Instructions
|
||||
|
||||
This document provides detailed tasks for AI agents to implement a modern web application for the Aniworld anime download manager. All tasks should follow the coding guidelines specified in the project's copilot instructions.
|
||||
|
||||
## Project Overview
|
||||
|
||||
The goal is to create a FastAPI-based web application that provides a modern interface for the existing Aniworld anime download functionality. The core anime logic should remain in `SeriesApp.py` while the web layer provides REST API endpoints and a responsive UI.
|
||||
|
||||
## Architecture Principles
|
||||
|
||||
- **Single Responsibility**: Each file/class has one clear purpose
|
||||
- **Dependency Injection**: Use FastAPI's dependency system
|
||||
- **Clean Separation**: Web layer calls core logic, never the reverse
|
||||
- **File Size Limit**: Maximum 500 lines per file
|
||||
- **Type Hints**: Use comprehensive type annotations
|
||||
- **Error Handling**: Proper exception handling and logging
|
||||
|
||||
## How you work
|
||||
|
||||
1. Take the next task.
2. Process the task.
3. Write tests.
4. Remove the task from instructions.md.
5. Commit the change in git.
6. Go to step 1.
|
||||
|
||||
## Implementation Order
|
||||
|
||||
The tasks should be completed in the following order to ensure proper dependencies and logical progression:
|
||||
|
||||
1. **Project Structure Setup** - Foundation and dependencies
|
||||
2. **Authentication System** - Security layer implementation
|
||||
3. **Configuration Management** - Settings and config handling
|
||||
4. **Anime Management Integration** - Core functionality wrapper
|
||||
5. **Download Queue Management** - Queue handling and persistence
|
||||
6. **WebSocket Real-time Updates** - Real-time communication
|
||||
7. **Frontend Integration** - Integrate existing frontend assets
|
||||
8. **Core Logic Integration** - Enhance existing core functionality
|
||||
9. **Database Layer** - Data persistence and management
|
||||
10. **Testing** - Comprehensive test coverage
|
||||
11. **Deployment and Configuration** - Production setup
|
||||
12. **Documentation and Error Handling** - Final documentation and error handling
|
||||
|
||||
## Core Tasks
|
||||
|
||||
### 1. Project Structure Setup
|
||||
|
||||
#### [] Create main FastAPI application structure
|
||||
|
||||
- Create `src/server/main.py`
|
||||
- Configure FastAPI app with CORS, middleware
|
||||
- Set up static file serving for existing frontend assets
|
||||
- Configure Jinja2 templates
|
||||
- Add health check endpoint
|
||||
|
||||
#### [] Set up dependency injection system
|
||||
|
||||
- Create `src/server/utils/dependencies.py`
|
||||
- Implement SeriesApp dependency injection
|
||||
- Add database session dependency
|
||||
- Create authentication dependency
|
||||
|
||||
#### [] Configure logging system
|
||||
|
||||
- Create `src/server/utils/logging.py`
|
||||
- Set up structured logging with multiple handlers
|
||||
- Configure log rotation and cleanup
|
||||
- Add request/response logging middleware
|
||||
|
||||
### 2. Authentication System
|
||||
|
||||
#### [] Implement authentication models
|
||||
|
||||
- Create `src/server/models/auth.py`
|
||||
- Define LoginRequest, LoginResponse models
|
||||
- Add SetupRequest, AuthStatus models
|
||||
- Include session management models
|
||||
|
||||
#### [] Create authentication service
|
||||
|
||||
- Create `src/server/services/auth_service.py`
|
||||
- Implement master password setup/validation
|
||||
- Add session management with JWT tokens
|
||||
- Include failed attempt tracking and lockout
|
||||
- Add password strength validation
|
||||
|
||||
#### [] Implement authentication API endpoints
|
||||
|
||||
- Create `src/server/api/auth.py`
|
||||
- Add POST `/api/auth/setup` - initial setup
|
||||
- Add POST `/api/auth/login` - login endpoint
|
||||
- Add POST `/api/auth/logout` - logout endpoint
|
||||
- Add GET `/api/auth/status` - authentication status
|
||||
|
||||
#### [] Create authentication middleware
|
||||
|
||||
- Create `src/server/middleware/auth.py`
|
||||
- Implement JWT token validation
|
||||
- Add request authentication checking
|
||||
- Include rate limiting for auth endpoints
|
||||
|
||||
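A minimal sketch of the token-validation part of this task (the real code may live in ASGI middleware rather than a dependency; the secret and algorithm are placeholders):

```python
# Hypothetical JWT validation dependency using python-jose.
from fastapi import Depends, HTTPException, status
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
from jose import JWTError, jwt

SECRET_KEY = "change-me"  # placeholder; read from configuration in practice
bearer_scheme = HTTPBearer()


def require_valid_token(
    credentials: HTTPAuthorizationCredentials = Depends(bearer_scheme),
) -> dict:
    try:
        return jwt.decode(credentials.credentials, SECRET_KEY, algorithms=["HS256"])
    except JWTError as exc:
        raise HTTPException(
            status_code=status.HTTP_401_UNAUTHORIZED,
            detail="Invalid or expired token",
        ) from exc
```
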
### 3. Configuration Management
|
||||
|
||||
#### [] Implement configuration models
|
||||
|
||||
- Create `src/server/models/config.py`
|
||||
- Define ConfigResponse, ConfigUpdate models
|
||||
- Add SchedulerConfig, LoggingConfig models
|
||||
- Include ValidationResult model
|
||||
|
||||
#### [] Create configuration service
|
||||
|
||||
- Create `src/server/services/config_service.py`
|
||||
- Implement configuration loading/saving
|
||||
- Add configuration validation
|
||||
- Include backup/restore functionality
|
||||
- Add scheduler configuration management
|
||||
|
||||
#### [] Implement configuration API endpoints
|
||||
|
||||
- Create `src/server/api/config.py`
|
||||
- Add GET `/api/config` - get configuration
|
||||
- Add PUT `/api/config` - update configuration
|
||||
- Add POST `/api/config/validate` - validate config
|
||||
- Add GET/POST `/api/config/backup` - backup management
|
||||
|
||||
### 4. Anime Management Integration
|
||||
|
||||
#### [] Implement anime models
|
||||
|
||||
- Create `src/server/models/anime.py`
|
||||
- Define AnimeSeriesResponse, EpisodeInfo models
|
||||
- Add SearchRequest, SearchResult models
|
||||
- Include MissingEpisodeInfo model
|
||||
|
||||
#### [] Create anime service wrapper
|
||||
|
||||
- Create `src/server/services/anime_service.py`
|
||||
- Wrap SeriesApp functionality for web layer
|
||||
- Implement async wrappers for blocking operations
|
||||
- Add caching for frequently accessed data
|
||||
- Include error handling and logging
|
||||
|
||||
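One way the "async wrappers for blocking operations" item could look (the `rescan` method name on SeriesApp is assumed for illustration only):

```python
# Hypothetical async wrapper around a blocking core call.
import asyncio
import logging

logger = logging.getLogger(__name__)


class AnimeService:
    def __init__(self, series_app) -> None:
        self._series_app = series_app

    async def rescan(self) -> object:
        # Run the blocking core call in a worker thread so the event loop
        # stays responsive during the scan.
        try:
            return await asyncio.to_thread(self._series_app.rescan)  # assumed method name
        except Exception:
            logger.exception("Rescan failed")
            raise
```
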
#### [] Implement anime API endpoints
|
||||
|
||||
- Create `src/server/api/anime.py`
|
||||
- Add GET `/api/v1/anime` - list series with missing episodes
|
||||
- Add POST `/api/v1/anime/rescan` - trigger rescan
|
||||
- Add POST `/api/v1/anime/search` - search for new anime
|
||||
- Add GET `/api/v1/anime/{id}` - get series details
|
||||
|
||||
### 5. Download Queue Management
|
||||
|
||||
#### [] Implement download queue models
|
||||
|
||||
- Create `src/server/models/download.py`
|
||||
- Define DownloadItem, QueueStatus models
|
||||
- Add DownloadProgress, QueueStats models
|
||||
- Include DownloadRequest model
|
||||
|
||||
#### [] Create download queue service
|
||||
|
||||
- Create `src/server/services/download_service.py`
|
||||
- Implement queue management (add, remove, reorder)
|
||||
- Add download progress tracking
|
||||
- Include queue persistence and recovery
|
||||
- Add concurrent download management
|
||||
|
||||
#### [] Implement download API endpoints
|
||||
|
||||
- Create `src/server/api/download.py`
|
||||
- Add GET `/api/queue/status` - get queue status
|
||||
- Add POST `/api/queue/add` - add to queue
|
||||
- Add DELETE `/api/queue/{id}` - remove from queue
|
||||
- Add POST `/api/queue/start` - start downloads
|
||||
- Add POST `/api/queue/stop` - stop downloads
|
||||
|
||||
### 6. WebSocket Real-time Updates
|
||||
|
||||
#### [] Implement WebSocket manager
|
||||
|
||||
- Create `src/server/services/websocket_service.py`
|
||||
- Add connection management
|
||||
- Implement broadcast functionality
|
||||
- Include room-based messaging
|
||||
- Add connection cleanup
|
||||
|
||||
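A minimal connection-manager sketch in the common FastAPI style (the real service would add the rooms, cleanup, and error handling listed above):

```python
# Hypothetical WebSocket connection manager.
from fastapi import WebSocket


class ConnectionManager:
    def __init__(self) -> None:
        self.active: list[WebSocket] = []

    async def connect(self, websocket: WebSocket) -> None:
        await websocket.accept()
        self.active.append(websocket)

    def disconnect(self, websocket: WebSocket) -> None:
        if websocket in self.active:
            self.active.remove(websocket)

    async def broadcast(self, message: dict) -> None:
        for websocket in list(self.active):
            await websocket.send_json(message)
```
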
#### [] Add real-time progress updates
|
||||
|
||||
- Create `src/server/services/progress_service.py`
|
||||
- Implement download progress broadcasting
|
||||
- Add scan progress updates
|
||||
- Include queue status changes
|
||||
- Add error notifications
|
||||
|
||||
#### [] Integrate WebSocket with core services
|
||||
|
||||
- Update download service to emit progress
|
||||
- Add scan progress notifications
|
||||
- Include queue change broadcasts
|
||||
- Add error/completion notifications
|
||||
|
||||
### 7. Frontend Integration
|
||||
|
||||
#### [] Integrate existing HTML templates
|
||||
|
||||
- Review and integrate existing HTML templates in `src/server/web/templates/`
|
||||
- Ensure templates work with FastAPI Jinja2 setup
|
||||
- Update template paths and static file references if needed
|
||||
- Maintain existing responsive layout and theme switching
|
||||
|
||||
#### [] Integrate existing JavaScript functionality
|
||||
|
||||
- Review existing JavaScript files in `src/server/web/static/js/`
|
||||
- Update API endpoint URLs to match FastAPI routes
|
||||
- Ensure WebSocket connections work with new backend
|
||||
- Maintain existing functionality for app.js and queue.js
|
||||
|
||||
#### [] Integrate existing CSS styling
|
||||
|
||||
- Review and integrate existing CSS files in `src/server/web/static/css/`
|
||||
- Ensure styling works with FastAPI static file serving
|
||||
- Maintain existing responsive design and theme support
|
||||
- Update any hardcoded paths if necessary
|
||||
|
||||
#### [] Update frontend-backend integration
|
||||
|
||||
- Ensure existing JavaScript calls match new API endpoints
|
||||
- Update authentication flow to work with new auth system
|
||||
- Verify WebSocket events match new service implementations
|
||||
- Test all existing UI functionality with new backend
|
||||
|
||||
### 8. Core Logic Integration
|
||||
|
||||
#### [] Enhance SeriesApp for web integration
|
||||
|
||||
- Update `src/core/SeriesApp.py`
|
||||
- Add async callback support
|
||||
- Implement progress reporting
|
||||
- Include better error handling
|
||||
- Add cancellation support
|
||||
|
||||
#### [] Create progress callback system
|
||||
|
||||
- Add progress callback interface
|
||||
- Implement scan progress reporting
|
||||
- Add download progress tracking
|
||||
- Include error/completion callbacks
|
||||
|
||||
#### [] Add configuration persistence
|
||||
|
||||
- Implement configuration file management
|
||||
- Add settings validation
|
||||
- Include backup/restore functionality
|
||||
- Add migration support for config updates
|
||||
|
||||
### 9. Database Layer
|
||||
|
||||
#### [] Implement database models
|
||||
|
||||
- Create `src/server/database/models.py`
|
||||
- Add SQLAlchemy models for anime series
|
||||
- Implement download queue persistence
|
||||
- Include user session storage
|
||||
|
||||
#### [] Create database service
|
||||
|
||||
- Create `src/server/database/service.py`
|
||||
- Add CRUD operations for anime data
|
||||
- Implement queue persistence
|
||||
- Include database migration support
|
||||
|
||||
#### [] Add database initialization
|
||||
|
||||
- Create `src/server/database/init.py`
|
||||
- Implement database setup
|
||||
- Add initial data migration
|
||||
- Include schema validation
|
||||
|
||||
### 10. Testing
|
||||
|
||||
#### [] Create unit tests for services
|
||||
|
||||
- Create `tests/unit/test_auth_service.py`
|
||||
- Create `tests/unit/test_anime_service.py`
|
||||
- Create `tests/unit/test_download_service.py`
|
||||
- Create `tests/unit/test_config_service.py`
|
||||
|
||||
#### [] Create API endpoint tests
|
||||
|
||||
- Create `tests/api/test_auth_endpoints.py`
|
||||
- Create `tests/api/test_anime_endpoints.py`
|
||||
- Create `tests/api/test_download_endpoints.py`
|
||||
- Create `tests/api/test_config_endpoints.py`
|
||||
|
||||
#### [] Create integration tests

- Create `tests/integration/test_download_flow.py`
- Create `tests/integration/test_auth_flow.py`
- Create `tests/integration/test_websocket.py`

#### [] Create frontend integration tests

- Create `tests/frontend/test_existing_ui_integration.py`
- Test existing JavaScript functionality with new backend
- Verify WebSocket connections and real-time updates
- Test authentication flow with existing frontend

### 11. Deployment and Configuration

#### [] Create Docker configuration

- Create `Dockerfile`
- Create `docker-compose.yml`
- Add environment configuration
- Include volume mappings for existing web assets

#### [] Create production configuration

- Create `src/server/config/production.py`
- Add environment variable handling (see the sketch below)
- Include security settings
- Add performance optimizations
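A minimal sketch of how `production.py` could read settings from environment variables, assuming a dataclass-based config; the class layout and variable names are illustrative only.

```python
import os
from dataclasses import dataclass


@dataclass(frozen=True)
class ProductionConfig:
    secret_key: str
    database_url: str
    host: str = "0.0.0.0"
    port: int = 5000
    debug: bool = False

    @classmethod
    def from_env(cls) -> "ProductionConfig":
        secret = os.environ.get("SECRET_KEY")
        if not secret:
            raise RuntimeError("SECRET_KEY must be set in production")
        return cls(
            secret_key=secret,
            database_url=os.environ.get("DATABASE_URL", "sqlite:///data/aniworld.db"),
            host=os.environ.get("HOST", "0.0.0.0"),
            port=int(os.environ.get("PORT", "5000")),
            debug=os.environ.get("DEBUG", "false").lower() == "true",
        )
```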
#### [] Create startup scripts

- Create `scripts/start.sh`
- Create `scripts/setup.py`
- Add dependency installation
- Include database initialization

### 12. Documentation and Error Handling

#### [] Create API documentation

- Add OpenAPI/Swagger documentation
- Include endpoint descriptions
- Add request/response examples
- Include authentication details

#### [] Implement comprehensive error handling

- Create custom exception classes (a sketch follows below)
- Add error logging and tracking
- Implement user-friendly error messages
- Include error recovery mechanisms
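A hedged sketch of custom exceptions plus a FastAPI handler that turns them into user-friendly JSON errors; class names and the example route are illustrative, not the project's actual API.

```python
import logging

from fastapi import FastAPI, Request
from fastapi.responses import JSONResponse

logger = logging.getLogger(__name__)
app = FastAPI()


class AniworldError(Exception):
    """Base class for application errors."""

    status_code = 500
    message = "Internal server error"


class SeriesNotFoundError(AniworldError):
    status_code = 404
    message = "Series not found"


@app.exception_handler(AniworldError)
async def aniworld_error_handler(request: Request, exc: AniworldError) -> JSONResponse:
    logger.error("Request %s failed: %s", request.url.path, exc)
    return JSONResponse(status_code=exc.status_code, content={"detail": exc.message})


@app.get("/api/anime/{series_id}")
async def get_series(series_id: int) -> dict:
    # Example route that always raises, to show the handler in action.
    raise SeriesNotFoundError()
```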
#### [] Create user documentation

- Create `docs/user_guide.md`
- Add installation instructions
- Include configuration guide
- Add troubleshooting section

## File Size Guidelines

- **Models**: Max 200 lines each
- **Services**: Max 450 lines each
- **API Endpoints**: Max 350 lines each
- **Templates**: Max 400 lines each
- **JavaScript**: Max 500 lines each
- **CSS**: Max 500 lines each
- **Tests**: Max 400 lines each

## Existing Frontend Assets

The following frontend assets already exist and should be integrated:

- **Templates**: Located in `src/server/web/templates/`
- **JavaScript**: Located in `src/server/web/static/js/` (app.js, queue.js, etc.)
- **CSS**: Located in `src/server/web/static/css/`
- **Static Assets**: Images and other assets in `src/server/web/static/`

When working with these files:

- Review existing functionality before making changes
- Maintain existing UI/UX patterns and design
- Update API calls to match new FastAPI endpoints
- Preserve existing WebSocket event handling
- Keep existing theme and responsive design features

## Quality Assurance

#### [] Code quality checks

- Run linting with flake8/pylint
- Check type hints with mypy
- Validate formatting with black
- Run security checks with bandit

#### [] Performance testing

- Load test API endpoints
- Test WebSocket connection limits
- Validate download performance
- Check memory usage patterns

#### [] Security validation

- Test authentication bypass attempts
- Validate input sanitization
- Check for injection vulnerabilities
- Test session management security

Each task should be implemented with proper error handling, logging, and type hints according to the project's coding standards.
9915 logs/aniworld.log
File diff suppressed because it is too large
18 pytest.ini
@ -1,18 +0,0 @@
[tool:pytest]
testpaths = src/tests
python_files = test_*.py
python_classes = Test*
python_functions = test_*
addopts =
    -v
    --tb=short
    --strict-markers
    --disable-warnings
markers =
    unit: Unit tests
    integration: Integration tests
    e2e: End-to-end tests
    slow: Slow running tests
filterwarnings =
    ignore::DeprecationWarning
    ignore::PendingDeprecationWarning
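With --strict-markers, only the markers declared above may be used. A minimal example of applying them in a test module (the test bodies are placeholders):

import pytest


@pytest.mark.unit
def test_example_unit() -> None:
    assert 1 + 1 == 2


@pytest.mark.slow
@pytest.mark.integration
def test_example_integration() -> None:
    # e.g. run only these with: pytest -m "integration and not slow"
    assert True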
BIN requirements.txt
Binary file not shown.
456 src/cli/Main.py
@ -1,229 +1,229 @@
import sys
import os
import logging
import time

from ..core.providers import aniworld_provider

from rich.progress import Progress
from ..core.entities import SerieList
from ..core.SerieScanner import SerieScanner
from ..core.providers.provider_factory import Loaders
from ..core.entities.series import Serie

# Configure logging
logging.basicConfig(level=logging.FATAL, format='%(asctime)s - %(levelname)s - %(funcName)s - %(message)s')
console_handler = logging.StreamHandler()
console_handler.setLevel(logging.ERROR)
console_handler.setFormatter(logging.Formatter(
    "%(asctime)s - %(levelname)s - %(funcName)s - %(message)s")
)
for h in logging.root.handlers:
    logging.root.removeHandler(h)

logging.getLogger("urllib3.connectionpool").setLevel(logging.ERROR)
logging.getLogger('charset_normalizer').setLevel(logging.ERROR)
logging.getLogger().setLevel(logging.ERROR)
for h in logging.getLogger().handlers:
    logging.getLogger().removeHandler(h)


class NoKeyFoundException(Exception):
    """Exception raised when an anime key cannot be found."""
    pass


class MatchNotFoundError(Exception):
    """Exception raised when no matching series can be found."""
    pass


class SeriesApp:
    """Interactive CLI for scanning, searching and downloading series."""

    _initialization_count = 0  # Track how many times initialization has been called

    def __init__(self, directory_to_search: str):
        SeriesApp._initialization_count += 1

        # Only show initialization message for the first instance
        if SeriesApp._initialization_count <= 1:
            print("Please wait while initializing...")

        self.progress = None
        self.directory_to_search = directory_to_search
        self.Loaders = Loaders()
        loader = self.Loaders.GetLoader(key="aniworld.to")
        self.SerieScanner = SerieScanner(directory_to_search, loader)

        self.List = SerieList(self.directory_to_search)
        self.__InitList__()

    def __InitList__(self):
        self.series_list = self.List.GetMissingEpisode()

    def display_series(self):
        """Print all series with assigned numbers."""
        print("\nCurrent result:")
        for i, serie in enumerate(self.series_list, 1):
            name = serie.name  # Access the property on the instance
            if name is None or str(name).strip() == "":
                print(f"{i}. {serie.folder}")
            else:
                print(f"{i}. {serie.name}")

    def search(self, words: str) -> list:
        loader = self.Loaders.GetLoader(key="aniworld.to")
        return loader.Search(words)

    def get_user_selection(self):
        """Handle user input for selecting series."""
        self.display_series()
        while True:
            selection = input(
                "\nSelect series by number (e.g. '1', '1,2' or 'all') or type 'exit' to return: ").strip().lower()

            if selection == "exit":
                return None

            selected_series = []
            if selection == "all":
                selected_series = self.series_list
            else:
                try:
                    indexes = [int(num) - 1 for num in selection.split(",")]
                    selected_series = [self.series_list[i] for i in indexes if 0 <= i < len(self.series_list)]
                except ValueError:
                    print("Invalid selection. Going back to the result display.")
                    self.display_series()
                    continue

            if selected_series:
                return selected_series
            else:
                print("No valid series selected. Going back to the result display.")
                return None

    def retry(self, func, max_retries=3, delay=2, *args, **kwargs):
        """Call func with the given arguments, retrying up to max_retries times with a delay between attempts."""
        for attempt in range(1, max_retries + 1):
            try:
                func(*args, **kwargs)
                return True
            except Exception as e:
                print(e)
                time.sleep(delay)
        return False

    def download_series(self, series):
        """Download the selected series while showing a progress bar."""
        total_downloaded = 0
        total_episodes = sum(sum(len(ep) for ep in serie.episodeDict.values()) for serie in series)
        self.progress = Progress()
        task1 = self.progress.add_task("[red]Processing...", total=total_episodes)
        task2 = self.progress.add_task("[green]...", total=0)
        self.task3 = self.progress.add_task("[gray]...", total=100)  # total=100 so the task shows a percentage
        self.progress.start()

        for serie in series:
            serie_episodes = sum(len(ep) for ep in serie.episodeDict.values())
            self.progress.update(task2, description=f"[green]{serie.folder}", total=serie_episodes)
            downloaded = 0
            for season, episodes in serie.episodeDict.items():
                for episode in episodes:
                    loader = self.Loaders.GetLoader(key="aniworld.to")
                    if loader.IsLanguage(season, episode, serie.key):
                        self.retry(loader.Download, 3, 1, self.directory_to_search, serie.folder, season, episode, serie.key, "German Dub", self.print_Download_Progress)

                    downloaded += 1
                    total_downloaded += 1

                    self.progress.update(task1, advance=1)
                    self.progress.update(task2, advance=1)
                    time.sleep(0.02)

        self.progress.stop()
        self.progress = None

    def print_Download_Progress(self, d):
        # Use self.progress and self.task3 for the download progress display
        if self.progress is None or not hasattr(self, 'task3'):
            return

        if d['status'] == 'downloading':
            total = d.get('total_bytes') or d.get('total_bytes_estimate')
            downloaded = d.get('downloaded_bytes', 0)
            if total:
                percent = downloaded / total * 100
                self.progress.update(self.task3, completed=percent, description=f"[gray]Download: {percent:.1f}%")
            else:
                self.progress.update(self.task3, description=f"[gray]{downloaded/1024/1024:.2f}MB downloaded")
        elif d['status'] == 'finished':
            self.progress.update(self.task3, completed=100, description="[gray]Download finished.")

    def search_mode(self):
        """Search for a series and allow the user to select an option."""
        search_string = input("Enter search string: ").strip()
        results = self.search(search_string)

        if not results:
            print("No results found. Returning to start.")
            return

        print("\nSearch results:")
        for i, result in enumerate(results, 1):
            print(f"{i}. {result.get('name')}")

        while True:
            selection = input("\nSelect an option by number or press <enter> to return: ").strip().lower()

            if selection == "":
                return

            try:
                index = int(selection) - 1
                if 0 <= index < len(results):
                    chosen_name = results[index]
                    self.List.add(Serie(chosen_name["link"], chosen_name["name"], "aniworld.to", chosen_name["link"], {}))
                    return
                else:
                    print("Invalid selection. Try again.")
            except ValueError:
                print("Invalid input. Try again.")

    def updateFromReinit(self, folder, counter):
        # Called by SerieScanner.Scan for each processed folder
        self.progress.update(self.task1, advance=1)

    def run(self):
        """Main function to run the app."""
        while True:
            action = input("\nChoose action ('s' for search, 'i' for init or 'd' for download): ").strip().lower()

            if action == "s":
                self.search_mode()
            elif action == "i":
                print("\nRescanning series...\n")

                self.progress = Progress()
                self.task1 = self.progress.add_task("[red]items processed...", total=300)
                self.progress.start()

                self.SerieScanner.Reinit()
                self.SerieScanner.Scan(self.updateFromReinit)

                self.List = SerieList(self.directory_to_search)
                self.__InitList__()

                self.progress.stop()
                self.progress = None

            elif action == "d":
                selected_series = self.get_user_selection()
                if selected_series:
                    self.download_series(selected_series)


# Run the app
if __name__ == "__main__":

    # Read the base directory from an environment variable
    directory_to_search = os.getenv("ANIME_DIRECTORY", "\\\\sshfs.r\\ubuntu@192.168.178.43\\media\\serien\\Serien")
    app = SeriesApp(directory_to_search)
    app.run()
@ -1,491 +1,491 @@
|
||||
2025-09-29 12:38:25 - INFO - __main__ - <module> - Enhanced logging system initialized
|
||||
2025-09-29 12:38:25 - INFO - __main__ - <module> - Starting Aniworld Flask server...
|
||||
2025-09-29 12:38:25 - INFO - __main__ - <module> - Anime directory: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien
|
||||
2025-09-29 12:38:25 - INFO - __main__ - <module> - Log level: INFO
|
||||
2025-09-29 12:38:25 - INFO - __main__ - <module> - Scheduled operations disabled
|
||||
2025-09-29 12:38:25 - INFO - __main__ - <module> - Server will be available at http://localhost:5000
|
||||
2025-09-29 12:38:30 - INFO - __main__ - <module> - Enhanced logging system initialized
|
||||
2025-09-29 12:38:30 - INFO - __main__ - <module> - Starting Aniworld Flask server...
|
||||
2025-09-29 12:38:30 - INFO - __main__ - <module> - Anime directory: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien
|
||||
2025-09-29 12:38:30 - INFO - __main__ - <module> - Log level: INFO
|
||||
2025-09-29 12:38:30 - INFO - __main__ - <module> - Scheduled operations disabled
|
||||
2025-09-29 12:38:30 - INFO - __main__ - <module> - Server will be available at http://localhost:5000
|
||||
2025-09-29 12:38:30 - WARNING - werkzeug - _log - * Debugger is active!
|
||||
2025-09-29 12:38:40 - INFO - root - __init__ - Initialized Loader with base path: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien
|
||||
2025-09-29 12:38:40 - INFO - root - load_series - Scanning anime folders in: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien
|
||||
2025-09-29 12:38:40 - WARNING - root - load_series - Skipping .deletedByTMM - No data folder found
|
||||
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\2.5 Dimensional Seduction (2024)\data
|
||||
2025-09-29 12:38:40 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\2.5 Dimensional Seduction (2024)\data for 2.5 Dimensional Seduction (2024)
|
||||
2025-09-29 12:38:40 - WARNING - root - load_series - Skipping 25-dimensional-seduction - No data folder found
|
||||
2025-09-29 12:38:40 - WARNING - root - load_series - Skipping 25-sai no Joshikousei (2018) - No data folder found
|
||||
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\7th Time Loop The Villainess Enjoys a Carefree Life Married to Her Worst Enemy! (2024)\data
|
||||
2025-09-29 12:38:40 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\7th Time Loop The Villainess Enjoys a Carefree Life Married to Her Worst Enemy! (2024)\data for 7th Time Loop The Villainess Enjoys a Carefree Life Married to Her Worst Enemy! (2024)
|
||||
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\9-nine-rulers-crown\data
|
||||
2025-09-29 12:38:40 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\9-nine-rulers-crown\data for 9-nine-rulers-crown
|
||||
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\A Couple of Cuckoos (2022)\data
|
||||
2025-09-29 12:38:40 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\A Couple of Cuckoos (2022)\data for A Couple of Cuckoos (2022)
|
||||
2025-09-29 12:38:40 - WARNING - root - load_series - Skipping A Time Called You (2023) - No data folder found
|
||||
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\A.I.C.O. Incarnation (2018)\data
|
||||
2025-09-29 12:38:40 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\A.I.C.O. Incarnation (2018)\data for A.I.C.O. Incarnation (2018)
|
||||
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Aesthetica of a Rogue Hero (2012)\data
|
||||
2025-09-29 12:38:40 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Aesthetica of a Rogue Hero (2012)\data for Aesthetica of a Rogue Hero (2012)
|
||||
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Alya Sometimes Hides Her Feelings in Russian (2024)\data
|
||||
2025-09-29 12:38:40 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Alya Sometimes Hides Her Feelings in Russian (2024)\data for Alya Sometimes Hides Her Feelings in Russian (2024)
|
||||
2025-09-29 12:38:40 - WARNING - root - load_series - Skipping American Horror Story (2011) - No data folder found
|
||||
2025-09-29 12:38:40 - WARNING - root - load_series - Skipping Andor (2022) - No data folder found
|
||||
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Angels of Death (2018)\data
|
||||
2025-09-29 12:38:40 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Angels of Death (2018)\data for Angels of Death (2018)
|
||||
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Aokana Four Rhythm Across the Blue (2016)\data
|
||||
2025-09-29 12:38:40 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Aokana Four Rhythm Across the Blue (2016)\data for Aokana Four Rhythm Across the Blue (2016)
|
||||
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Arifureta (2019)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Arifureta (2019)\data for Arifureta (2019)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\As a Reincarnated Aristocrat, I'll Use My Appraisal Skill to Rise in the World (2024)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\As a Reincarnated Aristocrat, I'll Use My Appraisal Skill to Rise in the World (2024)\data for As a Reincarnated Aristocrat, I'll Use My Appraisal Skill to Rise in the World (2024)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\BOFURI I Don't Want to Get Hurt, so I'll Max Out My Defense. (2020)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\BOFURI I Don't Want to Get Hurt, so I'll Max Out My Defense. (2020)\data for BOFURI I Don't Want to Get Hurt, so I'll Max Out My Defense. (2020)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Black Butler (2008)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Black Butler (2008)\data for Black Butler (2008)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Black Clover (2017)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Black Clover (2017)\data for Black Clover (2017)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blast of Tempest (2012)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blast of Tempest (2012)\data for Blast of Tempest (2012)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blood Lad (2013)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blood Lad (2013)\data for Blood Lad (2013)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blue Box (2024)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blue Box (2024)\data for Blue Box (2024)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blue Exorcist (2011)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blue Exorcist (2011)\data for Blue Exorcist (2011)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Bogus Skill Fruitmaster About That Time I Became Able to Eat Unlimited Numbers of Skill Fruits (That Kill You) (2025)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Bogus Skill Fruitmaster About That Time I Became Able to Eat Unlimited Numbers of Skill Fruits (That Kill You) (2025)\data for Bogus Skill Fruitmaster About That Time I Became Able to Eat Unlimited Numbers of Skill Fruits (That Kill You) (2025)
|
||||
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Boys Over Flowers (2009) - No data folder found
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Burst Angel (2004)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Burst Angel (2004)\data for Burst Angel (2004)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\By the Grace of the Gods (2020)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\By the Grace of the Gods (2020)\data for By the Grace of the Gods (2020)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Call of the Night (2022)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Call of the Night (2022)\data for Call of the Night (2022)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Campfire Cooking in Another World with My Absurd Skill (2023)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Campfire Cooking in Another World with My Absurd Skill (2023)\data for Campfire Cooking in Another World with My Absurd Skill (2023)
|
||||
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Celebrity (2023) - No data folder found
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Chainsaw Man (2022)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Chainsaw Man (2022)\data for Chainsaw Man (2022)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Charlotte (2015)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Charlotte (2015)\data for Charlotte (2015)
|
||||
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Cherish the Day (2020) - No data folder found
|
||||
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Chernobyl (2019) - No data folder found
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Chillin’ in Another World with Level 2 Super Cheat Powers (2024)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Chillin’ in Another World with Level 2 Super Cheat Powers (2024)\data for Chillin’ in Another World with Level 2 Super Cheat Powers (2024)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Clannad (2007)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Clannad (2007)\data for Clannad (2007)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Classroom of the Elite (2017)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Classroom of the Elite (2017)\data for Classroom of the Elite (2017)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Clevatess (2025)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Clevatess (2025)\data for Clevatess (2025)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\DAN DA DAN (2024)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\DAN DA DAN (2024)\data for DAN DA DAN (2024)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Danmachi Is It Wrong to Try to Pick Up Girls in a Dungeon (2015)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Danmachi Is It Wrong to Try to Pick Up Girls in a Dungeon (2015)\data for Danmachi Is It Wrong to Try to Pick Up Girls in a Dungeon (2015)
|
||||
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Das Buch von Boba Fett (2021) - No data folder found
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Date a Live (2013)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Date a Live (2013)\data for Date a Live (2013)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dead Mount Death Play (2023)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dead Mount Death Play (2023)\data for Dead Mount Death Play (2023)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Deadman Wonderland (2011)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Deadman Wonderland (2011)\data for Deadman Wonderland (2011)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dealing with Mikadono Sisters Is a Breeze (2025)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dealing with Mikadono Sisters Is a Breeze (2025)\data for Dealing with Mikadono Sisters Is a Breeze (2025)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Delicious in Dungeon (2024)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Delicious in Dungeon (2024)\data for Delicious in Dungeon (2024)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Demon Lord, Retry! (2019)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Demon Lord, Retry! (2019)\data for Demon Lord, Retry! (2019)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Demon Slave - The Chained Soldier (2024)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Demon Slave - The Chained Soldier (2024)\data for Demon Slave - The Chained Soldier (2024)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Demon Slayer Kimetsu no Yaiba (2019)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Demon Slayer Kimetsu no Yaiba (2019)\data for Demon Slayer Kimetsu no Yaiba (2019)
|
||||
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Der Herr der Ringe Die Ringe der Macht (2022) - No data folder found
|
||||
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Devil in Ohio (2022) - No data folder found
|
||||
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Die Bibel (2013) - No data folder found
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Die Tagebücher der Apothekerin (2023)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Die Tagebücher der Apothekerin (2023)\data for Die Tagebücher der Apothekerin (2023)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Domestic Girlfriend (2019)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Domestic Girlfriend (2019)\data for Domestic Girlfriend (2019)
|
||||
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Doona! (2023) - No data folder found
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dr. STONE (2019)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dr. STONE (2019)\data for Dr. STONE (2019)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dragonball Super (2015)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dragonball Super (2015)\data for Dragonball Super (2015)
|
||||
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Failure Frame I Became the Strongest and Annihilated Everything With Low-Level Spells (2024) - No data folder found
|
||||
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Fallout (2024) - No data folder found
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Farming Life in Another World (2023)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Farming Life in Another World (2023)\data for Farming Life in Another World (2023)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Frieren - Nach dem Ende der Reise (2023)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Frieren - Nach dem Ende der Reise (2023)\data for Frieren - Nach dem Ende der Reise (2023)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Fruits Basket (2019)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Fruits Basket (2019)\data for Fruits Basket (2019)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Gachiakuta (2025)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Gachiakuta (2025)\data for Gachiakuta (2025)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Gate (2015)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Gate (2015)\data for Gate (2015)
|
||||
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Generation der Verdammten (2014) - No data folder found
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Girls und Panzer (2012)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Girls und Panzer (2012)\data for Girls und Panzer (2012)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Gleipnir (2020)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Gleipnir (2020)\data for Gleipnir (2020)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Golden Time (2013)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Golden Time (2013)\data for Golden Time (2013)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Grimgar, Ashes and Illusions (2016)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Grimgar, Ashes and Illusions (2016)\data for Grimgar, Ashes and Illusions (2016)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Harem in the Labyrinth of Another World (2022)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Harem in the Labyrinth of Another World (2022)\data for Harem in the Labyrinth of Another World (2022)
|
||||
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Highschool D×D (2012) - No data folder found
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Hinamatsuri (2018)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Hinamatsuri (2018)\data for Hinamatsuri (2018)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I Got a Cheat Skill in Another World and Became Unrivaled in The Real World Too (2023)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I Got a Cheat Skill in Another World and Became Unrivaled in The Real World Too (2023)\data for I Got a Cheat Skill in Another World and Became Unrivaled in The Real World Too (2023)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I Parry Everything What Do You Mean I’m the Strongest I’m Not Even an Adventurer Yet! (2024)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I Parry Everything What Do You Mean I’m the Strongest I’m Not Even an Adventurer Yet! (2024)\data for I Parry Everything What Do You Mean I’m the Strongest I’m Not Even an Adventurer Yet! (2024)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I'm the Evil Lord of an Intergalactic Empire! (2025)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I'm the Evil Lord of an Intergalactic Empire! (2025)\data for I'm the Evil Lord of an Intergalactic Empire! (2025)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I've Been Killing Slimes for 300 Years and Maxed Out My Level (2021)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I've Been Killing Slimes for 300 Years and Maxed Out My Level (2021)\data for I've Been Killing Slimes for 300 Years and Maxed Out My Level (2021)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\In the Land of Leadale (2022)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\In the Land of Leadale (2022)\data for In the Land of Leadale (2022)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Ishura (2024)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Ishura (2024)\data for Ishura (2024)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I’ll Become a Villainess Who Goes Down in History (2024)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I’ll Become a Villainess Who Goes Down in History (2024)\data for I’ll Become a Villainess Who Goes Down in History (2024)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\JUJUTSU KAISEN (2020)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\JUJUTSU KAISEN (2020)\data for JUJUTSU KAISEN (2020)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kaguya-sama Love is War (2019)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kaguya-sama Love is War (2019)\data for Kaguya-sama Love is War (2019)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kaiju No. 8 (20200)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kaiju No. 8 (20200)\data for Kaiju No. 8 (20200)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\KamiKatsu Meine Arbeit als Missionar in einer gottlosen Welt (2023)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\KamiKatsu Meine Arbeit als Missionar in einer gottlosen Welt (2023)\data for KamiKatsu Meine Arbeit als Missionar in einer gottlosen Welt (2023)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Knight's & Magic (2017)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Knight's & Magic (2017)\data for Knight's & Magic (2017)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kombattanten werden entsandt! (2021)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kombattanten werden entsandt! (2021)\data for Kombattanten werden entsandt! (2021)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\KonoSuba – An Explosion on This Wonderful World! (2023)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\KonoSuba – An Explosion on This Wonderful World! (2023)\data for KonoSuba – An Explosion on This Wonderful World! (2023)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Konosuba God's Blessing on This Wonderful World! (2016)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Konosuba God's Blessing on This Wonderful World! (2016)\data for Konosuba God's Blessing on This Wonderful World! (2016)
|
||||
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Krieg der Welten (2019) - No data folder found
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kuma Kuma Kuma Bear (2020)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kuma Kuma Kuma Bear (2020)\data for Kuma Kuma Kuma Bear (2020)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Log Horizon (2013)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Log Horizon (2013)\data for Log Horizon (2013)
|
||||
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Loki (2021) - No data folder found
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Loner Life in Another World (2024)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Loner Life in Another World (2024)\data for Loner Life in Another World (2024)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Lord of Mysteries (2025)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Lord of Mysteries (2025)\data for Lord of Mysteries (2025)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Lycoris Recoil (2022)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Lycoris Recoil (2022)\data for Lycoris Recoil (2022)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Magic Maker How to Make Magic in Another World (2025)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Magic Maker How to Make Magic in Another World (2025)\data for Magic Maker How to Make Magic in Another World (2025)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Magical Girl Site (2018)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Magical Girl Site (2018)\data for Magical Girl Site (2018)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Management of a Novice Alchemist (2022)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Management of a Novice Alchemist (2022)\data for Management of a Novice Alchemist (2022)
|
||||
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Marianne (2019) - No data folder found
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Meine Wiedergeburt als Schleim in einer anderen Welt (2018)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Meine Wiedergeburt als Schleim in einer anderen Welt (2018)\data for Meine Wiedergeburt als Schleim in einer anderen Welt (2018)
|
||||
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Midnight Mass (2021) - No data folder found
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Mirai Nikki (2011)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Mirai Nikki (2011)\data for Mirai Nikki (2011)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Miss Kobayashi's Dragon Maid (2017)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Miss Kobayashi's Dragon Maid (2017)\data for Miss Kobayashi's Dragon Maid (2017)
|
||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Mob Psycho 100 (2016)\data
|
||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Mob Psycho 100 (2016)\data for Mob Psycho 100 (2016)
|
||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\More than a Married Couple, but Not Lovers (2022)\data
|
||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\More than a Married Couple, but Not Lovers (2022)\data for More than a Married Couple, but Not Lovers (2022)
|
||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Mushoku Tensei Jobless Reincarnation (2021)\data
|
||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Mushoku Tensei Jobless Reincarnation (2021)\data for Mushoku Tensei Jobless Reincarnation (2021)
|
||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Hero Academia Vigilantes (2025)\data
|
||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Hero Academia Vigilantes (2025)\data for My Hero Academia Vigilantes (2025)
|
||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Instant Death Ability Is So Overpowered, No One in This Other World Stands a Chance Against Me! (2024)\data
|
||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Instant Death Ability Is So Overpowered, No One in This Other World Stands a Chance Against Me! (2024)\data for My Instant Death Ability Is So Overpowered, No One in This Other World Stands a Chance Against Me! (2024)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Isekai Life (2022)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Isekai Life (2022)\data for My Isekai Life (2022)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Life as Inukai-san's Dog (2023)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Life as Inukai-san's Dog (2023)\data for My Life as Inukai-san's Dog (2023)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Unique Skill Makes Me OP even at Level 1 (2023)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Unique Skill Makes Me OP even at Level 1 (2023)\data for My Unique Skill Makes Me OP even at Level 1 (2023)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\New Saga (2025)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\New Saga (2025)\data for New Saga (2025)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Nina the Starry Bride (2024)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Nina the Starry Bride (2024)\data for Nina the Starry Bride (2024)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Nisekoi Liebe, Lügen & Yakuza (2014)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Nisekoi Liebe, Lügen & Yakuza (2014)\data for Nisekoi Liebe, Lügen & Yakuza (2014)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\No Game No Life (2014)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\No Game No Life (2014)\data for No Game No Life (2014)
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Obi-Wan Kenobi (2022) - No data folder found
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Orange (2016)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Orange (2016)\data for Orange (2016)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Peach Boy Riverside (2021)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Peach Boy Riverside (2021)\data for Peach Boy Riverside (2021)
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Penny Dreadful (2014) - No data folder found
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Planet Erde II Eine Erde - viele Welten (2016) - No data folder found
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Plastic Memories (2015)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Plastic Memories (2015)\data for Plastic Memories (2015)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Ragna Crimson (2023)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Ragna Crimson (2023)\data for Ragna Crimson (2023)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Rascal Does Not Dream of Bunny Girl Senpai (2018)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Rascal Does Not Dream of Bunny Girl Senpai (2018)\data for Rascal Does Not Dream of Bunny Girl Senpai (2018)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\ReMonster (2024)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\ReMonster (2024)\data for ReMonster (2024)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\ReZERO - Starting Life in Another World (2016)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\ReZERO - Starting Life in Another World (2016)\data for ReZERO - Starting Life in Another World (2016)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Reborn as a Vending Machine, I Now Wander the Dungeon (2023)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Reborn as a Vending Machine, I Now Wander the Dungeon (2023)\data for Reborn as a Vending Machine, I Now Wander the Dungeon (2023)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Redo of Healer (2021)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Redo of Healer (2021)\data for Redo of Healer (2021)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Rick and Morty (2013)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Rick and Morty (2013)\data for Rick and Morty (2013)
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Rocket & Groot (2017) - No data folder found
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Romulus (2020) - No data folder found
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Saga of Tanya the Evil (2017)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Saga of Tanya the Evil (2017)\data for Saga of Tanya the Evil (2017)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Seirei Gensouki Spirit Chronicles (2021)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Seirei Gensouki Spirit Chronicles (2021)\data for Seirei Gensouki Spirit Chronicles (2021)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Shangri-La Frontier (2023)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Shangri-La Frontier (2023)\data for Shangri-La Frontier (2023)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\She Professed Herself Pupil of the Wise Man (2022)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\She Professed Herself Pupil of the Wise Man (2022)\data for She Professed Herself Pupil of the Wise Man (2022)
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping She-Hulk Die Anwältin (2022) - No data folder found
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Solo Leveling (2024)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Solo Leveling (2024)\data for Solo Leveling (2024)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Spice and Wolf (2008)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Spice and Wolf (2008)\data for Spice and Wolf (2008)
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Star Trek Discovery (2017) - No data folder found
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Stargate (1997) - No data folder found
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Stargate Atlantis (2004) - No data folder found
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Steins;Gate (2011)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Steins;Gate (2011)\data for Steins;Gate (2011)
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Sweet Tooth (2021) - No data folder found
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Sword of the Demon Hunter Kijin Gen (2025)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Sword of the Demon Hunter Kijin Gen (2025)\data for Sword of the Demon Hunter Kijin Gen (2025)
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Tales from the Loop (2020) - No data folder found
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tamako Market (2013)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tamako Market (2013)\data for Tamako Market (2013)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Ancient Magus' Bride (2017)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Ancient Magus' Bride (2017)\data for The Ancient Magus' Bride (2017)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Demon Sword Master of Excalibur Academy (2023)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Demon Sword Master of Excalibur Academy (2023)\data for The Demon Sword Master of Excalibur Academy (2023)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Devil is a Part-Timer! (2013)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Devil is a Part-Timer! (2013)\data for The Devil is a Part-Timer! (2013)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Dreaming Boy is a Realist (2023)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Dreaming Boy is a Realist (2023)\data for The Dreaming Boy is a Realist (2023)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Dungeon of Black Company (2021)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Dungeon of Black Company (2021)\data for The Dungeon of Black Company (2021)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Eminence in Shadow (2022)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Eminence in Shadow (2022)\data for The Eminence in Shadow (2022)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Familiar of Zero (2006)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Familiar of Zero (2006)\data for The Familiar of Zero (2006)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Faraway Paladin (2021)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Faraway Paladin (2021)\data for The Faraway Paladin (2021)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Gorilla God’s Go-To Girl (2025)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Gorilla God’s Go-To Girl (2025)\data for The Gorilla God’s Go-To Girl (2025)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Hidden Dungeon Only I Can Enter (2021)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Hidden Dungeon Only I Can Enter (2021)\data for The Hidden Dungeon Only I Can Enter (2021)
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping The Last of Us (2023) - No data folder found
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping The Man in the High Castle (2015) - No data folder found
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping The Mandalorian (2019) - No data folder found
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Quintessential Quintuplets (2019)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Quintessential Quintuplets (2019)\data for The Quintessential Quintuplets (2019)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Saint’s Magic Power is Omnipotent (2021)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Saint’s Magic Power is Omnipotent (2021)\data for The Saint’s Magic Power is Omnipotent (2021)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Too-Perfect Saint Tossed Aside by My Fiance and Sold to Another Kingdom (2025)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Too-Perfect Saint Tossed Aside by My Fiance and Sold to Another Kingdom (2025)\data for The Too-Perfect Saint Tossed Aside by My Fiance and Sold to Another Kingdom (2025)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Unaware Atelier Meister (2025)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Unaware Atelier Meister (2025)\data for The Unaware Atelier Meister (2025)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Weakest Tamer Began a Journey to Pick Up Trash (2024)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Weakest Tamer Began a Journey to Pick Up Trash (2024)\data for The Weakest Tamer Began a Journey to Pick Up Trash (2024)
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping The Witcher (2019) - No data folder found
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping The World's Finest Assassin Gets Reincarnated in Another World as an Aristocrat (2021) - No data folder found
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\To Your Eternity (2021)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\To Your Eternity (2021)\data for To Your Eternity (2021)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tomo-chan Is a Girl! (2023)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tomo-chan Is a Girl! (2023)\data for Tomo-chan Is a Girl! (2023)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tonikawa Over the Moon for You (2020)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tonikawa Over the Moon for You (2020)\data for Tonikawa Over the Moon for You (2020)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tsukimichi Moonlit Fantasy (2021)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tsukimichi Moonlit Fantasy (2021)\data for Tsukimichi Moonlit Fantasy (2021)
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Unidentified - Die wahren X-Akten (2019) - No data folder found
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Unnamed Memory (2024)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Unnamed Memory (2024)\data for Unnamed Memory (2024)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Vom Landei zum Schwertheiligen (2025)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Vom Landei zum Schwertheiligen (2025)\data for Vom Landei zum Schwertheiligen (2025)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\WIND BREAKER (2024)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\WIND BREAKER (2024)\data for WIND BREAKER (2024)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\WITCH WATCH (2025)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\WITCH WATCH (2025)\data for WITCH WATCH (2025)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Wolf Girl & Black Prince (2014)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Wolf Girl & Black Prince (2014)\data for Wolf Girl & Black Prince (2014)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\World’s End Harem (2022)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\World’s End Harem (2022)\data for World’s End Harem (2022)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Zom 100 Bucket List of the Dead (2023)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Zom 100 Bucket List of the Dead (2023)\data for Zom 100 Bucket List of the Dead (2023)
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping a-couple-of-cuckoos - No data folder found
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\a-ninja-and-an-assassin-under-one-roof\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\a-ninja-and-an-assassin-under-one-roof\data for a-ninja-and-an-assassin-under-one-roof
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\a-nobodys-way-up-to-an-exploration-hero\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\a-nobodys-way-up-to-an-exploration-hero\data for a-nobodys-way-up-to-an-exploration-hero
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping a-silent-voice - No data folder found
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\am-i-actually-the-strongest\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\am-i-actually-the-strongest\data for am-i-actually-the-strongest
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\anne-shirley\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\anne-shirley\data for anne-shirley
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\apocalypse-bringer-mynoghra\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\apocalypse-bringer-mynoghra\data for apocalypse-bringer-mynoghra
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\banished-from-the-heros-party-i-decided-to-live-a-quiet-life-in-the-countryside\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\banished-from-the-heros-party-i-decided-to-live-a-quiet-life-in-the-countryside\data for banished-from-the-heros-party-i-decided-to-live-a-quiet-life-in-the-countryside
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\beheneko the elf girls cat is secretly an s ranked monster (2025) (2025)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\beheneko the elf girls cat is secretly an s ranked monster (2025) (2025)\data for beheneko the elf girls cat is secretly an s ranked monster (2025) (2025)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\berserk-of-gluttony\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\berserk-of-gluttony\data for berserk-of-gluttony
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\black-summoner\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\black-summoner\data for black-summoner
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\boarding-school-juliet\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\boarding-school-juliet\data for boarding-school-juliet
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\buddy-daddies\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\buddy-daddies\data for buddy-daddies
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\can-a-boy-girl-friendship-survive\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\can-a-boy-girl-friendship-survive\data for can-a-boy-girl-friendship-survive
2025-09-29 12:38:43 - WARNING - root - load_series - Skipping chillin-in-another-world-with-level-2-super-cheat-powers - No data folder found
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\chillin-in-my-30s-after-getting-fired-from-the-demon-kings-army\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\chillin-in-my-30s-after-getting-fired-from-the-demon-kings-army\data for chillin-in-my-30s-after-getting-fired-from-the-demon-kings-army
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\choujin koukousei tachi wa isekai de mo yoyuu de ikinuku you desu\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\choujin koukousei tachi wa isekai de mo yoyuu de ikinuku you desu\data for choujin koukousei tachi wa isekai de mo yoyuu de ikinuku you desu
2025-09-29 12:38:43 - WARNING - root - load_series - Skipping clevatess - No data folder found
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\compass-20-animation-project\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\compass-20-animation-project\data for compass-20-animation-project
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\dragon-raja-the-blazing-dawn\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\dragon-raja-the-blazing-dawn\data for dragon-raja-the-blazing-dawn
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\dragonar-academy\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\dragonar-academy\data for dragonar-academy
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\drugstore-in-another-world-the-slow-life-of-a-cheat-pharmacist\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\drugstore-in-another-world-the-slow-life-of-a-cheat-pharmacist\data for drugstore-in-another-world-the-slow-life-of-a-cheat-pharmacist
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\fluffy-paradise\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\fluffy-paradise\data for fluffy-paradise
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\food-for-the-soul\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\food-for-the-soul\data for food-for-the-soul
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\handyman-saitou-in-another-world\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\handyman-saitou-in-another-world\data for handyman-saitou-in-another-world
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\i-shall-survive-using-potions\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\i-shall-survive-using-potions\data for i-shall-survive-using-potions
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\im-giving-the-disgraced-noble-lady-i-rescued-a-crash-course-in-naughtiness\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\im-giving-the-disgraced-noble-lady-i-rescued-a-crash-course-in-naughtiness\data for im-giving-the-disgraced-noble-lady-i-rescued-a-crash-course-in-naughtiness
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\killing-bites\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\killing-bites\data for killing-bites
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\love-flops\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\love-flops\data for love-flops
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\magic-maker-how-to-make-magic-in-another-world\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\magic-maker-how-to-make-magic-in-another-world\data for magic-maker-how-to-make-magic-in-another-world
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\muhyo-rojis-bureau-of-supernatural-investigation\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\muhyo-rojis-bureau-of-supernatural-investigation\data for muhyo-rojis-bureau-of-supernatural-investigation
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\my-roommate-is-a-cat\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\my-roommate-is-a-cat\data for my-roommate-is-a-cat
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\nukitashi-the-animation\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\nukitashi-the-animation\data for nukitashi-the-animation
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\outbreak-company\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\outbreak-company\data for outbreak-company
2025-09-29 12:38:43 - WARNING - root - load_series - Skipping plastic-memories - No data folder found
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\pseudo-harem\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\pseudo-harem\data for pseudo-harem
2025-09-29 12:38:43 - WARNING - root - load_series - Skipping rent-a-girlfriend - No data folder found
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\sasaki-and-peeps\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\sasaki-and-peeps\data for sasaki-and-peeps
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\scooped-up-by-an-s-rank-adventurer\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\scooped-up-by-an-s-rank-adventurer\data for scooped-up-by-an-s-rank-adventurer
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\secrets-of-the-silent-witch\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\secrets-of-the-silent-witch\data for secrets-of-the-silent-witch
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\seton-academy-join-the-pack\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\seton-academy-join-the-pack\data for seton-academy-join-the-pack
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\shachibato-president-its-time-for-battle\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\shachibato-president-its-time-for-battle\data for shachibato-president-its-time-for-battle
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\skeleton-knight-in-another-world\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\skeleton-knight-in-another-world\data for skeleton-knight-in-another-world
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\sugar-apple-fairy-tale\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\sugar-apple-fairy-tale\data for sugar-apple-fairy-tale
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\summer-pockets\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\summer-pockets\data for summer-pockets
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\suppose-a-kid-from-the-last-dungeon-boonies-moved-to-a-starter-town\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\suppose-a-kid-from-the-last-dungeon-boonies-moved-to-a-starter-town\data for suppose-a-kid-from-the-last-dungeon-boonies-moved-to-a-starter-town
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-beginning-after-the-end\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-beginning-after-the-end\data for the-beginning-after-the-end
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-brilliant-healers-new-life-in-the-shadows\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-brilliant-healers-new-life-in-the-shadows\data for the-brilliant-healers-new-life-in-the-shadows
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-daily-life-of-a-middle-aged-online-shopper-in-another-world\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-daily-life-of-a-middle-aged-online-shopper-in-another-world\data for the-daily-life-of-a-middle-aged-online-shopper-in-another-world
2025-09-29 12:38:43 - WARNING - root - load_series - Skipping the-familiar-of-zero - No data folder found
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-fragrant-flower-blooms-with-dignity\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-fragrant-flower-blooms-with-dignity\data for the-fragrant-flower-blooms-with-dignity
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-great-cleric\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-great-cleric\data for the-great-cleric
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-new-chronicles-of-extraordinary-beings-preface\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-new-chronicles-of-extraordinary-beings-preface\data for the-new-chronicles-of-extraordinary-beings-preface
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-shiunji-family-children\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-shiunji-family-children\data for the-shiunji-family-children
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-shy-hero-and-the-assassin-princesses\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-shy-hero-and-the-assassin-princesses\data for the-shy-hero-and-the-assassin-princesses
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-testament-of-sister-new-devil\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-testament-of-sister-new-devil\data for the-testament-of-sister-new-devil
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-unwanted-undead-adventurer\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-unwanted-undead-adventurer\data for the-unwanted-undead-adventurer
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-water-magician\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-water-magician\data for the-water-magician
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-worlds-finest-assassin-gets-reincarnated-in-another-world-as-an-aristocrat\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-worlds-finest-assassin-gets-reincarnated-in-another-world-as-an-aristocrat\data for the-worlds-finest-assassin-gets-reincarnated-in-another-world-as-an-aristocrat
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-wrong-way-to-use-healing-magic\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-wrong-way-to-use-healing-magic\data for the-wrong-way-to-use-healing-magic
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\theres-no-freaking-way-ill-be-your-lover-unless\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\theres-no-freaking-way-ill-be-your-lover-unless\data for theres-no-freaking-way-ill-be-your-lover-unless
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\to-be-hero-x\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\to-be-hero-x\data for to-be-hero-x
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\tougen-anki\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\tougen-anki\data for tougen-anki
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\uglymug-epicfighter\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\uglymug-epicfighter\data for uglymug-epicfighter
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\valkyrie-drive-mermaid\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\valkyrie-drive-mermaid\data for valkyrie-drive-mermaid
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\wandering-witch-the-journey-of-elaina\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\wandering-witch-the-journey-of-elaina\data for wandering-witch-the-journey-of-elaina
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\war-god-system-im-counting-on-you\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\war-god-system-im-counting-on-you\data for war-god-system-im-counting-on-you
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\welcome-to-japan-ms-elf\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\welcome-to-japan-ms-elf\data for welcome-to-japan-ms-elf
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\welcome-to-the-outcasts-restaurant\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\welcome-to-the-outcasts-restaurant\data for welcome-to-the-outcasts-restaurant
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\yandere-dark-elf-she-chased-me-all-the-way-from-another-world\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\yandere-dark-elf-she-chased-me-all-the-way-from-another-world\data for yandere-dark-elf-she-chased-me-all-the-way-from-another-world
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Übel Blatt (2025)\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Übel Blatt (2025)\data for Übel Blatt (2025)
2025-09-29 20:23:13 - INFO - __main__ - <module> - Enhanced logging system initialized
2025-09-29 20:23:13 - INFO - __main__ - <module> - Starting Aniworld Flask server...
2025-09-29 20:23:13 - INFO - __main__ - <module> - Anime directory: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien
2025-09-29 20:23:13 - INFO - __main__ - <module> - Log level: INFO
2025-09-29 20:23:13 - INFO - __main__ - <module> - Scheduled operations disabled
2025-09-29 20:23:13 - INFO - __main__ - <module> - Server will be available at http://localhost:5000
2025-09-29 20:23:16 - INFO - __main__ - <module> - Enhanced logging system initialized
2025-09-29 20:23:16 - INFO - root - __init__ - Initialized Loader with base path: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien
2025-09-29 20:23:16 - INFO - root - load_series - Scanning anime folders in: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien
2025-09-29 20:23:16 - ERROR - root - init_series_app - Error initializing SeriesApp:
Traceback (most recent call last):
File "D:\repo\Aniworld/src/server/app.py", line 145, in init_series_app
series_app = SeriesApp(directory_to_search)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\repo\Aniworld\src\Main.py", line 54, in __init__
self.List = SerieList(self.directory_to_search)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\repo\Aniworld\src\server\core\entities\SerieList.py", line 9, in __init__
self.load_series()
File "D:\repo\Aniworld\src\server\core\entities\SerieList.py", line 29, in load_series
for anime_folder in os.listdir(self.directory):
^^^^^^^^^^^^^^^^^^^^^^^^^^
FileNotFoundError: [WinError 53] Der Netzwerkpfad wurde nicht gefunden: '\\\\sshfs.r\\ubuntu@192.168.178.43\\media\\serien\\Serien'
2025-09-29 20:23:16 - WARNING - werkzeug - _log - * Debugger is active!
2025-09-29 20:33:06 - DEBUG - schedule - clear - Deleting *all* jobs
2025-09-29 20:33:06 - INFO - application.services.scheduler_service - stop_scheduler - Scheduled operations stopped
2025-09-29 20:33:06 - INFO - __main__ - <module> - Scheduler stopped
2025-09-29 12:38:25 - INFO - __main__ - <module> - Enhanced logging system initialized
2025-09-29 12:38:25 - INFO - __main__ - <module> - Starting Aniworld Flask server...
2025-09-29 12:38:25 - INFO - __main__ - <module> - Anime directory: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien
2025-09-29 12:38:25 - INFO - __main__ - <module> - Log level: INFO
2025-09-29 12:38:25 - INFO - __main__ - <module> - Scheduled operations disabled
2025-09-29 12:38:25 - INFO - __main__ - <module> - Server will be available at http://localhost:5000
2025-09-29 12:38:30 - INFO - __main__ - <module> - Enhanced logging system initialized
2025-09-29 12:38:30 - INFO - __main__ - <module> - Starting Aniworld Flask server...
2025-09-29 12:38:30 - INFO - __main__ - <module> - Anime directory: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien
2025-09-29 12:38:30 - INFO - __main__ - <module> - Log level: INFO
2025-09-29 12:38:30 - INFO - __main__ - <module> - Scheduled operations disabled
2025-09-29 12:38:30 - INFO - __main__ - <module> - Server will be available at http://localhost:5000
2025-09-29 12:38:30 - WARNING - werkzeug - _log - * Debugger is active!
2025-09-29 12:38:40 - INFO - root - __init__ - Initialized Loader with base path: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien
2025-09-29 12:38:40 - INFO - root - load_series - Scanning anime folders in: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien
2025-09-29 12:38:40 - WARNING - root - load_series - Skipping .deletedByTMM - No data folder found
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\2.5 Dimensional Seduction (2024)\data
2025-09-29 12:38:40 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\2.5 Dimensional Seduction (2024)\data for 2.5 Dimensional Seduction (2024)
2025-09-29 12:38:40 - WARNING - root - load_series - Skipping 25-dimensional-seduction - No data folder found
2025-09-29 12:38:40 - WARNING - root - load_series - Skipping 25-sai no Joshikousei (2018) - No data folder found
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\7th Time Loop The Villainess Enjoys a Carefree Life Married to Her Worst Enemy! (2024)\data
2025-09-29 12:38:40 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\7th Time Loop The Villainess Enjoys a Carefree Life Married to Her Worst Enemy! (2024)\data for 7th Time Loop The Villainess Enjoys a Carefree Life Married to Her Worst Enemy! (2024)
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\9-nine-rulers-crown\data
2025-09-29 12:38:40 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\9-nine-rulers-crown\data for 9-nine-rulers-crown
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\A Couple of Cuckoos (2022)\data
2025-09-29 12:38:40 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\A Couple of Cuckoos (2022)\data for A Couple of Cuckoos (2022)
2025-09-29 12:38:40 - WARNING - root - load_series - Skipping A Time Called You (2023) - No data folder found
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\A.I.C.O. Incarnation (2018)\data
2025-09-29 12:38:40 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\A.I.C.O. Incarnation (2018)\data for A.I.C.O. Incarnation (2018)
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Aesthetica of a Rogue Hero (2012)\data
2025-09-29 12:38:40 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Aesthetica of a Rogue Hero (2012)\data for Aesthetica of a Rogue Hero (2012)
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Alya Sometimes Hides Her Feelings in Russian (2024)\data
2025-09-29 12:38:40 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Alya Sometimes Hides Her Feelings in Russian (2024)\data for Alya Sometimes Hides Her Feelings in Russian (2024)
2025-09-29 12:38:40 - WARNING - root - load_series - Skipping American Horror Story (2011) - No data folder found
2025-09-29 12:38:40 - WARNING - root - load_series - Skipping Andor (2022) - No data folder found
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Angels of Death (2018)\data
2025-09-29 12:38:40 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Angels of Death (2018)\data for Angels of Death (2018)
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Aokana Four Rhythm Across the Blue (2016)\data
2025-09-29 12:38:40 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Aokana Four Rhythm Across the Blue (2016)\data for Aokana Four Rhythm Across the Blue (2016)
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Arifureta (2019)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Arifureta (2019)\data for Arifureta (2019)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\As a Reincarnated Aristocrat, I'll Use My Appraisal Skill to Rise in the World (2024)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\As a Reincarnated Aristocrat, I'll Use My Appraisal Skill to Rise in the World (2024)\data for As a Reincarnated Aristocrat, I'll Use My Appraisal Skill to Rise in the World (2024)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\BOFURI I Don't Want to Get Hurt, so I'll Max Out My Defense. (2020)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\BOFURI I Don't Want to Get Hurt, so I'll Max Out My Defense. (2020)\data for BOFURI I Don't Want to Get Hurt, so I'll Max Out My Defense. (2020)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Black Butler (2008)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Black Butler (2008)\data for Black Butler (2008)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Black Clover (2017)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Black Clover (2017)\data for Black Clover (2017)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blast of Tempest (2012)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blast of Tempest (2012)\data for Blast of Tempest (2012)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blood Lad (2013)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blood Lad (2013)\data for Blood Lad (2013)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blue Box (2024)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blue Box (2024)\data for Blue Box (2024)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blue Exorcist (2011)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blue Exorcist (2011)\data for Blue Exorcist (2011)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Bogus Skill Fruitmaster About That Time I Became Able to Eat Unlimited Numbers of Skill Fruits (That Kill You) (2025)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Bogus Skill Fruitmaster About That Time I Became Able to Eat Unlimited Numbers of Skill Fruits (That Kill You) (2025)\data for Bogus Skill Fruitmaster About That Time I Became Able to Eat Unlimited Numbers of Skill Fruits (That Kill You) (2025)
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Boys Over Flowers (2009) - No data folder found
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Burst Angel (2004)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Burst Angel (2004)\data for Burst Angel (2004)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\By the Grace of the Gods (2020)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\By the Grace of the Gods (2020)\data for By the Grace of the Gods (2020)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Call of the Night (2022)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Call of the Night (2022)\data for Call of the Night (2022)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Campfire Cooking in Another World with My Absurd Skill (2023)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Campfire Cooking in Another World with My Absurd Skill (2023)\data for Campfire Cooking in Another World with My Absurd Skill (2023)
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Celebrity (2023) - No data folder found
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Chainsaw Man (2022)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Chainsaw Man (2022)\data for Chainsaw Man (2022)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Charlotte (2015)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Charlotte (2015)\data for Charlotte (2015)
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Cherish the Day (2020) - No data folder found
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Chernobyl (2019) - No data folder found
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Chillin’ in Another World with Level 2 Super Cheat Powers (2024)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Chillin’ in Another World with Level 2 Super Cheat Powers (2024)\data for Chillin’ in Another World with Level 2 Super Cheat Powers (2024)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Clannad (2007)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Clannad (2007)\data for Clannad (2007)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Classroom of the Elite (2017)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Classroom of the Elite (2017)\data for Classroom of the Elite (2017)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Clevatess (2025)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Clevatess (2025)\data for Clevatess (2025)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\DAN DA DAN (2024)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\DAN DA DAN (2024)\data for DAN DA DAN (2024)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Danmachi Is It Wrong to Try to Pick Up Girls in a Dungeon (2015)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Danmachi Is It Wrong to Try to Pick Up Girls in a Dungeon (2015)\data for Danmachi Is It Wrong to Try to Pick Up Girls in a Dungeon (2015)
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Das Buch von Boba Fett (2021) - No data folder found
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Date a Live (2013)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Date a Live (2013)\data for Date a Live (2013)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dead Mount Death Play (2023)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dead Mount Death Play (2023)\data for Dead Mount Death Play (2023)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Deadman Wonderland (2011)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Deadman Wonderland (2011)\data for Deadman Wonderland (2011)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dealing with Mikadono Sisters Is a Breeze (2025)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dealing with Mikadono Sisters Is a Breeze (2025)\data for Dealing with Mikadono Sisters Is a Breeze (2025)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Delicious in Dungeon (2024)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Delicious in Dungeon (2024)\data for Delicious in Dungeon (2024)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Demon Lord, Retry! (2019)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Demon Lord, Retry! (2019)\data for Demon Lord, Retry! (2019)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Demon Slave - The Chained Soldier (2024)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Demon Slave - The Chained Soldier (2024)\data for Demon Slave - The Chained Soldier (2024)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Demon Slayer Kimetsu no Yaiba (2019)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Demon Slayer Kimetsu no Yaiba (2019)\data for Demon Slayer Kimetsu no Yaiba (2019)
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Der Herr der Ringe Die Ringe der Macht (2022) - No data folder found
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Devil in Ohio (2022) - No data folder found
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Die Bibel (2013) - No data folder found
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Die Tagebücher der Apothekerin (2023)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Die Tagebücher der Apothekerin (2023)\data for Die Tagebücher der Apothekerin (2023)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Domestic Girlfriend (2019)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Domestic Girlfriend (2019)\data for Domestic Girlfriend (2019)
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Doona! (2023) - No data folder found
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dr. STONE (2019)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dr. STONE (2019)\data for Dr. STONE (2019)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dragonball Super (2015)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dragonball Super (2015)\data for Dragonball Super (2015)
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Failure Frame I Became the Strongest and Annihilated Everything With Low-Level Spells (2024) - No data folder found
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Fallout (2024) - No data folder found
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Farming Life in Another World (2023)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Farming Life in Another World (2023)\data for Farming Life in Another World (2023)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Frieren - Nach dem Ende der Reise (2023)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Frieren - Nach dem Ende der Reise (2023)\data for Frieren - Nach dem Ende der Reise (2023)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Fruits Basket (2019)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Fruits Basket (2019)\data for Fruits Basket (2019)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Gachiakuta (2025)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Gachiakuta (2025)\data for Gachiakuta (2025)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Gate (2015)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Gate (2015)\data for Gate (2015)
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Generation der Verdammten (2014) - No data folder found
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Girls und Panzer (2012)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Girls und Panzer (2012)\data for Girls und Panzer (2012)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Gleipnir (2020)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Gleipnir (2020)\data for Gleipnir (2020)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Golden Time (2013)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Golden Time (2013)\data for Golden Time (2013)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Grimgar, Ashes and Illusions (2016)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Grimgar, Ashes and Illusions (2016)\data for Grimgar, Ashes and Illusions (2016)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Harem in the Labyrinth of Another World (2022)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Harem in the Labyrinth of Another World (2022)\data for Harem in the Labyrinth of Another World (2022)
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Highschool D×D (2012) - No data folder found
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Hinamatsuri (2018)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Hinamatsuri (2018)\data for Hinamatsuri (2018)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I Got a Cheat Skill in Another World and Became Unrivaled in The Real World Too (2023)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I Got a Cheat Skill in Another World and Became Unrivaled in The Real World Too (2023)\data for I Got a Cheat Skill in Another World and Became Unrivaled in The Real World Too (2023)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I Parry Everything What Do You Mean I’m the Strongest I’m Not Even an Adventurer Yet! (2024)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I Parry Everything What Do You Mean I’m the Strongest I’m Not Even an Adventurer Yet! (2024)\data for I Parry Everything What Do You Mean I’m the Strongest I’m Not Even an Adventurer Yet! (2024)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I'm the Evil Lord of an Intergalactic Empire! (2025)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I'm the Evil Lord of an Intergalactic Empire! (2025)\data for I'm the Evil Lord of an Intergalactic Empire! (2025)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I've Been Killing Slimes for 300 Years and Maxed Out My Level (2021)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I've Been Killing Slimes for 300 Years and Maxed Out My Level (2021)\data for I've Been Killing Slimes for 300 Years and Maxed Out My Level (2021)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\In the Land of Leadale (2022)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\In the Land of Leadale (2022)\data for In the Land of Leadale (2022)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Ishura (2024)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Ishura (2024)\data for Ishura (2024)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I’ll Become a Villainess Who Goes Down in History (2024)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I’ll Become a Villainess Who Goes Down in History (2024)\data for I’ll Become a Villainess Who Goes Down in History (2024)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\JUJUTSU KAISEN (2020)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\JUJUTSU KAISEN (2020)\data for JUJUTSU KAISEN (2020)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kaguya-sama Love is War (2019)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kaguya-sama Love is War (2019)\data for Kaguya-sama Love is War (2019)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kaiju No. 8 (20200)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kaiju No. 8 (20200)\data for Kaiju No. 8 (20200)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\KamiKatsu Meine Arbeit als Missionar in einer gottlosen Welt (2023)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\KamiKatsu Meine Arbeit als Missionar in einer gottlosen Welt (2023)\data for KamiKatsu Meine Arbeit als Missionar in einer gottlosen Welt (2023)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Knight's & Magic (2017)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Knight's & Magic (2017)\data for Knight's & Magic (2017)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kombattanten werden entsandt! (2021)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kombattanten werden entsandt! (2021)\data for Kombattanten werden entsandt! (2021)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\KonoSuba – An Explosion on This Wonderful World! (2023)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\KonoSuba – An Explosion on This Wonderful World! (2023)\data for KonoSuba – An Explosion on This Wonderful World! (2023)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Konosuba God's Blessing on This Wonderful World! (2016)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Konosuba God's Blessing on This Wonderful World! (2016)\data for Konosuba God's Blessing on This Wonderful World! (2016)
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Krieg der Welten (2019) - No data folder found
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kuma Kuma Kuma Bear (2020)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kuma Kuma Kuma Bear (2020)\data for Kuma Kuma Kuma Bear (2020)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Log Horizon (2013)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Log Horizon (2013)\data for Log Horizon (2013)
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Loki (2021) - No data folder found
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Loner Life in Another World (2024)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Loner Life in Another World (2024)\data for Loner Life in Another World (2024)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Lord of Mysteries (2025)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Lord of Mysteries (2025)\data for Lord of Mysteries (2025)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Lycoris Recoil (2022)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Lycoris Recoil (2022)\data for Lycoris Recoil (2022)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Magic Maker How to Make Magic in Another World (2025)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Magic Maker How to Make Magic in Another World (2025)\data for Magic Maker How to Make Magic in Another World (2025)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Magical Girl Site (2018)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Magical Girl Site (2018)\data for Magical Girl Site (2018)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Management of a Novice Alchemist (2022)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Management of a Novice Alchemist (2022)\data for Management of a Novice Alchemist (2022)
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Marianne (2019) - No data folder found
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Meine Wiedergeburt als Schleim in einer anderen Welt (2018)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Meine Wiedergeburt als Schleim in einer anderen Welt (2018)\data for Meine Wiedergeburt als Schleim in einer anderen Welt (2018)
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Midnight Mass (2021) - No data folder found
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Mirai Nikki (2011)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Mirai Nikki (2011)\data for Mirai Nikki (2011)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Miss Kobayashi's Dragon Maid (2017)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Miss Kobayashi's Dragon Maid (2017)\data for Miss Kobayashi's Dragon Maid (2017)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Mob Psycho 100 (2016)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Mob Psycho 100 (2016)\data for Mob Psycho 100 (2016)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\More than a Married Couple, but Not Lovers (2022)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\More than a Married Couple, but Not Lovers (2022)\data for More than a Married Couple, but Not Lovers (2022)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Mushoku Tensei Jobless Reincarnation (2021)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Mushoku Tensei Jobless Reincarnation (2021)\data for Mushoku Tensei Jobless Reincarnation (2021)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Hero Academia Vigilantes (2025)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Hero Academia Vigilantes (2025)\data for My Hero Academia Vigilantes (2025)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Instant Death Ability Is So Overpowered, No One in This Other World Stands a Chance Against Me! (2024)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Instant Death Ability Is So Overpowered, No One in This Other World Stands a Chance Against Me! (2024)\data for My Instant Death Ability Is So Overpowered, No One in This Other World Stands a Chance Against Me! (2024)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Isekai Life (2022)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Isekai Life (2022)\data for My Isekai Life (2022)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Life as Inukai-san's Dog (2023)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Life as Inukai-san's Dog (2023)\data for My Life as Inukai-san's Dog (2023)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Unique Skill Makes Me OP even at Level 1 (2023)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Unique Skill Makes Me OP even at Level 1 (2023)\data for My Unique Skill Makes Me OP even at Level 1 (2023)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\New Saga (2025)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\New Saga (2025)\data for New Saga (2025)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Nina the Starry Bride (2024)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Nina the Starry Bride (2024)\data for Nina the Starry Bride (2024)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Nisekoi Liebe, Lügen & Yakuza (2014)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Nisekoi Liebe, Lügen & Yakuza (2014)\data for Nisekoi Liebe, Lügen & Yakuza (2014)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\No Game No Life (2014)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\No Game No Life (2014)\data for No Game No Life (2014)
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Obi-Wan Kenobi (2022) - No data folder found
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Orange (2016)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Orange (2016)\data for Orange (2016)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Peach Boy Riverside (2021)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Peach Boy Riverside (2021)\data for Peach Boy Riverside (2021)
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Penny Dreadful (2014) - No data folder found
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Planet Erde II Eine Erde - viele Welten (2016) - No data folder found
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Plastic Memories (2015)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Plastic Memories (2015)\data for Plastic Memories (2015)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Ragna Crimson (2023)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Ragna Crimson (2023)\data for Ragna Crimson (2023)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Rascal Does Not Dream of Bunny Girl Senpai (2018)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Rascal Does Not Dream of Bunny Girl Senpai (2018)\data for Rascal Does Not Dream of Bunny Girl Senpai (2018)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\ReMonster (2024)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\ReMonster (2024)\data for ReMonster (2024)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\ReZERO - Starting Life in Another World (2016)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\ReZERO - Starting Life in Another World (2016)\data for ReZERO - Starting Life in Another World (2016)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Reborn as a Vending Machine, I Now Wander the Dungeon (2023)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Reborn as a Vending Machine, I Now Wander the Dungeon (2023)\data for Reborn as a Vending Machine, I Now Wander the Dungeon (2023)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Redo of Healer (2021)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Redo of Healer (2021)\data for Redo of Healer (2021)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Rick and Morty (2013)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Rick and Morty (2013)\data for Rick and Morty (2013)
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Rocket & Groot (2017) - No data folder found
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Romulus (2020) - No data folder found
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Saga of Tanya the Evil (2017)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Saga of Tanya the Evil (2017)\data for Saga of Tanya the Evil (2017)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Seirei Gensouki Spirit Chronicles (2021)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Seirei Gensouki Spirit Chronicles (2021)\data for Seirei Gensouki Spirit Chronicles (2021)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Shangri-La Frontier (2023)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Shangri-La Frontier (2023)\data for Shangri-La Frontier (2023)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\She Professed Herself Pupil of the Wise Man (2022)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\She Professed Herself Pupil of the Wise Man (2022)\data for She Professed Herself Pupil of the Wise Man (2022)
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping She-Hulk Die Anwältin (2022) - No data folder found
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Solo Leveling (2024)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Solo Leveling (2024)\data for Solo Leveling (2024)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Spice and Wolf (2008)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Spice and Wolf (2008)\data for Spice and Wolf (2008)
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Star Trek Discovery (2017) - No data folder found
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Stargate (1997) - No data folder found
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Stargate Atlantis (2004) - No data folder found
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Steins;Gate (2011)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Steins;Gate (2011)\data for Steins;Gate (2011)
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Sweet Tooth (2021) - No data folder found
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Sword of the Demon Hunter Kijin Gen (2025)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Sword of the Demon Hunter Kijin Gen (2025)\data for Sword of the Demon Hunter Kijin Gen (2025)
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Tales from the Loop (2020) - No data folder found
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tamako Market (2013)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tamako Market (2013)\data for Tamako Market (2013)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Ancient Magus' Bride (2017)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Ancient Magus' Bride (2017)\data for The Ancient Magus' Bride (2017)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Demon Sword Master of Excalibur Academy (2023)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Demon Sword Master of Excalibur Academy (2023)\data for The Demon Sword Master of Excalibur Academy (2023)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Devil is a Part-Timer! (2013)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Devil is a Part-Timer! (2013)\data for The Devil is a Part-Timer! (2013)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Dreaming Boy is a Realist (2023)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Dreaming Boy is a Realist (2023)\data for The Dreaming Boy is a Realist (2023)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Dungeon of Black Company (2021)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Dungeon of Black Company (2021)\data for The Dungeon of Black Company (2021)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Eminence in Shadow (2022)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Eminence in Shadow (2022)\data for The Eminence in Shadow (2022)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Familiar of Zero (2006)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Familiar of Zero (2006)\data for The Familiar of Zero (2006)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Faraway Paladin (2021)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Faraway Paladin (2021)\data for The Faraway Paladin (2021)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Gorilla God’s Go-To Girl (2025)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Gorilla God’s Go-To Girl (2025)\data for The Gorilla God’s Go-To Girl (2025)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Hidden Dungeon Only I Can Enter (2021)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Hidden Dungeon Only I Can Enter (2021)\data for The Hidden Dungeon Only I Can Enter (2021)
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping The Last of Us (2023) - No data folder found
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping The Man in the High Castle (2015) - No data folder found
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping The Mandalorian (2019) - No data folder found
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Quintessential Quintuplets (2019)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Quintessential Quintuplets (2019)\data for The Quintessential Quintuplets (2019)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Saint’s Magic Power is Omnipotent (2021)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Saint’s Magic Power is Omnipotent (2021)\data for The Saint’s Magic Power is Omnipotent (2021)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Too-Perfect Saint Tossed Aside by My Fiance and Sold to Another Kingdom (2025)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Too-Perfect Saint Tossed Aside by My Fiance and Sold to Another Kingdom (2025)\data for The Too-Perfect Saint Tossed Aside by My Fiance and Sold to Another Kingdom (2025)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Unaware Atelier Meister (2025)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Unaware Atelier Meister (2025)\data for The Unaware Atelier Meister (2025)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Weakest Tamer Began a Journey to Pick Up Trash (2024)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Weakest Tamer Began a Journey to Pick Up Trash (2024)\data for The Weakest Tamer Began a Journey to Pick Up Trash (2024)
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping The Witcher (2019) - No data folder found
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping The World's Finest Assassin Gets Reincarnated in Another World as an Aristocrat (2021) - No data folder found
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\To Your Eternity (2021)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\To Your Eternity (2021)\data for To Your Eternity (2021)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tomo-chan Is a Girl! (2023)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tomo-chan Is a Girl! (2023)\data for Tomo-chan Is a Girl! (2023)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tonikawa Over the Moon for You (2020)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tonikawa Over the Moon for You (2020)\data for Tonikawa Over the Moon for You (2020)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tsukimichi Moonlit Fantasy (2021)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tsukimichi Moonlit Fantasy (2021)\data for Tsukimichi Moonlit Fantasy (2021)
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Unidentified - Die wahren X-Akten (2019) - No data folder found
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Unnamed Memory (2024)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Unnamed Memory (2024)\data for Unnamed Memory (2024)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Vom Landei zum Schwertheiligen (2025)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Vom Landei zum Schwertheiligen (2025)\data for Vom Landei zum Schwertheiligen (2025)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\WIND BREAKER (2024)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\WIND BREAKER (2024)\data for WIND BREAKER (2024)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\WITCH WATCH (2025)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\WITCH WATCH (2025)\data for WITCH WATCH (2025)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Wolf Girl & Black Prince (2014)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Wolf Girl & Black Prince (2014)\data for Wolf Girl & Black Prince (2014)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\World’s End Harem (2022)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\World’s End Harem (2022)\data for World’s End Harem (2022)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Zom 100 Bucket List of the Dead (2023)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Zom 100 Bucket List of the Dead (2023)\data for Zom 100 Bucket List of the Dead (2023)
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping a-couple-of-cuckoos - No data folder found
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\a-ninja-and-an-assassin-under-one-roof\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\a-ninja-and-an-assassin-under-one-roof\data for a-ninja-and-an-assassin-under-one-roof
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\a-nobodys-way-up-to-an-exploration-hero\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\a-nobodys-way-up-to-an-exploration-hero\data for a-nobodys-way-up-to-an-exploration-hero
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping a-silent-voice - No data folder found
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\am-i-actually-the-strongest\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\am-i-actually-the-strongest\data for am-i-actually-the-strongest
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\anne-shirley\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\anne-shirley\data for anne-shirley
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\apocalypse-bringer-mynoghra\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\apocalypse-bringer-mynoghra\data for apocalypse-bringer-mynoghra
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\banished-from-the-heros-party-i-decided-to-live-a-quiet-life-in-the-countryside\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\banished-from-the-heros-party-i-decided-to-live-a-quiet-life-in-the-countryside\data for banished-from-the-heros-party-i-decided-to-live-a-quiet-life-in-the-countryside
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\beheneko the elf girls cat is secretly an s ranked monster (2025) (2025)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\beheneko the elf girls cat is secretly an s ranked monster (2025) (2025)\data for beheneko the elf girls cat is secretly an s ranked monster (2025) (2025)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\berserk-of-gluttony\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\berserk-of-gluttony\data for berserk-of-gluttony
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\black-summoner\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\black-summoner\data for black-summoner
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\boarding-school-juliet\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\boarding-school-juliet\data for boarding-school-juliet
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\buddy-daddies\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\buddy-daddies\data for buddy-daddies
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\can-a-boy-girl-friendship-survive\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\can-a-boy-girl-friendship-survive\data for can-a-boy-girl-friendship-survive
2025-09-29 12:38:43 - WARNING - root - load_series - Skipping chillin-in-another-world-with-level-2-super-cheat-powers - No data folder found
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\chillin-in-my-30s-after-getting-fired-from-the-demon-kings-army\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\chillin-in-my-30s-after-getting-fired-from-the-demon-kings-army\data for chillin-in-my-30s-after-getting-fired-from-the-demon-kings-army
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\choujin koukousei tachi wa isekai de mo yoyuu de ikinuku you desu\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\choujin koukousei tachi wa isekai de mo yoyuu de ikinuku you desu\data for choujin koukousei tachi wa isekai de mo yoyuu de ikinuku you desu
2025-09-29 12:38:43 - WARNING - root - load_series - Skipping clevatess - No data folder found
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\compass-20-animation-project\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\compass-20-animation-project\data for compass-20-animation-project
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\dragon-raja-the-blazing-dawn\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\dragon-raja-the-blazing-dawn\data for dragon-raja-the-blazing-dawn
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\dragonar-academy\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\dragonar-academy\data for dragonar-academy
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\drugstore-in-another-world-the-slow-life-of-a-cheat-pharmacist\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\drugstore-in-another-world-the-slow-life-of-a-cheat-pharmacist\data for drugstore-in-another-world-the-slow-life-of-a-cheat-pharmacist
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\fluffy-paradise\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\fluffy-paradise\data for fluffy-paradise
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\food-for-the-soul\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\food-for-the-soul\data for food-for-the-soul
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\handyman-saitou-in-another-world\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\handyman-saitou-in-another-world\data for handyman-saitou-in-another-world
|
||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\i-shall-survive-using-potions\data
|
||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\i-shall-survive-using-potions\data for i-shall-survive-using-potions
|
||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\im-giving-the-disgraced-noble-lady-i-rescued-a-crash-course-in-naughtiness\data
|
||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\im-giving-the-disgraced-noble-lady-i-rescued-a-crash-course-in-naughtiness\data for im-giving-the-disgraced-noble-lady-i-rescued-a-crash-course-in-naughtiness
|
||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\killing-bites\data
|
||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\killing-bites\data for killing-bites
|
||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\love-flops\data
|
||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\love-flops\data for love-flops
|
||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\magic-maker-how-to-make-magic-in-another-world\data
|
||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\magic-maker-how-to-make-magic-in-another-world\data for magic-maker-how-to-make-magic-in-another-world
|
||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\muhyo-rojis-bureau-of-supernatural-investigation\data
|
||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\muhyo-rojis-bureau-of-supernatural-investigation\data for muhyo-rojis-bureau-of-supernatural-investigation
|
||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\my-roommate-is-a-cat\data
|
||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\my-roommate-is-a-cat\data for my-roommate-is-a-cat
|
||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\nukitashi-the-animation\data
|
||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\nukitashi-the-animation\data for nukitashi-the-animation
|
||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\outbreak-company\data
|
||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\outbreak-company\data for outbreak-company
|
||||
2025-09-29 12:38:43 - WARNING - root - load_series - Skipping plastic-memories - No data folder found
|
||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\pseudo-harem\data
|
||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\pseudo-harem\data for pseudo-harem
|
||||
2025-09-29 12:38:43 - WARNING - root - load_series - Skipping rent-a-girlfriend - No data folder found
|
||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\sasaki-and-peeps\data
|
||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\sasaki-and-peeps\data for sasaki-and-peeps
|
||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\scooped-up-by-an-s-rank-adventurer\data
|
||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\scooped-up-by-an-s-rank-adventurer\data for scooped-up-by-an-s-rank-adventurer
|
||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\secrets-of-the-silent-witch\data
|
||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\secrets-of-the-silent-witch\data for secrets-of-the-silent-witch
|
||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\seton-academy-join-the-pack\data
|
||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\seton-academy-join-the-pack\data for seton-academy-join-the-pack
|
||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\shachibato-president-its-time-for-battle\data
|
||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\shachibato-president-its-time-for-battle\data for shachibato-president-its-time-for-battle
|
||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\skeleton-knight-in-another-world\data
|
||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\skeleton-knight-in-another-world\data for skeleton-knight-in-another-world
|
||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\sugar-apple-fairy-tale\data
|
||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\sugar-apple-fairy-tale\data for sugar-apple-fairy-tale
|
||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\summer-pockets\data
|
||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\summer-pockets\data for summer-pockets
|
||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\suppose-a-kid-from-the-last-dungeon-boonies-moved-to-a-starter-town\data
|
||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\suppose-a-kid-from-the-last-dungeon-boonies-moved-to-a-starter-town\data for suppose-a-kid-from-the-last-dungeon-boonies-moved-to-a-starter-town
|
||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-beginning-after-the-end\data
|
||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-beginning-after-the-end\data for the-beginning-after-the-end
|
||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-brilliant-healers-new-life-in-the-shadows\data
|
||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-brilliant-healers-new-life-in-the-shadows\data for the-brilliant-healers-new-life-in-the-shadows
|
||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-daily-life-of-a-middle-aged-online-shopper-in-another-world\data
|
||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-daily-life-of-a-middle-aged-online-shopper-in-another-world\data for the-daily-life-of-a-middle-aged-online-shopper-in-another-world
|
||||
2025-09-29 12:38:43 - WARNING - root - load_series - Skipping the-familiar-of-zero - No data folder found
|
||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-fragrant-flower-blooms-with-dignity\data
|
||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-fragrant-flower-blooms-with-dignity\data for the-fragrant-flower-blooms-with-dignity
|
||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-great-cleric\data
|
||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-great-cleric\data for the-great-cleric
|
||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-new-chronicles-of-extraordinary-beings-preface\data
|
||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-new-chronicles-of-extraordinary-beings-preface\data for the-new-chronicles-of-extraordinary-beings-preface
|
||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-shiunji-family-children\data
|
||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-shiunji-family-children\data for the-shiunji-family-children
|
||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-shy-hero-and-the-assassin-princesses\data
|
||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-shy-hero-and-the-assassin-princesses\data for the-shy-hero-and-the-assassin-princesses
|
||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-testament-of-sister-new-devil\data
|
||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-testament-of-sister-new-devil\data for the-testament-of-sister-new-devil
|
||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-unwanted-undead-adventurer\data
|
||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-unwanted-undead-adventurer\data for the-unwanted-undead-adventurer
|
||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-water-magician\data
|
||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-water-magician\data for the-water-magician
|
||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-worlds-finest-assassin-gets-reincarnated-in-another-world-as-an-aristocrat\data
|
||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-worlds-finest-assassin-gets-reincarnated-in-another-world-as-an-aristocrat\data for the-worlds-finest-assassin-gets-reincarnated-in-another-world-as-an-aristocrat
|
||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-wrong-way-to-use-healing-magic\data
|
||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-wrong-way-to-use-healing-magic\data for the-wrong-way-to-use-healing-magic
|
||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\theres-no-freaking-way-ill-be-your-lover-unless\data
|
||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\theres-no-freaking-way-ill-be-your-lover-unless\data for theres-no-freaking-way-ill-be-your-lover-unless
|
||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\to-be-hero-x\data
|
||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\to-be-hero-x\data for to-be-hero-x
|
||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\tougen-anki\data
|
||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\tougen-anki\data for tougen-anki
|
||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\uglymug-epicfighter\data
|
||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\uglymug-epicfighter\data for uglymug-epicfighter
|
||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\valkyrie-drive-mermaid\data
|
||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\valkyrie-drive-mermaid\data for valkyrie-drive-mermaid
|
||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\wandering-witch-the-journey-of-elaina\data
|
||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\wandering-witch-the-journey-of-elaina\data for wandering-witch-the-journey-of-elaina
|
||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\war-god-system-im-counting-on-you\data
|
||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\war-god-system-im-counting-on-you\data for war-god-system-im-counting-on-you
|
||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\welcome-to-japan-ms-elf\data
|
||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\welcome-to-japan-ms-elf\data for welcome-to-japan-ms-elf
|
||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\welcome-to-the-outcasts-restaurant\data
|
||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\welcome-to-the-outcasts-restaurant\data for welcome-to-the-outcasts-restaurant
|
||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\yandere-dark-elf-she-chased-me-all-the-way-from-another-world\data
|
||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\yandere-dark-elf-she-chased-me-all-the-way-from-another-world\data for yandere-dark-elf-she-chased-me-all-the-way-from-another-world
|
||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Übel Blatt (2025)\data
|
||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Übel Blatt (2025)\data for Übel Blatt (2025)
|
||||
2025-09-29 20:23:13 - INFO - __main__ - <module> - Enhanced logging system initialized
2025-09-29 20:23:13 - INFO - __main__ - <module> - Starting Aniworld Flask server...
2025-09-29 20:23:13 - INFO - __main__ - <module> - Anime directory: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien
2025-09-29 20:23:13 - INFO - __main__ - <module> - Log level: INFO
2025-09-29 20:23:13 - INFO - __main__ - <module> - Scheduled operations disabled
2025-09-29 20:23:13 - INFO - __main__ - <module> - Server will be available at http://localhost:5000
2025-09-29 20:23:16 - INFO - __main__ - <module> - Enhanced logging system initialized
2025-09-29 20:23:16 - INFO - root - __init__ - Initialized Loader with base path: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien
2025-09-29 20:23:16 - INFO - root - load_series - Scanning anime folders in: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien
2025-09-29 20:23:16 - ERROR - root - init_series_app - Error initializing SeriesApp:
Traceback (most recent call last):
  File "D:\repo\Aniworld/src/server/app.py", line 145, in init_series_app
    series_app = SeriesApp(directory_to_search)
                 ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "D:\repo\Aniworld\src\Main.py", line 54, in __init__
    self.List = SerieList(self.directory_to_search)
                ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "D:\repo\Aniworld\src\server\core\entities\SerieList.py", line 9, in __init__
    self.load_series()
  File "D:\repo\Aniworld\src\server\core\entities\SerieList.py", line 29, in load_series
    for anime_folder in os.listdir(self.directory):
                        ^^^^^^^^^^^^^^^^^^^^^^^^^^
FileNotFoundError: [WinError 53] Der Netzwerkpfad wurde nicht gefunden: '\\\\sshfs.r\\ubuntu@192.168.178.43\\media\\serien\\Serien'
2025-09-29 20:23:16 - WARNING - werkzeug - _log - * Debugger is active!
2025-09-29 20:33:06 - DEBUG - schedule - clear - Deleting *all* jobs
2025-09-29 20:33:06 - INFO - application.services.scheduler_service - stop_scheduler - Scheduled operations stopped
2025-09-29 20:33:06 - INFO - __main__ - <module> - Scheduler stopped
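The FileNotFoundError above is the unguarded os.listdir call failing while the SSHFS share is offline. A minimal defensive sketch of the same initialization step (init_series_app and SeriesApp are the names taken from the log; the isdir guard and the import path are assumptions added for illustration):

import os

from src.Main import SeriesApp   # import path assumed from the traceback paths

def init_series_app(directory_to_search: str) -> SeriesApp:
    # fail fast with a readable message instead of letting os.listdir raise WinError 53
    if not os.path.isdir(directory_to_search):
        raise RuntimeError(f"Anime directory not reachable: {directory_to_search}")
    return SeriesApp(directory_to_search)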
30 src/config/settings.py (new file)
@ -0,0 +1,30 @@
from typing import Optional

from pydantic import Field
from pydantic_settings import BaseSettings


class Settings(BaseSettings):
    """Application settings from environment variables."""
    jwt_secret_key: str = Field(default="your-secret-key-here", env="JWT_SECRET_KEY")
    password_salt: str = Field(default="default-salt", env="PASSWORD_SALT")
    master_password_hash: Optional[str] = Field(default=None, env="MASTER_PASSWORD_HASH")
    master_password: Optional[str] = Field(default=None, env="MASTER_PASSWORD")  # For development
    token_expiry_hours: int = Field(default=24, env="SESSION_TIMEOUT_HOURS")
    anime_directory: str = Field(default="", env="ANIME_DIRECTORY")
    log_level: str = Field(default="INFO", env="LOG_LEVEL")

    # Additional settings from .env
    database_url: str = Field(default="sqlite:///./data/aniworld.db", env="DATABASE_URL")
    cors_origins: str = Field(default="*", env="CORS_ORIGINS")
    api_rate_limit: int = Field(default=100, env="API_RATE_LIMIT")
    default_provider: str = Field(default="aniworld.to", env="DEFAULT_PROVIDER")
    provider_timeout: int = Field(default=30, env="PROVIDER_TIMEOUT")
    retry_attempts: int = Field(default=3, env="RETRY_ATTEMPTS")

    class Config:
        env_file = ".env"
        extra = "ignore"


settings = Settings()
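The new settings module above reads its values from the process environment and an optional .env file. A minimal sketch of consuming it elsewhere in the server (the import path mirrors the file location above; the printed fields are the ones defined there):

from src.config.settings import settings

# values fall back to the defaults above when the variables are absent
print(settings.log_level)         # "INFO" unless LOG_LEVEL is set
print(settings.database_url)      # DATABASE_URL or the sqlite default
print(settings.anime_directory or "<ANIME_DIRECTORY not configured>")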
@ -1,131 +1,131 @@
|
||||
import os
|
||||
import re
|
||||
import logging
|
||||
from .entities.series import Serie
|
||||
import traceback
|
||||
from ..infrastructure.logging.GlobalLogger import error_logger, noKeyFound_logger
|
||||
from .exceptions.Exceptions import NoKeyFoundException, MatchNotFoundError
|
||||
from .providers.base_provider import Loader
|
||||
|
||||
|
||||
class SerieScanner:
|
||||
def __init__(self, basePath: str, loader: Loader):
|
||||
self.directory = basePath
|
||||
self.folderDict: dict[str, Serie] = {} # Proper initialization
|
||||
self.loader = loader
|
||||
logging.info(f"Initialized Loader with base path: {self.directory}")
|
||||
|
||||
def Reinit(self):
|
||||
self.folderDict: dict[str, Serie] = {} # Proper initialization
|
||||
|
||||
|
||||
def is_null_or_whitespace(self, s):
|
||||
return s is None or s.strip() == ""
|
||||
|
||||
def GetTotalToScan(self):
|
||||
result = self.__find_mp4_files()
|
||||
return sum(1 for _ in result)
|
||||
|
||||
def Scan(self, callback):
|
||||
logging.info("Starting process to load missing episodes")
|
||||
result = self.__find_mp4_files()
|
||||
counter = 0
|
||||
for folder, mp4_files in result:
|
||||
try:
|
||||
counter += 1
|
||||
callback(folder, counter)
|
||||
serie = self.__ReadDataFromFile(folder)
|
||||
if serie is not None and not self.is_null_or_whitespace(serie.key):
|
||||
missings, site = self.__GetMissingEpisodesAndSeason(serie.key, mp4_files)
|
||||
serie.episodeDict = missings
|
||||
serie.folder = folder
|
||||
serie.save_to_file(os.path.join(os.path.join(self.directory, folder), 'data'))
|
||||
if (serie.key in self.folderDict):
|
||||
logging.ERROR(f"dublication found: {serie.key}");
|
||||
pass
|
||||
self.folderDict[serie.key] = serie
|
||||
noKeyFound_logger.info(f"Saved Serie: '{str(serie)}'")
|
||||
except NoKeyFoundException as nkfe:
|
||||
NoKeyFoundException.error(f"Error processing folder '{folder}': {nkfe}")
|
||||
except Exception as e:
|
||||
error_logger.error(f"Folder: '{folder}' - Unexpected error processing folder '{folder}': {e} \n {traceback.format_exc()}")
|
||||
continue
|
||||
|
||||
|
||||
def __find_mp4_files(self):
|
||||
logging.info("Scanning for .mp4 files")
|
||||
for anime_name in os.listdir(self.directory):
|
||||
anime_path = os.path.join(self.directory, anime_name)
|
||||
if os.path.isdir(anime_path):
|
||||
mp4_files = []
|
||||
has_files = False
|
||||
for root, _, files in os.walk(anime_path):
|
||||
for file in files:
|
||||
if file.endswith(".mp4"):
|
||||
mp4_files.append(os.path.join(root, file))
|
||||
has_files = True
|
||||
yield anime_name, mp4_files if has_files else []
|
||||
|
||||
def __remove_year(self, input_string: str):
|
||||
cleaned_string = re.sub(r'\(\d{4}\)', '', input_string).strip()
|
||||
logging.debug(f"Removed year from '{input_string}' -> '{cleaned_string}'")
|
||||
return cleaned_string
|
||||
|
||||
def __ReadDataFromFile(self, folder_name: str):
|
||||
folder_path = os.path.join(self.directory, folder_name)
|
||||
key = None
|
||||
key_file = os.path.join(folder_path, 'key')
|
||||
serie_file = os.path.join(folder_path, 'data')
|
||||
|
||||
if os.path.exists(key_file):
|
||||
with open(key_file, 'r') as file:
|
||||
key = file.read().strip()
|
||||
logging.info(f"Key found for folder '{folder_name}': {key}")
|
||||
return Serie(key, "", "aniworld.to", folder_name, dict())
|
||||
|
||||
if os.path.exists(serie_file):
|
||||
with open(serie_file, "rb") as file:
|
||||
logging.info(f"load serie_file from '{folder_name}': {serie_file}")
|
||||
return Serie.load_from_file(serie_file)
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def __GetEpisodeAndSeason(self, filename: str):
|
||||
pattern = r'S(\d+)E(\d+)'
|
||||
match = re.search(pattern, filename)
|
||||
if match:
|
||||
season = match.group(1)
|
||||
episode = match.group(2)
|
||||
logging.debug(f"Extracted season {season}, episode {episode} from '{filename}'")
|
||||
return int(season), int(episode)
|
||||
else:
|
||||
logging.error(f"Failed to find season/episode pattern in '{filename}'")
|
||||
raise MatchNotFoundError("Season and episode pattern not found in the filename.")
|
||||
|
||||
def __GetEpisodesAndSeasons(self, mp4_files: list):
|
||||
episodes_dict = {}
|
||||
|
||||
for file in mp4_files:
|
||||
season, episode = self.__GetEpisodeAndSeason(file)
|
||||
|
||||
if season in episodes_dict:
|
||||
episodes_dict[season].append(episode)
|
||||
else:
|
||||
episodes_dict[season] = [episode]
|
||||
return episodes_dict
|
||||
|
||||
def __GetMissingEpisodesAndSeason(self, key: str, mp4_files: list):
|
||||
expected_dict = self.loader.get_season_episode_count(key) # key season , value count of episodes
|
||||
filedict = self.__GetEpisodesAndSeasons(mp4_files)
|
||||
episodes_dict = {}
|
||||
for season, expected_count in expected_dict.items():
|
||||
existing_episodes = filedict.get(season, [])
|
||||
missing_episodes = [ep for ep in range(1, expected_count + 1) if ep not in existing_episodes and self.loader.IsLanguage(season, ep, key)]
|
||||
|
||||
if missing_episodes:
|
||||
episodes_dict[season] = missing_episodes
|
||||
|
||||
return episodes_dict, "aniworld.to"
|
||||
|
||||
|
||||
import os
|
||||
import re
|
||||
import logging
|
||||
from .entities.series import Serie
|
||||
import traceback
|
||||
from ..infrastructure.logging.GlobalLogger import error_logger, noKeyFound_logger
|
||||
from .exceptions.Exceptions import NoKeyFoundException, MatchNotFoundError
|
||||
from .providers.base_provider import Loader
|
||||
|
||||
|
||||
class SerieScanner:
|
||||
def __init__(self, basePath: str, loader: Loader):
|
||||
self.directory = basePath
|
||||
self.folderDict: dict[str, Serie] = {} # Proper initialization
|
||||
self.loader = loader
|
||||
logging.info(f"Initialized Loader with base path: {self.directory}")
|
||||
|
||||
def Reinit(self):
|
||||
self.folderDict: dict[str, Serie] = {} # Proper initialization
|
||||
|
||||
|
||||
def is_null_or_whitespace(self, s):
|
||||
return s is None or s.strip() == ""
|
||||
|
||||
def GetTotalToScan(self):
|
||||
result = self.__find_mp4_files()
|
||||
return sum(1 for _ in result)
|
||||
|
||||
def Scan(self, callback):
|
||||
logging.info("Starting process to load missing episodes")
|
||||
result = self.__find_mp4_files()
|
||||
counter = 0
|
||||
for folder, mp4_files in result:
|
||||
try:
|
||||
counter += 1
|
||||
callback(folder, counter)
|
||||
serie = self.__ReadDataFromFile(folder)
|
||||
if serie is not None and not self.is_null_or_whitespace(serie.key):
|
||||
missings, site = self.__GetMissingEpisodesAndSeason(serie.key, mp4_files)
|
||||
serie.episodeDict = missings
|
||||
serie.folder = folder
|
||||
serie.save_to_file(os.path.join(os.path.join(self.directory, folder), 'data'))
|
||||
if (serie.key in self.folderDict):
|
||||
logging.ERROR(f"dublication found: {serie.key}");
|
||||
pass
|
||||
self.folderDict[serie.key] = serie
|
||||
noKeyFound_logger.info(f"Saved Serie: '{str(serie)}'")
|
||||
except NoKeyFoundException as nkfe:
|
||||
NoKeyFoundException.error(f"Error processing folder '{folder}': {nkfe}")
|
||||
except Exception as e:
|
||||
error_logger.error(f"Folder: '{folder}' - Unexpected error processing folder '{folder}': {e} \n {traceback.format_exc()}")
|
||||
continue
|
||||
|
||||
|
||||
def __find_mp4_files(self):
|
||||
logging.info("Scanning for .mp4 files")
|
||||
for anime_name in os.listdir(self.directory):
|
||||
anime_path = os.path.join(self.directory, anime_name)
|
||||
if os.path.isdir(anime_path):
|
||||
mp4_files = []
|
||||
has_files = False
|
||||
for root, _, files in os.walk(anime_path):
|
||||
for file in files:
|
||||
if file.endswith(".mp4"):
|
||||
mp4_files.append(os.path.join(root, file))
|
||||
has_files = True
|
||||
yield anime_name, mp4_files if has_files else []
|
||||
|
||||
def __remove_year(self, input_string: str):
|
||||
cleaned_string = re.sub(r'\(\d{4}\)', '', input_string).strip()
|
||||
logging.debug(f"Removed year from '{input_string}' -> '{cleaned_string}'")
|
||||
return cleaned_string
|
||||
|
||||
def __ReadDataFromFile(self, folder_name: str):
|
||||
folder_path = os.path.join(self.directory, folder_name)
|
||||
key = None
|
||||
key_file = os.path.join(folder_path, 'key')
|
||||
serie_file = os.path.join(folder_path, 'data')
|
||||
|
||||
if os.path.exists(key_file):
|
||||
with open(key_file, 'r') as file:
|
||||
key = file.read().strip()
|
||||
logging.info(f"Key found for folder '{folder_name}': {key}")
|
||||
return Serie(key, "", "aniworld.to", folder_name, dict())
|
||||
|
||||
if os.path.exists(serie_file):
|
||||
with open(serie_file, "rb") as file:
|
||||
logging.info(f"load serie_file from '{folder_name}': {serie_file}")
|
||||
return Serie.load_from_file(serie_file)
|
||||
|
||||
return None
|
||||
|
||||
|
||||
def __GetEpisodeAndSeason(self, filename: str):
|
||||
pattern = r'S(\d+)E(\d+)'
|
||||
match = re.search(pattern, filename)
|
||||
if match:
|
||||
season = match.group(1)
|
||||
episode = match.group(2)
|
||||
logging.debug(f"Extracted season {season}, episode {episode} from '{filename}'")
|
||||
return int(season), int(episode)
|
||||
else:
|
||||
logging.error(f"Failed to find season/episode pattern in '{filename}'")
|
||||
raise MatchNotFoundError("Season and episode pattern not found in the filename.")
|
||||
|
||||
def __GetEpisodesAndSeasons(self, mp4_files: list):
|
||||
episodes_dict = {}
|
||||
|
||||
for file in mp4_files:
|
||||
season, episode = self.__GetEpisodeAndSeason(file)
|
||||
|
||||
if season in episodes_dict:
|
||||
episodes_dict[season].append(episode)
|
||||
else:
|
||||
episodes_dict[season] = [episode]
|
||||
return episodes_dict
|
||||
|
||||
def __GetMissingEpisodesAndSeason(self, key: str, mp4_files: list):
|
||||
expected_dict = self.loader.get_season_episode_count(key) # key season , value count of episodes
|
||||
filedict = self.__GetEpisodesAndSeasons(mp4_files)
|
||||
episodes_dict = {}
|
||||
for season, expected_count in expected_dict.items():
|
||||
existing_episodes = filedict.get(season, [])
|
||||
missing_episodes = [ep for ep in range(1, expected_count + 1) if ep not in existing_episodes and self.loader.IsLanguage(season, ep, key)]
|
||||
|
||||
if missing_episodes:
|
||||
episodes_dict[season] = missing_episodes
|
||||
|
||||
return episodes_dict, "aniworld.to"
|
||||
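The scanner pulls season and episode numbers out of the SxxEyy token in each file name and then diffs them against the provider's expected counts. A self-contained sketch of that matching step, using the same regex as __GetEpisodeAndSeason (the file names are made up for illustration):

import re

PATTERN = re.compile(r'S(\d+)E(\d+)')   # same pattern as above

def season_episode(filename: str) -> tuple[int, int]:
    match = PATTERN.search(filename)
    if match is None:
        raise ValueError(f"no SxxEyy token in {filename!r}")
    return int(match.group(1)), int(match.group(2))

files = ["Example - S01E001 - (German Dub).mp4", "Example - S01E003 - (German Dub).mp4"]
present = {season_episode(f) for f in files}
expected = {(1, ep) for ep in range(1, 4)}    # pretend season 1 has 3 episodes
print(sorted(expected - present))             # [(1, 2)] -> episode 2 is missing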
|
||||
|
||||
|
||||
@ -1,38 +1,38 @@
from src.core.entities.SerieList import SerieList
from src.core.providers.provider_factory import Loaders
from src.core.SerieScanner import SerieScanner


class SeriesApp:
    _initialization_count = 0

    def __init__(self, directory_to_search: str):
        SeriesApp._initialization_count += 1
        # Only show the initialization message for the first instance
        if SeriesApp._initialization_count <= 1:
            print("Please wait while initializing...")

        self.progress = None
        self.directory_to_search = directory_to_search
        self.Loaders = Loaders()
        self.loader = self.Loaders.GetLoader(key="aniworld.to")
        self.SerieScanner = SerieScanner(directory_to_search, self.loader)

        self.List = SerieList(self.directory_to_search)
        self.__InitList__()

    def __InitList__(self):
        self.series_list = self.List.GetMissingEpisode()

    def search(self, words: str) -> list:
        return self.loader.Search(words)

    def download(self, serieFolder: str, season: int, episode: int, key: str, callback) -> bool:
        return self.loader.Download(self.directory_to_search, serieFolder, season, episode, key, "German Dub", callback)

    def ReScan(self, callback):
        self.SerieScanner.Reinit()
        self.SerieScanner.Scan(callback)

        self.List = SerieList(self.directory_to_search)
        self.__InitList__()
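A sketch of driving SeriesApp from a script (the import path is taken from the traceback earlier in this section; the share path and the callback are illustrative):

from src.Main import SeriesApp

def on_progress(folder: str, count: int) -> None:
    print(f"[{count}] scanning {folder}")

app = SeriesApp(r"\\server\media\Serien")     # hypothetical library root
app.ReScan(on_progress)
for serie in app.series_list:
    print(serie.key, serie.episodeDict)        # seasons mapped to their missing episodes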
@ -1,12 +1,12 @@
"""
Core module for AniWorld application.
Contains domain entities, interfaces, application services, and exceptions.
"""

from . import entities
from . import exceptions
from . import interfaces
from . import application
from . import providers


"""
Core module for AniWorld application.
Contains domain entities, interfaces, application services, and exceptions.
"""

from . import entities
from . import exceptions
from . import interfaces
from . import application
from . import providers

__all__ = ['entities', 'exceptions', 'interfaces', 'application', 'providers']
@ -1,10 +0,0 @@
"""
Configuration package for the Aniworld server.

This package provides configuration management and environment
variable handling for secure application deployment.
"""

from .env_config import EnvironmentConfig, env_config

__all__ = ['EnvironmentConfig', 'env_config']
@ -1,217 +0,0 @@
|
||||
"""
|
||||
Environment configuration for secure handling of sensitive data.
|
||||
|
||||
This module provides secure environment variable handling and configuration
|
||||
management for the Aniworld server application.
|
||||
"""
|
||||
|
||||
import os
|
||||
import secrets
|
||||
from typing import Optional, Dict, Any
|
||||
from dotenv import load_dotenv
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Load environment variables from .env file
|
||||
load_dotenv()
|
||||
|
||||
|
||||
class EnvironmentConfig:
|
||||
"""Manages environment variables and secure configuration."""
|
||||
|
||||
# Security
|
||||
SECRET_KEY: str = os.getenv('SECRET_KEY', secrets.token_urlsafe(32))
|
||||
JWT_SECRET_KEY: str = os.getenv('JWT_SECRET_KEY', secrets.token_urlsafe(32))
|
||||
PASSWORD_SALT: str = os.getenv('PASSWORD_SALT', secrets.token_hex(32))
|
||||
|
||||
# Database
|
||||
DATABASE_URL: str = os.getenv('DATABASE_URL', 'sqlite:///data/aniworld.db')
|
||||
DATABASE_PASSWORD: Optional[str] = os.getenv('DATABASE_PASSWORD')
|
||||
|
||||
# Redis (for caching and sessions)
|
||||
REDIS_URL: str = os.getenv('REDIS_URL', 'redis://localhost:6379/0')
|
||||
REDIS_PASSWORD: Optional[str] = os.getenv('REDIS_PASSWORD')
|
||||
|
||||
# API Keys and External Services
|
||||
ANIME_PROVIDER_API_KEY: Optional[str] = os.getenv('ANIME_PROVIDER_API_KEY')
|
||||
TMDB_API_KEY: Optional[str] = os.getenv('TMDB_API_KEY')
|
||||
|
||||
# Email Configuration (for password reset)
|
||||
SMTP_SERVER: str = os.getenv('SMTP_SERVER', 'localhost')
|
||||
SMTP_PORT: int = int(os.getenv('SMTP_PORT', '587'))
|
||||
SMTP_USERNAME: Optional[str] = os.getenv('SMTP_USERNAME')
|
||||
SMTP_PASSWORD: Optional[str] = os.getenv('SMTP_PASSWORD')
|
||||
SMTP_USE_TLS: bool = os.getenv('SMTP_USE_TLS', 'true').lower() == 'true'
|
||||
FROM_EMAIL: str = os.getenv('FROM_EMAIL', 'noreply@aniworld.local')
|
||||
|
||||
# Security Settings
|
||||
SESSION_TIMEOUT_HOURS: int = int(os.getenv('SESSION_TIMEOUT_HOURS', '24'))
|
||||
MAX_FAILED_LOGIN_ATTEMPTS: int = int(os.getenv('MAX_FAILED_LOGIN_ATTEMPTS', '5'))
|
||||
LOCKOUT_DURATION_MINUTES: int = int(os.getenv('LOCKOUT_DURATION_MINUTES', '30'))
|
||||
|
||||
# Rate Limiting
|
||||
RATE_LIMIT_PER_MINUTE: int = int(os.getenv('RATE_LIMIT_PER_MINUTE', '60'))
|
||||
API_RATE_LIMIT_PER_MINUTE: int = int(os.getenv('API_RATE_LIMIT_PER_MINUTE', '100'))
|
||||
|
||||
# Application Settings
|
||||
DEBUG: bool = os.getenv('DEBUG', 'false').lower() == 'true'
|
||||
HOST: str = os.getenv('HOST', '127.0.0.1')
|
||||
PORT: int = int(os.getenv('PORT', '5000'))
|
||||
|
||||
# Anime Directory and Download Settings
|
||||
ANIME_DIRECTORY: str = os.getenv('ANIME_DIRECTORY', './downloads')
|
||||
MAX_CONCURRENT_DOWNLOADS: int = int(os.getenv('MAX_CONCURRENT_DOWNLOADS', '3'))
|
||||
DOWNLOAD_SPEED_LIMIT: Optional[int] = int(os.getenv('DOWNLOAD_SPEED_LIMIT', '0')) or None
|
||||
|
||||
# Logging
|
||||
LOG_LEVEL: str = os.getenv('LOG_LEVEL', 'INFO')
|
||||
LOG_FILE: str = os.getenv('LOG_FILE', './logs/aniworld.log')
|
||||
|
||||
@classmethod
|
||||
def get_database_config(cls) -> Dict[str, Any]:
|
||||
"""Get database configuration."""
|
||||
return {
|
||||
'url': cls.DATABASE_URL,
|
||||
'password': cls.DATABASE_PASSWORD,
|
||||
'pool_size': int(os.getenv('DATABASE_POOL_SIZE', '10')),
|
||||
'max_overflow': int(os.getenv('DATABASE_MAX_OVERFLOW', '20')),
|
||||
'pool_timeout': int(os.getenv('DATABASE_POOL_TIMEOUT', '30')),
|
||||
'pool_recycle': int(os.getenv('DATABASE_POOL_RECYCLE', '3600'))
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def get_redis_config(cls) -> Dict[str, Any]:
|
||||
"""Get Redis configuration."""
|
||||
return {
|
||||
'url': cls.REDIS_URL,
|
||||
'password': cls.REDIS_PASSWORD,
|
||||
'max_connections': int(os.getenv('REDIS_MAX_CONNECTIONS', '10')),
|
||||
'retry_on_timeout': True,
|
||||
'socket_timeout': int(os.getenv('REDIS_SOCKET_TIMEOUT', '5'))
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def get_email_config(cls) -> Dict[str, Any]:
|
||||
"""Get email configuration."""
|
||||
return {
|
||||
'server': cls.SMTP_SERVER,
|
||||
'port': cls.SMTP_PORT,
|
||||
'username': cls.SMTP_USERNAME,
|
||||
'password': cls.SMTP_PASSWORD,
|
||||
'use_tls': cls.SMTP_USE_TLS,
|
||||
'from_email': cls.FROM_EMAIL
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def get_security_config(cls) -> Dict[str, Any]:
|
||||
"""Get security configuration."""
|
||||
return {
|
||||
'secret_key': cls.SECRET_KEY,
|
||||
'jwt_secret_key': cls.JWT_SECRET_KEY,
|
||||
'password_salt': cls.PASSWORD_SALT,
|
||||
'session_timeout_hours': cls.SESSION_TIMEOUT_HOURS,
|
||||
'max_failed_attempts': cls.MAX_FAILED_LOGIN_ATTEMPTS,
|
||||
'lockout_duration_minutes': cls.LOCKOUT_DURATION_MINUTES,
|
||||
'rate_limit_per_minute': cls.RATE_LIMIT_PER_MINUTE,
|
||||
'api_rate_limit_per_minute': cls.API_RATE_LIMIT_PER_MINUTE
|
||||
}
|
||||
|
||||
@classmethod
|
||||
def validate_config(cls) -> bool:
|
||||
"""Validate that required configuration is present."""
|
||||
required_vars = [
|
||||
'SECRET_KEY',
|
||||
'JWT_SECRET_KEY',
|
||||
'PASSWORD_SALT'
|
||||
]
|
||||
|
||||
missing_vars = []
|
||||
for var in required_vars:
|
||||
if not getattr(cls, var):
|
||||
missing_vars.append(var)
|
||||
|
||||
if missing_vars:
|
||||
logger.error(f"Missing required environment variables: {missing_vars}")
|
||||
return False
|
||||
|
||||
return True
|
||||
|
||||
@classmethod
|
||||
def generate_env_template(cls, file_path: str = '.env.template') -> bool:
|
||||
"""Generate a template .env file with all available configuration options."""
|
||||
try:
|
||||
template_content = """# Aniworld Server Environment Configuration
|
||||
# Copy this file to .env and fill in your values
|
||||
|
||||
# Security (REQUIRED - Generate secure random values)
|
||||
SECRET_KEY=your_secret_key_here
|
||||
JWT_SECRET_KEY=your_jwt_secret_here
|
||||
PASSWORD_SALT=your_password_salt_here
|
||||
|
||||
# Database Configuration
|
||||
DATABASE_URL=sqlite:///data/aniworld.db
|
||||
# DATABASE_PASSWORD=your_db_password_here
|
||||
DATABASE_POOL_SIZE=10
|
||||
DATABASE_MAX_OVERFLOW=20
|
||||
DATABASE_POOL_TIMEOUT=30
|
||||
DATABASE_POOL_RECYCLE=3600
|
||||
|
||||
# Redis Configuration (for caching and sessions)
|
||||
REDIS_URL=redis://localhost:6379/0
|
||||
# REDIS_PASSWORD=your_redis_password_here
|
||||
REDIS_MAX_CONNECTIONS=10
|
||||
REDIS_SOCKET_TIMEOUT=5
|
||||
|
||||
# Email Configuration (for password reset emails)
|
||||
SMTP_SERVER=localhost
|
||||
SMTP_PORT=587
|
||||
# SMTP_USERNAME=your_smtp_username
|
||||
# SMTP_PASSWORD=your_smtp_password
|
||||
SMTP_USE_TLS=true
|
||||
FROM_EMAIL=noreply@aniworld.local
|
||||
|
||||
# External API Keys
|
||||
# ANIME_PROVIDER_API_KEY=your_anime_provider_api_key
|
||||
# TMDB_API_KEY=your_tmdb_api_key
|
||||
|
||||
# Security Settings
|
||||
SESSION_TIMEOUT_HOURS=24
|
||||
MAX_FAILED_LOGIN_ATTEMPTS=5
|
||||
LOCKOUT_DURATION_MINUTES=30
|
||||
|
||||
# Rate Limiting
|
||||
RATE_LIMIT_PER_MINUTE=60
|
||||
API_RATE_LIMIT_PER_MINUTE=100
|
||||
|
||||
# Application Settings
|
||||
DEBUG=false
|
||||
HOST=127.0.0.1
|
||||
PORT=5000
|
||||
|
||||
# Anime and Download Settings
|
||||
ANIME_DIRECTORY=./downloads
|
||||
MAX_CONCURRENT_DOWNLOADS=3
|
||||
# DOWNLOAD_SPEED_LIMIT=1000000 # bytes per second
|
||||
|
||||
# Logging
|
||||
LOG_LEVEL=INFO
|
||||
LOG_FILE=./logs/aniworld.log
|
||||
"""
|
||||
|
||||
with open(file_path, 'w', encoding='utf-8') as f:
|
||||
f.write(template_content)
|
||||
|
||||
logger.info(f"Environment template created at {file_path}")
|
||||
return True
|
||||
except Exception as e:
|
||||
logger.error(f"Error creating environment template: {e}")
|
||||
return False
|
||||
|
||||
|
||||
# Create global instance
|
||||
env_config = EnvironmentConfig()
|
||||
|
||||
# Validate configuration on import
|
||||
if not env_config.validate_config():
|
||||
logger.warning("Invalid environment configuration detected. Please check your .env file.")
|
||||
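Because the fallbacks above call secrets.token_urlsafe / secrets.token_hex at import time, an unset .env means fresh secrets on every restart, which invalidates existing sessions. A small sketch for generating stable values once and pasting them into .env (it mirrors the same secrets calls used above):

import secrets

values = {
    "SECRET_KEY": secrets.token_urlsafe(32),
    "JWT_SECRET_KEY": secrets.token_urlsafe(32),
    "PASSWORD_SALT": secrets.token_hex(32),
}
for name, value in values.items():
    print(f"{name}={value}")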
@ -1,56 +1,56 @@
|
||||
import os
|
||||
import json
|
||||
import logging
|
||||
from .series import Serie
|
||||
class SerieList:
|
||||
def __init__(self, basePath: str):
|
||||
self.directory = basePath
|
||||
self.folderDict: dict[str, Serie] = {} # Proper initialization
|
||||
self.load_series()
|
||||
|
||||
def add(self, serie: Serie):
|
||||
if (not self.contains(serie.key)):
|
||||
dataPath = os.path.join(self.directory, serie.folder, "data")
|
||||
animePath = os.path.join(self.directory, serie.folder)
|
||||
os.makedirs(animePath, exist_ok=True)
|
||||
if not os.path.isfile(dataPath):
|
||||
serie.save_to_file(dataPath)
|
||||
self.folderDict[serie.folder] = serie;
|
||||
|
||||
def contains(self, key: str) -> bool:
|
||||
for k, value in self.folderDict.items():
|
||||
if value.key == key:
|
||||
return True
|
||||
return False
|
||||
|
||||
def load_series(self):
|
||||
""" Scan folders and load data files """
|
||||
logging.info(f"Scanning anime folders in: {self.directory}")
|
||||
for anime_folder in os.listdir(self.directory):
|
||||
anime_path = os.path.join(self.directory, anime_folder, "data")
|
||||
if os.path.isfile(anime_path):
|
||||
logging.debug(f"Found data folder: {anime_path}")
|
||||
self.load_data(anime_folder, anime_path)
|
||||
else:
|
||||
logging.warning(f"Skipping {anime_folder} - No data folder found")
|
||||
|
||||
def load_data(self, anime_folder, data_path):
|
||||
""" Load pickle files from the data folder """
|
||||
try:
|
||||
self.folderDict[anime_folder] = Serie.load_from_file(data_path)
|
||||
logging.debug(f"Successfully loaded {data_path} for {anime_folder}")
|
||||
except Exception as e:
|
||||
logging.error(f"Failed to load {data_path} in {anime_folder}: {e}")
|
||||
|
||||
def GetMissingEpisode(self):
|
||||
"""Find all series with a non-empty episodeDict"""
|
||||
return [serie for serie in self.folderDict.values() if len(serie.episodeDict) > 0]
|
||||
|
||||
def GetList(self):
|
||||
"""Get all series in the list"""
|
||||
return list(self.folderDict.values())
|
||||
|
||||
|
||||
#k = AnimeList("\\\\sshfs.r\\ubuntu@192.168.178.43\\media\\serien\\Serien")
|
||||
#bbabab = k.GetMissingEpisode()
|
||||
import os
|
||||
import json
|
||||
import logging
|
||||
from .series import Serie
|
||||
class SerieList:
|
||||
def __init__(self, basePath: str):
|
||||
self.directory = basePath
|
||||
self.folderDict: dict[str, Serie] = {} # Proper initialization
|
||||
self.load_series()
|
||||
|
||||
def add(self, serie: Serie):
|
||||
if (not self.contains(serie.key)):
|
||||
dataPath = os.path.join(self.directory, serie.folder, "data")
|
||||
animePath = os.path.join(self.directory, serie.folder)
|
||||
os.makedirs(animePath, exist_ok=True)
|
||||
if not os.path.isfile(dataPath):
|
||||
serie.save_to_file(dataPath)
|
||||
self.folderDict[serie.folder] = serie;
|
||||
|
||||
def contains(self, key: str) -> bool:
|
||||
for k, value in self.folderDict.items():
|
||||
if value.key == key:
|
||||
return True
|
||||
return False
|
||||
|
||||
def load_series(self):
|
||||
""" Scan folders and load data files """
|
||||
logging.info(f"Scanning anime folders in: {self.directory}")
|
||||
for anime_folder in os.listdir(self.directory):
|
||||
anime_path = os.path.join(self.directory, anime_folder, "data")
|
||||
if os.path.isfile(anime_path):
|
||||
logging.debug(f"Found data folder: {anime_path}")
|
||||
self.load_data(anime_folder, anime_path)
|
||||
else:
|
||||
logging.warning(f"Skipping {anime_folder} - No data folder found")
|
||||
|
||||
def load_data(self, anime_folder, data_path):
|
||||
""" Load pickle files from the data folder """
|
||||
try:
|
||||
self.folderDict[anime_folder] = Serie.load_from_file(data_path)
|
||||
logging.debug(f"Successfully loaded {data_path} for {anime_folder}")
|
||||
except Exception as e:
|
||||
logging.error(f"Failed to load {data_path} in {anime_folder}: {e}")
|
||||
|
||||
def GetMissingEpisode(self):
|
||||
"""Find all series with a non-empty episodeDict"""
|
||||
return [serie for serie in self.folderDict.values() if len(serie.episodeDict) > 0]
|
||||
|
||||
def GetList(self):
|
||||
"""Get all series in the list"""
|
||||
return list(self.folderDict.values())
|
||||
|
||||
|
||||
#k = AnimeList("\\\\sshfs.r\\ubuntu@192.168.178.43\\media\\serien\\Serien")
|
||||
#bbabab = k.GetMissingEpisode()
|
||||
#print(bbabab)
|
||||
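SerieList expects one folder per series under the base path, each containing a JSON data file written by Serie.save_to_file. A usage sketch (the import path follows the traceback above; the base path is illustrative):

from src.server.core.entities.SerieList import SerieList

series = SerieList(r"D:\media\Serien")
for serie in series.GetMissingEpisode():
    # episodeDict maps season number -> list of missing episode numbers
    print(serie.folder, serie.episodeDict)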
@ -1,82 +1,82 @@
|
||||
import json
|
||||
|
||||
class Serie:
|
||||
def __init__(self, key: str, name: str, site: str, folder: str, episodeDict: dict[int, list[int]]):
|
||||
self._key = key
|
||||
self._name = name
|
||||
self._site = site
|
||||
self._folder = folder
|
||||
self._episodeDict = episodeDict
|
||||
def __str__(self):
|
||||
"""String representation of Serie object"""
|
||||
return f"Serie(key='{self.key}', name='{self.name}', site='{self.site}', folder='{self.folder}', episodeDict={self.episodeDict})"
|
||||
|
||||
@property
|
||||
def key(self) -> str:
|
||||
return self._key
|
||||
|
||||
@key.setter
|
||||
def key(self, value: str):
|
||||
self._key = value
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
return self._name
|
||||
|
||||
@name.setter
|
||||
def name(self, value: str):
|
||||
self._name = value
|
||||
|
||||
@property
|
||||
def site(self) -> str:
|
||||
return self._site
|
||||
|
||||
@site.setter
|
||||
def site(self, value: str):
|
||||
self._site = value
|
||||
|
||||
@property
|
||||
def folder(self) -> str:
|
||||
return self._folder
|
||||
|
||||
@folder.setter
|
||||
def folder(self, value: str):
|
||||
self._folder = value
|
||||
|
||||
@property
|
||||
def episodeDict(self) -> dict[int, list[int]]:
|
||||
return self._episodeDict
|
||||
|
||||
@episodeDict.setter
|
||||
def episodeDict(self, value: dict[int, list[int]]):
|
||||
self._episodeDict = value
|
||||
|
||||
def to_dict(self):
|
||||
"""Convert Serie object to dictionary for JSON serialization."""
|
||||
return {
|
||||
"key": self.key,
|
||||
"name": self.name,
|
||||
"site": self.site,
|
||||
"folder": self.folder,
|
||||
"episodeDict": {str(k): list(v) for k, v in self.episodeDict.items()}
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def from_dict(data: dict):
|
||||
"""Create a Serie object from dictionary."""
|
||||
episode_dict = {int(k): v for k, v in data["episodeDict"].items()} # Convert keys to int
|
||||
return Serie(data["key"], data["name"], data["site"], data["folder"], episode_dict)
|
||||
|
||||
|
||||
def save_to_file(self, filename: str):
|
||||
"""Save Serie object to JSON file."""
|
||||
with open(filename, "w") as file:
|
||||
json.dump(self.to_dict(), file, indent=4)
|
||||
|
||||
|
||||
@classmethod
|
||||
def load_from_file(cls, filename: str) -> "Serie":
|
||||
"""Load Serie object from JSON file."""
|
||||
with open(filename, "r") as file:
|
||||
data = json.load(file)
|
||||
import json
|
||||
|
||||
class Serie:
|
||||
def __init__(self, key: str, name: str, site: str, folder: str, episodeDict: dict[int, list[int]]):
|
||||
self._key = key
|
||||
self._name = name
|
||||
self._site = site
|
||||
self._folder = folder
|
||||
self._episodeDict = episodeDict
|
||||
def __str__(self):
|
||||
"""String representation of Serie object"""
|
||||
return f"Serie(key='{self.key}', name='{self.name}', site='{self.site}', folder='{self.folder}', episodeDict={self.episodeDict})"
|
||||
|
||||
@property
|
||||
def key(self) -> str:
|
||||
return self._key
|
||||
|
||||
@key.setter
|
||||
def key(self, value: str):
|
||||
self._key = value
|
||||
|
||||
@property
|
||||
def name(self) -> str:
|
||||
return self._name
|
||||
|
||||
@name.setter
|
||||
def name(self, value: str):
|
||||
self._name = value
|
||||
|
||||
@property
|
||||
def site(self) -> str:
|
||||
return self._site
|
||||
|
||||
@site.setter
|
||||
def site(self, value: str):
|
||||
self._site = value
|
||||
|
||||
@property
|
||||
def folder(self) -> str:
|
||||
return self._folder
|
||||
|
||||
@folder.setter
|
||||
def folder(self, value: str):
|
||||
self._folder = value
|
||||
|
||||
@property
|
||||
def episodeDict(self) -> dict[int, list[int]]:
|
||||
return self._episodeDict
|
||||
|
||||
@episodeDict.setter
|
||||
def episodeDict(self, value: dict[int, list[int]]):
|
||||
self._episodeDict = value
|
||||
|
||||
def to_dict(self):
|
||||
"""Convert Serie object to dictionary for JSON serialization."""
|
||||
return {
|
||||
"key": self.key,
|
||||
"name": self.name,
|
||||
"site": self.site,
|
||||
"folder": self.folder,
|
||||
"episodeDict": {str(k): list(v) for k, v in self.episodeDict.items()}
|
||||
}
|
||||
|
||||
@staticmethod
|
||||
def from_dict(data: dict):
|
||||
"""Create a Serie object from dictionary."""
|
||||
episode_dict = {int(k): v for k, v in data["episodeDict"].items()} # Convert keys to int
|
||||
return Serie(data["key"], data["name"], data["site"], data["folder"], episode_dict)
|
||||
|
||||
|
||||
def save_to_file(self, filename: str):
|
||||
"""Save Serie object to JSON file."""
|
||||
with open(filename, "w") as file:
|
||||
json.dump(self.to_dict(), file, indent=4)
|
||||
|
||||
|
||||
@classmethod
|
||||
def load_from_file(cls, filename: str) -> "Serie":
|
||||
"""Load Serie object from JSON file."""
|
||||
with open(filename, "r") as file:
|
||||
data = json.load(file)
|
||||
return cls.from_dict(data)
|
||||
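to_dict stringifies the season keys so the payload is valid JSON, and from_dict converts them back to int. A short round-trip sketch over the class shown above (the file name is illustrative):

serie = Serie("example-show", "Example Show", "aniworld.to", "example-show", {1: [2, 5]})
serie.save_to_file("data")                   # writes {"key": ..., "episodeDict": {"1": [2, 5]}}
restored = Serie.load_from_file("data")
assert restored.episodeDict == {1: [2, 5]}   # keys are ints again after from_dict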
@ -1,7 +1,7 @@
class NoKeyFoundException(Exception):
    """Exception raised when an anime key cannot be found."""
    pass


class MatchNotFoundError(Exception):
    """Exception raised when the season/episode pattern cannot be found in a filename."""


class NoKeyFoundException(Exception):
    """Exception raised when an anime key cannot be found."""
    pass


class MatchNotFoundError(Exception):
    """Exception raised when the season/episode pattern cannot be found in a filename."""
    pass
@ -1,11 +1,11 @@
from ..providers.streaming.Provider import Provider
from ..providers.streaming.voe import VOE


class Providers:

    def __init__(self):
        self.dict = {"VOE": VOE()}

    def GetProvider(self, key: str) -> Provider:
        return self.dict[key]
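A one-line sketch of resolving a streaming provider through this registry:

providers = Providers()
voe = providers.GetProvider("VOE")   # raises KeyError for providers that are not registered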
@ -1,343 +1,343 @@
|
||||
import os
|
||||
import re
|
||||
import logging
|
||||
import json
|
||||
import requests
|
||||
import html
|
||||
from urllib.parse import quote
|
||||
|
||||
from bs4 import BeautifulSoup
|
||||
|
||||
from fake_useragent import UserAgent
|
||||
from requests.adapters import HTTPAdapter
|
||||
from urllib3.util.retry import Retry
|
||||
|
||||
from .base_provider import Loader
|
||||
from ..interfaces.providers import Providers
|
||||
from yt_dlp import YoutubeDL
|
||||
import shutil
|
||||
|
||||
# Read timeout from environment variable, default to 600 seconds (10 minutes)
|
||||
timeout = int(os.getenv("DOWNLOAD_TIMEOUT", 600))
|
||||
|
||||
download_error_logger = logging.getLogger("DownloadErrors")
download_error_handler = logging.FileHandler("../../download_errors.log")
download_error_handler.setLevel(logging.ERROR)
download_error_logger.addHandler(download_error_handler)  # attach the handler so errors actually reach the file

noKeyFound_logger = logging.getLogger("NoKeyFound")
noKeyFound_handler = logging.FileHandler("../../NoKeyFound.log")
noKeyFound_handler.setLevel(logging.ERROR)
noKeyFound_logger.addHandler(noKeyFound_handler)  # attach the handler so errors actually reach the file
|
||||
|
||||
class AniworldLoader(Loader):
|
||||
def __init__(self):
|
||||
self.SUPPORTED_PROVIDERS = ["VOE", "Doodstream", "Vidmoly", "Vidoza", "SpeedFiles", "Streamtape", "Luluvdo"]
|
||||
self.AniworldHeaders = {
|
||||
"accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8",
|
||||
"accept-encoding": "gzip, deflate, br, zstd",
|
||||
"accept-language": "de,de-DE;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6",
|
||||
"cache-control": "max-age=0",
|
||||
"priority": "u=0, i",
|
||||
"sec-ch-ua": '"Chromium";v="136", "Microsoft Edge";v="136", "Not.A/Brand";v="99"',
|
||||
"sec-ch-ua-mobile": "?0",
|
||||
"sec-ch-ua-platform": '"Windows"',
|
||||
"sec-fetch-dest": "document",
|
||||
"sec-fetch-mode": "navigate",
|
||||
"sec-fetch-site": "none",
|
||||
"sec-fetch-user": "?1",
|
||||
"upgrade-insecure-requests": "1",
|
||||
"user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36 Edg/136.0.0.0"
|
||||
}
|
||||
self.INVALID_PATH_CHARS = ['<', '>', ':', '"', '/', '\\', '|', '?', '*', '&']
|
||||
self.RANDOM_USER_AGENT = UserAgent().random
|
||||
self.LULUVDO_USER_AGENT = "Mozilla/5.0 (Android 15; Mobile; rv:132.0) Gecko/132.0 Firefox/132.0"
|
||||
self.PROVIDER_HEADERS = {
|
||||
"Vidmoly": ['Referer: "https://vidmoly.to"'],
|
||||
"Doodstream": ['Referer: "https://dood.li/"'],
|
||||
"VOE": [f'User-Agent: {self.RANDOM_USER_AGENT}'],
|
||||
"Luluvdo": [
|
||||
f'User-Agent: {self.LULUVDO_USER_AGENT}',
|
||||
'Accept-Language: de-DE,de;q=0.9,en-US;q=0.8,en;q=0.7',
|
||||
'Origin: "https://luluvdo.com"',
|
||||
'Referer: "https://luluvdo.com/"'
|
||||
]}
|
||||
self.ANIWORLD_TO = "https://aniworld.to"
|
||||
self.session = requests.Session()
|
||||
|
||||
# Configure retries with backoff
|
||||
retries = Retry(
|
||||
total=5, # Number of retries
|
||||
backoff_factor=1, # Delay multiplier (1s, 2s, 4s, ...)
|
||||
status_forcelist=[500, 502, 503, 504], # Retry for specific HTTP errors
|
||||
allowed_methods=["GET"]
|
||||
)
|
||||
|
||||
adapter = HTTPAdapter(max_retries=retries)
|
||||
self.session.mount("https://", adapter)
|
||||
self.DEFAULT_REQUEST_TIMEOUT = 30
|
||||
|
||||
self._KeyHTMLDict = {}
|
||||
self._EpisodeHTMLDict = {}
|
||||
self.Providers = Providers()
|
||||
|
||||
def ClearCache(self):
|
||||
self._KeyHTMLDict = {}
|
||||
self._EpisodeHTMLDict = {}
|
||||
|
||||
def RemoveFromCache(self):
|
||||
self._EpisodeHTMLDict = {}
|
||||
|
||||
def Search(self, word: str) -> list:
|
||||
search_url = f"{self.ANIWORLD_TO}/ajax/seriesSearch?keyword={quote(word)}"
|
||||
anime_list = self.fetch_anime_list(search_url)
|
||||
|
||||
return anime_list
|
||||
|
||||
|
||||
def fetch_anime_list(self, url: str) -> list:
|
||||
response = self.session.get(url, timeout=self.DEFAULT_REQUEST_TIMEOUT)
|
||||
response.raise_for_status()
|
||||
|
||||
clean_text = response.text.strip()
|
||||
|
||||
try:
|
||||
decoded_data = json.loads(html.unescape(clean_text))
|
||||
return decoded_data if isinstance(decoded_data, list) else []
|
||||
except json.JSONDecodeError:
|
||||
try:
|
||||
# Remove BOM and problematic characters
|
||||
clean_text = clean_text.encode('utf-8').decode('utf-8-sig')
|
||||
# Remove problematic characters
|
||||
clean_text = re.sub(r'[\x00-\x1F\x7F-\x9F]', '', clean_text)
|
||||
# Parse the new text
|
||||
decoded_data = json.loads(clean_text)
|
||||
return decoded_data if isinstance(decoded_data, list) else []
|
||||
except (requests.RequestException, json.JSONDecodeError) as exc:
|
||||
raise ValueError("Could not get valid anime: ") from exc
|
||||
|
||||
def _GetLanguageKey(self, language: str) -> int:
|
||||
languageCode = 0
|
||||
if (language == "German Dub"):
|
||||
languageCode = 1
|
||||
if (language == "English Sub"):
|
||||
languageCode = 2
|
||||
if (language == "German Sub"):
|
||||
languageCode = 3
|
||||
return languageCode
|
||||
def IsLanguage(self, season: int, episode: int, key: str, language: str = "German Dub") -> bool:
|
||||
"""
|
||||
Language Codes:
|
||||
1: German Dub
|
||||
2: English Sub
|
||||
3: German Sub
|
||||
"""
|
||||
languageCode = self._GetLanguageKey(language)
|
||||
|
||||
episode_soup = BeautifulSoup(self._GetEpisodeHTML(season, episode, key).content, 'html.parser')
|
||||
change_language_box_div = episode_soup.find(
|
||||
'div', class_='changeLanguageBox')
|
||||
languages = []
|
||||
|
||||
if change_language_box_div:
|
||||
img_tags = change_language_box_div.find_all('img')
|
||||
for img in img_tags:
|
||||
lang_key = img.get('data-lang-key')
|
||||
if lang_key and lang_key.isdigit():
|
||||
languages.append(int(lang_key))
|
||||
|
||||
return languageCode in languages
|
||||
|
||||
def Download(self, baseDirectory: str, serieFolder: str, season: int, episode: int, key: str, language: str = "German Dub", progress_callback: callable = None) -> bool:
|
||||
sanitized_anime_title = ''.join(
|
||||
char for char in self.GetTitle(key) if char not in self.INVALID_PATH_CHARS
|
||||
)
|
||||
|
||||
if season == 0:
|
||||
output_file = (
|
||||
f"{sanitized_anime_title} - "
|
||||
f"Movie {episode:02} - "
|
||||
f"({language}).mp4"
|
||||
)
|
||||
else:
|
||||
output_file = (
|
||||
f"{sanitized_anime_title} - "
|
||||
f"S{season:02}E{episode:03} - "
|
||||
f"({language}).mp4"
|
||||
)
|
||||
|
||||
folderPath = os.path.join(os.path.join(baseDirectory, serieFolder), f"Season {season}")
|
||||
output_path = os.path.join(folderPath, output_file)
|
||||
os.makedirs(os.path.dirname(output_path), exist_ok=True)
|
||||
|
||||
temp_dir = "./Temp/"
|
||||
os.makedirs(os.path.dirname(temp_dir), exist_ok=True)
|
||||
temp_Path = os.path.join(temp_dir, output_file)
|
||||
|
||||
for provider in self.SUPPORTED_PROVIDERS:
|
||||
link, header = self._get_direct_link_from_provider(season, episode, key, language)
|
||||
ydl_opts = {
|
||||
'fragment_retries': float('inf'),
|
||||
'outtmpl': temp_Path,
|
||||
'quiet': True,
|
||||
'no_warnings': True,
|
||||
'progress_with_newline': False,
|
||||
'nocheckcertificate': True,
|
||||
}
|
||||
|
||||
if header:
|
||||
ydl_opts['http_headers'] = header
|
||||
if progress_callback:
|
||||
ydl_opts['progress_hooks'] = [progress_callback]
|
||||
|
||||
with YoutubeDL(ydl_opts) as ydl:
|
||||
ydl.download([link])
|
||||
|
||||
if (os.path.exists(temp_Path)):
|
||||
shutil.copy(temp_Path, output_path)
|
||||
os.remove(temp_Path)
|
||||
break
|
||||
self.ClearCache()
|
||||
|
||||
|
||||
def GetSiteKey(self) -> str:
|
||||
return "aniworld.to"
|
||||
|
||||
def GetTitle(self, key: str) -> str:
|
||||
soup = BeautifulSoup(self._GetKeyHTML(key).content, 'html.parser')
|
||||
title_div = soup.find('div', class_='series-title')
|
||||
|
||||
if title_div:
|
||||
return title_div.find('h1').find('span').text
|
||||
|
||||
return ""
|
||||
|
||||
def _GetKeyHTML(self, key: str):
|
||||
if key in self._KeyHTMLDict:
|
||||
return self._KeyHTMLDict[key]
|
||||
|
||||
|
||||
self._KeyHTMLDict[key] = self.session.get(
|
||||
f"{self.ANIWORLD_TO}/anime/stream/{key}",
|
||||
timeout=self.DEFAULT_REQUEST_TIMEOUT
|
||||
)
|
||||
return self._KeyHTMLDict[key]
|
||||
def _GetEpisodeHTML(self, season: int, episode: int, key: str):
|
||||
if key in self._EpisodeHTMLDict:
|
||||
return self._EpisodeHTMLDict[(key, season, episode)]
|
||||
|
||||
|
||||
link = (
|
||||
f"{self.ANIWORLD_TO}/anime/stream/{key}/"
|
||||
f"staffel-{season}/episode-{episode}"
|
||||
)
|
||||
html = self.session.get(link, timeout=self.DEFAULT_REQUEST_TIMEOUT)
|
||||
self._EpisodeHTMLDict[(key, season, episode)] = html
|
||||
return self._EpisodeHTMLDict[(key, season, episode)]
|
||||
|
||||
def _get_provider_from_html(self, season: int, episode: int, key: str) -> dict:
|
||||
"""
|
||||
Parses the HTML content to extract streaming providers,
|
||||
their language keys, and redirect links.
|
||||
|
||||
Returns a dictionary with provider names as keys
|
||||
and language key-to-redirect URL mappings as values.
|
||||
|
||||
Example:
|
||||
|
||||
{
|
||||
'VOE': {1: 'https://aniworld.to/redirect/1766412',
|
||||
2: 'https://aniworld.to/redirect/1766405'},
|
||||
'Doodstream': {1: 'https://aniworld.to/redirect/1987922',
|
||||
2: 'https://aniworld.to/redirect/2700342'},
|
||||
...
|
||||
}
|
||||
|
||||
Access redirect link with:
|
||||
print(self.provider["VOE"][2])
|
||||
"""
|
||||
|
||||
soup = BeautifulSoup(self._GetEpisodeHTML(season, episode, key).content, 'html.parser')
|
||||
providers = {}
|
||||
|
||||
episode_links = soup.find_all(
|
||||
'li', class_=lambda x: x and x.startswith('episodeLink')
|
||||
)
|
||||
|
||||
if not episode_links:
|
||||
return providers
|
||||
|
||||
for link in episode_links:
|
||||
provider_name_tag = link.find('h4')
|
||||
provider_name = provider_name_tag.text.strip() if provider_name_tag else None
|
||||
|
||||
redirect_link_tag = link.find('a', class_='watchEpisode')
|
||||
redirect_link = redirect_link_tag['href'] if redirect_link_tag else None
|
||||
|
||||
lang_key = link.get('data-lang-key')
|
||||
lang_key = int(
|
||||
lang_key) if lang_key and lang_key.isdigit() else None
|
||||
|
||||
if provider_name and redirect_link and lang_key:
|
||||
if provider_name not in providers:
|
||||
providers[provider_name] = {}
|
||||
providers[provider_name][lang_key] = f"{self.ANIWORLD_TO}{redirect_link}"
|
||||
|
||||
|
||||
return providers
|
||||
def _get_redirect_link(self, season: int, episode: int, key: str, language: str = "German Dub") -> str:
|
||||
languageCode = self._GetLanguageKey(language)
|
||||
if (self.IsLanguage(season, episode, key, language)):
|
||||
for provider_name, lang_dict in self._get_provider_from_html(season, episode, key).items():
|
||||
if languageCode in lang_dict:
|
||||
return(lang_dict[languageCode], provider_name)
|
||||
break
|
||||
return None
|
||||
def _get_embeded_link(self, season: int, episode: int, key: str, language: str = "German Dub"):
|
||||
redirect_link, provider_name = self._get_redirect_link(season, episode, key, language)
|
||||
|
||||
embeded_link = self.session.get(
|
||||
redirect_link, timeout=self.DEFAULT_REQUEST_TIMEOUT,
|
||||
headers={'User-Agent': self.RANDOM_USER_AGENT}).url
|
||||
return embeded_link
|
||||
def _get_direct_link_from_provider(self, season: int, episode: int, key: str, language: str = "German Dub") -> str:
|
||||
"""
|
||||
providers = {
|
||||
"Vidmoly": get_direct_link_from_vidmoly,
|
||||
"Vidoza": get_direct_link_from_vidoza,
|
||||
"VOE": get_direct_link_from_voe,
|
||||
"Doodstream": get_direct_link_from_doodstream,
|
||||
"SpeedFiles": get_direct_link_from_speedfiles,
|
||||
"Luluvdo": get_direct_link_from_luluvdo
|
||||
}
|
||||
|
||||
"""
|
||||
embeded_link = self._get_embeded_link(season, episode, key, language)
|
||||
if embeded_link is None:
|
||||
return None
|
||||
|
||||
return self.Providers.GetProvider("VOE").GetLink(embeded_link, self.DEFAULT_REQUEST_TIMEOUT)
|
||||
|
||||
def get_season_episode_count(self, slug : str) -> dict:
|
||||
base_url = f"{self.ANIWORLD_TO}/anime/stream/{slug}/"
|
||||
response = requests.get(base_url, timeout=self.DEFAULT_REQUEST_TIMEOUT)
|
||||
soup = BeautifulSoup(response.content, 'html.parser')
|
||||
|
||||
season_meta = soup.find('meta', itemprop='numberOfSeasons')
|
||||
number_of_seasons = int(season_meta['content']) if season_meta else 0
|
||||
|
||||
episode_counts = {}
|
||||
|
||||
for season in range(1, number_of_seasons + 1):
|
||||
season_url = f"{base_url}staffel-{season}"
|
||||
response = requests.get(season_url, timeout=self.DEFAULT_REQUEST_TIMEOUT)
|
||||
soup = BeautifulSoup(response.content, 'html.parser')
|
||||
|
||||
episode_links = soup.find_all('a', href=True)
|
||||
unique_links = set(
|
||||
link['href']
|
||||
for link in episode_links
|
||||
if f"staffel-{season}/episode-" in link['href']
|
||||
)
|
||||
|
||||
episode_counts[season] = len(unique_links)
|
||||
|
||||
return episode_counts
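
# Illustrative usage sketch, assuming a hypothetical series slug and library folder:
#     loader = AniworldLoader()
#     results = loader.Search("one piece")
#     if loader.IsLanguage(season=1, episode=1, key="example-slug", language="German Dub"):
#         loader.Download("/media/anime", "Example Serie", 1, 1, "example-slug")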
@ -1,27 +1,27 @@
from abc import ABC, abstractmethod


class Loader(ABC):
    @abstractmethod
    def Search(self, word: str) -> list:
        pass

    @abstractmethod
    def IsLanguage(self, season: int, episode: int, key: str, language: str = "German Dub") -> bool:
        pass

    @abstractmethod
    def Download(self, baseDirectory: str, serieFolder: str, season: int, episode: int, key: str, progress_callback: callable = None) -> bool:
        pass

    @abstractmethod
    def GetSiteKey(self) -> str:
        pass

    @abstractmethod
    def GetTitle(self) -> str:
        pass

    @abstractmethod
    def get_season_episode_count(self, slug: str) -> dict:
        pass
File diff suppressed because it is too large
@ -1,10 +1,10 @@
from .aniworld_provider import AniworldLoader
from .base_provider import Loader


class Loaders:

    def __init__(self):
        self.dict = {"aniworld.to": AniworldLoader()}

    def GetLoader(self, key: str) -> Loader:
        return self.dict[key]
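
# Illustrative lookup sketch, assuming the "aniworld.to" key registered above:
#     loaders = Loaders()
#     loader = loaders.GetLoader("aniworld.to")
#     results = loader.Search("naruto")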
@ -1,7 +1,7 @@
from abc import ABC, abstractmethod


class Provider(ABC):
    @abstractmethod
    def GetLink(self, embededLink: str, DEFAULT_REQUEST_TIMEOUT: int) -> (str, [str]):
        pass
@ -1,59 +1,59 @@
import re
import random
import time

from fake_useragent import UserAgent
import requests
from .Provider import Provider


class Doodstream(Provider):

    def __init__(self):
        self.RANDOM_USER_AGENT = UserAgent().random

    def GetLink(self, embededLink: str, DEFAULT_REQUEST_TIMEOUT: int) -> str:
        headers = {
            'User-Agent': self.RANDOM_USER_AGENT,
            'Referer': 'https://dood.li/'
        }

        def extract_data(pattern, content):
            match = re.search(pattern, content)
            return match.group(1) if match else None

        def generate_random_string(length=10):
            characters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789'
            return ''.join(random.choice(characters) for _ in range(length))

        response = requests.get(
            embededLink,
            headers=headers,
            timeout=DEFAULT_REQUEST_TIMEOUT,
            verify=False
        )
        response.raise_for_status()

        pass_md5_pattern = r"\$\.get\('([^']*\/pass_md5\/[^']*)'"
        pass_md5_url = extract_data(pass_md5_pattern, response.text)
        if not pass_md5_url:
            raise ValueError(
                f'pass_md5 URL not found using {embededLink}.')

        full_md5_url = f"https://dood.li{pass_md5_url}"

        token_pattern = r"token=([a-zA-Z0-9]+)"
        token = extract_data(token_pattern, response.text)
        if not token:
            raise ValueError(f'Token not found using {embededLink}.')

        md5_response = requests.get(
            full_md5_url, headers=headers, timeout=DEFAULT_REQUEST_TIMEOUT, verify=False)
        md5_response.raise_for_status()
        video_base_url = md5_response.text.strip()

        random_string = generate_random_string(10)
        expiry = int(time.time())

        direct_link = f"{video_base_url}{random_string}?token={token}&expiry={expiry}"
        # print(direct_link)

        return direct_link
@ -1,51 +1,51 @@
import re
import requests
# import jsbeautifier.unpackers.packer as packer

from aniworld import config

REDIRECT_REGEX = re.compile(
    r'<iframe *(?:[^>]+ )?src=(?:\'([^\']+)\'|"([^"]+)")[^>]*>')
SCRIPT_REGEX = re.compile(
    r'(?s)<script\s+[^>]*?data-cfasync=["\']?false["\']?[^>]*>(.+?)</script>')
VIDEO_URL_REGEX = re.compile(r'file:\s*"([^"]+\.m3u8[^"]*)"')

# TODO Implement this script fully


def get_direct_link_from_filemoon(embeded_filemoon_link: str):
    session = requests.Session()
    session.verify = False

    headers = {
        "User-Agent": config.RANDOM_USER_AGENT,
        "Referer": embeded_filemoon_link,
    }

    response = session.get(embeded_filemoon_link, headers=headers)
    source = response.text

    match = REDIRECT_REGEX.search(source)
    if match:
        redirect_url = match.group(1) or match.group(2)
        response = session.get(redirect_url, headers=headers)
        source = response.text

    for script_match in SCRIPT_REGEX.finditer(source):
        script_content = script_match.group(1).strip()

        if not script_content.startswith("eval("):
            continue

        # NOTE: packer is only available if the commented-out import above is enabled.
        if packer.detect(script_content):
            unpacked = packer.unpack(script_content)
            video_match = VIDEO_URL_REGEX.search(unpacked)
            if video_match:
                return video_match.group(1)

    raise Exception("No Video link found!")


if __name__ == '__main__':
    url = input("Enter Filemoon Link: ")
    print(get_direct_link_from_filemoon(url))
@ -1,90 +1,90 @@
import re
import json
import sys
import requests
from aniworld.config import DEFAULT_REQUEST_TIMEOUT


def fetch_page_content(url):
    try:
        response = requests.get(url, timeout=DEFAULT_REQUEST_TIMEOUT)
        response.raise_for_status()
        return response.text
    except requests.exceptions.RequestException as e:
        print(f"Failed to fetch the page content: {e}")
        return None


def extract_video_data(page_content):
    match = re.search(r'^.*videos_manifest.*$', page_content, re.MULTILINE)
    if not match:
        raise ValueError("Failed to extract video manifest from the response.")

    json_str = match.group(0)[match.group(0).find(
        '{'):match.group(0).rfind('}') + 1]
    return json.loads(json_str)


def get_streams(url):
    page_content = fetch_page_content(url)
    data = extract_video_data(page_content)
    video_info = data['state']['data']['video']
    name = video_info['hentai_video']['name']
    streams = video_info['videos_manifest']['servers'][0]['streams']

    return {"name": name, "streams": streams}


def display_streams(streams):
    if not streams:
        print("No streams available.")
        return

    print("Available qualities:")
    for i, stream in enumerate(streams, 1):
        premium_tag = "(Premium)" if not stream['is_guest_allowed'] else ""
        print(
            f"{i}. {stream['width']}x{stream['height']}\t"
            f"({stream['filesize_mbs']}MB) {premium_tag}")


def get_user_selection(streams):
    try:
        selected_index = int(input("Select a stream: ").strip()) - 1
        if 0 <= selected_index < len(streams):
            return selected_index

        print("Invalid selection.")
        return None
    except ValueError:
        print("Invalid input.")
        return None


def get_direct_link_from_hanime(url=None):
    try:
        if url is None:
            if len(sys.argv) > 1:
                url = sys.argv[1]
            else:
                url = input("Please enter the hanime.tv video URL: ").strip()

        try:
            video_data = get_streams(url)
            print(f"Video: {video_data['name']}")
            print('*' * 40)
            display_streams(video_data['streams'])

            selected_index = None
            while selected_index is None:
                selected_index = get_user_selection(video_data['streams'])

            print(f"M3U8 URL: {video_data['streams'][selected_index]['url']}")
        except ValueError as e:
            print(f"Error: {e}")
    except KeyboardInterrupt:
        pass


if __name__ == "__main__":
    get_direct_link_from_hanime()
@ -1,35 +1,35 @@
import requests
import json
from urllib.parse import urlparse

# TODO Doesn't work on download yet and has to be implemented


def get_direct_link_from_loadx(embeded_loadx_link: str):
    response = requests.head(
        embeded_loadx_link, allow_redirects=True, verify=False)

    parsed_url = urlparse(response.url)
    path_parts = parsed_url.path.split("/")
    if len(path_parts) < 3:
        raise ValueError("Invalid path!")

    id_hash = path_parts[2]
    host = parsed_url.netloc

    post_url = f"https://{host}/player/index.php?data={id_hash}&do=getVideo"
    headers = {"X-Requested-With": "XMLHttpRequest"}
    response = requests.post(post_url, headers=headers, verify=False)

    data = json.loads(response.text)
    print(data)
    video_url = data.get("videoSource")
    if not video_url:
        raise ValueError("No Video link found!")

    return video_url


if __name__ == '__main__':
    url = input("Enter Loadx Link: ")
    print(get_direct_link_from_loadx(url))
@ -1,39 +1,39 @@
import re

import requests

from aniworld import config


def get_direct_link_from_luluvdo(embeded_luluvdo_link, arguments=None):
    luluvdo_id = embeded_luluvdo_link.split('/')[-1]
    filelink = (
        f"https://luluvdo.com/dl?op=embed&file_code={luluvdo_id}&embed=1&referer=luluvdo.com&adb=0"
    )

    # The User-Agent needs to be the same as the direct-link one to work
    headers = {
        "Origin": "https://luluvdo.com",
        "Referer": "https://luluvdo.com/",
        "User-Agent": config.LULUVDO_USER_AGENT
    }

    # Guard against the default arguments=None (e.g. when called from __main__)
    if arguments is not None and arguments.action == "Download":
        headers["Accept-Language"] = "de-DE,de;q=0.9,en-US;q=0.8,en;q=0.7"

    response = requests.get(filelink, headers=headers,
                            timeout=config.DEFAULT_REQUEST_TIMEOUT)

    if response.status_code == 200:
        pattern = r'file:\s*"([^"]+)"'
        matches = re.findall(pattern, str(response.text))

        if matches:
            return matches[0]

    raise ValueError("No match found")


if __name__ == '__main__':
    url = input("Enter Luluvdo Link: ")
    print(get_direct_link_from_luluvdo(url))
@ -1,43 +1,43 @@
import re
import base64
import requests

from aniworld.config import DEFAULT_REQUEST_TIMEOUT, RANDOM_USER_AGENT

SPEEDFILES_PATTERN = re.compile(r'var _0x5opu234 = "(?P<encoded_data>.*?)";')


def get_direct_link_from_speedfiles(embeded_speedfiles_link):
    response = requests.get(
        embeded_speedfiles_link,
        timeout=DEFAULT_REQUEST_TIMEOUT,
        headers={'User-Agent': RANDOM_USER_AGENT}
    )

    if "<span class=\"inline-block\">Web server is down</span>" in response.text:
        raise ValueError(
            "The SpeedFiles server is currently down.\n"
            "Please try again later or choose a different hoster."
        )

    match = SPEEDFILES_PATTERN.search(response.text)

    if not match:
        raise ValueError("Pattern not found in the response.")

    encoded_data = match.group("encoded_data")
    decoded = base64.b64decode(encoded_data).decode()
    decoded = decoded.swapcase()[::-1]
    decoded = base64.b64decode(decoded).decode()[::-1]
    decoded_hex = ''.join(chr(int(decoded[i:i + 2], 16))
                          for i in range(0, len(decoded), 2))
    shifted = ''.join(chr(ord(char) - 3) for char in decoded_hex)
    result = base64.b64decode(shifted.swapcase()[::-1]).decode()

    return result


if __name__ == '__main__':
    speedfiles_link = input("Enter Speedfiles Link: ")
    print(get_direct_link_from_speedfiles(
        embeded_speedfiles_link=speedfiles_link))
@ -1,2 +1,2 @@
def get_direct_link_from_streamtape(embeded_streamtape_link: str) -> str:
    pass
@ -1,34 +1,34 @@
import re

import requests
from bs4 import BeautifulSoup

from aniworld.config import DEFAULT_REQUEST_TIMEOUT, RANDOM_USER_AGENT


def get_direct_link_from_vidmoly(embeded_vidmoly_link: str):
    response = requests.get(
        embeded_vidmoly_link,
        headers={'User-Agent': RANDOM_USER_AGENT},
        timeout=DEFAULT_REQUEST_TIMEOUT
    )
    html_content = response.text
    soup = BeautifulSoup(html_content, 'html.parser')
    scripts = soup.find_all('script')

    file_link_pattern = r'file:\s*"(https?://.*?)"'

    for script in scripts:
        if script.string:
            match = re.search(file_link_pattern, script.string)
            if match:
                file_link = match.group(1)
                return file_link

    raise ValueError("No direct link found.")


if __name__ == '__main__':
    link = input("Enter Vidmoly Link: ")
    print('Note: --referer "https://vidmoly.to"')
    print(get_direct_link_from_vidmoly(embeded_vidmoly_link=link))
@ -1,29 +1,29 @@
import re

import requests
from bs4 import BeautifulSoup

from aniworld.config import DEFAULT_REQUEST_TIMEOUT, RANDOM_USER_AGENT


def get_direct_link_from_vidoza(embeded_vidoza_link: str) -> str:
    response = requests.get(
        embeded_vidoza_link,
        headers={'User-Agent': RANDOM_USER_AGENT},
        timeout=DEFAULT_REQUEST_TIMEOUT
    )

    soup = BeautifulSoup(response.content, "html.parser")

    for tag in soup.find_all('script'):
        if 'sourcesCode:' in tag.text:
            match = re.search(r'src: "(.*?)"', tag.text)
            if match:
                return match.group(1)

    raise ValueError("No direct link found.")


if __name__ == '__main__':
    link = input("Enter Vidoza Link: ")
    print(get_direct_link_from_vidoza(embeded_vidoza_link=link))
@ -1,113 +1,113 @@
import re
import base64
import json

from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry
import requests
from bs4 import BeautifulSoup
from fake_useragent import UserAgent
from .Provider import Provider

# Compile regex patterns once for better performance
REDIRECT_PATTERN = re.compile(r"https?://[^'\"<>]+")
B64_PATTERN = re.compile(r"var a168c='([^']+)'")
HLS_PATTERN = re.compile(r"'hls': '(?P<hls>[^']+)'")


class VOE(Provider):

    def __init__(self):
        self.RANDOM_USER_AGENT = UserAgent().random
        self.Header = {
            "User-Agent": self.RANDOM_USER_AGENT
        }

    def GetLink(self, embededLink: str, DEFAULT_REQUEST_TIMEOUT: int) -> (str, [str]):
        self.session = requests.Session()

        # Configure retries with backoff
        retries = Retry(
            total=5,  # Number of retries
            backoff_factor=1,  # Delay multiplier (1s, 2s, 4s, ...)
            status_forcelist=[500, 502, 503, 504],  # Retry for specific HTTP errors
            allowed_methods=["GET"]
        )

        adapter = HTTPAdapter(max_retries=retries)
        self.session.mount("https://", adapter)

        response = self.session.get(
            embededLink,
            headers={'User-Agent': self.RANDOM_USER_AGENT},
            timeout=DEFAULT_REQUEST_TIMEOUT
        )

        redirect = REDIRECT_PATTERN.search(response.text)
        if not redirect:
            raise ValueError("No redirect found.")

        redirect_url = redirect.group(0)
        parts = redirect_url.strip().split("/")
        self.Header["Referer"] = f"{parts[0]}//{parts[2]}/"

        response = self.session.get(redirect_url, headers={'User-Agent': self.RANDOM_USER_AGENT})
        html = response.content

        # Method 1: Extract from script tag
        extracted = self.extract_voe_from_script(html)
        if extracted:
            return extracted, self.Header

        # Method 2: Extract from base64 encoded variable
        htmlText = html.decode('utf-8')
        b64_match = B64_PATTERN.search(htmlText)
        if b64_match:
            decoded = base64.b64decode(b64_match.group(1)).decode()[::-1]
            source = json.loads(decoded).get("source")
            if source:
                return source, self.Header

        # Method 3: Extract HLS source
        hls_match = HLS_PATTERN.search(htmlText)
        if hls_match:
            return base64.b64decode(hls_match.group("hls")).decode(), self.Header

        raise ValueError("No video source found.")

    def shift_letters(self, input_str):
        result = ''
        for c in input_str:
            code = ord(c)
            if 65 <= code <= 90:
                code = (code - 65 + 13) % 26 + 65
            elif 97 <= code <= 122:
                code = (code - 97 + 13) % 26 + 97
            result += chr(code)
        return result

    def replace_junk(self, input_str):
        junk_parts = ['@$', '^^', '~@', '%?', '*~', '!!', '#&']
        for part in junk_parts:
            input_str = re.sub(re.escape(part), '_', input_str)
        return input_str

    def shift_back(self, s, n):
        return ''.join(chr(ord(c) - n) for c in s)

    def decode_voe_string(self, encoded):
        step1 = self.shift_letters(encoded)
        step2 = self.replace_junk(step1).replace('_', '')
        step3 = base64.b64decode(step2).decode()
        step4 = self.shift_back(step3, 3)
        step5 = base64.b64decode(step4[::-1]).decode()
        return json.loads(step5)

    def extract_voe_from_script(self, html):
        soup = BeautifulSoup(html, "html.parser")
        script = soup.find("script", type="application/json")
        if not script:
            # Fall back to the other extraction methods in GetLink
            return None
        return self.decode_voe_string(script.text[2:-2])["source"]
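
# Illustrative usage sketch, assuming a hypothetical VOE embed URL:
#     voe = VOE()
#     direct_url, headers = voe.GetLink("https://voe.sx/e/abcdef", 30)
#     # direct_url points at the video source; headers carry the Referer and
#     # User-Agent that a downstream downloader should send.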
|
||||
|
||||
|
||||
|
||||
import re
|
||||
import base64
|
||||
import json
|
||||
|
||||
from requests.adapters import HTTPAdapter
|
||||
from urllib3.util.retry import Retry
|
||||
import requests
|
||||
from bs4 import BeautifulSoup
|
||||
from fake_useragent import UserAgent
|
||||
from .Provider import Provider
|
||||
|
||||
# Compile regex patterns once for better performance
|
||||
REDIRECT_PATTERN = re.compile(r"https?://[^'\"<>]+")
|
||||
B64_PATTERN = re.compile(r"var a168c='([^']+)'")
|
||||
HLS_PATTERN = re.compile(r"'hls': '(?P<hls>[^']+)'")
|
||||
class VOE(Provider):
|
||||
|
||||
def __init__(self):
|
||||
self.RANDOM_USER_AGENT = UserAgent().random
|
||||
self.Header = {
|
||||
"User-Agent": self.RANDOM_USER_AGENT
|
||||
}
|
||||
def GetLink(self, embededLink: str, DEFAULT_REQUEST_TIMEOUT: int) -> (str, [str]):
|
||||
self.session = requests.Session()
|
||||
|
||||
# Configure retries with backoff
|
||||
retries = Retry(
|
||||
total=5, # Number of retries
|
||||
backoff_factor=1, # Delay multiplier (1s, 2s, 4s, ...)
|
||||
status_forcelist=[500, 502, 503, 504], # Retry for specific HTTP errors
|
||||
allowed_methods=["GET"]
|
||||
)
|
||||
|
||||
adapter = HTTPAdapter(max_retries=retries)
|
||||
self.session.mount("https://", adapter)
|
||||
DEFAULT_REQUEST_TIMEOUT = 30
|
||||
|
||||
response = self.session.get(
|
||||
embededLink,
|
||||
headers={'User-Agent': self.RANDOM_USER_AGENT},
|
||||
timeout=DEFAULT_REQUEST_TIMEOUT
|
||||
)
|
||||
|
||||
redirect = re.search(r"https?://[^'\"<>]+", response.text)
|
||||
if not redirect:
|
||||
raise ValueError("No redirect found.")
|
||||
|
||||
redirect_url = redirect.group(0)
|
||||
parts = redirect_url.strip().split("/")
|
||||
self.Header["Referer"] = f"{parts[0]}//{parts[2]}/"
|
||||
|
||||
response = self.session.get(redirect_url, headers={'User-Agent': self.RANDOM_USER_AGENT})
|
||||
html = response.content
|
||||
|
||||
|
||||
# Method 1: Extract from script tag
|
||||
extracted = self.extract_voe_from_script(html)
|
||||
if extracted:
|
||||
return extracted, self.Header
|
||||
|
||||
# Method 2: Extract from base64 encoded variable
|
||||
htmlText = html.decode('utf-8')
|
||||
b64_match = B64_PATTERN.search(htmlText)
|
||||
if b64_match:
|
||||
decoded = base64.b64decode(b64_match.group(1)).decode()[::-1]
|
||||
source = json.loads(decoded).get("source")
|
||||
if source:
|
||||
return source, self.Header
|
||||
|
||||
# Method 3: Extract HLS source
|
||||
hls_match = HLS_PATTERN.search(htmlText)
|
||||
if hls_match:
|
||||
return base64.b64decode(hls_match.group("hls")).decode(), self.Header
|
||||
|
||||
    def shift_letters(self, input_str):
        # ROT13 over ASCII letters; every other character passes through unchanged.
        result = ''
        for c in input_str:
            code = ord(c)
            if 65 <= code <= 90:
                code = (code - 65 + 13) % 26 + 65
            elif 97 <= code <= 122:
                code = (code - 97 + 13) % 26 + 97
            result += chr(code)
        return result

    def replace_junk(self, input_str):
        # Mark the obfuscation tokens so they can be removed before base64 decoding.
        junk_parts = ['@$', '^^', '~@', '%?', '*~', '!!', '#&']
        for part in junk_parts:
            input_str = re.sub(re.escape(part), '_', input_str)
        return input_str

    def shift_back(self, s, n):
        # Undo a Caesar-style shift of n code points per character.
        return ''.join(chr(ord(c) - n) for c in s)

    def decode_voe_string(self, encoded):
        # Reverse the obfuscation chain:
        # ROT13 -> strip junk tokens -> base64 -> shift back by 3 -> reverse -> base64 -> JSON.
        step1 = self.shift_letters(encoded)
        step2 = self.replace_junk(step1).replace('_', '')
        step3 = base64.b64decode(step2).decode()
        step4 = self.shift_back(step3, 3)
        step5 = base64.b64decode(step4[::-1]).decode()
        return json.loads(step5)

    def extract_voe_from_script(self, html):
        soup = BeautifulSoup(html, "html.parser")
        script = soup.find("script", type="application/json")
        if script is None:
            # No embedded JSON script tag; let the caller fall back to the other methods.
            return None
        return self.decode_voe_string(script.text[2:-2])["source"]

@ -1,6 +0,0 @@
"""
Infrastructure package for the Aniworld server.

This package contains repository implementations, database connections,
caching, and other infrastructure concerns.
"""
@ -1,916 +0,0 @@
|
||||
"""
|
||||
Database & Storage Management for AniWorld App
|
||||
|
||||
This module provides database schema management, data migration,
|
||||
backup/restore functionality, and storage optimization.
|
||||
"""
|
||||
|
||||
import os
|
||||
import sqlite3
|
||||
import json
|
||||
import shutil
|
||||
import time
|
||||
import hashlib
|
||||
import logging
|
||||
import threading
|
||||
import zipfile
|
||||
import uuid
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Dict, List, Optional, Any, Tuple
|
||||
from dataclasses import dataclass, field
|
||||
from contextlib import contextmanager
|
||||
import glob
|
||||
from pathlib import Path
|
||||
|
||||
|
||||
@dataclass
|
||||
class AnimeMetadata:
|
||||
"""Represents anime metadata stored in database."""
|
||||
anime_id: str
|
||||
name: str
|
||||
folder: str
|
||||
key: Optional[str] = None
|
||||
description: Optional[str] = None
|
||||
genres: List[str] = field(default_factory=list)
|
||||
release_year: Optional[int] = None
|
||||
status: str = 'ongoing' # ongoing, completed, cancelled
|
||||
total_episodes: Optional[int] = None
|
||||
poster_url: Optional[str] = None
|
||||
last_updated: datetime = field(default_factory=datetime.now)
|
||||
created_at: datetime = field(default_factory=datetime.now)
|
||||
custom_metadata: Dict[str, Any] = field(default_factory=dict)
|
||||
|
||||
|
||||
@dataclass
|
||||
class EpisodeMetadata:
|
||||
"""Represents episode metadata stored in database."""
|
||||
episode_id: str
|
||||
anime_id: str
|
||||
season: int
|
||||
episode: int
|
||||
title: Optional[str] = None
|
||||
description: Optional[str] = None
|
||||
duration_seconds: Optional[int] = None
|
||||
file_path: Optional[str] = None
|
||||
file_size_bytes: Optional[int] = None
|
||||
download_date: Optional[datetime] = None
|
||||
last_watched: Optional[datetime] = None
|
||||
watch_count: int = 0
|
||||
is_downloaded: bool = False
|
||||
quality: Optional[str] = None
|
||||
language: str = 'German Dub'
|
||||
|
||||
|
||||
@dataclass
|
||||
class BackupInfo:
|
||||
"""Represents backup metadata."""
|
||||
backup_id: str
|
||||
backup_path: str
|
||||
backup_type: str # full, incremental, metadata_only
|
||||
created_at: datetime
|
||||
size_bytes: int
|
||||
description: Optional[str] = None
|
||||
tables_included: List[str] = field(default_factory=list)
|
||||
checksum: Optional[str] = None
|
||||
|
||||
|
||||
class DatabaseManager:
|
||||
"""Manage SQLite database with migrations and maintenance."""
|
||||
|
||||
def __init__(self, db_path: str = "./data/aniworld.db"):
|
||||
self.db_path = db_path
|
||||
self.db_dir = os.path.dirname(db_path)
|
||||
self.logger = logging.getLogger(__name__)
|
||||
self.lock = threading.Lock()
|
||||
|
||||
# Create database directory
|
||||
os.makedirs(self.db_dir, exist_ok=True)
|
||||
|
||||
# Initialize database
|
||||
self.initialize_database()
|
||||
|
||||
# Run migrations
|
||||
self.run_migrations()
|
||||
|
||||
@contextmanager
|
||||
def get_connection(self):
|
||||
"""Get database connection with proper error handling."""
|
||||
conn = None
|
||||
try:
|
||||
conn = sqlite3.connect(self.db_path, timeout=30)
|
||||
conn.row_factory = sqlite3.Row # Enable dict-like access
|
||||
yield conn
|
||||
except Exception as e:
|
||||
if conn:
|
||||
conn.rollback()
|
||||
self.logger.error(f"Database connection error: {e}")
|
||||
raise
|
||||
finally:
|
||||
if conn:
|
||||
conn.close()
|
||||
|
||||
def initialize_database(self):
|
||||
"""Initialize database with base schema."""
|
||||
with self.get_connection() as conn:
|
||||
# Create schema version table
|
||||
conn.execute("""
|
||||
CREATE TABLE IF NOT EXISTS schema_version (
|
||||
version INTEGER PRIMARY KEY,
|
||||
applied_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
description TEXT
|
||||
)
|
||||
""")
|
||||
|
||||
# Insert initial version if not exists
|
||||
conn.execute("""
|
||||
INSERT OR IGNORE INTO schema_version (version, description)
|
||||
VALUES (0, 'Initial schema')
|
||||
""")
|
||||
|
||||
conn.commit()
|
||||
|
||||
def get_current_version(self) -> int:
|
||||
"""Get current database schema version."""
|
||||
with self.get_connection() as conn:
|
||||
cursor = conn.execute("SELECT MAX(version) FROM schema_version")
|
||||
result = cursor.fetchone()
|
||||
return result[0] if result and result[0] is not None else 0
|
||||
|
||||
def run_migrations(self):
|
||||
"""Run database migrations."""
|
||||
current_version = self.get_current_version()
|
||||
migrations = self.get_migrations()
|
||||
|
||||
for version, migration in migrations.items():
|
||||
if version > current_version:
|
||||
self.logger.info(f"Running migration to version {version}")
|
||||
try:
|
||||
with self.get_connection() as conn:
|
||||
migration['up'](conn)
|
||||
|
||||
# Record migration
|
||||
conn.execute("""
|
||||
INSERT INTO schema_version (version, description)
|
||||
VALUES (?, ?)
|
||||
""", (version, migration['description']))
|
||||
|
||||
conn.commit()
|
||||
self.logger.info(f"Migration to version {version} completed")
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"Migration to version {version} failed: {e}")
|
||||
raise
|
||||
|
||||
def get_migrations(self) -> Dict[int, Dict[str, Any]]:
|
||||
"""Define database migrations."""
|
||||
return {
|
||||
1: {
|
||||
'description': 'Create anime metadata table',
|
||||
'up': self._migration_001_anime_table
|
||||
},
|
||||
2: {
|
||||
'description': 'Create episode metadata table',
|
||||
'up': self._migration_002_episode_table
|
||||
},
|
||||
3: {
|
||||
'description': 'Create download history table',
|
||||
'up': self._migration_003_download_history
|
||||
},
|
||||
4: {
|
||||
'description': 'Create user preferences table',
|
||||
'up': self._migration_004_user_preferences
|
||||
},
|
||||
5: {
|
||||
'description': 'Create storage locations table',
|
||||
'up': self._migration_005_storage_locations
|
||||
},
|
||||
6: {
|
||||
'description': 'Add indexes for performance',
|
||||
'up': self._migration_006_indexes
|
||||
}
|
||||
}
|
||||
|
||||
def _migration_001_anime_table(self, conn: sqlite3.Connection):
|
||||
"""Create anime metadata table."""
|
||||
conn.execute("""
|
||||
CREATE TABLE anime_metadata (
|
||||
anime_id TEXT PRIMARY KEY,
|
||||
name TEXT NOT NULL,
|
||||
folder TEXT NOT NULL UNIQUE,
|
||||
key TEXT,
|
||||
description TEXT,
|
||||
genres TEXT, -- JSON array
|
||||
release_year INTEGER,
|
||||
status TEXT DEFAULT 'ongoing',
|
||||
total_episodes INTEGER,
|
||||
poster_url TEXT,
|
||||
last_updated TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
custom_metadata TEXT -- JSON object
|
||||
)
|
||||
""")
|
||||
|
||||
def _migration_002_episode_table(self, conn: sqlite3.Connection):
|
||||
"""Create episode metadata table."""
|
||||
conn.execute("""
|
||||
CREATE TABLE episode_metadata (
|
||||
episode_id TEXT PRIMARY KEY,
|
||||
anime_id TEXT NOT NULL,
|
||||
season INTEGER NOT NULL,
|
||||
episode INTEGER NOT NULL,
|
||||
title TEXT,
|
||||
description TEXT,
|
||||
duration_seconds INTEGER,
|
||||
file_path TEXT,
|
||||
file_size_bytes INTEGER,
|
||||
download_date TIMESTAMP,
|
||||
last_watched TIMESTAMP,
|
||||
watch_count INTEGER DEFAULT 0,
|
||||
is_downloaded BOOLEAN DEFAULT FALSE,
|
||||
quality TEXT,
|
||||
language TEXT DEFAULT 'German Dub',
|
||||
FOREIGN KEY (anime_id) REFERENCES anime_metadata(anime_id),
|
||||
UNIQUE(anime_id, season, episode, language)
|
||||
)
|
||||
""")
|
||||
|
||||
def _migration_003_download_history(self, conn: sqlite3.Connection):
|
||||
"""Create download history table."""
|
||||
conn.execute("""
|
||||
CREATE TABLE download_history (
|
||||
download_id TEXT PRIMARY KEY,
|
||||
anime_id TEXT NOT NULL,
|
||||
season INTEGER NOT NULL,
|
||||
episode INTEGER NOT NULL,
|
||||
language TEXT NOT NULL,
|
||||
download_started TIMESTAMP NOT NULL,
|
||||
download_completed TIMESTAMP,
|
||||
download_status TEXT NOT NULL, -- started, completed, failed, cancelled
|
||||
file_size_bytes INTEGER,
|
||||
download_speed_mbps REAL,
|
||||
error_message TEXT,
|
||||
retry_count INTEGER DEFAULT 0,
|
||||
FOREIGN KEY (anime_id) REFERENCES anime_metadata(anime_id)
|
||||
)
|
||||
""")
|
||||
|
||||
def _migration_004_user_preferences(self, conn: sqlite3.Connection):
|
||||
"""Create user preferences table."""
|
||||
conn.execute("""
|
||||
CREATE TABLE user_preferences (
|
||||
key TEXT PRIMARY KEY,
|
||||
value TEXT NOT NULL, -- JSON value
|
||||
category TEXT NOT NULL,
|
||||
description TEXT,
|
||||
updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP
|
||||
)
|
||||
""")
|
||||
|
||||
def _migration_005_storage_locations(self, conn: sqlite3.Connection):
|
||||
"""Create storage locations table."""
|
||||
conn.execute("""
|
||||
CREATE TABLE storage_locations (
|
||||
location_id TEXT PRIMARY KEY,
|
||||
anime_id TEXT,
|
||||
path TEXT NOT NULL,
|
||||
location_type TEXT NOT NULL, -- primary, backup, cache
|
||||
is_active BOOLEAN DEFAULT TRUE,
|
||||
free_space_bytes INTEGER,
|
||||
total_space_bytes INTEGER,
|
||||
last_checked TIMESTAMP DEFAULT CURRENT_TIMESTAMP,
|
||||
FOREIGN KEY (anime_id) REFERENCES anime_metadata(anime_id)
|
||||
)
|
||||
""")
|
||||
|
||||
def _migration_006_indexes(self, conn: sqlite3.Connection):
|
||||
"""Add indexes for performance."""
|
||||
indexes = [
|
||||
"CREATE INDEX idx_anime_name ON anime_metadata(name)",
|
||||
"CREATE INDEX idx_anime_folder ON anime_metadata(folder)",
|
||||
"CREATE INDEX idx_anime_status ON anime_metadata(status)",
|
||||
"CREATE INDEX idx_episode_anime_id ON episode_metadata(anime_id)",
|
||||
"CREATE INDEX idx_episode_season_episode ON episode_metadata(season, episode)",
|
||||
"CREATE INDEX idx_episode_downloaded ON episode_metadata(is_downloaded)",
|
||||
"CREATE INDEX idx_download_status ON download_history(download_status)",
|
||||
"CREATE INDEX idx_download_date ON download_history(download_started)",
|
||||
"CREATE INDEX idx_storage_active ON storage_locations(is_active)",
|
||||
"CREATE INDEX idx_storage_type ON storage_locations(location_type)"
|
||||
]
|
||||
|
||||
for index_sql in indexes:
|
||||
try:
|
||||
conn.execute(index_sql)
|
||||
except sqlite3.OperationalError as e:
|
||||
if "already exists" not in str(e):
|
||||
raise
|
||||
|
||||
def execute_query(self, query: str, params: tuple = ()) -> List[sqlite3.Row]:
|
||||
"""Execute a SELECT query and return results."""
|
||||
with self.get_connection() as conn:
|
||||
cursor = conn.execute(query, params)
|
||||
return cursor.fetchall()
|
||||
|
||||
def execute_update(self, query: str, params: tuple = ()) -> int:
|
||||
"""Execute an UPDATE/INSERT/DELETE query and return affected rows."""
|
||||
with self.get_connection() as conn:
|
||||
cursor = conn.execute(query, params)
|
||||
conn.commit()
|
||||
return cursor.rowcount
|
||||
|
||||
|
||||
class AnimeRepository:
|
||||
"""Repository for anime data operations."""
|
||||
|
||||
def __init__(self, db_manager: DatabaseManager):
|
||||
self.db = db_manager
|
||||
self.logger = logging.getLogger(__name__)
|
||||
|
||||
def create_anime(self, metadata: AnimeMetadata) -> bool:
|
||||
"""Create new anime record."""
|
||||
try:
|
||||
query = """
|
||||
INSERT INTO anime_metadata (
|
||||
anime_id, name, folder, key, description, genres,
|
||||
release_year, status, total_episodes, poster_url,
|
||||
custom_metadata
|
||||
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
|
||||
"""
|
||||
|
||||
params = (
|
||||
metadata.anime_id,
|
||||
metadata.name,
|
||||
metadata.folder,
|
||||
metadata.key,
|
||||
metadata.description,
|
||||
json.dumps(metadata.genres),
|
||||
metadata.release_year,
|
||||
metadata.status,
|
||||
metadata.total_episodes,
|
||||
metadata.poster_url,
|
||||
json.dumps(metadata.custom_metadata)
|
||||
)
|
||||
|
||||
rows_affected = self.db.execute_update(query, params)
|
||||
return rows_affected > 0
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"Failed to create anime {metadata.name}: {e}")
|
||||
return False
|
||||
|
||||
def get_anime_by_folder(self, folder: str) -> Optional[AnimeMetadata]:
|
||||
"""Get anime by folder name."""
|
||||
try:
|
||||
query = """
|
||||
SELECT * FROM anime_metadata WHERE folder = ?
|
||||
"""
|
||||
|
||||
results = self.db.execute_query(query, (folder,))
|
||||
|
||||
if results:
|
||||
row = results[0]
|
||||
return self._row_to_anime_metadata(row)
|
||||
|
||||
return None
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"Failed to get anime by folder {folder}: {e}")
|
||||
return None
|
||||
|
||||
def get_all_anime(self, status_filter: Optional[str] = None) -> List[AnimeMetadata]:
|
||||
"""Get all anime, optionally filtered by status."""
|
||||
try:
|
||||
if status_filter:
|
||||
query = "SELECT * FROM anime_metadata WHERE status = ? ORDER BY name"
|
||||
params = (status_filter,)
|
||||
else:
|
||||
query = "SELECT * FROM anime_metadata ORDER BY name"
|
||||
params = ()
|
||||
|
||||
results = self.db.execute_query(query, params)
|
||||
|
||||
return [self._row_to_anime_metadata(row) for row in results]
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"Failed to get all anime: {e}")
|
||||
return []
|
||||
|
||||
def update_anime(self, metadata: AnimeMetadata) -> bool:
|
||||
"""Update anime metadata."""
|
||||
try:
|
||||
query = """
|
||||
UPDATE anime_metadata SET
|
||||
name = ?, key = ?, description = ?, genres = ?,
|
||||
release_year = ?, status = ?, total_episodes = ?,
|
||||
poster_url = ?, last_updated = CURRENT_TIMESTAMP,
|
||||
custom_metadata = ?
|
||||
WHERE anime_id = ?
|
||||
"""
|
||||
|
||||
params = (
|
||||
metadata.name,
|
||||
metadata.key,
|
||||
metadata.description,
|
||||
json.dumps(metadata.genres),
|
||||
metadata.release_year,
|
||||
metadata.status,
|
||||
metadata.total_episodes,
|
||||
metadata.poster_url,
|
||||
json.dumps(metadata.custom_metadata),
|
||||
metadata.anime_id
|
||||
)
|
||||
|
||||
rows_affected = self.db.execute_update(query, params)
|
||||
return rows_affected > 0
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"Failed to update anime {metadata.anime_id}: {e}")
|
||||
return False
|
||||
|
||||
def delete_anime(self, anime_id: str) -> bool:
|
||||
"""Delete anime and related data."""
|
||||
try:
|
||||
# Delete episodes first (foreign key constraint)
|
||||
self.db.execute_update("DELETE FROM episode_metadata WHERE anime_id = ?", (anime_id,))
|
||||
self.db.execute_update("DELETE FROM download_history WHERE anime_id = ?", (anime_id,))
|
||||
self.db.execute_update("DELETE FROM storage_locations WHERE anime_id = ?", (anime_id,))
|
||||
|
||||
# Delete anime
|
||||
rows_affected = self.db.execute_update("DELETE FROM anime_metadata WHERE anime_id = ?", (anime_id,))
|
||||
|
||||
return rows_affected > 0
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"Failed to delete anime {anime_id}: {e}")
|
||||
return False
|
||||
|
||||
def search_anime(self, search_term: str) -> List[AnimeMetadata]:
|
||||
"""Search anime by name or description."""
|
||||
try:
|
||||
query = """
|
||||
SELECT * FROM anime_metadata
|
||||
WHERE name LIKE ? OR description LIKE ?
|
||||
ORDER BY name
|
||||
"""
|
||||
|
||||
search_pattern = f"%{search_term}%"
|
||||
results = self.db.execute_query(query, (search_pattern, search_pattern))
|
||||
|
||||
return [self._row_to_anime_metadata(row) for row in results]
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"Failed to search anime: {e}")
|
||||
return []
|
||||
|
||||
def _row_to_anime_metadata(self, row: sqlite3.Row) -> AnimeMetadata:
|
||||
"""Convert database row to AnimeMetadata object."""
|
||||
return AnimeMetadata(
|
||||
anime_id=row['anime_id'],
|
||||
name=row['name'],
|
||||
folder=row['folder'],
|
||||
key=row['key'],
|
||||
description=row['description'],
|
||||
genres=json.loads(row['genres'] or '[]'),
|
||||
release_year=row['release_year'],
|
||||
status=row['status'],
|
||||
total_episodes=row['total_episodes'],
|
||||
poster_url=row['poster_url'],
|
||||
last_updated=datetime.fromisoformat(row['last_updated']) if row['last_updated'] else datetime.now(),
|
||||
created_at=datetime.fromisoformat(row['created_at']) if row['created_at'] else datetime.now(),
|
||||
custom_metadata=json.loads(row['custom_metadata'] or '{}')
|
||||
)
|
||||
|
||||
|
||||
class BackupManager:
|
||||
"""Manage database backups and restore operations."""
|
||||
|
||||
def __init__(self, db_manager: DatabaseManager, backup_dir: str = "./backups"):
|
||||
self.db = db_manager
|
||||
self.backup_dir = backup_dir
|
||||
self.logger = logging.getLogger(__name__)
|
||||
|
||||
# Create backup directory
|
||||
os.makedirs(backup_dir, exist_ok=True)
|
||||
|
||||
def create_full_backup(self, description: str = None) -> Optional[BackupInfo]:
|
||||
"""Create a full database backup."""
|
||||
try:
|
||||
backup_id = f"full_{datetime.now().strftime('%Y%m%d_%H%M%S')}"
|
||||
backup_filename = f"{backup_id}.db"
|
||||
backup_path = os.path.join(self.backup_dir, backup_filename)
|
||||
|
||||
# Copy database file
|
||||
shutil.copy2(self.db.db_path, backup_path)
|
||||
|
||||
# Calculate checksum
|
||||
checksum = self._calculate_file_checksum(backup_path)
|
||||
|
||||
# Get file size
|
||||
size_bytes = os.path.getsize(backup_path)
|
||||
|
||||
# Get table list
|
||||
with self.db.get_connection() as conn:
|
||||
cursor = conn.execute("SELECT name FROM sqlite_master WHERE type='table'")
|
||||
tables = [row[0] for row in cursor.fetchall()]
|
||||
|
||||
backup_info = BackupInfo(
|
||||
backup_id=backup_id,
|
||||
backup_path=backup_path,
|
||||
backup_type='full',
|
||||
created_at=datetime.now(),
|
||||
size_bytes=size_bytes,
|
||||
description=description or f"Full backup created on {datetime.now().strftime('%Y-%m-%d %H:%M')}",
|
||||
tables_included=tables,
|
||||
checksum=checksum
|
||||
)
|
||||
|
||||
# Save backup metadata
|
||||
self._save_backup_metadata(backup_info)
|
||||
|
||||
self.logger.info(f"Full backup created: {backup_id}")
|
||||
return backup_info
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"Failed to create full backup: {e}")
|
||||
return None
|
||||
|
||||
def create_metadata_backup(self, description: str = None) -> Optional[BackupInfo]:
|
||||
"""Create a metadata-only backup (excluding large binary data)."""
|
||||
try:
|
||||
backup_id = f"metadata_{datetime.now().strftime('%Y%m%d_%H%M%S')}"
|
||||
backup_filename = f"{backup_id}.json"
|
||||
backup_path = os.path.join(self.backup_dir, backup_filename)
|
||||
|
||||
# Export metadata as JSON
|
||||
metadata = self._export_metadata()
|
||||
|
||||
with open(backup_path, 'w', encoding='utf-8') as f:
|
||||
json.dump(metadata, f, indent=2, default=str)
|
||||
|
||||
# Calculate checksum
|
||||
checksum = self._calculate_file_checksum(backup_path)
|
||||
|
||||
# Get file size
|
||||
size_bytes = os.path.getsize(backup_path)
|
||||
|
||||
backup_info = BackupInfo(
|
||||
backup_id=backup_id,
|
||||
backup_path=backup_path,
|
||||
backup_type='metadata_only',
|
||||
created_at=datetime.now(),
|
||||
size_bytes=size_bytes,
|
||||
description=description or f"Metadata backup created on {datetime.now().strftime('%Y-%m-%d %H:%M')}",
|
||||
tables_included=['anime_metadata', 'episode_metadata', 'user_preferences'],
|
||||
checksum=checksum
|
||||
)
|
||||
|
||||
# Save backup metadata
|
||||
self._save_backup_metadata(backup_info)
|
||||
|
||||
self.logger.info(f"Metadata backup created: {backup_id}")
|
||||
return backup_info
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"Failed to create metadata backup: {e}")
|
||||
return None
|
||||
|
||||
def restore_backup(self, backup_id: str) -> bool:
|
||||
"""Restore from a backup."""
|
||||
try:
|
||||
backup_info = self._load_backup_metadata(backup_id)
|
||||
if not backup_info:
|
||||
self.logger.error(f"Backup not found: {backup_id}")
|
||||
return False
|
||||
|
||||
if not os.path.exists(backup_info.backup_path):
|
||||
self.logger.error(f"Backup file not found: {backup_info.backup_path}")
|
||||
return False
|
||||
|
||||
# Verify backup integrity
|
||||
if not self._verify_backup_integrity(backup_info):
|
||||
self.logger.error(f"Backup integrity check failed: {backup_id}")
|
||||
return False
|
||||
|
||||
# Create a backup of current database before restore
|
||||
current_backup = self.create_full_backup(f"Pre-restore backup before restoring {backup_id}")
|
||||
|
||||
if backup_info.backup_type == 'full':
|
||||
# Replace database file
|
||||
shutil.copy2(backup_info.backup_path, self.db.db_path)
|
||||
|
||||
elif backup_info.backup_type == 'metadata_only':
|
||||
# Restore metadata from JSON
|
||||
with open(backup_info.backup_path, 'r', encoding='utf-8') as f:
|
||||
metadata = json.load(f)
|
||||
|
||||
self._import_metadata(metadata)
|
||||
|
||||
self.logger.info(f"Backup restored successfully: {backup_id}")
|
||||
return True
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"Failed to restore backup {backup_id}: {e}")
|
||||
return False
|
||||
|
||||
def list_backups(self) -> List[BackupInfo]:
|
||||
"""List all available backups."""
|
||||
backups = []
|
||||
|
||||
try:
|
||||
# Look for backup metadata files
|
||||
metadata_pattern = os.path.join(self.backup_dir, "*.backup_info.json")
|
||||
|
||||
for metadata_file in glob.glob(metadata_pattern):
|
||||
try:
|
||||
with open(metadata_file, 'r') as f:
|
||||
backup_data = json.load(f)
|
||||
|
||||
backup_info = BackupInfo(
|
||||
backup_id=backup_data['backup_id'],
|
||||
backup_path=backup_data['backup_path'],
|
||||
backup_type=backup_data['backup_type'],
|
||||
created_at=datetime.fromisoformat(backup_data['created_at']),
|
||||
size_bytes=backup_data['size_bytes'],
|
||||
description=backup_data.get('description'),
|
||||
tables_included=backup_data.get('tables_included', []),
|
||||
checksum=backup_data.get('checksum')
|
||||
)
|
||||
|
||||
backups.append(backup_info)
|
||||
|
||||
except Exception as e:
|
||||
self.logger.warning(f"Failed to load backup metadata from {metadata_file}: {e}")
|
||||
|
||||
# Sort by creation date (newest first)
|
||||
backups.sort(key=lambda b: b.created_at, reverse=True)
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"Failed to list backups: {e}")
|
||||
|
||||
return backups
|
||||
|
||||
def cleanup_old_backups(self, keep_days: int = 30, keep_count: int = 10):
|
||||
"""Clean up old backup files."""
|
||||
try:
|
||||
backups = self.list_backups()
|
||||
cutoff_date = datetime.now() - timedelta(days=keep_days)
|
||||
|
||||
# Keep at least keep_count backups regardless of age
|
||||
backups_to_delete = []
|
||||
|
||||
for i, backup in enumerate(backups):
|
||||
if i >= keep_count and backup.created_at < cutoff_date:
|
||||
backups_to_delete.append(backup)
|
||||
|
||||
for backup in backups_to_delete:
|
||||
try:
|
||||
# Remove backup file
|
||||
if os.path.exists(backup.backup_path):
|
||||
os.remove(backup.backup_path)
|
||||
|
||||
# Remove metadata file
|
||||
metadata_file = f"{backup.backup_path}.backup_info.json"
|
||||
if os.path.exists(metadata_file):
|
||||
os.remove(metadata_file)
|
||||
|
||||
self.logger.info(f"Removed old backup: {backup.backup_id}")
|
||||
|
||||
except Exception as e:
|
||||
self.logger.warning(f"Failed to remove backup {backup.backup_id}: {e}")
|
||||
|
||||
if backups_to_delete:
|
||||
self.logger.info(f"Cleaned up {len(backups_to_delete)} old backups")
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"Failed to cleanup old backups: {e}")
|
||||
|
||||
def _export_metadata(self) -> Dict[str, Any]:
|
||||
"""Export database metadata to dictionary."""
|
||||
metadata = {
|
||||
'export_date': datetime.now().isoformat(),
|
||||
'schema_version': self.db.get_current_version(),
|
||||
'tables': {}
|
||||
}
|
||||
|
||||
# Export specific tables
|
||||
tables_to_export = ['anime_metadata', 'episode_metadata', 'user_preferences', 'storage_locations']
|
||||
|
||||
with self.db.get_connection() as conn:
|
||||
for table in tables_to_export:
|
||||
try:
|
||||
cursor = conn.execute(f"SELECT * FROM {table}")
|
||||
rows = cursor.fetchall()
|
||||
|
||||
# Convert rows to dictionaries
|
||||
metadata['tables'][table] = [dict(row) for row in rows]
|
||||
|
||||
except Exception as e:
|
||||
self.logger.warning(f"Failed to export table {table}: {e}")
|
||||
|
||||
return metadata
|
||||
|
||||
def _import_metadata(self, metadata: Dict[str, Any]):
|
||||
"""Import metadata from dictionary to database."""
|
||||
with self.db.get_connection() as conn:
|
||||
for table_name, rows in metadata.get('tables', {}).items():
|
||||
if not rows:
|
||||
continue
|
||||
|
||||
try:
|
||||
# Clear existing data (be careful!)
|
||||
conn.execute(f"DELETE FROM {table_name}")
|
||||
|
||||
# Insert new data
|
||||
if rows:
|
||||
columns = list(rows[0].keys())
|
||||
placeholders = ','.join(['?' for _ in columns])
|
||||
insert_sql = f"INSERT INTO {table_name} ({','.join(columns)}) VALUES ({placeholders})"
|
||||
|
||||
for row in rows:
|
||||
values = [row[col] for col in columns]
|
||||
conn.execute(insert_sql, values)
|
||||
|
||||
conn.commit()
|
||||
self.logger.info(f"Imported {len(rows)} rows to {table_name}")
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"Failed to import table {table_name}: {e}")
|
||||
conn.rollback()
|
||||
raise
|
||||
|
||||
def _calculate_file_checksum(self, file_path: str) -> str:
|
||||
"""Calculate SHA256 checksum of file."""
|
||||
hash_sha256 = hashlib.sha256()
|
||||
with open(file_path, 'rb') as f:
|
||||
for chunk in iter(lambda: f.read(4096), b""):
|
||||
hash_sha256.update(chunk)
|
||||
return hash_sha256.hexdigest()
|
||||
|
||||
def _verify_backup_integrity(self, backup_info: BackupInfo) -> bool:
|
||||
"""Verify backup file integrity using checksum."""
|
||||
if not backup_info.checksum:
|
||||
return True # No checksum to verify
|
||||
|
||||
current_checksum = self._calculate_file_checksum(backup_info.backup_path)
|
||||
return current_checksum == backup_info.checksum
|
||||
|
||||
def _save_backup_metadata(self, backup_info: BackupInfo):
|
||||
"""Save backup metadata to file."""
|
||||
metadata_file = f"{backup_info.backup_path}.backup_info.json"
|
||||
|
||||
metadata = {
|
||||
'backup_id': backup_info.backup_id,
|
||||
'backup_path': backup_info.backup_path,
|
||||
'backup_type': backup_info.backup_type,
|
||||
'created_at': backup_info.created_at.isoformat(),
|
||||
'size_bytes': backup_info.size_bytes,
|
||||
'description': backup_info.description,
|
||||
'tables_included': backup_info.tables_included,
|
||||
'checksum': backup_info.checksum
|
||||
}
|
||||
|
||||
with open(metadata_file, 'w') as f:
|
||||
json.dump(metadata, f, indent=2)
|
||||
|
||||
def _load_backup_metadata(self, backup_id: str) -> Optional[BackupInfo]:
|
||||
"""Load backup metadata from file."""
|
||||
# Look for metadata file
|
||||
metadata_pattern = os.path.join(self.backup_dir, f"{backup_id}.*.backup_info.json")
|
||||
metadata_files = glob.glob(metadata_pattern)
|
||||
|
||||
if not metadata_files:
|
||||
return None
|
||||
|
||||
try:
|
||||
with open(metadata_files[0], 'r') as f:
|
||||
backup_data = json.load(f)
|
||||
|
||||
return BackupInfo(
|
||||
backup_id=backup_data['backup_id'],
|
||||
backup_path=backup_data['backup_path'],
|
||||
backup_type=backup_data['backup_type'],
|
||||
created_at=datetime.fromisoformat(backup_data['created_at']),
|
||||
size_bytes=backup_data['size_bytes'],
|
||||
description=backup_data.get('description'),
|
||||
tables_included=backup_data.get('tables_included', []),
|
||||
checksum=backup_data.get('checksum')
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"Failed to load backup metadata for {backup_id}: {e}")
|
||||
return None
|
||||
|
||||
|
||||
class StorageManager:
|
||||
"""Manage storage locations and usage monitoring."""
|
||||
|
||||
def __init__(self, db_manager: DatabaseManager):
|
||||
self.db = db_manager
|
||||
self.logger = logging.getLogger(__name__)
|
||||
|
||||
def add_storage_location(self, path: str, location_type: str = 'primary', anime_id: str = None) -> str:
|
||||
"""Add a new storage location."""
|
||||
location_id = str(uuid.uuid4())
|
||||
|
||||
query = """
|
||||
INSERT INTO storage_locations
|
||||
(location_id, anime_id, path, location_type, is_active)
|
||||
VALUES (?, ?, ?, ?, ?)
|
||||
"""
|
||||
|
||||
self.db.execute_update(query, (location_id, anime_id, path, location_type, True))
|
||||
|
||||
# Update storage stats
|
||||
self.update_storage_stats(location_id)
|
||||
|
||||
return location_id
|
||||
|
||||
def update_storage_stats(self, location_id: str):
|
||||
"""Update storage statistics for a location."""
|
||||
try:
|
||||
# Get location path
|
||||
query = "SELECT path FROM storage_locations WHERE location_id = ?"
|
||||
results = self.db.execute_query(query, (location_id,))
|
||||
|
||||
if not results:
|
||||
return
|
||||
|
||||
path = results[0]['path']
|
||||
|
||||
if os.path.exists(path):
|
||||
# Get disk usage
|
||||
stat = shutil.disk_usage(path)
|
||||
|
||||
# Update database
|
||||
update_query = """
|
||||
UPDATE storage_locations
|
||||
SET free_space_bytes = ?, total_space_bytes = ?, last_checked = CURRENT_TIMESTAMP
|
||||
WHERE location_id = ?
|
||||
"""
|
||||
|
||||
self.db.execute_update(update_query, (stat.free, stat.total, location_id))
|
||||
|
||||
except Exception as e:
|
||||
self.logger.error(f"Failed to update storage stats for {location_id}: {e}")
|
||||
|
||||
def get_storage_summary(self) -> Dict[str, Any]:
|
||||
"""Get storage usage summary."""
|
||||
query = """
|
||||
SELECT
|
||||
location_type,
|
||||
COUNT(*) as location_count,
|
||||
SUM(free_space_bytes) as total_free,
|
||||
SUM(total_space_bytes) as total_space
|
||||
FROM storage_locations
|
||||
WHERE is_active = 1
|
||||
GROUP BY location_type
|
||||
"""
|
||||
|
||||
results = self.db.execute_query(query)
|
||||
|
||||
summary = {}
|
||||
for row in results:
|
||||
summary[row['location_type']] = {
|
||||
'location_count': row['location_count'],
|
||||
'total_free_gb': (row['total_free'] or 0) / (1024**3),
|
||||
'total_space_gb': (row['total_space'] or 0) / (1024**3),
|
||||
'usage_percent': ((row['total_space'] - row['total_free']) / row['total_space'] * 100) if row['total_space'] else 0
|
||||
}
|
||||
|
||||
return summary
|
||||
|
||||
|
||||
# Global instances
|
||||
database_manager = DatabaseManager()
|
||||
anime_repository = AnimeRepository(database_manager)
|
||||
backup_manager = BackupManager(database_manager)
|
||||
storage_manager = StorageManager(database_manager)
|
||||
|
||||
|
||||
def init_database_system():
|
||||
"""Initialize database system."""
|
||||
# Database is initialized on creation
|
||||
pass
|
||||
|
||||
|
||||
def cleanup_database_system():
|
||||
"""Clean up database resources."""
|
||||
# No specific cleanup needed for SQLite
|
||||
pass
|
||||
|
||||
|
||||
# Export main components
|
||||
__all__ = [
|
||||
'DatabaseManager',
|
||||
'AnimeRepository',
|
||||
'BackupManager',
|
||||
'StorageManager',
|
||||
'AnimeMetadata',
|
||||
'EpisodeMetadata',
|
||||
'BackupInfo',
|
||||
'database_manager',
|
||||
'anime_repository',
|
||||
'backup_manager',
|
||||
'storage_manager',
|
||||
'init_database_system',
|
||||
'cleanup_database_system'
|
||||
]
|
||||
@ -1,40 +1,40 @@
|
||||
import logging
|
||||
|
||||
console_handler = None
|
||||
error_logger = None
|
||||
noKeyFound_logger = None
|
||||
noGerFound_logger = None
|
||||
def setupLogger():
|
||||
global console_handler, error_logger, noKeyFound_logger, noGerFound_logger
|
||||
# Configure logging
|
||||
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(funcName)s - %(message)s')
|
||||
if (console_handler is None):
|
||||
console_handler = logging.StreamHandler()
|
||||
console_handler.setLevel(logging.INFO)
|
||||
console_handler.setFormatter(logging.Formatter(
|
||||
"%(asctime)s - %(levelname)s - %(funcName)s - %(message)s")
|
||||
)
|
||||
logging.getLogger().addHandler(console_handler)
|
||||
logging.getLogger("urllib3.connectionpool").setLevel(logging.INFO)
|
||||
logging.getLogger('charset_normalizer').setLevel(logging.INFO)
|
||||
logging.getLogger().setLevel(logging.INFO)
|
||||
|
||||
if (error_logger is None):
|
||||
error_logger = logging.getLogger("ErrorLog")
|
||||
error_handler = logging.FileHandler("../errors.log")
|
||||
error_handler.setLevel(logging.ERROR)
|
||||
error_logger.addHandler(error_handler)
|
||||
|
||||
if (noKeyFound_logger is None):
|
||||
noKeyFound_logger = logging.getLogger("NoKeyFound")
|
||||
noKeyFound_handler = logging.FileHandler("../NoKeyFound.log")
|
||||
noKeyFound_handler.setLevel(logging.ERROR)
|
||||
noKeyFound_logger.addHandler(noKeyFound_handler)
|
||||
|
||||
if (noGerFound_logger is None):
|
||||
noGerFound_logger = logging.getLogger("noGerFound")
|
||||
noGerFound_handler = logging.FileHandler("../noGerFound.log")
|
||||
noGerFound_handler.setLevel(logging.ERROR)
|
||||
noGerFound_logger.addHandler(noGerFound_handler)
|
||||
|
||||
import logging
|
||||
|
||||
console_handler = None
|
||||
error_logger = None
|
||||
noKeyFound_logger = None
|
||||
noGerFound_logger = None
|
||||
def setupLogger():
|
||||
global console_handler, error_logger, noKeyFound_logger, noGerFound_logger
|
||||
# Configure logging
|
||||
logging.basicConfig(level=logging.INFO, format='%(asctime)s - %(levelname)s - %(funcName)s - %(message)s')
|
||||
if (console_handler is None):
|
||||
console_handler = logging.StreamHandler()
|
||||
console_handler.setLevel(logging.INFO)
|
||||
console_handler.setFormatter(logging.Formatter(
|
||||
"%(asctime)s - %(levelname)s - %(funcName)s - %(message)s")
|
||||
)
|
||||
logging.getLogger().addHandler(console_handler)
|
||||
logging.getLogger("urllib3.connectionpool").setLevel(logging.INFO)
|
||||
logging.getLogger('charset_normalizer').setLevel(logging.INFO)
|
||||
logging.getLogger().setLevel(logging.INFO)
|
||||
|
||||
if (error_logger is None):
|
||||
error_logger = logging.getLogger("ErrorLog")
|
||||
error_handler = logging.FileHandler("../errors.log")
|
||||
error_handler.setLevel(logging.ERROR)
|
||||
error_logger.addHandler(error_handler)
|
||||
|
||||
if (noKeyFound_logger is None):
|
||||
noKeyFound_logger = logging.getLogger("NoKeyFound")
|
||||
noKeyFound_handler = logging.FileHandler("../NoKeyFound.log")
|
||||
noKeyFound_handler.setLevel(logging.ERROR)
|
||||
noKeyFound_logger.addHandler(noKeyFound_handler)
|
||||
|
||||
if (noGerFound_logger is None):
|
||||
noGerFound_logger = logging.getLogger("noGerFound")
|
||||
noGerFound_handler = logging.FileHandler("../noGerFound.log")
|
||||
noGerFound_handler.setLevel(logging.ERROR)
|
||||
noGerFound_logger.addHandler(noGerFound_handler)
|
||||
|
||||
setupLogger()
|
||||
@ -1,6 +0,0 @@
"""
Repository package for data access layer.

This package contains repository implementations following the Repository pattern
for clean separation of data access logic from business logic.
"""
@ -1,24 +0,0 @@
# AniWorld FastAPI Server Configuration

# Authentication Configuration
JWT_SECRET_KEY=your-super-secure-jwt-secret-key-change-this-in-production
PASSWORD_SALT=c3149a46648b4394410b415ea654c31731b988ee59fc91b8fb8366a0b32ef0c1
MASTER_PASSWORD=admin123
# MASTER_PASSWORD_HASH=bb202031f646922388567de96a784074272efbbba9eb5d2259e23af04686d2a5
SESSION_TIMEOUT_HOURS=24

# Application Configuration
ANIME_DIRECTORY=\\\\sshfs.r\\ubuntu@192.168.178.43\\media\\serien\\Serien
LOG_LEVEL=INFO

# Database Configuration (if needed)
DATABASE_URL=sqlite:///./aniworld.db

# Security Configuration
CORS_ORIGINS=*
API_RATE_LIMIT=100

# Provider Configuration
DEFAULT_PROVIDER=aniworld.to
PROVIDER_TIMEOUT=30
RETRY_ATTEMPTS=3
@ -1,257 +0,0 @@
|
||||
# AniWorld FastAPI Server
|
||||
|
||||
A comprehensive FastAPI-based server implementation for AniWorld following the project instructions.
|
||||
|
||||
## 🚀 Features
|
||||
|
||||
### ✅ Authentication System (Completed)
|
||||
- **Simple Master Password Authentication**: Single master password for the entire application
|
||||
- **JWT Token Management**: Stateless authentication using JWT tokens
|
||||
- **Environment Configuration**: Secure password hash stored in environment variables
|
||||
- **Session Management**: Configurable token expiry (default: 24 hours)
|
||||
- **Security Features**: SHA-256 password hashing with salt
|
||||
|
||||
### ✅ API Endpoints (Implemented)
|
||||
|
||||
#### Authentication Endpoints
|
||||
- `POST /auth/login` - Login with master password and receive JWT token
|
||||
- `GET /auth/verify` - Verify JWT token validity (protected)
|
||||
- `POST /auth/logout` - Logout endpoint (stateless - client removes token)
|
||||
|
||||
#### System Endpoints
|
||||
- `GET /` - Root endpoint with API information
|
||||
- `GET /health` - Health check endpoint
|
||||
- `GET /api/system/config` - System configuration (protected)
|
||||
- `GET /api/system/database/health` - Database health check (protected)
|
||||
|
||||
#### Anime & Episode Endpoints (Protected)
|
||||
- `GET /api/anime/search` - Search anime by title with pagination
|
||||
- `GET /api/anime/{anime_id}` - Get specific anime details
|
||||
- `GET /api/anime/{anime_id}/episodes` - Get all episodes for anime
|
||||
- `GET /api/episodes/{episode_id}` - Get specific episode details
|
||||
|
||||
### 🔧 Technical Features
|
||||
- **FastAPI Framework**: Modern, fast (high-performance) web framework
|
||||
- **OpenAPI Documentation**: Automatic API documentation at `/docs`
|
||||
- **CORS Support**: Configurable cross-origin resource sharing
|
||||
- **Request Validation**: Pydantic models for request/response validation
|
||||
- **Error Handling**: Centralized error handling with proper HTTP status codes
|
||||
- **Logging**: Comprehensive logging system with file and console output
|
||||
- **Environment Configuration**: Secure configuration via environment variables
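
As a rough illustration of the Pydantic validation mentioned above, the login request and response from the API usage section could be modeled like this (field names follow the example JSON; the server's actual model names may differ):

```python
from datetime import datetime
from typing import Optional

from pydantic import BaseModel


class LoginRequest(BaseModel):
    """Body of POST /auth/login."""
    password: str


class LoginResponse(BaseModel):
    """Response shape shown in the authentication flow example."""
    success: bool
    message: str
    token: Optional[str] = None
    expires_at: Optional[datetime] = None
```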
|
||||
|
||||
## 🛠️ Installation & Setup
|
||||
|
||||
### Prerequisites
|
||||
- Python 3.11+ (AniWorld conda environment)
|
||||
- Conda package manager
|
||||
|
||||
### 1. Activate AniWorld Environment
|
||||
```bash
|
||||
conda activate AniWorld
|
||||
```
|
||||
|
||||
### 2. Install Dependencies
|
||||
```bash
|
||||
cd src/server
|
||||
pip install -r requirements_fastapi.txt
|
||||
```
|
||||
|
||||
### 3. Configure Environment
|
||||
Create or update `.env` file:
|
||||
```env
|
||||
# Authentication
|
||||
JWT_SECRET_KEY=your-super-secure-jwt-secret-key
|
||||
PASSWORD_SALT=your-secure-salt
|
||||
MASTER_PASSWORD=admin123
|
||||
SESSION_TIMEOUT_HOURS=24
|
||||
|
||||
# Application
|
||||
ANIME_DIRECTORY=your-anime-directory-path
|
||||
LOG_LEVEL=INFO
|
||||
|
||||
# Optional
|
||||
DATABASE_URL=sqlite:///./aniworld.db
|
||||
CORS_ORIGINS=*
|
||||
```
|
||||
|
||||
### 4. Start the Server
|
||||
|
||||
#### Option 1: Direct Python Execution
|
||||
```bash
|
||||
cd src/server
|
||||
C:\Users\lukas\anaconda3\envs\AniWorld\python.exe fastapi_app.py
|
||||
```
|
||||
|
||||
#### Option 2: Using Batch Script (Windows)
|
||||
```cmd
|
||||
cd src/server
|
||||
run_and_test.bat
|
||||
```
|
||||
|
||||
#### Option 3: Using Shell Script (Linux/Mac)
|
||||
```bash
|
||||
cd src/server
|
||||
chmod +x start_fastapi_server.sh
|
||||
./start_fastapi_server.sh
|
||||
```
|
||||
|
||||
## 📖 API Usage
|
||||
|
||||
### 1. Access Documentation
|
||||
Visit: http://localhost:8000/docs
|
||||
|
||||
### 2. Authentication Flow
|
||||
|
||||
#### Step 1: Login
|
||||
```bash
|
||||
curl -X POST "http://localhost:8000/auth/login" \
|
||||
-H "Content-Type: application/json" \
|
||||
-d '{"password": "admin123"}'
|
||||
```
|
||||
|
||||
Response:
|
||||
```json
|
||||
{
|
||||
"success": true,
|
||||
"message": "Authentication successful",
|
||||
"token": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9...",
|
||||
"expires_at": "2025-10-06T18:19:24.710065"
|
||||
}
|
||||
```
|
||||
|
||||
#### Step 2: Use Token for Protected Endpoints
|
||||
```bash
|
||||
curl -X GET "http://localhost:8000/api/anime/search?query=naruto&limit=5" \
|
||||
-H "Authorization: Bearer YOUR_JWT_TOKEN"
|
||||
```
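
The same two steps from Python with `requests` (endpoints and credentials taken from the examples above; error handling kept minimal):

```python
import requests

BASE_URL = "http://localhost:8000"

# Step 1: log in with the master password and read the JWT from the response.
login = requests.post(f"{BASE_URL}/auth/login", json={"password": "admin123"}, timeout=10)
login.raise_for_status()
token = login.json()["token"]

# Step 2: call a protected endpoint with the Bearer token.
response = requests.get(
    f"{BASE_URL}/api/anime/search",
    params={"query": "naruto", "limit": 5},
    headers={"Authorization": f"Bearer {token}"},
    timeout=10,
)
print(response.json())
```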
|
||||
|
||||
### 3. Example API Calls
|
||||
|
||||
#### Health Check
|
||||
```bash
|
||||
curl "http://localhost:8000/health"
|
||||
```
|
||||
|
||||
#### Search Anime
|
||||
```bash
|
||||
curl -H "Authorization: Bearer YOUR_TOKEN" \
|
||||
"http://localhost:8000/api/anime/search?query=naruto&limit=10"
|
||||
```
|
||||
|
||||
#### Get Anime Details
|
||||
```bash
|
||||
curl -H "Authorization: Bearer YOUR_TOKEN" \
|
||||
"http://localhost:8000/api/anime/anime_123"
|
||||
```
|
||||
|
||||
## 🧪 Testing
|
||||
|
||||
### Automated Testing
|
||||
```bash
|
||||
cd src/server
|
||||
C:\Users\lukas\anaconda3\envs\AniWorld\python.exe test_fastapi.py
|
||||
```
|
||||
|
||||
### Manual Testing
|
||||
1. Start the server
|
||||
2. Visit http://localhost:8000/docs
|
||||
3. Use the interactive API documentation
|
||||
4. Test authentication with password: `admin123`
|
||||
|
||||
## 📁 Project Structure
|
||||
|
||||
```
|
||||
src/server/
|
||||
├── fastapi_app.py # Main FastAPI application
|
||||
├── .env # Environment configuration
|
||||
├── requirements_fastapi.txt # Python dependencies
|
||||
├── test_fastapi.py # Test script
|
||||
├── start_fastapi_server.bat # Windows startup script
|
||||
├── start_fastapi_server.sh # Linux/Mac startup script
|
||||
├── run_and_test.bat # Windows test runner
|
||||
└── logs/ # Log files
|
||||
```
|
||||
|
||||
## 🔐 Security
|
||||
|
||||
### Authentication
|
||||
- Master password authentication (no user registration required)
|
||||
- JWT tokens with configurable expiry
|
||||
- Secure password hashing (SHA-256 + salt)
|
||||
- Environment-based secret management
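
A sketch of how stateless tokens with a configurable expiry can be issued and checked. It uses the PyJWT package as an assumption; the server may rely on a different JWT library, so treat this as an illustration of the idea rather than the actual implementation:

```python
from datetime import datetime, timedelta, timezone

import jwt  # PyJWT -- an assumption; the server may use another JWT library

SECRET_KEY = "value-of-JWT_SECRET_KEY"
SESSION_TIMEOUT_HOURS = 24


def issue_token() -> str:
    """Create a token that expires after the configured session timeout."""
    payload = {"exp": datetime.now(timezone.utc) + timedelta(hours=SESSION_TIMEOUT_HOURS)}
    return jwt.encode(payload, SECRET_KEY, algorithm="HS256")


def verify_token(token: str) -> bool:
    """Return True when the token is valid and has not expired."""
    try:
        jwt.decode(token, SECRET_KEY, algorithms=["HS256"])
        return True
    except jwt.InvalidTokenError:
        return False
```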
|
||||
|
||||
### API Security
|
||||
- All anime/episode endpoints require authentication
|
||||
- CORS protection
|
||||
- Input validation using Pydantic
|
||||
- Error handling without sensitive data exposure
|
||||
|
||||
## 🔧 Configuration
|
||||
|
||||
### Environment Variables
|
||||
- `JWT_SECRET_KEY`: Secret key for JWT token signing
|
||||
- `PASSWORD_SALT`: Salt for password hashing
|
||||
- `MASTER_PASSWORD`: Master password (development only)
|
||||
- `MASTER_PASSWORD_HASH`: Hashed master password (production)
|
||||
- `SESSION_TIMEOUT_HOURS`: JWT token expiry time
|
||||
- `ANIME_DIRECTORY`: Path to anime files
|
||||
- `LOG_LEVEL`: Logging level (DEBUG, INFO, WARNING, ERROR)
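
A minimal sketch of reading these variables at startup (assumes `python-dotenv` for loading the `.env` file; the variable names match the list above, the defaults are illustrative):

```python
import os

from dotenv import load_dotenv  # python-dotenv -- assumed here for .env loading

load_dotenv()

JWT_SECRET_KEY = os.getenv("JWT_SECRET_KEY", "change-me")
PASSWORD_SALT = os.getenv("PASSWORD_SALT", "")
SESSION_TIMEOUT_HOURS = int(os.getenv("SESSION_TIMEOUT_HOURS", "24"))
ANIME_DIRECTORY = os.getenv("ANIME_DIRECTORY", "")
LOG_LEVEL = os.getenv("LOG_LEVEL", "INFO")
```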
|
||||
|
||||
### Production Configuration
|
||||
1. Set `MASTER_PASSWORD_HASH` instead of `MASTER_PASSWORD`
|
||||
2. Use a strong `JWT_SECRET_KEY`
|
||||
3. Set appropriate `CORS_ORIGINS`
|
||||
4. Configure proper logging levels
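
For step 1, the hash and secrets can be generated with a short script. This assumes the salted SHA-256 scheme shown in the Security section; double-check it against the server's own password code before relying on it:

```python
import hashlib
import secrets

salt = secrets.token_hex(32)
password = "choose-a-strong-master-password"  # replace before running

print("PASSWORD_SALT=" + salt)
print("MASTER_PASSWORD_HASH=" + hashlib.sha256((password + salt).encode()).hexdigest())
print("JWT_SECRET_KEY=" + secrets.token_urlsafe(64))
```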
|
||||
|
||||
## 📊 API Status
|
||||
|
||||
| Endpoint Category | Status | Coverage |
|
||||
|------------------|--------|----------|
|
||||
| Authentication | ✅ Complete | 100% |
|
||||
| Health/System | ✅ Complete | 100% |
|
||||
| Anime Search | ✅ Implemented | Mock data |
|
||||
| Episode Management | ✅ Implemented | Mock data |
|
||||
| Database Integration | 🔄 Placeholder | Todo |
|
||||
| Real Data Provider | 🔄 Placeholder | Todo |
|
||||
|
||||
## 🚧 Future Enhancements
|
||||
|
||||
### High Priority
|
||||
- [ ] Connect to actual anime database/provider
|
||||
- [ ] Implement real anime search functionality
|
||||
- [ ] Add episode streaming capabilities
|
||||
- [ ] Database connection pooling
|
||||
|
||||
### Medium Priority
|
||||
- [ ] Redis caching layer
|
||||
- [ ] Rate limiting middleware
|
||||
- [ ] Background task processing
|
||||
- [ ] WebSocket support
|
||||
|
||||
### Low Priority
|
||||
- [ ] Advanced search filters
|
||||
- [ ] User preferences (multi-user support)
|
||||
- [ ] Download progress tracking
|
||||
- [ ] Statistics and analytics
|
||||
|
||||
## 📝 License
|
||||
|
||||
This project follows the AniWorld project licensing terms.
|
||||
|
||||
## 🤝 Contributing
|
||||
|
||||
1. Follow the coding standards in `.github/copilot-instructions.md`
|
||||
2. Use type hints and Pydantic models
|
||||
3. Add comprehensive logging
|
||||
4. Include tests for new features
|
||||
5. Update documentation
|
||||
|
||||
## 📞 Support
|
||||
|
||||
- API Documentation: http://localhost:8000/docs
|
||||
- Health Check: http://localhost:8000/health
|
||||
- Logs: Check `logs/aniworld.log` for detailed information
|
||||
|
||||
---
|
||||
|
||||
**Note**: This FastAPI implementation provides a solid foundation following the project instructions. The authentication system is complete and production-ready, while anime/episode endpoints currently return mock data pending integration with the actual data providers.
|
||||
@ -1,573 +0,0 @@
|
||||
import os
|
||||
import json
|
||||
import hashlib
|
||||
import secrets
|
||||
from typing import Dict, Any, Optional
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
|
||||
class Config:
|
||||
"""Configuration management for AniWorld Flask app."""
|
||||
|
||||
def __init__(self, config_file: str = "data/config.json"):
|
||||
self.config_file = config_file
|
||||
self.default_config = {
|
||||
"security": {
|
||||
"master_password_hash": None,
|
||||
"salt": None,
|
||||
"session_timeout_hours": 24,
|
||||
"max_failed_attempts": 5,
|
||||
"lockout_duration_minutes": 30
|
||||
},
|
||||
"anime": {
|
||||
"directory": os.getenv("ANIME_DIRECTORY", "\\\\sshfs.r\\ubuntu@192.168.178.43\\media\\serien\\Serien"),
|
||||
"download_threads": 3,
|
||||
"download_speed_limit": None,
|
||||
"auto_rescan_time": "03:00",
|
||||
"auto_download_after_rescan": False
|
||||
},
|
||||
"logging": {
|
||||
"level": "INFO",
|
||||
"enable_console_logging": True,
|
||||
"enable_console_progress": False,
|
||||
"enable_fail2ban_logging": True,
|
||||
"log_file": "./logs/aniworld.log",
|
||||
"max_log_size_mb": 10,
|
||||
"log_backup_count": 5
|
||||
},
|
||||
"providers": {
|
||||
"default_provider": "aniworld.to",
|
||||
"preferred_language": "German Dub",
|
||||
"fallback_providers": ["aniworld.to"],
|
||||
"provider_timeout": 30,
|
||||
"retry_attempts": 3,
|
||||
"provider_settings": {
|
||||
"aniworld.to": {
|
||||
"enabled": True,
|
||||
"priority": 1,
|
||||
"quality_preference": "720p"
|
||||
}
|
||||
}
|
||||
},
|
||||
"advanced": {
|
||||
"max_concurrent_downloads": 3,
|
||||
"download_buffer_size": 8192,
|
||||
"connection_timeout": 30,
|
||||
"read_timeout": 300,
|
||||
"enable_debug_mode": False,
|
||||
"cache_duration_minutes": 60
|
||||
}
|
||||
}
|
||||
self._config = self._load_config()
|
||||
|
||||
def _load_config(self) -> Dict[str, Any]:
|
||||
"""Load configuration from file or create default."""
|
||||
try:
|
||||
if os.path.exists(self.config_file):
|
||||
with open(self.config_file, 'r', encoding='utf-8') as f:
|
||||
config = json.load(f)
|
||||
# Merge with defaults to ensure all keys exist
|
||||
return self._merge_configs(self.default_config, config)
|
||||
else:
|
||||
return self.default_config.copy()
|
||||
except Exception as e:
|
||||
print(f"Error loading config: {e}")
|
||||
return self.default_config.copy()
|
||||
|
||||
def _merge_configs(self, default: Dict[str, Any], user: Dict[str, Any]) -> Dict[str, Any]:
|
||||
"""Recursively merge user config with defaults."""
|
||||
result = default.copy()
|
||||
for key, value in user.items():
|
||||
if key in result and isinstance(result[key], dict) and isinstance(value, dict):
|
||||
result[key] = self._merge_configs(result[key], value)
|
||||
else:
|
||||
result[key] = value
|
||||
return result
|
||||
|
||||
def save_config(self) -> bool:
|
||||
"""Save current configuration to file."""
|
||||
try:
|
||||
with open(self.config_file, 'w', encoding='utf-8') as f:
|
||||
json.dump(self._config, f, indent=4)
|
||||
return True
|
||||
except Exception as e:
|
||||
print(f"Error saving config: {e}")
|
||||
return False
|
||||
|
||||
def get(self, key_path: str, default: Any = None) -> Any:
|
||||
"""Get config value using dot notation (e.g., 'security.master_password_hash')."""
|
||||
keys = key_path.split('.')
|
||||
value = self._config
|
||||
|
||||
for key in keys:
|
||||
if isinstance(value, dict) and key in value:
|
||||
value = value[key]
|
||||
else:
|
||||
return default
|
||||
|
||||
return value
|
||||
|
||||
def set(self, key_path: str, value: Any) -> bool:
|
||||
"""Set config value using dot notation."""
|
||||
keys = key_path.split('.')
|
||||
config = self._config
|
||||
|
||||
# Navigate to parent
|
||||
for key in keys[:-1]:
|
||||
if key not in config:
|
||||
config[key] = {}
|
||||
config = config[key]
|
||||
|
||||
# Set final value
|
||||
config[keys[-1]] = value
|
||||
return self.save_config()
|
||||
|
||||
def set_master_password(self, password: str) -> bool:
|
||||
"""Set master password with secure hashing."""
|
||||
try:
|
||||
# Generate salt
|
||||
salt = secrets.token_hex(32)
|
||||
|
||||
# Hash password with salt
|
||||
password_hash = hashlib.sha256((password + salt).encode()).hexdigest()
|
||||
|
||||
# Save to config
|
||||
self.set("security.salt", salt)
|
||||
self.set("security.master_password_hash", password_hash)
|
||||
|
||||
return True
|
||||
except Exception as e:
|
||||
print(f"Error setting master password: {e}")
|
||||
return False
|
||||
|
||||
def verify_password(self, password: str) -> bool:
|
||||
"""Verify password against stored hash."""
|
||||
try:
|
||||
stored_hash = self.get("security.master_password_hash")
|
||||
salt = self.get("security.salt")
|
||||
|
||||
if not stored_hash or not salt:
|
||||
return False
|
||||
|
||||
# Hash provided password with stored salt
|
||||
password_hash = hashlib.sha256((password + salt).encode()).hexdigest()
|
||||
|
||||
return password_hash == stored_hash
|
||||
except Exception as e:
|
||||
print(f"Error verifying password: {e}")
|
||||
return False
|
||||
|
||||
def has_master_password(self) -> bool:
|
||||
"""Check if master password is configured."""
|
||||
return bool(self.get("security.master_password_hash"))
|
||||
|
||||
def backup_config(self, backup_path: Optional[str] = None) -> str:
|
||||
"""Create backup of current configuration."""
|
||||
if not backup_path:
|
||||
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
|
||||
backup_path = f"config_backup_{timestamp}.json"
|
||||
|
||||
try:
|
||||
with open(backup_path, 'w', encoding='utf-8') as f:
|
||||
json.dump(self._config, f, indent=4)
|
||||
return backup_path
|
||||
except Exception as e:
|
||||
raise Exception(f"Failed to create backup: {e}")
|
||||
|
||||
def restore_config(self, backup_path: str) -> bool:
|
||||
"""Restore configuration from backup."""
|
||||
try:
|
||||
with open(backup_path, 'r', encoding='utf-8') as f:
|
||||
config = json.load(f)
|
||||
|
||||
# Validate config before restoring
|
||||
validation_result = self.validate_config(config)
|
||||
if not validation_result['valid']:
|
||||
raise Exception(f"Invalid configuration: {validation_result['errors']}")
|
||||
|
||||
self._config = self._merge_configs(self.default_config, config)
|
||||
return self.save_config()
|
||||
except Exception as e:
|
||||
print(f"Error restoring config: {e}")
|
||||
return False
|
||||
|
||||
def validate_config(self, config: Dict[str, Any] = None) -> Dict[str, Any]:
|
||||
"""Validate configuration structure and values."""
|
||||
if config is None:
|
||||
config = self._config
|
||||
|
||||
errors = []
|
||||
warnings = []
|
||||
|
||||
# Validate security settings
|
||||
security = config.get('security', {})
|
||||
if security.get('session_timeout_hours', 0) < 1 or security.get('session_timeout_hours', 0) > 168:
|
||||
errors.append("Session timeout must be between 1 and 168 hours")
|
||||
|
||||
if security.get('max_failed_attempts', 0) < 1 or security.get('max_failed_attempts', 0) > 50:
|
||||
errors.append("Max failed attempts must be between 1 and 50")
|
||||
|
||||
if security.get('lockout_duration_minutes', 0) < 1 or security.get('lockout_duration_minutes', 0) > 1440:
|
||||
errors.append("Lockout duration must be between 1 and 1440 minutes")
|
||||
|
||||
# Validate anime settings
|
||||
anime = config.get('anime', {})
|
||||
directory = anime.get('directory', '')
|
||||
if directory and not os.path.exists(directory) and not directory.startswith('\\\\'):
|
||||
warnings.append(f"Anime directory does not exist: {directory}")
|
||||
|
||||
download_threads = anime.get('download_threads', 1)
|
||||
if download_threads < 1 or download_threads > 10:
|
||||
errors.append("Download threads must be between 1 and 10")
|
||||
|
||||
# Validate logging settings
|
||||
logging_config = config.get('logging', {})
|
||||
log_level = logging_config.get('level', 'INFO')
|
||||
if log_level not in ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']:
|
||||
errors.append(f"Invalid log level: {log_level}")
|
||||
|
||||
# Validate provider settings
|
||||
providers = config.get('providers', {})
|
||||
provider_timeout = providers.get('provider_timeout', 30)
|
||||
if provider_timeout < 5 or provider_timeout > 300:
|
||||
errors.append("Provider timeout must be between 5 and 300 seconds")
|
||||
|
||||
retry_attempts = providers.get('retry_attempts', 3)
|
||||
if retry_attempts < 0 or retry_attempts > 10:
|
||||
errors.append("Retry attempts must be between 0 and 10")
|
||||
|
||||
# Validate advanced settings
|
||||
advanced = config.get('advanced', {})
|
||||
max_concurrent = advanced.get('max_concurrent_downloads', 3)
|
||||
if max_concurrent < 1 or max_concurrent > 20:
|
||||
errors.append("Max concurrent downloads must be between 1 and 20")
|
||||
|
||||
connection_timeout = advanced.get('connection_timeout', 30)
|
||||
if connection_timeout < 5 or connection_timeout > 300:
|
||||
errors.append("Connection timeout must be between 5 and 300 seconds")
|
||||
|
||||
return {
|
||||
'valid': len(errors) == 0,
|
||||
'errors': errors,
|
||||
'warnings': warnings
|
||||
}
|
||||
|
||||
    def get_config_schema(self) -> Dict[str, Any]:
        """Get configuration schema for UI generation."""
        return {
            "security": {
                "title": "Security Settings",
                "fields": {
                    "session_timeout_hours": {
                        "type": "number",
                        "title": "Session Timeout (hours)",
                        "description": "How long sessions remain active",
                        "min": 1,
                        "max": 168,
                        "default": 24
                    },
                    "max_failed_attempts": {
                        "type": "number",
                        "title": "Max Failed Login Attempts",
                        "description": "Number of failed attempts before lockout",
                        "min": 1,
                        "max": 50,
                        "default": 5
                    },
                    "lockout_duration_minutes": {
                        "type": "number",
                        "title": "Lockout Duration (minutes)",
                        "description": "How long to lock account after failed attempts",
                        "min": 1,
                        "max": 1440,
                        "default": 30
                    }
                }
            },
            "anime": {
                "title": "Anime Settings",
                "fields": {
                    "directory": {
                        "type": "text",
                        "title": "Anime Directory",
                        "description": "Base directory for anime storage",
                        "required": True
                    },
                    "download_threads": {
                        "type": "number",
                        "title": "Download Threads",
                        "description": "Number of concurrent download threads",
                        "min": 1,
                        "max": 10,
                        "default": 3
                    },
                    "download_speed_limit": {
                        "type": "number",
                        "title": "Speed Limit (KB/s)",
                        "description": "Download speed limit (0 = unlimited)",
                        "min": 0,
                        "max": 102400,
                        "default": 0
                    }
                }
            },
            "providers": {
                "title": "Provider Settings",
                "fields": {
                    "default_provider": {
                        "type": "select",
                        "title": "Default Provider",
                        "description": "Primary anime provider",
                        "options": ["aniworld.to"],
                        "default": "aniworld.to"
                    },
                    "preferred_language": {
                        "type": "select",
                        "title": "Preferred Language",
                        "description": "Default language preference",
                        "options": ["German Dub", "German Sub", "English Dub", "English Sub", "Japanese"],
                        "default": "German Dub"
                    },
                    "provider_timeout": {
                        "type": "number",
                        "title": "Provider Timeout (seconds)",
                        "description": "Timeout for provider requests",
                        "min": 5,
                        "max": 300,
                        "default": 30
                    },
                    "retry_attempts": {
                        "type": "number",
                        "title": "Retry Attempts",
                        "description": "Number of retry attempts for failed requests",
                        "min": 0,
                        "max": 10,
                        "default": 3
                    }
                }
            },
            "advanced": {
                "title": "Advanced Settings",
                "fields": {
                    "max_concurrent_downloads": {
                        "type": "number",
                        "title": "Max Concurrent Downloads",
                        "description": "Maximum simultaneous downloads",
                        "min": 1,
                        "max": 20,
                        "default": 3
                    },
                    "connection_timeout": {
                        "type": "number",
                        "title": "Connection Timeout (seconds)",
                        "description": "Network connection timeout",
                        "min": 5,
                        "max": 300,
                        "default": 30
                    },
                    "enable_debug_mode": {
                        "type": "boolean",
                        "title": "Debug Mode",
                        "description": "Enable detailed debug logging",
                        "default": False
                    }
                }
            }
        }

    def export_config(self, include_sensitive: bool = False) -> Dict[str, Any]:
        """Export configuration, optionally excluding sensitive data."""
        config_copy = json.loads(json.dumps(self._config))  # Deep copy

        if not include_sensitive:
            # Remove sensitive data
            if 'security' in config_copy:
                config_copy['security'].pop('master_password_hash', None)
                config_copy['security'].pop('salt', None)

        return config_copy

    def import_config(self, config_data: Dict[str, Any], validate: bool = True) -> Dict[str, Any]:
        """Import configuration with validation."""
        if validate:
            validation_result = self.validate_config(config_data)
            if not validation_result['valid']:
                return {
                    'success': False,
                    'errors': validation_result['errors'],
                    'warnings': validation_result['warnings']
                }

        # Merge with existing config (don't overwrite security settings)
        current_security = self._config.get('security', {})
        merged_config = self._merge_configs(self.default_config, config_data)

        # Preserve current security settings if not provided
        if not config_data.get('security', {}).get('master_password_hash'):
            merged_config['security'] = current_security

        self._config = merged_config
        success = self.save_config()

        return {
            'success': success,
            'errors': [] if success else ['Failed to save configuration'],
            'warnings': validation_result.get('warnings', []) if validate else []
        }

    @property
    def anime_directory(self) -> str:
        """Get anime directory path."""
        # Always check environment variable first
        env_dir = os.getenv("ANIME_DIRECTORY")
        if env_dir:
            # Remove quotes if they exist
            env_dir = env_dir.strip('"\'')
            return env_dir
        return self.get("anime.directory", "\\\\sshfs.r\\ubuntu@192.168.178.43\\media\\serien\\Serien")

    @anime_directory.setter
    def anime_directory(self, value: str):
        """Set anime directory path."""
        self.set("anime.directory", value)

    @property
    def session_timeout_hours(self) -> int:
        """Get session timeout in hours."""
        return self.get("security.session_timeout_hours", 24)

    @property
    def max_failed_attempts(self) -> int:
        """Get maximum failed login attempts."""
        return self.get("security.max_failed_attempts", 5)

    @property
    def lockout_duration_minutes(self) -> int:
        """Get lockout duration in minutes."""
        return self.get("security.lockout_duration_minutes", 30)

    @property
    def scheduled_rescan_enabled(self) -> bool:
        """Get whether scheduled rescan is enabled."""
        return self.get("scheduler.rescan_enabled", False)

    @scheduled_rescan_enabled.setter
    def scheduled_rescan_enabled(self, value: bool):
        """Set whether scheduled rescan is enabled."""
        self.set("scheduler.rescan_enabled", value)

    @property
    def scheduled_rescan_time(self) -> str:
        """Get scheduled rescan time in HH:MM format."""
        return self.get("scheduler.rescan_time", "03:00")

    @scheduled_rescan_time.setter
    def scheduled_rescan_time(self, value: str):
        """Set scheduled rescan time in HH:MM format."""
        self.set("scheduler.rescan_time", value)

    @property
    def auto_download_after_rescan(self) -> bool:
        """Get whether to auto-download after scheduled rescan."""
        return self.get("scheduler.auto_download_after_rescan", False)

    @auto_download_after_rescan.setter
    def auto_download_after_rescan(self, value: bool):
        """Set whether to auto-download after scheduled rescan."""
        self.set("scheduler.auto_download_after_rescan", value)

    @property
    def log_level(self) -> str:
        """Get current log level."""
        return self.get("logging.level", "INFO")

    @log_level.setter
    def log_level(self, value: str):
        """Set log level."""
        self.set("logging.level", value.upper())

    @property
    def enable_console_logging(self) -> bool:
        """Get whether console logging is enabled."""
        return self.get("logging.enable_console_logging", True)

    @enable_console_logging.setter
    def enable_console_logging(self, value: bool):
        """Set whether console logging is enabled."""
        self.set("logging.enable_console_logging", value)

    @property
    def enable_console_progress(self) -> bool:
        """Get whether console progress bars are enabled."""
        return self.get("logging.enable_console_progress", False)

    @enable_console_progress.setter
    def enable_console_progress(self, value: bool):
        """Set whether console progress bars are enabled."""
        self.set("logging.enable_console_progress", value)

    @property
    def enable_fail2ban_logging(self) -> bool:
        """Get whether fail2ban logging is enabled."""
        return self.get("logging.enable_fail2ban_logging", True)

    @enable_fail2ban_logging.setter
    def enable_fail2ban_logging(self, value: bool):
        """Set whether fail2ban logging is enabled."""
        self.set("logging.enable_fail2ban_logging", value)

    # Provider configuration properties
    @property
    def default_provider(self) -> str:
        """Get default provider."""
        return self.get("providers.default_provider", "aniworld.to")

    @default_provider.setter
    def default_provider(self, value: str):
        """Set default provider."""
        self.set("providers.default_provider", value)

    @property
    def preferred_language(self) -> str:
        """Get preferred language."""
        return self.get("providers.preferred_language", "German Dub")

    @preferred_language.setter
    def preferred_language(self, value: str):
        """Set preferred language."""
        self.set("providers.preferred_language", value)

    @property
    def provider_timeout(self) -> int:
        """Get provider timeout in seconds."""
        return self.get("providers.provider_timeout", 30)

    @provider_timeout.setter
    def provider_timeout(self, value: int):
        """Set provider timeout in seconds."""
        self.set("providers.provider_timeout", value)

    # Advanced configuration properties
    @property
    def max_concurrent_downloads(self) -> int:
        """Get maximum concurrent downloads."""
        return self.get("advanced.max_concurrent_downloads", 3)

    @max_concurrent_downloads.setter
    def max_concurrent_downloads(self, value: int):
        """Set maximum concurrent downloads."""
        self.set("advanced.max_concurrent_downloads", value)

    @property
    def enable_debug_mode(self) -> bool:
        """Get whether debug mode is enabled."""
        return self.get("advanced.enable_debug_mode", False)

    @enable_debug_mode.setter
    def enable_debug_mode(self, value: bool):
        """Set whether debug mode is enabled."""
        self.set("advanced.enable_debug_mode", value)


# Global config instance
config = Config()
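
A minimal usage sketch for the configuration API above (not part of the diff; the import path and the payload are illustrative assumptions):

from src.server.config import config  # assumed module path for the global instance above

def apply_settings(new_values: dict) -> bool:
    """Validate a settings payload, then import and persist it via the Config API."""
    result = config.validate_config(new_values)
    for message in result['warnings']:
        print(f"warning: {message}")
    if not result['valid']:
        print("rejected:", "; ".join(result['errors']))
        return False
    return config.import_config(new_values)['success']

# Example: tighten two values while leaving everything else at its defaults.
# apply_settings({"providers": {"provider_timeout": 60}, "advanced": {"max_concurrent_downloads": 2}})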
@@ -1,848 +0,0 @@
"""
|
||||
FastAPI-based AniWorld Server Application.
|
||||
|
||||
This module implements a comprehensive FastAPI application following the instructions:
|
||||
- Simple master password authentication using JWT
|
||||
- Repository pattern with dependency injection
|
||||
- Proper error handling and validation
|
||||
- OpenAPI documentation
|
||||
- Security best practices
|
||||
"""
|
||||
|
||||
import hashlib
|
||||
import logging
|
||||
import os
|
||||
import sys
|
||||
from contextlib import asynccontextmanager
|
||||
from datetime import datetime, timedelta
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
import jwt
|
||||
|
||||
# Add parent directory to path for imports
|
||||
current_dir = os.path.dirname(__file__)
|
||||
parent_dir = os.path.join(current_dir, '..')
|
||||
sys.path.insert(0, os.path.abspath(parent_dir))
|
||||
|
||||
import uvicorn
|
||||
from fastapi import Depends, FastAPI, HTTPException, Request, Security, status
|
||||
from fastapi.middleware.cors import CORSMiddleware
|
||||
from fastapi.responses import HTMLResponse, JSONResponse
|
||||
from fastapi.security import HTTPAuthorizationCredentials, HTTPBearer
|
||||
from fastapi.staticfiles import StaticFiles
|
||||
from fastapi.templating import Jinja2Templates
|
||||
from pydantic import BaseModel, Field
|
||||
from pydantic_settings import BaseSettings
|
||||
|
||||
# Import application flow services
|
||||
from src.server.middleware.application_flow_middleware import ApplicationFlowMiddleware
|
||||
from src.server.services.setup_service import SetupService
|
||||
|
||||
# Import our custom middleware - temporarily disabled due to file corruption
|
||||
# from src.server.web.middleware.fastapi_auth_middleware import AuthMiddleware
|
||||
# from src.server.web.middleware.fastapi_logging_middleware import (
|
||||
# EnhancedLoggingMiddleware,
|
||||
# )
|
||||
# from src.server.web.middleware.fastapi_validation_middleware import ValidationMiddleware
|
||||
|
||||
# Configure logging
|
||||
logging.basicConfig(
|
||||
level=logging.INFO,
|
||||
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s',
|
||||
handlers=[
|
||||
logging.FileHandler('./logs/aniworld.log'),
|
||||
logging.StreamHandler()
|
||||
]
|
||||
)
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Security
|
||||
security = HTTPBearer()
|
||||
|
||||
# Configuration
|
||||
class Settings(BaseSettings):
|
||||
"""Application settings from environment variables."""
|
||||
jwt_secret_key: str = Field(default="your-secret-key-here", env="JWT_SECRET_KEY")
|
||||
password_salt: str = Field(default="default-salt", env="PASSWORD_SALT")
|
||||
master_password_hash: Optional[str] = Field(default=None, env="MASTER_PASSWORD_HASH")
|
||||
master_password: Optional[str] = Field(default=None, env="MASTER_PASSWORD") # For development
|
||||
token_expiry_hours: int = Field(default=24, env="SESSION_TIMEOUT_HOURS")
|
||||
anime_directory: str = Field(default="", env="ANIME_DIRECTORY")
|
||||
log_level: str = Field(default="INFO", env="LOG_LEVEL")
|
||||
|
||||
# Additional settings from .env
|
||||
database_url: str = Field(default="sqlite:///./data/aniworld.db", env="DATABASE_URL")
|
||||
cors_origins: str = Field(default="*", env="CORS_ORIGINS")
|
||||
api_rate_limit: int = Field(default=100, env="API_RATE_LIMIT")
|
||||
default_provider: str = Field(default="aniworld.to", env="DEFAULT_PROVIDER")
|
||||
provider_timeout: int = Field(default=30, env="PROVIDER_TIMEOUT")
|
||||
retry_attempts: int = Field(default=3, env="RETRY_ATTEMPTS")
|
||||
|
||||
class Config:
|
||||
env_file = ".env"
|
||||
extra = "ignore" # Ignore extra environment variables
|
||||
|
||||
settings = Settings()
|
||||
|
||||
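
# Illustrative sketch (not part of the original module): because Settings is a
# BaseSettings subclass, its values come from the process environment and the .env
# file, so they can be overridden before the object is created. The paths below are
# placeholders.
def _example_settings_from_env() -> Settings:
    import os
    os.environ["ANIME_DIRECTORY"] = "/tmp/aniworld-test"
    os.environ["LOG_LEVEL"] = "DEBUG"
    return Settings()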
# Pydantic Models
|
||||
class LoginRequest(BaseModel):
|
||||
"""Login request model."""
|
||||
password: str = Field(..., min_length=1, description="Master password")
|
||||
|
||||
class LoginResponse(BaseModel):
|
||||
"""Login response model."""
|
||||
success: bool
|
||||
message: str
|
||||
token: Optional[str] = None
|
||||
expires_at: Optional[datetime] = None
|
||||
|
||||
class TokenVerifyResponse(BaseModel):
|
||||
"""Token verification response model."""
|
||||
valid: bool
|
||||
message: str
|
||||
user: Optional[str] = None
|
||||
expires_at: Optional[datetime] = None
|
||||
|
||||
class HealthResponse(BaseModel):
|
||||
"""Health check response model."""
|
||||
status: str
|
||||
timestamp: datetime
|
||||
version: str = "1.0.0"
|
||||
services: Dict[str, str]
|
||||
|
||||
class AnimeSearchRequest(BaseModel):
|
||||
"""Anime search request model."""
|
||||
query: str = Field(..., min_length=1, max_length=100)
|
||||
limit: int = Field(default=20, ge=1, le=100)
|
||||
offset: int = Field(default=0, ge=0)
|
||||
|
||||
class AnimeResponse(BaseModel):
|
||||
"""Anime response model."""
|
||||
id: str
|
||||
title: str
|
||||
description: Optional[str] = None
|
||||
episodes: int = 0
|
||||
status: str = "Unknown"
|
||||
poster_url: Optional[str] = None
|
||||
|
||||
class EpisodeResponse(BaseModel):
|
||||
"""Episode response model."""
|
||||
id: str
|
||||
anime_id: str
|
||||
episode_number: int
|
||||
title: Optional[str] = None
|
||||
description: Optional[str] = None
|
||||
duration: Optional[int] = None
|
||||
stream_url: Optional[str] = None
|
||||
|
||||
class ErrorResponse(BaseModel):
|
||||
"""Error response model."""
|
||||
success: bool = False
|
||||
error: str
|
||||
code: Optional[str] = None
|
||||
details: Optional[Dict[str, Any]] = None
|
||||
|
||||
class SetupRequest(BaseModel):
|
||||
"""Setup request model."""
|
||||
password: str = Field(..., min_length=8, description="Master password (min 8 characters)")
|
||||
directory: str = Field(..., min_length=1, description="Anime directory path")
|
||||
|
||||
class SetupResponse(BaseModel):
|
||||
"""Setup response model."""
|
||||
status: str
|
||||
message: str
|
||||
redirect_url: Optional[str] = None
|
||||
|
||||
class SetupStatusResponse(BaseModel):
|
||||
"""Setup status response model."""
|
||||
setup_complete: bool
|
||||
requirements: Dict[str, bool]
|
||||
missing_requirements: List[str]
|
||||
|
||||
# Authentication utilities
|
||||
def hash_password(password: str) -> str:
|
||||
"""Hash password with salt using SHA-256."""
|
||||
salted_password = password + settings.password_salt
|
||||
return hashlib.sha256(salted_password.encode()).hexdigest()
|
||||
|
||||
def verify_master_password(password: str) -> bool:
|
||||
"""Verify password against master password hash."""
|
||||
if not settings.master_password_hash:
|
||||
# If no hash is set, check against plain password (development only)
|
||||
if settings.master_password:
|
||||
return password == settings.master_password
|
||||
return False
|
||||
|
||||
password_hash = hash_password(password)
|
||||
return password_hash == settings.master_password_hash
|
||||
|
||||
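
# Illustrative sketch (not part of the original module): generate a value for the
# MASTER_PASSWORD_HASH environment variable using the same salted SHA-256 scheme as
# hash_password() above. PASSWORD_SALT must match the value the server will run with.
def _example_print_master_password_hash() -> None:
    import getpass
    password = getpass.getpass("Choose a master password: ")
    print(hash_password(password))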
def generate_jwt_token() -> Dict[str, Any]:
|
||||
"""Generate JWT token for authentication."""
|
||||
expires_at = datetime.utcnow() + timedelta(hours=settings.token_expiry_hours)
|
||||
payload = {
|
||||
'user': 'master',
|
||||
'exp': expires_at,
|
||||
'iat': datetime.utcnow(),
|
||||
'iss': 'aniworld-fastapi-server'
|
||||
}
|
||||
|
||||
token = jwt.encode(payload, settings.jwt_secret_key, algorithm='HS256')
|
||||
return {
|
||||
'token': token,
|
||||
'expires_at': expires_at
|
||||
}
|
||||
|
||||
def verify_jwt_token(token: str) -> Optional[Dict[str, Any]]:
|
||||
"""Verify and decode JWT token."""
|
||||
try:
|
||||
payload = jwt.decode(token, settings.jwt_secret_key, algorithms=['HS256'])
|
||||
return payload
|
||||
except jwt.ExpiredSignatureError:
|
||||
logger.warning("Token has expired")
|
||||
return None
|
||||
except jwt.InvalidTokenError as e:
|
||||
logger.warning(f"Invalid token: {str(e)}")
|
||||
return None
|
||||
|
||||
async def get_current_user(credentials: HTTPAuthorizationCredentials = Security(security)):
|
||||
"""Dependency to get current authenticated user."""
|
||||
token = credentials.credentials
|
||||
payload = verify_jwt_token(token)
|
||||
|
||||
if not payload:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail="Invalid or expired token",
|
||||
headers={"WWW-Authenticate": "Bearer"},
|
||||
)
|
||||
|
||||
return payload
|
||||
|
||||
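
# Illustrative sketch (not part of the original module): how a client could obtain a
# token from /auth/login and call a protected endpoint. It assumes the server is
# reachable locally on port 8000 and that httpx (pinned in the FastAPI requirements
# file) is installed; the password is a placeholder.
def _example_client_login(base_url: str = "http://127.0.0.1:8000") -> None:
    import httpx
    response = httpx.post(f"{base_url}/auth/login", json={"password": "change-me"})
    response.raise_for_status()
    token = response.json()["token"]
    verified = httpx.get(
        f"{base_url}/auth/verify",
        headers={"Authorization": f"Bearer {token}"},
    )
    print(verified.json())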
# Global exception handler
|
||||
async def global_exception_handler(request, exc):
|
||||
"""Global exception handler for unhandled errors."""
|
||||
logger.error(f"Unhandled exception: {exc}", exc_info=True)
|
||||
return JSONResponse(
|
||||
status_code=500,
|
||||
content={
|
||||
"success": False,
|
||||
"error": "Internal Server Error",
|
||||
"code": "INTERNAL_ERROR"
|
||||
}
|
||||
)
|
||||
|
||||
# Application lifespan
|
||||
@asynccontextmanager
|
||||
async def lifespan(app: FastAPI):
|
||||
"""Manage application lifespan events."""
|
||||
# Startup
|
||||
logger.info("Starting AniWorld FastAPI server...")
|
||||
logger.info(f"Anime directory: {settings.anime_directory}")
|
||||
logger.info(f"Log level: {settings.log_level}")
|
||||
|
||||
# Verify configuration
|
||||
if not settings.master_password_hash and not settings.master_password:
|
||||
logger.warning("No master password configured! Set MASTER_PASSWORD_HASH or MASTER_PASSWORD environment variable.")
|
||||
|
||||
yield
|
||||
|
||||
# Shutdown
|
||||
logger.info("Shutting down AniWorld FastAPI server...")
|
||||
|
||||
# Create FastAPI application
|
||||
app = FastAPI(
|
||||
title="AniWorld API",
|
||||
description="""
|
||||
## AniWorld Management System
|
||||
|
||||
A comprehensive FastAPI-based application for managing anime series and episodes.
|
||||
|
||||
### Features
|
||||
|
||||
* **Series Management**: Search, track, and manage anime series
|
||||
* **Episode Tracking**: Monitor missing episodes and download progress
|
||||
* **Authentication**: Secure master password authentication with JWT tokens
|
||||
* **Real-time Updates**: WebSocket support for live progress tracking
|
||||
* **File Management**: Automatic file scanning and organization
|
||||
* **Download Queue**: Queue-based download management system
|
||||
|
||||
### Authentication
|
||||
|
||||
Most endpoints require authentication using a master password.
|
||||
Use the `/auth/login` endpoint to obtain a JWT token, then include it
|
||||
in the `Authorization` header as `Bearer <token>`.
|
||||
|
||||
### API Versioning
|
||||
|
||||
This API follows semantic versioning. Current version: **1.0.0**
|
||||
""",
|
||||
version="1.0.0",
|
||||
docs_url="/docs",
|
||||
redoc_url="/redoc",
|
||||
lifespan=lifespan,
|
||||
contact={
|
||||
"name": "AniWorld API Support",
|
||||
"url": "https://github.com/your-repo/aniworld",
|
||||
"email": "support@aniworld.com",
|
||||
},
|
||||
license_info={
|
||||
"name": "MIT",
|
||||
"url": "https://opensource.org/licenses/MIT",
|
||||
},
|
||||
tags_metadata=[
|
||||
{
|
||||
"name": "Authentication",
|
||||
"description": "Operations related to user authentication and session management",
|
||||
},
|
||||
{
|
||||
"name": "Anime",
|
||||
"description": "Operations for searching and managing anime series",
|
||||
},
|
||||
{
|
||||
"name": "Episodes",
|
||||
"description": "Operations for managing individual episodes",
|
||||
},
|
||||
{
|
||||
"name": "Downloads",
|
||||
"description": "Operations for managing the download queue and progress",
|
||||
},
|
||||
{
|
||||
"name": "System",
|
||||
"description": "System health, configuration, and maintenance operations",
|
||||
},
|
||||
{
|
||||
"name": "Files",
|
||||
"description": "File system operations and scanning functionality",
|
||||
},
|
||||
]
|
||||
)
|
||||
|
||||
# Configure templates
|
||||
templates = Jinja2Templates(directory="src/server/web/templates")
|
||||
|
||||
# Mount static files
|
||||
app.mount("/static", StaticFiles(directory="src/server/web/static"), name="static")
|
||||
|
||||
# Add CORS middleware
|
||||
app.add_middleware(
|
||||
CORSMiddleware,
|
||||
allow_origins=["*"], # Configure appropriately for production
|
||||
allow_credentials=True,
|
||||
allow_methods=["*"],
|
||||
allow_headers=["*"],
|
||||
)
|
||||
|
||||
# Add application flow middleware
|
||||
setup_service = SetupService()
|
||||
app.add_middleware(ApplicationFlowMiddleware, setup_service=setup_service)
|
||||
|
||||
# Add custom middleware - temporarily disabled
|
||||
# app.add_middleware(EnhancedLoggingMiddleware)
|
||||
# app.add_middleware(AuthMiddleware)
|
||||
# app.add_middleware(ValidationMiddleware)
|
||||
|
||||
# Add global exception handler
|
||||
app.add_exception_handler(Exception, global_exception_handler)
|
||||
|
||||
# Include API routers
|
||||
# from src.server.web.controllers.api.v1.anime import router as anime_router
|
||||
|
||||
# app.include_router(anime_router)
|
||||
|
||||
# Legacy API compatibility endpoints (TODO: migrate JavaScript to use v1 endpoints)
|
||||
@app.post("/api/add_series")
|
||||
async def legacy_add_series(
|
||||
request_data: Dict[str, Any],
|
||||
current_user: Dict = Depends(get_current_user)
|
||||
):
|
||||
"""Legacy endpoint for adding series - basic implementation."""
|
||||
try:
|
||||
link = request_data.get('link', '')
|
||||
name = request_data.get('name', '')
|
||||
|
||||
if not link or not name:
|
||||
return {"status": "error", "message": "Link and name are required"}
|
||||
|
||||
return {"status": "success", "message": f"Series '{name}' added successfully"}
|
||||
except Exception as e:
|
||||
return {"status": "error", "message": f"Failed to add series: {str(e)}"}
|
||||
|
||||
|
||||
@app.post("/api/download")
|
||||
async def legacy_download(
|
||||
request_data: Dict[str, Any],
|
||||
current_user: Dict = Depends(get_current_user)
|
||||
):
|
||||
"""Legacy endpoint for downloading series - basic implementation."""
|
||||
try:
|
||||
folders = request_data.get('folders', [])
|
||||
|
||||
if not folders:
|
||||
return {"status": "error", "message": "No folders specified"}
|
||||
|
||||
folder_count = len(folders)
|
||||
return {"status": "success", "message": f"Download started for {folder_count} series"}
|
||||
except Exception as e:
|
||||
return {"status": "error", "message": f"Failed to start download: {str(e)}"}
|
||||
|
||||
# Setup endpoints
|
||||
@app.get("/api/auth/setup/status", response_model=SetupStatusResponse, tags=["Setup"])
|
||||
async def get_setup_status() -> SetupStatusResponse:
|
||||
"""
|
||||
Check the current setup status of the application.
|
||||
|
||||
Returns information about what setup requirements are met and which are missing.
|
||||
"""
|
||||
try:
|
||||
setup_service = SetupService()
|
||||
requirements = setup_service.get_setup_requirements()
|
||||
missing = setup_service.get_missing_requirements()
|
||||
|
||||
return SetupStatusResponse(
|
||||
setup_complete=setup_service.is_setup_complete(),
|
||||
requirements=requirements,
|
||||
missing_requirements=missing
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Error checking setup status: {e}")
|
||||
return SetupStatusResponse(
|
||||
setup_complete=False,
|
||||
requirements={},
|
||||
missing_requirements=["Error checking setup status"]
|
||||
)
|
||||
|
||||
@app.post("/api/auth/setup", response_model=SetupResponse, tags=["Setup"])
|
||||
async def process_setup(request_data: SetupRequest) -> SetupResponse:
|
||||
"""
|
||||
Process the initial application setup.
|
||||
|
||||
- **password**: Master password (minimum 8 characters)
|
||||
- **directory**: Anime directory path
|
||||
"""
|
||||
try:
|
||||
setup_service = SetupService()
|
||||
|
||||
# Check if setup is already complete
|
||||
if setup_service.is_setup_complete():
|
||||
return SetupResponse(
|
||||
status="error",
|
||||
message="Setup has already been completed"
|
||||
)
|
||||
|
||||
# Validate directory path
|
||||
from pathlib import Path
|
||||
directory_path = Path(request_data.directory)
|
||||
if not directory_path.is_absolute():
|
||||
return SetupResponse(
|
||||
status="error",
|
||||
message="Please provide an absolute directory path"
|
||||
)
|
||||
|
||||
# Create directory if it doesn't exist
|
||||
try:
|
||||
directory_path.mkdir(parents=True, exist_ok=True)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to create directory: {e}")
|
||||
return SetupResponse(
|
||||
status="error",
|
||||
message=f"Failed to create directory: {str(e)}"
|
||||
)
|
||||
|
||||
# Hash the password
|
||||
password_hash = hash_password(request_data.password)
|
||||
|
||||
# Prepare configuration updates
|
||||
config_updates = {
|
||||
"security": {
|
||||
"master_password_hash": password_hash,
|
||||
"salt": settings.password_salt,
|
||||
"session_timeout_hours": settings.token_expiry_hours,
|
||||
"max_failed_attempts": 5,
|
||||
"lockout_duration_minutes": 30
|
||||
},
|
||||
"anime": {
|
||||
"directory": str(directory_path),
|
||||
"download_threads": 3,
|
||||
"download_speed_limit": None,
|
||||
"auto_rescan_time": "03:00",
|
||||
"auto_download_after_rescan": False
|
||||
},
|
||||
"logging": {
|
||||
"level": "INFO",
|
||||
"enable_console_logging": True,
|
||||
"enable_console_progress": False,
|
||||
"enable_fail2ban_logging": True,
|
||||
"log_file": "aniworld.log",
|
||||
"max_log_size_mb": 10,
|
||||
"log_backup_count": 5
|
||||
},
|
||||
"providers": {
|
||||
"default_provider": "aniworld.to",
|
||||
"preferred_language": "German Dub",
|
||||
"fallback_providers": ["aniworld.to"],
|
||||
"provider_timeout": 30,
|
||||
"retry_attempts": 3,
|
||||
"provider_settings": {
|
||||
"aniworld.to": {
|
||||
"enabled": True,
|
||||
"priority": 1,
|
||||
"quality_preference": "720p"
|
||||
}
|
||||
}
|
||||
},
|
||||
"advanced": {
|
||||
"max_concurrent_downloads": 3,
|
||||
"download_buffer_size": 8192,
|
||||
"connection_timeout": 30,
|
||||
"read_timeout": 300,
|
||||
"enable_debug_mode": False,
|
||||
"cache_duration_minutes": 60
|
||||
}
|
||||
}
|
||||
|
||||
# Mark setup as complete and save configuration
|
||||
success = setup_service.mark_setup_complete(config_updates)
|
||||
|
||||
if success:
|
||||
logger.info("Application setup completed successfully")
|
||||
return SetupResponse(
|
||||
status="success",
|
||||
message="Setup completed successfully",
|
||||
redirect_url="/login"
|
||||
)
|
||||
else:
|
||||
return SetupResponse(
|
||||
status="error",
|
||||
message="Failed to save configuration"
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Setup processing error: {e}")
|
||||
return SetupResponse(
|
||||
status="error",
|
||||
message="Setup failed due to internal error"
|
||||
)
|
||||
|
||||
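
# Illustrative sketch (not part of the original module): driving the one-time setup
# endpoint above from a client. The password and directory values are placeholders;
# the directory must be an absolute path, as validated by process_setup().
def _example_run_setup(base_url: str = "http://127.0.0.1:8000") -> None:
    import httpx
    payload = {"password": "a-strong-password", "directory": "/srv/anime"}
    response = httpx.post(f"{base_url}/api/auth/setup", json=payload)
    print(response.json())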
# Authentication endpoints
|
||||
@app.post("/auth/login", response_model=LoginResponse, tags=["Authentication"])
|
||||
async def login(request_data: LoginRequest, request: Request) -> LoginResponse:
|
||||
"""
|
||||
Authenticate with master password and receive JWT token.
|
||||
|
||||
- **password**: The master password for the application
|
||||
"""
|
||||
try:
|
||||
if not verify_master_password(request_data.password):
|
||||
client_ip = getattr(request.client, 'host', 'unknown') if request.client else 'unknown'
|
||||
logger.warning(f"Failed login attempt from IP: {client_ip}")
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_401_UNAUTHORIZED,
|
||||
detail="Invalid master password"
|
||||
)
|
||||
|
||||
token_data = generate_jwt_token()
|
||||
logger.info("Successful authentication")
|
||||
|
||||
return LoginResponse(
|
||||
success=True,
|
||||
message="Authentication successful",
|
||||
token=token_data['token'],
|
||||
expires_at=token_data['expires_at']
|
||||
)
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"Login error: {e}")
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail="Authentication service error"
|
||||
)
|
||||
|
||||
@app.get("/auth/verify", response_model=TokenVerifyResponse, tags=["Authentication"])
|
||||
async def verify_token(current_user: Dict = Depends(get_current_user)) -> TokenVerifyResponse:
|
||||
"""
|
||||
Verify the validity of the current JWT token.
|
||||
|
||||
Requires: Bearer token in Authorization header
|
||||
"""
|
||||
return TokenVerifyResponse(
|
||||
valid=True,
|
||||
message="Token is valid",
|
||||
user=current_user.get('user'),
|
||||
expires_at=datetime.fromtimestamp(current_user.get('exp', 0))
|
||||
)
|
||||
|
||||
@app.post("/auth/logout", response_model=Dict[str, Any], tags=["Authentication"])
|
||||
async def logout(current_user: Dict = Depends(get_current_user)) -> Dict[str, Any]:
|
||||
"""
|
||||
Logout endpoint (stateless - client should remove token).
|
||||
|
||||
Requires: Bearer token in Authorization header
|
||||
"""
|
||||
return {
|
||||
"success": True,
|
||||
"message": "Logged out successfully. Please remove the token from client storage."
|
||||
}
|
||||
|
||||
@app.get("/api/auth/status", response_model=Dict[str, Any], tags=["Authentication"])
|
||||
async def auth_status(request: Request) -> Dict[str, Any]:
|
||||
"""
|
||||
Check authentication status and configuration.
|
||||
|
||||
This endpoint checks if master password is configured and if user is authenticated.
|
||||
"""
|
||||
has_master_password = bool(settings.master_password_hash or settings.master_password)
|
||||
|
||||
# Check if user has valid token
|
||||
authenticated = False
|
||||
try:
|
||||
auth_header = request.headers.get("authorization")
|
||||
if auth_header and auth_header.startswith("Bearer "):
|
||||
token = auth_header.split(" ")[1]
|
||||
payload = verify_jwt_token(token)
|
||||
authenticated = payload is not None
|
||||
except Exception:
|
||||
authenticated = False
|
||||
|
||||
return {
|
||||
"has_master_password": has_master_password,
|
||||
"authenticated": authenticated
|
||||
}
|
||||
|
||||
# Health check endpoint
|
||||
@app.get("/health", response_model=HealthResponse, tags=["System"])
|
||||
async def health_check() -> HealthResponse:
|
||||
"""
|
||||
Application health check endpoint.
|
||||
"""
|
||||
return HealthResponse(
|
||||
status="healthy",
|
||||
timestamp=datetime.utcnow(),
|
||||
services={
|
||||
"authentication": "online",
|
||||
"anime_service": "online",
|
||||
"episode_service": "online"
|
||||
}
|
||||
)
|
||||
|
||||
# Common browser requests that might cause "Invalid HTTP request received" warnings
|
||||
@app.get("/favicon.ico")
|
||||
async def favicon():
|
||||
"""Handle favicon requests from browsers."""
|
||||
return JSONResponse(status_code=404, content={"detail": "Favicon not found"})
|
||||
|
||||
@app.get("/robots.txt")
|
||||
async def robots():
|
||||
"""Handle robots.txt requests."""
|
||||
return JSONResponse(status_code=404, content={"detail": "Robots.txt not found"})
|
||||
|
||||
@app.get("/")
|
||||
async def root():
|
||||
"""Root endpoint redirect to docs."""
|
||||
return {"message": "AniWorld API", "documentation": "/docs", "health": "/health"}
|
||||
|
||||
# Web interface routes
|
||||
@app.get("/app", response_class=HTMLResponse)
|
||||
async def web_app(request: Request):
|
||||
"""Serve the main web application."""
|
||||
return templates.TemplateResponse("base/index.html", {"request": request})
|
||||
|
||||
@app.get("/login", response_class=HTMLResponse)
|
||||
async def login_page(request: Request):
|
||||
"""Serve the login page."""
|
||||
return templates.TemplateResponse("base/login.html", {"request": request})
|
||||
|
||||
@app.get("/setup", response_class=HTMLResponse)
|
||||
async def setup_page(request: Request):
|
||||
"""Serve the setup page."""
|
||||
return templates.TemplateResponse("base/setup.html", {"request": request})
|
||||
|
||||
@app.get("/queue", response_class=HTMLResponse)
|
||||
async def queue_page(request: Request):
|
||||
"""Serve the queue page."""
|
||||
return templates.TemplateResponse("base/queue.html", {"request": request})
|
||||
|
||||
# Anime endpoints (protected)
|
||||
@app.get("/api/anime/search", response_model=List[AnimeResponse], tags=["Anime"])
|
||||
async def search_anime(
|
||||
query: str,
|
||||
limit: int = 20,
|
||||
offset: int = 0,
|
||||
current_user: Dict = Depends(get_current_user)
|
||||
) -> List[AnimeResponse]:
|
||||
"""
|
||||
Search for anime by title.
|
||||
|
||||
Requires: Bearer token in Authorization header
|
||||
- **query**: Search query string
|
||||
- **limit**: Maximum number of results (1-100)
|
||||
- **offset**: Number of results to skip for pagination
|
||||
"""
|
||||
# TODO: Implement actual anime search logic
|
||||
# This is a placeholder implementation
|
||||
logger.info(f"Searching anime with query: {query}")
|
||||
|
||||
# Mock data for now
|
||||
mock_results = [
|
||||
AnimeResponse(
|
||||
id=f"anime_{i}",
|
||||
title=f"Sample Anime {i}",
|
||||
description=f"Description for anime {i}",
|
||||
episodes=24,
|
||||
status="Completed"
|
||||
)
|
||||
for i in range(offset + 1, min(offset + limit + 1, 100))
|
||||
if query.lower() in f"sample anime {i}".lower()
|
||||
]
|
||||
|
||||
return mock_results
|
||||
|
||||
@app.get("/api/anime/{anime_id}", response_model=AnimeResponse, tags=["Anime"])
|
||||
async def get_anime(
|
||||
anime_id: str,
|
||||
current_user: Dict = Depends(get_current_user)
|
||||
) -> AnimeResponse:
|
||||
"""
|
||||
Get detailed information about a specific anime.
|
||||
|
||||
Requires: Bearer token in Authorization header
|
||||
- **anime_id**: Unique identifier for the anime
|
||||
"""
|
||||
# TODO: Implement actual anime retrieval logic
|
||||
logger.info(f"Fetching anime details for ID: {anime_id}")
|
||||
|
||||
# Mock data for now
|
||||
return AnimeResponse(
|
||||
id=anime_id,
|
||||
title=f"Anime {anime_id}",
|
||||
description=f"Detailed description for anime {anime_id}",
|
||||
episodes=24,
|
||||
status="Completed"
|
||||
)
|
||||
|
||||
@app.get("/api/anime/{anime_id}/episodes", response_model=List[EpisodeResponse], tags=["Episodes"])
|
||||
async def get_anime_episodes(
|
||||
anime_id: str,
|
||||
current_user: Dict = Depends(get_current_user)
|
||||
) -> List[EpisodeResponse]:
|
||||
"""
|
||||
Get all episodes for a specific anime.
|
||||
|
||||
Requires: Bearer token in Authorization header
|
||||
- **anime_id**: Unique identifier for the anime
|
||||
"""
|
||||
# TODO: Implement actual episode retrieval logic
|
||||
logger.info(f"Fetching episodes for anime ID: {anime_id}")
|
||||
|
||||
# Mock data for now
|
||||
return [
|
||||
EpisodeResponse(
|
||||
id=f"{anime_id}_ep_{i}",
|
||||
anime_id=anime_id,
|
||||
episode_number=i,
|
||||
title=f"Episode {i}",
|
||||
description=f"Description for episode {i}",
|
||||
duration=1440 # 24 minutes in seconds
|
||||
)
|
||||
for i in range(1, 25) # 24 episodes
|
||||
]
|
||||
|
||||
@app.get("/api/episodes/{episode_id}", response_model=EpisodeResponse, tags=["Episodes"])
|
||||
async def get_episode(
|
||||
episode_id: str,
|
||||
current_user: Dict = Depends(get_current_user)
|
||||
) -> EpisodeResponse:
|
||||
"""
|
||||
Get detailed information about a specific episode.
|
||||
|
||||
Requires: Bearer token in Authorization header
|
||||
- **episode_id**: Unique identifier for the episode
|
||||
"""
|
||||
# TODO: Implement actual episode retrieval logic
|
||||
logger.info(f"Fetching episode details for ID: {episode_id}")
|
||||
|
||||
# Mock data for now
|
||||
return EpisodeResponse(
|
||||
id=episode_id,
|
||||
anime_id="sample_anime",
|
||||
episode_number=1,
|
||||
title=f"Episode {episode_id}",
|
||||
description=f"Detailed description for episode {episode_id}",
|
||||
duration=1440
|
||||
)
|
||||
|
||||
# Database health check endpoint
|
||||
@app.get("/api/system/database/health", response_model=Dict[str, Any], tags=["System"])
|
||||
async def database_health(current_user: Dict = Depends(get_current_user)) -> Dict[str, Any]:
|
||||
"""
|
||||
Check database connectivity and health.
|
||||
|
||||
Requires: Bearer token in Authorization header
|
||||
"""
|
||||
# TODO: Implement actual database health check
|
||||
return {
|
||||
"status": "healthy",
|
||||
"connection_pool": "active",
|
||||
"response_time_ms": 15,
|
||||
"last_check": datetime.utcnow().isoformat()
|
||||
}
|
||||
|
||||
# Configuration endpoint
|
||||
@app.get("/api/system/config", response_model=Dict[str, Any], tags=["System"])
|
||||
async def get_system_config(current_user: Dict = Depends(get_current_user)) -> Dict[str, Any]:
|
||||
"""
|
||||
Get system configuration information.
|
||||
|
||||
Requires: Bearer token in Authorization header
|
||||
"""
|
||||
return {
|
||||
"anime_directory": settings.anime_directory,
|
||||
"log_level": settings.log_level,
|
||||
"token_expiry_hours": settings.token_expiry_hours,
|
||||
"version": "1.0.0"
|
||||
}
|
||||
|
||||
if __name__ == "__main__":
|
||||
import socket
|
||||
|
||||
# Configure enhanced logging
|
||||
log_level = getattr(logging, settings.log_level.upper(), logging.INFO)
|
||||
logging.getLogger().setLevel(log_level)
|
||||
|
||||
# Check if port is available
|
||||
def is_port_available(host: str, port: int) -> bool:
|
||||
"""Check if a port is available on the given host."""
|
||||
try:
|
||||
with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
|
||||
sock.bind((host, port))
|
||||
return True
|
||||
except OSError:
|
||||
return False
|
||||
|
||||
host = "127.0.0.1"
|
||||
port = 8000
|
||||
|
||||
if not is_port_available(host, port):
|
||||
logger.error(f"Port {port} is already in use on {host}. Please stop other services or choose a different port.")
|
||||
logger.info("You can check which process is using the port with: netstat -ano | findstr :8000")
|
||||
sys.exit(1)
|
||||
|
||||
logger.info("Starting AniWorld FastAPI server with uvicorn...")
|
||||
logger.info(f"Anime directory: {settings.anime_directory}")
|
||||
logger.info(f"Log level: {settings.log_level}")
|
||||
logger.info(f"Server will be available at http://{host}:{port}")
|
||||
logger.info(f"API documentation at http://{host}:{port}/docs")
|
||||
|
||||
try:
|
||||
# Run the application
|
||||
uvicorn.run(
|
||||
"fastapi_app:app",
|
||||
host=host,
|
||||
port=port,
|
||||
reload=False, # Disable reload to prevent constant restarting
|
||||
log_level=settings.log_level.lower()
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Failed to start server: {e}")
|
||||
sys.exit(1)
|
||||
@@ -1,248 +0,0 @@
"""
|
||||
Application Flow Middleware for FastAPI.
|
||||
|
||||
This middleware enforces the application flow priorities:
|
||||
1. Setup page (if setup is not complete)
|
||||
2. Authentication page (if user is not authenticated)
|
||||
3. Main application (for authenticated users with completed setup)
|
||||
|
||||
The middleware redirects users to the appropriate page based on their current state
|
||||
and the state of the application setup.
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import Optional
|
||||
|
||||
from fastapi import Request
|
||||
from fastapi.responses import RedirectResponse
|
||||
from starlette.middleware.base import BaseHTTPMiddleware
|
||||
|
||||
# Import the setup service
|
||||
try:
|
||||
from ..services.setup_service import SetupService
|
||||
except ImportError:
|
||||
# Handle case where service is not available
|
||||
class SetupService:
|
||||
def is_setup_complete(self):
|
||||
return True
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ApplicationFlowMiddleware(BaseHTTPMiddleware):
|
||||
"""
|
||||
Middleware to enforce application flow: setup → auth → main application.
|
||||
|
||||
This middleware:
|
||||
1. Checks if setup is complete
|
||||
2. Validates authentication status
|
||||
3. Redirects to appropriate page based on state
|
||||
4. Allows API endpoints and static files to pass through
|
||||
"""
|
||||
|
||||
def __init__(self, app, setup_service: Optional[SetupService] = None):
|
||||
"""
|
||||
Initialize the application flow middleware.
|
||||
|
||||
Args:
|
||||
app: FastAPI application instance
|
||||
setup_service: Setup service instance (optional, will create if not provided)
|
||||
"""
|
||||
super().__init__(app)
|
||||
self.setup_service = setup_service or SetupService()
|
||||
|
||||
# Define paths that should bypass flow enforcement
|
||||
self.bypass_paths = {
|
||||
"/static", # Static files
|
||||
"/favicon.ico", # Browser favicon requests
|
||||
"/robots.txt", # Robots.txt
|
||||
"/health", # Health check endpoints
|
||||
"/docs", # OpenAPI documentation
|
||||
"/redoc", # ReDoc documentation
|
||||
"/openapi.json" # OpenAPI spec
|
||||
}
|
||||
|
||||
# API paths that should bypass flow but may require auth
|
||||
self.api_paths = {
|
||||
"/api",
|
||||
"/auth"
|
||||
}
|
||||
|
||||
# Pages that are part of the flow and should be accessible
|
||||
self.flow_pages = {
|
||||
"/setup",
|
||||
"/login",
|
||||
"/app"
|
||||
}
|
||||
|
||||
async def dispatch(self, request: Request, call_next):
|
||||
"""
|
||||
Process the request and enforce application flow.
|
||||
|
||||
Args:
|
||||
request: Incoming HTTP request
|
||||
call_next: Next middleware/handler in chain
|
||||
|
||||
Returns:
|
||||
Response: Either a redirect response or the result of call_next
|
||||
"""
|
||||
try:
|
||||
# Get the request path
|
||||
path = request.url.path
|
||||
|
||||
# Skip flow enforcement for certain paths
|
||||
if self._should_bypass_flow(path):
|
||||
return await call_next(request)
|
||||
|
||||
# Check application setup status
|
||||
setup_complete = self.setup_service.is_setup_complete()
|
||||
|
||||
# Check authentication status
|
||||
is_authenticated = await self._is_user_authenticated(request)
|
||||
|
||||
# Determine the appropriate action
|
||||
redirect_response = self._determine_redirect(path, setup_complete, is_authenticated)
|
||||
|
||||
if redirect_response:
|
||||
logger.info(f"Redirecting {path} to {redirect_response.headers.get('location')}")
|
||||
return redirect_response
|
||||
|
||||
# Continue with the request
|
||||
return await call_next(request)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error in ApplicationFlowMiddleware: {e}", exc_info=True)
|
||||
# In case of error, allow the request to continue
|
||||
return await call_next(request)
|
||||
|
||||
def _should_bypass_flow(self, path: str) -> bool:
|
||||
"""
|
||||
Check if the given path should bypass flow enforcement.
|
||||
|
||||
Args:
|
||||
path: Request path
|
||||
|
||||
Returns:
|
||||
bool: True if path should bypass flow enforcement
|
||||
"""
|
||||
# Check exact bypass paths
|
||||
for bypass_path in self.bypass_paths:
|
||||
if path.startswith(bypass_path):
|
||||
return True
|
||||
|
||||
# API paths bypass flow enforcement (but may have their own auth)
|
||||
for api_path in self.api_paths:
|
||||
if path.startswith(api_path):
|
||||
return True
|
||||
|
||||
return False
|
||||
|
||||
async def _is_user_authenticated(self, request: Request) -> bool:
|
||||
"""
|
||||
Check if the user is authenticated by validating JWT token.
|
||||
|
||||
Args:
|
||||
request: HTTP request object
|
||||
|
||||
Returns:
|
||||
bool: True if user is authenticated, False otherwise
|
||||
"""
|
||||
try:
|
||||
# Check for Authorization header
|
||||
auth_header = request.headers.get("authorization")
|
||||
if not auth_header or not auth_header.startswith("Bearer "):
|
||||
return False
|
||||
|
||||
# Extract and validate token
|
||||
token = auth_header.split(" ")[1]
|
||||
|
||||
# Import JWT validation function (avoid circular imports)
|
||||
try:
|
||||
from ..fastapi_app import verify_jwt_token
|
||||
payload = verify_jwt_token(token)
|
||||
return payload is not None
|
||||
except ImportError:
|
||||
# Fallback if import fails
|
||||
logger.warning("Could not import JWT verification function")
|
||||
return False
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error checking authentication: {e}")
|
||||
return False
|
||||
|
||||
def _determine_redirect(self, path: str, setup_complete: bool, is_authenticated: bool) -> Optional[RedirectResponse]:
|
||||
"""
|
||||
Determine if a redirect is needed based on current state.
|
||||
|
||||
Args:
|
||||
path: Current request path
|
||||
setup_complete: Whether application setup is complete
|
||||
is_authenticated: Whether user is authenticated
|
||||
|
||||
Returns:
|
||||
Optional[RedirectResponse]: Redirect response if needed, None otherwise
|
||||
"""
|
||||
# If setup is not complete
|
||||
if not setup_complete:
|
||||
# Allow access to setup page
|
||||
if path == "/setup":
|
||||
return None
|
||||
# Redirect everything else to setup
|
||||
return RedirectResponse(url="/setup", status_code=302)
|
||||
|
||||
# Setup is complete, check authentication
|
||||
if not is_authenticated:
|
||||
# Allow access to login page
|
||||
if path == "/login":
|
||||
return None
|
||||
# Redirect unauthenticated users to login (except for specific pages)
|
||||
if path in self.flow_pages or path == "/":
|
||||
return RedirectResponse(url="/login", status_code=302)
|
||||
|
||||
# User is authenticated and setup is complete
|
||||
else:
|
||||
# Redirect from setup/login pages to main app
|
||||
if path in ["/setup", "/login", "/"]:
|
||||
return RedirectResponse(url="/app", status_code=302)
|
||||
|
||||
# No redirect needed
|
||||
return None
|
||||
|
||||
def get_flow_status(self, request: Request) -> dict:
|
||||
"""
|
||||
Get current flow status for debugging/monitoring.
|
||||
|
||||
Args:
|
||||
request: HTTP request object
|
||||
|
||||
Returns:
|
||||
dict: Current flow status information
|
||||
"""
|
||||
try:
|
||||
setup_complete = self.setup_service.is_setup_complete()
|
||||
is_authenticated = self._is_user_authenticated(request)
|
||||
|
||||
return {
|
||||
"setup_complete": setup_complete,
|
||||
"authenticated": is_authenticated,
|
||||
"path": request.url.path,
|
||||
"should_bypass": self._should_bypass_flow(request.url.path)
|
||||
}
|
||||
except Exception as e:
|
||||
return {
|
||||
"error": str(e),
|
||||
"path": request.url.path
|
||||
}
|
||||
|
||||
|
||||
def create_application_flow_middleware(setup_service: Optional[SetupService] = None) -> ApplicationFlowMiddleware:
|
||||
"""
|
||||
Factory function to create application flow middleware.
|
||||
|
||||
Args:
|
||||
setup_service: Setup service instance (optional)
|
||||
|
||||
Returns:
|
||||
ApplicationFlowMiddleware: Configured middleware instance
|
||||
"""
|
||||
return ApplicationFlowMiddleware(app=None, setup_service=setup_service)
|
||||
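
A small sketch of how the redirect rules above resolve for a few representative states (illustrative only; it instantiates the middleware without an app, just as the factory function does):

def _example_flow_decisions() -> None:
    mw = ApplicationFlowMiddleware(app=None)
    cases = [
        ("/app", False, False),   # setup pending -> everything goes to /setup
        ("/app", True, False),    # setup done, not authenticated -> /login
        ("/login", True, True),   # already authenticated -> back to /app
        ("/setup", True, True),   # setup page is left once setup is complete
    ]
    for path, setup_done, authed in cases:
        redirect = mw._determine_redirect(path, setup_done, authed)
        target = redirect.headers.get("location") if redirect else "(no redirect)"
        print(f"{path:8} setup={setup_done!s:5} auth={authed!s:5} -> {target}")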
@@ -1,41 +0,0 @@
# FastAPI and ASGI server
fastapi==0.118.0
uvicorn[standard]==0.37.0
python-multipart==0.0.12

# Authentication and security
pyjwt==2.10.1
passlib[bcrypt]==1.7.4
python-jose[cryptography]==3.3.0

# Configuration and environment
pydantic==2.11.10
pydantic-settings==2.11.0
python-dotenv==1.1.1

# Database (if needed)
sqlalchemy==2.0.43
alembic==1.16.5

# HTTP client
httpx==0.28.1
aiofiles==24.1.0

# Utilities
python-dateutil==2.9.0.post0
pytz==2024.2

# Development and testing
pytest==8.4.2
pytest-asyncio==1.2.0
pytest-cov==7.0.0
pytest-mock==3.15.1

# Code quality
black==25.9.0
isort==6.1.0
flake8==7.3.0
mypy==1.18.2

# Logging
structlog==25.1.0
@@ -1,303 +1,303 @@
from flask import Blueprint, render_template, request, jsonify
|
||||
from web.controllers.auth_controller import optional_auth
|
||||
import threading
|
||||
import time
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
# Create blueprint for download queue management
|
||||
download_queue_bp = Blueprint('download_queue', __name__)
|
||||
|
||||
# Global download queue state
|
||||
download_queue_state = {
|
||||
'active_downloads': [],
|
||||
'pending_queue': [],
|
||||
'completed_downloads': [],
|
||||
'failed_downloads': [],
|
||||
'queue_lock': threading.Lock(),
|
||||
'statistics': {
|
||||
'total_items': 0,
|
||||
'completed_items': 0,
|
||||
'failed_items': 0,
|
||||
'estimated_time_remaining': None,
|
||||
'current_speed': '0 MB/s',
|
||||
'average_speed': '0 MB/s'
|
||||
}
|
||||
}
|
||||
|
||||
@download_queue_bp.route('/queue')
|
||||
@optional_auth
|
||||
def queue_page():
|
||||
"""Download queue management page."""
|
||||
return render_template('queue.html')
|
||||
|
||||
@download_queue_bp.route('/api/queue/status')
|
||||
@optional_auth
|
||||
def get_queue_status():
|
||||
"""Get detailed download queue status."""
|
||||
with download_queue_state['queue_lock']:
|
||||
# Calculate ETA
|
||||
eta = None
|
||||
if download_queue_state['active_downloads']:
|
||||
active_download = download_queue_state['active_downloads'][0]
|
||||
if 'progress' in active_download and active_download['progress'].get('speed_mbps', 0) > 0:
|
||||
remaining_items = len(download_queue_state['pending_queue'])
|
||||
avg_speed = active_download['progress']['speed_mbps']
|
||||
# Rough estimation: assume 500MB per episode
|
||||
estimated_mb_remaining = remaining_items * 500
|
||||
eta_seconds = estimated_mb_remaining / avg_speed if avg_speed > 0 else None
|
||||
if eta_seconds:
|
||||
eta = datetime.now() + timedelta(seconds=eta_seconds)
|
||||
|
||||
return jsonify({
|
||||
'active_downloads': download_queue_state['active_downloads'],
|
||||
'pending_queue': download_queue_state['pending_queue'],
|
||||
'completed_downloads': download_queue_state['completed_downloads'][-10:], # Last 10
|
||||
'failed_downloads': download_queue_state['failed_downloads'][-10:], # Last 10
|
||||
'statistics': {
|
||||
**download_queue_state['statistics'],
|
||||
'eta': eta.isoformat() if eta else None
|
||||
}
|
||||
})
|
||||
|
||||
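
# Illustrative sketch (not part of the original blueprint): the same rough ETA rule used
# in get_queue_status() above -- assume roughly 500 MB per pending episode and divide by
# the currently observed speed.
def _example_eta_minutes(pending_items: int, speed_mbps: float) -> float:
    if speed_mbps <= 0:
        raise ValueError("speed_mbps must be positive")
    estimated_mb_remaining = pending_items * 500
    return (estimated_mb_remaining / speed_mbps) / 60  # e.g. 10 items at 5 MB/s ~ 16.7 min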
@download_queue_bp.route('/api/queue/clear', methods=['POST'])
|
||||
@optional_auth
|
||||
def clear_queue():
|
||||
"""Clear completed and failed downloads from queue."""
|
||||
try:
|
||||
data = request.get_json() or {}
|
||||
queue_type = data.get('type', 'completed') # 'completed', 'failed', or 'all'
|
||||
|
||||
with download_queue_state['queue_lock']:
|
||||
if queue_type == 'completed' or queue_type == 'all':
|
||||
download_queue_state['completed_downloads'].clear()
|
||||
|
||||
if queue_type == 'failed' or queue_type == 'all':
|
||||
download_queue_state['failed_downloads'].clear()
|
||||
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'message': f'Cleared {queue_type} downloads'
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
return jsonify({
|
||||
'status': 'error',
|
||||
'message': str(e)
|
||||
}), 500
|
||||
|
||||
@download_queue_bp.route('/api/queue/retry', methods=['POST'])
|
||||
@optional_auth
|
||||
def retry_failed_download():
|
||||
"""Retry a failed download."""
|
||||
try:
|
||||
data = request.get_json()
|
||||
download_id = data.get('id')
|
||||
|
||||
if not download_id:
|
||||
return jsonify({
|
||||
'status': 'error',
|
||||
'message': 'Download ID is required'
|
||||
}), 400
|
||||
|
||||
with download_queue_state['queue_lock']:
|
||||
# Find failed download
|
||||
failed_download = None
|
||||
for i, download in enumerate(download_queue_state['failed_downloads']):
|
||||
if download['id'] == download_id:
|
||||
failed_download = download_queue_state['failed_downloads'].pop(i)
|
||||
break
|
||||
|
||||
if not failed_download:
|
||||
return jsonify({
|
||||
'status': 'error',
|
||||
'message': 'Failed download not found'
|
||||
}), 404
|
||||
|
||||
# Reset download status and add back to queue
|
||||
failed_download['status'] = 'queued'
|
||||
failed_download['error'] = None
|
||||
failed_download['retry_count'] = failed_download.get('retry_count', 0) + 1
|
||||
download_queue_state['pending_queue'].append(failed_download)
|
||||
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'message': 'Download added back to queue'
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
return jsonify({
|
||||
'status': 'error',
|
||||
'message': str(e)
|
||||
}), 500
|
||||
|
||||
@download_queue_bp.route('/api/queue/remove', methods=['POST'])
@optional_auth
def remove_from_queue():
    """Remove an item from the pending queue."""
    try:
        data = request.get_json()
        download_id = data.get('id')

        if not download_id:
            return jsonify({
                'status': 'error',
                'message': 'Download ID is required'
            }), 400

        with download_queue_state['queue_lock']:
            # Find and remove from pending queue
            removed = False
            for i, download in enumerate(download_queue_state['pending_queue']):
                if download['id'] == download_id:
                    download_queue_state['pending_queue'].pop(i)
                    removed = True
                    break

            if not removed:
                return jsonify({
                    'status': 'error',
                    'message': 'Download not found in queue'
                }), 404

        return jsonify({
            'status': 'success',
            'message': 'Download removed from queue'
        })

    except Exception as e:
        return jsonify({
            'status': 'error',
            'message': str(e)
        }), 500


@download_queue_bp.route('/api/queue/reorder', methods=['POST'])
@optional_auth
def reorder_queue():
    """Reorder items in the pending queue."""
    try:
        data = request.get_json()
        new_order = data.get('order')  # Array of download IDs in new order

        if not new_order or not isinstance(new_order, list):
            return jsonify({
                'status': 'error',
                'message': 'Valid order array is required'
            }), 400

        with download_queue_state['queue_lock']:
            # Create new queue based on the provided order
            old_queue = download_queue_state['pending_queue'].copy()
            new_queue = []

            # Add items in the specified order
            for download_id in new_order:
                for download in old_queue:
                    if download['id'] == download_id:
                        new_queue.append(download)
                        break

            # Add any remaining items that weren't in the new order
            for download in old_queue:
                if download not in new_queue:
                    new_queue.append(download)

            download_queue_state['pending_queue'] = new_queue

        return jsonify({
            'status': 'success',
            'message': 'Queue reordered successfully'
        })

    except Exception as e:
        return jsonify({
            'status': 'error',
            'message': str(e)
        }), 500


# Helper functions for queue management
def add_to_download_queue(serie_name, episode_info, priority='normal'):
    """Add a download to the queue."""
    import uuid

    download_item = {
        'id': str(uuid.uuid4()),
        'serie_name': serie_name,
        'episode': episode_info,
        'status': 'queued',
        'priority': priority,
        'added_at': datetime.now().isoformat(),
        'started_at': None,
        'completed_at': None,
        'error': None,
        'retry_count': 0,
        'progress': {
            'percent': 0,
            'downloaded_mb': 0,
            'total_mb': 0,
            'speed_mbps': 0,
            'eta_seconds': None
        }
    }

    with download_queue_state['queue_lock']:
        # Insert based on priority
        if priority == 'high':
            download_queue_state['pending_queue'].insert(0, download_item)
        else:
            download_queue_state['pending_queue'].append(download_item)

        download_queue_state['statistics']['total_items'] += 1

    return download_item['id']


def update_download_progress(download_id, progress_data):
    """Update progress for an active download."""
    with download_queue_state['queue_lock']:
        for download in download_queue_state['active_downloads']:
            if download['id'] == download_id:
                download['progress'].update(progress_data)

                # Update global statistics
                if 'speed_mbps' in progress_data:
                    download_queue_state['statistics']['current_speed'] = f"{progress_data['speed_mbps']:.1f} MB/s"

                break


def move_download_to_completed(download_id, success=True, error=None):
    """Move download from active to completed/failed."""
    with download_queue_state['queue_lock']:
        download = None
        for i, item in enumerate(download_queue_state['active_downloads']):
            if item['id'] == download_id:
                download = download_queue_state['active_downloads'].pop(i)
                break

        if download:
            download['completed_at'] = datetime.now().isoformat()

            if success:
                download['status'] = 'completed'
                download['progress']['percent'] = 100
                download_queue_state['completed_downloads'].append(download)
                download_queue_state['statistics']['completed_items'] += 1
            else:
                download['status'] = 'failed'
                download['error'] = error
                download_queue_state['failed_downloads'].append(download)
                download_queue_state['statistics']['failed_items'] += 1


def start_next_download():
    """Move next queued download to active state."""
    with download_queue_state['queue_lock']:
        if download_queue_state['pending_queue'] and len(download_queue_state['active_downloads']) < 3:  # Max 3 concurrent
            download = download_queue_state['pending_queue'].pop(0)
            download['status'] = 'downloading'
            download['started_at'] = datetime.now().isoformat()
            download_queue_state['active_downloads'].append(download)
            return download
        return None


def get_queue_statistics():
    """Get current queue statistics."""
    with download_queue_state['queue_lock']:
        return download_queue_state['statistics'].copy()
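# Usage sketch (illustrative; not part of the changed file above). It shows how the
# module-level helpers could be driven by a simple background worker. The
# `perform_download` callable and the episode dict are hypothetical placeholders;
# only add_to_download_queue, start_next_download, update_download_progress and
# move_download_to_completed come from the code above.
import time


def queue_worker(perform_download):
    """Poll the pending queue and run downloads until the process exits."""
    while True:
        item = start_next_download()  # moves a queued item into active_downloads
        if item is None:
            time.sleep(5)  # nothing queued, or 3 downloads already active
            continue
        try:
            # perform_download is assumed to yield progress dicts as it works
            for progress in perform_download(item):
                update_download_progress(item['id'], progress)
            move_download_to_completed(item['id'], success=True)
        except Exception as exc:
            move_download_to_completed(item['id'], success=False, error=str(exc))


# Example: enqueue an episode with high priority, then let the worker pick it up.
download_id = add_to_download_queue('Example Serie', {'season': 1, 'episode': 1}, priority='high')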
@ -1,252 +1,252 @@
import threading
import time
import schedule
from datetime import datetime, timedelta
from typing import Optional, Callable, Dict, Any
import logging
from shared.utils.process_utils import (with_process_lock, RESCAN_LOCK,
                                         ProcessLockError, is_process_running)

logger = logging.getLogger(__name__)


class ScheduledOperations:
    """Handle scheduled operations like automatic rescans and downloads."""

    def __init__(self, config_manager, socketio=None):
        self.config = config_manager
        self.socketio = socketio
        self.scheduler_thread = None
        self.running = False
        self.rescan_callback: Optional[Callable] = None
        self.download_callback: Optional[Callable] = None
        self.last_scheduled_rescan: Optional[datetime] = None

        # Load scheduled rescan settings
        self.scheduled_rescan_enabled = getattr(self.config, 'scheduled_rescan_enabled', False)
        self.scheduled_rescan_time = getattr(self.config, 'scheduled_rescan_time', '03:00')
        self.auto_download_after_rescan = getattr(self.config, 'auto_download_after_rescan', False)

    def set_rescan_callback(self, callback: Callable):
        """Set callback function for performing rescan operations."""
        self.rescan_callback = callback

    def set_download_callback(self, callback: Callable):
        """Set callback function for performing download operations."""
        self.download_callback = callback

    def start_scheduler(self):
        """Start the background scheduler thread."""
        if self.running:
            logger.warning("Scheduler is already running")
            return

        self.running = True
        self.scheduler_thread = threading.Thread(target=self._scheduler_loop, daemon=True)
        self.scheduler_thread.start()
        logger.info("Scheduled operations started")

    def stop_scheduler(self):
        """Stop the background scheduler."""
        self.running = False
        schedule.clear()
        if self.scheduler_thread and self.scheduler_thread.is_alive():
            self.scheduler_thread.join(timeout=5)
        logger.info("Scheduled operations stopped")

    def _scheduler_loop(self):
        """Main scheduler loop that runs in background thread."""
        self._setup_scheduled_jobs()

        while self.running:
            try:
                schedule.run_pending()
                time.sleep(60)  # Check every minute
            except Exception as e:
                logger.error(f"Scheduler error: {e}")
                time.sleep(60)

    def _setup_scheduled_jobs(self):
        """Setup all scheduled jobs based on configuration."""
        schedule.clear()

        if self.scheduled_rescan_enabled and self.scheduled_rescan_time:
            try:
                schedule.every().day.at(self.scheduled_rescan_time).do(self._perform_scheduled_rescan)
                logger.info(f"Scheduled daily rescan at {self.scheduled_rescan_time}")
            except Exception as e:
                logger.error(f"Error setting up scheduled rescan: {e}")

    def _perform_scheduled_rescan(self):
        """Perform the scheduled rescan operation."""
        try:
            logger.info("Starting scheduled rescan...")

            # Emit scheduled rescan started event
            if self.socketio:
                self.socketio.emit('scheduled_rescan_started')

            # Check if rescan is already running
            if is_process_running(RESCAN_LOCK):
                logger.warning("Rescan is already running, skipping scheduled rescan")
                if self.socketio:
                    self.socketio.emit('scheduled_rescan_skipped', {
                        'reason': 'Rescan already in progress'
                    })
                return

            # Perform the rescan using process lock
            @with_process_lock(RESCAN_LOCK, timeout_minutes=180)
            def perform_rescan():
                self.last_scheduled_rescan = datetime.now()

                if self.rescan_callback:
                    result = self.rescan_callback()
                    logger.info("Scheduled rescan completed successfully")

                    if self.socketio:
                        self.socketio.emit('scheduled_rescan_completed', {
                            'timestamp': self.last_scheduled_rescan.isoformat(),
                            'result': result
                        })

                    # Auto-start download if configured
                    if self.auto_download_after_rescan and self.download_callback:
                        logger.info("Starting auto-download after scheduled rescan")
                        threading.Thread(
                            target=self._perform_auto_download,
                            daemon=True
                        ).start()
                else:
                    logger.warning("No rescan callback configured")

            perform_rescan(_locked_by='scheduled_operation')

        except ProcessLockError:
            logger.warning("Could not acquire rescan lock for scheduled operation")
            if self.socketio:
                self.socketio.emit('scheduled_rescan_error', {
                    'error': 'Could not acquire rescan lock'
                })
        except Exception as e:
            logger.error(f"Scheduled rescan failed: {e}")
            if self.socketio:
                self.socketio.emit('scheduled_rescan_error', {
                    'error': str(e)
                })

    def _perform_auto_download(self):
        """Perform automatic download after scheduled rescan."""
        try:
            # Wait a bit after rescan to let UI update
            time.sleep(10)

            if self.download_callback:
                # Find series with missing episodes and start download
                logger.info("Starting auto-download of missing episodes")
                result = self.download_callback()

                if self.socketio:
                    self.socketio.emit('auto_download_started', {
                        'timestamp': datetime.now().isoformat(),
                        'result': result
                    })
            else:
                logger.warning("No download callback configured for auto-download")

        except Exception as e:
            logger.error(f"Auto-download after scheduled rescan failed: {e}")
            if self.socketio:
                self.socketio.emit('auto_download_error', {
                    'error': str(e)
                })

    def update_scheduled_rescan_config(self, enabled: bool, time_str: str, auto_download: bool = False):
        """Update scheduled rescan configuration."""
        try:
            # Validate time format
            if enabled and time_str:
                datetime.strptime(time_str, '%H:%M')

            # Update configuration
            self.scheduled_rescan_enabled = enabled
            self.scheduled_rescan_time = time_str
            self.auto_download_after_rescan = auto_download

            # Save to config
            self.config.scheduled_rescan_enabled = enabled
            self.config.scheduled_rescan_time = time_str
            self.config.auto_download_after_rescan = auto_download
            self.config.save_config()

            # Restart scheduler with new settings
            if self.running:
                self._setup_scheduled_jobs()

            logger.info(f"Updated scheduled rescan config: enabled={enabled}, time={time_str}, auto_download={auto_download}")
            return True

        except ValueError:
            logger.error(f"Invalid time format: {time_str}")
            raise ValueError("Invalid time format. Use HH:MM format.")
        except Exception as e:
            logger.error(f"Error updating scheduled rescan config: {e}")
            raise

    def get_scheduled_rescan_config(self) -> Dict[str, Any]:
        """Get current scheduled rescan configuration."""
        next_run = None
        if self.scheduled_rescan_enabled and self.scheduled_rescan_time:
            try:
                # Calculate next run time
                now = datetime.now()
                today_run = datetime.strptime(f"{now.strftime('%Y-%m-%d')} {self.scheduled_rescan_time}", '%Y-%m-%d %H:%M')

                if now > today_run:
                    # Next run is tomorrow
                    next_run = today_run + timedelta(days=1)
                else:
                    # Next run is today
                    next_run = today_run

            except Exception as e:
                logger.error(f"Error calculating next run time: {e}")

        return {
            'enabled': self.scheduled_rescan_enabled,
            'time': self.scheduled_rescan_time,
            'auto_download_after_rescan': self.auto_download_after_rescan,
            'next_run': next_run.isoformat() if next_run else None,
            'last_run': self.last_scheduled_rescan.isoformat() if self.last_scheduled_rescan else None,
            'is_running': self.running
        }

    def trigger_manual_scheduled_rescan(self):
        """Manually trigger a scheduled rescan (for testing purposes)."""
        logger.info("Manually triggering scheduled rescan")
        threading.Thread(target=self._perform_scheduled_rescan, daemon=True).start()

    def get_next_scheduled_jobs(self) -> list:
        """Get list of all scheduled jobs with their next run times."""
        jobs = []
        for job in schedule.jobs:
            jobs.append({
                'job_func': job.job_func.__name__ if hasattr(job.job_func, '__name__') else str(job.job_func),
                'next_run': job.next_run.isoformat() if job.next_run else None,
                'interval': str(job.interval),
                'unit': job.unit
            })
        return jobs


# Global scheduler instance
scheduled_operations = None


def init_scheduler(config_manager, socketio=None):
    """Initialize the global scheduler."""
    global scheduled_operations
    scheduled_operations = ScheduledOperations(config_manager, socketio)
    return scheduled_operations


def get_scheduler():
    """Get the global scheduler instance."""
    return scheduled_operations
File diff suppressed because it is too large
@ -1,268 +1,268 @@
"""
Setup service for detecting and managing application setup state.

This service determines if the application is properly configured and set up,
following the application flow pattern: setup → auth → main application.
"""

import json
import logging
import sqlite3
from datetime import datetime
from pathlib import Path
from typing import Any, Dict, List, Optional

logger = logging.getLogger(__name__)


class SetupService:
    """Service for managing application setup detection and configuration."""

    def __init__(self, config_path: str = "data/config.json", db_path: str = "data/aniworld.db"):
        """Initialize the setup service with configuration and database paths."""
        self.config_path = Path(config_path)
        self.db_path = Path(db_path)
        self._config_cache: Optional[Dict[str, Any]] = None

    def is_setup_complete(self) -> bool:
        """
        Check if the application setup is complete.

        Setup is considered complete if:
        1. Configuration file exists and is valid
        2. Database exists and is accessible
        3. Master password is configured
        4. Setup completion flag is set (if present)

        Returns:
            bool: True if setup is complete, False otherwise
        """
        try:
            # Check if configuration file exists and is valid
            if not self._is_config_valid():
                logger.info("Setup incomplete: Configuration file is missing or invalid")
                return False

            # Check if database exists and is accessible
            if not self._is_database_accessible():
                logger.info("Setup incomplete: Database is not accessible")
                return False

            # Check if master password is configured
            if not self._is_master_password_configured():
                logger.info("Setup incomplete: Master password is not configured")
                return False

            # Check for explicit setup completion flag
            config = self.get_config()
            if config and config.get("setup", {}).get("completed") is False:
                logger.info("Setup incomplete: Setup completion flag is False")
                return False

            logger.debug("Setup validation complete: All checks passed")
            return True

        except Exception as e:
            logger.error(f"Error checking setup completion: {e}")
            return False

    def _is_config_valid(self) -> bool:
        """Check if the configuration file exists and contains valid JSON."""
        try:
            if not self.config_path.exists():
                return False

            config = self.get_config()
            return config is not None and isinstance(config, dict)

        except Exception as e:
            logger.error(f"Configuration validation error: {e}")
            return False

    def _is_database_accessible(self) -> bool:
        """Check if the database exists and is accessible."""
        try:
            if not self.db_path.exists():
                return False

            # Try to connect and perform a simple query
            with sqlite3.connect(str(self.db_path)) as conn:
                cursor = conn.cursor()
                cursor.execute("SELECT name FROM sqlite_master WHERE type='table' LIMIT 1")
                return True

        except Exception as e:
            logger.error(f"Database accessibility check failed: {e}")
            return False

    def _is_master_password_configured(self) -> bool:
        """Check if master password is properly configured."""
        try:
            config = self.get_config()
            if not config:
                return False

            security_config = config.get("security", {})

            # Check if password hash exists
            password_hash = security_config.get("master_password_hash")
            salt = security_config.get("salt")

            return bool(password_hash and salt and len(password_hash) > 0 and len(salt) > 0)

        except Exception as e:
            logger.error(f"Master password configuration check failed: {e}")
            return False

    def get_config(self, force_reload: bool = False) -> Optional[Dict[str, Any]]:
        """
        Get the configuration data from the config file.

        Args:
            force_reload: If True, reload config from file even if cached

        Returns:
            dict: Configuration data or None if not accessible
        """
        try:
            if self._config_cache is None or force_reload:
                if not self.config_path.exists():
                    return None

                with open(self.config_path, 'r', encoding='utf-8') as f:
                    self._config_cache = json.load(f)

            return self._config_cache

        except Exception as e:
            logger.error(f"Error loading configuration: {e}")
            return None

    def mark_setup_complete(self, config_updates: Optional[Dict[str, Any]] = None) -> bool:
        """
        Mark the setup as completed and optionally update configuration.

        Args:
            config_updates: Additional configuration updates to apply

        Returns:
            bool: True if successful, False otherwise
        """
        try:
            config = self.get_config() or {}

            # Update configuration with any provided updates
            if config_updates:
                config.update(config_updates)

            # Set setup completion flag
            if "setup" not in config:
                config["setup"] = {}
            config["setup"]["completed"] = True
            config["setup"]["completed_at"] = str(datetime.utcnow())

            # Save updated configuration
            return self._save_config(config)

        except Exception as e:
            logger.error(f"Error marking setup as complete: {e}")
            return False

    def reset_setup(self) -> bool:
        """
        Reset the setup completion status (for development/testing).

        Returns:
            bool: True if successful, False otherwise
        """
        try:
            config = self.get_config()
            if not config:
                return False

            # Remove or set setup completion flag to false
            if "setup" in config:
                config["setup"]["completed"] = False

            return self._save_config(config)

        except Exception as e:
            logger.error(f"Error resetting setup: {e}")
            return False

    def _save_config(self, config: Dict[str, Any]) -> bool:
        """Save configuration to file."""
        try:
            # Ensure directory exists
            self.config_path.parent.mkdir(parents=True, exist_ok=True)

            # Save configuration
            with open(self.config_path, 'w', encoding='utf-8') as f:
                json.dump(config, f, indent=4, ensure_ascii=False)

            # Clear cache to force reload on next access
            self._config_cache = None

            logger.info(f"Configuration saved to {self.config_path}")
            return True

        except Exception as e:
            logger.error(f"Error saving configuration: {e}")
            return False

    def get_setup_requirements(self) -> Dict[str, bool]:
        """
        Get detailed breakdown of setup requirements and their status.

        Returns:
            dict: Dictionary with requirement names and their completion status
        """
        config = self.get_config()
        return {
            "config_file_exists": self.config_path.exists(),
            "config_file_valid": self._is_config_valid(),
            "database_exists": self.db_path.exists(),
            "database_accessible": self._is_database_accessible(),
            "master_password_configured": self._is_master_password_configured(),
            "setup_marked_complete": bool(config and config.get("setup", {}).get("completed", True))
        }

    def get_missing_requirements(self) -> List[str]:
        """
        Get list of missing setup requirements.

        Returns:
            list: List of missing requirement descriptions
        """
        requirements = self.get_setup_requirements()
        missing = []

        if not requirements["config_file_exists"]:
            missing.append("Configuration file is missing")
        elif not requirements["config_file_valid"]:
            missing.append("Configuration file is invalid or corrupted")

        if not requirements["database_exists"]:
            missing.append("Database file is missing")
        elif not requirements["database_accessible"]:
            missing.append("Database is not accessible or corrupted")

        if not requirements["master_password_configured"]:
            missing.append("Master password is not configured")

        if not requirements["setup_marked_complete"]:
            missing.append("Setup process was not completed")

        return missing


# Convenience functions for easy import
def is_setup_complete() -> bool:
    """Convenience function to check if setup is complete."""
    service = SetupService()
    return service.is_setup_complete()


def get_setup_service() -> SetupService:
    """Get a configured setup service instance."""
    return SetupService()
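# Usage sketch (illustrative; not part of the changed file above). It shows one way
# the setup check could gate the web UI, following the setup -> auth -> main
# application flow described in the module docstring. The Flask app object and the
# '/setup' route are hypothetical; SetupService, is_setup_complete() and
# get_missing_requirements() are defined above.
from flask import Flask, redirect, request

app = Flask(__name__)
setup_service = SetupService()


@app.before_request
def require_completed_setup():
    # Let the setup pages themselves through; redirect everything else until setup is done.
    if not request.path.startswith('/setup') and not setup_service.is_setup_complete():
        missing = setup_service.get_missing_requirements()
        app.logger.info("Setup incomplete, missing: %s", ", ".join(missing))
        return redirect('/setup')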
@ -1,782 +0,0 @@
|
||||
"""
|
||||
Anime Management API Endpoints
|
||||
|
||||
This module provides REST API endpoints for anime CRUD operations,
|
||||
including creation, reading, updating, deletion, and search functionality.
|
||||
"""
|
||||
|
||||
import uuid
|
||||
from typing import Any, Dict, List, Optional
|
||||
|
||||
from fastapi import APIRouter, Depends, HTTPException, Query, status
|
||||
from pydantic import BaseModel, Field
|
||||
|
||||
# Import SeriesApp for business logic
|
||||
from src.core.SeriesApp import SeriesApp
|
||||
|
||||
# FastAPI dependencies and models
|
||||
from src.server.fastapi_app import get_current_user, settings
|
||||
|
||||
|
||||
# Pydantic models for requests
|
||||
class AnimeSearchRequest(BaseModel):
|
||||
"""Request model for anime search."""
|
||||
query: str = Field(..., min_length=1, max_length=100)
|
||||
status: Optional[str] = Field(None, pattern="^(ongoing|completed|planned|dropped|paused)$")
|
||||
genre: Optional[str] = None
|
||||
year: Optional[int] = Field(None, ge=1900, le=2100)
|
||||
|
||||
class AnimeResponse(BaseModel):
|
||||
"""Response model for anime data."""
|
||||
id: str
|
||||
title: str
|
||||
description: Optional[str] = None
|
||||
status: str = "Unknown"
|
||||
folder: Optional[str] = None
|
||||
episodes: int = 0
|
||||
|
||||
class AnimeCreateRequest(BaseModel):
|
||||
"""Request model for creating anime entries."""
|
||||
name: str = Field(..., min_length=1, max_length=255)
|
||||
folder: str = Field(..., min_length=1)
|
||||
description: Optional[str] = None
|
||||
status: str = Field(default="planned", pattern="^(ongoing|completed|planned|dropped|paused)$")
|
||||
genre: Optional[str] = None
|
||||
year: Optional[int] = Field(None, ge=1900, le=2100)
|
||||
|
||||
class AnimeUpdateRequest(BaseModel):
|
||||
"""Request model for updating anime entries."""
|
||||
name: Optional[str] = Field(None, min_length=1, max_length=255)
|
||||
folder: Optional[str] = None
|
||||
description: Optional[str] = None
|
||||
status: Optional[str] = Field(None, pattern="^(ongoing|completed|planned|dropped|paused)$")
|
||||
genre: Optional[str] = None
|
||||
year: Optional[int] = Field(None, ge=1900, le=2100)
|
||||
|
||||
class PaginatedAnimeResponse(BaseModel):
|
||||
"""Paginated response model for anime lists."""
|
||||
success: bool = True
|
||||
data: List[AnimeResponse]
|
||||
pagination: Dict[str, Any]
|
||||
|
||||
class AnimeSearchResponse(BaseModel):
|
||||
"""Response model for anime search results."""
|
||||
success: bool = True
|
||||
data: List[AnimeResponse]
|
||||
pagination: Dict[str, Any]
|
||||
search: Dict[str, Any]
|
||||
|
||||
class RescanResponse(BaseModel):
|
||||
"""Response model for rescan operations."""
|
||||
success: bool
|
||||
message: str
|
||||
total_series: int
|
||||
|
||||
# Dependency to get SeriesApp instance
|
||||
def get_series_app() -> SeriesApp:
|
||||
"""Get SeriesApp instance for business logic operations."""
|
||||
if not settings.anime_directory:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
|
||||
detail="Anime directory not configured"
|
||||
)
|
||||
return SeriesApp(settings.anime_directory)
|
||||
|
||||
# Create FastAPI router for anime management endpoints
|
||||
router = APIRouter(prefix='/api/v1/anime', tags=['anime'])
|
||||
|
||||
|
||||
@router.get('', response_model=PaginatedAnimeResponse)
|
||||
async def list_anime(
|
||||
status: Optional[str] = Query(None, pattern="^(ongoing|completed|planned|dropped|paused)$"),
|
||||
genre: Optional[str] = Query(None),
|
||||
year: Optional[int] = Query(None, ge=1900, le=2100),
|
||||
search: Optional[str] = Query(None),
|
||||
page: int = Query(1, ge=1),
|
||||
per_page: int = Query(50, ge=1, le=1000),
|
||||
current_user: Optional[Dict] = Depends(get_current_user),
|
||||
series_app: SeriesApp = Depends(get_series_app)
|
||||
) -> PaginatedAnimeResponse:
|
||||
"""
|
||||
Get all anime with optional filtering and pagination.
|
||||
|
||||
Query Parameters:
|
||||
- status: Filter by anime status (ongoing, completed, planned, dropped, paused)
|
||||
- genre: Filter by genre
|
||||
- year: Filter by release year
|
||||
- search: Search in name and description
|
||||
- page: Page number (default: 1)
|
||||
- per_page: Items per page (default: 50, max: 1000)
|
||||
|
||||
Returns:
|
||||
Paginated list of anime with metadata
|
||||
"""
|
||||
try:
|
||||
# Get the series list from SeriesApp
|
||||
anime_list = series_app.series_list
|
||||
|
||||
# Convert to list of AnimeResponse objects
|
||||
anime_responses = []
|
||||
for series_item in anime_list:
|
||||
anime_response = AnimeResponse(
|
||||
id=getattr(series_item, 'id', str(uuid.uuid4())),
|
||||
title=getattr(series_item, 'name', 'Unknown'),
|
||||
folder=getattr(series_item, 'folder', ''),
|
||||
description=getattr(series_item, 'description', ''),
|
||||
status='ongoing', # Default status
|
||||
episodes=getattr(series_item, 'total_episodes', 0)
|
||||
)
|
||||
|
||||
# Apply search filter if provided
|
||||
if search:
|
||||
if search.lower() not in anime_response.title.lower():
|
||||
continue
|
||||
|
||||
anime_responses.append(anime_response)
|
||||
|
||||
# Apply pagination
|
||||
total = len(anime_responses)
|
||||
start_idx = (page - 1) * per_page
|
||||
end_idx = start_idx + per_page
|
||||
paginated_anime = anime_responses[start_idx:end_idx]
|
||||
|
||||
return PaginatedAnimeResponse(
|
||||
data=paginated_anime,
|
||||
pagination={
|
||||
"page": page,
|
||||
"per_page": per_page,
|
||||
"total": total,
|
||||
"pages": (total + per_page - 1) // per_page,
|
||||
"has_next": end_idx < total,
|
||||
"has_prev": page > 1
|
||||
}
|
||||
)
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Error retrieving anime list: {str(e)}"
|
||||
)
|
||||
|
||||
|
||||
@anime_bp.route('/<int:anime_id>', methods=['GET'])
|
||||
@handle_api_errors
|
||||
@validate_id_parameter('anime_id')
|
||||
@optional_auth
|
||||
def get_anime(anime_id: int) -> Dict[str, Any]:
|
||||
"""
|
||||
Get specific anime by ID.
|
||||
|
||||
Args:
|
||||
anime_id: Unique identifier for the anime
|
||||
|
||||
Returns:
|
||||
Anime details with episodes summary
|
||||
"""
|
||||
if not anime_repository:
|
||||
raise APIException("Anime repository not available", 503)
|
||||
|
||||
anime = anime_repository.get_anime_by_id(anime_id)
|
||||
if not anime:
|
||||
raise NotFoundError("Anime not found")
|
||||
|
||||
# Format anime data
|
||||
anime_data = format_anime_response(anime.__dict__)
|
||||
|
||||
# Add episodes summary
|
||||
episodes_summary = anime_repository.get_episodes_summary(anime_id)
|
||||
anime_data['episodes_summary'] = episodes_summary
|
||||
|
||||
return create_success_response(anime_data)
|
||||
|
||||
|
||||
@anime_bp.route('', methods=['POST'])
|
||||
@handle_api_errors
|
||||
@validate_json_input(
|
||||
required_fields=['name', 'folder'],
|
||||
optional_fields=['key', 'description', 'genres', 'release_year', 'status', 'total_episodes', 'poster_url', 'custom_metadata'],
|
||||
field_types={
|
||||
'name': str,
|
||||
'folder': str,
|
||||
'key': str,
|
||||
'description': str,
|
||||
'genres': list,
|
||||
'release_year': int,
|
||||
'status': str,
|
||||
'total_episodes': int,
|
||||
'poster_url': str,
|
||||
'custom_metadata': dict
|
||||
}
|
||||
)
|
||||
@require_auth
|
||||
def create_anime() -> Dict[str, Any]:
|
||||
"""
|
||||
Create a new anime record.
|
||||
|
||||
Required Fields:
|
||||
- name: Anime name
|
||||
- folder: Folder path where anime files are stored
|
||||
|
||||
Optional Fields:
|
||||
- key: Unique key identifier
|
||||
- description: Anime description
|
||||
- genres: List of genres
|
||||
- release_year: Year of release
|
||||
- status: Status (ongoing, completed, planned, dropped, paused)
|
||||
- total_episodes: Total number of episodes
|
||||
- poster_url: URL to poster image
|
||||
- custom_metadata: Additional metadata as key-value pairs
|
||||
|
||||
Returns:
|
||||
Created anime details with generated ID
|
||||
"""
|
||||
if not anime_repository:
|
||||
raise APIException("Anime repository not available", 503)
|
||||
|
||||
data = request.get_json()
|
||||
|
||||
# Validate status if provided
|
||||
if 'status' in data and data['status'] not in ['ongoing', 'completed', 'planned', 'dropped', 'paused']:
|
||||
raise ValidationError("Status must be one of: ongoing, completed, planned, dropped, paused")
|
||||
|
||||
# Check if anime with same folder already exists
|
||||
existing_anime = anime_repository.get_anime_by_folder(data['folder'])
|
||||
if existing_anime:
|
||||
raise ValidationError("Anime with this folder already exists")
|
||||
|
||||
# Create anime metadata object
|
||||
try:
|
||||
anime = AnimeMetadata(
|
||||
anime_id=str(uuid.uuid4()),
|
||||
name=data['name'],
|
||||
folder=data['folder'],
|
||||
key=data.get('key'),
|
||||
description=data.get('description'),
|
||||
genres=data.get('genres', []),
|
||||
release_year=data.get('release_year'),
|
||||
status=data.get('status', 'planned'),
|
||||
total_episodes=data.get('total_episodes'),
|
||||
poster_url=data.get('poster_url'),
|
||||
custom_metadata=data.get('custom_metadata', {})
|
||||
)
|
||||
except Exception as e:
|
||||
raise ValidationError(f"Invalid anime data: {str(e)}")
|
||||
|
||||
# Save to database
|
||||
success = anime_repository.create_anime(anime)
|
||||
if not success:
|
||||
raise APIException("Failed to create anime", 500)
|
||||
|
||||
# Return created anime
|
||||
anime_data = format_anime_response(anime.__dict__)
|
||||
return create_success_response(
|
||||
data=anime_data,
|
||||
message="Anime created successfully",
|
||||
status_code=201
|
||||
)
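
# Example (sketch): a minimal request body for the endpoint above, covering the
# two required fields plus a few of the optional ones. Host and route prefix are
# assumptions; adjust to wherever anime_bp is registered.
def _example_create_anime(base_url: str = "http://localhost:5000/api/anime") -> None:
    import requests  # local import: only needed for this illustration

    payload = {
        "name": "Example Show",
        "folder": "/media/anime/example-show",  # must not collide with an existing anime
        "status": "planned",                    # one of the statuses listed in the docstring
        "genres": ["action", "drama"],
        "release_year": 2023,
    }
    resp = requests.post(base_url, json=payload, timeout=10)
    print(resp.status_code, resp.json())
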
|
||||
|
||||
|
||||
@anime_bp.route('/<int:anime_id>', methods=['PUT'])
|
||||
@handle_api_errors
|
||||
@validate_id_parameter('anime_id')
|
||||
@validate_json_input(
|
||||
optional_fields=['name', 'folder', 'key', 'description', 'genres', 'release_year', 'status', 'total_episodes', 'poster_url', 'custom_metadata'],
|
||||
field_types={
|
||||
'name': str,
|
||||
'folder': str,
|
||||
'key': str,
|
||||
'description': str,
|
||||
'genres': list,
|
||||
'release_year': int,
|
||||
'status': str,
|
||||
'total_episodes': int,
|
||||
'poster_url': str,
|
||||
'custom_metadata': dict
|
||||
}
|
||||
)
|
||||
@require_auth
|
||||
def update_anime(anime_id: int) -> Dict[str, Any]:
|
||||
"""
|
||||
Update an existing anime record.
|
||||
|
||||
Args:
|
||||
anime_id: Unique identifier for the anime
|
||||
|
||||
Optional Fields:
|
||||
- name: Anime name
|
||||
- folder: Folder path where anime files are stored
|
||||
- key: Unique key identifier
|
||||
- description: Anime description
|
||||
- genres: List of genres
|
||||
- release_year: Year of release
|
||||
- status: Status (ongoing, completed, planned, dropped, paused)
|
||||
- total_episodes: Total number of episodes
|
||||
- poster_url: URL to poster image
|
||||
- custom_metadata: Additional metadata as key-value pairs
|
||||
|
||||
Returns:
|
||||
Updated anime details
|
||||
"""
|
||||
if not anime_repository:
|
||||
raise APIException("Anime repository not available", 503)
|
||||
|
||||
data = request.get_json()
|
||||
|
||||
# Get existing anime
|
||||
existing_anime = anime_repository.get_anime_by_id(anime_id)
|
||||
if not existing_anime:
|
||||
raise NotFoundError("Anime not found")
|
||||
|
||||
# Validate status if provided
|
||||
if 'status' in data and data['status'] not in ['ongoing', 'completed', 'planned', 'dropped', 'paused']:
|
||||
raise ValidationError("Status must be one of: ongoing, completed, planned, dropped, paused")
|
||||
|
||||
# Check if folder is being changed and if it conflicts
|
||||
if 'folder' in data and data['folder'] != existing_anime.folder:
|
||||
conflicting_anime = anime_repository.get_anime_by_folder(data['folder'])
|
||||
if conflicting_anime and conflicting_anime.anime_id != anime_id:
|
||||
raise ValidationError("Another anime with this folder already exists")
|
||||
|
||||
# Update fields
|
||||
update_fields = {}
|
||||
for field in ['name', 'folder', 'key', 'description', 'genres', 'release_year', 'status', 'total_episodes', 'poster_url']:
|
||||
if field in data:
|
||||
update_fields[field] = data[field]
|
||||
|
||||
# Handle custom metadata update (merge instead of replace)
|
||||
if 'custom_metadata' in data:
|
||||
existing_metadata = existing_anime.custom_metadata or {}
|
||||
existing_metadata.update(data['custom_metadata'])
|
||||
update_fields['custom_metadata'] = existing_metadata
|
||||
|
||||
# Perform update
|
||||
success = anime_repository.update_anime(anime_id, update_fields)
|
||||
if not success:
|
||||
raise APIException("Failed to update anime", 500)
|
||||
|
||||
# Get updated anime
|
||||
updated_anime = anime_repository.get_anime_by_id(anime_id)
|
||||
anime_data = format_anime_response(updated_anime.__dict__)
|
||||
|
||||
return create_success_response(
|
||||
data=anime_data,
|
||||
message="Anime updated successfully"
|
||||
)
|
||||
|
||||
|
||||
@anime_bp.route('/<int:anime_id>', methods=['DELETE'])
|
||||
@handle_api_errors
|
||||
@validate_id_parameter('anime_id')
|
||||
@require_auth
|
||||
def delete_anime(anime_id: int) -> Dict[str, Any]:
|
||||
"""
|
||||
Delete an anime record and all related data.
|
||||
|
||||
Args:
|
||||
anime_id: Unique identifier for the anime
|
||||
|
||||
Query Parameters:
|
||||
- force: Set to 'true' to force deletion even if episodes exist
|
||||
|
||||
Returns:
|
||||
Deletion confirmation
|
||||
"""
|
||||
if not anime_repository:
|
||||
raise APIException("Anime repository not available", 503)
|
||||
|
||||
# Check if anime exists
|
||||
existing_anime = anime_repository.get_anime_by_id(anime_id)
|
||||
if not existing_anime:
|
||||
raise NotFoundError("Anime not found")
|
||||
|
||||
# Check for existing episodes unless force deletion
|
||||
force_delete = request.args.get('force', 'false').lower() == 'true'
|
||||
if not force_delete:
|
||||
episode_count = anime_repository.get_episode_count(anime_id)
|
||||
if episode_count > 0:
|
||||
raise ValidationError(
|
||||
f"Cannot delete anime with {episode_count} episodes. "
|
||||
"Use ?force=true to force deletion or delete episodes first."
|
||||
)
|
||||
|
||||
# Perform deletion (this should cascade to episodes, downloads, etc.)
|
||||
success = anime_repository.delete_anime(anime_id)
|
||||
if not success:
|
||||
raise APIException("Failed to delete anime", 500)
|
||||
|
||||
return create_success_response(
|
||||
message=f"Anime '{existing_anime.name}' deleted successfully"
|
||||
)
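
# Design note (sketch): the cascade mentioned above is simplest to get from the
# schema itself. Assuming a SQLite-backed repository, a cascading foreign key on
# the episodes table lets delete_anime() remove only the parent row. Table and
# column names here are illustrative, not the project's actual schema.
_EPISODES_DDL_SKETCH = """
CREATE TABLE IF NOT EXISTS episodes (
    id INTEGER PRIMARY KEY,
    anime_id INTEGER NOT NULL REFERENCES anime(id) ON DELETE CASCADE,
    episode_number INTEGER NOT NULL
);
"""
# Note: SQLite only enforces this when each connection runs PRAGMA foreign_keys = ON.
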
|
||||
|
||||
|
||||
@router.get('/search', response_model=AnimeSearchResponse)
|
||||
async def search_anime(
|
||||
q: str = Query(..., min_length=2, description="Search query"),
|
||||
page: int = Query(1, ge=1),
|
||||
per_page: int = Query(20, ge=1, le=100),
|
||||
current_user: Optional[Dict] = Depends(get_current_user),
|
||||
series_app: SeriesApp = Depends(get_series_app)
|
||||
) -> AnimeSearchResponse:
|
||||
"""
|
||||
Search anime by name using SeriesApp.
|
||||
|
||||
Query Parameters:
|
||||
- q: Search query (required, min 2 characters)
|
||||
- page: Page number (default: 1)
|
||||
- per_page: Items per page (default: 20, max: 100)
|
||||
|
||||
Returns:
|
||||
Paginated search results
|
||||
"""
|
||||
try:
|
||||
# Use SeriesApp to perform search
|
||||
search_results = series_app.search(q)
|
||||
|
||||
# Convert search results to AnimeResponse objects
|
||||
anime_responses = []
|
||||
for result in search_results:
|
||||
anime_response = AnimeResponse(
|
||||
id=getattr(result, 'id', str(uuid.uuid4())),
|
||||
title=getattr(result, 'name', getattr(result, 'title', 'Unknown')),
|
||||
description=getattr(result, 'description', ''),
|
||||
status='available',
|
||||
episodes=getattr(result, 'episodes', 0),
|
||||
folder=getattr(result, 'key', '')
|
||||
)
|
||||
anime_responses.append(anime_response)
|
||||
|
||||
# Apply pagination
|
||||
total = len(anime_responses)
|
||||
start_idx = (page - 1) * per_page
|
||||
end_idx = start_idx + per_page
|
||||
paginated_results = anime_responses[start_idx:end_idx]
|
||||
|
||||
return AnimeSearchResponse(
|
||||
data=paginated_results,
|
||||
pagination={
|
||||
"page": page,
|
||||
"per_page": per_page,
|
||||
"total": total,
|
||||
"pages": (total + per_page - 1) // per_page,
|
||||
"has_next": end_idx < total,
|
||||
"has_prev": page > 1
|
||||
},
|
||||
search={
|
||||
"query": q,
|
||||
"total_results": total
|
||||
}
|
||||
)
|
||||
except Exception as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=f"Search failed: {str(e)}"
|
||||
)
@anime_bp.route('/<int:anime_id>/episodes', methods=['GET'])
|
||||
@handle_api_errors
|
||||
@validate_id_parameter('anime_id')
|
||||
@validate_pagination_params
|
||||
@optional_auth
|
||||
def get_anime_episodes(anime_id: int) -> Dict[str, Any]:
|
||||
"""
|
||||
Get all episodes for a specific anime.
|
||||
|
||||
Args:
|
||||
anime_id: Unique identifier for the anime
|
||||
|
||||
Query Parameters:
|
||||
- status: Filter by episode status
|
||||
- downloaded: Filter by download status (true/false)
|
||||
- page: Page number (default: 1)
|
||||
- per_page: Items per page (default: 50, max: 1000)
|
||||
|
||||
Returns:
|
||||
Paginated list of episodes for the anime
|
||||
"""
|
||||
if not anime_repository:
|
||||
raise APIException("Anime repository not available", 503)
|
||||
|
||||
# Check if anime exists
|
||||
anime = anime_repository.get_anime_by_id(anime_id)
|
||||
if not anime:
|
||||
raise NotFoundError("Anime not found")
|
||||
|
||||
# Get filters
|
||||
status_filter = request.args.get('status')
|
||||
downloaded_filter = request.args.get('downloaded')
|
||||
|
||||
# Validate downloaded filter
|
||||
if downloaded_filter and downloaded_filter.lower() not in ['true', 'false']:
|
||||
raise ValidationError("Downloaded filter must be 'true' or 'false'")
|
||||
|
||||
# Get pagination parameters
|
||||
page, per_page = extract_pagination_params()
|
||||
|
||||
# Get episodes
|
||||
episodes = anime_repository.get_episodes_for_anime(
|
||||
anime_id=anime_id,
|
||||
status_filter=status_filter,
|
||||
downloaded_filter=downloaded_filter.lower() == 'true' if downloaded_filter else None
|
||||
)
|
||||
|
||||
# Format episodes (this would use episode formatting from episodes.py)
|
||||
formatted_episodes = []
|
||||
for episode in episodes:
|
||||
formatted_episodes.append({
|
||||
'id': episode.id,
|
||||
'episode_number': episode.episode_number,
|
||||
'title': episode.title,
|
||||
'url': episode.url,
|
||||
'status': episode.status,
|
||||
'is_downloaded': episode.is_downloaded,
|
||||
'file_path': episode.file_path,
|
||||
'file_size': episode.file_size,
|
||||
'created_at': episode.created_at.isoformat() if episode.created_at else None,
|
||||
'updated_at': episode.updated_at.isoformat() if episode.updated_at else None
|
||||
})
|
||||
|
||||
# Apply pagination
|
||||
total = len(formatted_episodes)
|
||||
start_idx = (page - 1) * per_page
|
||||
end_idx = start_idx + per_page
|
||||
paginated_episodes = formatted_episodes[start_idx:end_idx]
|
||||
|
||||
return create_paginated_response(
|
||||
data=paginated_episodes,
|
||||
page=page,
|
||||
per_page=per_page,
|
||||
total=total,
|
||||
endpoint='anime.get_anime_episodes',
|
||||
anime_id=anime_id
|
||||
)
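
# Example (sketch): fetching only the episodes that are not downloaded yet, using
# the filters documented above. Host and route prefix are assumptions, and the
# example assumes the paginated payload exposes its items under "data".
def _example_pending_episodes(anime_id: int) -> list:
    import httpx  # local import: only needed for this illustration

    resp = httpx.get(
        f"http://localhost:5000/api/anime/{anime_id}/episodes",
        params={"downloaded": "false", "page": 1, "per_page": 100},
        timeout=10,
    )
    return resp.json().get("data", [])
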
|
||||
|
||||
|
||||
@anime_bp.route('/bulk', methods=['POST'])
|
||||
@handle_api_errors
|
||||
@validate_json_input(
|
||||
required_fields=['action', 'anime_ids'],
|
||||
optional_fields=['data'],
|
||||
field_types={
|
||||
'action': str,
|
||||
'anime_ids': list,
|
||||
'data': dict
|
||||
}
|
||||
)
|
||||
@require_auth
|
||||
def bulk_anime_operation() -> Dict[str, Any]:
|
||||
"""
|
||||
Perform bulk operations on multiple anime.
|
||||
|
||||
Required Fields:
|
||||
- action: Operation to perform (update_status, delete, update_metadata)
|
||||
- anime_ids: List of anime IDs to operate on
|
||||
|
||||
Optional Fields:
|
||||
- data: Additional data for the operation
|
||||
|
||||
Returns:
|
||||
Results of the bulk operation
|
||||
"""
|
||||
if not anime_repository:
|
||||
raise APIException("Anime repository not available", 503)
|
||||
|
||||
data = request.get_json()
|
||||
action = data['action']
|
||||
anime_ids = data['anime_ids']
|
||||
operation_data = data.get('data', {})
|
||||
|
||||
# Validate action
|
||||
valid_actions = ['update_status', 'delete', 'update_metadata', 'update_genres']
|
||||
if action not in valid_actions:
|
||||
raise ValidationError(f"Invalid action. Must be one of: {', '.join(valid_actions)}")
|
||||
|
||||
# Validate anime_ids
|
||||
if not isinstance(anime_ids, list) or not anime_ids:
|
||||
raise ValidationError("anime_ids must be a non-empty list")
|
||||
|
||||
if len(anime_ids) > 100:
|
||||
raise ValidationError("Cannot operate on more than 100 anime at once")
|
||||
|
||||
# Validate anime IDs are integers
|
||||
try:
|
||||
anime_ids = [int(aid) for aid in anime_ids]
|
||||
except ValueError:
|
||||
raise ValidationError("All anime_ids must be valid integers")
|
||||
|
||||
# Perform bulk operation
|
||||
successful_items = []
|
||||
failed_items = []
|
||||
|
||||
for anime_id in anime_ids:
|
||||
try:
|
||||
if action == 'update_status':
|
||||
if 'status' not in operation_data:
|
||||
raise ValueError("Status is required for update_status action")
|
||||
|
||||
success = anime_repository.update_anime(anime_id, {'status': operation_data['status']})
|
||||
if success:
|
||||
successful_items.append({'anime_id': anime_id, 'action': 'status_updated'})
|
||||
else:
|
||||
failed_items.append({'anime_id': anime_id, 'error': 'Update failed'})
|
||||
|
||||
elif action == 'delete':
|
||||
success = anime_repository.delete_anime(anime_id)
|
||||
if success:
|
||||
successful_items.append({'anime_id': anime_id, 'action': 'deleted'})
|
||||
else:
|
||||
failed_items.append({'anime_id': anime_id, 'error': 'Deletion failed'})
|
||||
|
||||
elif action == 'update_metadata':
|
||||
success = anime_repository.update_anime(anime_id, operation_data)
|
||||
if success:
|
||||
successful_items.append({'anime_id': anime_id, 'action': 'metadata_updated'})
|
||||
else:
|
||||
failed_items.append({'anime_id': anime_id, 'error': 'Metadata update failed'})
|
||||
|
||||
except Exception as e:
|
||||
failed_items.append({'anime_id': anime_id, 'error': str(e)})
|
||||
|
||||
# Create batch response
|
||||
from ...shared.response_helpers import create_batch_response
|
||||
return create_batch_response(
|
||||
successful_items=successful_items,
|
||||
failed_items=failed_items,
|
||||
message=f"Bulk {action} operation completed"
|
||||
)
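
# Example (sketch): request body for the bulk endpoint above, updating the status
# of several anime at once. The IDs are illustrative.
_BULK_STATUS_UPDATE_EXAMPLE = {
    "action": "update_status",
    "anime_ids": [1, 2, 3],
    "data": {"status": "completed"},
}
# The response separates successful_items from failed_items, so a partial failure
# does not abort the remaining updates.
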
|
||||
|
||||
@router.post('/rescan', response_model=RescanResponse)
async def rescan_anime_directory(
    current_user: Dict = Depends(get_current_user),
    series_app: SeriesApp = Depends(get_series_app)
) -> RescanResponse:
    """
    Rescan the anime directory for new episodes and series.

    Returns:
        Status of the rescan operation
    """
    try:
        # Use SeriesApp to perform rescan with a simple callback
        def progress_callback(progress_info):
            # Simple progress tracking - in a real implementation,
            # this could be sent via WebSocket or stored for polling
            pass

        series_app.ReScan(progress_callback)

        return RescanResponse(
            success=True,
            message="Anime directory rescanned successfully",
            total_series=len(series_app.series_list) if hasattr(series_app, 'series_list') else 0
        )
    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Rescan failed: {str(e)}"
        )
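
# Sketch: one way to make rescan progress observable, as the callback comment above
# suggests. Events are appended to a bounded, thread-safe deque that a separate
# status endpoint (not shown) could expose for polling. The shape of progress_info
# is an assumption; SeriesApp defines whatever it actually passes.
from collections import deque

_RESCAN_PROGRESS: deque = deque(maxlen=100)  # keep only the most recent events

def _recording_progress_callback(progress_info) -> None:
    """Store rescan progress so it can be polled later instead of being discarded."""
    _RESCAN_PROGRESS.append(progress_info)
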
|
||||
|
||||
|
||||
# Additional endpoints for legacy API compatibility
class AddSeriesRequest(BaseModel):
    """Request model for adding a new series."""
    link: str = Field(..., min_length=1)
    name: str = Field(..., min_length=1, max_length=255)


class AddSeriesResponse(BaseModel):
    """Response model for add series operation."""
    status: str
    message: str


class DownloadRequest(BaseModel):
    """Request model for downloading series."""
    folders: List[str] = Field(..., min_items=1)


class DownloadResponse(BaseModel):
    """Response model for download operation."""
    status: str
    message: str
|
||||
|
||||
|
||||
@router.post('/add_series', response_model=AddSeriesResponse)
async def add_series(
    request_data: AddSeriesRequest,
    current_user: Dict = Depends(get_current_user),
    series_app: SeriesApp = Depends(get_series_app)
) -> AddSeriesResponse:
    """
    Add a new series to the collection.

    Args:
        request_data: Contains link and name of the series to add

    Returns:
        Status of the add operation
    """
    try:
        # For now, just return success - actual implementation would use SeriesApp
        # to add the series to the collection
        return AddSeriesResponse(
            status="success",
            message=f"Series '{request_data.name}' added successfully"
        )
    except Exception as e:
        return AddSeriesResponse(
            status="error",
            message=f"Failed to add series: {str(e)}"
        )
|
||||
|
||||
|
||||
@router.post('/download', response_model=DownloadResponse)
async def download_series(
    request_data: DownloadRequest,
    current_user: Dict = Depends(get_current_user),
    series_app: SeriesApp = Depends(get_series_app)
) -> DownloadResponse:
    """
    Start downloading selected series folders.

    Args:
        request_data: Contains list of folder names to download

    Returns:
        Status of the download operation
    """
    try:
        # For now, just return success - actual implementation would use SeriesApp
        # to start downloads
        folder_count = len(request_data.folders)
        return DownloadResponse(
            status="success",
            message=f"Download started for {folder_count} series"
        )
    except Exception as e:
        return DownloadResponse(
            status="error",
            message=f"Failed to start download: {str(e)}"
        )
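
# Sketch: if the add_series/download stubs above are later wired to SeriesApp, its
# calls are presumably blocking, so running them in a worker thread keeps the event
# loop responsive. The series_app method name in the usage comment is an assumption.
import asyncio
from functools import partial

async def _run_blocking(func, *args, **kwargs):
    """Run a blocking SeriesApp call without stalling the FastAPI event loop."""
    loop = asyncio.get_running_loop()
    return await loop.run_in_executor(None, partial(func, *args, **kwargs))

# Usage inside an endpoint (illustrative):
#     await _run_blocking(series_app.download, request_data.folders)
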
|
||||
@ -1,773 +0,0 @@
|
||||
"""
|
||||
Authentication API endpoints.
|
||||
|
||||
This module handles all authentication-related operations including:
|
||||
- User authentication
|
||||
- Session management
|
||||
- Password management
|
||||
- API key management
|
||||
"""
|
||||
|
||||
from flask import Blueprint, request, session, jsonify
|
||||
from typing import Dict, List, Any, Optional, Tuple
|
||||
import logging
|
||||
import hashlib
|
||||
import secrets
|
||||
import time
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
# Import shared utilities
|
||||
try:
|
||||
from src.server.web.controllers.shared.auth_decorators import require_auth, optional_auth
|
||||
from src.server.web.controllers.shared.error_handlers import handle_api_errors
|
||||
from src.server.web.controllers.shared.validators import (
|
||||
validate_json_input, validate_query_params, is_valid_email, sanitize_string
|
||||
)
|
||||
from src.server.web.controllers.shared.response_helpers import (
|
||||
create_success_response, create_error_response, format_user_data
|
||||
)
|
||||
except ImportError:
|
||||
# Fallback imports for development
|
||||
def require_auth(f): return f
|
||||
def optional_auth(f): return f
|
||||
def handle_api_errors(f): return f
|
||||
def validate_json_input(**kwargs): return lambda f: f
|
||||
def validate_query_params(**kwargs): return lambda f: f
|
||||
def is_valid_email(email): return '@' in email
|
||||
def sanitize_string(s): return str(s).strip()
|
||||
def create_success_response(msg, code=200, data=None): return jsonify({'success': True, 'message': msg, 'data': data}), code
|
||||
def create_error_response(msg, code=400, details=None): return jsonify({'error': msg, 'details': details}), code
|
||||
def format_user_data(data): return data
|
||||
|
||||
# Import authentication components
|
||||
try:
|
||||
from src.data.user_manager import UserManager
|
||||
from src.data.session_manager import SessionManager
|
||||
from src.data.api_key_manager import APIKeyManager
|
||||
except ImportError:
|
||||
    # Fallback for development - stub signatures mirror how the managers are
    # called later in this module
    class UserManager:
        def authenticate_user(self, username, password): return None
        def get_user_by_id(self, id): return None
        def get_user_by_username(self, username): return None
        def get_user_by_email(self, email): return None
        def create_user(self, **kwargs): return 1
        def update_user(self, id, **kwargs): return True
        def delete_user(self, id): return True
        def change_password(self, id, new_password): return True
        def reset_password(self, email): return 'reset_token'
        def create_password_reset_token(self, user_id): return 'reset_token'
        def verify_reset_token(self, token): return None
        def get_user_sessions(self, user_id): return []
        def get_user_activity(self, user_id, limit=50, offset=0): return []

    class SessionManager:
        def create_session(self, user_id, extended=False): return 'session_token'
        def validate_session(self, token): return None
        def destroy_session(self, token): return True
        def destroy_all_sessions(self, user_id, except_token=None): return True
        def get_session_info(self, token): return None
        def update_session_activity(self, token): return True

    class APIKeyManager:
        def create_api_key(self, user_id, name, description=''): return {'id': 1, 'key': 'api_key', 'name': name}
        def get_user_api_keys(self, user_id): return []
        def revoke_api_key(self, key_id, user_id=None): return True
        def validate_api_key(self, key): return None
|
||||
|
||||
# Create blueprint
|
||||
auth_bp = Blueprint('auth', __name__)
|
||||
|
||||
# Initialize managers
|
||||
user_manager = UserManager()
|
||||
session_manager = SessionManager()
|
||||
api_key_manager = APIKeyManager()
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@auth_bp.route('/auth/login', methods=['POST'])
|
||||
@handle_api_errors
|
||||
@validate_json_input(
|
||||
required_fields=['username', 'password'],
|
||||
optional_fields=['remember_me'],
|
||||
field_types={'username': str, 'password': str, 'remember_me': bool}
|
||||
)
|
||||
def login() -> Tuple[Any, int]:
|
||||
"""
|
||||
Authenticate user and create session.
|
||||
|
||||
Request Body:
|
||||
- username: Username or email
|
||||
- password: User password
|
||||
- remember_me: Extend session duration (optional)
|
||||
|
||||
Returns:
|
||||
JSON response with authentication result
|
||||
"""
|
||||
data = request.get_json()
|
||||
username = sanitize_string(data['username'])
|
||||
password = data['password']
|
||||
remember_me = data.get('remember_me', False)
|
||||
|
||||
try:
|
||||
# Authenticate user
|
||||
user = user_manager.authenticate_user(username, password)
|
||||
|
||||
if not user:
|
||||
logger.warning(f"Failed login attempt for username: {username}")
|
||||
return create_error_response("Invalid username or password", 401)
|
||||
|
||||
# Create session
|
||||
session_token = session_manager.create_session(
|
||||
user['id'],
|
||||
extended=remember_me
|
||||
)
|
||||
|
||||
# Set session data
|
||||
session['user_id'] = user['id']
|
||||
session['username'] = user['username']
|
||||
session['session_token'] = session_token
|
||||
session.permanent = remember_me
|
||||
|
||||
# Format user data (exclude sensitive information)
|
||||
user_data = format_user_data(user, include_sensitive=False)
|
||||
|
||||
response_data = {
|
||||
'user': user_data,
|
||||
'session_token': session_token,
|
||||
'expires_at': (datetime.now() + timedelta(days=30 if remember_me else 7)).isoformat()
|
||||
}
|
||||
|
||||
logger.info(f"User {user['username']} (ID: {user['id']}) logged in successfully")
|
||||
return create_success_response("Login successful", 200, response_data)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error during login for username {username}: {str(e)}")
|
||||
return create_error_response("Login failed", 500)
|
||||
|
||||
|
||||
@auth_bp.route('/auth/logout', methods=['POST'])
|
||||
@require_auth
|
||||
@handle_api_errors
|
||||
def logout() -> Tuple[Any, int]:
|
||||
"""
|
||||
Logout user and destroy session.
|
||||
|
||||
Returns:
|
||||
JSON response with logout result
|
||||
"""
|
||||
try:
|
||||
# Get session token
|
||||
session_token = session.get('session_token')
|
||||
user_id = session.get('user_id')
|
||||
|
||||
if session_token:
|
||||
# Destroy session in database
|
||||
session_manager.destroy_session(session_token)
|
||||
|
||||
# Clear Flask session
|
||||
session.clear()
|
||||
|
||||
logger.info(f"User ID {user_id} logged out successfully")
|
||||
return create_success_response("Logout successful")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error during logout: {str(e)}")
|
||||
return create_error_response("Logout failed", 500)
|
||||
|
||||
|
||||
@auth_bp.route('/auth/register', methods=['POST'])
|
||||
@handle_api_errors
|
||||
@validate_json_input(
|
||||
required_fields=['username', 'email', 'password'],
|
||||
optional_fields=['full_name'],
|
||||
field_types={'username': str, 'email': str, 'password': str, 'full_name': str}
|
||||
)
|
||||
def register() -> Tuple[Any, int]:
|
||||
"""
|
||||
Register new user account.
|
||||
|
||||
Request Body:
|
||||
- username: Unique username
|
||||
- email: User email address
|
||||
- password: User password
|
||||
- full_name: User's full name (optional)
|
||||
|
||||
Returns:
|
||||
JSON response with registration result
|
||||
"""
|
||||
data = request.get_json()
|
||||
username = sanitize_string(data['username'])
|
||||
email = sanitize_string(data['email'])
|
||||
password = data['password']
|
||||
full_name = sanitize_string(data.get('full_name', ''))
|
||||
|
||||
# Validate input
|
||||
if len(username) < 3:
|
||||
return create_error_response("Username must be at least 3 characters long", 400)
|
||||
|
||||
if len(password) < 8:
|
||||
return create_error_response("Password must be at least 8 characters long", 400)
|
||||
|
||||
if not is_valid_email(email):
|
||||
return create_error_response("Invalid email address", 400)
|
||||
|
||||
try:
|
||||
# Check if username already exists
|
||||
existing_user = user_manager.get_user_by_username(username)
|
||||
if existing_user:
|
||||
return create_error_response("Username already exists", 409)
|
||||
|
||||
# Check if email already exists
|
||||
existing_email = user_manager.get_user_by_email(email)
|
||||
if existing_email:
|
||||
return create_error_response("Email already registered", 409)
|
||||
|
||||
# Create user
|
||||
user_id = user_manager.create_user(
|
||||
username=username,
|
||||
email=email,
|
||||
password=password,
|
||||
full_name=full_name
|
||||
)
|
||||
|
||||
# Get created user
|
||||
user = user_manager.get_user_by_id(user_id)
|
||||
user_data = format_user_data(user, include_sensitive=False)
|
||||
|
||||
logger.info(f"New user registered: {username} (ID: {user_id})")
|
||||
return create_success_response("Registration successful", 201, user_data)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error during registration for username {username}: {str(e)}")
|
||||
return create_error_response("Registration failed", 500)
|
||||
|
||||
|
||||
@auth_bp.route('/auth/me', methods=['GET'])
|
||||
@require_auth
|
||||
@handle_api_errors
|
||||
def get_current_user() -> Tuple[Any, int]:
|
||||
"""
|
||||
Get current user information.
|
||||
|
||||
Returns:
|
||||
JSON response with current user data
|
||||
"""
|
||||
try:
|
||||
user_id = session.get('user_id')
|
||||
user = user_manager.get_user_by_id(user_id)
|
||||
|
||||
if not user:
|
||||
return create_error_response("User not found", 404)
|
||||
|
||||
user_data = format_user_data(user, include_sensitive=False)
|
||||
return create_success_response("User information retrieved", 200, user_data)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting current user: {str(e)}")
|
||||
return create_error_response("Failed to get user information", 500)
|
||||
|
||||
|
||||
@auth_bp.route('/auth/me', methods=['PUT'])
|
||||
@require_auth
|
||||
@handle_api_errors
|
||||
@validate_json_input(
|
||||
optional_fields=['email', 'full_name'],
|
||||
field_types={'email': str, 'full_name': str}
|
||||
)
|
||||
def update_current_user() -> Tuple[Any, int]:
|
||||
"""
|
||||
Update current user information.
|
||||
|
||||
Request Body:
|
||||
- email: New email address (optional)
|
||||
- full_name: New full name (optional)
|
||||
|
||||
Returns:
|
||||
JSON response with update result
|
||||
"""
|
||||
data = request.get_json()
|
||||
user_id = session.get('user_id')
|
||||
|
||||
# Validate email if provided
|
||||
if 'email' in data and not is_valid_email(data['email']):
|
||||
return create_error_response("Invalid email address", 400)
|
||||
|
||||
try:
|
||||
# Check if email is already taken by another user
|
||||
if 'email' in data:
|
||||
existing_user = user_manager.get_user_by_email(data['email'])
|
||||
if existing_user and existing_user['id'] != user_id:
|
||||
return create_error_response("Email already registered", 409)
|
||||
|
||||
# Update user
|
||||
success = user_manager.update_user(user_id, **data)
|
||||
|
||||
if success:
|
||||
# Get updated user
|
||||
user = user_manager.get_user_by_id(user_id)
|
||||
user_data = format_user_data(user, include_sensitive=False)
|
||||
|
||||
logger.info(f"User {user_id} updated their profile")
|
||||
return create_success_response("Profile updated successfully", 200, user_data)
|
||||
else:
|
||||
return create_error_response("Failed to update profile", 500)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error updating user {user_id}: {str(e)}")
|
||||
return create_error_response("Failed to update profile", 500)
|
||||
|
||||
|
||||
@auth_bp.route('/auth/change-password', methods=['PUT'])
|
||||
@require_auth
|
||||
@handle_api_errors
|
||||
@validate_json_input(
|
||||
required_fields=['current_password', 'new_password'],
|
||||
field_types={'current_password': str, 'new_password': str}
|
||||
)
|
||||
def change_password() -> Tuple[Any, int]:
|
||||
"""
|
||||
Change user password.
|
||||
|
||||
Request Body:
|
||||
- current_password: Current password
|
||||
- new_password: New password
|
||||
|
||||
Returns:
|
||||
JSON response with change result
|
||||
"""
|
||||
data = request.get_json()
|
||||
user_id = session.get('user_id')
|
||||
current_password = data['current_password']
|
||||
new_password = data['new_password']
|
||||
|
||||
# Validate new password
|
||||
if len(new_password) < 8:
|
||||
return create_error_response("New password must be at least 8 characters long", 400)
|
||||
|
||||
try:
|
||||
# Get user
|
||||
user = user_manager.get_user_by_id(user_id)
|
||||
|
||||
# Verify current password
|
||||
authenticated_user = user_manager.authenticate_user(user['username'], current_password)
|
||||
if not authenticated_user:
|
||||
return create_error_response("Current password is incorrect", 401)
|
||||
|
||||
# Change password
|
||||
success = user_manager.change_password(user_id, new_password)
|
||||
|
||||
if success:
|
||||
logger.info(f"User {user_id} changed their password")
|
||||
return create_success_response("Password changed successfully")
|
||||
else:
|
||||
return create_error_response("Failed to change password", 500)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error changing password for user {user_id}: {str(e)}")
|
||||
return create_error_response("Failed to change password", 500)
|
||||
|
||||
|
||||
@auth_bp.route('/auth/forgot-password', methods=['POST'])
|
||||
@handle_api_errors
|
||||
@validate_json_input(
|
||||
required_fields=['email'],
|
||||
field_types={'email': str}
|
||||
)
|
||||
def forgot_password() -> Tuple[Any, int]:
|
||||
"""
|
||||
Request password reset.
|
||||
|
||||
Request Body:
|
||||
- email: User email address
|
||||
|
||||
Returns:
|
||||
JSON response with reset result
|
||||
"""
|
||||
data = request.get_json()
|
||||
email = sanitize_string(data['email'])
|
||||
|
||||
if not is_valid_email(email):
|
||||
return create_error_response("Invalid email address", 400)
|
||||
|
||||
try:
|
||||
# Check if user exists
|
||||
user = user_manager.get_user_by_email(email)
|
||||
|
||||
if user:
|
||||
# Generate reset token
|
||||
reset_token = user_manager.reset_password(email)
|
||||
|
||||
# In a real application, you would send this token via email
|
||||
logger.info(f"Password reset requested for user {user['id']} (email: {email})")
|
||||
|
||||
# For security, always return success even if email doesn't exist
|
||||
return create_success_response("If the email exists, a reset link has been sent")
|
||||
else:
|
||||
# For security, don't reveal that email doesn't exist
|
||||
logger.warning(f"Password reset requested for non-existent email: {email}")
|
||||
return create_success_response("If the email exists, a reset link has been sent")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error processing password reset for email {email}: {str(e)}")
|
||||
return create_error_response("Failed to process password reset", 500)
|
||||
|
||||
|
||||
@auth_bp.route('/auth/reset-password', methods=['POST'])
|
||||
@handle_api_errors
|
||||
@validate_json_input(
|
||||
required_fields=['token', 'new_password'],
|
||||
field_types={'token': str, 'new_password': str}
|
||||
)
|
||||
def reset_password() -> Tuple[Any, int]:
|
||||
"""
|
||||
Reset password using token.
|
||||
|
||||
Request Body:
|
||||
- token: Password reset token
|
||||
- new_password: New password
|
||||
|
||||
Returns:
|
||||
JSON response with reset result
|
||||
"""
|
||||
data = request.get_json()
|
||||
token = data['token']
|
||||
new_password = data['new_password']
|
||||
|
||||
# Validate new password
|
||||
if len(new_password) < 8:
|
||||
return create_error_response("New password must be at least 8 characters long", 400)
|
||||
|
||||
try:
|
||||
# Verify reset token
|
||||
user = user_manager.verify_reset_token(token)
|
||||
|
||||
if not user:
|
||||
return create_error_response("Invalid or expired reset token", 400)
|
||||
|
||||
# Change password
|
||||
success = user_manager.change_password(user['id'], new_password)
|
||||
|
||||
if success:
|
||||
logger.info(f"Password reset completed for user {user['id']}")
|
||||
return create_success_response("Password reset successfully")
|
||||
else:
|
||||
return create_error_response("Failed to reset password", 500)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error resetting password with token: {str(e)}")
|
||||
return create_error_response("Failed to reset password", 500)
|
||||
|
||||
|
||||
@auth_bp.route('/auth/sessions', methods=['GET'])
|
||||
@require_auth
|
||||
@handle_api_errors
|
||||
def get_user_sessions() -> Tuple[Any, int]:
|
||||
"""
|
||||
Get user's active sessions.
|
||||
|
||||
Returns:
|
||||
JSON response with user sessions
|
||||
"""
|
||||
try:
|
||||
user_id = session.get('user_id')
|
||||
sessions = user_manager.get_user_sessions(user_id)
|
||||
|
||||
return create_success_response("Sessions retrieved successfully", 200, sessions)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting user sessions: {str(e)}")
|
||||
return create_error_response("Failed to get sessions", 500)
|
||||
|
||||
|
||||
@auth_bp.route('/auth/sessions', methods=['DELETE'])
|
||||
@require_auth
|
||||
@handle_api_errors
|
||||
def destroy_all_sessions() -> Tuple[Any, int]:
|
||||
"""
|
||||
Destroy all user sessions except current one.
|
||||
|
||||
Returns:
|
||||
JSON response with operation result
|
||||
"""
|
||||
try:
|
||||
user_id = session.get('user_id')
|
||||
current_token = session.get('session_token')
|
||||
|
||||
# Destroy all sessions except current
|
||||
success = session_manager.destroy_all_sessions(user_id, except_token=current_token)
|
||||
|
||||
if success:
|
||||
logger.info(f"All sessions destroyed for user {user_id}")
|
||||
return create_success_response("All other sessions destroyed successfully")
|
||||
else:
|
||||
return create_error_response("Failed to destroy sessions", 500)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error destroying sessions: {str(e)}")
|
||||
return create_error_response("Failed to destroy sessions", 500)
|
||||
|
||||
|
||||
@auth_bp.route('/auth/api-keys', methods=['GET'])
|
||||
@require_auth
|
||||
@handle_api_errors
|
||||
def get_api_keys() -> Tuple[Any, int]:
|
||||
"""
|
||||
Get user's API keys.
|
||||
|
||||
Returns:
|
||||
JSON response with API keys
|
||||
"""
|
||||
try:
|
||||
user_id = session.get('user_id')
|
||||
api_keys = api_key_manager.get_user_api_keys(user_id)
|
||||
|
||||
return create_success_response("API keys retrieved successfully", 200, api_keys)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting API keys: {str(e)}")
|
||||
return create_error_response("Failed to get API keys", 500)
|
||||
|
||||
|
||||
@auth_bp.route('/auth/api-keys', methods=['POST'])
|
||||
@require_auth
|
||||
@handle_api_errors
|
||||
@validate_json_input(
|
||||
required_fields=['name'],
|
||||
optional_fields=['description'],
|
||||
field_types={'name': str, 'description': str}
|
||||
)
|
||||
def create_api_key() -> Tuple[Any, int]:
|
||||
"""
|
||||
Create new API key.
|
||||
|
||||
Request Body:
|
||||
- name: API key name
|
||||
- description: API key description (optional)
|
||||
|
||||
Returns:
|
||||
JSON response with created API key
|
||||
"""
|
||||
data = request.get_json()
|
||||
user_id = session.get('user_id')
|
||||
name = sanitize_string(data['name'])
|
||||
description = sanitize_string(data.get('description', ''))
|
||||
|
||||
try:
|
||||
# Create API key
|
||||
api_key = api_key_manager.create_api_key(
|
||||
user_id=user_id,
|
||||
name=name,
|
||||
description=description
|
||||
)
|
||||
|
||||
logger.info(f"API key created for user {user_id}: {name}")
|
||||
return create_success_response("API key created successfully", 201, api_key)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error creating API key for user {user_id}: {str(e)}")
|
||||
return create_error_response("Failed to create API key", 500)
|
||||
|
||||
|
||||
@auth_bp.route('/auth/api-keys/<int:key_id>', methods=['DELETE'])
|
||||
@require_auth
|
||||
@handle_api_errors
|
||||
def revoke_api_key(key_id: int) -> Tuple[Any, int]:
|
||||
"""
|
||||
Revoke API key.
|
||||
|
||||
Args:
|
||||
key_id: API key ID
|
||||
|
||||
Returns:
|
||||
JSON response with revocation result
|
||||
"""
|
||||
try:
|
||||
user_id = session.get('user_id')
|
||||
|
||||
# Verify key belongs to user and revoke
|
||||
success = api_key_manager.revoke_api_key(key_id, user_id)
|
||||
|
||||
if success:
|
||||
logger.info(f"API key {key_id} revoked by user {user_id}")
|
||||
return create_success_response("API key revoked successfully")
|
||||
else:
|
||||
return create_error_response("API key not found or access denied", 404)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error revoking API key {key_id}: {str(e)}")
|
||||
return create_error_response("Failed to revoke API key", 500)
|
||||
|
||||
|
||||
@auth_bp.route('/auth/password-reset', methods=['POST'])
|
||||
@handle_api_errors
|
||||
@validate_json_input(
|
||||
required_fields=['email'],
|
||||
field_types={'email': str}
|
||||
)
|
||||
def request_password_reset() -> Tuple[Any, int]:
|
||||
"""
|
||||
Request password reset for user email.
|
||||
|
||||
Request Body:
|
||||
- email: User email address
|
||||
|
||||
Returns:
|
||||
JSON response with password reset request result
|
||||
"""
|
||||
data = request.get_json()
|
||||
email = sanitize_string(data['email'])
|
||||
|
||||
try:
|
||||
# Validate email format
|
||||
if not is_valid_email(email):
|
||||
return create_error_response("Invalid email format", 400)
|
||||
|
||||
# Check if user exists
|
||||
user = user_manager.get_user_by_email(email)
|
||||
if not user:
|
||||
# Don't reveal if email exists or not for security
|
||||
logger.warning(f"Password reset requested for non-existent email: {email}")
|
||||
return create_success_response("If the email exists, a password reset link has been sent")
|
||||
|
||||
# Generate reset token
|
||||
reset_token = user_manager.create_password_reset_token(user['id'])
|
||||
|
||||
# In a real implementation, you would send an email here
|
||||
# For now, we'll just log it and return success
|
||||
logger.info(f"Password reset token generated for user {user['id']}: {reset_token}")
|
||||
|
||||
return create_success_response("If the email exists, a password reset link has been sent")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error during password reset request for {email}: {str(e)}")
|
||||
return create_error_response("Failed to process password reset request", 500)
|
||||
|
||||
|
||||
@auth_bp.route('/auth/password-reset/confirm', methods=['POST'])
|
||||
@handle_api_errors
|
||||
@validate_json_input(
|
||||
required_fields=['token', 'new_password'],
|
||||
field_types={'token': str, 'new_password': str}
|
||||
)
|
||||
def confirm_password_reset() -> Tuple[Any, int]:
|
||||
"""
|
||||
Confirm password reset with token.
|
||||
|
||||
Request Body:
|
||||
- token: Password reset token
|
||||
- new_password: New password
|
||||
|
||||
Returns:
|
||||
JSON response with password reset confirmation result
|
||||
"""
|
||||
data = request.get_json()
|
||||
token = data['token']
|
||||
new_password = data['new_password']
|
||||
|
||||
try:
|
||||
# Validate password strength
|
||||
if len(new_password) < 8:
|
||||
return create_error_response("Password must be at least 8 characters long", 400)
|
||||
|
||||
# Verify reset token
|
||||
user_id = user_manager.verify_reset_token(token)
|
||||
if not user_id:
|
||||
return create_error_response("Invalid or expired reset token", 400)
|
||||
|
||||
# Update password
|
||||
success = user_manager.change_password(user_id, new_password)
|
||||
if not success:
|
||||
return create_error_response("Failed to update password", 500)
|
||||
|
||||
# Invalidate all existing sessions for security
|
||||
session_manager.destroy_all_sessions(user_id)
|
||||
|
||||
logger.info(f"Password reset completed for user ID {user_id}")
|
||||
return create_success_response("Password has been successfully reset")
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error during password reset confirmation: {str(e)}")
|
||||
return create_error_response("Failed to reset password", 500)
|
||||
|
||||
|
||||
@auth_bp.route('/auth/refresh', methods=['POST'])
|
||||
@handle_api_errors
|
||||
def refresh_token() -> Tuple[Any, int]:
|
||||
"""
|
||||
Refresh authentication token.
|
||||
|
||||
Returns:
|
||||
JSON response with new token
|
||||
"""
|
||||
try:
|
||||
# Get current session token
|
||||
session_token = session.get('session_token')
|
||||
if not session_token:
|
||||
return create_error_response("No active session found", 401)
|
||||
|
||||
# Validate current session
|
||||
session_info = session_manager.get_session_info(session_token)
|
||||
if not session_info or session_info.get('expired', True):
|
||||
session.clear()
|
||||
return create_error_response("Session expired", 401)
|
||||
|
||||
# Create new session token
|
||||
user_id = session_info['user_id']
|
||||
new_session_token = session_manager.create_session(user_id)
|
||||
|
||||
# Destroy old session
|
||||
session_manager.destroy_session(session_token)
|
||||
|
||||
# Update session data
|
||||
session['session_token'] = new_session_token
|
||||
session_manager.update_session_activity(new_session_token)
|
||||
|
||||
# Get user data
|
||||
user = user_manager.get_user_by_id(user_id)
|
||||
user_data = format_user_data(user, include_sensitive=False)
|
||||
|
||||
response_data = {
|
||||
'user': user_data,
|
||||
'session_token': new_session_token,
|
||||
'expires_at': (datetime.now() + timedelta(days=7)).isoformat()
|
||||
}
|
||||
|
||||
logger.info(f"Token refreshed for user ID {user_id}")
|
||||
return create_success_response("Token refreshed successfully", 200, response_data)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error during token refresh: {str(e)}")
|
||||
return create_error_response("Failed to refresh token", 500)
|
||||
|
||||
|
||||
@auth_bp.route('/auth/activity', methods=['GET'])
|
||||
@require_auth
|
||||
@handle_api_errors
|
||||
@validate_query_params(
|
||||
allowed_params=['limit', 'offset'],
|
||||
param_types={'limit': int, 'offset': int}
|
||||
)
|
||||
def get_user_activity() -> Tuple[Any, int]:
|
||||
"""
|
||||
Get user activity log.
|
||||
|
||||
Query Parameters:
|
||||
- limit: Number of activities to return (default: 50, max: 200)
|
||||
- offset: Number of activities to skip (default: 0)
|
||||
|
||||
Returns:
|
||||
JSON response with user activity
|
||||
"""
|
||||
limit = min(request.args.get('limit', 50, type=int), 200)
|
||||
offset = request.args.get('offset', 0, type=int)
|
||||
|
||||
try:
|
||||
user_id = session.get('user_id')
|
||||
activity = user_manager.get_user_activity(user_id, limit=limit, offset=offset)
|
||||
|
||||
return create_success_response("User activity retrieved successfully", 200, activity)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting user activity: {str(e)}")
|
||||
return create_error_response("Failed to get user activity", 500)
|
||||
@ -1,649 +0,0 @@
|
||||
"""
|
||||
Backup Management API Endpoints
|
||||
|
||||
This module provides REST API endpoints for database backup operations,
|
||||
including backup creation, restoration, and cleanup functionality.
|
||||
"""
|
||||
|
||||
from flask import Blueprint, request, send_file
|
||||
from typing import Dict, List, Any, Optional
|
||||
import os
|
||||
from datetime import datetime
|
||||
|
||||
from ...shared.auth_decorators import require_auth, optional_auth
|
||||
from ...shared.error_handlers import handle_api_errors, APIException, NotFoundError, ValidationError
|
||||
from ...shared.validators import validate_json_input, validate_id_parameter, validate_pagination_params
|
||||
from ...shared.response_helpers import (
|
||||
create_success_response, create_paginated_response, extract_pagination_params
|
||||
)
|
||||
|
||||
# Import backup components (these imports would need to be adjusted based on actual structure)
|
||||
try:
|
||||
from database_manager import backup_manager, BackupInfo
|
||||
except ImportError:
|
||||
# Fallback for development/testing
|
||||
backup_manager = None
|
||||
BackupInfo = None
|
||||
|
||||
|
||||
# Blueprint for backup management endpoints
|
||||
backups_bp = Blueprint('backups', __name__, url_prefix='/api/v1/backups')
|
||||
|
||||
|
||||
@backups_bp.route('', methods=['GET'])
|
||||
@handle_api_errors
|
||||
@validate_pagination_params
|
||||
@optional_auth
|
||||
def list_backups() -> Dict[str, Any]:
|
||||
"""
|
||||
List all available backups with optional filtering.
|
||||
|
||||
Query Parameters:
|
||||
- backup_type: Filter by backup type (full, metadata_only, incremental)
|
||||
- date_from: Filter from date (ISO format)
|
||||
- date_to: Filter to date (ISO format)
|
||||
- min_size_mb: Minimum backup size in MB
|
||||
- max_size_mb: Maximum backup size in MB
|
||||
- page: Page number (default: 1)
|
||||
- per_page: Items per page (default: 50, max: 1000)
|
||||
|
||||
Returns:
|
||||
Paginated list of backups
|
||||
"""
|
||||
if not backup_manager:
|
||||
raise APIException("Backup manager not available", 503)
|
||||
|
||||
# Extract filters
|
||||
backup_type_filter = request.args.get('backup_type')
|
||||
date_from = request.args.get('date_from')
|
||||
date_to = request.args.get('date_to')
|
||||
min_size_mb = request.args.get('min_size_mb')
|
||||
max_size_mb = request.args.get('max_size_mb')
|
||||
|
||||
# Validate filters
|
||||
valid_types = ['full', 'metadata_only', 'incremental']
|
||||
if backup_type_filter and backup_type_filter not in valid_types:
|
||||
raise ValidationError(f"backup_type must be one of: {', '.join(valid_types)}")
|
||||
|
||||
# Validate dates
|
||||
if date_from:
|
||||
try:
|
||||
datetime.fromisoformat(date_from.replace('Z', '+00:00'))
|
||||
except ValueError:
|
||||
raise ValidationError("date_from must be in ISO format")
|
||||
|
||||
if date_to:
|
||||
try:
|
||||
datetime.fromisoformat(date_to.replace('Z', '+00:00'))
|
||||
except ValueError:
|
||||
raise ValidationError("date_to must be in ISO format")
|
||||
|
||||
# Validate size filters
|
||||
if min_size_mb:
|
||||
try:
|
||||
min_size_mb = float(min_size_mb)
|
||||
if min_size_mb < 0:
|
||||
raise ValueError()
|
||||
except ValueError:
|
||||
raise ValidationError("min_size_mb must be a non-negative number")
|
||||
|
||||
if max_size_mb:
|
||||
try:
|
||||
max_size_mb = float(max_size_mb)
|
||||
if max_size_mb < 0:
|
||||
raise ValueError()
|
||||
except ValueError:
|
||||
raise ValidationError("max_size_mb must be a non-negative number")
|
||||
|
||||
# Get pagination parameters
|
||||
page, per_page = extract_pagination_params()
|
||||
|
||||
# Get backups with filters
|
||||
backups = backup_manager.list_backups(
|
||||
backup_type=backup_type_filter,
|
||||
date_from=date_from,
|
||||
date_to=date_to,
|
||||
min_size_bytes=int(min_size_mb * 1024 * 1024) if min_size_mb else None,
|
||||
max_size_bytes=int(max_size_mb * 1024 * 1024) if max_size_mb else None
|
||||
)
|
||||
|
||||
# Format backup data
|
||||
backup_data = []
|
||||
for backup in backups:
|
||||
backup_data.append({
|
||||
'backup_id': backup.backup_id,
|
||||
'backup_type': backup.backup_type,
|
||||
'created_at': backup.created_at.isoformat(),
|
||||
'size_mb': round(backup.size_bytes / (1024 * 1024), 2),
|
||||
'size_bytes': backup.size_bytes,
|
||||
'description': backup.description,
|
||||
'tables_included': backup.tables_included,
|
||||
'backup_path': backup.backup_path,
|
||||
'is_compressed': backup.is_compressed,
|
||||
'checksum': backup.checksum,
|
||||
'status': backup.status
|
||||
})
|
||||
|
||||
# Apply pagination
|
||||
total = len(backup_data)
|
||||
start_idx = (page - 1) * per_page
|
||||
end_idx = start_idx + per_page
|
||||
paginated_backups = backup_data[start_idx:end_idx]
|
||||
|
||||
return create_paginated_response(
|
||||
data=paginated_backups,
|
||||
page=page,
|
||||
per_page=per_page,
|
||||
total=total,
|
||||
endpoint='backups.list_backups'
|
||||
)
|
||||
|
||||
|
||||
@backups_bp.route('/<backup_id>', methods=['GET'])
|
||||
@handle_api_errors
|
||||
@validate_id_parameter('backup_id')
|
||||
@optional_auth
|
||||
def get_backup(backup_id: str) -> Dict[str, Any]:
|
||||
"""
|
||||
Get detailed information about a specific backup.
|
||||
|
||||
Args:
|
||||
backup_id: Unique identifier for the backup
|
||||
|
||||
Returns:
|
||||
Detailed backup information
|
||||
"""
|
||||
if not backup_manager:
|
||||
raise APIException("Backup manager not available", 503)
|
||||
|
||||
backup = backup_manager.get_backup_by_id(backup_id)
|
||||
if not backup:
|
||||
raise NotFoundError("Backup not found")
|
||||
|
||||
# Get additional details
|
||||
backup_details = {
|
||||
'backup_id': backup.backup_id,
|
||||
'backup_type': backup.backup_type,
|
||||
'created_at': backup.created_at.isoformat(),
|
||||
'size_mb': round(backup.size_bytes / (1024 * 1024), 2),
|
||||
'size_bytes': backup.size_bytes,
|
||||
'description': backup.description,
|
||||
'tables_included': backup.tables_included,
|
||||
'backup_path': backup.backup_path,
|
||||
'is_compressed': backup.is_compressed,
|
||||
'checksum': backup.checksum,
|
||||
'status': backup.status,
|
||||
'creation_duration_seconds': backup.creation_duration_seconds,
|
||||
'file_exists': os.path.exists(backup.backup_path),
|
||||
'validation_status': backup_manager.validate_backup(backup_id)
|
||||
}
|
||||
|
||||
return create_success_response(backup_details)
|
||||
|
||||
|
||||
@backups_bp.route('', methods=['POST'])
|
||||
@handle_api_errors
|
||||
@validate_json_input(
|
||||
required_fields=['backup_type'],
|
||||
optional_fields=['description', 'tables', 'compress', 'encryption_key'],
|
||||
field_types={
|
||||
'backup_type': str,
|
||||
'description': str,
|
||||
'tables': list,
|
||||
'compress': bool,
|
||||
'encryption_key': str
|
||||
}
|
||||
)
|
||||
@require_auth
|
||||
def create_backup() -> Dict[str, Any]:
|
||||
"""
|
||||
Create a new database backup.
|
||||
|
||||
Required Fields:
|
||||
- backup_type: Type of backup (full, metadata_only, incremental)
|
||||
|
||||
Optional Fields:
|
||||
- description: Backup description
|
||||
- tables: Specific tables to backup (for selective backups)
|
||||
- compress: Whether to compress the backup (default: true)
|
||||
- encryption_key: Key for backup encryption
|
||||
|
||||
Returns:
|
||||
Created backup information
|
||||
"""
|
||||
if not backup_manager:
|
||||
raise APIException("Backup manager not available", 503)
|
||||
|
||||
data = request.get_json()
|
||||
backup_type = data['backup_type']
|
||||
|
||||
# Validate backup type
|
||||
valid_types = ['full', 'metadata_only', 'incremental']
|
||||
if backup_type not in valid_types:
|
||||
raise ValidationError(f"backup_type must be one of: {', '.join(valid_types)}")
|
||||
|
||||
description = data.get('description')
|
||||
tables = data.get('tables')
|
||||
compress = data.get('compress', True)
|
||||
encryption_key = data.get('encryption_key')
|
||||
|
||||
# Validate tables if provided
|
||||
if tables:
|
||||
if not isinstance(tables, list) or not all(isinstance(t, str) for t in tables):
|
||||
raise ValidationError("tables must be a list of table names")
|
||||
|
||||
# Validate table names exist
|
||||
valid_tables = backup_manager.get_available_tables()
|
||||
invalid_tables = [t for t in tables if t not in valid_tables]
|
||||
if invalid_tables:
|
||||
raise ValidationError(f"Invalid tables: {', '.join(invalid_tables)}")
|
||||
|
||||
try:
|
||||
# Create backup based on type
|
||||
if backup_type == 'full':
|
||||
backup_info = backup_manager.create_full_backup(
|
||||
description=description,
|
||||
compress=compress,
|
||||
encryption_key=encryption_key
|
||||
)
|
||||
elif backup_type == 'metadata_only':
|
||||
backup_info = backup_manager.create_metadata_backup(
|
||||
description=description,
|
||||
compress=compress,
|
||||
encryption_key=encryption_key
|
||||
)
|
||||
elif backup_type == 'incremental':
|
||||
backup_info = backup_manager.create_incremental_backup(
|
||||
description=description,
|
||||
compress=compress,
|
||||
encryption_key=encryption_key
|
||||
)
|
||||
else: # selective backup
|
||||
backup_info = backup_manager.create_selective_backup(
|
||||
tables=tables,
|
||||
description=description,
|
||||
compress=compress,
|
||||
encryption_key=encryption_key
|
||||
)
|
||||
|
||||
if not backup_info:
|
||||
raise APIException("Failed to create backup", 500)
|
||||
|
||||
backup_data = {
|
||||
'backup_id': backup_info.backup_id,
|
||||
'backup_type': backup_info.backup_type,
|
||||
'size_mb': round(backup_info.size_bytes / (1024 * 1024), 2),
|
||||
'created_at': backup_info.created_at.isoformat(),
|
||||
'description': backup_info.description,
|
||||
'tables_included': backup_info.tables_included,
|
||||
'is_compressed': backup_info.is_compressed,
|
||||
'checksum': backup_info.checksum
|
||||
}
|
||||
|
||||
return create_success_response(
|
||||
data=backup_data,
|
||||
message=f"{backup_type.title()} backup created successfully",
|
||||
status_code=201
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
raise APIException(f"Failed to create backup: {str(e)}", 500)
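# Illustrative client sketch (not part of the original module): one way a
# caller could drive the create-backup endpoint above. The URL prefix
# '/api/database/backups' and the bearer-token header are assumptions here,
# since blueprint registration and the auth scheme live elsewhere.
def _example_create_full_backup(base_url: str, token: str) -> str:
    import requests  # assumed to be available in the client environment

    resp = requests.post(
        f"{base_url}/api/database/backups",
        json={"backup_type": "full", "description": "nightly", "compress": True},
        headers={"Authorization": f"Bearer {token}"},
        timeout=600,
    )
    resp.raise_for_status()
    # create_success_response() wraps the endpoint payload under 'data'
    return resp.json()["data"]["backup_id"]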
@backups_bp.route('/<backup_id>/restore', methods=['POST'])
|
||||
@handle_api_errors
|
||||
@validate_id_parameter('backup_id')
|
||||
@validate_json_input(
|
||||
optional_fields=['confirm', 'tables', 'target_database', 'restore_data', 'restore_schema'],
|
||||
field_types={
|
||||
'confirm': bool,
|
||||
'tables': list,
|
||||
'target_database': str,
|
||||
'restore_data': bool,
|
||||
'restore_schema': bool
|
||||
}
|
||||
)
|
||||
@require_auth
|
||||
def restore_backup(backup_id: str) -> Dict[str, Any]:
|
||||
"""
|
||||
Restore from a backup.
|
||||
|
||||
Args:
|
||||
backup_id: Unique identifier for the backup
|
||||
|
||||
Optional Fields:
|
||||
- confirm: Confirmation flag (required for production)
|
||||
- tables: Specific tables to restore
|
||||
- target_database: Target database path (for restore to different location)
|
||||
- restore_data: Whether to restore data (default: true)
|
||||
- restore_schema: Whether to restore schema (default: true)
|
||||
|
||||
Returns:
|
||||
Restoration results
|
||||
"""
|
||||
if not backup_manager:
|
||||
raise APIException("Backup manager not available", 503)
|
||||
|
||||
data = request.get_json() or {}
|
||||
|
||||
# Check if backup exists
|
||||
backup = backup_manager.get_backup_by_id(backup_id)
|
||||
if not backup:
|
||||
raise NotFoundError("Backup not found")
|
||||
|
||||
# Validate backup file exists
|
||||
if not os.path.exists(backup.backup_path):
|
||||
raise APIException("Backup file not found", 404)
|
||||
|
||||
# Require confirmation for production environments
|
||||
confirm = data.get('confirm', False)
|
||||
if not confirm:
|
||||
# Check if this is a production environment
|
||||
from config import config
|
||||
if hasattr(config, 'environment') and config.environment == 'production':
|
||||
raise ValidationError("Confirmation required for restore operation in production")
|
||||
|
||||
tables = data.get('tables')
|
||||
target_database = data.get('target_database')
|
||||
restore_data = data.get('restore_data', True)
|
||||
restore_schema = data.get('restore_schema', True)
|
||||
|
||||
# Validate tables if provided
|
||||
if tables:
|
||||
if not isinstance(tables, list) or not all(isinstance(t, str) for t in tables):
|
||||
raise ValidationError("tables must be a list of table names")
|
||||
|
||||
try:
|
||||
# Perform restoration
|
||||
restore_result = backup_manager.restore_backup(
|
||||
backup_id=backup_id,
|
||||
tables=tables,
|
||||
target_database=target_database,
|
||||
restore_data=restore_data,
|
||||
restore_schema=restore_schema
|
||||
)
|
||||
|
||||
if restore_result.success:
|
||||
return create_success_response(
|
||||
data={
|
||||
'backup_id': backup_id,
|
||||
'restore_time': restore_result.restore_time.isoformat(),
|
||||
'restored_tables': restore_result.restored_tables,
|
||||
'restored_records': restore_result.restored_records,
|
||||
'duration_seconds': restore_result.duration_seconds
|
||||
},
|
||||
message="Backup restored successfully"
|
||||
)
|
||||
else:
|
||||
raise APIException(f"Restore failed: {restore_result.error_message}", 500)
|
||||
|
||||
except Exception as e:
|
||||
raise APIException(f"Failed to restore backup: {str(e)}", 500)
|
||||
|
||||
|
||||
@backups_bp.route('/<backup_id>/download', methods=['GET'])
|
||||
@handle_api_errors
|
||||
@validate_id_parameter('backup_id')
|
||||
@require_auth
|
||||
def download_backup(backup_id: str):
|
||||
"""
|
||||
Download a backup file.
|
||||
|
||||
Args:
|
||||
backup_id: Unique identifier for the backup
|
||||
|
||||
Returns:
|
||||
Backup file download
|
||||
"""
|
||||
if not backup_manager:
|
||||
raise APIException("Backup manager not available", 503)
|
||||
|
||||
# Check if backup exists
|
||||
backup = backup_manager.get_backup_by_id(backup_id)
|
||||
if not backup:
|
||||
raise NotFoundError("Backup not found")
|
||||
|
||||
# Check if backup file exists
|
||||
if not os.path.exists(backup.backup_path):
|
||||
raise NotFoundError("Backup file not found")
|
||||
|
||||
# Generate filename
|
||||
timestamp = backup.created_at.strftime('%Y%m%d_%H%M%S')
|
||||
filename = f"backup_{backup.backup_type}_{timestamp}_{backup_id[:8]}.db"
|
||||
if backup.is_compressed:
|
||||
filename += ".gz"
|
||||
|
||||
try:
|
||||
return send_file(
|
||||
backup.backup_path,
|
||||
as_attachment=True,
|
||||
download_name=filename,
|
||||
mimetype='application/octet-stream'
|
||||
)
|
||||
except Exception as e:
|
||||
raise APIException(f"Failed to download backup: {str(e)}", 500)
|
||||
|
||||
|
||||
@backups_bp.route('/<backup_id>/validate', methods=['POST'])
|
||||
@handle_api_errors
|
||||
@validate_id_parameter('backup_id')
|
||||
@optional_auth
|
||||
def validate_backup(backup_id: str) -> Dict[str, Any]:
|
||||
"""
|
||||
Validate a backup file integrity.
|
||||
|
||||
Args:
|
||||
backup_id: Unique identifier for the backup
|
||||
|
||||
Returns:
|
||||
Validation results
|
||||
"""
|
||||
if not backup_manager:
|
||||
raise APIException("Backup manager not available", 503)
|
||||
|
||||
# Check if backup exists
|
||||
backup = backup_manager.get_backup_by_id(backup_id)
|
||||
if not backup:
|
||||
raise NotFoundError("Backup not found")
|
||||
|
||||
try:
|
||||
validation_result = backup_manager.validate_backup(backup_id)
|
||||
|
||||
return create_success_response(
|
||||
data={
|
||||
'backup_id': backup_id,
|
||||
'is_valid': validation_result.is_valid,
|
||||
'file_exists': validation_result.file_exists,
|
||||
'checksum_valid': validation_result.checksum_valid,
|
||||
'database_readable': validation_result.database_readable,
|
||||
'tables_count': validation_result.tables_count,
|
||||
'records_count': validation_result.records_count,
|
||||
'validation_errors': validation_result.errors,
|
||||
'validated_at': datetime.utcnow().isoformat()
|
||||
}
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
raise APIException(f"Failed to validate backup: {str(e)}", 500)
|
||||
|
||||
|
||||
@backups_bp.route('/<backup_id>', methods=['DELETE'])
|
||||
@handle_api_errors
|
||||
@validate_id_parameter('backup_id')
|
||||
@require_auth
|
||||
def delete_backup(backup_id: str) -> Dict[str, Any]:
|
||||
"""
|
||||
Delete a backup.
|
||||
|
||||
Args:
|
||||
backup_id: Unique identifier for the backup
|
||||
|
||||
Query Parameters:
|
||||
- delete_file: Set to 'false' to keep the backup file on disk (default: 'true', file is deleted)
|
||||
|
||||
Returns:
|
||||
Deletion confirmation
|
||||
"""
|
||||
if not backup_manager:
|
||||
raise APIException("Backup manager not available", 503)
|
||||
|
||||
# Check if backup exists
|
||||
backup = backup_manager.get_backup_by_id(backup_id)
|
||||
if not backup:
|
||||
raise NotFoundError("Backup not found")
|
||||
|
||||
delete_file = request.args.get('delete_file', 'true').lower() == 'true'
|
||||
|
||||
try:
|
||||
success = backup_manager.delete_backup(backup_id, delete_file=delete_file)
|
||||
|
||||
if success:
|
||||
message = f"Backup {backup_id} deleted successfully"
|
||||
if delete_file:
|
||||
message += " (including file)"
|
||||
|
||||
return create_success_response(message=message)
|
||||
else:
|
||||
raise APIException("Failed to delete backup", 500)
|
||||
|
||||
except Exception as e:
|
||||
raise APIException(f"Failed to delete backup: {str(e)}", 500)
|
||||
|
||||
|
||||
@backups_bp.route('/cleanup', methods=['POST'])
|
||||
@handle_api_errors
|
||||
@validate_json_input(
|
||||
optional_fields=['keep_days', 'keep_count', 'backup_types', 'dry_run'],
|
||||
field_types={
|
||||
'keep_days': int,
|
||||
'keep_count': int,
|
||||
'backup_types': list,
|
||||
'dry_run': bool
|
||||
}
|
||||
)
|
||||
@require_auth
|
||||
def cleanup_backups() -> Dict[str, Any]:
|
||||
"""
|
||||
Clean up old backup files based on retention policy.
|
||||
|
||||
Optional Fields:
|
||||
- keep_days: Keep backups newer than this many days (default: 30)
|
||||
- keep_count: Keep at least this many backups (default: 10)
|
||||
- backup_types: Types of backups to clean up (default: all)
|
||||
- dry_run: Preview what would be deleted without actually deleting
|
||||
|
||||
Returns:
|
||||
Cleanup results
|
||||
"""
|
||||
if not backup_manager:
|
||||
raise APIException("Backup manager not available", 503)
|
||||
|
||||
data = request.get_json() or {}
|
||||
keep_days = data.get('keep_days', 30)
|
||||
keep_count = data.get('keep_count', 10)
|
||||
backup_types = data.get('backup_types', ['full', 'metadata_only', 'incremental'])
|
||||
dry_run = data.get('dry_run', False)
|
||||
|
||||
# Validate parameters
|
||||
if keep_days < 1:
|
||||
raise ValidationError("keep_days must be at least 1")
|
||||
|
||||
if keep_count < 1:
|
||||
raise ValidationError("keep_count must be at least 1")
|
||||
|
||||
valid_types = ['full', 'metadata_only', 'incremental']
|
||||
if not all(bt in valid_types for bt in backup_types):
|
||||
raise ValidationError(f"backup_types must contain only: {', '.join(valid_types)}")
|
||||
|
||||
try:
|
||||
cleanup_result = backup_manager.cleanup_old_backups(
|
||||
keep_days=keep_days,
|
||||
keep_count=keep_count,
|
||||
backup_types=backup_types,
|
||||
dry_run=dry_run
|
||||
)
|
||||
|
||||
return create_success_response(
|
||||
data={
|
||||
'dry_run': dry_run,
|
||||
'deleted_count': cleanup_result.deleted_count,
|
||||
'deleted_backups': cleanup_result.deleted_backups,
|
||||
'space_freed_mb': round(cleanup_result.space_freed_bytes / (1024 * 1024), 2),
|
||||
'kept_count': cleanup_result.kept_count,
|
||||
'retention_policy': {
|
||||
'keep_days': keep_days,
|
||||
'keep_count': keep_count,
|
||||
'backup_types': backup_types
|
||||
}
|
||||
},
|
||||
message=f"Backup cleanup {'simulated' if dry_run else 'completed'}"
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
raise APIException(f"Failed to cleanup backups: {str(e)}", 500)
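# Illustrative sketch (not part of the original module): previewing a retention
# policy with dry_run before anything is deleted. The URL prefix and auth
# header are assumptions, as in the earlier client sketch.
def _example_preview_backup_cleanup(base_url: str, token: str) -> dict:
    import requests  # assumed client-side dependency

    resp = requests.post(
        f"{base_url}/api/database/backups/cleanup",
        json={"keep_days": 14, "keep_count": 5, "dry_run": True},
        headers={"Authorization": f"Bearer {token}"},
        timeout=60,
    )
    resp.raise_for_status()
    # With dry_run=True the endpoint only reports what would be deleted
    return resp.json()["data"]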
@backups_bp.route('/schedule', methods=['GET'])
|
||||
@handle_api_errors
|
||||
@optional_auth
|
||||
def get_backup_schedule() -> Dict[str, Any]:
|
||||
"""
|
||||
Get current backup schedule configuration.
|
||||
|
||||
Returns:
|
||||
Backup schedule information
|
||||
"""
|
||||
if not backup_manager:
|
||||
raise APIException("Backup manager not available", 503)
|
||||
|
||||
try:
|
||||
schedule_config = backup_manager.get_backup_schedule()
|
||||
|
||||
return create_success_response(data=schedule_config)
|
||||
|
||||
except Exception as e:
|
||||
raise APIException(f"Failed to get backup schedule: {str(e)}", 500)
|
||||
|
||||
|
||||
@backups_bp.route('/schedule', methods=['PUT'])
|
||||
@handle_api_errors
|
||||
@validate_json_input(
|
||||
optional_fields=['enabled', 'full_backup_interval', 'incremental_interval', 'retention_days', 'cleanup_enabled'],
|
||||
field_types={
|
||||
'enabled': bool,
|
||||
'full_backup_interval': str,
|
||||
'incremental_interval': str,
|
||||
'retention_days': int,
|
||||
'cleanup_enabled': bool
|
||||
}
|
||||
)
|
||||
@require_auth
|
||||
def update_backup_schedule() -> Dict[str, Any]:
|
||||
"""
|
||||
Update backup schedule configuration.
|
||||
|
||||
Optional Fields:
|
||||
- enabled: Enable/disable automatic backups
|
||||
- full_backup_interval: Cron expression for full backups
|
||||
- incremental_interval: Cron expression for incremental backups
|
||||
- retention_days: Number of days to keep backups
|
||||
- cleanup_enabled: Enable/disable automatic cleanup
|
||||
|
||||
Returns:
|
||||
Updated schedule configuration
|
||||
"""
|
||||
if not backup_manager:
|
||||
raise APIException("Backup manager not available", 503)
|
||||
|
||||
data = request.get_json()
|
||||
|
||||
try:
|
||||
updated_config = backup_manager.update_backup_schedule(data)
|
||||
|
||||
return create_success_response(
|
||||
data=updated_config,
|
||||
message="Backup schedule updated successfully"
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
raise APIException(f"Failed to update backup schedule: {str(e)}", 500)
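# Illustrative payload (not part of the original module) in the shape this
# endpoint validates. The cron strings are examples only; how
# backup_manager.update_backup_schedule() interprets them is defined elsewhere.
EXAMPLE_BACKUP_SCHEDULE = {
    "enabled": True,
    "full_backup_interval": "0 3 * * 0",      # weekly, Sundays at 03:00
    "incremental_interval": "0 3 * * 1-6",    # daily except Sunday at 03:00
    "retention_days": 30,
    "cleanup_enabled": True,
}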
@@ -1,341 +0,0 @@
|
||||
"""
|
||||
Bulk Operations API endpoints
|
||||
Provides REST API for bulk series management operations.
|
||||
"""
|
||||
|
||||
from flask import Blueprint, request, jsonify, send_file
|
||||
import asyncio
|
||||
import threading
|
||||
from typing import Dict, Any
|
||||
import uuid
|
||||
import io
|
||||
from bulk_operations import bulk_operations_manager
|
||||
|
||||
bulk_api_bp = Blueprint('bulk_api', __name__, url_prefix='/api/bulk')
|
||||
|
||||
# Store active operations
|
||||
active_operations = {}
|
||||
|
||||
@bulk_api_bp.route('/download', methods=['POST'])
|
||||
def bulk_download():
|
||||
"""Start bulk download operation."""
|
||||
try:
|
||||
data = request.get_json()
|
||||
operation_id = data.get('operation_id')
|
||||
series_ids = data.get('series_ids', [])
|
||||
|
||||
if not series_ids:
|
||||
return jsonify({'success': False, 'error': 'No series IDs provided'}), 400
|
||||
|
||||
# Create task ID
|
||||
task_id = str(uuid.uuid4())
|
||||
|
||||
# Store operation info
|
||||
active_operations[task_id] = {
|
||||
'id': operation_id,
|
||||
'type': 'download',
|
||||
'status': 'running',
|
||||
'progress': {
|
||||
'completed': 0,
|
||||
'total': len(series_ids),
|
||||
'message': 'Starting download...'
|
||||
}
|
||||
}
|
||||
|
||||
# Start async operation
|
||||
def run_bulk_download():
|
||||
loop = asyncio.new_event_loop()
|
||||
asyncio.set_event_loop(loop)
|
||||
try:
|
||||
result = loop.run_until_complete(
|
||||
bulk_operations_manager.bulk_download(series_ids, operation_id)
|
||||
)
|
||||
active_operations[task_id]['status'] = 'completed'
|
||||
active_operations[task_id]['result'] = result
|
||||
except Exception as e:
|
||||
active_operations[task_id]['status'] = 'failed'
|
||||
active_operations[task_id]['error'] = str(e)
|
||||
finally:
|
||||
loop.close()
|
||||
|
||||
thread = threading.Thread(target=run_bulk_download)
|
||||
thread.start()
|
||||
|
||||
return jsonify({'success': True, 'task_id': task_id})
|
||||
|
||||
except Exception as e:
|
||||
return jsonify({'success': False, 'error': str(e)}), 500
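# The same "new event loop in a worker thread" pattern appears in bulk_download
# above and in the update/organize/delete endpoints below. A possible refactor
# (a sketch, not part of the original module) is a single helper that runs any
# coroutine in the background and records the outcome in active_operations:
def _run_in_background(task_id: str, coro) -> None:
    """Run `coro` on a fresh event loop in a daemon thread and record the result."""
    def _worker():
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        try:
            result = loop.run_until_complete(coro)
            active_operations[task_id]['status'] = 'completed'
            active_operations[task_id]['result'] = result
        except Exception as e:
            active_operations[task_id]['status'] = 'failed'
            active_operations[task_id]['error'] = str(e)
        finally:
            loop.close()

    threading.Thread(target=_worker, daemon=True).start()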
@bulk_api_bp.route('/update', methods=['POST'])
|
||||
def bulk_update():
|
||||
"""Start bulk update operation."""
|
||||
try:
|
||||
data = request.get_json()
|
||||
operation_id = data.get('operation_id')
|
||||
series_ids = data.get('series_ids', [])
|
||||
|
||||
if not series_ids:
|
||||
return jsonify({'success': False, 'error': 'No series IDs provided'}), 400
|
||||
|
||||
task_id = str(uuid.uuid4())
|
||||
|
||||
active_operations[task_id] = {
|
||||
'id': operation_id,
|
||||
'type': 'update',
|
||||
'status': 'running',
|
||||
'progress': {
|
||||
'completed': 0,
|
||||
'total': len(series_ids),
|
||||
'message': 'Starting update...'
|
||||
}
|
||||
}
|
||||
|
||||
def run_bulk_update():
|
||||
loop = asyncio.new_event_loop()
|
||||
asyncio.set_event_loop(loop)
|
||||
try:
|
||||
result = loop.run_until_complete(
|
||||
bulk_operations_manager.bulk_update(series_ids, operation_id)
|
||||
)
|
||||
active_operations[task_id]['status'] = 'completed'
|
||||
active_operations[task_id]['result'] = result
|
||||
except Exception as e:
|
||||
active_operations[task_id]['status'] = 'failed'
|
||||
active_operations[task_id]['error'] = str(e)
|
||||
finally:
|
||||
loop.close()
|
||||
|
||||
thread = threading.Thread(target=run_bulk_update)
|
||||
thread.start()
|
||||
|
||||
return jsonify({'success': True, 'task_id': task_id})
|
||||
|
||||
except Exception as e:
|
||||
return jsonify({'success': False, 'error': str(e)}), 500
|
||||
|
||||
@bulk_api_bp.route('/organize', methods=['POST'])
|
||||
def bulk_organize():
|
||||
"""Start bulk organize operation."""
|
||||
try:
|
||||
data = request.get_json()
|
||||
operation_id = data.get('operation_id')
|
||||
series_ids = data.get('series_ids', [])
|
||||
options = data.get('options', {})
|
||||
|
||||
if not series_ids:
|
||||
return jsonify({'success': False, 'error': 'No series IDs provided'}), 400
|
||||
|
||||
task_id = str(uuid.uuid4())
|
||||
|
||||
active_operations[task_id] = {
|
||||
'id': operation_id,
|
||||
'type': 'organize',
|
||||
'status': 'running',
|
||||
'progress': {
|
||||
'completed': 0,
|
||||
'total': len(series_ids),
|
||||
'message': 'Starting organization...'
|
||||
}
|
||||
}
|
||||
|
||||
def run_bulk_organize():
|
||||
loop = asyncio.new_event_loop()
|
||||
asyncio.set_event_loop(loop)
|
||||
try:
|
||||
result = loop.run_until_complete(
|
||||
bulk_operations_manager.bulk_organize(series_ids, options, operation_id)
|
||||
)
|
||||
active_operations[task_id]['status'] = 'completed'
|
||||
active_operations[task_id]['result'] = result
|
||||
except Exception as e:
|
||||
active_operations[task_id]['status'] = 'failed'
|
||||
active_operations[task_id]['error'] = str(e)
|
||||
finally:
|
||||
loop.close()
|
||||
|
||||
thread = threading.Thread(target=run_bulk_organize)
|
||||
thread.start()
|
||||
|
||||
return jsonify({'success': True, 'task_id': task_id})
|
||||
|
||||
except Exception as e:
|
||||
return jsonify({'success': False, 'error': str(e)}), 500
|
||||
|
||||
@bulk_api_bp.route('/delete', methods=['DELETE'])
|
||||
def bulk_delete():
|
||||
"""Start bulk delete operation."""
|
||||
try:
|
||||
data = request.get_json()
|
||||
operation_id = data.get('operation_id')
|
||||
series_ids = data.get('series_ids', [])
|
||||
|
||||
if not series_ids:
|
||||
return jsonify({'success': False, 'error': 'No series IDs provided'}), 400
|
||||
|
||||
task_id = str(uuid.uuid4())
|
||||
|
||||
active_operations[task_id] = {
|
||||
'id': operation_id,
|
||||
'type': 'delete',
|
||||
'status': 'running',
|
||||
'progress': {
|
||||
'completed': 0,
|
||||
'total': len(series_ids),
|
||||
'message': 'Starting deletion...'
|
||||
}
|
||||
}
|
||||
|
||||
def run_bulk_delete():
|
||||
loop = asyncio.new_event_loop()
|
||||
asyncio.set_event_loop(loop)
|
||||
try:
|
||||
result = loop.run_until_complete(
|
||||
bulk_operations_manager.bulk_delete(series_ids, operation_id)
|
||||
)
|
||||
active_operations[task_id]['status'] = 'completed'
|
||||
active_operations[task_id]['result'] = result
|
||||
except Exception as e:
|
||||
active_operations[task_id]['status'] = 'failed'
|
||||
active_operations[task_id]['error'] = str(e)
|
||||
finally:
|
||||
loop.close()
|
||||
|
||||
thread = threading.Thread(target=run_bulk_delete)
|
||||
thread.start()
|
||||
|
||||
return jsonify({'success': True, 'task_id': task_id})
|
||||
|
||||
except Exception as e:
|
||||
return jsonify({'success': False, 'error': str(e)}), 500
|
||||
|
||||
@bulk_api_bp.route('/export', methods=['POST'])
|
||||
def bulk_export():
|
||||
"""Export series data."""
|
||||
try:
|
||||
data = request.get_json()
|
||||
series_ids = data.get('series_ids', [])
|
||||
format_type = data.get('format', 'json')
|
||||
|
||||
if not series_ids:
|
||||
return jsonify({'success': False, 'error': 'No series IDs provided'}), 400
|
||||
|
||||
# Generate export data
|
||||
loop = asyncio.new_event_loop()
|
||||
asyncio.set_event_loop(loop)
|
||||
try:
|
||||
export_data = loop.run_until_complete(
|
||||
bulk_operations_manager.export_series_data(series_ids, format_type)
|
||||
)
|
||||
finally:
|
||||
loop.close()
|
||||
|
||||
# Determine content type and filename
|
||||
content_types = {
|
||||
'json': 'application/json',
|
||||
'csv': 'text/csv',
|
||||
'xml': 'application/xml'
|
||||
}
|
||||
|
||||
content_type = content_types.get(format_type, 'application/octet-stream')
|
||||
filename = f'series_export_{len(series_ids)}_items.{format_type}'
|
||||
|
||||
return send_file(
|
||||
io.BytesIO(export_data),
|
||||
mimetype=content_type,
|
||||
as_attachment=True,
|
||||
download_name=filename
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
return jsonify({'success': False, 'error': str(e)}), 500
|
||||
|
||||
@bulk_api_bp.route('/status/<task_id>', methods=['GET'])
|
||||
def get_operation_status(task_id):
|
||||
"""Get operation status and progress."""
|
||||
try:
|
||||
if task_id not in active_operations:
|
||||
return jsonify({'error': 'Task not found'}), 404
|
||||
|
||||
operation = active_operations[task_id]
|
||||
|
||||
response = {
|
||||
'complete': operation['status'] in ['completed', 'failed'],
|
||||
'success': operation['status'] == 'completed',
|
||||
'status': operation['status']
|
||||
}
|
||||
|
||||
if 'progress' in operation:
|
||||
response.update(operation['progress'])
|
||||
|
||||
if 'error' in operation:
|
||||
response['error'] = operation['error']
|
||||
|
||||
if 'result' in operation:
|
||||
response['result'] = operation['result']
|
||||
|
||||
return jsonify(response)
|
||||
|
||||
except Exception as e:
|
||||
return jsonify({'error': str(e)}), 500
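# Illustrative polling sketch (not part of the original module): how a client
# might wait for a bulk task to finish. The '/api/bulk' prefix matches the
# blueprint above; host and polling interval are assumptions.
def _example_wait_for_task(base_url: str, task_id: str, poll_seconds: float = 2.0) -> dict:
    import time
    import requests  # assumed client-side dependency

    while True:
        resp = requests.get(f"{base_url}/api/bulk/status/{task_id}", timeout=10)
        resp.raise_for_status()
        status = resp.json()
        if status.get('complete'):
            return status
        time.sleep(poll_seconds)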
@bulk_api_bp.route('/cancel/<task_id>', methods=['POST'])
|
||||
def cancel_operation(task_id):
|
||||
"""Cancel a running operation."""
|
||||
try:
|
||||
if task_id not in active_operations:
|
||||
return jsonify({'error': 'Task not found'}), 404
|
||||
|
||||
# Mark operation as cancelled
|
||||
active_operations[task_id]['status'] = 'cancelled'
|
||||
|
||||
return jsonify({'success': True, 'message': 'Operation cancelled'})
|
||||
|
||||
except Exception as e:
|
||||
return jsonify({'error': str(e)}), 500
|
||||
|
||||
@bulk_api_bp.route('/history', methods=['GET'])
|
||||
def get_operation_history():
|
||||
"""Get history of bulk operations."""
|
||||
try:
|
||||
# Return completed, failed, or cancelled operations
|
||||
history = []
|
||||
for task_id, operation in active_operations.items():
|
||||
if operation['status'] in ['completed', 'failed', 'cancelled']:
|
||||
history.append({
|
||||
'task_id': task_id,
|
||||
'operation_id': operation['id'],
|
||||
'type': operation['type'],
|
||||
'status': operation['status'],
|
||||
'progress': operation.get('progress', {}),
|
||||
'error': operation.get('error'),
|
||||
'result': operation.get('result')
|
||||
})
|
||||
|
||||
# Operations do not record timestamps, so approximate "most recent first"
# by sorting on recorded progress (most items processed first)
history.sort(key=lambda x: x.get('progress', {}).get('completed', 0), reverse=True)
|
||||
|
||||
return jsonify({'history': history})
|
||||
|
||||
except Exception as e:
|
||||
return jsonify({'error': str(e)}), 500
|
||||
|
||||
@bulk_api_bp.route('/cleanup', methods=['POST'])
|
||||
def cleanup_completed_operations():
|
||||
"""Clean up completed/failed operations."""
|
||||
try:
|
||||
to_remove = []
|
||||
for task_id, operation in active_operations.items():
|
||||
if operation['status'] in ['completed', 'failed', 'cancelled']:
|
||||
to_remove.append(task_id)
|
||||
|
||||
for task_id in to_remove:
|
||||
del active_operations[task_id]
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'cleaned_up': len(to_remove),
|
||||
'message': f'Cleaned up {len(to_remove)} completed operations'
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
return jsonify({'error': str(e)}), 500
|
||||
@@ -1,454 +0,0 @@
|
||||
"""
|
||||
API endpoints for configuration management.
|
||||
Provides comprehensive configuration management with validation, backup, and restore functionality.
|
||||
"""
|
||||
|
||||
import json
|
||||
import logging
|
||||
import os
|
||||
from datetime import datetime
|
||||
from typing import Any, Dict, Optional
|
||||
|
||||
from fastapi import APIRouter, Depends, File, Form, HTTPException, UploadFile, status
|
||||
from fastapi.responses import FileResponse
|
||||
from pydantic import BaseModel
|
||||
|
||||
# Import SeriesApp for business logic
|
||||
from src.core.SeriesApp import SeriesApp
|
||||
|
||||
# FastAPI dependencies and models
|
||||
from src.server.fastapi_app import get_current_user, settings
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Create FastAPI router for config management endpoints
|
||||
router = APIRouter(prefix='/api/v1/config', tags=['config'])
|
||||
|
||||
# Pydantic models for requests and responses
|
||||
class ConfigResponse(BaseModel):
|
||||
"""Response model for configuration data."""
|
||||
success: bool = True
|
||||
config: Dict[str, Any]
|
||||
schema: Optional[Dict[str, Any]] = None
|
||||
|
||||
class ConfigUpdateRequest(BaseModel):
|
||||
"""Request model for configuration updates."""
|
||||
config: Dict[str, Any]
|
||||
validate: bool = True
|
||||
|
||||
class ConfigImportResponse(BaseModel):
|
||||
"""Response model for configuration import operations."""
|
||||
success: bool
|
||||
message: str
|
||||
imported_keys: Optional[list] = None
|
||||
skipped_keys: Optional[list] = None
|
||||
|
||||
# Dependency to get SeriesApp instance
|
||||
def get_series_app() -> SeriesApp:
|
||||
"""Get SeriesApp instance for business logic operations."""
|
||||
if not settings.anime_directory:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
|
||||
detail="Anime directory not configured"
|
||||
)
|
||||
return SeriesApp(settings.anime_directory)
|
||||
|
||||
|
||||
@router.get('/', response_model=ConfigResponse)
|
||||
async def get_full_config(
|
||||
current_user: Optional[Dict] = Depends(get_current_user)
|
||||
) -> ConfigResponse:
|
||||
"""Get complete configuration (without sensitive data)."""
|
||||
try:
|
||||
# For now, return a basic config structure
|
||||
# TODO: Replace with actual config management logic
|
||||
config_data = {
|
||||
"anime_directory": settings.anime_directory if hasattr(settings, 'anime_directory') else None,
|
||||
"download_settings": {},
|
||||
"display_settings": {},
|
||||
"security_settings": {}
|
||||
}
|
||||
|
||||
schema = {
|
||||
"anime_directory": {"type": "string", "required": True},
|
||||
"download_settings": {"type": "object"},
|
||||
"display_settings": {"type": "object"},
|
||||
"security_settings": {"type": "object"}
|
||||
}
|
||||
|
||||
return ConfigResponse(
|
||||
success=True,
|
||||
config=config_data,
|
||||
schema=schema
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting configuration: {e}")
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=str(e)
|
||||
)
|
||||
|
||||
|
||||
@router.post('/', response_model=ConfigImportResponse)
|
||||
async def update_config(
|
||||
config_update: ConfigUpdateRequest,
|
||||
current_user: Optional[Dict] = Depends(get_current_user)
|
||||
) -> ConfigImportResponse:
|
||||
"""Update configuration with validation."""
|
||||
try:
|
||||
# For now, just return success
|
||||
# TODO: Replace with actual config management logic
|
||||
logger.info("Configuration updated successfully")
|
||||
return ConfigImportResponse(
|
||||
success=True,
|
||||
message="Configuration updated successfully",
|
||||
imported_keys=list(config_update.config.keys()),
|
||||
skipped_keys=[]
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error updating configuration: {e}")
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=str(e)
|
||||
)
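# Illustrative test sketch (not part of the original module): exercising the
# update endpoint through FastAPI's TestClient. Mounting the router on a
# throwaway app and overriding get_current_user are assumptions made only so
# the sketch is self-contained; the real app wires both up differently.
def _example_update_config_roundtrip() -> None:
    from fastapi import FastAPI
    from fastapi.testclient import TestClient

    app = FastAPI()
    app.include_router(router)
    app.dependency_overrides[get_current_user] = lambda: None  # skip auth for the sketch

    client = TestClient(app)
    payload = {"config": {"anime_directory": "/data/anime"}, "validate": True}
    resp = client.post("/api/v1/config/", json=payload)
    assert resp.status_code == 200
    assert resp.json()["imported_keys"] == ["anime_directory"]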
@config_bp.route('/validate', methods=['POST'])
|
||||
@require_auth
|
||||
def validate_config():
|
||||
"""Validate configuration without saving."""
|
||||
try:
|
||||
data = request.get_json() or {}
|
||||
|
||||
validation_result = config.validate_config(data)
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'validation': validation_result
|
||||
})
|
||||
except Exception as e:
|
||||
logger.error(f"Error validating configuration: {e}")
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}), 500
|
||||
|
||||
@config_bp.route('/section/<section_name>', methods=['GET'])
|
||||
@require_auth
|
||||
def get_config_section(section_name):
|
||||
"""Get specific configuration section."""
|
||||
try:
|
||||
section_data = config.get(section_name, {})
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'section': section_name,
|
||||
'config': section_data
|
||||
})
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting config section {section_name}: {e}")
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}), 500
|
||||
|
||||
@config_bp.route('/section/<section_name>', methods=['POST'])
|
||||
@require_auth
|
||||
def update_config_section(section_name):
|
||||
"""Update specific configuration section."""
|
||||
try:
|
||||
data = request.get_json() or {}
|
||||
|
||||
# Get current config
|
||||
current_config = config.export_config(include_sensitive=True)
|
||||
|
||||
# Update the specific section
|
||||
current_config[section_name] = data
|
||||
|
||||
# Validate and save
|
||||
result = config.import_config(current_config, validate=True)
|
||||
|
||||
if result['success']:
|
||||
logger.info(f"Configuration section '{section_name}' updated successfully")
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'message': f'Configuration section "{section_name}" updated successfully',
|
||||
'warnings': result.get('warnings', [])
|
||||
})
|
||||
else:
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': 'Configuration validation failed',
|
||||
'errors': result['errors'],
|
||||
'warnings': result.get('warnings', [])
|
||||
}), 400
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error updating config section {section_name}: {e}")
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}), 500
|
||||
|
||||
@config_bp.route('/backup', methods=['POST'])
|
||||
@require_auth
|
||||
def create_backup():
|
||||
"""Create configuration backup."""
|
||||
try:
|
||||
data = request.get_json() or {}
|
||||
backup_name = data.get('name', '')
|
||||
|
||||
# Generate backup filename
|
||||
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
|
||||
if backup_name:
|
||||
# Sanitize backup name
|
||||
backup_name = secure_filename(backup_name)
|
||||
filename = f"config_backup_{backup_name}_{timestamp}.json"
|
||||
else:
|
||||
filename = f"config_backup_{timestamp}.json"
|
||||
|
||||
backup_path = config.backup_config(filename)
|
||||
|
||||
logger.info(f"Configuration backup created: {backup_path}")
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'message': 'Backup created successfully',
|
||||
'backup_path': backup_path,
|
||||
'filename': filename
|
||||
})
|
||||
except Exception as e:
|
||||
logger.error(f"Error creating backup: {e}")
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}), 500
|
||||
|
||||
@config_bp.route('/backups', methods=['GET'])
|
||||
@require_auth
|
||||
def list_backups():
|
||||
"""List available configuration backups."""
|
||||
try:
|
||||
backups = []
|
||||
|
||||
# Scan current directory for backup files
|
||||
for filename in os.listdir('.'):
|
||||
if filename.startswith('config_backup_') and filename.endswith('.json'):
|
||||
file_path = os.path.abspath(filename)
|
||||
file_size = os.path.getsize(filename)
|
||||
file_modified = datetime.fromtimestamp(os.path.getmtime(filename))
|
||||
|
||||
backups.append({
|
||||
'filename': filename,
|
||||
'path': file_path,
|
||||
'size': file_size,
|
||||
'size_kb': round(file_size / 1024, 2),
|
||||
'modified': file_modified.isoformat(),
|
||||
'modified_display': file_modified.strftime('%Y-%m-%d %H:%M:%S')
|
||||
})
|
||||
|
||||
# Sort by modification date (newest first)
|
||||
backups.sort(key=lambda x: x['modified'], reverse=True)
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'backups': backups
|
||||
})
|
||||
except Exception as e:
|
||||
logger.error(f"Error listing backups: {e}")
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}), 500
|
||||
|
||||
@config_bp.route('/backup/<filename>/restore', methods=['POST'])
|
||||
@require_auth
|
||||
def restore_backup(filename):
|
||||
"""Restore configuration from backup."""
|
||||
try:
|
||||
# Security: Only allow config backup files
|
||||
if not filename.startswith('config_backup_') or not filename.endswith('.json'):
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': 'Invalid backup file'
|
||||
}), 400
|
||||
|
||||
# Security: Check if file exists
|
||||
if not os.path.exists(filename):
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': 'Backup file not found'
|
||||
}), 404
|
||||
|
||||
success = config.restore_config(filename)
|
||||
|
||||
if success:
|
||||
logger.info(f"Configuration restored from backup: {filename}")
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'message': 'Configuration restored successfully'
|
||||
})
|
||||
else:
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': 'Failed to restore configuration'
|
||||
}), 500
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error restoring backup {filename}: {e}")
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}), 500
|
||||
|
||||
@config_bp.route('/backup/<filename>/download', methods=['GET'])
|
||||
@require_auth
|
||||
def download_backup(filename):
|
||||
"""Download configuration backup file."""
|
||||
try:
|
||||
# Security: Only allow config backup files
|
||||
if not filename.startswith('config_backup_') or not filename.endswith('.json'):
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': 'Invalid backup file'
|
||||
}), 400
|
||||
|
||||
# Security: Check if file exists
|
||||
if not os.path.exists(filename):
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': 'Backup file not found'
|
||||
}), 404
|
||||
|
||||
return send_file(
|
||||
filename,
|
||||
as_attachment=True,
|
||||
download_name=filename
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Error downloading backup {filename}: {e}")
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}), 500
|
||||
|
||||
@config_bp.route('/export', methods=['POST'])
|
||||
@require_auth
|
||||
def export_config():
|
||||
"""Export current configuration to JSON."""
|
||||
try:
|
||||
data = request.get_json() or {}
|
||||
include_sensitive = data.get('include_sensitive', False)
|
||||
|
||||
config_data = config.export_config(include_sensitive=include_sensitive)
|
||||
|
||||
# Create filename with timestamp
|
||||
timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
|
||||
filename = f"aniworld_config_export_{timestamp}.json"
|
||||
|
||||
# Write to temporary file
|
||||
with open(filename, 'w', encoding='utf-8') as f:
|
||||
json.dump(config_data, f, indent=4)
|
||||
|
||||
return send_file(
|
||||
filename,
|
||||
as_attachment=True,
|
||||
download_name=filename,
|
||||
mimetype='application/json'
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error exporting configuration: {e}")
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}), 500
|
||||
|
||||
|
||||
@router.post('/import', response_model=ConfigImportResponse)
|
||||
async def import_config(
|
||||
config_file: UploadFile = File(...),
|
||||
current_user: Optional[Dict] = Depends(get_current_user)
|
||||
) -> ConfigImportResponse:
|
||||
"""Import configuration from uploaded JSON file."""
|
||||
try:
|
||||
# Validate file type
|
||||
if not config_file.filename:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
detail="No file selected"
|
||||
)
|
||||
|
||||
if not config_file.filename.endswith('.json'):
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
detail="Invalid file type. Only JSON files are allowed."
|
||||
)
|
||||
|
||||
# Read and parse JSON
|
||||
try:
|
||||
content = await config_file.read()
|
||||
config_data = json.loads(content.decode('utf-8'))
|
||||
except json.JSONDecodeError as e:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_400_BAD_REQUEST,
|
||||
detail=f"Invalid JSON format: {e}"
|
||||
)
|
||||
|
||||
# For now, just return success with the keys that would be imported
|
||||
# TODO: Replace with actual config management logic
|
||||
logger.info(f"Configuration imported from file: {config_file.filename}")
|
||||
return ConfigImportResponse(
|
||||
success=True,
|
||||
message="Configuration imported successfully",
|
||||
imported_keys=list(config_data.keys()) if isinstance(config_data, dict) else [],
|
||||
skipped_keys=[]
|
||||
)
|
||||
|
||||
except HTTPException:
|
||||
raise
|
||||
except Exception as e:
|
||||
logger.error(f"Error importing configuration: {e}")
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
|
||||
detail=str(e)
|
||||
)
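# Illustrative upload sketch (not part of the original module): posting a JSON
# config file to the import endpoint with requests. The host and token are
# assumptions; the form field name 'config_file' matches the UploadFile
# parameter above.
def _example_import_config(base_url: str, path: str, token: str) -> dict:
    import requests  # assumed client-side dependency

    with open(path, "rb") as fh:
        resp = requests.post(
            f"{base_url}/api/v1/config/import",
            files={"config_file": (path, fh, "application/json")},
            headers={"Authorization": f"Bearer {token}"},
            timeout=30,
        )
    resp.raise_for_status()
    return resp.json()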
@config_bp.route('/reset', methods=['POST'])
|
||||
@require_auth
|
||||
def reset_config():
|
||||
"""Reset configuration to defaults (preserves security settings)."""
|
||||
try:
|
||||
data = request.get_json() or {}
|
||||
preserve_security = data.get('preserve_security', True)
|
||||
|
||||
# Get current security settings
|
||||
current_security = config.get('security', {}) if preserve_security else {}
|
||||
|
||||
# Reset to defaults
|
||||
config._config = config.default_config.copy()
|
||||
|
||||
# Restore security settings if requested
|
||||
if preserve_security and current_security:
|
||||
config._config['security'] = current_security
|
||||
|
||||
success = config.save_config()
|
||||
|
||||
if success:
|
||||
logger.info("Configuration reset to defaults")
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'message': 'Configuration reset to defaults'
|
||||
})
|
||||
else:
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': 'Failed to save configuration'
|
||||
}), 500
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error resetting configuration: {e}")
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}), 500
|
||||
@@ -1,649 +0,0 @@
|
||||
"""
|
||||
Database & Storage Management API Endpoints
|
||||
|
||||
This module provides REST API endpoints for database operations,
|
||||
backup management, and storage monitoring.
|
||||
"""
|
||||
|
||||
from flask import Blueprint, request, jsonify, send_file
|
||||
from auth import require_auth, optional_auth
|
||||
from error_handler import handle_api_errors, RetryableError, NonRetryableError
|
||||
from database_manager import (
|
||||
database_manager, anime_repository, backup_manager, storage_manager,
|
||||
AnimeMetadata
|
||||
)
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
import os
|
||||
|
||||
|
||||
# Blueprint for database management endpoints
|
||||
database_bp = Blueprint('database', __name__)
|
||||
|
||||
|
||||
# Database Information Endpoints
|
||||
@database_bp.route('/api/database/info')
|
||||
@handle_api_errors
|
||||
@optional_auth
|
||||
def get_database_info():
|
||||
"""Get database information and statistics."""
|
||||
try:
|
||||
# Get schema version
|
||||
schema_version = database_manager.get_current_version()
|
||||
|
||||
# Get table statistics
|
||||
stats_query = """
|
||||
SELECT
|
||||
(SELECT COUNT(*) FROM anime_metadata) as anime_count,
|
||||
(SELECT COUNT(*) FROM episode_metadata) as episode_count,
|
||||
(SELECT COUNT(*) FROM episode_metadata WHERE is_downloaded = 1) as downloaded_count,
|
||||
(SELECT COUNT(*) FROM download_history) as download_history_count
|
||||
"""
|
||||
|
||||
results = database_manager.execute_query(stats_query)
|
||||
stats = dict(results[0]) if results else {}
|
||||
|
||||
# Get database file size
|
||||
db_size = os.path.getsize(database_manager.db_path) if os.path.exists(database_manager.db_path) else 0
|
||||
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'data': {
|
||||
'schema_version': schema_version,
|
||||
'database_path': database_manager.db_path,
|
||||
'database_size_mb': round(db_size / (1024 * 1024), 2),
|
||||
'statistics': {
|
||||
'anime_count': stats.get('anime_count', 0),
|
||||
'episode_count': stats.get('episode_count', 0),
|
||||
'downloaded_count': stats.get('downloaded_count', 0),
|
||||
'download_history_count': stats.get('download_history_count', 0)
|
||||
}
|
||||
}
|
||||
})
|
||||
except Exception as e:
|
||||
raise RetryableError(f"Failed to get database info: {e}")
|
||||
|
||||
|
||||
# Anime Metadata Endpoints
|
||||
@database_bp.route('/api/database/anime')
|
||||
@handle_api_errors
|
||||
@optional_auth
|
||||
def get_all_anime():
|
||||
"""Get all anime from database."""
|
||||
try:
|
||||
status_filter = request.args.get('status')
|
||||
anime_list = anime_repository.get_all_anime(status_filter)
|
||||
|
||||
# Convert to serializable format
|
||||
anime_data = []
|
||||
for anime in anime_list:
|
||||
anime_data.append({
|
||||
'anime_id': anime.anime_id,
|
||||
'name': anime.name,
|
||||
'folder': anime.folder,
|
||||
'key': anime.key,
|
||||
'description': anime.description,
|
||||
'genres': anime.genres,
|
||||
'release_year': anime.release_year,
|
||||
'status': anime.status,
|
||||
'total_episodes': anime.total_episodes,
|
||||
'poster_url': anime.poster_url,
|
||||
'last_updated': anime.last_updated.isoformat(),
|
||||
'created_at': anime.created_at.isoformat(),
|
||||
'custom_metadata': anime.custom_metadata
|
||||
})
|
||||
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'data': {
|
||||
'anime': anime_data,
|
||||
'count': len(anime_data)
|
||||
}
|
||||
})
|
||||
except Exception as e:
|
||||
raise RetryableError(f"Failed to get anime list: {e}")
|
||||
|
||||
|
||||
@database_bp.route('/api/database/anime/<anime_id>')
|
||||
@handle_api_errors
|
||||
@optional_auth
|
||||
def get_anime_by_id(anime_id):
|
||||
"""Get specific anime by ID."""
|
||||
try:
|
||||
query = "SELECT * FROM anime_metadata WHERE anime_id = ?"
|
||||
results = database_manager.execute_query(query, (anime_id,))
|
||||
|
||||
if not results:
|
||||
return jsonify({
|
||||
'status': 'error',
|
||||
'message': 'Anime not found'
|
||||
}), 404
|
||||
|
||||
row = results[0]
|
||||
anime_data = {
|
||||
'anime_id': row['anime_id'],
|
||||
'name': row['name'],
|
||||
'folder': row['folder'],
|
||||
'key': row['key'],
|
||||
'description': row['description'],
|
||||
'genres': row['genres'],
|
||||
'release_year': row['release_year'],
|
||||
'status': row['status'],
|
||||
'total_episodes': row['total_episodes'],
|
||||
'poster_url': row['poster_url'],
|
||||
'last_updated': row['last_updated'],
|
||||
'created_at': row['created_at'],
|
||||
'custom_metadata': row['custom_metadata']
|
||||
}
|
||||
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'data': anime_data
|
||||
})
|
||||
except Exception as e:
|
||||
raise RetryableError(f"Failed to get anime: {e}")
|
||||
|
||||
|
||||
@database_bp.route('/api/database/anime', methods=['POST'])
|
||||
@handle_api_errors
|
||||
@require_auth
|
||||
def create_anime():
|
||||
"""Create new anime record."""
|
||||
try:
|
||||
data = request.get_json()
|
||||
|
||||
# Validate required fields
|
||||
required_fields = ['name', 'folder']
|
||||
for field in required_fields:
|
||||
if field not in data:
|
||||
return jsonify({
|
||||
'status': 'error',
|
||||
'message': f'Missing required field: {field}'
|
||||
}), 400
|
||||
|
||||
# Create anime metadata
|
||||
anime = AnimeMetadata(
|
||||
anime_id=str(uuid.uuid4()),
|
||||
name=data['name'],
|
||||
folder=data['folder'],
|
||||
key=data.get('key'),
|
||||
description=data.get('description'),
|
||||
genres=data.get('genres', []),
|
||||
release_year=data.get('release_year'),
|
||||
status=data.get('status', 'ongoing'),
|
||||
total_episodes=data.get('total_episodes'),
|
||||
poster_url=data.get('poster_url'),
|
||||
custom_metadata=data.get('custom_metadata', {})
|
||||
)
|
||||
|
||||
success = anime_repository.create_anime(anime)
|
||||
|
||||
if success:
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'message': 'Anime created successfully',
|
||||
'data': {
|
||||
'anime_id': anime.anime_id
|
||||
}
|
||||
}), 201
|
||||
else:
|
||||
return jsonify({
|
||||
'status': 'error',
|
||||
'message': 'Failed to create anime'
|
||||
}), 500
|
||||
|
||||
except Exception as e:
|
||||
raise RetryableError(f"Failed to create anime: {e}")
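# Illustrative request body (not part of the original module) in the shape
# create_anime() above accepts; only 'name' and 'folder' are required, the
# remaining values are examples.
EXAMPLE_ANIME_PAYLOAD = {
    "name": "Example Series",
    "folder": "example-series",
    "genres": ["Action", "Drama"],
    "release_year": 2021,
    "status": "ongoing",
    "total_episodes": 24,
    "custom_metadata": {"source": "manual-entry"},
}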
@database_bp.route('/api/database/anime/<anime_id>', methods=['PUT'])
|
||||
@handle_api_errors
|
||||
@require_auth
|
||||
def update_anime(anime_id):
|
||||
"""Update anime metadata."""
|
||||
try:
|
||||
data = request.get_json()
|
||||
|
||||
# Get existing anime
|
||||
existing = anime_repository.get_anime_by_folder(data.get('folder', ''))
|
||||
if not existing or existing.anime_id != anime_id:
|
||||
return jsonify({
|
||||
'status': 'error',
|
||||
'message': 'Anime not found'
|
||||
}), 404
|
||||
|
||||
# Update fields
|
||||
if 'name' in data:
|
||||
existing.name = data['name']
|
||||
if 'key' in data:
|
||||
existing.key = data['key']
|
||||
if 'description' in data:
|
||||
existing.description = data['description']
|
||||
if 'genres' in data:
|
||||
existing.genres = data['genres']
|
||||
if 'release_year' in data:
|
||||
existing.release_year = data['release_year']
|
||||
if 'status' in data:
|
||||
existing.status = data['status']
|
||||
if 'total_episodes' in data:
|
||||
existing.total_episodes = data['total_episodes']
|
||||
if 'poster_url' in data:
|
||||
existing.poster_url = data['poster_url']
|
||||
if 'custom_metadata' in data:
|
||||
existing.custom_metadata.update(data['custom_metadata'])
|
||||
|
||||
success = anime_repository.update_anime(existing)
|
||||
|
||||
if success:
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'message': 'Anime updated successfully'
|
||||
})
|
||||
else:
|
||||
return jsonify({
|
||||
'status': 'error',
|
||||
'message': 'Failed to update anime'
|
||||
}), 500
|
||||
|
||||
except Exception as e:
|
||||
raise RetryableError(f"Failed to update anime: {e}")
|
||||
|
||||
|
||||
@database_bp.route('/api/database/anime/<anime_id>', methods=['DELETE'])
|
||||
@handle_api_errors
|
||||
@require_auth
|
||||
def delete_anime(anime_id):
|
||||
"""Delete anime and related data."""
|
||||
try:
|
||||
success = anime_repository.delete_anime(anime_id)
|
||||
|
||||
if success:
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'message': 'Anime deleted successfully'
|
||||
})
|
||||
else:
|
||||
return jsonify({
|
||||
'status': 'error',
|
||||
'message': 'Anime not found'
|
||||
}), 404
|
||||
|
||||
except Exception as e:
|
||||
raise RetryableError(f"Failed to delete anime: {e}")
|
||||
|
||||
|
||||
@database_bp.route('/api/database/anime/search')
|
||||
@handle_api_errors
|
||||
@optional_auth
|
||||
def search_anime():
|
||||
"""Search anime by name or description."""
|
||||
try:
|
||||
search_term = request.args.get('q', '').strip()
|
||||
|
||||
if not search_term:
|
||||
return jsonify({
|
||||
'status': 'error',
|
||||
'message': 'Search term is required'
|
||||
}), 400
|
||||
|
||||
results = anime_repository.search_anime(search_term)
|
||||
|
||||
# Convert to serializable format
|
||||
anime_data = []
|
||||
for anime in results:
|
||||
anime_data.append({
|
||||
'anime_id': anime.anime_id,
|
||||
'name': anime.name,
|
||||
'folder': anime.folder,
|
||||
'key': anime.key,
|
||||
'description': anime.description,
|
||||
'genres': anime.genres,
|
||||
'release_year': anime.release_year,
|
||||
'status': anime.status
|
||||
})
|
||||
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'data': {
|
||||
'results': anime_data,
|
||||
'count': len(anime_data),
|
||||
'search_term': search_term
|
||||
}
|
||||
})
|
||||
except Exception as e:
|
||||
raise RetryableError(f"Failed to search anime: {e}")
|
||||
|
||||
|
||||
# Backup Management Endpoints
|
||||
@database_bp.route('/api/database/backups')
|
||||
@handle_api_errors
|
||||
@optional_auth
|
||||
def list_backups():
|
||||
"""List all available backups."""
|
||||
try:
|
||||
backups = backup_manager.list_backups()
|
||||
|
||||
backup_data = []
|
||||
for backup in backups:
|
||||
backup_data.append({
|
||||
'backup_id': backup.backup_id,
|
||||
'backup_type': backup.backup_type,
|
||||
'created_at': backup.created_at.isoformat(),
|
||||
'size_mb': round(backup.size_bytes / (1024 * 1024), 2),
|
||||
'description': backup.description,
|
||||
'tables_included': backup.tables_included
|
||||
})
|
||||
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'data': {
|
||||
'backups': backup_data,
|
||||
'count': len(backup_data)
|
||||
}
|
||||
})
|
||||
except Exception as e:
|
||||
raise RetryableError(f"Failed to list backups: {e}")
|
||||
|
||||
|
||||
@database_bp.route('/api/database/backups/create', methods=['POST'])
|
||||
@handle_api_errors
|
||||
@require_auth
|
||||
def create_backup():
|
||||
"""Create a new database backup."""
|
||||
try:
|
||||
data = request.get_json() or {}
|
||||
backup_type = data.get('backup_type', 'full')
|
||||
description = data.get('description')
|
||||
|
||||
if backup_type not in ['full', 'metadata_only']:
|
||||
return jsonify({
|
||||
'status': 'error',
|
||||
'message': 'Backup type must be "full" or "metadata_only"'
|
||||
}), 400
|
||||
|
||||
if backup_type == 'full':
|
||||
backup_info = backup_manager.create_full_backup(description)
|
||||
else:
|
||||
backup_info = backup_manager.create_metadata_backup(description)
|
||||
|
||||
if backup_info:
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'message': f'{backup_type.title()} backup created successfully',
|
||||
'data': {
|
||||
'backup_id': backup_info.backup_id,
|
||||
'backup_type': backup_info.backup_type,
|
||||
'size_mb': round(backup_info.size_bytes / (1024 * 1024), 2),
|
||||
'created_at': backup_info.created_at.isoformat()
|
||||
}
|
||||
}), 201
|
||||
else:
|
||||
return jsonify({
|
||||
'status': 'error',
|
||||
'message': 'Failed to create backup'
|
||||
}), 500
|
||||
|
||||
except Exception as e:
|
||||
raise RetryableError(f"Failed to create backup: {e}")
|
||||
|
||||
|
||||
@database_bp.route('/api/database/backups/<backup_id>/restore', methods=['POST'])
|
||||
@handle_api_errors
|
||||
@require_auth
|
||||
def restore_backup(backup_id):
|
||||
"""Restore from a backup."""
|
||||
try:
|
||||
success = backup_manager.restore_backup(backup_id)
|
||||
|
||||
if success:
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'message': 'Backup restored successfully'
|
||||
})
|
||||
else:
|
||||
return jsonify({
|
||||
'status': 'error',
|
||||
'message': 'Failed to restore backup'
|
||||
}), 500
|
||||
|
||||
except Exception as e:
|
||||
raise RetryableError(f"Failed to restore backup: {e}")
|
||||
|
||||
|
||||
@database_bp.route('/api/database/backups/<backup_id>/download')
|
||||
@handle_api_errors
|
||||
@require_auth
|
||||
def download_backup(backup_id):
|
||||
"""Download a backup file."""
|
||||
try:
|
||||
backups = backup_manager.list_backups()
|
||||
target_backup = None
|
||||
|
||||
for backup in backups:
|
||||
if backup.backup_id == backup_id:
|
||||
target_backup = backup
|
||||
break
|
||||
|
||||
if not target_backup:
|
||||
return jsonify({
|
||||
'status': 'error',
|
||||
'message': 'Backup not found'
|
||||
}), 404
|
||||
|
||||
if not os.path.exists(target_backup.backup_path):
|
||||
return jsonify({
|
||||
'status': 'error',
|
||||
'message': 'Backup file not found'
|
||||
}), 404
|
||||
|
||||
filename = os.path.basename(target_backup.backup_path)
|
||||
return send_file(target_backup.backup_path, as_attachment=True, download_name=filename)
|
||||
|
||||
except Exception as e:
|
||||
raise RetryableError(f"Failed to download backup: {e}")
|
||||
|
||||
|
||||
@database_bp.route('/api/database/backups/cleanup', methods=['POST'])
|
||||
@handle_api_errors
|
||||
@require_auth
|
||||
def cleanup_backups():
|
||||
"""Clean up old backup files."""
|
||||
try:
|
||||
data = request.get_json() or {}
|
||||
keep_days = data.get('keep_days', 30)
|
||||
keep_count = data.get('keep_count', 10)
|
||||
|
||||
if keep_days < 1 or keep_count < 1:
|
||||
return jsonify({
|
||||
'status': 'error',
|
||||
'message': 'keep_days and keep_count must be positive integers'
|
||||
}), 400
|
||||
|
||||
backup_manager.cleanup_old_backups(keep_days, keep_count)
|
||||
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'message': f'Backup cleanup completed (keeping {keep_count} backups, max {keep_days} days old)'
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
raise RetryableError(f"Failed to cleanup backups: {e}")
|
||||
|
||||
|
||||
# Storage Management Endpoints
|
||||
@database_bp.route('/api/database/storage/summary')
|
||||
@handle_api_errors
|
||||
@optional_auth
|
||||
def get_storage_summary():
|
||||
"""Get storage usage summary."""
|
||||
try:
|
||||
summary = storage_manager.get_storage_summary()
|
||||
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'data': summary
|
||||
})
|
||||
except Exception as e:
|
||||
raise RetryableError(f"Failed to get storage summary: {e}")
|
||||
|
||||
|
||||
@database_bp.route('/api/database/storage/locations')
|
||||
@handle_api_errors
|
||||
@optional_auth
|
||||
def get_storage_locations():
|
||||
"""Get all storage locations."""
|
||||
try:
|
||||
query = """
|
||||
SELECT sl.*, am.name as anime_name
|
||||
FROM storage_locations sl
|
||||
LEFT JOIN anime_metadata am ON sl.anime_id = am.anime_id
|
||||
WHERE sl.is_active = 1
|
||||
ORDER BY sl.location_type, sl.path
|
||||
"""
|
||||
|
||||
results = database_manager.execute_query(query)
|
||||
|
||||
locations = []
|
||||
for row in results:
|
||||
locations.append({
|
||||
'location_id': row['location_id'],
|
||||
'anime_id': row['anime_id'],
|
||||
'anime_name': row['anime_name'],
|
||||
'path': row['path'],
|
||||
'location_type': row['location_type'],
|
||||
'free_space_gb': (row['free_space_bytes'] / (1024**3)) if row['free_space_bytes'] else None,
|
||||
'total_space_gb': (row['total_space_bytes'] / (1024**3)) if row['total_space_bytes'] else None,
|
||||
'usage_percent': ((row['total_space_bytes'] - row['free_space_bytes']) / row['total_space_bytes'] * 100) if row['total_space_bytes'] and row['free_space_bytes'] is not None else None,
|
||||
'last_checked': row['last_checked']
|
||||
})
|
||||
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'data': {
|
||||
'locations': locations,
|
||||
'count': len(locations)
|
||||
}
|
||||
})
|
||||
except Exception as e:
|
||||
raise RetryableError(f"Failed to get storage locations: {e}")
|
||||
|
||||
|
||||
@database_bp.route('/api/database/storage/locations', methods=['POST'])
|
||||
@handle_api_errors
|
||||
@require_auth
|
||||
def add_storage_location():
|
||||
"""Add a new storage location."""
|
||||
try:
|
||||
data = request.get_json()
|
||||
|
||||
path = data.get('path')
|
||||
location_type = data.get('location_type', 'primary')
|
||||
anime_id = data.get('anime_id')
|
||||
|
||||
if not path:
|
||||
return jsonify({
|
||||
'status': 'error',
|
||||
'message': 'Path is required'
|
||||
}), 400
|
||||
|
||||
if location_type not in ['primary', 'backup', 'cache']:
|
||||
return jsonify({
|
||||
'status': 'error',
|
||||
'message': 'Location type must be primary, backup, or cache'
|
||||
}), 400
|
||||
|
||||
location_id = storage_manager.add_storage_location(path, location_type, anime_id)
|
||||
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'message': 'Storage location added successfully',
|
||||
'data': {
|
||||
'location_id': location_id
|
||||
}
|
||||
}), 201
|
||||
|
||||
except Exception as e:
|
||||
raise RetryableError(f"Failed to add storage location: {e}")
|
||||
|
||||
|
||||
@database_bp.route('/api/database/storage/locations/<location_id>/update', methods=['POST'])
|
||||
@handle_api_errors
|
||||
@require_auth
|
||||
def update_storage_location(location_id):
|
||||
"""Update storage location statistics."""
|
||||
try:
|
||||
storage_manager.update_storage_stats(location_id)
|
||||
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'message': 'Storage statistics updated successfully'
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
raise RetryableError(f"Failed to update storage location: {e}")
|
||||
|
||||
|
||||


# Database Maintenance Endpoints
@database_bp.route('/api/database/maintenance/vacuum', methods=['POST'])
@handle_api_errors
@require_auth
def vacuum_database():
    """Perform database VACUUM operation to reclaim space."""
    try:
        with database_manager.get_connection() as conn:
            conn.execute("VACUUM")

        return jsonify({
            'status': 'success',
            'message': 'Database vacuum completed successfully'
        })

    except Exception as e:
        raise RetryableError(f"Failed to vacuum database: {e}")


@database_bp.route('/api/database/maintenance/analyze', methods=['POST'])
@handle_api_errors
@require_auth
def analyze_database():
    """Perform database ANALYZE operation to update statistics."""
    try:
        with database_manager.get_connection() as conn:
            conn.execute("ANALYZE")

        return jsonify({
            'status': 'success',
            'message': 'Database analysis completed successfully'
        })

    except Exception as e:
        raise RetryableError(f"Failed to analyze database: {e}")


@database_bp.route('/api/database/maintenance/integrity-check', methods=['POST'])
@handle_api_errors
@require_auth
def integrity_check():
    """Perform database integrity check."""
    try:
        with database_manager.get_connection() as conn:
            cursor = conn.execute("PRAGMA integrity_check")
            results = cursor.fetchall()

        # Check if database is OK
        is_ok = len(results) == 1 and results[0][0] == 'ok'

        return jsonify({
            'status': 'success',
            'data': {
                'integrity_ok': is_ok,
                'results': [row[0] for row in results]
            }
        })

    except Exception as e:
        raise RetryableError(f"Failed to check database integrity: {e}")
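
The three maintenance routes above take no request body and return a JSON status, so they can be exercised with a small client script. The sketch below is illustrative only: the base URL, port, and the Bearer token header are assumptions about the deployment, not values defined in this module.

import requests

BASE_URL = "http://127.0.0.1:5000"  # assumed development host/port
HEADERS = {"Authorization": "Bearer <token>"}  # placeholder credential for require_auth

for operation in ("vacuum", "analyze", "integrity-check"):
    # Each maintenance route is a POST that returns a JSON status payload.
    response = requests.post(
        f"{BASE_URL}/api/database/maintenance/{operation}",
        headers=HEADERS,
        timeout=120,
    )
    print(operation, response.status_code, response.json())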


# Export the blueprint
__all__ = ['database_bp']
@ -1,581 +0,0 @@
"""
Diagnostics API endpoints.

This module handles all diagnostic and monitoring operations including:
- System health checks
- Performance monitoring
- Error reporting
- Network diagnostics
"""

from flask import Blueprint, request, jsonify
from typing import Dict, List, Any, Optional, Tuple
import logging
import psutil
import socket
import requests
import time
import platform
import sys
import os
from datetime import datetime, timedelta

# Import shared utilities
try:
    from src.server.web.controllers.shared.auth_decorators import require_auth, optional_auth
    from src.server.web.controllers.shared.error_handlers import handle_api_errors
    from src.server.web.controllers.shared.validators import validate_query_params
    from src.server.web.controllers.shared.response_helpers import (
        create_success_response, create_error_response, format_datetime, format_file_size
    )
except ImportError:
    # Fallback imports for development
    def require_auth(f): return f
    def optional_auth(f): return f
    def handle_api_errors(f): return f
    def validate_query_params(**kwargs): return lambda f: f
    def create_success_response(msg, code=200, data=None): return jsonify({'success': True, 'message': msg, 'data': data}), code
    def create_error_response(msg, code=400, details=None): return jsonify({'error': msg, 'details': details}), code
    def format_datetime(dt): return str(dt) if dt else None
    def format_file_size(size): return f"{size} bytes"

# Import diagnostic components
try:
    from src.server.data.error_manager import ErrorManager
    from src.server.data.performance_manager import PerformanceManager
    from src.server.data.system_manager import SystemManager
except ImportError:
    # Fallback for development
    class ErrorManager:
        def get_recent_errors(self, **kwargs): return []
        def get_error_stats(self): return {}
        def clear_errors(self): return True
        def report_error(self, **kwargs): return 1

    class PerformanceManager:
        def get_performance_metrics(self): return {}
        def get_performance_history(self, **kwargs): return []
        def record_metric(self, **kwargs): return True

    class SystemManager:
        def get_system_info(self): return {}
        def get_disk_usage(self): return {}
        def get_network_status(self): return {}
        def test_network_connectivity(self, url): return {'success': True, 'response_time': 0.1}

# Create blueprint
diagnostics_bp = Blueprint('diagnostics', __name__)

# Initialize managers
error_manager = ErrorManager()
performance_manager = PerformanceManager()
system_manager = SystemManager()

logger = logging.getLogger(__name__)


@diagnostics_bp.route('/diagnostics/health', methods=['GET'])
@optional_auth
@handle_api_errors
def health_check() -> Tuple[Any, int]:
    """
    Perform comprehensive system health check.

    Returns:
        JSON response with system health status
    """
    try:
        health_status = {
            'status': 'healthy',
            'timestamp': datetime.now().isoformat(),
            'checks': {},
            'overall_score': 100
        }

        # System resource checks
        cpu_percent = psutil.cpu_percent(interval=1)
        memory = psutil.virtual_memory()
        disk = psutil.disk_usage('/')

        # CPU check
        health_status['checks']['cpu'] = {
            'status': 'healthy' if cpu_percent < 80 else 'warning' if cpu_percent < 95 else 'critical',
            'usage_percent': cpu_percent,
            'details': f"CPU usage: {cpu_percent}%"
        }

        # Memory check
        memory_percent = memory.percent
        health_status['checks']['memory'] = {
            'status': 'healthy' if memory_percent < 80 else 'warning' if memory_percent < 95 else 'critical',
            'usage_percent': memory_percent,
            'total': format_file_size(memory.total),
            'available': format_file_size(memory.available),
            'details': f"Memory usage: {memory_percent}%"
        }

        # Disk check
        disk_percent = disk.percent
        health_status['checks']['disk'] = {
            'status': 'healthy' if disk_percent < 80 else 'warning' if disk_percent < 95 else 'critical',
            'usage_percent': disk_percent,
            'total': format_file_size(disk.total),
            'free': format_file_size(disk.free),
            'details': f"Disk usage: {disk_percent}%"
        }

        # Database connectivity check
        try:
            # This would test actual database connection
            health_status['checks']['database'] = {
                'status': 'healthy',
                'details': 'Database connection successful'
            }
        except Exception as e:
            health_status['checks']['database'] = {
                'status': 'critical',
                'details': f'Database connection failed: {str(e)}'
            }

        # Network connectivity check
        try:
            response = requests.get('https://httpbin.org/status/200', timeout=5)
            if response.status_code == 200:
                health_status['checks']['network'] = {
                    'status': 'healthy',
                    'details': 'Internet connectivity available'
                }
            else:
                health_status['checks']['network'] = {
                    'status': 'warning',
                    'details': f'Network response: {response.status_code}'
                }
        except Exception as e:
            health_status['checks']['network'] = {
                'status': 'warning',
                'details': f'Network connectivity issues: {str(e)}'
            }

        # Calculate overall health score
        check_statuses = [check['status'] for check in health_status['checks'].values()]
        critical_count = check_statuses.count('critical')
        warning_count = check_statuses.count('warning')

        if critical_count > 0:
            health_status['status'] = 'critical'
            health_status['overall_score'] = max(0, 100 - (critical_count * 30) - (warning_count * 10))
        elif warning_count > 0:
            health_status['status'] = 'warning'
            health_status['overall_score'] = max(50, 100 - (warning_count * 15))

        return create_success_response("Health check completed", 200, health_status)

    except Exception as e:
        logger.error(f"Error during health check: {str(e)}")
        return create_error_response("Health check failed", 500)
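
For reference, the scoring rule applied above can be stated as a small standalone function, which makes it easy to unit test in isolation. This is only a restatement of the logic in health_check(), not an additional API.

def overall_health_score(check_statuses):
    """Mirror of health_check()'s rule: with any critical check, each critical
    subtracts 30 and each warning 10 (floored at 0); with warnings only, each
    subtracts 15 (floored at 50); otherwise the score stays at 100."""
    critical = check_statuses.count('critical')
    warning = check_statuses.count('warning')
    if critical > 0:
        return max(0, 100 - critical * 30 - warning * 10)
    if warning > 0:
        return max(50, 100 - warning * 15)
    return 100


assert overall_health_score(['healthy', 'healthy']) == 100
assert overall_health_score(['warning', 'healthy']) == 85
assert overall_health_score(['critical', 'warning']) == 60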


@diagnostics_bp.route('/diagnostics/system', methods=['GET'])
@require_auth
@handle_api_errors
def get_system_info() -> Tuple[Any, int]:
    """
    Get detailed system information.

    Returns:
        JSON response with system information
    """
    try:
        system_info = {
            'platform': {
                'system': platform.system(),
                'release': platform.release(),
                'version': platform.version(),
                'machine': platform.machine(),
                'processor': platform.processor(),
                'architecture': platform.architecture()
            },
            'python': {
                'version': sys.version,
                'executable': sys.executable,
                'path': sys.path[:5]  # First 5 paths only
            },
            'resources': {
                'cpu': {
                    'count_logical': psutil.cpu_count(logical=True),
                    'count_physical': psutil.cpu_count(logical=False),
                    'frequency': psutil.cpu_freq()._asdict() if psutil.cpu_freq() else None,
                    'usage_percent': psutil.cpu_percent(interval=1),
                    'usage_per_cpu': psutil.cpu_percent(interval=1, percpu=True)
                },
                'memory': {
                    **psutil.virtual_memory()._asdict(),
                    'swap': psutil.swap_memory()._asdict()
                },
                'disk': {
                    'usage': psutil.disk_usage('/')._asdict(),
                    'io_counters': psutil.disk_io_counters()._asdict() if psutil.disk_io_counters() else None
                },
                'network': {
                    'io_counters': psutil.net_io_counters()._asdict(),
                    'connections': len(psutil.net_connections()),
                    # net_if_addrs() maps each interface name to a list of addresses
                    'interfaces': {name: [addr._asdict() for addr in addrs] for name, addrs in psutil.net_if_addrs().items()}
                }
            },
            'process': {
                'pid': os.getpid(),
                'memory_info': psutil.Process().memory_info()._asdict(),
                'cpu_percent': psutil.Process().cpu_percent(),
                'num_threads': psutil.Process().num_threads(),
                'create_time': format_datetime(datetime.fromtimestamp(psutil.Process().create_time())),
                'open_files': len(psutil.Process().open_files())
            },
            'uptime': {
                'boot_time': format_datetime(datetime.fromtimestamp(psutil.boot_time())),
                'uptime_seconds': time.time() - psutil.boot_time()
            }
        }

        return create_success_response("System information retrieved", 200, system_info)

    except Exception as e:
        logger.error(f"Error getting system info: {str(e)}")
        return create_error_response("Failed to get system information", 500)


@diagnostics_bp.route('/diagnostics/performance', methods=['GET'])
@require_auth
@handle_api_errors
@validate_query_params(
    allowed_params=['hours', 'metric'],
    param_types={'hours': int}
)
def get_performance_metrics() -> Tuple[Any, int]:
    """
    Get performance metrics and history.

    Query Parameters:
        - hours: Hours of history to retrieve (default: 24, max: 168)
        - metric: Specific metric to retrieve (optional)

    Returns:
        JSON response with performance metrics
    """
    hours = min(request.args.get('hours', 24, type=int), 168)  # Max 1 week
    metric = request.args.get('metric')

    try:
        # Current performance metrics
        current_metrics = {
            'timestamp': datetime.now().isoformat(),
            'cpu': {
                'usage_percent': psutil.cpu_percent(interval=1),
                'load_average': os.getloadavg() if hasattr(os, 'getloadavg') else None
            },
            'memory': {
                'usage_percent': psutil.virtual_memory().percent,
                'available_gb': psutil.virtual_memory().available / (1024**3)
            },
            'disk': {
                'usage_percent': psutil.disk_usage('/').percent,
                'free_gb': psutil.disk_usage('/').free / (1024**3)
            },
            'network': {
                'bytes_sent': psutil.net_io_counters().bytes_sent,
                'bytes_recv': psutil.net_io_counters().bytes_recv,
                'packets_sent': psutil.net_io_counters().packets_sent,
                'packets_recv': psutil.net_io_counters().packets_recv
            }
        }

        # Historical data
        historical_data = performance_manager.get_performance_history(
            hours=hours,
            metric=metric
        )

        response_data = {
            'current': current_metrics,
            'history': historical_data,
            'summary': {
                'period_hours': hours,
                'data_points': len(historical_data),
                'metric_filter': metric
            }
        }

        return create_success_response("Performance metrics retrieved", 200, response_data)

    except Exception as e:
        logger.error(f"Error getting performance metrics: {str(e)}")
        return create_error_response("Failed to get performance metrics", 500)
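
A possible client call for this endpoint is sketched below. It assumes the blueprint is registered without a URL prefix and that responses follow the shape of the development fallback create_success_response helper; the base URL, token, and the metric name 'cpu' are illustrative assumptions, since the accepted metric names depend on what PerformanceManager records.

import requests

BASE_URL = "http://127.0.0.1:5000"  # assumed host/port
HEADERS = {"Authorization": "Bearer <token>"}  # placeholder for require_auth

# Request the last 6 hours of history for a single (hypothetical) metric.
response = requests.get(
    f"{BASE_URL}/diagnostics/performance",
    params={"hours": 6, "metric": "cpu"},
    headers=HEADERS,
    timeout=30,
)
payload = response.json()
print(payload["data"]["summary"], len(payload["data"]["history"]))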


@diagnostics_bp.route('/diagnostics/errors', methods=['GET'])
@require_auth
@handle_api_errors
@validate_query_params(
    allowed_params=['hours', 'level', 'limit'],
    param_types={'hours': int, 'limit': int}
)
def get_recent_errors() -> Tuple[Any, int]:
    """
    Get recent errors and error statistics.

    Query Parameters:
        - hours: Hours of errors to retrieve (default: 24, max: 168)
        - level: Error level filter (error, warning, critical)
        - limit: Maximum number of errors to return (default: 100, max: 1000)

    Returns:
        JSON response with recent errors
    """
    hours = min(request.args.get('hours', 24, type=int), 168)
    level = request.args.get('level')
    limit = min(request.args.get('limit', 100, type=int), 1000)

    try:
        # Get recent errors
        errors = error_manager.get_recent_errors(
            hours=hours,
            level=level,
            limit=limit
        )

        # Get error statistics
        error_stats = error_manager.get_error_stats()

        response_data = {
            'errors': errors,
            'statistics': error_stats,
            'summary': {
                'period_hours': hours,
                'level_filter': level,
                'total_returned': len(errors),
                'limit': limit
            }
        }

        return create_success_response("Recent errors retrieved", 200, response_data)

    except Exception as e:
        logger.error(f"Error getting recent errors: {str(e)}")
        return create_error_response("Failed to get recent errors", 500)
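
An illustrative query against the error endpoint, again assuming the blueprint is mounted at the application root and that the response follows the fallback helper's shape; the host, token, and filter values are placeholders.

import requests

BASE_URL = "http://127.0.0.1:5000"  # assumed host/port
HEADERS = {"Authorization": "Bearer <token>"}  # placeholder for require_auth

# Fetch only critical errors from the last 48 hours, capped at 20 entries.
response = requests.get(
    f"{BASE_URL}/diagnostics/errors",
    params={"hours": 48, "level": "critical", "limit": 20},
    headers=HEADERS,
    timeout=30,
)
for err in response.json().get("data", {}).get("errors", []):
    print(err)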


@diagnostics_bp.route('/diagnostics/errors', methods=['DELETE'])
@require_auth
@handle_api_errors
def clear_errors() -> Tuple[Any, int]:
    """
    Clear error log.

    Returns:
        JSON response with clear operation result
    """
    try:
        success = error_manager.clear_errors()

        if success:
            logger.info("Error log cleared")
            return create_success_response("Error log cleared successfully")
        else:
            return create_error_response("Failed to clear error log", 500)

    except Exception as e:
        logger.error(f"Error clearing error log: {str(e)}")
        return create_error_response("Failed to clear error log", 500)


@diagnostics_bp.route('/diagnostics/network', methods=['GET'])
@require_auth
@handle_api_errors
def test_network_connectivity() -> Tuple[Any, int]:
    """
    Test network connectivity to various services.

    Returns:
        JSON response with network connectivity results
    """
    try:
        test_urls = [
            'https://google.com',
            'https://github.com',
            'https://pypi.org',
            'https://httpbin.org/status/200'
        ]

        results = []

        for url in test_urls:
            try:
                start_time = time.time()
                response = requests.get(url, timeout=10)
                response_time = time.time() - start_time

                results.append({
                    'url': url,
                    'status': 'success',
                    'status_code': response.status_code,
                    'response_time_ms': round(response_time * 1000, 2),
                    'accessible': response.status_code == 200
                })

            except requests.exceptions.Timeout:
                results.append({
                    'url': url,
                    'status': 'timeout',
                    'error': 'Request timed out',
                    'accessible': False
                })
            except Exception as e:
                results.append({
                    'url': url,
                    'status': 'error',
                    'error': str(e),
                    'accessible': False
                })

        # Network interface information
        interfaces = {}
        for interface, addresses in psutil.net_if_addrs().items():
            interfaces[interface] = [addr._asdict() for addr in addresses]

        # Network I/O statistics
        net_io = psutil.net_io_counters()._asdict()

        response_data = {
            'connectivity_tests': results,
            'interfaces': interfaces,
            'io_statistics': net_io,
            'summary': {
                'total_tests': len(results),
                'successful': len([r for r in results if r['accessible']]),
                'failed': len([r for r in results if not r['accessible']])
            }
        }

        return create_success_response("Network connectivity test completed", 200, response_data)

    except Exception as e:
        logger.error(f"Error testing network connectivity: {str(e)}")
        return create_error_response("Failed to test network connectivity", 500)


@diagnostics_bp.route('/diagnostics/logs', methods=['GET'])
@require_auth
@handle_api_errors
@validate_query_params(
    allowed_params=['lines', 'level', 'component'],
    param_types={'lines': int}
)
def get_application_logs() -> Tuple[Any, int]:
    """
    Get recent application logs.

    Query Parameters:
        - lines: Number of log lines to retrieve (default: 100, max: 1000)
        - level: Log level filter (debug, info, warning, error, critical)
        - component: Component filter (optional)

    Returns:
        JSON response with application logs
    """
    lines = min(request.args.get('lines', 100, type=int), 1000)
    level = request.args.get('level')
    component = request.args.get('component')

    try:
        # This would read from actual log files
        log_entries = []

        # For demonstration, return sample log structure
        response_data = {
            'logs': log_entries,
            'summary': {
                'lines_requested': lines,
                'level_filter': level,
                'component_filter': component,
                'total_returned': len(log_entries)
            }
        }

        return create_success_response("Application logs retrieved", 200, response_data)

    except Exception as e:
        logger.error(f"Error getting application logs: {str(e)}")
        return create_error_response("Failed to get application logs", 500)


@diagnostics_bp.route('/diagnostics/report', methods=['POST'])
@require_auth
@handle_api_errors
def generate_diagnostic_report() -> Tuple[Any, int]:
    """
    Generate comprehensive diagnostic report.

    Returns:
        JSON response with diagnostic report
    """
    try:
        report = {
            'generated_at': datetime.now().isoformat(),
            'report_id': f"diag_{int(time.time())}",
            'sections': {}
        }

        # System information
        report['sections']['system'] = {
            'platform': platform.platform(),
            'python_version': sys.version,
            'cpu_count': psutil.cpu_count(),
            'memory_total_gb': round(psutil.virtual_memory().total / (1024**3), 2),
            'disk_total_gb': round(psutil.disk_usage('/').total / (1024**3), 2)
        }

        # Current resource usage
        report['sections']['resources'] = {
            'cpu_percent': psutil.cpu_percent(interval=1),
            'memory_percent': psutil.virtual_memory().percent,
            'disk_percent': psutil.disk_usage('/').percent,
            'load_average': os.getloadavg() if hasattr(os, 'getloadavg') else None
        }

        # Error summary
        error_stats = error_manager.get_error_stats()
        report['sections']['errors'] = error_stats

        # Performance summary
        performance_metrics = performance_manager.get_performance_metrics()
        report['sections']['performance'] = performance_metrics

        # Network status
        report['sections']['network'] = {
            'interfaces_count': len(psutil.net_if_addrs()),
            'connections_count': len(psutil.net_connections()),
            'bytes_sent': psutil.net_io_counters().bytes_sent,
            'bytes_recv': psutil.net_io_counters().bytes_recv
        }

        logger.info(f"Diagnostic report generated: {report['report_id']}")
        return create_success_response("Diagnostic report generated", 200, report)

    except Exception as e:
        logger.error(f"Error generating diagnostic report: {str(e)}")
        return create_error_response("Failed to generate diagnostic report", 500)


@diagnostics_bp.route('/diagnostics/ping', methods=['GET'])
@optional_auth
@handle_api_errors
def ping() -> Tuple[Any, int]:
    """
    Simple ping endpoint for health monitoring.

    Returns:
        JSON response with ping result
    """
    return create_success_response("pong", 200, {
        'timestamp': datetime.now().isoformat(),
        'status': 'alive'
    })
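
Because the ping route is decorated with optional_auth, it can typically be called without credentials, which makes it a convenient liveness probe. The sketch below polls it until the server answers, assuming the blueprint is registered without a URL prefix; the host and port are placeholders for the actual deployment.

import time

import requests

BASE_URL = "http://127.0.0.1:5000"  # assumed host/port


def wait_until_alive(retries=10, delay=2.0):
    """Poll /diagnostics/ping until it answers 200 or the retries run out."""
    for _ in range(retries):
        try:
            if requests.get(f"{BASE_URL}/diagnostics/ping", timeout=5).status_code == 200:
                return True
        except requests.RequestException:
            pass
        time.sleep(delay)
    return False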
@ -1,640 +0,0 @@
|
||||
"""
|
||||
Download Management API Endpoints
|
||||
|
||||
This module provides REST API endpoints for download operations,
|
||||
including queue management, progress tracking, and download history.
|
||||
"""
|
||||
|
||||
from flask import Blueprint, request
|
||||
from typing import Dict, List, Any, Optional
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
|
||||
from ...shared.auth_decorators import require_auth, optional_auth
|
||||
from ...shared.error_handlers import handle_api_errors, APIException, NotFoundError, ValidationError
|
||||
from ...shared.validators import validate_json_input, validate_id_parameter, validate_pagination_params
|
||||
from ...shared.response_helpers import (
|
||||
create_success_response, create_paginated_response, format_download_response,
|
||||
extract_pagination_params, create_batch_response
|
||||
)
|
||||
|
||||
# Import download components (these imports would need to be adjusted based on actual structure)
|
||||
try:
|
||||
from download_manager import download_queue, download_manager, DownloadItem
|
||||
from database_manager import episode_repository, anime_repository
|
||||
except ImportError:
|
||||
# Fallback for development/testing
|
||||
download_queue = None
|
||||
download_manager = None
|
||||
DownloadItem = None
|
||||
episode_repository = None
|
||||
anime_repository = None
|
||||
|
||||
|
||||
# Blueprint for download management endpoints
|
||||
downloads_bp = Blueprint('downloads', __name__, url_prefix='/api/v1/downloads')
|
||||
|
||||
|
||||
@downloads_bp.route('', methods=['GET'])
|
||||
@handle_api_errors
|
||||
@validate_pagination_params
|
||||
@optional_auth
|
||||
def list_downloads() -> Dict[str, Any]:
|
||||
"""
|
||||
Get all downloads with optional filtering and pagination.
|
||||
|
||||
Query Parameters:
|
||||
- status: Filter by download status (pending, downloading, completed, failed, paused)
|
||||
- anime_id: Filter by anime ID
|
||||
- episode_id: Filter by episode ID
|
||||
- active_only: Show only active downloads (true/false)
|
||||
- page: Page number (default: 1)
|
||||
- per_page: Items per page (default: 50, max: 1000)
|
||||
|
||||
Returns:
|
||||
Paginated list of downloads
|
||||
"""
|
||||
if not download_manager:
|
||||
raise APIException("Download manager not available", 503)
|
||||
|
||||
# Extract filters
|
||||
status_filter = request.args.get('status')
|
||||
anime_id = request.args.get('anime_id')
|
||||
episode_id = request.args.get('episode_id')
|
||||
active_only = request.args.get('active_only', 'false').lower() == 'true'
|
||||
|
||||
# Validate filters
|
||||
valid_statuses = ['pending', 'downloading', 'completed', 'failed', 'paused', 'cancelled']
|
||||
if status_filter and status_filter not in valid_statuses:
|
||||
raise ValidationError(f"Status must be one of: {', '.join(valid_statuses)}")
|
||||
|
||||
if anime_id:
|
||||
try:
|
||||
anime_id = int(anime_id)
|
||||
except ValueError:
|
||||
raise ValidationError("anime_id must be a valid integer")
|
||||
|
||||
if episode_id:
|
||||
try:
|
||||
episode_id = int(episode_id)
|
||||
except ValueError:
|
||||
raise ValidationError("episode_id must be a valid integer")
|
||||
|
||||
# Get pagination parameters
|
||||
page, per_page = extract_pagination_params()
|
||||
|
||||
# Get downloads with filters
|
||||
downloads = download_manager.get_downloads(
|
||||
status_filter=status_filter,
|
||||
anime_id=anime_id,
|
||||
episode_id=episode_id,
|
||||
active_only=active_only
|
||||
)
|
||||
|
||||
# Format download data
|
||||
formatted_downloads = [format_download_response(download.__dict__) for download in downloads]
|
||||
|
||||
# Apply pagination
|
||||
total = len(formatted_downloads)
|
||||
start_idx = (page - 1) * per_page
|
||||
end_idx = start_idx + per_page
|
||||
paginated_downloads = formatted_downloads[start_idx:end_idx]
|
||||
|
||||
return create_paginated_response(
|
||||
data=paginated_downloads,
|
||||
page=page,
|
||||
per_page=per_page,
|
||||
total=total,
|
||||
endpoint='downloads.list_downloads'
|
||||
)
|
||||
|
||||
|
||||
@downloads_bp.route('/<int:download_id>', methods=['GET'])
|
||||
@handle_api_errors
|
||||
@validate_id_parameter('download_id')
|
||||
@optional_auth
|
||||
def get_download(download_id: int) -> Dict[str, Any]:
|
||||
"""
|
||||
Get specific download by ID.
|
||||
|
||||
Args:
|
||||
download_id: Unique identifier for the download
|
||||
|
||||
Returns:
|
||||
Download details with progress information
|
||||
"""
|
||||
if not download_manager:
|
||||
raise APIException("Download manager not available", 503)
|
||||
|
||||
download = download_manager.get_download_by_id(download_id)
|
||||
if not download:
|
||||
raise NotFoundError("Download not found")
|
||||
|
||||
# Format download data
|
||||
download_data = format_download_response(download.__dict__)
|
||||
|
||||
# Add detailed progress information
|
||||
progress_info = download_manager.get_download_progress(download_id)
|
||||
if progress_info:
|
||||
download_data['progress_details'] = progress_info
|
||||
|
||||
return create_success_response(download_data)
|
||||
|
||||
|
||||
@downloads_bp.route('', methods=['POST'])
|
||||
@handle_api_errors
|
||||
@validate_json_input(
|
||||
required_fields=['episode_id'],
|
||||
optional_fields=['priority', 'quality', 'subtitle_language', 'download_path'],
|
||||
field_types={
|
||||
'episode_id': int,
|
||||
'priority': int,
|
||||
'quality': str,
|
||||
'subtitle_language': str,
|
||||
'download_path': str
|
||||
}
|
||||
)
|
||||
@require_auth
|
||||
def create_download() -> Dict[str, Any]:
|
||||
"""
|
||||
Create a new download request.
|
||||
|
||||
Required Fields:
|
||||
- episode_id: ID of the episode to download
|
||||
|
||||
Optional Fields:
|
||||
- priority: Download priority (1-10, higher is more priority)
|
||||
- quality: Preferred quality (720p, 1080p, etc.)
|
||||
- subtitle_language: Preferred subtitle language
|
||||
- download_path: Custom download path
|
||||
|
||||
Returns:
|
||||
Created download details
|
||||
"""
|
||||
if not download_manager or not episode_repository:
|
||||
raise APIException("Download manager not available", 503)
|
||||
|
||||
data = request.get_json()
|
||||
episode_id = data['episode_id']
|
||||
|
||||
# Validate episode exists
|
||||
episode = episode_repository.get_episode_by_id(episode_id)
|
||||
if not episode:
|
||||
raise ValidationError("Episode not found")
|
||||
|
||||
# Check if episode is already downloaded
|
||||
if episode.status == 'downloaded':
|
||||
raise ValidationError("Episode is already downloaded")
|
||||
|
||||
# Check if download already exists for this episode
|
||||
existing_download = download_manager.get_download_by_episode(episode_id)
|
||||
if existing_download and existing_download.status in ['pending', 'downloading']:
|
||||
raise ValidationError("Download already in progress for this episode")
|
||||
|
||||
# Validate priority
|
||||
priority = data.get('priority', 5)
|
||||
if not 1 <= priority <= 10:
|
||||
raise ValidationError("Priority must be between 1 and 10")
|
||||
|
||||
# Create download item
|
||||
try:
|
||||
download_item = DownloadItem(
|
||||
download_id=str(uuid.uuid4()),
|
||||
episode_id=episode_id,
|
||||
anime_id=episode.anime_id,
|
||||
priority=priority,
|
||||
quality=data.get('quality'),
|
||||
subtitle_language=data.get('subtitle_language'),
|
||||
download_path=data.get('download_path'),
|
||||
status='pending',
|
||||
created_at=datetime.utcnow()
|
||||
)
|
||||
except Exception as e:
|
||||
raise ValidationError(f"Invalid download data: {str(e)}")
|
||||
|
||||
# Add to download queue
|
||||
success = download_queue.add_download(download_item)
|
||||
if not success:
|
||||
raise APIException("Failed to create download", 500)
|
||||
|
||||
# Return created download
|
||||
download_data = format_download_response(download_item.__dict__)
|
||||
return create_success_response(
|
||||
data=download_data,
|
||||
message="Download queued successfully",
|
||||
status_code=201
|
||||
)
|
||||
|
||||
|
||||
@downloads_bp.route('/<int:download_id>/pause', methods=['POST'])
|
||||
@handle_api_errors
|
||||
@validate_id_parameter('download_id')
|
||||
@require_auth
|
||||
def pause_download(download_id: int) -> Dict[str, Any]:
|
||||
"""
|
||||
Pause a download.
|
||||
|
||||
Args:
|
||||
download_id: Unique identifier for the download
|
||||
|
||||
Returns:
|
||||
Updated download status
|
||||
"""
|
||||
if not download_manager:
|
||||
raise APIException("Download manager not available", 503)
|
||||
|
||||
download = download_manager.get_download_by_id(download_id)
|
||||
if not download:
|
||||
raise NotFoundError("Download not found")
|
||||
|
||||
if download.status not in ['pending', 'downloading']:
|
||||
raise ValidationError(f"Cannot pause download with status '{download.status}'")
|
||||
|
||||
success = download_manager.pause_download(download_id)
|
||||
if not success:
|
||||
raise APIException("Failed to pause download", 500)
|
||||
|
||||
# Get updated download
|
||||
updated_download = download_manager.get_download_by_id(download_id)
|
||||
download_data = format_download_response(updated_download.__dict__)
|
||||
|
||||
return create_success_response(
|
||||
data=download_data,
|
||||
message="Download paused successfully"
|
||||
)
|
||||
|
||||
|
||||
@downloads_bp.route('/<int:download_id>/resume', methods=['POST'])
|
||||
@handle_api_errors
|
||||
@validate_id_parameter('download_id')
|
||||
@require_auth
|
||||
def resume_download(download_id: int) -> Dict[str, Any]:
|
||||
"""
|
||||
Resume a paused download.
|
||||
|
||||
Args:
|
||||
download_id: Unique identifier for the download
|
||||
|
||||
Returns:
|
||||
Updated download status
|
||||
"""
|
||||
if not download_manager:
|
||||
raise APIException("Download manager not available", 503)
|
||||
|
||||
download = download_manager.get_download_by_id(download_id)
|
||||
if not download:
|
||||
raise NotFoundError("Download not found")
|
||||
|
||||
if download.status != 'paused':
|
||||
raise ValidationError(f"Cannot resume download with status '{download.status}'")
|
||||
|
||||
success = download_manager.resume_download(download_id)
|
||||
if not success:
|
||||
raise APIException("Failed to resume download", 500)
|
||||
|
||||
# Get updated download
|
||||
updated_download = download_manager.get_download_by_id(download_id)
|
||||
download_data = format_download_response(updated_download.__dict__)
|
||||
|
||||
return create_success_response(
|
||||
data=download_data,
|
||||
message="Download resumed successfully"
|
||||
)
|
||||
|
||||
|
||||
@downloads_bp.route('/<int:download_id>/cancel', methods=['POST'])
|
||||
@handle_api_errors
|
||||
@validate_id_parameter('download_id')
|
||||
@require_auth
|
||||
def cancel_download(download_id: int) -> Dict[str, Any]:
|
||||
"""
|
||||
Cancel a download.
|
||||
|
||||
Args:
|
||||
download_id: Unique identifier for the download
|
||||
|
||||
Query Parameters:
|
||||
- delete_partial: Set to 'true' to delete partially downloaded files
|
||||
|
||||
Returns:
|
||||
Cancellation confirmation
|
||||
"""
|
||||
if not download_manager:
|
||||
raise APIException("Download manager not available", 503)
|
||||
|
||||
download = download_manager.get_download_by_id(download_id)
|
||||
if not download:
|
||||
raise NotFoundError("Download not found")
|
||||
|
||||
if download.status in ['completed', 'cancelled']:
|
||||
raise ValidationError(f"Cannot cancel download with status '{download.status}'")
|
||||
|
||||
delete_partial = request.args.get('delete_partial', 'false').lower() == 'true'
|
||||
|
||||
success = download_manager.cancel_download(download_id, delete_partial=delete_partial)
|
||||
if not success:
|
||||
raise APIException("Failed to cancel download", 500)
|
||||
|
||||
message = "Download cancelled successfully"
|
||||
if delete_partial:
|
||||
message += " (partial files deleted)"
|
||||
|
||||
return create_success_response(message=message)
|
||||
|
||||
|
||||
@downloads_bp.route('/<int:download_id>/retry', methods=['POST'])
|
||||
@handle_api_errors
|
||||
@validate_id_parameter('download_id')
|
||||
@require_auth
|
||||
def retry_download(download_id: int) -> Dict[str, Any]:
|
||||
"""
|
||||
Retry a failed download.
|
||||
|
||||
Args:
|
||||
download_id: Unique identifier for the download
|
||||
|
||||
Returns:
|
||||
Updated download status
|
||||
"""
|
||||
if not download_manager:
|
||||
raise APIException("Download manager not available", 503)
|
||||
|
||||
download = download_manager.get_download_by_id(download_id)
|
||||
if not download:
|
||||
raise NotFoundError("Download not found")
|
||||
|
||||
if download.status != 'failed':
|
||||
raise ValidationError(f"Cannot retry download with status '{download.status}'")
|
||||
|
||||
success = download_manager.retry_download(download_id)
|
||||
if not success:
|
||||
raise APIException("Failed to retry download", 500)
|
||||
|
||||
# Get updated download
|
||||
updated_download = download_manager.get_download_by_id(download_id)
|
||||
download_data = format_download_response(updated_download.__dict__)
|
||||
|
||||
return create_success_response(
|
||||
data=download_data,
|
||||
message="Download queued for retry"
|
||||
)
|
||||
|
||||
|
||||
@downloads_bp.route('/bulk', methods=['POST'])
|
||||
@handle_api_errors
|
||||
@validate_json_input(
|
||||
required_fields=['action', 'download_ids'],
|
||||
optional_fields=['delete_partial'],
|
||||
field_types={
|
||||
'action': str,
|
||||
'download_ids': list,
|
||||
'delete_partial': bool
|
||||
}
|
||||
)
|
||||
@require_auth
|
||||
def bulk_download_operation() -> Dict[str, Any]:
|
||||
"""
|
||||
Perform bulk operations on multiple downloads.
|
||||
|
||||
Required Fields:
|
||||
- action: Operation to perform (pause, resume, cancel, retry)
|
||||
- download_ids: List of download IDs to operate on
|
||||
|
||||
Optional Fields:
|
||||
- delete_partial: For cancel action, whether to delete partial files
|
||||
|
||||
Returns:
|
||||
Results of the bulk operation
|
||||
"""
|
||||
if not download_manager:
|
||||
raise APIException("Download manager not available", 503)
|
||||
|
||||
data = request.get_json()
|
||||
action = data['action']
|
||||
download_ids = data['download_ids']
|
||||
delete_partial = data.get('delete_partial', False)
|
||||
|
||||
# Validate action
|
||||
valid_actions = ['pause', 'resume', 'cancel', 'retry']
|
||||
if action not in valid_actions:
|
||||
raise ValidationError(f"Invalid action. Must be one of: {', '.join(valid_actions)}")
|
||||
|
||||
# Validate download_ids
|
||||
if not isinstance(download_ids, list) or not download_ids:
|
||||
raise ValidationError("download_ids must be a non-empty list")
|
||||
|
||||
if len(download_ids) > 50:
|
||||
raise ValidationError("Cannot operate on more than 50 downloads at once")
|
||||
|
||||
# Validate download IDs are integers
|
||||
try:
|
||||
download_ids = [int(did) for did in download_ids]
|
||||
except ValueError:
|
||||
raise ValidationError("All download_ids must be valid integers")
|
||||
|
||||
# Perform bulk operation
|
||||
successful_items = []
|
||||
failed_items = []
|
||||
|
||||
for download_id in download_ids:
|
||||
try:
|
||||
if action == 'pause':
|
||||
success = download_manager.pause_download(download_id)
|
||||
elif action == 'resume':
|
||||
success = download_manager.resume_download(download_id)
|
||||
elif action == 'cancel':
|
||||
success = download_manager.cancel_download(download_id, delete_partial=delete_partial)
|
||||
elif action == 'retry':
|
||||
success = download_manager.retry_download(download_id)
|
||||
|
||||
if success:
|
||||
successful_items.append({'download_id': download_id, 'action': action})
|
||||
else:
|
||||
failed_items.append({'download_id': download_id, 'error': 'Operation failed'})
|
||||
|
||||
except Exception as e:
|
||||
failed_items.append({'download_id': download_id, 'error': str(e)})
|
||||
|
||||
return create_batch_response(
|
||||
successful_items=successful_items,
|
||||
failed_items=failed_items,
|
||||
message=f"Bulk {action} operation completed"
|
||||
)
|
||||
|
||||
|
||||
@downloads_bp.route('/queue', methods=['GET'])
|
||||
@handle_api_errors
|
||||
@optional_auth
|
||||
def get_download_queue() -> Dict[str, Any]:
|
||||
"""
|
||||
Get current download queue status.
|
||||
|
||||
Returns:
|
||||
Download queue information including active downloads and queue statistics
|
||||
"""
|
||||
if not download_queue:
|
||||
raise APIException("Download queue not available", 503)
|
||||
|
||||
queue_info = download_queue.get_queue_status()
|
||||
|
||||
return create_success_response(
|
||||
data={
|
||||
'queue_size': queue_info.get('queue_size', 0),
|
||||
'active_downloads': queue_info.get('active_downloads', 0),
|
||||
'max_concurrent': queue_info.get('max_concurrent', 0),
|
||||
'paused_downloads': queue_info.get('paused_downloads', 0),
|
||||
'failed_downloads': queue_info.get('failed_downloads', 0),
|
||||
'completed_today': queue_info.get('completed_today', 0),
|
||||
'queue_items': queue_info.get('queue_items', [])
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
@downloads_bp.route('/queue/pause', methods=['POST'])
|
||||
@handle_api_errors
|
||||
@require_auth
|
||||
def pause_download_queue() -> Dict[str, Any]:
|
||||
"""
|
||||
Pause the entire download queue.
|
||||
|
||||
Returns:
|
||||
Queue pause confirmation
|
||||
"""
|
||||
if not download_queue:
|
||||
raise APIException("Download queue not available", 503)
|
||||
|
||||
success = download_queue.pause_queue()
|
||||
if not success:
|
||||
raise APIException("Failed to pause download queue", 500)
|
||||
|
||||
return create_success_response(message="Download queue paused")
|
||||
|
||||
|
||||
@downloads_bp.route('/queue/resume', methods=['POST'])
|
||||
@handle_api_errors
|
||||
@require_auth
|
||||
def resume_download_queue() -> Dict[str, Any]:
|
||||
"""
|
||||
Resume the download queue.
|
||||
|
||||
Returns:
|
||||
Queue resume confirmation
|
||||
"""
|
||||
if not download_queue:
|
||||
raise APIException("Download queue not available", 503)
|
||||
|
||||
success = download_queue.resume_queue()
|
||||
if not success:
|
||||
raise APIException("Failed to resume download queue", 500)
|
||||
|
||||
return create_success_response(message="Download queue resumed")
|
||||
|
||||
|
||||
@downloads_bp.route('/queue/clear', methods=['POST'])
|
||||
@handle_api_errors
|
||||
@require_auth
|
||||
def clear_download_queue() -> Dict[str, Any]:
|
||||
"""
|
||||
Clear completed and failed downloads from the queue.
|
||||
|
||||
Query Parameters:
|
||||
- include_failed: Set to 'true' to also clear failed downloads
|
||||
|
||||
Returns:
|
||||
Queue clear confirmation
|
||||
"""
|
||||
if not download_queue:
|
||||
raise APIException("Download queue not available", 503)
|
||||
|
||||
include_failed = request.args.get('include_failed', 'false').lower() == 'true'
|
||||
|
||||
cleared_count = download_queue.clear_completed(include_failed=include_failed)
|
||||
|
||||
message = f"Cleared {cleared_count} completed downloads"
|
||||
if include_failed:
|
||||
message += " and failed downloads"
|
||||
|
||||
return create_success_response(
|
||||
data={'cleared_count': cleared_count},
|
||||
message=message
|
||||
)
|
||||
|
||||
|
||||
@downloads_bp.route('/history', methods=['GET'])
|
||||
@handle_api_errors
|
||||
@validate_pagination_params
|
||||
@optional_auth
|
||||
def get_download_history() -> Dict[str, Any]:
|
||||
"""
|
||||
Get download history with optional filtering.
|
||||
|
||||
Query Parameters:
|
||||
- status: Filter by status (completed, failed)
|
||||
- anime_id: Filter by anime ID
|
||||
- date_from: Filter from date (ISO format)
|
||||
- date_to: Filter to date (ISO format)
|
||||
- page: Page number (default: 1)
|
||||
- per_page: Items per page (default: 50, max: 1000)
|
||||
|
||||
Returns:
|
||||
Paginated download history
|
||||
"""
|
||||
if not download_manager:
|
||||
raise APIException("Download manager not available", 503)
|
||||
|
||||
# Extract filters
|
||||
status_filter = request.args.get('status')
|
||||
anime_id = request.args.get('anime_id')
|
||||
date_from = request.args.get('date_from')
|
||||
date_to = request.args.get('date_to')
|
||||
|
||||
# Validate filters
|
||||
if status_filter and status_filter not in ['completed', 'failed']:
|
||||
raise ValidationError("Status filter must be 'completed' or 'failed'")
|
||||
|
||||
if anime_id:
|
||||
try:
|
||||
anime_id = int(anime_id)
|
||||
except ValueError:
|
||||
raise ValidationError("anime_id must be a valid integer")
|
||||
|
||||
# Validate dates
|
||||
if date_from:
|
||||
try:
|
||||
datetime.fromisoformat(date_from.replace('Z', '+00:00'))
|
||||
except ValueError:
|
||||
raise ValidationError("date_from must be in ISO format")
|
||||
|
||||
if date_to:
|
||||
try:
|
||||
datetime.fromisoformat(date_to.replace('Z', '+00:00'))
|
||||
except ValueError:
|
||||
raise ValidationError("date_to must be in ISO format")
|
||||
|
||||
# Get pagination parameters
|
||||
page, per_page = extract_pagination_params()
|
||||
|
||||
# Get download history
|
||||
history = download_manager.get_download_history(
|
||||
status_filter=status_filter,
|
||||
anime_id=anime_id,
|
||||
date_from=date_from,
|
||||
date_to=date_to
|
||||
)
|
||||
|
||||
# Format history data
|
||||
formatted_history = [format_download_response(download.__dict__) for download in history]
|
||||
|
||||
# Apply pagination
|
||||
total = len(formatted_history)
|
||||
start_idx = (page - 1) * per_page
|
||||
end_idx = start_idx + per_page
|
||||
paginated_history = formatted_history[start_idx:end_idx]
|
||||
|
||||
return create_paginated_response(
|
||||
data=paginated_history,
|
||||
page=page,
|
||||
per_page=per_page,
|
||||
total=total,
|
||||
endpoint='downloads.get_download_history'
|
||||
)
|
||||
@ -1,584 +0,0 @@
|
||||
"""
|
||||
Episode Management API Endpoints
|
||||
|
||||
This module provides REST API endpoints for episode CRUD operations,
|
||||
including episode status management and metadata operations.
|
||||
"""
|
||||
|
||||
from flask import Blueprint, request
|
||||
from typing import Dict, List, Any, Optional
|
||||
import uuid
|
||||
|
||||
from ...shared.auth_decorators import require_auth, optional_auth
|
||||
from ...shared.error_handlers import handle_api_errors, APIException, NotFoundError, ValidationError
|
||||
from ...shared.validators import validate_json_input, validate_id_parameter, validate_pagination_params
|
||||
from ...shared.response_helpers import (
|
||||
create_success_response, create_paginated_response, format_episode_response,
|
||||
extract_pagination_params, create_batch_response
|
||||
)
|
||||
|
||||
# Import database components (these imports would need to be adjusted based on actual structure)
|
||||
try:
|
||||
from database_manager import episode_repository, anime_repository, EpisodeMetadata
|
||||
except ImportError:
|
||||
# Fallback for development/testing
|
||||
episode_repository = None
|
||||
anime_repository = None
|
||||
EpisodeMetadata = None
|
||||
|
||||
|
||||
# Blueprint for episode management endpoints
|
||||
episodes_bp = Blueprint('episodes', __name__, url_prefix='/api/v1/episodes')
|
||||
|
||||
|
||||
@episodes_bp.route('', methods=['GET'])
|
||||
@handle_api_errors
|
||||
@validate_pagination_params
|
||||
@optional_auth
|
||||
def list_episodes() -> Dict[str, Any]:
|
||||
"""
|
||||
Get all episodes with optional filtering and pagination.
|
||||
|
||||
Query Parameters:
|
||||
- anime_id: Filter by anime ID
|
||||
- status: Filter by episode status
|
||||
- downloaded: Filter by download status (true/false)
|
||||
- episode_number: Filter by episode number
|
||||
- search: Search in episode title
|
||||
- page: Page number (default: 1)
|
||||
- per_page: Items per page (default: 50, max: 1000)
|
||||
|
||||
Returns:
|
||||
Paginated list of episodes
|
||||
"""
|
||||
if not episode_repository:
|
||||
raise APIException("Episode repository not available", 503)
|
||||
|
||||
# Extract filters
|
||||
anime_id = request.args.get('anime_id')
|
||||
status_filter = request.args.get('status')
|
||||
downloaded_filter = request.args.get('downloaded')
|
||||
episode_number = request.args.get('episode_number')
|
||||
search_term = request.args.get('search', '').strip()
|
||||
|
||||
# Validate filters
|
||||
if anime_id:
|
||||
try:
|
||||
anime_id = int(anime_id)
|
||||
except ValueError:
|
||||
raise ValidationError("anime_id must be a valid integer")
|
||||
|
||||
if downloaded_filter and downloaded_filter.lower() not in ['true', 'false']:
|
||||
raise ValidationError("downloaded filter must be 'true' or 'false'")
|
||||
|
||||
if episode_number:
|
||||
try:
|
||||
episode_number = int(episode_number)
|
||||
if episode_number < 1:
|
||||
raise ValidationError("episode_number must be positive")
|
||||
except ValueError:
|
||||
raise ValidationError("episode_number must be a valid integer")
|
||||
|
||||
# Get pagination parameters
|
||||
page, per_page = extract_pagination_params()
|
||||
|
||||
# Get episodes with filters
|
||||
episodes = episode_repository.get_all_episodes(
|
||||
anime_id=anime_id,
|
||||
status_filter=status_filter,
|
||||
downloaded_filter=downloaded_filter.lower() == 'true' if downloaded_filter else None,
|
||||
episode_number=episode_number,
|
||||
search_term=search_term
|
||||
)
|
||||
|
||||
# Format episode data
|
||||
formatted_episodes = [format_episode_response(episode.__dict__) for episode in episodes]
|
||||
|
||||
# Apply pagination
|
||||
total = len(formatted_episodes)
|
||||
start_idx = (page - 1) * per_page
|
||||
end_idx = start_idx + per_page
|
||||
paginated_episodes = formatted_episodes[start_idx:end_idx]
|
||||
|
||||
return create_paginated_response(
|
||||
data=paginated_episodes,
|
||||
page=page,
|
||||
per_page=per_page,
|
||||
total=total,
|
||||
endpoint='episodes.list_episodes'
|
||||
)
|
||||
|
||||
|
||||
@episodes_bp.route('/<int:episode_id>', methods=['GET'])
|
||||
@handle_api_errors
|
||||
@validate_id_parameter('episode_id')
|
||||
@optional_auth
|
||||
def get_episode(episode_id: int) -> Dict[str, Any]:
|
||||
"""
|
||||
Get specific episode by ID.
|
||||
|
||||
Args:
|
||||
episode_id: Unique identifier for the episode
|
||||
|
||||
Returns:
|
||||
Episode details with download information
|
||||
"""
|
||||
if not episode_repository:
|
||||
raise APIException("Episode repository not available", 503)
|
||||
|
||||
episode = episode_repository.get_episode_by_id(episode_id)
|
||||
if not episode:
|
||||
raise NotFoundError("Episode not found")
|
||||
|
||||
# Format episode data
|
||||
episode_data = format_episode_response(episode.__dict__)
|
||||
|
||||
# Add download information if available
|
||||
download_info = episode_repository.get_download_info(episode_id)
|
||||
if download_info:
|
||||
episode_data['download_info'] = download_info
|
||||
|
||||
return create_success_response(episode_data)
|
||||
|
||||
|
||||
@episodes_bp.route('', methods=['POST'])
|
||||
@handle_api_errors
|
||||
@validate_json_input(
|
||||
required_fields=['anime_id', 'episode_number', 'title', 'url'],
|
||||
optional_fields=['description', 'status', 'duration', 'air_date', 'custom_metadata'],
|
||||
field_types={
|
||||
'anime_id': int,
|
||||
'episode_number': int,
|
||||
'title': str,
|
||||
'url': str,
|
||||
'description': str,
|
||||
'status': str,
|
||||
'duration': int,
|
||||
'air_date': str,
|
||||
'custom_metadata': dict
|
||||
}
|
||||
)
|
||||
@require_auth
|
||||
def create_episode() -> Dict[str, Any]:
|
||||
"""
|
||||
Create a new episode record.
|
||||
|
||||
Required Fields:
|
||||
- anime_id: ID of the anime this episode belongs to
|
||||
- episode_number: Episode number
|
||||
- title: Episode title
|
||||
- url: Episode URL
|
||||
|
||||
Optional Fields:
|
||||
- description: Episode description
|
||||
- status: Episode status (available, unavailable, coming_soon)
|
||||
- duration: Episode duration in minutes
|
||||
- air_date: Air date in ISO format
|
||||
- custom_metadata: Additional metadata as key-value pairs
|
||||
|
||||
Returns:
|
||||
Created episode details
|
||||
"""
|
||||
if not episode_repository or not anime_repository:
|
||||
raise APIException("Episode repository not available", 503)
|
||||
|
||||
data = request.get_json()
|
||||
|
||||
# Validate anime exists
|
||||
anime = anime_repository.get_anime_by_id(data['anime_id'])
|
||||
if not anime:
|
||||
raise ValidationError("Anime not found")
|
||||
|
||||
# Validate status if provided
|
||||
valid_statuses = ['available', 'unavailable', 'coming_soon', 'downloaded']
|
||||
if 'status' in data and data['status'] not in valid_statuses:
|
||||
raise ValidationError(f"Status must be one of: {', '.join(valid_statuses)}")
|
||||
|
||||
# Check if episode already exists for this anime
|
||||
existing_episode = episode_repository.get_episode_by_anime_and_number(
|
||||
data['anime_id'], data['episode_number']
|
||||
)
|
||||
if existing_episode:
|
||||
raise ValidationError(f"Episode {data['episode_number']} already exists for this anime")
|
||||
|
||||
# Validate episode number
|
||||
if data['episode_number'] < 1:
|
||||
raise ValidationError("Episode number must be positive")
|
||||
|
||||
# Create episode metadata object
|
||||
try:
|
||||
episode = EpisodeMetadata(
|
||||
episode_id=str(uuid.uuid4()),
|
||||
anime_id=data['anime_id'],
|
||||
episode_number=data['episode_number'],
|
||||
title=data['title'],
|
||||
url=data['url'],
|
||||
description=data.get('description'),
|
||||
status=data.get('status', 'available'),
|
||||
duration=data.get('duration'),
|
||||
air_date=data.get('air_date'),
|
||||
custom_metadata=data.get('custom_metadata', {})
|
||||
)
|
||||
except Exception as e:
|
||||
raise ValidationError(f"Invalid episode data: {str(e)}")
|
||||
|
||||
# Save to database
|
||||
success = episode_repository.create_episode(episode)
|
||||
if not success:
|
||||
raise APIException("Failed to create episode", 500)
|
||||
|
||||
# Return created episode
|
||||
episode_data = format_episode_response(episode.__dict__)
|
||||
return create_success_response(
|
||||
data=episode_data,
|
||||
message="Episode created successfully",
|
||||
status_code=201
|
||||
)
|
||||
|
||||
|
||||
@episodes_bp.route('/<int:episode_id>', methods=['PUT'])
|
||||
@handle_api_errors
|
||||
@validate_id_parameter('episode_id')
|
||||
@validate_json_input(
|
||||
optional_fields=['title', 'url', 'description', 'status', 'duration', 'air_date', 'custom_metadata'],
|
||||
field_types={
|
||||
'title': str,
|
||||
'url': str,
|
||||
'description': str,
|
||||
'status': str,
|
||||
'duration': int,
|
||||
'air_date': str,
|
||||
'custom_metadata': dict
|
||||
}
|
||||
)
|
||||
@require_auth
|
||||
def update_episode(episode_id: int) -> Dict[str, Any]:
|
||||
"""
|
||||
Update an existing episode record.
|
||||
|
||||
Args:
|
||||
episode_id: Unique identifier for the episode
|
||||
|
||||
Optional Fields:
|
||||
- title: Episode title
|
||||
- url: Episode URL
|
||||
- description: Episode description
|
||||
- status: Episode status (available, unavailable, coming_soon, downloaded)
|
||||
- duration: Episode duration in minutes
|
||||
- air_date: Air date in ISO format
|
||||
- custom_metadata: Additional metadata as key-value pairs
|
||||
|
||||
Returns:
|
||||
Updated episode details
|
||||
"""
|
||||
if not episode_repository:
|
||||
raise APIException("Episode repository not available", 503)
|
||||
|
||||
data = request.get_json()
|
||||
|
||||
# Get existing episode
|
||||
existing_episode = episode_repository.get_episode_by_id(episode_id)
|
||||
if not existing_episode:
|
||||
raise NotFoundError("Episode not found")
|
||||
|
||||
# Validate status if provided
|
||||
valid_statuses = ['available', 'unavailable', 'coming_soon', 'downloaded']
|
||||
if 'status' in data and data['status'] not in valid_statuses:
|
||||
raise ValidationError(f"Status must be one of: {', '.join(valid_statuses)}")
|
||||
|
||||
# Update fields
|
||||
update_fields = {}
|
||||
for field in ['title', 'url', 'description', 'status', 'duration', 'air_date']:
|
||||
if field in data:
|
||||
update_fields[field] = data[field]
|
||||
|
||||
# Handle custom metadata update (merge instead of replace)
|
||||
if 'custom_metadata' in data:
|
||||
existing_metadata = existing_episode.custom_metadata or {}
|
||||
existing_metadata.update(data['custom_metadata'])
|
||||
update_fields['custom_metadata'] = existing_metadata
|
||||
|
||||
# Perform update
|
||||
success = episode_repository.update_episode(episode_id, update_fields)
|
||||
if not success:
|
||||
raise APIException("Failed to update episode", 500)
|
||||
|
||||
# Get updated episode
|
||||
updated_episode = episode_repository.get_episode_by_id(episode_id)
|
||||
episode_data = format_episode_response(updated_episode.__dict__)
|
||||
|
||||
return create_success_response(
|
||||
data=episode_data,
|
||||
message="Episode updated successfully"
|
||||
)
|
||||
|
||||
|
||||
@episodes_bp.route('/<int:episode_id>', methods=['DELETE'])
|
||||
@handle_api_errors
|
||||
@validate_id_parameter('episode_id')
|
||||
@require_auth
|
||||
def delete_episode(episode_id: int) -> Dict[str, Any]:
|
||||
"""
|
||||
Delete an episode record.
|
||||
|
||||
Args:
|
||||
episode_id: Unique identifier for the episode
|
||||
|
||||
Query Parameters:
|
||||
- delete_file: Set to 'true' to also delete the downloaded file
|
||||
|
||||
Returns:
|
||||
Deletion confirmation
|
||||
"""
|
||||
if not episode_repository:
|
||||
raise APIException("Episode repository not available", 503)
|
||||
|
||||
# Check if episode exists
|
||||
existing_episode = episode_repository.get_episode_by_id(episode_id)
|
||||
if not existing_episode:
|
||||
raise NotFoundError("Episode not found")
|
||||
|
||||
# Check if we should also delete the file
|
||||
delete_file = request.args.get('delete_file', 'false').lower() == 'true'
|
||||
|
||||
# Perform deletion
|
||||
success = episode_repository.delete_episode(episode_id, delete_file=delete_file)
|
||||
if not success:
|
||||
raise APIException("Failed to delete episode", 500)
|
||||
|
||||
message = f"Episode {existing_episode.episode_number} deleted successfully"
|
||||
if delete_file:
|
||||
message += " (including downloaded file)"
|
||||
|
||||
return create_success_response(message=message)
|
||||
|
||||
|
||||
@episodes_bp.route('/bulk/status', methods=['PUT'])
|
||||
@handle_api_errors
|
||||
@validate_json_input(
|
||||
required_fields=['episode_ids', 'status'],
|
||||
field_types={
|
||||
'episode_ids': list,
|
||||
'status': str
|
||||
}
|
||||
)
|
||||
@require_auth
|
||||
def bulk_update_status() -> Dict[str, Any]:
|
||||
"""
|
||||
Update status for multiple episodes.
|
||||
|
||||
Required Fields:
|
||||
- episode_ids: List of episode IDs to update
|
||||
- status: New status for all episodes
|
||||
|
||||
Returns:
|
||||
Results of the bulk operation
|
||||
"""
|
||||
if not episode_repository:
|
||||
raise APIException("Episode repository not available", 503)
|
||||
|
||||
data = request.get_json()
|
||||
episode_ids = data['episode_ids']
|
||||
new_status = data['status']
|
||||
|
||||
# Validate status
|
||||
valid_statuses = ['available', 'unavailable', 'coming_soon', 'downloaded']
|
||||
if new_status not in valid_statuses:
|
||||
raise ValidationError(f"Status must be one of: {', '.join(valid_statuses)}")
|
||||
|
||||
# Validate episode_ids
|
||||
if not isinstance(episode_ids, list) or not episode_ids:
|
||||
raise ValidationError("episode_ids must be a non-empty list")
|
||||
|
||||
if len(episode_ids) > 100:
|
||||
raise ValidationError("Cannot operate on more than 100 episodes at once")
|
||||
|
||||
# Validate episode IDs are integers
|
||||
try:
|
||||
episode_ids = [int(eid) for eid in episode_ids]
|
||||
except (ValueError, TypeError):
|
||||
raise ValidationError("All episode_ids must be valid integers")
|
||||
|
||||
# Perform bulk update
|
||||
successful_items = []
|
||||
failed_items = []
|
||||
|
||||
for episode_id in episode_ids:
|
||||
try:
|
||||
success = episode_repository.update_episode(episode_id, {'status': new_status})
|
||||
if success:
|
||||
successful_items.append({'episode_id': episode_id, 'new_status': new_status})
|
||||
else:
|
||||
failed_items.append({'episode_id': episode_id, 'error': 'Episode not found'})
|
||||
except Exception as e:
|
||||
failed_items.append({'episode_id': episode_id, 'error': str(e)})
|
||||
|
||||
return create_batch_response(
|
||||
successful_items=successful_items,
|
||||
failed_items=failed_items,
|
||||
message=f"Bulk status update to '{new_status}' completed"
|
||||
)
|
||||
|
||||
|
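# Example request body for the bulk endpoint above (illustrative):
#
#   PUT .../bulk/status
#   {"episode_ids": [1, 2, 3], "status": "downloaded"}
#
# Per-item failures are collected into failed_items and reported via create_batch_response,
# so one missing episode does not abort the rest of the batch.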
||||
@episodes_bp.route('/anime/<int:anime_id>/sync', methods=['POST'])
|
||||
@handle_api_errors
|
||||
@validate_id_parameter('anime_id')
|
||||
@require_auth
|
||||
def sync_anime_episodes(anime_id: int) -> Dict[str, Any]:
|
||||
"""
|
||||
Synchronize episodes for an anime by scanning the source.
|
||||
|
||||
Args:
|
||||
anime_id: Unique identifier for the anime
|
||||
|
||||
Returns:
|
||||
Synchronization results
|
||||
"""
|
||||
if not episode_repository or not anime_repository:
|
||||
raise APIException("Episode repository not available", 503)
|
||||
|
||||
# Check if anime exists
|
||||
anime = anime_repository.get_anime_by_id(anime_id)
|
||||
if not anime:
|
||||
raise NotFoundError("Anime not found")
|
||||
|
||||
# This would trigger the episode scanning/syncing process
|
||||
try:
|
||||
sync_result = episode_repository.sync_episodes_for_anime(anime_id)
|
||||
|
||||
return create_success_response(
|
||||
data={
|
||||
'anime_id': anime_id,
|
||||
'episodes_found': sync_result.get('episodes_found', 0),
|
||||
'episodes_added': sync_result.get('episodes_added', 0),
|
||||
'episodes_updated': sync_result.get('episodes_updated', 0),
|
||||
'episodes_removed': sync_result.get('episodes_removed', 0)
|
||||
},
|
||||
message=f"Episode sync completed for '{anime.name}'"
|
||||
)
|
||||
except Exception as e:
|
||||
raise APIException(f"Failed to sync episodes: {str(e)}", 500)
|
||||
|
||||
|
||||
@episodes_bp.route('/<int:episode_id>/download', methods=['POST'])
|
||||
@handle_api_errors
|
||||
@validate_id_parameter('episode_id')
|
||||
@require_auth
|
||||
def queue_episode_download(episode_id: int) -> Dict[str, Any]:
|
||||
"""
|
||||
Queue an episode for download.
|
||||
|
||||
Args:
|
||||
episode_id: Unique identifier for the episode
|
||||
|
||||
Returns:
|
||||
Download queue confirmation
|
||||
"""
|
||||
if not episode_repository:
|
||||
raise APIException("Episode repository not available", 503)
|
||||
|
||||
# Check if episode exists
|
||||
episode = episode_repository.get_episode_by_id(episode_id)
|
||||
if not episode:
|
||||
raise NotFoundError("Episode not found")
|
||||
|
||||
# Check if episode is already downloaded
|
||||
if episode.status == 'downloaded':
|
||||
raise ValidationError("Episode is already downloaded")
|
||||
|
||||
# Check if episode is available for download
|
||||
if episode.status != 'available':
|
||||
raise ValidationError(f"Episode status '{episode.status}' is not available for download")
|
||||
|
||||
# Queue for download (this would integrate with the download system)
|
||||
try:
|
||||
from ...download_manager import download_queue
|
||||
download_id = download_queue.add_episode_download(episode_id)
|
||||
|
||||
return create_success_response(
|
||||
data={'download_id': download_id},
|
||||
message=f"Episode {episode.episode_number} queued for download"
|
||||
)
|
||||
except Exception as e:
|
||||
raise APIException(f"Failed to queue download: {str(e)}", 500)
|
||||
|
||||
|
||||
@episodes_bp.route('/search', methods=['GET'])
|
||||
@handle_api_errors
|
||||
@validate_pagination_params
|
||||
@optional_auth
|
||||
def search_episodes() -> Dict[str, Any]:
|
||||
"""
|
||||
Search episodes by title or other criteria.
|
||||
|
||||
Query Parameters:
|
||||
- q: Search query (required)
|
||||
- anime_id: Limit search to specific anime
|
||||
- status: Filter by episode status
|
||||
- page: Page number (default: 1)
|
||||
- per_page: Items per page (default: 50, max: 1000)
|
||||
|
||||
Returns:
|
||||
Paginated search results
|
||||
"""
|
||||
if not episode_repository:
|
||||
raise APIException("Episode repository not available", 503)
|
||||
|
||||
search_term = request.args.get('q', '').strip()
|
||||
if not search_term:
|
||||
raise ValidationError("Search term 'q' is required")
|
||||
|
||||
if len(search_term) < 2:
|
||||
raise ValidationError("Search term must be at least 2 characters long")
|
||||
|
||||
# Get additional filters
|
||||
anime_id = request.args.get('anime_id')
|
||||
status_filter = request.args.get('status')
|
||||
|
||||
# Validate anime_id if provided
|
||||
if anime_id:
|
||||
try:
|
||||
anime_id = int(anime_id)
|
||||
except ValueError:
|
||||
raise ValidationError("anime_id must be a valid integer")
|
||||
|
||||
# Get pagination parameters
|
||||
page, per_page = extract_pagination_params()
|
||||
|
||||
# Perform search
|
||||
search_results = episode_repository.search_episodes(
|
||||
search_term=search_term,
|
||||
anime_id=anime_id,
|
||||
status_filter=status_filter
|
||||
)
|
||||
|
||||
# Format results
|
||||
formatted_results = [format_episode_response(episode.__dict__) for episode in search_results]
|
||||
|
||||
# Apply pagination
|
||||
total = len(formatted_results)
|
||||
start_idx = (page - 1) * per_page
|
||||
end_idx = start_idx + per_page
|
||||
paginated_results = formatted_results[start_idx:end_idx]
|
||||
|
||||
# Create response with search metadata
|
||||
response = create_paginated_response(
|
||||
data=paginated_results,
|
||||
page=page,
|
||||
per_page=per_page,
|
||||
total=total,
|
||||
endpoint='episodes.search_episodes',
|
||||
q=search_term
|
||||
)
|
||||
|
||||
# Add search metadata
|
||||
response['search'] = {
|
||||
'query': search_term,
|
||||
'total_results': total,
|
||||
'filters': {
|
||||
'anime_id': anime_id,
|
||||
'status': status_filter
|
||||
}
|
||||
}
|
||||
|
||||
return response
|
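# Example search call (illustrative; the blueprint mount point is an assumption):
#
#   GET .../search?q=naruto&anime_id=7&status=available&page=1&per_page=25
#
# Note that filtering happens in the repository while pagination is applied in memory on the
# formatted results, so a very broad query still materialises the full result list before
# the requested page is sliced out.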
||||
@@ -1,436 +0,0 @@
|
||||
"""
|
||||
Health Check Endpoints
|
||||
|
||||
This module provides basic health check endpoints for monitoring
|
||||
the AniWorld application's status.
|
||||
"""
|
||||
|
||||
from flask import Blueprint, jsonify
|
||||
import time
|
||||
import os
|
||||
import psutil
|
||||
from datetime import datetime
|
||||
|
||||
# Blueprint for health check endpoints
|
||||
health_bp = Blueprint('health_check', __name__, url_prefix='/api/health')
|
||||
|
||||
|
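# NOTE: get_cached_health_data is called by several endpoints below but is neither defined
# nor imported in this module (database_manager, config and memory_monitor are likewise
# assumed to be provided by the application). The helper below is a minimal TTL-cache
# sketch added under that assumption so the module is self-contained; if the project ships
# its own caching helper, this fallback should be replaced by the proper import.
_health_cache = {}


def get_cached_health_data(key, check_fn, ttl=30):
    """Return cached health data for `key`, refreshing via `check_fn` after `ttl` seconds."""
    now = time.time()
    cached = _health_cache.get(key)
    if cached and now - cached[0] < ttl:
        return cached[1]
    data = check_fn()
    _health_cache[key] = (now, data)
    return data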
||||
@health_bp.route('/status')
|
||||
def get_basic_health():
|
||||
"""Get basic application health status."""
|
||||
try:
|
||||
# Basic system metrics
|
||||
memory = psutil.virtual_memory()
|
||||
disk = psutil.disk_usage('/')
|
||||
|
||||
return jsonify({
|
||||
'status': 'healthy',
|
||||
'timestamp': datetime.now().isoformat(),
|
||||
'system': {
|
||||
'memory_usage_percent': memory.percent,
|
||||
'disk_usage_percent': disk.percent,
|
||||
'uptime_seconds': time.time() - psutil.Process().create_time()
|
||||
},
|
||||
'application': {
|
||||
'status': 'running',
|
||||
'version': '1.0.0'
|
||||
}
|
||||
})
|
||||
except Exception as e:
|
||||
return jsonify({
|
||||
'status': 'error',
|
||||
'message': str(e),
|
||||
'timestamp': datetime.now().isoformat()
|
||||
}), 500
|
||||
|
||||
|
||||
@health_bp.route('/ping')
|
||||
def ping():
|
||||
"""Simple ping endpoint."""
|
||||
return jsonify({
|
||||
'status': 'ok',
|
||||
'timestamp': datetime.now().isoformat()
|
||||
})
|
||||
@health_bp.route('/')
|
||||
def basic_health():
|
||||
"""Basic health check endpoint for load balancers."""
|
||||
return jsonify({
|
||||
'status': 'healthy',
|
||||
'timestamp': datetime.utcnow().isoformat(),
|
||||
'service': 'aniworld-web'
|
||||
})
|
||||
|
||||
|
||||
@health_bp.route('/system')
|
||||
def system_health():
|
||||
"""Comprehensive system health check."""
|
||||
def check_system_health():
|
||||
try:
|
||||
# System metrics
|
||||
cpu_percent = psutil.cpu_percent(interval=1)
|
||||
memory = psutil.virtual_memory()
|
||||
disk = psutil.disk_usage('/')
|
||||
|
||||
# Process metrics
|
||||
process = psutil.Process()
|
||||
process_memory = process.memory_info()
|
||||
|
||||
return {
|
||||
'status': 'healthy',
|
||||
'timestamp': datetime.utcnow().isoformat(),
|
||||
'system': {
|
||||
'cpu_percent': cpu_percent,
|
||||
'memory': {
|
||||
'total_mb': memory.total / 1024 / 1024,
|
||||
'available_mb': memory.available / 1024 / 1024,
|
||||
'percent': memory.percent
|
||||
},
|
||||
'disk': {
|
||||
'total_gb': disk.total / 1024 / 1024 / 1024,
|
||||
'free_gb': disk.free / 1024 / 1024 / 1024,
|
||||
'percent': (disk.used / disk.total) * 100
|
||||
}
|
||||
},
|
||||
'process': {
|
||||
'memory_mb': process_memory.rss / 1024 / 1024,
|
||||
'threads': process.num_threads(),
|
||||
'cpu_percent': process.cpu_percent()
|
||||
}
|
||||
}
|
||||
except Exception as e:
|
||||
return {
|
||||
'status': 'unhealthy',
|
||||
'error': str(e),
|
||||
'timestamp': datetime.utcnow().isoformat()
|
||||
}
|
||||
|
||||
return jsonify(get_cached_health_data('system', check_system_health))
|
||||
|
||||
|
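# Note: psutil.cpu_percent(interval=1) in the check above blocks the request for a full
# second while sampling. psutil.cpu_percent(interval=None) is a non-blocking alternative
# that reports utilisation since the previous call (the very first call returns 0.0, so it
# should be primed once at startup if used).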
||||
@health_bp.route('/database')
|
||||
def database_health():
|
||||
"""Database connectivity and health check."""
|
||||
def check_database_health():
|
||||
try:
|
||||
# Test database connection
|
||||
start_time = time.time()
|
||||
with database_manager.get_connection() as conn:
|
||||
cursor = conn.execute("SELECT 1")
|
||||
result = cursor.fetchone()
|
||||
connection_time = (time.time() - start_time) * 1000 # ms
|
||||
|
||||
# Get database size and basic stats
|
||||
db_size = os.path.getsize(database_manager.db_path) if os.path.exists(database_manager.db_path) else 0
|
||||
|
||||
# Check schema version
|
||||
schema_version = database_manager.get_current_version()
|
||||
|
||||
# Get table counts
|
||||
with database_manager.get_connection() as conn:
|
||||
anime_count = conn.execute("SELECT COUNT(*) FROM anime_metadata").fetchone()[0]
|
||||
episode_count = conn.execute("SELECT COUNT(*) FROM episode_metadata").fetchone()[0]
|
||||
|
||||
return {
|
||||
'status': 'healthy',
|
||||
'timestamp': datetime.utcnow().isoformat(),
|
||||
'database': {
|
||||
'connected': True,
|
||||
'connection_time_ms': connection_time,
|
||||
'size_mb': db_size / 1024 / 1024,
|
||||
'schema_version': schema_version,
|
||||
'tables': {
|
||||
'anime_count': anime_count,
|
||||
'episode_count': episode_count
|
||||
}
|
||||
}
|
||||
}
|
||||
except Exception as e:
|
||||
return {
|
||||
'status': 'unhealthy',
|
||||
'timestamp': datetime.utcnow().isoformat(),
|
||||
'database': {
|
||||
'connected': False,
|
||||
'error': str(e)
|
||||
}
|
||||
}
|
||||
|
||||
return jsonify(get_cached_health_data('database', check_database_health, ttl=60))
|
||||
|
||||
|
||||
@health_bp.route('/dependencies')
|
||||
def dependencies_health():
|
||||
"""Check health of external dependencies."""
|
||||
def check_dependencies():
|
||||
dependencies = {
|
||||
'status': 'healthy',
|
||||
'timestamp': datetime.utcnow().isoformat(),
|
||||
'dependencies': {}
|
||||
}
|
||||
|
||||
# Check filesystem access
|
||||
try:
|
||||
anime_directory = getattr(config, 'anime_directory', '/app/data')
|
||||
if os.path.exists(anime_directory):
|
||||
# Test read/write access
|
||||
test_file = os.path.join(anime_directory, '.health_check')
|
||||
with open(test_file, 'w') as f:
|
||||
f.write('test')
|
||||
os.remove(test_file)
|
||||
dependencies['dependencies']['filesystem'] = {
|
||||
'status': 'healthy',
|
||||
'path': anime_directory,
|
||||
'accessible': True
|
||||
}
|
||||
else:
|
||||
dependencies['dependencies']['filesystem'] = {
|
||||
'status': 'unhealthy',
|
||||
'path': anime_directory,
|
||||
'accessible': False,
|
||||
'error': 'Directory does not exist'
|
||||
}
|
||||
dependencies['status'] = 'degraded'
|
||||
except Exception as e:
|
||||
dependencies['dependencies']['filesystem'] = {
|
||||
'status': 'unhealthy',
|
||||
'error': str(e)
|
||||
}
|
||||
dependencies['status'] = 'degraded'
|
||||
|
||||
# Check network connectivity (basic)
|
||||
try:
|
||||
import socket
|
||||
socket.create_connection(("8.8.8.8", 53), timeout=3)
|
||||
dependencies['dependencies']['network'] = {
|
||||
'status': 'healthy',
|
||||
'connectivity': True
|
||||
}
|
||||
except Exception as e:
|
||||
dependencies['dependencies']['network'] = {
|
||||
'status': 'unhealthy',
|
||||
'connectivity': False,
|
||||
'error': str(e)
|
||||
}
|
||||
dependencies['status'] = 'degraded'
|
||||
|
||||
return dependencies
|
||||
|
||||
return jsonify(get_cached_health_data('dependencies', check_dependencies, ttl=120))
|
||||
|
||||
|
||||
@health_bp.route('/performance')
|
||||
def performance_health():
|
||||
"""Performance metrics and health indicators."""
|
||||
def check_performance():
|
||||
try:
|
||||
# Memory usage
|
||||
memory_usage = memory_monitor.get_current_memory_usage() if memory_monitor else 0
|
||||
is_memory_high = memory_monitor.is_memory_usage_high() if memory_monitor else False
|
||||
|
||||
# Thread count
|
||||
process = psutil.Process()
|
||||
thread_count = process.num_threads()
|
||||
|
||||
# Load average (if available)
|
||||
load_avg = None
|
||||
try:
|
||||
load_avg = os.getloadavg()
|
||||
except (AttributeError, OSError):
|
||||
# Not available on all platforms
|
||||
pass
|
||||
|
||||
# Check if performance is within acceptable limits
|
||||
performance_status = 'healthy'
|
||||
warnings = []
|
||||
|
||||
if is_memory_high:
|
||||
performance_status = 'degraded'
|
||||
warnings.append('High memory usage detected')
|
||||
|
||||
if thread_count > 100: # Arbitrary threshold
|
||||
performance_status = 'degraded'
|
||||
warnings.append(f'High thread count: {thread_count}')
|
||||
|
||||
if load_avg and load_avg[0] > 4: # Load average > 4
|
||||
performance_status = 'degraded'
|
||||
warnings.append(f'High system load: {load_avg[0]:.2f}')
|
||||
|
||||
return {
|
||||
'status': performance_status,
|
||||
'timestamp': datetime.utcnow().isoformat(),
|
||||
'performance': {
|
||||
'memory_usage_mb': memory_usage,
|
||||
'memory_high': is_memory_high,
|
||||
'thread_count': thread_count,
|
||||
'load_average': load_avg,
|
||||
'warnings': warnings
|
||||
}
|
||||
}
|
||||
except Exception as e:
|
||||
return {
|
||||
'status': 'error',
|
||||
'timestamp': datetime.utcnow().isoformat(),
|
||||
'error': str(e)
|
||||
}
|
||||
|
||||
return jsonify(get_cached_health_data('performance', check_performance, ttl=10))
|
||||
|
||||
|
||||
@health_bp.route('/detailed')
|
||||
def detailed_health():
|
||||
"""Comprehensive health check combining all metrics."""
|
||||
def check_detailed_health():
|
||||
try:
|
||||
# Get all health checks
|
||||
system = get_cached_health_data('system', lambda: system_health().json)
|
||||
database = get_cached_health_data('database', lambda: database_health().json)
|
||||
dependencies = get_cached_health_data('dependencies', lambda: dependencies_health().json)
|
||||
performance = get_cached_health_data('performance', lambda: performance_health().json)
|
||||
|
||||
# Determine overall status
|
||||
statuses = [
|
||||
system.get('status', 'unknown'),
|
||||
database.get('status', 'unknown'),
|
||||
dependencies.get('status', 'unknown'),
|
||||
performance.get('status', 'unknown')
|
||||
]
|
||||
|
||||
if 'unhealthy' in statuses or 'error' in statuses:
|
||||
overall_status = 'unhealthy'
|
||||
elif 'degraded' in statuses:
|
||||
overall_status = 'degraded'
|
||||
else:
|
||||
overall_status = 'healthy'
|
||||
|
||||
return {
|
||||
'status': overall_status,
|
||||
'timestamp': datetime.utcnow().isoformat(),
|
||||
'components': {
|
||||
'system': system,
|
||||
'database': database,
|
||||
'dependencies': dependencies,
|
||||
'performance': performance
|
||||
}
|
||||
}
|
||||
except Exception as e:
|
||||
return {
|
||||
'status': 'error',
|
||||
'timestamp': datetime.utcnow().isoformat(),
|
||||
'error': str(e)
|
||||
}
|
||||
|
||||
# Don't cache detailed health - always get fresh data
|
||||
return jsonify(check_detailed_health())
|
||||
|
||||
|
||||
@health_bp.route('/ready')
|
||||
def readiness_probe():
|
||||
"""Kubernetes readiness probe endpoint."""
|
||||
try:
|
||||
# Check critical dependencies
|
||||
with database_manager.get_connection() as conn:
|
||||
conn.execute("SELECT 1")
|
||||
|
||||
# Check if anime directory is accessible
|
||||
anime_directory = getattr(config, 'anime_directory', '/app/data')
|
||||
if not os.path.exists(anime_directory):
|
||||
raise Exception(f"Anime directory not accessible: {anime_directory}")
|
||||
|
||||
return jsonify({
|
||||
'status': 'ready',
|
||||
'timestamp': datetime.utcnow().isoformat()
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
return jsonify({
|
||||
'status': 'not_ready',
|
||||
'timestamp': datetime.utcnow().isoformat(),
|
||||
'error': str(e)
|
||||
}), 503
|
||||
|
||||
|
||||
@health_bp.route('/live')
|
||||
def liveness_probe():
|
||||
"""Kubernetes liveness probe endpoint."""
|
||||
try:
|
||||
# Basic liveness check - just verify the application is responding
|
||||
return jsonify({
|
||||
'status': 'alive',
|
||||
'timestamp': datetime.utcnow().isoformat(),
|
||||
'uptime_seconds': time.time() - psutil.Process().create_time()
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
return jsonify({
|
||||
'status': 'dead',
|
||||
'timestamp': datetime.utcnow().isoformat(),
|
||||
'error': str(e)
|
||||
}), 503
|
||||
|
||||
|
||||
@health_bp.route('/metrics')
|
||||
def prometheus_metrics():
|
||||
"""Prometheus-compatible metrics endpoint."""
|
||||
try:
|
||||
# Generate Prometheus-format metrics
|
||||
metrics = []
|
||||
|
||||
# System metrics
|
||||
cpu_percent = psutil.cpu_percent()
|
||||
memory = psutil.virtual_memory()
|
||||
disk = psutil.disk_usage('/')
|
||||
|
||||
metrics.extend([
|
||||
f"# HELP aniworld_cpu_usage_percent CPU usage percentage",
|
||||
f"# TYPE aniworld_cpu_usage_percent gauge",
|
||||
f"aniworld_cpu_usage_percent {cpu_percent}",
|
||||
f"",
|
||||
f"# HELP aniworld_memory_usage_percent Memory usage percentage",
|
||||
f"# TYPE aniworld_memory_usage_percent gauge",
|
||||
f"aniworld_memory_usage_percent {memory.percent}",
|
||||
f"",
|
||||
f"# HELP aniworld_disk_usage_percent Disk usage percentage",
|
||||
f"# TYPE aniworld_disk_usage_percent gauge",
|
||||
f"aniworld_disk_usage_percent {(disk.used / disk.total) * 100}",
|
||||
f"",
|
||||
])
|
||||
|
||||
# Database metrics
|
||||
try:
|
||||
with database_manager.get_connection() as conn:
|
||||
anime_count = conn.execute("SELECT COUNT(*) FROM anime_metadata").fetchone()[0]
|
||||
episode_count = conn.execute("SELECT COUNT(*) FROM episode_metadata").fetchone()[0]
|
||||
|
||||
metrics.extend([
|
||||
f"# HELP aniworld_anime_total Total number of anime in database",
|
||||
f"# TYPE aniworld_anime_total counter",
|
||||
f"aniworld_anime_total {anime_count}",
|
||||
f"",
|
||||
f"# HELP aniworld_episodes_total Total number of episodes in database",
|
||||
f"# TYPE aniworld_episodes_total counter",
|
||||
f"aniworld_episodes_total {episode_count}",
|
||||
f"",
|
||||
])
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
# Process metrics
|
||||
process = psutil.Process()
|
||||
metrics.extend([
|
||||
f"# HELP aniworld_process_threads Number of threads in process",
|
||||
f"# TYPE aniworld_process_threads gauge",
|
||||
f"aniworld_process_threads {process.num_threads()}",
|
||||
f"",
|
||||
f"# HELP aniworld_process_memory_bytes Memory usage in bytes",
|
||||
f"# TYPE aniworld_process_memory_bytes gauge",
|
||||
f"aniworld_process_memory_bytes {process.memory_info().rss}",
|
||||
f"",
|
||||
])
|
||||
|
||||
return "\n".join(metrics), 200, {'Content-Type': 'text/plain; charset=utf-8'}
|
||||
|
||||
except Exception as e:
|
||||
return f"# Error generating metrics: {e}", 500, {'Content-Type': 'text/plain'}
|
||||
|
||||
|
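# Example of the exposition-format output produced above (values are illustrative):
#
#   # HELP aniworld_cpu_usage_percent CPU usage percentage
#   # TYPE aniworld_cpu_usage_percent gauge
#   aniworld_cpu_usage_percent 12.5
#
#   # HELP aniworld_anime_total Total number of anime in database
#   # TYPE aniworld_anime_total gauge
#   aniworld_anime_total 128
#
# A Prometheus scrape job only needs the URL of this endpoint; no client library is needed
# on the scraping side because the body is plain text.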
||||
# Export the blueprint
|
||||
__all__ = ['health_bp']
|
||||
@@ -1,701 +0,0 @@
|
||||
"""
|
||||
Integrations API endpoints.
|
||||
|
||||
This module handles all external integration operations including:
|
||||
- API key management
|
||||
- Webhook configuration
|
||||
- External service integrations
|
||||
- Third-party API management
|
||||
"""
|
||||
|
||||
from flask import Blueprint, request, jsonify
|
||||
from typing import Dict, List, Any, Optional, Tuple
|
||||
import logging
|
||||
import requests
|
||||
import json
|
||||
import hmac
|
||||
import hashlib
|
||||
import time
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
# Import shared utilities
|
||||
try:
|
||||
from src.server.web.controllers.shared.auth_decorators import require_auth, optional_auth
|
||||
from src.server.web.controllers.shared.error_handlers import handle_api_errors
|
||||
from src.server.web.controllers.shared.validators import (
|
||||
validate_json_input, validate_query_params, validate_pagination_params,
|
||||
validate_id_parameter, is_valid_url
|
||||
)
|
||||
from src.server.web.controllers.shared.response_helpers import (
|
||||
create_success_response, create_error_response, create_paginated_response
|
||||
)
|
||||
except ImportError:
|
||||
# Fallback imports for development
|
||||
def require_auth(f): return f
|
||||
def optional_auth(f): return f
|
||||
def handle_api_errors(f): return f
|
||||
def validate_json_input(**kwargs): return lambda f: f
|
||||
def validate_query_params(**kwargs): return lambda f: f
|
||||
def validate_pagination_params(f): return f
|
||||
def validate_id_parameter(param): return lambda f: f
|
||||
def is_valid_url(url): return url.startswith(('http://', 'https://'))
|
||||
def create_success_response(msg, code=200, data=None): return jsonify({'success': True, 'message': msg, 'data': data}), code
|
||||
def create_error_response(msg, code=400, details=None): return jsonify({'error': msg, 'details': details}), code
|
||||
def create_paginated_response(items, page, per_page, total, endpoint=None): return jsonify({'data': items, 'pagination': {'page': page, 'per_page': per_page, 'total': total}}), 200
|
||||
|
||||
# Import integration components
|
||||
try:
|
||||
from src.server.data.integration_manager import IntegrationManager
|
||||
from src.server.data.webhook_manager import WebhookManager
|
||||
from src.data.api_key_manager import APIKeyManager
|
||||
except ImportError:
|
||||
# Fallback for development
|
||||
class IntegrationManager:
|
||||
def get_all_integrations(self, **kwargs): return []
|
||||
def get_integrations_count(self, **kwargs): return 0
|
||||
def get_integration_by_id(self, id): return None
|
||||
def create_integration(self, **kwargs): return 1
|
||||
def update_integration(self, id, **kwargs): return True
|
||||
def delete_integration(self, id): return True
|
||||
def test_integration(self, id): return {'success': True, 'response_time': 0.1}
|
||||
def get_integration_logs(self, id, **kwargs): return []
|
||||
def trigger_integration(self, id, data): return {'success': True}
|
||||
|
||||
class WebhookManager:
|
||||
def get_all_webhooks(self, **kwargs): return []
|
||||
def get_webhooks_count(self, **kwargs): return 0
|
||||
def get_webhook_by_id(self, id): return None
|
||||
def create_webhook(self, **kwargs): return 1
|
||||
def update_webhook(self, id, **kwargs): return True
|
||||
def delete_webhook(self, id): return True
|
||||
def test_webhook(self, id): return {'success': True, 'response_time': 0.1}
|
||||
def get_webhook_deliveries(self, id, **kwargs): return []
|
||||
def redeliver_webhook(self, delivery_id): return True
|
||||
def trigger_webhook(self, event, data): return True
|
||||
|
||||
class APIKeyManager:
|
||||
def get_external_api_keys(self, **kwargs): return []
|
||||
def get_external_api_key_by_id(self, id): return None
|
||||
def create_external_api_key(self, **kwargs): return 1
|
||||
def update_external_api_key(self, id, **kwargs): return True
|
||||
def delete_external_api_key(self, id): return True
|
||||
def test_external_api_key(self, id): return {'success': True}
|
||||
def rotate_external_api_key(self, id): return {'new_key': 'new_api_key'}
|
||||
|
||||
# Create blueprint
|
||||
integrations_bp = Blueprint('integrations', __name__)
|
||||
|
||||
# Initialize managers
|
||||
integration_manager = IntegrationManager()
|
||||
webhook_manager = WebhookManager()
|
||||
api_key_manager = APIKeyManager()
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@integrations_bp.route('/integrations', methods=['GET'])
|
||||
@require_auth
|
||||
@handle_api_errors
|
||||
@validate_query_params(
|
||||
allowed_params=['page', 'per_page', 'type', 'status', 'sort_by', 'sort_order'],
|
||||
param_types={'page': int, 'per_page': int}
|
||||
)
|
||||
@validate_pagination_params
|
||||
def list_integrations() -> Tuple[Any, int]:
|
||||
"""
|
||||
List integrations with pagination and filtering.
|
||||
|
||||
Query Parameters:
|
||||
- page: Page number (default: 1)
|
||||
- per_page: Items per page (default: 20, max: 100)
|
||||
- type: Filter by integration type
|
||||
- status: Filter by integration status
|
||||
- sort_by: Sort field (default: created_at)
|
||||
- sort_order: Sort order (asc/desc, default: desc)
|
||||
|
||||
Returns:
|
||||
JSON response with paginated integration list
|
||||
"""
|
||||
page = request.args.get('page', 1, type=int)
|
||||
per_page = min(request.args.get('per_page', 20, type=int), 100)
|
||||
integration_type = request.args.get('type')
|
||||
status = request.args.get('status')
|
||||
sort_by = request.args.get('sort_by', 'created_at')
|
||||
sort_order = request.args.get('sort_order', 'desc')
|
||||
|
||||
offset = (page - 1) * per_page
|
||||
|
||||
# Get integrations
|
||||
integrations = integration_manager.get_all_integrations(
|
||||
offset=offset,
|
||||
limit=per_page,
|
||||
integration_type=integration_type,
|
||||
status=status,
|
||||
sort_by=sort_by,
|
||||
sort_order=sort_order
|
||||
)
|
||||
|
||||
# Get total count
|
||||
total = integration_manager.get_integrations_count(
|
||||
integration_type=integration_type,
|
||||
status=status
|
||||
)
|
||||
|
||||
return create_paginated_response(
|
||||
integrations,
|
||||
page,
|
||||
per_page,
|
||||
total,
|
||||
endpoint='/api/v1/integrations'
|
||||
)
|
||||
|
||||
|
||||
@integrations_bp.route('/integrations/<int:integration_id>', methods=['GET'])
|
||||
@require_auth
|
||||
@handle_api_errors
|
||||
@validate_id_parameter('integration_id')
|
||||
def get_integration(integration_id: int) -> Tuple[Any, int]:
|
||||
"""
|
||||
Get specific integration by ID.
|
||||
|
||||
Args:
|
||||
integration_id: Integration ID
|
||||
|
||||
Returns:
|
||||
JSON response with integration data
|
||||
"""
|
||||
integration = integration_manager.get_integration_by_id(integration_id)
|
||||
|
||||
if not integration:
|
||||
return create_error_response("Integration not found", 404)
|
||||
|
||||
return create_success_response("Integration retrieved successfully", 200, integration)
|
||||
|
||||
|
||||
@integrations_bp.route('/integrations', methods=['POST'])
|
||||
@require_auth
|
||||
@handle_api_errors
|
||||
@validate_json_input(
|
||||
required_fields=['name', 'type', 'config'],
|
||||
optional_fields=['description', 'enabled'],
|
||||
field_types={'name': str, 'type': str, 'config': dict, 'description': str, 'enabled': bool}
|
||||
)
|
||||
def create_integration() -> Tuple[Any, int]:
|
||||
"""
|
||||
Create a new integration.
|
||||
|
||||
Request Body:
|
||||
- name: Integration name (required)
|
||||
- type: Integration type (required)
|
||||
- config: Integration configuration (required)
|
||||
- description: Integration description (optional)
|
||||
- enabled: Whether integration is enabled (optional, default: true)
|
||||
|
||||
Returns:
|
||||
JSON response with created integration data
|
||||
"""
|
||||
data = request.get_json()
|
||||
|
||||
# Validate integration type
|
||||
allowed_types = ['webhook', 'api', 'discord', 'slack', 'email', 'custom']
|
||||
if data['type'] not in allowed_types:
|
||||
return create_error_response(f"Invalid integration type. Must be one of: {', '.join(allowed_types)}", 400)
|
||||
|
||||
# Validate configuration based on type
|
||||
config_errors = _validate_integration_config(data['type'], data['config'])
|
||||
if config_errors:
|
||||
return create_error_response("Configuration validation failed", 400, config_errors)
|
||||
|
||||
try:
|
||||
# Create integration
|
||||
integration_id = integration_manager.create_integration(
|
||||
name=data['name'],
|
||||
integration_type=data['type'],
|
||||
config=data['config'],
|
||||
description=data.get('description', ''),
|
||||
enabled=data.get('enabled', True)
|
||||
)
|
||||
|
||||
# Get created integration
|
||||
integration = integration_manager.get_integration_by_id(integration_id)
|
||||
|
||||
logger.info(f"Created integration {integration_id}: {data['name']} ({data['type']})")
|
||||
return create_success_response("Integration created successfully", 201, integration)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error creating integration: {str(e)}")
|
||||
return create_error_response("Failed to create integration", 500)
|
||||
|
||||
|
||||
@integrations_bp.route('/integrations/<int:integration_id>', methods=['PUT'])
|
||||
@require_auth
|
||||
@handle_api_errors
|
||||
@validate_id_parameter('integration_id')
|
||||
@validate_json_input(
|
||||
optional_fields=['name', 'config', 'description', 'enabled'],
|
||||
field_types={'name': str, 'config': dict, 'description': str, 'enabled': bool}
|
||||
)
|
||||
def update_integration(integration_id: int) -> Tuple[Any, int]:
|
||||
"""
|
||||
Update an integration.
|
||||
|
||||
Args:
|
||||
integration_id: Integration ID
|
||||
|
||||
Request Body:
|
||||
- name: Integration name (optional)
|
||||
- config: Integration configuration (optional)
|
||||
- description: Integration description (optional)
|
||||
- enabled: Whether integration is enabled (optional)
|
||||
|
||||
Returns:
|
||||
JSON response with update result
|
||||
"""
|
||||
integration = integration_manager.get_integration_by_id(integration_id)
|
||||
|
||||
if not integration:
|
||||
return create_error_response("Integration not found", 404)
|
||||
|
||||
data = request.get_json()
|
||||
|
||||
# Validate configuration if provided
|
||||
if 'config' in data:
|
||||
config_errors = _validate_integration_config(integration['type'], data['config'])
|
||||
if config_errors:
|
||||
return create_error_response("Configuration validation failed", 400, config_errors)
|
||||
|
||||
try:
|
||||
# Update integration
|
||||
success = integration_manager.update_integration(integration_id, **data)
|
||||
|
||||
if success:
|
||||
# Get updated integration
|
||||
updated_integration = integration_manager.get_integration_by_id(integration_id)
|
||||
|
||||
logger.info(f"Updated integration {integration_id}")
|
||||
return create_success_response("Integration updated successfully", 200, updated_integration)
|
||||
else:
|
||||
return create_error_response("Failed to update integration", 500)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error updating integration {integration_id}: {str(e)}")
|
||||
return create_error_response("Failed to update integration", 500)
|
||||
|
||||
|
||||
@integrations_bp.route('/integrations/<int:integration_id>', methods=['DELETE'])
|
||||
@require_auth
|
||||
@handle_api_errors
|
||||
@validate_id_parameter('integration_id')
|
||||
def delete_integration(integration_id: int) -> Tuple[Any, int]:
|
||||
"""
|
||||
Delete an integration.
|
||||
|
||||
Args:
|
||||
integration_id: Integration ID
|
||||
|
||||
Returns:
|
||||
JSON response with deletion result
|
||||
"""
|
||||
integration = integration_manager.get_integration_by_id(integration_id)
|
||||
|
||||
if not integration:
|
||||
return create_error_response("Integration not found", 404)
|
||||
|
||||
try:
|
||||
success = integration_manager.delete_integration(integration_id)
|
||||
|
||||
if success:
|
||||
logger.info(f"Deleted integration {integration_id}: {integration['name']}")
|
||||
return create_success_response("Integration deleted successfully")
|
||||
else:
|
||||
return create_error_response("Failed to delete integration", 500)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error deleting integration {integration_id}: {str(e)}")
|
||||
return create_error_response("Failed to delete integration", 500)
|
||||
|
||||
|
||||
@integrations_bp.route('/integrations/<int:integration_id>/test', methods=['POST'])
|
||||
@require_auth
|
||||
@handle_api_errors
|
||||
@validate_id_parameter('integration_id')
|
||||
def test_integration(integration_id: int) -> Tuple[Any, int]:
|
||||
"""
|
||||
Test an integration.
|
||||
|
||||
Args:
|
||||
integration_id: Integration ID
|
||||
|
||||
Returns:
|
||||
JSON response with test result
|
||||
"""
|
||||
integration = integration_manager.get_integration_by_id(integration_id)
|
||||
|
||||
if not integration:
|
||||
return create_error_response("Integration not found", 404)
|
||||
|
||||
try:
|
||||
test_result = integration_manager.test_integration(integration_id)
|
||||
|
||||
logger.info(f"Tested integration {integration_id}: {test_result}")
|
||||
return create_success_response("Integration test completed", 200, test_result)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error testing integration {integration_id}: {str(e)}")
|
||||
return create_error_response("Failed to test integration", 500)
|
||||
|
||||
|
||||
@integrations_bp.route('/integrations/<int:integration_id>/trigger', methods=['POST'])
|
||||
@require_auth
|
||||
@handle_api_errors
|
||||
@validate_id_parameter('integration_id')
|
||||
@validate_json_input(
|
||||
optional_fields=['data'],
|
||||
field_types={'data': dict}
|
||||
)
|
||||
def trigger_integration(integration_id: int) -> Tuple[Any, int]:
|
||||
"""
|
||||
Manually trigger an integration.
|
||||
|
||||
Args:
|
||||
integration_id: Integration ID
|
||||
|
||||
Request Body:
|
||||
- data: Custom data to send with trigger (optional)
|
||||
|
||||
Returns:
|
||||
JSON response with trigger result
|
||||
"""
|
||||
integration = integration_manager.get_integration_by_id(integration_id)
|
||||
|
||||
if not integration:
|
||||
return create_error_response("Integration not found", 404)
|
||||
|
||||
if not integration['enabled']:
|
||||
return create_error_response("Integration is disabled", 400)
|
||||
|
||||
data = request.get_json() or {}
|
||||
trigger_data = data.get('data', {})
|
||||
|
||||
try:
|
||||
result = integration_manager.trigger_integration(integration_id, trigger_data)
|
||||
|
||||
logger.info(f"Triggered integration {integration_id}")
|
||||
return create_success_response("Integration triggered successfully", 200, result)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error triggering integration {integration_id}: {str(e)}")
|
||||
return create_error_response("Failed to trigger integration", 500)
|
||||
|
||||
|
||||
@integrations_bp.route('/integrations/<int:integration_id>/logs', methods=['GET'])
|
||||
@require_auth
|
||||
@handle_api_errors
|
||||
@validate_id_parameter('integration_id')
|
||||
@validate_query_params(
|
||||
allowed_params=['page', 'per_page', 'level'],
|
||||
param_types={'page': int, 'per_page': int}
|
||||
)
|
||||
@validate_pagination_params
|
||||
def get_integration_logs(integration_id: int) -> Tuple[Any, int]:
|
||||
"""
|
||||
Get integration execution logs.
|
||||
|
||||
Args:
|
||||
integration_id: Integration ID
|
||||
|
||||
Query Parameters:
|
||||
- page: Page number (default: 1)
|
||||
- per_page: Items per page (default: 50, max: 200)
|
||||
- level: Log level filter (optional)
|
||||
|
||||
Returns:
|
||||
JSON response with integration logs
|
||||
"""
|
||||
integration = integration_manager.get_integration_by_id(integration_id)
|
||||
|
||||
if not integration:
|
||||
return create_error_response("Integration not found", 404)
|
||||
|
||||
page = request.args.get('page', 1, type=int)
|
||||
per_page = min(request.args.get('per_page', 50, type=int), 200)
|
||||
level = request.args.get('level')
|
||||
|
||||
offset = (page - 1) * per_page
|
||||
|
||||
try:
|
||||
logs = integration_manager.get_integration_logs(
|
||||
integration_id,
|
||||
offset=offset,
|
||||
limit=per_page,
|
||||
level=level
|
||||
)
|
||||
|
||||
# For pagination, we'd need a count method
|
||||
total = len(logs) # Simplified for this example
|
||||
|
||||
return create_paginated_response(
|
||||
logs,
|
||||
page,
|
||||
per_page,
|
||||
total,
|
||||
endpoint=f'/api/v1/integrations/{integration_id}/logs'
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting integration logs for {integration_id}: {str(e)}")
|
||||
return create_error_response("Failed to get integration logs", 500)
|
||||
|
||||
|
||||
@integrations_bp.route('/webhooks', methods=['GET'])
|
||||
@require_auth
|
||||
@handle_api_errors
|
||||
@validate_query_params(
|
||||
allowed_params=['page', 'per_page', 'event', 'status'],
|
||||
param_types={'page': int, 'per_page': int}
|
||||
)
|
||||
@validate_pagination_params
|
||||
def list_webhooks() -> Tuple[Any, int]:
|
||||
"""
|
||||
List webhooks with pagination and filtering.
|
||||
|
||||
Query Parameters:
|
||||
- page: Page number (default: 1)
|
||||
- per_page: Items per page (default: 20, max: 100)
|
||||
- event: Filter by event type
|
||||
- status: Filter by webhook status
|
||||
|
||||
Returns:
|
||||
JSON response with paginated webhook list
|
||||
"""
|
||||
page = request.args.get('page', 1, type=int)
|
||||
per_page = min(request.args.get('per_page', 20, type=int), 100)
|
||||
event = request.args.get('event')
|
||||
status = request.args.get('status')
|
||||
|
||||
offset = (page - 1) * per_page
|
||||
|
||||
# Get webhooks
|
||||
webhooks = webhook_manager.get_all_webhooks(
|
||||
offset=offset,
|
||||
limit=per_page,
|
||||
event=event,
|
||||
status=status
|
||||
)
|
||||
|
||||
# Get total count
|
||||
total = webhook_manager.get_webhooks_count(
|
||||
event=event,
|
||||
status=status
|
||||
)
|
||||
|
||||
return create_paginated_response(
|
||||
webhooks,
|
||||
page,
|
||||
per_page,
|
||||
total,
|
||||
endpoint='/api/v1/webhooks'
|
||||
)
|
||||
|
||||
|
||||
@integrations_bp.route('/webhooks', methods=['POST'])
|
||||
@require_auth
|
||||
@handle_api_errors
|
||||
@validate_json_input(
|
||||
required_fields=['url', 'events'],
|
||||
optional_fields=['name', 'secret', 'enabled', 'retry_config'],
|
||||
field_types={'url': str, 'events': list, 'name': str, 'secret': str, 'enabled': bool, 'retry_config': dict}
|
||||
)
|
||||
def create_webhook() -> Tuple[Any, int]:
|
||||
"""
|
||||
Create a new webhook.
|
||||
|
||||
Request Body:
|
||||
- url: Webhook URL (required)
|
||||
- events: List of events to subscribe to (required)
|
||||
- name: Webhook name (optional)
|
||||
- secret: Webhook secret for signature verification (optional)
|
||||
- enabled: Whether webhook is enabled (optional, default: true)
|
||||
- retry_config: Retry configuration (optional)
|
||||
|
||||
Returns:
|
||||
JSON response with created webhook data
|
||||
"""
|
||||
data = request.get_json()
|
||||
|
||||
# Validate URL
|
||||
if not is_valid_url(data['url']):
|
||||
return create_error_response("Invalid webhook URL", 400)
|
||||
|
||||
# Validate events
|
||||
allowed_events = [
|
||||
'anime.created', 'anime.updated', 'anime.deleted',
|
||||
'episode.created', 'episode.updated', 'episode.deleted',
|
||||
'download.started', 'download.completed', 'download.failed',
|
||||
'backup.created', 'backup.restored', 'system.error'
|
||||
]
|
||||
|
||||
invalid_events = [event for event in data['events'] if event not in allowed_events]
|
||||
if invalid_events:
|
||||
return create_error_response(f"Invalid events: {', '.join(invalid_events)}", 400)
|
||||
|
||||
try:
|
||||
# Create webhook
|
||||
webhook_id = webhook_manager.create_webhook(
|
||||
url=data['url'],
|
||||
events=data['events'],
|
||||
name=data.get('name', ''),
|
||||
secret=data.get('secret', ''),
|
||||
enabled=data.get('enabled', True),
|
||||
retry_config=data.get('retry_config', {})
|
||||
)
|
||||
|
||||
# Get created webhook
|
||||
webhook = webhook_manager.get_webhook_by_id(webhook_id)
|
||||
|
||||
logger.info(f"Created webhook {webhook_id}: {data['url']}")
|
||||
return create_success_response("Webhook created successfully", 201, webhook)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error creating webhook: {str(e)}")
|
||||
return create_error_response("Failed to create webhook", 500)
|
||||
|
||||
|
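# Receiver-side sketch: when a webhook is created with a `secret`, deliveries can be
# authenticated with an HMAC signature. The signing scheme below (hex-encoded HMAC-SHA256
# of the raw request body) is an assumption -- this module does not show how WebhookManager
# signs deliveries -- but hmac and hashlib are already imported above, so a compatible
# verification helper could look like this:
#
#   def verify_webhook_signature(raw_body: bytes, received_signature: str, secret: str) -> bool:
#       expected = hmac.new(secret.encode(), raw_body, hashlib.sha256).hexdigest()
#       return hmac.compare_digest(expected, received_signature)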
||||
@integrations_bp.route('/webhooks/<int:webhook_id>/test', methods=['POST'])
|
||||
@require_auth
|
||||
@handle_api_errors
|
||||
@validate_id_parameter('webhook_id')
|
||||
def test_webhook(webhook_id: int) -> Tuple[Any, int]:
|
||||
"""
|
||||
Test a webhook.
|
||||
|
||||
Args:
|
||||
webhook_id: Webhook ID
|
||||
|
||||
Returns:
|
||||
JSON response with test result
|
||||
"""
|
||||
webhook = webhook_manager.get_webhook_by_id(webhook_id)
|
||||
|
||||
if not webhook:
|
||||
return create_error_response("Webhook not found", 404)
|
||||
|
||||
try:
|
||||
test_result = webhook_manager.test_webhook(webhook_id)
|
||||
|
||||
logger.info(f"Tested webhook {webhook_id}: {test_result}")
|
||||
return create_success_response("Webhook test completed", 200, test_result)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error testing webhook {webhook_id}: {str(e)}")
|
||||
return create_error_response("Failed to test webhook", 500)
|
||||
|
||||
|
||||
@integrations_bp.route('/api-keys/external', methods=['GET'])
|
||||
@require_auth
|
||||
@handle_api_errors
|
||||
@validate_pagination_params
|
||||
def list_external_api_keys() -> Tuple[Any, int]:
|
||||
"""
|
||||
List external API keys.
|
||||
|
||||
Returns:
|
||||
JSON response with external API keys
|
||||
"""
|
||||
try:
|
||||
api_keys = api_key_manager.get_external_api_keys()
|
||||
|
||||
return create_success_response("External API keys retrieved successfully", 200, api_keys)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting external API keys: {str(e)}")
|
||||
return create_error_response("Failed to get external API keys", 500)
|
||||
|
||||
|
||||
@integrations_bp.route('/api-keys/external', methods=['POST'])
|
||||
@require_auth
|
||||
@handle_api_errors
|
||||
@validate_json_input(
|
||||
required_fields=['service', 'key'],
|
||||
optional_fields=['name', 'description'],
|
||||
field_types={'service': str, 'key': str, 'name': str, 'description': str}
|
||||
)
|
||||
def create_external_api_key() -> Tuple[Any, int]:
|
||||
"""
|
||||
Store external API key.
|
||||
|
||||
Request Body:
|
||||
- service: Service name (required)
|
||||
- key: API key value (required)
|
||||
- name: Key name (optional)
|
||||
- description: Key description (optional)
|
||||
|
||||
Returns:
|
||||
JSON response with created API key data
|
||||
"""
|
||||
data = request.get_json()
|
||||
|
||||
try:
|
||||
# Create external API key
|
||||
key_id = api_key_manager.create_external_api_key(
|
||||
service=data['service'],
|
||||
key=data['key'],
|
||||
name=data.get('name', ''),
|
||||
description=data.get('description', '')
|
||||
)
|
||||
|
||||
# Get created key (without exposing the actual key)
|
||||
api_key = api_key_manager.get_external_api_key_by_id(key_id)
|
||||
|
||||
logger.info(f"Created external API key {key_id} for service: {data['service']}")
|
||||
return create_success_response("External API key created successfully", 201, api_key)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error creating external API key: {str(e)}")
|
||||
return create_error_response("Failed to create external API key", 500)
|
||||
|
||||
|
||||
def _validate_integration_config(integration_type: str, config: Dict[str, Any]) -> List[str]:
|
||||
"""
|
||||
Validate integration configuration based on type.
|
||||
|
||||
Args:
|
||||
integration_type: Type of integration
|
||||
config: Configuration dictionary
|
||||
|
||||
Returns:
|
||||
List of validation errors (empty if valid)
|
||||
"""
|
||||
errors = []
|
||||
|
||||
if integration_type == 'webhook':
|
||||
if 'url' not in config:
|
||||
errors.append("Webhook URL is required")
|
||||
elif not is_valid_url(config['url']):
|
||||
errors.append("Invalid webhook URL")
|
||||
|
||||
elif integration_type == 'discord':
|
||||
if 'webhook_url' not in config:
|
||||
errors.append("Discord webhook URL is required")
|
||||
elif not config['webhook_url'].startswith('https://discord.com/api/webhooks/'):
|
||||
errors.append("Invalid Discord webhook URL")
|
||||
|
||||
elif integration_type == 'slack':
|
||||
if 'webhook_url' not in config:
|
||||
errors.append("Slack webhook URL is required")
|
||||
elif not config['webhook_url'].startswith('https://hooks.slack.com/'):
|
||||
errors.append("Invalid Slack webhook URL")
|
||||
|
||||
elif integration_type == 'email':
|
||||
required_fields = ['smtp_host', 'smtp_port', 'from_email']
|
||||
for field in required_fields:
|
||||
if field not in config:
|
||||
errors.append(f"{field} is required for email integration")
|
||||
|
||||
elif integration_type == 'api':
|
||||
if 'base_url' not in config:
|
||||
errors.append("Base URL is required for API integration")
|
||||
elif not is_valid_url(config['base_url']):
|
||||
errors.append("Invalid API base URL")
|
||||
|
||||
return errors
|
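# Usage example for the validator above (derived directly from its rules):
#
#   _validate_integration_config('discord', {'webhook_url': 'https://example.com/x'})
#   # -> ['Invalid Discord webhook URL']
#   _validate_integration_config('discord',
#                                {'webhook_url': 'https://discord.com/api/webhooks/123/abc'})
#   # -> []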
||||
@@ -1,268 +0,0 @@
|
||||
"""
|
||||
API endpoints for logging configuration and management.
|
||||
"""
|
||||
|
||||
from flask import Blueprint, jsonify, request, send_file
|
||||
from web.controllers.auth_controller import require_auth
|
||||
from config import config
|
||||
import logging
|
||||
import os
|
||||
from datetime import datetime
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
logging_bp = Blueprint('logging', __name__, url_prefix='/api/logging')
|
||||
|
||||
@logging_bp.route('/config', methods=['GET'])
|
||||
@require_auth
|
||||
def get_logging_config():
|
||||
"""Get current logging configuration."""
|
||||
try:
|
||||
# Import here to avoid circular imports
|
||||
from src.infrastructure.logging.GlobalLogger import error_logger
|
||||
|
||||
config_data = {
|
||||
'log_level': config.log_level,
|
||||
'enable_console_logging': config.enable_console_logging,
|
||||
'enable_console_progress': config.enable_console_progress,
|
||||
'enable_fail2ban_logging': config.enable_fail2ban_logging,
|
||||
'log_files': [
|
||||
'./logs/aniworld.log',
|
||||
'./logs/auth_failures.log',
|
||||
'./logs/downloads.log'
|
||||
]
|
||||
}
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'config': config_data
|
||||
})
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting logging config: {e}")
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}), 500
|
||||
|
||||
@logging_bp.route('/config', methods=['POST'])
|
||||
@require_auth
|
||||
def update_logging_config():
|
||||
"""Update logging configuration."""
|
||||
try:
|
||||
data = request.get_json() or {}
|
||||
|
||||
# Update log level
|
||||
log_level = data.get('log_level', config.log_level)
|
||||
if log_level in ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']:
|
||||
config.log_level = log_level
|
||||
|
||||
# Update console logging settings
|
||||
if 'enable_console_logging' in data:
|
||||
config.enable_console_logging = bool(data['enable_console_logging'])
|
||||
|
||||
if 'enable_console_progress' in data:
|
||||
config.enable_console_progress = bool(data['enable_console_progress'])
|
||||
|
||||
if 'enable_fail2ban_logging' in data:
|
||||
config.enable_fail2ban_logging = bool(data['enable_fail2ban_logging'])
|
||||
|
||||
# Save configuration
|
||||
config.save_config()
|
||||
|
||||
# Update runtime logging level
|
||||
try:
|
||||
from src.infrastructure.logging.GlobalLogger import error_logger
|
||||
# Use standard logging level update
|
||||
numeric_level = getattr(logging, config.log_level.upper(), logging.INFO)
|
||||
logging.getLogger().setLevel(numeric_level)
|
||||
except ImportError:
|
||||
# Fallback for basic logging
|
||||
numeric_level = getattr(logging, config.log_level.upper(), logging.INFO)
|
||||
logging.getLogger().setLevel(numeric_level)
|
||||
|
||||
logger.info(f"Logging configuration updated: level={config.log_level}, console={config.enable_console_logging}")
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'message': 'Logging configuration updated successfully',
|
||||
'config': {
|
||||
'log_level': config.log_level,
|
||||
'enable_console_logging': config.enable_console_logging,
|
||||
'enable_console_progress': config.enable_console_progress,
|
||||
'enable_fail2ban_logging': config.enable_fail2ban_logging
|
||||
}
|
||||
})
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error updating logging config: {e}")
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}), 500
|
||||
|
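# Example client call (illustrative; host and port are assumptions, and the request must
# carry whatever credentials require_auth expects):
#
#   import requests
#   resp = requests.post(
#       "http://127.0.0.1:5000/api/logging/config",
#       json={"log_level": "DEBUG", "enable_console_logging": True},
#   )
#   print(resp.json()["config"]["log_level"])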
||||
@logging_bp.route('/files', methods=['GET'])
|
||||
@require_auth
|
||||
def list_log_files():
|
||||
"""Get list of available log files."""
|
||||
try:
|
||||
from src.infrastructure.logging.GlobalLogger import error_logger
|
||||
# Return basic log files
|
||||
log_files = [
|
||||
'./logs/aniworld.log',
|
||||
'./logs/auth_failures.log',
|
||||
'./logs/downloads.log'
|
||||
]
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'files': log_files
|
||||
})
|
||||
except Exception as e:
|
||||
logger.error(f"Error listing log files: {e}")
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}), 500
|
||||
|
||||
@logging_bp.route('/files/<filename>/download', methods=['GET'])
|
||||
@require_auth
|
||||
def download_log_file(filename):
|
||||
"""Download a specific log file."""
|
||||
try:
|
||||
# Security: Only allow log files
|
||||
if not filename.endswith('.log'):
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': 'Invalid file type'
|
||||
}), 400
|
||||
|
||||
log_directory = "logs"
|
||||
file_path = os.path.join(log_directory, filename)
|
||||
|
||||
# Security: Check if file exists and is within log directory
|
||||
if not os.path.exists(file_path) or not os.path.abspath(file_path).startswith(os.path.abspath(log_directory) + os.sep):
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': 'File not found'
|
||||
}), 404
|
||||
|
||||
return send_file(
|
||||
file_path,
|
||||
as_attachment=True,
|
||||
download_name=f"{filename}_{datetime.now().strftime('%Y%m%d_%H%M%S')}"
|
||||
)
|
||||
except Exception as e:
|
||||
logger.error(f"Error downloading log file {filename}: {e}")
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}), 500
|
||||
|
||||
@logging_bp.route('/files/<filename>/tail', methods=['GET'])
|
||||
@require_auth
|
||||
def tail_log_file(filename):
|
||||
"""Get the last N lines from a log file."""
|
||||
try:
|
||||
# Security: Only allow log files
|
||||
if not filename.endswith('.log'):
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': 'Invalid file type'
|
||||
}), 400
|
||||
|
||||
lines = int(request.args.get('lines', 100))
|
||||
lines = min(lines, 1000) # Limit to 1000 lines max
|
||||
|
||||
log_directory = "logs"
|
||||
file_path = os.path.join(log_directory, filename)
|
||||
|
||||
# Security: Check if file exists and is within log directory
|
||||
if not os.path.exists(file_path) or not os.path.abspath(file_path).startswith(os.path.abspath(log_directory) + os.sep):
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': 'File not found'
|
||||
}), 404
|
||||
|
||||
# Read last N lines
|
||||
with open(file_path, 'r', encoding='utf-8') as f:
|
||||
all_lines = f.readlines()
|
||||
tail_lines = all_lines[-lines:] if len(all_lines) > lines else all_lines
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'lines': [line.rstrip('\n\r') for line in tail_lines],
|
||||
'total_lines': len(all_lines),
|
||||
'showing_lines': len(tail_lines)
|
||||
})
|
||||
except Exception as e:
|
||||
logger.error(f"Error tailing log file {filename}: {e}")
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}), 500
|
||||
|
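# The implementation above reads the whole file into memory before slicing, which can be
# expensive for large logs. A bounded alternative streams the file through a fixed-size
# deque so memory stays proportional to the number of requested lines (total_lines would
# then need a separate pass). Sketch, assuming UTF-8 log files:
#
#   from collections import deque
#
#   def tail_lines(path, n):
#       with open(path, 'r', encoding='utf-8', errors='replace') as fh:
#           return [line.rstrip('\n\r') for line in deque(fh, maxlen=n)]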
||||
@logging_bp.route('/cleanup', methods=['POST'])
|
||||
@require_auth
|
||||
def cleanup_logs():
|
||||
"""Clean up old log files."""
|
||||
try:
|
||||
data = request.get_json() or {}
|
||||
days = int(data.get('days', 30))
|
||||
days = max(1, min(days, 365)) # Limit between 1-365 days
|
||||
|
||||
from src.infrastructure.logging.GlobalLogger import error_logger
|
||||
# Since we don't have log_config.cleanup_old_logs(), simulate the cleanup
|
||||
cleaned_files = [] # Would implement actual cleanup logic here
|
||||
|
||||
logger.info(f"Cleaned up {len(cleaned_files)} old log files (older than {days} days)")
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'message': f'Cleaned up {len(cleaned_files)} log files',
|
||||
'cleaned_files': cleaned_files
|
||||
})
|
||||
except Exception as e:
|
||||
logger.error(f"Error cleaning up logs: {e}")
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}), 500
|
||||
|
||||
@logging_bp.route('/test', methods=['POST'])
|
||||
@require_auth
|
||||
def test_logging():
|
||||
"""Test logging at different levels."""
|
||||
try:
|
||||
test_message = "Test log message from web interface"
|
||||
|
||||
# Test different log levels
|
||||
logger.debug(f"DEBUG: {test_message}")
|
||||
logger.info(f"INFO: {test_message}")
|
||||
logger.warning(f"WARNING: {test_message}")
|
||||
logger.error(f"ERROR: {test_message}")
|
||||
|
||||
# Test fail2ban logging
|
||||
try:
|
||||
from src.infrastructure.logging.GlobalLogger import error_logger
|
||||
# log_auth_failure would be implemented here
|
||||
pass
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
# Test download progress logging
|
||||
try:
|
||||
from src.infrastructure.logging.GlobalLogger import error_logger
|
||||
# log_download_progress would be implemented here
|
||||
pass
|
||||
except ImportError:
|
||||
pass
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'message': 'Test messages logged successfully'
|
||||
})
|
||||
except Exception as e:
|
||||
logger.error(f"Error testing logging: {e}")
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}), 500
|
||||
@@ -1,656 +0,0 @@
|
||||
"""
|
||||
Maintenance API endpoints.
|
||||
|
||||
This module handles all system maintenance operations including:
|
||||
- Database maintenance
|
||||
- System optimization
|
||||
- Cleanup operations
|
||||
- Scheduled maintenance tasks
|
||||
"""
|
||||
|
||||
from flask import Blueprint, request, jsonify
|
||||
from typing import Dict, List, Any, Optional, Tuple
|
||||
import logging
|
||||
import os
|
||||
import time
|
||||
import sqlite3
|
||||
from datetime import datetime, timedelta
|
||||
|
||||
# Import shared utilities
|
||||
try:
|
||||
from src.server.web.controllers.shared.auth_decorators import require_auth
|
||||
from src.server.web.controllers.shared.error_handlers import handle_api_errors
|
||||
from src.server.web.controllers.shared.validators import validate_json_input, validate_query_params
|
||||
from src.server.web.controllers.shared.response_helpers import (
|
||||
create_success_response, create_error_response, format_file_size, format_datetime
|
||||
)
|
||||
except ImportError:
|
||||
# Fallback imports for development
|
||||
def require_auth(f): return f
|
||||
def handle_api_errors(f): return f
|
||||
def validate_json_input(**kwargs): return lambda f: f
|
||||
def validate_query_params(**kwargs): return lambda f: f
|
||||
def create_success_response(msg, code=200, data=None): return jsonify({'success': True, 'message': msg, 'data': data}), code
|
||||
def create_error_response(msg, code=400, details=None): return jsonify({'error': msg, 'details': details}), code
|
||||
def format_file_size(size): return f"{size} bytes"
|
||||
def format_datetime(dt): return str(dt) if dt else None
|
||||
|
||||
# Import maintenance components
|
||||
try:
|
||||
from src.server.data.database_manager import DatabaseManager
|
||||
from src.server.data.cleanup_manager import CleanupManager
|
||||
from src.server.data.scheduler_manager import SchedulerManager
|
||||
except ImportError:
|
||||
# Fallback for development
|
||||
class DatabaseManager:
|
||||
def vacuum_database(self): return {'size_before': 1000000, 'size_after': 800000, 'time_taken': 5.2}
|
||||
def analyze_database(self): return {'tables_analyzed': 10, 'time_taken': 2.1}
|
||||
def integrity_check(self): return {'status': 'ok', 'errors': [], 'warnings': []}
|
||||
def reindex_database(self): return {'indexes_rebuilt': 15, 'time_taken': 3.5}
|
||||
def get_database_stats(self): return {'size': 10000000, 'tables': 10, 'indexes': 15}
|
||||
def optimize_database(self): return {'optimizations': ['vacuum', 'analyze', 'reindex'], 'time_taken': 10.7}
|
||||
def backup_database(self, path): return {'backup_file': path, 'size': 5000000}
|
||||
def get_slow_queries(self, **kwargs): return []
|
||||
|
||||
class CleanupManager:
|
||||
def cleanup_temp_files(self): return {'files_deleted': 50, 'space_freed': 1048576}
|
||||
def cleanup_logs(self, **kwargs): return {'logs_deleted': 100, 'space_freed': 2097152}
|
||||
def cleanup_downloads(self, **kwargs): return {'downloads_cleaned': 25, 'space_freed': 5242880}
|
||||
def cleanup_cache(self): return {'cache_cleared': True, 'space_freed': 10485760}
|
||||
def cleanup_old_backups(self, **kwargs): return {'backups_deleted': 5, 'space_freed': 52428800}
|
||||
def get_cleanup_stats(self): return {'temp_files': 100, 'log_files': 200, 'cache_size': 50000000}
|
||||
|
||||
class SchedulerManager:
|
||||
def get_scheduled_tasks(self): return []
|
||||
def create_scheduled_task(self, **kwargs): return 1
|
||||
def update_scheduled_task(self, id, **kwargs): return True
|
||||
def delete_scheduled_task(self, id): return True
|
||||
def get_task_history(self, **kwargs): return []
|
||||
|
||||
# Create blueprint
|
||||
maintenance_bp = Blueprint('maintenance', __name__)
|
||||
|
||||
# Initialize managers
|
||||
database_manager = DatabaseManager()
|
||||
cleanup_manager = CleanupManager()
|
||||
scheduler_manager = SchedulerManager()
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
@maintenance_bp.route('/maintenance/database/vacuum', methods=['POST'])
|
||||
@require_auth
|
||||
@handle_api_errors
|
||||
def vacuum_database() -> Tuple[Any, int]:
|
||||
"""
|
||||
Vacuum the database to reclaim space and optimize performance.
|
||||
|
||||
Returns:
|
||||
JSON response with vacuum operation results
|
||||
"""
|
||||
try:
|
||||
logger.info("Starting database vacuum operation")
|
||||
start_time = time.time()
|
||||
|
||||
result = database_manager.vacuum_database()
|
||||
|
||||
operation_time = time.time() - start_time
|
||||
result['operation_time'] = round(operation_time, 2)
|
||||
|
||||
space_saved = result.get('size_before', 0) - result.get('size_after', 0)
|
||||
result['space_saved'] = format_file_size(space_saved)
|
||||
|
||||
logger.info(f"Database vacuum completed in {operation_time:.2f} seconds, saved {space_saved} bytes")
|
||||
return create_success_response("Database vacuum completed successfully", 200, result)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error during database vacuum: {str(e)}")
|
||||
return create_error_response("Database vacuum failed", 500)
|
||||
|
||||
|
||||
@maintenance_bp.route('/maintenance/database/analyze', methods=['POST'])
|
||||
@require_auth
|
||||
@handle_api_errors
|
||||
def analyze_database() -> Tuple[Any, int]:
|
||||
"""
|
||||
Analyze the database to update query planner statistics.
|
||||
|
||||
Returns:
|
||||
JSON response with analyze operation results
|
||||
"""
|
||||
try:
|
||||
logger.info("Starting database analyze operation")
|
||||
start_time = time.time()
|
||||
|
||||
result = database_manager.analyze_database()
|
||||
|
||||
operation_time = time.time() - start_time
|
||||
result['operation_time'] = round(operation_time, 2)
|
||||
|
||||
logger.info(f"Database analyze completed in {operation_time:.2f} seconds")
|
||||
return create_success_response("Database analyze completed successfully", 200, result)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error during database analyze: {str(e)}")
|
||||
return create_error_response("Database analyze failed", 500)
|
||||
|
||||
|
||||
@maintenance_bp.route('/maintenance/database/integrity-check', methods=['POST'])
|
||||
@require_auth
|
||||
@handle_api_errors
|
||||
def integrity_check() -> Tuple[Any, int]:
|
||||
"""
|
||||
Perform database integrity check.
|
||||
|
||||
Returns:
|
||||
JSON response with integrity check results
|
||||
"""
|
||||
try:
|
||||
logger.info("Starting database integrity check")
|
||||
start_time = time.time()
|
||||
|
||||
result = database_manager.integrity_check()
|
||||
|
||||
operation_time = time.time() - start_time
|
||||
result['operation_time'] = round(operation_time, 2)
|
||||
result['timestamp'] = datetime.now().isoformat()
|
||||
|
||||
if result['status'] == 'ok':
|
||||
logger.info(f"Database integrity check passed in {operation_time:.2f} seconds")
|
||||
return create_success_response("Database integrity check passed", 200, result)
|
||||
else:
|
||||
logger.warning(f"Database integrity check found issues: {result['errors']}")
|
||||
return create_success_response("Database integrity check completed with issues", 200, result)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error during database integrity check: {str(e)}")
|
||||
return create_error_response("Database integrity check failed", 500)
|
||||
|
||||
|
||||
@maintenance_bp.route('/maintenance/database/reindex', methods=['POST'])
|
||||
@require_auth
|
||||
@handle_api_errors
|
||||
def reindex_database() -> Tuple[Any, int]:
|
||||
"""
|
||||
Rebuild database indexes for optimal performance.
|
||||
|
||||
Returns:
|
||||
JSON response with reindex operation results
|
||||
"""
|
||||
try:
|
||||
logger.info("Starting database reindex operation")
|
||||
start_time = time.time()
|
||||
|
||||
result = database_manager.reindex_database()
|
||||
|
||||
operation_time = time.time() - start_time
|
||||
result['operation_time'] = round(operation_time, 2)
|
||||
|
||||
logger.info(f"Database reindex completed in {operation_time:.2f} seconds, rebuilt {result.get('indexes_rebuilt', 0)} indexes")
|
||||
return create_success_response("Database reindex completed successfully", 200, result)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error during database reindex: {str(e)}")
|
||||
return create_error_response("Database reindex failed", 500)
|
||||
|
||||
|
||||
@maintenance_bp.route('/maintenance/database/optimize', methods=['POST'])
|
||||
@require_auth
|
||||
@handle_api_errors
|
||||
@validate_json_input(
|
||||
optional_fields=['operations', 'force'],
|
||||
field_types={'operations': list, 'force': bool}
|
||||
)
|
||||
def optimize_database() -> Tuple[Any, int]:
|
||||
"""
|
||||
Perform comprehensive database optimization.
|
||||
|
||||
Request Body:
|
||||
- operations: List of operations to perform (optional, default: all)
|
||||
- force: Force optimization even if recently performed (optional, default: false)
|
||||
|
||||
Returns:
|
||||
JSON response with optimization results
|
||||
"""
|
||||
data = request.get_json() or {}
|
||||
operations = data.get('operations', ['vacuum', 'analyze', 'reindex'])
|
||||
force = data.get('force', False)
|
||||
|
||||
# Validate operations
|
||||
allowed_operations = ['vacuum', 'analyze', 'reindex', 'integrity_check']
|
||||
invalid_operations = [op for op in operations if op not in allowed_operations]
|
||||
if invalid_operations:
|
||||
return create_error_response(f"Invalid operations: {', '.join(invalid_operations)}", 400)
|
||||
|
||||
try:
|
||||
logger.info(f"Starting database optimization with operations: {operations}")
|
||||
start_time = time.time()
|
||||
|
||||
result = database_manager.optimize_database(
|
||||
operations=operations,
|
||||
force=force
|
||||
)
|
||||
|
||||
operation_time = time.time() - start_time
|
||||
result['operation_time'] = round(operation_time, 2)
|
||||
result['timestamp'] = datetime.now().isoformat()
|
||||
|
||||
logger.info(f"Database optimization completed in {operation_time:.2f} seconds")
|
||||
return create_success_response("Database optimization completed successfully", 200, result)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error during database optimization: {str(e)}")
|
||||
return create_error_response("Database optimization failed", 500)
|
||||
|
||||
|
||||
@maintenance_bp.route('/maintenance/database/stats', methods=['GET'])
|
||||
@require_auth
|
||||
@handle_api_errors
|
||||
def get_database_stats() -> Tuple[Any, int]:
|
||||
"""
|
||||
Get database statistics and health information.
|
||||
|
||||
Returns:
|
||||
JSON response with database statistics
|
||||
"""
|
||||
try:
|
||||
stats = database_manager.get_database_stats()
|
||||
|
||||
# Add formatted values
|
||||
if 'size' in stats:
|
||||
stats['size_formatted'] = format_file_size(stats['size'])
|
||||
|
||||
# Add slow queries
|
||||
slow_queries = database_manager.get_slow_queries(limit=10)
|
||||
stats['slow_queries'] = slow_queries
|
||||
|
||||
return create_success_response("Database statistics retrieved successfully", 200, stats)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting database stats: {str(e)}")
|
||||
return create_error_response("Failed to get database statistics", 500)
|
||||
|
||||
|
||||
@maintenance_bp.route('/maintenance/cleanup/temp-files', methods=['POST'])
|
||||
@require_auth
|
||||
@handle_api_errors
|
||||
def cleanup_temp_files() -> Tuple[Any, int]:
|
||||
"""
|
||||
Clean up temporary files.
|
||||
|
||||
Returns:
|
||||
JSON response with cleanup results
|
||||
"""
|
||||
try:
|
||||
logger.info("Starting temporary files cleanup")
|
||||
|
||||
result = cleanup_manager.cleanup_temp_files()
|
||||
result['space_freed_formatted'] = format_file_size(result.get('space_freed', 0))
|
||||
result['timestamp'] = datetime.now().isoformat()
|
||||
|
||||
logger.info(f"Temporary files cleanup completed: {result['files_deleted']} files deleted, {result['space_freed']} bytes freed")
|
||||
return create_success_response("Temporary files cleanup completed", 200, result)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error during temp files cleanup: {str(e)}")
|
||||
return create_error_response("Temporary files cleanup failed", 500)
|
||||
|
||||
|
||||
@maintenance_bp.route('/maintenance/cleanup/logs', methods=['POST'])
|
||||
@require_auth
|
||||
@handle_api_errors
|
||||
@validate_json_input(
|
||||
optional_fields=['older_than_days', 'keep_recent'],
|
||||
field_types={'older_than_days': int, 'keep_recent': int}
|
||||
)
|
||||
def cleanup_logs() -> Tuple[Any, int]:
|
||||
"""
|
||||
Clean up old log files.
|
||||
|
||||
Request Body:
|
||||
- older_than_days: Delete logs older than this many days (optional, default: 30)
|
||||
- keep_recent: Number of recent log files to keep (optional, default: 10)
|
||||
|
||||
Returns:
|
||||
JSON response with cleanup results
|
||||
"""
|
||||
data = request.get_json() or {}
|
||||
older_than_days = data.get('older_than_days', 30)
|
||||
keep_recent = data.get('keep_recent', 10)
|
||||
|
||||
try:
|
||||
logger.info(f"Starting log cleanup: older than {older_than_days} days, keep {keep_recent} recent")
|
||||
|
||||
result = cleanup_manager.cleanup_logs(
|
||||
older_than_days=older_than_days,
|
||||
keep_recent=keep_recent
|
||||
)
|
||||
|
||||
result['space_freed_formatted'] = format_file_size(result.get('space_freed', 0))
|
||||
result['timestamp'] = datetime.now().isoformat()
|
||||
|
||||
logger.info(f"Log cleanup completed: {result['logs_deleted']} logs deleted, {result['space_freed']} bytes freed")
|
||||
return create_success_response("Log cleanup completed", 200, result)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error during log cleanup: {str(e)}")
|
||||
return create_error_response("Log cleanup failed", 500)
|
||||
|
||||
|
||||
@maintenance_bp.route('/maintenance/cleanup/downloads', methods=['POST'])
|
||||
@require_auth
|
||||
@handle_api_errors
|
||||
@validate_json_input(
|
||||
optional_fields=['remove_failed', 'remove_incomplete', 'older_than_days'],
|
||||
field_types={'remove_failed': bool, 'remove_incomplete': bool, 'older_than_days': int}
|
||||
)
|
||||
def cleanup_downloads() -> Tuple[Any, int]:
|
||||
"""
|
||||
Clean up download files and records.
|
||||
|
||||
Request Body:
|
||||
- remove_failed: Remove failed downloads (optional, default: true)
|
||||
- remove_incomplete: Remove incomplete downloads (optional, default: false)
|
||||
- older_than_days: Remove downloads older than this many days (optional)
|
||||
|
||||
Returns:
|
||||
JSON response with cleanup results
|
||||
"""
|
||||
data = request.get_json() or {}
|
||||
remove_failed = data.get('remove_failed', True)
|
||||
remove_incomplete = data.get('remove_incomplete', False)
|
||||
older_than_days = data.get('older_than_days')
|
||||
|
||||
try:
|
||||
logger.info(f"Starting download cleanup: failed={remove_failed}, incomplete={remove_incomplete}, older_than={older_than_days}")
|
||||
|
||||
result = cleanup_manager.cleanup_downloads(
|
||||
remove_failed=remove_failed,
|
||||
remove_incomplete=remove_incomplete,
|
||||
older_than_days=older_than_days
|
||||
)
|
||||
|
||||
result['space_freed_formatted'] = format_file_size(result.get('space_freed', 0))
|
||||
result['timestamp'] = datetime.now().isoformat()
|
||||
|
||||
logger.info(f"Download cleanup completed: {result['downloads_cleaned']} downloads cleaned, {result['space_freed']} bytes freed")
|
||||
return create_success_response("Download cleanup completed", 200, result)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error during download cleanup: {str(e)}")
|
||||
return create_error_response("Download cleanup failed", 500)
|
||||
|
||||
|
||||
@maintenance_bp.route('/maintenance/cleanup/cache', methods=['POST'])
|
||||
@require_auth
|
||||
@handle_api_errors
|
||||
def cleanup_cache() -> Tuple[Any, int]:
|
||||
"""
|
||||
Clear application cache.
|
||||
|
||||
Returns:
|
||||
JSON response with cleanup results
|
||||
"""
|
||||
try:
|
||||
logger.info("Starting cache cleanup")
|
||||
|
||||
result = cleanup_manager.cleanup_cache()
|
||||
result['space_freed_formatted'] = format_file_size(result.get('space_freed', 0))
|
||||
result['timestamp'] = datetime.now().isoformat()
|
||||
|
||||
logger.info(f"Cache cleanup completed: {result['space_freed']} bytes freed")
|
||||
return create_success_response("Cache cleanup completed", 200, result)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error during cache cleanup: {str(e)}")
|
||||
return create_error_response("Cache cleanup failed", 500)
|
||||
|
||||
|
||||
@maintenance_bp.route('/maintenance/cleanup/backups', methods=['POST'])
|
||||
@require_auth
|
||||
@handle_api_errors
|
||||
@validate_json_input(
|
||||
optional_fields=['keep_count', 'older_than_days'],
|
||||
field_types={'keep_count': int, 'older_than_days': int}
|
||||
)
|
||||
def cleanup_old_backups() -> Tuple[Any, int]:
|
||||
"""
|
||||
Clean up old backup files.
|
||||
|
||||
Request Body:
|
||||
- keep_count: Number of recent backups to keep (optional, default: 10)
|
||||
- older_than_days: Delete backups older than this many days (optional, default: 90)
|
||||
|
||||
Returns:
|
||||
JSON response with cleanup results
|
||||
"""
|
||||
data = request.get_json() or {}
|
||||
keep_count = data.get('keep_count', 10)
|
||||
older_than_days = data.get('older_than_days', 90)
|
||||
|
||||
try:
|
||||
logger.info(f"Starting backup cleanup: keep {keep_count} backups, older than {older_than_days} days")
|
||||
|
||||
result = cleanup_manager.cleanup_old_backups(
|
||||
keep_count=keep_count,
|
||||
older_than_days=older_than_days
|
||||
)
|
||||
|
||||
result['space_freed_formatted'] = format_file_size(result.get('space_freed', 0))
|
||||
result['timestamp'] = datetime.now().isoformat()
|
||||
|
||||
logger.info(f"Backup cleanup completed: {result['backups_deleted']} backups deleted, {result['space_freed']} bytes freed")
|
||||
return create_success_response("Backup cleanup completed", 200, result)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error during backup cleanup: {str(e)}")
|
||||
return create_error_response("Backup cleanup failed", 500)
|
||||
|
||||
|
||||
@maintenance_bp.route('/maintenance/cleanup/stats', methods=['GET'])
|
||||
@require_auth
|
||||
@handle_api_errors
|
||||
def get_cleanup_stats() -> Tuple[Any, int]:
|
||||
"""
|
||||
Get cleanup statistics and recommendations.
|
||||
|
||||
Returns:
|
||||
JSON response with cleanup statistics
|
||||
"""
|
||||
try:
|
||||
stats = cleanup_manager.get_cleanup_stats()
|
||||
|
||||
# Add formatted sizes
|
||||
for key in ['temp_files_size', 'log_files_size', 'cache_size', 'old_backups_size']:
|
||||
if key in stats:
|
||||
stats[f"{key}_formatted"] = format_file_size(stats[key])
|
||||
|
||||
# Add recommendations
|
||||
recommendations = []
|
||||
if stats.get('temp_files', 0) > 100:
|
||||
recommendations.append("Consider cleaning temporary files")
|
||||
if stats.get('log_files_size', 0) > 100 * 1024 * 1024: # 100MB
|
||||
recommendations.append("Consider cleaning old log files")
|
||||
if stats.get('cache_size', 0) > 500 * 1024 * 1024: # 500MB
|
||||
recommendations.append("Consider clearing cache")
|
||||
|
||||
stats['recommendations'] = recommendations
|
||||
|
||||
return create_success_response("Cleanup statistics retrieved successfully", 200, stats)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting cleanup stats: {str(e)}")
|
||||
return create_error_response("Failed to get cleanup statistics", 500)
|
||||
|
||||
|
||||
@maintenance_bp.route('/maintenance/scheduled-tasks', methods=['GET'])
|
||||
@require_auth
|
||||
@handle_api_errors
|
||||
def get_scheduled_tasks() -> Tuple[Any, int]:
|
||||
"""
|
||||
Get scheduled maintenance tasks.
|
||||
|
||||
Returns:
|
||||
JSON response with scheduled tasks
|
||||
"""
|
||||
try:
|
||||
tasks = scheduler_manager.get_scheduled_tasks()
|
||||
|
||||
return create_success_response("Scheduled tasks retrieved successfully", 200, tasks)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting scheduled tasks: {str(e)}")
|
||||
return create_error_response("Failed to get scheduled tasks", 500)
|
||||
|
||||
|
||||
@maintenance_bp.route('/maintenance/scheduled-tasks', methods=['POST'])
|
||||
@require_auth
|
||||
@handle_api_errors
|
||||
@validate_json_input(
|
||||
required_fields=['name', 'task_type', 'schedule'],
|
||||
optional_fields=['config', 'enabled'],
|
||||
field_types={'name': str, 'task_type': str, 'schedule': str, 'config': dict, 'enabled': bool}
|
||||
)
|
||||
def create_scheduled_task() -> Tuple[Any, int]:
|
||||
"""
|
||||
Create a new scheduled maintenance task.
|
||||
|
||||
Request Body:
|
||||
- name: Task name (required)
|
||||
- task_type: Type of task (required)
|
||||
- schedule: Cron-style schedule (required)
|
||||
- config: Task configuration (optional)
|
||||
- enabled: Whether task is enabled (optional, default: true)
|
||||
|
||||
Returns:
|
||||
JSON response with created task
|
||||
"""
|
||||
data = request.get_json()
|
||||
|
||||
# Validate task type
|
||||
allowed_task_types = [
|
||||
'database_vacuum', 'database_analyze', 'cleanup_temp_files',
|
||||
'cleanup_logs', 'cleanup_downloads', 'cleanup_cache', 'backup_database'
|
||||
]
|
||||
|
||||
if data['task_type'] not in allowed_task_types:
|
||||
return create_error_response(f"Invalid task type. Must be one of: {', '.join(allowed_task_types)}", 400)
|
||||
|
||||
try:
|
||||
task_id = scheduler_manager.create_scheduled_task(
|
||||
name=data['name'],
|
||||
task_type=data['task_type'],
|
||||
schedule=data['schedule'],
|
||||
config=data.get('config', {}),
|
||||
enabled=data.get('enabled', True)
|
||||
)
|
||||
|
||||
logger.info(f"Created scheduled task {task_id}: {data['name']} ({data['task_type']})")
|
||||
return create_success_response("Scheduled task created successfully", 201, {'id': task_id})
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error creating scheduled task: {str(e)}")
|
||||
return create_error_response("Failed to create scheduled task", 500)
|
||||
|
||||
|
||||
@maintenance_bp.route('/maintenance/scheduled-tasks/<int:task_id>', methods=['PUT'])
|
||||
@require_auth
|
||||
@handle_api_errors
|
||||
@validate_json_input(
|
||||
optional_fields=['name', 'schedule', 'config', 'enabled'],
|
||||
field_types={'name': str, 'schedule': str, 'config': dict, 'enabled': bool}
|
||||
)
|
||||
def update_scheduled_task(task_id: int) -> Tuple[Any, int]:
|
||||
"""
|
||||
Update a scheduled maintenance task.
|
||||
|
||||
Args:
|
||||
task_id: Task ID
|
||||
|
||||
Request Body:
|
||||
- name: Task name (optional)
|
||||
- schedule: Cron-style schedule (optional)
|
||||
- config: Task configuration (optional)
|
||||
- enabled: Whether task is enabled (optional)
|
||||
|
||||
Returns:
|
||||
JSON response with update result
|
||||
"""
|
||||
data = request.get_json()
|
||||
|
||||
try:
|
||||
success = scheduler_manager.update_scheduled_task(task_id, **data)
|
||||
|
||||
if success:
|
||||
logger.info(f"Updated scheduled task {task_id}")
|
||||
return create_success_response("Scheduled task updated successfully")
|
||||
else:
|
||||
return create_error_response("Scheduled task not found", 404)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error updating scheduled task {task_id}: {str(e)}")
|
||||
return create_error_response("Failed to update scheduled task", 500)
|
||||
|
||||
|
||||
@maintenance_bp.route('/maintenance/scheduled-tasks/<int:task_id>', methods=['DELETE'])
|
||||
@require_auth
|
||||
@handle_api_errors
|
||||
def delete_scheduled_task(task_id: int) -> Tuple[Any, int]:
|
||||
"""
|
||||
Delete a scheduled maintenance task.
|
||||
|
||||
Args:
|
||||
task_id: Task ID
|
||||
|
||||
Returns:
|
||||
JSON response with deletion result
|
||||
"""
|
||||
try:
|
||||
success = scheduler_manager.delete_scheduled_task(task_id)
|
||||
|
||||
if success:
|
||||
logger.info(f"Deleted scheduled task {task_id}")
|
||||
return create_success_response("Scheduled task deleted successfully")
|
||||
else:
|
||||
return create_error_response("Scheduled task not found", 404)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error deleting scheduled task {task_id}: {str(e)}")
|
||||
return create_error_response("Failed to delete scheduled task", 500)
|
||||
|
||||
|
||||
@maintenance_bp.route('/maintenance/history', methods=['GET'])
|
||||
@require_auth
|
||||
@handle_api_errors
|
||||
@validate_query_params(
|
||||
allowed_params=['task_type', 'days', 'limit'],
|
||||
param_types={'days': int, 'limit': int}
|
||||
)
|
||||
def get_maintenance_history() -> Tuple[Any, int]:
|
||||
"""
|
||||
Get maintenance task execution history.
|
||||
|
||||
Query Parameters:
|
||||
- task_type: Filter by task type (optional)
|
||||
- days: Number of days of history (optional, default: 30)
|
||||
- limit: Maximum number of records (optional, default: 100)
|
||||
|
||||
Returns:
|
||||
JSON response with maintenance history
|
||||
"""
|
||||
task_type = request.args.get('task_type')
|
||||
days = request.args.get('days', 30, type=int)
|
||||
limit = request.args.get('limit', 100, type=int)
|
||||
|
||||
try:
|
||||
history = scheduler_manager.get_task_history(
|
||||
task_type=task_type,
|
||||
days=days,
|
||||
limit=limit
|
||||
)
|
||||
|
||||
return create_success_response("Maintenance history retrieved successfully", 200, history)
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting maintenance history: {str(e)}")
|
||||
return create_error_response("Failed to get maintenance history", 500)
|
||||
@ -1,406 +0,0 @@
|
||||
"""
|
||||
Performance Optimization API Endpoints
|
||||
|
||||
This module provides REST API endpoints for performance monitoring
|
||||
and optimization features.
|
||||
"""
|
||||
|
||||
from flask import Blueprint, request, jsonify
|
||||
from auth import require_auth, optional_auth
|
||||
from error_handler import handle_api_errors, RetryableError
|
||||
from performance_optimizer import (
|
||||
speed_limiter, download_cache, memory_monitor,
|
||||
download_manager, resume_manager, DownloadTask
|
||||
)
|
||||
import uuid
|
||||
from datetime import datetime
|
||||
|
||||
|
||||
# Blueprint for performance optimization endpoints
|
||||
performance_bp = Blueprint('performance', __name__)
|
||||
|
||||
|
||||
@performance_bp.route('/api/performance/speed-limit', methods=['GET'])
|
||||
@handle_api_errors
|
||||
@optional_auth
|
||||
def get_speed_limit():
|
||||
"""Get current download speed limit."""
|
||||
try:
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'data': {
|
||||
'speed_limit_mbps': speed_limiter.max_speed_mbps,
|
||||
'current_speed_mbps': speed_limiter.get_current_speed()
|
||||
}
|
||||
})
|
||||
except Exception as e:
|
||||
raise RetryableError(f"Failed to get speed limit: {e}")
|
||||
|
||||
|
||||
@performance_bp.route('/api/performance/speed-limit', methods=['POST'])
|
||||
@handle_api_errors
|
||||
@require_auth
|
||||
def set_speed_limit():
|
||||
"""Set download speed limit."""
|
||||
try:
|
||||
data = request.get_json()
|
||||
speed_mbps = data.get('speed_mbps', 0)
|
||||
|
||||
if speed_mbps < 0:
|
||||
return jsonify({
|
||||
'status': 'error',
|
||||
'message': 'Speed limit must be non-negative (0 = unlimited)'
|
||||
}), 400
|
||||
|
||||
speed_limiter.set_speed_limit(speed_mbps)
|
||||
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'message': f'Speed limit set to {speed_mbps} MB/s' if speed_mbps > 0 else 'Speed limit removed',
|
||||
'data': {
|
||||
'speed_limit_mbps': speed_mbps
|
||||
}
|
||||
})
|
||||
except Exception as e:
|
||||
raise RetryableError(f"Failed to set speed limit: {e}")
|
||||
|
||||
|
||||
@performance_bp.route('/api/performance/cache/stats')
|
||||
@handle_api_errors
|
||||
@optional_auth
|
||||
def get_cache_stats():
|
||||
"""Get cache statistics."""
|
||||
try:
|
||||
stats = download_cache.get_stats()
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'data': stats
|
||||
})
|
||||
except Exception as e:
|
||||
raise RetryableError(f"Failed to get cache stats: {e}")
|
||||
|
||||
|
||||
@performance_bp.route('/api/performance/cache/clear', methods=['POST'])
|
||||
@handle_api_errors
|
||||
@require_auth
|
||||
def clear_cache():
|
||||
"""Clear download cache."""
|
||||
try:
|
||||
download_cache.clear()
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'message': 'Cache cleared successfully'
|
||||
})
|
||||
except Exception as e:
|
||||
raise RetryableError(f"Failed to clear cache: {e}")
|
||||
|
||||
|
||||
@performance_bp.route('/api/performance/memory/stats')
|
||||
@handle_api_errors
|
||||
@optional_auth
|
||||
def get_memory_stats():
|
||||
"""Get memory usage statistics."""
|
||||
try:
|
||||
stats = memory_monitor.get_memory_stats()
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'data': stats
|
||||
})
|
||||
except Exception as e:
|
||||
raise RetryableError(f"Failed to get memory stats: {e}")
|
||||
|
||||
|
||||
@performance_bp.route('/api/performance/memory/gc', methods=['POST'])
|
||||
@handle_api_errors
|
||||
@require_auth
|
||||
def force_garbage_collection():
|
||||
"""Force garbage collection to free memory."""
|
||||
try:
|
||||
memory_monitor.force_garbage_collection()
|
||||
stats = memory_monitor.get_memory_stats()
|
||||
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'message': 'Garbage collection completed',
|
||||
'data': stats
|
||||
})
|
||||
except Exception as e:
|
||||
raise RetryableError(f"Failed to force garbage collection: {e}")
|
||||
|
||||
|
||||
@performance_bp.route('/api/performance/downloads/workers', methods=['GET'])
|
||||
@handle_api_errors
|
||||
@optional_auth
|
||||
def get_worker_count():
|
||||
"""Get current number of download workers."""
|
||||
try:
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'data': {
|
||||
'max_workers': download_manager.max_workers,
|
||||
'active_tasks': len(download_manager.active_tasks)
|
||||
}
|
||||
})
|
||||
except Exception as e:
|
||||
raise RetryableError(f"Failed to get worker count: {e}")
|
||||
|
||||
|
||||
@performance_bp.route('/api/performance/downloads/workers', methods=['POST'])
|
||||
@handle_api_errors
|
||||
@require_auth
|
||||
def set_worker_count():
|
||||
"""Set number of download workers."""
|
||||
try:
|
||||
data = request.get_json()
|
||||
max_workers = data.get('max_workers', 3)
|
||||
|
||||
if not isinstance(max_workers, int) or max_workers < 1 or max_workers > 10:
|
||||
return jsonify({
|
||||
'status': 'error',
|
||||
'message': 'Worker count must be between 1 and 10'
|
||||
}), 400
|
||||
|
||||
download_manager.set_max_workers(max_workers)
|
||||
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'message': f'Worker count set to {max_workers}',
|
||||
'data': {
|
||||
'max_workers': max_workers
|
||||
}
|
||||
})
|
||||
except Exception as e:
|
||||
raise RetryableError(f"Failed to set worker count: {e}")
|
||||
|
||||
|
||||
@performance_bp.route('/api/performance/downloads/stats')
|
||||
@handle_api_errors
|
||||
@optional_auth
|
||||
def get_download_stats():
|
||||
"""Get download manager statistics."""
|
||||
try:
|
||||
stats = download_manager.get_statistics()
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'data': stats
|
||||
})
|
||||
except Exception as e:
|
||||
raise RetryableError(f"Failed to get download stats: {e}")
|
||||
|
||||
|
||||
@performance_bp.route('/api/performance/downloads/tasks')
|
||||
@handle_api_errors
|
||||
@optional_auth
|
||||
def get_all_download_tasks():
|
||||
"""Get all download tasks."""
|
||||
try:
|
||||
tasks = download_manager.get_all_tasks()
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'data': tasks
|
||||
})
|
||||
except Exception as e:
|
||||
raise RetryableError(f"Failed to get download tasks: {e}")
|
||||
|
||||
|
||||
@performance_bp.route('/api/performance/downloads/tasks/<task_id>')
|
||||
@handle_api_errors
|
||||
@optional_auth
|
||||
def get_download_task(task_id):
|
||||
"""Get specific download task status."""
|
||||
try:
|
||||
task_status = download_manager.get_task_status(task_id)
|
||||
|
||||
if not task_status:
|
||||
return jsonify({
|
||||
'status': 'error',
|
||||
'message': 'Task not found'
|
||||
}), 404
|
||||
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'data': task_status
|
||||
})
|
||||
except Exception as e:
|
||||
raise RetryableError(f"Failed to get task status: {e}")
|
||||
|
||||
|
||||
@performance_bp.route('/api/performance/downloads/add-task', methods=['POST'])
|
||||
@handle_api_errors
|
||||
@require_auth
|
||||
def add_download_task():
|
||||
"""Add a new download task to the queue."""
|
||||
try:
|
||||
data = request.get_json()
|
||||
|
||||
required_fields = ['serie_name', 'season', 'episode', 'key', 'output_path', 'temp_path']
|
||||
for field in required_fields:
|
||||
if field not in data:
|
||||
return jsonify({
|
||||
'status': 'error',
|
||||
'message': f'Missing required field: {field}'
|
||||
}), 400
|
||||
|
||||
# Create download task
|
||||
task = DownloadTask(
|
||||
task_id=str(uuid.uuid4()),
|
||||
serie_name=data['serie_name'],
|
||||
season=int(data['season']),
|
||||
episode=int(data['episode']),
|
||||
key=data['key'],
|
||||
language=data.get('language', 'German Dub'),
|
||||
output_path=data['output_path'],
|
||||
temp_path=data['temp_path'],
|
||||
priority=data.get('priority', 0)
|
||||
)
|
||||
|
||||
task_id = download_manager.add_task(task)
|
||||
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'message': 'Download task added successfully',
|
||||
'data': {
|
||||
'task_id': task_id
|
||||
}
|
||||
})
|
||||
except Exception as e:
|
||||
raise RetryableError(f"Failed to add download task: {e}")
|
||||
|
||||
|
||||
@performance_bp.route('/api/performance/resume/tasks')
|
||||
@handle_api_errors
|
||||
@optional_auth
|
||||
def get_resumable_tasks():
|
||||
"""Get list of tasks that can be resumed."""
|
||||
try:
|
||||
resumable_tasks = resume_manager.get_resumable_tasks()
|
||||
|
||||
# Get detailed info for each resumable task
|
||||
tasks_info = []
|
||||
for task_id in resumable_tasks:
|
||||
resume_info = resume_manager.load_resume_info(task_id)
|
||||
if resume_info:
|
||||
tasks_info.append({
|
||||
'task_id': task_id,
|
||||
'resume_info': resume_info
|
||||
})
|
||||
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'data': {
|
||||
'resumable_tasks': tasks_info,
|
||||
'count': len(tasks_info)
|
||||
}
|
||||
})
|
||||
except Exception as e:
|
||||
raise RetryableError(f"Failed to get resumable tasks: {e}")
|
||||
|
||||
|
||||
@performance_bp.route('/api/performance/resume/clear/<task_id>', methods=['POST'])
|
||||
@handle_api_errors
|
||||
@require_auth
|
||||
def clear_resume_info(task_id):
|
||||
"""Clear resume information for a specific task."""
|
||||
try:
|
||||
resume_manager.clear_resume_info(task_id)
|
||||
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'message': f'Resume information cleared for task: {task_id}'
|
||||
})
|
||||
except Exception as e:
|
||||
raise RetryableError(f"Failed to clear resume info: {e}")
|
||||
|
||||
|
||||
@performance_bp.route('/api/performance/system/optimize', methods=['POST'])
|
||||
@handle_api_errors
|
||||
@require_auth
|
||||
def optimize_system():
|
||||
"""Perform system optimization tasks."""
|
||||
try:
|
||||
optimization_results = {}
|
||||
|
||||
# Force garbage collection
|
||||
memory_monitor.force_garbage_collection()
|
||||
memory_stats = memory_monitor.get_memory_stats()
|
||||
optimization_results['memory_gc'] = {
|
||||
'completed': True,
|
||||
'memory_mb': memory_stats.get('rss_mb', 0)
|
||||
}
|
||||
|
||||
# Clean up expired cache entries
|
||||
download_cache._cleanup_expired()
|
||||
cache_stats = download_cache.get_stats()
|
||||
optimization_results['cache_cleanup'] = {
|
||||
'completed': True,
|
||||
'entries': cache_stats.get('entry_count', 0),
|
||||
'size_mb': cache_stats.get('total_size_mb', 0)
|
||||
}
|
||||
|
||||
# Clean up old resume files (older than 7 days)
|
||||
import os
|
||||
import time
|
||||
resume_dir = resume_manager.resume_dir
|
||||
cleaned_files = 0
|
||||
|
||||
try:
|
||||
for filename in os.listdir(resume_dir):
|
||||
file_path = os.path.join(resume_dir, filename)
|
||||
if os.path.isfile(file_path):
|
||||
file_age = time.time() - os.path.getmtime(file_path)
|
||||
if file_age > 7 * 24 * 3600: # 7 days in seconds
|
||||
os.remove(file_path)
|
||||
cleaned_files += 1
|
||||
except Exception:
pass  # best-effort cleanup; ignore individual file errors
|
||||
|
||||
optimization_results['resume_cleanup'] = {
|
||||
'completed': True,
|
||||
'files_removed': cleaned_files
|
||||
}
|
||||
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'message': 'System optimization completed',
|
||||
'data': optimization_results
|
||||
})
|
||||
except Exception as e:
|
||||
raise RetryableError(f"System optimization failed: {e}")
|
||||
|
||||
|
||||
@performance_bp.route('/api/performance/config')
|
||||
@handle_api_errors
|
||||
@optional_auth
|
||||
def get_performance_config():
|
||||
"""Get current performance configuration."""
|
||||
try:
|
||||
config = {
|
||||
'speed_limit': {
|
||||
'current_mbps': speed_limiter.max_speed_mbps,
|
||||
'unlimited': speed_limiter.max_speed_mbps == 0
|
||||
},
|
||||
'downloads': {
|
||||
'max_workers': download_manager.max_workers,
|
||||
'active_tasks': len(download_manager.active_tasks)
|
||||
},
|
||||
'cache': {
|
||||
'max_size_mb': download_cache.max_size_bytes / (1024 * 1024),
|
||||
**download_cache.get_stats()
|
||||
},
|
||||
'memory': {
|
||||
'warning_threshold_mb': memory_monitor.warning_threshold / (1024 * 1024),
|
||||
'critical_threshold_mb': memory_monitor.critical_threshold / (1024 * 1024),
|
||||
**memory_monitor.get_memory_stats()
|
||||
}
|
||||
}
|
||||
|
||||
return jsonify({
|
||||
'status': 'success',
|
||||
'data': config
|
||||
})
|
||||
except Exception as e:
|
||||
raise RetryableError(f"Failed to get performance config: {e}")
|
||||
|
||||
|
||||
# Export the blueprint
|
||||
__all__ = ['performance_bp']
|
||||
@ -1,280 +0,0 @@
|
||||
from flask import Blueprint, jsonify, request
|
||||
from web.controllers.auth_controller import require_auth
|
||||
from shared.utils.process_utils import (
|
||||
process_lock_manager,
|
||||
RESCAN_LOCK,
|
||||
DOWNLOAD_LOCK,
|
||||
SEARCH_LOCK,
|
||||
check_process_locks,
|
||||
get_process_status,
|
||||
update_process_progress,
|
||||
is_process_running,
|
||||
episode_deduplicator,
|
||||
ProcessLockError
|
||||
)
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
process_bp = Blueprint('process', __name__, url_prefix='/api/process')
|
||||
|
||||
@process_bp.route('/locks/status', methods=['GET'])
|
||||
@require_auth
|
||||
def get_all_locks_status():
|
||||
"""Get status of all process locks."""
|
||||
try:
|
||||
# Clean up expired locks first
|
||||
cleaned = check_process_locks()
|
||||
if cleaned > 0:
|
||||
logger.info(f"Cleaned up {cleaned} expired locks")
|
||||
|
||||
status = process_lock_manager.get_all_locks_status()
|
||||
|
||||
# Add queue deduplication info
|
||||
status['queue_info'] = {
|
||||
'active_episodes': episode_deduplicator.get_count(),
|
||||
'episodes': episode_deduplicator.get_active_episodes()
|
||||
}
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'locks': status
|
||||
})
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting locks status: {e}")
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}), 500
|
||||
|
||||
@process_bp.route('/locks/<lock_name>/status', methods=['GET'])
|
||||
@require_auth
|
||||
def get_lock_status(lock_name):
|
||||
"""Get status of a specific process lock."""
|
||||
try:
|
||||
if lock_name not in [RESCAN_LOCK, DOWNLOAD_LOCK, SEARCH_LOCK]:
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': 'Invalid lock name'
|
||||
}), 400
|
||||
|
||||
status = get_process_status(lock_name)
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'status': status
|
||||
})
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting lock status for {lock_name}: {e}")
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}), 500
|
||||
|
||||
@process_bp.route('/locks/<lock_name>/acquire', methods=['POST'])
|
||||
@require_auth
|
||||
def acquire_lock(lock_name):
|
||||
"""Manually acquire a process lock."""
|
||||
try:
|
||||
if lock_name not in [RESCAN_LOCK, DOWNLOAD_LOCK, SEARCH_LOCK]:
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': 'Invalid lock name'
|
||||
}), 400
|
||||
|
||||
data = request.get_json() or {}
|
||||
locked_by = data.get('locked_by', 'manual')
|
||||
timeout_minutes = data.get('timeout_minutes', 60)
|
||||
|
||||
success = process_lock_manager.acquire_lock(lock_name, locked_by, timeout_minutes)
|
||||
|
||||
if success:
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'message': f'Lock {lock_name} acquired successfully'
|
||||
})
|
||||
else:
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': f'Lock {lock_name} is already held'
|
||||
}), 409
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error acquiring lock {lock_name}: {e}")
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}), 500
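# Hedged usage sketch for the endpoint above. POST /api/process/locks/<lock_name>/acquire
# with a body like this returns 200 when the lock is granted and 409 when it is
# already held; the values are illustrative.
_EXAMPLE_ACQUIRE_LOCK_BODY = {
    "locked_by": "manual",     # free-form owner label recorded with the lock
    "timeout_minutes": 60,     # lock is treated as expired after this long
}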
|
||||
|
||||
@process_bp.route('/locks/<lock_name>/release', methods=['POST'])
|
||||
@require_auth
|
||||
def release_lock(lock_name):
|
||||
"""Manually release a process lock."""
|
||||
try:
|
||||
if lock_name not in [RESCAN_LOCK, DOWNLOAD_LOCK, SEARCH_LOCK]:
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': 'Invalid lock name'
|
||||
}), 400
|
||||
|
||||
success = process_lock_manager.release_lock(lock_name)
|
||||
|
||||
if success:
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'message': f'Lock {lock_name} released successfully'
|
||||
})
|
||||
else:
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': f'Lock {lock_name} was not held'
|
||||
}), 404
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Error releasing lock {lock_name}: {e}")
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}), 500
|
||||
|
||||
@process_bp.route('/locks/cleanup', methods=['POST'])
|
||||
@require_auth
|
||||
def cleanup_expired_locks():
|
||||
"""Manually clean up expired locks."""
|
||||
try:
|
||||
cleaned = check_process_locks()
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'cleaned_count': cleaned,
|
||||
'message': f'Cleaned up {cleaned} expired locks'
|
||||
})
|
||||
except Exception as e:
|
||||
logger.error(f"Error cleaning up locks: {e}")
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}), 500
|
||||
|
||||
@process_bp.route('/locks/force-release-all', methods=['POST'])
|
||||
@require_auth
|
||||
def force_release_all_locks():
|
||||
"""Force release all process locks (emergency use)."""
|
||||
try:
|
||||
data = request.get_json() or {}
|
||||
confirm = data.get('confirm', False)
|
||||
|
||||
if not confirm:
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': 'Confirmation required for force release'
|
||||
}), 400
|
||||
|
||||
released = process_lock_manager.force_release_all()
|
||||
|
||||
# Also clear queue deduplication
|
||||
episode_deduplicator.clear_all()
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'released_count': released,
|
||||
'message': f'Force released {released} locks and cleared queue deduplication'
|
||||
})
|
||||
except Exception as e:
|
||||
logger.error(f"Error force releasing locks: {e}")
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}), 500
|
||||
|
||||
@process_bp.route('/locks/<lock_name>/progress', methods=['POST'])
|
||||
@require_auth
|
||||
def update_lock_progress(lock_name):
|
||||
"""Update progress for a running process."""
|
||||
try:
|
||||
if lock_name not in [RESCAN_LOCK, DOWNLOAD_LOCK, SEARCH_LOCK]:
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': 'Invalid lock name'
|
||||
}), 400
|
||||
|
||||
if not is_process_running(lock_name):
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': f'Process {lock_name} is not running'
|
||||
}), 404
|
||||
|
||||
data = request.get_json() or {}
|
||||
progress_data = data.get('progress', {})
|
||||
|
||||
update_process_progress(lock_name, progress_data)
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'message': 'Progress updated successfully'
|
||||
})
|
||||
except Exception as e:
|
||||
logger.error(f"Error updating progress for {lock_name}: {e}")
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}), 500
|
||||
|
||||
@process_bp.route('/queue/deduplication', methods=['GET'])
|
||||
@require_auth
|
||||
def get_queue_deduplication():
|
||||
"""Get current queue deduplication status."""
|
||||
try:
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'deduplication': {
|
||||
'active_count': episode_deduplicator.get_count(),
|
||||
'active_episodes': episode_deduplicator.get_active_episodes()
|
||||
}
|
||||
})
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting queue deduplication: {e}")
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}), 500
|
||||
|
||||
@process_bp.route('/queue/deduplication/clear', methods=['POST'])
|
||||
@require_auth
|
||||
def clear_queue_deduplication():
|
||||
"""Clear all queue deduplication entries."""
|
||||
try:
|
||||
episode_deduplicator.clear_all()
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'message': 'Queue deduplication cleared successfully'
|
||||
})
|
||||
except Exception as e:
|
||||
logger.error(f"Error clearing queue deduplication: {e}")
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}), 500
|
||||
|
||||
@process_bp.route('/is-running/<process_name>', methods=['GET'])
|
||||
@require_auth
|
||||
def check_if_process_running(process_name):
|
||||
"""Quick check if a specific process is running."""
|
||||
try:
|
||||
if process_name not in [RESCAN_LOCK, DOWNLOAD_LOCK, SEARCH_LOCK]:
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': 'Invalid process name'
|
||||
}), 400
|
||||
|
||||
is_running = is_process_running(process_name)
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'is_running': is_running,
|
||||
'process_name': process_name
|
||||
})
|
||||
except Exception as e:
|
||||
logger.error(f"Error checking if process {process_name} is running: {e}")
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}), 500
|
||||
@ -1,187 +0,0 @@
|
||||
from flask import Blueprint, jsonify, request
|
||||
from web.controllers.auth_controller import require_auth
|
||||
from application.services.scheduler_service import get_scheduler
|
||||
import logging
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
scheduler_bp = Blueprint('scheduler', __name__, url_prefix='/api/scheduler')
|
||||
|
||||
@scheduler_bp.route('/config', methods=['GET'])
|
||||
@require_auth
|
||||
def get_scheduler_config():
|
||||
"""Get current scheduler configuration."""
|
||||
try:
|
||||
scheduler = get_scheduler()
|
||||
if not scheduler:
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': 'Scheduler not initialized'
|
||||
}), 500
|
||||
|
||||
config = scheduler.get_scheduled_rescan_config()
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'config': config
|
||||
})
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting scheduler config: {e}")
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}), 500
|
||||
|
||||
@scheduler_bp.route('/config', methods=['POST'])
|
||||
@require_auth
|
||||
def update_scheduler_config():
|
||||
"""Update scheduler configuration."""
|
||||
try:
|
||||
data = request.get_json() or {}
|
||||
|
||||
enabled = data.get('enabled', False)
|
||||
time_str = data.get('time', '03:00')
|
||||
auto_download = data.get('auto_download_after_rescan', False)
|
||||
|
||||
# Validate inputs
|
||||
if enabled and not time_str:
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': 'Time is required when scheduling is enabled'
|
||||
}), 400
|
||||
|
||||
scheduler = get_scheduler()
|
||||
if not scheduler:
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': 'Scheduler not initialized'
|
||||
}), 500
|
||||
|
||||
# Update configuration
|
||||
scheduler.update_scheduled_rescan_config(enabled, time_str, auto_download)
|
||||
|
||||
# Get updated config
|
||||
updated_config = scheduler.get_scheduled_rescan_config()
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'message': 'Scheduler configuration updated successfully',
|
||||
'config': updated_config
|
||||
})
|
||||
|
||||
except ValueError as e:
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}), 400
|
||||
except Exception as e:
|
||||
logger.error(f"Error updating scheduler config: {e}")
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}), 500
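# Hedged example body for POST /api/scheduler/config, matching the fields read above.
# The HH:MM time format is assumed from the '03:00' default.
_EXAMPLE_SCHEDULER_CONFIG_BODY = {
    "enabled": True,
    "time": "03:00",
    "auto_download_after_rescan": False,
}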
|
||||
|
||||
@scheduler_bp.route('/status', methods=['GET'])
|
||||
@require_auth
|
||||
def get_scheduler_status():
|
||||
"""Get current scheduler status and next jobs."""
|
||||
try:
|
||||
scheduler = get_scheduler()
|
||||
if not scheduler:
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': 'Scheduler not initialized'
|
||||
}), 500
|
||||
|
||||
config = scheduler.get_scheduled_rescan_config()
|
||||
jobs = scheduler.get_next_scheduled_jobs()
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'status': {
|
||||
'running': config['is_running'],
|
||||
'config': config,
|
||||
'scheduled_jobs': jobs
|
||||
}
|
||||
})
|
||||
except Exception as e:
|
||||
logger.error(f"Error getting scheduler status: {e}")
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}), 500
|
||||
|
||||
@scheduler_bp.route('/start', methods=['POST'])
|
||||
@require_auth
|
||||
def start_scheduler():
|
||||
"""Start the scheduler."""
|
||||
try:
|
||||
scheduler = get_scheduler()
|
||||
if not scheduler:
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': 'Scheduler not initialized'
|
||||
}), 500
|
||||
|
||||
scheduler.start_scheduler()
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'message': 'Scheduler started successfully'
|
||||
})
|
||||
except Exception as e:
|
||||
logger.error(f"Error starting scheduler: {e}")
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}), 500
|
||||
|
||||
@scheduler_bp.route('/stop', methods=['POST'])
|
||||
@require_auth
|
||||
def stop_scheduler():
|
||||
"""Stop the scheduler."""
|
||||
try:
|
||||
scheduler = get_scheduler()
|
||||
if not scheduler:
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': 'Scheduler not initialized'
|
||||
}), 500
|
||||
|
||||
scheduler.stop_scheduler()
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'message': 'Scheduler stopped successfully'
|
||||
})
|
||||
except Exception as e:
|
||||
logger.error(f"Error stopping scheduler: {e}")
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}), 500
|
||||
|
||||
@scheduler_bp.route('/trigger-rescan', methods=['POST'])
|
||||
@require_auth
|
||||
def trigger_manual_rescan():
|
||||
"""Manually trigger a scheduled rescan for testing."""
|
||||
try:
|
||||
scheduler = get_scheduler()
|
||||
if not scheduler:
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': 'Scheduler not initialized'
|
||||
}), 500
|
||||
|
||||
scheduler.trigger_manual_scheduled_rescan()
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'message': 'Manual scheduled rescan triggered'
|
||||
})
|
||||
except Exception as e:
|
||||
logger.error(f"Error triggering manual rescan: {e}")
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': str(e)
|
||||
}), 500
|
||||
@ -1,637 +0,0 @@
|
||||
"""
|
||||
Search API Endpoints
|
||||
|
||||
This module provides REST API endpoints for advanced search functionality
|
||||
across anime, episodes, and other content.
|
||||
"""
|
||||
|
||||
from flask import Blueprint, request
|
||||
from typing import Dict, List, Any, Optional
|
||||
import re
|
||||
|
||||
from ...shared.auth_decorators import require_auth, optional_auth
|
||||
from ...shared.error_handlers import handle_api_errors, APIException, ValidationError
|
||||
from ...shared.validators import validate_pagination_params
|
||||
from ...shared.response_helpers import (
|
||||
create_success_response, create_paginated_response, format_anime_response,
|
||||
format_episode_response, extract_pagination_params
|
||||
)
|
||||
|
||||
# Import search components (these imports would need to be adjusted based on actual structure)
|
||||
try:
|
||||
from search_manager import search_engine, SearchResult
|
||||
from database_manager import anime_repository, episode_repository
|
||||
except ImportError:
|
||||
# Fallback for development/testing
|
||||
search_engine = None
|
||||
SearchResult = None
|
||||
anime_repository = None
|
||||
episode_repository = None
|
||||
|
||||
|
||||
# Blueprint for search endpoints
|
||||
search_bp = Blueprint('search', __name__, url_prefix='/api/v1/search')
|
||||
|
||||
|
||||
@search_bp.route('', methods=['GET'])
|
||||
@handle_api_errors
|
||||
@validate_pagination_params
|
||||
@optional_auth
|
||||
def global_search() -> Dict[str, Any]:
|
||||
"""
|
||||
Perform a global search across all content types.
|
||||
|
||||
Query Parameters:
|
||||
- q: Search query (required)
|
||||
- types: Comma-separated list of content types (anime,episodes,all)
|
||||
- categories: Comma-separated list of categories to search
|
||||
- min_score: Minimum relevance score (0.0-1.0)
|
||||
- page: Page number (default: 1)
|
||||
- per_page: Items per page (default: 50, max: 1000)
|
||||
|
||||
Returns:
|
||||
Paginated search results grouped by content type
|
||||
"""
|
||||
if not search_engine:
|
||||
raise APIException("Search engine not available", 503)
|
||||
|
||||
search_query = request.args.get('q', '').strip()
|
||||
if not search_query:
|
||||
raise ValidationError("Search query 'q' is required")
|
||||
|
||||
if len(search_query) < 2:
|
||||
raise ValidationError("Search query must be at least 2 characters long")
|
||||
|
||||
# Parse search types
|
||||
search_types = request.args.get('types', 'all').split(',')
|
||||
valid_types = ['anime', 'episodes', 'all']
|
||||
search_types = [t.strip() for t in search_types if t.strip() in valid_types]
|
||||
|
||||
if not search_types or 'all' in search_types:
|
||||
search_types = ['anime', 'episodes']
|
||||
|
||||
# Parse categories
|
||||
categories = request.args.get('categories', '').split(',')
|
||||
categories = [c.strip() for c in categories if c.strip()]
|
||||
|
||||
# Parse minimum score
|
||||
min_score = request.args.get('min_score', '0.0')
|
||||
try:
|
||||
min_score = float(min_score)
|
||||
if not 0.0 <= min_score <= 1.0:
|
||||
raise ValueError()
|
||||
except ValueError:
|
||||
raise ValidationError("min_score must be a number between 0.0 and 1.0")
|
||||
|
||||
# Get pagination parameters
|
||||
page, per_page = extract_pagination_params()
|
||||
|
||||
# Perform search
|
||||
search_results = search_engine.search_all(
|
||||
query=search_query,
|
||||
content_types=search_types,
|
||||
categories=categories,
|
||||
min_score=min_score
|
||||
)
|
||||
|
||||
# Group results by type
|
||||
grouped_results = {
|
||||
'anime': [],
|
||||
'episodes': [],
|
||||
'total_results': 0
|
||||
}
|
||||
|
||||
for result in search_results:
|
||||
if result.content_type == 'anime':
|
||||
grouped_results['anime'].append({
|
||||
'id': result.content_id,
|
||||
'type': 'anime',
|
||||
'title': result.title,
|
||||
'description': result.description,
|
||||
'score': result.relevance_score,
|
||||
'data': format_anime_response(result.content_data)
|
||||
})
|
||||
elif result.content_type == 'episode':
|
||||
grouped_results['episodes'].append({
|
||||
'id': result.content_id,
|
||||
'type': 'episode',
|
||||
'title': result.title,
|
||||
'description': result.description,
|
||||
'score': result.relevance_score,
|
||||
'data': format_episode_response(result.content_data)
|
||||
})
|
||||
|
||||
grouped_results['total_results'] += 1
|
||||
|
||||
# Apply pagination to combined results
|
||||
all_results = []
|
||||
for result_type in ['anime', 'episodes']:
|
||||
all_results.extend(grouped_results[result_type])
|
||||
|
||||
# Sort by relevance score
|
||||
all_results.sort(key=lambda x: x['score'], reverse=True)
|
||||
|
||||
total = len(all_results)
|
||||
start_idx = (page - 1) * per_page
|
||||
end_idx = start_idx + per_page
|
||||
paginated_results = all_results[start_idx:end_idx]
|
||||
|
||||
response = create_paginated_response(
|
||||
data=paginated_results,
|
||||
page=page,
|
||||
per_page=per_page,
|
||||
total=total,
|
||||
endpoint='search.global_search',
|
||||
q=search_query
|
||||
)
|
||||
|
||||
# Add search metadata
|
||||
response['search'] = {
|
||||
'query': search_query,
|
||||
'types': search_types,
|
||||
'categories': categories,
|
||||
'min_score': min_score,
|
||||
'results_by_type': {
|
||||
'anime': len(grouped_results['anime']),
|
||||
'episodes': len(grouped_results['episodes'])
|
||||
}
|
||||
}
|
||||
|
||||
return response
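# Hedged usage sketch (client side) for the global search endpoint above; the base URL
# and the `requests` dependency are assumptions, and the query parameters mirror the
# docstring.
def _example_global_search(base_url="http://127.0.0.1:5000"):
    import requests  # third-party HTTP client, assumed available to the caller

    response = requests.get(
        f"{base_url}/api/v1/search",
        params={
            "q": "one piece",
            "types": "anime,episodes",
            "min_score": 0.3,
            "page": 1,
            "per_page": 25,
        },
        timeout=30,
    )
    return response.json()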
|
||||
|
||||
|
||||
@search_bp.route('/anime', methods=['GET'])
|
||||
@handle_api_errors
|
||||
@validate_pagination_params
|
||||
@optional_auth
|
||||
def search_anime() -> Dict[str, Any]:
|
||||
"""
|
||||
Search anime with advanced filters.
|
||||
|
||||
Query Parameters:
|
||||
- q: Search query (required)
|
||||
- genres: Comma-separated list of genres
|
||||
- status: Anime status filter
|
||||
- year_from: Starting year filter
|
||||
- year_to: Ending year filter
|
||||
- min_episodes: Minimum episode count
|
||||
- max_episodes: Maximum episode count
|
||||
- sort_by: Sort field (name, year, episodes, relevance)
|
||||
- sort_order: Sort order (asc, desc)
|
||||
- page: Page number (default: 1)
|
||||
- per_page: Items per page (default: 50, max: 1000)
|
||||
|
||||
Returns:
|
||||
Paginated anime search results
|
||||
"""
|
||||
if not anime_repository:
|
||||
raise APIException("Anime repository not available", 503)
|
||||
|
||||
search_query = request.args.get('q', '').strip()
|
||||
if not search_query:
|
||||
raise ValidationError("Search query 'q' is required")
|
||||
|
||||
# Parse filters
|
||||
genres = request.args.get('genres', '').split(',')
|
||||
genres = [g.strip() for g in genres if g.strip()]
|
||||
|
||||
status_filter = request.args.get('status')
|
||||
|
||||
# Parse year filters
|
||||
year_from = request.args.get('year_from')
|
||||
year_to = request.args.get('year_to')
|
||||
|
||||
if year_from:
|
||||
try:
|
||||
year_from = int(year_from)
|
||||
if year_from < 1900 or year_from > 2100:
|
||||
raise ValueError()
|
||||
except ValueError:
|
||||
raise ValidationError("year_from must be a valid year between 1900 and 2100")
|
||||
|
||||
if year_to:
|
||||
try:
|
||||
year_to = int(year_to)
|
||||
if year_to < 1900 or year_to > 2100:
|
||||
raise ValueError()
|
||||
except ValueError:
|
||||
raise ValidationError("year_to must be a valid year between 1900 and 2100")
|
||||
|
||||
# Parse episode count filters
|
||||
min_episodes = request.args.get('min_episodes')
|
||||
max_episodes = request.args.get('max_episodes')
|
||||
|
||||
if min_episodes:
|
||||
try:
|
||||
min_episodes = int(min_episodes)
|
||||
if min_episodes < 0:
|
||||
raise ValueError()
|
||||
except ValueError:
|
||||
raise ValidationError("min_episodes must be a non-negative integer")
|
||||
|
||||
if max_episodes:
|
||||
try:
|
||||
max_episodes = int(max_episodes)
|
||||
if max_episodes < 0:
|
||||
raise ValueError()
|
||||
except ValueError:
|
||||
raise ValidationError("max_episodes must be a non-negative integer")
|
||||
|
||||
# Parse sorting
|
||||
sort_by = request.args.get('sort_by', 'relevance')
|
||||
sort_order = request.args.get('sort_order', 'desc')
|
||||
|
||||
valid_sort_fields = ['name', 'year', 'episodes', 'relevance', 'created_at']
|
||||
if sort_by not in valid_sort_fields:
|
||||
raise ValidationError(f"sort_by must be one of: {', '.join(valid_sort_fields)}")
|
||||
|
||||
if sort_order not in ['asc', 'desc']:
|
||||
raise ValidationError("sort_order must be 'asc' or 'desc'")
|
||||
|
||||
# Get pagination parameters
|
||||
page, per_page = extract_pagination_params()
|
||||
|
||||
# Perform advanced search
|
||||
search_results = anime_repository.advanced_search(
|
||||
query=search_query,
|
||||
genres=genres,
|
||||
status=status_filter,
|
||||
year_from=year_from,
|
||||
year_to=year_to,
|
||||
min_episodes=min_episodes,
|
||||
max_episodes=max_episodes,
|
||||
sort_by=sort_by,
|
||||
sort_order=sort_order
|
||||
)
|
||||
|
||||
# Format results
|
||||
formatted_results = []
|
||||
for anime in search_results:
|
||||
anime_data = format_anime_response(anime.__dict__)
|
||||
# Add search relevance score if available
|
||||
if hasattr(anime, 'relevance_score'):
|
||||
anime_data['relevance_score'] = anime.relevance_score
|
||||
formatted_results.append(anime_data)
|
||||
|
||||
# Apply pagination
|
||||
total = len(formatted_results)
|
||||
start_idx = (page - 1) * per_page
|
||||
end_idx = start_idx + per_page
|
||||
paginated_results = formatted_results[start_idx:end_idx]
|
||||
|
||||
response = create_paginated_response(
|
||||
data=paginated_results,
|
||||
page=page,
|
||||
per_page=per_page,
|
||||
total=total,
|
||||
endpoint='search.search_anime',
|
||||
q=search_query
|
||||
)
|
||||
|
||||
# Add search metadata
|
||||
response['search'] = {
|
||||
'query': search_query,
|
||||
'filters': {
|
||||
'genres': genres,
|
||||
'status': status_filter,
|
||||
'year_from': year_from,
|
||||
'year_to': year_to,
|
||||
'min_episodes': min_episodes,
|
||||
'max_episodes': max_episodes
|
||||
},
|
||||
'sorting': {
|
||||
'sort_by': sort_by,
|
||||
'sort_order': sort_order
|
||||
}
|
||||
}
|
||||
|
||||
return response
|
||||
|
||||
|
||||
@search_bp.route('/episodes', methods=['GET'])
|
||||
@handle_api_errors
|
||||
@validate_pagination_params
|
||||
@optional_auth
|
||||
def search_episodes() -> Dict[str, Any]:
|
||||
"""
|
||||
Search episodes with advanced filters.
|
||||
|
||||
Query Parameters:
|
||||
- q: Search query (required)
|
||||
- anime_id: Filter by anime ID
|
||||
- status: Episode status filter
|
||||
- downloaded: Filter by download status (true/false)
|
||||
- episode_range: Episode range filter (e.g., "1-10", "5+")
|
||||
- duration_min: Minimum duration in minutes
|
||||
- duration_max: Maximum duration in minutes
|
||||
- sort_by: Sort field (episode_number, title, duration, relevance)
|
||||
- sort_order: Sort order (asc, desc)
|
||||
- page: Page number (default: 1)
|
||||
- per_page: Items per page (default: 50, max: 1000)
|
||||
|
||||
Returns:
|
||||
Paginated episode search results
|
||||
"""
|
||||
if not episode_repository:
|
||||
raise APIException("Episode repository not available", 503)
|
||||
|
||||
search_query = request.args.get('q', '').strip()
|
||||
if not search_query:
|
||||
raise ValidationError("Search query 'q' is required")
|
||||
|
||||
# Parse filters
|
||||
anime_id = request.args.get('anime_id')
|
||||
if anime_id:
|
||||
try:
|
||||
anime_id = int(anime_id)
|
||||
except ValueError:
|
||||
raise ValidationError("anime_id must be a valid integer")
|
||||
|
||||
status_filter = request.args.get('status')
|
||||
downloaded_filter = request.args.get('downloaded')
|
||||
|
||||
if downloaded_filter and downloaded_filter.lower() not in ['true', 'false']:
|
||||
raise ValidationError("downloaded filter must be 'true' or 'false'")
|
||||
|
||||
# Parse episode range
|
||||
episode_range = request.args.get('episode_range')
|
||||
episode_min = None
|
||||
episode_max = None
|
||||
|
||||
if episode_range:
|
||||
range_pattern = r'^(\d+)(?:-(\d+)|\+)?$'
|
||||
match = re.match(range_pattern, episode_range)
|
||||
if not match:
|
||||
raise ValidationError("episode_range must be in format 'N', 'N-M', or 'N+'")
|
||||
|
||||
episode_min = int(match.group(1))
|
||||
if match.group(2):
|
||||
episode_max = int(match.group(2))
|
||||
elif episode_range.endswith('+'):
|
||||
episode_max = None # No upper limit
|
||||
else:
|
||||
episode_max = episode_min # Single episode
|
||||
|
||||
# Parse duration filters
|
||||
duration_min = request.args.get('duration_min')
|
||||
duration_max = request.args.get('duration_max')
|
||||
|
||||
if duration_min:
|
||||
try:
|
||||
duration_min = int(duration_min)
|
||||
if duration_min < 0:
|
||||
raise ValueError()
|
||||
except ValueError:
|
||||
raise ValidationError("duration_min must be a non-negative integer")
|
||||
|
||||
if duration_max:
|
||||
try:
|
||||
duration_max = int(duration_max)
|
||||
if duration_max < 0:
|
||||
raise ValueError()
|
||||
except ValueError:
|
||||
raise ValidationError("duration_max must be a non-negative integer")
|
||||
|
||||
# Parse sorting
|
||||
sort_by = request.args.get('sort_by', 'relevance')
|
||||
sort_order = request.args.get('sort_order', 'desc')
|
||||
|
||||
valid_sort_fields = ['episode_number', 'title', 'duration', 'relevance', 'created_at']
|
||||
if sort_by not in valid_sort_fields:
|
||||
raise ValidationError(f"sort_by must be one of: {', '.join(valid_sort_fields)}")
|
||||
|
||||
if sort_order not in ['asc', 'desc']:
|
||||
raise ValidationError("sort_order must be 'asc' or 'desc'")
|
||||
|
||||
# Get pagination parameters
|
||||
page, per_page = extract_pagination_params()
|
||||
|
||||
# Perform advanced search
|
||||
search_results = episode_repository.advanced_search(
|
||||
query=search_query,
|
||||
anime_id=anime_id,
|
||||
status=status_filter,
|
||||
downloaded=downloaded_filter.lower() == 'true' if downloaded_filter else None,
|
||||
episode_min=episode_min,
|
||||
episode_max=episode_max,
|
||||
duration_min=duration_min,
|
||||
duration_max=duration_max,
|
||||
sort_by=sort_by,
|
||||
sort_order=sort_order
|
||||
)
|
||||
|
||||
# Format results
|
||||
formatted_results = []
|
||||
for episode in search_results:
|
||||
episode_data = format_episode_response(episode.__dict__)
|
||||
# Add search relevance score if available
|
||||
if hasattr(episode, 'relevance_score'):
|
||||
episode_data['relevance_score'] = episode.relevance_score
|
||||
formatted_results.append(episode_data)
|
||||
|
||||
# Apply pagination
|
||||
total = len(formatted_results)
|
||||
start_idx = (page - 1) * per_page
|
||||
end_idx = start_idx + per_page
|
||||
paginated_results = formatted_results[start_idx:end_idx]
|
||||
|
||||
response = create_paginated_response(
|
||||
data=paginated_results,
|
||||
page=page,
|
||||
per_page=per_page,
|
||||
total=total,
|
||||
endpoint='search.search_episodes',
|
||||
q=search_query
|
||||
)
|
||||
|
||||
# Add search metadata
|
||||
response['search'] = {
|
||||
'query': search_query,
|
||||
'filters': {
|
||||
'anime_id': anime_id,
|
||||
'status': status_filter,
|
||||
'downloaded': downloaded_filter,
|
||||
'episode_range': episode_range,
|
||||
'duration_min': duration_min,
|
||||
'duration_max': duration_max
|
||||
},
|
||||
'sorting': {
|
||||
'sort_by': sort_by,
|
||||
'sort_order': sort_order
|
||||
}
|
||||
}
|
||||
|
||||
return response
|
||||
|
||||
|
||||
@search_bp.route('/suggestions', methods=['GET'])
|
||||
@handle_api_errors
|
||||
@optional_auth
|
||||
def get_search_suggestions() -> Dict[str, Any]:
|
||||
"""
|
||||
Get search suggestions based on partial query.
|
||||
|
||||
Query Parameters:
|
||||
- q: Partial search query (required)
|
||||
- type: Content type (anime, episodes, all)
|
||||
- limit: Maximum suggestions to return (default: 10, max: 50)
|
||||
|
||||
Returns:
|
||||
List of search suggestions
|
||||
"""
|
||||
if not search_engine:
|
||||
raise APIException("Search engine not available", 503)
|
||||
|
||||
query = request.args.get('q', '').strip()
|
||||
if not query:
|
||||
raise ValidationError("Query 'q' is required")
|
||||
|
||||
if len(query) < 1:
|
||||
return create_success_response(data=[])
|
||||
|
||||
content_type = request.args.get('type', 'all')
|
||||
if content_type not in ['anime', 'episodes', 'all']:
|
||||
raise ValidationError("type must be 'anime', 'episodes', or 'all'")
|
||||
|
||||
limit = request.args.get('limit', '10')
|
||||
try:
|
||||
limit = int(limit)
|
||||
if limit < 1 or limit > 50:
|
||||
raise ValueError()
|
||||
except ValueError:
|
||||
raise ValidationError("limit must be an integer between 1 and 50")
|
||||
|
||||
# Get suggestions
|
||||
suggestions = search_engine.get_suggestions(
|
||||
query=query,
|
||||
content_type=content_type,
|
||||
limit=limit
|
||||
)
|
||||
|
||||
return create_success_response(
|
||||
data={
|
||||
'suggestions': suggestions,
|
||||
'query': query,
|
||||
'count': len(suggestions)
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
@search_bp.route('/autocomplete', methods=['GET'])
|
||||
@handle_api_errors
|
||||
@optional_auth
|
||||
def autocomplete() -> Dict[str, Any]:
|
||||
"""
|
||||
Get autocomplete suggestions for search fields.
|
||||
|
||||
Query Parameters:
|
||||
- field: Field to autocomplete (name, genre, status)
|
||||
- q: Partial value
|
||||
- limit: Maximum suggestions (default: 10, max: 20)
|
||||
|
||||
Returns:
|
||||
List of autocomplete suggestions
|
||||
"""
|
||||
field = request.args.get('field', '').strip()
|
||||
query = request.args.get('q', '').strip()
|
||||
|
||||
if not field:
|
||||
raise ValidationError("Field parameter is required")
|
||||
|
||||
if field not in ['name', 'genre', 'status', 'year']:
|
||||
raise ValidationError("field must be one of: name, genre, status, year")
|
||||
|
||||
limit = request.args.get('limit', '10')
|
||||
try:
|
||||
limit = int(limit)
|
||||
if limit < 1 or limit > 20:
|
||||
raise ValueError()
|
||||
except ValueError:
|
||||
raise ValidationError("limit must be an integer between 1 and 20")
|
||||
|
||||
# Get autocomplete suggestions based on field
|
||||
suggestions = []
|
||||
|
||||
if field == 'name':
|
||||
# Get anime/episode name suggestions
|
||||
if anime_repository:
|
||||
anime_names = anime_repository.get_name_suggestions(query, limit)
|
||||
suggestions.extend(anime_names)
|
||||
|
||||
elif field == 'genre':
|
||||
# Get genre suggestions
|
||||
if anime_repository:
|
||||
genres = anime_repository.get_genre_suggestions(query, limit)
|
||||
suggestions.extend(genres)
|
||||
|
||||
elif field == 'status':
|
||||
# Get status suggestions
|
||||
valid_statuses = ['ongoing', 'completed', 'planned', 'dropped', 'paused']
|
||||
suggestions = [s for s in valid_statuses if query.lower() in s.lower()][:limit]
|
||||
|
||||
elif field == 'year':
|
||||
# Get year suggestions
|
||||
if anime_repository:
|
||||
years = anime_repository.get_year_suggestions(query, limit)
|
||||
suggestions.extend(years)
|
||||
|
||||
return create_success_response(
|
||||
data={
|
||||
'suggestions': suggestions,
|
||||
'field': field,
|
||||
'query': query,
|
||||
'count': len(suggestions)
|
||||
}
|
||||
)
|
||||
|
||||
|
||||
@search_bp.route('/trending', methods=['GET'])
|
||||
@handle_api_errors
|
||||
@optional_auth
|
||||
def get_trending_searches() -> Dict[str, Any]:
|
||||
"""
|
||||
Get trending search queries.
|
||||
|
||||
Query Parameters:
|
||||
- period: Time period (day, week, month)
|
||||
- type: Content type (anime, episodes, all)
|
||||
- limit: Maximum results (default: 10, max: 50)
|
||||
|
||||
Returns:
|
||||
List of trending search queries
|
||||
"""
|
||||
if not search_engine:
|
||||
raise APIException("Search engine not available", 503)
|
||||
|
||||
period = request.args.get('period', 'week')
|
||||
content_type = request.args.get('type', 'all')
|
||||
|
||||
if period not in ['day', 'week', 'month']:
|
||||
raise ValidationError("period must be 'day', 'week', or 'month'")
|
||||
|
||||
if content_type not in ['anime', 'episodes', 'all']:
|
||||
raise ValidationError("type must be 'anime', 'episodes', or 'all'")
|
||||
|
||||
limit = request.args.get('limit', '10')
|
||||
try:
|
||||
limit = int(limit)
|
||||
if limit < 1 or limit > 50:
|
||||
raise ValueError()
|
||||
except ValueError:
|
||||
raise ValidationError("limit must be an integer between 1 and 50")
|
||||
|
||||
# Get trending searches
|
||||
trending = search_engine.get_trending_searches(
|
||||
period=period,
|
||||
content_type=content_type,
|
||||
limit=limit
|
||||
)
|
||||
|
||||
return create_success_response(
|
||||
data={
|
||||
'trending': trending,
|
||||
'period': period,
|
||||
'type': content_type,
|
||||
'count': len(trending)
|
||||
}
|
||||
)
|
||||
@ -1,332 +0,0 @@
|
||||
"""
|
||||
Simple Master Password Authentication Controller for AniWorld.
|
||||
|
||||
This module implements a simple authentication system using:
|
||||
- Single master password (no user registration)
|
||||
- JWT tokens for session management
|
||||
- Environment-based configuration
|
||||
- No email system required
|
||||
"""
|
||||
|
||||
import os
|
||||
import hashlib
|
||||
import jwt
|
||||
from datetime import datetime, timedelta
|
||||
from flask import Blueprint, request, jsonify
|
||||
from functools import wraps
|
||||
import logging
|
||||
from typing import Dict, Any, Optional, Tuple
|
||||
|
||||
# Configure logging
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Create blueprint
|
||||
simple_auth_bp = Blueprint('simple_auth', __name__)
|
||||
|
||||
# Configuration from environment
|
||||
JWT_SECRET_KEY = os.getenv('JWT_SECRET_KEY', 'default_jwt_secret')
|
||||
PASSWORD_SALT = os.getenv('PASSWORD_SALT', 'default_salt')
|
||||
MASTER_PASSWORD_HASH = os.getenv('MASTER_PASSWORD_HASH')
|
||||
TOKEN_EXPIRY_HOURS = int(os.getenv('SESSION_TIMEOUT_HOURS', '24'))
|
||||
|
||||
|
||||
def hash_password(password: str) -> str:
|
||||
"""Hash password with salt using SHA-256."""
|
||||
salted_password = password + PASSWORD_SALT
|
||||
return hashlib.sha256(salted_password.encode()).hexdigest()
|
||||
|
||||
|
||||
def verify_master_password(password: str) -> bool:
|
||||
"""Verify password against master password hash."""
|
||||
if not MASTER_PASSWORD_HASH:
|
||||
# If no hash is set, check against environment variable (development only)
|
||||
dev_password = os.getenv('MASTER_PASSWORD')
|
||||
if dev_password:
|
||||
return password == dev_password
|
||||
return False
|
||||
|
||||
password_hash = hash_password(password)
|
||||
return password_hash == MASTER_PASSWORD_HASH
|
||||
|
||||
|
||||
def generate_jwt_token() -> str:
|
||||
"""Generate JWT token for authentication."""
|
||||
payload = {
|
||||
'user': 'master',
|
||||
'exp': datetime.utcnow() + timedelta(hours=TOKEN_EXPIRY_HOURS),
|
||||
'iat': datetime.utcnow(),
|
||||
'iss': 'aniworld-server'
|
||||
}
|
||||
|
||||
return jwt.encode(payload, JWT_SECRET_KEY, algorithm='HS256')
|
||||
|
||||
|
||||
def verify_jwt_token(token: str) -> Optional[Dict[str, Any]]:
|
||||
"""Verify and decode JWT token."""
|
||||
try:
|
||||
payload = jwt.decode(token, JWT_SECRET_KEY, algorithms=['HS256'])
|
||||
return payload
|
||||
except jwt.ExpiredSignatureError:
|
||||
logger.warning("Token has expired")
|
||||
return None
|
||||
except jwt.InvalidTokenError as e:
|
||||
logger.warning(f"Invalid token: {str(e)}")
|
||||
return None
|
||||
|
||||
|
||||
def require_auth(f):
|
||||
"""Decorator to require authentication for API endpoints."""
|
||||
@wraps(f)
|
||||
def decorated_function(*args, **kwargs):
|
||||
auth_header = request.headers.get('Authorization')
|
||||
if not auth_header:
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': 'Authorization header required',
|
||||
'code': 'AUTH_REQUIRED'
|
||||
}), 401
|
||||
|
||||
try:
|
||||
# Expected format: "Bearer <token>"
|
||||
token = auth_header.split(' ')[1]
|
||||
except IndexError:
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': 'Invalid authorization header format',
|
||||
'code': 'INVALID_AUTH_FORMAT'
|
||||
}), 401
|
||||
|
||||
payload = verify_jwt_token(token)
|
||||
if not payload:
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': 'Invalid or expired token',
|
||||
'code': 'INVALID_TOKEN'
|
||||
}), 401
|
||||
|
||||
# Add user info to request context
|
||||
request.current_user = payload
|
||||
return f(*args, **kwargs)
|
||||
|
||||
return decorated_function
|
||||
|
||||
|
||||
# Auth endpoints
|
||||
|
||||
@simple_auth_bp.route('/auth/login', methods=['POST'])
|
||||
def login() -> Tuple[Any, int]:
|
||||
"""
|
||||
Authenticate with master password and receive JWT token.
|
||||
|
||||
Request Body:
|
||||
{
|
||||
"password": "master_password"
|
||||
}
|
||||
|
||||
Response:
|
||||
{
|
||||
"success": true,
|
||||
"message": "Login successful",
|
||||
"data": {
|
||||
"token": "jwt_token_here",
|
||||
"expires_at": "2025-01-01T00:00:00Z",
|
||||
"user": "master"
|
||||
}
|
||||
}
|
||||
"""
|
||||
try:
|
||||
data = request.get_json()
|
||||
if not data:
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': 'JSON body required',
|
||||
'code': 'MISSING_JSON'
|
||||
}), 400
|
||||
|
||||
password = data.get('password')
|
||||
if not password:
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': 'Password required',
|
||||
'code': 'MISSING_PASSWORD'
|
||||
}), 400
|
||||
|
||||
# Verify master password
|
||||
if not verify_master_password(password):
|
||||
logger.warning(f"Failed login attempt from IP: {request.remote_addr}")
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': 'Invalid master password',
|
||||
'code': 'INVALID_CREDENTIALS'
|
||||
}), 401
|
||||
|
||||
# Generate JWT token
|
||||
token = generate_jwt_token()
|
||||
expires_at = datetime.utcnow() + timedelta(hours=TOKEN_EXPIRY_HOURS)
|
||||
|
||||
logger.info(f"Successful login from IP: {request.remote_addr}")
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'message': 'Login successful',
|
||||
'data': {
|
||||
'token': token,
|
||||
'expires_at': expires_at.isoformat() + 'Z',
|
||||
'user': 'master',
|
||||
'token_type': 'Bearer'
|
||||
}
|
||||
}), 200
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Login error: {str(e)}")
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': 'Internal server error',
|
||||
'code': 'SERVER_ERROR'
|
||||
}), 500
|
||||
|
||||
|
||||
@simple_auth_bp.route('/auth/verify', methods=['GET'])
|
||||
@require_auth
|
||||
def verify_token() -> Tuple[Any, int]:
|
||||
"""
|
||||
Verify if the current JWT token is valid.
|
||||
|
||||
Headers:
|
||||
Authorization: Bearer <token>
|
||||
|
||||
Response:
|
||||
{
|
||||
"success": true,
|
||||
"message": "Token is valid",
|
||||
"data": {
|
||||
"user": "master",
|
||||
"expires_at": "2025-01-01T00:00:00Z",
|
||||
"issued_at": "2025-01-01T00:00:00Z"
|
||||
}
|
||||
}
|
||||
"""
|
||||
try:
|
||||
payload = request.current_user
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'message': 'Token is valid',
|
||||
'data': {
|
||||
'user': payload.get('user'),
|
||||
'expires_at': datetime.utcfromtimestamp(payload.get('exp')).isoformat() + 'Z',
|
||||
'issued_at': datetime.utcfromtimestamp(payload.get('iat')).isoformat() + 'Z',
|
||||
'issuer': payload.get('iss')
|
||||
}
|
||||
}), 200
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Token verification error: {str(e)}")
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': 'Internal server error',
|
||||
'code': 'SERVER_ERROR'
|
||||
}), 500
|
||||
|
||||
|
||||
@simple_auth_bp.route('/auth/logout', methods=['POST'])
|
||||
@require_auth
|
||||
def logout() -> Tuple[Any, int]:
|
||||
"""
|
||||
Logout (client-side token clearing).
|
||||
|
||||
Since JWT tokens are stateless, logout is handled client-side
|
||||
by removing the token. This endpoint confirms logout action.
|
||||
|
||||
Headers:
|
||||
Authorization: Bearer <token>
|
||||
|
||||
Response:
|
||||
{
|
||||
"success": true,
|
||||
"message": "Logout successful"
|
||||
}
|
||||
"""
|
||||
try:
|
||||
logger.info(f"User logged out from IP: {request.remote_addr}")
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'message': 'Logout successful. Please remove the token on client side.',
|
||||
'data': {
|
||||
'action': 'clear_token'
|
||||
}
|
||||
}), 200
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Logout error: {str(e)}")
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': 'Internal server error',
|
||||
'code': 'SERVER_ERROR'
|
||||
}), 500
|
||||
|
||||
|
||||
@simple_auth_bp.route('/auth/status', methods=['GET'])
|
||||
def auth_status() -> Tuple[Any, int]:
|
||||
"""
|
||||
Check authentication system status.
|
||||
|
||||
Response:
|
||||
{
|
||||
"success": true,
|
||||
"message": "Authentication system status",
|
||||
"data": {
|
||||
"auth_type": "master_password",
|
||||
"jwt_enabled": true,
|
||||
"password_configured": true
|
||||
}
|
||||
}
|
||||
"""
|
||||
try:
|
||||
password_configured = bool(MASTER_PASSWORD_HASH or os.getenv('MASTER_PASSWORD'))
|
||||
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'message': 'Authentication system status',
|
||||
'data': {
|
||||
'auth_type': 'master_password',
|
||||
'jwt_enabled': True,
|
||||
'password_configured': password_configured,
|
||||
'token_expiry_hours': TOKEN_EXPIRY_HOURS
|
||||
}
|
||||
}), 200
|
||||
|
||||
except Exception as e:
|
||||
logger.error(f"Auth status error: {str(e)}")
|
||||
return jsonify({
|
||||
'success': False,
|
||||
'error': 'Internal server error',
|
||||
'code': 'SERVER_ERROR'
|
||||
}), 500
|
||||
|
||||
|
||||
# Utility function to set master password hash
|
||||
def set_master_password(password: str) -> str:
|
||||
"""
|
||||
Generate hash for master password.
|
||||
This should be used to set MASTER_PASSWORD_HASH in environment.
|
||||
|
||||
Args:
|
||||
password: The master password to hash
|
||||
|
||||
Returns:
|
||||
The hashed password that should be stored in environment
|
||||
"""
|
||||
return hash_password(password)
|
||||
|
||||
|
||||
# Health check endpoint
|
||||
@simple_auth_bp.route('/auth/health', methods=['GET'])
|
||||
def health_check() -> Tuple[Any, int]:
|
||||
"""Health check for auth system."""
|
||||
return jsonify({
|
||||
'success': True,
|
||||
'message': 'Auth system is healthy',
|
||||
'timestamp': datetime.utcnow().isoformat() + 'Z'
|
||||
}), 200
|
||||
@ -1,661 +0,0 @@
|
||||
"""
|
||||
Storage Management API Endpoints
|
||||
|
||||
This module provides REST API endpoints for storage management operations,
|
||||
including storage monitoring, location management, and disk usage tracking.
|
||||
"""
|
||||
|
||||
from flask import Blueprint, request
|
||||
from typing import Dict, List, Any, Optional
|
||||
import os
|
||||
import shutil
|
||||
from datetime import datetime
|
||||
|
||||
from ...shared.auth_decorators import require_auth, optional_auth
|
||||
from ...shared.error_handlers import handle_api_errors, APIException, NotFoundError, ValidationError
|
||||
from ...shared.validators import validate_json_input, validate_id_parameter, validate_pagination_params
|
||||
from ...shared.response_helpers import (
|
||||
create_success_response, create_paginated_response, extract_pagination_params
|
||||
)
|
||||
|
||||
# Import storage components (these imports would need to be adjusted based on actual structure)
|
||||
try:
|
||||
from database_manager import storage_manager, database_manager, StorageLocation
|
||||
except ImportError:
|
||||
# Fallback for development/testing
|
||||
storage_manager = None
|
||||
database_manager = None
|
||||
StorageLocation = None
|
||||
|
||||
|
||||
# Blueprint for storage management endpoints
|
||||
storage_bp = Blueprint('storage', __name__, url_prefix='/api/v1/storage')
|
||||
|
||||
|
||||
@storage_bp.route('/summary', methods=['GET'])
|
||||
@handle_api_errors
|
||||
@optional_auth
|
||||
def get_storage_summary() -> Dict[str, Any]:
|
||||
"""
|
||||
Get overall storage usage summary.
|
||||
|
||||
Returns:
|
||||
Storage summary with usage statistics
|
||||
"""
|
||||
if not storage_manager:
|
||||
raise APIException("Storage manager not available", 503)
|
||||
|
||||
try:
|
||||
summary = storage_manager.get_storage_summary()
|
||||
|
||||
return create_success_response(
|
||||
data={
|
||||
'total_storage_gb': round(summary.get('total_bytes', 0) / (1024**3), 2),
|
||||
'used_storage_gb': round(summary.get('used_bytes', 0) / (1024**3), 2),
|
||||
'free_storage_gb': round(summary.get('free_bytes', 0) / (1024**3), 2),
|
||||
'usage_percentage': summary.get('usage_percentage', 0),
|
||||
'anime_storage_gb': round(summary.get('anime_bytes', 0) / (1024**3), 2),
|
||||
'backup_storage_gb': round(summary.get('backup_bytes', 0) / (1024**3), 2),
|
||||
'cache_storage_gb': round(summary.get('cache_bytes', 0) / (1024**3), 2),
|
||||
'temp_storage_gb': round(summary.get('temp_bytes', 0) / (1024**3), 2),
|
||||
'location_count': summary.get('location_count', 0),
|
||||
'active_locations': summary.get('active_locations', 0),
|
||||
'last_updated': summary.get('last_updated', datetime.utcnow()).isoformat()
|
||||
}
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
raise APIException(f"Failed to get storage summary: {str(e)}", 500)
|
||||
|
||||
|
||||
@storage_bp.route('/locations', methods=['GET'])
|
||||
@handle_api_errors
|
||||
@validate_pagination_params
|
||||
@optional_auth
|
||||
def get_storage_locations() -> Dict[str, Any]:
|
||||
"""
|
||||
Get all storage locations with optional filtering.
|
||||
|
||||
Query Parameters:
|
||||
- location_type: Filter by location type (primary, backup, cache, temp)
|
||||
- anime_id: Filter by anime ID
|
||||
- status: Filter by status (active, inactive, error)
|
||||
- min_free_gb: Minimum free space in GB
|
||||
- max_usage_percent: Maximum usage percentage
|
||||
- page: Page number (default: 1)
|
||||
- per_page: Items per page (default: 50, max: 1000)
|
||||
|
||||
Returns:
|
||||
Paginated list of storage locations
|
||||
"""
|
||||
if not storage_manager or not database_manager:
|
||||
raise APIException("Storage manager not available", 503)
|
||||
|
||||
# Extract filters
|
||||
location_type_filter = request.args.get('location_type')
|
||||
anime_id = request.args.get('anime_id')
|
||||
status_filter = request.args.get('status')
|
||||
min_free_gb = request.args.get('min_free_gb')
|
||||
max_usage_percent = request.args.get('max_usage_percent')
|
||||
|
||||
# Validate filters
|
||||
valid_types = ['primary', 'backup', 'cache', 'temp']
|
||||
if location_type_filter and location_type_filter not in valid_types:
|
||||
raise ValidationError(f"location_type must be one of: {', '.join(valid_types)}")
|
||||
|
||||
if anime_id:
|
||||
try:
|
||||
anime_id = int(anime_id)
|
||||
except ValueError:
|
||||
raise ValidationError("anime_id must be a valid integer")
|
||||
|
||||
valid_statuses = ['active', 'inactive', 'error']
|
||||
if status_filter and status_filter not in valid_statuses:
|
||||
raise ValidationError(f"status must be one of: {', '.join(valid_statuses)}")
|
||||
|
||||
if min_free_gb:
|
||||
try:
|
||||
min_free_gb = float(min_free_gb)
|
||||
if min_free_gb < 0:
|
||||
raise ValueError()
|
||||
except ValueError:
|
||||
raise ValidationError("min_free_gb must be a non-negative number")
|
||||
|
||||
if max_usage_percent:
|
||||
try:
|
||||
max_usage_percent = float(max_usage_percent)
|
||||
if not 0 <= max_usage_percent <= 100:
|
||||
raise ValueError()
|
||||
except ValueError:
|
||||
raise ValidationError("max_usage_percent must be between 0 and 100")
|
||||
|
||||
# Get pagination parameters
|
||||
page, per_page = extract_pagination_params()
|
||||
|
||||
try:
|
||||
# Query storage locations
|
||||
query = """
|
||||
SELECT sl.*, am.name as anime_name
|
||||
FROM storage_locations sl
|
||||
LEFT JOIN anime_metadata am ON sl.anime_id = am.anime_id
|
||||
WHERE 1=1
|
||||
"""
|
||||
params = []
|
||||
|
||||
if location_type_filter:
|
||||
query += " AND sl.location_type = ?"
|
||||
params.append(location_type_filter)
|
||||
|
||||
if anime_id:
|
||||
query += " AND sl.anime_id = ?"
|
||||
params.append(anime_id)
|
||||
|
||||
if status_filter:
|
||||
query += " AND sl.status = ?"
|
||||
params.append(status_filter)
|
||||
|
||||
query += " ORDER BY sl.location_type, sl.path"
|
||||
|
||||
results = database_manager.execute_query(query, params)
|
||||
|
||||
# Format and filter results
|
||||
locations = []
|
||||
for row in results:
|
||||
free_space_gb = (row['free_space_bytes'] / (1024**3)) if row['free_space_bytes'] else None
|
||||
total_space_gb = (row['total_space_bytes'] / (1024**3)) if row['total_space_bytes'] else None
|
||||
usage_percent = None
|
||||
|
||||
if row['total_space_bytes'] and row['free_space_bytes']:
|
||||
usage_percent = ((row['total_space_bytes'] - row['free_space_bytes']) / row['total_space_bytes'] * 100)
|
||||
|
||||
# Apply additional filters
|
||||
if min_free_gb and (free_space_gb is None or free_space_gb < min_free_gb):
|
||||
continue
|
||||
|
||||
if max_usage_percent and (usage_percent is None or usage_percent > max_usage_percent):
|
||||
continue
|
||||
|
||||
location_data = {
|
||||
'location_id': row['location_id'],
|
||||
'anime_id': row['anime_id'],
|
||||
'anime_name': row['anime_name'],
|
||||
'path': row['path'],
|
||||
'location_type': row['location_type'],
|
||||
'status': row['status'],
|
||||
'free_space_gb': free_space_gb,
|
||||
'total_space_gb': total_space_gb,
|
||||
'used_space_gb': (total_space_gb - free_space_gb) if (total_space_gb and free_space_gb) else None,
|
||||
'usage_percent': usage_percent,
|
||||
'last_checked': row['last_checked'],
|
||||
'created_at': row['created_at'],
|
||||
'is_active': row['is_active'],
|
||||
'mount_point': row.get('mount_point'),
|
||||
'filesystem': row.get('filesystem')
|
||||
}
|
||||
|
||||
locations.append(location_data)
|
||||
|
||||
# Apply pagination
|
||||
total = len(locations)
|
||||
start_idx = (page - 1) * per_page
|
||||
end_idx = start_idx + per_page
|
||||
paginated_locations = locations[start_idx:end_idx]
|
||||
|
||||
return create_paginated_response(
|
||||
data=paginated_locations,
|
||||
page=page,
|
||||
per_page=per_page,
|
||||
total=total,
|
||||
endpoint='storage.get_storage_locations'
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
raise APIException(f"Failed to get storage locations: {str(e)}", 500)
|
||||
|
||||
|
||||
@storage_bp.route('/locations', methods=['POST'])
|
||||
@handle_api_errors
|
||||
@validate_json_input(
|
||||
required_fields=['path', 'location_type'],
|
||||
optional_fields=['anime_id', 'description', 'mount_point', 'auto_create'],
|
||||
field_types={
|
||||
'path': str,
|
||||
'location_type': str,
|
||||
'anime_id': int,
|
||||
'description': str,
|
||||
'mount_point': str,
|
||||
'auto_create': bool
|
||||
}
|
||||
)
|
||||
@require_auth
|
||||
def add_storage_location() -> Dict[str, Any]:
|
||||
"""
|
||||
Add a new storage location.
|
||||
|
||||
Required Fields:
|
||||
- path: Storage path
|
||||
- location_type: Type of storage (primary, backup, cache, temp)
|
||||
|
||||
Optional Fields:
|
||||
- anime_id: Associated anime ID (for anime-specific storage)
|
||||
- description: Location description
|
||||
- mount_point: Mount point information
|
||||
- auto_create: Automatically create directory if it doesn't exist
|
||||
|
||||
Returns:
|
||||
Created storage location information
|
||||
"""
|
||||
if not storage_manager:
|
||||
raise APIException("Storage manager not available", 503)
|
||||
|
||||
data = request.get_json()
|
||||
path = data['path']
|
||||
location_type = data['location_type']
|
||||
anime_id = data.get('anime_id')
|
||||
description = data.get('description')
|
||||
mount_point = data.get('mount_point')
|
||||
auto_create = data.get('auto_create', False)
|
||||
|
||||
# Validate location type
|
||||
valid_types = ['primary', 'backup', 'cache', 'temp']
|
||||
if location_type not in valid_types:
|
||||
raise ValidationError(f"location_type must be one of: {', '.join(valid_types)}")
|
||||
|
||||
# Validate path
|
||||
if not path or not isinstance(path, str):
|
||||
raise ValidationError("path must be a valid string")
|
||||
|
||||
# Normalize path
|
||||
path = os.path.abspath(path)
|
||||
|
||||
# Check if path already exists as a storage location
|
||||
existing_location = storage_manager.get_location_by_path(path)
|
||||
if existing_location:
|
||||
raise ValidationError("Storage location with this path already exists")
|
||||
|
||||
# Check if directory exists or create it
|
||||
if not os.path.exists(path):
|
||||
if auto_create:
|
||||
try:
|
||||
os.makedirs(path, exist_ok=True)
|
||||
except Exception as e:
|
||||
raise ValidationError(f"Failed to create directory: {str(e)}")
|
||||
else:
|
||||
raise ValidationError("Directory does not exist. Set auto_create=true to create it.")
|
||||
|
||||
# Check if it's a directory
|
||||
if not os.path.isdir(path):
|
||||
raise ValidationError("Path must be a directory")
|
||||
|
||||
# Check if it's writable
|
||||
if not os.access(path, os.W_OK):
|
||||
raise ValidationError("Directory is not writable")
|
||||
|
||||
try:
|
||||
location_id = storage_manager.add_storage_location(
|
||||
path=path,
|
||||
location_type=location_type,
|
||||
anime_id=anime_id,
|
||||
description=description,
|
||||
mount_point=mount_point
|
||||
)
|
||||
|
||||
# Get the created location details
|
||||
location = storage_manager.get_location_by_id(location_id)
|
||||
|
||||
location_data = {
|
||||
'location_id': location.location_id,
|
||||
'path': location.path,
|
||||
'location_type': location.location_type,
|
||||
'anime_id': location.anime_id,
|
||||
'description': location.description,
|
||||
'mount_point': location.mount_point,
|
||||
'status': location.status,
|
||||
'created_at': location.created_at.isoformat(),
|
||||
'is_active': location.is_active
|
||||
}
|
||||
|
||||
return create_success_response(
|
||||
data=location_data,
|
||||
message="Storage location added successfully",
|
||||
status_code=201
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
raise APIException(f"Failed to add storage location: {str(e)}", 500)
|
||||
|
||||
|
||||
@storage_bp.route('/locations/<int:location_id>', methods=['GET'])
|
||||
@handle_api_errors
|
||||
@validate_id_parameter('location_id')
|
||||
@optional_auth
|
||||
def get_storage_location(location_id: int) -> Dict[str, Any]:
|
||||
"""
|
||||
Get detailed information about a specific storage location.
|
||||
|
||||
Args:
|
||||
location_id: Unique identifier for the storage location
|
||||
|
||||
Returns:
|
||||
Detailed storage location information
|
||||
"""
|
||||
if not storage_manager:
|
||||
raise APIException("Storage manager not available", 503)
|
||||
|
||||
location = storage_manager.get_location_by_id(location_id)
|
||||
if not location:
|
||||
raise NotFoundError("Storage location not found")
|
||||
|
||||
try:
|
||||
# Get detailed storage statistics
|
||||
stats = storage_manager.get_location_stats(location_id)
|
||||
|
||||
location_data = {
|
||||
'location_id': location.location_id,
|
||||
'path': location.path,
|
||||
'location_type': location.location_type,
|
||||
'anime_id': location.anime_id,
|
||||
'description': location.description,
|
||||
'mount_point': location.mount_point,
|
||||
'status': location.status,
|
||||
'created_at': location.created_at.isoformat(),
|
||||
'last_checked': location.last_checked.isoformat() if location.last_checked else None,
|
||||
'is_active': location.is_active,
|
||||
'free_space_gb': round(stats.get('free_bytes', 0) / (1024**3), 2),
|
||||
'total_space_gb': round(stats.get('total_bytes', 0) / (1024**3), 2),
|
||||
'used_space_gb': round(stats.get('used_bytes', 0) / (1024**3), 2),
|
||||
'usage_percent': stats.get('usage_percentage', 0),
|
||||
'file_count': stats.get('file_count', 0),
|
||||
'directory_count': stats.get('directory_count', 0),
|
||||
'largest_file_mb': round(stats.get('largest_file_bytes', 0) / (1024**2), 2),
|
||||
'filesystem': stats.get('filesystem'),
|
||||
'mount_options': stats.get('mount_options'),
|
||||
'health_status': stats.get('health_status', 'unknown')
|
||||
}
|
||||
|
||||
return create_success_response(location_data)
|
||||
|
||||
except Exception as e:
|
||||
raise APIException(f"Failed to get storage location: {str(e)}", 500)
|
||||
|
||||
|
||||
@storage_bp.route('/locations/<int:location_id>', methods=['PUT'])
|
||||
@handle_api_errors
|
||||
@validate_id_parameter('location_id')
|
||||
@validate_json_input(
|
||||
optional_fields=['description', 'location_type', 'is_active', 'mount_point'],
|
||||
field_types={
|
||||
'description': str,
|
||||
'location_type': str,
|
||||
'is_active': bool,
|
||||
'mount_point': str
|
||||
}
|
||||
)
|
||||
@require_auth
|
||||
def update_storage_location(location_id: int) -> Dict[str, Any]:
|
||||
"""
|
||||
Update a storage location.
|
||||
|
||||
Args:
|
||||
location_id: Unique identifier for the storage location
|
||||
|
||||
Optional Fields:
|
||||
- description: Updated description
|
||||
- location_type: Updated location type
|
||||
- is_active: Active status
|
||||
- mount_point: Mount point information
|
||||
|
||||
Returns:
|
||||
Updated storage location information
|
||||
"""
|
||||
if not storage_manager:
|
||||
raise APIException("Storage manager not available", 503)
|
||||
|
||||
data = request.get_json()
|
||||
|
||||
# Check if location exists
|
||||
location = storage_manager.get_location_by_id(location_id)
|
||||
if not location:
|
||||
raise NotFoundError("Storage location not found")
|
||||
|
||||
# Validate location type if provided
|
||||
if 'location_type' in data:
|
||||
valid_types = ['primary', 'backup', 'cache', 'temp']
|
||||
if data['location_type'] not in valid_types:
|
||||
raise ValidationError(f"location_type must be one of: {', '.join(valid_types)}")
|
||||
|
||||
try:
|
||||
# Update location
|
||||
success = storage_manager.update_location(location_id, data)
|
||||
|
||||
if not success:
|
||||
raise APIException("Failed to update storage location", 500)
|
||||
|
||||
# Get updated location
|
||||
updated_location = storage_manager.get_location_by_id(location_id)
|
||||
|
||||
location_data = {
|
||||
'location_id': updated_location.location_id,
|
||||
'path': updated_location.path,
|
||||
'location_type': updated_location.location_type,
|
||||
'anime_id': updated_location.anime_id,
|
||||
'description': updated_location.description,
|
||||
'mount_point': updated_location.mount_point,
|
||||
'status': updated_location.status,
|
||||
'is_active': updated_location.is_active,
|
||||
'updated_at': datetime.utcnow().isoformat()
|
||||
}
|
||||
|
||||
return create_success_response(
|
||||
data=location_data,
|
||||
message="Storage location updated successfully"
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
raise APIException(f"Failed to update storage location: {str(e)}", 500)
|
||||
|
||||
|
||||
@storage_bp.route('/locations/<int:location_id>', methods=['DELETE'])
|
||||
@handle_api_errors
|
||||
@validate_id_parameter('location_id')
|
||||
@require_auth
|
||||
def delete_storage_location(location_id: int) -> Dict[str, Any]:
|
||||
"""
|
||||
Delete a storage location.
|
||||
|
||||
Args:
|
||||
location_id: Unique identifier for the storage location
|
||||
|
||||
Query Parameters:
|
||||
- force: Force deletion even if location contains files
|
||||
- delete_files: Also delete files in the location
|
||||
|
||||
Returns:
|
||||
Deletion confirmation
|
||||
"""
|
||||
if not storage_manager:
|
||||
raise APIException("Storage manager not available", 503)
|
||||
|
||||
# Check if location exists
|
||||
location = storage_manager.get_location_by_id(location_id)
|
||||
if not location:
|
||||
raise NotFoundError("Storage location not found")
|
||||
|
||||
force = request.args.get('force', 'false').lower() == 'true'
|
||||
delete_files = request.args.get('delete_files', 'false').lower() == 'true'
|
||||
|
||||
try:
|
||||
# Check if location has files (unless force is used)
|
||||
if not force:
|
||||
stats = storage_manager.get_location_stats(location_id)
|
||||
if stats.get('file_count', 0) > 0:
|
||||
raise ValidationError(
|
||||
f"Storage location contains {stats['file_count']} files. "
|
||||
"Use force=true to delete anyway."
|
||||
)
|
||||
|
||||
# Delete location
|
||||
success = storage_manager.delete_location(location_id, delete_files=delete_files)
|
||||
|
||||
if not success:
|
||||
raise APIException("Failed to delete storage location", 500)
|
||||
|
||||
message = f"Storage location deleted successfully"
|
||||
if delete_files:
|
||||
message += " (including all files)"
|
||||
|
||||
return create_success_response(message=message)
|
||||
|
||||
except Exception as e:
|
||||
raise APIException(f"Failed to delete storage location: {str(e)}", 500)
|
||||
|
||||
|
||||
@storage_bp.route('/locations/<int:location_id>/refresh', methods=['POST'])
|
||||
@handle_api_errors
|
||||
@validate_id_parameter('location_id')
|
||||
@require_auth
|
||||
def refresh_storage_location(location_id: int) -> Dict[str, Any]:
|
||||
"""
|
||||
Refresh storage statistics for a location.
|
||||
|
||||
Args:
|
||||
location_id: Unique identifier for the storage location
|
||||
|
||||
Returns:
|
||||
Updated storage statistics
|
||||
"""
|
||||
if not storage_manager:
|
||||
raise APIException("Storage manager not available", 503)
|
||||
|
||||
# Check if location exists
|
||||
location = storage_manager.get_location_by_id(location_id)
|
||||
if not location:
|
||||
raise NotFoundError("Storage location not found")
|
||||
|
||||
try:
|
||||
# Update storage statistics
|
||||
stats = storage_manager.update_location_stats(location_id)
|
||||
|
||||
return create_success_response(
|
||||
data={
|
||||
'location_id': location_id,
|
||||
'free_space_gb': round(stats.get('free_bytes', 0) / (1024**3), 2),
|
||||
'total_space_gb': round(stats.get('total_bytes', 0) / (1024**3), 2),
|
||||
'used_space_gb': round(stats.get('used_bytes', 0) / (1024**3), 2),
|
||||
'usage_percent': stats.get('usage_percentage', 0),
|
||||
'file_count': stats.get('file_count', 0),
|
||||
'directory_count': stats.get('directory_count', 0),
|
||||
'last_updated': datetime.utcnow().isoformat()
|
||||
},
|
||||
message="Storage statistics updated successfully"
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
raise APIException(f"Failed to refresh storage location: {str(e)}", 500)
|
||||
|
||||
|
||||
@storage_bp.route('/cleanup', methods=['POST'])
|
||||
@handle_api_errors
|
||||
@validate_json_input(
|
||||
optional_fields=['location_type', 'target_usage_percent', 'cleanup_temp', 'cleanup_cache', 'dry_run'],
|
||||
field_types={
|
||||
'location_type': str,
|
||||
'target_usage_percent': float,
|
||||
'cleanup_temp': bool,
|
||||
'cleanup_cache': bool,
|
||||
'dry_run': bool
|
||||
}
|
||||
)
|
||||
@require_auth
|
||||
def cleanup_storage() -> Dict[str, Any]:
|
||||
"""
|
||||
Perform storage cleanup operations.
|
||||
|
||||
Optional Fields:
|
||||
- location_type: Type of locations to clean (temp, cache, backup)
|
||||
- target_usage_percent: Target usage percentage after cleanup
|
||||
- cleanup_temp: Clean temporary files
|
||||
- cleanup_cache: Clean cache files
|
||||
- dry_run: Preview what would be cleaned without actually doing it
|
||||
|
||||
Returns:
|
||||
Cleanup results
|
||||
"""
|
||||
if not storage_manager:
|
||||
raise APIException("Storage manager not available", 503)
|
||||
|
||||
data = request.get_json() or {}
|
||||
location_type = data.get('location_type', 'temp')
|
||||
target_usage_percent = data.get('target_usage_percent', 80.0)
|
||||
cleanup_temp = data.get('cleanup_temp', True)
|
||||
cleanup_cache = data.get('cleanup_cache', False)
|
||||
dry_run = data.get('dry_run', False)
|
||||
|
||||
# Validate parameters
|
||||
valid_types = ['temp', 'cache', 'backup']
|
||||
if location_type not in valid_types:
|
||||
raise ValidationError(f"location_type must be one of: {', '.join(valid_types)}")
|
||||
|
||||
if not 0 <= target_usage_percent <= 100:
|
||||
raise ValidationError("target_usage_percent must be between 0 and 100")
|
||||
|
||||
try:
|
||||
cleanup_result = storage_manager.cleanup_storage(
|
||||
location_type=location_type,
|
||||
target_usage_percent=target_usage_percent,
|
||||
cleanup_temp=cleanup_temp,
|
||||
cleanup_cache=cleanup_cache,
|
||||
dry_run=dry_run
|
||||
)
|
||||
|
||||
return create_success_response(
|
||||
data={
|
||||
'dry_run': dry_run,
|
||||
'location_type': location_type,
|
||||
'files_deleted': cleanup_result.get('files_deleted', 0),
|
||||
'directories_deleted': cleanup_result.get('directories_deleted', 0),
|
||||
'space_freed_gb': round(cleanup_result.get('space_freed_bytes', 0) / (1024**3), 2),
|
||||
'cleanup_summary': cleanup_result.get('summary', {}),
|
||||
'target_usage_percent': target_usage_percent,
|
||||
'final_usage_percent': cleanup_result.get('final_usage_percent')
|
||||
},
|
||||
message=f"Storage cleanup {'simulated' if dry_run else 'completed'}"
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
raise APIException(f"Failed to cleanup storage: {str(e)}", 500)
|
||||
|
||||
|
||||
@storage_bp.route('/health', methods=['GET'])
|
||||
@handle_api_errors
|
||||
@optional_auth
|
||||
def get_storage_health() -> Dict[str, Any]:
|
||||
"""
|
||||
Get storage health status across all locations.
|
||||
|
||||
Returns:
|
||||
Storage health information
|
||||
"""
|
||||
if not storage_manager:
|
||||
raise APIException("Storage manager not available", 503)
|
||||
|
||||
try:
|
||||
health_status = storage_manager.get_storage_health()
|
||||
|
||||
return create_success_response(
|
||||
data={
|
||||
'overall_status': health_status.get('overall_status', 'unknown'),
|
||||
'total_locations': health_status.get('total_locations', 0),
|
||||
'healthy_locations': health_status.get('healthy_locations', 0),
|
||||
'warning_locations': health_status.get('warning_locations', 0),
|
||||
'error_locations': health_status.get('error_locations', 0),
|
||||
'average_usage_percent': health_status.get('average_usage_percent', 0),
|
||||
'locations_near_full': health_status.get('locations_near_full', []),
|
||||
'locations_with_errors': health_status.get('locations_with_errors', []),
|
||||
'recommendations': health_status.get('recommendations', []),
|
||||
'last_check': health_status.get('last_check', datetime.utcnow()).isoformat()
|
||||
}
|
||||
)
|
||||
|
||||
except Exception as e:
|
||||
raise APIException(f"Failed to get storage health: {str(e)}", 500)
|
||||
@ -1,352 +0,0 @@
|
||||
"""
|
||||
Base controller with common functionality for all controllers.
|
||||
|
||||
This module provides a base controller class that eliminates common duplications
|
||||
across different controller modules by providing standardized error handling,
|
||||
validation, and response formatting.
|
||||
"""
|
||||
|
||||
from abc import ABC
|
||||
from typing import Any, Dict, Optional, List, Union, Tuple, Callable
|
||||
try:
|
||||
from flask import jsonify, request
|
||||
from werkzeug.exceptions import HTTPException
|
||||
except ImportError:
|
||||
# Fallback for environments without Flask
|
||||
def jsonify(data):
|
||||
import json
|
||||
return json.dumps(data)
|
||||
|
||||
class HTTPException(Exception):
|
||||
def __init__(self, status_code, detail):
|
||||
self.status_code = status_code
|
||||
self.detail = detail
|
||||
super().__init__(detail)
|
||||
|
||||
class request:
|
||||
is_json = False
|
||||
@staticmethod
|
||||
def get_json():
|
||||
return {}
|
||||
headers = {}
|
||||
args = {}
|
||||
form = {}
|
||||
|
||||
try:
|
||||
from pydantic import BaseModel
|
||||
except ImportError:
|
||||
# Fallback BaseModel
|
||||
class BaseModel:
|
||||
pass
|
||||
|
||||
import logging
|
||||
import functools
|
||||
|
||||
|
||||
class BaseController(ABC):
|
||||
"""Base controller with common functionality for all controllers."""
|
||||
|
||||
def __init__(self):
|
||||
self.logger = logging.getLogger(self.__class__.__name__)
|
||||
|
||||
def handle_error(self, error: Exception, status_code: int = 500) -> HTTPException:
|
||||
"""
|
||||
Standardized error handling across all controllers.
|
||||
|
||||
Args:
|
||||
error: The exception that occurred
|
||||
status_code: HTTP status code to return
|
||||
|
||||
Returns:
|
||||
HTTPException with standardized format
|
||||
"""
|
||||
self.logger.error(f"Controller error: {str(error)}", exc_info=True)
|
||||
return HTTPException(status_code, str(error))
|
||||
|
||||
def validate_request(self, data: BaseModel) -> bool:
|
||||
"""
|
||||
Common validation logic for request data.
|
||||
|
||||
Args:
|
||||
data: Pydantic model to validate
|
||||
|
||||
Returns:
|
||||
True if validation passes
|
||||
|
||||
Raises:
|
||||
ValidationError if validation fails
|
||||
"""
|
||||
try:
|
||||
# Pydantic models automatically validate on instantiation
|
||||
return True
|
||||
except Exception as e:
|
||||
self.logger.warning(f"Validation failed: {str(e)}")
|
||||
raise
|
||||
|
||||
def format_response(self, data: Any, message: str = "Success") -> Dict[str, Any]:
|
||||
"""
|
||||
Standardized response format for successful operations.
|
||||
|
||||
Args:
|
||||
data: Data to include in response
|
||||
message: Success message
|
||||
|
||||
Returns:
|
||||
Standardized success response dictionary
|
||||
"""
|
||||
return {
|
||||
"status": "success",
|
||||
"message": message,
|
||||
"data": data
|
||||
}
|
||||
|
||||
def format_error_response(self, message: str, status_code: int = 400, details: Any = None) -> Tuple[Dict[str, Any], int]:
|
||||
"""
|
||||
Standardized error response format.
|
||||
|
||||
Args:
|
||||
message: Error message
|
||||
status_code: HTTP status code
|
||||
details: Additional error details
|
||||
|
||||
Returns:
|
||||
Tuple of (error_response_dict, status_code)
|
||||
"""
|
||||
response = {
|
||||
"status": "error",
|
||||
"message": message,
|
||||
"error_code": status_code
|
||||
}
|
||||
|
||||
if details:
|
||||
response["details"] = details
|
||||
|
||||
return response, status_code
|
||||
|
||||
def create_success_response(
|
||||
self,
|
||||
data: Any = None,
|
||||
message: str = "Operation successful",
|
||||
status_code: int = 200,
|
||||
pagination: Optional[Dict[str, Any]] = None,
|
||||
meta: Optional[Dict[str, Any]] = None
|
||||
) -> Tuple[Dict[str, Any], int]:
|
||||
"""
|
||||
Create a standardized success response.
|
||||
|
||||
Args:
|
||||
data: Data to include in response
|
||||
message: Success message
|
||||
status_code: HTTP status code
|
||||
pagination: Pagination information
|
||||
meta: Additional metadata
|
||||
|
||||
Returns:
|
||||
Tuple of (response_dict, status_code)
|
||||
"""
|
||||
response = {
|
||||
'status': 'success',
|
||||
'message': message
|
||||
}
|
||||
|
||||
if data is not None:
|
||||
response['data'] = data
|
||||
|
||||
if pagination:
|
||||
response['pagination'] = pagination
|
||||
|
||||
if meta:
|
||||
response['meta'] = meta
|
||||
|
||||
return response, status_code
|
||||
|
||||
def create_error_response(
|
||||
self,
|
||||
message: str,
|
||||
    status_code: int = 400,
    details: Any = None,
    error_code: Optional[str] = None
) -> Tuple[Dict[str, Any], int]:
    """
    Create a standardized error response.

    Args:
        message: Error message
        status_code: HTTP status code
        details: Additional error details
        error_code: Specific error code

    Returns:
        Tuple of (response_dict, status_code)
    """
    response = {
        'status': 'error',
        'message': message,
        'error_code': error_code or status_code
    }

    if details:
        response['details'] = details

    return response, status_code


def handle_api_errors(f: Callable) -> Callable:
    """
    Decorator for standardized API error handling.

    This decorator should be used on all API endpoints to ensure
    consistent error handling and response formatting.
    """
    @functools.wraps(f)
    def decorated_function(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        except HTTPException:
            # Re-raise HTTP exceptions as they are already properly formatted
            raise
        except ValueError as e:
            # Handle validation errors
            return jsonify({
                'status': 'error',
                'message': 'Invalid input data',
                'details': str(e),
                'error_code': 400
            }), 400
        except PermissionError as e:
            # Handle authorization errors
            return jsonify({
                'status': 'error',
                'message': 'Access denied',
                'details': str(e),
                'error_code': 403
            }), 403
        except FileNotFoundError as e:
            # Handle not found errors
            return jsonify({
                'status': 'error',
                'message': 'Resource not found',
                'details': str(e),
                'error_code': 404
            }), 404
        except Exception as e:
            # Handle all other errors
            logging.getLogger(__name__).error(f"Unhandled error in {f.__name__}: {str(e)}", exc_info=True)
            return jsonify({
                'status': 'error',
                'message': 'Internal server error',
                'details': str(e) if logging.getLogger().isEnabledFor(logging.DEBUG) else 'An unexpected error occurred',
                'error_code': 500
            }), 500

    return decorated_function


def require_auth(f: Callable) -> Callable:
    """
    Decorator to require authentication for API endpoints.

    This decorator should be applied to endpoints that require
    user authentication.
    """
    @functools.wraps(f)
    def decorated_function(*args, **kwargs):
        # Implementation would depend on your authentication system
        # For now, this is a placeholder that should be implemented
        # based on your specific authentication requirements

        # Example implementation:
        # auth_header = request.headers.get('Authorization')
        # if not auth_header or not validate_auth_token(auth_header):
        #     return jsonify({
        #         'status': 'error',
        #         'message': 'Authentication required',
        #         'error_code': 401
        #     }), 401

        return f(*args, **kwargs)

    return decorated_function


def optional_auth(f: Callable) -> Callable:
    """
    Decorator for optional authentication.

    This decorator allows endpoints to work with or without authentication,
    but provides additional functionality when authenticated.
    """
    @functools.wraps(f)
    def decorated_function(*args, **kwargs):
        # Implementation would depend on your authentication system
        # This would set user context if authenticated, but not fail if not
        return f(*args, **kwargs)

    return decorated_function


def validate_json_input(
    required_fields: Optional[List[str]] = None,
    optional_fields: Optional[List[str]] = None,
    **field_validators
) -> Callable:
    """
    Decorator for JSON input validation.

    Args:
        required_fields: List of required field names
        optional_fields: List of optional field names
        **field_validators: Field-specific validation functions

    Returns:
        Decorator function
    """
    def decorator(f: Callable) -> Callable:
        @functools.wraps(f)
        def decorated_function(*args, **kwargs):
            if not request.is_json:
                return jsonify({
                    'status': 'error',
                    'message': 'Request must contain JSON data',
                    'error_code': 400
                }), 400

            data = request.get_json()
            if not data:
                return jsonify({
                    'status': 'error',
                    'message': 'Invalid JSON data',
                    'error_code': 400
                }), 400

            # Check required fields
            if required_fields:
                missing_fields = [field for field in required_fields if field not in data]
                if missing_fields:
                    return jsonify({
                        'status': 'error',
                        'message': f'Missing required fields: {", ".join(missing_fields)}',
                        'error_code': 400
                    }), 400

            # Apply field validators
            for field, validator in field_validators.items():
                if field in data:
                    try:
                        if not validator(data[field]):
                            return jsonify({
                                'status': 'error',
                                'message': f'Invalid value for field: {field}',
                                'error_code': 400
                            }), 400
                    except Exception as e:
                        return jsonify({
                            'status': 'error',
                            'message': f'Validation error for field {field}: {str(e)}',
                            'error_code': 400
                        }), 400

            return f(*args, **kwargs)

        return decorated_function
    return decorator
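A minimal usage sketch, not part of this commit, of how the decorators above are intended to stack on a Flask endpoint; the blueprint name, route, and the inline URL validator are assumptions for illustration only:

from flask import Blueprint, request, jsonify

api_bp = Blueprint('api', __name__)

@api_bp.route('/api/anime', methods=['POST'])
@handle_api_errors
@require_auth
@validate_json_input(
    required_fields=['name', 'url'],
    # Field validators are callables returning a truthy value when valid.
    url=lambda value: isinstance(value, str) and value.startswith('http')
)
def create_anime_endpoint():
    # Validation and error formatting are handled entirely by the decorators.
    data = request.get_json()
    return jsonify({'status': 'success', 'data': data}), 201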
@@ -1 +0,0 @@
"""Shared utilities and helpers for web controllers."""
@@ -1,150 +0,0 @@
"""
Authentication decorators and utilities for API endpoints.

This module provides authentication decorators that can be used across
all controller modules for consistent authentication handling.
"""

import logging
from functools import wraps
from typing import Optional, Dict, Any, Callable
from flask import session, request, jsonify, redirect, url_for

# Import session manager from auth controller
from ..auth_controller import session_manager


def require_auth(f: Callable) -> Callable:
    """
    Decorator to require authentication for Flask routes.

    Args:
        f: The function to decorate

    Returns:
        Decorated function that requires authentication

    Usage:
        @require_auth
        def protected_endpoint():
            return "This requires authentication"
    """
    @wraps(f)
    def decorated_function(*args, **kwargs):
        if not session_manager.is_authenticated():
            # Check if this is an AJAX request (JSON, XMLHttpRequest, or fetch API request)
            is_ajax = (
                request.is_json or
                request.headers.get('X-Requested-With') == 'XMLHttpRequest' or
                request.headers.get('Accept', '').startswith('application/json') or
                '/api/' in request.path  # API endpoints should return JSON
            )

            if is_ajax:
                return jsonify({
                    'status': 'error',
                    'message': 'Authentication required',
                    'code': 'AUTH_REQUIRED'
                }), 401
            else:
                return redirect(url_for('auth.login'))
        return f(*args, **kwargs)
    return decorated_function


def optional_auth(f: Callable) -> Callable:
    """
    Decorator that checks auth but doesn't require it.

    This decorator will only require authentication if a master password
    has been configured in the system.

    Args:
        f: The function to decorate

    Returns:
        Decorated function that optionally requires authentication

    Usage:
        @optional_auth
        def maybe_protected_endpoint():
            return "This may require authentication"
    """
    @wraps(f)
    def decorated_function(*args, **kwargs):
        # Import config here to avoid circular imports
        from config import config

        # Check if master password is configured
        if config.has_master_password():
            # If configured, require authentication
            if not session_manager.is_authenticated():
                # Check if this is an AJAX request (JSON, XMLHttpRequest, or fetch API request)
                is_ajax = (
                    request.is_json or
                    request.headers.get('X-Requested-With') == 'XMLHttpRequest' or
                    request.headers.get('Accept', '').startswith('application/json') or
                    '/api/' in request.path  # API endpoints should return JSON
                )

                if is_ajax:
                    return jsonify({
                        'status': 'error',
                        'message': 'Authentication required',
                        'code': 'AUTH_REQUIRED'
                    }), 401
                else:
                    return redirect(url_for('auth.login'))
        return f(*args, **kwargs)
    return decorated_function


def get_current_user() -> Optional[Dict[str, Any]]:
    """
    Get current authenticated user information.

    Returns:
        Dictionary containing user information if authenticated, None otherwise
    """
    if session_manager.is_authenticated():
        return session_manager.get_session_info()
    return None


def get_client_ip() -> str:
    """
    Get client IP address with proxy support.

    Returns:
        Client IP address as string
    """
    # Check for forwarded IP (in case of reverse proxy)
    forwarded_ip = request.headers.get('X-Forwarded-For')
    if forwarded_ip:
        return forwarded_ip.split(',')[0].strip()

    real_ip = request.headers.get('X-Real-IP')
    if real_ip:
        return real_ip

    return request.remote_addr or 'unknown'


def is_authenticated() -> bool:
    """
    Check if current request is from an authenticated user.

    Returns:
        True if authenticated, False otherwise
    """
    return session_manager.is_authenticated()


def logout_current_user() -> bool:
    """
    Logout the current user.

    Returns:
        True if logout was successful, False otherwise
    """
    return session_manager.logout()
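A hedged usage sketch for the deleted module above (blueprint and route names are assumptions): a session-protected endpoint that reports the current session info and client IP.

from flask import Blueprint, jsonify

auth_demo_bp = Blueprint('auth_demo', __name__)

@auth_demo_bp.route('/api/session')
@require_auth
def session_info():
    # get_current_user() is non-None here because @require_auth has already
    # rejected unauthenticated callers (401 JSON for AJAX, redirect otherwise).
    return jsonify({
        'status': 'success',
        'user': get_current_user(),
        'client_ip': get_client_ip()
    })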
@@ -1,286 +0,0 @@
"""
Error handling decorators and utilities for API endpoints.

This module provides standardized error handling decorators and utilities
that can be used across all controller modules for consistent error responses.
"""

import logging
import traceback
from functools import wraps
from typing import Dict, Any, Callable, Tuple, Optional, Union
from flask import jsonify, request


logger = logging.getLogger(__name__)


def handle_api_errors(f: Callable) -> Callable:
    """
    Decorator to handle API errors consistently across all endpoints.

    This decorator catches exceptions and returns standardized error responses
    with appropriate HTTP status codes.

    Args:
        f: The function to decorate

    Returns:
        Decorated function with error handling

    Usage:
        @handle_api_errors
        def my_endpoint():
            # This will automatically handle any exceptions
            return {"data": "success"}
    """
    @wraps(f)
    def decorated_function(*args, **kwargs):
        try:
            result = f(*args, **kwargs)

            # If result is already a Response object, return it
            if hasattr(result, 'status_code'):
                return result

            # If result is a tuple (data, status_code), handle it
            if isinstance(result, tuple) and len(result) == 2:
                data, status_code = result
                if isinstance(data, dict) and 'status' not in data:
                    data['status'] = 'success' if 200 <= status_code < 300 else 'error'
                return jsonify(data), status_code

            # If result is a dict, wrap it in success response
            if isinstance(result, dict):
                if 'status' not in result:
                    result['status'] = 'success'
                return jsonify(result)

            # For other types, wrap in success response
            return jsonify({
                'status': 'success',
                'data': result
            })

        except ValueError as e:
            logger.warning(f"Validation error in {f.__name__}: {str(e)}")
            return create_error_response(
                message=str(e),
                status_code=400,
                error_code='VALIDATION_ERROR'
            )

        except PermissionError as e:
            logger.warning(f"Permission error in {f.__name__}: {str(e)}")
            return create_error_response(
                message="Access denied",
                status_code=403,
                error_code='ACCESS_DENIED'
            )

        except FileNotFoundError as e:
            logger.warning(f"File not found in {f.__name__}: {str(e)}")
            return create_error_response(
                message="Resource not found",
                status_code=404,
                error_code='NOT_FOUND'
            )

        except Exception as e:
            logger.error(f"Unexpected error in {f.__name__}: {str(e)}")
            logger.error(f"Traceback: {traceback.format_exc()}")

            # Don't expose internal errors in production
            return create_error_response(
                message="Internal server error",
                status_code=500,
                error_code='INTERNAL_ERROR'
            )

    return decorated_function


def handle_database_errors(f: Callable) -> Callable:
    """
    Decorator specifically for database-related operations.

    Args:
        f: The function to decorate

    Returns:
        Decorated function with database error handling
    """
    @wraps(f)
    def decorated_function(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        except Exception as e:
            logger.error(f"Database error in {f.__name__}: {str(e)}")
            return create_error_response(
                message="Database operation failed",
                status_code=500,
                error_code='DATABASE_ERROR'
            )
    return decorated_function


def handle_file_operations(f: Callable) -> Callable:
    """
    Decorator for file operation error handling.

    Args:
        f: The function to decorate

    Returns:
        Decorated function with file operation error handling
    """
    @wraps(f)
    def decorated_function(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        except FileNotFoundError as e:
            logger.warning(f"File not found in {f.__name__}: {str(e)}")
            return create_error_response(
                message="File not found",
                status_code=404,
                error_code='FILE_NOT_FOUND'
            )
        except PermissionError as e:
            logger.warning(f"File permission error in {f.__name__}: {str(e)}")
            return create_error_response(
                message="Permission denied",
                status_code=403,
                error_code='PERMISSION_DENIED'
            )
        except OSError as e:
            logger.error(f"File system error in {f.__name__}: {str(e)}")
            return create_error_response(
                message="File system error",
                status_code=500,
                error_code='FILE_SYSTEM_ERROR'
            )
    return decorated_function


def create_error_response(
    message: str,
    status_code: int = 400,
    error_code: Optional[str] = None,
    errors: Optional[list] = None,
    data: Optional[Dict[str, Any]] = None
) -> Tuple[Dict[str, Any], int]:
    """
    Create a standardized error response.

    Args:
        message: Error message to display
        status_code: HTTP status code
        error_code: Optional error code for client handling
        errors: Optional list of detailed errors
        data: Optional additional data

    Returns:
        Tuple of (response_dict, status_code)
    """
    response = {
        'status': 'error',
        'message': message
    }

    if error_code:
        response['error_code'] = error_code

    if errors:
        response['errors'] = errors

    if data:
        response['data'] = data

    return response, status_code


def create_success_response(
    data: Any = None,
    message: str = "Operation successful",
    status_code: int = 200
) -> Tuple[Dict[str, Any], int]:
    """
    Create a standardized success response.

    Args:
        data: Data to include in response
        message: Success message
        status_code: HTTP status code

    Returns:
        Tuple of (response_dict, status_code)
    """
    response = {
        'status': 'success',
        'message': message
    }

    if data is not None:
        response['data'] = data

    return response, status_code


def log_request_info():
    """Log request information for debugging."""
    logger.info(f"Request: {request.method} {request.path}")
    if request.is_json:
        logger.debug(f"Request JSON: {request.get_json()}")
    if request.args:
        logger.debug(f"Request args: {dict(request.args)}")


class APIException(Exception):
    """Custom exception for API errors."""

    def __init__(
        self,
        message: str,
        status_code: int = 400,
        error_code: Optional[str] = None,
        errors: Optional[list] = None
    ):
        self.message = message
        self.status_code = status_code
        self.error_code = error_code
        self.errors = errors
        super().__init__(message)


class ValidationError(APIException):
    """Exception for validation errors."""

    def __init__(self, message: str, errors: Optional[list] = None):
        super().__init__(
            message=message,
            status_code=400,
            error_code='VALIDATION_ERROR',
            errors=errors
        )


class NotFoundError(APIException):
    """Exception for not found errors."""

    def __init__(self, message: str = "Resource not found"):
        super().__init__(
            message=message,
            status_code=404,
            error_code='NOT_FOUND'
        )


class PermissionError(APIException):
    """Exception for permission errors."""

    def __init__(self, message: str = "Access denied"):
        super().__init__(
            message=message,
            status_code=403,
            error_code='ACCESS_DENIED'
        )
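One hedged observation on the deleted module above: as written, handle_api_errors has no branch for the APIException hierarchy defined at the bottom of the file, so a raised NotFoundError would fall through to the generic 500 handler, and the custom PermissionError shadows the built-in exception of the same name inside this module. A small sketch, an assumption rather than part of the commit, of a helper that would map such exceptions onto the standard error envelope while honouring the status and code they carry:

def handle_api_exception(exc: APIException) -> Tuple[Dict[str, Any], int]:
    # Sketch only: convert an APIException subclass into the standard error
    # envelope, keeping its own status_code, error_code, and error list.
    return create_error_response(
        message=exc.message,
        status_code=exc.status_code,
        error_code=exc.error_code,
        errors=exc.errors
    )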
@@ -1,406 +0,0 @@
"""
Response formatting utilities for API endpoints.

This module provides utilities for creating consistent response formats
across all controller modules.
"""

from typing import Any, Dict, List, Optional, Union, Tuple
from flask import jsonify, url_for, request
import math


def create_success_response(
    data: Any = None,
    message: str = "Operation successful",
    status_code: int = 200,
    pagination: Optional[Dict[str, Any]] = None,
    meta: Optional[Dict[str, Any]] = None
) -> Tuple[Dict[str, Any], int]:
    """
    Create a standardized success response.

    Args:
        data: Data to include in response
        message: Success message
        status_code: HTTP status code
        pagination: Pagination information
        meta: Additional metadata

    Returns:
        Tuple of (response_dict, status_code)
    """
    response = {
        'status': 'success',
        'message': message
    }

    if data is not None:
        response['data'] = data

    if pagination:
        response['pagination'] = pagination

    if meta:
        response['meta'] = meta

    return response, status_code


def create_error_response(
    message: str,
    status_code: int = 400,
    error_code: Optional[str] = None,
    errors: Optional[List[str]] = None,
    data: Optional[Dict[str, Any]] = None
) -> Tuple[Dict[str, Any], int]:
    """
    Create a standardized error response.

    Args:
        message: Error message to display
        status_code: HTTP status code
        error_code: Optional error code for client handling
        errors: Optional list of detailed errors
        data: Optional additional data

    Returns:
        Tuple of (response_dict, status_code)
    """
    response = {
        'status': 'error',
        'message': message
    }

    if error_code:
        response['error_code'] = error_code

    if errors:
        response['errors'] = errors

    if data:
        response['data'] = data

    return response, status_code


def create_paginated_response(
    data: List[Any],
    page: int,
    per_page: int,
    total: int,
    endpoint: Optional[str] = None,
    **kwargs
) -> Dict[str, Any]:
    """
    Create a paginated response with navigation links.

    Args:
        data: List of data items for current page
        page: Current page number (1-based)
        per_page: Items per page
        total: Total number of items
        endpoint: Flask endpoint name for pagination links
        **kwargs: Additional parameters for pagination links

    Returns:
        Dictionary containing paginated response
    """
    total_pages = math.ceil(total / per_page) if per_page > 0 else 1

    pagination_info = {
        'page': page,
        'per_page': per_page,
        'total': total,
        'total_pages': total_pages,
        'has_next': page < total_pages,
        'has_prev': page > 1
    }

    # Add navigation links if endpoint is provided
    if endpoint:
        base_url = request.url_root.rstrip('/')

        # Current page
        pagination_info['current_url'] = url_for(endpoint, page=page, per_page=per_page, **kwargs)

        # First page
        pagination_info['first_url'] = url_for(endpoint, page=1, per_page=per_page, **kwargs)

        # Last page
        pagination_info['last_url'] = url_for(endpoint, page=total_pages, per_page=per_page, **kwargs)

        # Previous page
        if pagination_info['has_prev']:
            pagination_info['prev_url'] = url_for(endpoint, page=page-1, per_page=per_page, **kwargs)

        # Next page
        if pagination_info['has_next']:
            pagination_info['next_url'] = url_for(endpoint, page=page+1, per_page=per_page, **kwargs)

    return {
        'status': 'success',
        'data': data,
        'pagination': pagination_info
    }


def paginate_query_results(
    items: List[Any],
    page: Optional[int] = None,
    per_page: Optional[int] = None,
    default_per_page: int = 50,
    max_per_page: int = 1000
) -> Tuple[List[Any], int, int, int]:
    """
    Paginate a list of items based on query parameters.

    Args:
        items: List of items to paginate
        page: Page number (from query params)
        per_page: Items per page (from query params)
        default_per_page: Default items per page
        max_per_page: Maximum allowed items per page

    Returns:
        Tuple of (paginated_items, page, per_page, total)
    """
    total = len(items)

    # Parse pagination parameters
    if page is None:
        page = int(request.args.get('page', 1))
    if per_page is None:
        per_page = int(request.args.get('per_page', default_per_page))

    # Validate parameters
    page = max(1, page)
    per_page = min(max(1, per_page), max_per_page)

    # Calculate offset
    offset = (page - 1) * per_page

    # Slice the items
    paginated_items = items[offset:offset + per_page]

    return paginated_items, page, per_page, total


def format_anime_response(anime_data: Dict[str, Any]) -> Dict[str, Any]:
    """
    Format anime data for API response.

    Args:
        anime_data: Raw anime data from database

    Returns:
        Formatted anime data
    """
    formatted = {
        'id': anime_data.get('id'),
        'name': anime_data.get('name'),
        'url': anime_data.get('url'),
        'description': anime_data.get('description'),
        'episodes': anime_data.get('episodes'),
        'status': anime_data.get('status', 'planned'),
        'created_at': anime_data.get('created_at'),
        'updated_at': anime_data.get('updated_at')
    }

    # Remove None values
    return {k: v for k, v in formatted.items() if v is not None}


def format_episode_response(episode_data: Dict[str, Any]) -> Dict[str, Any]:
    """
    Format episode data for API response.

    Args:
        episode_data: Raw episode data from database

    Returns:
        Formatted episode data
    """
    formatted = {
        'id': episode_data.get('id'),
        'anime_id': episode_data.get('anime_id'),
        'episode_number': episode_data.get('episode_number'),
        'title': episode_data.get('title'),
        'url': episode_data.get('url'),
        'status': episode_data.get('status', 'available'),
        'download_path': episode_data.get('download_path'),
        'file_size': episode_data.get('file_size'),
        'created_at': episode_data.get('created_at'),
        'updated_at': episode_data.get('updated_at')
    }

    # Remove None values
    return {k: v for k, v in formatted.items() if v is not None}


def format_download_response(download_data: Dict[str, Any]) -> Dict[str, Any]:
    """
    Format download data for API response.

    Args:
        download_data: Raw download data

    Returns:
        Formatted download data
    """
    formatted = {
        'id': download_data.get('id'),
        'anime_id': download_data.get('anime_id'),
        'episode_id': download_data.get('episode_id'),
        'status': download_data.get('status', 'pending'),
        'progress': download_data.get('progress', 0),
        'speed': download_data.get('speed'),
        'eta': download_data.get('eta'),
        'error_message': download_data.get('error_message'),
        'started_at': download_data.get('started_at'),
        'completed_at': download_data.get('completed_at')
    }

    # Remove None values
    return {k: v for k, v in formatted.items() if v is not None}


def format_bulk_operation_response(operation_data: Dict[str, Any]) -> Dict[str, Any]:
    """
    Format bulk operation data for API response.

    Args:
        operation_data: Raw bulk operation data

    Returns:
        Formatted bulk operation data
    """
    formatted = {
        'id': operation_data.get('id'),
        'type': operation_data.get('type'),
        'status': operation_data.get('status', 'pending'),
        'total_items': operation_data.get('total_items', 0),
        'completed_items': operation_data.get('completed_items', 0),
        'failed_items': operation_data.get('failed_items', 0),
        'progress_percentage': operation_data.get('progress_percentage', 0),
        'started_at': operation_data.get('started_at'),
        'completed_at': operation_data.get('completed_at'),
        'error_message': operation_data.get('error_message')
    }

    # Remove None values
    return {k: v for k, v in formatted.items() if v is not None}


def format_health_response(health_data: Dict[str, Any]) -> Dict[str, Any]:
    """
    Format health check data for API response.

    Args:
        health_data: Raw health check data

    Returns:
        Formatted health data
    """
    formatted = {
        'status': health_data.get('status', 'unknown'),
        'uptime': health_data.get('uptime'),
        'version': health_data.get('version'),
        'components': health_data.get('components', {}),
        'timestamp': health_data.get('timestamp')
    }

    # Remove None values
    return {k: v for k, v in formatted.items() if v is not None}


def add_resource_links(data: Dict[str, Any], resource_type: str, resource_id: Any) -> Dict[str, Any]:
    """
    Add HATEOAS-style links to a resource response.

    Args:
        data: Resource data
        resource_type: Type of resource (anime, episode, etc.)
        resource_id: Resource identifier

    Returns:
        Data with added links
    """
    if '_links' not in data:
        data['_links'] = {}

    # Self link
    data['_links']['self'] = url_for(f'api.get_{resource_type}', id=resource_id)

    # Collection link
    data['_links']['collection'] = url_for(f'api.list_{resource_type}s')

    return data


def create_batch_response(
    successful_items: List[Dict[str, Any]],
    failed_items: List[Dict[str, Any]],
    message: Optional[str] = None
) -> Dict[str, Any]:
    """
    Create response for batch operations.

    Args:
        successful_items: List of successfully processed items
        failed_items: List of failed items with errors
        message: Optional message

    Returns:
        Batch operation response
    """
    total_items = len(successful_items) + len(failed_items)
    success_count = len(successful_items)
    failure_count = len(failed_items)

    response = {
        'status': 'success' if failure_count == 0 else 'partial_success',
        'message': message or f"Processed {success_count}/{total_items} items successfully",
        'summary': {
            'total': total_items,
            'successful': success_count,
            'failed': failure_count
        },
        'data': {
            'successful': successful_items,
            'failed': failed_items
        }
    }

    return response


def extract_pagination_params(
    default_page: int = 1,
    default_per_page: int = 50,
    max_per_page: int = 1000
) -> Tuple[int, int]:
    """
    Extract and validate pagination parameters from request.

    Args:
        default_page: Default page number
        default_per_page: Default items per page
        max_per_page: Maximum allowed items per page

    Returns:
        Tuple of (page, per_page)
    """
    try:
        page = int(request.args.get('page', default_page))
        page = max(1, page)
    except (ValueError, TypeError):
        page = default_page

    try:
        per_page = int(request.args.get('per_page', default_per_page))
        per_page = min(max(1, per_page), max_per_page)
    except (ValueError, TypeError):
        per_page = default_per_page

    return page, per_page
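A usage sketch tying the pagination helpers above together; the endpoint, blueprint, and the load_all_anime() data source are assumptions for illustration. Parameters come from the query string, the list is sliced, and the slice is wrapped with navigation links.

from flask import Blueprint, jsonify

anime_bp = Blueprint('anime', __name__)

@anime_bp.route('/api/anime')
def list_anime():
    # load_all_anime() is a hypothetical placeholder for whatever returns
    # the full list of anime records.
    items = load_all_anime()
    page, per_page = extract_pagination_params()
    page_items, page, per_page, total = paginate_query_results(items, page, per_page)
    return jsonify(create_paginated_response(
        page_items, page, per_page, total, endpoint='anime.list_anime'
    ))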
@@ -1,446 +0,0 @@
"""
Input validation utilities for API endpoints.

This module provides validation functions and decorators for consistent
input validation across all controller modules.
"""

import re
import os
from typing import Any, Dict, List, Optional, Union, Callable, Tuple
from functools import wraps
from flask import request, jsonify
from .error_handlers import ValidationError, create_error_response


def validate_json_input(required_fields: Optional[List[str]] = None,
                        optional_fields: Optional[List[str]] = None,
                        field_types: Optional[Dict[str, type]] = None) -> Callable:
    """
    Decorator to validate JSON input for API endpoints.

    Args:
        required_fields: List of required field names
        optional_fields: List of optional field names
        field_types: Dictionary mapping field names to expected types

    Returns:
        Decorator function

    Usage:
        @validate_json_input(
            required_fields=['name', 'url'],
            optional_fields=['description'],
            field_types={'name': str, 'url': str, 'episodes': int}
        )
        def create_anime():
            data = request.get_json()
            # data is now validated
    """
    def decorator(f: Callable) -> Callable:
        @wraps(f)
        def decorated_function(*args, **kwargs):
            if not request.is_json:
                return create_error_response(
                    message="Request must be JSON",
                    status_code=400,
                    error_code='INVALID_CONTENT_TYPE'
                )

            try:
                data = request.get_json()
            except Exception:
                return create_error_response(
                    message="Invalid JSON format",
                    status_code=400,
                    error_code='INVALID_JSON'
                )

            if data is None:
                return create_error_response(
                    message="Request body cannot be empty",
                    status_code=400,
                    error_code='EMPTY_BODY'
                )

            # Validate required fields
            if required_fields:
                missing_fields = []
                for field in required_fields:
                    if field not in data or data[field] is None:
                        missing_fields.append(field)

                if missing_fields:
                    return create_error_response(
                        message=f"Missing required fields: {', '.join(missing_fields)}",
                        status_code=400,
                        error_code='MISSING_FIELDS',
                        errors=missing_fields
                    )

            # Validate field types
            if field_types:
                type_errors = []
                for field, expected_type in field_types.items():
                    if field in data and data[field] is not None:
                        if not isinstance(data[field], expected_type):
                            type_errors.append(f"{field} must be of type {expected_type.__name__}")

                if type_errors:
                    return create_error_response(
                        message="Type validation failed",
                        status_code=400,
                        error_code='TYPE_ERROR',
                        errors=type_errors
                    )

            # Check for unexpected fields
            all_allowed = (required_fields or []) + (optional_fields or [])
            if all_allowed:
                unexpected_fields = [field for field in data.keys() if field not in all_allowed]
                if unexpected_fields:
                    return create_error_response(
                        message=f"Unexpected fields: {', '.join(unexpected_fields)}",
                        status_code=400,
                        error_code='UNEXPECTED_FIELDS',
                        errors=unexpected_fields
                    )

            return f(*args, **kwargs)
        return decorated_function
    return decorator


def validate_query_params(allowed_params: Optional[List[str]] = None,
                          required_params: Optional[List[str]] = None,
                          param_types: Optional[Dict[str, type]] = None) -> Callable:
    """
    Decorator to validate query parameters.

    Args:
        allowed_params: List of allowed parameter names
        required_params: List of required parameter names
        param_types: Dictionary mapping parameter names to expected types

    Returns:
        Decorator function
    """
    def decorator(f: Callable) -> Callable:
        @wraps(f)
        def decorated_function(*args, **kwargs):
            # Check required parameters
            if required_params:
                missing_params = []
                for param in required_params:
                    if param not in request.args:
                        missing_params.append(param)

                if missing_params:
                    return create_error_response(
                        message=f"Missing required parameters: {', '.join(missing_params)}",
                        status_code=400,
                        error_code='MISSING_PARAMS'
                    )

            # Check allowed parameters
            if allowed_params:
                unexpected_params = [param for param in request.args.keys() if param not in allowed_params]
                if unexpected_params:
                    return create_error_response(
                        message=f"Unexpected parameters: {', '.join(unexpected_params)}",
                        status_code=400,
                        error_code='UNEXPECTED_PARAMS'
                    )

            # Validate parameter types
            if param_types:
                type_errors = []
                for param, expected_type in param_types.items():
                    if param in request.args:
                        value = request.args.get(param)
                        try:
                            if expected_type == int:
                                int(value)
                            elif expected_type == float:
                                float(value)
                            elif expected_type == bool:
                                if value.lower() not in ['true', 'false', '1', '0']:
                                    raise ValueError()
                        except ValueError:
                            type_errors.append(f"{param} must be of type {expected_type.__name__}")

                if type_errors:
                    return create_error_response(
                        message="Parameter type validation failed",
                        status_code=400,
                        error_code='PARAM_TYPE_ERROR',
                        errors=type_errors
                    )

            return f(*args, **kwargs)
        return decorated_function
    return decorator


def validate_pagination_params(f: Callable) -> Callable:
    """
    Decorator to validate pagination parameters (page, per_page, limit, offset).

    Args:
        f: The function to decorate

    Returns:
        Decorated function with pagination validation
    """
    @wraps(f)
    def decorated_function(*args, **kwargs):
        errors = []

        # Validate page parameter
        page = request.args.get('page')
        if page is not None:
            try:
                page_int = int(page)
                if page_int < 1:
                    errors.append("page must be greater than 0")
            except ValueError:
                errors.append("page must be an integer")

        # Validate per_page parameter
        per_page = request.args.get('per_page')
        if per_page is not None:
            try:
                per_page_int = int(per_page)
                if per_page_int < 1:
                    errors.append("per_page must be greater than 0")
                elif per_page_int > 1000:
                    errors.append("per_page cannot exceed 1000")
            except ValueError:
                errors.append("per_page must be an integer")

        # Validate limit parameter
        limit = request.args.get('limit')
        if limit is not None:
            try:
                limit_int = int(limit)
                if limit_int < 1:
                    errors.append("limit must be greater than 0")
                elif limit_int > 1000:
                    errors.append("limit cannot exceed 1000")
            except ValueError:
                errors.append("limit must be an integer")

        # Validate offset parameter
        offset = request.args.get('offset')
        if offset is not None:
            try:
                offset_int = int(offset)
                if offset_int < 0:
                    errors.append("offset must be greater than or equal to 0")
            except ValueError:
                errors.append("offset must be an integer")

        if errors:
            return create_error_response(
                message="Pagination parameter validation failed",
                status_code=400,
                error_code='PAGINATION_ERROR',
                errors=errors
            )

        return f(*args, **kwargs)
    return decorated_function


def validate_anime_data(data: Dict[str, Any]) -> List[str]:
    """
    Validate anime data structure.

    Args:
        data: Dictionary containing anime data

    Returns:
        List of validation errors (empty if valid)
    """
    errors = []

    # Required fields
    required_fields = ['name', 'url']
    for field in required_fields:
        if field not in data or not data[field]:
            errors.append(f"Missing required field: {field}")

    # Validate name
    if 'name' in data:
        name = data['name']
        if not isinstance(name, str):
            errors.append("name must be a string")
        elif len(name.strip()) == 0:
            errors.append("name cannot be empty")
        elif len(name) > 500:
            errors.append("name cannot exceed 500 characters")

    # Validate URL
    if 'url' in data:
        url = data['url']
        if not isinstance(url, str):
            errors.append("url must be a string")
        elif not is_valid_url(url):
            errors.append("url must be a valid URL")

    # Validate optional fields
    if 'description' in data and data['description'] is not None:
        if not isinstance(data['description'], str):
            errors.append("description must be a string")
        elif len(data['description']) > 2000:
            errors.append("description cannot exceed 2000 characters")

    if 'episodes' in data and data['episodes'] is not None:
        if not isinstance(data['episodes'], int):
            errors.append("episodes must be an integer")
        elif data['episodes'] < 0:
            errors.append("episodes must be non-negative")

    if 'status' in data and data['status'] is not None:
        valid_statuses = ['ongoing', 'completed', 'planned', 'dropped', 'paused']
        if data['status'] not in valid_statuses:
            errors.append(f"status must be one of: {', '.join(valid_statuses)}")

    return errors


def validate_file_upload(file, allowed_extensions: Optional[List[str]] = None,
                         max_size_mb: Optional[int] = None) -> List[str]:
    """
    Validate file upload.

    Args:
        file: Uploaded file object
        allowed_extensions: List of allowed file extensions
        max_size_mb: Maximum file size in MB

    Returns:
        List of validation errors (empty if valid)
    """
    errors = []

    if not file:
        errors.append("No file provided")
        return errors

    if file.filename == '':
        errors.append("No file selected")
        return errors

    # Check file extension
    if allowed_extensions:
        file_ext = os.path.splitext(file.filename)[1].lower()
        if file_ext not in [f".{ext.lower()}" for ext in allowed_extensions]:
            errors.append(f"File type not allowed. Allowed: {', '.join(allowed_extensions)}")

    # Check file size (if we can determine it)
    if max_size_mb and hasattr(file, 'content_length') and file.content_length:
        max_size_bytes = max_size_mb * 1024 * 1024
        if file.content_length > max_size_bytes:
            errors.append(f"File size exceeds maximum of {max_size_mb}MB")

    return errors


def is_valid_url(url: str) -> bool:
    """
    Check if a string is a valid URL.

    Args:
        url: URL string to validate

    Returns:
        True if valid URL, False otherwise
    """
    url_pattern = re.compile(
        r'^https?://'  # http:// or https://
        r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+[A-Z]{2,6}\.?|'  # domain...
        r'localhost|'  # localhost...
        r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})'  # ...or ip
        r'(?::\d+)?'  # optional port
        r'(?:/?|[/?]\S+)$', re.IGNORECASE)

    return url_pattern.match(url) is not None


def is_valid_email(email: str) -> bool:
    """
    Check if a string is a valid email address.

    Args:
        email: Email string to validate

    Returns:
        True if valid email, False otherwise
    """
    email_pattern = re.compile(
        r'^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$'
    )
    return email_pattern.match(email) is not None


def sanitize_string(value: str, max_length: Optional[int] = None) -> str:
    """
    Sanitize string input by removing dangerous characters.

    Args:
        value: String to sanitize
        max_length: Maximum allowed length

    Returns:
        Sanitized string
    """
    if not isinstance(value, str):
        return str(value)

    # Remove null bytes and control characters
    sanitized = ''.join(char for char in value if ord(char) >= 32 or char in '\t\n\r')

    # Trim whitespace
    sanitized = sanitized.strip()

    # Truncate if necessary
    if max_length and len(sanitized) > max_length:
        sanitized = sanitized[:max_length]

    return sanitized


def validate_id_parameter(param_name: str = 'id') -> Callable:
    """
    Decorator to validate ID parameters in URLs.

    Args:
        param_name: Name of the ID parameter

    Returns:
        Decorator function
    """
    def decorator(f: Callable) -> Callable:
        @wraps(f)
        def decorated_function(*args, **kwargs):
            if param_name in kwargs:
                try:
                    id_value = int(kwargs[param_name])
                    if id_value <= 0:
                        return create_error_response(
                            message=f"{param_name} must be a positive integer",
                            status_code=400,
                            error_code='INVALID_ID'
                        )
                    kwargs[param_name] = id_value
                except ValueError:
                    return create_error_response(
                        message=f"{param_name} must be an integer",
                        status_code=400,
                        error_code='INVALID_ID'
                    )

            return f(*args, **kwargs)
        return decorated_function
    return decorator
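A final hedged sketch, with assumed blueprint and route names, showing how the validators above are meant to combine: a path-ID check, query-parameter typing, and the anime payload validator feeding create_error_response.

from flask import Blueprint, request, jsonify

validation_demo_bp = Blueprint('validation_demo', __name__)

@validation_demo_bp.route('/api/anime/<int:id>', methods=['PUT'])
@validate_id_parameter('id')
@validate_query_params(allowed_params=['dry_run'], param_types={'dry_run': bool})
def update_anime(id: int):
    data = request.get_json() or {}
    errors = validate_anime_data(data)
    if errors:
        # validate_anime_data returns a plain error list; wrap it in the
        # standard envelope so the client sees the same shape everywhere.
        body, status = create_error_response(
            message="Anime payload failed validation",
            status_code=400,
            error_code='VALIDATION_ERROR',
            errors=errors
        )
        return jsonify(body), status
    return jsonify({'status': 'success', 'data': data})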
File diff suppressed because it is too large
Some files were not shown because too many files have changed in this diff