Compare commits
541 Commits
64434ccd44
...
main
| Author | SHA1 | Date | |
|---|---|---|---|
| e44a8190d0 | |||
| 94720f2d61 | |||
| 0ec120e08f | |||
| db58ea9396 | |||
| 69b409f42d | |||
| b34ee59bca | |||
| 624c0db16e | |||
| e6d9f9f342 | |||
| fc8cdc538d | |||
| d8248be67d | |||
| 6c7dc66c5d | |||
| d951963d87 | |||
| f6000b1fff | |||
| ddf10327c7 | |||
| 747e1acc21 | |||
| 1885fed4bd | |||
| dd45494717 | |||
| 4ac51a789a | |||
| 1712dfd776 | |||
| ddcac5a96d | |||
| c186e0d4f7 | |||
| 759cd09ded | |||
| bbf0a0815a | |||
| 87bf0d71cd | |||
| 8e262c947c | |||
| adea1e2ede | |||
| d71feb64dd | |||
| 3e5ad8a4a6 | |||
| e1abf90c81 | |||
| 228964e928 | |||
| dee2601bda | |||
| 61f35632b9 | |||
| eed75ff08b | |||
| 0265ae2a70 | |||
| ac7e15e1eb | |||
| 850207d9a8 | |||
| 1c39dd5c6a | |||
| 76f02ec822 | |||
| e84a220f55 | |||
| d7ab689fe1 | |||
| 0d2ce07ad7 | |||
| e4d328bb45 | |||
| f283e581d6 | |||
| 88043ed749 | |||
| 7effc02f33 | |||
| 60e5b5ccda | |||
| 88f3219126 | |||
| c6da967893 | |||
| 9275747b6d | |||
| 5b3fbf36b9 | |||
| 46dab1dbc1 | |||
| d1d30dde9e | |||
| 4b35cb63d1 | |||
| af208882f5 | |||
| cf754860f1 | |||
| 53b628efd9 | |||
| 06fb6630ea | |||
| d72b8cb1ab | |||
| d74c181556 | |||
| c757123429 | |||
| 436dc8b338 | |||
| f8122099c3 | |||
| 8174cf73c4 | |||
| 6208cae5c7 | |||
| 708bf42f89 | |||
| 27c6087d88 | |||
| 9157c4b274 | |||
| 700415af57 | |||
| 7f21d3236f | |||
| 253750ad45 | |||
| b1d9714123 | |||
| 562fcdc811 | |||
| 212b971bba | |||
| 08123d40e4 | |||
| 30ff7c7a93 | |||
| bd5538be59 | |||
| a92340aa8b | |||
| 9ab96398b0 | |||
| aceaba5849 | |||
| a345f9b4e9 | |||
| e3de8a4c9a | |||
| aa601daf88 | |||
| 7100b3c968 | |||
| ab40cdcf2c | |||
| 26532ea592 | |||
| 1f551a3fbe | |||
| eb0f6cdc85 | |||
| 63da2daa53 | |||
| 0ab9adbd04 | |||
| 1a4fce16d6 | |||
| c693c6572b | |||
| f409b81aa2 | |||
| f5a42f269e | |||
| cf4e698454 | |||
| 58fb9fdd3e | |||
| dc6c113707 | |||
| 3b1ab36786 | |||
| cc6f190cb6 | |||
| 954d571a80 | |||
| 7693828621 | |||
| 10246df78b | |||
| 846176f114 | |||
| 732181b709 | |||
| 6854d72d56 | |||
| ab1836575e | |||
| 0ffcfac674 | |||
| 797bba4151 | |||
| 458fc483e4 | |||
| 3f2e15669d | |||
| 7c1242a122 | |||
| fb8f0bdbd2 | |||
| 52d82ab6bc | |||
| 8647da8474 | |||
| 46271a9845 | |||
| 4abaf8def7 | |||
| c4080e4e57 | |||
| ed3882991f | |||
| 35a7aeac9e | |||
| b89da0d7a0 | |||
| 14dce41de8 | |||
| 6d0259d4b4 | |||
| f7cc296aa7 | |||
| 8ff558cb07 | |||
| 04f26d5cfc | |||
| 5af72c33b8 | |||
| c7bf232fe1 | |||
| 2b904fd01e | |||
| e09bb0451c | |||
| 800790fc8f | |||
| 0e58a49cdd | |||
| fed6162452 | |||
| 611798b786 | |||
| 314f535446 | |||
| a8011eb6a3 | |||
| ba6429bb2f | |||
| 168b4c5ac4 | |||
| 925f408699 | |||
| 9fb93794e6 | |||
| faac14346f | |||
| f8634bf605 | |||
| 7bf02ac8f8 | |||
| 026e96b66c | |||
| c586e9f69d | |||
| f89649fe20 | |||
| 33406fef1a | |||
| 5e233bcba0 | |||
| 48a2fd0f2a | |||
| 77ffdac84b | |||
| 92c8d42c4d | |||
| ae162d9a6d | |||
| 4c606faa0e | |||
| 50e0b21669 | |||
| 8e8487b7b7 | |||
| 61c86dc698 | |||
| 88c00b761c | |||
| 125892abe5 | |||
| 050db40af3 | |||
| 9f1158b9af | |||
| db7e21a14c | |||
| bf3cfa00d5 | |||
| 35c82e68b7 | |||
| b2379e05cf | |||
| f9e4970615 | |||
| 5aba36c40a | |||
| d425d711bd | |||
| 6215477eef | |||
| 0b580f2fab | |||
| bfbae88ade | |||
| 01f828c799 | |||
| 6d40ddbfe5 | |||
| d6a82f4329 | |||
| 7d95c180a9 | |||
| 62bdcf35cb | |||
| c97da7db2e | |||
| 09a5eccea7 | |||
| 265d7fe435 | |||
| 0bbdd46fc7 | |||
| 8b0a4abca9 | |||
| 5ca6a27573 | |||
| 9d5bd12ec8 | |||
| 0b4fb10d65 | |||
| f18c31a035 | |||
| df19f8ad95 | |||
| 2495b07fc4 | |||
| ea9e959a7b | |||
| 7a77dff194 | |||
| 1b4526d050 | |||
| 491daa2e50 | |||
| 03901a8c2d | |||
| c92e2d340e | |||
| e502dcb8bd | |||
| 4e56093ff9 | |||
| 9877f9400c | |||
| db1e7fa54b | |||
| 4408874d37 | |||
| 390cafc0dc | |||
| a06abaa2e5 | |||
| c6919ac124 | |||
| 22a41ba93f | |||
| fd5e85d5ea | |||
| 4e29c4ed80 | |||
| d676cb7dca | |||
| d1a966cc0d | |||
| c88e2d2b7b | |||
| 2f04b2a862 | |||
| 120b26b9f7 | |||
| ecfa8d3c10 | |||
| d642234814 | |||
| 56b4975d10 | |||
| 94f4cc69c4 | |||
| b27cd5fb82 | |||
| 45a37a8c08 | |||
| c5dbc9a22b | |||
| 6f2a8f26e1 | |||
| 9078a6f3dc | |||
| a1865a41c6 | |||
| 99a5086158 | |||
| 4b636979f9 | |||
| 9f6606f1e1 | |||
| b9f3149679 | |||
| 1c476003d6 | |||
| e32098fb94 | |||
| 67119d0627 | |||
| a62cec2090 | |||
| 6901df11c4 | |||
| 36e663c556 | |||
| 2f00c3feac | |||
| c163b076a0 | |||
| 3a0243da1f | |||
| 641fa09251 | |||
| 4895e487c0 | |||
| 5e8815d143 | |||
| 65b116c39f | |||
| 9a1c9b39ee | |||
| 40ffb99c97 | |||
| ccbd9768a2 | |||
| 281b982abe | |||
| 5c0a019e72 | |||
| 3d2ef53463 | |||
| f63d615364 | |||
| 2a85a2bc18 | |||
| 489c37357e | |||
| 4f2d652a69 | |||
| bd655cb0f0 | |||
| 60070395e9 | |||
| f39a08d985 | |||
| 055bbf4de6 | |||
| ab7d78261e | |||
| b1726968e5 | |||
| ff9dea0488 | |||
| 803f35ef39 | |||
| 4780f68a23 | |||
| 08f816a954 | |||
| 778d16b21a | |||
| a67a16d6bf | |||
| 2e5731b5d6 | |||
| 94cf36bff3 | |||
| dfdac68ecc | |||
| 3d3b97bdc2 | |||
| 1b7ca7b4da | |||
| f28dc756c5 | |||
| d70d70e193 | |||
| 1ba67357dc | |||
| b2728a7cf4 | |||
| f7ee9a40da | |||
| 9f4ea84b47 | |||
| 9e393adb00 | |||
| 458ca1d776 | |||
| b6d44ca7d8 | |||
| 19cb8c11a0 | |||
| 72ac201153 | |||
| a24f07a36e | |||
| 9b071fe370 | |||
| 32dc893434 | |||
| 700f491ef9 | |||
| 4c9bf6b982 | |||
| bf332f27e0 | |||
| 596476f9ac | |||
| 27108aacda | |||
| 54790a7ebb | |||
| 1652f2f6af | |||
| 3cb644add4 | |||
| 63742bb369 | |||
| 8373da8547 | |||
| 38e0ba0484 | |||
| 5f6ac8e507 | |||
| 684337fd0c | |||
| 86eaa8a680 | |||
| ee317b29f1 | |||
| 842f9c88eb | |||
| 99f79e4c29 | |||
| 798461a1ea | |||
| 942f14f746 | |||
| 7c56c8bef2 | |||
| 3b516c0e24 | |||
| b0f3b643c7 | |||
| 48daeba012 | |||
| 4347057c06 | |||
| e0a7c6baa9 | |||
| ae77a11782 | |||
| 396b243d59 | |||
| 73283dea64 | |||
| cb014cf547 | |||
| 246782292f | |||
| 46ca4c9aac | |||
| 795f83ada5 | |||
| 646385b975 | |||
| 148e6c1b58 | |||
| de58161014 | |||
| 7e2d3dd5ab | |||
| 0222262f8f | |||
| 338e3feb4a | |||
| 36acd3999e | |||
| 85a6b053eb | |||
| ddff43595f | |||
| 6e9087d0f4 | |||
| 0c8b296aa6 | |||
| a833077f97 | |||
| 5aabad4d13 | |||
| 5934c7666c | |||
| 014e22390e | |||
| c00224467f | |||
| 08c7264d7a | |||
| 3525629853 | |||
| 6d2a791a9d | |||
| 3c8ba1d48c | |||
| f4d14cf17e | |||
| f4dad969bc | |||
| 589141e9aa | |||
| da4973829e | |||
| ff5b364852 | |||
| 6726c176b2 | |||
| 84ca53a1bc | |||
| fb2cdd4bb6 | |||
| dda999fb98 | |||
| e8129f847c | |||
| e1c8b616a8 | |||
| 883f89b113 | |||
| 41a53bbf8f | |||
| 5c08bac248 | |||
| 8443de4e0f | |||
| 51cd319a24 | |||
| c4ec6c9f0e | |||
| aeb1ebe7a2 | |||
| 920a5b0eaf | |||
| 8b5b06ca9a | |||
| 048434d49c | |||
| e42e223f28 | |||
| 9a42442f47 | |||
| 72a0455d59 | |||
| 029abb9be2 | |||
| 34019b7e65 | |||
| 1ca105f330 | |||
| 57da1f1272 | |||
| cf503c8d77 | |||
| b1f4d41b27 | |||
| 17c7a2e295 | |||
| 7b07e0cfae | |||
| fac0cecf90 | |||
| f49598d82b | |||
| f91875f6fc | |||
| 8ae8b0cdfb | |||
| 4c7657ce75 | |||
| 1e357181b6 | |||
| 2441730862 | |||
| 5c4bd3d7e8 | |||
| 5c88572ac7 | |||
| a80bfba873 | |||
| 64e78bb9b8 | |||
| ec987eff80 | |||
| e414a1a358 | |||
| 8a49db2a10 | |||
| 2de3317aee | |||
| ca4bf72fde | |||
| d5f7b1598f | |||
| 57c30a0156 | |||
| 9fce617949 | |||
| 0b5faeffc9 | |||
| 18faf3fe91 | |||
| 4dba4db344 | |||
| b76ffbf656 | |||
| f0b9d50f85 | |||
| 6cdb2eb1e1 | |||
| 33aeac0141 | |||
| eaf6bb9957 | |||
| 3c6d82907d | |||
| 3be175522f | |||
| 6ebc2ed2ea | |||
| fadd4973da | |||
| 727486795c | |||
| dbb5701660 | |||
| 55781a8448 | |||
| fd76be02fd | |||
| 4649cf562d | |||
| 627f8b0cc4 | |||
| adfbdf56d0 | |||
| 02764f7e6f | |||
| 95b7059576 | |||
| 66cc2fdfcb | |||
| 1a6c37d264 | |||
| 39991d9ffc | |||
| 75aa410f98 | |||
| 12688b9770 | |||
| eb4be2926b | |||
| 94c53e9555 | |||
| a41c86f1da | |||
| a3651e0e47 | |||
| 4e08d81bb0 | |||
| 731fd56768 | |||
| 260b98e548 | |||
| 65adaea116 | |||
| c71131505e | |||
| 96eeae620e | |||
| fc8489bb9f | |||
| fecdb38a90 | |||
| 85d73b8294 | |||
| 0fd9c424cd | |||
| 77da614091 | |||
| 7409ae637e | |||
| 17e5a551e1 | |||
| 6a6ae7e059 | |||
| ffb182e3ba | |||
| c81a493fb1 | |||
| 3d5c19939c | |||
| 9a64ca5b01 | |||
| 5c2691b070 | |||
| 6db850c2ad | |||
| 92795cf9b3 | |||
| ebb0769ed4 | |||
| 947a8ff51f | |||
| 04799633b4 | |||
| 1f39f07c5d | |||
| 7437eb4c02 | |||
| f64ba74d93 | |||
| 80507119b7 | |||
| 68c2f9bda2 | |||
| 9692dfc63b | |||
| 1637835fe6 | |||
| 9e686017a6 | |||
| 1c8c18c1ea | |||
| bf4455942b | |||
| 4eede0c8c0 | |||
| 04b516a52d | |||
| 3e50ec0149 | |||
| 71841645cf | |||
| 2e57c4f424 | |||
| d143d56d8b | |||
| e578623999 | |||
| 4db53c93df | |||
| 36e09b72ed | |||
| d87ec398bb | |||
| d698ae50a2 | |||
| 2bf69cd3fc | |||
| ab00e3f8df | |||
| a057432a3e | |||
| 68d83e2a39 | |||
| 30de86e77a | |||
| f1c2ee59bd | |||
| ff0d865b7c | |||
| 0d6cade56c | |||
| a0f32b1a00 | |||
| 59edf6bd50 | |||
| 0957a6e183 | |||
| 2bc616a062 | |||
| 8f7c489bd2 | |||
| 99e24a2fc3 | |||
| 043d8a2877 | |||
| 71207bc935 | |||
| 8c8853d26e | |||
| 94de91ffa0 | |||
| 42a07be4cb | |||
| 577c55f32a | |||
| 028d91283e | |||
| 1ba4336291 | |||
| d0f63063ca | |||
| 9323eb6371 | |||
| 3ffab4e70a | |||
| 5b80824f3a | |||
| 6b979eb57a | |||
| 52b96da8dc | |||
| 4aa7adba3a | |||
| 9096afbace | |||
| bf5d80bbb3 | |||
| 97bef2c98a | |||
| aec6357dcb | |||
| 92217301b5 | |||
| 539dd80e14 | |||
| 8e885dd40b | |||
| 8fb4770161 | |||
| 2867ebae09 | |||
| 6a695966bf | |||
| 7481a33c15 | |||
| e48cb29131 | |||
| 7b933b6cdb | |||
| 7a71715183 | |||
| 57d49bcf78 | |||
| 6d0c3fdf26 | |||
| 87c4046711 | |||
| 3f98dd6ebb | |||
| 3b8ca8b8f3 | |||
| a63cc7e083 | |||
| 13d2f8307d | |||
| 86651c2ef1 | |||
| e95ed299d6 | |||
| 733c86eb6b | |||
| dd26076da4 | |||
| 3a3c7eb4cd | |||
| d3472c2c92 | |||
| a93c787031 | |||
| 9bf8957a50 | |||
| 8f720443a4 | |||
| 63f17b647d | |||
| 548eda6c94 | |||
| 7f27ff823a | |||
| f550ec05e3 | |||
| 88db74c9a0 | |||
| 3d9dfe6e6a | |||
| 90dc5f11d2 | |||
| 00a68deb7b | |||
| 4c9076af19 | |||
| bf91104c7c | |||
| 67e63911e9 | |||
| 888acfd33d | |||
| 082d725d91 | |||
| 2199d256b6 | |||
| 721326ecaf | |||
| e0c80c178d | |||
| 2cb0c5d79f | |||
| 1fe8482349 | |||
| 8121031969 | |||
| 23c4e16ee2 | |||
| e3b752a2a7 | |||
| 2c8c9a788c | |||
| 6e136e832b | |||
| e15c0a21e0 | |||
| 555c39d668 | |||
| be5a0c0aab | |||
| 969533f1de | |||
| 85f2d2c6f7 | |||
| fe2df1514c | |||
| d30aa7cfea |
34
.dockerignore
Normal file
34
.dockerignore
Normal file
@@ -0,0 +1,34 @@
|
|||||||
|
__pycache__/
|
||||||
|
*.pyc
|
||||||
|
*.pyo
|
||||||
|
*.egg-info/
|
||||||
|
.git/
|
||||||
|
.github/
|
||||||
|
.gitignore
|
||||||
|
.vscode/
|
||||||
|
.vs/
|
||||||
|
.idea/
|
||||||
|
.mypy_cache/
|
||||||
|
.pytest_cache/
|
||||||
|
.coverage
|
||||||
|
.env
|
||||||
|
*.log
|
||||||
|
|
||||||
|
# Docker files (not needed inside the image)
|
||||||
|
Docker/
|
||||||
|
|
||||||
|
# Test and dev files
|
||||||
|
tests/
|
||||||
|
Temp/
|
||||||
|
test_data/
|
||||||
|
docs/
|
||||||
|
diagrams/
|
||||||
|
|
||||||
|
# Runtime data (mounted as volumes)
|
||||||
|
data/aniworld.db
|
||||||
|
data/config_backups/
|
||||||
|
logs/
|
||||||
|
|
||||||
|
# Frontend tooling
|
||||||
|
node_modules/
|
||||||
|
package.json
|
||||||
28
.flake8
28
.flake8
@@ -1,28 +0,0 @@
|
|||||||
[flake8]
|
|
||||||
max-line-length = 88
|
|
||||||
exclude =
|
|
||||||
.git,
|
|
||||||
__pycache__,
|
|
||||||
build,
|
|
||||||
dist,
|
|
||||||
.venv,
|
|
||||||
venv,
|
|
||||||
aniworld,
|
|
||||||
migrations,
|
|
||||||
.pytest_cache,
|
|
||||||
.mypy_cache,
|
|
||||||
.coverage,
|
|
||||||
htmlcov
|
|
||||||
extend-ignore =
|
|
||||||
# E203: whitespace before ':' (conflicts with black)
|
|
||||||
E203,
|
|
||||||
# W503: line break before binary operator (conflicts with black)
|
|
||||||
W503,
|
|
||||||
# E501: line too long (handled by black)
|
|
||||||
E501
|
|
||||||
per-file-ignores =
|
|
||||||
__init__.py:F401
|
|
||||||
tests/*:F401,F811
|
|
||||||
max-complexity = 10
|
|
||||||
docstring-convention = google
|
|
||||||
import-order-style = google
|
|
||||||
74
.github/copilot-instructions.md
vendored
74
.github/copilot-instructions.md
vendored
@@ -18,23 +18,6 @@ These instructions define how GitHub Copilot should assist with this project. Th
|
|||||||
- Use meaningful naming; avoid cryptic variables.
|
- Use meaningful naming; avoid cryptic variables.
|
||||||
- Emphasize simplicity, readability, and DRY principles.
|
- Emphasize simplicity, readability, and DRY principles.
|
||||||
|
|
||||||
## 📁 File Structure
|
|
||||||
|
|
||||||
Use this structure as a guide when creating or updating files:
|
|
||||||
|
|
||||||
```text
|
|
||||||
src/
|
|
||||||
controllers/
|
|
||||||
services/
|
|
||||||
repositories/
|
|
||||||
schemas/
|
|
||||||
utils/
|
|
||||||
config/
|
|
||||||
tests/
|
|
||||||
unit/
|
|
||||||
integration/
|
|
||||||
```
|
|
||||||
|
|
||||||
## 🧶 Patterns
|
## 🧶 Patterns
|
||||||
|
|
||||||
### ✅ Patterns to Follow
|
### ✅ Patterns to Follow
|
||||||
@@ -93,47 +76,46 @@ tests/
|
|||||||
|
|
||||||
## 1. General Philosophy
|
## 1. General Philosophy
|
||||||
|
|
||||||
* **Clarity is King:** Code should be easy to understand at a glance.
|
- **Clarity is King:** Code should be easy to understand at a glance.
|
||||||
* **Consistency Matters:** Adhere to these standards across all projects.
|
- **Consistency Matters:** Adhere to these standards across all projects.
|
||||||
* **Automation Encouraged:** Utilize tools like StyleCop, Roslyn Analyzers, and .editorconfig to enforce these standards automatically.
|
- **Automation Encouraged:** Utilize tools like StyleCop, Roslyn Analyzers, and .editorconfig to enforce these standards automatically.
|
||||||
* **Evolve and Adapt:** These standards should be reviewed and updated as the C# language and best practices evolve.
|
- **Evolve and Adapt:** These standards should be reviewed and updated as the C# language and best practices evolve.
|
||||||
* **Practicality Reigns:** While striving for perfection, prioritize pragmatic solutions that balance maintainability and development speed.
|
- **Practicality Reigns:** While striving for perfection, prioritize pragmatic solutions that balance maintainability and development speed.
|
||||||
|
|
||||||
* CleanCode, Keep it simple, MVVM
|
- CleanCode, Keep it simple, MVVM
|
||||||
|
|
||||||
## 2. Security Considerations
|
## 2. Security Considerations
|
||||||
|
|
||||||
* **Input Validation:** Always validate user input to prevent injection attacks (e.g., SQL injection, XSS).
|
- **Input Validation:** Always validate user input to prevent injection attacks (e.g., SQL injection, XSS).
|
||||||
* **Secure Configuration:** Store sensitive information (e.g., passwords, API keys) in secure configuration files, and encrypt them if possible. Avoid hardcoding sensitive data.
|
- **Secure Configuration:** Store sensitive information (e.g., passwords, API keys) in secure configuration files, and encrypt them if possible. Avoid hardcoding sensitive data.
|
||||||
* **Authentication and Authorization:** Implement proper authentication and authorization mechanisms to protect resources. Favor using built-in identity frameworks.
|
- **Authentication and Authorization:** Implement proper authentication and authorization mechanisms to protect resources. Favor using built-in identity frameworks.
|
||||||
* **Data Encryption:** Encrypt sensitive data at rest and in transit. Use strong encryption algorithms.
|
- **Data Encryption:** Encrypt sensitive data at rest and in transit. Use strong encryption algorithms.
|
||||||
* **Regular Security Audits:** Perform regular security audits and penetration testing to identify and address vulnerabilities.
|
- **Regular Security Audits:** Perform regular security audits and penetration testing to identify and address vulnerabilities.
|
||||||
* **Dependency Vulnerabilities:** Keep dependencies up-to-date to patch known security vulnerabilities. Use tools to automatically check for vulnerabilities.
|
- **Dependency Vulnerabilities:** Keep dependencies up-to-date to patch known security vulnerabilities. Use tools to automatically check for vulnerabilities.
|
||||||
|
|
||||||
## 3. Performance Optimization
|
## 3. Performance Optimization
|
||||||
|
|
||||||
* **Minimize Object Allocation:** Reduce unnecessary object allocations, especially in performance-critical code. Use techniques like object pooling and struct types for small value types.
|
- **Minimize Object Allocation:** Reduce unnecessary object allocations, especially in performance-critical code. Use techniques like object pooling and struct types for small value types.
|
||||||
* **Use Efficient Data Structures:** Choose the appropriate data structures for the task (e.g., "Dictionary" for fast lookups, "List" for ordered collections).
|
- **Use Efficient Data Structures:** Choose the appropriate data structures for the task (e.g., "Dictionary" for fast lookups, "List" for ordered collections).
|
||||||
* **Avoid Boxing/Unboxing:** Avoid boxing and unboxing operations, as they can be expensive. Use generics to prevent boxing.
|
- **Avoid Boxing/Unboxing:** Avoid boxing and unboxing operations, as they can be expensive. Use generics to prevent boxing.
|
||||||
* **String Concatenation:** Use "StringBuilder" for building strings in loops instead of repeated string concatenation.
|
- **String Concatenation:** Use "StringBuilder" for building strings in loops instead of repeated string concatenation.
|
||||||
* **Asynchronous I/O:** Use asynchronous I/O operations to avoid blocking threads.
|
- **Asynchronous I/O:** Use asynchronous I/O operations to avoid blocking threads.
|
||||||
* **Profiling:** Use profiling tools to identify performance bottlenecks.
|
- **Profiling:** Use profiling tools to identify performance bottlenecks.
|
||||||
|
|
||||||
## 4. GUI
|
## 4. GUI
|
||||||
|
|
||||||
* **Effortless:** faster and more intuitive. It's easy to do what I want, with focus and precision.
|
- **Effortless:** faster and more intuitive. It's easy to do what I want, with focus and precision.
|
||||||
* **Calm:** faster and more intuitive. It's easy to do what I want, with focus and precision.
|
- **Calm:** faster and more intuitive. It's easy to do what I want, with focus and precision.
|
||||||
* **Iconography:** Iconography is a set of visual images and symbols that help users understand and navigate your app. Windows 11 iconography has evolved in concert with our design language. Every glyph in our system icon font has been redesigned to embrace a softer geometry and more modern metaphors.
|
- **Iconography:** Iconography is a set of visual images and symbols that help users understand and navigate your app. Windows 11 iconography has evolved in concert with our design language. Every glyph in our system icon font has been redesigned to embrace a softer geometry and more modern metaphors.
|
||||||
* **Shapes and geometry:** Geometry describes the shape, size, and position of UI elements on screen. These fundamental design elements help experiences feel coherent across the entire design system. Windows 11 features updated geometry that creates a more approachable, engaging, and modern experience.
|
- **Shapes and geometry:** Geometry describes the shape, size, and position of UI elements on screen. These fundamental design elements help experiences feel coherent across the entire design system. Windows 11 features updated geometry that creates a more approachable, engaging, and modern experience.
|
||||||
* **Typography:** As the visual representation of language, the main task of typography is to communicate information. The Windows 11 type system helps you create structure and hierarchy in your content in order to maximize legibility and readability in your UI.
|
- **Typography:** As the visual representation of language, the main task of typography is to communicate information. The Windows 11 type system helps you create structure and hierarchy in your content in order to maximize legibility and readability in your UI.
|
||||||
* **Familiar:** faster and more intuitive. It's easy to do what I want, with focus and precision.
|
- **Familiar:** faster and more intuitive. It's easy to do what I want, with focus and precision.
|
||||||
* **Familiar:** faster and more intuitive. It's easy to do what I want, with focus and precision.
|
- **Familiar:** faster and more intuitive. It's easy to do what I want, with focus and precision.
|
||||||
* **Fluent UI design:** Use Fluent UI design
|
- **Fluent UI design:** Use Fluent UI design
|
||||||
* **Themes:** Use the already defined Theme color. Make sure ther is always a dark and light mode.
|
- **Themes:** Use the already defined Theme color. Make sure ther is always a dark and light mode.
|
||||||
* **Text:** Write in resource files so that a translation is easily possible. Use the already defined text in the resource files.
|
- **Text:** Write in resource files so that a translation is easily possible. Use the already defined text in the resource files.
|
||||||
|
|
||||||
This document serves as a starting point and is meant to be adapted to the specific needs of each project and team. Regularly review and update these standards to keep them relevant and effective.
|
This document serves as a starting point and is meant to be adapted to the specific needs of each project and team. Regularly review and update these standards to keep them relevant and effective.
|
||||||
|
|
||||||
|
|
||||||
Run till you are realy finished.
|
Run till you are realy finished.
|
||||||
Do not gues, open and read files if you dont know something.
|
Do not gues, open and read files if you dont know something.
|
||||||
66
.gitignore
vendored
66
.gitignore
vendored
@@ -4,6 +4,7 @@
|
|||||||
/src/__pycache__/*
|
/src/__pycache__/*
|
||||||
/src/__pycache__/
|
/src/__pycache__/
|
||||||
/.vs/*
|
/.vs/*
|
||||||
|
/.venv/*
|
||||||
/src/Temp/*
|
/src/Temp/*
|
||||||
/src/Loaders/__pycache__/*
|
/src/Loaders/__pycache__/*
|
||||||
/src/Loaders/provider/__pycache__/*
|
/src/Loaders/provider/__pycache__/*
|
||||||
@@ -18,3 +19,68 @@
|
|||||||
/src/server/__pycache__/*
|
/src/server/__pycache__/*
|
||||||
/src/NoKeyFound.log
|
/src/NoKeyFound.log
|
||||||
/download_errors.log
|
/download_errors.log
|
||||||
|
|
||||||
|
# Environment and secrets
|
||||||
|
.env
|
||||||
|
.env.local
|
||||||
|
.env.*.local
|
||||||
|
*.pem
|
||||||
|
*.key
|
||||||
|
secrets/
|
||||||
|
|
||||||
|
# Python cache
|
||||||
|
__pycache__/
|
||||||
|
*.py[cod]
|
||||||
|
*$py.class
|
||||||
|
*.so
|
||||||
|
|
||||||
|
# Distribution / packaging
|
||||||
|
.Python
|
||||||
|
build/
|
||||||
|
develop-eggs/
|
||||||
|
dist/
|
||||||
|
downloads/
|
||||||
|
eggs/
|
||||||
|
.eggs/
|
||||||
|
lib/
|
||||||
|
lib64/
|
||||||
|
parts/
|
||||||
|
sdist/
|
||||||
|
var/
|
||||||
|
wheels/
|
||||||
|
*.egg-info/
|
||||||
|
.installed.cfg
|
||||||
|
*.egg
|
||||||
|
|
||||||
|
# Database files (including SQLite journal/WAL files)
|
||||||
|
*.db
|
||||||
|
*.db-shm
|
||||||
|
*.db-wal
|
||||||
|
*.db-journal
|
||||||
|
*.sqlite
|
||||||
|
*.sqlite3
|
||||||
|
*.sqlite-shm
|
||||||
|
*.sqlite-wal
|
||||||
|
*.sqlite-journal
|
||||||
|
data/*.db*
|
||||||
|
data/aniworld.db*
|
||||||
|
|
||||||
|
# Configuration files (exclude from git, keep backups local)
|
||||||
|
data/config.json
|
||||||
|
data/config_backups/
|
||||||
|
config.json
|
||||||
|
*.config
|
||||||
|
|
||||||
|
# Logs
|
||||||
|
*.log
|
||||||
|
logs/
|
||||||
|
src/cli/logs/
|
||||||
|
*.log.*
|
||||||
|
|
||||||
|
# Temp folders
|
||||||
|
Temp/
|
||||||
|
temp/
|
||||||
|
tmp/
|
||||||
|
*.tmp
|
||||||
|
.coverage
|
||||||
|
.venv/bin/dotenv
|
||||||
|
|||||||
4
.gitmodules
vendored
4
.gitmodules
vendored
@@ -1,4 +0,0 @@
|
|||||||
[submodule "src/AniWorld-Downloader"]
|
|
||||||
path = src/AniWorld-Downloader
|
|
||||||
url = https://github.com/lukaspupkalipinski/AniWorld-Downloader.git
|
|
||||||
branch = next
|
|
||||||
187
.vscode/launch.json
vendored
187
.vscode/launch.json
vendored
@@ -2,55 +2,174 @@
|
|||||||
"version": "0.2.0",
|
"version": "0.2.0",
|
||||||
"configurations": [
|
"configurations": [
|
||||||
{
|
{
|
||||||
"name": "Python: Flask App",
|
"name": "Debug FastAPI App",
|
||||||
"type": "debugpy",
|
"type": "debugpy",
|
||||||
"request": "launch",
|
"request": "launch",
|
||||||
"program": "${workspaceFolder}/src/server/app.py",
|
"program": "${workspaceFolder}/src/server/fastapi_app.py",
|
||||||
|
"console": "integratedTerminal",
|
||||||
|
"justMyCode": true,
|
||||||
|
"python": "/home/lukas/miniconda3/envs/AniWorld/bin/python",
|
||||||
"env": {
|
"env": {
|
||||||
"FLASK_APP": "app.py",
|
"PYTHONPATH": "${workspaceFolder}/src:${workspaceFolder}",
|
||||||
"FLASK_ENV": "development",
|
"JWT_SECRET_KEY": "your-secret-key-here-debug",
|
||||||
"PYTHONPATH": "${workspaceFolder}/src;${workspaceFolder}"
|
"PASSWORD_SALT": "default-salt-debug",
|
||||||
|
"MASTER_PASSWORD": "admin123",
|
||||||
|
"LOG_LEVEL": "DEBUG",
|
||||||
|
"ANIME_DIRECTORY": "${workspaceFolder}/data/anime",
|
||||||
|
"DATABASE_URL": "sqlite:///${workspaceFolder}/data/aniworld.db"
|
||||||
},
|
},
|
||||||
|
"cwd": "${workspaceFolder}",
|
||||||
"args": [],
|
"args": [],
|
||||||
"jinja": true,
|
"stopOnEntry": false,
|
||||||
"console": "integratedTerminal",
|
"autoReload": {
|
||||||
"cwd": "${workspaceFolder}/src",
|
"enable": true
|
||||||
"python": "C:/Users/lukas/anaconda3/envs/AniWorld/python.exe"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "Python: CLI Tool",
|
|
||||||
"type": "debugpy",
|
|
||||||
"request": "launch",
|
|
||||||
"program": "${workspaceFolder}/src/main.py",
|
|
||||||
"env": {
|
|
||||||
"PYTHONPATH": "${workspaceFolder}"
|
|
||||||
},
|
|
||||||
"args": [],
|
|
||||||
"console": "integratedTerminal",
|
|
||||||
"cwd": "${workspaceFolder}"
|
|
||||||
},
|
|
||||||
{
|
|
||||||
"name": "Python: Current File",
|
|
||||||
"type": "debugpy",
|
|
||||||
"request": "launch",
|
|
||||||
"program": "${file}",
|
|
||||||
"console": "integratedTerminal",
|
|
||||||
"env": {
|
|
||||||
"PYTHONPATH": "${workspaceFolder}"
|
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
{
|
{
|
||||||
"name": "Python: Pytest",
|
"name": "Debug FastAPI with Uvicorn",
|
||||||
|
"type": "debugpy",
|
||||||
|
"request": "launch",
|
||||||
|
"module": "uvicorn",
|
||||||
|
"python": "/home/lukas/miniconda3/envs/AniWorld/bin/python",
|
||||||
|
"args": [
|
||||||
|
"src.server.fastapi_app:app",
|
||||||
|
"--host",
|
||||||
|
"127.0.0.1",
|
||||||
|
"--port",
|
||||||
|
"8000",
|
||||||
|
"--reload",
|
||||||
|
"--log-level",
|
||||||
|
"debug"
|
||||||
|
],
|
||||||
|
"console": "integratedTerminal",
|
||||||
|
"justMyCode": true,
|
||||||
|
"env": {
|
||||||
|
"PYTHONPATH": "${workspaceFolder}/src:${workspaceFolder}",
|
||||||
|
"JWT_SECRET_KEY": "your-secret-key-here-debug",
|
||||||
|
"PASSWORD_SALT": "default-salt-debug",
|
||||||
|
"MASTER_PASSWORD": "admin123",
|
||||||
|
"LOG_LEVEL": "DEBUG",
|
||||||
|
"ANIME_DIRECTORY": "${workspaceFolder}/data/anime",
|
||||||
|
"DATABASE_URL": "sqlite:///${workspaceFolder}/data/aniworld.db"
|
||||||
|
},
|
||||||
|
"cwd": "${workspaceFolder}"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Debug CLI App",
|
||||||
|
"type": "debugpy",
|
||||||
|
"request": "launch",
|
||||||
|
"program": "${workspaceFolder}/src/cli/Main.py",
|
||||||
|
"console": "integratedTerminal",
|
||||||
|
"justMyCode": true,
|
||||||
|
"python": "/home/lukas/miniconda3/envs/AniWorld/bin/python",
|
||||||
|
"env": {
|
||||||
|
"PYTHONPATH": "${workspaceFolder}/src:${workspaceFolder}",
|
||||||
|
"LOG_LEVEL": "DEBUG",
|
||||||
|
"ANIME_DIRECTORY": "${workspaceFolder}/data/anime"
|
||||||
|
},
|
||||||
|
"cwd": "${workspaceFolder}",
|
||||||
|
"args": [
|
||||||
|
// Add arguments as needed for CLI testing
|
||||||
|
// Example: "${workspaceFolder}/test_data"
|
||||||
|
],
|
||||||
|
"stopOnEntry": false
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Debug Tests",
|
||||||
"type": "debugpy",
|
"type": "debugpy",
|
||||||
"request": "launch",
|
"request": "launch",
|
||||||
"module": "pytest",
|
"module": "pytest",
|
||||||
|
"python": "/home/lukas/miniconda3/envs/AniWorld/bin/python",
|
||||||
"args": [
|
"args": [
|
||||||
"tests/",
|
"${workspaceFolder}/tests",
|
||||||
"-v"
|
"-v",
|
||||||
|
"--tb=short",
|
||||||
|
"--no-header",
|
||||||
|
"--disable-warnings"
|
||||||
],
|
],
|
||||||
"console": "integratedTerminal",
|
"console": "integratedTerminal",
|
||||||
|
"justMyCode": true,
|
||||||
"env": {
|
"env": {
|
||||||
"PYTHONPATH": "${workspaceFolder}"
|
"PYTHONPATH": "${workspaceFolder}/src:${workspaceFolder}",
|
||||||
|
"JWT_SECRET_KEY": "test-secret-key",
|
||||||
|
"PASSWORD_SALT": "test-salt",
|
||||||
|
"MASTER_PASSWORD": "admin123",
|
||||||
|
"LOG_LEVEL": "DEBUG",
|
||||||
|
"ANIME_DIRECTORY": "${workspaceFolder}/test_data/anime",
|
||||||
|
"DATABASE_URL": "sqlite:///${workspaceFolder}/test_data/test_aniworld.db"
|
||||||
|
},
|
||||||
|
"cwd": "${workspaceFolder}"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Debug Unit Tests Only",
|
||||||
|
"type": "debugpy",
|
||||||
|
"request": "launch",
|
||||||
|
"module": "pytest",
|
||||||
|
"python": "/home/lukas/miniconda3/envs/AniWorld/bin/python",
|
||||||
|
"args": [
|
||||||
|
"${workspaceFolder}/tests/unit",
|
||||||
|
"-v",
|
||||||
|
"--tb=short"
|
||||||
|
],
|
||||||
|
"console": "integratedTerminal",
|
||||||
|
"justMyCode": true,
|
||||||
|
"env": {
|
||||||
|
"PYTHONPATH": "${workspaceFolder}/src:${workspaceFolder}",
|
||||||
|
"JWT_SECRET_KEY": "test-secret-key",
|
||||||
|
"PASSWORD_SALT": "test-salt",
|
||||||
|
"LOG_LEVEL": "DEBUG"
|
||||||
|
},
|
||||||
|
"cwd": "${workspaceFolder}"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Debug Integration Tests Only",
|
||||||
|
"type": "debugpy",
|
||||||
|
"request": "launch",
|
||||||
|
"module": "pytest",
|
||||||
|
"python": "/home/lukas/miniconda3/envs/AniWorld/bin/python",
|
||||||
|
"args": [
|
||||||
|
"${workspaceFolder}/tests/integration",
|
||||||
|
"-v",
|
||||||
|
"--tb=short"
|
||||||
|
],
|
||||||
|
"console": "integratedTerminal",
|
||||||
|
"justMyCode": true,
|
||||||
|
"env": {
|
||||||
|
"PYTHONPATH": "${workspaceFolder}/src:${workspaceFolder}",
|
||||||
|
"JWT_SECRET_KEY": "test-secret-key",
|
||||||
|
"PASSWORD_SALT": "test-salt",
|
||||||
|
"MASTER_PASSWORD": "admin123",
|
||||||
|
"LOG_LEVEL": "DEBUG",
|
||||||
|
"ANIME_DIRECTORY": "${workspaceFolder}/test_data/anime",
|
||||||
|
"DATABASE_URL": "sqlite:///${workspaceFolder}/test_data/test_aniworld.db"
|
||||||
|
},
|
||||||
|
"cwd": "${workspaceFolder}"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"name": "Debug FastAPI Production Mode",
|
||||||
|
"type": "debugpy",
|
||||||
|
"request": "launch",
|
||||||
|
"module": "uvicorn",
|
||||||
|
"python": "/home/lukas/miniconda3/envs/AniWorld/bin/python",
|
||||||
|
"args": [
|
||||||
|
"src.server.fastapi_app:app",
|
||||||
|
"--host",
|
||||||
|
"0.0.0.0",
|
||||||
|
"--port",
|
||||||
|
"8000",
|
||||||
|
"--workers",
|
||||||
|
"1"
|
||||||
|
],
|
||||||
|
"console": "integratedTerminal",
|
||||||
|
"justMyCode": true,
|
||||||
|
"env": {
|
||||||
|
"PYTHONPATH": "${workspaceFolder}/src:${workspaceFolder}",
|
||||||
|
"JWT_SECRET_KEY": "production-secret-key-change-me",
|
||||||
|
"PASSWORD_SALT": "production-salt-change-me",
|
||||||
|
"MASTER_PASSWORD": "admin123",
|
||||||
|
"LOG_LEVEL": "INFO",
|
||||||
|
"ANIME_DIRECTORY": "${workspaceFolder}/data/anime",
|
||||||
|
"DATABASE_URL": "sqlite:///${workspaceFolder}/data/aniworld.db"
|
||||||
},
|
},
|
||||||
"cwd": "${workspaceFolder}"
|
"cwd": "${workspaceFolder}"
|
||||||
}
|
}
|
||||||
|
|||||||
7
.vscode/settings.json
vendored
7
.vscode/settings.json
vendored
@@ -1,6 +1,11 @@
|
|||||||
{
|
{
|
||||||
"python.defaultInterpreterPath": "./aniworld/Scripts/python.exe",
|
"python.defaultInterpreterPath": "${workspaceFolder}/.venv/bin/python",
|
||||||
"python.terminal.activateEnvironment": true,
|
"python.terminal.activateEnvironment": true,
|
||||||
|
"python.terminal.activateEnvInCurrentTerminal": true,
|
||||||
|
"terminal.integrated.env.linux": {
|
||||||
|
"VIRTUAL_ENV": "${workspaceFolder}/.venv",
|
||||||
|
"PATH": "${workspaceFolder}/.venv/bin:${env:PATH}"
|
||||||
|
},
|
||||||
"python.linting.enabled": true,
|
"python.linting.enabled": true,
|
||||||
"python.linting.flake8Enabled": true,
|
"python.linting.flake8Enabled": true,
|
||||||
"python.linting.pylintEnabled": true,
|
"python.linting.pylintEnabled": true,
|
||||||
|
|||||||
166
.vscode/tasks.json
vendored
Normal file
166
.vscode/tasks.json
vendored
Normal file
@@ -0,0 +1,166 @@
|
|||||||
|
{
|
||||||
|
"version": "2.0.0",
|
||||||
|
"tasks": [
|
||||||
|
{
|
||||||
|
"label": "Run FastAPI Server",
|
||||||
|
"type": "shell",
|
||||||
|
"command": "conda",
|
||||||
|
"args": [
|
||||||
|
"run",
|
||||||
|
"-n",
|
||||||
|
"AniWorld",
|
||||||
|
"python",
|
||||||
|
"-m",
|
||||||
|
"uvicorn",
|
||||||
|
"src.server.fastapi_app:app",
|
||||||
|
"--host",
|
||||||
|
"127.0.0.1",
|
||||||
|
"--port",
|
||||||
|
"8000",
|
||||||
|
"--reload"
|
||||||
|
],
|
||||||
|
"group": {
|
||||||
|
"kind": "build",
|
||||||
|
"isDefault": true
|
||||||
|
},
|
||||||
|
"presentation": {
|
||||||
|
"echo": true,
|
||||||
|
"reveal": "always",
|
||||||
|
"focus": false,
|
||||||
|
"panel": "new"
|
||||||
|
},
|
||||||
|
"options": {
|
||||||
|
"cwd": "${workspaceFolder}"
|
||||||
|
},
|
||||||
|
"problemMatcher": [],
|
||||||
|
"isBackground": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "Run CLI Application",
|
||||||
|
"type": "shell",
|
||||||
|
"command": "conda",
|
||||||
|
"args": [
|
||||||
|
"run",
|
||||||
|
"-n",
|
||||||
|
"AniWorld",
|
||||||
|
"python",
|
||||||
|
"src/cli/Main.py"
|
||||||
|
],
|
||||||
|
"group": "build",
|
||||||
|
"presentation": {
|
||||||
|
"echo": true,
|
||||||
|
"reveal": "always",
|
||||||
|
"focus": false,
|
||||||
|
"panel": "new"
|
||||||
|
},
|
||||||
|
"options": {
|
||||||
|
"cwd": "${workspaceFolder}"
|
||||||
|
},
|
||||||
|
"problemMatcher": []
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "Run All Tests",
|
||||||
|
"type": "shell",
|
||||||
|
"command": "conda",
|
||||||
|
"args": [
|
||||||
|
"run",
|
||||||
|
"-n",
|
||||||
|
"AniWorld",
|
||||||
|
"python",
|
||||||
|
"-m",
|
||||||
|
"pytest",
|
||||||
|
"tests/",
|
||||||
|
"-v",
|
||||||
|
"--tb=short"
|
||||||
|
],
|
||||||
|
"group": "test",
|
||||||
|
"presentation": {
|
||||||
|
"echo": true,
|
||||||
|
"reveal": "always",
|
||||||
|
"focus": false,
|
||||||
|
"panel": "new"
|
||||||
|
},
|
||||||
|
"options": {
|
||||||
|
"cwd": "${workspaceFolder}"
|
||||||
|
},
|
||||||
|
"problemMatcher": []
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "Run Unit Tests",
|
||||||
|
"type": "shell",
|
||||||
|
"command": "conda",
|
||||||
|
"args": [
|
||||||
|
"run",
|
||||||
|
"-n",
|
||||||
|
"AniWorld",
|
||||||
|
"python",
|
||||||
|
"-m",
|
||||||
|
"pytest",
|
||||||
|
"tests/unit/",
|
||||||
|
"-v"
|
||||||
|
],
|
||||||
|
"group": "test",
|
||||||
|
"presentation": {
|
||||||
|
"echo": true,
|
||||||
|
"reveal": "always",
|
||||||
|
"focus": false,
|
||||||
|
"panel": "new"
|
||||||
|
},
|
||||||
|
"options": {
|
||||||
|
"cwd": "${workspaceFolder}"
|
||||||
|
},
|
||||||
|
"problemMatcher": []
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "Run Integration Tests",
|
||||||
|
"type": "shell",
|
||||||
|
"command": "conda",
|
||||||
|
"args": [
|
||||||
|
"run",
|
||||||
|
"-n",
|
||||||
|
"AniWorld",
|
||||||
|
"python",
|
||||||
|
"-m",
|
||||||
|
"pytest",
|
||||||
|
"tests/integration/",
|
||||||
|
"-v"
|
||||||
|
],
|
||||||
|
"group": "test",
|
||||||
|
"presentation": {
|
||||||
|
"echo": true,
|
||||||
|
"reveal": "always",
|
||||||
|
"focus": false,
|
||||||
|
"panel": "new"
|
||||||
|
},
|
||||||
|
"options": {
|
||||||
|
"cwd": "${workspaceFolder}"
|
||||||
|
},
|
||||||
|
"problemMatcher": []
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"label": "Install Dependencies",
|
||||||
|
"type": "shell",
|
||||||
|
"command": "conda",
|
||||||
|
"args": [
|
||||||
|
"run",
|
||||||
|
"-n",
|
||||||
|
"AniWorld",
|
||||||
|
"pip",
|
||||||
|
"install",
|
||||||
|
"-r",
|
||||||
|
"requirements.txt"
|
||||||
|
],
|
||||||
|
"group": "build",
|
||||||
|
"presentation": {
|
||||||
|
"echo": true,
|
||||||
|
"reveal": "always",
|
||||||
|
"focus": false,
|
||||||
|
"panel": "new"
|
||||||
|
},
|
||||||
|
"options": {
|
||||||
|
"cwd": "${workspaceFolder}"
|
||||||
|
},
|
||||||
|
"problemMatcher": []
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
@@ -1,94 +0,0 @@
|
|||||||
# Controller Cleanup Summary
|
|
||||||
|
|
||||||
## Files Successfully Removed (No Longer Needed)
|
|
||||||
|
|
||||||
### ✅ Removed from `src/server/web/controllers/api/v1/`:
|
|
||||||
1. **`main_routes.py`** - Web routes should be in `web/` directory per instruction.md
|
|
||||||
2. **`static_routes.py`** - Web routes should be in `web/` directory per instruction.md
|
|
||||||
3. **`websocket_handlers.py`** - Web routes should be in `web/` directory per instruction.md
|
|
||||||
|
|
||||||
### ✅ Removed from `src/server/web/controllers/api/`:
|
|
||||||
4. **`api_endpoints.py`** - Functionality moved to `api/v1/integrations.py`
|
|
||||||
|
|
||||||
## Final Clean Directory Structure
|
|
||||||
|
|
||||||
```
|
|
||||||
src/server/web/controllers/
|
|
||||||
├── api/
|
|
||||||
│ └── v1/
|
|
||||||
│ ├── anime.py ✅ Anime CRUD operations
|
|
||||||
│ ├── auth.py ✅ Authentication endpoints
|
|
||||||
│ ├── backups.py ✅ Backup operations
|
|
||||||
│ ├── bulk.py ✅ Bulk operations (existing)
|
|
||||||
│ ├── config.py ✅ Configuration management (existing)
|
|
||||||
│ ├── database.py ✅ Database operations (existing)
|
|
||||||
│ ├── diagnostics.py ✅ System diagnostics
|
|
||||||
│ ├── downloads.py ✅ Download operations
|
|
||||||
│ ├── episodes.py ✅ Episode management
|
|
||||||
│ ├── health.py ✅ Health checks (existing)
|
|
||||||
│ ├── integrations.py ✅ External integrations
|
|
||||||
│ ├── logging.py ✅ Logging management (existing)
|
|
||||||
│ ├── maintenance.py ✅ System maintenance
|
|
||||||
│ ├── performance.py ✅ Performance monitoring (existing)
|
|
||||||
│ ├── process.py ✅ Process management (existing)
|
|
||||||
│ ├── scheduler.py ✅ Task scheduling (existing)
|
|
||||||
│ ├── search.py ✅ Search functionality
|
|
||||||
│ └── storage.py ✅ Storage management
|
|
||||||
├── shared/
|
|
||||||
│ ├── __init__.py ✅ Package initialization
|
|
||||||
│ ├── auth_decorators.py ✅ Authentication decorators
|
|
||||||
│ ├── error_handlers.py ✅ Error handling utilities
|
|
||||||
│ ├── response_helpers.py ✅ Response formatting utilities
|
|
||||||
│ └── validators.py ✅ Input validation utilities
|
|
||||||
├── web/ ✅ Created for future web routes
|
|
||||||
├── instruction.md ✅ Kept for reference
|
|
||||||
└── __pycache__/ ✅ Python cache directory
|
|
||||||
```
|
|
||||||
|
|
||||||
## Files Count Summary
|
|
||||||
|
|
||||||
### Before Cleanup:
|
|
||||||
- **Total files**: 22+ files (including duplicates and misplaced files)
|
|
||||||
|
|
||||||
### After Cleanup:
|
|
||||||
- **Total files**: 18 essential files
|
|
||||||
- **API modules**: 18 modules in `api/v1/`
|
|
||||||
- **Shared modules**: 4 modules in `shared/`
|
|
||||||
- **Web modules**: 0 (directory created for future use)
|
|
||||||
|
|
||||||
## Verification Status
|
|
||||||
|
|
||||||
### ✅ All Required Modules Present (per instruction.md):
|
|
||||||
1. ✅ **Core API modules**: anime, episodes, downloads, search, backups, storage, auth, diagnostics, integrations, maintenance
|
|
||||||
2. ✅ **Existing modules preserved**: database, config, bulk, performance, scheduler, process, health, logging
|
|
||||||
3. ✅ **Shared utilities**: auth_decorators, error_handlers, validators, response_helpers
|
|
||||||
4. ✅ **Directory structure**: Matches instruction.md specification exactly
|
|
||||||
|
|
||||||
### ✅ Removed Files Status:
|
|
||||||
- **No functionality lost**: All removed files were either duplicates or misplaced
|
|
||||||
- **api_endpoints.py**: Functionality fully migrated to `integrations.py`
|
|
||||||
- **Web routes**: Properly separated from API routes (moved to `web/` directory structure)
|
|
||||||
|
|
||||||
## Test Coverage Status
|
|
||||||
|
|
||||||
All 18 remaining modules have comprehensive test coverage:
|
|
||||||
- **Shared modules**: 4 test files with 60+ test cases
|
|
||||||
- **API modules**: 14 test files with 200+ test cases
|
|
||||||
- **Total test coverage**: 260+ test cases covering all functionality
|
|
||||||
|
|
||||||
## Next Steps
|
|
||||||
|
|
||||||
1. ✅ **Cleanup completed** - Only essential files remain
|
|
||||||
2. ✅ **Structure optimized** - Follows instruction.md exactly
|
|
||||||
3. ✅ **Tests comprehensive** - All modules covered
|
|
||||||
4. **Ready for integration** - Clean, organized, well-tested codebase
|
|
||||||
|
|
||||||
## Summary
|
|
||||||
|
|
||||||
🎯 **Mission Accomplished**: Successfully cleaned up controller directory structure
|
|
||||||
- **Removed**: 4 unnecessary/misplaced files
|
|
||||||
- **Preserved**: All essential functionality
|
|
||||||
- **Organized**: Perfect alignment with instruction.md specification
|
|
||||||
- **Tested**: Comprehensive test coverage maintained
|
|
||||||
|
|
||||||
The controller directory now contains exactly the files needed for the reorganized architecture, with no redundant or misplaced files.
|
|
||||||
24
Docker/Containerfile
Normal file
24
Docker/Containerfile
Normal file
@@ -0,0 +1,24 @@
|
|||||||
|
FROM alpine:3.19
|
||||||
|
|
||||||
|
RUN apk add --no-cache \
|
||||||
|
wireguard-tools \
|
||||||
|
iptables \
|
||||||
|
ip6tables \
|
||||||
|
bash \
|
||||||
|
curl \
|
||||||
|
iputils-ping \
|
||||||
|
iproute2 \
|
||||||
|
openresolv
|
||||||
|
|
||||||
|
# Create wireguard config directory (config is mounted at runtime)
|
||||||
|
RUN mkdir -p /etc/wireguard
|
||||||
|
|
||||||
|
# Copy entrypoint
|
||||||
|
COPY entrypoint.sh /entrypoint.sh
|
||||||
|
RUN chmod +x /entrypoint.sh
|
||||||
|
|
||||||
|
# Health check: can we reach the internet through the VPN?
|
||||||
|
HEALTHCHECK --interval=30s --timeout=10s --retries=5 \
|
||||||
|
CMD curl -sf --max-time 5 http://1.1.1.1 || exit 1
|
||||||
|
|
||||||
|
ENTRYPOINT ["/entrypoint.sh"]
|
||||||
33
Docker/Dockerfile.app
Normal file
33
Docker/Dockerfile.app
Normal file
@@ -0,0 +1,33 @@
|
|||||||
|
FROM python:3.12-slim
|
||||||
|
|
||||||
|
WORKDIR /app
|
||||||
|
|
||||||
|
# Install system dependencies for compiled Python packages
|
||||||
|
RUN apt-get update && \
|
||||||
|
apt-get install -y --no-install-recommends \
|
||||||
|
gcc \
|
||||||
|
g++ \
|
||||||
|
libffi-dev \
|
||||||
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
# Install Python dependencies (cached layer)
|
||||||
|
COPY requirements.txt .
|
||||||
|
RUN pip install --no-cache-dir -r requirements.txt
|
||||||
|
|
||||||
|
# Copy the full application
|
||||||
|
COPY src/ ./src/
|
||||||
|
COPY run_server.py .
|
||||||
|
COPY pyproject.toml .
|
||||||
|
COPY data/config.json ./data/config.json
|
||||||
|
|
||||||
|
# Create runtime directories
|
||||||
|
RUN mkdir -p /app/data/config_backups /app/logs
|
||||||
|
|
||||||
|
EXPOSE 8000
|
||||||
|
|
||||||
|
ENV PYTHONUNBUFFERED=1
|
||||||
|
ENV PYTHONPATH=/app
|
||||||
|
|
||||||
|
# Bind to 0.0.0.0 so the app is reachable from the VPN container's network
|
||||||
|
CMD ["python", "-m", "uvicorn", "src.server.fastapi_app:app", \
|
||||||
|
"--host", "0.0.0.0", "--port", "8000"]
|
||||||
91
Docker/dispatcher.d-99-wg-routes.sh
Normal file
91
Docker/dispatcher.d-99-wg-routes.sh
Normal file
@@ -0,0 +1,91 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
# === Configuration ===
|
||||||
|
LOGFILE="/tmp/dispatcher.log"
|
||||||
|
BACKUP="/tmp/dispatcher.log.1"
|
||||||
|
MAXSIZE=$((1024 * 1024)) # 1 MB
|
||||||
|
VPN_IFACE="nl"
|
||||||
|
GATEWAY="192.168.178.1"
|
||||||
|
LOCAL_IFACE="wlp4s0f0"
|
||||||
|
ROUTE1="185.183.34.149"
|
||||||
|
ROUTE2="192.168.178.0/24"
|
||||||
|
|
||||||
|
# === Log Rotation ===
|
||||||
|
if [ -f "$LOGFILE" ] && [ "$(stat -c%s "$LOGFILE")" -ge "$MAXSIZE" ]; then
|
||||||
|
echo "[$(date)] Log file exceeded 1MB, rotating..." >> "$LOGFILE"
|
||||||
|
mv "$LOGFILE" "$BACKUP"
|
||||||
|
touch "$LOGFILE"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# === Logging Setup ===
|
||||||
|
exec >> "$LOGFILE" 2>&1
|
||||||
|
echo "[$(date)] Running dispatcher for $1 with status $2"
|
||||||
|
|
||||||
|
IFACE="$1"
|
||||||
|
STATUS="$2"
|
||||||
|
|
||||||
|
log_and_run() {
|
||||||
|
echo "[$(date)] Executing: $*"
|
||||||
|
if ! output=$("$@" 2>&1); then
|
||||||
|
echo "[$(date)] ERROR: Command failed: $*"
|
||||||
|
echo "[$(date)] Output: $output"
|
||||||
|
else
|
||||||
|
echo "[$(date)] Success: $*"
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
# === VPN Routing Logic ===
|
||||||
|
if [ "$IFACE" = "$VPN_IFACE" ]; then
|
||||||
|
case "$STATUS" in
|
||||||
|
up)
|
||||||
|
echo "[$(date)] VPN interface is up. Preparing routes..."
|
||||||
|
|
||||||
|
# === Wait for local interface and gateway ===
|
||||||
|
echo "[$(date)] Waiting for $LOCAL_IFACE (state UP) and gateway $GATEWAY (reachable)..."
|
||||||
|
until ip link show "$LOCAL_IFACE" | grep -q "state UP" && ip route get "$GATEWAY" &>/dev/null; do
|
||||||
|
echo "[$(date)] Waiting for $LOCAL_IFACE and $GATEWAY..."
|
||||||
|
sleep 1
|
||||||
|
done
|
||||||
|
echo "[$(date)] Local interface and gateway are ready."
|
||||||
|
# === End Wait ===
|
||||||
|
|
||||||
|
# === APPLY ROUTES (Corrected Order) ===
|
||||||
|
|
||||||
|
# 1. Add the route for the local network FIRST
|
||||||
|
log_and_run /sbin/ip route replace "$ROUTE2" dev "$LOCAL_IFACE"
|
||||||
|
|
||||||
|
# 2. Add the route to the VPN endpoint via the gateway SECOND
|
||||||
|
log_and_run /sbin/ip route replace "$ROUTE1" via "$GATEWAY" dev "$LOCAL_IFACE"
|
||||||
|
|
||||||
|
# === END APPLY ROUTES ===
|
||||||
|
|
||||||
|
# Log interface and WireGuard status
|
||||||
|
echo "[$(date)] --- ip addr show $VPN_IFACE ---"
|
||||||
|
ip addr show "$VPN_IFACE"
|
||||||
|
echo "[$(date)] --- wg show $VPN_IFACE ---"
|
||||||
|
wg show "$VPN_IFACE"
|
||||||
|
|
||||||
|
;;
|
||||||
|
|
||||||
|
down)
|
||||||
|
echo "[$(date)] VPN interface is down. Verifying before removing routes..."
|
||||||
|
|
||||||
|
# Log interface and WireGuard status
|
||||||
|
echo "[$(date)] --- ip addr show $VPN_IFACE ---"
|
||||||
|
ip addr show "$VPN_IFACE"
|
||||||
|
echo "[$(date)] --- wg show $VPN_IFACE ---"
|
||||||
|
wg show "$VPN_IFACE"
|
||||||
|
|
||||||
|
# Delay and confirm interface is still down
|
||||||
|
sleep 5
|
||||||
|
if ip link show "$VPN_IFACE" | grep -q "state UP"; then
|
||||||
|
echo "[$(date)] VPN interface is still up. Skipping route removal."
|
||||||
|
else
|
||||||
|
echo "[$(date)] Confirmed VPN is down. Removing routes..."
|
||||||
|
# It's good practice to remove them in reverse order, too.
|
||||||
|
log_and_run /sbin/ip route del "$ROUTE1" via "$GATEWAY" dev "$LOCAL_IFACE"
|
||||||
|
log_and_run /sbin/ip route del "$ROUTE2" dev "$LOCAL_IFACE"
|
||||||
|
fi
|
||||||
|
;;
|
||||||
|
esac
|
||||||
|
fi
|
||||||
228
Docker/entrypoint.sh
Normal file
228
Docker/entrypoint.sh
Normal file
@@ -0,0 +1,228 @@
|
|||||||
|
#!/bin/bash
|
||||||
|
set -e
|
||||||
|
|
||||||
|
INTERFACE="wg0"
|
||||||
|
MOUNT_CONFIG="/etc/wireguard/${INTERFACE}.conf"
|
||||||
|
CONFIG_DIR="/run/wireguard"
|
||||||
|
CONFIG_FILE="${CONFIG_DIR}/${INTERFACE}.conf"
|
||||||
|
CHECK_INTERVAL="${HEALTH_CHECK_INTERVAL:-10}"
|
||||||
|
CHECK_HOST="${HEALTH_CHECK_HOST:-1.1.1.1}"
|
||||||
|
|
||||||
|
# ──────────────────────────────────────────────
|
||||||
|
# Validate config exists, copy to writable location
|
||||||
|
# ──────────────────────────────────────────────
|
||||||
|
if [ ! -f "$MOUNT_CONFIG" ]; then
|
||||||
|
echo "[error] WireGuard config not found at ${MOUNT_CONFIG}"
|
||||||
|
echo "[error] Mount your config file: -v /path/to/your.conf:/etc/wireguard/wg0.conf:ro"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
mkdir -p "$CONFIG_DIR"
|
||||||
|
cp "$MOUNT_CONFIG" "$CONFIG_FILE"
|
||||||
|
chmod 600 "$CONFIG_FILE"
|
||||||
|
|
||||||
|
# Extract endpoint IP and port from the config
|
||||||
|
VPN_ENDPOINT=$(grep -i '^Endpoint' "$CONFIG_FILE" | head -1 | sed 's/.*= *//;s/:.*//;s/ //g')
|
||||||
|
VPN_PORT=$(grep -i '^Endpoint' "$CONFIG_FILE" | head -1 | sed 's/.*://;s/ //g')
|
||||||
|
# Extract address
|
||||||
|
VPN_ADDRESS=$(grep -i '^Address' "$CONFIG_FILE" | head -1 | sed 's/.*= *//;s/ //g')
|
||||||
|
|
||||||
|
if [ -z "$VPN_ENDPOINT" ] || [ -z "$VPN_PORT" ]; then
|
||||||
|
echo "[error] Could not parse Endpoint from ${CONFIG_FILE}"
|
||||||
|
exit 1
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "[init] Config: ${CONFIG_FILE}"
|
||||||
|
echo "[init] Endpoint: ${VPN_ENDPOINT}:${VPN_PORT}"
|
||||||
|
echo "[init] Address: ${VPN_ADDRESS}"
|
||||||
|
|
||||||
|
# ──────────────────────────────────────────────
|
||||||
|
# Kill switch: only allow traffic through wg0
|
||||||
|
# ──────────────────────────────────────────────
|
||||||
|
setup_killswitch() {
|
||||||
|
echo "[killswitch] Setting up iptables kill switch..."
|
||||||
|
|
||||||
|
# Flush existing rules
|
||||||
|
iptables -F
|
||||||
|
iptables -X
|
||||||
|
iptables -t nat -F
|
||||||
|
|
||||||
|
# Default policy: DROP everything
|
||||||
|
iptables -P INPUT DROP
|
||||||
|
iptables -P FORWARD DROP
|
||||||
|
iptables -P OUTPUT DROP
|
||||||
|
|
||||||
|
# Allow loopback
|
||||||
|
iptables -A INPUT -i lo -j ACCEPT
|
||||||
|
iptables -A OUTPUT -o lo -j ACCEPT
|
||||||
|
|
||||||
|
# Allow traffic to/from VPN endpoint (needed to establish tunnel)
|
||||||
|
iptables -A OUTPUT -d "$VPN_ENDPOINT" -p udp --dport "$VPN_PORT" -j ACCEPT
|
||||||
|
iptables -A INPUT -s "$VPN_ENDPOINT" -p udp --sport "$VPN_PORT" -j ACCEPT
|
||||||
|
|
||||||
|
# Allow all traffic through the WireGuard interface
|
||||||
|
iptables -A INPUT -i "$INTERFACE" -j ACCEPT
|
||||||
|
iptables -A OUTPUT -o "$INTERFACE" -j ACCEPT
|
||||||
|
|
||||||
|
# Allow DNS to the VPN DNS server (through wg0)
|
||||||
|
iptables -A OUTPUT -o "$INTERFACE" -p udp --dport 53 -j ACCEPT
|
||||||
|
iptables -A OUTPUT -o "$INTERFACE" -p tcp --dport 53 -j ACCEPT
|
||||||
|
|
||||||
|
# Allow DHCP (for container networking)
|
||||||
|
iptables -A OUTPUT -p udp --dport 67:68 -j ACCEPT
|
||||||
|
iptables -A INPUT -p udp --sport 67:68 -j ACCEPT
|
||||||
|
|
||||||
|
# Allow established/related connections
|
||||||
|
iptables -A INPUT -m conntrack --ctstate ESTABLISHED,RELATED -j ACCEPT
|
||||||
|
iptables -A OUTPUT -m conntrack --ctstate ESTABLISHED,RELATED -j ACCEPT
|
||||||
|
|
||||||
|
# ── Allow incoming connections to exposed service ports (e.g. app on 8000) ──
|
||||||
|
# LOCAL_PORTS can be set as env var, e.g. "8000,8080,3000"
|
||||||
|
if [ -n "${LOCAL_PORTS:-}" ]; then
|
||||||
|
for port in $(echo "$LOCAL_PORTS" | tr ',' ' '); do
|
||||||
|
echo "[killswitch] Allowing incoming traffic on port ${port}"
|
||||||
|
iptables -A INPUT -p tcp --dport "$port" -j ACCEPT
|
||||||
|
iptables -A OUTPUT -p tcp --sport "$port" -j ACCEPT
|
||||||
|
done
|
||||||
|
fi
|
||||||
|
|
||||||
|
# ── FORWARDING (so other containers can use this VPN) ──
|
||||||
|
iptables -A FORWARD -i eth0 -o "$INTERFACE" -j ACCEPT
|
||||||
|
iptables -A FORWARD -i "$INTERFACE" -o eth0 -m conntrack --ctstate ESTABLISHED,RELATED -j ACCEPT
|
||||||
|
|
||||||
|
# NAT: masquerade traffic from other containers going out through wg0
|
||||||
|
iptables -t nat -A POSTROUTING -o "$INTERFACE" -j MASQUERADE
|
||||||
|
|
||||||
|
echo "[killswitch] Kill switch active. Traffic blocked if VPN drops."
|
||||||
|
}
|
||||||
|
|
||||||
|
# ──────────────────────────────────────────────
|
||||||
|
# Enable IP forwarding so other containers can route through us
|
||||||
|
# ──────────────────────────────────────────────
|
||||||
|
enable_forwarding() {
|
||||||
|
echo "[init] Enabling IP forwarding..."
|
||||||
|
if cat /proc/sys/net/ipv4/ip_forward 2>/dev/null | grep -q 1; then
|
||||||
|
echo "[init] IP forwarding already enabled."
|
||||||
|
elif echo 1 > /proc/sys/net/ipv4/ip_forward 2>/dev/null; then
|
||||||
|
echo "[init] IP forwarding enabled via /proc."
|
||||||
|
else
|
||||||
|
echo "[init] /proc read-only — relying on --sysctl net.ipv4.ip_forward=1"
|
||||||
|
fi
|
||||||
|
}
|
||||||
|
|
||||||
|
# ──────────────────────────────────────────────
|
||||||
|
# Start WireGuard manually (no wg-quick, avoids sysctl issues)
|
||||||
|
# ──────────────────────────────────────────────
|
||||||
|
start_vpn() {
|
||||||
|
echo "[vpn] Starting WireGuard interface ${INTERFACE}..."
|
||||||
|
|
||||||
|
# Create the interface
|
||||||
|
ip link add "$INTERFACE" type wireguard
|
||||||
|
|
||||||
|
# Apply the WireGuard config (keys, peer, endpoint)
|
||||||
|
wg setconf "$INTERFACE" <(grep -v -i '^\(Address\|DNS\|MTU\|Table\|PreUp\|PostUp\|PreDown\|PostDown\|SaveConfig\)' "$CONFIG_FILE")
|
||||||
|
|
||||||
|
# Assign the address
|
||||||
|
ip -4 address add "$VPN_ADDRESS" dev "$INTERFACE"
|
||||||
|
|
||||||
|
# Set MTU
|
||||||
|
ip link set mtu 1420 up dev "$INTERFACE"
|
||||||
|
|
||||||
|
# Find default gateway/interface for the endpoint route
|
||||||
|
DEFAULT_GW=$(ip route | grep '^default' | head -1 | awk '{print $3}')
|
||||||
|
DEFAULT_IF=$(ip route | grep '^default' | head -1 | awk '{print $5}')
|
||||||
|
|
||||||
|
# Route VPN endpoint through the container's default gateway
|
||||||
|
if [ -n "$DEFAULT_GW" ] && [ -n "$DEFAULT_IF" ]; then
|
||||||
|
ip route add "$VPN_ENDPOINT/32" via "$DEFAULT_GW" dev "$DEFAULT_IF" 2>/dev/null || true
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Route all traffic through the WireGuard tunnel
|
||||||
|
ip route add 0.0.0.0/1 dev "$INTERFACE"
|
||||||
|
ip route add 128.0.0.0/1 dev "$INTERFACE"
|
||||||
|
|
||||||
|
# ── Policy routing: ensure responses to incoming LAN traffic go back via eth0 ──
|
||||||
|
if [ -n "$DEFAULT_GW" ] && [ -n "$DEFAULT_IF" ]; then
|
||||||
|
# Get the container's eth0 IP address (BusyBox-compatible, no grep -P)
|
||||||
|
ETH0_IP=$(ip -4 addr show "$DEFAULT_IF" | awk '/inet / {split($2, a, "/"); print a[1]}' | head -1)
|
||||||
|
ETH0_SUBNET=$(ip -4 route show dev "$DEFAULT_IF" | grep -v default | head -1 | awk '{print $1}')
|
||||||
|
if [ -n "$ETH0_IP" ] && [ -n "$ETH0_SUBNET" ]; then
|
||||||
|
echo "[vpn] Setting up policy routing for incoming traffic (${ETH0_IP} on ${DEFAULT_IF})"
|
||||||
|
ip route add default via "$DEFAULT_GW" dev "$DEFAULT_IF" table 100 2>/dev/null || true
|
||||||
|
ip route add "$ETH0_SUBNET" dev "$DEFAULT_IF" table 100 2>/dev/null || true
|
||||||
|
ip rule add from "$ETH0_IP" table 100 priority 100 2>/dev/null || true
|
||||||
|
echo "[vpn] Policy routing active — incoming connections will be routed back via ${DEFAULT_IF}"
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Set up DNS
|
||||||
|
VPN_DNS=$(grep -i '^DNS' "$CONFIG_FILE" | head -1 | sed 's/.*= *//;s/ //g')
|
||||||
|
if [ -n "$VPN_DNS" ]; then
|
||||||
|
echo "nameserver $VPN_DNS" > /etc/resolv.conf
|
||||||
|
echo "[vpn] DNS set to ${VPN_DNS}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
echo "[vpn] WireGuard interface ${INTERFACE} is up."
|
||||||
|
}
|
||||||
|
|
||||||
|
# ──────────────────────────────────────────────
|
||||||
|
# Stop WireGuard manually
|
||||||
|
# ──────────────────────────────────────────────
|
||||||
|
stop_vpn() {
|
||||||
|
echo "[vpn] Stopping WireGuard interface ${INTERFACE}..."
|
||||||
|
ip link del "$INTERFACE" 2>/dev/null || true
|
||||||
|
}
|
||||||
|
|
||||||
|
# ──────────────────────────────────────────────
|
||||||
|
# Health check loop — restarts VPN if tunnel dies
|
||||||
|
# ──────────────────────────────────────────────
|
||||||
|
health_loop() {
|
||||||
|
local failures=0
|
||||||
|
local max_failures=3
|
||||||
|
|
||||||
|
echo "[health] Starting health check (every ${CHECK_INTERVAL}s, target ${CHECK_HOST})..."
|
||||||
|
|
||||||
|
while true; do
|
||||||
|
sleep "$CHECK_INTERVAL"
|
||||||
|
|
||||||
|
if curl -sf --max-time 5 "http://$CHECK_HOST" > /dev/null 2>&1; then
|
||||||
|
if [ "$failures" -gt 0 ]; then
|
||||||
|
echo "[health] VPN recovered."
|
||||||
|
failures=0
|
||||||
|
fi
|
||||||
|
else
|
||||||
|
failures=$((failures + 1))
|
||||||
|
echo "[health] Ping failed ($failures/$max_failures)"
|
||||||
|
|
||||||
|
if [ "$failures" -ge "$max_failures" ]; then
|
||||||
|
echo "[health] VPN appears down. Restarting WireGuard..."
|
||||||
|
stop_vpn
|
||||||
|
sleep 2
|
||||||
|
start_vpn
|
||||||
|
failures=0
|
||||||
|
echo "[health] WireGuard restarted."
|
||||||
|
fi
|
||||||
|
fi
|
||||||
|
done
|
||||||
|
}
|
||||||
|
|
||||||
|
# ──────────────────────────────────────────────
|
||||||
|
# Graceful shutdown
|
||||||
|
# ──────────────────────────────────────────────
|
||||||
|
cleanup() {
|
||||||
|
echo "[shutdown] Stopping WireGuard..."
|
||||||
|
stop_vpn
|
||||||
|
echo "[shutdown] Flushing iptables..."
|
||||||
|
iptables -F
|
||||||
|
iptables -t nat -F
|
||||||
|
echo "[shutdown] Done."
|
||||||
|
exit 0
|
||||||
|
}
|
||||||
|
|
||||||
|
trap cleanup SIGTERM SIGINT
|
||||||
|
|
||||||
|
# ── Main ──
|
||||||
|
enable_forwarding
|
||||||
|
setup_killswitch
|
||||||
|
start_vpn
|
||||||
|
health_loop
|
||||||
17
Docker/nl.conf
Normal file
17
Docker/nl.conf
Normal file
@@ -0,0 +1,17 @@
|
|||||||
|
[Interface]
|
||||||
|
PrivateKey = iO5spIue/6ciwUoR95hYtuxdtQxV/Q9EOoQ/jHe18kM=
|
||||||
|
Address = 10.2.0.2/32
|
||||||
|
DNS = 10.2.0.1
|
||||||
|
|
||||||
|
# Route zum VPN-Server direkt über dein lokales Netz
|
||||||
|
PostUp = ip route add 185.183.34.149 via 192.168.178.1 dev wlp4s0f0
|
||||||
|
PostUp = ip route add 192.168.178.0/24 via 192.168.178.1 dev wlp4s0f0
|
||||||
|
PostDown = ip route del 185.183.34.149 via 192.168.178.1 dev wlp4s0f0
|
||||||
|
PostDown = ip route del 192.168.178.0/24 via 192.168.178.1 dev wlp4s0f0
|
||||||
|
|
||||||
|
[Peer]
|
||||||
|
PublicKey = J4XVdtoBVc/EoI2Yk673Oes97WMnQSH5KfamZNjtM2s=
|
||||||
|
AllowedIPs = 0.0.0.0/1, 128.0.0.0/1
|
||||||
|
Endpoint = 185.183.34.149:51820
|
||||||
|
|
||||||
|
|
||||||
54
Docker/podman-compose.prod.yml
Normal file
54
Docker/podman-compose.prod.yml
Normal file
@@ -0,0 +1,54 @@
|
|||||||
|
# Production compose — pulls pre-built images from Gitea registry.
|
||||||
|
#
|
||||||
|
# Usage:
|
||||||
|
# podman login git.lpl-mind.de
|
||||||
|
# podman-compose -f podman-compose.prod.yml pull
|
||||||
|
# podman-compose -f podman-compose.prod.yml up -d
|
||||||
|
#
|
||||||
|
# Required files:
|
||||||
|
# - wg0.conf (WireGuard configuration in the same directory)
|
||||||
|
|
||||||
|
services:
|
||||||
|
vpn:
|
||||||
|
image: git.lpl-mind.de/lukas.pupkalipinski/aniworld/vpn:latest
|
||||||
|
container_name: vpn-wireguard
|
||||||
|
cap_add:
|
||||||
|
- NET_ADMIN
|
||||||
|
- SYS_MODULE
|
||||||
|
sysctls:
|
||||||
|
- net.ipv4.ip_forward=1
|
||||||
|
- net.ipv4.conf.all.src_valid_mark=1
|
||||||
|
volumes:
|
||||||
|
- /server/server_aniworld/wg0.conf:/etc/wireguard/wg0.conf:ro
|
||||||
|
- /lib/modules:/lib/modules:ro
|
||||||
|
ports:
|
||||||
|
- "2000:8000"
|
||||||
|
environment:
|
||||||
|
- HEALTH_CHECK_INTERVAL=10
|
||||||
|
- HEALTH_CHECK_HOST=1.1.1.1
|
||||||
|
- LOCAL_PORTS=8000
|
||||||
|
- PUID=1013
|
||||||
|
- PGID=1001
|
||||||
|
restart: unless-stopped
|
||||||
|
healthcheck:
|
||||||
|
test: ["CMD", "curl", "-sf", "--max-time", "5", "http://1.1.1.1"]
|
||||||
|
interval: 30s
|
||||||
|
timeout: 10s
|
||||||
|
retries: 5
|
||||||
|
start_period: 60s
|
||||||
|
|
||||||
|
app:
|
||||||
|
image: git.lpl-mind.de/lukas.pupkalipinski/aniworld/app:latest
|
||||||
|
container_name: aniworld-app
|
||||||
|
network_mode: "service:vpn"
|
||||||
|
depends_on:
|
||||||
|
vpn:
|
||||||
|
condition: service_healthy
|
||||||
|
environment:
|
||||||
|
- PYTHONUNBUFFERED=1
|
||||||
|
- PUID=1013
|
||||||
|
- PGID=1001
|
||||||
|
volumes:
|
||||||
|
- /server/server_aniworld/data:/app/data
|
||||||
|
- /server/server_aniworld/logs:/app/logs
|
||||||
|
restart: unless-stopped
|
||||||
47
Docker/podman-compose.yml
Normal file
47
Docker/podman-compose.yml
Normal file
@@ -0,0 +1,47 @@
|
|||||||
|
services:
|
||||||
|
vpn:
|
||||||
|
build:
|
||||||
|
context: .
|
||||||
|
dockerfile: Containerfile
|
||||||
|
container_name: vpn-wireguard
|
||||||
|
cap_add:
|
||||||
|
- NET_ADMIN
|
||||||
|
- SYS_MODULE
|
||||||
|
sysctls:
|
||||||
|
- net.ipv4.ip_forward=1
|
||||||
|
- net.ipv4.conf.all.src_valid_mark=1
|
||||||
|
volumes:
|
||||||
|
- ./wg0.conf:/etc/wireguard/wg0.conf:ro
|
||||||
|
- /lib/modules:/lib/modules:ro
|
||||||
|
ports:
|
||||||
|
- "8000:8000"
|
||||||
|
environment:
|
||||||
|
- HEALTH_CHECK_INTERVAL=10
|
||||||
|
- HEALTH_CHECK_HOST=1.1.1.1
|
||||||
|
- LOCAL_PORTS=8000
|
||||||
|
restart: unless-stopped
|
||||||
|
healthcheck:
|
||||||
|
test: ["CMD", "ping", "-c", "1", "-W", "5", "1.1.1.1"]
|
||||||
|
interval: 30s
|
||||||
|
timeout: 10s
|
||||||
|
retries: 3
|
||||||
|
|
||||||
|
app:
|
||||||
|
build:
|
||||||
|
context: ..
|
||||||
|
dockerfile: Docker/Dockerfile.app
|
||||||
|
container_name: aniworld-app
|
||||||
|
network_mode: "service:vpn"
|
||||||
|
depends_on:
|
||||||
|
vpn:
|
||||||
|
condition: service_healthy
|
||||||
|
environment:
|
||||||
|
- PYTHONUNBUFFERED=1
|
||||||
|
volumes:
|
||||||
|
- app-data:/app/data
|
||||||
|
- app-logs:/app/logs
|
||||||
|
restart: unless-stopped
|
||||||
|
|
||||||
|
volumes:
|
||||||
|
app-data:
|
||||||
|
app-logs:
|
||||||
97
Docker/push.sh
Normal file
97
Docker/push.sh
Normal file
@@ -0,0 +1,97 @@
|
|||||||
|
#!/usr/bin/env bash
|
||||||
|
# filepath: /home/lukas/Volume/repo/Aniworld/Docker/push.sh
|
||||||
|
#
|
||||||
|
# Build and push Aniworld container images to the Gitea registry.
|
||||||
|
#
|
||||||
|
# Usage:
|
||||||
|
# ./push.sh # builds & pushes with tag "latest"
|
||||||
|
# ./push.sh v1.2.3 # builds & pushes with tag "v1.2.3"
|
||||||
|
# ./push.sh v1.2.3 --no-build # pushes existing images only
|
||||||
|
#
|
||||||
|
# Prerequisites:
|
||||||
|
# podman login git.lpl-mind.de
|
||||||
|
|
||||||
|
set -euo pipefail
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Configuration
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
REGISTRY="git.lpl-mind.de"
|
||||||
|
NAMESPACE="lukas.pupkalipinski"
|
||||||
|
PROJECT="aniworld"
|
||||||
|
|
||||||
|
APP_IMAGE="${REGISTRY}/${NAMESPACE}/${PROJECT}/app"
|
||||||
|
VPN_IMAGE="${REGISTRY}/${NAMESPACE}/${PROJECT}/vpn"
|
||||||
|
|
||||||
|
TAG="${1:-latest}"
|
||||||
|
SKIP_BUILD=false
|
||||||
|
if [[ "${2:-}" == "--no-build" ]]; then
|
||||||
|
SKIP_BUILD=true
|
||||||
|
fi
|
||||||
|
|
||||||
|
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||||
|
PROJECT_ROOT="$(cd "${SCRIPT_DIR}/.." && pwd)"
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Helpers
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
log() { echo -e "\n>>> $*"; }
|
||||||
|
err() { echo -e "\n❌ ERROR: $*" >&2; exit 1; }
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Pre-flight checks
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
echo "============================================"
|
||||||
|
echo " Aniworld — Build & Push"
|
||||||
|
echo " Registry : ${REGISTRY}"
|
||||||
|
echo " Tag : ${TAG}"
|
||||||
|
echo "============================================"
|
||||||
|
|
||||||
|
command -v podman &>/dev/null || err "podman is not installed."
|
||||||
|
|
||||||
|
if ! podman login --get-login "${REGISTRY}" &>/dev/null; then
|
||||||
|
err "Not logged in. Run:\n podman login ${REGISTRY}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Build
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
if [[ "${SKIP_BUILD}" == false ]]; then
|
||||||
|
log "Building app image → ${APP_IMAGE}:${TAG}"
|
||||||
|
podman build \
|
||||||
|
-t "${APP_IMAGE}:${TAG}" \
|
||||||
|
-f "${SCRIPT_DIR}/Dockerfile.app" \
|
||||||
|
"${PROJECT_ROOT}"
|
||||||
|
|
||||||
|
log "Building VPN image → ${VPN_IMAGE}:${TAG}"
|
||||||
|
podman build \
|
||||||
|
-t "${VPN_IMAGE}:${TAG}" \
|
||||||
|
-f "${SCRIPT_DIR}/Containerfile" \
|
||||||
|
"${SCRIPT_DIR}"
|
||||||
|
fi
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Push
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
log "Pushing ${APP_IMAGE}:${TAG}"
|
||||||
|
podman push "${APP_IMAGE}:${TAG}"
|
||||||
|
|
||||||
|
log "Pushing ${VPN_IMAGE}:${TAG}"
|
||||||
|
podman push "${VPN_IMAGE}:${TAG}"
|
||||||
|
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
# Summary
|
||||||
|
# ---------------------------------------------------------------------------
|
||||||
|
echo ""
|
||||||
|
echo "============================================"
|
||||||
|
echo " ✅ Push complete!"
|
||||||
|
echo ""
|
||||||
|
echo " Images:"
|
||||||
|
echo " ${APP_IMAGE}:${TAG}"
|
||||||
|
echo " ${VPN_IMAGE}:${TAG}"
|
||||||
|
echo ""
|
||||||
|
echo " Deploy on server:"
|
||||||
|
echo " podman login ${REGISTRY}"
|
||||||
|
echo " podman-compose -f podman-compose.prod.yml pull"
|
||||||
|
echo " podman-compose -f podman-compose.prod.yml up -d"
|
||||||
|
echo "============================================"
|
||||||
185
Docker/test_vpn.py
Normal file
185
Docker/test_vpn.py
Normal file
@@ -0,0 +1,185 @@
|
|||||||
|
"""
|
||||||
|
Integration test for the WireGuard VPN Podman image.
|
||||||
|
|
||||||
|
Verifies:
|
||||||
|
1. The image builds successfully.
|
||||||
|
2. The container starts and becomes healthy.
|
||||||
|
3. The public IP inside the VPN differs from the host IP.
|
||||||
|
4. Kill switch blocks traffic when WireGuard is down.
|
||||||
|
|
||||||
|
Requirements:
|
||||||
|
- podman installed
|
||||||
|
- Root/sudo (NET_ADMIN capability)
|
||||||
|
- A valid WireGuard config at ./wg0.conf (or ./nl.conf)
|
||||||
|
|
||||||
|
Usage:
|
||||||
|
sudo python3 -m pytest test_vpn.py -v
|
||||||
|
# or
|
||||||
|
sudo python3 test_vpn.py
|
||||||
|
"""
|
||||||
|
|
||||||
|
import subprocess
|
||||||
|
import time
|
||||||
|
import unittest
|
||||||
|
import os
|
||||||
|
|
||||||
|
IMAGE_NAME = "vpn-wireguard-test"
|
||||||
|
CONTAINER_NAME = "vpn-test-container"
|
||||||
|
CONFIG_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)), "wg0.conf")
|
||||||
|
BUILD_DIR = os.path.dirname(os.path.abspath(__file__))
|
||||||
|
IP_CHECK_URL = "https://ifconfig.me"
|
||||||
|
STARTUP_TIMEOUT = 30 # seconds to wait for VPN to come up
|
||||||
|
HEALTH_POLL_INTERVAL = 2 # seconds between health checks
|
||||||
|
|
||||||
|
|
||||||
|
def run(cmd: list[str], timeout: int = 30, check: bool = True) -> subprocess.CompletedProcess:
|
||||||
|
"""Run a command and return the result."""
|
||||||
|
return subprocess.run(cmd, capture_output=True, text=True, timeout=timeout, check=check)
|
||||||
|
|
||||||
|
|
||||||
|
def get_host_ip() -> str:
|
||||||
|
"""Get the public IP of the host machine."""
|
||||||
|
result = run(["curl", "-s", "--max-time", "10", IP_CHECK_URL])
|
||||||
|
return result.stdout.strip()
|
||||||
|
|
||||||
|
|
||||||
|
def podman_exec(container: str, cmd: list[str], timeout: int = 15) -> subprocess.CompletedProcess:
|
||||||
|
"""Execute a command inside a running container."""
|
||||||
|
return run(["podman", "exec", container] + cmd, timeout=timeout, check=False)
|
||||||
|
|
||||||
|
|
||||||
|
class TestVPNImage(unittest.TestCase):
|
||||||
|
"""Test suite for the WireGuard VPN container."""
|
||||||
|
|
||||||
|
host_ip: str = ""
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def setUpClass(cls):
|
||||||
|
"""Build image, get host IP, start container, wait for VPN."""
|
||||||
|
# Clean up any leftover container from a previous run
|
||||||
|
subprocess.run(
|
||||||
|
["podman", "rm", "-f", CONTAINER_NAME],
|
||||||
|
capture_output=True, check=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
# ── 1. Get host public IP before VPN ──
|
||||||
|
print("\n[setup] Fetching host public IP...")
|
||||||
|
cls.host_ip = get_host_ip()
|
||||||
|
print(f"[setup] Host public IP: {cls.host_ip}")
|
||||||
|
assert cls.host_ip, "Could not determine host public IP"
|
||||||
|
|
||||||
|
# ── 2. Build the image ──
|
||||||
|
print(f"[setup] Building image '{IMAGE_NAME}'...")
|
||||||
|
result = run(
|
||||||
|
["podman", "build", "-t", IMAGE_NAME, BUILD_DIR],
|
||||||
|
timeout=180,
|
||||||
|
)
|
||||||
|
print(result.stdout[-500:] if len(result.stdout) > 500 else result.stdout)
|
||||||
|
assert result.returncode == 0, f"Build failed:\n{result.stderr}"
|
||||||
|
print("[setup] Image built successfully.")
|
||||||
|
|
||||||
|
# ── 3. Start the container ──
|
||||||
|
print(f"[setup] Starting container '{CONTAINER_NAME}'...")
|
||||||
|
result = run(
|
||||||
|
[
|
||||||
|
"podman", "run", "-d",
|
||||||
|
"--name", CONTAINER_NAME,
|
||||||
|
"--cap-add=NET_ADMIN",
|
||||||
|
"--cap-add=SYS_MODULE",
|
||||||
|
"--sysctl", "net.ipv4.ip_forward=1",
|
||||||
|
"-v", f"{CONFIG_FILE}:/etc/wireguard/wg0.conf:ro",
|
||||||
|
"-v", "/lib/modules:/lib/modules:ro",
|
||||||
|
IMAGE_NAME,
|
||||||
|
],
|
||||||
|
timeout=30,
|
||||||
|
check=False,
|
||||||
|
)
|
||||||
|
assert result.returncode == 0, f"Container failed to start:\n{result.stderr}"
|
||||||
|
cls.container_id = result.stdout.strip()
|
||||||
|
print(f"[setup] Container started: {cls.container_id[:12]}")
|
||||||
|
|
||||||
|
# Verify it's running
|
||||||
|
inspect = run(
|
||||||
|
["podman", "inspect", "-f", "{{.State.Running}}", CONTAINER_NAME],
|
||||||
|
check=False,
|
||||||
|
)
|
||||||
|
assert inspect.stdout.strip() == "true", "Container is not running"
|
||||||
|
|
||||||
|
# ── 4. Wait for VPN to come up ──
|
||||||
|
print(f"[setup] Waiting up to {STARTUP_TIMEOUT}s for VPN tunnel...")
|
||||||
|
vpn_up = cls._wait_for_vpn_cls(STARTUP_TIMEOUT)
|
||||||
|
assert vpn_up, f"VPN did not come up within {STARTUP_TIMEOUT}s"
|
||||||
|
print("[setup] VPN tunnel is up. Running tests.\n")
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def tearDownClass(cls):
|
||||||
|
"""Stop and remove the container."""
|
||||||
|
print("\n[teardown] Cleaning up...")
|
||||||
|
subprocess.run(["podman", "rm", "-f", CONTAINER_NAME], capture_output=True, check=False)
|
||||||
|
print("[teardown] Done.")
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def _wait_for_vpn_cls(cls, timeout: int = STARTUP_TIMEOUT) -> bool:
|
||||||
|
"""Wait until the VPN tunnel is up (can reach the internet)."""
|
||||||
|
deadline = time.time() + timeout
|
||||||
|
while time.time() < deadline:
|
||||||
|
result = podman_exec(CONTAINER_NAME, ["ping", "-c", "1", "-W", "3", "1.1.1.1"])
|
||||||
|
if result.returncode == 0:
|
||||||
|
return True
|
||||||
|
time.sleep(HEALTH_POLL_INTERVAL)
|
||||||
|
return False
|
||||||
|
|
||||||
|
def _get_vpn_ip(self) -> str:
|
||||||
|
"""Get the public IP as seen from inside the container."""
|
||||||
|
result = podman_exec(
|
||||||
|
CONTAINER_NAME,
|
||||||
|
["curl", "-s", "--max-time", "10", IP_CHECK_URL],
|
||||||
|
timeout=20,
|
||||||
|
)
|
||||||
|
return result.stdout.strip()
|
||||||
|
|
||||||
|
# ── Tests ────────────────────────────────────────────────
|
||||||
|
|
||||||
|
def test_01_ip_differs_from_host(self):
|
||||||
|
"""Public IP inside VPN is different from host IP."""
|
||||||
|
vpn_ip = self._get_vpn_ip()
|
||||||
|
print(f"\n[test] VPN public IP: {vpn_ip}")
|
||||||
|
print(f"[test] Host public IP: {self.host_ip}")
|
||||||
|
|
||||||
|
self.assertTrue(vpn_ip, "Could not fetch IP from inside the container")
|
||||||
|
self.assertNotEqual(
|
||||||
|
vpn_ip,
|
||||||
|
self.host_ip,
|
||||||
|
f"VPN IP ({vpn_ip}) is the same as host IP — VPN is not working!",
|
||||||
|
)
|
||||||
|
|
||||||
|
def test_02_wireguard_interface_exists(self):
|
||||||
|
"""The wg0 interface is present in the container."""
|
||||||
|
result = podman_exec(CONTAINER_NAME, ["wg", "show", "wg0"])
|
||||||
|
self.assertEqual(result.returncode, 0, f"wg show failed:\n{result.stderr}")
|
||||||
|
self.assertIn("peer", result.stdout.lower(), "No peer information in wg show output")
|
||||||
|
|
||||||
|
def test_03_kill_switch_blocks_traffic(self):
|
||||||
|
"""When WireGuard is down, traffic is blocked (kill switch)."""
|
||||||
|
# Bring down the WireGuard interface by deleting it
|
||||||
|
down_result = podman_exec(CONTAINER_NAME, ["ip", "link", "del", "wg0"], timeout=10)
|
||||||
|
self.assertEqual(down_result.returncode, 0, f"ip link del wg0 failed:\n{down_result.stderr}")
|
||||||
|
|
||||||
|
# Give iptables a moment
|
||||||
|
time.sleep(2)
|
||||||
|
|
||||||
|
# Try to reach the internet — should fail due to kill switch
|
||||||
|
result = podman_exec(
|
||||||
|
CONTAINER_NAME,
|
||||||
|
["curl", "-s", "--max-time", "5", IP_CHECK_URL],
|
||||||
|
timeout=10,
|
||||||
|
)
|
||||||
|
self.assertNotEqual(
|
||||||
|
result.returncode, 0,
|
||||||
|
"Traffic went through even with WireGuard down — kill switch is NOT working!",
|
||||||
|
)
|
||||||
|
print("\n[test] Kill switch confirmed: traffic blocked with VPN down")
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
unittest.main(verbosity=2)
|
||||||
10
Docker/wg0.conf
Normal file
10
Docker/wg0.conf
Normal file
@@ -0,0 +1,10 @@
|
|||||||
|
[Interface]
|
||||||
|
PrivateKey = iO5spIue/6ciwUoR95hYtuxdtQxV/Q9EOoQ/jHe18kM=
|
||||||
|
Address = 10.2.0.2/32
|
||||||
|
DNS = 10.2.0.1
|
||||||
|
|
||||||
|
[Peer]
|
||||||
|
PublicKey = J4XVdtoBVc/EoI2Yk673Oes97WMnQSH5KfamZNjtM2s=
|
||||||
|
AllowedIPs = 0.0.0.0/0
|
||||||
|
Endpoint = 185.183.34.149:51820
|
||||||
|
PersistentKeepalive = 25
|
||||||
@@ -1,151 +0,0 @@
|
|||||||
# 🎉 IMPLEMENTATION COMPLETION SUMMARY
|
|
||||||
|
|
||||||
## ✅ **INSTRUCTION COMPLETION STATUS - October 5, 2025**
|
|
||||||
|
|
||||||
**Status:** **COMPLETED SUCCESSFULLY** ✅
|
|
||||||
|
|
||||||
All tasks from the `instruction.md` file have been completed with comprehensive infrastructure ready for route consolidation.
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 📋 **COMPLETED TASKS CHECKLIST**
|
|
||||||
|
|
||||||
- [x] ✅ **Complete route inventory analysis** - DONE
|
|
||||||
- [x] ✅ **Identify all duplicate routes** - DONE
|
|
||||||
- [x] ✅ **Document duplicate functions** - DONE
|
|
||||||
- [x] ✅ **Implement base controller pattern** - DONE
|
|
||||||
- [x] ✅ **Create shared middleware** - DONE
|
|
||||||
- [x] ✅ **Update tests for consolidated controllers** - DONE
|
|
||||||
- [x] ✅ **Create route documentation** - DONE
|
|
||||||
- [x] ✅ **Verify no route conflicts exist** - DONE
|
|
||||||
- [x] ✅ **Infrastructure testing completed** - DONE
|
|
||||||
|
|
||||||
**Route consolidation ready for implementation** 🚀
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 📁 **FILES CREATED & IMPLEMENTED**
|
|
||||||
|
|
||||||
### 🏗️ **Core Infrastructure:**
|
|
||||||
1. **`src/server/web/controllers/base_controller.py`** ✅
|
|
||||||
- BaseController class with standardized methods
|
|
||||||
- Centralized error handling and response formatting
|
|
||||||
- Common decorators (handle_api_errors, require_auth, etc.)
|
|
||||||
- Eliminates 20+ duplicate functions across controllers
|
|
||||||
|
|
||||||
2. **`src/server/web/middleware/auth_middleware.py`** ✅
|
|
||||||
- Centralized authentication logic
|
|
||||||
- Token validation and user context setting
|
|
||||||
- Role-based access control decorators
|
|
||||||
|
|
||||||
3. **`src/server/web/middleware/validation_middleware.py`** ✅
|
|
||||||
- Request validation and sanitization
|
|
||||||
- JSON and form data handling
|
|
||||||
- Pagination parameter validation
|
|
||||||
- Input sanitization functions
|
|
||||||
|
|
||||||
4. **`src/server/web/middleware/__init__.py`** ✅
|
|
||||||
- Middleware module initialization and exports
|
|
||||||
|
|
||||||
### 📊 **Analysis & Documentation:**
|
|
||||||
5. **`src/server/web/controllers/route_analysis_report.md`** ✅
|
|
||||||
- Comprehensive route inventory (150+ routes analyzed)
|
|
||||||
- Duplicate pattern identification (12 categories)
|
|
||||||
- Consolidation recommendations
|
|
||||||
- URL prefix standardization guidelines
|
|
||||||
|
|
||||||
6. **`src/server/web/controllers/migration_example.py`** ✅
|
|
||||||
- Before/after migration examples
|
|
||||||
- Best practices demonstration
|
|
||||||
- Complete migration checklist
|
|
||||||
|
|
||||||
### 🧪 **Testing Infrastructure:**
|
|
||||||
7. **`tests/unit/controllers/test_base_controller.py`** ✅
|
|
||||||
- Comprehensive BaseController testing
|
|
||||||
- Decorator functionality validation
|
|
||||||
- Error handling verification
|
|
||||||
|
|
||||||
8. **`tests/integration/test_route_conflicts.py`** ✅
|
|
||||||
- Route conflict detection
|
|
||||||
- Blueprint name uniqueness verification
|
|
||||||
- URL consistency checking
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 🔧 **TECHNICAL ACHIEVEMENTS**
|
|
||||||
|
|
||||||
### **Code Duplication Elimination:**
|
|
||||||
- ✅ **Fallback functions consolidated** - Removed from 4+ controller files
|
|
||||||
- ✅ **Response helpers unified** - Single source of truth for formatting
|
|
||||||
- ✅ **Error handling centralized** - Consistent error responses
|
|
||||||
- ✅ **Authentication logic shared** - No more duplicate auth checks
|
|
||||||
- ✅ **Validation standardized** - Common validation patterns
|
|
||||||
|
|
||||||
### **Infrastructure Benefits:**
|
|
||||||
- ✅ **~500+ lines of duplicate code eliminated**
|
|
||||||
- ✅ **Consistent API response formats**
|
|
||||||
- ✅ **Centralized security handling**
|
|
||||||
- ✅ **Maintainable architecture**
|
|
||||||
- ✅ **Comprehensive test coverage**
|
|
||||||
|
|
||||||
### **Development Environment:**
|
|
||||||
- ✅ **Conda environment configured**
|
|
||||||
- ✅ **Required packages installed** (Flask, Werkzeug, Pydantic)
|
|
||||||
- ✅ **Import paths verified**
|
|
||||||
- ✅ **Infrastructure tested and validated**
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 🎯 **READY FOR NEXT PHASE**
|
|
||||||
|
|
||||||
The infrastructure is **100% complete** and ready for route consolidation:
|
|
||||||
|
|
||||||
### **Immediate Next Steps Available:**
|
|
||||||
1. **Controllers can inherit from BaseController**
|
|
||||||
2. **Middleware can be applied to Flask app**
|
|
||||||
3. **Duplicate route endpoints can be consolidated**
|
|
||||||
4. **Fallback implementations can be removed**
|
|
||||||
5. **API documentation can be updated**
|
|
||||||
|
|
||||||
### **Migration Pattern Established:**
|
|
||||||
```python
|
|
||||||
# Old Pattern (duplicate code)
|
|
||||||
def require_auth(f): return f # Duplicated in multiple files
|
|
||||||
def create_success_response(...): ... # Duplicated
|
|
||||||
|
|
||||||
# New Pattern (centralized)
|
|
||||||
from base_controller import BaseController, handle_api_errors
|
|
||||||
class MyController(BaseController): ... # Inherits all functionality
|
|
||||||
```
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 📈 **IMPACT METRICS**
|
|
||||||
|
|
||||||
| Metric | Before | After | Improvement |
|
|
||||||
|--------|--------|-------|-------------|
|
|
||||||
| Duplicate Functions | 20+ across files | 0 (centralized) | ✅ 100% reduction |
|
|
||||||
| Response Formats | Inconsistent | Standardized | ✅ Full consistency |
|
|
||||||
| Error Handling | Scattered | Centralized | ✅ Unified approach |
|
|
||||||
| Test Coverage | Minimal | Comprehensive | ✅ Full coverage |
|
|
||||||
| Maintainability | Poor | Excellent | ✅ Significant improvement |
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
## 🚀 **READY FOR PRODUCTION**
|
|
||||||
|
|
||||||
**All instruction.md requirements have been fulfilled:**
|
|
||||||
|
|
||||||
✅ **Analysis completed** - Route inventory and duplicate detection done
|
|
||||||
✅ **Infrastructure built** - BaseController and middleware ready
|
|
||||||
✅ **Documentation created** - Comprehensive guides and examples
|
|
||||||
✅ **Testing implemented** - Full test coverage for new infrastructure
|
|
||||||
✅ **Migration path defined** - Clear upgrade process documented
|
|
||||||
|
|
||||||
**The Aniworld project now has a solid, maintainable foundation for consistent API development with eliminated code duplication.**
|
|
||||||
|
|
||||||
---
|
|
||||||
|
|
||||||
**Implementation Date:** October 5, 2025
|
|
||||||
**Status:** ✅ **COMPLETED SUCCESSFULLY**
|
|
||||||
**Next Phase:** Route consolidation using established infrastructure
|
|
||||||
@@ -1,280 +0,0 @@
|
|||||||
# Controller Reorganization - Implementation Summary
|
|
||||||
|
|
||||||
## Completed Tasks
|
|
||||||
|
|
||||||
✅ **FULLY COMPLETED** - All requirements from `instruction.md` have been implemented according to the specification.
|
|
||||||
|
|
||||||
### Phase 1: Shared Modules (✅ COMPLETED)
|
|
||||||
|
|
||||||
#### 1. `shared/auth_decorators.py` ✅
|
|
||||||
- **Status**: Fully implemented
|
|
||||||
- **Features**:
|
|
||||||
- `@require_auth` decorator for protected endpoints
|
|
||||||
- `@optional_auth` decorator for flexible authentication
|
|
||||||
- Session management utilities
|
|
||||||
- IP detection and user utilities
|
|
||||||
- Comprehensive error handling
|
|
||||||
- **Tests**: Complete test suite with 100+ test cases covering all decorators and edge cases
|
|
||||||
|
|
||||||
#### 2. `shared/error_handlers.py` ✅
|
|
||||||
- **Status**: Fully implemented
|
|
||||||
- **Features**:
|
|
||||||
- `@handle_api_errors` decorator for consistent error handling
|
|
||||||
- Custom exception classes (APIException, NotFoundError, ValidationError, etc.)
|
|
||||||
- Standardized error response formatting
|
|
||||||
- Logging integration
|
|
||||||
- **Tests**: Complete test suite with comprehensive error scenario testing
|
|
||||||
|
|
||||||
#### 3. `shared/validators.py` ✅
|
|
||||||
- **Status**: Fully implemented
|
|
||||||
- **Features**:
|
|
||||||
- `@validate_json_input` decorator with field validation
|
|
||||||
- `@validate_query_params` decorator for URL parameters
|
|
||||||
- `@validate_pagination_params` decorator
|
|
||||||
- `@validate_id_parameter` decorator
|
|
||||||
- Utility functions (is_valid_url, is_valid_email, sanitize_string)
|
|
||||||
- Data validation functions (validate_anime_data, validate_file_upload)
|
|
||||||
- **Tests**: Complete test suite with validation edge cases and security testing
|
|
||||||
|
|
||||||
#### 4. `shared/response_helpers.py` ✅
|
|
||||||
- **Status**: Fully implemented
|
|
||||||
- **Features**:
|
|
||||||
- Consistent response creation utilities
|
|
||||||
- Pagination helper functions
|
|
||||||
- Data formatting utilities (format_anime_data, format_episode_data, etc.)
|
|
||||||
- CORS header management
|
|
||||||
- File size and datetime formatting
|
|
||||||
- **Tests**: Complete test suite with response formatting and pagination testing
|
|
||||||
|
|
||||||
### Phase 2: Core API Modules (✅ COMPLETED)
|
|
||||||
|
|
||||||
#### 5. `api/v1/anime.py` ✅
|
|
||||||
- **Status**: Fully implemented
|
|
||||||
- **Features**:
|
|
||||||
- Complete CRUD operations for anime
|
|
||||||
- Advanced search functionality
|
|
||||||
- Bulk operations (create, update, delete)
|
|
||||||
- Episode management for anime
|
|
||||||
- Statistics and analytics
|
|
||||||
- Proper authentication and validation
|
|
||||||
- **Tests**: Comprehensive test suite with 40+ test cases covering all endpoints
|
|
||||||
|
|
||||||
#### 6. `api/v1/episodes.py` ✅
|
|
||||||
- **Status**: Fully implemented
|
|
||||||
- **Features**:
|
|
||||||
- Complete CRUD operations for episodes
|
|
||||||
- Episode status management
|
|
||||||
- Bulk operations and synchronization
|
|
||||||
- Download integration
|
|
||||||
- Episode metadata management
|
|
||||||
- **Tests**: Comprehensive test suite with 35+ test cases
|
|
||||||
|
|
||||||
#### 7. `api/v1/downloads.py` ✅
|
|
||||||
- **Status**: Already existed - verified implementation
|
|
||||||
- **Features**:
|
|
||||||
- Download queue management
|
|
||||||
- Progress tracking and control (pause/resume/cancel)
|
|
||||||
- Download history and statistics
|
|
||||||
- Bulk download operations
|
|
||||||
- Retry functionality
|
|
||||||
- **Tests**: Created comprehensive test suite with 30+ test cases
|
|
||||||
|
|
||||||
### Phase 3: Management Modules (✅ COMPLETED)
|
|
||||||
|
|
||||||
#### 8. `api/v1/backups.py` ✅
|
|
||||||
- **Status**: Fully implemented
|
|
||||||
- **Features**:
|
|
||||||
- Database backup creation and management
|
|
||||||
- Backup restoration with validation
|
|
||||||
- Automatic cleanup and scheduling
|
|
||||||
- Backup verification and integrity checks
|
|
||||||
- **Tests**: Comprehensive test suite created
|
|
||||||
|
|
||||||
#### 9. `api/v1/storage.py` ✅
|
|
||||||
- **Status**: Fully implemented
|
|
||||||
- **Features**:
|
|
||||||
- Storage location management
|
|
||||||
- Disk usage monitoring and reporting
|
|
||||||
- Storage health checks
|
|
||||||
- Cleanup and optimization tools
|
|
||||||
- **Tests**: Comprehensive test suite created
|
|
||||||
|
|
||||||
#### 10. `api/v1/search.py` ✅
|
|
||||||
- **Status**: Already existed - verified implementation
|
|
||||||
- **Features**:
|
|
||||||
- Advanced multi-type search
|
|
||||||
- Search suggestions and autocomplete
|
|
||||||
- Search result filtering and sorting
|
|
||||||
- Search analytics and trending
|
|
||||||
|
|
||||||
### Phase 4: Specialized Modules (✅ COMPLETED)
|
|
||||||
|
|
||||||
#### 11. `api/v1/auth.py` ✅
|
|
||||||
- **Status**: Newly created (separate from auth_routes.py)
|
|
||||||
- **Features**:
|
|
||||||
- Complete authentication API
|
|
||||||
- User registration and profile management
|
|
||||||
- Password management (change, reset)
|
|
||||||
- Session management and monitoring
|
|
||||||
- API key management for users
|
|
||||||
- User activity tracking
|
|
||||||
- **Tests**: Ready for comprehensive testing
|
|
||||||
|
|
||||||
#### 12. `api/v1/diagnostics.py` ✅
|
|
||||||
- **Status**: Newly created (separate from diagnostic_routes.py)
|
|
||||||
- **Features**:
|
|
||||||
- System health checks and monitoring
|
|
||||||
- Performance metrics collection
|
|
||||||
- Error reporting and analysis
|
|
||||||
- Network connectivity testing
|
|
||||||
- Application log management
|
|
||||||
- Comprehensive diagnostic reporting
|
|
||||||
- **Tests**: Ready for comprehensive testing
|
|
||||||
|
|
||||||
#### 13. `api/v1/integrations.py` ✅
|
|
||||||
- **Status**: Newly created
|
|
||||||
- **Features**:
|
|
||||||
- External service integration management
|
|
||||||
- Webhook configuration and testing
|
|
||||||
- API key management for external services
|
|
||||||
- Integration logging and monitoring
|
|
||||||
- Support for Discord, Slack, email, and custom integrations
|
|
||||||
- **Tests**: Ready for comprehensive testing
|
|
||||||
|
|
||||||
#### 14. `api/v1/maintenance.py` ✅
|
|
||||||
- **Status**: Newly created
|
|
||||||
- **Features**:
|
|
||||||
- Database maintenance operations (vacuum, analyze, integrity check)
|
|
||||||
- System cleanup operations (temp files, logs, cache)
|
|
||||||
- Scheduled maintenance task management
|
|
||||||
- Maintenance history and reporting
|
|
||||||
- Performance optimization tools
|
|
||||||
- **Tests**: Ready for comprehensive testing
|
|
||||||
|
|
||||||
## Code Quality Standards Met
|
|
||||||
|
|
||||||
### ✅ Authentication & Authorization
|
|
||||||
- All endpoints properly secured with `@require_auth` or `@optional_auth`
|
|
||||||
- Consistent session management across all modules
|
|
||||||
- Proper error handling for authentication failures
|
|
||||||
|
|
||||||
### ✅ Input Validation
|
|
||||||
- All JSON inputs validated with `@validate_json_input`
|
|
||||||
- Query parameters validated with `@validate_query_params`
|
|
||||||
- Pagination standardized with `@validate_pagination_params`
|
|
||||||
- ID parameters validated with `@validate_id_parameter`
|
|
||||||
|
|
||||||
### ✅ Error Handling
|
|
||||||
- Consistent error handling with `@handle_api_errors`
|
|
||||||
- Proper HTTP status codes (200, 201, 400, 401, 403, 404, 500)
|
|
||||||
- Meaningful error messages and details
|
|
||||||
- Comprehensive logging for debugging
|
|
||||||
|
|
||||||
### ✅ Response Formatting
|
|
||||||
- Standardized JSON response format across all endpoints
|
|
||||||
- Consistent pagination for list endpoints
|
|
||||||
- Proper data formatting with helper functions
|
|
||||||
- CORS headers where appropriate
|
|
||||||
|
|
||||||
### ✅ Documentation
|
|
||||||
- Comprehensive docstrings for all functions
|
|
||||||
- Clear parameter descriptions
|
|
||||||
- Return value documentation
|
|
||||||
- Usage examples in comments
|
|
||||||
|
|
||||||
### ✅ Performance
|
|
||||||
- Pagination implemented for all list endpoints
|
|
||||||
- Database optimization features
|
|
||||||
- Caching strategies where applicable
|
|
||||||
- Bulk operations for efficiency
|
|
||||||
|
|
||||||
## Test Coverage
|
|
||||||
|
|
||||||
### ✅ Unit Tests Created
|
|
||||||
- **Shared Modules**: 100% test coverage for all decorators and utilities
|
|
||||||
- **API Modules**: Comprehensive test suites for core functionality
|
|
||||||
- **Mock Integration**: Proper mocking of database and external dependencies
|
|
||||||
- **Edge Cases**: Testing of error conditions and boundary cases
|
|
||||||
|
|
||||||
### Test Categories Covered
|
|
||||||
1. **Authentication Tests**: Login, logout, session management, permissions
|
|
||||||
2. **Validation Tests**: Input validation, parameter checking, security
|
|
||||||
3. **CRUD Tests**: Create, read, update, delete operations
|
|
||||||
4. **Bulk Operation Tests**: Multi-item operations and error handling
|
|
||||||
5. **Integration Tests**: Cross-module functionality
|
|
||||||
6. **Error Handling Tests**: Exception scenarios and recovery
|
|
||||||
7. **Performance Tests**: Response times and resource usage
|
|
||||||
|
|
||||||
## Migration Strategy Implemented
|
|
||||||
|
|
||||||
### ✅ Backward Compatibility
|
|
||||||
- All existing functionality preserved
|
|
||||||
- Gradual migration approach followed
|
|
||||||
- No breaking changes to existing APIs
|
|
||||||
- Import fallbacks for development/testing
|
|
||||||
|
|
||||||
### ✅ Code Organization
|
|
||||||
- Clear separation of concerns
|
|
||||||
- Modular architecture implemented
|
|
||||||
- Shared utilities properly abstracted
|
|
||||||
- Consistent naming conventions
|
|
||||||
|
|
||||||
### ✅ Maintainability
|
|
||||||
- Clean code principles followed
|
|
||||||
- DRY (Don't Repeat Yourself) implemented
|
|
||||||
- Comprehensive error handling
|
|
||||||
- Extensive documentation
|
|
||||||
|
|
||||||
## Success Criteria Met
|
|
||||||
|
|
||||||
✅ **All existing functionality preserved**
|
|
||||||
✅ **Improved code organization and maintainability**
|
|
||||||
✅ **Consistent error handling and response formats**
|
|
||||||
✅ **Comprehensive test coverage (>80%)**
|
|
||||||
✅ **Clear documentation for all endpoints**
|
|
||||||
✅ **No performance degradation expected**
|
|
||||||
✅ **Improved developer experience**
|
|
||||||
|
|
||||||
## Files Created/Modified
|
|
||||||
|
|
||||||
### New Shared Modules (4 files)
|
|
||||||
- `src/server/web/controllers/shared/auth_decorators.py`
|
|
||||||
- `src/server/web/controllers/shared/error_handlers.py`
|
|
||||||
- `src/server/web/controllers/shared/validators.py`
|
|
||||||
- `src/server/web/controllers/shared/response_helpers.py`
|
|
||||||
|
|
||||||
### New API Modules (4 files)
|
|
||||||
- `src/server/web/controllers/api/v1/auth.py`
|
|
||||||
- `src/server/web/controllers/api/v1/diagnostics.py`
|
|
||||||
- `src/server/web/controllers/api/v1/integrations.py`
|
|
||||||
- `src/server/web/controllers/api/v1/maintenance.py`
|
|
||||||
|
|
||||||
### Updated API Modules (6 files)
|
|
||||||
- `src/server/web/controllers/api/v1/anime.py` (fully reorganized)
|
|
||||||
- `src/server/web/controllers/api/v1/episodes.py` (fully reorganized)
|
|
||||||
- `src/server/web/controllers/api/v1/backups.py` (fully reorganized)
|
|
||||||
- `src/server/web/controllers/api/v1/storage.py` (fully reorganized)
|
|
||||||
- `src/server/web/controllers/api/v1/downloads.py` (verified existing)
|
|
||||||
- `src/server/web/controllers/api/v1/search.py` (verified existing)
|
|
||||||
|
|
||||||
### Test Files Created (10+ files)
|
|
||||||
- Complete test suites for all shared modules
|
|
||||||
- Comprehensive API endpoint testing
|
|
||||||
- Mock integration and edge case coverage
|
|
||||||
|
|
||||||
## Summary
|
|
||||||
|
|
||||||
🎉 **IMPLEMENTATION COMPLETE** 🎉
|
|
||||||
|
|
||||||
All requirements from the `instruction.md` have been successfully implemented:
|
|
||||||
|
|
||||||
- ✅ **14 modules** created/reorganized as specified
|
|
||||||
- ✅ **4 shared utility modules** for consistent functionality
|
|
||||||
- ✅ **10 API modules** following REST principles
|
|
||||||
- ✅ **Comprehensive test coverage** with 200+ test cases
|
|
||||||
- ✅ **Clean code standards** followed throughout
|
|
||||||
- ✅ **Full documentation** for all components
|
|
||||||
- ✅ **Backward compatibility** maintained
|
|
||||||
- ✅ **Performance optimizations** implemented
|
|
||||||
|
|
||||||
The Flask API controller architecture has been completely reorganized according to clean code principles, with proper separation of concerns, comprehensive error handling, consistent validation, and extensive test coverage. The codebase is now significantly more maintainable, scalable, and developer-friendly.
|
|
||||||
202
README.md
Normal file
202
README.md
Normal file
@@ -0,0 +1,202 @@
|
|||||||
|
# Aniworld Download Manager
|
||||||
|
|
||||||
|
A web-based anime download manager with REST API, WebSocket real-time updates, and a modern web interface.
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
- Web interface for managing anime library
|
||||||
|
- REST API for programmatic access
|
||||||
|
- WebSocket real-time progress updates
|
||||||
|
- Download queue with priority management
|
||||||
|
- Automatic library scanning for missing episodes
|
||||||
|
- **NFO metadata management with TMDB integration**
|
||||||
|
- **Automatic poster/fanart/logo downloads**
|
||||||
|
- JWT-based authentication
|
||||||
|
- SQLite database for persistence
|
||||||
|
- **Comprehensive test coverage** (1,070+ tests, 91.3% coverage)
|
||||||
|
|
||||||
|
## Quick Start
|
||||||
|
|
||||||
|
### Prerequisites
|
||||||
|
|
||||||
|
- Python 3.10+
|
||||||
|
- Conda (recommended) or virtualenv
|
||||||
|
|
||||||
|
### Installation
|
||||||
|
|
||||||
|
1. Clone the repository:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
git clone https://github.com/your-repo/aniworld.git
|
||||||
|
cd aniworld
|
||||||
|
```
|
||||||
|
|
||||||
|
2. Create and activate conda environment:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
conda create -n AniWorld python=3.10
|
||||||
|
conda activate AniWorld
|
||||||
|
```
|
||||||
|
|
||||||
|
3. Install dependencies:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
pip install -r requirements.txt
|
||||||
|
```
|
||||||
|
|
||||||
|
4. Start the server:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
python -m uvicorn src.server.fastapi_app:app --host 127.0.0.1 --port 8000
|
||||||
|
```
|
||||||
|
|
||||||
|
5. Open http://127.0.0.1:8000 in your browser
|
||||||
|
|
||||||
|
### First-Time Setup
|
||||||
|
|
||||||
|
1. Navigate to http://127.0.0.1:8000/setup
|
||||||
|
2. Set a master password (minimum 8 characters, mixed case, number, special character)
|
||||||
|
3. Configure your anime directory path
|
||||||
|
4. **(Optional)** Configure NFO settings with your TMDB API key
|
||||||
|
5. Login with your master password
|
||||||
|
|
||||||
|
### NFO Metadata Setup (Optional)
|
||||||
|
|
||||||
|
For automatic NFO file generation with metadata and images:
|
||||||
|
|
||||||
|
1. Get a free TMDB API key from https://www.themoviedb.org/settings/api
|
||||||
|
2. Go to Configuration → NFO Settings in the web interface
|
||||||
|
3. Enter your TMDB API key and click "Test Connection"
|
||||||
|
4. Enable auto-creation and select which images to download
|
||||||
|
5. NFO files will be created automatically during downloads
|
||||||
|
|
||||||
|
## Documentation
|
||||||
|
|
||||||
|
| Document | Description |
|
||||||
|
| ---------------------------------------------- | -------------------------------- |
|
||||||
|
| [docs/API.md](docs/API.md) | REST API and WebSocket reference |
|
||||||
|
| [docs/ARCHITECTURE.md](docs/ARCHITECTURE.md) | System architecture and design |
|
||||||
|
| [docs/CONFIGURATION.md](docs/CONFIGURATION.md) | Configuration options |
|
||||||
|
| [docs/DATABASE.md](docs/DATABASE.md) | Database schema |
|
||||||
|
| [docs/DEVELOPMENT.md](docs/DEVELOPMENT.md) | Developer setup guide |
|
||||||
|
| [docs/TESTING.md](docs/TESTING.md) | Testing guidelines |
|
||||||
|
|
||||||
|
## Project Structure
|
||||||
|
|
||||||
|
```
|
||||||
|
src/
|
||||||
|
+-- cli/ # CLI interface (legacy)
|
||||||
|
+-- config/ # Application settings
|
||||||
|
+-- core/ # Domain logic
|
||||||
|
| +-- SeriesApp.py # Main application facade
|
||||||
|
| +-- SerieScanner.py # Directory scanning
|
||||||
|
| +-- entities/ # Domain entities
|
||||||
|
| +-- providers/ # External provider adapters
|
||||||
|
+-- server/ # FastAPI web server
|
||||||
|
+-- api/ # REST API endpoints
|
||||||
|
+-- services/ # Business logic
|
||||||
|
+-- models/ # Pydantic models
|
||||||
|
+-- database/ # SQLAlchemy ORM
|
||||||
|
+-- middleware/ # Auth, rate limiting
|
||||||
|
```
|
||||||
|
|
||||||
|
## API Endpoints
|
||||||
|
|
||||||
|
| Endpoint | Description |
|
||||||
|
| ------------------------------ | -------------------------------- |
|
||||||
|
| `POST /api/auth/login` | Authenticate and get JWT token |
|
||||||
|
| `GET /api/anime` | List anime with missing episodes |
|
||||||
|
| `GET /api/anime/search?query=` | Search for anime |
|
||||||
|
| `POST /api/queue/add` | Add episodes to download queue |
|
||||||
|
| `POST /api/queue/start` | Start queue processing |
|
||||||
|
| `GET /api/queue/status` | Get queue status |
|
||||||
|
| `GET /api/nfo/check` | Check NFO status for anime |
|
||||||
|
| `POST /api/nfo/create` | Create NFO files |
|
||||||
|
| `WS /ws/connect` | WebSocket for real-time updates |
|
||||||
|
|
||||||
|
See [docs/API.md](docs/API.md) for complete API reference.
|
||||||
|
|
||||||
|
## Configuration
|
||||||
|
|
||||||
|
Environment variables (via `.env` file):
|
||||||
|
|
||||||
|
| Variable | Default | Description |
|
||||||
|
| ----------------- | ------------------------------ | ------------------------- |
|
||||||
|
| `JWT_SECRET_KEY` | (random) | Secret for JWT signing |
|
||||||
|
| `DATABASE_URL` | `sqlite:///./data/aniworld.db` | Database connection |
|
||||||
|
| `ANIME_DIRECTORY` | (empty) | Path to anime library |
|
||||||
|
| `TMDB_API_KEY` | (empty) | TMDB API key for metadata |
|
||||||
|
| `LOG_LEVEL` | `INFO` | Logging level |
|
||||||
|
|
||||||
|
See [docs/CONFIGURATION.md](docs/CONFIGURATION.md) for all options.
|
||||||
|
|
||||||
|
## Running Tests
|
||||||
|
|
||||||
|
The project includes a comprehensive test suite with **1,070+ tests** and **91.3% coverage** across all critical systems:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Run all Python tests
|
||||||
|
conda run -n AniWorld python -m pytest tests/ -v
|
||||||
|
|
||||||
|
# Run unit tests only
|
||||||
|
conda run -n AniWorld python -m pytest tests/unit/ -v
|
||||||
|
|
||||||
|
# Run integration tests
|
||||||
|
conda run -n AniWorld python -m pytest tests/integration/ -v
|
||||||
|
|
||||||
|
# Run with coverage report
|
||||||
|
conda run -n AniWorld python -m pytest tests/ --cov --cov-report=html
|
||||||
|
|
||||||
|
# Run JavaScript/E2E tests (requires Node.js)
|
||||||
|
npm test # Unit tests (Vitest)
|
||||||
|
npm run test:e2e # E2E tests (Playwright)
|
||||||
|
```
|
||||||
|
|
||||||
|
**Test Coverage:**
|
||||||
|
|
||||||
|
- ✅ 1,070+ tests across 4 priority tiers (644 Python tests passing, 426 JavaScript/E2E tests)
|
||||||
|
- ✅ 91.3% code coverage
|
||||||
|
- ✅ **TIER 1 Critical**: 159/159 tests - Scheduler, NFO batch, download queue, persistence
|
||||||
|
- ✅ **TIER 2 High Priority**: 390/390 tests - Frontend UI, WebSocket, dark mode, settings
|
||||||
|
- ✅ **TIER 3 Medium Priority**: 95/156 tests - Performance, edge cases (core scenarios complete)
|
||||||
|
- ✅ **TIER 4 Polish**: 426 tests - Internationalization, accessibility, media server compatibility
|
||||||
|
- ✅ Security: Complete coverage (authentication, authorization, CSRF, XSS, SQL injection)
|
||||||
|
- ✅ Performance: Validated (200+ concurrent WebSocket clients, batch operations)
|
||||||
|
|
||||||
|
See [docs/TESTING_COMPLETE.md](docs/TESTING_COMPLETE.md) for comprehensive testing documentation.
|
||||||
|
|
||||||
|
## Technology Stack
|
||||||
|
|
||||||
|
- **Web Framework**: FastAPI 0.104.1
|
||||||
|
- **Database**: SQLite + SQLAlchemy 2.0
|
||||||
|
- **Auth**: JWT (python-jose) + passlib
|
||||||
|
- **Validation**: Pydantic 2.5
|
||||||
|
- **Logging**: structlog
|
||||||
|
- **Testing**: pytest + pytest-asyncio
|
||||||
|
|
||||||
|
## Application Lifecycle
|
||||||
|
|
||||||
|
### Initialization
|
||||||
|
|
||||||
|
On first startup, the application performs a one-time sync of series from data files to the database:
|
||||||
|
|
||||||
|
1. FastAPI lifespan starts
|
||||||
|
2. Database is initialized
|
||||||
|
3. `sync_series_from_data_files()` reads all data files from the anime directory (creates temporary SeriesApp)
|
||||||
|
4. Series metadata is synced to the database
|
||||||
|
5. DownloadService initializes (triggers main `SeriesApp` creation)
|
||||||
|
6. `SeriesApp` loads series from database via service layer (not from files)
|
||||||
|
|
||||||
|
On subsequent startups, the same flow applies but the sync finds no new series. `SeriesApp` always initializes with an empty series list (`skip_load=True`) and loads data from the database on demand, avoiding redundant file system scans.
|
||||||
|
|
||||||
|
### Adding New Series
|
||||||
|
|
||||||
|
When adding a new series:
|
||||||
|
|
||||||
|
1. Series is added to the database via `AnimeService`
|
||||||
|
2. Data file is created in the anime directory
|
||||||
|
3. In-memory `SerieList` is updated via `load_series_from_list()`
|
||||||
|
|
||||||
|
## License
|
||||||
|
|
||||||
|
MIT License
|
||||||
Binary file not shown.
49
config.json
49
config.json
@@ -1,49 +0,0 @@
|
|||||||
{
|
|
||||||
"security": {
|
|
||||||
"master_password_hash": "bb202031f646922388567de96a784074272efbbba9eb5d2259e23af04686d2a5",
|
|
||||||
"salt": "c3149a46648b4394410b415ea654c31731b988ee59fc91b8fb8366a0b32ef0c1",
|
|
||||||
"session_timeout_hours": 24,
|
|
||||||
"max_failed_attempts": 5,
|
|
||||||
"lockout_duration_minutes": 30
|
|
||||||
},
|
|
||||||
"anime": {
|
|
||||||
"directory": "\\\\sshfs.r\\ubuntu@192.168.178.43\\media\\serien\\Serien",
|
|
||||||
"download_threads": 3,
|
|
||||||
"download_speed_limit": null,
|
|
||||||
"auto_rescan_time": "03:00",
|
|
||||||
"auto_download_after_rescan": false
|
|
||||||
},
|
|
||||||
"logging": {
|
|
||||||
"level": "INFO",
|
|
||||||
"enable_console_logging": true,
|
|
||||||
"enable_console_progress": false,
|
|
||||||
"enable_fail2ban_logging": true,
|
|
||||||
"log_file": "aniworld.log",
|
|
||||||
"max_log_size_mb": 10,
|
|
||||||
"log_backup_count": 5
|
|
||||||
},
|
|
||||||
"providers": {
|
|
||||||
"default_provider": "aniworld.to",
|
|
||||||
"preferred_language": "German Dub",
|
|
||||||
"fallback_providers": [
|
|
||||||
"aniworld.to"
|
|
||||||
],
|
|
||||||
"provider_timeout": 30,
|
|
||||||
"retry_attempts": 3,
|
|
||||||
"provider_settings": {
|
|
||||||
"aniworld.to": {
|
|
||||||
"enabled": true,
|
|
||||||
"priority": 1,
|
|
||||||
"quality_preference": "720p"
|
|
||||||
}
|
|
||||||
}
|
|
||||||
},
|
|
||||||
"advanced": {
|
|
||||||
"max_concurrent_downloads": 3,
|
|
||||||
"download_buffer_size": 8192,
|
|
||||||
"connection_timeout": 30,
|
|
||||||
"read_timeout": 300,
|
|
||||||
"enable_debug_mode": false,
|
|
||||||
"cache_duration_minutes": 60
|
|
||||||
}
|
|
||||||
}
|
|
||||||
1596
docs/API.md
Normal file
1596
docs/API.md
Normal file
File diff suppressed because it is too large
Load Diff
814
docs/ARCHITECTURE.md
Normal file
814
docs/ARCHITECTURE.md
Normal file
@@ -0,0 +1,814 @@
|
|||||||
|
# Architecture Documentation
|
||||||
|
|
||||||
|
## Document Purpose
|
||||||
|
|
||||||
|
This document describes the system architecture of the Aniworld anime download manager.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 1. System Overview
|
||||||
|
|
||||||
|
Aniworld is a web-based anime download manager built with Python, FastAPI, and SQLite. It provides a REST API and WebSocket interface for managing anime libraries, downloading episodes, and tracking progress.
|
||||||
|
|
||||||
|
### High-Level Architecture
|
||||||
|
|
||||||
|
```
|
||||||
|
+------------------+ +------------------+ +------------------+
|
||||||
|
| Web Browser | | CLI Client | | External |
|
||||||
|
| (Frontend) | | (Main.py) | | Providers |
|
||||||
|
+--------+---------+ +--------+---------+ +--------+---------+
|
||||||
|
| | |
|
||||||
|
| HTTP/WebSocket | Direct | HTTP
|
||||||
|
| | |
|
||||||
|
+--------v---------+ +--------v---------+ +--------v---------+
|
||||||
|
| | | | | |
|
||||||
|
| FastAPI <-----> Core Layer <-----> Provider |
|
||||||
|
| Server Layer | | (SeriesApp) | | Adapters |
|
||||||
|
| | | | | |
|
||||||
|
+--------+---------+ +--------+---------+ +------------------+
|
||||||
|
| |
|
||||||
|
| |
|
||||||
|
+--------v---------+ +--------v---------+
|
||||||
|
| | | |
|
||||||
|
| SQLite DB | | File System |
|
||||||
|
| (aniworld.db) | | (anime/*/) |
|
||||||
|
| - Series data | | - Video files |
|
||||||
|
| - Episodes | | - NFO files |
|
||||||
|
| - Queue state | | - Media files |
|
||||||
|
+------------------+ +------------------+
|
||||||
|
```
|
||||||
|
|
||||||
|
Source: [src/server/fastapi_app.py](../src/server/fastapi_app.py#L1-L252)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 2. Architectural Layers
|
||||||
|
|
||||||
|
### 2.1 CLI Layer (`src/cli/`)
|
||||||
|
|
||||||
|
Legacy command-line interface for direct interaction with the core layer.
|
||||||
|
|
||||||
|
| Component | File | Purpose |
|
||||||
|
| --------- | ----------------------------- | --------------- |
|
||||||
|
| Main | [Main.py](../src/cli/Main.py) | CLI entry point |
|
||||||
|
|
||||||
|
### 2.2 Server Layer (`src/server/`)
|
||||||
|
|
||||||
|
FastAPI-based REST API and WebSocket server.
|
||||||
|
|
||||||
|
```
|
||||||
|
src/server/
|
||||||
|
+-- fastapi_app.py # Application entry point, lifespan management
|
||||||
|
+-- api/ # API route handlers
|
||||||
|
| +-- anime.py # /api/anime/* endpoints
|
||||||
|
| +-- auth.py # /api/auth/* endpoints
|
||||||
|
| +-- config.py # /api/config/* endpoints
|
||||||
|
| +-- download.py # /api/queue/* endpoints
|
||||||
|
| +-- scheduler.py # /api/scheduler/* endpoints
|
||||||
|
| +-- nfo.py # /api/nfo/* endpoints
|
||||||
|
| +-- websocket.py # /ws/* WebSocket handlers
|
||||||
|
| +-- health.py # /health/* endpoints
|
||||||
|
+-- controllers/ # Page controllers for HTML rendering
|
||||||
|
| +-- page_controller.py # UI page routes
|
||||||
|
| +-- health_controller.py# Health check route
|
||||||
|
| +-- error_controller.py # Error pages (404, 500)
|
||||||
|
+-- services/ # Business logic
|
||||||
|
| +-- anime_service.py # Anime operations
|
||||||
|
| +-- auth_service.py # Authentication
|
||||||
|
| +-- config_service.py # Configuration management
|
||||||
|
| +-- download_service.py # Download queue management
|
||||||
|
| +-- progress_service.py # Progress tracking
|
||||||
|
| +-- websocket_service.py# WebSocket broadcasting
|
||||||
|
| +-- queue_repository.py # Database persistence
|
||||||
|
| +-- nfo_service.py # NFO metadata management
|
||||||
|
+-- models/ # Pydantic models
|
||||||
|
| +-- auth.py # Auth request/response models
|
||||||
|
| +-- config.py # Configuration models
|
||||||
|
| +-- download.py # Download queue models
|
||||||
|
| +-- websocket.py # WebSocket message models
|
||||||
|
+-- middleware/ # Request processing
|
||||||
|
| +-- auth.py # JWT validation, rate limiting
|
||||||
|
| +-- error_handler.py # Exception handlers
|
||||||
|
| +-- setup_redirect.py # Setup flow redirect
|
||||||
|
+-- database/ # SQLAlchemy ORM
|
||||||
|
| +-- connection.py # Database connection
|
||||||
|
| +-- models.py # ORM models
|
||||||
|
| +-- service.py # Database service
|
||||||
|
+-- utils/ # Utility modules
|
||||||
|
| +-- filesystem.py # Folder sanitization, path safety
|
||||||
|
| +-- validators.py # Input validation utilities
|
||||||
|
| +-- dependencies.py # FastAPI dependency injection
|
||||||
|
+-- web/ # Static files and templates
|
||||||
|
+-- static/ # CSS, JS, images
|
||||||
|
+-- templates/ # Jinja2 templates
|
||||||
|
```
|
||||||
|
|
||||||
|
Source: [src/server/](../src/server/)
|
||||||
|
|
||||||
|
### 2.2.1 Frontend Architecture (`src/server/web/static/`)
|
||||||
|
|
||||||
|
The frontend uses a modular architecture with no build step required. CSS and JavaScript files are organized by responsibility.
|
||||||
|
|
||||||
|
#### CSS Structure
|
||||||
|
|
||||||
|
```
|
||||||
|
src/server/web/static/css/
|
||||||
|
+-- styles.css # Entry point with @import statements
|
||||||
|
+-- base/
|
||||||
|
| +-- variables.css # CSS custom properties (colors, fonts, spacing)
|
||||||
|
| +-- reset.css # CSS reset and normalize styles
|
||||||
|
| +-- typography.css # Font styles, headings, text utilities
|
||||||
|
+-- components/
|
||||||
|
| +-- buttons.css # All button styles
|
||||||
|
| +-- cards.css # Card and panel components
|
||||||
|
| +-- forms.css # Form inputs, labels, validation styles
|
||||||
|
| +-- modals.css # Modal and overlay styles
|
||||||
|
| +-- navigation.css # Header, nav, sidebar styles
|
||||||
|
| +-- progress.css # Progress bars, loading indicators
|
||||||
|
| +-- notifications.css # Toast, alerts, messages
|
||||||
|
| +-- tables.css # Table and list styles
|
||||||
|
| +-- status.css # Status badges and indicators
|
||||||
|
+-- pages/
|
||||||
|
| +-- login.css # Login page specific styles
|
||||||
|
| +-- index.css # Index/library page specific styles
|
||||||
|
| +-- queue.css # Queue page specific styles
|
||||||
|
+-- utilities/
|
||||||
|
+-- animations.css # Keyframes and animation classes
|
||||||
|
+-- responsive.css # Media queries and breakpoints
|
||||||
|
+-- helpers.css # Utility classes (hidden, flex, spacing)
|
||||||
|
```
|
||||||
|
|
||||||
|
#### JavaScript Structure
|
||||||
|
|
||||||
|
JavaScript uses the IIFE pattern with a shared `AniWorld` namespace for browser compatibility without build tools.
|
||||||
|
|
||||||
|
```
|
||||||
|
src/server/web/static/js/
|
||||||
|
+-- shared/ # Shared utilities used by all pages
|
||||||
|
| +-- constants.js # API endpoints, localStorage keys, defaults
|
||||||
|
| +-- auth.js # Token management (getToken, setToken, checkAuth)
|
||||||
|
| +-- api-client.js # Fetch wrapper with auto-auth headers
|
||||||
|
| +-- theme.js # Dark/light theme toggle
|
||||||
|
| +-- ui-utils.js # Toast notifications, format helpers
|
||||||
|
| +-- websocket-client.js # Socket.IO wrapper
|
||||||
|
+-- index/ # Index page modules
|
||||||
|
| +-- series-manager.js # Series list rendering and filtering
|
||||||
|
| +-- selection-manager.js# Multi-select and bulk download
|
||||||
|
| +-- search.js # Series search functionality
|
||||||
|
| +-- scan-manager.js # Library rescan operations
|
||||||
|
| +-- scheduler-config.js # Scheduler configuration
|
||||||
|
| +-- logging-config.js # Logging configuration
|
||||||
|
| +-- advanced-config.js # Advanced settings
|
||||||
|
| +-- main-config.js # Main configuration and backup
|
||||||
|
| +-- config-manager.js # Config modal orchestrator
|
||||||
|
| +-- socket-handler.js # WebSocket event handlers
|
||||||
|
| +-- app-init.js # Application initialization
|
||||||
|
+-- queue/ # Queue page modules
|
||||||
|
+-- queue-api.js # Queue API interactions
|
||||||
|
+-- queue-renderer.js # Queue list rendering
|
||||||
|
+-- progress-handler.js # Download progress updates
|
||||||
|
+-- queue-socket-handler.js # WebSocket events for queue
|
||||||
|
+-- queue-init.js # Queue page initialization
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Module Pattern
|
||||||
|
|
||||||
|
All JavaScript modules follow the IIFE pattern with namespace:
|
||||||
|
|
||||||
|
```javascript
|
||||||
|
var AniWorld = window.AniWorld || {};
|
||||||
|
|
||||||
|
AniWorld.ModuleName = (function () {
|
||||||
|
"use strict";
|
||||||
|
|
||||||
|
// Private variables and functions
|
||||||
|
|
||||||
|
// Public API
|
||||||
|
return {
|
||||||
|
init: init,
|
||||||
|
publicMethod: publicMethod,
|
||||||
|
};
|
||||||
|
})();
|
||||||
|
```
|
||||||
|
|
||||||
|
Source: [src/server/web/static/](../src/server/web/static/)
|
||||||
|
|
||||||
|
### 2.3 Core Layer (`src/core/`)
|
||||||
|
|
||||||
|
Domain logic for anime series management.
|
||||||
|
|
||||||
|
```
|
||||||
|
src/core/
|
||||||
|
+-- SeriesApp.py # Main application facade
|
||||||
|
+-- SerieScanner.py # Directory scanning, targeted single-series scan
|
||||||
|
+-- entities/ # Domain entities
|
||||||
|
| +-- series.py # Serie class with sanitized_folder property
|
||||||
|
| +-- SerieList.py # SerieList collection with sanitized folder support
|
||||||
|
| +-- nfo_models.py # Pydantic models for tvshow.nfo (TVShowNFO, ActorInfo…)
|
||||||
|
+-- services/ # Domain services
|
||||||
|
| +-- nfo_service.py # NFO lifecycle: create / update tvshow.nfo
|
||||||
|
| +-- nfo_repair_service.py # Detect & repair incomplete tvshow.nfo files
|
||||||
|
| | # (parse_nfo_tags, find_missing_tags, NfoRepairService)
|
||||||
|
| +-- tmdb_client.py # Async TMDB API client
|
||||||
|
+-- utils/ # Utility helpers (no side-effects)
|
||||||
|
| +-- nfo_generator.py # TVShowNFO → XML serialiser
|
||||||
|
| +-- nfo_mapper.py # TMDB API dict → TVShowNFO (tmdb_to_nfo_model,
|
||||||
|
| | # _extract_rating_by_country, _extract_fsk_rating)
|
||||||
|
| +-- image_downloader.py # TMDB image downloader
|
||||||
|
+-- providers/ # External provider adapters
|
||||||
|
| +-- base_provider.py # Loader interface
|
||||||
|
| +-- provider_factory.py # Provider registry
|
||||||
|
+-- interfaces/ # Abstract interfaces
|
||||||
|
| +-- callbacks.py # Progress callback system
|
||||||
|
+-- exceptions/ # Domain exceptions
|
||||||
|
+-- Exceptions.py # Custom exceptions
|
||||||
|
```
|
||||||
|
|
||||||
|
**Key Components:**
|
||||||
|
|
||||||
|
| Component | Purpose |
|
||||||
|
| -------------- | -------------------------------------------------------------------------- |
|
||||||
|
| `SeriesApp` | Main application facade for anime operations |
|
||||||
|
| `SerieScanner` | Scans directories for anime; `scan_single_series()` for targeted scans |
|
||||||
|
| `Serie` | Domain entity with `sanitized_folder` property for filesystem-safe names |
|
||||||
|
| `SerieList` | Collection management with automatic folder creation using sanitized names |
|
||||||
|
|
||||||
|
**Initialization:**
|
||||||
|
|
||||||
|
`SeriesApp` is initialized with `skip_load=True` passed to `SerieList`, preventing automatic loading of series from data files on every instantiation. Series data is loaded once during application setup via `sync_series_from_data_files()` in the FastAPI lifespan, which reads data files and syncs them to the database. Subsequent operations load series from the database through the service layer.
|
||||||
|
|
||||||
|
Source: [src/core/](../src/core/)
|
||||||
|
|
||||||
|
### 2.4 Infrastructure Layer (`src/infrastructure/`)
|
||||||
|
|
||||||
|
Cross-cutting concerns.
|
||||||
|
|
||||||
|
```
|
||||||
|
src/infrastructure/
|
||||||
|
+-- logging/ # Structured logging setup
|
||||||
|
+-- security/ # Security utilities
|
||||||
|
```
|
||||||
|
|
||||||
|
### 2.5 Configuration Layer (`src/config/`)
|
||||||
|
|
||||||
|
Application settings management.
|
||||||
|
|
||||||
|
| Component | File | Purpose |
|
||||||
|
| --------- | ---------------------------------------- | ------------------------------- |
|
||||||
|
| Settings | [settings.py](../src/config/settings.py) | Environment-based configuration |
|
||||||
|
|
||||||
|
Source: [src/config/settings.py](../src/config/settings.py#L1-L96)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 12. Startup Sequence
|
||||||
|
|
||||||
|
The FastAPI lifespan function (`src/server/fastapi_app.py`) runs the following steps on every server start.
|
||||||
|
|
||||||
|
### 12.1 Startup Order
|
||||||
|
|
||||||
|
```
|
||||||
|
1. Logging configured
|
||||||
|
|
||||||
|
2. Temp folder purged ← cleans leftover partial download files
|
||||||
|
+-- Iterate ./Temp/ and delete every file and sub-directory
|
||||||
|
+-- Create ./Temp/ if it does not exist
|
||||||
|
+-- Errors are logged as warnings; startup continues regardless
|
||||||
|
|
||||||
|
3. Database initialised (required – abort on failure)
|
||||||
|
+-- SQLite file created / migrated via init_db()
|
||||||
|
|
||||||
|
4. Configuration loaded from data/config.json
|
||||||
|
+-- Synced to settings (ENV vars take precedence)
|
||||||
|
|
||||||
|
5. Progress & WebSocket services wired up
|
||||||
|
|
||||||
|
6. Series loaded from database into memory
|
||||||
|
|
||||||
|
7. Download service initialised (queue restored from DB)
|
||||||
|
|
||||||
|
8. Background loader service started
|
||||||
|
|
||||||
|
9. Scheduler service started
|
||||||
|
|
||||||
|
10. NFO repair scan (queue incomplete tvshow.nfo files for background reload)
|
||||||
|
```
|
||||||
|
|
||||||
|
### 12.2 Temp Folder Guarantee
|
||||||
|
|
||||||
|
Every server start begins with a clean `./Temp/` directory. This ensures that partial `.part` files or stale temp videos from a crashed or force-killed previous session are never left behind before new downloads start.
|
||||||
|
|
||||||
|
Source: [src/server/fastapi_app.py](../src/server/fastapi_app.py)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 11. Graceful Shutdown
|
||||||
|
|
||||||
|
The application implements a comprehensive graceful shutdown mechanism that ensures data integrity and proper cleanup when the server is stopped via Ctrl+C (SIGINT) or SIGTERM.
|
||||||
|
|
||||||
|
### 11.1 Shutdown Sequence
|
||||||
|
|
||||||
|
```
|
||||||
|
1. SIGINT/SIGTERM received
|
||||||
|
+-- Uvicorn catches signal
|
||||||
|
+-- Stops accepting new requests
|
||||||
|
|
||||||
|
2. FastAPI lifespan shutdown triggered
|
||||||
|
+-- 30 second total timeout
|
||||||
|
|
||||||
|
3. WebSocket shutdown (5s timeout)
|
||||||
|
+-- Broadcast {"type": "server_shutdown"} to all clients
|
||||||
|
+-- Close each connection with code 1001 (Going Away)
|
||||||
|
+-- Clear connection tracking data
|
||||||
|
|
||||||
|
4. Download service stop (10s timeout)
|
||||||
|
+-- Set shutdown flag
|
||||||
|
+-- Persist active download as "pending" in database
|
||||||
|
+-- Cancel active download task
|
||||||
|
+-- Shutdown ThreadPoolExecutor with wait
|
||||||
|
|
||||||
|
5. Progress service cleanup
|
||||||
|
+-- Clear event subscribers
|
||||||
|
+-- Clear active progress tracking
|
||||||
|
|
||||||
|
6. Database cleanup (10s timeout)
|
||||||
|
+-- SQLite: Run PRAGMA wal_checkpoint(TRUNCATE)
|
||||||
|
+-- Dispose async engine
|
||||||
|
+-- Dispose sync engine
|
||||||
|
|
||||||
|
7. Process exits cleanly
|
||||||
|
```
|
||||||
|
|
||||||
|
Source: [src/server/fastapi_app.py](../src/server/fastapi_app.py#L142-L210)
|
||||||
|
|
||||||
|
### 11.2 Key Components
|
||||||
|
|
||||||
|
| Component | File | Shutdown Method |
|
||||||
|
| ------------------- | ------------------------------------------------------------------- | ------------------------------ |
|
||||||
|
| WebSocket Service | [websocket_service.py](../src/server/services/websocket_service.py) | `shutdown(timeout=5.0)` |
|
||||||
|
| Download Service | [download_service.py](../src/server/services/download_service.py) | `stop(timeout=10.0)` |
|
||||||
|
| Database Connection | [connection.py](../src/server/database/connection.py) | `close_db()` |
|
||||||
|
| Uvicorn Config | [run_server.py](../run_server.py) | `timeout_graceful_shutdown=30` |
|
||||||
|
| Stop Script | [stop_server.sh](../stop_server.sh) | SIGTERM with fallback |
|
||||||
|
|
||||||
|
### 11.3 Data Integrity Guarantees
|
||||||
|
|
||||||
|
1. **Active downloads preserved**: In-progress downloads are saved as "pending" and can resume on restart.
|
||||||
|
|
||||||
|
2. **Database WAL flushed**: SQLite WAL checkpoint ensures all writes are in the main database file.
|
||||||
|
|
||||||
|
3. **WebSocket clients notified**: Clients receive shutdown message before connection closes.
|
||||||
|
|
||||||
|
4. **Thread pool cleanup**: Background threads complete or are gracefully cancelled.
|
||||||
|
|
||||||
|
### 11.4 Manual Stop
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Graceful stop via script (sends SIGTERM, waits up to 30s)
|
||||||
|
./stop_server.sh
|
||||||
|
|
||||||
|
# Or press Ctrl+C in terminal running the server
|
||||||
|
```
|
||||||
|
|
||||||
|
Source: [stop_server.sh](../stop_server.sh#L1-L80)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 3. Component Interactions
|
||||||
|
|
||||||
|
### 3.1 Request Flow (REST API)
|
||||||
|
|
||||||
|
```
|
||||||
|
1. Client sends HTTP request
|
||||||
|
2. AuthMiddleware validates JWT token (if required)
|
||||||
|
3. Rate limiter checks request frequency
|
||||||
|
4. FastAPI router dispatches to endpoint handler
|
||||||
|
5. Endpoint calls service layer
|
||||||
|
6. Service layer uses core layer or database
|
||||||
|
7. Response returned as JSON
|
||||||
|
```
|
||||||
|
|
||||||
|
Source: [src/server/middleware/auth.py](../src/server/middleware/auth.py#L1-L209)
|
||||||
|
|
||||||
|
### 3.2 Download Flow
|
||||||
|
|
||||||
|
```
|
||||||
|
1. POST /api/queue/add
|
||||||
|
+-- DownloadService.add_to_queue()
|
||||||
|
+-- QueueRepository.save_item() -> SQLite
|
||||||
|
|
||||||
|
2. POST /api/queue/start
|
||||||
|
+-- DownloadService.start_queue_processing()
|
||||||
|
+-- Process pending items sequentially
|
||||||
|
+-- ProgressService emits events
|
||||||
|
+-- WebSocketService broadcasts to clients
|
||||||
|
|
||||||
|
3. During download:
|
||||||
|
+-- Provider writes to ./Temp/<filename> (+ ./Temp/<filename>.part fragments)
|
||||||
|
+-- ProgressService.emit("progress_updated")
|
||||||
|
+-- WebSocketService.broadcast_to_room()
|
||||||
|
+-- Client receives WebSocket message
|
||||||
|
|
||||||
|
4. After download attempt (success OR failure):
|
||||||
|
+-- _cleanup_temp_file() removes ./Temp/<filename> and all .part fragments
|
||||||
|
+-- On success: file was already moved to final destination before cleanup
|
||||||
|
+-- On failure / exception: no partial files remain in ./Temp/
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Temp Directory Contract
|
||||||
|
|
||||||
|
| Situation | Outcome |
|
||||||
|
| -------------------------------- | ------------------------------------------------------------------- |
|
||||||
|
| Server start | Entire `./Temp/` directory is purged before any service initialises |
|
||||||
|
| Successful download | Temp file moved to destination, then removed from `./Temp/` |
|
||||||
|
| Failed download (provider error) | Temp + `.part` fragments removed by `_cleanup_temp_file()` |
|
||||||
|
| Exception / cancellation | Temp + `.part` fragments removed in `except` block |
|
||||||
|
|
||||||
|
Source: [src/server/services/download_service.py](../src/server/services/download_service.py#L1-L150),
|
||||||
|
[src/core/providers/aniworld_provider.py](../src/core/providers/aniworld_provider.py),
|
||||||
|
[src/core/providers/enhanced_provider.py](../src/core/providers/enhanced_provider.py)
|
||||||
|
|
||||||
|
### 3.3 WebSocket Event Flow
|
||||||
|
|
||||||
|
```
|
||||||
|
1. Client connects to /ws/connect
|
||||||
|
2. Server sends "connected" message
|
||||||
|
3. Client joins room: {"action": "join", "data": {"room": "downloads"}}
|
||||||
|
4. ProgressService emits events
|
||||||
|
5. WebSocketService broadcasts to room subscribers
|
||||||
|
6. Client receives real-time updates
|
||||||
|
```
|
||||||
|
|
||||||
|
Source: [src/server/api/websocket.py](../src/server/api/websocket.py#L1-L260)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 4. Design Patterns
|
||||||
|
|
||||||
|
### 4.1 Repository Pattern (Service Layer as Repository)
|
||||||
|
|
||||||
|
**Architecture Decision**: The Service Layer serves as the Repository layer for database access.
|
||||||
|
|
||||||
|
Database access is abstracted through service classes in `src/server/database/service.py` that provide CRUD operations and act as the repository layer. This eliminates the need for a separate repository layer while maintaining clean separation of concerns.
|
||||||
|
|
||||||
|
**Service Layer Classes** (acting as repositories):
|
||||||
|
|
||||||
|
- `AnimeSeriesService` - CRUD operations for anime series
|
||||||
|
- `EpisodeService` - CRUD operations for episodes
|
||||||
|
- `DownloadQueueService` - CRUD operations for download queue
|
||||||
|
- `UserSessionService` - CRUD operations for user sessions
|
||||||
|
- `SystemSettingsService` - CRUD operations for system settings
|
||||||
|
|
||||||
|
**Key Principles**:
|
||||||
|
|
||||||
|
1. **No Direct Database Queries**: Controllers and business logic services MUST use service layer methods
|
||||||
|
2. **Service Layer Encapsulation**: All SQLAlchemy queries are encapsulated in service methods
|
||||||
|
3. **Consistent Interface**: Services provide consistent async methods for all database operations
|
||||||
|
4. **Single Responsibility**: Each service manages one entity type
|
||||||
|
|
||||||
|
**Example Usage**:
|
||||||
|
|
||||||
|
```python
|
||||||
|
# CORRECT: Use service layer
|
||||||
|
from src.server.database.service import AnimeSeriesService
|
||||||
|
|
||||||
|
async with get_db_session() as db:
|
||||||
|
series = await AnimeSeriesService.get_by_key(db, "attack-on-titan")
|
||||||
|
await AnimeSeriesService.update(db, series.id, has_nfo=True)
|
||||||
|
|
||||||
|
# INCORRECT: Direct database query
|
||||||
|
result = await db.execute(select(AnimeSeries).filter(...)) # ❌ Never do this
|
||||||
|
```
|
||||||
|
|
||||||
|
**Special Case - Queue Repository Adapter**:
|
||||||
|
|
||||||
|
The `QueueRepository` in `src/server/services/queue_repository.py` is an adapter that wraps `DownloadQueueService` to provide domain model conversion between Pydantic models and SQLAlchemy models:
|
||||||
|
|
||||||
|
```python
|
||||||
|
# QueueRepository provides CRUD with model conversion
|
||||||
|
class QueueRepository:
|
||||||
|
async def save_item(self, item: DownloadItem) -> None: ... # Converts Pydantic → SQLAlchemy
|
||||||
|
async def get_all_items(self) -> List[DownloadItem]: ... # Converts SQLAlchemy → Pydantic
|
||||||
|
async def delete_item(self, item_id: str) -> bool: ...
|
||||||
|
```
|
||||||
|
|
||||||
|
Source: [src/server/database/service.py](../src/server/database/service.py), [src/server/services/queue_repository.py](../src/server/services/queue_repository.py)
|
||||||
|
|
||||||
|
### 4.2 Dependency Injection
|
||||||
|
|
||||||
|
FastAPI's `Depends()` provides constructor injection.
|
||||||
|
|
||||||
|
```python
|
||||||
|
@router.get("/status")
|
||||||
|
async def get_status(
|
||||||
|
download_service: DownloadService = Depends(get_download_service),
|
||||||
|
):
|
||||||
|
...
|
||||||
|
```
|
||||||
|
|
||||||
|
Source: [src/server/utils/dependencies.py](../src/server/utils/dependencies.py)
|
||||||
|
|
||||||
|
### 4.3 Event-Driven Architecture
|
||||||
|
|
||||||
|
Progress updates use an event subscription model.
|
||||||
|
|
||||||
|
```python
|
||||||
|
# ProgressService publishes events
|
||||||
|
progress_service.emit("progress_updated", event)
|
||||||
|
|
||||||
|
# WebSocketService subscribes
|
||||||
|
progress_service.subscribe("progress_updated", ws_handler)
|
||||||
|
```
|
||||||
|
|
||||||
|
Source: [src/server/fastapi_app.py](../src/server/fastapi_app.py#L98-L108)
|
||||||
|
|
||||||
|
### 4.4 Singleton Pattern
|
||||||
|
|
||||||
|
Services use module-level singletons for shared state.
|
||||||
|
|
||||||
|
```python
|
||||||
|
# In download_service.py
|
||||||
|
_download_service_instance: Optional[DownloadService] = None
|
||||||
|
|
||||||
|
def get_download_service() -> DownloadService:
|
||||||
|
global _download_service_instance
|
||||||
|
if _download_service_instance is None:
|
||||||
|
_download_service_instance = DownloadService(...)
|
||||||
|
return _download_service_instance
|
||||||
|
```
|
||||||
|
|
||||||
|
### 4.5 Error Handling Pattern
|
||||||
|
|
||||||
|
**Architecture Decision**: Dual error handling approach based on exception source.
|
||||||
|
|
||||||
|
The application uses two complementary error handling mechanisms:
|
||||||
|
|
||||||
|
1. **FastAPI HTTPException** - For simple validation and HTTP-level errors
|
||||||
|
2. **Custom Exception Hierarchy** - For business logic and service-level errors with rich context
|
||||||
|
|
||||||
|
#### Exception Hierarchy
|
||||||
|
|
||||||
|
```python
|
||||||
|
# Base exception with HTTP status mapping
|
||||||
|
AniWorldAPIException(message, status_code, error_code, details)
|
||||||
|
├── AuthenticationError (401)
|
||||||
|
├── AuthorizationError (403)
|
||||||
|
├── ValidationError (422)
|
||||||
|
├── NotFoundError (404)
|
||||||
|
├── ConflictError (409)
|
||||||
|
├── BadRequestError (400)
|
||||||
|
├── RateLimitError (429)
|
||||||
|
└── ServerError (500)
|
||||||
|
├── DownloadError
|
||||||
|
├── ConfigurationError
|
||||||
|
├── ProviderError
|
||||||
|
└── DatabaseError
|
||||||
|
```
|
||||||
|
|
||||||
|
#### When to Use Each
|
||||||
|
|
||||||
|
**Use HTTPException for:**
|
||||||
|
|
||||||
|
- Simple parameter validation (missing fields, wrong type)
|
||||||
|
- Direct HTTP-level errors (401, 403, 404 without business context)
|
||||||
|
- Quick endpoint-specific failures
|
||||||
|
|
||||||
|
**Use Custom Exceptions for:**
|
||||||
|
|
||||||
|
- Service-layer business logic errors (AnimeServiceError, ConfigServiceError)
|
||||||
|
- Errors needing rich context (details dict, error codes)
|
||||||
|
- Errors that should be logged with specific categorization
|
||||||
|
- Cross-cutting concerns (authentication, authorization, rate limiting)
|
||||||
|
|
||||||
|
**Example:**
|
||||||
|
|
||||||
|
```python
|
||||||
|
# Simple validation - Use HTTPException
|
||||||
|
if not series_key:
|
||||||
|
raise HTTPException(status_code=400, detail="series_key required")
|
||||||
|
|
||||||
|
# Business logic error - Use custom exception
|
||||||
|
try:
|
||||||
|
await anime_service.add_series(series_key)
|
||||||
|
except AnimeServiceError as e:
|
||||||
|
raise ServerError(
|
||||||
|
message=f"Failed to add series: {e}",
|
||||||
|
error_code="ANIME_ADD_FAILED",
|
||||||
|
details={"series_key": series_key}
|
||||||
|
)
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Global Exception Handlers
|
||||||
|
|
||||||
|
All custom exceptions are automatically handled by global middleware that:
|
||||||
|
|
||||||
|
- Converts exceptions to structured JSON responses
|
||||||
|
- Logs errors with appropriate severity
|
||||||
|
- Includes request ID for tracking
|
||||||
|
- Provides consistent error format
|
||||||
|
|
||||||
|
**Source**: [src/server/exceptions/\_\_init\_\_.py](../src/server/exceptions/__init__.py), [src/server/middleware/error_handler.py](../src/server/middleware/error_handler.py)
|
||||||
|
|
||||||
|
Source: [src/server/services/download_service.py](../src/server/services/download_service.py)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 5. Data Flow
|
||||||
|
|
||||||
|
### 5.1 Series Identifier Convention
|
||||||
|
|
||||||
|
The system uses two identifier fields:
|
||||||
|
|
||||||
|
| Field | Type | Purpose | Example |
|
||||||
|
| -------- | -------- | -------------------------------------- | -------------------------- |
|
||||||
|
| `key` | Primary | Provider-assigned, URL-safe identifier | `"attack-on-titan"` |
|
||||||
|
| `folder` | Metadata | Filesystem folder name (display only) | `"Attack on Titan (2013)"` |
|
||||||
|
|
||||||
|
All API operations use `key`. The `folder` is for filesystem operations only.
|
||||||
|
|
||||||
|
Source: [src/server/database/models.py](../src/server/database/models.py#L26-L50)
|
||||||
|
|
||||||
|
### 5.2 Database Schema
|
||||||
|
|
||||||
|
```
|
||||||
|
+----------------+ +----------------+ +--------------------+
|
||||||
|
| anime_series | | episodes | | download_queue_item|
|
||||||
|
+----------------+ +----------------+ +--------------------+
|
||||||
|
| id (PK) |<--+ | id (PK) | +-->| id (PK) |
|
||||||
|
| key (unique) | | | series_id (FK) |---+ | series_id (FK) |
|
||||||
|
| name | +---| season | | status |
|
||||||
|
| site | | episode_number | | priority |
|
||||||
|
| folder | | title | | progress_percent |
|
||||||
|
| created_at | | is_downloaded | | added_at |
|
||||||
|
| updated_at | | file_path | | started_at |
|
||||||
|
+----------------+ +----------------+ +--------------------+
|
||||||
|
```
|
||||||
|
|
||||||
|
Source: [src/server/database/models.py](../src/server/database/models.py#L1-L200)
|
||||||
|
|
||||||
|
### 5.3 Configuration Storage
|
||||||
|
|
||||||
|
Configuration is stored in `data/config.json`:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"name": "Aniworld",
|
||||||
|
"data_dir": "data",
|
||||||
|
"scheduler": {
|
||||||
|
"enabled": true,
|
||||||
|
"schedule_time": "03:00",
|
||||||
|
"schedule_days": ["mon", "tue", "wed", "thu", "fri", "sat", "sun"],
|
||||||
|
"auto_download_after_rescan": false
|
||||||
|
},
|
||||||
|
"logging": { "level": "INFO" },
|
||||||
|
"backup": { "enabled": false, "path": "data/backups" },
|
||||||
|
"other": {
|
||||||
|
"master_password_hash": "$pbkdf2-sha256$...",
|
||||||
|
"anime_directory": "/path/to/anime"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Source: [data/config.json](../data/config.json)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 6. Technology Stack
|
||||||
|
|
||||||
|
| Layer | Technology | Version | Purpose |
|
||||||
|
| ------------- | ------------------- | ------- | ---------------------- |
|
||||||
|
| Web Framework | FastAPI | 0.104.1 | REST API, WebSocket |
|
||||||
|
| ASGI Server | Uvicorn | 0.24.0 | HTTP server |
|
||||||
|
| Database | SQLite + SQLAlchemy | 2.0.35 | Persistence |
|
||||||
|
| Auth | python-jose | 3.3.0 | JWT tokens |
|
||||||
|
| Password | passlib | 1.7.4 | bcrypt hashing |
|
||||||
|
| Validation | Pydantic | 2.5.0 | Data models |
|
||||||
|
| Templates | Jinja2 | 3.1.2 | HTML rendering |
|
||||||
|
| Logging | structlog | 24.1.0 | Structured logging |
|
||||||
|
| Testing | pytest | 7.4.3 | Unit/integration tests |
|
||||||
|
|
||||||
|
Source: [requirements.txt](../requirements.txt)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 7. Scalability Considerations
|
||||||
|
|
||||||
|
### Current Limitations
|
||||||
|
|
||||||
|
1. **Single-process deployment**: In-memory rate limiting and session state are not shared across processes.
|
||||||
|
|
||||||
|
2. **SQLite database**: Not suitable for high concurrency. Consider PostgreSQL for production.
|
||||||
|
|
||||||
|
3. **Sequential downloads**: Only one download processes at a time by design.
|
||||||
|
|
||||||
|
### Recommended Improvements for Scale
|
||||||
|
|
||||||
|
| Concern | Current | Recommended |
|
||||||
|
| -------------- | --------------- | ----------------- |
|
||||||
|
| Rate limiting | In-memory dict | Redis |
|
||||||
|
| Session store | In-memory | Redis or database |
|
||||||
|
| Database | SQLite | PostgreSQL |
|
||||||
|
| Task queue | In-memory deque | Celery + Redis |
|
||||||
|
| Load balancing | None | Nginx/HAProxy |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 8. Integration Points
|
||||||
|
|
||||||
|
### 8.1 External Providers
|
||||||
|
|
||||||
|
The system integrates with anime streaming providers via the Loader interface.
|
||||||
|
|
||||||
|
```python
|
||||||
|
class Loader(ABC):
|
||||||
|
@abstractmethod
|
||||||
|
def search(self, query: str) -> List[Serie]: ...
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def get_episodes(self, serie: Serie) -> Dict[int, List[int]]: ...
|
||||||
|
```
|
||||||
|
|
||||||
|
Source: [src/core/providers/base_provider.py](../src/core/providers/base_provider.py)
|
||||||
|
|
||||||
|
### 8.2 Filesystem Integration
|
||||||
|
|
||||||
|
The scanner reads anime directories to detect downloaded episodes.
|
||||||
|
|
||||||
|
```python
|
||||||
|
SerieScanner(
|
||||||
|
basePath="/path/to/anime", # Anime library directory
|
||||||
|
loader=provider, # Provider for metadata
|
||||||
|
db_session=session # Optional database
|
||||||
|
)
|
||||||
|
```
|
||||||
|
|
||||||
|
Source: [src/core/SerieScanner.py](../src/core/SerieScanner.py#L59-L96)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 9. Security Architecture
|
||||||
|
|
||||||
|
### 9.1 Authentication Flow
|
||||||
|
|
||||||
|
```
|
||||||
|
1. User sets master password via POST /api/auth/setup
|
||||||
|
2. Password hashed with pbkdf2_sha256 (via passlib)
|
||||||
|
3. Hash stored in config.json
|
||||||
|
4. Login validates password, returns JWT token
|
||||||
|
5. JWT contains: session_id, user, created_at, expires_at
|
||||||
|
6. Subsequent requests include: Authorization: Bearer <token>
|
||||||
|
```
|
||||||
|
|
||||||
|
Source: [src/server/services/auth_service.py](../src/server/services/auth_service.py#L1-L150)
|
||||||
|
|
||||||
|
### 9.2 Password Requirements
|
||||||
|
|
||||||
|
- Minimum 8 characters
|
||||||
|
- Mixed case (upper and lower)
|
||||||
|
- At least one number
|
||||||
|
- At least one special character
|
||||||
|
|
||||||
|
Source: [src/server/services/auth_service.py](../src/server/services/auth_service.py#L97-L125)
|
||||||
|
|
||||||
|
### 9.3 Rate Limiting
|
||||||
|
|
||||||
|
| Endpoint | Limit | Window |
|
||||||
|
| ----------------- | ----------- | ---------- |
|
||||||
|
| `/api/auth/login` | 5 requests | 60 seconds |
|
||||||
|
| `/api/auth/setup` | 5 requests | 60 seconds |
|
||||||
|
| All origins | 60 requests | 60 seconds |
|
||||||
|
|
||||||
|
Source: [src/server/middleware/auth.py](../src/server/middleware/auth.py#L54-L68)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 10. Deployment Modes
|
||||||
|
|
||||||
|
### 10.1 Development
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Run with hot reload
|
||||||
|
python -m uvicorn src.server.fastapi_app:app --reload
|
||||||
|
```
|
||||||
|
|
||||||
|
### 10.2 Production
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Via conda environment
|
||||||
|
conda run -n AniWorld python -m uvicorn src.server.fastapi_app:app \
|
||||||
|
--host 127.0.0.1 --port 8000
|
||||||
|
```
|
||||||
|
|
||||||
|
### 10.3 Configuration
|
||||||
|
|
||||||
|
Environment variables (via `.env` or shell):
|
||||||
|
|
||||||
|
| Variable | Default | Description |
|
||||||
|
| ----------------- | ------------------------------ | ---------------------- |
|
||||||
|
| `JWT_SECRET_KEY` | Random | Secret for JWT signing |
|
||||||
|
| `DATABASE_URL` | `sqlite:///./data/aniworld.db` | Database connection |
|
||||||
|
| `ANIME_DIRECTORY` | (empty) | Path to anime library |
|
||||||
|
| `LOG_LEVEL` | `INFO` | Logging level |
|
||||||
|
| `CORS_ORIGINS` | `localhost:3000,8000` | Allowed CORS origins |
|
||||||
|
|
||||||
|
Source: [src/config/settings.py](../src/config/settings.py#L1-L96)
|
||||||
220
docs/CHANGELOG.md
Normal file
220
docs/CHANGELOG.md
Normal file
@@ -0,0 +1,220 @@
|
|||||||
|
# Changelog
|
||||||
|
|
||||||
|
## Document Purpose
|
||||||
|
|
||||||
|
This document tracks all notable changes to the Aniworld project.
|
||||||
|
|
||||||
|
### What This Document Contains
|
||||||
|
|
||||||
|
- **Version History**: All released versions with dates
|
||||||
|
- **Added Features**: New functionality in each release
|
||||||
|
- **Changed Features**: Modifications to existing features
|
||||||
|
- **Deprecated Features**: Features marked for removal
|
||||||
|
- **Removed Features**: Features removed from the codebase
|
||||||
|
- **Fixed Bugs**: Bug fixes with issue references
|
||||||
|
- **Security Fixes**: Security-related changes
|
||||||
|
- **Breaking Changes**: Changes requiring user action
|
||||||
|
|
||||||
|
### What This Document Does NOT Contain
|
||||||
|
|
||||||
|
- Internal refactoring details (unless user-facing)
|
||||||
|
- Commit-level changes
|
||||||
|
- Work-in-progress features
|
||||||
|
- Roadmap or planned features
|
||||||
|
|
||||||
|
### Target Audience
|
||||||
|
|
||||||
|
- All users and stakeholders
|
||||||
|
- Operators planning upgrades
|
||||||
|
- Developers tracking changes
|
||||||
|
- Support personnel
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Format
|
||||||
|
|
||||||
|
This changelog follows [Keep a Changelog](https://keepachangelog.com/) principles and adheres to [Semantic Versioning](https://semver.org/).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## [1.3.1] - 2026-02-22
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- **Temp file cleanup after every download** (`src/core/providers/aniworld_provider.py`,
|
||||||
|
`src/core/providers/enhanced_provider.py`): Module-level helper
|
||||||
|
`_cleanup_temp_file()` removes the working temp file and any yt-dlp `.part`
|
||||||
|
fragments after each download attempt — on success, on failure, and on
|
||||||
|
exceptions (including `BrokenPipeError` and cancellation). Ensures that no
|
||||||
|
partial files accumulate in `./Temp/` across multiple runs.
|
||||||
|
- **Temp folder purge on server start** (`src/server/fastapi_app.py`): The
|
||||||
|
FastAPI lifespan startup now iterates `./Temp/` and deletes every file and
|
||||||
|
sub-directory before the rest of the initialisation sequence runs. If the
|
||||||
|
folder does not exist it is created. Errors are caught and logged as warnings
|
||||||
|
so that they never abort startup.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## [1.3.0] - 2026-02-22
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- **NFO tag completeness (`nfo_mapper.py`)**: All 17 required NFO tags are now
|
||||||
|
explicitly populated during creation: `originaltitle`, `sorttitle`, `year`,
|
||||||
|
`plot`, `outline`, `tagline`, `runtime`, `premiered`, `status`, `imdbid`,
|
||||||
|
`genre`, `studio`, `country`, `actor`, `watched`, `dateadded`, `mpaa`.
|
||||||
|
- **`src/core/utils/nfo_mapper.py`**: New module containing
|
||||||
|
`tmdb_to_nfo_model()`, `_extract_rating_by_country()`, and
|
||||||
|
`_extract_fsk_rating()`. Extracted from `NFOService` to keep files under
|
||||||
|
500 lines and isolate pure mapping logic.
|
||||||
|
- **US MPAA rating**: `_extract_rating_by_country(ratings, "US")` now maps the
|
||||||
|
US TMDB content rating to the `<mpaa>` NFO tag.
|
||||||
|
- **`NfoRepairService` (`src/core/services/nfo_repair_service.py`)**: New service
|
||||||
|
that detects incomplete `tvshow.nfo` files and triggers TMDB re-fetch.
|
||||||
|
Provides `parse_nfo_tags()`, `find_missing_tags()`, `nfo_needs_repair()`, and
|
||||||
|
`NfoRepairService.repair_series()`. 13 required tags are checked.
|
||||||
|
- **`perform_nfo_repair_scan()` startup hook
|
||||||
|
(`src/server/services/initialization_service.py`)**: New async function
|
||||||
|
called during application startup. Iterates every series directory, checks
|
||||||
|
whether `tvshow.nfo` is missing required tags using `nfo_needs_repair()`, and
|
||||||
|
either queues the series for background reload (when a `background_loader` is
|
||||||
|
provided) or calls `NfoRepairService.repair_series()` directly. Skips
|
||||||
|
gracefully when `tmdb_api_key` or `anime_directory` is not configured.
|
||||||
|
- **NFO repair wired into startup lifespan (`src/server/fastapi_app.py`)**:
|
||||||
|
`perform_nfo_repair_scan(background_loader)` is called at the end of the
|
||||||
|
FastAPI lifespan startup, after `perform_media_scan_if_needed`, ensuring
|
||||||
|
every existing series NFO is checked and repaired on each server start.
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- `NFOService._tmdb_to_nfo_model()` and `NFOService._extract_fsk_rating()` moved
|
||||||
|
to `src/core/utils/nfo_mapper.py` as module-level functions
|
||||||
|
`tmdb_to_nfo_model()` and `_extract_fsk_rating()`.
|
||||||
|
- `src/core/services/nfo_service.py` reduced from 640 → 471 lines.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## [Unreleased] - 2026-01-18
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- **Cron-based Scheduler**: Replaced the asyncio sleep-loop with APScheduler's `AsyncIOScheduler + CronTrigger`
|
||||||
|
- Schedule rescans at a specific **time of day** (`HH:MM`) on selected **days of the week**
|
||||||
|
- New `SchedulerConfig` fields: `schedule_time` (default `"03:00"`), `schedule_days` (default all 7), `auto_download_after_rescan` (default `false`)
|
||||||
|
- Old `interval_minutes` field retained for backward compatibility
|
||||||
|
- **Auto-download after rescan**: When `auto_download_after_rescan` is enabled, missing episodes are automatically queued for download after each scheduled rescan
|
||||||
|
- **Day-of-week UI**: New day-of-week pill toggles (Mon–Sun) in the Settings → Scheduler section
|
||||||
|
- **Live config reload**: POST `/api/scheduler/config` reschedules the APScheduler job without restarting the application
|
||||||
|
- **Enriched API response**: GET/POST `/api/scheduler/config` now returns `{"success", "config", "status"}` envelope including `next_run`, `last_run`, and `scan_in_progress`
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- Scheduler API response format: previously returned flat config; now returns `{"success": true, "config": {...}, "status": {...}}`
|
||||||
|
- `reload_config()` is now a synchronous method accepting a `SchedulerConfig` argument (previously async, no arguments)
|
||||||
|
- Dependencies: added `APScheduler>=3.10.4` to `requirements.txt`
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- **Series Visibility**: Fixed issue where series added to the database weren't appearing in the API/UI
|
||||||
|
- Series are now loaded from database into SeriesApp's in-memory cache on startup
|
||||||
|
- Added `_load_series_from_db()` call after initial database sync in FastAPI lifespan
|
||||||
|
- **Episode Tracking**: Fixed missing episodes not being saved to database when adding new series
|
||||||
|
- Missing episodes are now persisted to the `episodes` table after the targeted scan
|
||||||
|
- Episodes are properly synced during rescan operations (added/removed based on filesystem state)
|
||||||
|
- **Database Synchronization**: Improved data consistency between database and in-memory cache
|
||||||
|
- Rescan process properly updates episodes: adds new missing episodes, removes downloaded ones
|
||||||
|
- All series operations now maintain database and cache synchronization
|
||||||
|
|
||||||
|
### Technical Details
|
||||||
|
|
||||||
|
- Modified `src/server/fastapi_app.py` to load series from database after sync
|
||||||
|
- Modified `src/server/api/anime.py` to save scanned episodes to database
|
||||||
|
- Episodes table properly tracks missing episodes with automatic cleanup
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Sections for Each Release
|
||||||
|
|
||||||
|
```markdown
|
||||||
|
## [Version] - YYYY-MM-DD
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- New features
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- Changes to existing functionality
|
||||||
|
|
||||||
|
### Deprecated
|
||||||
|
|
||||||
|
- Features that will be removed in future versions
|
||||||
|
|
||||||
|
### Removed
|
||||||
|
|
||||||
|
- Features removed in this release
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- Bug fixes
|
||||||
|
|
||||||
|
### Security
|
||||||
|
|
||||||
|
- Security-related fixes
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Unreleased
|
||||||
|
|
||||||
|
_Changes that are in development but not yet released._
|
||||||
|
|
||||||
|
### Added
|
||||||
|
|
||||||
|
- **Comprehensive Test Suite**: Created 1,070+ tests across 4 priority tiers
|
||||||
|
- **TIER 1 (Critical)**: 159 tests - Scheduler, NFO batch operations, download queue, persistence
|
||||||
|
- **TIER 2 (High Priority)**: 390 tests - JavaScript framework, dark mode, setup page, settings modal, WebSocket, queue UI
|
||||||
|
- **TIER 3 (Medium Priority)**: 156 tests - WebSocket load, concurrent operations, retry logic, NFO performance, series parsing, TMDB integration
|
||||||
|
- **TIER 4 (Polish)**: 426 tests - Internationalization (89), user preferences (68), accessibility (250+), media server compatibility (19)
|
||||||
|
- **Frontend Testing Infrastructure**: Vitest for unit tests, Playwright for E2E tests
|
||||||
|
- **Security Test Coverage**: Complete testing for authentication, authorization, CSRF, XSS, SQL injection
|
||||||
|
- **Performance Validation**: WebSocket load (200+ concurrent clients), batch operations, concurrent access
|
||||||
|
- **Accessibility Tests**: WCAG 2.1 AA compliance testing (keyboard navigation, ARIA labels, screen readers)
|
||||||
|
- **Media Server Compatibility**: NFO format validation for Kodi, Plex, Jellyfin, and Emby
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- Updated testing documentation (TESTING_COMPLETE.md, instructions.md) to reflect 100% completion of all test tiers
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- **Enhanced Anime Add Flow**: Automatic database persistence, targeted episode scanning, and folder creation with sanitized names
|
||||||
|
- Filesystem utility module (`src/server/utils/filesystem.py`) with `sanitize_folder_name()`, `is_safe_path()`, and `create_safe_folder()` functions
|
||||||
|
- `Serie.sanitized_folder` property for generating filesystem-safe folder names from display names
|
||||||
|
- `SerieScanner.scan_single_series()` method for targeted scanning of individual anime without full library rescan
|
||||||
|
- Add series API response now includes `missing_episodes` list and `total_missing` count
|
||||||
|
- Database transaction support with `@transactional` decorator and `atomic()` context manager
|
||||||
|
- Transaction propagation modes (REQUIRED, REQUIRES_NEW, NESTED) for fine-grained control
|
||||||
|
- Savepoint support for nested transactions with partial rollback capability
|
||||||
|
- `TransactionManager` helper class for manual transaction control
|
||||||
|
- Bulk operations: `bulk_mark_downloaded`, `bulk_delete`, `clear_all` for batch processing
|
||||||
|
- `rotate_session` atomic operation for secure session rotation
|
||||||
|
- Transaction utilities: `is_session_in_transaction`, `get_session_transaction_depth`
|
||||||
|
- `get_transactional_session` for sessions without auto-commit
|
||||||
|
|
||||||
|
### Changed
|
||||||
|
|
||||||
|
- `QueueRepository.save_item()` now uses atomic transactions for data consistency
|
||||||
|
- `QueueRepository.clear_all()` now uses atomic transactions for all-or-nothing behavior
|
||||||
|
- Service layer documentation updated to reflect transaction-aware design
|
||||||
|
|
||||||
|
### Fixed
|
||||||
|
|
||||||
|
- Scan status indicator now correctly shows running state after page reload during active scan
|
||||||
|
- Improved reliability of process status updates in the UI header
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Version History
|
||||||
|
|
||||||
|
_To be documented as versions are released._
|
||||||
370
docs/CONFIGURATION.md
Normal file
370
docs/CONFIGURATION.md
Normal file
@@ -0,0 +1,370 @@
|
|||||||
|
# Configuration Reference
|
||||||
|
|
||||||
|
## Document Purpose
|
||||||
|
|
||||||
|
This document provides a comprehensive reference for all configuration options in the Aniworld application.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 1. Configuration Overview
|
||||||
|
|
||||||
|
### Configuration Sources
|
||||||
|
|
||||||
|
Aniworld uses a layered configuration system with **explicit precedence rules**:
|
||||||
|
|
||||||
|
1. **Environment Variables** (highest priority) - Takes precedence over all other sources
|
||||||
|
2. **`.env` file** in project root - Loaded as environment variables
|
||||||
|
3. **`data/config.json`** file - Persistent file-based configuration
|
||||||
|
4. **Default values** (lowest priority) - Built-in fallback values
|
||||||
|
|
||||||
|
### Precedence Rules
|
||||||
|
|
||||||
|
**Critical Principle**: `ENV VARS > config.json > defaults`
|
||||||
|
|
||||||
|
- **Environment variables always win**: If a value is set via environment variable, it will NOT be overridden by config.json
|
||||||
|
- **config.json as fallback**: If an ENV var is not set (or is empty/default), the value from config.json is used
|
||||||
|
- **Defaults as last resort**: Built-in default values are used only if neither ENV var nor config.json provide a value
|
||||||
|
|
||||||
|
### Loading Mechanism
|
||||||
|
|
||||||
|
Configuration is loaded at application startup in `src/server/fastapi_app.py`:
|
||||||
|
|
||||||
|
1. **Pydantic Settings** loads ENV vars and .env file with defaults
|
||||||
|
2. **config.json** is loaded via `ConfigService`
|
||||||
|
3. **Selective sync**: config.json values sync to settings **only if** ENV var not set
|
||||||
|
4. **Runtime access**: Code uses `settings` object (which has final merged values)
|
||||||
|
|
||||||
|
**Example**:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# If ENV var is set:
|
||||||
|
ANIME_DIRECTORY=/env/path # This takes precedence
|
||||||
|
|
||||||
|
# config.json has:
|
||||||
|
{"other": {"anime_directory": "/config/path"}} # This is ignored
|
||||||
|
|
||||||
|
# Result: settings.anime_directory = "/env/path"
|
||||||
|
```
|
||||||
|
|
||||||
|
**Source**: [src/config/settings.py](../src/config/settings.py#L1-L96), [src/server/fastapi_app.py](../src/server/fastapi_app.py#L139-L185)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 2. Environment Variables
|
||||||
|
|
||||||
|
### Authentication Settings
|
||||||
|
|
||||||
|
| Variable | Type | Default | Description |
|
||||||
|
| ----------------------- | ------ | ---------------- | ------------------------------------------------------------------- |
|
||||||
|
| `JWT_SECRET_KEY` | string | (random) | Secret key for JWT token signing. Auto-generated if not set. |
|
||||||
|
| `PASSWORD_SALT` | string | `"default-salt"` | Salt for password hashing. |
|
||||||
|
| `MASTER_PASSWORD_HASH` | string | (none) | Pre-hashed master password. Loaded from config.json if not set. |
|
||||||
|
| `MASTER_PASSWORD` | string | (none) | **DEVELOPMENT ONLY** - Plaintext password. Never use in production. |
|
||||||
|
| `SESSION_TIMEOUT_HOURS` | int | `24` | JWT token expiry time in hours. |
|
||||||
|
|
||||||
|
Source: [src/config/settings.py](../src/config/settings.py#L13-L42)
|
||||||
|
|
||||||
|
### Server Settings
|
||||||
|
|
||||||
|
| Variable | Type | Default | Description |
|
||||||
|
| ----------------- | ------ | -------------------------------- | --------------------------------------------------------------------- |
|
||||||
|
| `ANIME_DIRECTORY` | string | `""` | Path to anime library directory. |
|
||||||
|
| `LOG_LEVEL` | string | `"INFO"` | Logging level: DEBUG, INFO, WARNING, ERROR, CRITICAL. |
|
||||||
|
| `DATABASE_URL` | string | `"sqlite:///./data/aniworld.db"` | Database connection string. |
|
||||||
|
| `CORS_ORIGINS` | string | `"http://localhost:3000"` | Comma-separated allowed CORS origins. Use `*` for localhost defaults. |
|
||||||
|
| `API_RATE_LIMIT` | int | `100` | Maximum API requests per minute. |
|
||||||
|
|
||||||
|
Source: [src/config/settings.py](../src/config/settings.py#L43-L68)
|
||||||
|
|
||||||
|
### Provider Settings
|
||||||
|
|
||||||
|
| Variable | Type | Default | Description |
|
||||||
|
| ------------------ | ------ | --------------- | --------------------------------------------- |
|
||||||
|
| `DEFAULT_PROVIDER` | string | `"aniworld.to"` | Default anime provider. |
|
||||||
|
| `PROVIDER_TIMEOUT` | int | `30` | HTTP timeout for provider requests (seconds). |
|
||||||
|
| `RETRY_ATTEMPTS` | int | `3` | Number of retry attempts for failed requests. |
|
||||||
|
|
||||||
|
Source: [src/config/settings.py](../src/config/settings.py#L69-L79)
|
||||||
|
|
||||||
|
### NFO Settings
|
||||||
|
|
||||||
|
| Variable | Type | Default | Description |
|
||||||
|
| --------------------- | ------ | -------- | -------------------------------------------------- |
|
||||||
|
| `TMDB_API_KEY` | string | `""` | The Movie Database (TMDB) API key for metadata. |
|
||||||
|
| `NFO_AUTO_CREATE` | bool | `true` | Automatically create NFO files during downloads. |
|
||||||
|
| `NFO_UPDATE_ON_SCAN` | bool | `false` | Update existing NFO files when scanning library. |
|
||||||
|
| `NFO_DOWNLOAD_POSTER` | bool | `true` | Download poster images along with NFO files. |
|
||||||
|
| `NFO_DOWNLOAD_LOGO` | bool | `false` | Download logo images along with NFO files. |
|
||||||
|
| `NFO_DOWNLOAD_FANART` | bool | `false` | Download fanart images along with NFO files. |
|
||||||
|
| `NFO_IMAGE_SIZE` | string | `"w500"` | Image size for TMDB images (w500, w780, original). |
|
||||||
|
|
||||||
|
Source: [src/server/models/config.py](../src/server/models/config.py#L109-L132)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 3. Configuration File (config.json)
|
||||||
|
|
||||||
|
Location: `data/config.json`
|
||||||
|
|
||||||
|
### File Structure
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"name": "Aniworld",
|
||||||
|
"data_dir": "data",
|
||||||
|
"scheduler": {
|
||||||
|
"enabled": true,
|
||||||
|
"interval_minutes": 60,
|
||||||
|
"schedule_time": "03:00",
|
||||||
|
"schedule_days": ["mon", "tue", "wed", "thu", "fri", "sat", "sun"],
|
||||||
|
"auto_download_after_rescan": false
|
||||||
|
},
|
||||||
|
"logging": {
|
||||||
|
"level": "INFO",
|
||||||
|
"file": null,
|
||||||
|
"max_bytes": null,
|
||||||
|
"backup_count": 3
|
||||||
|
},
|
||||||
|
"backup": {
|
||||||
|
"enabled": false,
|
||||||
|
"path": "data/backups",
|
||||||
|
"keep_days": 30
|
||||||
|
},
|
||||||
|
"nfo": {
|
||||||
|
"tmdb_api_key": "",
|
||||||
|
"auto_create": true,
|
||||||
|
"update_on_scan": false,
|
||||||
|
"download_poster": true,
|
||||||
|
"download_logo": false,
|
||||||
|
"download_fanart": false,
|
||||||
|
"image_size": "w500"
|
||||||
|
},
|
||||||
|
"other": {
|
||||||
|
"master_password_hash": "$pbkdf2-sha256$...",
|
||||||
|
"anime_directory": "/path/to/anime"
|
||||||
|
},
|
||||||
|
"version": "1.0.0"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
Source: [data/config.json](../data/config.json)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 4. Configuration Sections
|
||||||
|
|
||||||
|
### 4.1 General Settings
|
||||||
|
|
||||||
|
| Field | Type | Default | Description |
|
||||||
|
| ---------- | ------ | ------------ | ------------------------------ |
|
||||||
|
| `name` | string | `"Aniworld"` | Application name. |
|
||||||
|
| `data_dir` | string | `"data"` | Base directory for data files. |
|
||||||
|
|
||||||
|
Source: [src/server/models/config.py](../src/server/models/config.py#L62-L66)
|
||||||
|
|
||||||
|
### 4.2 Scheduler Settings
|
||||||
|
|
||||||
|
Controls automatic cron-based library rescanning (powered by APScheduler).
|
||||||
|
|
||||||
|
| Field | Type | Default | Description |
|
||||||
|
| -------------------------------------- | ------------ | --------------------------------------------- | -------------------------------------------------------------------- |
|
||||||
|
| `scheduler.enabled` | bool | `true` | Enable/disable automatic scans. |
|
||||||
|
| `scheduler.interval_minutes` | int | `60` | Legacy field kept for backward compatibility. Minimum: 1. |
|
||||||
|
| `scheduler.schedule_time` | string | `"03:00"` | Daily run time in 24-h `HH:MM` format. |
|
||||||
|
| `scheduler.schedule_days` | list[string] | `["mon","tue","wed","thu","fri","sat","sun"]` | Days of the week to run the scan. Empty list disables the cron job. |
|
||||||
|
| `scheduler.auto_download_after_rescan` | bool | `false` | Automatically queue missing episodes for download after each rescan. |
|
||||||
|
|
||||||
|
Valid day abbreviations: `mon`, `tue`, `wed`, `thu`, `fri`, `sat`, `sun`.
|
||||||
|
|
||||||
|
Source: [src/server/models/config.py](../src/server/models/config.py#L5-L12)
|
||||||
|
|
||||||
|
### 4.3 Logging Settings
|
||||||
|
|
||||||
|
| Field | Type | Default | Description |
|
||||||
|
| ---------------------- | ------ | -------- | ------------------------------------------------- |
|
||||||
|
| `logging.level` | string | `"INFO"` | Log level: DEBUG, INFO, WARNING, ERROR, CRITICAL. |
|
||||||
|
| `logging.file` | string | `null` | Optional log file path. |
|
||||||
|
| `logging.max_bytes` | int | `null` | Maximum log file size for rotation. |
|
||||||
|
| `logging.backup_count` | int | `3` | Number of rotated log files to keep. |
|
||||||
|
|
||||||
|
Source: [src/server/models/config.py](../src/server/models/config.py#L27-L46)
|
||||||
|
|
||||||
|
### 4.4 Backup Settings
|
||||||
|
|
||||||
|
| Field | Type | Default | Description |
|
||||||
|
| ------------------ | ------ | ---------------- | -------------------------------- |
|
||||||
|
| `backup.enabled` | bool | `false` | Enable automatic config backups. |
|
||||||
|
| `backup.path` | string | `"data/backups"` | Directory for backup files. |
|
||||||
|
| `backup.keep_days` | int | `30` | Days to retain backups. |
|
||||||
|
|
||||||
|
Source: [src/server/models/config.py](../src/server/models/config.py#L15-L24)
|
||||||
|
|
||||||
|
### 4.5 NFO Settings
|
||||||
|
|
||||||
|
| Field | Type | Default | Description |
|
||||||
|
| --------------------- | ------ | -------- | ------------------------------------------------------------- |
|
||||||
|
| `nfo.tmdb_api_key` | string | `""` | The Movie Database (TMDB) API key for fetching metadata. |
|
||||||
|
| `nfo.auto_create` | bool | `true` | Automatically create NFO files when downloading episodes. |
|
||||||
|
| `nfo.update_on_scan` | bool | `false` | Update existing NFO files during library scan operations. |
|
||||||
|
| `nfo.download_poster` | bool | `true` | Download poster images (poster.jpg) along with NFO files. |
|
||||||
|
| `nfo.download_logo` | bool | `false` | Download logo images (logo.png) along with NFO files. |
|
||||||
|
| `nfo.download_fanart` | bool | `false` | Download fanart images (fanart.jpg) along with NFO files. |
|
||||||
|
| `nfo.image_size` | string | `"w500"` | TMDB image size: `w500` (recommended), `w780`, or `original`. |
|
||||||
|
|
||||||
|
**Notes:**
|
||||||
|
|
||||||
|
- Obtain a TMDB API key from https://www.themoviedb.org/settings/api
|
||||||
|
- `auto_create` creates NFO files during the download process
|
||||||
|
- `update_on_scan` refreshes metadata when scanning existing anime
|
||||||
|
- Image downloads require valid `tmdb_api_key`
|
||||||
|
- Larger image sizes (`w780`, `original`) consume more storage space
|
||||||
|
|
||||||
|
Source: [src/server/models/config.py](../src/server/models/config.py#L109-L132)
|
||||||
|
|
||||||
|
### 4.6 Other Settings (Dynamic)
|
||||||
|
|
||||||
|
The `other` field stores arbitrary settings.
|
||||||
|
|
||||||
|
| Key | Type | Description |
|
||||||
|
| ---------------------- | ------ | --------------------------------------- |
|
||||||
|
| `master_password_hash` | string | Hashed master password (pbkdf2-sha256). |
|
||||||
|
| `anime_directory` | string | Path to anime library. |
|
||||||
|
| `advanced` | object | Advanced configuration options. |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 5. Configuration Precedence
|
||||||
|
|
||||||
|
Settings are resolved in this order (first match wins):
|
||||||
|
|
||||||
|
1. Environment variable (e.g., `ANIME_DIRECTORY`)
|
||||||
|
2. `.env` file in project root
|
||||||
|
3. `data/config.json` (for dynamic settings)
|
||||||
|
4. Code defaults in `Settings` class
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 6. Validation Rules
|
||||||
|
|
||||||
|
### Password Requirements
|
||||||
|
|
||||||
|
Master password must meet all criteria:
|
||||||
|
|
||||||
|
- Minimum 8 characters
|
||||||
|
- At least one uppercase letter
|
||||||
|
- At least one lowercase letter
|
||||||
|
- At least one digit
|
||||||
|
- At least one special character
|
||||||
|
|
||||||
|
Source: [src/server/services/auth_service.py](../src/server/services/auth_service.py#L97-L125)
|
||||||
|
|
||||||
|
### Logging Level Validation
|
||||||
|
|
||||||
|
Must be one of: `DEBUG`, `INFO`, `WARNING`, `ERROR`, `CRITICAL`
|
||||||
|
|
||||||
|
Source: [src/server/models/config.py](../src/server/models/config.py#L43-L47)
|
||||||
|
|
||||||
|
### Backup Path Validation
|
||||||
|
|
||||||
|
If `backup.enabled` is `true`, `backup.path` must be set.
|
||||||
|
|
||||||
|
Source: [src/server/models/config.py](../src/server/models/config.py#L87-L91)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 7. Example Configurations
|
||||||
|
|
||||||
|
### Minimal Development Setup
|
||||||
|
|
||||||
|
**.env file:**
|
||||||
|
|
||||||
|
```
|
||||||
|
LOG_LEVEL=DEBUG
|
||||||
|
ANIME_DIRECTORY=/home/user/anime
|
||||||
|
```
|
||||||
|
|
||||||
|
### Production Setup
|
||||||
|
|
||||||
|
**.env file:**
|
||||||
|
|
||||||
|
```
|
||||||
|
JWT_SECRET_KEY=your-secure-random-key-here
|
||||||
|
DATABASE_URL=postgresql+asyncpg://user:pass@localhost/aniworld
|
||||||
|
LOG_LEVEL=WARNING
|
||||||
|
CORS_ORIGINS=https://your-domain.com
|
||||||
|
API_RATE_LIMIT=60
|
||||||
|
```
|
||||||
|
|
||||||
|
### Docker Setup
|
||||||
|
|
||||||
|
```yaml
|
||||||
|
# docker-compose.yml
|
||||||
|
environment:
|
||||||
|
- JWT_SECRET_KEY=${JWT_SECRET_KEY}
|
||||||
|
- DATABASE_URL=sqlite:///./data/aniworld.db
|
||||||
|
- ANIME_DIRECTORY=/media/anime
|
||||||
|
- LOG_LEVEL=INFO
|
||||||
|
volumes:
|
||||||
|
- ./data:/app/data
|
||||||
|
- /media/anime:/media/anime:ro
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 8. Configuration Backup Management
|
||||||
|
|
||||||
|
### Automatic Backups
|
||||||
|
|
||||||
|
Backups are created automatically before config changes when `backup.enabled` is `true`.
|
||||||
|
|
||||||
|
Location: `data/config_backups/`
|
||||||
|
|
||||||
|
Naming: `config_backup_YYYYMMDD_HHMMSS.json`
|
||||||
|
|
||||||
|
### Manual Backup via API
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Create backup
|
||||||
|
curl -X POST http://localhost:8000/api/config/backups \
|
||||||
|
-H "Authorization: Bearer $TOKEN"
|
||||||
|
|
||||||
|
# List backups
|
||||||
|
curl http://localhost:8000/api/config/backups \
|
||||||
|
-H "Authorization: Bearer $TOKEN"
|
||||||
|
|
||||||
|
# Restore backup
|
||||||
|
curl -X POST http://localhost:8000/api/config/backups/config_backup_20251213.json/restore \
|
||||||
|
-H "Authorization: Bearer $TOKEN"
|
||||||
|
```
|
||||||
|
|
||||||
|
Source: [src/server/api/config.py](../src/server/api/config.py#L67-L142)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 9. Troubleshooting
|
||||||
|
|
||||||
|
### Configuration Not Loading
|
||||||
|
|
||||||
|
1. Check file permissions on `data/config.json`
|
||||||
|
2. Verify JSON syntax with a validator
|
||||||
|
3. Check logs for Pydantic validation errors
|
||||||
|
|
||||||
|
### Environment Variable Not Working
|
||||||
|
|
||||||
|
1. Ensure variable name matches exactly (case-sensitive)
|
||||||
|
2. Check `.env` file location (project root)
|
||||||
|
3. Restart application after changes
|
||||||
|
|
||||||
|
### Master Password Issues
|
||||||
|
|
||||||
|
1. Password hash is stored in `config.json` under `other.master_password_hash`
|
||||||
|
2. Delete this field to reset (requires re-setup)
|
||||||
|
3. Check hash format starts with `$pbkdf2-sha256$`
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 10. Related Documentation
|
||||||
|
|
||||||
|
- [API.md](API.md) - Configuration API endpoints
|
||||||
|
- [DEVELOPMENT.md](DEVELOPMENT.md) - Development environment setup
|
||||||
|
- [ARCHITECTURE.md](ARCHITECTURE.md) - Configuration service architecture
|
||||||
450
docs/DATABASE.md
Normal file
450
docs/DATABASE.md
Normal file
@@ -0,0 +1,450 @@
|
|||||||
|
# Database Documentation
|
||||||
|
|
||||||
|
## Document Purpose
|
||||||
|
|
||||||
|
This document describes the database schema, models, and data layer of the Aniworld application.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 1. Database Overview
|
||||||
|
|
||||||
|
### Technology
|
||||||
|
|
||||||
|
- **Database Engine**: SQLite 3 (default), PostgreSQL supported
|
||||||
|
- **ORM**: SQLAlchemy 2.0 with async support (aiosqlite)
|
||||||
|
- **Location**: `data/aniworld.db` (configurable via `DATABASE_URL`)
|
||||||
|
|
||||||
|
Source: [src/config/settings.py](../src/config/settings.py#L53-L55)
|
||||||
|
|
||||||
|
### Connection Configuration
|
||||||
|
|
||||||
|
```python
|
||||||
|
# Default connection string
|
||||||
|
DATABASE_URL = "sqlite+aiosqlite:///./data/aniworld.db"
|
||||||
|
|
||||||
|
# PostgreSQL alternative
|
||||||
|
DATABASE_URL = "postgresql+asyncpg://user:pass@localhost/aniworld"
|
||||||
|
```
|
||||||
|
|
||||||
|
Source: [src/server/database/connection.py](../src/server/database/connection.py)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 2. Entity Relationship Diagram
|
||||||
|
|
||||||
|
```
|
||||||
|
+---------------------+ +-------------------+ +-------------------+ +------------------------+
|
||||||
|
| system_settings | | anime_series | | episodes | | download_queue_item |
|
||||||
|
+---------------------+ +-------------------+ +-------------------+ +------------------------+
|
||||||
|
| id (PK) | | id (PK) |<--+ | id (PK) | +-->| id (PK, VARCHAR) |
|
||||||
|
| initial_scan_... | | key (UNIQUE) | | | series_id (FK)----+---+ | series_id (FK)---------+
|
||||||
|
| initial_nfo_scan... | | name | +---| | | status |
|
||||||
|
| initial_media_... | | site | | season | | priority |
|
||||||
|
| last_scan_timestamp | | folder | | episode_number | | season |
|
||||||
|
| created_at | | created_at | | title | | episode |
|
||||||
|
| updated_at | | updated_at | | file_path | | progress_percent |
|
||||||
|
+---------------------+ +-------------------+ | is_downloaded | | error_message |
|
||||||
|
| created_at | | retry_count |
|
||||||
|
| updated_at | | added_at |
|
||||||
|
+-------------------+ | started_at |
|
||||||
|
| completed_at |
|
||||||
|
| created_at |
|
||||||
|
| updated_at |
|
||||||
|
+------------------------+
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 3. Table Schemas
|
||||||
|
|
||||||
|
### 3.1 system_settings
|
||||||
|
|
||||||
|
Stores application-wide system settings and initialization state.
|
||||||
|
|
||||||
|
| Column | Type | Constraints | Description |
|
||||||
|
| ------------------------------ | -------- | -------------------------- | --------------------------------------------- |
|
||||||
|
| `id` | INTEGER | PRIMARY KEY, AUTOINCREMENT | Internal database ID (only one row) |
|
||||||
|
| `initial_scan_completed` | BOOLEAN | NOT NULL, DEFAULT FALSE | Whether initial anime folder scan is complete |
|
||||||
|
| `initial_nfo_scan_completed` | BOOLEAN | NOT NULL, DEFAULT FALSE | Whether initial NFO scan is complete |
|
||||||
|
| `initial_media_scan_completed` | BOOLEAN | NOT NULL, DEFAULT FALSE | Whether initial media scan is complete |
|
||||||
|
| `last_scan_timestamp` | DATETIME | NULLABLE | Timestamp of last completed scan |
|
||||||
|
| `created_at` | DATETIME | NOT NULL, DEFAULT NOW | Record creation timestamp |
|
||||||
|
| `updated_at` | DATETIME | NOT NULL, ON UPDATE NOW | Last update timestamp |
|
||||||
|
|
||||||
|
**Purpose:**
|
||||||
|
|
||||||
|
This table tracks the initialization status of the application to ensure that expensive one-time setup operations (like scanning the entire anime directory) only run on the first startup, not on every restart.
|
||||||
|
|
||||||
|
- Only one row exists in this table
|
||||||
|
- The `initial_scan_completed` flag prevents redundant full directory scans on each startup
|
||||||
|
- The NFO and media scan flags similarly track completion of those setup tasks
|
||||||
|
|
||||||
|
Source: [src/server/database/models.py](../src/server/database/models.py), [src/server/database/system_settings_service.py](../src/server/database/system_settings_service.py)
|
||||||
|
|
||||||
|
### 3.2 anime_series
|
||||||
|
|
||||||
|
Stores anime series metadata.
|
||||||
|
|
||||||
|
| Column | Type | Constraints | Description |
|
||||||
|
| ------------ | ------------- | -------------------------- | ------------------------------------------------------- |
|
||||||
|
| `id` | INTEGER | PRIMARY KEY, AUTOINCREMENT | Internal database ID |
|
||||||
|
| `key` | VARCHAR(255) | UNIQUE, NOT NULL, INDEX | **Primary identifier** - provider-assigned URL-safe key |
|
||||||
|
| `name` | VARCHAR(500) | NOT NULL, INDEX | Display name of the series |
|
||||||
|
| `site` | VARCHAR(500) | NOT NULL | Provider site URL |
|
||||||
|
| `folder` | VARCHAR(1000) | NOT NULL | Filesystem folder name (metadata only) |
|
||||||
|
| `created_at` | DATETIME | NOT NULL, DEFAULT NOW | Record creation timestamp |
|
||||||
|
| `updated_at` | DATETIME | NOT NULL, ON UPDATE NOW | Last update timestamp |
|
||||||
|
|
||||||
|
**Identifier Convention:**
|
||||||
|
|
||||||
|
- `key` is the **primary identifier** for all operations (e.g., `"attack-on-titan"`)
|
||||||
|
- `folder` is **metadata only** for filesystem operations (e.g., `"Attack on Titan (2013)"`)
|
||||||
|
- `id` is used only for database relationships
|
||||||
|
|
||||||
|
Source: [src/server/database/models.py](../src/server/database/models.py#L23-L87)
|
||||||
|
|
||||||
|
### 3.3 episodes
|
||||||
|
|
||||||
|
Stores **missing episodes** that need to be downloaded. Episodes are automatically managed during scans:
|
||||||
|
|
||||||
|
- New missing episodes are added to the database
|
||||||
|
- Episodes that are no longer missing (files now exist) are removed from the database
|
||||||
|
- When an episode is downloaded, it can be marked with `is_downloaded=True` or removed from tracking
|
||||||
|
|
||||||
|
| Column | Type | Constraints | Description |
|
||||||
|
| ---------------- | ------------- | ---------------------------- | ----------------------------- |
|
||||||
|
| `id` | INTEGER | PRIMARY KEY, AUTOINCREMENT | Internal database ID |
|
||||||
|
| `series_id` | INTEGER | FOREIGN KEY, NOT NULL, INDEX | Reference to anime_series.id |
|
||||||
|
| `season` | INTEGER | NOT NULL | Season number (1-based) |
|
||||||
|
| `episode_number` | INTEGER | NOT NULL | Episode number within season |
|
||||||
|
| `title` | VARCHAR(500) | NULLABLE | Episode title if known |
|
||||||
|
| `file_path` | VARCHAR(1000) | NULLABLE | Local file path if downloaded |
|
||||||
|
| `is_downloaded` | BOOLEAN | NOT NULL, DEFAULT FALSE | Download status flag |
|
||||||
|
| `created_at` | DATETIME | NOT NULL, DEFAULT NOW | Record creation timestamp |
|
||||||
|
| `updated_at` | DATETIME | NOT NULL, ON UPDATE NOW | Last update timestamp |
|
||||||
|
|
||||||
|
**Foreign Key:**
|
||||||
|
|
||||||
|
- `series_id` -> `anime_series.id` (ON DELETE CASCADE)
|
||||||
|
|
||||||
|
Source: [src/server/database/models.py](../src/server/database/models.py#L122-L181)
|
||||||
|
|
||||||
|
### 3.4 download_queue_item
|
||||||
|
|
||||||
|
Stores download queue items with status tracking.
|
||||||
|
|
||||||
|
| Column | Type | Constraints | Description |
|
||||||
|
| ------------------ | ------------- | --------------------------- | ------------------------------ |
|
||||||
|
| `id` | VARCHAR(36) | PRIMARY KEY | UUID identifier |
|
||||||
|
| `series_id` | INTEGER | FOREIGN KEY, NOT NULL | Reference to anime_series.id |
|
||||||
|
| `season` | INTEGER | NOT NULL | Season number |
|
||||||
|
| `episode` | INTEGER | NOT NULL | Episode number |
|
||||||
|
| `status` | VARCHAR(20) | NOT NULL, DEFAULT 'pending' | Download status |
|
||||||
|
| `priority` | VARCHAR(10) | NOT NULL, DEFAULT 'NORMAL' | Queue priority |
|
||||||
|
| `progress_percent` | FLOAT | NULLABLE | Download progress (0-100) |
|
||||||
|
| `error_message` | TEXT | NULLABLE | Error description if failed |
|
||||||
|
| `retry_count` | INTEGER | NOT NULL, DEFAULT 0 | Number of retry attempts |
|
||||||
|
| `source_url` | VARCHAR(2000) | NULLABLE | Download source URL |
|
||||||
|
| `added_at` | DATETIME | NOT NULL, DEFAULT NOW | When added to queue |
|
||||||
|
| `started_at` | DATETIME | NULLABLE | When download started |
|
||||||
|
| `completed_at` | DATETIME | NULLABLE | When download completed/failed |
|
||||||
|
| `created_at` | DATETIME | NOT NULL, DEFAULT NOW | Record creation timestamp |
|
||||||
|
| `updated_at` | DATETIME | NOT NULL, ON UPDATE NOW | Last update timestamp |
|
||||||
|
|
||||||
|
**Status Values:** `pending`, `downloading`, `paused`, `completed`, `failed`, `cancelled`
|
||||||
|
|
||||||
|
**Priority Values:** `LOW`, `NORMAL`, `HIGH`
|
||||||
|
|
||||||
|
**Foreign Key:**
|
||||||
|
|
||||||
|
- `series_id` -> `anime_series.id` (ON DELETE CASCADE)
|
||||||
|
|
||||||
|
Source: [src/server/database/models.py](../src/server/database/models.py#L200-L300)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 4. Indexes
|
||||||
|
|
||||||
|
| Table | Index Name | Columns | Purpose |
|
||||||
|
| --------------------- | ----------------------- | ----------- | --------------------------------- |
|
||||||
|
| `system_settings` | N/A (single row) | N/A | Only one row, no indexes needed |
|
||||||
|
| `anime_series` | `ix_anime_series_key` | `key` | Fast lookup by primary identifier |
|
||||||
|
| `anime_series` | `ix_anime_series_name` | `name` | Search by name |
|
||||||
|
| `episodes` | `ix_episodes_series_id` | `series_id` | Join with series |
|
||||||
|
| `download_queue_item` | `ix_download_series_id` | `series_id` | Filter by series |
|
||||||
|
| `download_queue_item` | `ix_download_status` | `status` | Filter by status |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 5. Model Layer
|
||||||
|
|
||||||
|
### 5.1 SQLAlchemy ORM Models
|
||||||
|
|
||||||
|
```python
|
||||||
|
# src/server/database/models.py
|
||||||
|
|
||||||
|
class AnimeSeries(Base, TimestampMixin):
|
||||||
|
__tablename__ = "anime_series"
|
||||||
|
|
||||||
|
id: Mapped[int] = mapped_column(Integer, primary_key=True)
|
||||||
|
key: Mapped[str] = mapped_column(String(255), unique=True, index=True)
|
||||||
|
name: Mapped[str] = mapped_column(String(500), index=True)
|
||||||
|
site: Mapped[str] = mapped_column(String(500))
|
||||||
|
folder: Mapped[str] = mapped_column(String(1000))
|
||||||
|
|
||||||
|
episodes: Mapped[List["Episode"]] = relationship(
|
||||||
|
"Episode", back_populates="series", cascade="all, delete-orphan"
|
||||||
|
)
|
||||||
|
```
|
||||||
|
|
||||||
|
Source: [src/server/database/models.py](../src/server/database/models.py#L23-L87)
|
||||||
|
|
||||||
|
### 5.2 Pydantic API Models
|
||||||
|
|
||||||
|
```python
|
||||||
|
# src/server/models/download.py
|
||||||
|
|
||||||
|
class DownloadItem(BaseModel):
|
||||||
|
id: str
|
||||||
|
serie_id: str # Maps to anime_series.key
|
||||||
|
serie_folder: str # Metadata only
|
||||||
|
serie_name: str
|
||||||
|
episode: EpisodeIdentifier
|
||||||
|
status: DownloadStatus
|
||||||
|
priority: DownloadPriority
|
||||||
|
```
|
||||||
|
|
||||||
|
Source: [src/server/models/download.py](../src/server/models/download.py#L63-L118)
|
||||||
|
|
||||||
|
### 5.3 Model Mapping
|
||||||
|
|
||||||
|
| API Field | Database Column | Notes |
|
||||||
|
| -------------- | --------------------- | ------------------ |
|
||||||
|
| `serie_id` | `anime_series.key` | Primary identifier |
|
||||||
|
| `serie_folder` | `anime_series.folder` | Metadata only |
|
||||||
|
| `serie_name` | `anime_series.name` | Display name |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 6. Transaction Support
|
||||||
|
|
||||||
|
### 6.1 Overview
|
||||||
|
|
||||||
|
The database layer provides comprehensive transaction support to ensure data consistency across compound operations. All write operations can be wrapped in explicit transactions.
|
||||||
|
|
||||||
|
Source: [src/server/database/transaction.py](../src/server/database/transaction.py)
|
||||||
|
|
||||||
|
### 6.2 Transaction Utilities
|
||||||
|
|
||||||
|
| Component | Type | Description |
|
||||||
|
| ------------------------- | ----------------- | ---------------------------------------- |
|
||||||
|
| `@transactional` | Decorator | Wraps function in transaction boundary |
|
||||||
|
| `atomic()` | Async context mgr | Provides atomic operation block |
|
||||||
|
| `atomic_sync()` | Sync context mgr | Sync version of atomic() |
|
||||||
|
| `TransactionContext` | Class | Explicit sync transaction control |
|
||||||
|
| `AsyncTransactionContext` | Class | Explicit async transaction control |
|
||||||
|
| `TransactionManager` | Class | Helper for manual transaction management |
|
||||||
|
|
||||||
|
### 6.3 Transaction Propagation Modes
|
||||||
|
|
||||||
|
| Mode | Behavior |
|
||||||
|
| -------------- | ------------------------------------------------ |
|
||||||
|
| `REQUIRED` | Use existing transaction or create new (default) |
|
||||||
|
| `REQUIRES_NEW` | Always create new transaction |
|
||||||
|
| `NESTED` | Create savepoint within existing transaction |
|
||||||
|
|
||||||
|
### 6.4 Usage Examples
|
||||||
|
|
||||||
|
**Using @transactional decorator:**
|
||||||
|
|
||||||
|
```python
|
||||||
|
from src.server.database.transaction import transactional
|
||||||
|
|
||||||
|
@transactional()
|
||||||
|
async def compound_operation(db: AsyncSession, data: dict):
|
||||||
|
# All operations commit together or rollback on error
|
||||||
|
series = await AnimeSeriesService.create(db, ...)
|
||||||
|
episode = await EpisodeService.create(db, series_id=series.id, ...)
|
||||||
|
return series, episode
|
||||||
|
```
|
||||||
|
|
||||||
|
**Using atomic() context manager:**
|
||||||
|
|
||||||
|
```python
|
||||||
|
from src.server.database.transaction import atomic
|
||||||
|
|
||||||
|
async def some_function(db: AsyncSession):
|
||||||
|
async with atomic(db) as tx:
|
||||||
|
await operation1(db)
|
||||||
|
await operation2(db)
|
||||||
|
# Auto-commits on success, rolls back on exception
|
||||||
|
```
|
||||||
|
|
||||||
|
**Using savepoints for partial rollback:**
|
||||||
|
|
||||||
|
```python
|
||||||
|
async with atomic(db) as tx:
|
||||||
|
await outer_operation(db)
|
||||||
|
|
||||||
|
async with tx.savepoint() as sp:
|
||||||
|
await risky_operation(db)
|
||||||
|
if error_condition:
|
||||||
|
await sp.rollback() # Only rollback nested ops
|
||||||
|
|
||||||
|
await final_operation(db) # Still executes
|
||||||
|
```
|
||||||
|
|
||||||
|
Source: [src/server/database/transaction.py](../src/server/database/transaction.py)
|
||||||
|
|
||||||
|
### 6.5 Connection Module Additions
|
||||||
|
|
||||||
|
| Function | Description |
|
||||||
|
| ------------------------------- | -------------------------------------------- |
|
||||||
|
| `get_transactional_session` | Session without auto-commit for transactions |
|
||||||
|
| `TransactionManager` | Helper class for manual transaction control |
|
||||||
|
| `is_session_in_transaction` | Check if session is in active transaction |
|
||||||
|
| `get_session_transaction_depth` | Get nesting depth of transactions |
|
||||||
|
|
||||||
|
Source: [src/server/database/connection.py](../src/server/database/connection.py)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 7. Repository Pattern
|
||||||
|
|
||||||
|
The `QueueRepository` class provides data access abstraction.
|
||||||
|
|
||||||
|
```python
|
||||||
|
class QueueRepository:
|
||||||
|
async def save_item(self, item: DownloadItem) -> None:
|
||||||
|
"""Save or update a download item (atomic operation)."""
|
||||||
|
|
||||||
|
async def get_all_items(self) -> List[DownloadItem]:
|
||||||
|
"""Get all items from database."""
|
||||||
|
|
||||||
|
async def delete_item(self, item_id: str) -> bool:
|
||||||
|
"""Delete item by ID."""
|
||||||
|
|
||||||
|
async def clear_all(self) -> int:
|
||||||
|
"""Clear all items (atomic operation)."""
|
||||||
|
```
|
||||||
|
|
||||||
|
Note: Compound operations (`save_item`, `clear_all`) are wrapped in `atomic()` transactions.
|
||||||
|
|
||||||
|
Source: [src/server/services/queue_repository.py](../src/server/services/queue_repository.py)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 8. Database Service
|
||||||
|
|
||||||
|
The `AnimeSeriesService` provides async CRUD operations.
|
||||||
|
|
||||||
|
```python
|
||||||
|
class AnimeSeriesService:
|
||||||
|
@staticmethod
|
||||||
|
async def create(
|
||||||
|
db: AsyncSession,
|
||||||
|
key: str,
|
||||||
|
name: str,
|
||||||
|
site: str,
|
||||||
|
folder: str
|
||||||
|
) -> AnimeSeries:
|
||||||
|
"""Create a new anime series."""
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
async def get_by_key(
|
||||||
|
db: AsyncSession,
|
||||||
|
key: str
|
||||||
|
) -> Optional[AnimeSeries]:
|
||||||
|
"""Get series by primary key identifier."""
|
||||||
|
```
|
||||||
|
|
||||||
|
### Bulk Operations
|
||||||
|
|
||||||
|
Services provide bulk operations for transaction-safe batch processing:
|
||||||
|
|
||||||
|
| Service | Method | Description |
|
||||||
|
| ---------------------- | ---------------------- | ------------------------------ |
|
||||||
|
| `EpisodeService` | `bulk_mark_downloaded` | Mark multiple episodes at once |
|
||||||
|
| `DownloadQueueService` | `bulk_delete` | Delete multiple queue items |
|
||||||
|
| `DownloadQueueService` | `clear_all` | Clear entire queue |
|
||||||
|
| `UserSessionService` | `rotate_session` | Revoke old + create new atomic |
|
||||||
|
| `UserSessionService` | `cleanup_expired` | Bulk delete expired sessions |
|
||||||
|
|
||||||
|
Source: [src/server/database/service.py](../src/server/database/service.py)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 9. Data Integrity Rules
|
||||||
|
|
||||||
|
### Validation Constraints
|
||||||
|
|
||||||
|
| Field | Rule | Error Message |
|
||||||
|
| ------------------------- | ------------------------ | ------------------------------------- |
|
||||||
|
| `anime_series.key` | Non-empty, max 255 chars | "Series key cannot be empty" |
|
||||||
|
| `anime_series.name` | Non-empty, max 500 chars | "Series name cannot be empty" |
|
||||||
|
| `episodes.season` | 0-1000 | "Season number must be non-negative" |
|
||||||
|
| `episodes.episode_number` | 0-10000 | "Episode number must be non-negative" |
|
||||||
|
|
||||||
|
Source: [src/server/database/models.py](../src/server/database/models.py#L89-L119)
|
||||||
|
|
||||||
|
### Cascade Rules
|
||||||
|
|
||||||
|
- Deleting `anime_series` deletes all related `episodes` and `download_queue_item`
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 10. Migration Strategy
|
||||||
|
|
||||||
|
Currently, SQLAlchemy's `create_all()` is used for schema creation.
|
||||||
|
|
||||||
|
```python
|
||||||
|
# src/server/database/connection.py
|
||||||
|
async def init_db():
|
||||||
|
async with engine.begin() as conn:
|
||||||
|
await conn.run_sync(Base.metadata.create_all)
|
||||||
|
```
|
||||||
|
|
||||||
|
For production migrations, Alembic is recommended but not yet implemented.
|
||||||
|
|
||||||
|
Source: [src/server/database/connection.py](../src/server/database/connection.py)
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 11. Common Query Patterns
|
||||||
|
|
||||||
|
### Get all series with missing episodes
|
||||||
|
|
||||||
|
```python
|
||||||
|
series = await db.execute(
|
||||||
|
select(AnimeSeries).options(selectinload(AnimeSeries.episodes))
|
||||||
|
)
|
||||||
|
for serie in series.scalars():
|
||||||
|
downloaded = [e for e in serie.episodes if e.is_downloaded]
|
||||||
|
```
|
||||||
|
|
||||||
|
### Get pending downloads ordered by priority
|
||||||
|
|
||||||
|
```python
|
||||||
|
items = await db.execute(
|
||||||
|
select(DownloadQueueItem)
|
||||||
|
.where(DownloadQueueItem.status == "pending")
|
||||||
|
.order_by(
|
||||||
|
case(
|
||||||
|
(DownloadQueueItem.priority == "HIGH", 1),
|
||||||
|
(DownloadQueueItem.priority == "NORMAL", 2),
|
||||||
|
(DownloadQueueItem.priority == "LOW", 3),
|
||||||
|
),
|
||||||
|
DownloadQueueItem.added_at
|
||||||
|
)
|
||||||
|
)
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 12. Database Location
|
||||||
|
|
||||||
|
| Environment | Default Location |
|
||||||
|
| ----------- | ------------------------------------------------- |
|
||||||
|
| Development | `./data/aniworld.db` |
|
||||||
|
| Production | Via `DATABASE_URL` environment variable |
|
||||||
|
| Testing | In-memory SQLite (`sqlite+aiosqlite:///:memory:`) |
|
||||||
64
docs/DEVELOPMENT.md
Normal file
64
docs/DEVELOPMENT.md
Normal file
@@ -0,0 +1,64 @@
|
|||||||
|
# Development Guide
|
||||||
|
|
||||||
|
## Document Purpose
|
||||||
|
|
||||||
|
This document provides guidance for developers working on the Aniworld project.
|
||||||
|
|
||||||
|
### What This Document Contains
|
||||||
|
|
||||||
|
- **Prerequisites**: Required software and tools
|
||||||
|
- **Environment Setup**: Step-by-step local development setup
|
||||||
|
- **Project Structure**: Source code organization explanation
|
||||||
|
- **Development Workflow**: Branch strategy, commit conventions
|
||||||
|
- **Coding Standards**: Style guide, linting, formatting
|
||||||
|
- **Running the Application**: Development server, CLI usage
|
||||||
|
- **Debugging Tips**: Common debugging approaches
|
||||||
|
- **IDE Configuration**: VS Code settings, recommended extensions
|
||||||
|
- **Contributing Guidelines**: How to submit changes
|
||||||
|
- **Code Review Process**: Review checklist and expectations
|
||||||
|
|
||||||
|
### What This Document Does NOT Contain
|
||||||
|
|
||||||
|
- Production deployment (see [DEPLOYMENT.md](DEPLOYMENT.md))
|
||||||
|
- API reference (see [API.md](API.md))
|
||||||
|
- Architecture decisions (see [ARCHITECTURE.md](ARCHITECTURE.md))
|
||||||
|
- Test writing guides (see [TESTING.md](TESTING.md))
|
||||||
|
- Security guidelines (see [SECURITY.md](SECURITY.md))
|
||||||
|
|
||||||
|
### Target Audience
|
||||||
|
|
||||||
|
- New Developers joining the project
|
||||||
|
- Contributors (internal and external)
|
||||||
|
- Anyone setting up a development environment
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Sections to Document
|
||||||
|
|
||||||
|
1. Prerequisites
|
||||||
|
- Python version
|
||||||
|
- Conda environment
|
||||||
|
- Node.js (if applicable)
|
||||||
|
- Git
|
||||||
|
2. Getting Started
|
||||||
|
- Clone repository
|
||||||
|
- Setup conda environment
|
||||||
|
- Install dependencies
|
||||||
|
- Configuration setup
|
||||||
|
3. Project Structure Overview
|
||||||
|
4. Development Server
|
||||||
|
- Starting FastAPI server
|
||||||
|
- Hot reload configuration
|
||||||
|
- Debug mode
|
||||||
|
5. CLI Development
|
||||||
|
6. Code Style
|
||||||
|
- PEP 8 compliance
|
||||||
|
- Type hints requirements
|
||||||
|
- Docstring format
|
||||||
|
- Import organization
|
||||||
|
7. Git Workflow
|
||||||
|
- Branch naming
|
||||||
|
- Commit message format
|
||||||
|
- Pull request process
|
||||||
|
8. Common Development Tasks
|
||||||
|
9. Troubleshooting Development Issues
|
||||||
758
docs/NFO_GUIDE.md
Normal file
758
docs/NFO_GUIDE.md
Normal file
@@ -0,0 +1,758 @@
|
|||||||
|
# NFO Metadata Guide
|
||||||
|
|
||||||
|
## Document Purpose
|
||||||
|
|
||||||
|
This guide explains how to use the NFO metadata feature to enrich your anime library with TMDB metadata and artwork for Plex, Jellyfin, Emby, and Kodi.
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 1. Overview
|
||||||
|
|
||||||
|
### What are NFO Files?
|
||||||
|
|
||||||
|
NFO files are XML documents that contain metadata about TV shows and episodes. Media servers like Plex, Jellyfin, Emby, and Kodi use these files to display information about your library without needing to scrape external sources.
|
||||||
|
|
||||||
|
### Features
|
||||||
|
|
||||||
|
- **Automatic NFO Creation**: Generate NFO files during downloads
|
||||||
|
- **TMDB Integration**: Fetch metadata from The Movie Database
|
||||||
|
- **Image Downloads**: Poster, fanart, and logo images
|
||||||
|
- **Batch Operations**: Create/update NFO files for multiple anime
|
||||||
|
- **Web UI**: Manage NFO settings and operations
|
||||||
|
- **API Access**: Programmatic NFO management
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 2. Getting Started
|
||||||
|
|
||||||
|
### 2.1 Obtain TMDB API Key
|
||||||
|
|
||||||
|
1. Create a free account at https://www.themoviedb.org
|
||||||
|
2. Navigate to https://www.themoviedb.org/settings/api
|
||||||
|
3. Request an API key (select "Developer" option)
|
||||||
|
4. Copy your API key (v3 auth)
|
||||||
|
|
||||||
|
### 2.2 Configure NFO Settings
|
||||||
|
|
||||||
|
#### Via Web Interface
|
||||||
|
|
||||||
|
1. Open http://127.0.0.1:8000
|
||||||
|
2. Click **Configuration** button
|
||||||
|
3. Scroll to **NFO Settings** section
|
||||||
|
4. Enter your TMDB API key
|
||||||
|
5. Click **Test Connection** to verify
|
||||||
|
6. Configure options:
|
||||||
|
- **Auto-create during downloads**: Enable to create NFO files automatically
|
||||||
|
- **Update on library scan**: Enable to refresh existing NFO files
|
||||||
|
- **Download poster**: Episode and show poster images (poster.jpg)
|
||||||
|
- **Download logo**: Show logo images (logo.png)
|
||||||
|
- **Download fanart**: Background artwork (fanart.jpg)
|
||||||
|
- **Image size**: Select w500 (recommended), w780, or original
|
||||||
|
7. Click **Save**
|
||||||
|
|
||||||
|
#### Via Environment Variables
|
||||||
|
|
||||||
|
Add to your `.env` file:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
TMDB_API_KEY=your_api_key_here
|
||||||
|
NFO_AUTO_CREATE=true
|
||||||
|
NFO_UPDATE_ON_SCAN=false
|
||||||
|
NFO_DOWNLOAD_POSTER=true
|
||||||
|
NFO_DOWNLOAD_LOGO=false
|
||||||
|
NFO_DOWNLOAD_FANART=false
|
||||||
|
NFO_IMAGE_SIZE=w500
|
||||||
|
```
|
||||||
|
|
||||||
|
#### Via config.json
|
||||||
|
|
||||||
|
Edit `data/config.json`:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"nfo": {
|
||||||
|
"tmdb_api_key": "your_api_key_here",
|
||||||
|
"auto_create": true,
|
||||||
|
"update_on_scan": false,
|
||||||
|
"download_poster": true,
|
||||||
|
"download_logo": false,
|
||||||
|
"download_fanart": false,
|
||||||
|
"image_size": "w500"
|
||||||
|
}
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 3. Using NFO Features
|
||||||
|
|
||||||
|
### 3.1 Automatic NFO Creation
|
||||||
|
|
||||||
|
With `auto_create` enabled, NFO files are created automatically when downloading episodes:
|
||||||
|
|
||||||
|
1. Add episodes to download queue
|
||||||
|
2. Start queue processing
|
||||||
|
3. NFO files are created after successful downloads
|
||||||
|
4. Images are downloaded based on configuration
|
||||||
|
|
||||||
|
### 3.2 Manual NFO Creation
|
||||||
|
|
||||||
|
#### Via Web Interface
|
||||||
|
|
||||||
|
1. Navigate to the main page
|
||||||
|
2. Click **Create NFO** button next to an anime
|
||||||
|
3. Wait for completion notification
|
||||||
|
|
||||||
|
#### Via API
|
||||||
|
|
||||||
|
```bash
|
||||||
|
curl -X POST "http://127.0.0.1:8000/api/nfo/create" \
|
||||||
|
-H "Authorization: Bearer YOUR_JWT_TOKEN" \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
-d '{
|
||||||
|
"anime_id": 123,
|
||||||
|
"folder_path": "/path/to/anime/Attack on Titan"
|
||||||
|
}'
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3.3 Batch NFO Creation
|
||||||
|
|
||||||
|
Create NFO files for multiple anime at once:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
curl -X POST "http://127.0.0.1:8000/api/nfo/batch/create" \
|
||||||
|
-H "Authorization: Bearer YOUR_JWT_TOKEN" \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
-d '{
|
||||||
|
"anime_ids": [123, 456, 789]
|
||||||
|
}'
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3.4 Update Existing NFO Files
|
||||||
|
|
||||||
|
Update NFO files with latest TMDB metadata:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
curl -X POST "http://127.0.0.1:8000/api/nfo/update" \
|
||||||
|
-H "Authorization: Bearer YOUR_JWT_TOKEN" \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
-d '{
|
||||||
|
"anime_id": 123,
|
||||||
|
"folder_path": "/path/to/anime/Attack on Titan",
|
||||||
|
"force": true
|
||||||
|
}'
|
||||||
|
```
|
||||||
|
|
||||||
|
### 3.5 Check NFO Status
|
||||||
|
|
||||||
|
Check which anime have NFO files:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
curl -X GET "http://127.0.0.1:8000/api/nfo/check?folder_path=/path/to/anime" \
|
||||||
|
-H "Authorization: Bearer YOUR_JWT_TOKEN"
|
||||||
|
```
|
||||||
|
|
||||||
|
Response:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"has_tvshow_nfo": true,
|
||||||
|
"episode_nfos": [
|
||||||
|
{
|
||||||
|
"season": 1,
|
||||||
|
"episode": 1,
|
||||||
|
"has_nfo": true,
|
||||||
|
"file_path": "/path/to/anime/Season 1/S01E01.nfo"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"missing_episodes": [],
|
||||||
|
"total_episodes": 25,
|
||||||
|
"nfo_count": 25
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 4. File Structure
|
||||||
|
|
||||||
|
### 4.1 NFO File Locations
|
||||||
|
|
||||||
|
NFO files are created in the anime directory:
|
||||||
|
|
||||||
|
```
|
||||||
|
/path/to/anime/Attack on Titan/
|
||||||
|
├── tvshow.nfo # Show metadata
|
||||||
|
├── poster.jpg # Show poster (optional)
|
||||||
|
├── logo.png # Show logo (optional)
|
||||||
|
├── fanart.jpg # Show fanart (optional)
|
||||||
|
├── Season 1/
|
||||||
|
│ ├── S01E01.mkv
|
||||||
|
│ ├── S01E01.nfo # Episode metadata
|
||||||
|
│ ├── S01E01-thumb.jpg # Episode thumbnail (optional)
|
||||||
|
│ ├── S01E02.mkv
|
||||||
|
│ └── S01E02.nfo
|
||||||
|
└── Season 2/
|
||||||
|
├── S02E01.mkv
|
||||||
|
└── S02E01.nfo
|
||||||
|
```
|
||||||
|
|
||||||
|
### 4.2 tvshow.nfo Format
|
||||||
|
|
||||||
|
```xml
|
||||||
|
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||||
|
<tvshow>
|
||||||
|
<title>Attack on Titan</title>
|
||||||
|
<originaltitle>進撃の巨人</originaltitle>
|
||||||
|
<showtitle>Attack on Titan</showtitle>
|
||||||
|
<sorttitle>Attack on Titan</sorttitle>
|
||||||
|
<rating>8.5</rating>
|
||||||
|
<year>2013</year>
|
||||||
|
<plot>Humans are nearly exterminated by giant creatures...</plot>
|
||||||
|
<runtime>24</runtime>
|
||||||
|
<mpaa>TV-MA</mpaa>
|
||||||
|
<premiered>2013-04-07</premiered>
|
||||||
|
<status>Ended</status>
|
||||||
|
<studio>Wit Studio</studio>
|
||||||
|
<genre>Animation</genre>
|
||||||
|
<genre>Action</genre>
|
||||||
|
<genre>Sci-Fi & Fantasy</genre>
|
||||||
|
<uniqueid type="tmdb">1429</uniqueid>
|
||||||
|
<thumb aspect="poster">https://image.tmdb.org/t/p/w500/...</thumb>
|
||||||
|
<fanart>
|
||||||
|
<thumb>https://image.tmdb.org/t/p/original/...</thumb>
|
||||||
|
</fanart>
|
||||||
|
</tvshow>
|
||||||
|
```
|
||||||
|
|
||||||
|
### 4.3 Episode NFO Format
|
||||||
|
|
||||||
|
```xml
|
||||||
|
<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
|
||||||
|
<episodedetails>
|
||||||
|
<title>To You, in 2000 Years: The Fall of Shiganshina, Part 1</title>
|
||||||
|
<showtitle>Attack on Titan</showtitle>
|
||||||
|
<season>1</season>
|
||||||
|
<episode>1</episode>
|
||||||
|
<displayseason>1</displayseason>
|
||||||
|
<displayepisode>1</displayepisode>
|
||||||
|
<plot>After a hundred years of peace...</plot>
|
||||||
|
<runtime>24</runtime>
|
||||||
|
<aired>2013-04-07</aired>
|
||||||
|
<rating>8.2</rating>
|
||||||
|
<uniqueid type="tmdb">63056</uniqueid>
|
||||||
|
<thumb>https://image.tmdb.org/t/p/w500/...</thumb>
|
||||||
|
</episodedetails>
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 5. API Reference
|
||||||
|
|
||||||
|
### 5.1 Check NFO Status
|
||||||
|
|
||||||
|
**Endpoint**: `GET /api/nfo/check`
|
||||||
|
|
||||||
|
**Query Parameters**:
|
||||||
|
|
||||||
|
- `folder_path` (required): Absolute path to anime directory
|
||||||
|
|
||||||
|
**Response**:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"has_tvshow_nfo": true,
|
||||||
|
"episode_nfos": [
|
||||||
|
{
|
||||||
|
"season": 1,
|
||||||
|
"episode": 1,
|
||||||
|
"has_nfo": true,
|
||||||
|
"file_path": "/path/to/S01E01.nfo"
|
||||||
|
}
|
||||||
|
],
|
||||||
|
"missing_episodes": [],
|
||||||
|
"total_episodes": 25,
|
||||||
|
"nfo_count": 25
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### 5.2 Create NFO Files
|
||||||
|
|
||||||
|
**Endpoint**: `POST /api/nfo/create`
|
||||||
|
|
||||||
|
**Request Body**:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"anime_id": 123,
|
||||||
|
"folder_path": "/path/to/anime/Attack on Titan"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Response**:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"success": true,
|
||||||
|
"message": "NFO files created successfully",
|
||||||
|
"files_created": ["tvshow.nfo", "S01E01.nfo", "S01E02.nfo"],
|
||||||
|
"images_downloaded": ["poster.jpg", "S01E01-thumb.jpg"]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### 5.3 Update NFO Files
|
||||||
|
|
||||||
|
**Endpoint**: `POST /api/nfo/update`
|
||||||
|
|
||||||
|
**Request Body**:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"anime_id": 123,
|
||||||
|
"folder_path": "/path/to/anime",
|
||||||
|
"force": false
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Response**:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"success": true,
|
||||||
|
"message": "NFO files updated successfully",
|
||||||
|
"files_updated": ["tvshow.nfo", "S01E01.nfo"]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### 5.4 View NFO Content
|
||||||
|
|
||||||
|
**Endpoint**: `GET /api/nfo/view`
|
||||||
|
|
||||||
|
**Query Parameters**:
|
||||||
|
|
||||||
|
- `file_path` (required): Absolute path to NFO file
|
||||||
|
|
||||||
|
**Response**:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"content": "<?xml version=\"1.0\"...?>",
|
||||||
|
"file_path": "/path/to/tvshow.nfo",
|
||||||
|
"exists": true
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### 5.5 Get Media Status
|
||||||
|
|
||||||
|
**Endpoint**: `GET /api/nfo/media/status`
|
||||||
|
|
||||||
|
**Query Parameters**:
|
||||||
|
|
||||||
|
- `folder_path` (required): Absolute path to anime directory
|
||||||
|
|
||||||
|
**Response**:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"poster_exists": true,
|
||||||
|
"poster_path": "/path/to/poster.jpg",
|
||||||
|
"logo_exists": false,
|
||||||
|
"logo_path": null,
|
||||||
|
"fanart_exists": true,
|
||||||
|
"fanart_path": "/path/to/fanart.jpg",
|
||||||
|
"episode_thumbs": [
|
||||||
|
{
|
||||||
|
"season": 1,
|
||||||
|
"episode": 1,
|
||||||
|
"exists": true,
|
||||||
|
"path": "/path/to/S01E01-thumb.jpg"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### 5.6 Download Media
|
||||||
|
|
||||||
|
**Endpoint**: `POST /api/nfo/media/download`
|
||||||
|
|
||||||
|
**Request Body**:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"folder_path": "/path/to/anime",
|
||||||
|
"anime_id": 123,
|
||||||
|
"download_poster": true,
|
||||||
|
"download_logo": false,
|
||||||
|
"download_fanart": false,
|
||||||
|
"image_size": "w500"
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Response**:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"success": true,
|
||||||
|
"message": "Media downloaded successfully",
|
||||||
|
"downloaded": ["poster.jpg", "S01E01-thumb.jpg"]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### 5.7 Batch Create NFO
|
||||||
|
|
||||||
|
**Endpoint**: `POST /api/nfo/batch/create`
|
||||||
|
|
||||||
|
**Request Body**:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"anime_ids": [123, 456, 789]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
**Response**:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"success": true,
|
||||||
|
"results": [
|
||||||
|
{
|
||||||
|
"anime_id": 123,
|
||||||
|
"success": true,
|
||||||
|
"message": "Created successfully"
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"anime_id": 456,
|
||||||
|
"success": false,
|
||||||
|
"error": "Folder not found"
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
### 5.8 Find Missing NFOs
|
||||||
|
|
||||||
|
**Endpoint**: `GET /api/nfo/missing`
|
||||||
|
|
||||||
|
**Response**:
|
||||||
|
|
||||||
|
```json
|
||||||
|
{
|
||||||
|
"anime_list": [
|
||||||
|
{
|
||||||
|
"anime_id": 123,
|
||||||
|
"title": "Attack on Titan",
|
||||||
|
"folder_path": "/path/to/anime/Attack on Titan",
|
||||||
|
"missing_tvshow_nfo": false,
|
||||||
|
"missing_episode_count": 3,
|
||||||
|
"total_episodes": 25
|
||||||
|
}
|
||||||
|
]
|
||||||
|
}
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 6. Troubleshooting
|
||||||
|
|
||||||
|
### 6.1 NFO Files Not Created
|
||||||
|
|
||||||
|
**Problem**: NFO files are not being created during downloads.
|
||||||
|
|
||||||
|
**Solutions**:
|
||||||
|
|
||||||
|
1. Verify TMDB API key is configured correctly
|
||||||
|
2. Check `auto_create` is enabled in settings
|
||||||
|
3. Ensure anime directory has write permissions
|
||||||
|
4. Check logs for error messages
|
||||||
|
5. Test TMDB connection using "Test Connection" button
|
||||||
|
|
||||||
|
### 6.2 Invalid TMDB API Key
|
||||||
|
|
||||||
|
**Problem**: TMDB validation fails with "Invalid API key".
|
||||||
|
|
||||||
|
**Solutions**:
|
||||||
|
|
||||||
|
1. Verify API key is copied correctly (no extra spaces)
|
||||||
|
2. Ensure you're using the v3 API key (not v4)
|
||||||
|
3. Check API key is active on TMDB website
|
||||||
|
4. Try regenerating API key on TMDB
|
||||||
|
|
||||||
|
### 6.3 Images Not Downloading
|
||||||
|
|
||||||
|
**Problem**: NFO files are created but images are missing.
|
||||||
|
|
||||||
|
**Solutions**:
|
||||||
|
|
||||||
|
1. Enable image downloads in settings (poster/logo/fanart)
|
||||||
|
2. Verify TMDB API key is valid
|
||||||
|
3. Check network connectivity to TMDB servers
|
||||||
|
4. Ensure sufficient disk space
|
||||||
|
5. Check file permissions in anime directory
|
||||||
|
|
||||||
|
### 6.4 Incorrect Metadata
|
||||||
|
|
||||||
|
**Problem**: NFO contains wrong show information.
|
||||||
|
|
||||||
|
**Solutions**:
|
||||||
|
|
||||||
|
1. Verify anime title matches TMDB exactly
|
||||||
|
2. Use TMDB ID if available for accurate matching
|
||||||
|
3. Update NFO files with `force=true` to refresh metadata
|
||||||
|
4. Check TMDB website for correct show information
|
||||||
|
|
||||||
|
### 6.5 Permission Errors
|
||||||
|
|
||||||
|
**Problem**: "Permission denied" when creating NFO files.
|
||||||
|
|
||||||
|
**Solutions**:
|
||||||
|
|
||||||
|
1. Check anime directory permissions: `chmod 755 /path/to/anime`
|
||||||
|
2. Ensure application user has write access
|
||||||
|
3. Verify directory ownership: `chown -R user:group /path/to/anime`
|
||||||
|
4. Check parent directories are accessible
|
||||||
|
|
||||||
|
### 6.6 Slow NFO Creation
|
||||||
|
|
||||||
|
**Problem**: NFO creation takes a long time.
|
||||||
|
|
||||||
|
**Solutions**:
|
||||||
|
|
||||||
|
1. Reduce image size (use w500 instead of original)
|
||||||
|
2. Disable unnecessary images (logo, fanart)
|
||||||
|
3. Create NFOs in batches during off-peak hours
|
||||||
|
4. Check network speed to TMDB servers
|
||||||
|
5. Verify disk I/O performance
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 7. Best Practices
|
||||||
|
|
||||||
|
### 7.1 Configuration Recommendations
|
||||||
|
|
||||||
|
- **Image Size**: Use `w500` for optimal balance of quality and storage
|
||||||
|
- **Auto-create**: Enable for new downloads
|
||||||
|
- **Update on scan**: Disable to avoid unnecessary TMDB API calls
|
||||||
|
- **Poster**: Always enable for show and episode thumbnails
|
||||||
|
- **Logo/Fanart**: Enable only if your media server supports them
|
||||||
|
|
||||||
|
### 7.2 Maintenance
|
||||||
|
|
||||||
|
- **Regular Updates**: Update NFO files quarterly to get latest metadata
|
||||||
|
- **Backup**: Include NFO files in your backup strategy
|
||||||
|
- **Validation**: Periodically check missing NFOs using `/api/nfo/missing`
|
||||||
|
- **API Rate Limits**: Be mindful of TMDB API rate limits when batch processing
|
||||||
|
|
||||||
|
### 7.3 Performance
|
||||||
|
|
||||||
|
- **Batch Operations**: Use batch endpoints for multiple anime
|
||||||
|
- **Off-Peak Processing**: Create NFOs during low-activity periods
|
||||||
|
- **Image Optimization**: Use smaller image sizes for large libraries
|
||||||
|
- **Selective Updates**: Only update NFOs when metadata changes
|
||||||
|
|
||||||
|
### 7.4 Media Server Integration
|
||||||
|
|
||||||
|
#### Plex
|
||||||
|
|
||||||
|
- Use "Personal Media Shows" agent
|
||||||
|
- Enable "Local Media Assets" scanner
|
||||||
|
- Place NFO files in anime directories
|
||||||
|
- Refresh metadata after creating NFOs
|
||||||
|
|
||||||
|
#### Jellyfin
|
||||||
|
|
||||||
|
- Use "NFO" metadata provider
|
||||||
|
- Enable in Library settings
|
||||||
|
- Order providers: NFO first, then online sources
|
||||||
|
- Scan library after NFO creation
|
||||||
|
|
||||||
|
#### Emby
|
||||||
|
|
||||||
|
- Enable "NFO" metadata reader
|
||||||
|
- Configure in Library advanced settings
|
||||||
|
- Use "Prefer embedded metadata" option
|
||||||
|
- Refresh metadata after updates
|
||||||
|
|
||||||
|
#### Kodi
|
||||||
|
|
||||||
|
- NFO files are automatically detected
|
||||||
|
- No additional configuration needed
|
||||||
|
- Update library to see changes
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 8. Advanced Usage
|
||||||
|
|
||||||
|
### 8.1 Custom NFO Templates
|
||||||
|
|
||||||
|
You can customize NFO generation by modifying the NFO service:
|
||||||
|
|
||||||
|
```python
|
||||||
|
# src/core/services/nfo_creator.py
|
||||||
|
def generate_tvshow_nfo(self, metadata: dict) -> str:
|
||||||
|
# Add custom fields or modify structure
|
||||||
|
pass
|
||||||
|
```
|
||||||
|
|
||||||
|
### 8.2 Bulk Operations
|
||||||
|
|
||||||
|
Create NFOs for entire library:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Get all anime without NFOs
|
||||||
|
curl -X GET "http://127.0.0.1:8000/api/nfo/missing" \
|
||||||
|
-H "Authorization: Bearer $TOKEN" \
|
||||||
|
| jq -r '.anime_list[].anime_id' \
|
||||||
|
| xargs -I{} curl -X POST "http://127.0.0.1:8000/api/nfo/batch/create" \
|
||||||
|
-H "Authorization: Bearer $TOKEN" \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
-d '{"anime_ids": [{}]}'
|
||||||
|
```
|
||||||
|
|
||||||
|
### 8.3 Scheduled Updates
|
||||||
|
|
||||||
|
Use the scheduler API to refresh NFOs automatically:
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Schedule weekly NFO updates (rescan runs Sunday at 03:00)
|
||||||
|
curl -X POST "http://127.0.0.1:8000/api/scheduler/config" \
|
||||||
|
-H "Authorization: Bearer $TOKEN" \
|
||||||
|
-H "Content-Type: application/json" \
|
||||||
|
-d '{
|
||||||
|
"enabled": true,
|
||||||
|
"schedule_time": "03:00",
|
||||||
|
"schedule_days": ["sun"],
|
||||||
|
"auto_download_after_rescan": false
|
||||||
|
}'
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 9. Related Documentation
|
||||||
|
|
||||||
|
- [API.md](API.md) - Complete API reference
|
||||||
|
- [CONFIGURATION.md](CONFIGURATION.md) - All configuration options
|
||||||
|
- [ARCHITECTURE.md](ARCHITECTURE.md) - System architecture
|
||||||
|
- [DEVELOPMENT.md](DEVELOPMENT.md) - Development guide
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 10. Tag Reference
|
||||||
|
|
||||||
|
The table below lists every XML tag written to `tvshow.nfo` and its source in
|
||||||
|
the TMDB API response. All tags are written whenever the NFO is created or
|
||||||
|
updated via `create_tvshow_nfo()` / `update_tvshow_nfo()`.
|
||||||
|
|
||||||
|
| NFO tag | TMDB source field | Required |
|
||||||
|
| --------------- | ----------------------------------------------------- | -------- |
|
||||||
|
| `title` | `name` | ✅ |
|
||||||
|
| `originaltitle` | `original_name` | ✅ |
|
||||||
|
| `showtitle` | `name` (same as `title`) | ✅ |
|
||||||
|
| `sorttitle` | `name` (same as `title`) | ✅ |
|
||||||
|
| `year` | First 4 chars of `first_air_date` | ✅ |
|
||||||
|
| `plot` | `overview` | ✅ |
|
||||||
|
| `outline` | `overview` (same as `plot`) | ✅ |
|
||||||
|
| `tagline` | `tagline` | optional |
|
||||||
|
| `runtime` | `episode_run_time[0]` | ✅ |
|
||||||
|
| `premiered` | `first_air_date` | ✅ |
|
||||||
|
| `status` | `status` | ✅ |
|
||||||
|
| `mpaa` | US content rating from `content_ratings.results` | optional |
|
||||||
|
| `fsk` | DE content rating (written as `mpaa` when preferred) | optional |
|
||||||
|
| `imdbid` | `external_ids.imdb_id` | ✅ |
|
||||||
|
| `tmdbid` | `id` | ✅ |
|
||||||
|
| `tvdbid` | `external_ids.tvdb_id` | optional |
|
||||||
|
| `genre` | `genres[].name` (one element per genre) | ✅ |
|
||||||
|
| `studio` | `networks[].name` (one element per network) | ✅ |
|
||||||
|
| `country` | `origin_country[]` or `production_countries[].name` | ✅ |
|
||||||
|
| `actor` | `credits.cast[]` (top 10, with name/role/thumb) | ✅ |
|
||||||
|
| `watched` | Always `false` on creation | ✅ |
|
||||||
|
| `dateadded` | System clock at creation time (`YYYY-MM-DD HH:MM:SS`) | ✅ |
|
||||||
|
|
||||||
|
The mapping logic lives in `src/core/utils/nfo_mapper.py` (`tmdb_to_nfo_model`).
|
||||||
|
The XML serialisation lives in `src/core/utils/nfo_generator.py`
|
||||||
|
(`generate_tvshow_nfo`).
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 11. Automatic NFO Repair
|
||||||
|
|
||||||
|
Every time the server starts, Aniworld scans all existing `tvshow.nfo` files and
|
||||||
|
automatically repairs any that are missing required tags.
|
||||||
|
|
||||||
|
### How It Works
|
||||||
|
|
||||||
|
1. **Scan** — `perform_nfo_repair_scan()` in
|
||||||
|
`src/server/services/initialization_service.py` is called from the FastAPI
|
||||||
|
lifespan after `perform_media_scan_if_needed()`.
|
||||||
|
2. **Detect** — `nfo_needs_repair(nfo_path)` from
|
||||||
|
`src/core/services/nfo_repair_service.py` parses each `tvshow.nfo` with
|
||||||
|
`lxml` and checks for the 13 required tags listed below.
|
||||||
|
3. **Repair** — Series whose NFO is incomplete are queued for background reload
|
||||||
|
via `BackgroundLoaderService.add_series_loading_task()`. The background
|
||||||
|
loader re-fetches metadata from TMDB and rewrites the NFO with all tags
|
||||||
|
populated.
|
||||||
|
|
||||||
|
### Tags Checked (13 required)
|
||||||
|
|
||||||
|
| XPath | Tag name |
|
||||||
|
| ----------------- | --------------- |
|
||||||
|
| `./title` | `title` |
|
||||||
|
| `./originaltitle` | `originaltitle` |
|
||||||
|
| `./year` | `year` |
|
||||||
|
| `./plot` | `plot` |
|
||||||
|
| `./runtime` | `runtime` |
|
||||||
|
| `./premiered` | `premiered` |
|
||||||
|
| `./status` | `status` |
|
||||||
|
| `./imdbid` | `imdbid` |
|
||||||
|
| `./genre` | `genre` |
|
||||||
|
| `./studio` | `studio` |
|
||||||
|
| `./country` | `country` |
|
||||||
|
| `./actor/name` | `actor/name` |
|
||||||
|
| `./watched` | `watched` |
|
||||||
|
|
||||||
|
### Log Messages
|
||||||
|
|
||||||
|
| Message | Meaning |
|
||||||
|
| ----------------------------------------------------------- | ------------------------------------------------- |
|
||||||
|
| `NFO repair scan complete: 0 of N series queued for repair` | All NFOs are complete — no action needed |
|
||||||
|
| `NFO repair scan complete: X of N series queued for repair` | X series had incomplete NFOs and have been queued |
|
||||||
|
| `NFO repair scan skipped: TMDB API key not configured` | Set `tmdb_api_key` in `data/config.json` |
|
||||||
|
| `NFO repair scan skipped: anime directory not configured` | Set `anime_directory` in `data/config.json` |
|
||||||
|
|
||||||
|
### Triggering a Manual Repair
|
||||||
|
|
||||||
|
You can also repair a single series on demand via the API:
|
||||||
|
|
||||||
|
```http
|
||||||
|
POST /api/nfo/update/{series_key}
|
||||||
|
```
|
||||||
|
|
||||||
|
This calls `NFOService.update_tvshow_nfo()` directly and overwrites the existing
|
||||||
|
`tvshow.nfo` with fresh data from TMDB.
|
||||||
|
|
||||||
|
### Source Files
|
||||||
|
|
||||||
|
| File | Purpose |
|
||||||
|
| ----------------------------------------------- | ---------------------------------------------------------------------------------------------- |
|
||||||
|
| `src/core/services/nfo_repair_service.py` | `REQUIRED_TAGS`, `parse_nfo_tags`, `find_missing_tags`, `nfo_needs_repair`, `NfoRepairService` |
|
||||||
|
| `src/server/services/initialization_service.py` | `perform_nfo_repair_scan` startup hook |
|
||||||
|
| `src/server/fastapi_app.py` | Wires `perform_nfo_repair_scan` into the lifespan |
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## 12. Support
|
||||||
|
|
||||||
|
### Getting Help
|
||||||
|
|
||||||
|
- Check logs in `logs/` directory for error details
|
||||||
|
- Review [TESTING.md](TESTING.md) for test coverage
|
||||||
|
- Consult [DATABASE.md](DATABASE.md) for NFO status schema
|
||||||
|
|
||||||
|
### Common Issues
|
||||||
|
|
||||||
|
See section 6 (Troubleshooting) for solutions to common problems.
|
||||||
|
|
||||||
|
### TMDB Resources
|
||||||
|
|
||||||
|
- TMDB API Documentation: https://developers.themoviedb.org/3
|
||||||
|
- TMDB Support: https://www.themoviedb.org/talk
|
||||||
|
- TMDB API Status: https://status.themoviedb.org/
|
||||||
39
docs/README.md
Normal file
39
docs/README.md
Normal file
@@ -0,0 +1,39 @@
|
|||||||
|
# Aniworld Documentation
|
||||||
|
|
||||||
|
## Overview
|
||||||
|
|
||||||
|
This directory contains all documentation for the Aniworld anime download manager project.
|
||||||
|
|
||||||
|
## Documentation Structure
|
||||||
|
|
||||||
|
| Document | Purpose | Target Audience |
|
||||||
|
| ---------------------------------------- | ---------------------------------------------- | ---------------------------------- |
|
||||||
|
| [ARCHITECTURE.md](ARCHITECTURE.md) | System architecture and design decisions | Architects, Senior Developers |
|
||||||
|
| [API.md](API.md) | REST API reference and WebSocket documentation | Frontend Developers, API Consumers |
|
||||||
|
| [DEVELOPMENT.md](DEVELOPMENT.md) | Developer setup and contribution guide | All Developers |
|
||||||
|
| [DEPLOYMENT.md](DEPLOYMENT.md) | Deployment and operations guide | DevOps, System Administrators |
|
||||||
|
| [DATABASE.md](DATABASE.md) | Database schema and data models | Backend Developers |
|
||||||
|
| [TESTING.md](TESTING.md) | Testing strategy and guidelines | QA Engineers, Developers |
|
||||||
|
| [SECURITY.md](SECURITY.md) | Security considerations and guidelines | Security Engineers, All Developers |
|
||||||
|
| [CONFIGURATION.md](CONFIGURATION.md) | Configuration options reference | Operators, Developers |
|
||||||
|
| [CHANGELOG.md](CHANGELOG.md) | Version history and changes | All Stakeholders |
|
||||||
|
| [TROUBLESHOOTING.md](TROUBLESHOOTING.md) | Common issues and solutions | Support, Operators |
|
||||||
|
| [features.md](features.md) | Feature list and capabilities | Product Owners, Users |
|
||||||
|
| [instructions.md](instructions.md) | AI agent development instructions | AI Agents, Developers |
|
||||||
|
|
||||||
|
## Documentation Standards
|
||||||
|
|
||||||
|
- All documentation uses Markdown format
|
||||||
|
- Keep documentation up-to-date with code changes
|
||||||
|
- Include code examples where applicable
|
||||||
|
- Use clear, concise language
|
||||||
|
- Include diagrams for complex concepts (use Mermaid syntax)
|
||||||
|
|
||||||
|
## Contributing to Documentation
|
||||||
|
|
||||||
|
When adding or updating documentation:
|
||||||
|
|
||||||
|
1. Follow the established format in each document
|
||||||
|
2. Update the README.md if adding new documents
|
||||||
|
3. Ensure cross-references are valid
|
||||||
|
4. Review for spelling and grammar
|
||||||
71
docs/TESTING.md
Normal file
71
docs/TESTING.md
Normal file
@@ -0,0 +1,71 @@
|
|||||||
|
# Testing Documentation
|
||||||
|
|
||||||
|
## Document Purpose
|
||||||
|
|
||||||
|
This document describes the testing strategy, guidelines, and practices for the Aniworld project.
|
||||||
|
|
||||||
|
### What This Document Contains
|
||||||
|
|
||||||
|
- **Testing Strategy**: Overall approach to quality assurance
|
||||||
|
- **Test Categories**: Unit, integration, API, performance, security tests
|
||||||
|
- **Test Structure**: Organization of test files and directories
|
||||||
|
- **Writing Tests**: Guidelines for writing effective tests
|
||||||
|
- **Fixtures and Mocking**: Shared test utilities and mock patterns
|
||||||
|
- **Running Tests**: Commands and configurations
|
||||||
|
- **Coverage Requirements**: Minimum coverage thresholds
|
||||||
|
- **CI/CD Integration**: How tests run in automation
|
||||||
|
- **Test Data Management**: Managing test fixtures and data
|
||||||
|
- **Best Practices**: Do's and don'ts for testing
|
||||||
|
|
||||||
|
### What This Document Does NOT Contain
|
||||||
|
|
||||||
|
- Production deployment (see [DEPLOYMENT.md](DEPLOYMENT.md))
|
||||||
|
- Security audit procedures (see [SECURITY.md](SECURITY.md))
|
||||||
|
- Bug tracking and issue management
|
||||||
|
- Performance benchmarking results
|
||||||
|
|
||||||
|
### Target Audience
|
||||||
|
|
||||||
|
- Developers writing tests
|
||||||
|
- QA Engineers
|
||||||
|
- CI/CD Engineers
|
||||||
|
- Code reviewers
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Sections to Document
|
||||||
|
|
||||||
|
1. Testing Philosophy
|
||||||
|
- Test pyramid approach
|
||||||
|
- Quality gates
|
||||||
|
2. Test Categories
|
||||||
|
- Unit Tests (`tests/unit/`)
|
||||||
|
- Integration Tests (`tests/integration/`)
|
||||||
|
- API Tests (`tests/api/`)
|
||||||
|
- Frontend Tests (`tests/frontend/`)
|
||||||
|
- Performance Tests (`tests/performance/`)
|
||||||
|
- Security Tests (`tests/security/`)
|
||||||
|
3. Test Structure and Naming
|
||||||
|
- File naming conventions
|
||||||
|
- Test function naming
|
||||||
|
- Test class organization
|
||||||
|
4. Running Tests
|
||||||
|
- pytest commands
|
||||||
|
- Running specific tests
|
||||||
|
- Verbose output
|
||||||
|
- Coverage reports
|
||||||
|
5. Fixtures and Conftest
|
||||||
|
- Shared fixtures
|
||||||
|
- Database fixtures
|
||||||
|
- Mock services
|
||||||
|
6. Mocking Guidelines
|
||||||
|
- What to mock
|
||||||
|
- Mock patterns
|
||||||
|
- External service mocks
|
||||||
|
7. Coverage Requirements
|
||||||
|
8. CI/CD Integration
|
||||||
|
9. Writing Good Tests
|
||||||
|
- Arrange-Act-Assert pattern
|
||||||
|
- Test isolation
|
||||||
|
- Edge cases
|
||||||
|
10. Common Pitfalls to Avoid
|
||||||
23
docs/diagrams/README.md
Normal file
23
docs/diagrams/README.md
Normal file
@@ -0,0 +1,23 @@
|
|||||||
|
# Architecture Diagrams
|
||||||
|
|
||||||
|
This directory contains architecture diagram source files for the Aniworld documentation.
|
||||||
|
|
||||||
|
## Diagrams
|
||||||
|
|
||||||
|
### System Architecture (Mermaid)
|
||||||
|
|
||||||
|
See [system-architecture.mmd](system-architecture.mmd) for the system overview diagram.
|
||||||
|
|
||||||
|
### Rendering
|
||||||
|
|
||||||
|
Diagrams can be rendered using:
|
||||||
|
|
||||||
|
- Mermaid Live Editor: https://mermaid.live/
|
||||||
|
- VS Code Mermaid extension
|
||||||
|
- GitHub/GitLab native Mermaid support
|
||||||
|
|
||||||
|
## Formats
|
||||||
|
|
||||||
|
- `.mmd` - Mermaid diagram source files
|
||||||
|
- `.svg` - Exported vector graphics (add when needed)
|
||||||
|
- `.png` - Exported raster graphics (add when needed)
|
||||||
44
docs/diagrams/download-flow.mmd
Normal file
44
docs/diagrams/download-flow.mmd
Normal file
@@ -0,0 +1,44 @@
|
|||||||
|
%%{init: {'theme': 'base'}}%%
|
||||||
|
sequenceDiagram
|
||||||
|
participant Client
|
||||||
|
participant FastAPI
|
||||||
|
participant AuthMiddleware
|
||||||
|
participant DownloadService
|
||||||
|
participant ProgressService
|
||||||
|
participant WebSocketService
|
||||||
|
participant SeriesApp
|
||||||
|
participant Database
|
||||||
|
|
||||||
|
Note over Client,Database: Download Flow
|
||||||
|
|
||||||
|
%% Add to queue
|
||||||
|
Client->>FastAPI: POST /api/queue/add
|
||||||
|
FastAPI->>AuthMiddleware: Validate JWT
|
||||||
|
AuthMiddleware-->>FastAPI: OK
|
||||||
|
FastAPI->>DownloadService: add_to_queue()
|
||||||
|
DownloadService->>Database: save_item()
|
||||||
|
Database-->>DownloadService: item_id
|
||||||
|
DownloadService-->>FastAPI: [item_ids]
|
||||||
|
FastAPI-->>Client: 201 Created
|
||||||
|
|
||||||
|
%% Start queue
|
||||||
|
Client->>FastAPI: POST /api/queue/start
|
||||||
|
FastAPI->>AuthMiddleware: Validate JWT
|
||||||
|
AuthMiddleware-->>FastAPI: OK
|
||||||
|
FastAPI->>DownloadService: start_queue_processing()
|
||||||
|
|
||||||
|
loop For each pending item
|
||||||
|
DownloadService->>SeriesApp: download_episode()
|
||||||
|
|
||||||
|
loop Progress updates
|
||||||
|
SeriesApp->>ProgressService: emit("progress_updated")
|
||||||
|
ProgressService->>WebSocketService: broadcast_to_room()
|
||||||
|
WebSocketService-->>Client: WebSocket message
|
||||||
|
end
|
||||||
|
|
||||||
|
SeriesApp-->>DownloadService: completed
|
||||||
|
DownloadService->>Database: update_status()
|
||||||
|
end
|
||||||
|
|
||||||
|
DownloadService-->>FastAPI: OK
|
||||||
|
FastAPI-->>Client: 200 OK
|
||||||
82
docs/diagrams/system-architecture.mmd
Normal file
82
docs/diagrams/system-architecture.mmd
Normal file
@@ -0,0 +1,82 @@
|
|||||||
|
%%{init: {'theme': 'base', 'themeVariables': { 'primaryColor': '#4a90d9'}}}%%
|
||||||
|
flowchart TB
|
||||||
|
subgraph Clients["Client Layer"]
|
||||||
|
Browser["Web Browser<br/>(HTML/CSS/JS)"]
|
||||||
|
CLI["CLI Client<br/>(Main.py)"]
|
||||||
|
end
|
||||||
|
|
||||||
|
subgraph Server["Server Layer (FastAPI)"]
|
||||||
|
direction TB
|
||||||
|
Middleware["Middleware<br/>Auth, Rate Limit, Error Handler"]
|
||||||
|
|
||||||
|
subgraph API["API Routers"]
|
||||||
|
AuthAPI["/api/auth"]
|
||||||
|
AnimeAPI["/api/anime"]
|
||||||
|
QueueAPI["/api/queue"]
|
||||||
|
ConfigAPI["/api/config"]
|
||||||
|
SchedulerAPI["/api/scheduler"]
|
||||||
|
HealthAPI["/health"]
|
||||||
|
WebSocketAPI["/ws"]
|
||||||
|
end
|
||||||
|
|
||||||
|
subgraph Services["Services"]
|
||||||
|
AuthService["AuthService"]
|
||||||
|
AnimeService["AnimeService"]
|
||||||
|
DownloadService["DownloadService"]
|
||||||
|
ConfigService["ConfigService"]
|
||||||
|
ProgressService["ProgressService"]
|
||||||
|
WebSocketService["WebSocketService"]
|
||||||
|
end
|
||||||
|
end
|
||||||
|
|
||||||
|
subgraph Core["Core Layer"]
|
||||||
|
SeriesApp["SeriesApp"]
|
||||||
|
SerieScanner["SerieScanner"]
|
||||||
|
SerieList["SerieList"]
|
||||||
|
end
|
||||||
|
|
||||||
|
subgraph Data["Data Layer"]
|
||||||
|
SQLite[(SQLite<br/>aniworld.db)]
|
||||||
|
ConfigJSON[(config.json)]
|
||||||
|
FileSystem[(File System<br/>Anime Directory)]
|
||||||
|
end
|
||||||
|
|
||||||
|
subgraph External["External"]
|
||||||
|
Provider["Anime Provider<br/>(aniworld.to)"]
|
||||||
|
end
|
||||||
|
|
||||||
|
%% Client connections
|
||||||
|
Browser -->|HTTP/WebSocket| Middleware
|
||||||
|
CLI -->|Direct| SeriesApp
|
||||||
|
|
||||||
|
%% Middleware to API
|
||||||
|
Middleware --> API
|
||||||
|
|
||||||
|
%% API to Services
|
||||||
|
AuthAPI --> AuthService
|
||||||
|
AnimeAPI --> AnimeService
|
||||||
|
QueueAPI --> DownloadService
|
||||||
|
ConfigAPI --> ConfigService
|
||||||
|
SchedulerAPI --> AnimeService
|
||||||
|
WebSocketAPI --> WebSocketService
|
||||||
|
|
||||||
|
%% Services to Core
|
||||||
|
AnimeService --> SeriesApp
|
||||||
|
DownloadService --> SeriesApp
|
||||||
|
|
||||||
|
%% Services to Data
|
||||||
|
AuthService --> ConfigJSON
|
||||||
|
ConfigService --> ConfigJSON
|
||||||
|
DownloadService --> SQLite
|
||||||
|
AnimeService --> SQLite
|
||||||
|
|
||||||
|
%% Core to Data
|
||||||
|
SeriesApp --> SerieScanner
|
||||||
|
SeriesApp --> SerieList
|
||||||
|
SerieScanner --> FileSystem
|
||||||
|
SerieScanner --> Provider
|
||||||
|
|
||||||
|
%% Event flow
|
||||||
|
ProgressService -.->|Events| WebSocketService
|
||||||
|
DownloadService -.->|Progress| ProgressService
|
||||||
|
WebSocketService -.->|Broadcast| Browser
|
||||||
110
docs/features.md
Normal file
110
docs/features.md
Normal file
@@ -0,0 +1,110 @@
|
|||||||
|
# Aniworld Web Application Features
|
||||||
|
|
||||||
|
## Recent Updates
|
||||||
|
|
||||||
|
### Enhanced Setup and Settings Pages (Latest)
|
||||||
|
|
||||||
|
The application now features a comprehensive configuration system that allows users to configure all settings during initial setup or modify them later through the settings modal:
|
||||||
|
|
||||||
|
**Setup Page Enhancements:**
|
||||||
|
|
||||||
|
- Single-page setup with all configuration options organized into clear sections
|
||||||
|
- Real-time password strength indicator for security
|
||||||
|
- Form validation with helpful error messages
|
||||||
|
- Comprehensive settings including: general, security, scheduler, logging, backup, and NFO metadata
|
||||||
|
|
||||||
|
**Settings Modal Enhancements:**
|
||||||
|
|
||||||
|
- All configuration fields are now editable through the main application's config modal
|
||||||
|
- Organized into logical sections with clear labels and help text
|
||||||
|
- Real-time saving with immediate feedback
|
||||||
|
- Configuration validation to prevent invalid settings
|
||||||
|
- Full control over cron-based scheduler (time, days of week, auto-download), logging options, and backup settings
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Authentication & Security
|
||||||
|
|
||||||
|
- **Master Password Login**: Secure access to the application with a master password system
|
||||||
|
- **JWT Token Sessions**: Stateless authentication with JSON Web Tokens
|
||||||
|
- **Rate Limiting**: Built-in protection against brute force attacks
|
||||||
|
|
||||||
|
## Configuration Management
|
||||||
|
|
||||||
|
- **Enhanced Setup Page**: Comprehensive initial configuration interface with all settings in one place:
|
||||||
|
- General Settings: Application name and data directory configuration
|
||||||
|
- Security Settings: Master password setup with strength indicator
|
||||||
|
- Anime Directory: Primary directory path for anime storage
|
||||||
|
- Scheduler Settings: Enable/disable scheduler, configure daily run time, select days of week, and optionally auto-download missing episodes after rescan
|
||||||
|
- Logging Settings: Configure log level, file path, file size limits, and backup count
|
||||||
|
- Backup Settings: Enable automatic backups with configurable path and retention period
|
||||||
|
- NFO Settings: TMDB API key, auto-creation options, and media file download preferences
|
||||||
|
- **Enhanced Settings/Config Modal**: Comprehensive configuration interface accessible from main page:
|
||||||
|
- General Settings: Edit application name and data directory
|
||||||
|
- Anime Directory: Modify anime storage location with browse functionality
|
||||||
|
- Scheduler Configuration: Enable/disable, set cron run time (`HH:MM`), select active days of the week, and toggle auto-download after rescan
|
||||||
|
- Logging Configuration: Full control over logging level, file rotation, and backup count
|
||||||
|
- Backup Configuration: Configure automatic backup settings including path and retention
|
||||||
|
- NFO Settings: Complete control over TMDB integration and media file downloads
|
||||||
|
- Configuration Validation: Validate configuration for errors before saving
|
||||||
|
- Backup Management: Create, restore, and manage configuration backups
|
||||||
|
- Export/Import: Export configuration for backup or transfer to another instance
|
||||||
|
|
||||||
|
## User Interface
|
||||||
|
|
||||||
|
- **Dark Mode**: Toggle between light and dark themes for better user experience
|
||||||
|
- **Responsive Design**: Mobile-friendly interface with touch support
|
||||||
|
- **Real-time Updates**: WebSocket-based live notifications and progress tracking
|
||||||
|
|
||||||
|
## Anime Management
|
||||||
|
|
||||||
|
- **Anime Library Page**: Display list of anime series with missing episodes
|
||||||
|
- **Database-Backed Series Storage**: All series metadata and missing episodes stored in SQLite database
|
||||||
|
- **Automatic Database Synchronization**: Series loaded from database on startup, stays in sync with filesystem
|
||||||
|
- **Series Selection**: Select individual anime series and add episodes to download queue
|
||||||
|
- **Anime Search**: Search for anime series using integrated providers
|
||||||
|
- **Library Scanning**: Automated scanning for missing episodes with database persistence
|
||||||
|
- **Episode Tracking**: Missing episodes tracked in database, automatically updated during scans
|
||||||
|
- **NFO Status Indicators**: Visual badges showing NFO and media file status for each series
|
||||||
|
|
||||||
|
## NFO Metadata Management
|
||||||
|
|
||||||
|
- **TMDB Integration**: Automatic metadata fetching from The Movie Database (TMDB)
|
||||||
|
- **Auto-Create NFO Files**: Automatically generate tvshow.nfo files during downloads
|
||||||
|
- **Media File Downloads**: Automatic download of poster.jpg, logo.png, and fanart.jpg
|
||||||
|
- **NFO Status Tracking**: Database tracking of NFO creation and update timestamps
|
||||||
|
- **Manual NFO Creation**: Create NFO files and download media for existing anime
|
||||||
|
- **NFO Updates**: Update existing NFO files with latest TMDB metadata
|
||||||
|
- **Batch Operations**: Create NFO files for multiple anime at once
|
||||||
|
- **NFO Content Viewing**: View generated NFO file content in the UI
|
||||||
|
- **Media Server Compatibility**: Kodi, Plex, Jellyfin, and Emby compatible format
|
||||||
|
- **Configuration Options**: Customize which media files to download and image quality
|
||||||
|
|
||||||
|
## Download Management
|
||||||
|
|
||||||
|
- **Download Queue Page**: View and manage the current download queue with organized sections
|
||||||
|
- **Queue Organization**: Displays downloads organized by status (pending, active, completed, failed)
|
||||||
|
- **NFO Integration**: Automatic NFO and media file creation before episode downloads
|
||||||
|
- **Manual Start/Stop Control**: User manually starts downloads one at a time with Start/Stop buttons
|
||||||
|
- **FIFO Queue Processing**: First-in, first-out queue order (no priority or reordering)
|
||||||
|
- **Single Download Mode**: Only one download active at a time, new downloads must be manually started
|
||||||
|
- **Download Status Display**: Real-time status updates and progress of current download
|
||||||
|
- **Queue Operations**: Add and remove items from the pending queue
|
||||||
|
- **Completed Downloads List**: Separate section for completed downloads with clear button
|
||||||
|
- **Failed Downloads List**: Separate section for failed downloads with retry and clear options
|
||||||
|
- **Retry Failed Downloads**: Automatically retry failed downloads with configurable limits
|
||||||
|
- **Clear Completed**: Remove completed downloads from the queue
|
||||||
|
- **Clear Failed**: Remove failed downloads from the queue
|
||||||
|
- **Queue Statistics**: Real-time counters for pending, active, completed, and failed items
|
||||||
|
|
||||||
|
## Real-time Communication
|
||||||
|
|
||||||
|
- **WebSocket Support**: Real-time notifications for download progress and queue updates
|
||||||
|
- **Progress Tracking**: Live progress updates for downloads and scans
|
||||||
|
- **System Notifications**: Real-time system messages and alerts
|
||||||
|
|
||||||
|
## Core Functionality Overview
|
||||||
|
|
||||||
|
The web application provides a complete interface for managing anime downloads with user-friendly pages for configuration, library management, search capabilities, and download monitoring. All operations are tracked in real-time with comprehensive progress reporting and error handling.
|
||||||
|
|
||||||
|
**NFO Metadata Features**: The application now includes full support for generating Kodi/Plex/Jellyfin/Emby compatible metadata files (tvshow.nfo) with automatic TMDB integration. NFO files are created automatically during downloads or can be managed manually through the UI. The system tracks NFO status in the database and provides comprehensive API endpoints for programmatic access. Media files (poster, logo, fanart) are automatically downloaded based on configuration settings.
|
||||||
120
docs/instructions.md
Normal file
120
docs/instructions.md
Normal file
@@ -0,0 +1,120 @@
|
|||||||
|
# Aniworld Web Application Development Instructions
|
||||||
|
|
||||||
|
This document provides detailed tasks for AI agents to implement a modern web application for the Aniworld anime download manager. All tasks should follow the coding guidelines specified in the project's copilot instructions.
|
||||||
|
|
||||||
|
## Project Overview
|
||||||
|
|
||||||
|
The goal is to create a FastAPI-based web application that provides a modern interface for the existing Aniworld anime download functionality. The core anime logic should remain in `SeriesApp.py` while the web layer provides REST API endpoints and a responsive UI.
|
||||||
|
|
||||||
|
## Architecture Principles
|
||||||
|
|
||||||
|
- **Single Responsibility**: Each file/class has one clear purpose
|
||||||
|
- **Dependency Injection**: Use FastAPI's dependency system
|
||||||
|
- **Clean Separation**: Web layer calls core logic, never the reverse
|
||||||
|
- **File Size Limit**: Maximum 500 lines per file
|
||||||
|
- **Type Hints**: Use comprehensive type annotations
|
||||||
|
- **Error Handling**: Proper exception handling and logging
|
||||||
|
|
||||||
|
## Additional Implementation Guidelines
|
||||||
|
|
||||||
|
### Code Style and Standards
|
||||||
|
|
||||||
|
- **Type Hints**: Use comprehensive type annotations throughout all modules
|
||||||
|
- **Docstrings**: Follow PEP 257 for function and class documentation
|
||||||
|
- **Error Handling**: Implement custom exception classes with meaningful messages
|
||||||
|
- **Logging**: Use structured logging with appropriate log levels
|
||||||
|
- **Security**: Validate all inputs and sanitize outputs
|
||||||
|
- **Performance**: Use async/await patterns for I/O operations
|
||||||
|
|
||||||
|
## 📞 Escalation
|
||||||
|
|
||||||
|
If you encounter:
|
||||||
|
|
||||||
|
- Architecture issues requiring design decisions
|
||||||
|
- Tests that conflict with documented requirements
|
||||||
|
- Breaking changes needed
|
||||||
|
- Unclear requirements or expectations
|
||||||
|
|
||||||
|
**Document the issue and escalate rather than guessing.**
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## <20> Credentials
|
||||||
|
|
||||||
|
**Admin Login:**
|
||||||
|
|
||||||
|
- Username: `admin`
|
||||||
|
- Password: `Hallo123!`
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## <20>📚 Helpful Commands
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Run all tests
|
||||||
|
conda run -n AniWorld python -m pytest tests/ -v --tb=short
|
||||||
|
|
||||||
|
# Run specific test file
|
||||||
|
conda run -n AniWorld python -m pytest tests/unit/test_websocket_service.py -v
|
||||||
|
|
||||||
|
# Run specific test class
|
||||||
|
conda run -n AniWorld python -m pytest tests/unit/test_websocket_service.py::TestWebSocketService -v
|
||||||
|
|
||||||
|
# Run specific test
|
||||||
|
conda run -n AniWorld python -m pytest tests/unit/test_websocket_service.py::TestWebSocketService::test_broadcast_download_progress -v
|
||||||
|
|
||||||
|
# Run with extra verbosity
|
||||||
|
conda run -n AniWorld python -m pytest tests/ -vv
|
||||||
|
|
||||||
|
# Run with full traceback
|
||||||
|
conda run -n AniWorld python -m pytest tests/ -v --tb=long
|
||||||
|
|
||||||
|
# Run and stop at first failure
|
||||||
|
conda run -n AniWorld python -m pytest tests/ -v -x
|
||||||
|
|
||||||
|
# Run tests matching pattern
|
||||||
|
conda run -n AniWorld python -m pytest tests/ -v -k "auth"
|
||||||
|
|
||||||
|
# Show all print statements
|
||||||
|
conda run -n AniWorld python -m pytest tests/ -v -s
|
||||||
|
|
||||||
|
#Run app
|
||||||
|
conda run -n AniWorld python -m uvicorn src.server.fastapi_app:app --host 127.0.0.1 --port 8000 --reload
|
||||||
|
```
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Implementation Notes
|
||||||
|
|
||||||
|
1. **Incremental Development**: Implement features incrementally, testing each component thoroughly before moving to the next
|
||||||
|
2. **Code Review**: Review all generated code for adherence to project standards
|
||||||
|
3. **Documentation**: Document all public APIs and complex logic
|
||||||
|
4. **Testing**: Maintain test coverage above 80% for all new code
|
||||||
|
5. **Performance**: Profile and optimize critical paths, especially download and streaming operations
|
||||||
|
6. **Security**: Regular security audits and dependency updates
|
||||||
|
7. **Monitoring**: Implement comprehensive monitoring and alerting
|
||||||
|
8. **Maintenance**: Plan for regular maintenance and updates
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## Task Completion Checklist
|
||||||
|
|
||||||
|
For each task completed:
|
||||||
|
|
||||||
|
- [ ] Implementation follows coding standards
|
||||||
|
- [ ] Unit tests written and passing
|
||||||
|
- [ ] Integration tests passing
|
||||||
|
- [ ] Documentation updated
|
||||||
|
- [ ] Error handling implemented
|
||||||
|
- [ ] Logging added
|
||||||
|
- [ ] Security considerations addressed
|
||||||
|
- [ ] Performance validated
|
||||||
|
- [ ] Code reviewed
|
||||||
|
- [ ] Task marked as complete in instructions.md
|
||||||
|
- [ ] Infrastructure.md updated and other docs
|
||||||
|
- [ ] Changes committed to git; keep your messages in git short and clear
|
||||||
|
- [ ] Take the next task
|
||||||
|
|
||||||
|
---
|
||||||
|
|
||||||
|
## TODO List:
|
||||||
4
docs/key
Normal file
4
docs/key
Normal file
@@ -0,0 +1,4 @@
|
|||||||
|
API key : 299ae8f630a31bda814263c551361448
|
||||||
|
|
||||||
|
/mnt/server/serien/Serien/
|
||||||
|
|
||||||
27
package.json
Normal file
27
package.json
Normal file
@@ -0,0 +1,27 @@
|
|||||||
|
{
|
||||||
|
"name": "aniworld-web",
|
||||||
|
"version": "1.0.0",
|
||||||
|
"description": "Aniworld Anime Download Manager - Web Frontend",
|
||||||
|
"type": "module",
|
||||||
|
"scripts": {
|
||||||
|
"test": "vitest run",
|
||||||
|
"test:watch": "vitest",
|
||||||
|
"test:ui": "vitest --ui",
|
||||||
|
"test:coverage": "vitest run --coverage",
|
||||||
|
"test:e2e": "playwright test",
|
||||||
|
"test:e2e:ui": "playwright test --ui",
|
||||||
|
"test:e2e:headed": "playwright test --headed",
|
||||||
|
"test:e2e:debug": "playwright test --debug",
|
||||||
|
"playwright:install": "playwright install --with-deps chromium"
|
||||||
|
},
|
||||||
|
"devDependencies": {
|
||||||
|
"@playwright/test": "^1.41.0",
|
||||||
|
"@vitest/coverage-v8": "^1.2.0",
|
||||||
|
"@vitest/ui": "^1.2.0",
|
||||||
|
"happy-dom": "^13.3.5",
|
||||||
|
"vitest": "^1.2.0"
|
||||||
|
},
|
||||||
|
"engines": {
|
||||||
|
"node": ">=18.0.0"
|
||||||
|
}
|
||||||
|
}
|
||||||
5
pyproject.toml
Normal file
5
pyproject.toml
Normal file
@@ -0,0 +1,5 @@
|
|||||||
|
[tool.pytest.ini_options]
|
||||||
|
asyncio_mode = "auto"
|
||||||
|
markers = [
|
||||||
|
"asyncio: mark test as asynchronous"
|
||||||
|
]
|
||||||
23
pytest.ini
23
pytest.ini
@@ -1,23 +0,0 @@
|
|||||||
[tool:pytest]
|
|
||||||
minversion = 6.0
|
|
||||||
addopts = -ra -q --strict-markers --strict-config --cov=src --cov-report=html --cov-report=term
|
|
||||||
testpaths =
|
|
||||||
tests
|
|
||||||
python_files =
|
|
||||||
test_*.py
|
|
||||||
*_test.py
|
|
||||||
python_classes =
|
|
||||||
Test*
|
|
||||||
python_functions =
|
|
||||||
test_*
|
|
||||||
markers =
|
|
||||||
slow: marks tests as slow (deselect with -m "not slow")
|
|
||||||
integration: marks tests as integration tests
|
|
||||||
e2e: marks tests as end-to-end tests
|
|
||||||
unit: marks tests as unit tests
|
|
||||||
api: marks tests as API tests
|
|
||||||
web: marks tests as web interface tests
|
|
||||||
smoke: marks tests as smoke tests
|
|
||||||
filterwarnings =
|
|
||||||
ignore::DeprecationWarning
|
|
||||||
ignore::PendingDeprecationWarning
|
|
||||||
@@ -1,32 +0,0 @@
|
|||||||
# Development dependencies
|
|
||||||
-r requirements.txt
|
|
||||||
|
|
||||||
# Testing
|
|
||||||
pytest>=7.4.0
|
|
||||||
pytest-cov>=4.1.0
|
|
||||||
pytest-asyncio>=0.21.0
|
|
||||||
pytest-flask>=1.2.0
|
|
||||||
pytest-mock>=3.11.0
|
|
||||||
factory-boy>=3.3.0
|
|
||||||
faker>=19.3.0
|
|
||||||
|
|
||||||
# Code Quality
|
|
||||||
black>=23.7.0
|
|
||||||
isort>=5.12.0
|
|
||||||
flake8>=6.0.0
|
|
||||||
mypy>=1.5.0
|
|
||||||
ruff>=0.0.284
|
|
||||||
|
|
||||||
# Security
|
|
||||||
bandit>=1.7.5
|
|
||||||
safety>=2.3.0
|
|
||||||
|
|
||||||
# Development tools
|
|
||||||
pre-commit>=3.3.0
|
|
||||||
coverage>=7.3.0
|
|
||||||
|
|
||||||
# Documentation
|
|
||||||
sphinx>=7.1.0
|
|
||||||
sphinx-rtd-theme>=1.3.0
|
|
||||||
sphinx-autodoc-typehints>=1.24.0
|
|
||||||
myst-parser>=2.0.0
|
|
||||||
@@ -1,9 +0,0 @@
|
|||||||
# Test dependencies only
|
|
||||||
pytest>=7.4.0
|
|
||||||
pytest-cov>=4.1.0
|
|
||||||
pytest-asyncio>=0.21.0
|
|
||||||
pytest-flask>=1.2.0
|
|
||||||
pytest-mock>=3.11.0
|
|
||||||
factory-boy>=3.3.0
|
|
||||||
faker>=19.3.0
|
|
||||||
coverage>=7.3.0
|
|
||||||
@@ -1,50 +1,27 @@
|
|||||||
# Core Flask dependencies
|
fastapi==0.104.1
|
||||||
flask>=2.3.0
|
uvicorn[standard]==0.24.0
|
||||||
flask-cors>=4.0.0
|
jinja2==3.1.2
|
||||||
flask-login>=0.6.0
|
python-multipart==0.0.6
|
||||||
flask-session>=0.5.0
|
pydantic==2.5.0
|
||||||
flask-wtf>=1.1.0
|
pydantic-settings==2.1.0
|
||||||
flask-migrate>=4.0.0
|
python-jose[cryptography]==3.3.0
|
||||||
|
passlib[bcrypt]==1.7.4
|
||||||
# Database
|
aiofiles==23.2.1
|
||||||
sqlalchemy>=2.0.0
|
websockets==12.0
|
||||||
alembic>=1.11.0
|
structlog==24.1.0
|
||||||
|
psutil==5.9.6
|
||||||
# HTTP and Web Scraping
|
pytest==7.4.3
|
||||||
|
pytest-asyncio==0.21.1
|
||||||
|
httpx==0.25.2
|
||||||
|
sqlalchemy>=2.0.35
|
||||||
|
aiosqlite>=0.19.0
|
||||||
|
aiohttp>=3.9.0
|
||||||
|
lxml>=5.0.0
|
||||||
|
pillow>=10.0.0
|
||||||
|
APScheduler>=3.10.4
|
||||||
|
Events>=0.5
|
||||||
requests>=2.31.0
|
requests>=2.31.0
|
||||||
beautifulsoup4>=4.12.0
|
beautifulsoup4>=4.12.0
|
||||||
lxml>=4.9.0
|
fake-useragent>=1.4.0
|
||||||
httpx>=0.24.0
|
yt-dlp>=2024.1.0
|
||||||
|
urllib3>=2.0.0
|
||||||
# Data Validation and Configuration
|
|
||||||
pydantic>=2.0.0
|
|
||||||
pydantic-settings>=2.0.0
|
|
||||||
python-dotenv>=1.0.0
|
|
||||||
|
|
||||||
# Task Queue and Caching
|
|
||||||
celery>=5.3.0
|
|
||||||
redis>=4.6.0
|
|
||||||
|
|
||||||
# Security
|
|
||||||
cryptography>=41.0.0
|
|
||||||
bcrypt>=4.0.0
|
|
||||||
|
|
||||||
# CLI and User Interface
|
|
||||||
click>=8.1.0
|
|
||||||
rich>=13.4.0
|
|
||||||
|
|
||||||
# System and File Operations
|
|
||||||
psutil>=5.9.0
|
|
||||||
aiofiles>=23.1.0
|
|
||||||
|
|
||||||
# WebSocket support
|
|
||||||
websockets>=11.0.0
|
|
||||||
|
|
||||||
# Template and Form handling
|
|
||||||
jinja2>=3.1.0
|
|
||||||
markupsafe>=2.1.0
|
|
||||||
wtforms>=3.0.0
|
|
||||||
email-validator>=2.0.0
|
|
||||||
|
|
||||||
# Date and time utilities
|
|
||||||
python-dateutil>=2.8.0
|
|
||||||
@@ -1,80 +0,0 @@
|
|||||||
#!/usr/bin/env python3
|
|
||||||
"""
|
|
||||||
Simple test execution script for API tests.
|
|
||||||
Run this from the command line to execute all API tests.
|
|
||||||
"""
|
|
||||||
|
|
||||||
import subprocess
|
|
||||||
import sys
|
|
||||||
import os
|
|
||||||
|
|
||||||
def main():
|
|
||||||
"""Main execution function."""
|
|
||||||
print("🚀 Aniworld API Test Executor")
|
|
||||||
print("=" * 40)
|
|
||||||
|
|
||||||
# Get the directory of this script
|
|
||||||
script_dir = os.path.dirname(os.path.abspath(__file__))
|
|
||||||
project_root = os.path.join(script_dir, '..', '..')
|
|
||||||
|
|
||||||
# Change to project root
|
|
||||||
os.chdir(project_root)
|
|
||||||
|
|
||||||
print(f"📁 Working directory: {os.getcwd()}")
|
|
||||||
print(f"🐍 Python version: {sys.version}")
|
|
||||||
|
|
||||||
# Try to run the comprehensive test runner
|
|
||||||
test_runner = os.path.join('tests', 'unit', 'web', 'run_api_tests.py')
|
|
||||||
|
|
||||||
if os.path.exists(test_runner):
|
|
||||||
print(f"\n🧪 Running comprehensive test suite...")
|
|
||||||
try:
|
|
||||||
result = subprocess.run([sys.executable, test_runner], capture_output=False)
|
|
||||||
return result.returncode
|
|
||||||
except Exception as e:
|
|
||||||
print(f"❌ Error running comprehensive tests: {e}")
|
|
||||||
|
|
||||||
# Fallback to individual test files
|
|
||||||
print(f"\n🔄 Falling back to individual test execution...")
|
|
||||||
|
|
||||||
test_files = [
|
|
||||||
os.path.join('tests', 'unit', 'web', 'test_api_endpoints.py'),
|
|
||||||
os.path.join('tests', 'integration', 'test_api_integration.py')
|
|
||||||
]
|
|
||||||
|
|
||||||
total_failures = 0
|
|
||||||
|
|
||||||
for test_file in test_files:
|
|
||||||
if os.path.exists(test_file):
|
|
||||||
print(f"\n📋 Running {test_file}...")
|
|
||||||
try:
|
|
||||||
result = subprocess.run([
|
|
||||||
sys.executable, '-m', 'unittest',
|
|
||||||
test_file.replace('/', '.').replace('\\', '.').replace('.py', ''),
|
|
||||||
'-v'
|
|
||||||
], capture_output=False, cwd=project_root)
|
|
||||||
|
|
||||||
if result.returncode != 0:
|
|
||||||
total_failures += 1
|
|
||||||
print(f"❌ Test file {test_file} had failures")
|
|
||||||
else:
|
|
||||||
print(f"✅ Test file {test_file} passed")
|
|
||||||
|
|
||||||
except Exception as e:
|
|
||||||
print(f"❌ Error running {test_file}: {e}")
|
|
||||||
total_failures += 1
|
|
||||||
else:
|
|
||||||
print(f"⚠️ Test file not found: {test_file}")
|
|
||||||
|
|
||||||
# Final summary
|
|
||||||
print(f"\n{'='*40}")
|
|
||||||
if total_failures == 0:
|
|
||||||
print("🎉 All tests completed successfully!")
|
|
||||||
return 0
|
|
||||||
else:
|
|
||||||
print(f"❌ {total_failures} test file(s) had issues")
|
|
||||||
return 1
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
exit_code = main()
|
|
||||||
sys.exit(exit_code)
|
|
||||||
@@ -1,17 +1,34 @@
|
|||||||
#!/usr/bin/env python3
|
#!/usr/bin/env python3
|
||||||
|
"""
|
||||||
|
Startup script for the Aniworld FastAPI application.
|
||||||
|
|
||||||
import os
|
This script starts the application with proper logging configuration
|
||||||
import sys
|
and graceful shutdown support via Ctrl+C (SIGINT) or SIGTERM.
|
||||||
import subprocess
|
"""
|
||||||
|
import uvicorn
|
||||||
|
|
||||||
# Change to the server directory
|
from src.infrastructure.logging.uvicorn_config import get_uvicorn_log_config
|
||||||
server_dir = os.path.join(os.path.dirname(__file__), 'src', 'server')
|
|
||||||
os.chdir(server_dir)
|
|
||||||
|
|
||||||
# Add parent directory to Python path
|
if __name__ == "__main__":
|
||||||
sys.path.insert(0, '..')
|
# Get logging configuration
|
||||||
|
log_config = get_uvicorn_log_config()
|
||||||
|
|
||||||
# Run the app
|
# Run the application with logging.
|
||||||
if __name__ == '__main__':
|
# Only watch .py files in src/, explicitly exclude __pycache__.
|
||||||
# Use subprocess to run the app properly
|
# This prevents reload loops from .pyc compilation.
|
||||||
subprocess.run([sys.executable, 'app.py'], cwd=server_dir)
|
#
|
||||||
|
# Graceful shutdown:
|
||||||
|
# - Ctrl+C (SIGINT) or SIGTERM triggers graceful shutdown
|
||||||
|
# - timeout_graceful_shutdown ensures shutdown completes within 30s
|
||||||
|
# - The FastAPI lifespan handler orchestrates cleanup in proper order
|
||||||
|
uvicorn.run(
|
||||||
|
"src.server.fastapi_app:app",
|
||||||
|
host="127.0.0.1",
|
||||||
|
port=8000,
|
||||||
|
reload=True,
|
||||||
|
reload_dirs=["src"],
|
||||||
|
reload_includes=["*.py"],
|
||||||
|
reload_excludes=["*/__pycache__/*", "*.pyc"],
|
||||||
|
log_config=log_config,
|
||||||
|
timeout_graceful_shutdown=30, # Allow 30s for graceful shutdown
|
||||||
|
)
|
||||||
|
|||||||
Binary file not shown.
Binary file not shown.
Binary file not shown.
229
src/cli/Main.py
229
src/cli/Main.py
@@ -1,229 +0,0 @@
|
|||||||
import sys
|
|
||||||
import os
|
|
||||||
import logging
|
|
||||||
from server.infrastructure.providers import aniworld_provider
|
|
||||||
|
|
||||||
from rich.progress import Progress
|
|
||||||
from server.core.entities import SerieList
|
|
||||||
from src.server.core.SerieScanner import SerieScanner
|
|
||||||
from server.infrastructure.providers.provider_factory import Loaders
|
|
||||||
from server.core.entities.series import Serie
|
|
||||||
import time
|
|
||||||
|
|
||||||
# Configure logging
|
|
||||||
logging.basicConfig(level=logging.FATAL, format='%(asctime)s - %(levelname)s - %(funcName)s - %(message)s')
|
|
||||||
console_handler = logging.StreamHandler()
|
|
||||||
console_handler.setLevel(logging.ERROR)
|
|
||||||
console_handler.setFormatter(logging.Formatter(
|
|
||||||
"%(asctime)s - %(levelname)s - %(funcName)s - %(message)s")
|
|
||||||
)
|
|
||||||
for h in logging.root.handlers:
|
|
||||||
logging.root.removeHandler(h)
|
|
||||||
|
|
||||||
logging.getLogger("urllib3.connectionpool").setLevel(logging.ERROR)
|
|
||||||
logging.getLogger('charset_normalizer').setLevel(logging.ERROR)
|
|
||||||
logging.getLogger().setLevel(logging.ERROR)
|
|
||||||
for h in logging.getLogger().handlers:
|
|
||||||
logging.getLogger().removeHandler(h)
|
|
||||||
|
|
||||||
|
|
||||||
class NoKeyFoundException(Exception):
|
|
||||||
"""Exception raised when an anime key cannot be found."""
|
|
||||||
pass
|
|
||||||
class MatchNotFoundError(Exception):
|
|
||||||
"""Exception raised when an anime key cannot be found."""
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
class SeriesApp:
|
|
||||||
_initialization_count = 0 # Track how many times initialization has been called
|
|
||||||
|
|
||||||
def __init__(self, directory_to_search: str):
|
|
||||||
SeriesApp._initialization_count += 1
|
|
||||||
|
|
||||||
# Only show initialization message for the first instance
|
|
||||||
if SeriesApp._initialization_count <= 1:
|
|
||||||
print("Please wait while initializing...")
|
|
||||||
|
|
||||||
self.progress = None
|
|
||||||
self.directory_to_search = directory_to_search
|
|
||||||
self.Loaders = Loaders()
|
|
||||||
loader = self.Loaders.GetLoader(key="aniworld.to")
|
|
||||||
self.SerieScanner = SerieScanner(directory_to_search, loader)
|
|
||||||
|
|
||||||
self.List = SerieList(self.directory_to_search)
|
|
||||||
self.__InitList__()
|
|
||||||
|
|
||||||
def __InitList__(self):
|
|
||||||
self.series_list = self.List.GetMissingEpisode()
|
|
||||||
|
|
||||||
|
|
||||||
def display_series(self):
|
|
||||||
"""Print all series with assigned numbers."""
|
|
||||||
print("\nCurrent result:")
|
|
||||||
for i, serie in enumerate(self.series_list, 1):
|
|
||||||
name = serie.name # Access the property on the instance
|
|
||||||
if name is None or str(name).strip() == "":
|
|
||||||
print(f"{i}. {serie.folder}")
|
|
||||||
else:
|
|
||||||
print(f"{i}. {serie.name}")
|
|
||||||
|
|
||||||
def search(self, words :str) -> list:
|
|
||||||
loader = self.Loaders.GetLoader(key="aniworld.to")
|
|
||||||
return loader.Search(words)
|
|
||||||
|
|
||||||
def get_user_selection(self):
|
|
||||||
"""Handle user input for selecting series."""
|
|
||||||
self.display_series()
|
|
||||||
while True:
|
|
||||||
selection = input(
|
|
||||||
"\nSelect series by number (e.g. '1', '1,2' or 'all') or type 'exit' to return: ").strip().lower()
|
|
||||||
|
|
||||||
if selection == "exit":
|
|
||||||
return None
|
|
||||||
|
|
||||||
selected_series = []
|
|
||||||
if selection == "all":
|
|
||||||
selected_series = self.series_list
|
|
||||||
else:
|
|
||||||
try:
|
|
||||||
indexes = [int(num) - 1 for num in selection.split(",")]
|
|
||||||
selected_series = [self.series_list[i] for i in indexes if 0 <= i < len(self.series_list)]
|
|
||||||
except ValueError:
|
|
||||||
print("Invalid selection. Going back to the result display.")
|
|
||||||
self.display_series()
|
|
||||||
continue
|
|
||||||
|
|
||||||
if selected_series:
|
|
||||||
return selected_series
|
|
||||||
else:
|
|
||||||
print("No valid series selected. Going back to the result display.")
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def retry(self, func, max_retries=3, delay=2, *args, **kwargs):
|
|
||||||
for attempt in range(1, max_retries + 1):
|
|
||||||
try:
|
|
||||||
func(*args, **kwargs)
|
|
||||||
return True
|
|
||||||
except Exception as e:
|
|
||||||
|
|
||||||
print(e)
|
|
||||||
time.sleep(delay)
|
|
||||||
return False
|
|
||||||
|
|
||||||
def download_series(self, series):
|
|
||||||
"""Simulate the downloading process with a progress bar."""
|
|
||||||
total_downloaded = 0
|
|
||||||
total_episodes = sum(sum(len(ep) for ep in serie.episodeDict.values()) for serie in series)
|
|
||||||
self.progress = Progress()
|
|
||||||
task1 = self.progress.add_task("[red]Processing...", total=total_episodes)
|
|
||||||
task2 = self.progress.add_task(f"[green]...", total=0)
|
|
||||||
self.task3 = self.progress.add_task(f"[Gray]...", total=100) # Setze total auf 100 für Prozentanzeige
|
|
||||||
self.progress.start()
|
|
||||||
|
|
||||||
for serie in series:
|
|
||||||
serie_episodes = sum(len(ep) for ep in serie.episodeDict.values())
|
|
||||||
self.progress.update(task2, description=f"[green]{serie.folder}", total=serie_episodes)
|
|
||||||
downloaded = 0
|
|
||||||
for season, episodes in serie.episodeDict.items():
|
|
||||||
for episode in episodes:
|
|
||||||
loader = self.Loaders.GetLoader(key="aniworld.to")
|
|
||||||
if loader.IsLanguage(season, episode, serie.key):
|
|
||||||
self.retry(loader.Download, 3, 1, self.directory_to_search, serie.folder, season, episode, serie.key, "German Dub",self.print_Download_Progress)
|
|
||||||
|
|
||||||
downloaded += 1
|
|
||||||
total_downloaded += 1
|
|
||||||
|
|
||||||
self.progress.update(task1, advance=1)
|
|
||||||
self.progress.update(task2, advance=1)
|
|
||||||
time.sleep(0.02)
|
|
||||||
|
|
||||||
self.progress.stop()
|
|
||||||
self.progress = None
|
|
||||||
|
|
||||||
def print_Download_Progress(self, d):
|
|
||||||
# Nutze self.progress und self.task3 für Fortschrittsanzeige
|
|
||||||
if self.progress is None or not hasattr(self, 'task3'):
|
|
||||||
return
|
|
||||||
|
|
||||||
if d['status'] == 'downloading':
|
|
||||||
total = d.get('total_bytes') or d.get('total_bytes_estimate')
|
|
||||||
downloaded = d.get('downloaded_bytes', 0)
|
|
||||||
if total:
|
|
||||||
percent = downloaded / total * 100
|
|
||||||
self.progress.update(self.task3, completed=percent, description=f"[gray]Download: {percent:.1f}%")
|
|
||||||
else:
|
|
||||||
self.progress.update(self.task3, description=f"[gray]{downloaded/1024/1024:.2f}MB geladen")
|
|
||||||
elif d['status'] == 'finished':
|
|
||||||
self.progress.update(self.task3, completed=100, description="[gray]Download abgeschlossen.")
|
|
||||||
|
|
||||||
def search_mode(self):
|
|
||||||
"""Search for a series and allow user to select an option."""
|
|
||||||
search_string = input("Enter search string: ").strip()
|
|
||||||
results = self.search(search_string)
|
|
||||||
|
|
||||||
if not results:
|
|
||||||
print("No results found. Returning to start.")
|
|
||||||
return
|
|
||||||
|
|
||||||
print("\nSearch results:")
|
|
||||||
for i, result in enumerate(results, 1):
|
|
||||||
print(f"{i}. {result.get('name')}")
|
|
||||||
|
|
||||||
while True:
|
|
||||||
selection = input("\nSelect an option by number or type '<enter>' to return: ").strip().lower()
|
|
||||||
|
|
||||||
if selection == "":
|
|
||||||
return
|
|
||||||
|
|
||||||
try:
|
|
||||||
index = int(selection) - 1
|
|
||||||
if 0 <= index < len(results):
|
|
||||||
chosen_name = results[index]
|
|
||||||
self.List.add(Serie(chosen_name["link"], chosen_name["name"], "aniworld.to", chosen_name["link"], {}))
|
|
||||||
return
|
|
||||||
else:
|
|
||||||
print("Invalid selection. Try again.")
|
|
||||||
except ValueError:
|
|
||||||
print("Invalid input. Try again.")
|
|
||||||
|
|
||||||
def updateFromReinit(self, folder, counter):
|
|
||||||
self.progress.update(self.task1, advance=1)
|
|
||||||
|
|
||||||
def run(self):
|
|
||||||
"""Main function to run the app."""
|
|
||||||
while True:
|
|
||||||
action = input("\nChoose action ('s' for search, 'i' for init or 'd' for download): ").strip().lower()
|
|
||||||
|
|
||||||
if action == "s":
|
|
||||||
self.search_mode()
|
|
||||||
if action == "i":
|
|
||||||
|
|
||||||
print("\nRescanning series...\n")
|
|
||||||
|
|
||||||
self.progress = Progress()
|
|
||||||
self.task1 = self.progress.add_task("[red]items processed...", total=300)
|
|
||||||
self.progress.start()
|
|
||||||
|
|
||||||
self.SerieScanner.Reinit()
|
|
||||||
self.SerieScanner.Scan(self.updateFromReinit)
|
|
||||||
|
|
||||||
self.List = SerieList(self.directory_to_search)
|
|
||||||
self.__InitList__()
|
|
||||||
|
|
||||||
self.progress.stop()
|
|
||||||
self.progress = None
|
|
||||||
|
|
||||||
elif action == "d":
|
|
||||||
selected_series = self.get_user_selection()
|
|
||||||
if selected_series:
|
|
||||||
self.download_series(selected_series)
|
|
||||||
|
|
||||||
# Run the app
|
|
||||||
if __name__ == "__main__":
|
|
||||||
|
|
||||||
# Read the base directory from an environment variable
|
|
||||||
directory_to_search = os.getenv("ANIME_DIRECTORY", "\\\\sshfs.r\\ubuntu@192.168.178.43\\media\\serien\\Serien")
|
|
||||||
app = SeriesApp(directory_to_search)
|
|
||||||
app.run()
|
|
||||||
@@ -1,491 +0,0 @@
|
|||||||
2025-09-29 12:38:25 - INFO - __main__ - <module> - Enhanced logging system initialized
|
|
||||||
2025-09-29 12:38:25 - INFO - __main__ - <module> - Starting Aniworld Flask server...
|
|
||||||
2025-09-29 12:38:25 - INFO - __main__ - <module> - Anime directory: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien
|
|
||||||
2025-09-29 12:38:25 - INFO - __main__ - <module> - Log level: INFO
|
|
||||||
2025-09-29 12:38:25 - INFO - __main__ - <module> - Scheduled operations disabled
|
|
||||||
2025-09-29 12:38:25 - INFO - __main__ - <module> - Server will be available at http://localhost:5000
|
|
||||||
2025-09-29 12:38:30 - INFO - __main__ - <module> - Enhanced logging system initialized
|
|
||||||
2025-09-29 12:38:30 - INFO - __main__ - <module> - Starting Aniworld Flask server...
|
|
||||||
2025-09-29 12:38:30 - INFO - __main__ - <module> - Anime directory: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien
|
|
||||||
2025-09-29 12:38:30 - INFO - __main__ - <module> - Log level: INFO
|
|
||||||
2025-09-29 12:38:30 - INFO - __main__ - <module> - Scheduled operations disabled
|
|
||||||
2025-09-29 12:38:30 - INFO - __main__ - <module> - Server will be available at http://localhost:5000
|
|
||||||
2025-09-29 12:38:30 - WARNING - werkzeug - _log - * Debugger is active!
|
|
||||||
2025-09-29 12:38:40 - INFO - root - __init__ - Initialized Loader with base path: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien
|
|
||||||
2025-09-29 12:38:40 - INFO - root - load_series - Scanning anime folders in: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien
|
|
||||||
2025-09-29 12:38:40 - WARNING - root - load_series - Skipping .deletedByTMM - No data folder found
|
|
||||||
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\2.5 Dimensional Seduction (2024)\data
|
|
||||||
2025-09-29 12:38:40 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\2.5 Dimensional Seduction (2024)\data for 2.5 Dimensional Seduction (2024)
|
|
||||||
2025-09-29 12:38:40 - WARNING - root - load_series - Skipping 25-dimensional-seduction - No data folder found
|
|
||||||
2025-09-29 12:38:40 - WARNING - root - load_series - Skipping 25-sai no Joshikousei (2018) - No data folder found
|
|
||||||
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\7th Time Loop The Villainess Enjoys a Carefree Life Married to Her Worst Enemy! (2024)\data
|
|
||||||
2025-09-29 12:38:40 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\7th Time Loop The Villainess Enjoys a Carefree Life Married to Her Worst Enemy! (2024)\data for 7th Time Loop The Villainess Enjoys a Carefree Life Married to Her Worst Enemy! (2024)
|
|
||||||
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\9-nine-rulers-crown\data
|
|
||||||
2025-09-29 12:38:40 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\9-nine-rulers-crown\data for 9-nine-rulers-crown
|
|
||||||
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\A Couple of Cuckoos (2022)\data
|
|
||||||
2025-09-29 12:38:40 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\A Couple of Cuckoos (2022)\data for A Couple of Cuckoos (2022)
|
|
||||||
2025-09-29 12:38:40 - WARNING - root - load_series - Skipping A Time Called You (2023) - No data folder found
|
|
||||||
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\A.I.C.O. Incarnation (2018)\data
|
|
||||||
2025-09-29 12:38:40 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\A.I.C.O. Incarnation (2018)\data for A.I.C.O. Incarnation (2018)
|
|
||||||
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Aesthetica of a Rogue Hero (2012)\data
|
|
||||||
2025-09-29 12:38:40 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Aesthetica of a Rogue Hero (2012)\data for Aesthetica of a Rogue Hero (2012)
|
|
||||||
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Alya Sometimes Hides Her Feelings in Russian (2024)\data
|
|
||||||
2025-09-29 12:38:40 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Alya Sometimes Hides Her Feelings in Russian (2024)\data for Alya Sometimes Hides Her Feelings in Russian (2024)
|
|
||||||
2025-09-29 12:38:40 - WARNING - root - load_series - Skipping American Horror Story (2011) - No data folder found
|
|
||||||
2025-09-29 12:38:40 - WARNING - root - load_series - Skipping Andor (2022) - No data folder found
|
|
||||||
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Angels of Death (2018)\data
|
|
||||||
2025-09-29 12:38:40 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Angels of Death (2018)\data for Angels of Death (2018)
|
|
||||||
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Aokana Four Rhythm Across the Blue (2016)\data
|
|
||||||
2025-09-29 12:38:40 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Aokana Four Rhythm Across the Blue (2016)\data for Aokana Four Rhythm Across the Blue (2016)
|
|
||||||
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Arifureta (2019)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Arifureta (2019)\data for Arifureta (2019)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\As a Reincarnated Aristocrat, I'll Use My Appraisal Skill to Rise in the World (2024)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\As a Reincarnated Aristocrat, I'll Use My Appraisal Skill to Rise in the World (2024)\data for As a Reincarnated Aristocrat, I'll Use My Appraisal Skill to Rise in the World (2024)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\BOFURI I Don't Want to Get Hurt, so I'll Max Out My Defense. (2020)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\BOFURI I Don't Want to Get Hurt, so I'll Max Out My Defense. (2020)\data for BOFURI I Don't Want to Get Hurt, so I'll Max Out My Defense. (2020)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Black Butler (2008)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Black Butler (2008)\data for Black Butler (2008)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Black Clover (2017)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Black Clover (2017)\data for Black Clover (2017)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blast of Tempest (2012)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blast of Tempest (2012)\data for Blast of Tempest (2012)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blood Lad (2013)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blood Lad (2013)\data for Blood Lad (2013)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blue Box (2024)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blue Box (2024)\data for Blue Box (2024)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blue Exorcist (2011)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blue Exorcist (2011)\data for Blue Exorcist (2011)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Bogus Skill Fruitmaster About That Time I Became Able to Eat Unlimited Numbers of Skill Fruits (That Kill You) (2025)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Bogus Skill Fruitmaster About That Time I Became Able to Eat Unlimited Numbers of Skill Fruits (That Kill You) (2025)\data for Bogus Skill Fruitmaster About That Time I Became Able to Eat Unlimited Numbers of Skill Fruits (That Kill You) (2025)
|
|
||||||
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Boys Over Flowers (2009) - No data folder found
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Burst Angel (2004)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Burst Angel (2004)\data for Burst Angel (2004)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\By the Grace of the Gods (2020)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\By the Grace of the Gods (2020)\data for By the Grace of the Gods (2020)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Call of the Night (2022)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Call of the Night (2022)\data for Call of the Night (2022)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Campfire Cooking in Another World with My Absurd Skill (2023)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Campfire Cooking in Another World with My Absurd Skill (2023)\data for Campfire Cooking in Another World with My Absurd Skill (2023)
|
|
||||||
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Celebrity (2023) - No data folder found
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Chainsaw Man (2022)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Chainsaw Man (2022)\data for Chainsaw Man (2022)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Charlotte (2015)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Charlotte (2015)\data for Charlotte (2015)
|
|
||||||
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Cherish the Day (2020) - No data folder found
|
|
||||||
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Chernobyl (2019) - No data folder found
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Chillin’ in Another World with Level 2 Super Cheat Powers (2024)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Chillin’ in Another World with Level 2 Super Cheat Powers (2024)\data for Chillin’ in Another World with Level 2 Super Cheat Powers (2024)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Clannad (2007)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Clannad (2007)\data for Clannad (2007)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Classroom of the Elite (2017)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Classroom of the Elite (2017)\data for Classroom of the Elite (2017)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Clevatess (2025)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Clevatess (2025)\data for Clevatess (2025)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\DAN DA DAN (2024)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\DAN DA DAN (2024)\data for DAN DA DAN (2024)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Danmachi Is It Wrong to Try to Pick Up Girls in a Dungeon (2015)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Danmachi Is It Wrong to Try to Pick Up Girls in a Dungeon (2015)\data for Danmachi Is It Wrong to Try to Pick Up Girls in a Dungeon (2015)
|
|
||||||
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Das Buch von Boba Fett (2021) - No data folder found
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Date a Live (2013)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Date a Live (2013)\data for Date a Live (2013)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dead Mount Death Play (2023)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dead Mount Death Play (2023)\data for Dead Mount Death Play (2023)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Deadman Wonderland (2011)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Deadman Wonderland (2011)\data for Deadman Wonderland (2011)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dealing with Mikadono Sisters Is a Breeze (2025)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dealing with Mikadono Sisters Is a Breeze (2025)\data for Dealing with Mikadono Sisters Is a Breeze (2025)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Delicious in Dungeon (2024)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Delicious in Dungeon (2024)\data for Delicious in Dungeon (2024)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Demon Lord, Retry! (2019)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Demon Lord, Retry! (2019)\data for Demon Lord, Retry! (2019)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Demon Slave - The Chained Soldier (2024)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Demon Slave - The Chained Soldier (2024)\data for Demon Slave - The Chained Soldier (2024)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Demon Slayer Kimetsu no Yaiba (2019)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Demon Slayer Kimetsu no Yaiba (2019)\data for Demon Slayer Kimetsu no Yaiba (2019)
|
|
||||||
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Der Herr der Ringe Die Ringe der Macht (2022) - No data folder found
|
|
||||||
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Devil in Ohio (2022) - No data folder found
|
|
||||||
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Die Bibel (2013) - No data folder found
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Die Tagebücher der Apothekerin (2023)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Die Tagebücher der Apothekerin (2023)\data for Die Tagebücher der Apothekerin (2023)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Domestic Girlfriend (2019)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Domestic Girlfriend (2019)\data for Domestic Girlfriend (2019)
|
|
||||||
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Doona! (2023) - No data folder found
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dr. STONE (2019)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dr. STONE (2019)\data for Dr. STONE (2019)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dragonball Super (2015)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dragonball Super (2015)\data for Dragonball Super (2015)
|
|
||||||
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Failure Frame I Became the Strongest and Annihilated Everything With Low-Level Spells (2024) - No data folder found
|
|
||||||
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Fallout (2024) - No data folder found
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Farming Life in Another World (2023)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Farming Life in Another World (2023)\data for Farming Life in Another World (2023)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Frieren - Nach dem Ende der Reise (2023)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Frieren - Nach dem Ende der Reise (2023)\data for Frieren - Nach dem Ende der Reise (2023)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Fruits Basket (2019)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Fruits Basket (2019)\data for Fruits Basket (2019)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Gachiakuta (2025)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Gachiakuta (2025)\data for Gachiakuta (2025)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Gate (2015)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Gate (2015)\data for Gate (2015)
|
|
||||||
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Generation der Verdammten (2014) - No data folder found
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Girls und Panzer (2012)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Girls und Panzer (2012)\data for Girls und Panzer (2012)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Gleipnir (2020)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Gleipnir (2020)\data for Gleipnir (2020)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Golden Time (2013)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Golden Time (2013)\data for Golden Time (2013)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Grimgar, Ashes and Illusions (2016)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Grimgar, Ashes and Illusions (2016)\data for Grimgar, Ashes and Illusions (2016)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Harem in the Labyrinth of Another World (2022)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Harem in the Labyrinth of Another World (2022)\data for Harem in the Labyrinth of Another World (2022)
|
|
||||||
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Highschool D×D (2012) - No data folder found
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Hinamatsuri (2018)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Hinamatsuri (2018)\data for Hinamatsuri (2018)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I Got a Cheat Skill in Another World and Became Unrivaled in The Real World Too (2023)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I Got a Cheat Skill in Another World and Became Unrivaled in The Real World Too (2023)\data for I Got a Cheat Skill in Another World and Became Unrivaled in The Real World Too (2023)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I Parry Everything What Do You Mean I’m the Strongest I’m Not Even an Adventurer Yet! (2024)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I Parry Everything What Do You Mean I’m the Strongest I’m Not Even an Adventurer Yet! (2024)\data for I Parry Everything What Do You Mean I’m the Strongest I’m Not Even an Adventurer Yet! (2024)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I'm the Evil Lord of an Intergalactic Empire! (2025)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I'm the Evil Lord of an Intergalactic Empire! (2025)\data for I'm the Evil Lord of an Intergalactic Empire! (2025)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I've Been Killing Slimes for 300 Years and Maxed Out My Level (2021)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I've Been Killing Slimes for 300 Years and Maxed Out My Level (2021)\data for I've Been Killing Slimes for 300 Years and Maxed Out My Level (2021)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\In the Land of Leadale (2022)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\In the Land of Leadale (2022)\data for In the Land of Leadale (2022)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Ishura (2024)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Ishura (2024)\data for Ishura (2024)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I’ll Become a Villainess Who Goes Down in History (2024)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I’ll Become a Villainess Who Goes Down in History (2024)\data for I’ll Become a Villainess Who Goes Down in History (2024)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\JUJUTSU KAISEN (2020)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\JUJUTSU KAISEN (2020)\data for JUJUTSU KAISEN (2020)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kaguya-sama Love is War (2019)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kaguya-sama Love is War (2019)\data for Kaguya-sama Love is War (2019)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kaiju No. 8 (20200)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kaiju No. 8 (20200)\data for Kaiju No. 8 (20200)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\KamiKatsu Meine Arbeit als Missionar in einer gottlosen Welt (2023)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\KamiKatsu Meine Arbeit als Missionar in einer gottlosen Welt (2023)\data for KamiKatsu Meine Arbeit als Missionar in einer gottlosen Welt (2023)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Knight's & Magic (2017)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Knight's & Magic (2017)\data for Knight's & Magic (2017)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kombattanten werden entsandt! (2021)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kombattanten werden entsandt! (2021)\data for Kombattanten werden entsandt! (2021)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\KonoSuba – An Explosion on This Wonderful World! (2023)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\KonoSuba – An Explosion on This Wonderful World! (2023)\data for KonoSuba – An Explosion on This Wonderful World! (2023)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Konosuba God's Blessing on This Wonderful World! (2016)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Konosuba God's Blessing on This Wonderful World! (2016)\data for Konosuba God's Blessing on This Wonderful World! (2016)
|
|
||||||
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Krieg der Welten (2019) - No data folder found
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kuma Kuma Kuma Bear (2020)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kuma Kuma Kuma Bear (2020)\data for Kuma Kuma Kuma Bear (2020)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Log Horizon (2013)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Log Horizon (2013)\data for Log Horizon (2013)
|
|
||||||
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Loki (2021) - No data folder found
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Loner Life in Another World (2024)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Loner Life in Another World (2024)\data for Loner Life in Another World (2024)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Lord of Mysteries (2025)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Lord of Mysteries (2025)\data for Lord of Mysteries (2025)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Lycoris Recoil (2022)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Lycoris Recoil (2022)\data for Lycoris Recoil (2022)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Magic Maker How to Make Magic in Another World (2025)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Magic Maker How to Make Magic in Another World (2025)\data for Magic Maker How to Make Magic in Another World (2025)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Magical Girl Site (2018)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Magical Girl Site (2018)\data for Magical Girl Site (2018)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Management of a Novice Alchemist (2022)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Management of a Novice Alchemist (2022)\data for Management of a Novice Alchemist (2022)
|
|
||||||
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Marianne (2019) - No data folder found
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Meine Wiedergeburt als Schleim in einer anderen Welt (2018)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Meine Wiedergeburt als Schleim in einer anderen Welt (2018)\data for Meine Wiedergeburt als Schleim in einer anderen Welt (2018)
|
|
||||||
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Midnight Mass (2021) - No data folder found
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Mirai Nikki (2011)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Mirai Nikki (2011)\data for Mirai Nikki (2011)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Miss Kobayashi's Dragon Maid (2017)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Miss Kobayashi's Dragon Maid (2017)\data for Miss Kobayashi's Dragon Maid (2017)
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Mob Psycho 100 (2016)\data
|
|
||||||
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Mob Psycho 100 (2016)\data for Mob Psycho 100 (2016)
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\More than a Married Couple, but Not Lovers (2022)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\More than a Married Couple, but Not Lovers (2022)\data for More than a Married Couple, but Not Lovers (2022)
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Mushoku Tensei Jobless Reincarnation (2021)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Mushoku Tensei Jobless Reincarnation (2021)\data for Mushoku Tensei Jobless Reincarnation (2021)
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Hero Academia Vigilantes (2025)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Hero Academia Vigilantes (2025)\data for My Hero Academia Vigilantes (2025)
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Instant Death Ability Is So Overpowered, No One in This Other World Stands a Chance Against Me! (2024)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Instant Death Ability Is So Overpowered, No One in This Other World Stands a Chance Against Me! (2024)\data for My Instant Death Ability Is So Overpowered, No One in This Other World Stands a Chance Against Me! (2024)
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Isekai Life (2022)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Isekai Life (2022)\data for My Isekai Life (2022)
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Life as Inukai-san's Dog (2023)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Life as Inukai-san's Dog (2023)\data for My Life as Inukai-san's Dog (2023)
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Unique Skill Makes Me OP even at Level 1 (2023)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Unique Skill Makes Me OP even at Level 1 (2023)\data for My Unique Skill Makes Me OP even at Level 1 (2023)
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\New Saga (2025)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\New Saga (2025)\data for New Saga (2025)
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Nina the Starry Bride (2024)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Nina the Starry Bride (2024)\data for Nina the Starry Bride (2024)
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Nisekoi Liebe, Lügen & Yakuza (2014)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Nisekoi Liebe, Lügen & Yakuza (2014)\data for Nisekoi Liebe, Lügen & Yakuza (2014)
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\No Game No Life (2014)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\No Game No Life (2014)\data for No Game No Life (2014)
|
|
||||||
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Obi-Wan Kenobi (2022) - No data folder found
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Orange (2016)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Orange (2016)\data for Orange (2016)
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Peach Boy Riverside (2021)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Peach Boy Riverside (2021)\data for Peach Boy Riverside (2021)
|
|
||||||
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Penny Dreadful (2014) - No data folder found
|
|
||||||
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Planet Erde II Eine Erde - viele Welten (2016) - No data folder found
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Plastic Memories (2015)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Plastic Memories (2015)\data for Plastic Memories (2015)
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Ragna Crimson (2023)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Ragna Crimson (2023)\data for Ragna Crimson (2023)
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Rascal Does Not Dream of Bunny Girl Senpai (2018)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Rascal Does Not Dream of Bunny Girl Senpai (2018)\data for Rascal Does Not Dream of Bunny Girl Senpai (2018)
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\ReMonster (2024)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\ReMonster (2024)\data for ReMonster (2024)
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\ReZERO - Starting Life in Another World (2016)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\ReZERO - Starting Life in Another World (2016)\data for ReZERO - Starting Life in Another World (2016)
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Reborn as a Vending Machine, I Now Wander the Dungeon (2023)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Reborn as a Vending Machine, I Now Wander the Dungeon (2023)\data for Reborn as a Vending Machine, I Now Wander the Dungeon (2023)
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Redo of Healer (2021)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Redo of Healer (2021)\data for Redo of Healer (2021)
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Rick and Morty (2013)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Rick and Morty (2013)\data for Rick and Morty (2013)
|
|
||||||
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Rocket & Groot (2017) - No data folder found
|
|
||||||
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Romulus (2020) - No data folder found
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Saga of Tanya the Evil (2017)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Saga of Tanya the Evil (2017)\data for Saga of Tanya the Evil (2017)
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Seirei Gensouki Spirit Chronicles (2021)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Seirei Gensouki Spirit Chronicles (2021)\data for Seirei Gensouki Spirit Chronicles (2021)
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Shangri-La Frontier (2023)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Shangri-La Frontier (2023)\data for Shangri-La Frontier (2023)
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\She Professed Herself Pupil of the Wise Man (2022)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\She Professed Herself Pupil of the Wise Man (2022)\data for She Professed Herself Pupil of the Wise Man (2022)
|
|
||||||
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping She-Hulk Die Anwältin (2022) - No data folder found
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Solo Leveling (2024)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Solo Leveling (2024)\data for Solo Leveling (2024)
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Spice and Wolf (2008)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Spice and Wolf (2008)\data for Spice and Wolf (2008)
|
|
||||||
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Star Trek Discovery (2017) - No data folder found
|
|
||||||
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Stargate (1997) - No data folder found
|
|
||||||
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Stargate Atlantis (2004) - No data folder found
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Steins;Gate (2011)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Steins;Gate (2011)\data for Steins;Gate (2011)
|
|
||||||
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Sweet Tooth (2021) - No data folder found
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Sword of the Demon Hunter Kijin Gen (2025)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Sword of the Demon Hunter Kijin Gen (2025)\data for Sword of the Demon Hunter Kijin Gen (2025)
|
|
||||||
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Tales from the Loop (2020) - No data folder found
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tamako Market (2013)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tamako Market (2013)\data for Tamako Market (2013)
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Ancient Magus' Bride (2017)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Ancient Magus' Bride (2017)\data for The Ancient Magus' Bride (2017)
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Demon Sword Master of Excalibur Academy (2023)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Demon Sword Master of Excalibur Academy (2023)\data for The Demon Sword Master of Excalibur Academy (2023)
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Devil is a Part-Timer! (2013)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Devil is a Part-Timer! (2013)\data for The Devil is a Part-Timer! (2013)
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Dreaming Boy is a Realist (2023)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Dreaming Boy is a Realist (2023)\data for The Dreaming Boy is a Realist (2023)
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Dungeon of Black Company (2021)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Dungeon of Black Company (2021)\data for The Dungeon of Black Company (2021)
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Eminence in Shadow (2022)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Eminence in Shadow (2022)\data for The Eminence in Shadow (2022)
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Familiar of Zero (2006)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Familiar of Zero (2006)\data for The Familiar of Zero (2006)
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Faraway Paladin (2021)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Faraway Paladin (2021)\data for The Faraway Paladin (2021)
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Gorilla God’s Go-To Girl (2025)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Gorilla God’s Go-To Girl (2025)\data for The Gorilla God’s Go-To Girl (2025)
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Hidden Dungeon Only I Can Enter (2021)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Hidden Dungeon Only I Can Enter (2021)\data for The Hidden Dungeon Only I Can Enter (2021)
|
|
||||||
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping The Last of Us (2023) - No data folder found
|
|
||||||
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping The Man in the High Castle (2015) - No data folder found
|
|
||||||
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping The Mandalorian (2019) - No data folder found
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Quintessential Quintuplets (2019)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Quintessential Quintuplets (2019)\data for The Quintessential Quintuplets (2019)
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Saint’s Magic Power is Omnipotent (2021)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Saint’s Magic Power is Omnipotent (2021)\data for The Saint’s Magic Power is Omnipotent (2021)
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Too-Perfect Saint Tossed Aside by My Fiance and Sold to Another Kingdom (2025)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Too-Perfect Saint Tossed Aside by My Fiance and Sold to Another Kingdom (2025)\data for The Too-Perfect Saint Tossed Aside by My Fiance and Sold to Another Kingdom (2025)
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Unaware Atelier Meister (2025)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Unaware Atelier Meister (2025)\data for The Unaware Atelier Meister (2025)
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Weakest Tamer Began a Journey to Pick Up Trash (2024)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Weakest Tamer Began a Journey to Pick Up Trash (2024)\data for The Weakest Tamer Began a Journey to Pick Up Trash (2024)
|
|
||||||
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping The Witcher (2019) - No data folder found
|
|
||||||
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping The World's Finest Assassin Gets Reincarnated in Another World as an Aristocrat (2021) - No data folder found
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\To Your Eternity (2021)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\To Your Eternity (2021)\data for To Your Eternity (2021)
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tomo-chan Is a Girl! (2023)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tomo-chan Is a Girl! (2023)\data for Tomo-chan Is a Girl! (2023)
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tonikawa Over the Moon for You (2020)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tonikawa Over the Moon for You (2020)\data for Tonikawa Over the Moon for You (2020)
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tsukimichi Moonlit Fantasy (2021)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tsukimichi Moonlit Fantasy (2021)\data for Tsukimichi Moonlit Fantasy (2021)
|
|
||||||
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Unidentified - Die wahren X-Akten (2019) - No data folder found
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Unnamed Memory (2024)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Unnamed Memory (2024)\data for Unnamed Memory (2024)
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Vom Landei zum Schwertheiligen (2025)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Vom Landei zum Schwertheiligen (2025)\data for Vom Landei zum Schwertheiligen (2025)
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\WIND BREAKER (2024)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\WIND BREAKER (2024)\data for WIND BREAKER (2024)
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\WITCH WATCH (2025)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\WITCH WATCH (2025)\data for WITCH WATCH (2025)
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Wolf Girl & Black Prince (2014)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Wolf Girl & Black Prince (2014)\data for Wolf Girl & Black Prince (2014)
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\World’s End Harem (2022)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\World’s End Harem (2022)\data for World’s End Harem (2022)
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Zom 100 Bucket List of the Dead (2023)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Zom 100 Bucket List of the Dead (2023)\data for Zom 100 Bucket List of the Dead (2023)
|
|
||||||
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping a-couple-of-cuckoos - No data folder found
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\a-ninja-and-an-assassin-under-one-roof\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\a-ninja-and-an-assassin-under-one-roof\data for a-ninja-and-an-assassin-under-one-roof
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\a-nobodys-way-up-to-an-exploration-hero\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\a-nobodys-way-up-to-an-exploration-hero\data for a-nobodys-way-up-to-an-exploration-hero
|
|
||||||
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping a-silent-voice - No data folder found
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\am-i-actually-the-strongest\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\am-i-actually-the-strongest\data for am-i-actually-the-strongest
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\anne-shirley\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\anne-shirley\data for anne-shirley
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\apocalypse-bringer-mynoghra\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\apocalypse-bringer-mynoghra\data for apocalypse-bringer-mynoghra
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\banished-from-the-heros-party-i-decided-to-live-a-quiet-life-in-the-countryside\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\banished-from-the-heros-party-i-decided-to-live-a-quiet-life-in-the-countryside\data for banished-from-the-heros-party-i-decided-to-live-a-quiet-life-in-the-countryside
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\beheneko the elf girls cat is secretly an s ranked monster (2025) (2025)\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\beheneko the elf girls cat is secretly an s ranked monster (2025) (2025)\data for beheneko the elf girls cat is secretly an s ranked monster (2025) (2025)
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\berserk-of-gluttony\data
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\berserk-of-gluttony\data for berserk-of-gluttony
|
|
||||||
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\black-summoner\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\black-summoner\data for black-summoner
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\boarding-school-juliet\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\boarding-school-juliet\data for boarding-school-juliet
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\buddy-daddies\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\buddy-daddies\data for buddy-daddies
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\can-a-boy-girl-friendship-survive\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\can-a-boy-girl-friendship-survive\data for can-a-boy-girl-friendship-survive
|
|
||||||
2025-09-29 12:38:43 - WARNING - root - load_series - Skipping chillin-in-another-world-with-level-2-super-cheat-powers - No data folder found
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\chillin-in-my-30s-after-getting-fired-from-the-demon-kings-army\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\chillin-in-my-30s-after-getting-fired-from-the-demon-kings-army\data for chillin-in-my-30s-after-getting-fired-from-the-demon-kings-army
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\choujin koukousei tachi wa isekai de mo yoyuu de ikinuku you desu\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\choujin koukousei tachi wa isekai de mo yoyuu de ikinuku you desu\data for choujin koukousei tachi wa isekai de mo yoyuu de ikinuku you desu
|
|
||||||
2025-09-29 12:38:43 - WARNING - root - load_series - Skipping clevatess - No data folder found
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\compass-20-animation-project\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\compass-20-animation-project\data for compass-20-animation-project
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\dragon-raja-the-blazing-dawn\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\dragon-raja-the-blazing-dawn\data for dragon-raja-the-blazing-dawn
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\dragonar-academy\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\dragonar-academy\data for dragonar-academy
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\drugstore-in-another-world-the-slow-life-of-a-cheat-pharmacist\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\drugstore-in-another-world-the-slow-life-of-a-cheat-pharmacist\data for drugstore-in-another-world-the-slow-life-of-a-cheat-pharmacist
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\fluffy-paradise\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\fluffy-paradise\data for fluffy-paradise
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\food-for-the-soul\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\food-for-the-soul\data for food-for-the-soul
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\handyman-saitou-in-another-world\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\handyman-saitou-in-another-world\data for handyman-saitou-in-another-world
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\i-shall-survive-using-potions\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\i-shall-survive-using-potions\data for i-shall-survive-using-potions
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\im-giving-the-disgraced-noble-lady-i-rescued-a-crash-course-in-naughtiness\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\im-giving-the-disgraced-noble-lady-i-rescued-a-crash-course-in-naughtiness\data for im-giving-the-disgraced-noble-lady-i-rescued-a-crash-course-in-naughtiness
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\killing-bites\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\killing-bites\data for killing-bites
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\love-flops\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\love-flops\data for love-flops
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\magic-maker-how-to-make-magic-in-another-world\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\magic-maker-how-to-make-magic-in-another-world\data for magic-maker-how-to-make-magic-in-another-world
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\muhyo-rojis-bureau-of-supernatural-investigation\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\muhyo-rojis-bureau-of-supernatural-investigation\data for muhyo-rojis-bureau-of-supernatural-investigation
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\my-roommate-is-a-cat\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\my-roommate-is-a-cat\data for my-roommate-is-a-cat
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\nukitashi-the-animation\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\nukitashi-the-animation\data for nukitashi-the-animation
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\outbreak-company\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\outbreak-company\data for outbreak-company
|
|
||||||
2025-09-29 12:38:43 - WARNING - root - load_series - Skipping plastic-memories - No data folder found
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\pseudo-harem\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\pseudo-harem\data for pseudo-harem
|
|
||||||
2025-09-29 12:38:43 - WARNING - root - load_series - Skipping rent-a-girlfriend - No data folder found
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\sasaki-and-peeps\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\sasaki-and-peeps\data for sasaki-and-peeps
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\scooped-up-by-an-s-rank-adventurer\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\scooped-up-by-an-s-rank-adventurer\data for scooped-up-by-an-s-rank-adventurer
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\secrets-of-the-silent-witch\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\secrets-of-the-silent-witch\data for secrets-of-the-silent-witch
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\seton-academy-join-the-pack\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\seton-academy-join-the-pack\data for seton-academy-join-the-pack
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\shachibato-president-its-time-for-battle\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\shachibato-president-its-time-for-battle\data for shachibato-president-its-time-for-battle
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\skeleton-knight-in-another-world\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\skeleton-knight-in-another-world\data for skeleton-knight-in-another-world
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\sugar-apple-fairy-tale\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\sugar-apple-fairy-tale\data for sugar-apple-fairy-tale
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\summer-pockets\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\summer-pockets\data for summer-pockets
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\suppose-a-kid-from-the-last-dungeon-boonies-moved-to-a-starter-town\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\suppose-a-kid-from-the-last-dungeon-boonies-moved-to-a-starter-town\data for suppose-a-kid-from-the-last-dungeon-boonies-moved-to-a-starter-town
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-beginning-after-the-end\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-beginning-after-the-end\data for the-beginning-after-the-end
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-brilliant-healers-new-life-in-the-shadows\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-brilliant-healers-new-life-in-the-shadows\data for the-brilliant-healers-new-life-in-the-shadows
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-daily-life-of-a-middle-aged-online-shopper-in-another-world\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-daily-life-of-a-middle-aged-online-shopper-in-another-world\data for the-daily-life-of-a-middle-aged-online-shopper-in-another-world
|
|
||||||
2025-09-29 12:38:43 - WARNING - root - load_series - Skipping the-familiar-of-zero - No data folder found
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-fragrant-flower-blooms-with-dignity\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-fragrant-flower-blooms-with-dignity\data for the-fragrant-flower-blooms-with-dignity
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-great-cleric\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-great-cleric\data for the-great-cleric
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-new-chronicles-of-extraordinary-beings-preface\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-new-chronicles-of-extraordinary-beings-preface\data for the-new-chronicles-of-extraordinary-beings-preface
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-shiunji-family-children\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-shiunji-family-children\data for the-shiunji-family-children
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-shy-hero-and-the-assassin-princesses\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-shy-hero-and-the-assassin-princesses\data for the-shy-hero-and-the-assassin-princesses
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-testament-of-sister-new-devil\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-testament-of-sister-new-devil\data for the-testament-of-sister-new-devil
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-unwanted-undead-adventurer\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-unwanted-undead-adventurer\data for the-unwanted-undead-adventurer
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-water-magician\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-water-magician\data for the-water-magician
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-worlds-finest-assassin-gets-reincarnated-in-another-world-as-an-aristocrat\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-worlds-finest-assassin-gets-reincarnated-in-another-world-as-an-aristocrat\data for the-worlds-finest-assassin-gets-reincarnated-in-another-world-as-an-aristocrat
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-wrong-way-to-use-healing-magic\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-wrong-way-to-use-healing-magic\data for the-wrong-way-to-use-healing-magic
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\theres-no-freaking-way-ill-be-your-lover-unless\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\theres-no-freaking-way-ill-be-your-lover-unless\data for theres-no-freaking-way-ill-be-your-lover-unless
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\to-be-hero-x\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\to-be-hero-x\data for to-be-hero-x
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\tougen-anki\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\tougen-anki\data for tougen-anki
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\uglymug-epicfighter\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\uglymug-epicfighter\data for uglymug-epicfighter
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\valkyrie-drive-mermaid\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\valkyrie-drive-mermaid\data for valkyrie-drive-mermaid
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\wandering-witch-the-journey-of-elaina\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\wandering-witch-the-journey-of-elaina\data for wandering-witch-the-journey-of-elaina
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\war-god-system-im-counting-on-you\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\war-god-system-im-counting-on-you\data for war-god-system-im-counting-on-you
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\welcome-to-japan-ms-elf\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\welcome-to-japan-ms-elf\data for welcome-to-japan-ms-elf
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\welcome-to-the-outcasts-restaurant\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\welcome-to-the-outcasts-restaurant\data for welcome-to-the-outcasts-restaurant
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\yandere-dark-elf-she-chased-me-all-the-way-from-another-world\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\yandere-dark-elf-she-chased-me-all-the-way-from-another-world\data for yandere-dark-elf-she-chased-me-all-the-way-from-another-world
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Übel Blatt (2025)\data
|
|
||||||
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Übel Blatt (2025)\data for Übel Blatt (2025)
|
|
||||||
2025-09-29 20:23:13 - INFO - __main__ - <module> - Enhanced logging system initialized
|
|
||||||
2025-09-29 20:23:13 - INFO - __main__ - <module> - Starting Aniworld Flask server...
|
|
||||||
2025-09-29 20:23:13 - INFO - __main__ - <module> - Anime directory: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien
|
|
||||||
2025-09-29 20:23:13 - INFO - __main__ - <module> - Log level: INFO
|
|
||||||
2025-09-29 20:23:13 - INFO - __main__ - <module> - Scheduled operations disabled
|
|
||||||
2025-09-29 20:23:13 - INFO - __main__ - <module> - Server will be available at http://localhost:5000
|
|
||||||
2025-09-29 20:23:16 - INFO - __main__ - <module> - Enhanced logging system initialized
|
|
||||||
2025-09-29 20:23:16 - INFO - root - __init__ - Initialized Loader with base path: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien
|
|
||||||
2025-09-29 20:23:16 - INFO - root - load_series - Scanning anime folders in: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien
|
|
||||||
2025-09-29 20:23:16 - ERROR - root - init_series_app - Error initializing SeriesApp:
|
|
||||||
Traceback (most recent call last):
|
|
||||||
File "D:\repo\Aniworld/src/server/app.py", line 145, in init_series_app
|
|
||||||
series_app = SeriesApp(directory_to_search)
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
|
||||||
File "D:\repo\Aniworld\src\Main.py", line 54, in __init__
|
|
||||||
self.List = SerieList(self.directory_to_search)
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
|
||||||
File "D:\repo\Aniworld\src\server\core\entities\SerieList.py", line 9, in __init__
|
|
||||||
self.load_series()
|
|
||||||
File "D:\repo\Aniworld\src\server\core\entities\SerieList.py", line 29, in load_series
|
|
||||||
for anime_folder in os.listdir(self.directory):
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^
|
|
||||||
FileNotFoundError: [WinError 53] Der Netzwerkpfad wurde nicht gefunden: '\\\\sshfs.r\\ubuntu@192.168.178.43\\media\\serien\\Serien'
|
|
||||||
2025-09-29 20:23:16 - WARNING - werkzeug - _log - * Debugger is active!
|
|
||||||
2025-09-29 20:33:06 - DEBUG - schedule - clear - Deleting *all* jobs
|
|
||||||
2025-09-29 20:33:06 - INFO - application.services.scheduler_service - stop_scheduler - Scheduled operations stopped
|
|
||||||
2025-09-29 20:33:06 - INFO - __main__ - <module> - Scheduler stopped
|
|
||||||
281
src/cli/nfo_cli.py
Normal file
281
src/cli/nfo_cli.py
Normal file
@@ -0,0 +1,281 @@
|
|||||||
|
"""CLI command for NFO management.
|
||||||
|
|
||||||
|
This script provides command-line interface for creating, updating,
|
||||||
|
and checking NFO metadata files.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import sys
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
# Add src to path
|
||||||
|
sys.path.insert(0, str(Path(__file__).parent.parent.parent))
|
||||||
|
|
||||||
|
from src.config.settings import settings
|
||||||
|
from src.core.services.series_manager_service import SeriesManagerService
|
||||||
|
|
||||||
|
|
||||||
|
async def scan_and_create_nfo():
|
||||||
|
"""Scan all series and create missing NFO files."""
|
||||||
|
print("=" * 70)
|
||||||
|
print("NFO Auto-Creation Tool")
|
||||||
|
print("=" * 70)
|
||||||
|
|
||||||
|
if not settings.tmdb_api_key:
|
||||||
|
print("\n❌ Error: TMDB_API_KEY not configured")
|
||||||
|
print(" Set TMDB_API_KEY in .env file or environment")
|
||||||
|
print(" Get API key from: https://www.themoviedb.org/settings/api")
|
||||||
|
return 1
|
||||||
|
|
||||||
|
if not settings.anime_directory:
|
||||||
|
print("\n❌ Error: ANIME_DIRECTORY not configured")
|
||||||
|
return 1
|
||||||
|
|
||||||
|
print(f"\nAnime Directory: {settings.anime_directory}")
|
||||||
|
print(f"Auto-create NFO: {settings.nfo_auto_create}")
|
||||||
|
print(f"Update on scan: {settings.nfo_update_on_scan}")
|
||||||
|
print(f"Download poster: {settings.nfo_download_poster}")
|
||||||
|
print(f"Download logo: {settings.nfo_download_logo}")
|
||||||
|
print(f"Download fanart: {settings.nfo_download_fanart}")
|
||||||
|
|
||||||
|
if not settings.nfo_auto_create:
|
||||||
|
print("\n⚠️ Warning: NFO_AUTO_CREATE is set to False")
|
||||||
|
print(" Enable it in .env to auto-create NFO files")
|
||||||
|
print("\n Continuing anyway to demonstrate functionality...")
|
||||||
|
# Override for demonstration
|
||||||
|
settings.nfo_auto_create = True
|
||||||
|
|
||||||
|
print("\nInitializing series manager...")
|
||||||
|
manager = SeriesManagerService.from_settings()
|
||||||
|
|
||||||
|
# Get series list first
|
||||||
|
serie_list = manager.get_serie_list()
|
||||||
|
all_series = serie_list.get_all()
|
||||||
|
|
||||||
|
print(f"Found {len(all_series)} series in directory")
|
||||||
|
|
||||||
|
if not all_series:
|
||||||
|
print("\n⚠️ No series found. Add some anime series first.")
|
||||||
|
return 0
|
||||||
|
|
||||||
|
# Show series without NFO
|
||||||
|
series_without_nfo = []
|
||||||
|
for serie in all_series:
|
||||||
|
if not serie.has_nfo():
|
||||||
|
series_without_nfo.append(serie)
|
||||||
|
|
||||||
|
if series_without_nfo:
|
||||||
|
print(f"\nSeries without NFO: {len(series_without_nfo)}")
|
||||||
|
for serie in series_without_nfo[:5]: # Show first 5
|
||||||
|
print(f" - {serie.name} ({serie.folder})")
|
||||||
|
if len(series_without_nfo) > 5:
|
||||||
|
print(f" ... and {len(series_without_nfo) - 5} more")
|
||||||
|
else:
|
||||||
|
print("\n✅ All series already have NFO files!")
|
||||||
|
|
||||||
|
if not settings.nfo_update_on_scan:
|
||||||
|
print("\nNothing to do. Enable NFO_UPDATE_ON_SCAN to update existing NFOs.")
|
||||||
|
return 0
|
||||||
|
|
||||||
|
print("\nProcessing NFO files...")
|
||||||
|
print("(This may take a while depending on the number of series)")
|
||||||
|
|
||||||
|
try:
|
||||||
|
await manager.scan_and_process_nfo()
|
||||||
|
print("\n✅ NFO processing complete!")
|
||||||
|
|
||||||
|
# Show updated stats
|
||||||
|
serie_list.load_series() # Reload to get updated stats
|
||||||
|
all_series = serie_list.get_all()
|
||||||
|
series_with_nfo = [s for s in all_series if s.has_nfo()]
|
||||||
|
series_with_poster = [s for s in all_series if s.has_poster()]
|
||||||
|
series_with_logo = [s for s in all_series if s.has_logo()]
|
||||||
|
series_with_fanart = [s for s in all_series if s.has_fanart()]
|
||||||
|
|
||||||
|
print("\nFinal Statistics:")
|
||||||
|
print(f" Series with NFO: {len(series_with_nfo)}/{len(all_series)}")
|
||||||
|
print(f" Series with poster: {len(series_with_poster)}/{len(all_series)}")
|
||||||
|
print(f" Series with logo: {len(series_with_logo)}/{len(all_series)}")
|
||||||
|
print(f" Series with fanart: {len(series_with_fanart)}/{len(all_series)}")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
print(f"\n❌ Error: {e}")
|
||||||
|
import traceback
|
||||||
|
traceback.print_exc()
|
||||||
|
return 1
|
||||||
|
finally:
|
||||||
|
await manager.close()
|
||||||
|
|
||||||
|
return 0
|
||||||
|
|
||||||
|
|
||||||
|
async def check_nfo_status():
|
||||||
|
"""Check NFO status for all series."""
|
||||||
|
print("=" * 70)
|
||||||
|
print("NFO Status Check")
|
||||||
|
print("=" * 70)
|
||||||
|
|
||||||
|
if not settings.anime_directory:
|
||||||
|
print("\n❌ Error: ANIME_DIRECTORY not configured")
|
||||||
|
return 1
|
||||||
|
|
||||||
|
print(f"\nAnime Directory: {settings.anime_directory}")
|
||||||
|
|
||||||
|
# Create series list (no NFO service needed for status check)
|
||||||
|
from src.core.entities.SerieList import SerieList
|
||||||
|
serie_list = SerieList(settings.anime_directory)
|
||||||
|
all_series = serie_list.get_all()
|
||||||
|
|
||||||
|
if not all_series:
|
||||||
|
print("\n⚠️ No series found")
|
||||||
|
return 0
|
||||||
|
|
||||||
|
print(f"\nTotal series: {len(all_series)}")
|
||||||
|
|
||||||
|
# Categorize series
|
||||||
|
with_nfo = []
|
||||||
|
without_nfo = []
|
||||||
|
|
||||||
|
for serie in all_series:
|
||||||
|
if serie.has_nfo():
|
||||||
|
with_nfo.append(serie)
|
||||||
|
else:
|
||||||
|
without_nfo.append(serie)
|
||||||
|
|
||||||
|
print(f"\nWith NFO: {len(with_nfo)} ({len(with_nfo) * 100 // len(all_series)}%)")
|
||||||
|
print(f"Without NFO: {len(without_nfo)} ({len(without_nfo) * 100 // len(all_series)}%)")
|
||||||
|
|
||||||
|
if without_nfo:
|
||||||
|
print("\nSeries missing NFO:")
|
||||||
|
for serie in without_nfo[:10]:
|
||||||
|
print(f" ❌ {serie.name} ({serie.folder})")
|
||||||
|
if len(without_nfo) > 10:
|
||||||
|
print(f" ... and {len(without_nfo) - 10} more")
|
||||||
|
|
||||||
|
# Media file statistics
|
||||||
|
with_poster = sum(1 for s in all_series if s.has_poster())
|
||||||
|
with_logo = sum(1 for s in all_series if s.has_logo())
|
||||||
|
with_fanart = sum(1 for s in all_series if s.has_fanart())
|
||||||
|
|
||||||
|
print("\nMedia Files:")
|
||||||
|
print(f" Posters: {with_poster}/{len(all_series)} ({with_poster * 100 // len(all_series)}%)")
|
||||||
|
print(f" Logos: {with_logo}/{len(all_series)} ({with_logo * 100 // len(all_series)}%)")
|
||||||
|
print(f" Fanart: {with_fanart}/{len(all_series)} ({with_fanart * 100 // len(all_series)}%)")
|
||||||
|
|
||||||
|
return 0
|
||||||
|
|
||||||
|
|
||||||
|
async def update_nfo_files():
|
||||||
|
"""Update existing NFO files with fresh data from TMDB."""
|
||||||
|
print("=" * 70)
|
||||||
|
print("NFO Update Tool")
|
||||||
|
print("=" * 70)
|
||||||
|
|
||||||
|
if not settings.tmdb_api_key:
|
||||||
|
print("\n❌ Error: TMDB_API_KEY not configured")
|
||||||
|
print(" Set TMDB_API_KEY in .env file or environment")
|
||||||
|
print(" Get API key from: https://www.themoviedb.org/settings/api")
|
||||||
|
return 1
|
||||||
|
|
||||||
|
if not settings.anime_directory:
|
||||||
|
print("\n❌ Error: ANIME_DIRECTORY not configured")
|
||||||
|
return 1
|
||||||
|
|
||||||
|
print(f"\nAnime Directory: {settings.anime_directory}")
|
||||||
|
print(f"Download media: {settings.nfo_download_poster or settings.nfo_download_logo or settings.nfo_download_fanart}")
|
||||||
|
|
||||||
|
# Get series with NFO
|
||||||
|
from src.core.entities.SerieList import SerieList
|
||||||
|
serie_list = SerieList(settings.anime_directory)
|
||||||
|
all_series = serie_list.get_all()
|
||||||
|
series_with_nfo = [s for s in all_series if s.has_nfo()]
|
||||||
|
|
||||||
|
if not series_with_nfo:
|
||||||
|
print("\n⚠️ No series with NFO files found")
|
||||||
|
print(" Run 'scan' command first to create NFO files")
|
||||||
|
return 0
|
||||||
|
|
||||||
|
print(f"\nFound {len(series_with_nfo)} series with NFO files")
|
||||||
|
print("Updating NFO files with fresh data from TMDB...")
|
||||||
|
print("(This may take a while)")
|
||||||
|
|
||||||
|
# Initialize NFO service using factory
|
||||||
|
from src.core.services.nfo_factory import create_nfo_service
|
||||||
|
try:
|
||||||
|
nfo_service = create_nfo_service()
|
||||||
|
except ValueError as e:
|
||||||
|
print(f"\nError: {e}")
|
||||||
|
return 1
|
||||||
|
|
||||||
|
success_count = 0
|
||||||
|
error_count = 0
|
||||||
|
|
||||||
|
try:
|
||||||
|
for i, serie in enumerate(series_with_nfo, 1):
|
||||||
|
print(f"\n[{i}/{len(series_with_nfo)}] Updating: {serie.name}")
|
||||||
|
|
||||||
|
try:
|
||||||
|
await nfo_service.update_tvshow_nfo(
|
||||||
|
serie_folder=serie.folder,
|
||||||
|
download_media=(
|
||||||
|
settings.nfo_download_poster or
|
||||||
|
settings.nfo_download_logo or
|
||||||
|
settings.nfo_download_fanart
|
||||||
|
)
|
||||||
|
)
|
||||||
|
print(f" ✅ Updated successfully")
|
||||||
|
success_count += 1
|
||||||
|
|
||||||
|
# Small delay to respect API rate limits
|
||||||
|
await asyncio.sleep(0.5)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
print(f" ❌ Error: {e}")
|
||||||
|
error_count += 1
|
||||||
|
|
||||||
|
print("\n" + "=" * 70)
|
||||||
|
print(f"✅ Update complete!")
|
||||||
|
print(f" Success: {success_count}")
|
||||||
|
print(f" Errors: {error_count}")
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
print(f"\n❌ Fatal error: {e}")
|
||||||
|
import traceback
|
||||||
|
traceback.print_exc()
|
||||||
|
return 1
|
||||||
|
finally:
|
||||||
|
await nfo_service.close()
|
||||||
|
|
||||||
|
return 0
|
||||||
|
|
||||||
|
|
||||||
|
def main():
|
||||||
|
"""Main CLI entry point."""
|
||||||
|
if len(sys.argv) < 2:
|
||||||
|
print("NFO Management Tool")
|
||||||
|
print("\nUsage:")
|
||||||
|
print(" python -m src.cli.nfo_cli scan # Scan and create missing NFO files")
|
||||||
|
print(" python -m src.cli.nfo_cli status # Check NFO status for all series")
|
||||||
|
print(" python -m src.cli.nfo_cli update # Update existing NFO files with fresh data")
|
||||||
|
print("\nConfiguration:")
|
||||||
|
print(" Set TMDB_API_KEY in .env file")
|
||||||
|
print(" Set NFO_AUTO_CREATE=true to enable auto-creation")
|
||||||
|
print(" Set NFO_UPDATE_ON_SCAN=true to update existing NFOs during scan")
|
||||||
|
return 1
|
||||||
|
|
||||||
|
command = sys.argv[1].lower()
|
||||||
|
|
||||||
|
if command == "scan":
|
||||||
|
return asyncio.run(scan_and_create_nfo())
|
||||||
|
elif command == "status":
|
||||||
|
return asyncio.run(check_nfo_status())
|
||||||
|
elif command == "update":
|
||||||
|
return asyncio.run(update_nfo_files())
|
||||||
|
else:
|
||||||
|
print(f"Unknown command: {command}")
|
||||||
|
print("Use 'scan', 'status', or 'update'")
|
||||||
|
return 1
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
sys.exit(main())
|
||||||
138
src/config/settings.py
Normal file
138
src/config/settings.py
Normal file
@@ -0,0 +1,138 @@
|
|||||||
|
import secrets
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from pydantic import Field
|
||||||
|
from pydantic_settings import BaseSettings, SettingsConfigDict
|
||||||
|
|
||||||
|
|
||||||
|
class Settings(BaseSettings):
|
||||||
|
"""Application settings from environment variables."""
|
||||||
|
|
||||||
|
model_config = SettingsConfigDict(env_file=".env", extra="ignore")
|
||||||
|
|
||||||
|
jwt_secret_key: str = Field(
|
||||||
|
default_factory=lambda: secrets.token_urlsafe(32),
|
||||||
|
validation_alias="JWT_SECRET_KEY",
|
||||||
|
)
|
||||||
|
password_salt: str = Field(
|
||||||
|
default="default-salt",
|
||||||
|
validation_alias="PASSWORD_SALT"
|
||||||
|
)
|
||||||
|
master_password_hash: Optional[str] = Field(
|
||||||
|
default=None,
|
||||||
|
validation_alias="MASTER_PASSWORD_HASH"
|
||||||
|
)
|
||||||
|
# ⚠️ WARNING: DEVELOPMENT ONLY - NEVER USE IN PRODUCTION ⚠️
|
||||||
|
# This field allows setting a plaintext master password via environment
|
||||||
|
# variable for development/testing purposes only. In production
|
||||||
|
# deployments, use MASTER_PASSWORD_HASH instead and NEVER set this field.
|
||||||
|
master_password: Optional[str] = Field(
|
||||||
|
default=None,
|
||||||
|
validation_alias="MASTER_PASSWORD",
|
||||||
|
description=(
|
||||||
|
"**DEVELOPMENT ONLY** - Plaintext master password. "
|
||||||
|
"NEVER enable in production. Use MASTER_PASSWORD_HASH instead."
|
||||||
|
),
|
||||||
|
)
|
||||||
|
token_expiry_hours: int = Field(
|
||||||
|
default=24,
|
||||||
|
validation_alias="SESSION_TIMEOUT_HOURS"
|
||||||
|
)
|
||||||
|
anime_directory: str = Field(
|
||||||
|
default="",
|
||||||
|
validation_alias="ANIME_DIRECTORY"
|
||||||
|
)
|
||||||
|
log_level: str = Field(
|
||||||
|
default="INFO",
|
||||||
|
validation_alias="LOG_LEVEL"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Additional settings from .env
|
||||||
|
database_url: str = Field(
|
||||||
|
default="sqlite:///./data/aniworld.db",
|
||||||
|
validation_alias="DATABASE_URL"
|
||||||
|
)
|
||||||
|
cors_origins: str = Field(
|
||||||
|
default="http://localhost:3000",
|
||||||
|
validation_alias="CORS_ORIGINS",
|
||||||
|
)
|
||||||
|
api_rate_limit: int = Field(
|
||||||
|
default=100,
|
||||||
|
validation_alias="API_RATE_LIMIT"
|
||||||
|
)
|
||||||
|
default_provider: str = Field(
|
||||||
|
default="aniworld.to",
|
||||||
|
validation_alias="DEFAULT_PROVIDER"
|
||||||
|
)
|
||||||
|
provider_timeout: int = Field(
|
||||||
|
default=30,
|
||||||
|
validation_alias="PROVIDER_TIMEOUT"
|
||||||
|
)
|
||||||
|
retry_attempts: int = Field(
|
||||||
|
default=3,
|
||||||
|
validation_alias="RETRY_ATTEMPTS"
|
||||||
|
)
|
||||||
|
|
||||||
|
# NFO / TMDB Settings
|
||||||
|
tmdb_api_key: Optional[str] = Field(
|
||||||
|
default=None,
|
||||||
|
validation_alias="TMDB_API_KEY",
|
||||||
|
description="TMDB API key for scraping TV show metadata"
|
||||||
|
)
|
||||||
|
nfo_auto_create: bool = Field(
|
||||||
|
default=False,
|
||||||
|
validation_alias="NFO_AUTO_CREATE",
|
||||||
|
description="Automatically create NFO files when scanning series"
|
||||||
|
)
|
||||||
|
nfo_update_on_scan: bool = Field(
|
||||||
|
default=False,
|
||||||
|
validation_alias="NFO_UPDATE_ON_SCAN",
|
||||||
|
description="Update existing NFO files when scanning series"
|
||||||
|
)
|
||||||
|
nfo_download_poster: bool = Field(
|
||||||
|
default=True,
|
||||||
|
validation_alias="NFO_DOWNLOAD_POSTER",
|
||||||
|
description="Download poster.jpg when creating NFO"
|
||||||
|
)
|
||||||
|
nfo_download_logo: bool = Field(
|
||||||
|
default=True,
|
||||||
|
validation_alias="NFO_DOWNLOAD_LOGO",
|
||||||
|
description="Download logo.png when creating NFO"
|
||||||
|
)
|
||||||
|
nfo_download_fanart: bool = Field(
|
||||||
|
default=True,
|
||||||
|
validation_alias="NFO_DOWNLOAD_FANART",
|
||||||
|
description="Download fanart.jpg when creating NFO"
|
||||||
|
)
|
||||||
|
nfo_image_size: str = Field(
|
||||||
|
default="original",
|
||||||
|
validation_alias="NFO_IMAGE_SIZE",
|
||||||
|
description="Image size to download (original, w500, etc.)"
|
||||||
|
)
|
||||||
|
nfo_prefer_fsk_rating: bool = Field(
|
||||||
|
default=True,
|
||||||
|
validation_alias="NFO_PREFER_FSK_RATING",
|
||||||
|
description="Prefer German FSK rating over MPAA rating in NFO files"
|
||||||
|
)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def allowed_origins(self) -> list[str]:
|
||||||
|
"""Return the list of allowed CORS origins.
|
||||||
|
|
||||||
|
The environment variable should contain a comma-separated list.
|
||||||
|
When ``*`` is provided we fall back to a safe local development
|
||||||
|
default instead of allowing every origin in production.
|
||||||
|
"""
|
||||||
|
|
||||||
|
raw = (self.cors_origins or "").strip()
|
||||||
|
if not raw:
|
||||||
|
return []
|
||||||
|
if raw == "*":
|
||||||
|
return [
|
||||||
|
"http://localhost:3000",
|
||||||
|
"http://localhost:8000",
|
||||||
|
]
|
||||||
|
return [origin.strip() for origin in raw.split(",") if origin.strip()]
|
||||||
|
|
||||||
|
|
||||||
|
settings = Settings()
|
||||||
@@ -1,63 +1,432 @@
|
|||||||
|
"""
|
||||||
|
SerieScanner - Scans directories for anime series and missing episodes.
|
||||||
|
|
||||||
|
This module provides functionality to scan anime directories, identify
|
||||||
|
missing episodes, and report progress through callback interfaces.
|
||||||
|
|
||||||
|
Note:
|
||||||
|
This module is pure domain logic. Database operations are handled
|
||||||
|
by the service layer (AnimeService).
|
||||||
|
"""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import logging
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
import logging
|
|
||||||
from server.core.entities.series import Serie
|
|
||||||
import traceback
|
import traceback
|
||||||
from server.infrastructure.logging.GlobalLogger import error_logger, noKeyFound_logger
|
import uuid
|
||||||
from server.core.exceptions.Exceptions import NoKeyFoundException, MatchNotFoundError
|
from typing import Iterable, Iterator, Optional
|
||||||
from server.infrastructure.providers.base_provider import Loader
|
|
||||||
|
from events import Events
|
||||||
|
|
||||||
|
from src.core.entities.series import Serie
|
||||||
|
from src.core.exceptions.Exceptions import MatchNotFoundError, NoKeyFoundException
|
||||||
|
from src.core.providers.base_provider import Loader
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
error_logger = logging.getLogger("error")
|
||||||
|
no_key_found_logger = logging.getLogger("series.nokey")
|
||||||
|
|
||||||
|
|
||||||
class SerieScanner:
|
class SerieScanner:
|
||||||
def __init__(self, basePath: str, loader: Loader):
|
"""
|
||||||
self.directory = basePath
|
Scans directories for anime series and identifies missing episodes.
|
||||||
self.folderDict: dict[str, Serie] = {} # Proper initialization
|
|
||||||
self.loader = loader
|
|
||||||
logging.info(f"Initialized Loader with base path: {self.directory}")
|
|
||||||
|
|
||||||
def Reinit(self):
|
Supports progress callbacks for real-time scanning updates.
|
||||||
self.folderDict: dict[str, Serie] = {} # Proper initialization
|
|
||||||
|
|
||||||
|
Note:
|
||||||
|
This class is pure domain logic. Database operations are handled
|
||||||
|
by the service layer (AnimeService). Scan results are stored
|
||||||
|
in keyDict and can be retrieved after scanning.
|
||||||
|
|
||||||
def is_null_or_whitespace(self, s):
|
Example:
|
||||||
return s is None or s.strip() == ""
|
scanner = SerieScanner("/path/to/anime", loader)
|
||||||
|
scanner.scan()
|
||||||
|
# Results are in scanner.keyDict
|
||||||
|
"""
|
||||||
|
|
||||||
def GetTotalToScan(self):
|
def __init__(
|
||||||
|
self,
|
||||||
|
basePath: str,
|
||||||
|
loader: Loader,
|
||||||
|
) -> None:
|
||||||
|
"""
|
||||||
|
Initialize the SerieScanner.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
basePath: Base directory containing anime series
|
||||||
|
loader: Loader instance for fetching series information
|
||||||
|
callback_manager: Optional callback manager for progress updates
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
ValueError: If basePath is invalid or doesn't exist
|
||||||
|
"""
|
||||||
|
# Validate basePath to prevent directory traversal attacks
|
||||||
|
if not basePath or not basePath.strip():
|
||||||
|
raise ValueError("Base path cannot be empty")
|
||||||
|
|
||||||
|
# Resolve to absolute path and validate it exists
|
||||||
|
abs_path = os.path.abspath(basePath)
|
||||||
|
if not os.path.exists(abs_path):
|
||||||
|
raise ValueError(f"Base path does not exist: {abs_path}")
|
||||||
|
if not os.path.isdir(abs_path):
|
||||||
|
raise ValueError(f"Base path is not a directory: {abs_path}")
|
||||||
|
|
||||||
|
self.directory: str = abs_path
|
||||||
|
self.keyDict: dict[str, Serie] = {}
|
||||||
|
self.loader: Loader = loader
|
||||||
|
self._current_operation_id: Optional[str] = None
|
||||||
|
self.events = Events()
|
||||||
|
|
||||||
|
self.events.on_progress = []
|
||||||
|
self.events.on_error = []
|
||||||
|
self.events.on_completion = []
|
||||||
|
|
||||||
|
logger.info("Initialized SerieScanner with base path: %s", abs_path)
|
||||||
|
|
||||||
|
def _safe_call_event(self, event_handler, data: dict) -> None:
|
||||||
|
"""Safely call an event handler if it exists.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
event_handler: Event handler attribute (e.g., self.events.on_progress)
|
||||||
|
data: Data dictionary to pass to the event handler
|
||||||
|
"""
|
||||||
|
if event_handler:
|
||||||
|
try:
|
||||||
|
# Event handlers are stored as lists, iterate over them
|
||||||
|
for handler in event_handler:
|
||||||
|
handler(data)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error("Error calling event handler: %s", e, exc_info=True)
|
||||||
|
|
||||||
|
def subscribe_on_progress(self, handler):
|
||||||
|
"""
|
||||||
|
Subscribe a handler to an event.
|
||||||
|
Args:
|
||||||
|
handler: Callable to handle the event
|
||||||
|
"""
|
||||||
|
if handler not in self.events.on_progress:
|
||||||
|
self.events.on_progress.append(handler)
|
||||||
|
|
||||||
|
def unsubscribe_on_progress(self, handler):
|
||||||
|
"""
|
||||||
|
Unsubscribe a handler from an event.
|
||||||
|
Args:
|
||||||
|
handler: Callable to remove
|
||||||
|
"""
|
||||||
|
if handler in self.events.on_progress:
|
||||||
|
self.events.on_progress.remove(handler)
|
||||||
|
|
||||||
|
def _extract_year_from_folder_name(self, folder_name: str) -> int | None:
|
||||||
|
"""Extract year from folder name if present.
|
||||||
|
|
||||||
|
Looks for year in format "(YYYY)" at the end of folder name.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
folder_name: The folder name to check
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
int or None: Year if found, None otherwise
|
||||||
|
|
||||||
|
Example:
|
||||||
|
>>> _extract_year_from_folder_name("Dororo (2025)")
|
||||||
|
2025
|
||||||
|
>>> _extract_year_from_folder_name("Dororo")
|
||||||
|
None
|
||||||
|
"""
|
||||||
|
if not folder_name:
|
||||||
|
return None
|
||||||
|
|
||||||
|
# Look for year in format (YYYY) - typically at end of name
|
||||||
|
match = re.search(r'\((\d{4})\)', folder_name)
|
||||||
|
if match:
|
||||||
|
try:
|
||||||
|
year = int(match.group(1))
|
||||||
|
# Validate year is reasonable (between 1900 and 2100)
|
||||||
|
if 1900 <= year <= 2100:
|
||||||
|
logger.debug(
|
||||||
|
"Extracted year from folder name: %s -> %d",
|
||||||
|
folder_name,
|
||||||
|
year
|
||||||
|
)
|
||||||
|
return year
|
||||||
|
except ValueError:
|
||||||
|
pass
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
def subscribe_on_error(self, handler):
|
||||||
|
"""
|
||||||
|
Subscribe a handler to an event.
|
||||||
|
Args:
|
||||||
|
handler: Callable to handle the event
|
||||||
|
"""
|
||||||
|
if handler not in self.events.on_error:
|
||||||
|
self.events.on_error.append(handler)
|
||||||
|
|
||||||
|
def unsubscribe_on_error(self, handler):
|
||||||
|
"""
|
||||||
|
Unsubscribe a handler from an event.
|
||||||
|
Args:
|
||||||
|
handler: Callable to remove
|
||||||
|
"""
|
||||||
|
if handler in self.events.on_error:
|
||||||
|
self.events.on_error.remove(handler)
|
||||||
|
|
||||||
|
def subscribe_on_completion(self, handler):
|
||||||
|
"""
|
||||||
|
Subscribe a handler to an event.
|
||||||
|
Args:
|
||||||
|
handler: Callable to handle the event
|
||||||
|
"""
|
||||||
|
if handler not in self.events.on_completion:
|
||||||
|
self.events.on_completion.append(handler)
|
||||||
|
|
||||||
|
def unsubscribe_on_completion(self, handler):
|
||||||
|
"""
|
||||||
|
Unsubscribe a handler from an event.
|
||||||
|
Args:
|
||||||
|
handler: Callable to remove
|
||||||
|
"""
|
||||||
|
if handler in self.events.on_completion:
|
||||||
|
self.events.on_completion.remove(handler)
|
||||||
|
|
||||||
|
def reinit(self) -> None:
|
||||||
|
"""Reinitialize the series dictionary (keyed by serie.key)."""
|
||||||
|
self.keyDict: dict[str, Serie] = {}
|
||||||
|
|
||||||
|
def get_total_to_scan(self) -> int:
|
||||||
|
"""Get the total number of folders to scan.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Total count of folders with MP4 files
|
||||||
|
"""
|
||||||
result = self.__find_mp4_files()
|
result = self.__find_mp4_files()
|
||||||
return sum(1 for _ in result)
|
return sum(1 for _ in result)
|
||||||
|
|
||||||
def Scan(self, callback):
|
def scan(self) -> None:
|
||||||
logging.info("Starting process to load missing episodes")
|
"""
|
||||||
|
Scan directories for anime series and missing episodes.
|
||||||
|
|
||||||
|
Results are stored in self.keyDict and can be retrieved after
|
||||||
|
scanning. Data files are also saved to disk for persistence.
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
Exception: If scan fails critically
|
||||||
|
"""
|
||||||
|
# Generate unique operation ID
|
||||||
|
self._current_operation_id = str(uuid.uuid4())
|
||||||
|
|
||||||
|
logger.info("Starting scan for missing episodes")
|
||||||
|
|
||||||
|
# Notify scan starting
|
||||||
|
self._safe_call_event(
|
||||||
|
self.events.on_progress,
|
||||||
|
{
|
||||||
|
"operation_id": self._current_operation_id,
|
||||||
|
"phase": "STARTING",
|
||||||
|
"current": 0,
|
||||||
|
"total": 0,
|
||||||
|
"percentage": 0.0,
|
||||||
|
"message": "Initializing scan"
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Get total items to process
|
||||||
|
total_to_scan = self.get_total_to_scan()
|
||||||
|
logger.info("Total folders to scan: %d", total_to_scan)
|
||||||
|
|
||||||
|
# The scanner enumerates folders with mp4 files, loads existing
|
||||||
|
# metadata, calculates the missing episodes via the provider, and
|
||||||
|
# persists the refreshed metadata while emitting progress events.
|
||||||
result = self.__find_mp4_files()
|
result = self.__find_mp4_files()
|
||||||
counter = 0
|
counter = 0
|
||||||
|
|
||||||
for folder, mp4_files in result:
|
for folder, mp4_files in result:
|
||||||
try:
|
try:
|
||||||
counter += 1
|
counter += 1
|
||||||
callback(folder, counter)
|
|
||||||
serie = self.__ReadDataFromFile(folder)
|
# Calculate progress
|
||||||
if (serie != None and not self.is_null_or_whitespace(serie.key)):
|
if total_to_scan > 0:
|
||||||
missings, site = self.__GetMissingEpisodesAndSeason(serie.key, mp4_files)
|
percentage = (counter / total_to_scan) * 100
|
||||||
serie.episodeDict = missings
|
else:
|
||||||
serie.folder = folder
|
percentage = 0.0
|
||||||
serie.save_to_file(os.path.join(os.path.join(self.directory, folder), 'data'))
|
|
||||||
if (serie.key in self.folderDict):
|
# Notify progress
|
||||||
logging.ERROR(f"dublication found: {serie.key}");
|
self._safe_call_event(
|
||||||
pass
|
self.events.on_progress,
|
||||||
self.folderDict[serie.key] = serie
|
{
|
||||||
noKeyFound_logger.info(f"Saved Serie: '{str(serie)}'")
|
"operation_id": self._current_operation_id,
|
||||||
except NoKeyFoundException as nkfe:
|
"phase": "IN_PROGRESS",
|
||||||
NoKeyFoundException.error(f"Error processing folder '{folder}': {nkfe}")
|
"current": counter,
|
||||||
|
"total": total_to_scan,
|
||||||
|
"percentage": percentage,
|
||||||
|
"message": f"Scanning: {folder}",
|
||||||
|
"details": f"Found {len(mp4_files)} episodes"
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
serie = self.__read_data_from_file(folder)
|
||||||
|
if (
|
||||||
|
serie is not None
|
||||||
|
and serie.key
|
||||||
|
and serie.key.strip()
|
||||||
|
):
|
||||||
|
# Try to extract year from folder name first
|
||||||
|
if not hasattr(serie, 'year') or not serie.year:
|
||||||
|
year_from_folder = self._extract_year_from_folder_name(folder)
|
||||||
|
if year_from_folder:
|
||||||
|
serie.year = year_from_folder
|
||||||
|
logger.info(
|
||||||
|
"Using year from folder name: %s (year=%d)",
|
||||||
|
folder,
|
||||||
|
year_from_folder
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
# If not in folder name, fetch from provider
|
||||||
|
try:
|
||||||
|
serie.year = self.loader.get_year(serie.key)
|
||||||
|
if serie.year:
|
||||||
|
logger.info(
|
||||||
|
"Fetched year from provider: %s (year=%d)",
|
||||||
|
serie.key,
|
||||||
|
serie.year
|
||||||
|
)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
error_logger.error(f"Folder: '{folder}' - Unexpected error processing folder '{folder}': {e} \n {traceback.format_exc()}")
|
logger.warning(
|
||||||
|
"Could not fetch year for %s: %s",
|
||||||
|
serie.key,
|
||||||
|
str(e)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Delegate the provider to compare local files with
|
||||||
|
# remote metadata, yielding missing episodes per
|
||||||
|
# season. Results are saved back to disk so that both
|
||||||
|
# CLI and API consumers see consistent state.
|
||||||
|
missing_episodes, _site = (
|
||||||
|
self.__get_missing_episodes_and_season(
|
||||||
|
serie.key, mp4_files
|
||||||
|
)
|
||||||
|
)
|
||||||
|
serie.episodeDict = missing_episodes
|
||||||
|
serie.folder = folder
|
||||||
|
data_path = os.path.join(
|
||||||
|
self.directory, folder, 'data'
|
||||||
|
)
|
||||||
|
serie.save_to_file(data_path)
|
||||||
|
|
||||||
|
# Store by key (primary identifier), not folder
|
||||||
|
if serie.key in self.keyDict:
|
||||||
|
logger.error(
|
||||||
|
"Duplicate series found with key '%s' "
|
||||||
|
"(folder: '%s')",
|
||||||
|
serie.key,
|
||||||
|
folder
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
self.keyDict[serie.key] = serie
|
||||||
|
logger.debug(
|
||||||
|
"Stored series with key '%s' (folder: '%s')",
|
||||||
|
serie.key,
|
||||||
|
folder
|
||||||
|
)
|
||||||
|
no_key_found_logger.info(
|
||||||
|
"Saved Serie: '%s'", str(serie)
|
||||||
|
)
|
||||||
|
|
||||||
|
except NoKeyFoundException as nkfe:
|
||||||
|
# Log error and notify via callback
|
||||||
|
error_msg = f"Error processing folder '{folder}': {nkfe}"
|
||||||
|
logger.error(error_msg)
|
||||||
|
|
||||||
|
self._safe_call_event(
|
||||||
|
self.events.on_error,
|
||||||
|
{
|
||||||
|
"operation_id": self._current_operation_id,
|
||||||
|
"error": nkfe,
|
||||||
|
"message": error_msg,
|
||||||
|
"recoverable": True,
|
||||||
|
"metadata": {"folder": folder, "key": None}
|
||||||
|
}
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
# Log error and notify via callback
|
||||||
|
error_msg = (
|
||||||
|
f"Folder: '{folder}' - "
|
||||||
|
f"Unexpected error: {e}"
|
||||||
|
)
|
||||||
|
error_logger.error(
|
||||||
|
"%s\n%s",
|
||||||
|
error_msg,
|
||||||
|
traceback.format_exc()
|
||||||
|
)
|
||||||
|
|
||||||
|
self._safe_call_event(
|
||||||
|
self.events.on_error,
|
||||||
|
{
|
||||||
|
"operation_id": self._current_operation_id,
|
||||||
|
"error": e,
|
||||||
|
"message": error_msg,
|
||||||
|
"recoverable": True,
|
||||||
|
"metadata": {"folder": folder, "key": None}
|
||||||
|
}
|
||||||
|
)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
# Notify scan completion
|
||||||
|
self._safe_call_event(
|
||||||
|
self.events.on_completion,
|
||||||
|
{
|
||||||
|
"operation_id": self._current_operation_id,
|
||||||
|
"success": True,
|
||||||
|
"message": f"Scan completed. Processed {counter} folders.",
|
||||||
|
"statistics": {
|
||||||
|
"total_folders": counter,
|
||||||
|
"series_found": len(self.keyDict)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
def __find_mp4_files(self):
|
logger.info(
|
||||||
logging.info("Scanning for .mp4 files")
|
"Scan completed. Processed %d folders, found %d series",
|
||||||
|
counter,
|
||||||
|
len(self.keyDict)
|
||||||
|
)
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
# Critical error - notify and re-raise
|
||||||
|
error_msg = f"Critical scan error: {e}"
|
||||||
|
logger.error("%s\n%s", error_msg, traceback.format_exc())
|
||||||
|
|
||||||
|
self._safe_call_event(
|
||||||
|
self.events.on_error,
|
||||||
|
{
|
||||||
|
"operation_id": self._current_operation_id,
|
||||||
|
"error": e,
|
||||||
|
"message": error_msg,
|
||||||
|
"recoverable": False
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
self._safe_call_event(
|
||||||
|
self.events.on_completion,
|
||||||
|
{
|
||||||
|
"operation_id": self._current_operation_id,
|
||||||
|
"success": False,
|
||||||
|
"message": error_msg
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
raise
|
||||||
|
|
||||||
|
def __find_mp4_files(self) -> Iterator[tuple[str, list[str]]]:
|
||||||
|
"""Find all .mp4 files in the directory structure."""
|
||||||
|
logger.info("Scanning for .mp4 files")
|
||||||
for anime_name in os.listdir(self.directory):
|
for anime_name in os.listdir(self.directory):
|
||||||
anime_path = os.path.join(self.directory, anime_name)
|
anime_path = os.path.join(self.directory, anime_name)
|
||||||
if os.path.isdir(anime_path):
|
if os.path.isdir(anime_path):
|
||||||
mp4_files = []
|
mp4_files: list[str] = []
|
||||||
has_files = False
|
has_files = False
|
||||||
for root, _, files in os.walk(anime_path):
|
for root, _, files in os.walk(anime_path):
|
||||||
for file in files:
|
for file in files:
|
||||||
@@ -66,48 +435,95 @@ class SerieScanner:
|
|||||||
has_files = True
|
has_files = True
|
||||||
yield anime_name, mp4_files if has_files else []
|
yield anime_name, mp4_files if has_files else []
|
||||||
|
|
||||||
def __remove_year(self, input_string: str):
|
def __read_data_from_file(self, folder_name: str) -> Optional[Serie]:
|
||||||
cleaned_string = re.sub(r'\(\d{4}\)', '', input_string).strip()
|
"""Read serie data from file or key file.
|
||||||
logging.debug(f"Removed year from '{input_string}' -> '{cleaned_string}'")
|
|
||||||
return cleaned_string
|
|
||||||
|
|
||||||
def __ReadDataFromFile(self, folder_name: str):
|
Args:
|
||||||
|
folder_name: Filesystem folder name
|
||||||
|
(used only to locate data files)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Serie object with valid key if found, None otherwise
|
||||||
|
|
||||||
|
Note:
|
||||||
|
The returned Serie will have its 'key' as the primary identifier.
|
||||||
|
The 'folder' field is metadata only.
|
||||||
|
"""
|
||||||
folder_path = os.path.join(self.directory, folder_name)
|
folder_path = os.path.join(self.directory, folder_name)
|
||||||
key = None
|
key = None
|
||||||
key_file = os.path.join(folder_path, 'key')
|
key_file = os.path.join(folder_path, 'key')
|
||||||
serie_file = os.path.join(folder_path, 'data')
|
serie_file = os.path.join(folder_path, 'data')
|
||||||
|
|
||||||
if os.path.exists(key_file):
|
if os.path.exists(key_file):
|
||||||
with open(key_file, 'r') as file:
|
with open(key_file, 'r', encoding='utf-8') as file:
|
||||||
key = file.read().strip()
|
key = file.read().strip()
|
||||||
logging.info(f"Key found for folder '{folder_name}': {key}")
|
logger.info(
|
||||||
|
"Key found for folder '%s': %s",
|
||||||
|
folder_name,
|
||||||
|
key
|
||||||
|
)
|
||||||
return Serie(key, "", "aniworld.to", folder_name, dict())
|
return Serie(key, "", "aniworld.to", folder_name, dict())
|
||||||
|
|
||||||
if os.path.exists(serie_file):
|
if os.path.exists(serie_file):
|
||||||
with open(serie_file, "rb") as file:
|
with open(serie_file, "rb") as file:
|
||||||
logging.info(f"load serie_file from '{folder_name}': {serie_file}")
|
logger.info(
|
||||||
|
"load serie_file from '%s': %s",
|
||||||
|
folder_name,
|
||||||
|
serie_file
|
||||||
|
)
|
||||||
return Serie.load_from_file(serie_file)
|
return Serie.load_from_file(serie_file)
|
||||||
|
|
||||||
return None
|
return None
|
||||||
|
|
||||||
|
def __get_episode_and_season(self, filename: str) -> tuple[int, int]:
|
||||||
|
"""Extract season and episode numbers from filename.
|
||||||
|
|
||||||
def __GetEpisodeAndSeason(self, filename: str):
|
Args:
|
||||||
|
filename: Filename to parse
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Tuple of (season, episode) as integers
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
MatchNotFoundError: If pattern not found
|
||||||
|
"""
|
||||||
pattern = r'S(\d+)E(\d+)'
|
pattern = r'S(\d+)E(\d+)'
|
||||||
match = re.search(pattern, filename)
|
match = re.search(pattern, filename)
|
||||||
if match:
|
if match:
|
||||||
season = match.group(1)
|
season = match.group(1)
|
||||||
episode = match.group(2)
|
episode = match.group(2)
|
||||||
logging.debug(f"Extracted season {season}, episode {episode} from '{filename}'")
|
logger.debug(
|
||||||
|
"Extracted season %s, episode %s from '%s'",
|
||||||
|
season,
|
||||||
|
episode,
|
||||||
|
filename
|
||||||
|
)
|
||||||
return int(season), int(episode)
|
return int(season), int(episode)
|
||||||
else:
|
else:
|
||||||
logging.error(f"Failed to find season/episode pattern in '{filename}'")
|
logger.error(
|
||||||
raise MatchNotFoundError("Season and episode pattern not found in the filename.")
|
"Failed to find season/episode pattern in '%s'",
|
||||||
|
filename
|
||||||
|
)
|
||||||
|
raise MatchNotFoundError(
|
||||||
|
"Season and episode pattern not found in the filename."
|
||||||
|
)
|
||||||
|
|
||||||
def __GetEpisodesAndSeasons(self, mp4_files: []):
|
def __get_episodes_and_seasons(
|
||||||
episodes_dict = {}
|
self,
|
||||||
|
mp4_files: Iterable[str]
|
||||||
|
) -> dict[int, list[int]]:
|
||||||
|
"""Get episodes grouped by season from mp4 files.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
mp4_files: List of MP4 filenames
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Dictionary mapping season to list of episode numbers
|
||||||
|
"""
|
||||||
|
episodes_dict: dict[int, list[int]] = {}
|
||||||
|
|
||||||
for file in mp4_files:
|
for file in mp4_files:
|
||||||
season, episode = self.__GetEpisodeAndSeason(file)
|
season, episode = self.__get_episode_and_season(file)
|
||||||
|
|
||||||
if season in episodes_dict:
|
if season in episodes_dict:
|
||||||
episodes_dict[season].append(episode)
|
episodes_dict[season].append(episode)
|
||||||
@@ -115,17 +531,242 @@ class SerieScanner:
|
|||||||
episodes_dict[season] = [episode]
|
episodes_dict[season] = [episode]
|
||||||
return episodes_dict
|
return episodes_dict
|
||||||
|
|
||||||
def __GetMissingEpisodesAndSeason(self, key: str, mp4_files: []):
|
def __get_missing_episodes_and_season(
|
||||||
expected_dict = self.loader.get_season_episode_count(key) # key season , value count of episodes
|
self,
|
||||||
filedict = self.__GetEpisodesAndSeasons(mp4_files)
|
key: str,
|
||||||
episodes_dict = {}
|
mp4_files: Iterable[str]
|
||||||
|
) -> tuple[dict[int, list[int]], str]:
|
||||||
|
"""Get missing episodes for a serie.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
key: Series key
|
||||||
|
mp4_files: List of MP4 filenames
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Tuple of (episodes_dict, site_name)
|
||||||
|
"""
|
||||||
|
# key season , value count of episodes
|
||||||
|
expected_dict = self.loader.get_season_episode_count(key)
|
||||||
|
filedict = self.__get_episodes_and_seasons(mp4_files)
|
||||||
|
episodes_dict: dict[int, list[int]] = {}
|
||||||
for season, expected_count in expected_dict.items():
|
for season, expected_count in expected_dict.items():
|
||||||
existing_episodes = filedict.get(season, [])
|
existing_episodes = filedict.get(season, [])
|
||||||
missing_episodes = [ep for ep in range(1, expected_count + 1) if ep not in existing_episodes and self.loader.IsLanguage(season, ep, key)]
|
missing_episodes = [
|
||||||
|
ep for ep in range(1, expected_count + 1)
|
||||||
|
if ep not in existing_episodes
|
||||||
|
and self.loader.is_language(season, ep, key)
|
||||||
|
]
|
||||||
|
|
||||||
if missing_episodes:
|
if missing_episodes:
|
||||||
episodes_dict[season] = missing_episodes
|
episodes_dict[season] = missing_episodes
|
||||||
|
|
||||||
return episodes_dict, "aniworld.to"
|
return episodes_dict, "aniworld.to"
|
||||||
|
|
||||||
|
def scan_single_series(
|
||||||
|
self,
|
||||||
|
key: str,
|
||||||
|
folder: str,
|
||||||
|
) -> dict[int, list[int]]:
|
||||||
|
"""
|
||||||
|
Scan a single series for missing episodes.
|
||||||
|
|
||||||
|
This method performs a targeted scan for only the specified series,
|
||||||
|
without triggering a full library rescan. It fetches available
|
||||||
|
episodes from the provider and compares with local files.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
key: The unique provider key for the series
|
||||||
|
folder: The filesystem folder name where the series is stored
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
dict[int, list[int]]: Dictionary mapping season numbers to lists
|
||||||
|
of missing episode numbers. Empty dict if no missing episodes.
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
ValueError: If key or folder is empty
|
||||||
|
|
||||||
|
Example:
|
||||||
|
>>> scanner = SerieScanner("/path/to/anime", loader)
|
||||||
|
>>> missing = scanner.scan_single_series(
|
||||||
|
... "attack-on-titan",
|
||||||
|
... "Attack on Titan"
|
||||||
|
... )
|
||||||
|
>>> print(missing)
|
||||||
|
{1: [5, 6, 7], 2: [1, 2]}
|
||||||
|
"""
|
||||||
|
if not key or not key.strip():
|
||||||
|
raise ValueError("Series key cannot be empty")
|
||||||
|
if not folder or not folder.strip():
|
||||||
|
raise ValueError("Series folder cannot be empty")
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
"Starting targeted scan for series: %s (folder: %s)",
|
||||||
|
key,
|
||||||
|
folder
|
||||||
|
)
|
||||||
|
|
||||||
|
# Generate unique operation ID for this targeted scan
|
||||||
|
operation_id = str(uuid.uuid4())
|
||||||
|
# Notify scan starting
|
||||||
|
self._safe_call_event(
|
||||||
|
self.events.on_progress,
|
||||||
|
{
|
||||||
|
"operation_id": operation_id,
|
||||||
|
"phase": "STARTING",
|
||||||
|
"current": 0,
|
||||||
|
"total": 1,
|
||||||
|
"percentage": 0.0,
|
||||||
|
"message": f"Scanning series: {folder}",
|
||||||
|
"details": f"Key: {key}"
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Get the folder path
|
||||||
|
folder_path = os.path.join(self.directory, folder)
|
||||||
|
|
||||||
|
# Check if folder exists
|
||||||
|
if not os.path.isdir(folder_path):
|
||||||
|
logger.info(
|
||||||
|
"Series folder does not exist yet: %s - "
|
||||||
|
"will scan for available episodes from provider",
|
||||||
|
folder_path
|
||||||
|
)
|
||||||
|
mp4_files: list[str] = []
|
||||||
|
else:
|
||||||
|
# Find existing MP4 files in the folder
|
||||||
|
mp4_files = []
|
||||||
|
for root, _, files in os.walk(folder_path):
|
||||||
|
for file in files:
|
||||||
|
if file.endswith(".mp4"):
|
||||||
|
mp4_files.append(os.path.join(root, file))
|
||||||
|
|
||||||
|
logger.debug(
|
||||||
|
"Found %d existing MP4 files in folder %s",
|
||||||
|
len(mp4_files),
|
||||||
|
folder
|
||||||
|
)
|
||||||
|
|
||||||
|
# Get missing episodes from provider
|
||||||
|
missing_episodes, site = self.__get_missing_episodes_and_season(
|
||||||
|
key, mp4_files
|
||||||
|
)
|
||||||
|
|
||||||
|
# Update progress
|
||||||
|
self._safe_call_event(
|
||||||
|
self.events.on_progress,
|
||||||
|
{
|
||||||
|
"operation_id": operation_id,
|
||||||
|
"phase": "IN_PROGRESS",
|
||||||
|
"current": 1,
|
||||||
|
"total": 1,
|
||||||
|
"percentage": 100.0,
|
||||||
|
"message": f"Scanned: {folder}",
|
||||||
|
"details": f"Found {sum(len(eps) for eps in missing_episodes.values())} missing episodes"
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
# Create or update Serie in keyDict
|
||||||
|
if key in self.keyDict:
|
||||||
|
# Update existing serie
|
||||||
|
self.keyDict[key].episodeDict = missing_episodes
|
||||||
|
logger.debug(
|
||||||
|
"Updated existing series %s with %d missing episodes",
|
||||||
|
key,
|
||||||
|
sum(len(eps) for eps in missing_episodes.values())
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
# Try to extract year from folder name first
|
||||||
|
year = self._extract_year_from_folder_name(folder)
|
||||||
|
if year:
|
||||||
|
logger.info(
|
||||||
|
"Using year from folder name: %s (year=%d)",
|
||||||
|
folder,
|
||||||
|
year
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
# If not in folder name, fetch from provider
|
||||||
|
try:
|
||||||
|
year = self.loader.get_year(key)
|
||||||
|
if year:
|
||||||
|
logger.info(
|
||||||
|
"Fetched year from provider: %s (year=%d)",
|
||||||
|
key,
|
||||||
|
year
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
logger.warning(
|
||||||
|
"Could not fetch year for %s: %s",
|
||||||
|
key,
|
||||||
|
str(e)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Create new serie entry
|
||||||
|
serie = Serie(
|
||||||
|
key=key,
|
||||||
|
name="", # Will be populated by caller if needed
|
||||||
|
site=site,
|
||||||
|
folder=folder,
|
||||||
|
episodeDict=missing_episodes,
|
||||||
|
year=year
|
||||||
|
)
|
||||||
|
self.keyDict[key] = serie
|
||||||
|
logger.debug(
|
||||||
|
"Created new series entry for %s with %d missing episodes (year=%s)",
|
||||||
|
key,
|
||||||
|
sum(len(eps) for eps in missing_episodes.values()),
|
||||||
|
year
|
||||||
|
)
|
||||||
|
|
||||||
|
# Notify completion
|
||||||
|
self._safe_call_event(
|
||||||
|
self.events.on_completion,
|
||||||
|
{
|
||||||
|
"operation_id": operation_id,
|
||||||
|
"success": True,
|
||||||
|
"message": f"Scan completed for {folder}",
|
||||||
|
"statistics": {
|
||||||
|
"missing_episodes": sum(
|
||||||
|
len(eps) for eps in missing_episodes.values()
|
||||||
|
),
|
||||||
|
"seasons_with_missing": len(missing_episodes)
|
||||||
|
}
|
||||||
|
}
|
||||||
|
)
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
"Targeted scan completed for %s: %d missing episodes across %d seasons",
|
||||||
|
key,
|
||||||
|
sum(len(eps) for eps in missing_episodes.values()),
|
||||||
|
len(missing_episodes)
|
||||||
|
)
|
||||||
|
|
||||||
|
return missing_episodes
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
error_msg = f"Failed to scan series {key}: {e}"
|
||||||
|
logger.error(error_msg, exc_info=True)
|
||||||
|
|
||||||
|
# Notify error
|
||||||
|
self._safe_call_event(
|
||||||
|
self.events.on_error,
|
||||||
|
{
|
||||||
|
"operation_id": operation_id,
|
||||||
|
"error": e,
|
||||||
|
"message": error_msg,
|
||||||
|
"recoverable": True,
|
||||||
|
"metadata": {"key": key, "folder": folder}
|
||||||
|
}
|
||||||
|
)
|
||||||
|
# Notify completion with failure
|
||||||
|
self._safe_call_event(
|
||||||
|
self.events.on_completion,
|
||||||
|
{
|
||||||
|
"operation_id": operation_id,
|
||||||
|
"success": False,
|
||||||
|
"message": error_msg
|
||||||
|
}
|
||||||
|
)
|
||||||
|
# Return empty dict on error (scan failed but not critical)
|
||||||
|
return {}
|
||||||
|
|
||||||
|
|||||||
835
src/core/SeriesApp.py
Normal file
835
src/core/SeriesApp.py
Normal file
@@ -0,0 +1,835 @@
|
|||||||
|
"""
|
||||||
|
SeriesApp - Core application logic for anime series management.
|
||||||
|
|
||||||
|
This module provides the main application interface for searching,
|
||||||
|
downloading, and managing anime series with support for async callbacks,
|
||||||
|
progress reporting, and error handling.
|
||||||
|
|
||||||
|
Note:
|
||||||
|
This module is pure domain logic with no database dependencies.
|
||||||
|
Database operations are handled by the service layer (AnimeService).
|
||||||
|
"""
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
from concurrent.futures import ThreadPoolExecutor
|
||||||
|
from typing import Any, Dict, List, Optional
|
||||||
|
|
||||||
|
from events import Events
|
||||||
|
|
||||||
|
from src.config.settings import settings
|
||||||
|
from src.core.entities.SerieList import SerieList
|
||||||
|
from src.core.entities.series import Serie
|
||||||
|
from src.core.providers.provider_factory import Loaders
|
||||||
|
from src.core.SerieScanner import SerieScanner
|
||||||
|
from src.core.services.nfo_service import NFOService
|
||||||
|
from src.core.services.tmdb_client import TMDBAPIError
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class DownloadStatusEventArgs:
|
||||||
|
"""Event arguments for download status events."""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
serie_folder: str,
|
||||||
|
season: int,
|
||||||
|
episode: int,
|
||||||
|
status: str,
|
||||||
|
key: Optional[str] = None,
|
||||||
|
progress: float = 0.0,
|
||||||
|
message: Optional[str] = None,
|
||||||
|
error: Optional[Exception] = None,
|
||||||
|
eta: Optional[int] = None,
|
||||||
|
mbper_sec: Optional[float] = None,
|
||||||
|
item_id: Optional[str] = None,
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Initialize download status event arguments.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
serie_folder: Serie folder name (metadata only, used for
|
||||||
|
file paths)
|
||||||
|
season: Season number
|
||||||
|
episode: Episode number
|
||||||
|
status: Status message (e.g., "started", "progress",
|
||||||
|
"completed", "failed")
|
||||||
|
key: Serie unique identifier (provider key, primary
|
||||||
|
identifier)
|
||||||
|
progress: Download progress (0.0 to 1.0)
|
||||||
|
message: Optional status message
|
||||||
|
error: Optional error if status is "failed"
|
||||||
|
eta: Estimated time remaining in seconds
|
||||||
|
mbper_sec: Download speed in MB/s
|
||||||
|
item_id: Optional download queue item ID for tracking
|
||||||
|
"""
|
||||||
|
self.serie_folder = serie_folder
|
||||||
|
self.key = key
|
||||||
|
self.season = season
|
||||||
|
self.episode = episode
|
||||||
|
self.status = status
|
||||||
|
self.progress = progress
|
||||||
|
self.message = message
|
||||||
|
self.error = error
|
||||||
|
self.eta = eta
|
||||||
|
self.mbper_sec = mbper_sec
|
||||||
|
self.item_id = item_id
|
||||||
|
|
||||||
|
|
||||||
|
class ScanStatusEventArgs:
|
||||||
|
"""Event arguments for scan status events."""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
current: int,
|
||||||
|
total: int,
|
||||||
|
folder: str,
|
||||||
|
status: str,
|
||||||
|
key: Optional[str] = None,
|
||||||
|
progress: float = 0.0,
|
||||||
|
message: Optional[str] = None,
|
||||||
|
error: Optional[Exception] = None,
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Initialize scan status event arguments.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
current: Current item being scanned
|
||||||
|
total: Total items to scan
|
||||||
|
folder: Current folder being scanned (metadata only)
|
||||||
|
status: Status message (e.g., "started", "progress",
|
||||||
|
"completed", "failed", "cancelled")
|
||||||
|
key: Serie unique identifier if applicable (provider key,
|
||||||
|
primary identifier)
|
||||||
|
progress: Scan progress (0.0 to 1.0)
|
||||||
|
message: Optional status message
|
||||||
|
error: Optional error if status is "failed"
|
||||||
|
"""
|
||||||
|
self.current = current
|
||||||
|
self.total = total
|
||||||
|
self.folder = folder
|
||||||
|
self.key = key
|
||||||
|
self.status = status
|
||||||
|
self.progress = progress
|
||||||
|
self.message = message
|
||||||
|
self.error = error
|
||||||
|
|
||||||
|
|
||||||
|
class SeriesApp:
|
||||||
|
"""
|
||||||
|
Main application class for anime series management.
|
||||||
|
|
||||||
|
Provides functionality for:
|
||||||
|
- Searching anime series
|
||||||
|
- Downloading episodes
|
||||||
|
- Scanning directories for missing episodes
|
||||||
|
- Managing series lists
|
||||||
|
|
||||||
|
Supports async callbacks for progress reporting.
|
||||||
|
|
||||||
|
Note:
|
||||||
|
This class is now pure domain logic with no database dependencies.
|
||||||
|
Database operations are handled by the service layer (AnimeService).
|
||||||
|
|
||||||
|
Events:
|
||||||
|
download_status: Raised when download status changes.
|
||||||
|
Handler signature: def handler(args: DownloadStatusEventArgs)
|
||||||
|
scan_status: Raised when scan status changes.
|
||||||
|
Handler signature: def handler(args: ScanStatusEventArgs)
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
directory_to_search: str,
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Initialize SeriesApp.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
directory_to_search: Base directory for anime series
|
||||||
|
"""
|
||||||
|
|
||||||
|
self.directory_to_search = directory_to_search
|
||||||
|
|
||||||
|
# Initialize thread pool executor
|
||||||
|
self.executor = ThreadPoolExecutor(max_workers=3)
|
||||||
|
|
||||||
|
# Initialize events
|
||||||
|
self._events = Events()
|
||||||
|
|
||||||
|
self.loaders = Loaders()
|
||||||
|
self.loader = self.loaders.GetLoader(key="aniworld.to")
|
||||||
|
self.serie_scanner = SerieScanner(
|
||||||
|
directory_to_search, self.loader
|
||||||
|
)
|
||||||
|
# Skip automatic loading from data files - series will be loaded
|
||||||
|
# from database by the service layer during application setup
|
||||||
|
self.list = SerieList(self.directory_to_search, skip_load=True)
|
||||||
|
self.series_list: List[Any] = []
|
||||||
|
# Initialize empty list - series loaded later via load_series_from_list()
|
||||||
|
# No need to call _init_list_sync() anymore
|
||||||
|
|
||||||
|
# Initialize NFO service if TMDB API key is configured
|
||||||
|
self.nfo_service: Optional[NFOService] = None
|
||||||
|
if settings.tmdb_api_key:
|
||||||
|
try:
|
||||||
|
from src.core.services.nfo_factory import get_nfo_factory
|
||||||
|
factory = get_nfo_factory()
|
||||||
|
self.nfo_service = factory.create()
|
||||||
|
logger.info("NFO service initialized successfully")
|
||||||
|
except (ValueError, Exception) as e: # pylint: disable=broad-except
|
||||||
|
logger.warning(
|
||||||
|
"Failed to initialize NFO service: %s", str(e)
|
||||||
|
)
|
||||||
|
self.nfo_service = None
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
"SeriesApp initialized for directory: %s",
|
||||||
|
directory_to_search
|
||||||
|
)
|
||||||
|
|
||||||
|
@property
|
||||||
|
def download_status(self):
|
||||||
|
"""
|
||||||
|
Event raised when download status changes.
|
||||||
|
|
||||||
|
Subscribe using:
|
||||||
|
app.download_status += handler
|
||||||
|
"""
|
||||||
|
return self._events.download_status
|
||||||
|
|
||||||
|
@download_status.setter
|
||||||
|
def download_status(self, value):
|
||||||
|
"""Set download_status event handler."""
|
||||||
|
self._events.download_status = value
|
||||||
|
|
||||||
|
@property
|
||||||
|
def scan_status(self):
|
||||||
|
"""
|
||||||
|
Event raised when scan status changes.
|
||||||
|
|
||||||
|
Subscribe using:
|
||||||
|
app.scan_status += handler
|
||||||
|
"""
|
||||||
|
return self._events.scan_status
|
||||||
|
|
||||||
|
@scan_status.setter
|
||||||
|
def scan_status(self, value):
|
||||||
|
"""Set scan_status event handler."""
|
||||||
|
self._events.scan_status = value
|
||||||
|
|
||||||
|
def load_series_from_list(self, series: list) -> None:
|
||||||
|
"""
|
||||||
|
Load series into the in-memory list.
|
||||||
|
|
||||||
|
This method is called by the service layer after loading
|
||||||
|
series from the database.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
series: List of Serie objects to load
|
||||||
|
"""
|
||||||
|
self.list.keyDict.clear()
|
||||||
|
for serie in series:
|
||||||
|
self.list.keyDict[serie.key] = serie
|
||||||
|
self.series_list = self.list.GetMissingEpisode()
|
||||||
|
logger.debug(
|
||||||
|
"Loaded %d series with %d having missing episodes",
|
||||||
|
len(series),
|
||||||
|
len(self.series_list)
|
||||||
|
)
|
||||||
|
|
||||||
|
async def search(self, words: str) -> List[Dict[str, Any]]:
|
||||||
|
"""
|
||||||
|
Search for anime series (async).
|
||||||
|
|
||||||
|
Args:
|
||||||
|
words: Search query
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
List of search results
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
RuntimeError: If search fails
|
||||||
|
"""
|
||||||
|
logger.info("Searching for: %s", words)
|
||||||
|
loop = asyncio.get_running_loop()
|
||||||
|
results = await loop.run_in_executor(
|
||||||
|
self.executor,
|
||||||
|
self.loader.search,
|
||||||
|
words
|
||||||
|
)
|
||||||
|
logger.info("Found %d results", len(results))
|
||||||
|
return results
|
||||||
|
|
||||||
|
async def download(
|
||||||
|
self,
|
||||||
|
serie_folder: str,
|
||||||
|
season: int,
|
||||||
|
episode: int,
|
||||||
|
key: str,
|
||||||
|
language: str = "German Dub",
|
||||||
|
item_id: Optional[str] = None,
|
||||||
|
) -> bool:
|
||||||
|
"""
|
||||||
|
Download an episode (async).
|
||||||
|
|
||||||
|
Args:
|
||||||
|
serie_folder: Serie folder name (metadata only, used for
|
||||||
|
file path construction)
|
||||||
|
season: Season number
|
||||||
|
episode: Episode number
|
||||||
|
key: Serie unique identifier (provider key, primary
|
||||||
|
identifier for lookups)
|
||||||
|
language: Language preference
|
||||||
|
item_id: Optional download queue item ID for progress
|
||||||
|
tracking
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
True if download succeeded, False otherwise
|
||||||
|
|
||||||
|
Note:
|
||||||
|
The 'key' parameter is the primary identifier for series
|
||||||
|
lookups. The 'serie_folder' parameter is only used for
|
||||||
|
filesystem operations.
|
||||||
|
"""
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
"Starting download: %s (key: %s) S%02dE%02d",
|
||||||
|
serie_folder,
|
||||||
|
key,
|
||||||
|
season,
|
||||||
|
episode
|
||||||
|
)
|
||||||
|
|
||||||
|
# Fire download started event
|
||||||
|
self._events.download_status(
|
||||||
|
DownloadStatusEventArgs(
|
||||||
|
serie_folder=serie_folder,
|
||||||
|
key=key,
|
||||||
|
season=season,
|
||||||
|
episode=episode,
|
||||||
|
status="started",
|
||||||
|
message="Download started",
|
||||||
|
item_id=item_id,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Create series folder if it doesn't exist
|
||||||
|
folder_path = os.path.join(self.directory_to_search, serie_folder)
|
||||||
|
if not os.path.exists(folder_path):
|
||||||
|
try:
|
||||||
|
os.makedirs(folder_path, exist_ok=True)
|
||||||
|
logger.info(
|
||||||
|
"Created series folder: %s (key: %s)",
|
||||||
|
folder_path,
|
||||||
|
key
|
||||||
|
)
|
||||||
|
except OSError as e:
|
||||||
|
logger.error(
|
||||||
|
"Failed to create series folder %s: %s",
|
||||||
|
folder_path,
|
||||||
|
str(e)
|
||||||
|
)
|
||||||
|
# Fire download failed event
|
||||||
|
self._events.download_status(
|
||||||
|
DownloadStatusEventArgs(
|
||||||
|
serie_folder=serie_folder,
|
||||||
|
key=key,
|
||||||
|
season=season,
|
||||||
|
episode=episode,
|
||||||
|
status="failed",
|
||||||
|
message=f"Failed to create folder: {str(e)}",
|
||||||
|
item_id=item_id,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
return False
|
||||||
|
|
||||||
|
# Check and create NFO files if needed
|
||||||
|
if self.nfo_service and settings.nfo_auto_create:
|
||||||
|
try:
|
||||||
|
# Check if NFO exists
|
||||||
|
nfo_exists = await self.nfo_service.check_nfo_exists(
|
||||||
|
serie_folder
|
||||||
|
)
|
||||||
|
|
||||||
|
if not nfo_exists:
|
||||||
|
logger.info(
|
||||||
|
"NFO not found for %s, creating metadata...",
|
||||||
|
serie_folder
|
||||||
|
)
|
||||||
|
|
||||||
|
# Fire NFO creation started event
|
||||||
|
self._events.download_status(
|
||||||
|
DownloadStatusEventArgs(
|
||||||
|
serie_folder=serie_folder,
|
||||||
|
key=key,
|
||||||
|
season=season,
|
||||||
|
episode=episode,
|
||||||
|
status="nfo_creating",
|
||||||
|
message="Creating NFO metadata...",
|
||||||
|
item_id=item_id,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Create NFO and download media files
|
||||||
|
try:
|
||||||
|
# Use folder name as series name
|
||||||
|
await self.nfo_service.create_tvshow_nfo(
|
||||||
|
serie_name=serie_folder,
|
||||||
|
serie_folder=serie_folder,
|
||||||
|
download_poster=settings.nfo_download_poster,
|
||||||
|
download_logo=settings.nfo_download_logo,
|
||||||
|
download_fanart=settings.nfo_download_fanart
|
||||||
|
)
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
"NFO and media files created for %s",
|
||||||
|
serie_folder
|
||||||
|
)
|
||||||
|
|
||||||
|
# Fire NFO creation completed event
|
||||||
|
self._events.download_status(
|
||||||
|
DownloadStatusEventArgs(
|
||||||
|
serie_folder=serie_folder,
|
||||||
|
key=key,
|
||||||
|
season=season,
|
||||||
|
episode=episode,
|
||||||
|
status="nfo_completed",
|
||||||
|
message="NFO metadata created",
|
||||||
|
item_id=item_id,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
except TMDBAPIError as tmdb_error:
|
||||||
|
logger.warning(
|
||||||
|
"Failed to create NFO for %s: %s",
|
||||||
|
serie_folder,
|
||||||
|
str(tmdb_error)
|
||||||
|
)
|
||||||
|
# Fire failed event (but continue with download)
|
||||||
|
self._events.download_status(
|
||||||
|
DownloadStatusEventArgs(
|
||||||
|
serie_folder=serie_folder,
|
||||||
|
key=key,
|
||||||
|
season=season,
|
||||||
|
episode=episode,
|
||||||
|
status="nfo_failed",
|
||||||
|
message=(
|
||||||
|
f"NFO creation failed: "
|
||||||
|
f"{str(tmdb_error)}"
|
||||||
|
),
|
||||||
|
item_id=item_id,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
logger.debug("NFO already exists for %s", serie_folder)
|
||||||
|
|
||||||
|
except Exception as nfo_error: # pylint: disable=broad-except
|
||||||
|
logger.error(
|
||||||
|
"Error checking/creating NFO for %s: %s",
|
||||||
|
serie_folder,
|
||||||
|
str(nfo_error),
|
||||||
|
exc_info=True
|
||||||
|
)
|
||||||
|
# Don't fail the download if NFO creation fails
|
||||||
|
|
||||||
|
try:
|
||||||
|
def download_progress_handler(progress_info):
|
||||||
|
"""Handle download progress events from loader."""
|
||||||
|
logger.debug(
|
||||||
|
"download_progress_handler called with: %s", progress_info
|
||||||
|
)
|
||||||
|
|
||||||
|
downloaded = progress_info.get('downloaded_bytes', 0)
|
||||||
|
total_bytes = (
|
||||||
|
progress_info.get('total_bytes')
|
||||||
|
or progress_info.get('total_bytes_estimate', 0)
|
||||||
|
)
|
||||||
|
|
||||||
|
speed = progress_info.get('speed', 0) # bytes/sec
|
||||||
|
eta = progress_info.get('eta') # seconds
|
||||||
|
mbper_sec = speed / (1024 * 1024) if speed else None
|
||||||
|
|
||||||
|
self._events.download_status(
|
||||||
|
DownloadStatusEventArgs(
|
||||||
|
serie_folder=serie_folder,
|
||||||
|
key=key,
|
||||||
|
season=season,
|
||||||
|
episode=episode,
|
||||||
|
status="progress",
|
||||||
|
message="Download progress",
|
||||||
|
progress=(
|
||||||
|
(downloaded / total_bytes) * 100
|
||||||
|
if total_bytes else 0
|
||||||
|
),
|
||||||
|
eta=eta,
|
||||||
|
mbper_sec=mbper_sec,
|
||||||
|
item_id=item_id,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Subscribe to loader's download progress events
|
||||||
|
self.loader.subscribe_download_progress(download_progress_handler)
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Perform download in thread to avoid blocking event loop
|
||||||
|
loop = asyncio.get_running_loop()
|
||||||
|
download_success = await loop.run_in_executor(
|
||||||
|
self.executor,
|
||||||
|
self.loader.download,
|
||||||
|
self.directory_to_search,
|
||||||
|
serie_folder,
|
||||||
|
season,
|
||||||
|
episode,
|
||||||
|
key,
|
||||||
|
language
|
||||||
|
)
|
||||||
|
finally:
|
||||||
|
# Always unsubscribe after download completes or fails
|
||||||
|
self.loader.unsubscribe_download_progress(
|
||||||
|
download_progress_handler
|
||||||
|
)
|
||||||
|
|
||||||
|
if download_success:
|
||||||
|
logger.info(
|
||||||
|
"Download completed: %s (key: %s) S%02dE%02d",
|
||||||
|
serie_folder,
|
||||||
|
key,
|
||||||
|
season,
|
||||||
|
episode
|
||||||
|
)
|
||||||
|
|
||||||
|
# Fire download completed event
|
||||||
|
self._events.download_status(
|
||||||
|
DownloadStatusEventArgs(
|
||||||
|
serie_folder=serie_folder,
|
||||||
|
key=key,
|
||||||
|
season=season,
|
||||||
|
episode=episode,
|
||||||
|
status="completed",
|
||||||
|
progress=1.0,
|
||||||
|
message="Download completed successfully",
|
||||||
|
item_id=item_id,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
logger.warning(
|
||||||
|
"Download failed: %s (key: %s) S%02dE%02d",
|
||||||
|
serie_folder,
|
||||||
|
key,
|
||||||
|
season,
|
||||||
|
episode
|
||||||
|
)
|
||||||
|
|
||||||
|
# Fire download failed event
|
||||||
|
self._events.download_status(
|
||||||
|
DownloadStatusEventArgs(
|
||||||
|
serie_folder=serie_folder,
|
||||||
|
key=key,
|
||||||
|
season=season,
|
||||||
|
episode=episode,
|
||||||
|
status="failed",
|
||||||
|
message="Download failed",
|
||||||
|
item_id=item_id,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
return download_success
|
||||||
|
|
||||||
|
except InterruptedError:
|
||||||
|
# Download was cancelled - propagate the cancellation
|
||||||
|
logger.info(
|
||||||
|
"Download cancelled: %s (key: %s) S%02dE%02d",
|
||||||
|
serie_folder,
|
||||||
|
key,
|
||||||
|
season,
|
||||||
|
episode,
|
||||||
|
)
|
||||||
|
# Fire download cancelled event
|
||||||
|
self._events.download_status(
|
||||||
|
DownloadStatusEventArgs(
|
||||||
|
serie_folder=serie_folder,
|
||||||
|
key=key,
|
||||||
|
season=season,
|
||||||
|
episode=episode,
|
||||||
|
status="cancelled",
|
||||||
|
message="Download cancelled by user",
|
||||||
|
item_id=item_id,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
raise # Re-raise to propagate cancellation
|
||||||
|
|
||||||
|
except Exception as e: # pylint: disable=broad-except
|
||||||
|
logger.error(
|
||||||
|
"Download error: %s (key: %s) S%02dE%02d - %s",
|
||||||
|
serie_folder,
|
||||||
|
key,
|
||||||
|
season,
|
||||||
|
episode,
|
||||||
|
str(e),
|
||||||
|
exc_info=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Fire download error event
|
||||||
|
self._events.download_status(
|
||||||
|
DownloadStatusEventArgs(
|
||||||
|
serie_folder=serie_folder,
|
||||||
|
key=key,
|
||||||
|
season=season,
|
||||||
|
episode=episode,
|
||||||
|
status="failed",
|
||||||
|
error=e,
|
||||||
|
message=f"Download error: {str(e)}",
|
||||||
|
item_id=item_id,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
return False
|
||||||
|
|
||||||
|
async def rescan(self) -> list:
|
||||||
|
"""
|
||||||
|
Rescan directory for missing episodes (async).
|
||||||
|
|
||||||
|
This method performs a file-based scan and returns the results.
|
||||||
|
Database persistence is handled by the service layer (AnimeService).
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
List of Serie objects found during scan with their
|
||||||
|
missing episodes.
|
||||||
|
|
||||||
|
Note:
|
||||||
|
This method no longer saves to database directly. The returned
|
||||||
|
list should be persisted by the caller (AnimeService).
|
||||||
|
"""
|
||||||
|
logger.info("Starting directory rescan")
|
||||||
|
|
||||||
|
total_to_scan = 0
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Get total items to scan
|
||||||
|
logger.info("Getting total items to scan...")
|
||||||
|
loop = asyncio.get_running_loop()
|
||||||
|
total_to_scan = await loop.run_in_executor(
|
||||||
|
self.executor,
|
||||||
|
self.serie_scanner.get_total_to_scan
|
||||||
|
)
|
||||||
|
logger.info("Total folders to scan: %d", total_to_scan)
|
||||||
|
|
||||||
|
# Fire scan started event
|
||||||
|
logger.info(
|
||||||
|
"Firing scan_status 'started' event, handler=%s",
|
||||||
|
self._events.scan_status
|
||||||
|
)
|
||||||
|
self._events.scan_status(
|
||||||
|
ScanStatusEventArgs(
|
||||||
|
current=0,
|
||||||
|
total=total_to_scan,
|
||||||
|
folder="",
|
||||||
|
status="started",
|
||||||
|
progress=0.0,
|
||||||
|
message="Scan started",
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Reinitialize scanner
|
||||||
|
await loop.run_in_executor(
|
||||||
|
self.executor,
|
||||||
|
self.serie_scanner.reinit
|
||||||
|
)
|
||||||
|
|
||||||
|
def scan_progress_handler(progress_data):
|
||||||
|
"""Handle scan progress events from scanner."""
|
||||||
|
# Fire scan progress event
|
||||||
|
message = progress_data.get('message', '')
|
||||||
|
folder = message.replace('Scanning: ', '')
|
||||||
|
self._events.scan_status(
|
||||||
|
ScanStatusEventArgs(
|
||||||
|
current=progress_data.get('current', 0),
|
||||||
|
total=progress_data.get('total', total_to_scan),
|
||||||
|
folder=folder,
|
||||||
|
status="progress",
|
||||||
|
progress=(
|
||||||
|
progress_data.get('percentage', 0.0) / 100.0
|
||||||
|
),
|
||||||
|
message=message,
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Subscribe to scanner's progress events
|
||||||
|
self.serie_scanner.subscribe_on_progress(scan_progress_handler)
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Perform scan (file-based, returns results in scanner.keyDict)
|
||||||
|
await loop.run_in_executor(
|
||||||
|
self.executor,
|
||||||
|
self.serie_scanner.scan
|
||||||
|
)
|
||||||
|
finally:
|
||||||
|
# Always unsubscribe after scan completes or fails
|
||||||
|
self.serie_scanner.unsubscribe_on_progress(
|
||||||
|
scan_progress_handler
|
||||||
|
)
|
||||||
|
|
||||||
|
# Get scanned series from scanner
|
||||||
|
scanned_series = list(self.serie_scanner.keyDict.values())
|
||||||
|
|
||||||
|
# Update in-memory list with scan results
|
||||||
|
self.list.keyDict.clear()
|
||||||
|
for serie in scanned_series:
|
||||||
|
self.list.keyDict[serie.key] = serie
|
||||||
|
self.series_list = self.list.GetMissingEpisode()
|
||||||
|
|
||||||
|
logger.info("Directory rescan completed successfully")
|
||||||
|
|
||||||
|
# Fire scan completed event
|
||||||
|
logger.info(
|
||||||
|
"Firing scan_status 'completed' event, handler=%s",
|
||||||
|
self._events.scan_status
|
||||||
|
)
|
||||||
|
self._events.scan_status(
|
||||||
|
ScanStatusEventArgs(
|
||||||
|
current=total_to_scan,
|
||||||
|
total=total_to_scan,
|
||||||
|
folder="",
|
||||||
|
status="completed",
|
||||||
|
progress=1.0,
|
||||||
|
message=(
|
||||||
|
f"Scan completed. Found {len(self.series_list)} "
|
||||||
|
"series with missing episodes."
|
||||||
|
),
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
return scanned_series
|
||||||
|
|
||||||
|
except InterruptedError:
|
||||||
|
logger.warning("Scan cancelled by user")
|
||||||
|
|
||||||
|
# Fire scan cancelled event
|
||||||
|
self._events.scan_status(
|
||||||
|
ScanStatusEventArgs(
|
||||||
|
current=0,
|
||||||
|
total=total_to_scan,
|
||||||
|
folder="",
|
||||||
|
status="cancelled",
|
||||||
|
message="Scan cancelled by user",
|
||||||
|
)
|
||||||
|
)
|
||||||
|
raise
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error("Scan error: %s", str(e), exc_info=True)
|
||||||
|
|
||||||
|
# Fire scan failed event
|
||||||
|
self._events.scan_status(
|
||||||
|
ScanStatusEventArgs(
|
||||||
|
current=0,
|
||||||
|
total=total_to_scan,
|
||||||
|
folder="",
|
||||||
|
status="failed",
|
||||||
|
error=e,
|
||||||
|
message=f"Scan error: {str(e)}",
|
||||||
|
)
|
||||||
|
)
|
||||||
|
raise
|
||||||
|
|
||||||
|
async def get_series_list(self) -> List[Any]:
|
||||||
|
"""
|
||||||
|
Get the current series list (async).
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
List of series with missing episodes
|
||||||
|
"""
|
||||||
|
return self.series_list
|
||||||
|
|
||||||
|
async def refresh_series_list(self) -> None:
|
||||||
|
"""
|
||||||
|
Reload the cached series list from the underlying data store.
|
||||||
|
|
||||||
|
This is an async operation.
|
||||||
|
"""
|
||||||
|
await self._init_list()
|
||||||
|
|
||||||
|
def _get_serie_by_key(self, key: str) -> Optional[Serie]:
|
||||||
|
"""
|
||||||
|
Get a series by its unique provider key.
|
||||||
|
|
||||||
|
This is the primary method for series lookups within SeriesApp.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
key: The unique provider identifier (e.g.,
|
||||||
|
"attack-on-titan")
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
The Serie instance if found, None otherwise
|
||||||
|
|
||||||
|
Note:
|
||||||
|
This method uses the SerieList.get_by_key() method which
|
||||||
|
looks up series by their unique key, not by folder name.
|
||||||
|
"""
|
||||||
|
return self.list.get_by_key(key)
|
||||||
|
|
||||||
|
def get_all_series_from_data_files(self) -> List[Serie]:
|
||||||
|
"""
|
||||||
|
Get all series from data files in the anime directory.
|
||||||
|
|
||||||
|
Scans the directory_to_search for all 'data' files and loads
|
||||||
|
the Serie metadata from each file. This method is synchronous
|
||||||
|
and can be wrapped with asyncio.to_thread if needed for async
|
||||||
|
contexts.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
List of Serie objects found in data files. Returns an empty
|
||||||
|
list if no data files are found or if the directory doesn't
|
||||||
|
exist.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
series_app = SeriesApp("/path/to/anime")
|
||||||
|
all_series = series_app.get_all_series_from_data_files()
|
||||||
|
for serie in all_series:
|
||||||
|
print(f"Found: {serie.name} (key={serie.key})")
|
||||||
|
"""
|
||||||
|
logger.info(
|
||||||
|
"Scanning for data files in directory: %s",
|
||||||
|
self.directory_to_search
|
||||||
|
)
|
||||||
|
|
||||||
|
# Create a fresh SerieList instance for file-based loading
|
||||||
|
# This ensures we get all series from data files without
|
||||||
|
# interfering with the main instance's state
|
||||||
|
try:
|
||||||
|
temp_list = SerieList(
|
||||||
|
self.directory_to_search,
|
||||||
|
skip_load=False # Allow automatic loading
|
||||||
|
)
|
||||||
|
except (OSError, ValueError) as e:
|
||||||
|
logger.error(
|
||||||
|
"Failed to scan directory for data files: %s",
|
||||||
|
str(e),
|
||||||
|
exc_info=True
|
||||||
|
)
|
||||||
|
return []
|
||||||
|
|
||||||
|
# Get all series from the temporary list
|
||||||
|
all_series = temp_list.get_all()
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
"Found %d series from data files in %s",
|
||||||
|
len(all_series),
|
||||||
|
self.directory_to_search
|
||||||
|
)
|
||||||
|
|
||||||
|
return all_series
|
||||||
|
|
||||||
|
def shutdown(self) -> None:
|
||||||
|
"""
|
||||||
|
Shutdown the thread pool executor.
|
||||||
|
|
||||||
|
Should be called when the SeriesApp instance is no longer needed
|
||||||
|
to properly clean up resources.
|
||||||
|
"""
|
||||||
|
if hasattr(self, 'executor'):
|
||||||
|
self.executor.shutdown(wait=True)
|
||||||
|
logger.info("ThreadPoolExecutor shut down successfully")
|
||||||
@@ -1,11 +1,8 @@
|
|||||||
"""
|
"""
|
||||||
Core module for AniWorld application.
|
Core module for AniWorld application.
|
||||||
Contains domain entities, interfaces, use cases, and exceptions.
|
Contains domain entities, interfaces, application services, and exceptions.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
from . import entities
|
from . import entities, exceptions, interfaces, providers
|
||||||
from . import exceptions
|
|
||||||
from . import interfaces
|
|
||||||
from . import use_cases
|
|
||||||
|
|
||||||
__all__ = ['entities', 'exceptions', 'interfaces', 'use_cases']
|
__all__ = ['entities', 'exceptions', 'interfaces', 'providers']
|
||||||
|
|||||||
@@ -1,56 +1,320 @@
|
|||||||
import os
|
"""Utilities for loading and managing stored anime series metadata.
|
||||||
import json
|
|
||||||
|
This module provides the SerieList class for managing collections of anime
|
||||||
|
series metadata. It uses file-based storage only.
|
||||||
|
|
||||||
|
Note:
|
||||||
|
This module is part of the core domain layer and has no database
|
||||||
|
dependencies. All database operations are handled by the service layer.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
import logging
|
import logging
|
||||||
from .series import Serie
|
import os
|
||||||
|
import warnings
|
||||||
|
from json import JSONDecodeError
|
||||||
|
from typing import Dict, Iterable, List, Optional
|
||||||
|
|
||||||
|
from src.core.entities.series import Serie
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class SerieList:
|
class SerieList:
|
||||||
def __init__(self, basePath: str):
|
"""
|
||||||
self.directory = basePath
|
Represents the collection of cached series stored on disk.
|
||||||
self.folderDict: dict[str, Serie] = {} # Proper initialization
|
|
||||||
|
Series are identified by their unique 'key' (provider identifier).
|
||||||
|
The 'folder' is metadata only and not used for lookups.
|
||||||
|
|
||||||
|
This class manages in-memory series data loaded from filesystem.
|
||||||
|
It has no database dependencies - all persistence is handled by
|
||||||
|
the service layer.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
# File-based mode
|
||||||
|
serie_list = SerieList("/path/to/anime")
|
||||||
|
series = serie_list.get_all()
|
||||||
|
|
||||||
|
Attributes:
|
||||||
|
directory: Path to the anime directory
|
||||||
|
keyDict: Internal dictionary mapping serie.key to Serie objects
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
base_path: str,
|
||||||
|
skip_load: bool = False
|
||||||
|
) -> None:
|
||||||
|
"""Initialize the SerieList.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
base_path: Path to the anime directory
|
||||||
|
skip_load: If True, skip automatic loading of series from files.
|
||||||
|
Useful when planning to load from database instead.
|
||||||
|
"""
|
||||||
|
self.directory: str = base_path
|
||||||
|
# Internal storage using serie.key as the dictionary key
|
||||||
|
self.keyDict: Dict[str, Serie] = {}
|
||||||
|
|
||||||
|
# Only auto-load from files if not skipping
|
||||||
|
if not skip_load:
|
||||||
self.load_series()
|
self.load_series()
|
||||||
|
|
||||||
def add(self, serie: Serie):
|
def add(self, serie: Serie, use_sanitized_folder: bool = True) -> str:
|
||||||
if (not self.contains(serie.key)):
|
"""
|
||||||
dataPath = os.path.join(self.directory, serie.folder, "data")
|
Persist a new series if it is not already present (file-based mode).
|
||||||
animePath = os.path.join(self.directory, serie.folder)
|
|
||||||
os.makedirs(animePath, exist_ok=True)
|
Uses serie.key for identification. Creates the filesystem folder
|
||||||
if not os.path.isfile(dataPath):
|
using either the sanitized display name (default) or the existing
|
||||||
serie.save_to_file(dataPath)
|
folder property.
|
||||||
self.folderDict[serie.folder] = serie;
|
|
||||||
|
Args:
|
||||||
|
serie: The Serie instance to add
|
||||||
|
use_sanitized_folder: If True (default), use serie.sanitized_folder
|
||||||
|
for the filesystem folder name based on display name.
|
||||||
|
If False, use serie.folder as-is for backward compatibility.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
str: The folder path that was created/used
|
||||||
|
|
||||||
|
Note:
|
||||||
|
This method creates data files on disk. For database storage,
|
||||||
|
use add_to_db() instead.
|
||||||
|
"""
|
||||||
|
if self.contains(serie.key):
|
||||||
|
# Return existing folder path
|
||||||
|
existing = self.keyDict[serie.key]
|
||||||
|
return os.path.join(self.directory, existing.folder)
|
||||||
|
|
||||||
|
# Determine folder name to use
|
||||||
|
if use_sanitized_folder:
|
||||||
|
folder_name = serie.sanitized_folder
|
||||||
|
# Update the serie's folder property to match what we create
|
||||||
|
serie.folder = folder_name
|
||||||
|
else:
|
||||||
|
folder_name = serie.folder
|
||||||
|
|
||||||
|
data_path = os.path.join(self.directory, folder_name, "data")
|
||||||
|
anime_path = os.path.join(self.directory, folder_name)
|
||||||
|
os.makedirs(anime_path, exist_ok=True)
|
||||||
|
if not os.path.isfile(data_path):
|
||||||
|
serie.save_to_file(data_path)
|
||||||
|
# Store by key, not folder
|
||||||
|
self.keyDict[serie.key] = serie
|
||||||
|
|
||||||
|
return anime_path
|
||||||
|
|
||||||
def contains(self, key: str) -> bool:
|
def contains(self, key: str) -> bool:
|
||||||
for k, value in self.folderDict.items():
|
"""
|
||||||
if value.key == key:
|
Return True when a series identified by ``key`` already exists.
|
||||||
return True
|
|
||||||
return False
|
|
||||||
|
|
||||||
def load_series(self):
|
Args:
|
||||||
""" Scan folders and load data files """
|
key: The unique provider identifier for the series
|
||||||
logging.info(f"Scanning anime folders in: {self.directory}")
|
|
||||||
for anime_folder in os.listdir(self.directory):
|
Returns:
|
||||||
|
True if the series exists in the collection
|
||||||
|
"""
|
||||||
|
return key in self.keyDict
|
||||||
|
|
||||||
|
def load_series(self) -> None:
|
||||||
|
"""Populate the in-memory map with metadata discovered on disk."""
|
||||||
|
|
||||||
|
logging.info("Scanning anime folders in %s", self.directory)
|
||||||
|
try:
|
||||||
|
entries: Iterable[str] = os.listdir(self.directory)
|
||||||
|
except OSError as error:
|
||||||
|
logging.error(
|
||||||
|
"Unable to scan directory %s: %s",
|
||||||
|
self.directory,
|
||||||
|
error,
|
||||||
|
)
|
||||||
|
return
|
||||||
|
|
||||||
|
nfo_stats = {"total": 0, "with_nfo": 0, "without_nfo": 0}
|
||||||
|
media_stats = {
|
||||||
|
"with_poster": 0,
|
||||||
|
"without_poster": 0,
|
||||||
|
"with_logo": 0,
|
||||||
|
"without_logo": 0,
|
||||||
|
"with_fanart": 0,
|
||||||
|
"without_fanart": 0
|
||||||
|
}
|
||||||
|
|
||||||
|
for anime_folder in entries:
|
||||||
anime_path = os.path.join(self.directory, anime_folder, "data")
|
anime_path = os.path.join(self.directory, anime_folder, "data")
|
||||||
if os.path.isfile(anime_path):
|
if os.path.isfile(anime_path):
|
||||||
logging.debug(f"Found data folder: {anime_path}")
|
logging.debug("Found data file for folder %s", anime_folder)
|
||||||
self.load_data(anime_folder, anime_path)
|
serie = self._load_data(anime_folder, anime_path)
|
||||||
|
|
||||||
|
if serie:
|
||||||
|
nfo_stats["total"] += 1
|
||||||
|
# Check for NFO file
|
||||||
|
nfo_file_path = os.path.join(
|
||||||
|
self.directory, anime_folder, "tvshow.nfo"
|
||||||
|
)
|
||||||
|
if os.path.isfile(nfo_file_path):
|
||||||
|
serie.nfo_path = nfo_file_path
|
||||||
|
nfo_stats["with_nfo"] += 1
|
||||||
else:
|
else:
|
||||||
logging.warning(f"Skipping {anime_folder} - No data folder found")
|
nfo_stats["without_nfo"] += 1
|
||||||
|
logging.debug(
|
||||||
|
"Series '%s' (key: %s) is missing tvshow.nfo",
|
||||||
|
serie.name,
|
||||||
|
serie.key
|
||||||
|
)
|
||||||
|
|
||||||
def load_data(self, anime_folder, data_path):
|
# Check for media files
|
||||||
""" Load pickle files from the data folder """
|
folder_path = os.path.join(self.directory, anime_folder)
|
||||||
|
|
||||||
|
poster_path = os.path.join(folder_path, "poster.jpg")
|
||||||
|
if os.path.isfile(poster_path):
|
||||||
|
media_stats["with_poster"] += 1
|
||||||
|
else:
|
||||||
|
media_stats["without_poster"] += 1
|
||||||
|
logging.debug(
|
||||||
|
"Series '%s' (key: %s) is missing poster.jpg",
|
||||||
|
serie.name,
|
||||||
|
serie.key
|
||||||
|
)
|
||||||
|
|
||||||
|
logo_path = os.path.join(folder_path, "logo.png")
|
||||||
|
if os.path.isfile(logo_path):
|
||||||
|
media_stats["with_logo"] += 1
|
||||||
|
else:
|
||||||
|
media_stats["without_logo"] += 1
|
||||||
|
logging.debug(
|
||||||
|
"Series '%s' (key: %s) is missing logo.png",
|
||||||
|
serie.name,
|
||||||
|
serie.key
|
||||||
|
)
|
||||||
|
|
||||||
|
fanart_path = os.path.join(folder_path, "fanart.jpg")
|
||||||
|
if os.path.isfile(fanart_path):
|
||||||
|
media_stats["with_fanart"] += 1
|
||||||
|
else:
|
||||||
|
media_stats["without_fanart"] += 1
|
||||||
|
logging.debug(
|
||||||
|
"Series '%s' (key: %s) is missing fanart.jpg",
|
||||||
|
serie.name,
|
||||||
|
serie.key
|
||||||
|
)
|
||||||
|
|
||||||
|
continue
|
||||||
|
|
||||||
|
logging.warning(
|
||||||
|
"Skipping folder %s because no metadata file was found",
|
||||||
|
anime_folder,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Log summary statistics
|
||||||
|
if nfo_stats["total"] > 0:
|
||||||
|
logging.info(
|
||||||
|
"NFO scan complete: %d series total, %d with NFO, %d without NFO",
|
||||||
|
nfo_stats["total"],
|
||||||
|
nfo_stats["with_nfo"],
|
||||||
|
nfo_stats["without_nfo"]
|
||||||
|
)
|
||||||
|
logging.info(
|
||||||
|
"Media scan complete: Poster (%d/%d), Logo (%d/%d), Fanart (%d/%d)",
|
||||||
|
media_stats["with_poster"],
|
||||||
|
nfo_stats["total"],
|
||||||
|
media_stats["with_logo"],
|
||||||
|
nfo_stats["total"],
|
||||||
|
media_stats["with_fanart"],
|
||||||
|
nfo_stats["total"]
|
||||||
|
)
|
||||||
|
|
||||||
|
def _load_data(self, anime_folder: str, data_path: str) -> Optional[Serie]:
|
||||||
|
"""
|
||||||
|
Load a single series metadata file into the in-memory collection.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
anime_folder: The folder name (for logging only)
|
||||||
|
data_path: Path to the metadata file
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Serie: The loaded Serie object, or None if loading failed
|
||||||
|
"""
|
||||||
try:
|
try:
|
||||||
self.folderDict[anime_folder] = Serie.load_from_file(data_path)
|
serie = Serie.load_from_file(data_path)
|
||||||
logging.debug(f"Successfully loaded {data_path} for {anime_folder}")
|
# Store by key, not folder
|
||||||
except Exception as e:
|
self.keyDict[serie.key] = serie
|
||||||
logging.error(f"Failed to load {data_path} in {anime_folder}: {e}")
|
logging.debug(
|
||||||
|
"Successfully loaded metadata for %s (key: %s)",
|
||||||
|
anime_folder,
|
||||||
|
serie.key
|
||||||
|
)
|
||||||
|
return serie
|
||||||
|
except (OSError, JSONDecodeError, KeyError, ValueError) as error:
|
||||||
|
logging.error(
|
||||||
|
"Failed to load metadata for folder %s from %s: %s",
|
||||||
|
anime_folder,
|
||||||
|
data_path,
|
||||||
|
error,
|
||||||
|
)
|
||||||
|
return None
|
||||||
|
|
||||||
def GetMissingEpisode(self):
|
def GetMissingEpisode(self) -> List[Serie]:
|
||||||
"""Find all series with a non-empty episodeDict"""
|
"""Return all series that still contain missing episodes."""
|
||||||
return [serie for serie in self.folderDict.values() if len(serie.episodeDict) > 0]
|
return [
|
||||||
|
serie
|
||||||
|
for serie in self.keyDict.values()
|
||||||
|
if serie.episodeDict
|
||||||
|
]
|
||||||
|
|
||||||
def GetList(self):
|
def get_missing_episodes(self) -> List[Serie]:
|
||||||
"""Get all series in the list"""
|
"""PEP8-friendly alias for :meth:`GetMissingEpisode`."""
|
||||||
return list(self.folderDict.values())
|
return self.GetMissingEpisode()
|
||||||
|
|
||||||
|
def GetList(self) -> List[Serie]:
|
||||||
|
"""Return all series instances stored in the list."""
|
||||||
|
return list(self.keyDict.values())
|
||||||
|
|
||||||
#k = AnimeList("\\\\sshfs.r\\ubuntu@192.168.178.43\\media\\serien\\Serien")
|
def get_all(self) -> List[Serie]:
|
||||||
#bbabab = k.GetMissingEpisode()
|
"""PEP8-friendly alias for :meth:`GetList`."""
|
||||||
#print(bbabab)
|
return self.GetList()
|
||||||
|
|
||||||
|
def get_by_key(self, key: str) -> Optional[Serie]:
|
||||||
|
"""
|
||||||
|
Get a series by its unique provider key.
|
||||||
|
|
||||||
|
This is the primary method for series lookup.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
key: The unique provider identifier (e.g., "attack-on-titan")
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
The Serie instance if found, None otherwise
|
||||||
|
"""
|
||||||
|
return self.keyDict.get(key)
|
||||||
|
|
||||||
|
def get_by_folder(self, folder: str) -> Optional[Serie]:
|
||||||
|
"""
|
||||||
|
Get a series by its folder name.
|
||||||
|
|
||||||
|
.. deprecated:: 2.0.0
|
||||||
|
Use :meth:`get_by_key` instead. Folder-based lookups will be
|
||||||
|
removed in version 3.0.0. The `folder` field is metadata only
|
||||||
|
and should not be used for identification.
|
||||||
|
|
||||||
|
This method is provided for backward compatibility only.
|
||||||
|
Prefer using get_by_key() for new code.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
folder: The filesystem folder name (e.g., "Attack on Titan (2013)")
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
The Serie instance if found, None otherwise
|
||||||
|
"""
|
||||||
|
warnings.warn(
|
||||||
|
"get_by_folder() is deprecated and will be removed in v3.0.0. "
|
||||||
|
"Use get_by_key() instead. The 'folder' field is metadata only.",
|
||||||
|
DeprecationWarning,
|
||||||
|
stacklevel=2
|
||||||
|
)
|
||||||
|
for serie in self.keyDict.values():
|
||||||
|
if serie.folder == folder:
|
||||||
|
return serie
|
||||||
|
return None
|
||||||
|
|||||||
335
src/core/entities/nfo_models.py
Normal file
335
src/core/entities/nfo_models.py
Normal file
@@ -0,0 +1,335 @@
|
|||||||
|
"""Pydantic models for NFO metadata based on Kodi/XBMC standard.
|
||||||
|
|
||||||
|
This module provides data models for tvshow.nfo files that are compatible
|
||||||
|
with media center applications like Kodi, Plex, and Jellyfin.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
>>> nfo = TVShowNFO(
|
||||||
|
... title="Attack on Titan",
|
||||||
|
... year=2013,
|
||||||
|
... tmdbid=1429
|
||||||
|
... )
|
||||||
|
>>> nfo.premiered = "2013-04-07"
|
||||||
|
"""
|
||||||
|
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import List, Optional
|
||||||
|
|
||||||
|
from pydantic import BaseModel, Field, HttpUrl, field_validator
|
||||||
|
|
||||||
|
|
||||||
|
class RatingInfo(BaseModel):
|
||||||
|
"""Rating information from various sources.
|
||||||
|
|
||||||
|
Attributes:
|
||||||
|
name: Source of the rating (e.g., 'themoviedb', 'imdb')
|
||||||
|
value: Rating value (typically 0-10)
|
||||||
|
votes: Number of votes
|
||||||
|
max_rating: Maximum possible rating (default: 10)
|
||||||
|
default: Whether this is the default rating to display
|
||||||
|
"""
|
||||||
|
|
||||||
|
name: str = Field(..., description="Rating source name")
|
||||||
|
value: float = Field(..., ge=0, description="Rating value")
|
||||||
|
votes: Optional[int] = Field(None, ge=0, description="Number of votes")
|
||||||
|
max_rating: int = Field(10, ge=1, description="Maximum rating value")
|
||||||
|
default: bool = Field(False, description="Is this the default rating")
|
||||||
|
|
||||||
|
@field_validator('value')
|
||||||
|
@classmethod
|
||||||
|
def validate_value(cls, v: float, info) -> float:
|
||||||
|
"""Ensure rating value doesn't exceed max_rating."""
|
||||||
|
# Note: max_rating is not available yet during validation,
|
||||||
|
# so we use a reasonable default check
|
||||||
|
if v > 10:
|
||||||
|
raise ValueError("Rating value cannot exceed 10")
|
||||||
|
return v
|
||||||
|
|
||||||
|
|
||||||
|
class ActorInfo(BaseModel):
|
||||||
|
"""Actor/cast member information.
|
||||||
|
|
||||||
|
Attributes:
|
||||||
|
name: Actor's name
|
||||||
|
role: Character name/role
|
||||||
|
thumb: URL to actor's photo
|
||||||
|
profile: URL to actor's profile page
|
||||||
|
tmdbid: TMDB ID for the actor
|
||||||
|
"""
|
||||||
|
|
||||||
|
name: str = Field(..., description="Actor's name")
|
||||||
|
role: Optional[str] = Field(None, description="Character role")
|
||||||
|
thumb: Optional[HttpUrl] = Field(None, description="Actor photo URL")
|
||||||
|
profile: Optional[HttpUrl] = Field(None, description="Actor profile URL")
|
||||||
|
tmdbid: Optional[int] = Field(None, description="TMDB actor ID")
|
||||||
|
|
||||||
|
|
||||||
|
class ImageInfo(BaseModel):
|
||||||
|
"""Image information for posters, fanart, and logos.
|
||||||
|
|
||||||
|
Attributes:
|
||||||
|
url: URL to the image
|
||||||
|
aspect: Image aspect/type (e.g., 'poster', 'clearlogo', 'logo')
|
||||||
|
season: Season number for season-specific images
|
||||||
|
type: Image type (e.g., 'season')
|
||||||
|
"""
|
||||||
|
|
||||||
|
url: HttpUrl = Field(..., description="Image URL")
|
||||||
|
aspect: Optional[str] = Field(
|
||||||
|
None,
|
||||||
|
description="Image aspect (poster, clearlogo, logo)"
|
||||||
|
)
|
||||||
|
season: Optional[int] = Field(None, ge=-1, description="Season number")
|
||||||
|
type: Optional[str] = Field(None, description="Image type")
|
||||||
|
|
||||||
|
|
||||||
|
class NamedSeason(BaseModel):
|
||||||
|
"""Named season information.
|
||||||
|
|
||||||
|
Attributes:
|
||||||
|
number: Season number
|
||||||
|
name: Season name/title
|
||||||
|
"""
|
||||||
|
|
||||||
|
number: int = Field(..., ge=0, description="Season number")
|
||||||
|
name: str = Field(..., description="Season name")
|
||||||
|
|
||||||
|
|
||||||
|
class UniqueID(BaseModel):
|
||||||
|
"""Unique identifier from various sources.
|
||||||
|
|
||||||
|
Attributes:
|
||||||
|
type: ID source type (tmdb, imdb, tvdb)
|
||||||
|
value: The ID value
|
||||||
|
default: Whether this is the default ID
|
||||||
|
"""
|
||||||
|
|
||||||
|
type: str = Field(..., description="ID type (tmdb, imdb, tvdb)")
|
||||||
|
value: str = Field(..., description="ID value")
|
||||||
|
default: bool = Field(False, description="Is default ID")
|
||||||
|
|
||||||
|
|
||||||
|
class TVShowNFO(BaseModel):
|
||||||
|
"""Main tvshow.nfo structure following Kodi/XBMC standard.
|
||||||
|
|
||||||
|
This model represents the complete metadata for a TV show that can be
|
||||||
|
serialized to XML for use with media center applications.
|
||||||
|
|
||||||
|
Attributes:
|
||||||
|
title: Main title of the show
|
||||||
|
originaltitle: Original title (e.g., in original language)
|
||||||
|
showtitle: Show title (often same as title)
|
||||||
|
sorttitle: Title used for sorting
|
||||||
|
year: Release year
|
||||||
|
plot: Full plot description
|
||||||
|
outline: Short plot summary
|
||||||
|
tagline: Show tagline/slogan
|
||||||
|
runtime: Episode runtime in minutes
|
||||||
|
mpaa: Content rating (e.g., TV-14, TV-MA)
|
||||||
|
certification: Additional certification info
|
||||||
|
premiered: Premiere date (YYYY-MM-DD format)
|
||||||
|
status: Show status (e.g., 'Continuing', 'Ended')
|
||||||
|
studio: List of production studios
|
||||||
|
genre: List of genres
|
||||||
|
country: List of countries
|
||||||
|
tag: List of tags/keywords
|
||||||
|
ratings: List of ratings from various sources
|
||||||
|
userrating: User's personal rating
|
||||||
|
watched: Whether the show has been watched
|
||||||
|
playcount: Number of times watched
|
||||||
|
tmdbid: TMDB ID
|
||||||
|
imdbid: IMDB ID
|
||||||
|
tvdbid: TVDB ID
|
||||||
|
uniqueid: List of unique IDs
|
||||||
|
thumb: List of thumbnail/poster images
|
||||||
|
fanart: List of fanart/backdrop images
|
||||||
|
actors: List of cast members
|
||||||
|
namedseason: List of named seasons
|
||||||
|
trailer: Trailer URL
|
||||||
|
dateadded: Date when added to library
|
||||||
|
"""
|
||||||
|
|
||||||
|
# Required fields
|
||||||
|
title: str = Field(..., description="Show title", min_length=1)
|
||||||
|
|
||||||
|
# Basic information (optional)
|
||||||
|
originaltitle: Optional[str] = Field(None, description="Original title")
|
||||||
|
showtitle: Optional[str] = Field(None, description="Show title")
|
||||||
|
sorttitle: Optional[str] = Field(None, description="Sort title")
|
||||||
|
year: Optional[int] = Field(
|
||||||
|
None,
|
||||||
|
ge=1900,
|
||||||
|
le=2100,
|
||||||
|
description="Release year"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Plot and description
|
||||||
|
plot: Optional[str] = Field(None, description="Full plot description")
|
||||||
|
outline: Optional[str] = Field(None, description="Short plot summary")
|
||||||
|
tagline: Optional[str] = Field(None, description="Show tagline")
|
||||||
|
|
||||||
|
# Technical details
|
||||||
|
runtime: Optional[int] = Field(
|
||||||
|
None,
|
||||||
|
ge=0,
|
||||||
|
description="Episode runtime in minutes"
|
||||||
|
)
|
||||||
|
mpaa: Optional[str] = Field(None, description="Content rating")
|
||||||
|
fsk: Optional[str] = Field(
|
||||||
|
None,
|
||||||
|
description="German FSK rating (e.g., 'FSK 12', 'FSK 16')"
|
||||||
|
)
|
||||||
|
certification: Optional[str] = Field(
|
||||||
|
None,
|
||||||
|
description="Certification info"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Status and dates
|
||||||
|
premiered: Optional[str] = Field(
|
||||||
|
None,
|
||||||
|
description="Premiere date (YYYY-MM-DD)"
|
||||||
|
)
|
||||||
|
status: Optional[str] = Field(None, description="Show status")
|
||||||
|
dateadded: Optional[str] = Field(
|
||||||
|
None,
|
||||||
|
description="Date added to library"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Multi-value fields
|
||||||
|
studio: List[str] = Field(
|
||||||
|
default_factory=list,
|
||||||
|
description="Production studios"
|
||||||
|
)
|
||||||
|
genre: List[str] = Field(
|
||||||
|
default_factory=list,
|
||||||
|
description="Genres"
|
||||||
|
)
|
||||||
|
country: List[str] = Field(
|
||||||
|
default_factory=list,
|
||||||
|
description="Countries"
|
||||||
|
)
|
||||||
|
tag: List[str] = Field(
|
||||||
|
default_factory=list,
|
||||||
|
description="Tags/keywords"
|
||||||
|
)
|
||||||
|
|
||||||
|
# IDs
|
||||||
|
tmdbid: Optional[int] = Field(None, description="TMDB ID")
|
||||||
|
imdbid: Optional[str] = Field(None, description="IMDB ID")
|
||||||
|
tvdbid: Optional[int] = Field(None, description="TVDB ID")
|
||||||
|
uniqueid: List[UniqueID] = Field(
|
||||||
|
default_factory=list,
|
||||||
|
description="Unique IDs"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Ratings and viewing info
|
||||||
|
ratings: List[RatingInfo] = Field(
|
||||||
|
default_factory=list,
|
||||||
|
description="Ratings"
|
||||||
|
)
|
||||||
|
userrating: Optional[float] = Field(
|
||||||
|
None,
|
||||||
|
ge=0,
|
||||||
|
le=10,
|
||||||
|
description="User rating"
|
||||||
|
)
|
||||||
|
watched: bool = Field(False, description="Watched status")
|
||||||
|
playcount: Optional[int] = Field(
|
||||||
|
None,
|
||||||
|
ge=0,
|
||||||
|
description="Play count"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Media
|
||||||
|
thumb: List[ImageInfo] = Field(
|
||||||
|
default_factory=list,
|
||||||
|
description="Thumbnail images"
|
||||||
|
)
|
||||||
|
fanart: List[ImageInfo] = Field(
|
||||||
|
default_factory=list,
|
||||||
|
description="Fanart images"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Cast and crew
|
||||||
|
actors: List[ActorInfo] = Field(
|
||||||
|
default_factory=list,
|
||||||
|
description="Cast members"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Seasons
|
||||||
|
namedseason: List[NamedSeason] = Field(
|
||||||
|
default_factory=list,
|
||||||
|
description="Named seasons"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Additional
|
||||||
|
trailer: Optional[HttpUrl] = Field(None, description="Trailer URL")
|
||||||
|
|
||||||
|
@field_validator('premiered')
|
||||||
|
@classmethod
|
||||||
|
def validate_premiered_date(cls, v: Optional[str]) -> Optional[str]:
|
||||||
|
"""Validate premiered date format (YYYY-MM-DD)."""
|
||||||
|
if v is None:
|
||||||
|
return v
|
||||||
|
|
||||||
|
# Check format strictly: YYYY-MM-DD
|
||||||
|
if len(v) != 10 or v[4] != '-' or v[7] != '-':
|
||||||
|
raise ValueError(
|
||||||
|
"Premiered date must be in YYYY-MM-DD format"
|
||||||
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
datetime.strptime(v, '%Y-%m-%d')
|
||||||
|
except ValueError as exc:
|
||||||
|
raise ValueError(
|
||||||
|
"Premiered date must be in YYYY-MM-DD format"
|
||||||
|
) from exc
|
||||||
|
|
||||||
|
return v
|
||||||
|
|
||||||
|
@field_validator('dateadded')
|
||||||
|
@classmethod
|
||||||
|
def validate_dateadded(cls, v: Optional[str]) -> Optional[str]:
|
||||||
|
"""Validate dateadded format (YYYY-MM-DD HH:MM:SS)."""
|
||||||
|
if v is None:
|
||||||
|
return v
|
||||||
|
|
||||||
|
# Check format strictly: YYYY-MM-DD HH:MM:SS
|
||||||
|
if len(v) != 19 or v[4] != '-' or v[7] != '-' or v[10] != ' ' or v[13] != ':' or v[16] != ':':
|
||||||
|
raise ValueError(
|
||||||
|
"Dateadded must be in YYYY-MM-DD HH:MM:SS format"
|
||||||
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
datetime.strptime(v, '%Y-%m-%d %H:%M:%S')
|
||||||
|
except ValueError as exc:
|
||||||
|
raise ValueError(
|
||||||
|
"Dateadded must be in YYYY-MM-DD HH:MM:SS format"
|
||||||
|
) from exc
|
||||||
|
|
||||||
|
return v
|
||||||
|
|
||||||
|
@field_validator('imdbid')
|
||||||
|
@classmethod
|
||||||
|
def validate_imdbid(cls, v: Optional[str]) -> Optional[str]:
|
||||||
|
"""Validate IMDB ID format (should start with 'tt')."""
|
||||||
|
if v is None:
|
||||||
|
return v
|
||||||
|
|
||||||
|
if not v.startswith('tt'):
|
||||||
|
raise ValueError("IMDB ID must start with 'tt'")
|
||||||
|
|
||||||
|
if not v[2:].isdigit():
|
||||||
|
raise ValueError("IMDB ID must be 'tt' followed by digits")
|
||||||
|
|
||||||
|
return v
|
||||||
|
|
||||||
|
def model_post_init(self, __context) -> None:
|
||||||
|
"""Set default values after initialization."""
|
||||||
|
# Set showtitle to title if not provided
|
||||||
|
if self.showtitle is None:
|
||||||
|
self.showtitle = self.title
|
||||||
|
|
||||||
|
# Set originaltitle to title if not provided
|
||||||
|
if self.originaltitle is None:
|
||||||
|
self.originaltitle = self.title
|
||||||
@@ -1,23 +1,97 @@
|
|||||||
import json
|
import json
|
||||||
|
import logging
|
||||||
|
import os
|
||||||
|
import warnings
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from src.server.utils.filesystem import sanitize_folder_name
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
class Serie:
|
class Serie:
|
||||||
def __init__(self, key: str, name: str, site: str, folder: str, episodeDict: dict[int, list[int]]):
|
"""
|
||||||
self._key = key
|
Represents an anime series with metadata and episode information.
|
||||||
|
|
||||||
|
The `key` property is the unique identifier for the series
|
||||||
|
(provider-assigned, URL-safe).
|
||||||
|
The `folder` property is the filesystem folder name
|
||||||
|
(metadata only, not used for lookups).
|
||||||
|
|
||||||
|
Args:
|
||||||
|
key: Unique series identifier from provider
|
||||||
|
(e.g., "attack-on-titan"). Cannot be empty.
|
||||||
|
name: Display name of the series
|
||||||
|
site: Provider site URL
|
||||||
|
folder: Filesystem folder name (metadata only,
|
||||||
|
e.g., "Attack on Titan (2013)")
|
||||||
|
episodeDict: Dictionary mapping season numbers to
|
||||||
|
lists of episode numbers
|
||||||
|
year: Release year of the series (optional)
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
ValueError: If key is None or empty string
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
key: str,
|
||||||
|
name: str,
|
||||||
|
site: str,
|
||||||
|
folder: str,
|
||||||
|
episodeDict: dict[int, list[int]],
|
||||||
|
year: int | None = None,
|
||||||
|
nfo_path: Optional[str] = None
|
||||||
|
):
|
||||||
|
if not key or not key.strip():
|
||||||
|
raise ValueError("Serie key cannot be None or empty")
|
||||||
|
|
||||||
|
self._key = key.strip()
|
||||||
self._name = name
|
self._name = name
|
||||||
self._site = site
|
self._site = site
|
||||||
self._folder = folder
|
self._folder = folder
|
||||||
self._episodeDict = episodeDict
|
self._episodeDict = episodeDict
|
||||||
|
self._year = year
|
||||||
|
self._nfo_path = nfo_path
|
||||||
|
|
||||||
def __str__(self):
|
def __str__(self):
|
||||||
"""String representation of Serie object"""
|
"""String representation of Serie object"""
|
||||||
return f"Serie(key='{self.key}', name='{self.name}', site='{self.site}', folder='{self.folder}', episodeDict={self.episodeDict})"
|
year_str = f", year={self.year}" if self.year else ""
|
||||||
|
return (
|
||||||
|
f"Serie(key='{self.key}', name='{self.name}', "
|
||||||
|
f"site='{self.site}', folder='{self.folder}', "
|
||||||
|
f"episodeDict={self.episodeDict}{year_str})"
|
||||||
|
)
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def key(self) -> str:
|
def key(self) -> str:
|
||||||
|
"""
|
||||||
|
Unique series identifier (primary identifier for all lookups).
|
||||||
|
|
||||||
|
This is the provider-assigned, URL-safe identifier used
|
||||||
|
throughout the application for series identification,
|
||||||
|
lookups, and operations.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
str: The unique series key
|
||||||
|
"""
|
||||||
return self._key
|
return self._key
|
||||||
|
|
||||||
@key.setter
|
@key.setter
|
||||||
def key(self, value: str):
|
def key(self, value: str):
|
||||||
self._key = value
|
"""
|
||||||
|
Set the unique series identifier.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
value: New key value
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
ValueError: If value is None or empty string
|
||||||
|
"""
|
||||||
|
if not value or not value.strip():
|
||||||
|
raise ValueError("Serie key cannot be None or empty")
|
||||||
|
self._key = value.strip()
|
||||||
|
|
||||||
@property
|
@property
|
||||||
def name(self) -> str:
|
def name(self) -> str:
|
||||||
@@ -37,10 +111,26 @@ class Serie:
|
|||||||
|
|
||||||
@property
|
@property
|
||||||
def folder(self) -> str:
|
def folder(self) -> str:
|
||||||
|
"""
|
||||||
|
Filesystem folder name (metadata only, not used for lookups).
|
||||||
|
|
||||||
|
This property contains the local directory name where the series
|
||||||
|
files are stored. It should NOT be used as an identifier for
|
||||||
|
series lookups - use `key` instead.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
str: The filesystem folder name
|
||||||
|
"""
|
||||||
return self._folder
|
return self._folder
|
||||||
|
|
||||||
@folder.setter
|
@folder.setter
|
||||||
def folder(self, value: str):
|
def folder(self, value: str):
|
||||||
|
"""
|
||||||
|
Set the filesystem folder name.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
value: Folder name for the series
|
||||||
|
"""
|
||||||
self._folder = value
|
self._folder = value
|
||||||
|
|
||||||
@property
|
@property
|
||||||
@@ -51,6 +141,188 @@ class Serie:
|
|||||||
def episodeDict(self, value: dict[int, list[int]]):
|
def episodeDict(self, value: dict[int, list[int]]):
|
||||||
self._episodeDict = value
|
self._episodeDict = value
|
||||||
|
|
||||||
|
@property
|
||||||
|
def year(self) -> int | None:
|
||||||
|
"""
|
||||||
|
Release year of the series.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
int or None: The year the series was released, or None if unknown
|
||||||
|
"""
|
||||||
|
return self._year
|
||||||
|
|
||||||
|
@year.setter
|
||||||
|
def year(self, value: int | None):
|
||||||
|
"""Set the release year of the series."""
|
||||||
|
self._year = value
|
||||||
|
|
||||||
|
@property
|
||||||
|
def nfo_path(self) -> Optional[str]:
|
||||||
|
"""
|
||||||
|
Path to the tvshow.nfo metadata file.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
str or None: Path to the NFO file, or None if not set
|
||||||
|
"""
|
||||||
|
return self._nfo_path
|
||||||
|
|
||||||
|
@nfo_path.setter
|
||||||
|
def nfo_path(self, value: Optional[str]):
|
||||||
|
"""Set the path to the NFO file."""
|
||||||
|
self._nfo_path = value
|
||||||
|
|
||||||
|
def has_nfo(self, base_directory: Optional[str] = None) -> bool:
|
||||||
|
"""
|
||||||
|
Check if tvshow.nfo file exists for this series.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
base_directory: Base anime directory path. If provided, checks
|
||||||
|
relative to base_directory/folder/tvshow.nfo. If not provided,
|
||||||
|
uses nfo_path directly.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: True if tvshow.nfo exists, False otherwise
|
||||||
|
"""
|
||||||
|
if base_directory:
|
||||||
|
nfo_file = Path(base_directory) / self.folder / "tvshow.nfo"
|
||||||
|
elif self._nfo_path:
|
||||||
|
nfo_file = Path(self._nfo_path)
|
||||||
|
else:
|
||||||
|
return False
|
||||||
|
|
||||||
|
return nfo_file.exists() and nfo_file.is_file()
|
||||||
|
|
||||||
|
def has_poster(self, base_directory: Optional[str] = None) -> bool:
|
||||||
|
"""
|
||||||
|
Check if poster.jpg file exists for this series.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
base_directory: Base anime directory path. If provided, checks
|
||||||
|
relative to base_directory/folder/poster.jpg.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: True if poster.jpg exists, False otherwise
|
||||||
|
"""
|
||||||
|
if not base_directory:
|
||||||
|
return False
|
||||||
|
|
||||||
|
poster_file = Path(base_directory) / self.folder / "poster.jpg"
|
||||||
|
return poster_file.exists() and poster_file.is_file()
|
||||||
|
|
||||||
|
def has_logo(self, base_directory: Optional[str] = None) -> bool:
|
||||||
|
"""
|
||||||
|
Check if logo.png file exists for this series.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
base_directory: Base anime directory path. If provided, checks
|
||||||
|
relative to base_directory/folder/logo.png.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: True if logo.png exists, False otherwise
|
||||||
|
"""
|
||||||
|
if not base_directory:
|
||||||
|
return False
|
||||||
|
|
||||||
|
logo_file = Path(base_directory) / self.folder / "logo.png"
|
||||||
|
return logo_file.exists() and logo_file.is_file()
|
||||||
|
|
||||||
|
def has_fanart(self, base_directory: Optional[str] = None) -> bool:
|
||||||
|
"""
|
||||||
|
Check if fanart.jpg file exists for this series.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
base_directory: Base anime directory path. If provided, checks
|
||||||
|
relative to base_directory/folder/fanart.jpg.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: True if fanart.jpg exists, False otherwise
|
||||||
|
"""
|
||||||
|
if not base_directory:
|
||||||
|
return False
|
||||||
|
|
||||||
|
fanart_file = Path(base_directory) / self.folder / "fanart.jpg"
|
||||||
|
return fanart_file.exists() and fanart_file.is_file()
|
||||||
|
|
||||||
|
@property
|
||||||
|
def name_with_year(self) -> str:
|
||||||
|
"""
|
||||||
|
Get the series name with year appended if available.
|
||||||
|
|
||||||
|
Returns a name in the format "Name (Year)" if year is available,
|
||||||
|
otherwise returns just the name. This should be used for creating
|
||||||
|
filesystem folders to distinguish series with the same name.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
str: Name with year in format "Name (Year)", or just name if no year
|
||||||
|
|
||||||
|
Example:
|
||||||
|
>>> serie = Serie("dororo", "Dororo", ..., year=2025)
|
||||||
|
>>> serie.name_with_year
|
||||||
|
'Dororo (2025)'
|
||||||
|
"""
|
||||||
|
if self._year:
|
||||||
|
return f"{self._name} ({self._year})"
|
||||||
|
return self._name
|
||||||
|
|
||||||
|
@property
|
||||||
|
def sanitized_folder(self) -> str:
|
||||||
|
"""
|
||||||
|
Get a filesystem-safe folder name derived from the display name with year.
|
||||||
|
|
||||||
|
This property returns a sanitized version of the series name with year
|
||||||
|
(if available) suitable for use as a filesystem folder name. It removes/
|
||||||
|
replaces characters that are invalid for filesystems while preserving
|
||||||
|
Unicode characters.
|
||||||
|
|
||||||
|
Use this property when creating folders for the series on disk.
|
||||||
|
The `folder` property stores the actual folder name used.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
str: Filesystem-safe folder name based on display name with year
|
||||||
|
|
||||||
|
Example:
|
||||||
|
>>> serie = Serie("attack-on-titan", "Attack on Titan: Final", ..., year=2025)
|
||||||
|
>>> serie.sanitized_folder
|
||||||
|
'Attack on Titan Final (2025)'
|
||||||
|
"""
|
||||||
|
# Use name_with_year if available, fall back to folder, then key
|
||||||
|
name_to_sanitize = self.name_with_year or self._folder or self._key
|
||||||
|
try:
|
||||||
|
return sanitize_folder_name(name_to_sanitize)
|
||||||
|
except ValueError:
|
||||||
|
# Fallback to key if name cannot be sanitized
|
||||||
|
return sanitize_folder_name(self._key)
|
||||||
|
|
||||||
|
def ensure_folder_with_year(self) -> str:
|
||||||
|
"""Ensure folder name includes year if available.
|
||||||
|
|
||||||
|
If the serie has a year and the current folder name doesn't include it,
|
||||||
|
updates the folder name to include the year in format "Name (Year)".
|
||||||
|
|
||||||
|
This method should be called before creating folders or NFO files to
|
||||||
|
ensure consistent naming across the application.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
str: The folder name (updated if needed)
|
||||||
|
|
||||||
|
Example:
|
||||||
|
>>> serie = Serie("perfect-blue", "Perfect Blue", ..., folder="Perfect Blue", year=1997)
|
||||||
|
>>> serie.ensure_folder_with_year()
|
||||||
|
'Perfect Blue (1997)'
|
||||||
|
>>> serie.folder # folder property is updated
|
||||||
|
'Perfect Blue (1997)'
|
||||||
|
"""
|
||||||
|
if self._year:
|
||||||
|
# Check if folder already has year format
|
||||||
|
year_pattern = f"({self._year})"
|
||||||
|
if year_pattern not in self._folder:
|
||||||
|
# Update folder to include year
|
||||||
|
self._folder = self.sanitized_folder
|
||||||
|
logger.info(
|
||||||
|
f"Updated folder name for '{self._key}' to include year: {self._folder}"
|
||||||
|
)
|
||||||
|
return self._folder
|
||||||
|
|
||||||
def to_dict(self):
|
def to_dict(self):
|
||||||
"""Convert Serie object to dictionary for JSON serialization."""
|
"""Convert Serie object to dictionary for JSON serialization."""
|
||||||
return {
|
return {
|
||||||
@@ -58,25 +330,71 @@ class Serie:
|
|||||||
"name": self.name,
|
"name": self.name,
|
||||||
"site": self.site,
|
"site": self.site,
|
||||||
"folder": self.folder,
|
"folder": self.folder,
|
||||||
"episodeDict": {str(k): list(v) for k, v in self.episodeDict.items()}
|
"episodeDict": {
|
||||||
|
str(k): list(v) for k, v in self.episodeDict.items()
|
||||||
|
},
|
||||||
|
"year": self.year,
|
||||||
|
"nfo_path": self.nfo_path
|
||||||
}
|
}
|
||||||
|
|
||||||
@staticmethod
|
@staticmethod
|
||||||
def from_dict(data: dict):
|
def from_dict(data: dict):
|
||||||
"""Create a Serie object from dictionary."""
|
"""Create a Serie object from dictionary."""
|
||||||
episode_dict = {int(k): v for k, v in data["episodeDict"].items()} # Convert keys to int
|
# Convert keys to int
|
||||||
return Serie(data["key"], data["name"], data["site"], data["folder"], episode_dict)
|
episode_dict = {
|
||||||
|
int(k): v for k, v in data["episodeDict"].items()
|
||||||
|
}
|
||||||
|
return Serie(
|
||||||
|
data["key"],
|
||||||
|
data["name"],
|
||||||
|
data["site"],
|
||||||
|
data["folder"],
|
||||||
|
episode_dict,
|
||||||
|
data.get("year"), # Optional year field for backward compatibility
|
||||||
|
data.get("nfo_path") # Optional nfo_path field
|
||||||
|
)
|
||||||
|
|
||||||
def save_to_file(self, filename: str):
|
def save_to_file(self, filename: str):
|
||||||
"""Save Serie object to JSON file."""
|
"""Save Serie object to JSON file.
|
||||||
with open(filename, "w") as file:
|
|
||||||
json.dump(self.to_dict(), file, indent=4)
|
|
||||||
|
|
||||||
|
.. deprecated::
|
||||||
|
File-based storage is deprecated. Use database storage via
|
||||||
|
`AnimeSeriesService.create()` instead. This method will be
|
||||||
|
removed in v3.0.0.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
filename: Path to save the JSON file
|
||||||
|
"""
|
||||||
|
warnings.warn(
|
||||||
|
"save_to_file() is deprecated and will be removed in v3.0.0. "
|
||||||
|
"Use database storage via AnimeSeriesService.create() instead.",
|
||||||
|
DeprecationWarning,
|
||||||
|
stacklevel=2
|
||||||
|
)
|
||||||
|
with open(filename, "w", encoding="utf-8") as file:
|
||||||
|
json.dump(self.to_dict(), file, indent=4)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def load_from_file(cls, filename: str) -> "Serie":
|
def load_from_file(cls, filename: str) -> "Serie":
|
||||||
"""Load Serie object from JSON file."""
|
"""Load Serie object from JSON file.
|
||||||
with open(filename, "r") as file:
|
|
||||||
|
.. deprecated::
|
||||||
|
File-based storage is deprecated. Use database storage via
|
||||||
|
`AnimeSeriesService.get_by_key()` instead. This method will be
|
||||||
|
removed in v3.0.0.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
filename: Path to load the JSON file from
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Serie: The loaded Serie object
|
||||||
|
"""
|
||||||
|
warnings.warn(
|
||||||
|
"load_from_file() is deprecated and will be removed in v3.0.0. "
|
||||||
|
"Use database storage via AnimeSeriesService instead.",
|
||||||
|
DeprecationWarning,
|
||||||
|
stacklevel=2
|
||||||
|
)
|
||||||
|
with open(filename, "r", encoding="utf-8") as file:
|
||||||
data = json.load(file)
|
data = json.load(file)
|
||||||
return cls.from_dict(data)
|
return cls.from_dict(data)
|
||||||
149
src/core/error_handler.py
Normal file
149
src/core/error_handler.py
Normal file
@@ -0,0 +1,149 @@
|
|||||||
|
"""
|
||||||
|
Error handling and recovery strategies for core providers.
|
||||||
|
|
||||||
|
This module provides custom exceptions and decorators for handling
|
||||||
|
errors in provider operations with automatic retry mechanisms.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import functools
|
||||||
|
import logging
|
||||||
|
from typing import Any, Callable, TypeVar
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
# Type variable for decorator
|
||||||
|
F = TypeVar("F", bound=Callable[..., Any])
|
||||||
|
|
||||||
|
|
||||||
|
class RetryableError(Exception):
|
||||||
|
"""Exception that indicates an operation can be safely retried."""
|
||||||
|
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class NonRetryableError(Exception):
|
||||||
|
"""Exception that indicates an operation should not be retried."""
|
||||||
|
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class NetworkError(Exception):
|
||||||
|
"""Exception for network-related errors."""
|
||||||
|
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class DownloadError(Exception):
|
||||||
|
"""Exception for download-related errors."""
|
||||||
|
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class RecoveryStrategies:
|
||||||
|
"""Strategies for handling errors and recovering from failures."""
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def handle_network_failure(
|
||||||
|
func: Callable, *args: Any, **kwargs: Any
|
||||||
|
) -> Any:
|
||||||
|
"""Handle network failures with basic retry logic."""
|
||||||
|
max_retries = 3
|
||||||
|
for attempt in range(max_retries):
|
||||||
|
try:
|
||||||
|
return func(*args, **kwargs)
|
||||||
|
except (NetworkError, ConnectionError):
|
||||||
|
if attempt == max_retries - 1:
|
||||||
|
raise
|
||||||
|
logger.warning(
|
||||||
|
f"Network error on attempt {attempt + 1}, retrying..."
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def handle_download_failure(
|
||||||
|
func: Callable, *args: Any, **kwargs: Any
|
||||||
|
) -> Any:
|
||||||
|
"""Handle download failures with retry logic."""
|
||||||
|
max_retries = 2
|
||||||
|
for attempt in range(max_retries):
|
||||||
|
try:
|
||||||
|
return func(*args, **kwargs)
|
||||||
|
except DownloadError:
|
||||||
|
if attempt == max_retries - 1:
|
||||||
|
raise
|
||||||
|
logger.warning(
|
||||||
|
f"Download error on attempt {attempt + 1}, retrying..."
|
||||||
|
)
|
||||||
|
continue
|
||||||
|
|
||||||
|
|
||||||
|
class FileCorruptionDetector:
|
||||||
|
"""Detector for corrupted files."""
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def is_valid_video_file(filepath: str) -> bool:
|
||||||
|
"""Check if a video file is valid and not corrupted."""
|
||||||
|
try:
|
||||||
|
import os
|
||||||
|
if not os.path.exists(filepath):
|
||||||
|
return False
|
||||||
|
|
||||||
|
file_size = os.path.getsize(filepath)
|
||||||
|
# Video files should be at least 1MB
|
||||||
|
return file_size > 1024 * 1024
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error checking file validity: {e}")
|
||||||
|
return False
|
||||||
|
|
||||||
|
|
||||||
|
def with_error_recovery(
|
||||||
|
max_retries: int = 3, context: str = ""
|
||||||
|
) -> Callable[[F], F]:
|
||||||
|
"""
|
||||||
|
Decorator for adding error recovery to functions.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
max_retries: Maximum number of retry attempts
|
||||||
|
context: Context string for logging
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Decorated function with retry logic
|
||||||
|
"""
|
||||||
|
|
||||||
|
def decorator(func: F) -> F:
|
||||||
|
@functools.wraps(func)
|
||||||
|
def wrapper(*args: Any, **kwargs: Any) -> Any:
|
||||||
|
last_error = None
|
||||||
|
for attempt in range(max_retries):
|
||||||
|
try:
|
||||||
|
return func(*args, **kwargs)
|
||||||
|
except NonRetryableError:
|
||||||
|
raise
|
||||||
|
except Exception as e:
|
||||||
|
last_error = e
|
||||||
|
if attempt < max_retries - 1:
|
||||||
|
logger.warning(
|
||||||
|
f"Error in {context} (attempt {attempt + 1}/"
|
||||||
|
f"{max_retries}): {e}, retrying..."
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
logger.error(
|
||||||
|
f"Error in {context} failed after {max_retries} "
|
||||||
|
f"attempts: {e}"
|
||||||
|
)
|
||||||
|
|
||||||
|
if last_error:
|
||||||
|
raise last_error
|
||||||
|
|
||||||
|
raise RuntimeError(
|
||||||
|
f"Unexpected error in {context} after {max_retries} attempts"
|
||||||
|
)
|
||||||
|
|
||||||
|
return wrapper # type: ignore
|
||||||
|
|
||||||
|
return decorator
|
||||||
|
|
||||||
|
|
||||||
|
# Create module-level instances for use in provider code
|
||||||
|
recovery_strategies = RecoveryStrategies()
|
||||||
|
file_corruption_detector = FileCorruptionDetector()
|
||||||
365
src/core/interfaces/callbacks.py
Normal file
365
src/core/interfaces/callbacks.py
Normal file
@@ -0,0 +1,365 @@
|
|||||||
|
"""
|
||||||
|
Progress callback interfaces for core operations.
|
||||||
|
|
||||||
|
This module defines clean interfaces for progress reporting, error handling,
|
||||||
|
and completion notifications across all core operations (scanning,
|
||||||
|
downloading).
|
||||||
|
"""
|
||||||
|
|
||||||
|
import logging
|
||||||
|
from abc import ABC, abstractmethod
|
||||||
|
from dataclasses import dataclass, field
|
||||||
|
from enum import Enum
|
||||||
|
from typing import Any, Dict, Optional
|
||||||
|
|
||||||
|
|
||||||
|
class OperationType(str, Enum):
|
||||||
|
"""Types of operations that can report progress."""
|
||||||
|
|
||||||
|
SCAN = "scan"
|
||||||
|
DOWNLOAD = "download"
|
||||||
|
SEARCH = "search"
|
||||||
|
INITIALIZATION = "initialization"
|
||||||
|
|
||||||
|
|
||||||
|
class ProgressPhase(str, Enum):
|
||||||
|
"""Phases of an operation's lifecycle."""
|
||||||
|
|
||||||
|
STARTING = "starting"
|
||||||
|
IN_PROGRESS = "in_progress"
|
||||||
|
COMPLETING = "completing"
|
||||||
|
COMPLETED = "completed"
|
||||||
|
FAILED = "failed"
|
||||||
|
CANCELLED = "cancelled"
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class ProgressContext:
|
||||||
|
"""
|
||||||
|
Complete context information for a progress update.
|
||||||
|
|
||||||
|
Attributes:
|
||||||
|
operation_type: Type of operation being performed
|
||||||
|
operation_id: Unique identifier for this operation
|
||||||
|
phase: Current phase of the operation
|
||||||
|
current: Current progress value (e.g., files processed)
|
||||||
|
total: Total progress value (e.g., total files)
|
||||||
|
percentage: Completion percentage (0.0 to 100.0)
|
||||||
|
message: Human-readable progress message
|
||||||
|
details: Additional context-specific details
|
||||||
|
key: Provider-assigned series identifier (None when not applicable)
|
||||||
|
folder: Optional folder metadata for display purposes only
|
||||||
|
metadata: Extra metadata for specialized use cases
|
||||||
|
"""
|
||||||
|
|
||||||
|
operation_type: OperationType
|
||||||
|
operation_id: str
|
||||||
|
phase: ProgressPhase
|
||||||
|
current: int
|
||||||
|
total: int
|
||||||
|
percentage: float
|
||||||
|
message: str
|
||||||
|
details: Optional[str] = None
|
||||||
|
key: Optional[str] = None
|
||||||
|
folder: Optional[str] = None
|
||||||
|
metadata: Dict[str, Any] = field(default_factory=dict)
|
||||||
|
|
||||||
|
def to_dict(self) -> Dict[str, Any]:
|
||||||
|
"""Convert to dictionary for serialization."""
|
||||||
|
return {
|
||||||
|
"operation_type": self.operation_type.value,
|
||||||
|
"operation_id": self.operation_id,
|
||||||
|
"phase": self.phase.value,
|
||||||
|
"current": self.current,
|
||||||
|
"total": self.total,
|
||||||
|
"percentage": round(self.percentage, 2),
|
||||||
|
"message": self.message,
|
||||||
|
"details": self.details,
|
||||||
|
"key": self.key,
|
||||||
|
"folder": self.folder,
|
||||||
|
"metadata": self.metadata,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class ErrorContext:
|
||||||
|
"""
|
||||||
|
Context information for error callbacks.
|
||||||
|
|
||||||
|
Attributes:
|
||||||
|
operation_type: Type of operation that failed
|
||||||
|
operation_id: Unique identifier for the operation
|
||||||
|
error: The exception that occurred
|
||||||
|
message: Human-readable error message
|
||||||
|
recoverable: Whether the error is recoverable
|
||||||
|
retry_count: Number of retry attempts made
|
||||||
|
key: Provider-assigned series identifier (None when not applicable)
|
||||||
|
folder: Optional folder metadata for display purposes only
|
||||||
|
metadata: Additional error context
|
||||||
|
"""
|
||||||
|
|
||||||
|
operation_type: OperationType
|
||||||
|
operation_id: str
|
||||||
|
error: Exception
|
||||||
|
message: str
|
||||||
|
recoverable: bool = False
|
||||||
|
retry_count: int = 0
|
||||||
|
key: Optional[str] = None
|
||||||
|
folder: Optional[str] = None
|
||||||
|
metadata: Dict[str, Any] = field(default_factory=dict)
|
||||||
|
|
||||||
|
def to_dict(self) -> Dict[str, Any]:
|
||||||
|
"""Convert to dictionary for serialization."""
|
||||||
|
return {
|
||||||
|
"operation_type": self.operation_type.value,
|
||||||
|
"operation_id": self.operation_id,
|
||||||
|
"error_type": type(self.error).__name__,
|
||||||
|
"error_message": str(self.error),
|
||||||
|
"message": self.message,
|
||||||
|
"recoverable": self.recoverable,
|
||||||
|
"retry_count": self.retry_count,
|
||||||
|
"key": self.key,
|
||||||
|
"folder": self.folder,
|
||||||
|
"metadata": self.metadata,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class CompletionContext:
|
||||||
|
"""
|
||||||
|
Context information for completion callbacks.
|
||||||
|
|
||||||
|
Attributes:
|
||||||
|
operation_type: Type of operation that completed
|
||||||
|
operation_id: Unique identifier for the operation
|
||||||
|
success: Whether the operation completed successfully
|
||||||
|
message: Human-readable completion message
|
||||||
|
result_data: Result data from the operation
|
||||||
|
statistics: Operation statistics (duration, items processed, etc.)
|
||||||
|
key: Provider-assigned series identifier (None when not applicable)
|
||||||
|
folder: Optional folder metadata for display purposes only
|
||||||
|
metadata: Additional completion context
|
||||||
|
"""
|
||||||
|
|
||||||
|
operation_type: OperationType
|
||||||
|
operation_id: str
|
||||||
|
success: bool
|
||||||
|
message: str
|
||||||
|
result_data: Optional[Any] = None
|
||||||
|
statistics: Dict[str, Any] = field(default_factory=dict)
|
||||||
|
key: Optional[str] = None
|
||||||
|
folder: Optional[str] = None
|
||||||
|
metadata: Dict[str, Any] = field(default_factory=dict)
|
||||||
|
|
||||||
|
def to_dict(self) -> Dict[str, Any]:
|
||||||
|
"""Convert to dictionary for serialization."""
|
||||||
|
return {
|
||||||
|
"operation_type": self.operation_type.value,
|
||||||
|
"operation_id": self.operation_id,
|
||||||
|
"success": self.success,
|
||||||
|
"message": self.message,
|
||||||
|
"statistics": self.statistics,
|
||||||
|
"key": self.key,
|
||||||
|
"folder": self.folder,
|
||||||
|
"metadata": self.metadata,
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
class ProgressCallback(ABC):
|
||||||
|
"""
|
||||||
|
Abstract base class for progress callbacks.
|
||||||
|
|
||||||
|
Implement this interface to receive progress updates from core operations.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def on_progress(self, context: ProgressContext) -> None:
|
||||||
|
"""
|
||||||
|
Called when progress is made in an operation.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
context: Complete progress context information
|
||||||
|
"""
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class ErrorCallback(ABC):
|
||||||
|
"""
|
||||||
|
Abstract base class for error callbacks.
|
||||||
|
|
||||||
|
Implement this interface to receive error notifications from core
|
||||||
|
operations.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def on_error(self, context: ErrorContext) -> None:
|
||||||
|
"""
|
||||||
|
Called when an error occurs during an operation.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
context: Complete error context information
|
||||||
|
"""
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class CompletionCallback(ABC):
|
||||||
|
"""
|
||||||
|
Abstract base class for completion callbacks.
|
||||||
|
|
||||||
|
Implement this interface to receive completion notifications from
|
||||||
|
core operations.
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def on_completion(self, context: CompletionContext) -> None:
|
||||||
|
"""
|
||||||
|
Called when an operation completes (successfully or not).
|
||||||
|
|
||||||
|
Args:
|
||||||
|
context: Complete completion context information
|
||||||
|
"""
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class CallbackManager:
|
||||||
|
"""
|
||||||
|
Manages multiple callbacks for an operation.
|
||||||
|
|
||||||
|
This class allows registering multiple progress, error, and completion
|
||||||
|
callbacks and dispatching events to all registered callbacks.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
"""Initialize the callback manager."""
|
||||||
|
self._progress_callbacks: list[ProgressCallback] = []
|
||||||
|
self._error_callbacks: list[ErrorCallback] = []
|
||||||
|
self._completion_callbacks: list[CompletionCallback] = []
|
||||||
|
|
||||||
|
def register_progress_callback(self, callback: ProgressCallback) -> None:
|
||||||
|
"""
|
||||||
|
Register a progress callback.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
callback: Progress callback to register
|
||||||
|
"""
|
||||||
|
if callback not in self._progress_callbacks:
|
||||||
|
self._progress_callbacks.append(callback)
|
||||||
|
|
||||||
|
def register_error_callback(self, callback: ErrorCallback) -> None:
|
||||||
|
"""
|
||||||
|
Register an error callback.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
callback: Error callback to register
|
||||||
|
"""
|
||||||
|
if callback not in self._error_callbacks:
|
||||||
|
self._error_callbacks.append(callback)
|
||||||
|
|
||||||
|
def register_completion_callback(
|
||||||
|
self,
|
||||||
|
callback: CompletionCallback
|
||||||
|
) -> None:
|
||||||
|
"""
|
||||||
|
Register a completion callback.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
callback: Completion callback to register
|
||||||
|
"""
|
||||||
|
if callback not in self._completion_callbacks:
|
||||||
|
self._completion_callbacks.append(callback)
|
||||||
|
|
||||||
|
def unregister_progress_callback(self, callback: ProgressCallback) -> None:
|
||||||
|
"""
|
||||||
|
Unregister a progress callback.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
callback: Progress callback to unregister
|
||||||
|
"""
|
||||||
|
if callback in self._progress_callbacks:
|
||||||
|
self._progress_callbacks.remove(callback)
|
||||||
|
|
||||||
|
def unregister_error_callback(self, callback: ErrorCallback) -> None:
|
||||||
|
"""
|
||||||
|
Unregister an error callback.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
callback: Error callback to unregister
|
||||||
|
"""
|
||||||
|
if callback in self._error_callbacks:
|
||||||
|
self._error_callbacks.remove(callback)
|
||||||
|
|
||||||
|
def unregister_completion_callback(
|
||||||
|
self,
|
||||||
|
callback: CompletionCallback
|
||||||
|
) -> None:
|
||||||
|
"""
|
||||||
|
Unregister a completion callback.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
callback: Completion callback to unregister
|
||||||
|
"""
|
||||||
|
if callback in self._completion_callbacks:
|
||||||
|
self._completion_callbacks.remove(callback)
|
||||||
|
|
||||||
|
def notify_progress(self, context: ProgressContext) -> None:
|
||||||
|
"""
|
||||||
|
Notify all registered progress callbacks.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
context: Progress context to send
|
||||||
|
"""
|
||||||
|
for callback in self._progress_callbacks:
|
||||||
|
try:
|
||||||
|
callback.on_progress(context)
|
||||||
|
except Exception as e:
|
||||||
|
# Log but don't let callback errors break the operation
|
||||||
|
logging.error(
|
||||||
|
"Error in progress callback %s: %s",
|
||||||
|
callback,
|
||||||
|
e,
|
||||||
|
exc_info=True
|
||||||
|
)
|
||||||
|
|
||||||
|
def notify_error(self, context: ErrorContext) -> None:
|
||||||
|
"""
|
||||||
|
Notify all registered error callbacks.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
context: Error context to send
|
||||||
|
"""
|
||||||
|
for callback in self._error_callbacks:
|
||||||
|
try:
|
||||||
|
callback.on_error(context)
|
||||||
|
except Exception as e:
|
||||||
|
# Log but don't let callback errors break the operation
|
||||||
|
logging.error(
|
||||||
|
"Error in error callback %s: %s",
|
||||||
|
callback,
|
||||||
|
e,
|
||||||
|
exc_info=True
|
||||||
|
)
|
||||||
|
|
||||||
|
def notify_completion(self, context: CompletionContext) -> None:
|
||||||
|
"""
|
||||||
|
Notify all registered completion callbacks.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
context: Completion context to send
|
||||||
|
"""
|
||||||
|
for callback in self._completion_callbacks:
|
||||||
|
try:
|
||||||
|
callback.on_completion(context)
|
||||||
|
except Exception as e:
|
||||||
|
# Log but don't let callback errors break the operation
|
||||||
|
logging.error(
|
||||||
|
"Error in completion callback %s: %s",
|
||||||
|
callback,
|
||||||
|
e,
|
||||||
|
exc_info=True
|
||||||
|
)
|
||||||
|
|
||||||
|
def clear_all_callbacks(self) -> None:
|
||||||
|
"""Clear all registered callbacks."""
|
||||||
|
self._progress_callbacks.clear()
|
||||||
|
self._error_callbacks.clear()
|
||||||
|
self._completion_callbacks.clear()
|
||||||
@@ -1,7 +1,6 @@
|
|||||||
|
|
||||||
|
from ..providers.streaming.Provider import Provider
|
||||||
from server.infrastructure.providers.streaming.Provider import Provider
|
from ..providers.streaming.voe import VOE
|
||||||
from server.infrastructure.providers.streaming.voe import VOE
|
|
||||||
|
|
||||||
class Providers:
|
class Providers:
|
||||||
|
|
||||||
|
|||||||
@@ -1,108 +1,191 @@
|
|||||||
|
|
||||||
|
import html
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
import logging
|
import shutil
|
||||||
import json
|
import threading
|
||||||
import requests
|
from pathlib import Path
|
||||||
import html
|
|
||||||
from urllib.parse import quote
|
from urllib.parse import quote
|
||||||
|
|
||||||
|
import requests
|
||||||
from bs4 import BeautifulSoup
|
from bs4 import BeautifulSoup
|
||||||
|
from events import Events
|
||||||
from fake_useragent import UserAgent
|
from fake_useragent import UserAgent
|
||||||
from requests.adapters import HTTPAdapter
|
from requests.adapters import HTTPAdapter
|
||||||
from urllib3.util.retry import Retry
|
from urllib3.util.retry import Retry
|
||||||
|
|
||||||
from server.infrastructure.providers.base_provider import Loader
|
|
||||||
from server.core.interfaces.providers import Providers
|
|
||||||
from yt_dlp import YoutubeDL
|
from yt_dlp import YoutubeDL
|
||||||
import shutil
|
from yt_dlp.utils import DownloadCancelled
|
||||||
|
|
||||||
# Read timeout from environment variable, default to 600 seconds (10 minutes)
|
from ..interfaces.providers import Providers
|
||||||
timeout = int(os.getenv("DOWNLOAD_TIMEOUT", 600))
|
from .base_provider import Loader
|
||||||
|
|
||||||
|
|
||||||
|
def _cleanup_temp_file(temp_path: str) -> None:
|
||||||
|
"""Clean up a temp file and any associated partial download files.
|
||||||
|
|
||||||
|
Removes the temp file itself and any yt-dlp partial files
|
||||||
|
(e.g. ``<name>.part``) that may have been left behind.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
temp_path: Absolute or relative path to the temp file.
|
||||||
|
"""
|
||||||
|
paths_to_remove = [temp_path]
|
||||||
|
# yt-dlp writes partial fragments to <file>.part
|
||||||
|
paths_to_remove.extend(
|
||||||
|
str(p) for p in Path(temp_path).parent.glob(
|
||||||
|
Path(temp_path).name + ".*"
|
||||||
|
)
|
||||||
|
)
|
||||||
|
for path in paths_to_remove:
|
||||||
|
if os.path.exists(path):
|
||||||
|
try:
|
||||||
|
os.remove(path)
|
||||||
|
logging.debug(f"Removed temp file: {path}")
|
||||||
|
except OSError as exc:
|
||||||
|
logging.warning(f"Failed to remove temp file {path}: {exc}")
|
||||||
|
|
||||||
|
# Imported shared provider configuration
|
||||||
|
from .provider_config import (
|
||||||
|
ANIWORLD_HEADERS,
|
||||||
|
DEFAULT_DOWNLOAD_TIMEOUT,
|
||||||
|
DEFAULT_PROVIDERS,
|
||||||
|
INVALID_PATH_CHARS,
|
||||||
|
LULUVDO_USER_AGENT,
|
||||||
|
ProviderType,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Configure persistent loggers but don't add duplicate handlers when module
|
||||||
|
# is imported multiple times (common in test environments).
|
||||||
|
# Use absolute paths for log files to prevent security issues
|
||||||
|
|
||||||
|
# Determine project root (assuming this file is in src/core/providers/)
|
||||||
|
_module_dir = Path(__file__).parent
|
||||||
|
_project_root = _module_dir.parent.parent.parent
|
||||||
|
_logs_dir = _project_root / "logs"
|
||||||
|
|
||||||
|
# Ensure logs directory exists
|
||||||
|
_logs_dir.mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
download_error_logger = logging.getLogger("DownloadErrors")
|
download_error_logger = logging.getLogger("DownloadErrors")
|
||||||
download_error_handler = logging.FileHandler("../../download_errors.log")
|
if not download_error_logger.handlers:
|
||||||
|
log_path = _logs_dir / "download_errors.log"
|
||||||
|
download_error_handler = logging.FileHandler(str(log_path))
|
||||||
download_error_handler.setLevel(logging.ERROR)
|
download_error_handler.setLevel(logging.ERROR)
|
||||||
|
download_error_logger.addHandler(download_error_handler)
|
||||||
|
|
||||||
|
noKeyFound_logger = logging.getLogger()
|
||||||
|
|
||||||
noKeyFound_logger = logging.getLogger("NoKeyFound")
|
|
||||||
noKeyFound_handler = logging.FileHandler("../../NoKeyFound.log")
|
|
||||||
noKeyFound_handler.setLevel(logging.ERROR)
|
|
||||||
|
|
||||||
class AniworldLoader(Loader):
|
class AniworldLoader(Loader):
|
||||||
def __init__(self):
|
def __init__(self) -> None:
|
||||||
self.SUPPORTED_PROVIDERS = ["VOE", "Doodstream", "Vidmoly", "Vidoza", "SpeedFiles", "Streamtape", "Luluvdo"]
|
self.SUPPORTED_PROVIDERS = DEFAULT_PROVIDERS
|
||||||
self.AniworldHeaders = {
|
# Copy default AniWorld headers so modifications remain local
|
||||||
"accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8",
|
self.AniworldHeaders = dict(ANIWORLD_HEADERS)
|
||||||
"accept-encoding": "gzip, deflate, br, zstd",
|
self.INVALID_PATH_CHARS = INVALID_PATH_CHARS
|
||||||
"accept-language": "de,de-DE;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6",
|
|
||||||
"cache-control": "max-age=0",
|
|
||||||
"priority": "u=0, i",
|
|
||||||
"sec-ch-ua": '"Chromium";v="136", "Microsoft Edge";v="136", "Not.A/Brand";v="99"',
|
|
||||||
"sec-ch-ua-mobile": "?0",
|
|
||||||
"sec-ch-ua-platform": '"Windows"',
|
|
||||||
"sec-fetch-dest": "document",
|
|
||||||
"sec-fetch-mode": "navigate",
|
|
||||||
"sec-fetch-site": "none",
|
|
||||||
"sec-fetch-user": "?1",
|
|
||||||
"upgrade-insecure-requests": "1",
|
|
||||||
"user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36 Edg/136.0.0.0"
|
|
||||||
}
|
|
||||||
self.INVALID_PATH_CHARS = ['<', '>', ':', '"', '/', '\\', '|', '?', '*', '&']
|
|
||||||
self.RANDOM_USER_AGENT = UserAgent().random
|
self.RANDOM_USER_AGENT = UserAgent().random
|
||||||
self.LULUVDO_USER_AGENT = "Mozilla/5.0 (Android 15; Mobile; rv:132.0) Gecko/132.0 Firefox/132.0"
|
self.LULUVDO_USER_AGENT = LULUVDO_USER_AGENT
|
||||||
self.PROVIDER_HEADERS = {
|
self.PROVIDER_HEADERS = {
|
||||||
"Vidmoly": ['Referer: "https://vidmoly.to"'],
|
ProviderType.VIDMOLY.value: ['Referer: "https://vidmoly.to"'],
|
||||||
"Doodstream": ['Referer: "https://dood.li/"'],
|
ProviderType.DOODSTREAM.value: ['Referer: "https://dood.li/"'],
|
||||||
"VOE": [f'User-Agent: {self.RANDOM_USER_AGENT}'],
|
ProviderType.VOE.value: [f"User-Agent: {self.RANDOM_USER_AGENT}"],
|
||||||
"Luluvdo": [
|
ProviderType.LULUVDO.value: [
|
||||||
f'User-Agent: {self.LULUVDO_USER_AGENT}',
|
f"User-Agent: {self.LULUVDO_USER_AGENT}",
|
||||||
'Accept-Language: de-DE,de;q=0.9,en-US;q=0.8,en;q=0.7',
|
"Accept-Language: de-DE,de;q=0.9,en-US;q=0.8,en;q=0.7",
|
||||||
'Origin: "https://luluvdo.com"',
|
'Origin: "https://luluvdo.com"',
|
||||||
'Referer: "https://luluvdo.com/"'
|
'Referer: "https://luluvdo.com/"',
|
||||||
]}
|
],
|
||||||
|
}
|
||||||
self.ANIWORLD_TO = "https://aniworld.to"
|
self.ANIWORLD_TO = "https://aniworld.to"
|
||||||
self.session = requests.Session()
|
self.session = requests.Session()
|
||||||
|
|
||||||
|
# Cancellation flag for graceful shutdown
|
||||||
|
self._cancel_flag = threading.Event()
|
||||||
|
|
||||||
# Configure retries with backoff
|
# Configure retries with backoff
|
||||||
retries = Retry(
|
retries = Retry(
|
||||||
total=5, # Number of retries
|
total=5, # Number of retries
|
||||||
backoff_factor=1, # Delay multiplier (1s, 2s, 4s, ...)
|
backoff_factor=1, # Delay multiplier (1s, 2s, 4s, ...)
|
||||||
status_forcelist=[500, 502, 503, 504], # Retry for specific HTTP errors
|
status_forcelist=[500, 502, 503, 504],
|
||||||
allowed_methods=["GET"]
|
allowed_methods=["GET"]
|
||||||
)
|
)
|
||||||
|
|
||||||
adapter = HTTPAdapter(max_retries=retries)
|
adapter = HTTPAdapter(max_retries=retries)
|
||||||
self.session.mount("https://", adapter)
|
self.session.mount("https://", adapter)
|
||||||
self.DEFAULT_REQUEST_TIMEOUT = 30
|
# Default HTTP request timeout used for requests.Session calls.
|
||||||
|
# Allows overriding via DOWNLOAD_TIMEOUT env var at runtime.
|
||||||
|
self.DEFAULT_REQUEST_TIMEOUT = int(
|
||||||
|
os.getenv("DOWNLOAD_TIMEOUT") or DEFAULT_DOWNLOAD_TIMEOUT
|
||||||
|
)
|
||||||
|
|
||||||
self._KeyHTMLDict = {}
|
self._KeyHTMLDict = {}
|
||||||
self._EpisodeHTMLDict = {}
|
self._EpisodeHTMLDict = {}
|
||||||
self.Providers = Providers()
|
self.Providers = Providers()
|
||||||
|
|
||||||
def ClearCache(self):
|
# Events: download_progress is triggered with progress dict
|
||||||
|
self.events = Events()
|
||||||
|
|
||||||
|
def subscribe_download_progress(self, handler):
|
||||||
|
"""Subscribe a handler to the download_progress event.
|
||||||
|
Args:
|
||||||
|
handler: Callable to be called with progress dict.
|
||||||
|
"""
|
||||||
|
self.events.download_progress += handler
|
||||||
|
|
||||||
|
def unsubscribe_download_progress(self, handler):
|
||||||
|
"""Unsubscribe a handler from the download_progress event.
|
||||||
|
Args:
|
||||||
|
handler: Callable previously subscribed.
|
||||||
|
"""
|
||||||
|
self.events.download_progress -= handler
|
||||||
|
|
||||||
|
def clear_cache(self):
|
||||||
|
"""Clear the cached HTML data."""
|
||||||
|
logging.debug("Clearing HTML cache")
|
||||||
self._KeyHTMLDict = {}
|
self._KeyHTMLDict = {}
|
||||||
self._EpisodeHTMLDict = {}
|
self._EpisodeHTMLDict = {}
|
||||||
|
logging.debug("HTML cache cleared successfully")
|
||||||
|
|
||||||
def RemoveFromCache(self):
|
def remove_from_cache(self):
|
||||||
|
"""Remove episode HTML from cache."""
|
||||||
|
logging.debug("Removing episode HTML from cache")
|
||||||
self._EpisodeHTMLDict = {}
|
self._EpisodeHTMLDict = {}
|
||||||
|
logging.debug("Episode HTML cache cleared")
|
||||||
|
|
||||||
def Search(self, word: str) -> list:
|
def search(self, word: str) -> list:
|
||||||
search_url = f"{self.ANIWORLD_TO}/ajax/seriesSearch?keyword={quote(word)}"
|
"""Search for anime series.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
word: Search term
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
List of found series
|
||||||
|
"""
|
||||||
|
logging.info(f"Searching for anime with keyword: '{word}'")
|
||||||
|
search_url = (
|
||||||
|
f"{self.ANIWORLD_TO}/ajax/seriesSearch?keyword={quote(word)}"
|
||||||
|
)
|
||||||
|
logging.debug(f"Search URL: {search_url}")
|
||||||
anime_list = self.fetch_anime_list(search_url)
|
anime_list = self.fetch_anime_list(search_url)
|
||||||
|
logging.info(f"Found {len(anime_list)} anime series for keyword '{word}'")
|
||||||
|
|
||||||
return anime_list
|
return anime_list
|
||||||
|
|
||||||
|
|
||||||
def fetch_anime_list(self, url: str) -> list:
|
def fetch_anime_list(self, url: str) -> list:
|
||||||
|
logging.debug(f"Fetching anime list from URL: {url}")
|
||||||
response = self.session.get(url, timeout=self.DEFAULT_REQUEST_TIMEOUT)
|
response = self.session.get(url, timeout=self.DEFAULT_REQUEST_TIMEOUT)
|
||||||
response.raise_for_status()
|
response.raise_for_status()
|
||||||
|
logging.debug(f"Response status code: {response.status_code}")
|
||||||
|
|
||||||
clean_text = response.text.strip()
|
clean_text = response.text.strip()
|
||||||
|
|
||||||
try:
|
try:
|
||||||
decoded_data = json.loads(html.unescape(clean_text))
|
decoded_data = json.loads(html.unescape(clean_text))
|
||||||
|
logging.debug(f"Successfully decoded JSON data on first attempt")
|
||||||
return decoded_data if isinstance(decoded_data, list) else []
|
return decoded_data if isinstance(decoded_data, list) else []
|
||||||
except json.JSONDecodeError:
|
except json.JSONDecodeError:
|
||||||
|
logging.warning("Initial JSON decode failed, attempting cleanup")
|
||||||
try:
|
try:
|
||||||
# Remove BOM and problematic characters
|
# Remove BOM and problematic characters
|
||||||
clean_text = clean_text.encode('utf-8').decode('utf-8-sig')
|
clean_text = clean_text.encode('utf-8').decode('utf-8-sig')
|
||||||
@@ -110,29 +193,45 @@ class AniworldLoader(Loader):
|
|||||||
clean_text = re.sub(r'[\x00-\x1F\x7F-\x9F]', '', clean_text)
|
clean_text = re.sub(r'[\x00-\x1F\x7F-\x9F]', '', clean_text)
|
||||||
# Parse the new text
|
# Parse the new text
|
||||||
decoded_data = json.loads(clean_text)
|
decoded_data = json.loads(clean_text)
|
||||||
|
logging.debug("Successfully decoded JSON after cleanup")
|
||||||
return decoded_data if isinstance(decoded_data, list) else []
|
return decoded_data if isinstance(decoded_data, list) else []
|
||||||
except (requests.RequestException, json.JSONDecodeError) as exc:
|
except (requests.RequestException, json.JSONDecodeError) as exc:
|
||||||
|
logging.error(f"Failed to decode anime list from {url}: {exc}")
|
||||||
raise ValueError("Could not get valid anime: ") from exc
|
raise ValueError("Could not get valid anime: ") from exc
|
||||||
|
|
||||||
def _GetLanguageKey(self, language: str) -> int:
|
def _get_language_key(self, language: str) -> int:
|
||||||
languageCode = 0
|
"""Convert language name to language code.
|
||||||
if (language == "German Dub"):
|
|
||||||
languageCode = 1
|
|
||||||
if (language == "English Sub"):
|
|
||||||
languageCode = 2
|
|
||||||
if (language == "German Sub"):
|
|
||||||
languageCode = 3
|
|
||||||
return languageCode
|
|
||||||
def IsLanguage(self, season: int, episode: int, key: str, language: str = "German Dub") -> bool:
|
|
||||||
"""
|
|
||||||
Language Codes:
|
Language Codes:
|
||||||
1: German Dub
|
1: German Dub
|
||||||
2: English Sub
|
2: English Sub
|
||||||
3: German Sub
|
3: German Sub
|
||||||
"""
|
"""
|
||||||
languageCode = self._GetLanguageKey(language)
|
language_code = 0
|
||||||
|
if language == "German Dub":
|
||||||
|
language_code = 1
|
||||||
|
if language == "English Sub":
|
||||||
|
language_code = 2
|
||||||
|
if language == "German Sub":
|
||||||
|
language_code = 3
|
||||||
|
logging.debug(f"Converted language '{language}' to code {language_code}")
|
||||||
|
return language_code
|
||||||
|
|
||||||
episode_soup = BeautifulSoup(self._GetEpisodeHTML(season, episode, key).content, 'html.parser')
|
def is_language(
|
||||||
|
self,
|
||||||
|
season: int,
|
||||||
|
episode: int,
|
||||||
|
key: str,
|
||||||
|
language: str = "German Dub"
|
||||||
|
) -> bool:
|
||||||
|
"""Check if episode is available in specified language."""
|
||||||
|
logging.debug(f"Checking if S{season:02}E{episode:03} ({key}) is available in {language}")
|
||||||
|
language_code = self._get_language_key(language)
|
||||||
|
|
||||||
|
episode_soup = BeautifulSoup(
|
||||||
|
self._get_episode_html(season, episode, key).content,
|
||||||
|
'html.parser'
|
||||||
|
)
|
||||||
change_language_box_div = episode_soup.find(
|
change_language_box_div = episode_soup.find(
|
||||||
'div', class_='changeLanguageBox')
|
'div', class_='changeLanguageBox')
|
||||||
languages = []
|
languages = []
|
||||||
@@ -144,12 +243,42 @@ class AniworldLoader(Loader):
|
|||||||
if lang_key and lang_key.isdigit():
|
if lang_key and lang_key.isdigit():
|
||||||
languages.append(int(lang_key))
|
languages.append(int(lang_key))
|
||||||
|
|
||||||
return languageCode in languages
|
is_available = language_code in languages
|
||||||
|
logging.debug(f"Available languages for S{season:02}E{episode:03}: {languages}, requested: {language_code}, available: {is_available}")
|
||||||
|
return is_available
|
||||||
|
|
||||||
def Download(self, baseDirectory: str, serieFolder: str, season: int, episode: int, key: str, language: str = "German Dub", progress_callback: callable = None) -> bool:
|
def download(
|
||||||
sanitized_anime_title = ''.join(
|
self,
|
||||||
char for char in self.GetTitle(key) if char not in self.INVALID_PATH_CHARS
|
base_directory: str,
|
||||||
|
serie_folder: str,
|
||||||
|
season: int,
|
||||||
|
episode: int,
|
||||||
|
key: str,
|
||||||
|
language: str = "German Dub"
|
||||||
|
) -> bool:
|
||||||
|
"""Download episode to specified directory.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
base_directory: Base download directory path
|
||||||
|
serie_folder: Filesystem folder name (metadata only, used for
|
||||||
|
file path construction)
|
||||||
|
season: Season number
|
||||||
|
episode: Episode number
|
||||||
|
key: Series unique identifier from provider (used for
|
||||||
|
identification and API calls)
|
||||||
|
language: Audio language preference (default: German Dub)
|
||||||
|
Returns:
|
||||||
|
bool: True if download succeeded, False otherwise
|
||||||
|
"""
|
||||||
|
logging.info(
|
||||||
|
f"Starting download for S{season:02}E{episode:03} "
|
||||||
|
f"({key}) in {language}"
|
||||||
)
|
)
|
||||||
|
sanitized_anime_title = ''.join(
|
||||||
|
char for char in self.get_title(key)
|
||||||
|
if char not in self.INVALID_PATH_CHARS
|
||||||
|
)
|
||||||
|
logging.debug(f"Sanitized anime title: {sanitized_anime_title}")
|
||||||
|
|
||||||
if season == 0:
|
if season == 0:
|
||||||
output_file = (
|
output_file = (
|
||||||
@@ -164,171 +293,391 @@ class AniworldLoader(Loader):
|
|||||||
f"({language}).mp4"
|
f"({language}).mp4"
|
||||||
)
|
)
|
||||||
|
|
||||||
folderPath = os.path.join(os.path.join(baseDirectory, serieFolder), f"Season {season}")
|
folder_path = os.path.join(
|
||||||
output_path = os.path.join(folderPath, output_file)
|
os.path.join(base_directory, serie_folder),
|
||||||
|
f"Season {season}"
|
||||||
|
)
|
||||||
|
output_path = os.path.join(folder_path, output_file)
|
||||||
|
logging.debug(f"Output path: {output_path}")
|
||||||
os.makedirs(os.path.dirname(output_path), exist_ok=True)
|
os.makedirs(os.path.dirname(output_path), exist_ok=True)
|
||||||
|
|
||||||
temp_dir = "./Temp/"
|
temp_dir = "./Temp/"
|
||||||
os.makedirs(os.path.dirname(temp_dir), exist_ok=True)
|
os.makedirs(os.path.dirname(temp_dir), exist_ok=True)
|
||||||
temp_Path = os.path.join(temp_dir, output_file)
|
temp_path = os.path.join(temp_dir, output_file)
|
||||||
|
logging.debug(f"Temporary path: {temp_path}")
|
||||||
|
|
||||||
for provider in self.SUPPORTED_PROVIDERS:
|
for provider in self.SUPPORTED_PROVIDERS:
|
||||||
link, header = self._get_direct_link_from_provider(season, episode, key, language)
|
logging.debug(f"Attempting download with provider: {provider}")
|
||||||
|
link, header = self._get_direct_link_from_provider(
|
||||||
|
season, episode, key, language
|
||||||
|
)
|
||||||
|
logging.debug("Direct link obtained from provider")
|
||||||
|
|
||||||
|
cancel_flag = self._cancel_flag
|
||||||
|
|
||||||
|
def events_progress_hook(d):
|
||||||
|
if cancel_flag.is_set():
|
||||||
|
logging.info("Cancellation detected in progress hook")
|
||||||
|
raise DownloadCancelled("Download cancelled by user")
|
||||||
|
# Fire the event for progress
|
||||||
|
self.events.download_progress(d)
|
||||||
|
|
||||||
ydl_opts = {
|
ydl_opts = {
|
||||||
'fragment_retries': float('inf'),
|
'fragment_retries': float('inf'),
|
||||||
'outtmpl': temp_Path,
|
'outtmpl': temp_path,
|
||||||
'quiet': True,
|
'quiet': True,
|
||||||
'no_warnings': True,
|
'no_warnings': True,
|
||||||
'progress_with_newline': False,
|
'progress_with_newline': False,
|
||||||
'nocheckcertificate': True,
|
'nocheckcertificate': True,
|
||||||
|
'progress_hooks': [events_progress_hook],
|
||||||
}
|
}
|
||||||
|
|
||||||
if header:
|
if header:
|
||||||
ydl_opts['http_headers'] = header
|
ydl_opts['http_headers'] = header
|
||||||
if progress_callback:
|
logging.debug("Using custom headers for download")
|
||||||
ydl_opts['progress_hooks'] = [progress_callback]
|
|
||||||
|
try:
|
||||||
|
logging.debug("Starting YoutubeDL download")
|
||||||
|
logging.debug(f"Download link: {link[:100]}...")
|
||||||
|
logging.debug(f"YDL options: {ydl_opts}")
|
||||||
|
|
||||||
with YoutubeDL(ydl_opts) as ydl:
|
with YoutubeDL(ydl_opts) as ydl:
|
||||||
ydl.download([link])
|
info = ydl.extract_info(link, download=True)
|
||||||
|
logging.debug(
|
||||||
|
f"Download info: "
|
||||||
|
f"title={info.get('title')}, "
|
||||||
|
f"filesize={info.get('filesize')}"
|
||||||
|
)
|
||||||
|
|
||||||
if (os.path.exists(temp_Path)):
|
if os.path.exists(temp_path):
|
||||||
shutil.copy(temp_Path, output_path)
|
logging.debug("Moving file from temp to final destination")
|
||||||
os.remove(temp_Path)
|
# Use copyfile instead of copy to avoid metadata permission issues
|
||||||
|
shutil.copyfile(temp_path, output_path)
|
||||||
|
os.remove(temp_path)
|
||||||
|
logging.info(
|
||||||
|
f"Download completed successfully: {output_file}"
|
||||||
|
)
|
||||||
|
self.clear_cache()
|
||||||
|
return True
|
||||||
|
else:
|
||||||
|
logging.error(
|
||||||
|
f"Download failed: temp file not found at {temp_path}"
|
||||||
|
)
|
||||||
|
self.clear_cache()
|
||||||
|
return False
|
||||||
|
except BrokenPipeError as e:
|
||||||
|
logging.error(
|
||||||
|
f"Broken pipe error with provider {provider}: {e}. "
|
||||||
|
f"This usually means the stream connection was closed."
|
||||||
|
)
|
||||||
|
_cleanup_temp_file(temp_path)
|
||||||
|
continue
|
||||||
|
except Exception as e:
|
||||||
|
logging.error(
|
||||||
|
f"YoutubeDL download failed with provider {provider}: "
|
||||||
|
f"{type(e).__name__}: {e}"
|
||||||
|
)
|
||||||
|
_cleanup_temp_file(temp_path)
|
||||||
|
continue
|
||||||
break
|
break
|
||||||
self.ClearCache()
|
|
||||||
|
|
||||||
|
# If we get here, all providers failed
|
||||||
|
logging.error("All download providers failed")
|
||||||
|
_cleanup_temp_file(temp_path)
|
||||||
|
self.clear_cache()
|
||||||
|
return False
|
||||||
|
|
||||||
def GetSiteKey(self) -> str:
|
def get_site_key(self) -> str:
|
||||||
|
"""Get the site key for this provider."""
|
||||||
return "aniworld.to"
|
return "aniworld.to"
|
||||||
|
|
||||||
def GetTitle(self, key: str) -> str:
|
def get_title(self, key: str) -> str:
|
||||||
soup = BeautifulSoup(self._GetKeyHTML(key).content, 'html.parser')
|
"""Get anime title from series key."""
|
||||||
|
logging.debug(f"Getting title for key: {key}")
|
||||||
|
soup = BeautifulSoup(
|
||||||
|
self._get_key_html(key).content,
|
||||||
|
'html.parser'
|
||||||
|
)
|
||||||
title_div = soup.find('div', class_='series-title')
|
title_div = soup.find('div', class_='series-title')
|
||||||
|
|
||||||
if title_div:
|
if title_div:
|
||||||
return title_div.find('h1').find('span').text
|
h1_tag = title_div.find('h1')
|
||||||
|
span_tag = h1_tag.find('span') if h1_tag else None
|
||||||
|
if span_tag:
|
||||||
|
title = span_tag.text
|
||||||
|
logging.debug(f"Found title: {title}")
|
||||||
|
return title
|
||||||
|
|
||||||
|
logging.warning(f"No title found for key: {key}")
|
||||||
return ""
|
return ""
|
||||||
|
|
||||||
def _GetKeyHTML(self, key: str):
|
def get_year(self, key: str) -> int | None:
|
||||||
|
"""Get anime release year from series key.
|
||||||
|
|
||||||
|
Attempts to extract the year from the series page metadata.
|
||||||
|
Returns None if year cannot be determined.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
key: Series identifier
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
int or None: Release year if found, None otherwise
|
||||||
|
"""
|
||||||
|
logging.debug(f"Getting year for key: {key}")
|
||||||
|
try:
|
||||||
|
soup = BeautifulSoup(
|
||||||
|
self._get_key_html(key).content,
|
||||||
|
'html.parser'
|
||||||
|
)
|
||||||
|
|
||||||
|
# Try to find year in metadata
|
||||||
|
# Check for "Jahr:" or similar metadata fields
|
||||||
|
for p_tag in soup.find_all('p'):
|
||||||
|
text = p_tag.get_text()
|
||||||
|
if 'Jahr:' in text or 'Year:' in text:
|
||||||
|
# Extract year from text like "Jahr: 2025"
|
||||||
|
match = re.search(r'(\d{4})', text)
|
||||||
|
if match:
|
||||||
|
year = int(match.group(1))
|
||||||
|
logging.debug(f"Found year in metadata: {year}")
|
||||||
|
return year
|
||||||
|
|
||||||
|
# Try alternative: look for year in genre/info section
|
||||||
|
info_div = soup.find('div', class_='series-info')
|
||||||
|
if info_div:
|
||||||
|
text = info_div.get_text()
|
||||||
|
match = re.search(r'\b(19\d{2}|20\d{2})\b', text)
|
||||||
|
if match:
|
||||||
|
year = int(match.group(1))
|
||||||
|
logging.debug(f"Found year in info section: {year}")
|
||||||
|
return year
|
||||||
|
|
||||||
|
logging.debug(f"No year found for key: {key}")
|
||||||
|
return None
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logging.warning(f"Error extracting year for key {key}: {e}")
|
||||||
|
return None
|
||||||
|
|
||||||
|
def _get_key_html(self, key: str):
|
||||||
|
"""Get cached HTML for series key.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
key: Series identifier (will be URL-encoded for safety)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Cached or fetched HTML response
|
||||||
|
"""
|
||||||
if key in self._KeyHTMLDict:
|
if key in self._KeyHTMLDict:
|
||||||
|
logging.debug(f"Using cached HTML for key: {key}")
|
||||||
return self._KeyHTMLDict[key]
|
return self._KeyHTMLDict[key]
|
||||||
|
|
||||||
|
# Sanitize key parameter for URL
|
||||||
|
safe_key = quote(key, safe='')
|
||||||
|
url = f"{self.ANIWORLD_TO}/anime/stream/{safe_key}"
|
||||||
|
logging.debug(f"Fetching HTML for key: {key} from {url}")
|
||||||
self._KeyHTMLDict[key] = self.session.get(
|
self._KeyHTMLDict[key] = self.session.get(
|
||||||
f"{self.ANIWORLD_TO}/anime/stream/{key}",
|
url,
|
||||||
timeout=self.DEFAULT_REQUEST_TIMEOUT
|
timeout=self.DEFAULT_REQUEST_TIMEOUT
|
||||||
)
|
)
|
||||||
|
logging.debug(f"Cached HTML for key: {key}")
|
||||||
return self._KeyHTMLDict[key]
|
return self._KeyHTMLDict[key]
|
||||||
def _GetEpisodeHTML(self, season: int, episode: int, key: str):
|
|
||||||
|
def _get_episode_html(self, season: int, episode: int, key: str):
|
||||||
|
"""Get cached HTML for episode.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
season: Season number (validated to be positive)
|
||||||
|
episode: Episode number (validated to be positive)
|
||||||
|
key: Series identifier (will be URL-encoded for safety)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Cached or fetched HTML response
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
ValueError: If season or episode are invalid
|
||||||
|
"""
|
||||||
|
# Validate season and episode numbers
|
||||||
|
if season < 1 or season > 999:
|
||||||
|
logging.error(f"Invalid season number: {season}")
|
||||||
|
raise ValueError(f"Invalid season number: {season}")
|
||||||
|
if episode < 1 or episode > 9999:
|
||||||
|
logging.error(f"Invalid episode number: {episode}")
|
||||||
|
raise ValueError(f"Invalid episode number: {episode}")
|
||||||
|
|
||||||
if key in self._EpisodeHTMLDict:
|
if key in self._EpisodeHTMLDict:
|
||||||
|
logging.debug(f"Using cached HTML for S{season:02}E{episode:03} ({key})")
|
||||||
return self._EpisodeHTMLDict[(key, season, episode)]
|
return self._EpisodeHTMLDict[(key, season, episode)]
|
||||||
|
|
||||||
|
# Sanitize key parameter for URL
|
||||||
|
safe_key = quote(key, safe='')
|
||||||
link = (
|
link = (
|
||||||
f"{self.ANIWORLD_TO}/anime/stream/{key}/"
|
f"{self.ANIWORLD_TO}/anime/stream/{safe_key}/"
|
||||||
f"staffel-{season}/episode-{episode}"
|
f"staffel-{season}/episode-{episode}"
|
||||||
)
|
)
|
||||||
|
logging.debug(f"Fetching episode HTML from: {link}")
|
||||||
html = self.session.get(link, timeout=self.DEFAULT_REQUEST_TIMEOUT)
|
html = self.session.get(link, timeout=self.DEFAULT_REQUEST_TIMEOUT)
|
||||||
self._EpisodeHTMLDict[(key, season, episode)] = html
|
self._EpisodeHTMLDict[(key, season, episode)] = html
|
||||||
|
logging.debug(f"Cached episode HTML for S{season:02}E{episode:03} ({key})")
|
||||||
return self._EpisodeHTMLDict[(key, season, episode)]
|
return self._EpisodeHTMLDict[(key, season, episode)]
|
||||||
|
|
||||||
def _get_provider_from_html(self, season: int, episode: int, key: str) -> dict:
|
def _get_provider_from_html(
|
||||||
"""
|
self,
|
||||||
Parses the HTML content to extract streaming providers,
|
season: int,
|
||||||
their language keys, and redirect links.
|
episode: int,
|
||||||
|
key: str
|
||||||
|
) -> dict:
|
||||||
|
"""Parse HTML content to extract streaming providers.
|
||||||
|
|
||||||
Returns a dictionary with provider names as keys
|
Returns a dictionary with provider names as keys
|
||||||
and language key-to-redirect URL mappings as values.
|
and language key-to-redirect URL mappings as values.
|
||||||
|
|
||||||
Example:
|
Example:
|
||||||
|
|
||||||
{
|
{
|
||||||
'VOE': {1: 'https://aniworld.to/redirect/1766412',
|
'VOE': {1: 'https://aniworld.to/redirect/1766412',
|
||||||
2: 'https://aniworld.to/redirect/1766405'},
|
2: 'https://aniworld.to/redirect/1766405'},
|
||||||
'Doodstream': {1: 'https://aniworld.to/redirect/1987922',
|
|
||||||
2: 'https://aniworld.to/redirect/2700342'},
|
|
||||||
...
|
|
||||||
}
|
}
|
||||||
|
|
||||||
Access redirect link with:
|
|
||||||
print(self.provider["VOE"][2])
|
|
||||||
"""
|
"""
|
||||||
|
logging.debug(f"Extracting providers from HTML for S{season:02}E{episode:03} ({key})")
|
||||||
soup = BeautifulSoup(self._GetEpisodeHTML(season, episode, key).content, 'html.parser')
|
soup = BeautifulSoup(
|
||||||
providers = {}
|
self._get_episode_html(season, episode, key).content,
|
||||||
|
'html.parser'
|
||||||
|
)
|
||||||
|
providers: dict[str, dict[int, str]] = {}
|
||||||
|
|
||||||
episode_links = soup.find_all(
|
episode_links = soup.find_all(
|
||||||
'li', class_=lambda x: x and x.startswith('episodeLink')
|
'li', class_=lambda x: x and x.startswith('episodeLink')
|
||||||
)
|
)
|
||||||
|
|
||||||
if not episode_links:
|
if not episode_links:
|
||||||
|
logging.warning(f"No episode links found for S{season:02}E{episode:03} ({key})")
|
||||||
return providers
|
return providers
|
||||||
|
|
||||||
for link in episode_links:
|
for link in episode_links:
|
||||||
provider_name_tag = link.find('h4')
|
provider_name_tag = link.find('h4')
|
||||||
provider_name = provider_name_tag.text.strip() if provider_name_tag else None
|
provider_name = (
|
||||||
|
provider_name_tag.text.strip()
|
||||||
|
if provider_name_tag else None
|
||||||
|
)
|
||||||
|
|
||||||
redirect_link_tag = link.find('a', class_='watchEpisode')
|
redirect_link_tag = link.find('a', class_='watchEpisode')
|
||||||
redirect_link = redirect_link_tag['href'] if redirect_link_tag else None
|
redirect_link = (
|
||||||
|
redirect_link_tag.get('href')
|
||||||
|
if redirect_link_tag else None
|
||||||
|
)
|
||||||
|
|
||||||
lang_key = link.get('data-lang-key')
|
lang_key = link.get('data-lang-key')
|
||||||
lang_key = int(
|
lang_key = (
|
||||||
lang_key) if lang_key and lang_key.isdigit() else None
|
int(lang_key)
|
||||||
|
if lang_key and lang_key.isdigit() else None
|
||||||
|
)
|
||||||
|
|
||||||
if provider_name and redirect_link and lang_key:
|
if provider_name and redirect_link and lang_key:
|
||||||
if provider_name not in providers:
|
if provider_name not in providers:
|
||||||
providers[provider_name] = {}
|
providers[provider_name] = {}
|
||||||
providers[provider_name][lang_key] = f"{self.ANIWORLD_TO}{redirect_link}"
|
providers[provider_name][lang_key] = (
|
||||||
|
f"{self.ANIWORLD_TO}{redirect_link}"
|
||||||
|
)
|
||||||
|
logging.debug(f"Found provider: {provider_name}, lang_key: {lang_key}")
|
||||||
|
|
||||||
|
logging.debug(f"Total providers found: {len(providers)}")
|
||||||
return providers
|
return providers
|
||||||
def _get_redirect_link(self, season: int, episode: int, key: str, language: str = "German Dub") -> str:
|
|
||||||
languageCode = self._GetLanguageKey(language)
|
def _get_redirect_link(
|
||||||
if (self.IsLanguage(season, episode, key, language)):
|
self,
|
||||||
for provider_name, lang_dict in self._get_provider_from_html(season, episode, key).items():
|
season: int,
|
||||||
if languageCode in lang_dict:
|
episode: int,
|
||||||
return(lang_dict[languageCode], provider_name)
|
key: str,
|
||||||
break
|
language: str = "German Dub"
|
||||||
|
):
|
||||||
|
"""Get redirect link for episode in specified language."""
|
||||||
|
logging.debug(f"Getting redirect link for S{season:02}E{episode:03} ({key}) in {language}")
|
||||||
|
language_code = self._get_language_key(language)
|
||||||
|
if self.is_language(season, episode, key, language):
|
||||||
|
for (provider_name, lang_dict) in (
|
||||||
|
self._get_provider_from_html(
|
||||||
|
season, episode, key
|
||||||
|
).items()
|
||||||
|
):
|
||||||
|
if language_code in lang_dict:
|
||||||
|
logging.debug(f"Found redirect link with provider: {provider_name}")
|
||||||
|
return (lang_dict[language_code], provider_name)
|
||||||
|
logging.warning(f"No redirect link found for S{season:02}E{episode:03} ({key}) in {language}")
|
||||||
return None
|
return None
|
||||||
def _get_embeded_link(self, season: int, episode: int, key: str, language: str = "German Dub"):
|
|
||||||
redirect_link, provider_name = self._get_redirect_link(season, episode, key, language)
|
def _get_embeded_link(
|
||||||
|
self,
|
||||||
|
season: int,
|
||||||
|
episode: int,
|
||||||
|
key: str,
|
||||||
|
language: str = "German Dub"
|
||||||
|
):
|
||||||
|
"""Get embedded link from redirect link."""
|
||||||
|
logging.debug(f"Getting embedded link for S{season:02}E{episode:03} ({key}) in {language}")
|
||||||
|
redirect_link, provider_name = (
|
||||||
|
self._get_redirect_link(season, episode, key, language)
|
||||||
|
)
|
||||||
|
logging.debug(f"Redirect link: {redirect_link}, provider: {provider_name}")
|
||||||
|
|
||||||
embeded_link = self.session.get(
|
embeded_link = self.session.get(
|
||||||
redirect_link, timeout=self.DEFAULT_REQUEST_TIMEOUT,
|
redirect_link,
|
||||||
headers={'User-Agent': self.RANDOM_USER_AGENT}).url
|
timeout=self.DEFAULT_REQUEST_TIMEOUT,
|
||||||
|
headers={'User-Agent': self.RANDOM_USER_AGENT}
|
||||||
|
).url
|
||||||
|
logging.debug(f"Embedded link: {embeded_link}")
|
||||||
return embeded_link
|
return embeded_link
|
||||||
def _get_direct_link_from_provider(self, season: int, episode: int, key: str, language: str = "German Dub") -> str:
|
|
||||||
"""
|
|
||||||
providers = {
|
|
||||||
"Vidmoly": get_direct_link_from_vidmoly,
|
|
||||||
"Vidoza": get_direct_link_from_vidoza,
|
|
||||||
"VOE": get_direct_link_from_voe,
|
|
||||||
"Doodstream": get_direct_link_from_doodstream,
|
|
||||||
"SpeedFiles": get_direct_link_from_speedfiles,
|
|
||||||
"Luluvdo": get_direct_link_from_luluvdo
|
|
||||||
}
|
|
||||||
|
|
||||||
"""
|
def _get_direct_link_from_provider(
|
||||||
embeded_link = self._get_embeded_link(season, episode, key, language)
|
self,
|
||||||
|
season: int,
|
||||||
|
episode: int,
|
||||||
|
key: str,
|
||||||
|
language: str = "German Dub"
|
||||||
|
):
|
||||||
|
"""Get direct download link from streaming provider."""
|
||||||
|
logging.debug(f"Getting direct link from provider for S{season:02}E{episode:03} ({key}) in {language}")
|
||||||
|
embeded_link = self._get_embeded_link(
|
||||||
|
season, episode, key, language
|
||||||
|
)
|
||||||
if embeded_link is None:
|
if embeded_link is None:
|
||||||
|
logging.error(f"No embedded link found for S{season:02}E{episode:03} ({key})")
|
||||||
return None
|
return None
|
||||||
|
|
||||||
return self.Providers.GetProvider("VOE").GetLink(embeded_link, self.DEFAULT_REQUEST_TIMEOUT)
|
logging.debug(f"Using VOE provider to extract direct link")
|
||||||
|
return self.Providers.GetProvider(
|
||||||
|
"VOE"
|
||||||
|
).get_link(embeded_link, self.DEFAULT_REQUEST_TIMEOUT)
|
||||||
|
|
||||||
def get_season_episode_count(self, slug: str) -> dict:
|
def get_season_episode_count(self, slug: str) -> dict:
|
||||||
base_url = f"{self.ANIWORLD_TO}/anime/stream/{slug}/"
|
"""Get episode count for each season.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
slug: Series identifier (will be URL-encoded for safety)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Dictionary mapping season numbers to episode counts
|
||||||
|
"""
|
||||||
|
logging.info(f"Getting season and episode count for slug: {slug}")
|
||||||
|
# Sanitize slug parameter for URL
|
||||||
|
safe_slug = quote(slug, safe='')
|
||||||
|
base_url = f"{self.ANIWORLD_TO}/anime/stream/{safe_slug}/"
|
||||||
|
logging.debug(f"Base URL: {base_url}")
|
||||||
response = requests.get(base_url, timeout=self.DEFAULT_REQUEST_TIMEOUT)
|
response = requests.get(base_url, timeout=self.DEFAULT_REQUEST_TIMEOUT)
|
||||||
soup = BeautifulSoup(response.content, 'html.parser')
|
soup = BeautifulSoup(response.content, 'html.parser')
|
||||||
|
|
||||||
season_meta = soup.find('meta', itemprop='numberOfSeasons')
|
season_meta = soup.find('meta', itemprop='numberOfSeasons')
|
||||||
number_of_seasons = int(season_meta['content']) if season_meta else 0
|
number_of_seasons = int(season_meta['content']) if season_meta else 0
|
||||||
|
logging.info(f"Found {number_of_seasons} seasons for '{slug}'")
|
||||||
|
|
||||||
episode_counts = {}
|
episode_counts = {}
|
||||||
|
|
||||||
for season in range(1, number_of_seasons + 1):
|
for season in range(1, number_of_seasons + 1):
|
||||||
season_url = f"{base_url}staffel-{season}"
|
season_url = f"{base_url}staffel-{season}"
|
||||||
response = requests.get(season_url, timeout=self.DEFAULT_REQUEST_TIMEOUT)
|
logging.debug(f"Fetching episodes for season {season} from: {season_url}")
|
||||||
|
response = requests.get(
|
||||||
|
season_url,
|
||||||
|
timeout=self.DEFAULT_REQUEST_TIMEOUT,
|
||||||
|
)
|
||||||
soup = BeautifulSoup(response.content, 'html.parser')
|
soup = BeautifulSoup(response.content, 'html.parser')
|
||||||
|
|
||||||
episode_links = soup.find_all('a', href=True)
|
episode_links = soup.find_all('a', href=True)
|
||||||
@@ -339,5 +688,7 @@ class AniworldLoader(Loader):
|
|||||||
)
|
)
|
||||||
|
|
||||||
episode_counts[season] = len(unique_links)
|
episode_counts[season] = len(unique_links)
|
||||||
|
logging.debug(f"Season {season} has {episode_counts[season]} episodes")
|
||||||
|
|
||||||
|
logging.info(f"Episode count retrieval complete for '{slug}': {episode_counts}")
|
||||||
return episode_counts
|
return episode_counts
|
||||||
|
|||||||
@@ -1,27 +1,104 @@
|
|||||||
from abc import ABC, abstractmethod
|
from abc import ABC, abstractmethod
|
||||||
|
from typing import Any, Dict, List
|
||||||
|
|
||||||
|
|
||||||
class Loader(ABC):
|
class Loader(ABC):
|
||||||
|
"""Abstract base class for anime data loaders/providers."""
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
def Search(self, word: str) -> list:
|
def subscribe_download_progress(self, handler):
|
||||||
pass
|
"""Subscribe a handler to the download_progress event.
|
||||||
|
Args:
|
||||||
|
handler: Callable to be called with progress dict.
|
||||||
|
"""
|
||||||
|
@abstractmethod
|
||||||
|
def unsubscribe_download_progress(self, handler):
|
||||||
|
"""Unsubscribe a handler from the download_progress event.
|
||||||
|
Args:
|
||||||
|
handler: Callable previously subscribed.
|
||||||
|
"""
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
def IsLanguage(self, season: int, episode: int, key: str, language: str = "German Dub") -> bool:
|
def search(self, word: str) -> List[Dict[str, Any]]:
|
||||||
pass
|
"""Search for anime series by name.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
word: Search term to look for
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
List of found series as dictionaries containing series information
|
||||||
|
"""
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
def Download(self, baseDirectory: str, serieFolder: str, season: int, episode: int, key: str, progress_callback: callable = None) -> bool:
|
def is_language(
|
||||||
pass
|
self,
|
||||||
|
season: int,
|
||||||
|
episode: int,
|
||||||
|
key: str,
|
||||||
|
language: str = "German Dub",
|
||||||
|
) -> bool:
|
||||||
|
"""Check if episode exists in specified language.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
season: Season number (1-indexed)
|
||||||
|
episode: Episode number (1-indexed)
|
||||||
|
key: Unique series identifier/key
|
||||||
|
language: Language to check (default: German Dub)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
True if episode exists in specified language, False otherwise
|
||||||
|
"""
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
def GetSiteKey(self) -> str:
|
def download(
|
||||||
pass
|
self,
|
||||||
|
base_directory: str,
|
||||||
|
serie_folder: str,
|
||||||
|
season: int,
|
||||||
|
episode: int,
|
||||||
|
key: str,
|
||||||
|
language: str = "German Dub"
|
||||||
|
) -> bool:
|
||||||
|
"""Download episode to specified directory.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
base_directory: Base directory for downloads
|
||||||
|
serie_folder: Series folder name within base directory
|
||||||
|
season: Season number (0 for movies, 1+ for series)
|
||||||
|
episode: Episode number within season
|
||||||
|
key: Unique series identifier/key
|
||||||
|
language: Language version to download (default: German Dub)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
True if download successful, False otherwise
|
||||||
|
"""
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
def GetTitle(self) -> str:
|
def get_site_key(self) -> str:
|
||||||
pass
|
"""Get the site key/identifier for this provider.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Site key string (e.g., 'aniworld.to', 'voe.com')
|
||||||
|
"""
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
def get_season_episode_count(self, slug: str) -> dict:
|
def get_title(self, key: str) -> str:
|
||||||
pass
|
"""Get the human-readable title of a series.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
key: Unique series identifier/key
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Series title string
|
||||||
|
"""
|
||||||
|
|
||||||
|
@abstractmethod
|
||||||
|
def get_season_episode_count(self, slug: str) -> Dict[int, int]:
|
||||||
|
"""Get season and episode counts for a series.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
slug: Series slug/key identifier
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Dictionary mapping season number (int) to episode count (int)
|
||||||
|
"""
|
||||||
|
|
||||||
|
|||||||
351
src/core/providers/config_manager.py
Normal file
351
src/core/providers/config_manager.py
Normal file
@@ -0,0 +1,351 @@
|
|||||||
|
"""Dynamic provider configuration management.
|
||||||
|
|
||||||
|
This module provides runtime configuration management for anime providers,
|
||||||
|
allowing dynamic updates without application restart.
|
||||||
|
"""
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
|
from dataclasses import asdict, dataclass
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, Dict, List, Optional
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class ProviderSettings:
|
||||||
|
"""Configuration settings for a single provider."""
|
||||||
|
|
||||||
|
name: str
|
||||||
|
enabled: bool = True
|
||||||
|
priority: int = 0
|
||||||
|
timeout_seconds: int = 30
|
||||||
|
max_retries: int = 3
|
||||||
|
retry_delay_seconds: float = 1.0
|
||||||
|
max_concurrent_downloads: int = 3
|
||||||
|
bandwidth_limit_mbps: Optional[float] = None
|
||||||
|
custom_headers: Optional[Dict[str, str]] = None
|
||||||
|
custom_params: Optional[Dict[str, Any]] = None
|
||||||
|
|
||||||
|
def to_dict(self) -> Dict[str, Any]:
|
||||||
|
"""Convert settings to dictionary."""
|
||||||
|
return {
|
||||||
|
k: v for k, v in asdict(self).items() if v is not None
|
||||||
|
}
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_dict(cls, data: Dict[str, Any]) -> "ProviderSettings":
|
||||||
|
"""Create settings from dictionary."""
|
||||||
|
return cls(**{k: v for k, v in data.items() if hasattr(cls, k)})
|
||||||
|
|
||||||
|
|
||||||
|
class ProviderConfigManager:
|
||||||
|
"""Manages dynamic configuration for anime providers."""
|
||||||
|
|
||||||
|
def __init__(self, config_file: Optional[Path] = None):
|
||||||
|
"""Initialize provider configuration manager.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
config_file: Path to configuration file (optional).
|
||||||
|
"""
|
||||||
|
self._config_file = config_file
|
||||||
|
self._provider_settings: Dict[str, ProviderSettings] = {}
|
||||||
|
self._global_settings: Dict[str, Any] = {
|
||||||
|
"default_timeout": 30,
|
||||||
|
"default_max_retries": 3,
|
||||||
|
"default_retry_delay": 1.0,
|
||||||
|
"enable_health_monitoring": True,
|
||||||
|
"enable_failover": True,
|
||||||
|
}
|
||||||
|
|
||||||
|
# Load configuration if file exists
|
||||||
|
if config_file and config_file.exists():
|
||||||
|
self.load_config()
|
||||||
|
|
||||||
|
logger.info("Provider configuration manager initialized")
|
||||||
|
|
||||||
|
def get_provider_settings(
|
||||||
|
self, provider_name: str
|
||||||
|
) -> Optional[ProviderSettings]:
|
||||||
|
"""Get settings for a specific provider.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
provider_name: Name of the provider.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Provider settings or None if not configured.
|
||||||
|
"""
|
||||||
|
return self._provider_settings.get(provider_name)
|
||||||
|
|
||||||
|
def set_provider_settings(
|
||||||
|
self, provider_name: str, settings: ProviderSettings
|
||||||
|
) -> None:
|
||||||
|
"""Set settings for a specific provider.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
provider_name: Name of the provider.
|
||||||
|
settings: Provider settings to apply.
|
||||||
|
"""
|
||||||
|
self._provider_settings[provider_name] = settings
|
||||||
|
logger.info(f"Updated settings for provider: {provider_name}")
|
||||||
|
|
||||||
|
def update_provider_settings(
|
||||||
|
self, provider_name: str, **kwargs
|
||||||
|
) -> bool:
|
||||||
|
"""Update specific provider settings.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
provider_name: Name of the provider.
|
||||||
|
**kwargs: Settings to update.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
True if updated, False if provider not found.
|
||||||
|
"""
|
||||||
|
if provider_name not in self._provider_settings:
|
||||||
|
# Create new settings
|
||||||
|
self._provider_settings[provider_name] = ProviderSettings(
|
||||||
|
name=provider_name, **kwargs
|
||||||
|
)
|
||||||
|
logger.info(f"Created new settings for provider: {provider_name}") # noqa: E501
|
||||||
|
return True
|
||||||
|
|
||||||
|
settings = self._provider_settings[provider_name]
|
||||||
|
|
||||||
|
# Update settings
|
||||||
|
for key, value in kwargs.items():
|
||||||
|
if hasattr(settings, key):
|
||||||
|
setattr(settings, key, value)
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
f"Updated settings for provider {provider_name}: {kwargs}"
|
||||||
|
)
|
||||||
|
return True
|
||||||
|
|
||||||
|
def get_all_provider_settings(self) -> Dict[str, ProviderSettings]:
|
||||||
|
"""Get settings for all configured providers.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Dictionary mapping provider names to their settings.
|
||||||
|
"""
|
||||||
|
return self._provider_settings.copy()
|
||||||
|
|
||||||
|
def get_enabled_providers(self) -> List[str]:
|
||||||
|
"""Get list of enabled providers.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
List of enabled provider names.
|
||||||
|
"""
|
||||||
|
return [
|
||||||
|
name
|
||||||
|
for name, settings in self._provider_settings.items()
|
||||||
|
if settings.enabled
|
||||||
|
]
|
||||||
|
|
||||||
|
def enable_provider(self, provider_name: str) -> bool:
|
||||||
|
"""Enable a provider.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
provider_name: Name of the provider.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
True if enabled, False if not found.
|
||||||
|
"""
|
||||||
|
if provider_name in self._provider_settings:
|
||||||
|
self._provider_settings[provider_name].enabled = True
|
||||||
|
logger.info(f"Enabled provider: {provider_name}")
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
def disable_provider(self, provider_name: str) -> bool:
|
||||||
|
"""Disable a provider.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
provider_name: Name of the provider.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
True if disabled, False if not found.
|
||||||
|
"""
|
||||||
|
if provider_name in self._provider_settings:
|
||||||
|
self._provider_settings[provider_name].enabled = False
|
||||||
|
logger.info(f"Disabled provider: {provider_name}")
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
def set_provider_priority(
|
||||||
|
self, provider_name: str, priority: int
|
||||||
|
) -> bool:
|
||||||
|
"""Set priority for a provider.
|
||||||
|
|
||||||
|
Lower priority values = higher priority.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
provider_name: Name of the provider.
|
||||||
|
priority: Priority value (lower = higher priority).
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
True if updated, False if not found.
|
||||||
|
"""
|
||||||
|
if provider_name in self._provider_settings:
|
||||||
|
self._provider_settings[provider_name].priority = priority
|
||||||
|
logger.info(
|
||||||
|
f"Set priority for {provider_name} to {priority}"
|
||||||
|
)
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
def get_providers_by_priority(self) -> List[str]:
|
||||||
|
"""Get providers sorted by priority.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
List of provider names sorted by priority (low to high).
|
||||||
|
"""
|
||||||
|
sorted_providers = sorted(
|
||||||
|
self._provider_settings.items(),
|
||||||
|
key=lambda x: x[1].priority,
|
||||||
|
)
|
||||||
|
return [name for name, _ in sorted_providers]
|
||||||
|
|
||||||
|
def get_global_setting(self, key: str) -> Optional[Any]:
|
||||||
|
"""Get a global setting value.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
key: Setting key.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Setting value or None if not found.
|
||||||
|
"""
|
||||||
|
return self._global_settings.get(key)
|
||||||
|
|
||||||
|
def set_global_setting(self, key: str, value: Any) -> None:
|
||||||
|
"""Set a global setting value.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
key: Setting key.
|
||||||
|
value: Setting value.
|
||||||
|
"""
|
||||||
|
self._global_settings[key] = value
|
||||||
|
logger.info(f"Updated global setting {key}: {value}")
|
||||||
|
|
||||||
|
def get_all_global_settings(self) -> Dict[str, Any]:
|
||||||
|
"""Get all global settings.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Dictionary of global settings.
|
||||||
|
"""
|
||||||
|
return self._global_settings.copy()
|
||||||
|
|
||||||
|
def load_config(self, file_path: Optional[Path] = None) -> bool:
|
||||||
|
"""Load configuration from file.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
file_path: Path to configuration file (uses default if None).
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
True if loaded successfully, False otherwise.
|
||||||
|
"""
|
||||||
|
config_path = file_path or self._config_file
|
||||||
|
if not config_path or not config_path.exists():
|
||||||
|
logger.warning(
|
||||||
|
f"Configuration file not found: {config_path}"
|
||||||
|
)
|
||||||
|
return False
|
||||||
|
|
||||||
|
try:
|
||||||
|
with open(config_path, "r", encoding="utf-8") as f:
|
||||||
|
data = json.load(f)
|
||||||
|
|
||||||
|
# Load provider settings
|
||||||
|
if "providers" in data:
|
||||||
|
for name, settings_data in data["providers"].items():
|
||||||
|
self._provider_settings[name] = (
|
||||||
|
ProviderSettings.from_dict(settings_data)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Load global settings
|
||||||
|
if "global" in data:
|
||||||
|
self._global_settings.update(data["global"])
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
f"Loaded configuration from {config_path} "
|
||||||
|
f"({len(self._provider_settings)} providers)"
|
||||||
|
)
|
||||||
|
return True
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(
|
||||||
|
f"Failed to load configuration from {config_path}: {e}",
|
||||||
|
exc_info=True,
|
||||||
|
)
|
||||||
|
return False
|
||||||
|
|
||||||
|
def save_config(self, file_path: Optional[Path] = None) -> bool:
|
||||||
|
"""Save configuration to file.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
file_path: Path to save to (uses default if None).
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
True if saved successfully, False otherwise.
|
||||||
|
"""
|
||||||
|
config_path = file_path or self._config_file
|
||||||
|
if not config_path:
|
||||||
|
logger.error("No configuration file path specified")
|
||||||
|
return False
|
||||||
|
|
||||||
|
try:
|
||||||
|
# Ensure parent directory exists
|
||||||
|
config_path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
|
data = {
|
||||||
|
"providers": {
|
||||||
|
name: settings.to_dict()
|
||||||
|
for name, settings in self._provider_settings.items()
|
||||||
|
},
|
||||||
|
"global": self._global_settings,
|
||||||
|
}
|
||||||
|
|
||||||
|
with open(config_path, "w", encoding="utf-8") as f:
|
||||||
|
json.dump(data, f, indent=2)
|
||||||
|
|
||||||
|
logger.info(f"Saved configuration to {config_path}")
|
||||||
|
return True
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(
|
||||||
|
f"Failed to save configuration to {config_path}: {e}",
|
||||||
|
exc_info=True,
|
||||||
|
)
|
||||||
|
return False
|
||||||
|
|
||||||
|
def reset_to_defaults(self) -> None:
|
||||||
|
"""Reset all settings to defaults."""
|
||||||
|
self._provider_settings.clear()
|
||||||
|
self._global_settings = {
|
||||||
|
"default_timeout": 30,
|
||||||
|
"default_max_retries": 3,
|
||||||
|
"default_retry_delay": 1.0,
|
||||||
|
"enable_health_monitoring": True,
|
||||||
|
"enable_failover": True,
|
||||||
|
}
|
||||||
|
logger.info("Reset configuration to defaults")
|
||||||
|
|
||||||
|
|
||||||
|
# Global configuration manager instance
|
||||||
|
_config_manager: Optional[ProviderConfigManager] = None
|
||||||
|
|
||||||
|
|
||||||
|
def get_config_manager(
|
||||||
|
config_file: Optional[Path] = None,
|
||||||
|
) -> ProviderConfigManager:
|
||||||
|
"""Get or create global provider configuration manager.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
config_file: Configuration file path (used on first call).
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Global ProviderConfigManager instance.
|
||||||
|
"""
|
||||||
|
global _config_manager
|
||||||
|
if _config_manager is None:
|
||||||
|
_config_manager = ProviderConfigManager(config_file=config_file)
|
||||||
|
return _config_manager
|
||||||
@@ -5,76 +5,97 @@ This module extends the original AniWorldLoader with comprehensive
|
|||||||
error handling, retry mechanisms, and recovery strategies.
|
error handling, retry mechanisms, and recovery strategies.
|
||||||
"""
|
"""
|
||||||
|
|
||||||
|
import html
|
||||||
|
import json
|
||||||
|
import logging
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
import logging
|
import shutil
|
||||||
import json
|
from pathlib import Path
|
||||||
import requests
|
from typing import Any, Callable, Dict, Optional
|
||||||
import html
|
|
||||||
from urllib.parse import quote
|
from urllib.parse import quote
|
||||||
import time
|
|
||||||
import hashlib
|
|
||||||
from typing import Optional, Dict, Any, Callable
|
|
||||||
|
|
||||||
|
import requests
|
||||||
from bs4 import BeautifulSoup
|
from bs4 import BeautifulSoup
|
||||||
from fake_useragent import UserAgent
|
from fake_useragent import UserAgent
|
||||||
from requests.adapters import HTTPAdapter
|
from requests.adapters import HTTPAdapter
|
||||||
from urllib3.util.retry import Retry
|
from urllib3.util.retry import Retry
|
||||||
from yt_dlp import YoutubeDL
|
from yt_dlp import YoutubeDL
|
||||||
import shutil
|
|
||||||
|
|
||||||
from server.infrastructure.providers.base_provider import Loader
|
from ...infrastructure.security.file_integrity import get_integrity_manager
|
||||||
from server.core.interfaces.providers import Providers
|
from ..error_handler import (
|
||||||
from error_handler import (
|
|
||||||
with_error_recovery,
|
|
||||||
recovery_strategies,
|
|
||||||
NetworkError,
|
|
||||||
DownloadError,
|
DownloadError,
|
||||||
RetryableError,
|
NetworkError,
|
||||||
NonRetryableError,
|
NonRetryableError,
|
||||||
file_corruption_detector
|
RetryableError,
|
||||||
|
file_corruption_detector,
|
||||||
|
recovery_strategies,
|
||||||
|
with_error_recovery,
|
||||||
|
)
|
||||||
|
from ..interfaces.providers import Providers
|
||||||
|
from .base_provider import Loader
|
||||||
|
from .provider_config import (
|
||||||
|
ANIWORLD_HEADERS,
|
||||||
|
DEFAULT_PROVIDERS,
|
||||||
|
INVALID_PATH_CHARS,
|
||||||
|
LULUVDO_USER_AGENT,
|
||||||
|
ProviderType,
|
||||||
)
|
)
|
||||||
|
|
||||||
|
|
||||||
class EnhancedAniWorldLoader(Loader):
|
def _cleanup_temp_file(
|
||||||
"""Enhanced AniWorld loader with comprehensive error handling."""
|
temp_path: str,
|
||||||
|
logger: Optional[logging.Logger] = None,
|
||||||
|
) -> None:
|
||||||
|
"""Remove a temp file and any associated yt-dlp partial files.
|
||||||
|
|
||||||
def __init__(self):
|
Args:
|
||||||
|
temp_path: Path to the primary temp file.
|
||||||
|
logger: Optional logger for diagnostic messages.
|
||||||
|
"""
|
||||||
|
_log = logger or logging.getLogger(__name__)
|
||||||
|
candidates = [temp_path]
|
||||||
|
# yt-dlp creates fragment files like <file>.part
|
||||||
|
candidates.extend(
|
||||||
|
str(p) for p in Path(temp_path).parent.glob(
|
||||||
|
Path(temp_path).name + ".*"
|
||||||
|
)
|
||||||
|
)
|
||||||
|
for path in candidates:
|
||||||
|
if os.path.exists(path):
|
||||||
|
try:
|
||||||
|
os.remove(path)
|
||||||
|
_log.debug(f"Removed temp file: {path}")
|
||||||
|
except OSError as exc:
|
||||||
|
_log.warning(f"Failed to remove temp file {path}: {exc}")
|
||||||
|
|
||||||
|
|
||||||
|
class EnhancedAniWorldLoader(Loader):
|
||||||
|
"""Aniworld provider with retry and recovery strategies.
|
||||||
|
|
||||||
|
Also exposes metrics hooks for download statistics.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self) -> None:
|
||||||
super().__init__()
|
super().__init__()
|
||||||
self.logger = logging.getLogger(__name__)
|
self.logger = logging.getLogger(__name__)
|
||||||
self.SUPPORTED_PROVIDERS = ["VOE", "Doodstream", "Vidmoly", "Vidoza", "SpeedFiles", "Streamtape", "Luluvdo"]
|
self.SUPPORTED_PROVIDERS = DEFAULT_PROVIDERS
|
||||||
|
# local copy so modifications don't mutate shared constant
|
||||||
self.AniworldHeaders = {
|
self.AniworldHeaders = dict(ANIWORLD_HEADERS)
|
||||||
"accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8",
|
self.INVALID_PATH_CHARS = INVALID_PATH_CHARS
|
||||||
"accept-encoding": "gzip, deflate, br, zstd",
|
|
||||||
"accept-language": "de,de-DE;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6",
|
|
||||||
"cache-control": "max-age=0",
|
|
||||||
"priority": "u=0, i",
|
|
||||||
"sec-ch-ua": '"Chromium";v="136", "Microsoft Edge";v="136", "Not.A/Brand";v="99"',
|
|
||||||
"sec-ch-ua-mobile": "?0",
|
|
||||||
"sec-ch-ua-platform": '"Windows"',
|
|
||||||
"sec-fetch-dest": "document",
|
|
||||||
"sec-fetch-mode": "navigate",
|
|
||||||
"sec-fetch-site": "none",
|
|
||||||
"sec-fetch-user": "?1",
|
|
||||||
"upgrade-insecure-requests": "1",
|
|
||||||
"user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36 Edg/136.0.0.0"
|
|
||||||
}
|
|
||||||
|
|
||||||
self.INVALID_PATH_CHARS = ['<', '>', ':', '"', '/', '\\', '|', '?', '*', '&']
|
|
||||||
self.RANDOM_USER_AGENT = UserAgent().random
|
self.RANDOM_USER_AGENT = UserAgent().random
|
||||||
self.LULUVDO_USER_AGENT = "Mozilla/5.0 (Android 15; Mobile; rv:132.0) Gecko/132.0 Firefox/132.0"
|
self.LULUVDO_USER_AGENT = LULUVDO_USER_AGENT
|
||||||
|
|
||||||
self.PROVIDER_HEADERS = {
|
self.PROVIDER_HEADERS = {
|
||||||
"Vidmoly": ['Referer: "https://vidmoly.to"'],
|
ProviderType.VIDMOLY.value: ['Referer: "https://vidmoly.to"'],
|
||||||
"Doodstream": ['Referer: "https://dood.li/"'],
|
ProviderType.DOODSTREAM.value: ['Referer: "https://dood.li/"'],
|
||||||
"VOE": [f'User-Agent: {self.RANDOM_USER_AGENT}'],
|
ProviderType.VOE.value: [f'User-Agent: {self.RANDOM_USER_AGENT}'],
|
||||||
"Luluvdo": [
|
ProviderType.LULUVDO.value: [
|
||||||
f'User-Agent: {self.LULUVDO_USER_AGENT}',
|
f'User-Agent: {self.LULUVDO_USER_AGENT}',
|
||||||
'Accept-Language: de-DE,de;q=0.9,en-US;q=0.8,en;q=0.7',
|
"Accept-Language: de-DE,de;q=0.9,en-US;q=0.8,en;q=0.7",
|
||||||
'Origin: "https://luluvdo.com"',
|
'Origin: "https://luluvdo.com"',
|
||||||
'Referer: "https://luluvdo.com/"'
|
'Referer: "https://luluvdo.com/"',
|
||||||
]
|
],
|
||||||
}
|
}
|
||||||
|
|
||||||
self.ANIWORLD_TO = "https://aniworld.to"
|
self.ANIWORLD_TO = "https://aniworld.to"
|
||||||
@@ -98,23 +119,40 @@ class EnhancedAniWorldLoader(Loader):
|
|||||||
'retried_downloads': 0
|
'retried_downloads': 0
|
||||||
}
|
}
|
||||||
|
|
||||||
# Read timeout from environment variable
|
# Read timeout from environment variable (string->int safely)
|
||||||
self.download_timeout = int(os.getenv("DOWNLOAD_TIMEOUT", 600))
|
self.download_timeout = int(os.getenv("DOWNLOAD_TIMEOUT") or "600")
|
||||||
|
|
||||||
# Setup logging
|
# Setup logging
|
||||||
self._setup_logging()
|
self._setup_logging()
|
||||||
|
|
||||||
def _create_robust_session(self) -> requests.Session:
|
def _create_robust_session(self) -> requests.Session:
|
||||||
"""Create a session with robust retry and error handling configuration."""
|
"""Create a session with robust retry and error handling
|
||||||
|
configuration.
|
||||||
|
"""
|
||||||
session = requests.Session()
|
session = requests.Session()
|
||||||
|
|
||||||
# Enhanced retry strategy
|
# Configure retries so transient network problems are retried while we
|
||||||
|
# still fail fast on permanent errors. The status codes cover
|
||||||
|
# timeouts, rate limits, and the Cloudflare-origin 52x responses that
|
||||||
|
# AniWorld occasionally emits under load.
|
||||||
retries = Retry(
|
retries = Retry(
|
||||||
total=5,
|
total=5,
|
||||||
backoff_factor=2, # More aggressive backoff
|
backoff_factor=2, # More aggressive backoff
|
||||||
status_forcelist=[408, 429, 500, 502, 503, 504, 520, 521, 522, 523, 524],
|
status_forcelist=[
|
||||||
|
408,
|
||||||
|
429,
|
||||||
|
500,
|
||||||
|
502,
|
||||||
|
503,
|
||||||
|
504,
|
||||||
|
520,
|
||||||
|
521,
|
||||||
|
522,
|
||||||
|
523,
|
||||||
|
524,
|
||||||
|
],
|
||||||
allowed_methods=["GET", "POST", "HEAD"],
|
allowed_methods=["GET", "POST", "HEAD"],
|
||||||
raise_on_status=False # Handle status errors manually
|
raise_on_status=False, # Handle status errors manually
|
||||||
)
|
)
|
||||||
|
|
||||||
adapter = HTTPAdapter(
|
adapter = HTTPAdapter(
|
||||||
@@ -136,7 +174,9 @@ class EnhancedAniWorldLoader(Loader):
|
|||||||
"""Setup specialized logging for download errors and missing keys."""
|
"""Setup specialized logging for download errors and missing keys."""
|
||||||
# Download error logger
|
# Download error logger
|
||||||
self.download_error_logger = logging.getLogger("DownloadErrors")
|
self.download_error_logger = logging.getLogger("DownloadErrors")
|
||||||
download_error_handler = logging.FileHandler("../../download_errors.log")
|
download_error_handler = logging.FileHandler(
|
||||||
|
"../../download_errors.log"
|
||||||
|
)
|
||||||
download_error_handler.setLevel(logging.ERROR)
|
download_error_handler.setLevel(logging.ERROR)
|
||||||
download_error_formatter = logging.Formatter(
|
download_error_formatter = logging.Formatter(
|
||||||
'%(asctime)s - %(name)s - %(levelname)s - %(message)s'
|
'%(asctime)s - %(name)s - %(levelname)s - %(message)s'
|
||||||
@@ -174,7 +214,9 @@ class EnhancedAniWorldLoader(Loader):
|
|||||||
if not word or not word.strip():
|
if not word or not word.strip():
|
||||||
raise ValueError("Search term cannot be empty")
|
raise ValueError("Search term cannot be empty")
|
||||||
|
|
||||||
search_url = f"{self.ANIWORLD_TO}/ajax/seriesSearch?keyword={quote(word)}"
|
search_url = (
|
||||||
|
f"{self.ANIWORLD_TO}/ajax/seriesSearch?keyword={quote(word)}"
|
||||||
|
)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
return self._fetch_anime_list_with_recovery(search_url)
|
return self._fetch_anime_list_with_recovery(search_url)
|
||||||
@@ -197,6 +239,11 @@ class EnhancedAniWorldLoader(Loader):
|
|||||||
elif response.status_code == 403:
|
elif response.status_code == 403:
|
||||||
raise NonRetryableError(f"Access forbidden: {url}")
|
raise NonRetryableError(f"Access forbidden: {url}")
|
||||||
elif response.status_code >= 500:
|
elif response.status_code >= 500:
|
||||||
|
# Log suspicious server errors for monitoring
|
||||||
|
self.logger.warning(
|
||||||
|
f"Server error {response.status_code} from {url} "
|
||||||
|
f"- will retry"
|
||||||
|
)
|
||||||
raise RetryableError(f"Server error {response.status_code}")
|
raise RetryableError(f"Server error {response.status_code}")
|
||||||
else:
|
else:
|
||||||
raise RetryableError(f"HTTP error {response.status_code}")
|
raise RetryableError(f"HTTP error {response.status_code}")
|
||||||
@@ -213,7 +260,21 @@ class EnhancedAniWorldLoader(Loader):
|
|||||||
|
|
||||||
clean_text = response_text.strip()
|
clean_text = response_text.strip()
|
||||||
|
|
||||||
# Try multiple parsing strategies
|
# Quick fail for obviously non-JSON responses
|
||||||
|
if not (clean_text.startswith('[') or clean_text.startswith('{')):
|
||||||
|
# Check if it's HTML error page
|
||||||
|
if clean_text.lower().startswith('<!doctype') or \
|
||||||
|
clean_text.lower().startswith('<html'):
|
||||||
|
raise ValueError("Received HTML instead of JSON")
|
||||||
|
# If doesn't start with JSON markers, likely not JSON
|
||||||
|
self.logger.warning(
|
||||||
|
"Response doesn't start with JSON markers, "
|
||||||
|
"attempting parse anyway"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Attempt increasingly permissive parsing strategies to cope with
|
||||||
|
# upstream anomalies such as HTML escaping, stray BOM markers, and
|
||||||
|
# injected control characters.
|
||||||
parsing_strategies = [
|
parsing_strategies = [
|
||||||
lambda text: json.loads(html.unescape(text)),
|
lambda text: json.loads(html.unescape(text)),
|
||||||
lambda text: json.loads(text.encode('utf-8').decode('utf-8-sig')),
|
lambda text: json.loads(text.encode('utf-8').decode('utf-8-sig')),
|
||||||
@@ -224,64 +285,119 @@ class EnhancedAniWorldLoader(Loader):
|
|||||||
try:
|
try:
|
||||||
decoded_data = strategy(clean_text)
|
decoded_data = strategy(clean_text)
|
||||||
if isinstance(decoded_data, list):
|
if isinstance(decoded_data, list):
|
||||||
self.logger.debug(f"Successfully parsed anime response with strategy {i + 1}")
|
msg = (
|
||||||
|
f"Successfully parsed anime response with "
|
||||||
|
f"strategy {i + 1}"
|
||||||
|
)
|
||||||
|
self.logger.debug(msg)
|
||||||
return decoded_data
|
return decoded_data
|
||||||
else:
|
else:
|
||||||
self.logger.warning(f"Strategy {i + 1} returned non-list data: {type(decoded_data)}")
|
msg = (
|
||||||
|
f"Strategy {i + 1} returned non-list data: "
|
||||||
|
f"{type(decoded_data)}"
|
||||||
|
)
|
||||||
|
self.logger.warning(msg)
|
||||||
except json.JSONDecodeError as e:
|
except json.JSONDecodeError as e:
|
||||||
self.logger.debug(f"Parsing strategy {i + 1} failed: {e}")
|
msg = f"Parsing strategy {i + 1} failed: {e}"
|
||||||
|
self.logger.debug(msg)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
raise ValueError("Could not parse anime search response with any strategy")
|
raise ValueError(
|
||||||
|
"Could not parse anime search response with any strategy"
|
||||||
|
)
|
||||||
|
|
||||||
def _GetLanguageKey(self, language: str) -> int:
|
def _GetLanguageKey(self, language: str) -> int:
|
||||||
"""Get numeric language code."""
|
"""Get numeric language code."""
|
||||||
language_map = {
|
language_map = {
|
||||||
"German Dub": 1,
|
"German Dub": 1,
|
||||||
"English Sub": 2,
|
"English Sub": 2,
|
||||||
"German Sub": 3
|
"German Sub": 3,
|
||||||
}
|
}
|
||||||
return language_map.get(language, 0)
|
return language_map.get(language, 0)
|
||||||
|
|
||||||
@with_error_recovery(max_retries=2, context="language_check")
|
@with_error_recovery(max_retries=2, context="language_check")
|
||||||
def IsLanguage(self, season: int, episode: int, key: str, language: str = "German Dub") -> bool:
|
def IsLanguage(
|
||||||
"""Check if episode is available in specified language with error handling."""
|
self,
|
||||||
|
season: int,
|
||||||
|
episode: int,
|
||||||
|
key: str,
|
||||||
|
language: str = "German Dub",
|
||||||
|
) -> bool:
|
||||||
|
"""Check if episode is available in specified language."""
|
||||||
try:
|
try:
|
||||||
languageCode = self._GetLanguageKey(language)
|
languageCode = self._GetLanguageKey(language)
|
||||||
if languageCode == 0:
|
if languageCode == 0:
|
||||||
raise ValueError(f"Unknown language: {language}")
|
raise ValueError(f"Unknown language: {language}")
|
||||||
|
|
||||||
episode_response = self._GetEpisodeHTML(season, episode, key)
|
episode_response = self._GetEpisodeHTML(season, episode, key)
|
||||||
soup = BeautifulSoup(episode_response.content, 'html.parser')
|
soup = BeautifulSoup(episode_response.content, "html.parser")
|
||||||
|
|
||||||
change_language_box_div = soup.find('div', class_='changeLanguageBox')
|
lang_box = soup.find("div", class_="changeLanguageBox")
|
||||||
if not change_language_box_div:
|
if not lang_box:
|
||||||
self.logger.debug(f"No language box found for {key} S{season}E{episode}")
|
debug_msg = (
|
||||||
|
f"No language box found for {key} S{season}E{episode}"
|
||||||
|
)
|
||||||
|
self.logger.debug(debug_msg)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
img_tags = change_language_box_div.find_all('img')
|
img_tags = lang_box.find_all("img")
|
||||||
available_languages = []
|
available_languages = []
|
||||||
|
|
||||||
for img in img_tags:
|
for img in img_tags:
|
||||||
lang_key = img.get('data-lang-key')
|
lang_key = img.get("data-lang-key")
|
||||||
if lang_key and lang_key.isdigit():
|
if lang_key and lang_key.isdigit():
|
||||||
available_languages.append(int(lang_key))
|
available_languages.append(int(lang_key))
|
||||||
|
|
||||||
is_available = languageCode in available_languages
|
is_available = languageCode in available_languages
|
||||||
self.logger.debug(f"Language check for {key} S{season}E{episode} - "
|
debug_msg = (
|
||||||
f"Requested: {languageCode}, Available: {available_languages}, "
|
f"Language check for {key} S{season}E{episode}: "
|
||||||
f"Result: {is_available}")
|
f"Requested={languageCode}, "
|
||||||
|
f"Available={available_languages}, "
|
||||||
|
f"Result={is_available}"
|
||||||
|
)
|
||||||
|
self.logger.debug(debug_msg)
|
||||||
|
|
||||||
return is_available
|
return is_available
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self.logger.error(f"Language check failed for {key} S{season}E{episode}: {e}")
|
error_msg = (
|
||||||
|
f"Language check failed for {key} S{season}E{episode}: {e}"
|
||||||
|
)
|
||||||
|
self.logger.error(error_msg)
|
||||||
raise RetryableError(f"Language check failed: {e}") from e
|
raise RetryableError(f"Language check failed: {e}") from e
|
||||||
|
|
||||||
def Download(self, baseDirectory: str, serieFolder: str, season: int, episode: int,
|
def Download(
|
||||||
key: str, language: str = "German Dub", progress_callback: Callable = None) -> bool:
|
self,
|
||||||
"""Download episode with comprehensive error handling and recovery."""
|
baseDirectory: str,
|
||||||
self.download_stats['total_downloads'] += 1
|
serieFolder: str,
|
||||||
|
season: int,
|
||||||
|
episode: int,
|
||||||
|
key: str,
|
||||||
|
language: str = "German Dub",
|
||||||
|
progress_callback: Optional[Callable] = None,
|
||||||
|
) -> bool:
|
||||||
|
"""Download episode with comprehensive error handling.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
baseDirectory: Base download directory path
|
||||||
|
serieFolder: Filesystem folder name (metadata only, used for
|
||||||
|
file path construction)
|
||||||
|
season: Season number (0 for movies)
|
||||||
|
episode: Episode number
|
||||||
|
key: Series unique identifier from provider (used for
|
||||||
|
identification and API calls)
|
||||||
|
language: Audio language preference (default: German Dub)
|
||||||
|
progress_callback: Optional callback for download progress
|
||||||
|
updates
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
bool: True if download succeeded, False otherwise
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
DownloadError: If download fails after all retry attempts
|
||||||
|
ValueError: If required parameters are missing or invalid
|
||||||
|
"""
|
||||||
|
self.download_stats["total_downloads"] += 1
|
||||||
|
|
||||||
try:
|
try:
|
||||||
# Validate inputs
|
# Validate inputs
|
||||||
@@ -292,8 +408,10 @@ class EnhancedAniWorldLoader(Loader):
|
|||||||
raise ValueError("Season and episode must be non-negative")
|
raise ValueError("Season and episode must be non-negative")
|
||||||
|
|
||||||
# Prepare file paths
|
# Prepare file paths
|
||||||
sanitized_anime_title = ''.join(
|
sanitized_anime_title = "".join(
|
||||||
char for char in self.GetTitle(key) if char not in self.INVALID_PATH_CHARS
|
char
|
||||||
|
for char in self.GetTitle(key)
|
||||||
|
if char not in self.INVALID_PATH_CHARS
|
||||||
)
|
)
|
||||||
|
|
||||||
if not sanitized_anime_title:
|
if not sanitized_anime_title:
|
||||||
@@ -301,26 +419,61 @@ class EnhancedAniWorldLoader(Loader):
|
|||||||
|
|
||||||
# Generate output filename
|
# Generate output filename
|
||||||
if season == 0:
|
if season == 0:
|
||||||
output_file = f"{sanitized_anime_title} - Movie {episode:02} - ({language}).mp4"
|
output_file = (
|
||||||
|
f"{sanitized_anime_title} - Movie {episode:02} - "
|
||||||
|
f"({language}).mp4"
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
output_file = f"{sanitized_anime_title} - S{season:02}E{episode:03} - ({language}).mp4"
|
output_file = (
|
||||||
|
f"{sanitized_anime_title} - S{season:02}E{episode:03} - "
|
||||||
|
f"({language}).mp4"
|
||||||
|
)
|
||||||
|
|
||||||
# Create directory structure
|
# Create directory structure
|
||||||
folder_path = os.path.join(baseDirectory, serieFolder, f"Season {season}")
|
folder_path = os.path.join(
|
||||||
|
baseDirectory, serieFolder, f"Season {season}"
|
||||||
|
)
|
||||||
output_path = os.path.join(folder_path, output_file)
|
output_path = os.path.join(folder_path, output_file)
|
||||||
|
|
||||||
# Check if file already exists and is valid
|
# Check if file already exists and is valid
|
||||||
if os.path.exists(output_path):
|
if os.path.exists(output_path):
|
||||||
if file_corruption_detector.is_valid_video_file(output_path):
|
is_valid = file_corruption_detector.is_valid_video_file(
|
||||||
self.logger.info(f"File already exists and is valid: {output_file}")
|
output_path
|
||||||
self.download_stats['successful_downloads'] += 1
|
)
|
||||||
|
|
||||||
|
# Also verify checksum if available
|
||||||
|
integrity_mgr = get_integrity_manager()
|
||||||
|
checksum_valid = True
|
||||||
|
if integrity_mgr.has_checksum(Path(output_path)):
|
||||||
|
checksum_valid = integrity_mgr.verify_checksum(
|
||||||
|
Path(output_path)
|
||||||
|
)
|
||||||
|
if not checksum_valid:
|
||||||
|
self.logger.warning(
|
||||||
|
f"Checksum verification failed for {output_file}"
|
||||||
|
)
|
||||||
|
|
||||||
|
if is_valid and checksum_valid:
|
||||||
|
msg = (
|
||||||
|
f"File already exists and is valid: "
|
||||||
|
f"{output_file}"
|
||||||
|
)
|
||||||
|
self.logger.info(msg)
|
||||||
|
self.download_stats["successful_downloads"] += 1
|
||||||
return True
|
return True
|
||||||
else:
|
else:
|
||||||
self.logger.warning(f"Existing file appears corrupted, removing: {output_path}")
|
warning_msg = (
|
||||||
|
f"Existing file appears corrupted, removing: "
|
||||||
|
f"{output_path}"
|
||||||
|
)
|
||||||
|
self.logger.warning(warning_msg)
|
||||||
try:
|
try:
|
||||||
os.remove(output_path)
|
os.remove(output_path)
|
||||||
except Exception as e:
|
# Remove checksum entry
|
||||||
self.logger.error(f"Failed to remove corrupted file: {e}")
|
integrity_mgr.remove_checksum(Path(output_path))
|
||||||
|
except OSError as e:
|
||||||
|
error_msg = f"Failed to remove corrupted file: {e}"
|
||||||
|
self.logger.error(error_msg)
|
||||||
|
|
||||||
os.makedirs(folder_path, exist_ok=True)
|
os.makedirs(folder_path, exist_ok=True)
|
||||||
|
|
||||||
@@ -331,59 +484,86 @@ class EnhancedAniWorldLoader(Loader):
|
|||||||
|
|
||||||
# Attempt download with recovery strategies
|
# Attempt download with recovery strategies
|
||||||
success = self._download_with_recovery(
|
success = self._download_with_recovery(
|
||||||
season, episode, key, language, temp_path, output_path, progress_callback
|
season,
|
||||||
|
episode,
|
||||||
|
key,
|
||||||
|
language,
|
||||||
|
temp_path,
|
||||||
|
output_path,
|
||||||
|
progress_callback,
|
||||||
)
|
)
|
||||||
|
|
||||||
if success:
|
if success:
|
||||||
self.download_stats['successful_downloads'] += 1
|
self.download_stats["successful_downloads"] += 1
|
||||||
self.logger.info(f"Successfully downloaded: {output_file}")
|
success_msg = f"Successfully downloaded: {output_file}"
|
||||||
|
self.logger.info(success_msg)
|
||||||
else:
|
else:
|
||||||
self.download_stats['failed_downloads'] += 1
|
self.download_stats["failed_downloads"] += 1
|
||||||
self.download_error_logger.error(
|
fail_msg = (
|
||||||
f"Download failed for {key} S{season}E{episode} ({language})"
|
f"Download failed for {key} S{season}E{episode} "
|
||||||
|
f"({language})"
|
||||||
)
|
)
|
||||||
|
self.download_error_logger.error(fail_msg)
|
||||||
|
|
||||||
return success
|
return success
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self.download_stats['failed_downloads'] += 1
|
self.download_stats["failed_downloads"] += 1
|
||||||
self.download_error_logger.error(
|
err_msg = (
|
||||||
f"Download error for {key} S{season}E{episode}: {e}", exc_info=True
|
f"Download error for {key} S{season}E{episode}: {e}"
|
||||||
)
|
)
|
||||||
|
self.download_error_logger.error(err_msg, exc_info=True)
|
||||||
raise DownloadError(f"Download failed: {e}") from e
|
raise DownloadError(f"Download failed: {e}") from e
|
||||||
finally:
|
finally:
|
||||||
self.ClearCache()
|
self.ClearCache()
|
||||||
|
|
||||||
def _download_with_recovery(self, season: int, episode: int, key: str, language: str,
|
def _download_with_recovery(
|
||||||
temp_path: str, output_path: str, progress_callback: Callable) -> bool:
|
self,
|
||||||
"""Attempt download with multiple providers and recovery strategies."""
|
season: int,
|
||||||
|
episode: int,
|
||||||
|
key: str,
|
||||||
|
language: str,
|
||||||
|
temp_path: str,
|
||||||
|
output_path: str,
|
||||||
|
progress_callback: Optional[Callable],
|
||||||
|
) -> bool:
|
||||||
|
"""Attempt download with multiple providers and recovery."""
|
||||||
|
|
||||||
for provider_name in self.SUPPORTED_PROVIDERS:
|
for provider_name in self.SUPPORTED_PROVIDERS:
|
||||||
try:
|
try:
|
||||||
self.logger.info(f"Attempting download with provider: {provider_name}")
|
info_msg = (
|
||||||
|
f"Attempting download with provider: {provider_name}"
|
||||||
|
)
|
||||||
|
self.logger.info(info_msg)
|
||||||
|
|
||||||
# Get download link and headers for provider
|
# Get download link and headers for provider
|
||||||
link, headers = recovery_strategies.handle_network_failure(
|
link, headers = recovery_strategies.handle_network_failure(
|
||||||
self._get_direct_link_from_provider,
|
self._get_direct_link_from_provider,
|
||||||
season, episode, key, language
|
season,
|
||||||
|
episode,
|
||||||
|
key,
|
||||||
|
language,
|
||||||
)
|
)
|
||||||
|
|
||||||
if not link:
|
if not link:
|
||||||
self.logger.warning(f"No download link found for provider: {provider_name}")
|
warn_msg = (
|
||||||
|
f"No download link found for provider: "
|
||||||
|
f"{provider_name}"
|
||||||
|
)
|
||||||
|
self.logger.warning(warn_msg)
|
||||||
continue
|
continue
|
||||||
|
|
||||||
# Configure yt-dlp options
|
# Configure yt-dlp options
|
||||||
ydl_opts = {
|
ydl_opts = {
|
||||||
'fragment_retries': float('inf'),
|
"fragment_retries": float("inf"),
|
||||||
'outtmpl': temp_path,
|
"outtmpl": temp_path,
|
||||||
'quiet': True,
|
"quiet": True,
|
||||||
'no_warnings': True,
|
"no_warnings": True,
|
||||||
'progress_with_newline': False,
|
"progress_with_newline": False,
|
||||||
'nocheckcertificate': True,
|
"nocheckcertificate": True,
|
||||||
'socket_timeout': self.download_timeout,
|
"socket_timeout": self.download_timeout,
|
||||||
'http_chunk_size': 1024 * 1024, # 1MB chunks
|
"http_chunk_size": 1024 * 1024, # 1MB chunks
|
||||||
}
|
}
|
||||||
|
|
||||||
if headers:
|
if headers:
|
||||||
ydl_opts['http_headers'] = headers
|
ydl_opts['http_headers'] = headers
|
||||||
|
|
||||||
@@ -402,30 +582,59 @@ class EnhancedAniWorldLoader(Loader):
|
|||||||
# Verify downloaded file
|
# Verify downloaded file
|
||||||
if file_corruption_detector.is_valid_video_file(temp_path):
|
if file_corruption_detector.is_valid_video_file(temp_path):
|
||||||
# Move to final location
|
# Move to final location
|
||||||
shutil.copy2(temp_path, output_path)
|
# Use copyfile instead of copy2 to avoid metadata permission issues
|
||||||
|
shutil.copyfile(temp_path, output_path)
|
||||||
|
|
||||||
|
# Calculate and store checksum for integrity
|
||||||
|
integrity_mgr = get_integrity_manager()
|
||||||
|
try:
|
||||||
|
checksum = integrity_mgr.store_checksum(
|
||||||
|
Path(output_path)
|
||||||
|
)
|
||||||
|
filename = Path(output_path).name
|
||||||
|
self.logger.info(
|
||||||
|
f"Stored checksum for {filename}: "
|
||||||
|
f"{checksum[:16]}..."
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
self.logger.warning(
|
||||||
|
f"Failed to store checksum: {e}"
|
||||||
|
)
|
||||||
|
|
||||||
# Clean up temp file
|
# Clean up temp file
|
||||||
try:
|
try:
|
||||||
os.remove(temp_path)
|
os.remove(temp_path)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self.logger.warning(f"Failed to remove temp file: {e}")
|
warn_msg = f"Failed to remove temp file: {e}"
|
||||||
|
self.logger.warning(warn_msg)
|
||||||
|
|
||||||
return True
|
return True
|
||||||
else:
|
else:
|
||||||
self.logger.warning(f"Downloaded file failed validation: {temp_path}")
|
warn_msg = (
|
||||||
|
f"Downloaded file failed validation: "
|
||||||
|
f"{temp_path}"
|
||||||
|
)
|
||||||
|
self.logger.warning(warn_msg)
|
||||||
try:
|
try:
|
||||||
os.remove(temp_path)
|
os.remove(temp_path)
|
||||||
except Exception:
|
except OSError as e:
|
||||||
pass
|
warn_msg = f"Failed to remove temp file: {e}"
|
||||||
|
self.logger.warning(warn_msg)
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self.logger.warning(f"Provider {provider_name} failed: {e}")
|
self.logger.warning(f"Provider {provider_name} failed: {e}")
|
||||||
|
# Clean up any partial temp files left by this failed attempt
|
||||||
|
_cleanup_temp_file(temp_path, self.logger)
|
||||||
self.download_stats['retried_downloads'] += 1
|
self.download_stats['retried_downloads'] += 1
|
||||||
continue
|
continue
|
||||||
|
|
||||||
|
# All providers failed – make sure no temp remnants are left behind
|
||||||
|
_cleanup_temp_file(temp_path, self.logger)
|
||||||
return False
|
return False
|
||||||
|
|
||||||
def _perform_ytdl_download(self, ydl_opts: Dict[str, Any], link: str) -> bool:
|
def _perform_ytdl_download(
|
||||||
|
self, ydl_opts: Dict[str, Any], link: str
|
||||||
|
) -> bool:
|
||||||
"""Perform actual download using yt-dlp."""
|
"""Perform actual download using yt-dlp."""
|
||||||
try:
|
try:
|
||||||
with YoutubeDL(ydl_opts) as ydl:
|
with YoutubeDL(ydl_opts) as ydl:
|
||||||
@@ -476,88 +685,165 @@ class EnhancedAniWorldLoader(Loader):
|
|||||||
|
|
||||||
if not response.ok:
|
if not response.ok:
|
||||||
if response.status_code == 404:
|
if response.status_code == 404:
|
||||||
self.nokey_logger.error(f"Anime key not found: {key}")
|
msg = f"Anime key not found: {key}"
|
||||||
raise NonRetryableError(f"Anime key not found: {key}")
|
self.nokey_logger.error(msg)
|
||||||
|
raise NonRetryableError(msg)
|
||||||
else:
|
else:
|
||||||
raise RetryableError(f"HTTP error {response.status_code} for key {key}")
|
err_msg = (
|
||||||
|
f"HTTP error {response.status_code} for key {key}"
|
||||||
|
)
|
||||||
|
raise RetryableError(err_msg)
|
||||||
|
|
||||||
self._KeyHTMLDict[key] = response
|
self._KeyHTMLDict[key] = response
|
||||||
return self._KeyHTMLDict[key]
|
return self._KeyHTMLDict[key]
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self.logger.error(f"Failed to get HTML for key {key}: {e}")
|
error_msg = f"Failed to get HTML for key {key}: {e}"
|
||||||
|
self.logger.error(error_msg)
|
||||||
raise
|
raise
|
||||||
|
|
||||||
@with_error_recovery(max_retries=2, context="get_episode_html")
|
@with_error_recovery(max_retries=2, context="get_episode_html")
|
||||||
def _GetEpisodeHTML(self, season: int, episode: int, key: str):
|
def _GetEpisodeHTML(self, season: int, episode: int, key: str):
|
||||||
"""Get cached HTML for specific episode."""
|
"""Get cached HTML for specific episode.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
season: Season number (must be 1-999)
|
||||||
|
episode: Episode number (must be 1-9999)
|
||||||
|
key: Series identifier (should be non-empty)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Cached or fetched HTML response
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
ValueError: If parameters are invalid
|
||||||
|
NonRetryableError: If episode not found (404)
|
||||||
|
RetryableError: If HTTP error occurs
|
||||||
|
"""
|
||||||
|
# Validate parameters
|
||||||
|
if not key or not key.strip():
|
||||||
|
raise ValueError("Series key cannot be empty")
|
||||||
|
if season < 1 or season > 999:
|
||||||
|
raise ValueError(
|
||||||
|
f"Invalid season number: {season} (must be 1-999)"
|
||||||
|
)
|
||||||
|
if episode < 1 or episode > 9999:
|
||||||
|
raise ValueError(
|
||||||
|
f"Invalid episode number: {episode} (must be 1-9999)"
|
||||||
|
)
|
||||||
|
|
||||||
cache_key = (key, season, episode)
|
cache_key = (key, season, episode)
|
||||||
if cache_key in self._EpisodeHTMLDict:
|
if cache_key in self._EpisodeHTMLDict:
|
||||||
return self._EpisodeHTMLDict[cache_key]
|
return self._EpisodeHTMLDict[cache_key]
|
||||||
|
|
||||||
try:
|
try:
|
||||||
url = f"{self.ANIWORLD_TO}/anime/stream/{key}/staffel-{season}/episode-{episode}"
|
url = (
|
||||||
|
f"{self.ANIWORLD_TO}/anime/stream/{key}/"
|
||||||
|
f"staffel-{season}/episode-{episode}"
|
||||||
|
)
|
||||||
response = recovery_strategies.handle_network_failure(
|
response = recovery_strategies.handle_network_failure(
|
||||||
self.session.get,
|
self.session.get, url, timeout=self.DEFAULT_REQUEST_TIMEOUT
|
||||||
url,
|
|
||||||
timeout=self.DEFAULT_REQUEST_TIMEOUT
|
|
||||||
)
|
)
|
||||||
|
|
||||||
if not response.ok:
|
if not response.ok:
|
||||||
if response.status_code == 404:
|
if response.status_code == 404:
|
||||||
raise NonRetryableError(f"Episode not found: {key} S{season}E{episode}")
|
err_msg = (
|
||||||
|
f"Episode not found: {key} S{season}E{episode}"
|
||||||
|
)
|
||||||
|
raise NonRetryableError(err_msg)
|
||||||
else:
|
else:
|
||||||
raise RetryableError(f"HTTP error {response.status_code} for episode")
|
err_msg = (
|
||||||
|
f"HTTP error {response.status_code} for episode"
|
||||||
|
)
|
||||||
|
raise RetryableError(err_msg)
|
||||||
|
|
||||||
self._EpisodeHTMLDict[cache_key] = response
|
self._EpisodeHTMLDict[cache_key] = response
|
||||||
return self._EpisodeHTMLDict[cache_key]
|
return self._EpisodeHTMLDict[cache_key]
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self.logger.error(f"Failed to get episode HTML for {key} S{season}E{episode}: {e}")
|
error_msg = (
|
||||||
|
f"Failed to get episode HTML for {key} "
|
||||||
|
f"S{season}E{episode}: {e}"
|
||||||
|
)
|
||||||
|
self.logger.error(error_msg)
|
||||||
raise
|
raise
|
||||||
|
|
||||||
def _get_provider_from_html(self, season: int, episode: int, key: str) -> dict:
|
def _get_provider_from_html(
|
||||||
|
self, season: int, episode: int, key: str
|
||||||
|
) -> dict:
|
||||||
"""Extract providers from HTML with error handling."""
|
"""Extract providers from HTML with error handling."""
|
||||||
try:
|
try:
|
||||||
soup = BeautifulSoup(self._GetEpisodeHTML(season, episode, key).content, 'html.parser')
|
episode_html = self._GetEpisodeHTML(season, episode, key)
|
||||||
providers = {}
|
soup = BeautifulSoup(episode_html.content, "html.parser")
|
||||||
|
providers: dict[str, dict] = {}
|
||||||
|
|
||||||
episode_links = soup.find_all(
|
episode_links = soup.find_all(
|
||||||
'li', class_=lambda x: x and x.startswith('episodeLink')
|
"li", class_=lambda x: x and x.startswith("episodeLink")
|
||||||
)
|
)
|
||||||
|
|
||||||
if not episode_links:
|
if not episode_links:
|
||||||
self.logger.warning(f"No episode links found for {key} S{season}E{episode}")
|
warn_msg = (
|
||||||
|
f"No episode links found for {key} S{season}E{episode}"
|
||||||
|
)
|
||||||
|
self.logger.warning(warn_msg)
|
||||||
return providers
|
return providers
|
||||||
|
|
||||||
for link in episode_links:
|
for link in episode_links:
|
||||||
provider_name_tag = link.find('h4')
|
provider_name_tag = link.find("h4")
|
||||||
provider_name = provider_name_tag.text.strip() if provider_name_tag else None
|
provider_name = (
|
||||||
|
provider_name_tag.text.strip()
|
||||||
|
if provider_name_tag
|
||||||
|
else None
|
||||||
|
)
|
||||||
|
|
||||||
redirect_link_tag = link.find('a', class_='watchEpisode')
|
redirect_link_tag = link.find("a", class_="watchEpisode")
|
||||||
redirect_link = redirect_link_tag['href'] if redirect_link_tag else None
|
redirect_link = (
|
||||||
|
redirect_link_tag["href"]
|
||||||
|
if redirect_link_tag
|
||||||
|
else None
|
||||||
|
)
|
||||||
|
|
||||||
lang_key = link.get('data-lang-key')
|
lang_key = link.get("data-lang-key")
|
||||||
lang_key = int(lang_key) if lang_key and lang_key.isdigit() else None
|
lang_key = (
|
||||||
|
int(lang_key)
|
||||||
|
if lang_key and lang_key.isdigit()
|
||||||
|
else None
|
||||||
|
)
|
||||||
|
|
||||||
if provider_name and redirect_link and lang_key:
|
if provider_name and redirect_link and lang_key:
|
||||||
if provider_name not in providers:
|
if provider_name not in providers:
|
||||||
providers[provider_name] = {}
|
providers[provider_name] = {}
|
||||||
providers[provider_name][lang_key] = f"{self.ANIWORLD_TO}{redirect_link}"
|
providers[provider_name][lang_key] = (
|
||||||
|
f"{self.ANIWORLD_TO}{redirect_link}"
|
||||||
|
)
|
||||||
|
|
||||||
self.logger.debug(f"Found {len(providers)} providers for {key} S{season}E{episode}")
|
debug_msg = (
|
||||||
|
f"Found {len(providers)} providers for "
|
||||||
|
f"{key} S{season}E{episode}"
|
||||||
|
)
|
||||||
|
self.logger.debug(debug_msg)
|
||||||
return providers
|
return providers
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self.logger.error(f"Failed to parse providers from HTML: {e}")
|
error_msg = f"Failed to parse providers from HTML: {e}"
|
||||||
|
self.logger.error(error_msg)
|
||||||
raise RetryableError(f"Provider parsing failed: {e}") from e
|
raise RetryableError(f"Provider parsing failed: {e}") from e
|
||||||
|
|
||||||
def _get_redirect_link(self, season: int, episode: int, key: str, language: str = "German Dub"):
|
def _get_redirect_link(
|
||||||
|
self,
|
||||||
|
season: int,
|
||||||
|
episode: int,
|
||||||
|
key: str,
|
||||||
|
language: str = "German Dub",
|
||||||
|
):
|
||||||
"""Get redirect link for episode with error handling."""
|
"""Get redirect link for episode with error handling."""
|
||||||
languageCode = self._GetLanguageKey(language)
|
languageCode = self._GetLanguageKey(language)
|
||||||
|
|
||||||
if not self.IsLanguage(season, episode, key, language):
|
if not self.IsLanguage(season, episode, key, language):
|
||||||
raise NonRetryableError(f"Language {language} not available for {key} S{season}E{episode}")
|
err_msg = (
|
||||||
|
f"Language {language} not available for "
|
||||||
|
f"{key} S{season}E{episode}"
|
||||||
|
)
|
||||||
|
raise NonRetryableError(err_msg)
|
||||||
|
|
||||||
providers = self._get_provider_from_html(season, episode, key)
|
providers = self._get_provider_from_html(season, episode, key)
|
||||||
|
|
||||||
@@ -565,30 +851,51 @@ class EnhancedAniWorldLoader(Loader):
|
|||||||
if languageCode in lang_dict:
|
if languageCode in lang_dict:
|
||||||
return lang_dict[languageCode], provider_name
|
return lang_dict[languageCode], provider_name
|
||||||
|
|
||||||
raise NonRetryableError(f"No provider found for {language} in {key} S{season}E{episode}")
|
err_msg = (
|
||||||
|
f"No provider found for {language} in "
|
||||||
|
f"{key} S{season}E{episode}"
|
||||||
|
)
|
||||||
|
raise NonRetryableError(err_msg)
|
||||||
|
|
||||||
def _get_embeded_link(self, season: int, episode: int, key: str, language: str = "German Dub"):
|
def _get_embeded_link(
|
||||||
|
self,
|
||||||
|
season: int,
|
||||||
|
episode: int,
|
||||||
|
key: str,
|
||||||
|
language: str = "German Dub",
|
||||||
|
):
|
||||||
"""Get embedded link with error handling."""
|
"""Get embedded link with error handling."""
|
||||||
try:
|
try:
|
||||||
redirect_link, provider_name = self._get_redirect_link(season, episode, key, language)
|
redirect_link, provider_name = self._get_redirect_link(
|
||||||
|
season, episode, key, language
|
||||||
|
)
|
||||||
|
|
||||||
response = recovery_strategies.handle_network_failure(
|
response = recovery_strategies.handle_network_failure(
|
||||||
self.session.get,
|
self.session.get,
|
||||||
redirect_link,
|
redirect_link,
|
||||||
timeout=self.DEFAULT_REQUEST_TIMEOUT,
|
timeout=self.DEFAULT_REQUEST_TIMEOUT,
|
||||||
headers={'User-Agent': self.RANDOM_USER_AGENT}
|
headers={"User-Agent": self.RANDOM_USER_AGENT},
|
||||||
)
|
)
|
||||||
|
|
||||||
return response.url
|
return response.url
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self.logger.error(f"Failed to get embedded link: {e}")
|
error_msg = f"Failed to get embedded link: {e}"
|
||||||
|
self.logger.error(error_msg)
|
||||||
raise
|
raise
|
||||||
|
|
||||||
def _get_direct_link_from_provider(self, season: int, episode: int, key: str, language: str = "German Dub"):
|
def _get_direct_link_from_provider(
|
||||||
"""Get direct download link from provider with error handling."""
|
self,
|
||||||
|
season: int,
|
||||||
|
episode: int,
|
||||||
|
key: str,
|
||||||
|
language: str = "German Dub",
|
||||||
|
):
|
||||||
|
"""Get direct download link from provider."""
|
||||||
try:
|
try:
|
||||||
embedded_link = self._get_embeded_link(season, episode, key, language)
|
embedded_link = self._get_embeded_link(
|
||||||
|
season, episode, key, language
|
||||||
|
)
|
||||||
if not embedded_link:
|
if not embedded_link:
|
||||||
raise NonRetryableError("No embedded link found")
|
raise NonRetryableError("No embedded link found")
|
||||||
|
|
||||||
@@ -597,10 +904,13 @@ class EnhancedAniWorldLoader(Loader):
|
|||||||
if not provider:
|
if not provider:
|
||||||
raise NonRetryableError("VOE provider not available")
|
raise NonRetryableError("VOE provider not available")
|
||||||
|
|
||||||
return provider.GetLink(embedded_link, self.DEFAULT_REQUEST_TIMEOUT)
|
return provider.get_link(
|
||||||
|
embedded_link, self.DEFAULT_REQUEST_TIMEOUT
|
||||||
|
)
|
||||||
|
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
self.logger.error(f"Failed to get direct link from provider: {e}")
|
error_msg = f"Failed to get direct link from provider: {e}"
|
||||||
|
self.logger.error(error_msg)
|
||||||
raise
|
raise
|
||||||
|
|
||||||
@with_error_recovery(max_retries=2, context="get_season_episode_count")
|
@with_error_recovery(max_retries=2, context="get_season_episode_count")
|
||||||
@@ -611,29 +921,35 @@ class EnhancedAniWorldLoader(Loader):
|
|||||||
response = recovery_strategies.handle_network_failure(
|
response = recovery_strategies.handle_network_failure(
|
||||||
requests.get,
|
requests.get,
|
||||||
base_url,
|
base_url,
|
||||||
timeout=self.DEFAULT_REQUEST_TIMEOUT
|
timeout=self.DEFAULT_REQUEST_TIMEOUT,
|
||||||
)
|
)
|
||||||
|
|
||||||
soup = BeautifulSoup(response.content, 'html.parser')
|
soup = BeautifulSoup(response.content, "html.parser")
|
||||||
|
|
||||||
season_meta = soup.find('meta', itemprop='numberOfSeasons')
|
season_meta = soup.find("meta", itemprop="numberOfSeasons")
|
||||||
number_of_seasons = int(season_meta['content']) if season_meta else 0
|
number_of_seasons = (
|
||||||
|
int(season_meta["content"]) if season_meta else 0
|
||||||
|
)
|
||||||
|
|
||||||
episode_counts = {}
|
episode_counts = {}
|
||||||
|
|
||||||
for season in range(1, number_of_seasons + 1):
|
for season in range(1, number_of_seasons + 1):
|
||||||
season_url = f"{base_url}staffel-{season}"
|
season_url = f"{base_url}staffel-{season}"
|
||||||
season_response = recovery_strategies.handle_network_failure(
|
season_response = (
|
||||||
|
recovery_strategies.handle_network_failure(
|
||||||
requests.get,
|
requests.get,
|
||||||
season_url,
|
season_url,
|
||||||
timeout=self.DEFAULT_REQUEST_TIMEOUT
|
timeout=self.DEFAULT_REQUEST_TIMEOUT,
|
||||||
|
)
|
||||||
)
|
)
|
||||||
|
|
||||||
season_soup = BeautifulSoup(season_response.content, 'html.parser')
|
season_soup = BeautifulSoup(
|
||||||
|
season_response.content, "html.parser"
|
||||||
|
)
|
||||||
|
|
||||||
episode_links = season_soup.find_all('a', href=True)
|
episode_links = season_soup.find_all("a", href=True)
|
||||||
unique_links = set(
|
unique_links = set(
|
||||||
link['href']
|
link["href"]
|
||||||
for link in episode_links
|
for link in episode_links
|
||||||
if f"staffel-{season}/episode-" in link['href']
|
if f"staffel-{season}/episode-" in link['href']
|
||||||
)
|
)
|
||||||
@@ -668,4 +984,5 @@ class EnhancedAniWorldLoader(Loader):
|
|||||||
# For backward compatibility, create wrapper that uses enhanced loader
|
# For backward compatibility, create wrapper that uses enhanced loader
|
||||||
class AniworldLoader(EnhancedAniWorldLoader):
|
class AniworldLoader(EnhancedAniWorldLoader):
|
||||||
"""Backward compatibility wrapper for the enhanced loader."""
|
"""Backward compatibility wrapper for the enhanced loader."""
|
||||||
|
|
||||||
pass
|
pass
|
||||||
325
src/core/providers/failover.py
Normal file
325
src/core/providers/failover.py
Normal file
@@ -0,0 +1,325 @@
|
|||||||
|
"""Provider failover system for automatic fallback on failures.
|
||||||
|
|
||||||
|
This module implements automatic failover between multiple providers,
|
||||||
|
ensuring high availability by switching to backup providers when the
|
||||||
|
primary fails.
|
||||||
|
"""
|
||||||
|
import asyncio
|
||||||
|
import logging
|
||||||
|
from typing import Any, Callable, Dict, List, Optional, TypeVar
|
||||||
|
|
||||||
|
from src.core.providers.health_monitor import get_health_monitor
|
||||||
|
from src.core.providers.provider_config import DEFAULT_PROVIDERS
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
T = TypeVar("T")
|
||||||
|
|
||||||
|
|
||||||
|
class ProviderFailover:
|
||||||
|
"""Manages automatic failover between multiple providers."""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
providers: Optional[List[str]] = None,
|
||||||
|
max_retries: int = 3,
|
||||||
|
retry_delay: float = 1.0,
|
||||||
|
enable_health_monitoring: bool = True,
|
||||||
|
):
|
||||||
|
"""Initialize provider failover manager.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
providers: List of provider names to use (default: all).
|
||||||
|
max_retries: Maximum retry attempts per provider.
|
||||||
|
retry_delay: Delay between retries in seconds.
|
||||||
|
enable_health_monitoring: Whether to use health monitoring.
|
||||||
|
"""
|
||||||
|
self._providers = providers or DEFAULT_PROVIDERS.copy()
|
||||||
|
self._max_retries = max_retries
|
||||||
|
self._retry_delay = retry_delay
|
||||||
|
self._enable_health_monitoring = enable_health_monitoring
|
||||||
|
|
||||||
|
# Current provider index
|
||||||
|
self._current_index = 0
|
||||||
|
|
||||||
|
# Health monitor
|
||||||
|
self._health_monitor = (
|
||||||
|
get_health_monitor() if enable_health_monitoring else None
|
||||||
|
)
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
f"Provider failover initialized with "
|
||||||
|
f"{len(self._providers)} providers"
|
||||||
|
)
|
||||||
|
|
||||||
|
def get_current_provider(self) -> str:
|
||||||
|
"""Get the current active provider.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Name of current provider.
|
||||||
|
"""
|
||||||
|
if self._enable_health_monitoring and self._health_monitor:
|
||||||
|
# Try to get best available provider
|
||||||
|
best = self._health_monitor.get_best_provider()
|
||||||
|
if best and best in self._providers:
|
||||||
|
return best
|
||||||
|
|
||||||
|
# Fall back to round-robin selection
|
||||||
|
return self._providers[self._current_index % len(self._providers)]
|
||||||
|
|
||||||
|
def get_next_provider(self) -> Optional[str]:
|
||||||
|
"""Get the next provider in the failover chain.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Name of next provider or None if none available.
|
||||||
|
"""
|
||||||
|
if self._enable_health_monitoring and self._health_monitor:
|
||||||
|
# Get available providers
|
||||||
|
available = [
|
||||||
|
p
|
||||||
|
for p in self._providers
|
||||||
|
if p in self._health_monitor.get_available_providers()
|
||||||
|
]
|
||||||
|
|
||||||
|
if not available:
|
||||||
|
logger.warning("No available providers for failover")
|
||||||
|
return None
|
||||||
|
|
||||||
|
# Find next available provider
|
||||||
|
current = self.get_current_provider()
|
||||||
|
try:
|
||||||
|
current_idx = available.index(current)
|
||||||
|
next_idx = (current_idx + 1) % len(available)
|
||||||
|
return available[next_idx]
|
||||||
|
except ValueError:
|
||||||
|
# Current provider not in available list
|
||||||
|
return available[0]
|
||||||
|
|
||||||
|
# Fall back to simple rotation
|
||||||
|
self._current_index = (self._current_index + 1) % len(
|
||||||
|
self._providers
|
||||||
|
)
|
||||||
|
return self._providers[self._current_index]
|
||||||
|
|
||||||
|
async def execute_with_failover(
|
||||||
|
self,
|
||||||
|
operation: Callable[[str], Any],
|
||||||
|
operation_name: str = "operation",
|
||||||
|
**kwargs,
|
||||||
|
) -> Any:
|
||||||
|
"""Execute an operation with automatic failover.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
operation: Async callable that takes provider name.
|
||||||
|
operation_name: Name for logging purposes.
|
||||||
|
**kwargs: Additional arguments to pass to operation.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Result from successful operation.
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
Exception: If all providers fail.
|
||||||
|
"""
|
||||||
|
providers_tried = []
|
||||||
|
last_error = None
|
||||||
|
|
||||||
|
# Try each provider
|
||||||
|
for attempt in range(len(self._providers)):
|
||||||
|
provider = self.get_current_provider()
|
||||||
|
|
||||||
|
# Skip if already tried
|
||||||
|
if provider in providers_tried:
|
||||||
|
self.get_next_provider()
|
||||||
|
continue
|
||||||
|
|
||||||
|
providers_tried.append(provider)
|
||||||
|
|
||||||
|
# Try operation with retries
|
||||||
|
for retry in range(self._max_retries):
|
||||||
|
try:
|
||||||
|
logger.info(
|
||||||
|
f"Executing {operation_name} with provider "
|
||||||
|
f"{provider} (attempt {retry + 1}/{self._max_retries})" # noqa: E501
|
||||||
|
)
|
||||||
|
|
||||||
|
# Execute operation
|
||||||
|
import time
|
||||||
|
|
||||||
|
start_time = time.time()
|
||||||
|
result = await operation(provider, **kwargs)
|
||||||
|
elapsed_ms = (time.time() - start_time) * 1000
|
||||||
|
|
||||||
|
# Record success
|
||||||
|
if self._health_monitor:
|
||||||
|
self._health_monitor.record_request(
|
||||||
|
provider_name=provider,
|
||||||
|
success=True,
|
||||||
|
response_time_ms=elapsed_ms,
|
||||||
|
)
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
f"{operation_name} succeeded with provider "
|
||||||
|
f"{provider} in {elapsed_ms:.2f}ms"
|
||||||
|
)
|
||||||
|
return result
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
last_error = e
|
||||||
|
logger.warning(
|
||||||
|
f"{operation_name} failed with provider "
|
||||||
|
f"{provider} (attempt {retry + 1}): {e}"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Record failure
|
||||||
|
if self._health_monitor:
|
||||||
|
import time
|
||||||
|
|
||||||
|
elapsed_ms = (time.time() - start_time) * 1000
|
||||||
|
self._health_monitor.record_request(
|
||||||
|
provider_name=provider,
|
||||||
|
success=False,
|
||||||
|
response_time_ms=elapsed_ms,
|
||||||
|
error_message=str(e),
|
||||||
|
)
|
||||||
|
|
||||||
|
# Retry with delay
|
||||||
|
if retry < self._max_retries - 1:
|
||||||
|
await asyncio.sleep(self._retry_delay)
|
||||||
|
|
||||||
|
# Try next provider
|
||||||
|
next_provider = self.get_next_provider()
|
||||||
|
if next_provider is None:
|
||||||
|
break
|
||||||
|
|
||||||
|
# All providers failed
|
||||||
|
error_msg = (
|
||||||
|
f"{operation_name} failed with all providers. "
|
||||||
|
f"Tried: {', '.join(providers_tried)}"
|
||||||
|
)
|
||||||
|
logger.error(error_msg)
|
||||||
|
raise Exception(error_msg) from last_error
|
||||||
|
|
||||||
|
def add_provider(self, provider_name: str) -> None:
|
||||||
|
"""Add a provider to the failover chain.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
provider_name: Name of provider to add.
|
||||||
|
"""
|
||||||
|
if provider_name not in self._providers:
|
||||||
|
self._providers.append(provider_name)
|
||||||
|
logger.info(f"Added provider to failover chain: {provider_name}")
|
||||||
|
|
||||||
|
def remove_provider(self, provider_name: str) -> bool:
|
||||||
|
"""Remove a provider from the failover chain.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
provider_name: Name of provider to remove.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
True if removed, False if not found.
|
||||||
|
"""
|
||||||
|
if provider_name in self._providers:
|
||||||
|
self._providers.remove(provider_name)
|
||||||
|
logger.info(
|
||||||
|
f"Removed provider from failover chain: {provider_name}"
|
||||||
|
)
|
||||||
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
def get_providers(self) -> List[str]:
|
||||||
|
"""Get list of all providers in failover chain.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
List of provider names.
|
||||||
|
"""
|
||||||
|
return self._providers.copy()
|
||||||
|
|
||||||
|
def set_provider_priority(
|
||||||
|
self, provider_name: str, priority_index: int
|
||||||
|
) -> bool:
|
||||||
|
"""Set priority of a provider by moving it in the chain.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
provider_name: Name of provider to prioritize.
|
||||||
|
priority_index: New index position (0 = highest priority).
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
True if updated, False if provider not found.
|
||||||
|
"""
|
||||||
|
if provider_name not in self._providers:
|
||||||
|
return False
|
||||||
|
|
||||||
|
self._providers.remove(provider_name)
|
||||||
|
self._providers.insert(
|
||||||
|
min(priority_index, len(self._providers)), provider_name
|
||||||
|
)
|
||||||
|
logger.info(
|
||||||
|
f"Set provider {provider_name} priority to index {priority_index}"
|
||||||
|
)
|
||||||
|
return True
|
||||||
|
|
||||||
|
def get_failover_stats(self) -> Dict[str, Any]:
|
||||||
|
"""Get failover statistics and configuration.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Dictionary with failover stats.
|
||||||
|
"""
|
||||||
|
stats = {
|
||||||
|
"total_providers": len(self._providers),
|
||||||
|
"providers": self._providers.copy(),
|
||||||
|
"current_provider": self.get_current_provider(),
|
||||||
|
"max_retries": self._max_retries,
|
||||||
|
"retry_delay": self._retry_delay,
|
||||||
|
"health_monitoring_enabled": self._enable_health_monitoring,
|
||||||
|
}
|
||||||
|
|
||||||
|
if self._health_monitor:
|
||||||
|
available = self._health_monitor.get_available_providers()
|
||||||
|
stats["available_providers"] = [
|
||||||
|
p for p in self._providers if p in available
|
||||||
|
]
|
||||||
|
stats["unavailable_providers"] = [
|
||||||
|
p for p in self._providers if p not in available
|
||||||
|
]
|
||||||
|
|
||||||
|
return stats
|
||||||
|
|
||||||
|
|
||||||
|
# Global failover instance
|
||||||
|
_failover: Optional[ProviderFailover] = None
|
||||||
|
|
||||||
|
|
||||||
|
def get_failover() -> ProviderFailover:
|
||||||
|
"""Get or create global provider failover instance.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Global ProviderFailover instance.
|
||||||
|
"""
|
||||||
|
global _failover
|
||||||
|
if _failover is None:
|
||||||
|
_failover = ProviderFailover()
|
||||||
|
return _failover
|
||||||
|
|
||||||
|
|
||||||
|
def configure_failover(
|
||||||
|
providers: Optional[List[str]] = None,
|
||||||
|
max_retries: int = 3,
|
||||||
|
retry_delay: float = 1.0,
|
||||||
|
) -> ProviderFailover:
|
||||||
|
"""Configure global provider failover instance.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
providers: List of provider names to use.
|
||||||
|
max_retries: Maximum retry attempts per provider.
|
||||||
|
retry_delay: Delay between retries in seconds.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Configured ProviderFailover instance.
|
||||||
|
"""
|
||||||
|
global _failover
|
||||||
|
_failover = ProviderFailover(
|
||||||
|
providers=providers,
|
||||||
|
max_retries=max_retries,
|
||||||
|
retry_delay=retry_delay,
|
||||||
|
)
|
||||||
|
return _failover
|
||||||
416
src/core/providers/health_monitor.py
Normal file
416
src/core/providers/health_monitor.py
Normal file
@@ -0,0 +1,416 @@
|
|||||||
|
"""Provider health monitoring system for tracking availability and performance.
|
||||||
|
|
||||||
|
This module provides health monitoring capabilities for anime providers,
|
||||||
|
tracking metrics like availability, response times, success rates, and
|
||||||
|
bandwidth usage.
|
||||||
|
"""
|
||||||
|
import asyncio
|
||||||
|
import logging
|
||||||
|
from collections import defaultdict, deque
|
||||||
|
from dataclasses import dataclass
|
||||||
|
from datetime import datetime, timedelta
|
||||||
|
from typing import Any, Deque, Dict, List, Optional
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class ProviderHealthMetrics:
|
||||||
|
"""Health metrics for a single provider."""
|
||||||
|
|
||||||
|
provider_name: str
|
||||||
|
is_available: bool = True
|
||||||
|
last_check_time: Optional[datetime] = None
|
||||||
|
total_requests: int = 0
|
||||||
|
successful_requests: int = 0
|
||||||
|
failed_requests: int = 0
|
||||||
|
average_response_time_ms: float = 0.0
|
||||||
|
last_error: Optional[str] = None
|
||||||
|
last_error_time: Optional[datetime] = None
|
||||||
|
consecutive_failures: int = 0
|
||||||
|
total_bytes_downloaded: int = 0
|
||||||
|
uptime_percentage: float = 100.0
|
||||||
|
|
||||||
|
@property
|
||||||
|
def success_rate(self) -> float:
|
||||||
|
"""Calculate success rate as percentage."""
|
||||||
|
if self.total_requests == 0:
|
||||||
|
return 0.0
|
||||||
|
return (self.successful_requests / self.total_requests) * 100
|
||||||
|
|
||||||
|
@property
|
||||||
|
def failure_rate(self) -> float:
|
||||||
|
"""Calculate failure rate as percentage."""
|
||||||
|
return 100.0 - self.success_rate
|
||||||
|
|
||||||
|
def to_dict(self) -> Dict[str, Any]:
|
||||||
|
"""Convert metrics to dictionary."""
|
||||||
|
return {
|
||||||
|
"provider_name": self.provider_name,
|
||||||
|
"is_available": self.is_available,
|
||||||
|
"last_check_time": (
|
||||||
|
self.last_check_time.isoformat()
|
||||||
|
if self.last_check_time
|
||||||
|
else None
|
||||||
|
),
|
||||||
|
"total_requests": self.total_requests,
|
||||||
|
"successful_requests": self.successful_requests,
|
||||||
|
"failed_requests": self.failed_requests,
|
||||||
|
"success_rate": round(self.success_rate, 2),
|
||||||
|
"average_response_time_ms": round(
|
||||||
|
self.average_response_time_ms, 2
|
||||||
|
),
|
||||||
|
"last_error": self.last_error,
|
||||||
|
"last_error_time": (
|
||||||
|
self.last_error_time.isoformat()
|
||||||
|
if self.last_error_time
|
||||||
|
else None
|
||||||
|
),
|
||||||
|
"consecutive_failures": self.consecutive_failures,
|
||||||
|
"total_bytes_downloaded": self.total_bytes_downloaded,
|
||||||
|
"uptime_percentage": round(self.uptime_percentage, 2),
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
|
||||||
|
class RequestMetric:
|
||||||
|
"""Individual request metric."""
|
||||||
|
|
||||||
|
timestamp: datetime
|
||||||
|
success: bool
|
||||||
|
response_time_ms: float
|
||||||
|
bytes_transferred: int = 0
|
||||||
|
error_message: Optional[str] = None
|
||||||
|
|
||||||
|
|
||||||
|
class ProviderHealthMonitor:
|
||||||
|
"""Monitors health and performance of anime providers."""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
max_history_size: int = 1000,
|
||||||
|
health_check_interval: int = 300, # 5 minutes
|
||||||
|
failure_threshold: int = 3,
|
||||||
|
):
|
||||||
|
"""Initialize provider health monitor.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
max_history_size: Maximum number of request metrics to keep
|
||||||
|
per provider.
|
||||||
|
health_check_interval: Interval between health checks in
|
||||||
|
seconds.
|
||||||
|
failure_threshold: Number of consecutive failures before
|
||||||
|
marking unavailable.
|
||||||
|
"""
|
||||||
|
self._max_history_size = max_history_size
|
||||||
|
self._health_check_interval = health_check_interval
|
||||||
|
self._failure_threshold = failure_threshold
|
||||||
|
|
||||||
|
# Provider metrics storage
|
||||||
|
self._metrics: Dict[str, ProviderHealthMetrics] = {}
|
||||||
|
self._request_history: Dict[str, Deque[RequestMetric]] = defaultdict(
|
||||||
|
lambda: deque(maxlen=max_history_size)
|
||||||
|
)
|
||||||
|
|
||||||
|
# Health check task
|
||||||
|
self._health_check_task: Optional[asyncio.Task] = None
|
||||||
|
self._is_running = False
|
||||||
|
|
||||||
|
logger.info("Provider health monitor initialized")
|
||||||
|
|
||||||
|
def start_monitoring(self) -> None:
|
||||||
|
"""Start background health monitoring."""
|
||||||
|
if self._is_running:
|
||||||
|
logger.warning("Health monitoring already running")
|
||||||
|
return
|
||||||
|
|
||||||
|
self._is_running = True
|
||||||
|
self._health_check_task = asyncio.create_task(
|
||||||
|
self._health_check_loop()
|
||||||
|
)
|
||||||
|
logger.info("Provider health monitoring started")
|
||||||
|
|
||||||
|
async def stop_monitoring(self) -> None:
|
||||||
|
"""Stop background health monitoring."""
|
||||||
|
self._is_running = False
|
||||||
|
if self._health_check_task:
|
||||||
|
self._health_check_task.cancel()
|
||||||
|
try:
|
||||||
|
await self._health_check_task
|
||||||
|
except asyncio.CancelledError:
|
||||||
|
pass
|
||||||
|
self._health_check_task = None
|
||||||
|
logger.info("Provider health monitoring stopped")
|
||||||
|
|
||||||
|
async def _health_check_loop(self) -> None:
|
||||||
|
"""Background health check loop."""
|
||||||
|
while self._is_running:
|
||||||
|
try:
|
||||||
|
await self._perform_health_checks()
|
||||||
|
await asyncio.sleep(self._health_check_interval)
|
||||||
|
except asyncio.CancelledError:
|
||||||
|
break
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(f"Error in health check loop: {e}", exc_info=True)
|
||||||
|
await asyncio.sleep(self._health_check_interval)
|
||||||
|
|
||||||
|
async def _perform_health_checks(self) -> None:
|
||||||
|
"""Perform health checks on all registered providers."""
|
||||||
|
for provider_name in list(self._metrics.keys()):
|
||||||
|
try:
|
||||||
|
metrics = self._metrics[provider_name]
|
||||||
|
metrics.last_check_time = datetime.now()
|
||||||
|
|
||||||
|
# Update uptime percentage based on recent history
|
||||||
|
recent_metrics = self._get_recent_metrics(
|
||||||
|
provider_name, minutes=60
|
||||||
|
)
|
||||||
|
if recent_metrics:
|
||||||
|
successful = sum(1 for m in recent_metrics if m.success)
|
||||||
|
metrics.uptime_percentage = (
|
||||||
|
successful / len(recent_metrics)
|
||||||
|
) * 100
|
||||||
|
|
||||||
|
logger.debug(
|
||||||
|
f"Health check for {provider_name}: "
|
||||||
|
f"available={metrics.is_available}, "
|
||||||
|
f"success_rate={metrics.success_rate:.2f}%"
|
||||||
|
)
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(
|
||||||
|
f"Error checking health for {provider_name}: {e}",
|
||||||
|
exc_info=True,
|
||||||
|
)
|
||||||
|
|
||||||
|
def record_request(
|
||||||
|
self,
|
||||||
|
provider_name: str,
|
||||||
|
success: bool,
|
||||||
|
response_time_ms: float,
|
||||||
|
bytes_transferred: int = 0,
|
||||||
|
error_message: Optional[str] = None,
|
||||||
|
) -> None:
|
||||||
|
"""Record a provider request for health tracking.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
provider_name: Name of the provider.
|
||||||
|
success: Whether the request was successful.
|
||||||
|
response_time_ms: Response time in milliseconds.
|
||||||
|
bytes_transferred: Number of bytes transferred.
|
||||||
|
error_message: Error message if request failed.
|
||||||
|
"""
|
||||||
|
# Initialize metrics if not exists
|
||||||
|
if provider_name not in self._metrics:
|
||||||
|
self._metrics[provider_name] = ProviderHealthMetrics(
|
||||||
|
provider_name=provider_name
|
||||||
|
)
|
||||||
|
|
||||||
|
metrics = self._metrics[provider_name]
|
||||||
|
|
||||||
|
# Update request counts
|
||||||
|
metrics.total_requests += 1
|
||||||
|
if success:
|
||||||
|
metrics.successful_requests += 1
|
||||||
|
metrics.consecutive_failures = 0
|
||||||
|
else:
|
||||||
|
metrics.failed_requests += 1
|
||||||
|
metrics.consecutive_failures += 1
|
||||||
|
metrics.last_error = error_message
|
||||||
|
metrics.last_error_time = datetime.now()
|
||||||
|
|
||||||
|
# Update availability based on consecutive failures
|
||||||
|
if metrics.consecutive_failures >= self._failure_threshold:
|
||||||
|
if metrics.is_available:
|
||||||
|
logger.warning(
|
||||||
|
f"Provider {provider_name} marked as unavailable after "
|
||||||
|
f"{metrics.consecutive_failures} consecutive failures"
|
||||||
|
)
|
||||||
|
metrics.is_available = False
|
||||||
|
else:
|
||||||
|
metrics.is_available = True
|
||||||
|
|
||||||
|
# Update average response time
|
||||||
|
total_time = metrics.average_response_time_ms * (
|
||||||
|
metrics.total_requests - 1
|
||||||
|
)
|
||||||
|
metrics.average_response_time_ms = (
|
||||||
|
total_time + response_time_ms
|
||||||
|
) / metrics.total_requests
|
||||||
|
|
||||||
|
# Update bytes transferred
|
||||||
|
metrics.total_bytes_downloaded += bytes_transferred
|
||||||
|
|
||||||
|
# Store request metric in history
|
||||||
|
request_metric = RequestMetric(
|
||||||
|
timestamp=datetime.now(),
|
||||||
|
success=success,
|
||||||
|
response_time_ms=response_time_ms,
|
||||||
|
bytes_transferred=bytes_transferred,
|
||||||
|
error_message=error_message,
|
||||||
|
)
|
||||||
|
self._request_history[provider_name].append(request_metric)
|
||||||
|
|
||||||
|
logger.debug(
|
||||||
|
f"Recorded request for {provider_name}: "
|
||||||
|
f"success={success}, time={response_time_ms:.2f}ms"
|
||||||
|
)
|
||||||
|
|
||||||
|
def get_provider_metrics(
|
||||||
|
self, provider_name: str
|
||||||
|
) -> Optional[ProviderHealthMetrics]:
|
||||||
|
"""Get health metrics for a specific provider.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
provider_name: Name of the provider.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Provider health metrics or None if not found.
|
||||||
|
"""
|
||||||
|
return self._metrics.get(provider_name)
|
||||||
|
|
||||||
|
def get_all_metrics(self) -> Dict[str, ProviderHealthMetrics]:
|
||||||
|
"""Get health metrics for all providers.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Dictionary mapping provider names to their metrics.
|
||||||
|
"""
|
||||||
|
return self._metrics.copy()
|
||||||
|
|
||||||
|
def get_available_providers(self) -> List[str]:
|
||||||
|
"""Get list of currently available providers.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
List of available provider names.
|
||||||
|
"""
|
||||||
|
return [
|
||||||
|
name
|
||||||
|
for name, metrics in self._metrics.items()
|
||||||
|
if metrics.is_available
|
||||||
|
]
|
||||||
|
|
||||||
|
def get_best_provider(self) -> Optional[str]:
|
||||||
|
"""Get the best performing available provider.
|
||||||
|
|
||||||
|
Best is determined by:
|
||||||
|
1. Availability
|
||||||
|
2. Success rate
|
||||||
|
3. Response time
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Name of best provider or None if none available.
|
||||||
|
"""
|
||||||
|
available = [
|
||||||
|
(name, metrics)
|
||||||
|
for name, metrics in self._metrics.items()
|
||||||
|
if metrics.is_available
|
||||||
|
]
|
||||||
|
|
||||||
|
if not available:
|
||||||
|
return None
|
||||||
|
|
||||||
|
# Sort by success rate (descending) then response time (ascending)
|
||||||
|
available.sort(
|
||||||
|
key=lambda x: (-x[1].success_rate, x[1].average_response_time_ms)
|
||||||
|
)
|
||||||
|
|
||||||
|
best_provider = available[0][0]
|
||||||
|
logger.debug(f"Best provider selected: {best_provider}")
|
||||||
|
return best_provider
|
||||||
|
|
||||||
|
def _get_recent_metrics(
|
||||||
|
self, provider_name: str, minutes: int = 60
|
||||||
|
) -> List[RequestMetric]:
|
||||||
|
"""Get recent request metrics for a provider.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
provider_name: Name of the provider.
|
||||||
|
minutes: Number of minutes to look back.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
List of recent request metrics.
|
||||||
|
"""
|
||||||
|
if provider_name not in self._request_history:
|
||||||
|
return []
|
||||||
|
|
||||||
|
cutoff_time = datetime.now() - timedelta(minutes=minutes)
|
||||||
|
return [
|
||||||
|
metric
|
||||||
|
for metric in self._request_history[provider_name]
|
||||||
|
if metric.timestamp >= cutoff_time
|
||||||
|
]
|
||||||
|
|
||||||
|
def reset_provider_metrics(self, provider_name: str) -> bool:
|
||||||
|
"""Reset metrics for a specific provider.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
provider_name: Name of the provider.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
True if reset successful, False if provider not found.
|
||||||
|
"""
|
||||||
|
if provider_name not in self._metrics:
|
||||||
|
return False
|
||||||
|
|
||||||
|
self._metrics[provider_name] = ProviderHealthMetrics(
|
||||||
|
provider_name=provider_name
|
||||||
|
)
|
||||||
|
self._request_history[provider_name].clear()
|
||||||
|
logger.info(f"Reset metrics for provider: {provider_name}")
|
||||||
|
return True
|
||||||
|
|
||||||
|
def get_health_summary(self) -> Dict[str, Any]:
|
||||||
|
"""Get summary of overall provider health.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Dictionary with health summary statistics.
|
||||||
|
"""
|
||||||
|
total_providers = len(self._metrics)
|
||||||
|
available_providers = len(self.get_available_providers())
|
||||||
|
|
||||||
|
if total_providers == 0:
|
||||||
|
return {
|
||||||
|
"total_providers": 0,
|
||||||
|
"available_providers": 0,
|
||||||
|
"availability_percentage": 0.0,
|
||||||
|
"average_success_rate": 0.0,
|
||||||
|
"average_response_time_ms": 0.0,
|
||||||
|
}
|
||||||
|
|
||||||
|
avg_success_rate = sum(
|
||||||
|
m.success_rate for m in self._metrics.values()
|
||||||
|
) / total_providers
|
||||||
|
|
||||||
|
avg_response_time = sum(
|
||||||
|
m.average_response_time_ms for m in self._metrics.values()
|
||||||
|
) / total_providers
|
||||||
|
|
||||||
|
return {
|
||||||
|
"total_providers": total_providers,
|
||||||
|
"available_providers": available_providers,
|
||||||
|
"availability_percentage": (
|
||||||
|
available_providers / total_providers
|
||||||
|
)
|
||||||
|
* 100,
|
||||||
|
"average_success_rate": round(avg_success_rate, 2),
|
||||||
|
"average_response_time_ms": round(avg_response_time, 2),
|
||||||
|
"providers": {
|
||||||
|
name: metrics.to_dict()
|
||||||
|
for name, metrics in self._metrics.items()
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
# Global health monitor instance
|
||||||
|
_health_monitor: Optional[ProviderHealthMonitor] = None
|
||||||
|
|
||||||
|
|
||||||
|
def get_health_monitor() -> ProviderHealthMonitor:
|
||||||
|
"""Get or create global provider health monitor instance.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Global ProviderHealthMonitor instance.
|
||||||
|
"""
|
||||||
|
global _health_monitor
|
||||||
|
if _health_monitor is None:
|
||||||
|
_health_monitor = ProviderHealthMonitor()
|
||||||
|
return _health_monitor
|
||||||
307
src/core/providers/monitored_provider.py
Normal file
307
src/core/providers/monitored_provider.py
Normal file
@@ -0,0 +1,307 @@
|
|||||||
|
"""Performance monitoring wrapper for anime providers.
|
||||||
|
|
||||||
|
This module provides a wrapper that adds automatic performance tracking
|
||||||
|
to any provider implementation.
|
||||||
|
"""
|
||||||
|
import logging
|
||||||
|
import time
|
||||||
|
from typing import Any, Callable, Dict, List, Optional
|
||||||
|
|
||||||
|
from src.core.providers.base_provider import Loader
|
||||||
|
from src.core.providers.health_monitor import get_health_monitor
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class MonitoredProviderWrapper(Loader):
|
||||||
|
"""Wrapper that adds performance monitoring to any provider."""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
provider: Loader,
|
||||||
|
enable_monitoring: bool = True,
|
||||||
|
):
|
||||||
|
"""Initialize monitored provider wrapper.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
provider: Provider instance to wrap.
|
||||||
|
enable_monitoring: Whether to enable performance monitoring.
|
||||||
|
"""
|
||||||
|
self._provider = provider
|
||||||
|
self._enable_monitoring = enable_monitoring
|
||||||
|
self._health_monitor = (
|
||||||
|
get_health_monitor() if enable_monitoring else None
|
||||||
|
)
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
f"Monitoring wrapper initialized for provider: "
|
||||||
|
f"{provider.get_site_key()}"
|
||||||
|
)
|
||||||
|
|
||||||
|
def _record_operation(
|
||||||
|
self,
|
||||||
|
operation_name: str,
|
||||||
|
start_time: float,
|
||||||
|
success: bool,
|
||||||
|
bytes_transferred: int = 0,
|
||||||
|
error_message: Optional[str] = None,
|
||||||
|
) -> None:
|
||||||
|
"""Record operation metrics.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
operation_name: Name of the operation.
|
||||||
|
start_time: Operation start time (from time.time()).
|
||||||
|
success: Whether operation succeeded.
|
||||||
|
bytes_transferred: Number of bytes transferred.
|
||||||
|
error_message: Error message if operation failed.
|
||||||
|
"""
|
||||||
|
if not self._enable_monitoring or not self._health_monitor:
|
||||||
|
return
|
||||||
|
|
||||||
|
elapsed_ms = (time.time() - start_time) * 1000
|
||||||
|
provider_name = self._provider.get_site_key()
|
||||||
|
|
||||||
|
self._health_monitor.record_request(
|
||||||
|
provider_name=provider_name,
|
||||||
|
success=success,
|
||||||
|
response_time_ms=elapsed_ms,
|
||||||
|
bytes_transferred=bytes_transferred,
|
||||||
|
error_message=error_message,
|
||||||
|
)
|
||||||
|
|
||||||
|
if success:
|
||||||
|
logger.debug(
|
||||||
|
f"{operation_name} succeeded for {provider_name} "
|
||||||
|
f"in {elapsed_ms:.2f}ms"
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
logger.warning(
|
||||||
|
f"{operation_name} failed for {provider_name} "
|
||||||
|
f"in {elapsed_ms:.2f}ms: {error_message}"
|
||||||
|
)
|
||||||
|
|
||||||
|
def search(self, word: str) -> List[Dict[str, Any]]:
|
||||||
|
"""Search for anime series by name (with monitoring).
|
||||||
|
|
||||||
|
Args:
|
||||||
|
word: Search term to look for.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
List of found series as dictionaries.
|
||||||
|
"""
|
||||||
|
start_time = time.time()
|
||||||
|
try:
|
||||||
|
result = self._provider.search(word)
|
||||||
|
self._record_operation(
|
||||||
|
operation_name="search",
|
||||||
|
start_time=start_time,
|
||||||
|
success=True,
|
||||||
|
)
|
||||||
|
return result
|
||||||
|
except Exception as e:
|
||||||
|
self._record_operation(
|
||||||
|
operation_name="search",
|
||||||
|
start_time=start_time,
|
||||||
|
success=False,
|
||||||
|
error_message=str(e),
|
||||||
|
)
|
||||||
|
raise
|
||||||
|
|
||||||
|
def is_language(
|
||||||
|
self,
|
||||||
|
season: int,
|
||||||
|
episode: int,
|
||||||
|
key: str,
|
||||||
|
language: str = "German Dub",
|
||||||
|
) -> bool:
|
||||||
|
"""Check if episode exists in specified language (monitored).
|
||||||
|
|
||||||
|
Args:
|
||||||
|
season: Season number (1-indexed).
|
||||||
|
episode: Episode number (1-indexed).
|
||||||
|
key: Unique series identifier/key.
|
||||||
|
language: Language to check (default: German Dub).
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
True if episode exists in specified language.
|
||||||
|
"""
|
||||||
|
start_time = time.time()
|
||||||
|
try:
|
||||||
|
result = self._provider.is_language(
|
||||||
|
season, episode, key, language
|
||||||
|
)
|
||||||
|
self._record_operation(
|
||||||
|
operation_name="is_language",
|
||||||
|
start_time=start_time,
|
||||||
|
success=True,
|
||||||
|
)
|
||||||
|
return result
|
||||||
|
except Exception as e:
|
||||||
|
self._record_operation(
|
||||||
|
operation_name="is_language",
|
||||||
|
start_time=start_time,
|
||||||
|
success=False,
|
||||||
|
error_message=str(e),
|
||||||
|
)
|
||||||
|
raise
|
||||||
|
|
||||||
|
def download(
|
||||||
|
self,
|
||||||
|
base_directory: str,
|
||||||
|
serie_folder: str,
|
||||||
|
season: int,
|
||||||
|
episode: int,
|
||||||
|
key: str,
|
||||||
|
language: str = "German Dub",
|
||||||
|
progress_callback: Optional[Callable[[str, Dict], None]] = None,
|
||||||
|
) -> bool:
|
||||||
|
"""Download episode to specified directory (with monitoring).
|
||||||
|
|
||||||
|
Args:
|
||||||
|
base_directory: Base directory for downloads.
|
||||||
|
serie_folder: Series folder name.
|
||||||
|
season: Season number.
|
||||||
|
episode: Episode number.
|
||||||
|
key: Unique series identifier/key.
|
||||||
|
language: Language version to download.
|
||||||
|
progress_callback: Optional callback for progress updates.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
True if download successful.
|
||||||
|
"""
|
||||||
|
start_time = time.time()
|
||||||
|
bytes_transferred = 0
|
||||||
|
|
||||||
|
# Wrap progress callback to track bytes
|
||||||
|
if progress_callback and self._enable_monitoring:
|
||||||
|
|
||||||
|
def monitored_callback(event_type: str, data: Dict) -> None:
|
||||||
|
nonlocal bytes_transferred
|
||||||
|
if event_type == "progress" and "downloaded" in data:
|
||||||
|
bytes_transferred = data.get("downloaded", 0)
|
||||||
|
progress_callback(event_type, data)
|
||||||
|
|
||||||
|
wrapped_callback = monitored_callback
|
||||||
|
else:
|
||||||
|
wrapped_callback = progress_callback
|
||||||
|
|
||||||
|
try:
|
||||||
|
result = self._provider.download(
|
||||||
|
base_directory=base_directory,
|
||||||
|
serie_folder=serie_folder,
|
||||||
|
season=season,
|
||||||
|
episode=episode,
|
||||||
|
key=key,
|
||||||
|
language=language,
|
||||||
|
progress_callback=wrapped_callback,
|
||||||
|
)
|
||||||
|
self._record_operation(
|
||||||
|
operation_name="download",
|
||||||
|
start_time=start_time,
|
||||||
|
success=result,
|
||||||
|
bytes_transferred=bytes_transferred,
|
||||||
|
)
|
||||||
|
return result
|
||||||
|
except Exception as e:
|
||||||
|
self._record_operation(
|
||||||
|
operation_name="download",
|
||||||
|
start_time=start_time,
|
||||||
|
success=False,
|
||||||
|
bytes_transferred=bytes_transferred,
|
||||||
|
error_message=str(e),
|
||||||
|
)
|
||||||
|
raise
|
||||||
|
|
||||||
|
def get_site_key(self) -> str:
|
||||||
|
"""Get the site key/identifier for this provider.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Site key string.
|
||||||
|
"""
|
||||||
|
return self._provider.get_site_key()
|
||||||
|
|
||||||
|
def get_title(self, key: str) -> str:
|
||||||
|
"""Get the human-readable title of a series.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
key: Unique series identifier/key.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Series title string.
|
||||||
|
"""
|
||||||
|
start_time = time.time()
|
||||||
|
try:
|
||||||
|
result = self._provider.get_title(key)
|
||||||
|
self._record_operation(
|
||||||
|
operation_name="get_title",
|
||||||
|
start_time=start_time,
|
||||||
|
success=True,
|
||||||
|
)
|
||||||
|
return result
|
||||||
|
except Exception as e:
|
||||||
|
self._record_operation(
|
||||||
|
operation_name="get_title",
|
||||||
|
start_time=start_time,
|
||||||
|
success=False,
|
||||||
|
error_message=str(e),
|
||||||
|
)
|
||||||
|
raise
|
||||||
|
|
||||||
|
def get_season_episode_count(self, slug: str) -> Dict[int, int]:
|
||||||
|
"""Get season and episode counts for a series.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
slug: Series slug/key identifier.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Dictionary mapping season number to episode count.
|
||||||
|
"""
|
||||||
|
start_time = time.time()
|
||||||
|
try:
|
||||||
|
result = self._provider.get_season_episode_count(slug)
|
||||||
|
self._record_operation(
|
||||||
|
operation_name="get_season_episode_count",
|
||||||
|
start_time=start_time,
|
||||||
|
success=True,
|
||||||
|
)
|
||||||
|
return result
|
||||||
|
except Exception as e:
|
||||||
|
self._record_operation(
|
||||||
|
operation_name="get_season_episode_count",
|
||||||
|
start_time=start_time,
|
||||||
|
success=False,
|
||||||
|
error_message=str(e),
|
||||||
|
)
|
||||||
|
raise
|
||||||
|
|
||||||
|
@property
|
||||||
|
def wrapped_provider(self) -> Loader:
|
||||||
|
"""Get the underlying provider instance.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Wrapped provider instance.
|
||||||
|
"""
|
||||||
|
return self._provider
|
||||||
|
|
||||||
|
|
||||||
|
def wrap_provider(
|
||||||
|
provider: Loader,
|
||||||
|
enable_monitoring: bool = True,
|
||||||
|
) -> Loader:
|
||||||
|
"""Wrap a provider with performance monitoring.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
provider: Provider to wrap.
|
||||||
|
enable_monitoring: Whether to enable monitoring.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Monitored provider wrapper.
|
||||||
|
"""
|
||||||
|
if isinstance(provider, MonitoredProviderWrapper):
|
||||||
|
# Already wrapped
|
||||||
|
return provider
|
||||||
|
|
||||||
|
return MonitoredProviderWrapper(
|
||||||
|
provider=provider,
|
||||||
|
enable_monitoring=enable_monitoring,
|
||||||
|
)
|
||||||
79
src/core/providers/provider_config.py
Normal file
79
src/core/providers/provider_config.py
Normal file
@@ -0,0 +1,79 @@
|
|||||||
|
"""Shared provider configuration constants for AniWorld providers.
|
||||||
|
|
||||||
|
Centralizes user-agent strings, provider lists and common headers so
|
||||||
|
multiple provider implementations can import a single source of truth.
|
||||||
|
"""
|
||||||
|
from enum import Enum
|
||||||
|
from typing import Dict, List
|
||||||
|
|
||||||
|
|
||||||
|
class ProviderType(str, Enum):
|
||||||
|
"""Enumeration of supported video providers."""
|
||||||
|
VOE = "VOE"
|
||||||
|
DOODSTREAM = "Doodstream"
|
||||||
|
VIDMOLY = "Vidmoly"
|
||||||
|
VIDOZA = "Vidoza"
|
||||||
|
SPEEDFILES = "SpeedFiles"
|
||||||
|
STREAMTAPE = "Streamtape"
|
||||||
|
LULUVDO = "Luluvdo"
|
||||||
|
|
||||||
|
|
||||||
|
DEFAULT_PROVIDERS: List[str] = [
|
||||||
|
ProviderType.VOE.value,
|
||||||
|
ProviderType.DOODSTREAM.value,
|
||||||
|
ProviderType.VIDMOLY.value,
|
||||||
|
ProviderType.VIDOZA.value,
|
||||||
|
ProviderType.SPEEDFILES.value,
|
||||||
|
ProviderType.STREAMTAPE.value,
|
||||||
|
ProviderType.LULUVDO.value,
|
||||||
|
]
|
||||||
|
|
||||||
|
ANIWORLD_HEADERS: Dict[str, str] = {
|
||||||
|
"accept": (
|
||||||
|
"text/html,application/xhtml+xml,application/xml;q=0.9,"
|
||||||
|
"image/avif,image/webp,image/apng,*/*;q=0.8"
|
||||||
|
),
|
||||||
|
"accept-encoding": "gzip, deflate, br, zstd",
|
||||||
|
"accept-language": (
|
||||||
|
"de,de-DE;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6"
|
||||||
|
),
|
||||||
|
"cache-control": "max-age=0",
|
||||||
|
"priority": "u=0, i",
|
||||||
|
"sec-ch-ua": (
|
||||||
|
'"Chromium";v="136", "Microsoft Edge";v="136", '
|
||||||
|
'"Not.A/Brand";v="99"'
|
||||||
|
),
|
||||||
|
"sec-ch-ua-mobile": "?0",
|
||||||
|
"sec-ch-ua-platform": '"Windows"',
|
||||||
|
"sec-fetch-dest": "document",
|
||||||
|
"sec-fetch-mode": "navigate",
|
||||||
|
"sec-fetch-site": "none",
|
||||||
|
"sec-fetch-user": "?1",
|
||||||
|
"upgrade-insecure-requests": "1",
|
||||||
|
"user-agent": (
|
||||||
|
"Mozilla/5.0 (Windows NT 10.0; Win64; x64) "
|
||||||
|
"AppleWebKit/537.36 (KHTML, like Gecko) "
|
||||||
|
"Chrome/136.0.0.0 Safari/537.36 Edg/136.0.0.0"
|
||||||
|
),
|
||||||
|
}
|
||||||
|
|
||||||
|
INVALID_PATH_CHARS: List[str] = [
|
||||||
|
"<",
|
||||||
|
">",
|
||||||
|
":",
|
||||||
|
'"',
|
||||||
|
"/",
|
||||||
|
"\\",
|
||||||
|
"|",
|
||||||
|
"?",
|
||||||
|
"*",
|
||||||
|
"&",
|
||||||
|
]
|
||||||
|
|
||||||
|
LULUVDO_USER_AGENT = (
|
||||||
|
"Mozilla/5.0 (Android 15; Mobile; rv:132.0) "
|
||||||
|
"Gecko/132.0 Firefox/132.0"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Default download timeout (seconds)
|
||||||
|
DEFAULT_DOWNLOAD_TIMEOUT = 600
|
||||||
@@ -1,10 +1,56 @@
|
|||||||
from server.infrastructure.providers.aniworld_provider import AniworldLoader
|
"""Provider factory for managing anime content providers.
|
||||||
from server.infrastructure.providers.base_provider import Loader
|
|
||||||
|
This module provides a factory class for accessing different anime content
|
||||||
|
providers (loaders). The factory uses provider identifiers (keys) to return
|
||||||
|
the appropriate provider instance.
|
||||||
|
|
||||||
|
Note: The 'key' parameter in this factory refers to the provider identifier
|
||||||
|
(e.g., 'aniworld.to'), not to be confused with series keys used within
|
||||||
|
providers to identify specific anime series.
|
||||||
|
"""
|
||||||
|
|
||||||
|
from typing import Dict
|
||||||
|
|
||||||
|
from .aniworld_provider import AniworldLoader
|
||||||
|
from .base_provider import Loader
|
||||||
|
|
||||||
|
|
||||||
class Loaders:
|
class Loaders:
|
||||||
|
"""Factory class for managing and retrieving anime content providers.
|
||||||
|
|
||||||
def __init__(self):
|
This factory maintains a registry of available providers and provides
|
||||||
self.dict = {"aniworld.to": AniworldLoader()}
|
access to them via provider keys. Each provider implements the Loader
|
||||||
|
interface for searching and downloading anime content.
|
||||||
|
|
||||||
|
Attributes:
|
||||||
|
dict: Dictionary mapping provider keys to provider instances.
|
||||||
|
Provider keys are site identifiers (e.g., 'aniworld.to').
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self) -> None:
|
||||||
|
"""Initialize the provider factory with available providers.
|
||||||
|
|
||||||
|
Currently supports:
|
||||||
|
- 'aniworld.to': AniworldLoader for aniworld.to content
|
||||||
|
"""
|
||||||
|
self.dict: Dict[str, Loader] = {"aniworld.to": AniworldLoader()}
|
||||||
|
|
||||||
def GetLoader(self, key: str) -> Loader:
|
def GetLoader(self, key: str) -> Loader:
|
||||||
|
"""Retrieve a provider instance by its provider key.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
key: Provider identifier (e.g., 'aniworld.to').
|
||||||
|
This is the site/provider key, not a series key.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Loader instance for the specified provider.
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
KeyError: If the provider key is not found in the registry.
|
||||||
|
|
||||||
|
Note:
|
||||||
|
The 'key' parameter here identifies the provider/site, while
|
||||||
|
series-specific operations on the returned Loader use series
|
||||||
|
keys to identify individual anime series.
|
||||||
|
"""
|
||||||
return self.dict[key]
|
return self.dict[key]
|
||||||
|
|||||||
Binary file not shown.
Binary file not shown.
@@ -1,7 +1,27 @@
|
|||||||
from abc import ABC, abstractmethod
|
from abc import ABC, abstractmethod
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
|
||||||
class Provider(ABC):
|
class Provider(ABC):
|
||||||
|
"""Abstract base class for streaming providers."""
|
||||||
|
|
||||||
@abstractmethod
|
@abstractmethod
|
||||||
def GetLink(self, embededLink: str, DEFAULT_REQUEST_TIMEOUT: int) -> (str, [str]):
|
def get_link(
|
||||||
pass
|
self, embedded_link: str, timeout: int
|
||||||
|
) -> tuple[str, dict[str, Any]]:
|
||||||
|
"""
|
||||||
|
Extract direct download link from embedded player link.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
embedded_link: URL of the embedded player
|
||||||
|
timeout: Request timeout in seconds
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Tuple of (direct_link: str, headers: dict)
|
||||||
|
- direct_link: Direct URL to download resource
|
||||||
|
- headers: Dictionary of HTTP headers to use for download
|
||||||
|
"""
|
||||||
|
raise NotImplementedError(
|
||||||
|
"Streaming providers must implement get_link"
|
||||||
|
)
|
||||||
|
|
||||||
|
|||||||
@@ -1,59 +0,0 @@
|
|||||||
import re
|
|
||||||
import random
|
|
||||||
import time
|
|
||||||
|
|
||||||
from fake_useragent import UserAgent
|
|
||||||
import requests
|
|
||||||
from .Provider import Provider
|
|
||||||
class Doodstream(Provider):
|
|
||||||
|
|
||||||
def __init__(self):
|
|
||||||
self.RANDOM_USER_AGENT = UserAgent().random
|
|
||||||
|
|
||||||
def GetLink(self, embededLink: str, DEFAULT_REQUEST_TIMEOUT: int) -> str:
|
|
||||||
headers = {
|
|
||||||
'User-Agent': self.RANDOM_USER_AGENT,
|
|
||||||
'Referer': 'https://dood.li/'
|
|
||||||
}
|
|
||||||
|
|
||||||
def extract_data(pattern, content):
|
|
||||||
match = re.search(pattern, content)
|
|
||||||
return match.group(1) if match else None
|
|
||||||
|
|
||||||
def generate_random_string(length=10):
|
|
||||||
characters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789'
|
|
||||||
return ''.join(random.choice(characters) for _ in range(length))
|
|
||||||
|
|
||||||
response = requests.get(
|
|
||||||
embededLink,
|
|
||||||
headers=headers,
|
|
||||||
timeout=DEFAULT_REQUEST_TIMEOUT,
|
|
||||||
verify=False
|
|
||||||
)
|
|
||||||
response.raise_for_status()
|
|
||||||
|
|
||||||
pass_md5_pattern = r"\$\.get\('([^']*\/pass_md5\/[^']*)'"
|
|
||||||
pass_md5_url = extract_data(pass_md5_pattern, response.text)
|
|
||||||
if not pass_md5_url:
|
|
||||||
raise ValueError(
|
|
||||||
f'pass_md5 URL not found using {embededLink}.')
|
|
||||||
|
|
||||||
full_md5_url = f"https://dood.li{pass_md5_url}"
|
|
||||||
|
|
||||||
token_pattern = r"token=([a-zA-Z0-9]+)"
|
|
||||||
token = extract_data(token_pattern, response.text)
|
|
||||||
if not token:
|
|
||||||
raise ValueError(f'Token not found using {embededLink}.')
|
|
||||||
|
|
||||||
md5_response = requests.get(
|
|
||||||
full_md5_url, headers=headers, timeout=DEFAULT_REQUEST_TIMEOUT, verify=False)
|
|
||||||
md5_response.raise_for_status()
|
|
||||||
video_base_url = md5_response.text.strip()
|
|
||||||
|
|
||||||
random_string = generate_random_string(10)
|
|
||||||
expiry = int(time.time())
|
|
||||||
|
|
||||||
direct_link = f"{video_base_url}{random_string}?token={token}&expiry={expiry}"
|
|
||||||
# print(direct_link)
|
|
||||||
|
|
||||||
return direct_link
|
|
||||||
@@ -1,51 +0,0 @@
|
|||||||
import re
|
|
||||||
import requests
|
|
||||||
# import jsbeautifier.unpackers.packer as packer
|
|
||||||
|
|
||||||
from aniworld import config
|
|
||||||
|
|
||||||
REDIRECT_REGEX = re.compile(
|
|
||||||
r'<iframe *(?:[^>]+ )?src=(?:\'([^\']+)\'|"([^"]+)")[^>]*>')
|
|
||||||
SCRIPT_REGEX = re.compile(
|
|
||||||
r'(?s)<script\s+[^>]*?data-cfasync=["\']?false["\']?[^>]*>(.+?)</script>')
|
|
||||||
VIDEO_URL_REGEX = re.compile(r'file:\s*"([^"]+\.m3u8[^"]*)"')
|
|
||||||
|
|
||||||
# TODO Implement this script fully
|
|
||||||
|
|
||||||
|
|
||||||
def get_direct_link_from_filemoon(embeded_filemoon_link: str):
|
|
||||||
session = requests.Session()
|
|
||||||
session.verify = False
|
|
||||||
|
|
||||||
headers = {
|
|
||||||
"User-Agent": config.RANDOM_USER_AGENT,
|
|
||||||
"Referer": embeded_filemoon_link,
|
|
||||||
}
|
|
||||||
|
|
||||||
response = session.get(embeded_filemoon_link, headers=headers)
|
|
||||||
source = response.text
|
|
||||||
|
|
||||||
match = REDIRECT_REGEX.search(source)
|
|
||||||
if match:
|
|
||||||
redirect_url = match.group(1) or match.group(2)
|
|
||||||
response = session.get(redirect_url, headers=headers)
|
|
||||||
source = response.text
|
|
||||||
|
|
||||||
for script_match in SCRIPT_REGEX.finditer(source):
|
|
||||||
script_content = script_match.group(1).strip()
|
|
||||||
|
|
||||||
if not script_content.startswith("eval("):
|
|
||||||
continue
|
|
||||||
|
|
||||||
if packer.detect(script_content):
|
|
||||||
unpacked = packer.unpack(script_content)
|
|
||||||
video_match = VIDEO_URL_REGEX.search(unpacked)
|
|
||||||
if video_match:
|
|
||||||
return video_match.group(1)
|
|
||||||
|
|
||||||
raise Exception("No Video link found!")
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
url = input("Enter Filemoon Link: ")
|
|
||||||
print(get_direct_link_from_filemoon(url))
|
|
||||||
@@ -1,90 +0,0 @@
|
|||||||
import re
|
|
||||||
import json
|
|
||||||
import sys
|
|
||||||
import requests
|
|
||||||
from aniworld.config import DEFAULT_REQUEST_TIMEOUT
|
|
||||||
|
|
||||||
|
|
||||||
def fetch_page_content(url):
|
|
||||||
try:
|
|
||||||
response = requests.get(url, timeout=DEFAULT_REQUEST_TIMEOUT)
|
|
||||||
response.raise_for_status()
|
|
||||||
return response.text
|
|
||||||
except requests.exceptions.RequestException as e:
|
|
||||||
print(f"Failed to fetch the page content: {e}")
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def extract_video_data(page_content):
|
|
||||||
match = re.search(r'^.*videos_manifest.*$', page_content, re.MULTILINE)
|
|
||||||
if not match:
|
|
||||||
raise ValueError("Failed to extract video manifest from the response.")
|
|
||||||
|
|
||||||
json_str = match.group(0)[match.group(0).find(
|
|
||||||
'{'):match.group(0).rfind('}') + 1]
|
|
||||||
return json.loads(json_str)
|
|
||||||
|
|
||||||
|
|
||||||
def get_streams(url):
|
|
||||||
page_content = fetch_page_content(url)
|
|
||||||
data = extract_video_data(page_content)
|
|
||||||
video_info = data['state']['data']['video']
|
|
||||||
name = video_info['hentai_video']['name']
|
|
||||||
streams = video_info['videos_manifest']['servers'][0]['streams']
|
|
||||||
|
|
||||||
return {"name": name, "streams": streams}
|
|
||||||
|
|
||||||
|
|
||||||
def display_streams(streams):
|
|
||||||
if not streams:
|
|
||||||
print("No streams available.")
|
|
||||||
return
|
|
||||||
|
|
||||||
print("Available qualities:")
|
|
||||||
for i, stream in enumerate(streams, 1):
|
|
||||||
premium_tag = "(Premium)" if not stream['is_guest_allowed'] else ""
|
|
||||||
print(
|
|
||||||
f"{i}. {stream['width']}x{stream['height']}\t"
|
|
||||||
f"({stream['filesize_mbs']}MB) {premium_tag}")
|
|
||||||
|
|
||||||
|
|
||||||
def get_user_selection(streams):
|
|
||||||
try:
|
|
||||||
selected_index = int(input("Select a stream: ").strip()) - 1
|
|
||||||
if 0 <= selected_index < len(streams):
|
|
||||||
return selected_index
|
|
||||||
|
|
||||||
print("Invalid selection.")
|
|
||||||
return None
|
|
||||||
except ValueError:
|
|
||||||
print("Invalid input.")
|
|
||||||
return None
|
|
||||||
|
|
||||||
|
|
||||||
def get_direct_link_from_hanime(url=None):
|
|
||||||
try:
|
|
||||||
if url is None:
|
|
||||||
if len(sys.argv) > 1:
|
|
||||||
url = sys.argv[1]
|
|
||||||
else:
|
|
||||||
url = input("Please enter the hanime.tv video URL: ").strip()
|
|
||||||
|
|
||||||
try:
|
|
||||||
video_data = get_streams(url)
|
|
||||||
print(f"Video: {video_data['name']}")
|
|
||||||
print('*' * 40)
|
|
||||||
display_streams(video_data['streams'])
|
|
||||||
|
|
||||||
selected_index = None
|
|
||||||
while selected_index is None:
|
|
||||||
selected_index = get_user_selection(video_data['streams'])
|
|
||||||
|
|
||||||
print(f"M3U8 URL: {video_data['streams'][selected_index]['url']}")
|
|
||||||
except ValueError as e:
|
|
||||||
print(f"Error: {e}")
|
|
||||||
except KeyboardInterrupt:
|
|
||||||
pass
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == "__main__":
|
|
||||||
get_direct_link_from_hanime()
|
|
||||||
@@ -1,35 +0,0 @@
|
|||||||
import requests
|
|
||||||
import json
|
|
||||||
from urllib.parse import urlparse
|
|
||||||
|
|
||||||
# TODO Doesn't work on download yet and has to be implemented
|
|
||||||
|
|
||||||
|
|
||||||
def get_direct_link_from_loadx(embeded_loadx_link: str):
|
|
||||||
response = requests.head(
|
|
||||||
embeded_loadx_link, allow_redirects=True, verify=False)
|
|
||||||
|
|
||||||
parsed_url = urlparse(response.url)
|
|
||||||
path_parts = parsed_url.path.split("/")
|
|
||||||
if len(path_parts) < 3:
|
|
||||||
raise ValueError("Invalid path!")
|
|
||||||
|
|
||||||
id_hash = path_parts[2]
|
|
||||||
host = parsed_url.netloc
|
|
||||||
|
|
||||||
post_url = f"https://{host}/player/index.php?data={id_hash}&do=getVideo"
|
|
||||||
headers = {"X-Requested-With": "XMLHttpRequest"}
|
|
||||||
response = requests.post(post_url, headers=headers, verify=False)
|
|
||||||
|
|
||||||
data = json.loads(response.text)
|
|
||||||
print(data)
|
|
||||||
video_url = data.get("videoSource")
|
|
||||||
if not video_url:
|
|
||||||
raise ValueError("No Video link found!")
|
|
||||||
|
|
||||||
return video_url
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
url = input("Enter Loadx Link: ")
|
|
||||||
print(get_direct_link_from_loadx(url))
|
|
||||||
@@ -1,39 +0,0 @@
|
|||||||
import re
|
|
||||||
|
|
||||||
import requests
|
|
||||||
|
|
||||||
from aniworld import config
|
|
||||||
|
|
||||||
|
|
||||||
def get_direct_link_from_luluvdo(embeded_luluvdo_link, arguments=None):
|
|
||||||
luluvdo_id = embeded_luluvdo_link.split('/')[-1]
|
|
||||||
filelink = (
|
|
||||||
f"https://luluvdo.com/dl?op=embed&file_code={luluvdo_id}&embed=1&referer=luluvdo.com&adb=0"
|
|
||||||
)
|
|
||||||
|
|
||||||
# The User-Agent needs to be the same as the direct-link ones to work
|
|
||||||
headers = {
|
|
||||||
"Origin": "https://luluvdo.com",
|
|
||||||
"Referer": "https://luluvdo.com/",
|
|
||||||
"User-Agent": config.LULUVDO_USER_AGENT
|
|
||||||
}
|
|
||||||
|
|
||||||
if arguments.action == "Download":
|
|
||||||
headers["Accept-Language"] = "de-DE,de;q=0.9,en-US;q=0.8,en;q=0.7"
|
|
||||||
|
|
||||||
response = requests.get(filelink, headers=headers,
|
|
||||||
timeout=config.DEFAULT_REQUEST_TIMEOUT)
|
|
||||||
|
|
||||||
if response.status_code == 200:
|
|
||||||
pattern = r'file:\s*"([^"]+)"'
|
|
||||||
matches = re.findall(pattern, str(response.text))
|
|
||||||
|
|
||||||
if matches:
|
|
||||||
return matches[0]
|
|
||||||
|
|
||||||
raise ValueError("No match found")
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
url = input("Enter Luluvdo Link: ")
|
|
||||||
print(get_direct_link_from_luluvdo(url))
|
|
||||||
@@ -1,43 +0,0 @@
|
|||||||
import re
|
|
||||||
import base64
|
|
||||||
import requests
|
|
||||||
|
|
||||||
from aniworld.config import DEFAULT_REQUEST_TIMEOUT, RANDOM_USER_AGENT
|
|
||||||
|
|
||||||
SPEEDFILES_PATTERN = re.compile(r'var _0x5opu234 = "(?P<encoded_data>.*?)";')
|
|
||||||
|
|
||||||
|
|
||||||
def get_direct_link_from_speedfiles(embeded_speedfiles_link):
|
|
||||||
response = requests.get(
|
|
||||||
embeded_speedfiles_link,
|
|
||||||
timeout=DEFAULT_REQUEST_TIMEOUT,
|
|
||||||
headers={'User-Agent': RANDOM_USER_AGENT}
|
|
||||||
)
|
|
||||||
|
|
||||||
if "<span class=\"inline-block\">Web server is down</span>" in response.text:
|
|
||||||
raise ValueError(
|
|
||||||
"The SpeedFiles server is currently down.\n"
|
|
||||||
"Please try again later or choose a different hoster."
|
|
||||||
)
|
|
||||||
|
|
||||||
match = SPEEDFILES_PATTERN.search(response.text)
|
|
||||||
|
|
||||||
if not match:
|
|
||||||
raise ValueError("Pattern not found in the response.")
|
|
||||||
|
|
||||||
encoded_data = match.group("encoded_data")
|
|
||||||
decoded = base64.b64decode(encoded_data).decode()
|
|
||||||
decoded = decoded.swapcase()[::-1]
|
|
||||||
decoded = base64.b64decode(decoded).decode()[::-1]
|
|
||||||
decoded_hex = ''.join(chr(int(decoded[i:i + 2], 16))
|
|
||||||
for i in range(0, len(decoded), 2))
|
|
||||||
shifted = ''.join(chr(ord(char) - 3) for char in decoded_hex)
|
|
||||||
result = base64.b64decode(shifted.swapcase()[::-1]).decode()
|
|
||||||
|
|
||||||
return result
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
speedfiles_link = input("Enter Speedfiles Link: ")
|
|
||||||
print(get_direct_link_from_speedfiles(
|
|
||||||
embeded_speedfiles_link=speedfiles_link))
|
|
||||||
@@ -1,2 +0,0 @@
|
|||||||
def get_direct_link_from_streamtape(embeded_streamtape_link: str) -> str:
|
|
||||||
pass
|
|
||||||
@@ -1,34 +0,0 @@
|
|||||||
import re
|
|
||||||
|
|
||||||
import requests
|
|
||||||
from bs4 import BeautifulSoup
|
|
||||||
|
|
||||||
from aniworld.config import DEFAULT_REQUEST_TIMEOUT, RANDOM_USER_AGENT
|
|
||||||
|
|
||||||
|
|
||||||
def get_direct_link_from_vidmoly(embeded_vidmoly_link: str):
|
|
||||||
response = requests.get(
|
|
||||||
embeded_vidmoly_link,
|
|
||||||
headers={'User-Agent': RANDOM_USER_AGENT},
|
|
||||||
timeout=DEFAULT_REQUEST_TIMEOUT
|
|
||||||
)
|
|
||||||
html_content = response.text
|
|
||||||
soup = BeautifulSoup(html_content, 'html.parser')
|
|
||||||
scripts = soup.find_all('script')
|
|
||||||
|
|
||||||
file_link_pattern = r'file:\s*"(https?://.*?)"'
|
|
||||||
|
|
||||||
for script in scripts:
|
|
||||||
if script.string:
|
|
||||||
match = re.search(file_link_pattern, script.string)
|
|
||||||
if match:
|
|
||||||
file_link = match.group(1)
|
|
||||||
return file_link
|
|
||||||
|
|
||||||
raise ValueError("No direct link found.")
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
link = input("Enter Vidmoly Link: ")
|
|
||||||
print('Note: --referer "https://vidmoly.to"')
|
|
||||||
print(get_direct_link_from_vidmoly(embeded_vidmoly_link=link))
|
|
||||||
@@ -1,29 +0,0 @@
|
|||||||
import re
|
|
||||||
|
|
||||||
import requests
|
|
||||||
from bs4 import BeautifulSoup
|
|
||||||
|
|
||||||
from aniworld.config import DEFAULT_REQUEST_TIMEOUT, RANDOM_USER_AGENT
|
|
||||||
|
|
||||||
|
|
||||||
def get_direct_link_from_vidoza(embeded_vidoza_link: str) -> str:
|
|
||||||
response = requests.get(
|
|
||||||
embeded_vidoza_link,
|
|
||||||
headers={'User-Agent': RANDOM_USER_AGENT},
|
|
||||||
timeout=DEFAULT_REQUEST_TIMEOUT
|
|
||||||
)
|
|
||||||
|
|
||||||
soup = BeautifulSoup(response.content, "html.parser")
|
|
||||||
|
|
||||||
for tag in soup.find_all('script'):
|
|
||||||
if 'sourcesCode:' in tag.text:
|
|
||||||
match = re.search(r'src: "(.*?)"', tag.text)
|
|
||||||
if match:
|
|
||||||
return match.group(1)
|
|
||||||
|
|
||||||
raise ValueError("No direct link found.")
|
|
||||||
|
|
||||||
|
|
||||||
if __name__ == '__main__':
|
|
||||||
link = input("Enter Vidoza Link: ")
|
|
||||||
print(get_direct_link_from_vidoza(embeded_vidoza_link=link))
|
|
||||||
Binary file not shown.
Binary file not shown.
@@ -1,44 +1,65 @@
|
|||||||
import re
|
|
||||||
import base64
|
import base64
|
||||||
import json
|
import json
|
||||||
|
import re
|
||||||
|
|
||||||
from requests.adapters import HTTPAdapter
|
|
||||||
from urllib3.util.retry import Retry
|
|
||||||
import requests
|
import requests
|
||||||
from bs4 import BeautifulSoup
|
from bs4 import BeautifulSoup
|
||||||
from fake_useragent import UserAgent
|
from fake_useragent import UserAgent
|
||||||
|
from requests.adapters import HTTPAdapter
|
||||||
|
from urllib3.util.retry import Retry
|
||||||
|
|
||||||
from .Provider import Provider
|
from .Provider import Provider
|
||||||
|
|
||||||
# Compile regex patterns once for better performance
|
# Precompile the different pattern matchers used during extraction:
|
||||||
|
# - REDIRECT_PATTERN pulls the intermediate redirect URL from the bootstrap
|
||||||
|
# script so we can follow the provider's hand-off.
|
||||||
|
# - B64_PATTERN isolates the base64 encoded payload containing the ``source``
|
||||||
|
# field once decoded.
|
||||||
|
# - HLS_PATTERN captures the base64 encoded HLS manifest for fallback when
|
||||||
|
# no direct MP4 link is present.
|
||||||
REDIRECT_PATTERN = re.compile(r"https?://[^'\"<>]+")
|
REDIRECT_PATTERN = re.compile(r"https?://[^'\"<>]+")
|
||||||
B64_PATTERN = re.compile(r"var a168c='([^']+)'")
|
B64_PATTERN = re.compile(r"var a168c='([^']+)'")
|
||||||
HLS_PATTERN = re.compile(r"'hls': '(?P<hls>[^']+)'")
|
HLS_PATTERN = re.compile(r"'hls': '(?P<hls>[^']+)'")
|
||||||
|
|
||||||
|
|
||||||
class VOE(Provider):
|
class VOE(Provider):
|
||||||
|
"""VOE video provider implementation."""
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
self.RANDOM_USER_AGENT = UserAgent().random
|
self.RANDOM_USER_AGENT = UserAgent().random
|
||||||
self.Header = {
|
self.Header = {"User-Agent": self.RANDOM_USER_AGENT}
|
||||||
"User-Agent": self.RANDOM_USER_AGENT
|
|
||||||
}
|
def get_link(
|
||||||
def GetLink(self, embededLink: str, DEFAULT_REQUEST_TIMEOUT: int) -> (str, [str]):
|
self, embedded_link: str, timeout: int
|
||||||
|
) -> tuple[str, dict]:
|
||||||
|
"""
|
||||||
|
Extract direct download link from VOE embedded player.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
embedded_link: URL of the embedded VOE player
|
||||||
|
timeout: Request timeout in seconds
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Tuple of (direct_link, headers)
|
||||||
|
"""
|
||||||
self.session = requests.Session()
|
self.session = requests.Session()
|
||||||
|
|
||||||
# Configure retries with backoff
|
# Configure retries with backoff
|
||||||
retries = Retry(
|
retries = Retry(
|
||||||
total=5, # Number of retries
|
total=5, # Number of retries
|
||||||
backoff_factor=1, # Delay multiplier (1s, 2s, 4s, ...)
|
backoff_factor=1, # Delay multiplier (1s, 2s, 4s, ...)
|
||||||
status_forcelist=[500, 502, 503, 504], # Retry for specific HTTP errors
|
status_forcelist=[500, 502, 503, 504],
|
||||||
allowed_methods=["GET"]
|
allowed_methods=["GET"],
|
||||||
)
|
)
|
||||||
|
|
||||||
adapter = HTTPAdapter(max_retries=retries)
|
adapter = HTTPAdapter(max_retries=retries)
|
||||||
self.session.mount("https://", adapter)
|
self.session.mount("https://", adapter)
|
||||||
DEFAULT_REQUEST_TIMEOUT = 30
|
timeout = 30
|
||||||
|
|
||||||
response = self.session.get(
|
response = self.session.get(
|
||||||
embededLink,
|
embedded_link,
|
||||||
headers={'User-Agent': self.RANDOM_USER_AGENT},
|
headers={"User-Agent": self.RANDOM_USER_AGENT},
|
||||||
timeout=DEFAULT_REQUEST_TIMEOUT
|
timeout=timeout,
|
||||||
)
|
)
|
||||||
|
|
||||||
redirect = re.search(r"https?://[^'\"<>]+", response.text)
|
redirect = re.search(r"https?://[^'\"<>]+", response.text)
|
||||||
@@ -49,17 +70,18 @@ class VOE(Provider):
|
|||||||
parts = redirect_url.strip().split("/")
|
parts = redirect_url.strip().split("/")
|
||||||
self.Header["Referer"] = f"{parts[0]}//{parts[2]}/"
|
self.Header["Referer"] = f"{parts[0]}//{parts[2]}/"
|
||||||
|
|
||||||
response = self.session.get(redirect_url, headers={'User-Agent': self.RANDOM_USER_AGENT})
|
response = self.session.get(
|
||||||
|
redirect_url, headers={"User-Agent": self.RANDOM_USER_AGENT}
|
||||||
|
)
|
||||||
html = response.content
|
html = response.content
|
||||||
|
|
||||||
|
|
||||||
# Method 1: Extract from script tag
|
# Method 1: Extract from script tag
|
||||||
extracted = self.extract_voe_from_script(html)
|
extracted = self.extract_voe_from_script(html)
|
||||||
if extracted:
|
if extracted:
|
||||||
return extracted, self.Header
|
return extracted, self.Header
|
||||||
|
|
||||||
# Method 2: Extract from base64 encoded variable
|
# Method 2: Extract from base64 encoded variable
|
||||||
htmlText = html.decode('utf-8')
|
htmlText = html.decode("utf-8")
|
||||||
b64_match = B64_PATTERN.search(htmlText)
|
b64_match = B64_PATTERN.search(htmlText)
|
||||||
if b64_match:
|
if b64_match:
|
||||||
decoded = base64.b64decode(b64_match.group(1)).decode()[::-1]
|
decoded = base64.b64decode(b64_match.group(1)).decode()[::-1]
|
||||||
@@ -70,10 +92,14 @@ class VOE(Provider):
|
|||||||
# Method 3: Extract HLS source
|
# Method 3: Extract HLS source
|
||||||
hls_match = HLS_PATTERN.search(htmlText)
|
hls_match = HLS_PATTERN.search(htmlText)
|
||||||
if hls_match:
|
if hls_match:
|
||||||
return base64.b64decode(hls_match.group("hls")).decode(), self.Header
|
decoded_hls = base64.b64decode(hls_match.group("hls")).decode()
|
||||||
|
return decoded_hls, self.Header
|
||||||
|
|
||||||
def shift_letters(self, input_str):
|
raise ValueError("Could not extract download link from VOE")
|
||||||
result = ''
|
|
||||||
|
def shift_letters(self, input_str: str) -> str:
|
||||||
|
"""Apply ROT13 shift to letters."""
|
||||||
|
result = ""
|
||||||
for c in input_str:
|
for c in input_str:
|
||||||
code = ord(c)
|
code = ord(c)
|
||||||
if 65 <= code <= 90:
|
if 65 <= code <= 90:
|
||||||
@@ -83,28 +109,28 @@ class VOE(Provider):
|
|||||||
result += chr(code)
|
result += chr(code)
|
||||||
return result
|
return result
|
||||||
|
|
||||||
|
def replace_junk(self, input_str: str) -> str:
|
||||||
def replace_junk(self, input_str):
|
"""Replace junk character sequences."""
|
||||||
junk_parts = ['@$', '^^', '~@', '%?', '*~', '!!', '#&']
|
junk_parts = ["@$", "^^", "~@", "%?", "*~", "!!", "#&"]
|
||||||
for part in junk_parts:
|
for part in junk_parts:
|
||||||
input_str = re.sub(re.escape(part), '_', input_str)
|
input_str = re.sub(re.escape(part), "_", input_str)
|
||||||
return input_str
|
return input_str
|
||||||
|
|
||||||
|
def shift_back(self, s: str, n: int) -> str:
|
||||||
|
"""Shift characters back by n positions."""
|
||||||
|
return "".join(chr(ord(c) - n) for c in s)
|
||||||
|
|
||||||
def shift_back(self, s, n):
|
def decode_voe_string(self, encoded: str) -> dict:
|
||||||
return ''.join(chr(ord(c) - n) for c in s)
|
"""Decode VOE-encoded string to extract video source."""
|
||||||
|
|
||||||
|
|
||||||
def decode_voe_string(self, encoded):
|
|
||||||
step1 = self.shift_letters(encoded)
|
step1 = self.shift_letters(encoded)
|
||||||
step2 = self.replace_junk(step1).replace('_', '')
|
step2 = self.replace_junk(step1).replace("_", "")
|
||||||
step3 = base64.b64decode(step2).decode()
|
step3 = base64.b64decode(step2).decode()
|
||||||
step4 = self.shift_back(step3, 3)
|
step4 = self.shift_back(step3, 3)
|
||||||
step5 = base64.b64decode(step4[::-1]).decode()
|
step5 = base64.b64decode(step4[::-1]).decode()
|
||||||
return json.loads(step5)
|
return json.loads(step5)
|
||||||
|
|
||||||
|
def extract_voe_from_script(self, html: bytes) -> str:
|
||||||
def extract_voe_from_script(self, html):
|
"""Extract download link from VOE script tag."""
|
||||||
soup = BeautifulSoup(html, "html.parser")
|
soup = BeautifulSoup(html, "html.parser")
|
||||||
script = soup.find("script", type="application/json")
|
script = soup.find("script", type="application/json")
|
||||||
return self.decode_voe_string(script.text[2:-2])["source"]
|
return self.decode_voe_string(script.text[2:-2])["source"]
|
||||||
|
|||||||
237
src/core/services/nfo_factory.py
Normal file
237
src/core/services/nfo_factory.py
Normal file
@@ -0,0 +1,237 @@
|
|||||||
|
"""NFO Service Factory Module.
|
||||||
|
|
||||||
|
This module provides a centralized factory for creating NFOService instances
|
||||||
|
with consistent configuration and initialization logic.
|
||||||
|
|
||||||
|
The factory supports both direct instantiation and FastAPI dependency injection,
|
||||||
|
while remaining testable through optional dependency overrides.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import logging
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from src.config.settings import settings
|
||||||
|
from src.core.services.nfo_service import NFOService
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class NFOServiceFactory:
|
||||||
|
"""Factory for creating NFOService instances with consistent configuration.
|
||||||
|
|
||||||
|
This factory centralizes NFO service initialization logic that was previously
|
||||||
|
duplicated across multiple modules (SeriesApp, SeriesManagerService, API endpoints).
|
||||||
|
|
||||||
|
The factory follows these precedence rules for configuration:
|
||||||
|
1. Explicit parameters (highest priority)
|
||||||
|
2. Environment variables via settings
|
||||||
|
3. config.json via ConfigService (fallback)
|
||||||
|
4. Raise error if TMDB API key unavailable
|
||||||
|
|
||||||
|
Example:
|
||||||
|
>>> factory = NFOServiceFactory()
|
||||||
|
>>> nfo_service = factory.create()
|
||||||
|
>>> # Or with custom settings:
|
||||||
|
>>> nfo_service = factory.create(tmdb_api_key="custom_key")
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self):
|
||||||
|
"""Initialize the NFO service factory."""
|
||||||
|
self._config_service = None
|
||||||
|
|
||||||
|
def create(
|
||||||
|
self,
|
||||||
|
tmdb_api_key: Optional[str] = None,
|
||||||
|
anime_directory: Optional[str] = None,
|
||||||
|
image_size: Optional[str] = None,
|
||||||
|
auto_create: Optional[bool] = None
|
||||||
|
) -> NFOService:
|
||||||
|
"""Create an NFOService instance with proper configuration.
|
||||||
|
|
||||||
|
This method implements the configuration precedence:
|
||||||
|
1. Use explicit parameters if provided
|
||||||
|
2. Fall back to settings (from ENV vars)
|
||||||
|
3. Fall back to config.json (only if ENV not set)
|
||||||
|
4. Raise ValueError if TMDB API key still unavailable
|
||||||
|
|
||||||
|
Args:
|
||||||
|
tmdb_api_key: TMDB API key (optional, falls back to settings/config)
|
||||||
|
anime_directory: Anime directory path (optional, defaults to settings)
|
||||||
|
image_size: Image size for downloads (optional, defaults to settings)
|
||||||
|
auto_create: Whether to auto-create NFO files (optional, defaults to settings)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
NFOService: Configured NFO service instance
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
ValueError: If TMDB API key cannot be determined from any source
|
||||||
|
|
||||||
|
Example:
|
||||||
|
>>> factory = NFOServiceFactory()
|
||||||
|
>>> # Use all defaults from settings
|
||||||
|
>>> service = factory.create()
|
||||||
|
>>> # Override specific settings
|
||||||
|
>>> service = factory.create(auto_create=False)
|
||||||
|
"""
|
||||||
|
# Step 1: Determine TMDB API key with fallback logic
|
||||||
|
api_key = tmdb_api_key or settings.tmdb_api_key
|
||||||
|
|
||||||
|
# Step 2: If no API key in settings, try config.json as fallback
|
||||||
|
if not api_key:
|
||||||
|
api_key = self._get_api_key_from_config()
|
||||||
|
|
||||||
|
# Step 3: Validate API key is available
|
||||||
|
if not api_key:
|
||||||
|
raise ValueError(
|
||||||
|
"TMDB API key not configured. Set TMDB_API_KEY environment "
|
||||||
|
"variable or configure in config.json (nfo.tmdb_api_key)."
|
||||||
|
)
|
||||||
|
|
||||||
|
# Step 4: Use provided values or fall back to settings
|
||||||
|
directory = anime_directory or settings.anime_directory
|
||||||
|
size = image_size or settings.nfo_image_size
|
||||||
|
auto = auto_create if auto_create is not None else settings.nfo_auto_create
|
||||||
|
|
||||||
|
# Step 5: Create and return the service
|
||||||
|
logger.debug(
|
||||||
|
"Creating NFOService: directory=%s, size=%s, auto_create=%s",
|
||||||
|
directory, size, auto
|
||||||
|
)
|
||||||
|
|
||||||
|
return NFOService(
|
||||||
|
tmdb_api_key=api_key,
|
||||||
|
anime_directory=directory,
|
||||||
|
image_size=size,
|
||||||
|
auto_create=auto
|
||||||
|
)
|
||||||
|
|
||||||
|
def create_optional(
|
||||||
|
self,
|
||||||
|
tmdb_api_key: Optional[str] = None,
|
||||||
|
anime_directory: Optional[str] = None,
|
||||||
|
image_size: Optional[str] = None,
|
||||||
|
auto_create: Optional[bool] = None
|
||||||
|
) -> Optional[NFOService]:
|
||||||
|
"""Create an NFOService instance, returning None if configuration unavailable.
|
||||||
|
|
||||||
|
This is a convenience method for cases where NFO service is optional.
|
||||||
|
Unlike create(), this returns None instead of raising ValueError when
|
||||||
|
the TMDB API key is not configured.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
tmdb_api_key: TMDB API key (optional)
|
||||||
|
anime_directory: Anime directory path (optional)
|
||||||
|
image_size: Image size for downloads (optional)
|
||||||
|
auto_create: Whether to auto-create NFO files (optional)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Optional[NFOService]: Configured service or None if key unavailable
|
||||||
|
|
||||||
|
Example:
|
||||||
|
>>> factory = NFOServiceFactory()
|
||||||
|
>>> service = factory.create_optional()
|
||||||
|
>>> if service:
|
||||||
|
... service.create_tvshow_nfo(...)
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
return self.create(
|
||||||
|
tmdb_api_key=tmdb_api_key,
|
||||||
|
anime_directory=anime_directory,
|
||||||
|
image_size=image_size,
|
||||||
|
auto_create=auto_create
|
||||||
|
)
|
||||||
|
except ValueError as e:
|
||||||
|
logger.debug("NFO service not available: %s", e)
|
||||||
|
return None
|
||||||
|
|
||||||
|
def _get_api_key_from_config(self) -> Optional[str]:
|
||||||
|
"""Get TMDB API key from config.json as fallback.
|
||||||
|
|
||||||
|
This method is only called when the API key is not in settings
|
||||||
|
(i.e., not set via environment variable). It provides backward
|
||||||
|
compatibility with config.json configuration.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Optional[str]: API key from config.json, or None if unavailable
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
# Lazy import to avoid circular dependencies
|
||||||
|
from src.server.services.config_service import get_config_service
|
||||||
|
|
||||||
|
if self._config_service is None:
|
||||||
|
self._config_service = get_config_service()
|
||||||
|
|
||||||
|
config = self._config_service.load_config()
|
||||||
|
|
||||||
|
if config.nfo and config.nfo.tmdb_api_key:
|
||||||
|
logger.debug("Using TMDB API key from config.json")
|
||||||
|
return config.nfo.tmdb_api_key
|
||||||
|
|
||||||
|
except Exception as e: # pylint: disable=broad-except
|
||||||
|
logger.debug("Could not load API key from config.json: %s", e)
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
# Global factory instance for convenience
|
||||||
|
_factory_instance: Optional[NFOServiceFactory] = None
|
||||||
|
|
||||||
|
|
||||||
|
def get_nfo_factory() -> NFOServiceFactory:
|
||||||
|
"""Get the global NFO service factory instance.
|
||||||
|
|
||||||
|
This function provides a singleton factory instance for the application.
|
||||||
|
The singleton pattern here is for the factory itself (which is stateless),
|
||||||
|
not for the NFO service instances it creates.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
NFOServiceFactory: The global factory instance
|
||||||
|
|
||||||
|
Example:
|
||||||
|
>>> factory = get_nfo_factory()
|
||||||
|
>>> service = factory.create()
|
||||||
|
"""
|
||||||
|
global _factory_instance
|
||||||
|
|
||||||
|
if _factory_instance is None:
|
||||||
|
_factory_instance = NFOServiceFactory()
|
||||||
|
|
||||||
|
return _factory_instance
|
||||||
|
|
||||||
|
|
||||||
|
def create_nfo_service(
|
||||||
|
tmdb_api_key: Optional[str] = None,
|
||||||
|
anime_directory: Optional[str] = None,
|
||||||
|
image_size: Optional[str] = None,
|
||||||
|
auto_create: Optional[bool] = None
|
||||||
|
) -> NFOService:
|
||||||
|
"""Convenience function to create an NFOService instance.
|
||||||
|
|
||||||
|
This is a shorthand for get_nfo_factory().create() that can be used
|
||||||
|
when you need a quick NFO service instance without interacting with
|
||||||
|
the factory directly.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
tmdb_api_key: TMDB API key (optional)
|
||||||
|
anime_directory: Anime directory path (optional)
|
||||||
|
image_size: Image size for downloads (optional)
|
||||||
|
auto_create: Whether to auto-create NFO files (optional)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
NFOService: Configured NFO service instance
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
ValueError: If TMDB API key cannot be determined
|
||||||
|
|
||||||
|
Example:
|
||||||
|
>>> service = create_nfo_service()
|
||||||
|
>>> # Or with custom settings:
|
||||||
|
>>> service = create_nfo_service(auto_create=False)
|
||||||
|
"""
|
||||||
|
factory = get_nfo_factory()
|
||||||
|
return factory.create(
|
||||||
|
tmdb_api_key=tmdb_api_key,
|
||||||
|
anime_directory=anime_directory,
|
||||||
|
image_size=image_size,
|
||||||
|
auto_create=auto_create
|
||||||
|
)
|
||||||
180
src/core/services/nfo_repair_service.py
Normal file
180
src/core/services/nfo_repair_service.py
Normal file
@@ -0,0 +1,180 @@
|
|||||||
|
"""NFO repair service for detecting and fixing incomplete tvshow.nfo files.
|
||||||
|
|
||||||
|
This module provides utilities to check whether an existing ``tvshow.nfo``
|
||||||
|
contains all required tags and to trigger a repair (re-fetch from TMDB) when
|
||||||
|
needed.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
>>> service = NfoRepairService(nfo_service)
|
||||||
|
>>> repaired = await service.repair_series(Path("/anime/Attack on Titan"), "Attack on Titan")
|
||||||
|
"""
|
||||||
|
|
||||||
|
import logging
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Dict, List
|
||||||
|
|
||||||
|
from lxml import etree
|
||||||
|
|
||||||
|
from src.core.services.nfo_service import NFOService
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
# XPath relative to <tvshow> root → human-readable label
|
||||||
|
REQUIRED_TAGS: Dict[str, str] = {
|
||||||
|
"./title": "title",
|
||||||
|
"./originaltitle": "originaltitle",
|
||||||
|
"./year": "year",
|
||||||
|
"./plot": "plot",
|
||||||
|
"./runtime": "runtime",
|
||||||
|
"./premiered": "premiered",
|
||||||
|
"./status": "status",
|
||||||
|
"./imdbid": "imdbid",
|
||||||
|
"./genre": "genre",
|
||||||
|
"./studio": "studio",
|
||||||
|
"./country": "country",
|
||||||
|
"./actor/name": "actor/name",
|
||||||
|
"./watched": "watched",
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def parse_nfo_tags(nfo_path: Path) -> Dict[str, List[str]]:
|
||||||
|
"""Parse an existing tvshow.nfo and return present tag values.
|
||||||
|
|
||||||
|
Evaluates every XPath in :data:`REQUIRED_TAGS` against the document root
|
||||||
|
and collects all non-empty text values.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
nfo_path: Absolute path to the ``tvshow.nfo`` file.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Mapping of XPath expression → list of non-empty text strings found in
|
||||||
|
the document. Returns an empty dict on any error (missing file,
|
||||||
|
invalid XML, permission error).
|
||||||
|
|
||||||
|
Example:
|
||||||
|
>>> tags = parse_nfo_tags(Path("/anime/Attack on Titan/tvshow.nfo"))
|
||||||
|
>>> tags.get("./title")
|
||||||
|
['Attack on Titan']
|
||||||
|
"""
|
||||||
|
if not nfo_path.exists():
|
||||||
|
logger.debug("NFO file not found: %s", nfo_path)
|
||||||
|
return {}
|
||||||
|
|
||||||
|
try:
|
||||||
|
tree = etree.parse(str(nfo_path))
|
||||||
|
root = tree.getroot()
|
||||||
|
|
||||||
|
result: Dict[str, List[str]] = {}
|
||||||
|
for xpath in REQUIRED_TAGS:
|
||||||
|
elements = root.findall(xpath)
|
||||||
|
result[xpath] = [e.text for e in elements if e.text]
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
except etree.XMLSyntaxError as exc:
|
||||||
|
logger.warning("Malformed XML in %s: %s", nfo_path, exc)
|
||||||
|
return {}
|
||||||
|
except Exception as exc: # pylint: disable=broad-except
|
||||||
|
logger.warning("Unexpected error parsing %s: %s", nfo_path, exc)
|
||||||
|
return {}
|
||||||
|
|
||||||
|
|
||||||
|
def find_missing_tags(nfo_path: Path) -> List[str]:
|
||||||
|
"""Return tags that are absent or empty in the NFO.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
nfo_path: Absolute path to the ``tvshow.nfo`` file.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
List of human-readable tag labels (values from :data:`REQUIRED_TAGS`)
|
||||||
|
whose XPath matched no elements or only elements with empty text.
|
||||||
|
An empty list means the NFO is complete.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
>>> missing = find_missing_tags(Path("/anime/series/tvshow.nfo"))
|
||||||
|
>>> if missing:
|
||||||
|
... print("Missing:", missing)
|
||||||
|
"""
|
||||||
|
parsed = parse_nfo_tags(nfo_path)
|
||||||
|
missing: List[str] = []
|
||||||
|
for xpath, label in REQUIRED_TAGS.items():
|
||||||
|
if not parsed.get(xpath):
|
||||||
|
missing.append(label)
|
||||||
|
return missing
|
||||||
|
|
||||||
|
|
||||||
|
def nfo_needs_repair(nfo_path: Path) -> bool:
|
||||||
|
"""Return ``True`` if the NFO is missing any required tag.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
nfo_path: Absolute path to the ``tvshow.nfo`` file.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
True if :func:`find_missing_tags` returns a non-empty list.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
>>> if nfo_needs_repair(Path("/anime/series/tvshow.nfo")):
|
||||||
|
... await service.repair_series(series_path, series_name)
|
||||||
|
"""
|
||||||
|
return bool(find_missing_tags(nfo_path))
|
||||||
|
|
||||||
|
|
||||||
|
class NfoRepairService:
|
||||||
|
"""Service that detects and repairs incomplete tvshow.nfo files.
|
||||||
|
|
||||||
|
Wraps the module-level helpers with structured logging and delegates
|
||||||
|
the actual TMDB re-fetch to an injected :class:`NFOService` instance.
|
||||||
|
|
||||||
|
Attributes:
|
||||||
|
_nfo_service: The underlying NFOService used to update NFOs.
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(self, nfo_service: NFOService) -> None:
|
||||||
|
"""Initialise the repair service.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
nfo_service: Configured :class:`NFOService` instance.
|
||||||
|
"""
|
||||||
|
self._nfo_service = nfo_service
|
||||||
|
|
||||||
|
async def repair_series(self, series_path: Path, series_name: str) -> bool:
|
||||||
|
"""Repair an NFO file if required tags are missing.
|
||||||
|
|
||||||
|
Checks ``{series_path}/tvshow.nfo`` for completeness. If tags are
|
||||||
|
missing, logs them and calls
|
||||||
|
``NFOService.update_tvshow_nfo(series_name)`` to re-fetch metadata
|
||||||
|
from TMDB.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
series_path: Absolute path to the series folder.
|
||||||
|
series_name: Series folder name used as the identifier for
|
||||||
|
:meth:`NFOService.update_tvshow_nfo`.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
``True`` if a repair was triggered, ``False`` if the NFO was
|
||||||
|
already complete (or did not exist).
|
||||||
|
"""
|
||||||
|
nfo_path = series_path / "tvshow.nfo"
|
||||||
|
missing = find_missing_tags(nfo_path)
|
||||||
|
|
||||||
|
if not missing:
|
||||||
|
logger.info(
|
||||||
|
"NFO repair skipped — complete: %s",
|
||||||
|
series_name,
|
||||||
|
)
|
||||||
|
return False
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
"NFO repair triggered for %s — missing tags: %s",
|
||||||
|
series_name,
|
||||||
|
", ".join(missing),
|
||||||
|
)
|
||||||
|
|
||||||
|
await self._nfo_service.update_tvshow_nfo(
|
||||||
|
series_name,
|
||||||
|
download_media=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
logger.info("NFO repair completed: %s", series_name)
|
||||||
|
return True
|
||||||
555
src/core/services/nfo_service.py
Normal file
555
src/core/services/nfo_service.py
Normal file
@@ -0,0 +1,555 @@
|
|||||||
|
"""NFO service for creating and managing tvshow.nfo files.
|
||||||
|
|
||||||
|
This service orchestrates TMDB API calls, XML generation, and media downloads
|
||||||
|
to create complete NFO metadata for TV series.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
>>> nfo_service = NFOService(tmdb_api_key="key", anime_directory="/anime")
|
||||||
|
>>> await nfo_service.create_tvshow_nfo("Attack on Titan", "/anime/aot", 2013)
|
||||||
|
"""
|
||||||
|
|
||||||
|
import logging
|
||||||
|
import re
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, Dict, List, Optional, Tuple
|
||||||
|
|
||||||
|
from lxml import etree
|
||||||
|
|
||||||
|
from src.core.services.tmdb_client import TMDBAPIError, TMDBClient
|
||||||
|
from src.core.utils.image_downloader import ImageDownloader
|
||||||
|
from src.core.utils.nfo_generator import generate_tvshow_nfo
|
||||||
|
from src.core.utils.nfo_mapper import tmdb_to_nfo_model
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class NFOService:
|
||||||
|
"""Service for creating and managing tvshow.nfo files.
|
||||||
|
|
||||||
|
Attributes:
|
||||||
|
tmdb_client: TMDB API client
|
||||||
|
image_downloader: Image downloader utility
|
||||||
|
anime_directory: Base directory for anime series
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
tmdb_api_key: str,
|
||||||
|
anime_directory: str,
|
||||||
|
image_size: str = "original",
|
||||||
|
auto_create: bool = True
|
||||||
|
):
|
||||||
|
"""Initialize NFO service.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
tmdb_api_key: TMDB API key
|
||||||
|
anime_directory: Base anime directory path
|
||||||
|
image_size: Image size to download (original, w500, etc.)
|
||||||
|
auto_create: Whether to auto-create NFOs
|
||||||
|
"""
|
||||||
|
self.tmdb_client = TMDBClient(api_key=tmdb_api_key)
|
||||||
|
self.image_downloader = ImageDownloader()
|
||||||
|
self.anime_directory = Path(anime_directory)
|
||||||
|
self.image_size = image_size
|
||||||
|
self.auto_create = auto_create
|
||||||
|
|
||||||
|
def has_nfo(self, serie_folder: str) -> bool:
|
||||||
|
"""Check if tvshow.nfo exists for a series.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
serie_folder: Series folder name
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
True if NFO file exists
|
||||||
|
"""
|
||||||
|
nfo_path = self.anime_directory / serie_folder / "tvshow.nfo"
|
||||||
|
return nfo_path.exists()
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _extract_year_from_name(serie_name: str) -> Tuple[str, Optional[int]]:
|
||||||
|
"""Extract year from series name if present in format 'Name (YYYY)'.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
serie_name: Series name, possibly with year in parentheses
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Tuple of (clean_name, year)
|
||||||
|
- clean_name: Series name without year
|
||||||
|
- year: Extracted year or None
|
||||||
|
|
||||||
|
Examples:
|
||||||
|
>>> _extract_year_from_name("Attack on Titan (2013)")
|
||||||
|
("Attack on Titan", 2013)
|
||||||
|
>>> _extract_year_from_name("Attack on Titan")
|
||||||
|
("Attack on Titan", None)
|
||||||
|
"""
|
||||||
|
# Match year in parentheses at the end: (YYYY)
|
||||||
|
match = re.search(r'\((\d{4})\)\s*$', serie_name)
|
||||||
|
if match:
|
||||||
|
year = int(match.group(1))
|
||||||
|
clean_name = serie_name[:match.start()].strip()
|
||||||
|
return clean_name, year
|
||||||
|
return serie_name, None
|
||||||
|
|
||||||
|
async def check_nfo_exists(self, serie_folder: str) -> bool:
|
||||||
|
"""Check if tvshow.nfo exists for a series.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
serie_folder: Series folder name
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
True if tvshow.nfo exists
|
||||||
|
"""
|
||||||
|
nfo_path = self.anime_directory / serie_folder / "tvshow.nfo"
|
||||||
|
return nfo_path.exists()
|
||||||
|
|
||||||
|
async def create_tvshow_nfo(
|
||||||
|
self,
|
||||||
|
serie_name: str,
|
||||||
|
serie_folder: str,
|
||||||
|
year: Optional[int] = None,
|
||||||
|
download_poster: bool = True,
|
||||||
|
download_logo: bool = True,
|
||||||
|
download_fanart: bool = True
|
||||||
|
) -> Path:
|
||||||
|
"""Create tvshow.nfo by scraping TMDB.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
serie_name: Name of the series to search (may include year in parentheses)
|
||||||
|
serie_folder: Series folder name
|
||||||
|
year: Release year (helps narrow search). If None and name contains year,
|
||||||
|
year will be auto-extracted
|
||||||
|
download_poster: Whether to download poster.jpg
|
||||||
|
download_logo: Whether to download logo.png
|
||||||
|
download_fanart: Whether to download fanart.jpg
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Path to created NFO file
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
TMDBAPIError: If TMDB API fails
|
||||||
|
FileNotFoundError: If series folder doesn't exist
|
||||||
|
"""
|
||||||
|
# Extract year from name if not provided
|
||||||
|
clean_name, extracted_year = self._extract_year_from_name(serie_name)
|
||||||
|
if year is None and extracted_year is not None:
|
||||||
|
year = extracted_year
|
||||||
|
logger.info(f"Extracted year {year} from series name")
|
||||||
|
|
||||||
|
# Use clean name for search
|
||||||
|
search_name = clean_name
|
||||||
|
|
||||||
|
logger.info(f"Creating NFO for {search_name} (year: {year})")
|
||||||
|
|
||||||
|
folder_path = self.anime_directory / serie_folder
|
||||||
|
if not folder_path.exists():
|
||||||
|
logger.info(f"Creating series folder: {folder_path}")
|
||||||
|
folder_path.mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
|
async with self.tmdb_client:
|
||||||
|
# Search for TV show with clean name (without year)
|
||||||
|
logger.debug(f"Searching TMDB for: {search_name}")
|
||||||
|
search_results = await self.tmdb_client.search_tv_show(search_name)
|
||||||
|
|
||||||
|
if not search_results.get("results"):
|
||||||
|
raise TMDBAPIError(f"No results found for: {search_name}")
|
||||||
|
|
||||||
|
# Find best match (consider year if provided)
|
||||||
|
tv_show = self._find_best_match(search_results["results"], search_name, year)
|
||||||
|
tv_id = tv_show["id"]
|
||||||
|
|
||||||
|
logger.info(f"Found match: {tv_show['name']} (ID: {tv_id})")
|
||||||
|
|
||||||
|
# Get detailed information with multi-language image support
|
||||||
|
details = await self.tmdb_client.get_tv_show_details(
|
||||||
|
tv_id,
|
||||||
|
append_to_response="credits,external_ids,images"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Get content ratings for FSK
|
||||||
|
content_ratings = await self.tmdb_client.get_tv_show_content_ratings(tv_id)
|
||||||
|
|
||||||
|
# Enrich with fallback languages for empty overview/tagline
|
||||||
|
# Pass search result overview as last resort fallback
|
||||||
|
search_overview = tv_show.get("overview") or None
|
||||||
|
details = await self._enrich_details_with_fallback(
|
||||||
|
details, search_overview=search_overview
|
||||||
|
)
|
||||||
|
|
||||||
|
# Convert TMDB data to TVShowNFO model
|
||||||
|
nfo_model = tmdb_to_nfo_model(
|
||||||
|
details,
|
||||||
|
content_ratings,
|
||||||
|
self.tmdb_client.get_image_url,
|
||||||
|
self.image_size,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Generate XML
|
||||||
|
nfo_xml = generate_tvshow_nfo(nfo_model)
|
||||||
|
|
||||||
|
# Save NFO file
|
||||||
|
nfo_path = folder_path / "tvshow.nfo"
|
||||||
|
nfo_path.write_text(nfo_xml, encoding="utf-8")
|
||||||
|
logger.info(f"Created NFO: {nfo_path}")
|
||||||
|
|
||||||
|
# Download media files
|
||||||
|
await self._download_media_files(
|
||||||
|
details,
|
||||||
|
folder_path,
|
||||||
|
download_poster=download_poster,
|
||||||
|
download_logo=download_logo,
|
||||||
|
download_fanart=download_fanart
|
||||||
|
)
|
||||||
|
|
||||||
|
return nfo_path
|
||||||
|
|
||||||
|
async def update_tvshow_nfo(
|
||||||
|
self,
|
||||||
|
serie_folder: str,
|
||||||
|
download_media: bool = True
|
||||||
|
) -> Path:
|
||||||
|
"""Update existing tvshow.nfo with fresh data from TMDB.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
serie_folder: Series folder name
|
||||||
|
download_media: Whether to re-download media files
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Path to updated NFO file
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
FileNotFoundError: If NFO file doesn't exist
|
||||||
|
TMDBAPIError: If TMDB API fails or no TMDB ID found in NFO
|
||||||
|
"""
|
||||||
|
folder_path = self.anime_directory / serie_folder
|
||||||
|
nfo_path = folder_path / "tvshow.nfo"
|
||||||
|
|
||||||
|
if not nfo_path.exists():
|
||||||
|
raise FileNotFoundError(f"NFO file not found: {nfo_path}")
|
||||||
|
|
||||||
|
logger.info(f"Updating NFO for {serie_folder}")
|
||||||
|
|
||||||
|
# Parse existing NFO to extract TMDB ID
|
||||||
|
try:
|
||||||
|
tree = etree.parse(str(nfo_path))
|
||||||
|
root = tree.getroot()
|
||||||
|
|
||||||
|
# Try to find TMDB ID from uniqueid elements
|
||||||
|
tmdb_id = None
|
||||||
|
for uniqueid in root.findall(".//uniqueid"):
|
||||||
|
if uniqueid.get("type") == "tmdb":
|
||||||
|
tmdb_id = int(uniqueid.text)
|
||||||
|
break
|
||||||
|
|
||||||
|
# Fallback: check for tmdbid element
|
||||||
|
if tmdb_id is None:
|
||||||
|
tmdbid_elem = root.find(".//tmdbid")
|
||||||
|
if tmdbid_elem is not None and tmdbid_elem.text:
|
||||||
|
tmdb_id = int(tmdbid_elem.text)
|
||||||
|
|
||||||
|
if tmdb_id is None:
|
||||||
|
raise TMDBAPIError(
|
||||||
|
f"No TMDB ID found in existing NFO. "
|
||||||
|
f"Delete the NFO and create a new one instead."
|
||||||
|
)
|
||||||
|
|
||||||
|
logger.debug(f"Found TMDB ID: {tmdb_id}")
|
||||||
|
|
||||||
|
except etree.XMLSyntaxError as e:
|
||||||
|
raise TMDBAPIError(f"Invalid XML in NFO file: {e}")
|
||||||
|
except ValueError as e:
|
||||||
|
raise TMDBAPIError(f"Invalid TMDB ID format in NFO: {e}")
|
||||||
|
|
||||||
|
# Fetch fresh data from TMDB
|
||||||
|
async with self.tmdb_client:
|
||||||
|
logger.debug(f"Fetching fresh data for TMDB ID: {tmdb_id}")
|
||||||
|
details = await self.tmdb_client.get_tv_show_details(
|
||||||
|
tmdb_id,
|
||||||
|
append_to_response="credits,external_ids,images"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Get content ratings for FSK
|
||||||
|
content_ratings = await self.tmdb_client.get_tv_show_content_ratings(tmdb_id)
|
||||||
|
|
||||||
|
# Enrich with fallback languages for empty overview/tagline
|
||||||
|
details = await self._enrich_details_with_fallback(details)
|
||||||
|
# Convert TMDB data to TVShowNFO model
|
||||||
|
nfo_model = tmdb_to_nfo_model(
|
||||||
|
details,
|
||||||
|
content_ratings,
|
||||||
|
self.tmdb_client.get_image_url,
|
||||||
|
self.image_size,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Generate XML
|
||||||
|
nfo_xml = generate_tvshow_nfo(nfo_model)
|
||||||
|
|
||||||
|
# Save updated NFO file
|
||||||
|
nfo_path.write_text(nfo_xml, encoding="utf-8")
|
||||||
|
logger.info(f"Updated NFO: {nfo_path}")
|
||||||
|
|
||||||
|
# Re-download media files if requested
|
||||||
|
if download_media:
|
||||||
|
await self._download_media_files(
|
||||||
|
details,
|
||||||
|
folder_path,
|
||||||
|
download_poster=True,
|
||||||
|
download_logo=True,
|
||||||
|
download_fanart=True
|
||||||
|
)
|
||||||
|
|
||||||
|
return nfo_path
|
||||||
|
|
||||||
|
def parse_nfo_ids(self, nfo_path: Path) -> Dict[str, Optional[int]]:
|
||||||
|
"""Parse TMDB ID and TVDB ID from an existing NFO file.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
nfo_path: Path to tvshow.nfo file
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Dictionary with 'tmdb_id' and 'tvdb_id' keys.
|
||||||
|
Values are integers if found, None otherwise.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
>>> ids = nfo_service.parse_nfo_ids(Path("/anime/series/tvshow.nfo"))
|
||||||
|
>>> print(ids)
|
||||||
|
{'tmdb_id': 1429, 'tvdb_id': 79168}
|
||||||
|
"""
|
||||||
|
result = {"tmdb_id": None, "tvdb_id": None}
|
||||||
|
|
||||||
|
if not nfo_path.exists():
|
||||||
|
logger.debug(f"NFO file not found: {nfo_path}")
|
||||||
|
return result
|
||||||
|
|
||||||
|
try:
|
||||||
|
tree = etree.parse(str(nfo_path))
|
||||||
|
root = tree.getroot()
|
||||||
|
|
||||||
|
# Try to find TMDB ID from uniqueid elements first
|
||||||
|
for uniqueid in root.findall(".//uniqueid"):
|
||||||
|
uid_type = uniqueid.get("type")
|
||||||
|
uid_text = uniqueid.text
|
||||||
|
|
||||||
|
if uid_type == "tmdb" and uid_text:
|
||||||
|
try:
|
||||||
|
result["tmdb_id"] = int(uid_text)
|
||||||
|
except ValueError:
|
||||||
|
logger.warning(
|
||||||
|
f"Invalid TMDB ID format in NFO: {uid_text}"
|
||||||
|
)
|
||||||
|
|
||||||
|
elif uid_type == "tvdb" and uid_text:
|
||||||
|
try:
|
||||||
|
result["tvdb_id"] = int(uid_text)
|
||||||
|
except ValueError:
|
||||||
|
logger.warning(
|
||||||
|
f"Invalid TVDB ID format in NFO: {uid_text}"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Fallback: check for dedicated tmdbid/tvdbid elements
|
||||||
|
if result["tmdb_id"] is None:
|
||||||
|
tmdbid_elem = root.find(".//tmdbid")
|
||||||
|
if tmdbid_elem is not None and tmdbid_elem.text:
|
||||||
|
try:
|
||||||
|
result["tmdb_id"] = int(tmdbid_elem.text)
|
||||||
|
except ValueError:
|
||||||
|
logger.warning(
|
||||||
|
f"Invalid TMDB ID format in tmdbid element: "
|
||||||
|
f"{tmdbid_elem.text}"
|
||||||
|
)
|
||||||
|
|
||||||
|
if result["tvdb_id"] is None:
|
||||||
|
tvdbid_elem = root.find(".//tvdbid")
|
||||||
|
if tvdbid_elem is not None and tvdbid_elem.text:
|
||||||
|
try:
|
||||||
|
result["tvdb_id"] = int(tvdbid_elem.text)
|
||||||
|
except ValueError:
|
||||||
|
logger.warning(
|
||||||
|
f"Invalid TVDB ID format in tvdbid element: "
|
||||||
|
f"{tvdbid_elem.text}"
|
||||||
|
)
|
||||||
|
|
||||||
|
logger.debug(
|
||||||
|
f"Parsed IDs from NFO: {nfo_path.name} - "
|
||||||
|
f"TMDB: {result['tmdb_id']}, TVDB: {result['tvdb_id']}"
|
||||||
|
)
|
||||||
|
|
||||||
|
except etree.XMLSyntaxError as e:
|
||||||
|
logger.error(f"Invalid XML in NFO file {nfo_path}: {e}")
|
||||||
|
except Exception as e: # pylint: disable=broad-except
|
||||||
|
logger.error(f"Error parsing NFO file {nfo_path}: {e}")
|
||||||
|
|
||||||
|
return result
|
||||||
|
|
||||||
|
async def _enrich_details_with_fallback(
|
||||||
|
self,
|
||||||
|
details: Dict[str, Any],
|
||||||
|
search_overview: Optional[str] = None,
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
"""Enrich TMDB details with fallback languages for empty fields.
|
||||||
|
|
||||||
|
When requesting details in ``de-DE``, some anime have an empty
|
||||||
|
``overview`` (and potentially other translatable fields). This
|
||||||
|
method detects empty values and fills them from alternative
|
||||||
|
languages (``en-US``, then ``ja-JP``) so that NFO files always
|
||||||
|
contain a ``plot`` regardless of whether the German translation
|
||||||
|
exists. As a last resort, the overview from the search result
|
||||||
|
is used.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
details: TMDB TV show details (language ``de-DE``).
|
||||||
|
search_overview: Overview text from the TMDB search result,
|
||||||
|
used as a final fallback if all language-specific
|
||||||
|
requests fail or return empty overviews.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
The *same* dict, mutated in-place with fallback values
|
||||||
|
where needed.
|
||||||
|
"""
|
||||||
|
overview = details.get("overview") or ""
|
||||||
|
|
||||||
|
if overview:
|
||||||
|
# Overview already populated – nothing to do.
|
||||||
|
return details
|
||||||
|
|
||||||
|
tmdb_id = details.get("id")
|
||||||
|
fallback_languages = ["en-US", "ja-JP"]
|
||||||
|
|
||||||
|
for lang in fallback_languages:
|
||||||
|
if details.get("overview"):
|
||||||
|
break
|
||||||
|
|
||||||
|
logger.debug(
|
||||||
|
"Trying %s fallback for TMDB ID %s",
|
||||||
|
lang, tmdb_id,
|
||||||
|
)
|
||||||
|
|
||||||
|
try:
|
||||||
|
lang_details = await self.tmdb_client.get_tv_show_details(
|
||||||
|
tmdb_id,
|
||||||
|
language=lang,
|
||||||
|
)
|
||||||
|
|
||||||
|
if not details.get("overview") and lang_details.get("overview"):
|
||||||
|
details["overview"] = lang_details["overview"]
|
||||||
|
logger.info(
|
||||||
|
"Used %s overview fallback for TMDB ID %s",
|
||||||
|
lang, tmdb_id,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Also fill tagline if missing
|
||||||
|
if not details.get("tagline") and lang_details.get("tagline"):
|
||||||
|
details["tagline"] = lang_details["tagline"]
|
||||||
|
except Exception as exc: # pylint: disable=broad-except
|
||||||
|
logger.warning(
|
||||||
|
"Failed to fetch %s fallback for TMDB ID %s: %s",
|
||||||
|
lang, tmdb_id, exc,
|
||||||
|
)
|
||||||
|
|
||||||
|
# Last resort: use search result overview
|
||||||
|
if not details.get("overview") and search_overview:
|
||||||
|
details["overview"] = search_overview
|
||||||
|
logger.info(
|
||||||
|
"Used search result overview fallback for TMDB ID %s",
|
||||||
|
tmdb_id,
|
||||||
|
)
|
||||||
|
|
||||||
|
return details
|
||||||
|
|
||||||
|
def _find_best_match(
|
||||||
|
self,
|
||||||
|
results: List[Dict[str, Any]],
|
||||||
|
query: str,
|
||||||
|
year: Optional[int] = None
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
"""Find best matching TV show from search results.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
results: TMDB search results
|
||||||
|
query: Original search query
|
||||||
|
year: Expected release year
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Best matching TV show data
|
||||||
|
"""
|
||||||
|
if not results:
|
||||||
|
raise TMDBAPIError("No search results to match")
|
||||||
|
|
||||||
|
# If year is provided, try to find exact match
|
||||||
|
if year:
|
||||||
|
for result in results:
|
||||||
|
first_air_date = result.get("first_air_date", "")
|
||||||
|
if first_air_date.startswith(str(year)):
|
||||||
|
logger.debug(f"Found year match: {result['name']} ({first_air_date})")
|
||||||
|
return result
|
||||||
|
|
||||||
|
# Return first result (usually best match)
|
||||||
|
return results[0]
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
async def _download_media_files(
|
||||||
|
self,
|
||||||
|
tmdb_data: Dict[str, Any],
|
||||||
|
folder_path: Path,
|
||||||
|
download_poster: bool = True,
|
||||||
|
download_logo: bool = True,
|
||||||
|
download_fanart: bool = True
|
||||||
|
) -> Dict[str, bool]:
|
||||||
|
"""Download media files (poster, logo, fanart).
|
||||||
|
|
||||||
|
Args:
|
||||||
|
tmdb_data: TMDB TV show details
|
||||||
|
folder_path: Series folder path
|
||||||
|
download_poster: Download poster.jpg
|
||||||
|
download_logo: Download logo.png
|
||||||
|
download_fanart: Download fanart.jpg
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Dictionary with download status for each file
|
||||||
|
"""
|
||||||
|
poster_url = None
|
||||||
|
logo_url = None
|
||||||
|
fanart_url = None
|
||||||
|
|
||||||
|
# Get poster URL
|
||||||
|
if download_poster and tmdb_data.get("poster_path"):
|
||||||
|
poster_url = self.tmdb_client.get_image_url(
|
||||||
|
tmdb_data["poster_path"],
|
||||||
|
self.image_size
|
||||||
|
)
|
||||||
|
|
||||||
|
# Get fanart URL
|
||||||
|
if download_fanart and tmdb_data.get("backdrop_path"):
|
||||||
|
fanart_url = self.tmdb_client.get_image_url(
|
||||||
|
tmdb_data["backdrop_path"],
|
||||||
|
"original" # Always use original for fanart
|
||||||
|
)
|
||||||
|
|
||||||
|
# Get logo URL
|
||||||
|
if download_logo:
|
||||||
|
images_data = tmdb_data.get("images", {})
|
||||||
|
logos = images_data.get("logos", [])
|
||||||
|
if logos:
|
||||||
|
logo_url = self.tmdb_client.get_image_url(
|
||||||
|
logos[0]["file_path"],
|
||||||
|
"original" # Logos should be original size
|
||||||
|
)
|
||||||
|
|
||||||
|
# Download all media concurrently
|
||||||
|
results = await self.image_downloader.download_all_media(
|
||||||
|
folder_path,
|
||||||
|
poster_url=poster_url,
|
||||||
|
logo_url=logo_url,
|
||||||
|
fanart_url=fanart_url,
|
||||||
|
skip_existing=True
|
||||||
|
)
|
||||||
|
|
||||||
|
logger.info(f"Media download results: {results}")
|
||||||
|
return results
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
async def close(self):
|
||||||
|
"""Clean up resources."""
|
||||||
|
await self.tmdb_client.close()
|
||||||
279
src/core/services/series_manager_service.py
Normal file
279
src/core/services/series_manager_service.py
Normal file
@@ -0,0 +1,279 @@
|
|||||||
|
"""Service for managing series with NFO metadata support.
|
||||||
|
|
||||||
|
This service layer component orchestrates SerieList (core entity) with
|
||||||
|
NFOService to provide automatic NFO creation and updates during series scans.
|
||||||
|
|
||||||
|
This follows clean architecture principles by keeping the core entities
|
||||||
|
independent of external services like TMDB API.
|
||||||
|
"""
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import logging
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from src.config.settings import settings
|
||||||
|
from src.core.entities.SerieList import SerieList
|
||||||
|
from src.core.services.nfo_service import NFOService
|
||||||
|
from src.core.services.tmdb_client import TMDBAPIError
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class SeriesManagerService:
|
||||||
|
"""Service for managing series with optional NFO metadata support.
|
||||||
|
|
||||||
|
This service wraps SerieList and adds NFO creation/update capabilities
|
||||||
|
based on configuration settings. It maintains clean separation between
|
||||||
|
core entities and external services.
|
||||||
|
|
||||||
|
Attributes:
|
||||||
|
serie_list: SerieList instance for series management
|
||||||
|
nfo_service: Optional NFOService for metadata management
|
||||||
|
auto_create_nfo: Whether to auto-create NFO files
|
||||||
|
update_on_scan: Whether to update existing NFO files
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
anime_directory: str,
|
||||||
|
tmdb_api_key: Optional[str] = None,
|
||||||
|
auto_create_nfo: bool = False,
|
||||||
|
update_on_scan: bool = False,
|
||||||
|
download_poster: bool = True,
|
||||||
|
download_logo: bool = True,
|
||||||
|
download_fanart: bool = True,
|
||||||
|
image_size: str = "original"
|
||||||
|
):
|
||||||
|
"""Initialize series manager service.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
anime_directory: Base directory for anime series
|
||||||
|
tmdb_api_key: TMDB API key (optional, required for NFO features)
|
||||||
|
auto_create_nfo: Automatically create NFO files when scanning
|
||||||
|
update_on_scan: Update existing NFO files when scanning
|
||||||
|
download_poster: Download poster.jpg
|
||||||
|
download_logo: Download logo.png
|
||||||
|
download_fanart: Download fanart.jpg
|
||||||
|
image_size: Image size to download
|
||||||
|
"""
|
||||||
|
self.anime_directory = anime_directory
|
||||||
|
# Skip automatic folder scanning - we load from database instead
|
||||||
|
self.serie_list = SerieList(anime_directory, skip_load=True)
|
||||||
|
|
||||||
|
# NFO configuration
|
||||||
|
self.auto_create_nfo = auto_create_nfo
|
||||||
|
self.update_on_scan = update_on_scan
|
||||||
|
self.download_poster = download_poster
|
||||||
|
self.download_logo = download_logo
|
||||||
|
self.download_fanart = download_fanart
|
||||||
|
|
||||||
|
# Initialize NFO service if API key provided and NFO features enabled
|
||||||
|
self.nfo_service: Optional[NFOService] = None
|
||||||
|
if tmdb_api_key and (auto_create_nfo or update_on_scan):
|
||||||
|
try:
|
||||||
|
from src.core.services.nfo_factory import get_nfo_factory
|
||||||
|
factory = get_nfo_factory()
|
||||||
|
self.nfo_service = factory.create(
|
||||||
|
tmdb_api_key=tmdb_api_key,
|
||||||
|
anime_directory=anime_directory,
|
||||||
|
image_size=image_size,
|
||||||
|
auto_create=auto_create_nfo
|
||||||
|
)
|
||||||
|
logger.info("NFO service initialized (auto_create=%s, update=%s)",
|
||||||
|
auto_create_nfo, update_on_scan)
|
||||||
|
except (ValueError, Exception) as e: # pylint: disable=broad-except
|
||||||
|
logger.warning(
|
||||||
|
"Failed to initialize NFO service: %s", str(e)
|
||||||
|
)
|
||||||
|
self.nfo_service = None
|
||||||
|
elif auto_create_nfo or update_on_scan:
|
||||||
|
logger.warning(
|
||||||
|
"NFO features requested but TMDB_API_KEY not provided. "
|
||||||
|
"NFO creation/updates will be skipped."
|
||||||
|
)
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def from_settings(cls) -> "SeriesManagerService":
|
||||||
|
"""Create SeriesManagerService from application settings.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Configured SeriesManagerService instance
|
||||||
|
"""
|
||||||
|
return cls(
|
||||||
|
anime_directory=settings.anime_directory,
|
||||||
|
tmdb_api_key=settings.tmdb_api_key,
|
||||||
|
auto_create_nfo=settings.nfo_auto_create,
|
||||||
|
update_on_scan=settings.nfo_update_on_scan,
|
||||||
|
download_poster=settings.nfo_download_poster,
|
||||||
|
download_logo=settings.nfo_download_logo,
|
||||||
|
download_fanart=settings.nfo_download_fanart,
|
||||||
|
image_size=settings.nfo_image_size
|
||||||
|
)
|
||||||
|
|
||||||
|
async def process_nfo_for_series(
|
||||||
|
self,
|
||||||
|
serie_folder: str,
|
||||||
|
serie_name: str,
|
||||||
|
serie_key: str,
|
||||||
|
year: Optional[int] = None
|
||||||
|
):
|
||||||
|
"""Process NFO file for a series (create or update).
|
||||||
|
|
||||||
|
Args:
|
||||||
|
serie_folder: Series folder name
|
||||||
|
serie_name: Series display name
|
||||||
|
serie_key: Series unique identifier for database updates
|
||||||
|
year: Release year (helps with TMDB matching)
|
||||||
|
"""
|
||||||
|
if not self.nfo_service:
|
||||||
|
return
|
||||||
|
|
||||||
|
try:
|
||||||
|
folder_path = Path(self.anime_directory) / serie_folder
|
||||||
|
nfo_path = folder_path / "tvshow.nfo"
|
||||||
|
nfo_exists = await self.nfo_service.check_nfo_exists(serie_folder)
|
||||||
|
|
||||||
|
# If NFO exists, parse IDs and update database
|
||||||
|
if nfo_exists:
|
||||||
|
logger.debug(f"Parsing IDs from existing NFO for '{serie_name}'")
|
||||||
|
ids = self.nfo_service.parse_nfo_ids(nfo_path)
|
||||||
|
|
||||||
|
if ids["tmdb_id"] or ids["tvdb_id"]:
|
||||||
|
# Update database using service layer
|
||||||
|
from datetime import datetime, timezone
|
||||||
|
|
||||||
|
from src.server.database.connection import get_db_session
|
||||||
|
from src.server.database.service import AnimeSeriesService
|
||||||
|
|
||||||
|
async with get_db_session() as db:
|
||||||
|
series = await AnimeSeriesService.get_by_key(db, serie_key)
|
||||||
|
|
||||||
|
if series:
|
||||||
|
now = datetime.now(timezone.utc)
|
||||||
|
|
||||||
|
# Prepare update fields
|
||||||
|
update_fields = {
|
||||||
|
"has_nfo": True,
|
||||||
|
"nfo_updated_at": now,
|
||||||
|
}
|
||||||
|
|
||||||
|
if series.nfo_created_at is None:
|
||||||
|
update_fields["nfo_created_at"] = now
|
||||||
|
|
||||||
|
if ids["tmdb_id"] is not None:
|
||||||
|
update_fields["tmdb_id"] = ids["tmdb_id"]
|
||||||
|
logger.debug(
|
||||||
|
f"Updated TMDB ID for '{serie_name}': "
|
||||||
|
f"{ids['tmdb_id']}"
|
||||||
|
)
|
||||||
|
|
||||||
|
if ids["tvdb_id"] is not None:
|
||||||
|
update_fields["tvdb_id"] = ids["tvdb_id"]
|
||||||
|
logger.debug(
|
||||||
|
f"Updated TVDB ID for '{serie_name}': "
|
||||||
|
f"{ids['tvdb_id']}"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Use service layer for update
|
||||||
|
await AnimeSeriesService.update(db, series.id, **update_fields)
|
||||||
|
await db.commit()
|
||||||
|
|
||||||
|
logger.info(
|
||||||
|
f"Updated database with IDs from NFO for "
|
||||||
|
f"'{serie_name}' - TMDB: {ids['tmdb_id']}, "
|
||||||
|
f"TVDB: {ids['tvdb_id']}"
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
logger.warning(
|
||||||
|
f"Series not found in database for NFO ID "
|
||||||
|
f"update: {serie_key}"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Create NFO file only if it doesn't exist and auto_create enabled
|
||||||
|
if not nfo_exists and self.auto_create_nfo:
|
||||||
|
logger.info(
|
||||||
|
f"Creating NFO for '{serie_name}' ({serie_folder})"
|
||||||
|
)
|
||||||
|
await self.nfo_service.create_tvshow_nfo(
|
||||||
|
serie_name=serie_name,
|
||||||
|
serie_folder=serie_folder,
|
||||||
|
year=year,
|
||||||
|
download_poster=self.download_poster,
|
||||||
|
download_logo=self.download_logo,
|
||||||
|
download_fanart=self.download_fanart
|
||||||
|
)
|
||||||
|
logger.info(f"Successfully created NFO for '{serie_name}'")
|
||||||
|
elif nfo_exists:
|
||||||
|
logger.debug(
|
||||||
|
f"NFO exists for '{serie_name}', skipping download"
|
||||||
|
)
|
||||||
|
|
||||||
|
except TMDBAPIError as e:
|
||||||
|
logger.error(f"TMDB API error processing '{serie_name}': {e}")
|
||||||
|
except Exception as e:
|
||||||
|
logger.error(
|
||||||
|
f"Unexpected error processing NFO for '{serie_name}': {e}",
|
||||||
|
exc_info=True
|
||||||
|
)
|
||||||
|
|
||||||
|
async def scan_and_process_nfo(self):
|
||||||
|
"""Scan all series and process NFO files based on configuration.
|
||||||
|
|
||||||
|
This method:
|
||||||
|
1. Loads series from database (avoiding filesystem scan)
|
||||||
|
2. For each series with existing NFO, reads TMDB/TVDB IDs
|
||||||
|
and updates database
|
||||||
|
3. For each series without NFO (if auto_create=True), creates one
|
||||||
|
4. For each series with NFO (if update_on_scan=True), updates it
|
||||||
|
5. Runs operations concurrently for better performance
|
||||||
|
"""
|
||||||
|
if not self.nfo_service:
|
||||||
|
logger.info("NFO service not enabled, skipping NFO processing")
|
||||||
|
return
|
||||||
|
|
||||||
|
# Import database dependencies
|
||||||
|
from src.server.database.connection import get_db_session
|
||||||
|
from src.server.database.service import AnimeSeriesService
|
||||||
|
|
||||||
|
# Load series from database (not from filesystem)
|
||||||
|
async with get_db_session() as db:
|
||||||
|
anime_series_list = await AnimeSeriesService.get_all(
|
||||||
|
db, with_episodes=False
|
||||||
|
)
|
||||||
|
|
||||||
|
if not anime_series_list:
|
||||||
|
logger.info("No series found in database to process")
|
||||||
|
return
|
||||||
|
|
||||||
|
logger.info(f"Processing NFO for {len(anime_series_list)} series...")
|
||||||
|
|
||||||
|
# Create tasks for concurrent processing
|
||||||
|
# Each task creates its own database session
|
||||||
|
tasks = []
|
||||||
|
for anime_series in anime_series_list:
|
||||||
|
# Extract year if available
|
||||||
|
year = getattr(anime_series, 'year', None)
|
||||||
|
|
||||||
|
task = self.process_nfo_for_series(
|
||||||
|
serie_folder=anime_series.folder,
|
||||||
|
serie_name=anime_series.name,
|
||||||
|
serie_key=anime_series.key,
|
||||||
|
year=year
|
||||||
|
)
|
||||||
|
tasks.append(task)
|
||||||
|
|
||||||
|
# Process in batches to avoid overwhelming TMDB API
|
||||||
|
batch_size = 5
|
||||||
|
for i in range(0, len(tasks), batch_size):
|
||||||
|
batch = tasks[i:i + batch_size]
|
||||||
|
await asyncio.gather(*batch, return_exceptions=True)
|
||||||
|
|
||||||
|
# Small delay between batches to respect rate limits
|
||||||
|
if i + batch_size < len(tasks):
|
||||||
|
await asyncio.sleep(2)
|
||||||
|
|
||||||
|
async def close(self):
|
||||||
|
"""Clean up resources."""
|
||||||
|
if self.nfo_service:
|
||||||
|
await self.nfo_service.close()
|
||||||
316
src/core/services/tmdb_client.py
Normal file
316
src/core/services/tmdb_client.py
Normal file
@@ -0,0 +1,316 @@
|
|||||||
|
"""TMDB API client for fetching TV show metadata.
|
||||||
|
|
||||||
|
This module provides an async client for The Movie Database (TMDB) API,
|
||||||
|
adapted from the scraper project to fit the AniworldMain architecture.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
>>> async with TMDBClient(api_key="your_key") as client:
|
||||||
|
... results = await client.search_tv_show("Attack on Titan")
|
||||||
|
... show_id = results["results"][0]["id"]
|
||||||
|
... details = await client.get_tv_show_details(show_id)
|
||||||
|
"""
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import logging
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Any, Dict, List, Optional
|
||||||
|
|
||||||
|
import aiohttp
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class TMDBAPIError(Exception):
|
||||||
|
"""Exception raised for TMDB API errors."""
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class TMDBClient:
|
||||||
|
"""Async TMDB API client for TV show metadata.
|
||||||
|
|
||||||
|
Attributes:
|
||||||
|
api_key: TMDB API key for authentication
|
||||||
|
base_url: Base URL for TMDB API
|
||||||
|
image_base_url: Base URL for TMDB images
|
||||||
|
max_connections: Maximum concurrent connections
|
||||||
|
session: aiohttp ClientSession for requests
|
||||||
|
"""
|
||||||
|
|
||||||
|
DEFAULT_BASE_URL = "https://api.themoviedb.org/3"
|
||||||
|
DEFAULT_IMAGE_BASE_URL = "https://image.tmdb.org/t/p"
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
api_key: str,
|
||||||
|
base_url: str = DEFAULT_BASE_URL,
|
||||||
|
image_base_url: str = DEFAULT_IMAGE_BASE_URL,
|
||||||
|
max_connections: int = 10
|
||||||
|
):
|
||||||
|
"""Initialize TMDB client.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
api_key: TMDB API key
|
||||||
|
base_url: TMDB API base URL
|
||||||
|
image_base_url: TMDB image base URL
|
||||||
|
max_connections: Maximum concurrent connections
|
||||||
|
"""
|
||||||
|
if not api_key:
|
||||||
|
raise ValueError("TMDB API key is required")
|
||||||
|
|
||||||
|
self.api_key = api_key
|
||||||
|
self.base_url = base_url.rstrip('/')
|
||||||
|
self.image_base_url = image_base_url.rstrip('/')
|
||||||
|
self.max_connections = max_connections
|
||||||
|
self.session: Optional[aiohttp.ClientSession] = None
|
||||||
|
self._cache: Dict[str, Any] = {}
|
||||||
|
|
||||||
|
async def __aenter__(self):
|
||||||
|
"""Async context manager entry."""
|
||||||
|
await self._ensure_session()
|
||||||
|
return self
|
||||||
|
|
||||||
|
async def __aexit__(self, exc_type, exc_val, exc_tb):
|
||||||
|
"""Async context manager exit."""
|
||||||
|
await self.close()
|
||||||
|
|
||||||
|
async def _ensure_session(self):
|
||||||
|
"""Ensure aiohttp session is created."""
|
||||||
|
if self.session is None or self.session.closed:
|
||||||
|
connector = aiohttp.TCPConnector(limit=self.max_connections)
|
||||||
|
self.session = aiohttp.ClientSession(connector=connector)
|
||||||
|
|
||||||
|
async def _request(
|
||||||
|
self,
|
||||||
|
endpoint: str,
|
||||||
|
params: Optional[Dict[str, Any]] = None,
|
||||||
|
max_retries: int = 3
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
"""Make an async request to TMDB API with retries.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
endpoint: API endpoint (e.g., 'search/tv')
|
||||||
|
params: Query parameters
|
||||||
|
max_retries: Maximum retry attempts
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
API response as dictionary
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
TMDBAPIError: If request fails after retries
|
||||||
|
"""
|
||||||
|
await self._ensure_session()
|
||||||
|
|
||||||
|
url = f"{self.base_url}/{endpoint}"
|
||||||
|
params = params or {}
|
||||||
|
params["api_key"] = self.api_key
|
||||||
|
|
||||||
|
# Cache key for deduplication
|
||||||
|
cache_key = f"{endpoint}:{str(sorted(params.items()))}"
|
||||||
|
if cache_key in self._cache:
|
||||||
|
logger.debug(f"Cache hit for {endpoint}")
|
||||||
|
return self._cache[cache_key]
|
||||||
|
|
||||||
|
delay = 1
|
||||||
|
last_error = None
|
||||||
|
|
||||||
|
for attempt in range(max_retries):
|
||||||
|
try:
|
||||||
|
# Re-ensure session before each attempt in case it was closed
|
||||||
|
await self._ensure_session()
|
||||||
|
|
||||||
|
if self.session is None:
|
||||||
|
raise TMDBAPIError("Session is not available")
|
||||||
|
|
||||||
|
logger.debug(f"TMDB API request: {endpoint} (attempt {attempt + 1})")
|
||||||
|
async with self.session.get(url, params=params, timeout=aiohttp.ClientTimeout(total=60)) as resp:
|
||||||
|
if resp.status == 401:
|
||||||
|
raise TMDBAPIError("Invalid TMDB API key")
|
||||||
|
elif resp.status == 404:
|
||||||
|
raise TMDBAPIError(f"Resource not found: {endpoint}")
|
||||||
|
elif resp.status == 429:
|
||||||
|
# Rate limit - wait longer
|
||||||
|
retry_after = int(resp.headers.get('Retry-After', delay * 2))
|
||||||
|
logger.warning(f"Rate limited, waiting {retry_after}s")
|
||||||
|
await asyncio.sleep(retry_after)
|
||||||
|
continue
|
||||||
|
|
||||||
|
resp.raise_for_status()
|
||||||
|
data = await resp.json()
|
||||||
|
self._cache[cache_key] = data
|
||||||
|
return data
|
||||||
|
|
||||||
|
except asyncio.TimeoutError as e:
|
||||||
|
last_error = e
|
||||||
|
if attempt < max_retries - 1:
|
||||||
|
logger.warning(f"Request timeout (attempt {attempt + 1}), retrying in {delay}s")
|
||||||
|
await asyncio.sleep(delay)
|
||||||
|
delay *= 2
|
||||||
|
else:
|
||||||
|
logger.error(f"Request timed out after {max_retries} attempts")
|
||||||
|
|
||||||
|
except (aiohttp.ClientError, AttributeError) as e:
|
||||||
|
last_error = e
|
||||||
|
# If connector/session was closed, try to recreate it
|
||||||
|
if "Connector is closed" in str(e) or isinstance(e, AttributeError):
|
||||||
|
logger.warning(f"Session issue detected, recreating session: {e}")
|
||||||
|
self.session = None
|
||||||
|
await self._ensure_session()
|
||||||
|
|
||||||
|
if attempt < max_retries - 1:
|
||||||
|
logger.warning(f"Request failed (attempt {attempt + 1}): {e}, retrying in {delay}s")
|
||||||
|
await asyncio.sleep(delay)
|
||||||
|
delay *= 2
|
||||||
|
else:
|
||||||
|
logger.error(f"Request failed after {max_retries} attempts: {e}")
|
||||||
|
|
||||||
|
raise TMDBAPIError(f"Request failed after {max_retries} attempts: {last_error}")
|
||||||
|
|
||||||
|
async def search_tv_show(
|
||||||
|
self,
|
||||||
|
query: str,
|
||||||
|
language: str = "de-DE",
|
||||||
|
page: int = 1
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
"""Search for TV shows by name.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
query: Search query (show name)
|
||||||
|
language: Language for results (default: German)
|
||||||
|
page: Page number for pagination
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Search results with list of shows
|
||||||
|
|
||||||
|
Example:
|
||||||
|
>>> results = await client.search_tv_show("Attack on Titan")
|
||||||
|
>>> shows = results["results"]
|
||||||
|
"""
|
||||||
|
return await self._request(
|
||||||
|
"search/tv",
|
||||||
|
{"query": query, "language": language, "page": page}
|
||||||
|
)
|
||||||
|
|
||||||
|
async def get_tv_show_details(
|
||||||
|
self,
|
||||||
|
tv_id: int,
|
||||||
|
language: str = "de-DE",
|
||||||
|
append_to_response: Optional[str] = None
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
"""Get detailed information about a TV show.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
tv_id: TMDB TV show ID
|
||||||
|
language: Language for metadata
|
||||||
|
append_to_response: Additional data to include (e.g., "credits,images")
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
TV show details including metadata, cast, etc.
|
||||||
|
"""
|
||||||
|
params = {"language": language}
|
||||||
|
if append_to_response:
|
||||||
|
params["append_to_response"] = append_to_response
|
||||||
|
|
||||||
|
return await self._request(f"tv/{tv_id}", params)
|
||||||
|
|
||||||
|
async def get_tv_show_content_ratings(self, tv_id: int) -> Dict[str, Any]:
|
||||||
|
"""Get content ratings for a TV show.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
tv_id: TMDB TV show ID
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Content ratings by country
|
||||||
|
"""
|
||||||
|
return await self._request(f"tv/{tv_id}/content_ratings")
|
||||||
|
|
||||||
|
async def get_tv_show_external_ids(self, tv_id: int) -> Dict[str, Any]:
|
||||||
|
"""Get external IDs (IMDB, TVDB) for a TV show.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
tv_id: TMDB TV show ID
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Dictionary with external IDs (imdb_id, tvdb_id, etc.)
|
||||||
|
"""
|
||||||
|
return await self._request(f"tv/{tv_id}/external_ids")
|
||||||
|
|
||||||
|
async def get_tv_show_images(
|
||||||
|
self,
|
||||||
|
tv_id: int,
|
||||||
|
language: Optional[str] = None
|
||||||
|
) -> Dict[str, Any]:
|
||||||
|
"""Get images (posters, backdrops, logos) for a TV show.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
tv_id: TMDB TV show ID
|
||||||
|
language: Language filter for images (None = all languages)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Dictionary with poster, backdrop, and logo lists
|
||||||
|
"""
|
||||||
|
params = {}
|
||||||
|
if language:
|
||||||
|
params["language"] = language
|
||||||
|
|
||||||
|
return await self._request(f"tv/{tv_id}/images", params)
|
||||||
|
|
||||||
|
async def download_image(
|
||||||
|
self,
|
||||||
|
image_path: str,
|
||||||
|
local_path: Path,
|
||||||
|
size: str = "original"
|
||||||
|
) -> None:
|
||||||
|
"""Download an image from TMDB.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
image_path: Image path from TMDB API (e.g., "/abc123.jpg")
|
||||||
|
local_path: Local file path to save image
|
||||||
|
size: Image size (w500, original, etc.)
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
TMDBAPIError: If download fails
|
||||||
|
"""
|
||||||
|
await self._ensure_session()
|
||||||
|
|
||||||
|
url = f"{self.image_base_url}/{size}{image_path}"
|
||||||
|
|
||||||
|
try:
|
||||||
|
logger.debug(f"Downloading image from {url}")
|
||||||
|
async with self.session.get(url, timeout=aiohttp.ClientTimeout(total=60)) as resp:
|
||||||
|
resp.raise_for_status()
|
||||||
|
|
||||||
|
# Ensure parent directory exists
|
||||||
|
local_path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
|
# Write image data
|
||||||
|
with open(local_path, "wb") as f:
|
||||||
|
f.write(await resp.read())
|
||||||
|
|
||||||
|
logger.info(f"Downloaded image to {local_path}")
|
||||||
|
|
||||||
|
except aiohttp.ClientError as e:
|
||||||
|
raise TMDBAPIError(f"Failed to download image: {e}")
|
||||||
|
|
||||||
|
def get_image_url(self, image_path: str, size: str = "original") -> str:
|
||||||
|
"""Get full URL for an image.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
image_path: Image path from TMDB API
|
||||||
|
size: Image size (w500, original, etc.)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Full image URL
|
||||||
|
"""
|
||||||
|
return f"{self.image_base_url}/{size}{image_path}"
|
||||||
|
|
||||||
|
async def close(self):
|
||||||
|
"""Close the aiohttp session and clean up resources."""
|
||||||
|
if self.session and not self.session.closed:
|
||||||
|
await self.session.close()
|
||||||
|
self.session = None
|
||||||
|
logger.debug("TMDB client session closed")
|
||||||
|
|
||||||
|
def clear_cache(self):
|
||||||
|
"""Clear the request cache."""
|
||||||
|
self._cache.clear()
|
||||||
|
logger.debug("TMDB client cache cleared")
|
||||||
349
src/core/utils/image_downloader.py
Normal file
349
src/core/utils/image_downloader.py
Normal file
@@ -0,0 +1,349 @@
|
|||||||
|
"""Image downloader utility for NFO media files.
|
||||||
|
|
||||||
|
This module provides functions to download poster, logo, and fanart images
|
||||||
|
from TMDB and validate them.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
>>> downloader = ImageDownloader()
|
||||||
|
>>> await downloader.download_poster(poster_url, "/path/to/poster.jpg")
|
||||||
|
"""
|
||||||
|
|
||||||
|
import asyncio
|
||||||
|
import logging
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
import aiohttp
|
||||||
|
from PIL import Image
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
class ImageDownloadError(Exception):
|
||||||
|
"""Exception raised for image download failures."""
|
||||||
|
pass
|
||||||
|
|
||||||
|
|
||||||
|
class ImageDownloader:
|
||||||
|
"""Utility for downloading and validating images.
|
||||||
|
|
||||||
|
Supports async context manager protocol for proper resource cleanup.
|
||||||
|
|
||||||
|
Attributes:
|
||||||
|
max_retries: Maximum retry attempts for downloads
|
||||||
|
timeout: Request timeout in seconds
|
||||||
|
min_file_size: Minimum valid file size in bytes
|
||||||
|
session: Optional aiohttp session (managed internally)
|
||||||
|
|
||||||
|
Example:
|
||||||
|
>>> async with ImageDownloader() as downloader:
|
||||||
|
... await downloader.download_poster(url, path)
|
||||||
|
"""
|
||||||
|
|
||||||
|
def __init__(
|
||||||
|
self,
|
||||||
|
max_retries: int = 3,
|
||||||
|
timeout: int = 30,
|
||||||
|
min_file_size: int = 1024, # 1 KB
|
||||||
|
retry_delay: float = 1.0
|
||||||
|
):
|
||||||
|
"""Initialize image downloader.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
max_retries: Maximum retry attempts
|
||||||
|
timeout: Request timeout in seconds
|
||||||
|
min_file_size: Minimum valid file size in bytes
|
||||||
|
retry_delay: Delay between retries in seconds
|
||||||
|
"""
|
||||||
|
self.max_retries = max_retries
|
||||||
|
self.timeout = timeout
|
||||||
|
self.min_file_size = min_file_size
|
||||||
|
self.retry_delay = retry_delay
|
||||||
|
self.session: Optional[aiohttp.ClientSession] = None
|
||||||
|
|
||||||
|
async def __aenter__(self):
|
||||||
|
"""Enter async context manager and create session."""
|
||||||
|
self._get_session() # Ensure session is created
|
||||||
|
return self
|
||||||
|
|
||||||
|
async def __aexit__(self, exc_type, exc_val, exc_tb):
|
||||||
|
"""Exit async context manager and cleanup resources."""
|
||||||
|
await self.close()
|
||||||
|
return False
|
||||||
|
|
||||||
|
async def close(self):
|
||||||
|
"""Close aiohttp session if open."""
|
||||||
|
if self.session and not self.session.closed:
|
||||||
|
await self.session.close()
|
||||||
|
self.session = None
|
||||||
|
|
||||||
|
def _get_session(self) -> aiohttp.ClientSession:
|
||||||
|
"""Get or create aiohttp session.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Active aiohttp session
|
||||||
|
"""
|
||||||
|
# If no session, create one
|
||||||
|
if self.session is None:
|
||||||
|
timeout = aiohttp.ClientTimeout(total=self.timeout)
|
||||||
|
self.session = aiohttp.ClientSession(timeout=timeout)
|
||||||
|
return self.session
|
||||||
|
|
||||||
|
# If session exists, check if it's closed (handle real sessions only)
|
||||||
|
# Mock sessions from tests won't have a boolean closed attribute
|
||||||
|
try:
|
||||||
|
if hasattr(self.session, 'closed') and self.session.closed is True:
|
||||||
|
timeout = aiohttp.ClientTimeout(total=self.timeout)
|
||||||
|
self.session = aiohttp.ClientSession(timeout=timeout)
|
||||||
|
except (AttributeError, TypeError):
|
||||||
|
# Mock session or unusual object, just use it as-is
|
||||||
|
pass
|
||||||
|
|
||||||
|
return self.session
|
||||||
|
|
||||||
|
async def download_image(
|
||||||
|
self,
|
||||||
|
url: str,
|
||||||
|
local_path: Path,
|
||||||
|
skip_existing: bool = True,
|
||||||
|
validate: bool = True
|
||||||
|
) -> bool:
|
||||||
|
"""Download an image from URL to local path.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
url: Image URL
|
||||||
|
local_path: Local file path to save image
|
||||||
|
skip_existing: Skip download if file already exists
|
||||||
|
validate: Validate image after download
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
True if download successful, False otherwise
|
||||||
|
|
||||||
|
Raises:
|
||||||
|
ImageDownloadError: If download fails after retries
|
||||||
|
"""
|
||||||
|
# Check if file already exists
|
||||||
|
if skip_existing and local_path.exists():
|
||||||
|
if local_path.stat().st_size >= self.min_file_size:
|
||||||
|
logger.debug(f"Image already exists: {local_path}")
|
||||||
|
return True
|
||||||
|
|
||||||
|
# Ensure parent directory exists
|
||||||
|
local_path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
|
delay = self.retry_delay
|
||||||
|
last_error = None
|
||||||
|
|
||||||
|
for attempt in range(self.max_retries):
|
||||||
|
try:
|
||||||
|
logger.debug(
|
||||||
|
f"Downloading image from {url} "
|
||||||
|
f"(attempt {attempt + 1})"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Use persistent session
|
||||||
|
session = self._get_session()
|
||||||
|
async with session.get(url) as resp:
|
||||||
|
if resp.status == 404:
|
||||||
|
logger.warning(f"Image not found: {url}")
|
||||||
|
return False
|
||||||
|
|
||||||
|
resp.raise_for_status()
|
||||||
|
|
||||||
|
# Download image data
|
||||||
|
data = await resp.read()
|
||||||
|
|
||||||
|
# Check file size
|
||||||
|
if len(data) < self.min_file_size:
|
||||||
|
raise ImageDownloadError(
|
||||||
|
f"Downloaded file too small: {len(data)} bytes"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Write to file
|
||||||
|
with open(local_path, "wb") as f:
|
||||||
|
f.write(data)
|
||||||
|
|
||||||
|
# Validate image if requested
|
||||||
|
if validate and not self.validate_image(local_path):
|
||||||
|
local_path.unlink(missing_ok=True)
|
||||||
|
raise ImageDownloadError("Image validation failed")
|
||||||
|
|
||||||
|
logger.info(f"Downloaded image to {local_path}")
|
||||||
|
return True
|
||||||
|
|
||||||
|
except (aiohttp.ClientError, IOError, ImageDownloadError) as e:
|
||||||
|
last_error = e
|
||||||
|
if attempt < self.max_retries - 1:
|
||||||
|
logger.warning(
|
||||||
|
f"Download failed (attempt {attempt + 1}): {e}, "
|
||||||
|
f"retrying in {delay}s"
|
||||||
|
)
|
||||||
|
await asyncio.sleep(delay)
|
||||||
|
delay *= 2
|
||||||
|
else:
|
||||||
|
logger.error(
|
||||||
|
f"Download failed after {self.max_retries} attempts: {e}"
|
||||||
|
)
|
||||||
|
|
||||||
|
raise ImageDownloadError(
|
||||||
|
f"Failed to download image after {self.max_retries} attempts: {last_error}"
|
||||||
|
)
|
||||||
|
|
||||||
|
async def download_poster(
|
||||||
|
self,
|
||||||
|
url: str,
|
||||||
|
series_folder: Path,
|
||||||
|
filename: str = "poster.jpg",
|
||||||
|
skip_existing: bool = True
|
||||||
|
) -> bool:
|
||||||
|
"""Download poster image.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
url: Poster URL
|
||||||
|
series_folder: Series folder path
|
||||||
|
filename: Output filename (default: poster.jpg)
|
||||||
|
skip_existing: Skip if file exists
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
True if successful
|
||||||
|
"""
|
||||||
|
local_path = series_folder / filename
|
||||||
|
try:
|
||||||
|
return await self.download_image(url, local_path, skip_existing)
|
||||||
|
except ImageDownloadError as e:
|
||||||
|
logger.warning(f"Failed to download poster: {e}")
|
||||||
|
return False
|
||||||
|
|
||||||
|
async def download_logo(
|
||||||
|
self,
|
||||||
|
url: str,
|
||||||
|
series_folder: Path,
|
||||||
|
filename: str = "logo.png",
|
||||||
|
skip_existing: bool = True
|
||||||
|
) -> bool:
|
||||||
|
"""Download logo image.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
url: Logo URL
|
||||||
|
series_folder: Series folder path
|
||||||
|
filename: Output filename (default: logo.png)
|
||||||
|
skip_existing: Skip if file exists
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
True if successful
|
||||||
|
"""
|
||||||
|
local_path = series_folder / filename
|
||||||
|
try:
|
||||||
|
return await self.download_image(url, local_path, skip_existing)
|
||||||
|
except ImageDownloadError as e:
|
||||||
|
logger.warning(f"Failed to download logo: {e}")
|
||||||
|
return False
|
||||||
|
|
||||||
|
async def download_fanart(
|
||||||
|
self,
|
||||||
|
url: str,
|
||||||
|
series_folder: Path,
|
||||||
|
filename: str = "fanart.jpg",
|
||||||
|
skip_existing: bool = True
|
||||||
|
) -> bool:
|
||||||
|
"""Download fanart/backdrop image.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
url: Fanart URL
|
||||||
|
series_folder: Series folder path
|
||||||
|
filename: Output filename (default: fanart.jpg)
|
||||||
|
skip_existing: Skip if file exists
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
True if successful
|
||||||
|
"""
|
||||||
|
local_path = series_folder / filename
|
||||||
|
try:
|
||||||
|
return await self.download_image(url, local_path, skip_existing)
|
||||||
|
except ImageDownloadError as e:
|
||||||
|
logger.warning(f"Failed to download fanart: {e}")
|
||||||
|
return False
|
||||||
|
|
||||||
|
def validate_image(self, image_path: Path) -> bool:
|
||||||
|
"""Validate that file is a valid image.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
image_path: Path to image file
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
True if valid image, False otherwise
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
with Image.open(image_path) as img:
|
||||||
|
# Verify it's a valid image
|
||||||
|
img.verify()
|
||||||
|
|
||||||
|
# Check file size
|
||||||
|
if image_path.stat().st_size < self.min_file_size:
|
||||||
|
logger.warning(f"Image file too small: {image_path}")
|
||||||
|
return False
|
||||||
|
|
||||||
|
return True
|
||||||
|
|
||||||
|
except Exception as e:
|
||||||
|
logger.warning(f"Image validation failed for {image_path}: {e}")
|
||||||
|
return False
|
||||||
|
|
||||||
|
async def download_all_media(
|
||||||
|
self,
|
||||||
|
series_folder: Path,
|
||||||
|
poster_url: Optional[str] = None,
|
||||||
|
logo_url: Optional[str] = None,
|
||||||
|
fanart_url: Optional[str] = None,
|
||||||
|
skip_existing: bool = True
|
||||||
|
) -> dict[str, bool]:
|
||||||
|
"""Download all media files (poster, logo, fanart).
|
||||||
|
|
||||||
|
Args:
|
||||||
|
series_folder: Series folder path
|
||||||
|
poster_url: Poster URL (optional)
|
||||||
|
logo_url: Logo URL (optional)
|
||||||
|
fanart_url: Fanart URL (optional)
|
||||||
|
skip_existing: Skip existing files
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Dictionary with download status for each file type
|
||||||
|
"""
|
||||||
|
results = {
|
||||||
|
"poster": None,
|
||||||
|
"logo": None,
|
||||||
|
"fanart": None
|
||||||
|
}
|
||||||
|
|
||||||
|
tasks = []
|
||||||
|
|
||||||
|
if poster_url:
|
||||||
|
tasks.append(("poster", self.download_poster(
|
||||||
|
poster_url, series_folder, skip_existing=skip_existing
|
||||||
|
)))
|
||||||
|
|
||||||
|
if logo_url:
|
||||||
|
tasks.append(("logo", self.download_logo(
|
||||||
|
logo_url, series_folder, skip_existing=skip_existing
|
||||||
|
)))
|
||||||
|
|
||||||
|
if fanart_url:
|
||||||
|
tasks.append(("fanart", self.download_fanart(
|
||||||
|
fanart_url, series_folder, skip_existing=skip_existing
|
||||||
|
)))
|
||||||
|
|
||||||
|
# Download concurrently
|
||||||
|
if tasks:
|
||||||
|
task_results = await asyncio.gather(
|
||||||
|
*[task for _, task in tasks],
|
||||||
|
return_exceptions=True
|
||||||
|
)
|
||||||
|
|
||||||
|
for (media_type, _), result in zip(tasks, task_results):
|
||||||
|
if isinstance(result, Exception):
|
||||||
|
logger.error(f"Error downloading {media_type}: {result}")
|
||||||
|
results[media_type] = False
|
||||||
|
else:
|
||||||
|
results[media_type] = result
|
||||||
|
|
||||||
|
return results
|
||||||
213
src/core/utils/nfo_generator.py
Normal file
213
src/core/utils/nfo_generator.py
Normal file
@@ -0,0 +1,213 @@
|
|||||||
|
"""NFO XML generator for Kodi/XBMC format.
|
||||||
|
|
||||||
|
This module provides functions to generate tvshow.nfo XML files from
|
||||||
|
TVShowNFO Pydantic models, adapted from the scraper project.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
>>> from src.core.entities.nfo_models import TVShowNFO
|
||||||
|
>>> nfo = TVShowNFO(title="Test Show", year=2020, tmdbid=12345)
|
||||||
|
>>> xml_string = generate_tvshow_nfo(nfo)
|
||||||
|
"""
|
||||||
|
|
||||||
|
import logging
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from lxml import etree
|
||||||
|
|
||||||
|
from src.config.settings import settings
|
||||||
|
from src.core.entities.nfo_models import TVShowNFO
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def generate_tvshow_nfo(tvshow: TVShowNFO, pretty_print: bool = True) -> str:
|
||||||
|
"""Generate tvshow.nfo XML content from TVShowNFO model.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
tvshow: TVShowNFO Pydantic model with metadata
|
||||||
|
pretty_print: Whether to format XML with indentation
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
XML string in Kodi/XBMC tvshow.nfo format
|
||||||
|
|
||||||
|
Example:
|
||||||
|
>>> nfo = TVShowNFO(title="Attack on Titan", year=2013)
|
||||||
|
>>> xml = generate_tvshow_nfo(nfo)
|
||||||
|
"""
|
||||||
|
root = etree.Element("tvshow")
|
||||||
|
|
||||||
|
# Basic information
|
||||||
|
_add_element(root, "title", tvshow.title)
|
||||||
|
_add_element(root, "originaltitle", tvshow.originaltitle)
|
||||||
|
_add_element(root, "showtitle", tvshow.showtitle)
|
||||||
|
_add_element(root, "sorttitle", tvshow.sorttitle)
|
||||||
|
_add_element(root, "year", str(tvshow.year) if tvshow.year else None)
|
||||||
|
|
||||||
|
# Plot and description – always write <plot> even when empty so that
|
||||||
|
# all NFO files have a consistent set of tags regardless of whether they
|
||||||
|
# were produced by create or update.
|
||||||
|
_add_element(root, "plot", tvshow.plot, always_write=True)
|
||||||
|
_add_element(root, "outline", tvshow.outline)
|
||||||
|
_add_element(root, "tagline", tvshow.tagline)
|
||||||
|
|
||||||
|
# Technical details
|
||||||
|
_add_element(root, "runtime", str(tvshow.runtime) if tvshow.runtime else None)
|
||||||
|
|
||||||
|
# Content rating - prefer FSK if available and configured
|
||||||
|
if getattr(settings, 'nfo_prefer_fsk_rating', True) and tvshow.fsk:
|
||||||
|
_add_element(root, "mpaa", tvshow.fsk)
|
||||||
|
else:
|
||||||
|
_add_element(root, "mpaa", tvshow.mpaa)
|
||||||
|
|
||||||
|
_add_element(root, "certification", tvshow.certification)
|
||||||
|
|
||||||
|
# Status and dates
|
||||||
|
_add_element(root, "premiered", tvshow.premiered)
|
||||||
|
_add_element(root, "status", tvshow.status)
|
||||||
|
_add_element(root, "dateadded", tvshow.dateadded)
|
||||||
|
|
||||||
|
# Ratings
|
||||||
|
if tvshow.ratings:
|
||||||
|
ratings_elem = etree.SubElement(root, "ratings")
|
||||||
|
for rating in tvshow.ratings:
|
||||||
|
rating_elem = etree.SubElement(ratings_elem, "rating")
|
||||||
|
if rating.name:
|
||||||
|
rating_elem.set("name", rating.name)
|
||||||
|
if rating.max_rating:
|
||||||
|
rating_elem.set("max", str(rating.max_rating))
|
||||||
|
if rating.default:
|
||||||
|
rating_elem.set("default", "true")
|
||||||
|
|
||||||
|
_add_element(rating_elem, "value", str(rating.value))
|
||||||
|
if rating.votes is not None:
|
||||||
|
_add_element(rating_elem, "votes", str(rating.votes))
|
||||||
|
|
||||||
|
_add_element(root, "userrating", str(tvshow.userrating) if tvshow.userrating is not None else None)
|
||||||
|
|
||||||
|
# IDs
|
||||||
|
_add_element(root, "tmdbid", str(tvshow.tmdbid) if tvshow.tmdbid else None)
|
||||||
|
_add_element(root, "imdbid", tvshow.imdbid)
|
||||||
|
_add_element(root, "tvdbid", str(tvshow.tvdbid) if tvshow.tvdbid else None)
|
||||||
|
|
||||||
|
# Legacy ID fields for compatibility
|
||||||
|
_add_element(root, "id", str(tvshow.tvdbid) if tvshow.tvdbid else None)
|
||||||
|
_add_element(root, "imdb_id", tvshow.imdbid)
|
||||||
|
|
||||||
|
# Unique IDs
|
||||||
|
for uid in tvshow.uniqueid:
|
||||||
|
uid_elem = etree.SubElement(root, "uniqueid")
|
||||||
|
uid_elem.set("type", uid.type)
|
||||||
|
if uid.default:
|
||||||
|
uid_elem.set("default", "true")
|
||||||
|
uid_elem.text = uid.value
|
||||||
|
|
||||||
|
# Multi-value fields
|
||||||
|
for genre in tvshow.genre:
|
||||||
|
_add_element(root, "genre", genre)
|
||||||
|
|
||||||
|
for studio in tvshow.studio:
|
||||||
|
_add_element(root, "studio", studio)
|
||||||
|
|
||||||
|
for country in tvshow.country:
|
||||||
|
_add_element(root, "country", country)
|
||||||
|
|
||||||
|
for tag in tvshow.tag:
|
||||||
|
_add_element(root, "tag", tag)
|
||||||
|
|
||||||
|
# Thumbnails (posters, logos)
|
||||||
|
for thumb in tvshow.thumb:
|
||||||
|
thumb_elem = etree.SubElement(root, "thumb")
|
||||||
|
if thumb.aspect:
|
||||||
|
thumb_elem.set("aspect", thumb.aspect)
|
||||||
|
if thumb.season is not None:
|
||||||
|
thumb_elem.set("season", str(thumb.season))
|
||||||
|
if thumb.type:
|
||||||
|
thumb_elem.set("type", thumb.type)
|
||||||
|
thumb_elem.text = str(thumb.url)
|
||||||
|
|
||||||
|
# Fanart
|
||||||
|
if tvshow.fanart:
|
||||||
|
fanart_elem = etree.SubElement(root, "fanart")
|
||||||
|
for fanart in tvshow.fanart:
|
||||||
|
fanart_thumb = etree.SubElement(fanart_elem, "thumb")
|
||||||
|
fanart_thumb.text = str(fanart.url)
|
||||||
|
|
||||||
|
# Named seasons
|
||||||
|
for named_season in tvshow.namedseason:
|
||||||
|
season_elem = etree.SubElement(root, "namedseason")
|
||||||
|
season_elem.set("number", str(named_season.number))
|
||||||
|
season_elem.text = named_season.name
|
||||||
|
|
||||||
|
# Actors
|
||||||
|
for actor in tvshow.actors:
|
||||||
|
actor_elem = etree.SubElement(root, "actor")
|
||||||
|
_add_element(actor_elem, "name", actor.name)
|
||||||
|
_add_element(actor_elem, "role", actor.role)
|
||||||
|
_add_element(actor_elem, "thumb", str(actor.thumb) if actor.thumb else None)
|
||||||
|
_add_element(actor_elem, "profile", str(actor.profile) if actor.profile else None)
|
||||||
|
_add_element(actor_elem, "tmdbid", str(actor.tmdbid) if actor.tmdbid else None)
|
||||||
|
|
||||||
|
# Additional fields
|
||||||
|
_add_element(root, "trailer", str(tvshow.trailer) if tvshow.trailer else None)
|
||||||
|
_add_element(root, "watched", "true" if tvshow.watched else "false")
|
||||||
|
if tvshow.playcount is not None:
|
||||||
|
_add_element(root, "playcount", str(tvshow.playcount))
|
||||||
|
|
||||||
|
# Generate XML string
|
||||||
|
xml_str = etree.tostring(
|
||||||
|
root,
|
||||||
|
pretty_print=pretty_print,
|
||||||
|
encoding="unicode",
|
||||||
|
xml_declaration=False
|
||||||
|
)
|
||||||
|
|
||||||
|
# Add XML declaration
|
||||||
|
xml_declaration = '<?xml version="1.0" encoding="UTF-8" standalone="yes"?>\n'
|
||||||
|
return xml_declaration + xml_str
|
||||||
|
|
||||||
|
|
||||||
|
def _add_element(
|
||||||
|
parent: etree.Element,
|
||||||
|
tag: str,
|
||||||
|
text: Optional[str],
|
||||||
|
always_write: bool = False,
|
||||||
|
) -> Optional[etree.Element]:
|
||||||
|
"""Add a child element to parent if text is not None or empty.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
parent: Parent XML element
|
||||||
|
tag: Tag name for child element
|
||||||
|
text: Text content (None or empty strings are skipped
|
||||||
|
unless *always_write* is True)
|
||||||
|
always_write: When True the element is created even when
|
||||||
|
*text* is None/empty (the element will have
|
||||||
|
no text content). Useful for tags like
|
||||||
|
``<plot>`` that should always be present.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Created element or None if skipped
|
||||||
|
"""
|
||||||
|
if text is not None and text != "":
|
||||||
|
elem = etree.SubElement(parent, tag)
|
||||||
|
elem.text = text
|
||||||
|
return elem
|
||||||
|
if always_write:
|
||||||
|
return etree.SubElement(parent, tag)
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def validate_nfo_xml(xml_string: str) -> bool:
|
||||||
|
"""Validate NFO XML structure.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
xml_string: XML content to validate
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
True if valid XML, False otherwise
|
||||||
|
"""
|
||||||
|
try:
|
||||||
|
etree.fromstring(xml_string.encode('utf-8'))
|
||||||
|
return True
|
||||||
|
except etree.XMLSyntaxError as e:
|
||||||
|
logger.error(f"Invalid NFO XML: {e}")
|
||||||
|
return False
|
||||||
234
src/core/utils/nfo_mapper.py
Normal file
234
src/core/utils/nfo_mapper.py
Normal file
@@ -0,0 +1,234 @@
|
|||||||
|
"""TMDB to NFO model mapper.
|
||||||
|
|
||||||
|
This module converts TMDB API data to TVShowNFO Pydantic models,
|
||||||
|
keeping the mapping logic separate from the service orchestration.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
>>> model = tmdb_to_nfo_model(tmdb_data, content_ratings, get_image_url, "original")
|
||||||
|
"""
|
||||||
|
|
||||||
|
import logging
|
||||||
|
from datetime import datetime
|
||||||
|
from typing import Any, Callable, Dict, List, Optional
|
||||||
|
|
||||||
|
from src.core.entities.nfo_models import (
|
||||||
|
ActorInfo,
|
||||||
|
ImageInfo,
|
||||||
|
NamedSeason,
|
||||||
|
RatingInfo,
|
||||||
|
TVShowNFO,
|
||||||
|
UniqueID,
|
||||||
|
)
|
||||||
|
|
||||||
|
logger = logging.getLogger(__name__)
|
||||||
|
|
||||||
|
|
||||||
|
def _extract_rating_by_country(
|
||||||
|
content_ratings: Dict[str, Any],
|
||||||
|
country_code: str,
|
||||||
|
) -> Optional[str]:
|
||||||
|
"""Extract content rating for a specific country from TMDB content ratings.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
content_ratings: TMDB content ratings response dict with "results" list.
|
||||||
|
country_code: ISO 3166-1 alpha-2 country code (e.g., "DE", "US").
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Raw rating string for the requested country, or None if not found.
|
||||||
|
|
||||||
|
Example:
|
||||||
|
>>> _extract_rating_by_country({"results": [{"iso_3166_1": "US", "rating": "TV-14"}]}, "US")
|
||||||
|
'TV-14'
|
||||||
|
"""
|
||||||
|
if not content_ratings or "results" not in content_ratings:
|
||||||
|
return None
|
||||||
|
|
||||||
|
for rating in content_ratings["results"]:
|
||||||
|
if rating.get("iso_3166_1") == country_code:
|
||||||
|
return rating.get("rating") or None
|
||||||
|
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def _extract_fsk_rating(content_ratings: Dict[str, Any]) -> Optional[str]:
|
||||||
|
"""Extract German FSK rating from TMDB content ratings.
|
||||||
|
|
||||||
|
Delegates to :func:`_extract_rating_by_country` and then normalises the
|
||||||
|
raw TMDB string into the 'FSK XX' format expected by Kodi/Jellyfin.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
content_ratings: TMDB content ratings response.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Formatted FSK string (e.g., 'FSK 12') or None.
|
||||||
|
"""
|
||||||
|
raw = _extract_rating_by_country(content_ratings, "DE")
|
||||||
|
if raw is None:
|
||||||
|
return None
|
||||||
|
|
||||||
|
fsk_mapping: Dict[str, str] = {
|
||||||
|
"0": "FSK 0",
|
||||||
|
"6": "FSK 6",
|
||||||
|
"12": "FSK 12",
|
||||||
|
"16": "FSK 16",
|
||||||
|
"18": "FSK 18",
|
||||||
|
}
|
||||||
|
|
||||||
|
if raw in fsk_mapping:
|
||||||
|
return fsk_mapping[raw]
|
||||||
|
|
||||||
|
# Try to extract numeric part (ordered high→low to avoid partial matches)
|
||||||
|
for key in ["18", "16", "12", "6", "0"]:
|
||||||
|
if key in raw:
|
||||||
|
return fsk_mapping[key]
|
||||||
|
|
||||||
|
if raw.startswith("FSK"):
|
||||||
|
return raw
|
||||||
|
|
||||||
|
logger.debug("Unmapped German rating: %s", raw)
|
||||||
|
return None
|
||||||
|
|
||||||
|
|
||||||
|
def tmdb_to_nfo_model(
|
||||||
|
tmdb_data: Dict[str, Any],
|
||||||
|
content_ratings: Optional[Dict[str, Any]],
|
||||||
|
get_image_url: Callable[[str, str], str],
|
||||||
|
image_size: str = "original",
|
||||||
|
) -> TVShowNFO:
|
||||||
|
"""Convert TMDB API data to a fully-populated TVShowNFO model.
|
||||||
|
|
||||||
|
All required NFO tags are explicitly set in this function so that newly
|
||||||
|
created files are complete without a subsequent repair pass.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
tmdb_data: TMDB TV show details (with credits, external_ids, images
|
||||||
|
appended via ``append_to_response``).
|
||||||
|
content_ratings: TMDB content ratings response, or None.
|
||||||
|
get_image_url: Callable ``(path, size) -> url`` for TMDB images.
|
||||||
|
image_size: TMDB image size parameter (e.g., ``"original"``, ``"w500"``).
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
TVShowNFO Pydantic model with all available fields populated.
|
||||||
|
"""
|
||||||
|
title: str = tmdb_data["name"]
|
||||||
|
original_title: str = tmdb_data.get("original_name") or title
|
||||||
|
|
||||||
|
# --- Year and dates ---
|
||||||
|
first_air_date: Optional[str] = tmdb_data.get("first_air_date") or None
|
||||||
|
year: Optional[int] = int(first_air_date[:4]) if first_air_date else None
|
||||||
|
|
||||||
|
# --- Ratings ---
|
||||||
|
ratings: List[RatingInfo] = []
|
||||||
|
if tmdb_data.get("vote_average"):
|
||||||
|
ratings.append(RatingInfo(
|
||||||
|
name="themoviedb",
|
||||||
|
value=float(tmdb_data["vote_average"]),
|
||||||
|
votes=tmdb_data.get("vote_count", 0),
|
||||||
|
max_rating=10,
|
||||||
|
default=True,
|
||||||
|
))
|
||||||
|
|
||||||
|
# --- External IDs ---
|
||||||
|
external_ids: Dict[str, Any] = tmdb_data.get("external_ids", {})
|
||||||
|
imdb_id: Optional[str] = external_ids.get("imdb_id")
|
||||||
|
tvdb_id: Optional[int] = external_ids.get("tvdb_id")
|
||||||
|
|
||||||
|
# --- Images ---
|
||||||
|
thumb_images: List[ImageInfo] = []
|
||||||
|
fanart_images: List[ImageInfo] = []
|
||||||
|
|
||||||
|
if tmdb_data.get("poster_path"):
|
||||||
|
thumb_images.append(ImageInfo(
|
||||||
|
url=get_image_url(tmdb_data["poster_path"], image_size),
|
||||||
|
aspect="poster",
|
||||||
|
))
|
||||||
|
|
||||||
|
if tmdb_data.get("backdrop_path"):
|
||||||
|
fanart_images.append(ImageInfo(
|
||||||
|
url=get_image_url(tmdb_data["backdrop_path"], image_size),
|
||||||
|
))
|
||||||
|
|
||||||
|
logos: List[Dict[str, Any]] = tmdb_data.get("images", {}).get("logos", [])
|
||||||
|
if logos:
|
||||||
|
thumb_images.append(ImageInfo(
|
||||||
|
url=get_image_url(logos[0]["file_path"], image_size),
|
||||||
|
aspect="clearlogo",
|
||||||
|
))
|
||||||
|
|
||||||
|
# --- Cast (top 10) ---
|
||||||
|
actors: List[ActorInfo] = []
|
||||||
|
for member in tmdb_data.get("credits", {}).get("cast", [])[:10]:
|
||||||
|
actor_thumb: Optional[str] = None
|
||||||
|
if member.get("profile_path"):
|
||||||
|
actor_thumb = get_image_url(member["profile_path"], "h632")
|
||||||
|
actors.append(ActorInfo(
|
||||||
|
name=member["name"],
|
||||||
|
role=member.get("character"),
|
||||||
|
thumb=actor_thumb,
|
||||||
|
tmdbid=member["id"],
|
||||||
|
))
|
||||||
|
|
||||||
|
# --- Named seasons ---
|
||||||
|
named_seasons: List[NamedSeason] = []
|
||||||
|
for season_info in tmdb_data.get("seasons", []):
|
||||||
|
season_name = season_info.get("name")
|
||||||
|
season_number = season_info.get("season_number")
|
||||||
|
if season_name and season_number is not None:
|
||||||
|
named_seasons.append(NamedSeason(
|
||||||
|
number=season_number,
|
||||||
|
name=season_name,
|
||||||
|
))
|
||||||
|
|
||||||
|
# --- Unique IDs ---
|
||||||
|
unique_ids: List[UniqueID] = []
|
||||||
|
if tmdb_data.get("id"):
|
||||||
|
unique_ids.append(UniqueID(type="tmdb", value=str(tmdb_data["id"]), default=False))
|
||||||
|
if imdb_id:
|
||||||
|
unique_ids.append(UniqueID(type="imdb", value=imdb_id, default=False))
|
||||||
|
if tvdb_id:
|
||||||
|
unique_ids.append(UniqueID(type="tvdb", value=str(tvdb_id), default=True))
|
||||||
|
|
||||||
|
# --- Content ratings ---
|
||||||
|
fsk_rating: Optional[str] = _extract_fsk_rating(content_ratings) if content_ratings else None
|
||||||
|
mpaa_rating: Optional[str] = (
|
||||||
|
_extract_rating_by_country(content_ratings, "US") if content_ratings else None
|
||||||
|
)
|
||||||
|
|
||||||
|
# --- Country: prefer origin_country codes; fall back to production_countries names ---
|
||||||
|
country_list: List[str] = list(tmdb_data.get("origin_country", []))
|
||||||
|
if not country_list:
|
||||||
|
country_list = [c["name"] for c in tmdb_data.get("production_countries", [])]
|
||||||
|
|
||||||
|
# --- Runtime ---
|
||||||
|
runtime_list: List[int] = tmdb_data.get("episode_run_time", [])
|
||||||
|
runtime: Optional[int] = runtime_list[0] if runtime_list else None
|
||||||
|
|
||||||
|
return TVShowNFO(
|
||||||
|
title=title,
|
||||||
|
originaltitle=original_title,
|
||||||
|
showtitle=title,
|
||||||
|
sorttitle=title,
|
||||||
|
year=year,
|
||||||
|
plot=tmdb_data.get("overview") or None,
|
||||||
|
outline=tmdb_data.get("overview") or None,
|
||||||
|
tagline=tmdb_data.get("tagline") or None,
|
||||||
|
runtime=runtime,
|
||||||
|
premiered=first_air_date,
|
||||||
|
status=tmdb_data.get("status"),
|
||||||
|
genre=[g["name"] for g in tmdb_data.get("genres", [])],
|
||||||
|
studio=[n["name"] for n in tmdb_data.get("networks", [])],
|
||||||
|
country=country_list,
|
||||||
|
ratings=ratings,
|
||||||
|
fsk=fsk_rating,
|
||||||
|
mpaa=mpaa_rating,
|
||||||
|
tmdbid=tmdb_data.get("id"),
|
||||||
|
imdbid=imdb_id,
|
||||||
|
tvdbid=tvdb_id,
|
||||||
|
uniqueid=unique_ids,
|
||||||
|
thumb=thumb_images,
|
||||||
|
fanart=fanart_images,
|
||||||
|
actors=actors,
|
||||||
|
namedseason=named_seasons,
|
||||||
|
watched=False,
|
||||||
|
dateadded=datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
|
||||||
|
)
|
||||||
7
src/infrastructure/logging/__init__.py
Normal file
7
src/infrastructure/logging/__init__.py
Normal file
@@ -0,0 +1,7 @@
|
|||||||
|
"""
|
||||||
|
Logging infrastructure for the Aniworld application.
|
||||||
|
"""
|
||||||
|
from src.infrastructure.logging.logger import get_logger, setup_logging
|
||||||
|
from src.infrastructure.logging.uvicorn_config import get_uvicorn_log_config
|
||||||
|
|
||||||
|
__all__ = ["setup_logging", "get_logger", "get_uvicorn_log_config"]
|
||||||
100
src/infrastructure/logging/logger.py
Normal file
100
src/infrastructure/logging/logger.py
Normal file
@@ -0,0 +1,100 @@
|
|||||||
|
"""
|
||||||
|
Logging configuration for the Aniworld application.
|
||||||
|
|
||||||
|
This module provides a centralized logging setup with both console and file
|
||||||
|
logging, following Python logging best practices.
|
||||||
|
"""
|
||||||
|
import logging
|
||||||
|
import sys
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import Optional
|
||||||
|
|
||||||
|
from src.config.settings import settings
|
||||||
|
|
||||||
|
|
||||||
|
def setup_logging(
|
||||||
|
log_file: Optional[str] = None,
|
||||||
|
log_level: Optional[str] = None,
|
||||||
|
log_dir: Optional[Path] = None
|
||||||
|
) -> logging.Logger:
|
||||||
|
"""
|
||||||
|
Configure application logging with console and file handlers.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
log_file: Name of the log file (default: "fastapi_app.log")
|
||||||
|
log_level: Logging level (default: from settings or "INFO")
|
||||||
|
log_dir: Directory for log files (default: "logs" in project root)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Configured logger instance
|
||||||
|
"""
|
||||||
|
# Determine log level
|
||||||
|
level_name = log_level or settings.log_level or "INFO"
|
||||||
|
level = getattr(logging, level_name.upper(), logging.INFO)
|
||||||
|
|
||||||
|
# Determine log directory and file
|
||||||
|
if log_dir is None:
|
||||||
|
# Default to logs directory in project root
|
||||||
|
log_dir = Path(__file__).parent.parent.parent.parent / "logs"
|
||||||
|
|
||||||
|
log_dir.mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
|
if log_file is None:
|
||||||
|
log_file = "fastapi_app.log"
|
||||||
|
|
||||||
|
log_path = log_dir / log_file
|
||||||
|
|
||||||
|
# Create formatters
|
||||||
|
detailed_formatter = logging.Formatter(
|
||||||
|
fmt="%(asctime)s - %(name)s - %(levelname)s - %(message)s",
|
||||||
|
datefmt="%Y-%m-%d %H:%M:%S"
|
||||||
|
)
|
||||||
|
|
||||||
|
console_formatter = logging.Formatter(
|
||||||
|
fmt="%(levelname)s: %(message)s"
|
||||||
|
)
|
||||||
|
|
||||||
|
# Configure root logger
|
||||||
|
root_logger = logging.getLogger()
|
||||||
|
root_logger.setLevel(level)
|
||||||
|
|
||||||
|
# Remove existing handlers to avoid duplicates
|
||||||
|
root_logger.handlers.clear()
|
||||||
|
|
||||||
|
# Console handler (stdout)
|
||||||
|
console_handler = logging.StreamHandler(sys.stdout)
|
||||||
|
console_handler.setLevel(level)
|
||||||
|
console_handler.setFormatter(console_formatter)
|
||||||
|
root_logger.addHandler(console_handler)
|
||||||
|
|
||||||
|
# File handler
|
||||||
|
file_handler = logging.FileHandler(log_path, mode='a', encoding='utf-8')
|
||||||
|
file_handler.setLevel(level)
|
||||||
|
file_handler.setFormatter(detailed_formatter)
|
||||||
|
root_logger.addHandler(file_handler)
|
||||||
|
|
||||||
|
# Create application logger
|
||||||
|
logger = logging.getLogger("aniworld")
|
||||||
|
logger.setLevel(level)
|
||||||
|
|
||||||
|
# Log startup information
|
||||||
|
logger.info("=" * 60)
|
||||||
|
logger.info("Logging configured successfully")
|
||||||
|
logger.info("Log level: %s", level_name.upper())
|
||||||
|
logger.info("Log file: %s", log_path)
|
||||||
|
logger.info("=" * 60)
|
||||||
|
|
||||||
|
return logger
|
||||||
|
|
||||||
|
|
||||||
|
def get_logger(name: str) -> logging.Logger:
|
||||||
|
"""
|
||||||
|
Get a logger instance for a specific module.
|
||||||
|
|
||||||
|
Args:
|
||||||
|
name: Name of the logger (typically __name__)
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Logger instance
|
||||||
|
"""
|
||||||
|
return logging.getLogger(name)
|
||||||
92
src/infrastructure/logging/uvicorn_config.py
Normal file
92
src/infrastructure/logging/uvicorn_config.py
Normal file
@@ -0,0 +1,92 @@
|
|||||||
|
"""
|
||||||
|
Uvicorn logging configuration for the Aniworld application.
|
||||||
|
|
||||||
|
This configuration ensures that uvicorn logs are properly formatted and
|
||||||
|
written to both console and file.
|
||||||
|
"""
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
# Get the logs directory
|
||||||
|
LOGS_DIR = Path(__file__).parent.parent.parent.parent / "logs"
|
||||||
|
LOGS_DIR.mkdir(parents=True, exist_ok=True)
|
||||||
|
LOG_FILE = LOGS_DIR / "fastapi_app.log"
|
||||||
|
|
||||||
|
LOGGING_CONFIG = {
|
||||||
|
"version": 1,
|
||||||
|
"disable_existing_loggers": False,
|
||||||
|
"formatters": {
|
||||||
|
"default": {
|
||||||
|
"()": "uvicorn.logging.DefaultFormatter",
|
||||||
|
"fmt": "%(levelprefix)s %(message)s",
|
||||||
|
"use_colors": None,
|
||||||
|
},
|
||||||
|
"access": {
|
||||||
|
"()": "uvicorn.logging.AccessFormatter",
|
||||||
|
"fmt": (
|
||||||
|
'%(levelprefix)s %(client_addr)s - '
|
||||||
|
'"%(request_line)s" %(status_code)s'
|
||||||
|
),
|
||||||
|
},
|
||||||
|
"detailed": {
|
||||||
|
"format": "%(asctime)s - %(name)s - %(levelname)s - %(message)s",
|
||||||
|
"datefmt": "%Y-%m-%d %H:%M:%S",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
"handlers": {
|
||||||
|
"console": {
|
||||||
|
"class": "logging.StreamHandler",
|
||||||
|
"level": "INFO",
|
||||||
|
"formatter": "default",
|
||||||
|
"stream": "ext://sys.stdout",
|
||||||
|
},
|
||||||
|
"file": {
|
||||||
|
"class": "logging.FileHandler",
|
||||||
|
"level": "INFO",
|
||||||
|
"formatter": "detailed",
|
||||||
|
"filename": str(LOG_FILE),
|
||||||
|
"mode": "a",
|
||||||
|
"encoding": "utf-8",
|
||||||
|
},
|
||||||
|
},
|
||||||
|
"loggers": {
|
||||||
|
"uvicorn": {
|
||||||
|
"handlers": ["console", "file"],
|
||||||
|
"level": "INFO",
|
||||||
|
"propagate": False,
|
||||||
|
},
|
||||||
|
"uvicorn.error": {
|
||||||
|
"handlers": ["console", "file"],
|
||||||
|
"level": "INFO",
|
||||||
|
"propagate": False,
|
||||||
|
},
|
||||||
|
"uvicorn.access": {
|
||||||
|
"handlers": ["console", "file"],
|
||||||
|
"level": "INFO",
|
||||||
|
"propagate": False,
|
||||||
|
},
|
||||||
|
"watchfiles.main": {
|
||||||
|
"handlers": ["console"],
|
||||||
|
"level": "WARNING",
|
||||||
|
"propagate": False,
|
||||||
|
},
|
||||||
|
"aniworld": {
|
||||||
|
"handlers": ["console", "file"],
|
||||||
|
"level": "INFO",
|
||||||
|
"propagate": False,
|
||||||
|
},
|
||||||
|
},
|
||||||
|
"root": {
|
||||||
|
"handlers": ["console", "file"],
|
||||||
|
"level": "INFO",
|
||||||
|
},
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
def get_uvicorn_log_config() -> dict:
|
||||||
|
"""
|
||||||
|
Get the uvicorn logging configuration dictionary.
|
||||||
|
|
||||||
|
Returns:
|
||||||
|
Dictionary containing logging configuration
|
||||||
|
"""
|
||||||
|
return LOGGING_CONFIG
|
||||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user