Compare commits
7 Commits
d8248be67d
...
0ec120e08f
| Author | SHA1 | Date | |
|---|---|---|---|
| 0ec120e08f | |||
| db58ea9396 | |||
| 69b409f42d | |||
| b34ee59bca | |||
| 624c0db16e | |||
| e6d9f9f342 | |||
| fc8cdc538d |
@@ -18,7 +18,7 @@ COPY entrypoint.sh /entrypoint.sh
|
||||
RUN chmod +x /entrypoint.sh
|
||||
|
||||
# Health check: can we reach the internet through the VPN?
|
||||
HEALTHCHECK --interval=30s --timeout=10s --retries=3 \
|
||||
CMD ping -c 1 -W 5 1.1.1.1 || exit 1
|
||||
HEALTHCHECK --interval=30s --timeout=10s --retries=5 \
|
||||
CMD curl -sf --max-time 5 http://1.1.1.1 || exit 1
|
||||
|
||||
ENTRYPOINT ["/entrypoint.sh"]
|
||||
|
||||
@@ -101,7 +101,9 @@ setup_killswitch() {
|
||||
# ──────────────────────────────────────────────
|
||||
enable_forwarding() {
|
||||
echo "[init] Enabling IP forwarding..."
|
||||
if echo 1 > /proc/sys/net/ipv4/ip_forward 2>/dev/null; then
|
||||
if cat /proc/sys/net/ipv4/ip_forward 2>/dev/null | grep -q 1; then
|
||||
echo "[init] IP forwarding already enabled."
|
||||
elif echo 1 > /proc/sys/net/ipv4/ip_forward 2>/dev/null; then
|
||||
echo "[init] IP forwarding enabled via /proc."
|
||||
else
|
||||
echo "[init] /proc read-only — relying on --sysctl net.ipv4.ip_forward=1"
|
||||
@@ -139,6 +141,20 @@ start_vpn() {
|
||||
ip route add 0.0.0.0/1 dev "$INTERFACE"
|
||||
ip route add 128.0.0.0/1 dev "$INTERFACE"
|
||||
|
||||
# ── Policy routing: ensure responses to incoming LAN traffic go back via eth0 ──
|
||||
if [ -n "$DEFAULT_GW" ] && [ -n "$DEFAULT_IF" ]; then
|
||||
# Get the container's eth0 IP address (BusyBox-compatible, no grep -P)
|
||||
ETH0_IP=$(ip -4 addr show "$DEFAULT_IF" | awk '/inet / {split($2, a, "/"); print a[1]}' | head -1)
|
||||
ETH0_SUBNET=$(ip -4 route show dev "$DEFAULT_IF" | grep -v default | head -1 | awk '{print $1}')
|
||||
if [ -n "$ETH0_IP" ] && [ -n "$ETH0_SUBNET" ]; then
|
||||
echo "[vpn] Setting up policy routing for incoming traffic (${ETH0_IP} on ${DEFAULT_IF})"
|
||||
ip route add default via "$DEFAULT_GW" dev "$DEFAULT_IF" table 100 2>/dev/null || true
|
||||
ip route add "$ETH0_SUBNET" dev "$DEFAULT_IF" table 100 2>/dev/null || true
|
||||
ip rule add from "$ETH0_IP" table 100 priority 100 2>/dev/null || true
|
||||
echo "[vpn] Policy routing active — incoming connections will be routed back via ${DEFAULT_IF}"
|
||||
fi
|
||||
fi
|
||||
|
||||
# Set up DNS
|
||||
VPN_DNS=$(grep -i '^DNS' "$CONFIG_FILE" | head -1 | sed 's/.*= *//;s/ //g')
|
||||
if [ -n "$VPN_DNS" ]; then
|
||||
@@ -169,7 +185,7 @@ health_loop() {
|
||||
while true; do
|
||||
sleep "$CHECK_INTERVAL"
|
||||
|
||||
if ping -c 1 -W 5 "$CHECK_HOST" > /dev/null 2>&1; then
|
||||
if curl -sf --max-time 5 "http://$CHECK_HOST" > /dev/null 2>&1; then
|
||||
if [ "$failures" -gt 0 ]; then
|
||||
echo "[health] VPN recovered."
|
||||
failures=0
|
||||
|
||||
54
Docker/podman-compose.prod.yml
Normal file
54
Docker/podman-compose.prod.yml
Normal file
@@ -0,0 +1,54 @@
|
||||
# Production compose — pulls pre-built images from Gitea registry.
|
||||
#
|
||||
# Usage:
|
||||
# podman login git.lpl-mind.de
|
||||
# podman-compose -f podman-compose.prod.yml pull
|
||||
# podman-compose -f podman-compose.prod.yml up -d
|
||||
#
|
||||
# Required files:
|
||||
# - wg0.conf (WireGuard configuration in the same directory)
|
||||
|
||||
services:
|
||||
vpn:
|
||||
image: git.lpl-mind.de/lukas.pupkalipinski/aniworld/vpn:latest
|
||||
container_name: vpn-wireguard
|
||||
cap_add:
|
||||
- NET_ADMIN
|
||||
- SYS_MODULE
|
||||
sysctls:
|
||||
- net.ipv4.ip_forward=1
|
||||
- net.ipv4.conf.all.src_valid_mark=1
|
||||
volumes:
|
||||
- /server/server_aniworld/wg0.conf:/etc/wireguard/wg0.conf:ro
|
||||
- /lib/modules:/lib/modules:ro
|
||||
ports:
|
||||
- "2000:8000"
|
||||
environment:
|
||||
- HEALTH_CHECK_INTERVAL=10
|
||||
- HEALTH_CHECK_HOST=1.1.1.1
|
||||
- LOCAL_PORTS=8000
|
||||
- PUID=1013
|
||||
- PGID=1001
|
||||
restart: unless-stopped
|
||||
healthcheck:
|
||||
test: ["CMD", "curl", "-sf", "--max-time", "5", "http://1.1.1.1"]
|
||||
interval: 30s
|
||||
timeout: 10s
|
||||
retries: 5
|
||||
start_period: 60s
|
||||
|
||||
app:
|
||||
image: git.lpl-mind.de/lukas.pupkalipinski/aniworld/app:latest
|
||||
container_name: aniworld-app
|
||||
network_mode: "service:vpn"
|
||||
depends_on:
|
||||
vpn:
|
||||
condition: service_healthy
|
||||
environment:
|
||||
- PYTHONUNBUFFERED=1
|
||||
- PUID=1013
|
||||
- PGID=1001
|
||||
volumes:
|
||||
- /server/server_aniworld/data:/app/data
|
||||
- /server/server_aniworld/logs:/app/logs
|
||||
restart: unless-stopped
|
||||
97
Docker/push.sh
Normal file
97
Docker/push.sh
Normal file
@@ -0,0 +1,97 @@
|
||||
#!/usr/bin/env bash
|
||||
# filepath: /home/lukas/Volume/repo/Aniworld/Docker/push.sh
|
||||
#
|
||||
# Build and push Aniworld container images to the Gitea registry.
|
||||
#
|
||||
# Usage:
|
||||
# ./push.sh # builds & pushes with tag "latest"
|
||||
# ./push.sh v1.2.3 # builds & pushes with tag "v1.2.3"
|
||||
# ./push.sh v1.2.3 --no-build # pushes existing images only
|
||||
#
|
||||
# Prerequisites:
|
||||
# podman login git.lpl-mind.de
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Configuration
|
||||
# ---------------------------------------------------------------------------
|
||||
REGISTRY="git.lpl-mind.de"
|
||||
NAMESPACE="lukas.pupkalipinski"
|
||||
PROJECT="aniworld"
|
||||
|
||||
APP_IMAGE="${REGISTRY}/${NAMESPACE}/${PROJECT}/app"
|
||||
VPN_IMAGE="${REGISTRY}/${NAMESPACE}/${PROJECT}/vpn"
|
||||
|
||||
TAG="${1:-latest}"
|
||||
SKIP_BUILD=false
|
||||
if [[ "${2:-}" == "--no-build" ]]; then
|
||||
SKIP_BUILD=true
|
||||
fi
|
||||
|
||||
SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
|
||||
PROJECT_ROOT="$(cd "${SCRIPT_DIR}/.." && pwd)"
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
log() { echo -e "\n>>> $*"; }
|
||||
err() { echo -e "\n❌ ERROR: $*" >&2; exit 1; }
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Pre-flight checks
|
||||
# ---------------------------------------------------------------------------
|
||||
echo "============================================"
|
||||
echo " Aniworld — Build & Push"
|
||||
echo " Registry : ${REGISTRY}"
|
||||
echo " Tag : ${TAG}"
|
||||
echo "============================================"
|
||||
|
||||
command -v podman &>/dev/null || err "podman is not installed."
|
||||
|
||||
if ! podman login --get-login "${REGISTRY}" &>/dev/null; then
|
||||
err "Not logged in. Run:\n podman login ${REGISTRY}"
|
||||
fi
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Build
|
||||
# ---------------------------------------------------------------------------
|
||||
if [[ "${SKIP_BUILD}" == false ]]; then
|
||||
log "Building app image → ${APP_IMAGE}:${TAG}"
|
||||
podman build \
|
||||
-t "${APP_IMAGE}:${TAG}" \
|
||||
-f "${SCRIPT_DIR}/Dockerfile.app" \
|
||||
"${PROJECT_ROOT}"
|
||||
|
||||
log "Building VPN image → ${VPN_IMAGE}:${TAG}"
|
||||
podman build \
|
||||
-t "${VPN_IMAGE}:${TAG}" \
|
||||
-f "${SCRIPT_DIR}/Containerfile" \
|
||||
"${SCRIPT_DIR}"
|
||||
fi
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Push
|
||||
# ---------------------------------------------------------------------------
|
||||
log "Pushing ${APP_IMAGE}:${TAG}"
|
||||
podman push "${APP_IMAGE}:${TAG}"
|
||||
|
||||
log "Pushing ${VPN_IMAGE}:${TAG}"
|
||||
podman push "${VPN_IMAGE}:${TAG}"
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Summary
|
||||
# ---------------------------------------------------------------------------
|
||||
echo ""
|
||||
echo "============================================"
|
||||
echo " ✅ Push complete!"
|
||||
echo ""
|
||||
echo " Images:"
|
||||
echo " ${APP_IMAGE}:${TAG}"
|
||||
echo " ${VPN_IMAGE}:${TAG}"
|
||||
echo ""
|
||||
echo " Deploy on server:"
|
||||
echo " podman login ${REGISTRY}"
|
||||
echo " podman-compose -f podman-compose.prod.yml pull"
|
||||
echo " podman-compose -f podman-compose.prod.yml up -d"
|
||||
echo "============================================"
|
||||
@@ -120,3 +120,97 @@ For each task completed:
|
||||
## TODO List:
|
||||
|
||||
---
|
||||
|
||||
fix: download not working:
|
||||
|
||||
elf.\_progress_service.update_progress(
|
||||
|
||||
File "/app/src/server/services/progress_service.py", line 369, in update_progress
|
||||
|
||||
raise ProgressServiceError(
|
||||
|
||||
src.server.services.progress_service.ProgressServiceError: Progress with id 'download_queue' not found
|
||||
|
||||
2026-03-11 15:15:42 [info ] Processing next item from queue item_id=108 remaining=2 serie=I Was Reincarnated as the 7th Prince So I Can Take My Time Perfecting My Magical Ability
|
||||
|
||||
2026-03-11 15:15:42 [error ] Error in queue processing loop error=Progress with id 'download_queue' not found
|
||||
|
||||
Traceback (most recent call last):
|
||||
|
||||
File "/app/src/server/services/download_service.py", line 716, in \_process_queue
|
||||
|
||||
await self._progress_service.update_progress(
|
||||
|
||||
File "/app/src/server/services/progress_service.py", line 369, in update_progress
|
||||
|
||||
raise ProgressServiceError(
|
||||
|
||||
src.server.services.progress_service.ProgressServiceError: Progress with id 'download_queue' not found
|
||||
|
||||
2026-03-11 15:15:43 [info ] Processing next item from queue item_id=109 remaining=1 serie=I Was Reincarnated as the 7th Prince So I Can Take My Time Perfecting My Magical Ability
|
||||
|
||||
2026-03-11 15:15:43 [error ] Error in queue processing loop error=Progress with id 'download_queue' not found
|
||||
|
||||
Traceback (most recent call last):
|
||||
|
||||
File "/app/src/server/services/download_service.py", line 716, in \_process_queue
|
||||
|
||||
await self._progress_service.update_progress(
|
||||
|
||||
File "/app/src/server/services/progress_service.py", line 369, in update_progress
|
||||
|
||||
raise ProgressServiceError(
|
||||
|
||||
src.server.services.progress_service.ProgressServiceError: Progress with id 'download_queue' not found
|
||||
|
||||
2026-03-11 15:15:44 [info ] Processing next item from queue item_id=110 remaining=0 serie=I Was Reincarnated as the 7th Prince So I Can Take My Time Perfecting My Magical Ability
|
||||
|
||||
2026-03-11 15:15:44 [error ] Error in queue processing loop error=Progress with id 'download_queue' not found
|
||||
|
||||
Traceback (most recent call last):
|
||||
|
||||
File "/app/src/server/services/download_service.py", line 716, in \_process_queue
|
||||
|
||||
await self._progress_service.update_progress(
|
||||
|
||||
File "/app/src/server/services/progress_service.py", line 369, in update_progress
|
||||
|
||||
raise ProgressServiceError(
|
||||
|
||||
src.server.services.progress_service.ProgressServiceError: Progress with id 'download_queue' not found
|
||||
|
||||
2026-03-11 15:15:45 [info ] Queue processing completed - all items processed
|
||||
|
||||
ERROR: Task exception was never retrieved
|
||||
|
||||
future: <Task finished name='Task-16373' coro=<DownloadService.\_process_queue() done, defined at /app/src/server/services/download_service.py:688> exception=ProgressServiceError("Progress with id 'download_queue' not found")>
|
||||
|
||||
Traceback (most recent call last):
|
||||
|
||||
File "/app/src/server/services/download_service.py", line 755, in \_process_queue
|
||||
|
||||
await self._progress_service.complete_progress(
|
||||
|
||||
File "/app/src/server/services/progress_service.py", line 446, in complete_progress
|
||||
|
||||
raise ProgressServiceError(
|
||||
|
||||
src.server.services.progress_service.ProgressServiceError: Progress with id 'download_queue' not found
|
||||
|
||||
2026-03-11 15:15:46 [info ] Queue processing completed - all items processed
|
||||
|
||||
ERROR: Task exception was never retrieved
|
||||
|
||||
future: <Task finished name='Task-16313' coro=<DownloadService.\_process_queue() done, defined at /app/src/server/services/download_service.py:688> exception=ProgressServiceError("Progress with id 'download_queue' not found")>
|
||||
|
||||
Traceback (most recent call last):
|
||||
|
||||
File "/app/src/server/services/download_service.py", line 755, in \_process_queue
|
||||
|
||||
await self._progress_service.complete_progress(
|
||||
|
||||
File "/app/src/server/services/progress_service.py", line 446, in complete_progress
|
||||
|
||||
raise ProgressServiceError(
|
||||
|
||||
src.server.services.progress_service.ProgressServiceError: Progress with id 'download_queue' not found
|
||||
|
||||
@@ -18,4 +18,10 @@ aiosqlite>=0.19.0
|
||||
aiohttp>=3.9.0
|
||||
lxml>=5.0.0
|
||||
pillow>=10.0.0
|
||||
APScheduler>=3.10.4
|
||||
APScheduler>=3.10.4
|
||||
Events>=0.5
|
||||
requests>=2.31.0
|
||||
beautifulsoup4>=4.12.0
|
||||
fake-useragent>=1.4.0
|
||||
yt-dlp>=2024.1.0
|
||||
urllib3>=2.0.0
|
||||
@@ -160,7 +160,7 @@ class NFOService:
|
||||
|
||||
logger.info(f"Found match: {tv_show['name']} (ID: {tv_id})")
|
||||
|
||||
# Get detailed information
|
||||
# Get detailed information with multi-language image support
|
||||
details = await self.tmdb_client.get_tv_show_details(
|
||||
tv_id,
|
||||
append_to_response="credits,external_ids,images"
|
||||
@@ -169,6 +169,13 @@ class NFOService:
|
||||
# Get content ratings for FSK
|
||||
content_ratings = await self.tmdb_client.get_tv_show_content_ratings(tv_id)
|
||||
|
||||
# Enrich with fallback languages for empty overview/tagline
|
||||
# Pass search result overview as last resort fallback
|
||||
search_overview = tv_show.get("overview") or None
|
||||
details = await self._enrich_details_with_fallback(
|
||||
details, search_overview=search_overview
|
||||
)
|
||||
|
||||
# Convert TMDB data to TVShowNFO model
|
||||
nfo_model = tmdb_to_nfo_model(
|
||||
details,
|
||||
@@ -264,6 +271,8 @@ class NFOService:
|
||||
# Get content ratings for FSK
|
||||
content_ratings = await self.tmdb_client.get_tv_show_content_ratings(tmdb_id)
|
||||
|
||||
# Enrich with fallback languages for empty overview/tagline
|
||||
details = await self._enrich_details_with_fallback(details)
|
||||
# Convert TMDB data to TVShowNFO model
|
||||
nfo_model = tmdb_to_nfo_model(
|
||||
details,
|
||||
@@ -372,6 +381,81 @@ class NFOService:
|
||||
|
||||
return result
|
||||
|
||||
async def _enrich_details_with_fallback(
|
||||
self,
|
||||
details: Dict[str, Any],
|
||||
search_overview: Optional[str] = None,
|
||||
) -> Dict[str, Any]:
|
||||
"""Enrich TMDB details with fallback languages for empty fields.
|
||||
|
||||
When requesting details in ``de-DE``, some anime have an empty
|
||||
``overview`` (and potentially other translatable fields). This
|
||||
method detects empty values and fills them from alternative
|
||||
languages (``en-US``, then ``ja-JP``) so that NFO files always
|
||||
contain a ``plot`` regardless of whether the German translation
|
||||
exists. As a last resort, the overview from the search result
|
||||
is used.
|
||||
|
||||
Args:
|
||||
details: TMDB TV show details (language ``de-DE``).
|
||||
search_overview: Overview text from the TMDB search result,
|
||||
used as a final fallback if all language-specific
|
||||
requests fail or return empty overviews.
|
||||
|
||||
Returns:
|
||||
The *same* dict, mutated in-place with fallback values
|
||||
where needed.
|
||||
"""
|
||||
overview = details.get("overview") or ""
|
||||
|
||||
if overview:
|
||||
# Overview already populated – nothing to do.
|
||||
return details
|
||||
|
||||
tmdb_id = details.get("id")
|
||||
fallback_languages = ["en-US", "ja-JP"]
|
||||
|
||||
for lang in fallback_languages:
|
||||
if details.get("overview"):
|
||||
break
|
||||
|
||||
logger.debug(
|
||||
"Trying %s fallback for TMDB ID %s",
|
||||
lang, tmdb_id,
|
||||
)
|
||||
|
||||
try:
|
||||
lang_details = await self.tmdb_client.get_tv_show_details(
|
||||
tmdb_id,
|
||||
language=lang,
|
||||
)
|
||||
|
||||
if not details.get("overview") and lang_details.get("overview"):
|
||||
details["overview"] = lang_details["overview"]
|
||||
logger.info(
|
||||
"Used %s overview fallback for TMDB ID %s",
|
||||
lang, tmdb_id,
|
||||
)
|
||||
|
||||
# Also fill tagline if missing
|
||||
if not details.get("tagline") and lang_details.get("tagline"):
|
||||
details["tagline"] = lang_details["tagline"]
|
||||
except Exception as exc: # pylint: disable=broad-except
|
||||
logger.warning(
|
||||
"Failed to fetch %s fallback for TMDB ID %s: %s",
|
||||
lang, tmdb_id, exc,
|
||||
)
|
||||
|
||||
# Last resort: use search result overview
|
||||
if not details.get("overview") and search_overview:
|
||||
details["overview"] = search_overview
|
||||
logger.info(
|
||||
"Used search result overview fallback for TMDB ID %s",
|
||||
tmdb_id,
|
||||
)
|
||||
|
||||
return details
|
||||
|
||||
def _find_best_match(
|
||||
self,
|
||||
results: List[Dict[str, Any]],
|
||||
|
||||
@@ -43,8 +43,10 @@ def generate_tvshow_nfo(tvshow: TVShowNFO, pretty_print: bool = True) -> str:
|
||||
_add_element(root, "sorttitle", tvshow.sorttitle)
|
||||
_add_element(root, "year", str(tvshow.year) if tvshow.year else None)
|
||||
|
||||
# Plot and description
|
||||
_add_element(root, "plot", tvshow.plot)
|
||||
# Plot and description – always write <plot> even when empty so that
|
||||
# all NFO files have a consistent set of tags regardless of whether they
|
||||
# were produced by create or update.
|
||||
_add_element(root, "plot", tvshow.plot, always_write=True)
|
||||
_add_element(root, "outline", tvshow.outline)
|
||||
_add_element(root, "tagline", tvshow.tagline)
|
||||
|
||||
@@ -164,13 +166,23 @@ def generate_tvshow_nfo(tvshow: TVShowNFO, pretty_print: bool = True) -> str:
|
||||
return xml_declaration + xml_str
|
||||
|
||||
|
||||
def _add_element(parent: etree.Element, tag: str, text: Optional[str]) -> Optional[etree.Element]:
|
||||
def _add_element(
|
||||
parent: etree.Element,
|
||||
tag: str,
|
||||
text: Optional[str],
|
||||
always_write: bool = False,
|
||||
) -> Optional[etree.Element]:
|
||||
"""Add a child element to parent if text is not None or empty.
|
||||
|
||||
Args:
|
||||
parent: Parent XML element
|
||||
tag: Tag name for child element
|
||||
text: Text content (None or empty strings are skipped)
|
||||
text: Text content (None or empty strings are skipped
|
||||
unless *always_write* is True)
|
||||
always_write: When True the element is created even when
|
||||
*text* is None/empty (the element will have
|
||||
no text content). Useful for tags like
|
||||
``<plot>`` that should always be present.
|
||||
|
||||
Returns:
|
||||
Created element or None if skipped
|
||||
@@ -179,6 +191,8 @@ def _add_element(parent: etree.Element, tag: str, text: Optional[str]) -> Option
|
||||
elem = etree.SubElement(parent, tag)
|
||||
elem.text = text
|
||||
return elem
|
||||
if always_write:
|
||||
return etree.SubElement(parent, tag)
|
||||
return None
|
||||
|
||||
|
||||
|
||||
@@ -14,6 +14,7 @@ from typing import Any, Callable, Dict, List, Optional
|
||||
from src.core.entities.nfo_models import (
|
||||
ActorInfo,
|
||||
ImageInfo,
|
||||
NamedSeason,
|
||||
RatingInfo,
|
||||
TVShowNFO,
|
||||
UniqueID,
|
||||
@@ -167,6 +168,17 @@ def tmdb_to_nfo_model(
|
||||
tmdbid=member["id"],
|
||||
))
|
||||
|
||||
# --- Named seasons ---
|
||||
named_seasons: List[NamedSeason] = []
|
||||
for season_info in tmdb_data.get("seasons", []):
|
||||
season_name = season_info.get("name")
|
||||
season_number = season_info.get("season_number")
|
||||
if season_name and season_number is not None:
|
||||
named_seasons.append(NamedSeason(
|
||||
number=season_number,
|
||||
name=season_name,
|
||||
))
|
||||
|
||||
# --- Unique IDs ---
|
||||
unique_ids: List[UniqueID] = []
|
||||
if tmdb_data.get("id"):
|
||||
@@ -194,6 +206,7 @@ def tmdb_to_nfo_model(
|
||||
return TVShowNFO(
|
||||
title=title,
|
||||
originaltitle=original_title,
|
||||
showtitle=title,
|
||||
sorttitle=title,
|
||||
year=year,
|
||||
plot=tmdb_data.get("overview") or None,
|
||||
@@ -215,6 +228,7 @@ def tmdb_to_nfo_model(
|
||||
thumb=thumb_images,
|
||||
fanart=fanart_images,
|
||||
actors=actors,
|
||||
namedseason=named_seasons,
|
||||
watched=False,
|
||||
dateadded=datetime.now().strftime("%Y-%m-%d %H:%M:%S"),
|
||||
)
|
||||
|
||||
@@ -210,8 +210,12 @@ class DownloadService:
|
||||
) -> bool:
|
||||
"""Remove a downloaded episode from the missing episodes list.
|
||||
|
||||
Called when a download completes successfully to update the
|
||||
database so the episode no longer appears as missing.
|
||||
Called when a download completes successfully to update both:
|
||||
1. The database (Episode record deleted)
|
||||
2. The in-memory Serie.episodeDict and series_list cache
|
||||
|
||||
This ensures the episode no longer appears as missing in both
|
||||
the API responses and the UI immediately after download.
|
||||
|
||||
Args:
|
||||
series_key: Unique provider key for the series
|
||||
@@ -225,6 +229,14 @@ class DownloadService:
|
||||
from src.server.database.connection import get_db_session
|
||||
from src.server.database.service import EpisodeService
|
||||
|
||||
logger.info(
|
||||
"Attempting to remove missing episode from DB: "
|
||||
"%s S%02dE%02d",
|
||||
series_key,
|
||||
season,
|
||||
episode,
|
||||
)
|
||||
|
||||
async with get_db_session() as db:
|
||||
deleted = await EpisodeService.delete_by_series_and_episode(
|
||||
db=db,
|
||||
@@ -234,25 +246,136 @@ class DownloadService:
|
||||
)
|
||||
if deleted:
|
||||
logger.info(
|
||||
"Removed episode from missing list: "
|
||||
"Successfully removed episode from DB missing list: "
|
||||
"%s S%02dE%02d",
|
||||
series_key,
|
||||
season,
|
||||
episode,
|
||||
)
|
||||
# Clear the anime service cache so list_missing
|
||||
# returns updated data
|
||||
try:
|
||||
self._anime_service._cached_list_missing.cache_clear()
|
||||
except Exception:
|
||||
pass
|
||||
return deleted
|
||||
else:
|
||||
logger.warning(
|
||||
"Episode not found in DB missing list "
|
||||
"(may already be removed): %s S%02dE%02d",
|
||||
series_key,
|
||||
season,
|
||||
episode,
|
||||
)
|
||||
|
||||
# Update in-memory Serie.episodeDict so list_missing is
|
||||
# immediately consistent without a full DB reload
|
||||
self._remove_episode_from_memory(series_key, season, episode)
|
||||
|
||||
# Clear the anime service cache so list_missing
|
||||
# re-reads from the (now updated) in-memory state
|
||||
try:
|
||||
self._anime_service._cached_list_missing.cache_clear()
|
||||
logger.debug(
|
||||
"Cleared list_missing cache after removing "
|
||||
"%s S%02dE%02d",
|
||||
series_key,
|
||||
season,
|
||||
episode,
|
||||
)
|
||||
except Exception:
|
||||
pass
|
||||
|
||||
return deleted
|
||||
except Exception as e:
|
||||
logger.error(
|
||||
"Failed to remove episode from missing list: %s", e
|
||||
"Failed to remove episode from missing list: "
|
||||
"%s S%02dE%02d - %s",
|
||||
series_key,
|
||||
season,
|
||||
episode,
|
||||
e,
|
||||
)
|
||||
return False
|
||||
|
||||
def _remove_episode_from_memory(
|
||||
self,
|
||||
series_key: str,
|
||||
season: int,
|
||||
episode: int,
|
||||
) -> None:
|
||||
"""Remove an episode from the in-memory Serie.episodeDict.
|
||||
|
||||
Updates the SeriesApp's keyDict so that list_missing and
|
||||
series_list reflect the removal immediately without needing
|
||||
a full database reload.
|
||||
|
||||
Args:
|
||||
series_key: Unique provider key for the series
|
||||
season: Season number
|
||||
episode: Episode number within season
|
||||
"""
|
||||
try:
|
||||
app = self._anime_service._app
|
||||
serie = app.list.keyDict.get(series_key)
|
||||
if not serie:
|
||||
logger.debug(
|
||||
"Series %s not found in keyDict, skipping "
|
||||
"in-memory removal",
|
||||
series_key,
|
||||
)
|
||||
return
|
||||
|
||||
ep_dict = serie.episodeDict
|
||||
if season not in ep_dict:
|
||||
logger.debug(
|
||||
"Season %d not in episodeDict for %s, "
|
||||
"skipping in-memory removal",
|
||||
season,
|
||||
series_key,
|
||||
)
|
||||
return
|
||||
|
||||
if episode in ep_dict[season]:
|
||||
ep_dict[season].remove(episode)
|
||||
logger.info(
|
||||
"Removed episode from in-memory episodeDict: "
|
||||
"%s S%02dE%02d (remaining in season: %s)",
|
||||
series_key,
|
||||
season,
|
||||
episode,
|
||||
ep_dict[season],
|
||||
)
|
||||
|
||||
# Remove the season key if no episodes remain
|
||||
if not ep_dict[season]:
|
||||
del ep_dict[season]
|
||||
logger.info(
|
||||
"Removed empty season %d from episodeDict "
|
||||
"for %s",
|
||||
season,
|
||||
series_key,
|
||||
)
|
||||
|
||||
# Refresh series_list so GetMissingEpisode()
|
||||
# reflects the change
|
||||
app.series_list = app.list.GetMissingEpisode()
|
||||
logger.info(
|
||||
"Refreshed series_list: %d series with "
|
||||
"missing episodes remaining",
|
||||
len(app.series_list),
|
||||
)
|
||||
else:
|
||||
logger.debug(
|
||||
"Episode %d not in season %d for %s, "
|
||||
"already removed from memory",
|
||||
episode,
|
||||
season,
|
||||
series_key,
|
||||
)
|
||||
except Exception as e:
|
||||
logger.warning(
|
||||
"Failed to remove episode from in-memory state: "
|
||||
"%s S%02dE%02d - %s",
|
||||
series_key,
|
||||
season,
|
||||
episode,
|
||||
e,
|
||||
)
|
||||
|
||||
async def _init_queue_progress(self) -> None:
|
||||
"""Initialize the download queue progress tracking.
|
||||
|
||||
@@ -272,7 +395,16 @@ class DownloadService:
|
||||
)
|
||||
self._queue_progress_initialized = True
|
||||
except Exception as e:
|
||||
logger.error("Failed to initialize queue progress: %s", e)
|
||||
# If the entry already exists (e.g. from a concurrent task),
|
||||
# treat that as success — the progress is usable.
|
||||
from src.server.services.progress_service import ProgressServiceError
|
||||
if isinstance(e, ProgressServiceError) and "already exists" in str(e):
|
||||
logger.debug(
|
||||
"Queue progress already initialized by concurrent task"
|
||||
)
|
||||
self._queue_progress_initialized = True
|
||||
else:
|
||||
logger.error("Failed to initialize queue progress: %s", e)
|
||||
|
||||
def _add_to_pending_queue(
|
||||
self, item: DownloadItem, front: bool = False
|
||||
@@ -636,8 +768,12 @@ class DownloadService:
|
||||
"queue_status": queue_status.model_dump(mode="json")
|
||||
},
|
||||
)
|
||||
# Reset flag so next queue run re-creates the progress entry
|
||||
self._queue_progress_initialized = False
|
||||
else:
|
||||
logger.info("Queue processing stopped by user")
|
||||
# Reset flag so next queue run re-creates the progress entry
|
||||
self._queue_progress_initialized = False
|
||||
|
||||
async def start_next_download(self) -> Optional[str]:
|
||||
"""Legacy method - redirects to start_queue_processing.
|
||||
@@ -658,18 +794,21 @@ class DownloadService:
|
||||
self._is_stopped = True
|
||||
logger.info("Download processing stopped")
|
||||
|
||||
# Notify via progress service
|
||||
queue_status = await self.get_queue_status()
|
||||
await self._progress_service.update_progress(
|
||||
progress_id="download_queue",
|
||||
message="Queue processing stopped",
|
||||
metadata={
|
||||
"action": "queue_stopped",
|
||||
"is_stopped": True,
|
||||
"queue_status": queue_status.model_dump(mode="json"),
|
||||
},
|
||||
force_broadcast=True,
|
||||
)
|
||||
# Notify via progress service (guard against entry not existing)
|
||||
try:
|
||||
queue_status = await self.get_queue_status()
|
||||
await self._progress_service.update_progress(
|
||||
progress_id="download_queue",
|
||||
message="Queue processing stopped",
|
||||
metadata={
|
||||
"action": "queue_stopped",
|
||||
"is_stopped": True,
|
||||
"queue_status": queue_status.model_dump(mode="json"),
|
||||
},
|
||||
force_broadcast=True,
|
||||
)
|
||||
except Exception as e:
|
||||
logger.warning("Could not update queue progress on stop: %s", e)
|
||||
|
||||
async def get_queue_status(self) -> QueueStatus:
|
||||
"""Get current status of all queues.
|
||||
@@ -933,18 +1072,35 @@ class DownloadService:
|
||||
|
||||
self._completed_items.append(item)
|
||||
|
||||
# Delete completed item from database (status is in-memory)
|
||||
logger.info(
|
||||
"Download succeeded, cleaning up: item_id=%s, "
|
||||
"serie_key=%s, S%02dE%02d",
|
||||
item.id,
|
||||
item.serie_id,
|
||||
item.episode.season,
|
||||
item.episode.episode,
|
||||
)
|
||||
|
||||
# Delete completed item from download queue database
|
||||
await self._delete_from_database(item.id)
|
||||
|
||||
# Remove episode from missing episodes list in database
|
||||
await self._remove_episode_from_missing_list(
|
||||
# Remove episode from missing episodes list
|
||||
# (both database and in-memory)
|
||||
removed = await self._remove_episode_from_missing_list(
|
||||
series_key=item.serie_id,
|
||||
season=item.episode.season,
|
||||
episode=item.episode.episode,
|
||||
)
|
||||
|
||||
logger.info(
|
||||
"Download completed successfully: item_id=%s", item.id
|
||||
"Download completed successfully: item_id=%s, "
|
||||
"serie_key=%s, S%02dE%02d, "
|
||||
"missing_episode_removed=%s",
|
||||
item.id,
|
||||
item.serie_id,
|
||||
item.episode.season,
|
||||
item.episode.episode,
|
||||
removed,
|
||||
)
|
||||
else:
|
||||
raise AnimeServiceError("Download returned False")
|
||||
|
||||
@@ -36,6 +36,11 @@ AniWorld.SelectionManager = (function() {
|
||||
if (downloadBtn) {
|
||||
downloadBtn.addEventListener('click', downloadSelected);
|
||||
}
|
||||
|
||||
const refreshNfoBtn = document.getElementById('refresh-nfo-selected');
|
||||
if (refreshNfoBtn) {
|
||||
refreshNfoBtn.addEventListener('click', refreshNFOForSelected);
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -90,6 +95,11 @@ AniWorld.SelectionManager = (function() {
|
||||
|
||||
downloadBtn.disabled = selectedSeries.size === 0;
|
||||
|
||||
const refreshNfoBtn = document.getElementById('refresh-nfo-selected');
|
||||
if (refreshNfoBtn) {
|
||||
refreshNfoBtn.disabled = selectedSeries.size === 0;
|
||||
}
|
||||
|
||||
const allSelectableSelected = selectableKeys.every(function(key) {
|
||||
return selectedSeries.has(key);
|
||||
});
|
||||
@@ -274,6 +284,60 @@ AniWorld.SelectionManager = (function() {
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get selected series keys
|
||||
* @returns {Array<string>}
|
||||
*/
|
||||
function getSelectedKeys() {
|
||||
return Array.from(selectedSeries);
|
||||
}
|
||||
|
||||
/**
|
||||
* Refresh NFO metadata for all selected series
|
||||
*/
|
||||
async function refreshNFOForSelected() {
|
||||
if (selectedSeries.size === 0) {
|
||||
AniWorld.UI.showToast('No series selected', 'warning');
|
||||
return;
|
||||
}
|
||||
|
||||
if (!AniWorld.NFOManager) {
|
||||
AniWorld.UI.showToast('NFO Manager not available', 'error');
|
||||
return;
|
||||
}
|
||||
|
||||
const keys = Array.from(selectedSeries);
|
||||
let successCount = 0;
|
||||
let failCount = 0;
|
||||
|
||||
AniWorld.UI.showLoading('Refreshing NFO for ' + keys.length + ' series...');
|
||||
|
||||
for (const key of keys) {
|
||||
try {
|
||||
await AniWorld.NFOManager.refreshNFO(key);
|
||||
successCount++;
|
||||
} catch (error) {
|
||||
console.error('Error refreshing NFO for ' + key + ':', error);
|
||||
failCount++;
|
||||
}
|
||||
}
|
||||
|
||||
AniWorld.UI.hideLoading();
|
||||
|
||||
if (failCount === 0) {
|
||||
AniWorld.UI.showToast('NFO refreshed for ' + successCount + ' series', 'success');
|
||||
} else {
|
||||
AniWorld.UI.showToast(
|
||||
'NFO refreshed for ' + successCount + ' series, ' + failCount + ' failed',
|
||||
failCount === keys.length ? 'error' : 'warning'
|
||||
);
|
||||
}
|
||||
|
||||
if (successCount > 0 && AniWorld.SeriesManager) {
|
||||
AniWorld.SeriesManager.loadSeries();
|
||||
}
|
||||
}
|
||||
|
||||
/**
|
||||
* Get selected series count
|
||||
* @returns {number}
|
||||
@@ -291,6 +355,8 @@ AniWorld.SelectionManager = (function() {
|
||||
toggleSelectAll: toggleSelectAll,
|
||||
clearSelection: clearSelection,
|
||||
downloadSelected: downloadSelected,
|
||||
refreshNFOForSelected: refreshNFOForSelected,
|
||||
getSelectedKeys: getSelectedKeys,
|
||||
getSelectionCount: getSelectionCount
|
||||
};
|
||||
})();
|
||||
|
||||
@@ -237,46 +237,6 @@ AniWorld.SeriesManager = (function() {
|
||||
});
|
||||
});
|
||||
|
||||
// Bind NFO button events
|
||||
grid.querySelectorAll('.nfo-create-btn').forEach(function(btn) {
|
||||
btn.addEventListener('click', function(e) {
|
||||
e.stopPropagation();
|
||||
const seriesKey = e.currentTarget.dataset.key;
|
||||
if (AniWorld.NFOManager) {
|
||||
AniWorld.NFOManager.createNFO(seriesKey).then(function() {
|
||||
// Reload series to reflect new NFO status
|
||||
loadSeries();
|
||||
}).catch(function(error) {
|
||||
console.error('Error creating NFO:', error);
|
||||
});
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
grid.querySelectorAll('.nfo-view-btn').forEach(function(btn) {
|
||||
btn.addEventListener('click', function(e) {
|
||||
e.stopPropagation();
|
||||
const seriesKey = e.currentTarget.dataset.key;
|
||||
if (AniWorld.NFOManager) {
|
||||
AniWorld.NFOManager.showNFOModal(seriesKey);
|
||||
}
|
||||
});
|
||||
});
|
||||
|
||||
grid.querySelectorAll('.nfo-refresh-btn').forEach(function(btn) {
|
||||
btn.addEventListener('click', function(e) {
|
||||
e.stopPropagation();
|
||||
const seriesKey = e.currentTarget.dataset.key;
|
||||
if (AniWorld.NFOManager) {
|
||||
AniWorld.NFOManager.refreshNFO(seriesKey).then(function() {
|
||||
// Reload series to reflect updated NFO
|
||||
loadSeries();
|
||||
}).catch(function(error) {
|
||||
console.error('Error refreshing NFO:', error);
|
||||
});
|
||||
}
|
||||
});
|
||||
});
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -376,15 +336,6 @@ AniWorld.SeriesManager = (function() {
|
||||
'<span class="series-site">' + serie.site + '</span>' +
|
||||
'</div>' +
|
||||
(isLoading ? getLoadingIndicatorHTML(serie) : '') +
|
||||
'<div class="series-actions">' +
|
||||
(hasNfo ?
|
||||
'<button class="btn btn-sm btn-secondary nfo-view-btn" data-key="' + serie.key + '" title="View NFO">' +
|
||||
'<i class="fas fa-eye"></i> View NFO</button>' +
|
||||
'<button class="btn btn-sm btn-secondary nfo-refresh-btn" data-key="' + serie.key + '" title="Refresh NFO">' +
|
||||
'<i class="fas fa-sync-alt"></i> Refresh</button>' :
|
||||
'<button class="btn btn-sm btn-primary nfo-create-btn" data-key="' + serie.key + '" title="Create NFO">' +
|
||||
'<i class="fas fa-plus"></i> Create NFO</button>') +
|
||||
'</div>' +
|
||||
'</div>';
|
||||
}
|
||||
|
||||
|
||||
@@ -141,6 +141,10 @@
|
||||
<i class="fas fa-check-double"></i>
|
||||
<span data-text="select-all">Select All</span>
|
||||
</button>
|
||||
<button id="refresh-nfo-selected" class="btn btn-secondary" disabled title="Refresh NFO for selected series">
|
||||
<i class="fas fa-sync-alt"></i>
|
||||
<span>Refresh NFO</span>
|
||||
</button>
|
||||
<button id="download-selected" class="btn btn-success" disabled>
|
||||
<i class="fas fa-download"></i>
|
||||
<span data-text="download-selected">Download Selected</span>
|
||||
|
||||
@@ -630,3 +630,207 @@ class TestErrorHandling:
|
||||
download_service._failed_items[0].status == DownloadStatus.FAILED
|
||||
)
|
||||
assert download_service._failed_items[0].error is not None
|
||||
|
||||
|
||||
class TestRemoveEpisodeFromMissingList:
    """Completed downloads must remove the finished episode from the missing list."""

    @staticmethod
    def _attach_app(download_service, serie):
        """Build a MagicMock app exposing *serie* and wire it into the anime service."""
        app = MagicMock()
        app.list.keyDict = {serie.key: serie}
        app.list.GetMissingEpisode.return_value = [serie]
        app.series_list = [serie]
        download_service._anime_service._app = app
        return app

    @pytest.mark.asyncio
    async def test_remove_episode_from_memory(self, download_service):
        """_remove_episode_from_memory drops the episode from episodeDict."""
        from src.core.entities.series import Serie

        serie = Serie(
            key="test-series",
            name="Test Series",
            site="https://example.com",
            folder="Test Series (2024)",
            episodeDict={1: [1, 2, 3], 2: [1, 2]},
        )
        self._attach_app(download_service, serie)

        download_service._remove_episode_from_memory("test-series", 1, 2)

        # S01E02 is gone; the rest of season 1 and all of season 2 survive.
        assert 2 not in serie.episodeDict[1]
        assert serie.episodeDict[1] == [1, 3]
        assert serie.episodeDict[2] == [1, 2]

    @pytest.mark.asyncio
    async def test_remove_last_episode_in_season_removes_season(
        self, download_service
    ):
        """Removing a season's only episode deletes the season key entirely."""
        from src.core.entities.series import Serie

        serie = Serie(
            key="test-series",
            name="Test Series",
            site="https://example.com",
            folder="Test Series (2024)",
            episodeDict={1: [5], 2: [1, 2]},
        )
        app = self._attach_app(download_service, serie)

        download_service._remove_episode_from_memory("test-series", 1, 5)

        # Season 1 vanished; season 2 untouched.
        assert 1 not in serie.episodeDict
        assert serie.episodeDict[2] == [1, 2]
        # The missing-episode view must have been refreshed afterwards.
        app.list.GetMissingEpisode.assert_called()

    @pytest.mark.asyncio
    async def test_remove_episode_unknown_series_no_error(
        self, download_service
    ):
        """An unknown series key is a silent no-op rather than an exception."""
        app = MagicMock()
        app.list.keyDict = {}
        download_service._anime_service._app = app

        # Must not raise.
        download_service._remove_episode_from_memory(
            "nonexistent-series", 1, 1
        )

    @pytest.mark.asyncio
    async def test_remove_episode_from_missing_list_calls_db_and_memory(
        self, download_service
    ):
        """_remove_episode_from_missing_list updates DB, memory and the cache."""
        from unittest.mock import patch

        from src.core.entities.series import Serie

        serie = Serie(
            key="test-series",
            name="Test Series",
            site="https://example.com",
            folder="Test Series (2024)",
            episodeDict={1: [1, 2, 3]},
        )
        self._attach_app(download_service, serie)
        download_service._anime_service._cached_list_missing = MagicMock()

        db_session = AsyncMock()
        delete_mock = AsyncMock(return_value=True)

        with patch(
            "src.server.database.connection.get_db_session"
        ) as get_db_mock, patch(
            "src.server.database.service.EpisodeService"
        ) as episode_service_mock:
            get_db_mock.return_value.__aenter__ = AsyncMock(
                return_value=db_session
            )
            get_db_mock.return_value.__aexit__ = AsyncMock(
                return_value=False
            )
            episode_service_mock.delete_by_series_and_episode = delete_mock

            result = await download_service._remove_episode_from_missing_list(
                series_key="test-series",
                season=1,
                episode=2,
            )

        # The DB row was deleted with the expected identifiers.
        delete_mock.assert_awaited_once_with(
            db=db_session,
            series_key="test-series",
            season=1,
            episode_number=2,
        )
        # In-memory state was updated and the cached listing invalidated.
        assert 2 not in serie.episodeDict[1]
        assert serie.episodeDict[1] == [1, 3]
        download_service._anime_service._cached_list_missing.cache_clear.assert_called()
        assert result is True

    @pytest.mark.asyncio
    async def test_download_completion_removes_missing_episode(
        self, download_service
    ):
        """Full flow: a successful download removes the episode from missing."""
        from unittest.mock import patch

        from src.core.entities.series import Serie

        download_service._anime_service.download = AsyncMock(
            return_value=True
        )

        serie = Serie(
            key="series-1",
            name="Test Series",
            site="https://example.com",
            folder="series",
            episodeDict={1: [1, 2, 3]},
        )
        self._attach_app(download_service, serie)
        download_service._anime_service._cached_list_missing = MagicMock()

        await download_service.add_to_queue(
            serie_id="series-1",
            serie_folder="series",
            serie_name="Test Series",
            episodes=[EpisodeIdentifier(season=1, episode=2)],
        )

        db_session = AsyncMock()
        delete_mock = AsyncMock(return_value=True)

        with patch(
            "src.server.database.connection.get_db_session"
        ) as get_db_mock, patch(
            "src.server.database.service.EpisodeService"
        ) as episode_service_mock:
            get_db_mock.return_value.__aenter__ = AsyncMock(
                return_value=db_session
            )
            get_db_mock.return_value.__aexit__ = AsyncMock(
                return_value=False
            )
            episode_service_mock.delete_by_series_and_episode = delete_mock

            # Pull the queued item and run the download pipeline on it.
            item = download_service._pending_queue.popleft()
            download_service._pending_items_by_id.pop(item.id, None)
            await download_service._process_download(item)

        # Download completed successfully ...
        assert len(download_service._completed_items) == 1
        assert download_service._completed_items[0].status == DownloadStatus.COMPLETED
        # ... and the episode left the in-memory missing list.
        assert 2 not in serie.episodeDict[1]
        assert serie.episodeDict[1] == [1, 3]
|
||||
|
||||
@@ -229,3 +229,79 @@ def test_generate_nfo_writes_mpaa_when_no_fsk() -> None:
|
||||
mpaa_elem = root.find(".//mpaa")
|
||||
assert mpaa_elem is not None
|
||||
assert mpaa_elem.text == "TV-14"
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# showtitle and namedseason — new coverage
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
def test_tmdb_to_nfo_model_sets_showtitle(nfo_model: TVShowNFO) -> None:
    """The showtitle field mirrors the show's main title."""
    expected_title = "Test Show"
    assert nfo_model.showtitle == expected_title
|
||||
|
||||
|
||||
def test_generate_nfo_writes_showtitle(nfo_model: TVShowNFO) -> None:
    """The generated XML carries a <showtitle> element with the title."""
    document = _parse_xml(generate_tvshow_nfo(nfo_model))
    showtitle = document.find(".//showtitle")
    assert showtitle is not None
    assert showtitle.text == "Test Show"
|
||||
|
||||
|
||||
# MINIMAL_TMDB extended with a specials season plus two regular seasons;
# shared fixture data for the namedseason tests below.
TMDB_WITH_SEASONS: Dict[str, Any] = {
    **MINIMAL_TMDB,
    "seasons": [
        {"season_number": number, "name": name}
        for number, name in ((0, "Specials"), (1, "Season 1"), (2, "Season 2"))
    ],
}
|
||||
|
||||
|
||||
def test_tmdb_to_nfo_model_sets_namedseasons() -> None:
    """Every TMDB season is mapped into a namedseason entry, in order."""
    model = tmdb_to_nfo_model(
        TMDB_WITH_SEASONS, CONTENT_RATINGS_DE_US, _fake_get_image_url,
    )
    assert len(model.namedseason) == 3
    specials = model.namedseason[0]
    assert specials.number == 0
    assert specials.name == "Specials"
    assert model.namedseason[1].number == 1
|
||||
|
||||
|
||||
def test_generate_nfo_writes_namedseasons() -> None:
    """<namedseason> elements carry a number attribute and the season name."""
    model = tmdb_to_nfo_model(
        TMDB_WITH_SEASONS, CONTENT_RATINGS_DE_US, _fake_get_image_url,
    )
    document = _parse_xml(generate_tvshow_nfo(model))
    named_seasons = document.findall(".//namedseason")
    assert len(named_seasons) == 3
    assert named_seasons[0].get("number") == "0"
    assert named_seasons[0].text == "Specials"
|
||||
|
||||
|
||||
def test_tmdb_to_nfo_model_no_seasons_key() -> None:
    """A TMDB payload without a 'seasons' key yields an empty namedseason list."""
    model = tmdb_to_nfo_model(
        MINIMAL_TMDB, CONTENT_RATINGS_DE_US, _fake_get_image_url,
    )
    assert model.namedseason == []
|
||||
|
||||
|
||||
def test_tmdb_to_nfo_model_empty_overview_produces_none_plot() -> None:
    """An empty TMDB overview maps to plot=None, not an empty string."""
    payload = {**MINIMAL_TMDB, "overview": ""}
    model = tmdb_to_nfo_model(
        payload, CONTENT_RATINGS_DE_US, _fake_get_image_url,
    )
    assert model.plot is None
|
||||
|
||||
|
||||
def test_generate_nfo_always_writes_plot_tag_even_when_none() -> None:
    """<plot> is emitted even when the model has no plot (always_write=True)."""
    document = _parse_xml(generate_tvshow_nfo(TVShowNFO(title="No Plot Show")))
    assert document.find(".//plot") is not None
|
||||
|
||||
@@ -524,6 +524,207 @@ class TestCreateTVShowNFO:
|
||||
mock_ratings.assert_called_once_with(1429)
|
||||
|
||||
|
||||
class TestEnrichDetailsWithFallback:
    """English-overview fallback when the German TMDB overview is empty."""

    @pytest.mark.asyncio
    async def test_create_nfo_uses_english_fallback_for_empty_overview(
        self, nfo_service, tmp_path
    ):
        """create_tvshow_nfo fetches the en-US overview when de-DE is blank
        and writes it into <plot>."""
        series_folder = tmp_path / "Basilisk"
        series_folder.mkdir()

        # German payload whose overview is deliberately empty.
        german_payload = {
            "id": 35014, "name": "Basilisk",
            "original_name": "甲賀忍法帖", "first_air_date": "2005-04-13",
            "overview": "",
            "vote_average": 7.2, "vote_count": 200,
            "status": "Ended", "episode_run_time": [24],
            "genres": [{"id": 16, "name": "Animation"}],
            "networks": [{"id": 1, "name": "MBS"}],
            "production_countries": [{"name": "Japan"}],
            "poster_path": "/poster.jpg", "backdrop_path": "/backdrop.jpg",
            "external_ids": {"imdb_id": "tt0464064", "tvdb_id": 79604},
            "credits": {"cast": []},
            "images": {"logos": []},
        }
        english_payload = {
            "id": 35014,
            "overview": "The year is 1614 and two warring ninja clans collide.",
            "tagline": "Blood spills when ninja clans clash.",
        }

        async def details_by_language(tv_id, **kwargs):
            # en-US requests get the English payload; anything else German.
            if kwargs.get("language") == "en-US":
                return english_payload
            return german_payload

        with patch.object(nfo_service.tmdb_client, 'search_tv_show', new_callable=AsyncMock) as search_mock, \
             patch.object(nfo_service.tmdb_client, 'get_tv_show_details', new_callable=AsyncMock) as details_mock, \
             patch.object(nfo_service.tmdb_client, 'get_tv_show_content_ratings', new_callable=AsyncMock) as ratings_mock, \
             patch.object(nfo_service, '_download_media_files', new_callable=AsyncMock):

            search_mock.return_value = {
                "results": [{"id": 35014, "name": "Basilisk", "first_air_date": "2005-04-13"}]
            }
            details_mock.side_effect = details_by_language
            ratings_mock.return_value = {"results": []}

            nfo_path = await nfo_service.create_tvshow_nfo(
                "Basilisk", "Basilisk", year=2005,
                download_poster=False, download_logo=False, download_fanart=False,
            )

            content = nfo_path.read_text(encoding="utf-8")
            assert "<plot>The year is 1614" in content
            # One de-DE call plus one en-US fallback call.
            assert details_mock.call_count == 2

    @pytest.mark.asyncio
    async def test_update_nfo_uses_english_fallback_for_empty_overview(
        self, nfo_service, tmp_path
    ):
        """update_tvshow_nfo applies the same en-US fallback."""
        series_folder = tmp_path / "Basilisk"
        series_folder.mkdir()
        nfo_path = series_folder / "tvshow.nfo"
        nfo_path.write_text(
            '<?xml version="1.0"?>\n<tvshow><title>Basilisk</title>'
            "<tmdbid>35014</tmdbid></tvshow>",
            encoding="utf-8",
        )

        german_payload = {
            "id": 35014, "name": "Basilisk",
            "original_name": "甲賀忍法帖", "first_air_date": "2005-04-13",
            "overview": "",
            "vote_average": 7.2, "vote_count": 200,
            "status": "Ended", "episode_run_time": [24],
            "genres": [{"id": 16, "name": "Animation"}],
            "networks": [{"id": 1, "name": "MBS"}],
            "production_countries": [{"name": "Japan"}],
            "poster_path": "/poster.jpg", "backdrop_path": "/backdrop.jpg",
            "external_ids": {"imdb_id": "tt0464064", "tvdb_id": 79604},
            "credits": {"cast": []},
            "images": {"logos": []},
        }
        english_payload = {
            "id": 35014,
            "overview": "English fallback overview for Basilisk.",
        }

        async def details_by_language(tv_id, **kwargs):
            if kwargs.get("language") == "en-US":
                return english_payload
            return german_payload

        with patch.object(nfo_service.tmdb_client, 'get_tv_show_details', new_callable=AsyncMock) as details_mock, \
             patch.object(nfo_service.tmdb_client, 'get_tv_show_content_ratings', new_callable=AsyncMock) as ratings_mock, \
             patch.object(nfo_service, '_download_media_files', new_callable=AsyncMock):

            details_mock.side_effect = details_by_language
            ratings_mock.return_value = {"results": []}

            updated_path = await nfo_service.update_tvshow_nfo(
                "Basilisk", download_media=False,
            )

            content = updated_path.read_text(encoding="utf-8")
            assert "<plot>English fallback overview" in content
            assert details_mock.call_count == 2

    @pytest.mark.asyncio
    async def test_no_fallback_when_german_overview_exists(
        self, nfo_service, tmp_path
    ):
        """A present German overview means no extra en-US request is made."""
        series_folder = tmp_path / "Attack on Titan"
        series_folder.mkdir()

        german_payload = {
            "id": 1429, "name": "Attack on Titan",
            "original_name": "進撃の巨人", "first_air_date": "2013-04-07",
            "overview": "Vor mehreren hundert Jahren...",
            "vote_average": 8.6, "vote_count": 5000,
            "status": "Ended", "episode_run_time": [24],
            "genres": [], "networks": [], "production_countries": [],
            "poster_path": None, "backdrop_path": None,
            "external_ids": {}, "credits": {"cast": []},
            "images": {"logos": []},
        }

        with patch.object(nfo_service.tmdb_client, 'search_tv_show', new_callable=AsyncMock) as search_mock, \
             patch.object(nfo_service.tmdb_client, 'get_tv_show_details', new_callable=AsyncMock) as details_mock, \
             patch.object(nfo_service.tmdb_client, 'get_tv_show_content_ratings', new_callable=AsyncMock) as ratings_mock, \
             patch.object(nfo_service, '_download_media_files', new_callable=AsyncMock):

            search_mock.return_value = {
                "results": [{"id": 1429, "name": "Attack on Titan", "first_air_date": "2013-04-07"}]
            }
            details_mock.return_value = german_payload
            ratings_mock.return_value = {"results": []}

            nfo_path = await nfo_service.create_tvshow_nfo(
                "Attack on Titan", "Attack on Titan", year=2013,
                download_poster=False, download_logo=False, download_fanart=False,
            )

            content = nfo_path.read_text(encoding="utf-8")
            assert "<plot>Vor mehreren hundert Jahren...</plot>" in content
            # A single de-DE request suffices — no fallback fired.
            details_mock.assert_called_once_with(1429, append_to_response="credits,external_ids,images")

    @pytest.mark.asyncio
    async def test_plot_tag_always_present_even_if_empty(
        self, nfo_service, tmp_path
    ):
        """<plot> is written even when no language yields an overview."""
        series_folder = tmp_path / "Unknown Show"
        series_folder.mkdir()

        blank_payload = {
            "id": 99999, "name": "Unknown Show",
            "original_name": "Unknown", "first_air_date": "2020-01-01",
            "overview": "",
            "vote_average": 0, "vote_count": 0,
            "status": "Ended", "episode_run_time": [],
            "genres": [], "networks": [], "production_countries": [],
            "poster_path": None, "backdrop_path": None,
            "external_ids": {}, "credits": {"cast": []},
            "images": {"logos": []},
        }

        async def always_blank(tv_id, **kwargs):
            # English is just as empty as German here.
            return blank_payload

        with patch.object(nfo_service.tmdb_client, 'search_tv_show', new_callable=AsyncMock) as search_mock, \
             patch.object(nfo_service.tmdb_client, 'get_tv_show_details', new_callable=AsyncMock) as details_mock, \
             patch.object(nfo_service.tmdb_client, 'get_tv_show_content_ratings', new_callable=AsyncMock) as ratings_mock, \
             patch.object(nfo_service, '_download_media_files', new_callable=AsyncMock):

            search_mock.return_value = {
                "results": [{"id": 99999, "name": "Unknown Show", "first_air_date": "2020-01-01"}]
            }
            details_mock.side_effect = always_blank
            ratings_mock.return_value = {"results": []}

            nfo_path = await nfo_service.create_tvshow_nfo(
                "Unknown Show", "Unknown Show",
                download_poster=False, download_logo=False, download_fanart=False,
            )

            content = nfo_path.read_text(encoding="utf-8")
            # Either <plot/> (self-closing) or <plot></plot> is acceptable.
            assert "<plot" in content
|
||||
|
||||
|
||||
class TestNFOServiceEdgeCases:
|
||||
"""Test edge cases in NFO service."""
|
||||
|
||||
@@ -1184,3 +1385,152 @@ class TestYearExtractionComprehensive:
|
||||
assert clean_name == "Series (12345)"
|
||||
assert year is None
|
||||
|
||||
|
||||
class TestEnrichFallbackLanguages:
    """Multi-language fallback chain and search-overview last resort."""

    @pytest.mark.asyncio
    async def test_japanese_fallback_when_english_also_empty(
        self, nfo_service, tmp_path,
    ):
        """ja-JP is consulted when both de-DE and en-US overviews are blank."""
        series_folder = tmp_path / "Rare Anime"
        series_folder.mkdir()

        german_payload = {
            "id": 55555, "name": "Rare Anime",
            "original_name": "レアアニメ", "first_air_date": "2024-01-01",
            "overview": "",
            "vote_average": 7.0, "vote_count": 50,
            "status": "Continuing", "episode_run_time": [24],
            "genres": [], "networks": [], "production_countries": [],
            "poster_path": None, "backdrop_path": None,
            "external_ids": {}, "credits": {"cast": []},
            "images": {"logos": []},
        }
        english_payload = {"id": 55555, "overview": ""}
        japanese_payload = {"id": 55555, "overview": "日本語のあらすじ"}

        async def details_by_language(tv_id, **kwargs):
            language = kwargs.get("language")
            if language == "ja-JP":
                return japanese_payload
            if language == "en-US":
                return english_payload
            return german_payload

        with patch.object(nfo_service.tmdb_client, 'search_tv_show', new_callable=AsyncMock) as search_mock, \
             patch.object(nfo_service.tmdb_client, 'get_tv_show_details', new_callable=AsyncMock) as details_mock, \
             patch.object(nfo_service.tmdb_client, 'get_tv_show_content_ratings', new_callable=AsyncMock) as ratings_mock, \
             patch.object(nfo_service, '_download_media_files', new_callable=AsyncMock):

            search_mock.return_value = {
                "results": [{"id": 55555, "name": "Rare Anime", "first_air_date": "2024-01-01"}],
            }
            details_mock.side_effect = details_by_language
            ratings_mock.return_value = {"results": []}

            nfo_path = await nfo_service.create_tvshow_nfo(
                "Rare Anime", "Rare Anime",
                download_poster=False, download_logo=False, download_fanart=False,
            )

            content = nfo_path.read_text(encoding="utf-8")
            assert "<plot>日本語のあらすじ</plot>" in content

    @pytest.mark.asyncio
    async def test_search_overview_fallback_when_all_languages_empty(
        self, nfo_service, tmp_path,
    ):
        """The search result's overview is used as the last resort."""
        series_folder = tmp_path / "Brand New Anime"
        series_folder.mkdir()

        blank_payload = {
            "id": 77777, "name": "Brand New Anime",
            "original_name": "新しいアニメ", "first_air_date": "2025-01-01",
            "overview": "",
            "vote_average": 0, "vote_count": 0,
            "status": "Continuing", "episode_run_time": [],
            "genres": [], "networks": [], "production_countries": [],
            "poster_path": None, "backdrop_path": None,
            "external_ids": {}, "credits": {"cast": []},
            "images": {"logos": []},
        }

        async def always_blank(tv_id, **kwargs):
            return blank_payload

        with patch.object(nfo_service.tmdb_client, 'search_tv_show', new_callable=AsyncMock) as search_mock, \
             patch.object(nfo_service.tmdb_client, 'get_tv_show_details', new_callable=AsyncMock) as details_mock, \
             patch.object(nfo_service.tmdb_client, 'get_tv_show_content_ratings', new_callable=AsyncMock) as ratings_mock, \
             patch.object(nfo_service, '_download_media_files', new_callable=AsyncMock):

            # Unlike the detail payloads, the search result DOES have an overview.
            search_mock.return_value = {
                "results": [{
                    "id": 77777,
                    "name": "Brand New Anime",
                    "first_air_date": "2025-01-01",
                    "overview": "Search result overview text.",
                }],
            }
            details_mock.side_effect = always_blank
            ratings_mock.return_value = {"results": []}

            nfo_path = await nfo_service.create_tvshow_nfo(
                "Brand New Anime", "Brand New Anime",
                download_poster=False, download_logo=False, download_fanart=False,
            )

            content = nfo_path.read_text(encoding="utf-8")
            assert "<plot>Search result overview text.</plot>" in content

    @pytest.mark.asyncio
    async def test_no_japanese_fallback_when_english_succeeds(
        self, nfo_service, tmp_path,
    ):
        """The fallback chain stops at en-US when it provides the overview."""
        series_folder = tmp_path / "Test Anime"
        series_folder.mkdir()

        german_payload = {
            "id": 88888, "name": "Test Anime",
            "original_name": "テスト", "first_air_date": "2024-01-01",
            "overview": "",
            "vote_average": 7.0, "vote_count": 50,
            "status": "Continuing", "episode_run_time": [24],
            "genres": [], "networks": [], "production_countries": [],
            "poster_path": None, "backdrop_path": None,
            "external_ids": {}, "credits": {"cast": []},
            "images": {"logos": []},
        }
        english_payload = {"id": 88888, "overview": "English overview."}

        async def details_by_language(tv_id, **kwargs):
            if kwargs.get("language") == "en-US":
                return english_payload
            return german_payload

        with patch.object(nfo_service.tmdb_client, 'search_tv_show', new_callable=AsyncMock) as search_mock, \
             patch.object(nfo_service.tmdb_client, 'get_tv_show_details', new_callable=AsyncMock) as details_mock, \
             patch.object(nfo_service.tmdb_client, 'get_tv_show_content_ratings', new_callable=AsyncMock) as ratings_mock, \
             patch.object(nfo_service, '_download_media_files', new_callable=AsyncMock):

            search_mock.return_value = {
                "results": [{"id": 88888, "name": "Test Anime", "first_air_date": "2024-01-01"}],
            }
            details_mock.side_effect = details_by_language
            ratings_mock.return_value = {"results": []}

            nfo_path = await nfo_service.create_tvshow_nfo(
                "Test Anime", "Test Anime",
                download_poster=False, download_logo=False, download_fanart=False,
            )

            content = nfo_path.read_text(encoding="utf-8")
            assert "<plot>English overview.</plot>" in content
            # de-DE + en-US = 2 calls; ja-JP was never needed.
            assert details_mock.call_count == 2
|
||||
|
||||
|
||||
Reference in New Issue
Block a user