added logging

Lukas 2025-11-01 16:29:07 +01:00
parent 33aeac0141
commit 6cdb2eb1e1
3 changed files with 130 additions and 90 deletions

View File

@@ -1,81 +1,5 @@
{
"pending": [
{
"id": "7a6a40ce-c5d9-4e7e-8f71-a08141e9ce6f",
"serie_id": "highschool-dxd",
"serie_name": "Highschool DxD",
"episode": {
"season": 3,
"episode": 8,
"title": null
},
"status": "pending",
"priority": "normal",
"added_at": "2025-11-01T14:30:56.882050Z",
"started_at": null,
"completed_at": null,
"progress": null,
"error": null,
"retry_count": 0,
"source_url": null
},
{
"id": "e02d144f-9939-41fe-bd02-0d52d581df4a",
"serie_id": "highschool-dxd",
"serie_name": "Highschool DxD",
"episode": {
"season": 3,
"episode": 9,
"title": null
},
"status": "pending",
"priority": "normal",
"added_at": "2025-11-01T14:30:56.882082Z",
"started_at": null,
"completed_at": null,
"progress": null,
"error": null,
"retry_count": 0,
"source_url": null
},
{
"id": "6d5577e4-cab3-4be9-8eda-691a9af27517",
"serie_id": "highschool-dxd",
"serie_name": "Highschool DxD",
"episode": {
"season": 3,
"episode": 10,
"title": null
},
"status": "pending",
"priority": "normal",
"added_at": "2025-11-01T14:30:56.882111Z",
"started_at": null,
"completed_at": null,
"progress": null,
"error": null,
"retry_count": 0,
"source_url": null
},
{
"id": "d63fbe5a-7a04-4a40-9ddc-423396d30140",
"serie_id": "highschool-dxd",
"serie_name": "Highschool DxD",
"episode": {
"season": 3,
"episode": 11,
"title": null
},
"status": "pending",
"priority": "normal",
"added_at": "2025-11-01T14:30:56.882139Z",
"started_at": null,
"completed_at": null,
"progress": null,
"error": null,
"retry_count": 0,
"source_url": null
},
{
"id": "16bd4821-59f8-4071-a094-552db51af475",
"serie_id": "highschool-dxd",
@@ -1462,10 +1386,7 @@
"error": "Download failed",
"retry_count": 0,
"source_url": null
}
],
"active": [],
"failed": [
},
{
"id": "467b39ac-48be-48c5-a04c-ce94b6f6c0d9",
"serie_id": "highschool-dxd",
@@ -1475,7 +1396,7 @@
"episode": 5,
"title": null
},
"status": "failed",
"status": "pending",
"priority": "normal",
"added_at": "2025-11-01T14:30:56.881950Z",
"started_at": "2025-11-01T15:12:21.534113Z",
@@ -1494,7 +1415,7 @@
"episode": 6,
"title": null
},
"status": "failed",
"status": "pending",
"priority": "normal",
"added_at": "2025-11-01T14:30:56.881979Z",
"started_at": "2025-11-01T15:12:31.484945Z",
@@ -1513,7 +1434,7 @@
"episode": 7,
"title": null
},
"status": "failed",
"status": "pending",
"priority": "normal",
"added_at": "2025-11-01T14:30:56.882013Z",
"started_at": "2025-11-01T15:12:40.122525Z",
@@ -1524,5 +1445,65 @@
"source_url": null
}
],
"timestamp": "2025-11-01T15:12:46.420241+00:00"
"active": [],
"failed": [
{
"id": "e02d144f-9939-41fe-bd02-0d52d581df4a",
"serie_id": "highschool-dxd",
"serie_name": "Highschool DxD",
"episode": {
"season": 3,
"episode": 9,
"title": null
},
"status": "failed",
"priority": "normal",
"added_at": "2025-11-01T14:30:56.882082Z",
"started_at": "2025-11-01T15:26:12.854334Z",
"completed_at": "2025-11-01T15:26:24.812576Z",
"progress": null,
"error": "Download failed",
"retry_count": 0,
"source_url": null
},
{
"id": "6d5577e4-cab3-4be9-8eda-691a9af27517",
"serie_id": "highschool-dxd",
"serie_name": "Highschool DxD",
"episode": {
"season": 3,
"episode": 10,
"title": null
},
"status": "failed",
"priority": "normal",
"added_at": "2025-11-01T14:30:56.882111Z",
"started_at": "2025-11-01T15:26:25.815381Z",
"completed_at": "2025-11-01T15:26:34.238411Z",
"progress": null,
"error": "Download failed",
"retry_count": 0,
"source_url": null
},
{
"id": "d63fbe5a-7a04-4a40-9ddc-423396d30140",
"serie_id": "highschool-dxd",
"serie_name": "Highschool DxD",
"episode": {
"season": 3,
"episode": 11,
"title": null
},
"status": "failed",
"priority": "normal",
"added_at": "2025-11-01T14:30:56.882139Z",
"started_at": "2025-11-01T15:26:35.243481Z",
"completed_at": "2025-11-01T15:26:48.941639Z",
"progress": null,
"error": "Download failed",
"retry_count": 0,
"source_url": null
}
],
"timestamp": "2025-11-01T15:26:48.942408+00:00"
}
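
The snapshot above is the download queue state: each item carries an id, serie_id, episode, status, timestamps, retry_count, and an optional error, and the items are grouped into "pending", "active", and "failed" lists. A minimal sketch of inspecting such a snapshot with Python's standard json module; the file name queue_state.json is assumed for illustration:

import json

# Load a queue snapshot shaped like the JSON above (file name is illustrative).
with open("queue_state.json", encoding="utf-8") as fh:
    queue = json.load(fh)

# Count the items in each bucket.
for bucket in ("pending", "active", "failed"):
    print(f"{bucket}: {len(queue.get(bucket, []))}")

# List the failed episodes with their recorded errors.
for item in queue.get("failed", []):
    ep = item["episode"]
    print(f"{item['serie_name']} S{ep['season']:02}E{ep['episode']:03}: {item['error']}")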

View File

@@ -93,12 +93,16 @@ class AniworldLoader(Loader):
def clear_cache(self):
"""Clear the cached HTML data."""
logging.debug("Clearing HTML cache")
self._KeyHTMLDict = {}
self._EpisodeHTMLDict = {}
logging.debug("HTML cache cleared successfully")
def remove_from_cache(self):
"""Remove episode HTML from cache."""
logging.debug("Removing episode HTML from cache")
self._EpisodeHTMLDict = {}
logging.debug("Episode HTML cache cleared")
def search(self, word: str) -> list:
"""Search for anime series.
@@ -109,23 +113,30 @@ class AniworldLoader(Loader):
Returns:
List of found series
"""
logging.info(f"Searching for anime with keyword: '{word}'")
search_url = (
f"{self.ANIWORLD_TO}/ajax/seriesSearch?keyword={quote(word)}"
)
logging.debug(f"Search URL: {search_url}")
anime_list = self.fetch_anime_list(search_url)
logging.info(f"Found {len(anime_list)} anime series for keyword '{word}'")
return anime_list
def fetch_anime_list(self, url: str) -> list:
logging.debug(f"Fetching anime list from URL: {url}")
response = self.session.get(url, timeout=self.DEFAULT_REQUEST_TIMEOUT)
response.raise_for_status()
logging.debug(f"Response status code: {response.status_code}")
clean_text = response.text.strip()
try:
decoded_data = json.loads(html.unescape(clean_text))
logging.debug("Successfully decoded JSON data on first attempt")
return decoded_data if isinstance(decoded_data, list) else []
except json.JSONDecodeError:
logging.warning("Initial JSON decode failed, attempting cleanup")
try:
# Remove BOM and problematic characters
clean_text = clean_text.encode('utf-8').decode('utf-8-sig')
@@ -133,8 +144,10 @@ class AniworldLoader(Loader):
clean_text = re.sub(r'[\x00-\x1F\x7F-\x9F]', '', clean_text)
# Parse the new text
decoded_data = json.loads(clean_text)
logging.debug("Successfully decoded JSON after cleanup")
return decoded_data if isinstance(decoded_data, list) else []
except (requests.RequestException, json.JSONDecodeError) as exc:
logging.error(f"Failed to decode anime list from {url}: {exc}")
raise ValueError("Could not decode a valid anime list") from exc
def _get_language_key(self, language: str) -> int:
@@ -152,6 +165,7 @@ class AniworldLoader(Loader):
language_code = 2
if language == "German Sub":
language_code = 3
logging.debug(f"Converted language '{language}' to code {language_code}")
return language_code
def is_language(
@@ -162,6 +176,7 @@ class AniworldLoader(Loader):
language: str = "German Dub"
) -> bool:
"""Check if episode is available in specified language."""
logging.debug(f"Checking if S{season:02}E{episode:03} ({key}) is available in {language}")
language_code = self._get_language_key(language)
episode_soup = BeautifulSoup(
@@ -179,7 +194,9 @@ class AniworldLoader(Loader):
if lang_key and lang_key.isdigit():
languages.append(int(lang_key))
return language_code in languages
is_available = language_code in languages
logging.debug(f"Available languages for S{season:02}E{episode:03}: {languages}, requested: {language_code}, available: {is_available}")
return is_available
def download(
self,
@@ -192,10 +209,12 @@ class AniworldLoader(Loader):
progress_callback=None
) -> bool:
"""Download episode to specified directory."""
logging.info(f"Starting download for S{season:02}E{episode:03} ({key}) in {language}")
sanitized_anime_title = ''.join(
char for char in self.get_title(key)
if char not in self.INVALID_PATH_CHARS
)
logging.debug(f"Sanitized anime title: {sanitized_anime_title}")
if season == 0:
output_file = (
@@ -215,16 +234,20 @@ class AniworldLoader(Loader):
f"Season {season}"
)
output_path = os.path.join(folder_path, output_file)
logging.debug(f"Output path: {output_path}")
os.makedirs(os.path.dirname(output_path), exist_ok=True)
temp_dir = "./Temp/"
os.makedirs(os.path.dirname(temp_dir), exist_ok=True)
temp_path = os.path.join(temp_dir, output_file)
logging.debug(f"Temporary path: {temp_path}")
for provider in self.SUPPORTED_PROVIDERS:
logging.debug(f"Attempting download with provider: {provider}")
link, header = self._get_direct_link_from_provider(
season, episode, key, language
)
logging.debug("Direct link obtained from provider")
ydl_opts = {
'fragment_retries': float('inf'),
'outtmpl': temp_path,
@@ -236,15 +259,19 @@ class AniworldLoader(Loader):
if header:
ydl_opts['http_headers'] = header
logging.debug("Using custom headers for download")
if progress_callback:
ydl_opts['progress_hooks'] = [progress_callback]
logging.debug("Starting YoutubeDL download")
with YoutubeDL(ydl_opts) as ydl:
ydl.download([link])
if os.path.exists(temp_path):
logging.debug("Moving file from temp to final destination")
shutil.copy(temp_path, output_path)
os.remove(temp_path)
logging.info(f"Download completed successfully: {output_file}")
break
self.clear_cache()
return True
@@ -255,6 +282,7 @@ class AniworldLoader(Loader):
def get_title(self, key: str) -> str:
"""Get anime title from series key."""
logging.debug(f"Getting title for key: {key}")
soup = BeautifulSoup(
self._get_key_html(key).content,
'html.parser'
@@ -262,8 +290,11 @@ class AniworldLoader(Loader):
title_div = soup.find('div', class_='series-title')
if title_div:
return title_div.find('h1').find('span').text
title = title_div.find('h1').find('span').text
logging.debug(f"Found title: {title}")
return title
logging.warning(f"No title found for key: {key}")
return ""
def _get_key_html(self, key: str):
@@ -276,14 +307,18 @@ class AniworldLoader(Loader):
Cached or fetched HTML response
"""
if key in self._KeyHTMLDict:
logging.debug(f"Using cached HTML for key: {key}")
return self._KeyHTMLDict[key]
# Sanitize key parameter for URL
safe_key = quote(key, safe='')
url = f"{self.ANIWORLD_TO}/anime/stream/{safe_key}"
logging.debug(f"Fetching HTML for key: {key} from {url}")
self._KeyHTMLDict[key] = self.session.get(
f"{self.ANIWORLD_TO}/anime/stream/{safe_key}",
url,
timeout=self.DEFAULT_REQUEST_TIMEOUT
)
logging.debug(f"Cached HTML for key: {key}")
return self._KeyHTMLDict[key]
def _get_episode_html(self, season: int, episode: int, key: str):
@@ -302,11 +337,14 @@ class AniworldLoader(Loader):
"""
# Validate season and episode numbers
if season < 1 or season > 999:
logging.error(f"Invalid season number: {season}")
raise ValueError(f"Invalid season number: {season}")
if episode < 1 or episode > 9999:
logging.error(f"Invalid episode number: {episode}")
raise ValueError(f"Invalid episode number: {episode}")
if (key, season, episode) in self._EpisodeHTMLDict:
logging.debug(f"Using cached HTML for S{season:02}E{episode:03} ({key})")
return self._EpisodeHTMLDict[(key, season, episode)]
# Sanitize key parameter for URL
@@ -315,8 +353,10 @@ class AniworldLoader(Loader):
f"{self.ANIWORLD_TO}/anime/stream/{safe_key}/"
f"staffel-{season}/episode-{episode}"
)
logging.debug(f"Fetching episode HTML from: {link}")
html = self.session.get(link, timeout=self.DEFAULT_REQUEST_TIMEOUT)
self._EpisodeHTMLDict[(key, season, episode)] = html
logging.debug(f"Cached episode HTML for S{season:02}E{episode:03} ({key})")
return self._EpisodeHTMLDict[(key, season, episode)]
def _get_provider_from_html(
@@ -336,6 +376,7 @@ class AniworldLoader(Loader):
2: 'https://aniworld.to/redirect/1766405'},
}
"""
logging.debug(f"Extracting providers from HTML for S{season:02}E{episode:03} ({key})")
soup = BeautifulSoup(
self._get_episode_html(season, episode, key).content,
'html.parser'
@@ -347,6 +388,7 @@ class AniworldLoader(Loader):
)
if not episode_links:
logging.warning(f"No episode links found for S{season:02}E{episode:03} ({key})")
return providers
for link in episode_links:
@@ -374,7 +416,9 @@ class AniworldLoader(Loader):
providers[provider_name][lang_key] = (
f"{self.ANIWORLD_TO}{redirect_link}"
)
logging.debug(f"Found provider: {provider_name}, lang_key: {lang_key}")
logging.debug(f"Total providers found: {len(providers)}")
return providers
def _get_redirect_link(
@@ -385,6 +429,7 @@ class AniworldLoader(Loader):
language: str = "German Dub"
):
"""Get redirect link for episode in specified language."""
logging.debug(f"Getting redirect link for S{season:02}E{episode:03} ({key}) in {language}")
language_code = self._get_language_key(language)
if self.is_language(season, episode, key, language):
for (provider_name, lang_dict) in (
@@ -393,7 +438,9 @@ class AniworldLoader(Loader):
).items()
):
if language_code in lang_dict:
logging.debug(f"Found redirect link with provider: {provider_name}")
return (lang_dict[language_code], provider_name)
logging.warning(f"No redirect link found for S{season:02}E{episode:03} ({key}) in {language}")
return None
def _get_embeded_link(
@@ -404,15 +451,18 @@ class AniworldLoader(Loader):
language: str = "German Dub"
):
"""Get embedded link from redirect link."""
logging.debug(f"Getting embedded link for S{season:02}E{episode:03} ({key}) in {language}")
redirect_link, provider_name = (
self._get_redirect_link(season, episode, key, language)
)
logging.debug(f"Redirect link: {redirect_link}, provider: {provider_name}")
embeded_link = self.session.get(
redirect_link,
timeout=self.DEFAULT_REQUEST_TIMEOUT,
headers={'User-Agent': self.RANDOM_USER_AGENT}
).url
logging.debug(f"Embedded link: {embeded_link}")
return embeded_link
def _get_direct_link_from_provider(
@@ -423,12 +473,15 @@ class AniworldLoader(Loader):
language: str = "German Dub"
):
"""Get direct download link from streaming provider."""
logging.debug(f"Getting direct link from provider for S{season:02}E{episode:03} ({key}) in {language}")
embeded_link = self._get_embeded_link(
season, episode, key, language
)
if embeded_link is None:
logging.error(f"No embedded link found for S{season:02}E{episode:03} ({key})")
return None
logging.debug("Using VOE provider to extract direct link")
return self.Providers.GetProvider(
"VOE"
).get_link(embeded_link, self.DEFAULT_REQUEST_TIMEOUT)
@@ -442,19 +495,23 @@ class AniworldLoader(Loader):
Returns:
Dictionary mapping season numbers to episode counts
"""
logging.info(f"Getting season and episode count for slug: {slug}")
# Sanitize slug parameter for URL
safe_slug = quote(slug, safe='')
base_url = f"{self.ANIWORLD_TO}/anime/stream/{safe_slug}/"
logging.debug(f"Base URL: {base_url}")
response = requests.get(base_url, timeout=self.DEFAULT_REQUEST_TIMEOUT)
soup = BeautifulSoup(response.content, 'html.parser')
season_meta = soup.find('meta', itemprop='numberOfSeasons')
number_of_seasons = int(season_meta['content']) if season_meta else 0
logging.info(f"Found {number_of_seasons} seasons for '{slug}'")
episode_counts = {}
for season in range(1, number_of_seasons + 1):
season_url = f"{base_url}staffel-{season}"
logging.debug(f"Fetching episodes for season {season} from: {season_url}")
response = requests.get(
season_url,
timeout=self.DEFAULT_REQUEST_TIMEOUT,
@@ -469,5 +526,7 @@ class AniworldLoader(Loader):
)
episode_counts[season] = len(unique_links)
logging.debug(f"Season {season} has {episode_counts[season]} episodes")
logging.info(f"Episode count retrieval complete for '{slug}': {episode_counts}")
return episode_counts
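
The debug and info calls added above go to whatever handlers the application installs. A minimal sketch of surfacing this output during a local run, using only the standard logging module; the format string and the urllib3 line are illustrative:

import logging

# Emit all loader messages, including debug, with timestamps and logger names.
logging.basicConfig(
    level=logging.DEBUG,
    format="%(asctime)s %(levelname)-8s %(name)s: %(message)s",
)

# Optionally quiet very chatty third-party loggers while keeping loader output.
logging.getLogger("urllib3").setLevel(logging.WARNING)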

View File

@@ -46,8 +46,8 @@ from src.server.services.websocket_service import get_websocket_service
@asynccontextmanager
async def lifespan(app: FastAPI):
"""Manage application lifespan (startup and shutdown)."""
# Setup logging first
logger = setup_logging()
# Setup logging first with DEBUG level
logger = setup_logging(log_level="DEBUG")
# Startup
try:
@@ -191,5 +191,5 @@ if __name__ == "__main__":
host="127.0.0.1",
port=8000,
reload=True,
log_level="info"
log_level="debug"
)
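
Pinning the level to DEBUG turns on very verbose output in every environment. A minimal sketch of reading the level from the environment instead, assuming setup_logging accepts a log_level string as in the call above; the LOG_LEVEL variable name is illustrative:

import os

# LOG_LEVEL is an assumed variable name; fall back to INFO when it is unset.
log_level = os.getenv("LOG_LEVEL", "INFO").upper()

# setup_logging is the project helper called in the diff above (assumed signature).
logger = setup_logging(log_level=log_level)

# uvicorn expects lowercase level names, so the same value can be forwarded:
# uvicorn.run(..., log_level=log_level.lower())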