This commit is contained in:
2025-10-12 18:05:31 +02:00
parent 57d49bcf78
commit 7a71715183
130 changed files with 30010 additions and 50631 deletions

View File

@@ -1,229 +1,229 @@
import sys
import os
import logging
from ..core.providers import aniworld_provider
from rich.progress import Progress
from ..core.entities import SerieList
from ..core.SerieScanner import SerieScanner
from ..core.providers.provider_factory import Loaders
from ..core.entities.series import Serie
import time
# Configure logging
# Baseline config: root logger at FATAL with a timestamped format.
logging.basicConfig(level=logging.FATAL, format='%(asctime)s - %(levelname)s - %(funcName)s - %(message)s')
# NOTE(review): console_handler is built and configured below but never
# attached to any logger, so it currently has no effect — confirm intent.
console_handler = logging.StreamHandler()
console_handler.setLevel(logging.ERROR)
console_handler.setFormatter(logging.Formatter(
    "%(asctime)s - %(levelname)s - %(funcName)s - %(message)s")
)
# Strip the handler(s) installed by basicConfig so nothing is echoed to the
# console. NOTE(review): removing while iterating the live handler list can
# skip entries; consider iterating over a copy (list(logging.root.handlers)).
for h in logging.root.handlers:
    logging.root.removeHandler(h)
# Silence chatty third-party loggers.
logging.getLogger("urllib3.connectionpool").setLevel(logging.ERROR)
logging.getLogger('charset_normalizer').setLevel(logging.ERROR)
logging.getLogger().setLevel(logging.ERROR)
# Same mutate-while-iterating caveat as above.
for h in logging.getLogger().handlers:
    logging.getLogger().removeHandler(h)
class NoKeyFoundException(Exception):
    """Raised when no key can be located for an anime/series."""
class MatchNotFoundError(Exception):
    """Exception raised when no matching series can be found.

    Note: the previous docstring was copy-pasted from NoKeyFoundException
    and described the wrong condition; this class signals a failed match,
    not a missing key.
    """
    pass
class SeriesApp:
    """Interactive console front-end for scanning a local series library,
    searching the aniworld.to provider and downloading missing episodes."""

    # Class-wide counter: how many SeriesApp instances have been created.
    _initialization_count = 0  # Track how many times initialization has been called

    def __init__(self, directory_to_search: str):
        """Wire up the loader, scanner and series list for *directory_to_search*."""
        SeriesApp._initialization_count += 1
        # Only show initialization message for the first instance
        if SeriesApp._initialization_count <= 1:
            print("Please wait while initializing...")
        # Active rich Progress instance; None while no progress bar is shown.
        self.progress = None
        self.directory_to_search = directory_to_search
        self.Loaders = Loaders()
        loader = self.Loaders.GetLoader(key="aniworld.to")
        self.SerieScanner = SerieScanner(directory_to_search, loader)
        self.List = SerieList(self.directory_to_search)
        self.__InitList__()

    def __InitList__(self):
        """Refresh the cached list of series that still have missing episodes."""
        self.series_list = self.List.GetMissingEpisode()

    def display_series(self):
        """Print all series with assigned numbers."""
        print("\nCurrent result:")
        for i, serie in enumerate(self.series_list, 1):
            name = serie.name  # Access the property on the instance
            # Fall back to the folder name when the series has no usable name.
            if name is None or str(name).strip() == "":
                print(f"{i}. {serie.folder}")
            else:
                print(f"{i}. {serie.name}")

    def search(self, words :str) -> list:
        """Query the aniworld.to provider and return its raw result list."""
        loader = self.Loaders.GetLoader(key="aniworld.to")
        return loader.Search(words)

    def get_user_selection(self):
        """Handle user input for selecting series.

        Returns the selected series objects, or None when the user exits or
        makes no valid selection.
        """
        self.display_series()
        while True:
            selection = input(
                "\nSelect series by number (e.g. '1', '1,2' or 'all') or type 'exit' to return: ").strip().lower()
            if selection == "exit":
                return None
            selected_series = []
            if selection == "all":
                selected_series = self.series_list
            else:
                try:
                    # Convert 1-based user numbers to 0-based indexes;
                    # out-of-range entries are silently dropped.
                    indexes = [int(num) - 1 for num in selection.split(",")]
                    selected_series = [self.series_list[i] for i in indexes if 0 <= i < len(self.series_list)]
                except ValueError:
                    print("Invalid selection. Going back to the result display.")
                    self.display_series()
                    continue
            if selected_series:
                return selected_series
            else:
                print("No valid series selected. Going back to the result display.")
                return None

    def retry(self, func, max_retries=3, delay=2, *args, **kwargs):
        """Call *func(*args, **kwargs)* up to *max_retries* times, sleeping
        *delay* seconds after each failure.

        Returns True on the first successful call, False when all attempts
        raised.
        """
        for attempt in range(1, max_retries + 1):
            try:
                func(*args, **kwargs)
                return True
            except Exception as e:
                # Best-effort: report the error and try again.
                print(e)
                time.sleep(delay)
        return False

    def download_series(self, series):
        """Simulate the downloading process with a progress bar."""
        total_downloaded = 0
        total_episodes = sum(sum(len(ep) for ep in serie.episodeDict.values()) for serie in series)
        self.progress = Progress()
        task1 = self.progress.add_task("[red]Processing...", total=total_episodes)
        task2 = self.progress.add_task(f"[green]...", total=0)
        # total=100 so this task can display a download percentage.
        self.task3 = self.progress.add_task(f"[Gray]...", total=100)
        self.progress.start()
        for serie in series:
            serie_episodes = sum(len(ep) for ep in serie.episodeDict.values())
            self.progress.update(task2, description=f"[green]{serie.folder}", total=serie_episodes)
            downloaded = 0
            for season, episodes in serie.episodeDict.items():
                for episode in episodes:
                    loader = self.Loaders.GetLoader(key="aniworld.to")
                    # Only download episodes available in the requested language.
                    if loader.IsLanguage(season, episode, serie.key):
                        self.retry(loader.Download, 3, 1, self.directory_to_search, serie.folder, season, episode, serie.key, "German Dub",self.print_Download_Progress)
                    downloaded += 1
                    total_downloaded += 1
                    self.progress.update(task1, advance=1)
                    self.progress.update(task2, advance=1)
                    time.sleep(0.02)
        self.progress.stop()
        self.progress = None

    def print_Download_Progress(self, d):
        """Progress-hook callback mirroring per-file download progress onto
        task3 (d looks like a yt-dlp status dict — confirm against the
        loader's Download implementation)."""
        # Uses self.progress and self.task3 for the progress display.
        if self.progress is None or not hasattr(self, 'task3'):
            return
        if d['status'] == 'downloading':
            total = d.get('total_bytes') or d.get('total_bytes_estimate')
            downloaded = d.get('downloaded_bytes', 0)
            if total:
                percent = downloaded / total * 100
                self.progress.update(self.task3, completed=percent, description=f"[gray]Download: {percent:.1f}%")
            else:
                # No total known: show the absolute amount downloaded instead.
                self.progress.update(self.task3, description=f"[gray]{downloaded/1024/1024:.2f}MB geladen")
        elif d['status'] == 'finished':
            self.progress.update(self.task3, completed=100, description="[gray]Download abgeschlossen.")

    def search_mode(self):
        """Search for a series and allow user to select an option."""
        search_string = input("Enter search string: ").strip()
        results = self.search(search_string)
        if not results:
            print("No results found. Returning to start.")
            return
        print("\nSearch results:")
        for i, result in enumerate(results, 1):
            print(f"{i}. {result.get('name')}")
        while True:
            selection = input("\nSelect an option by number or type '<enter>' to return: ").strip().lower()
            if selection == "":
                return
            try:
                index = int(selection) - 1
                if 0 <= index < len(results):
                    chosen_name = results[index]
                    # Persist the chosen result; the link is passed both as
                    # the first argument and as the key (4th argument).
                    self.List.add(Serie(chosen_name["link"], chosen_name["name"], "aniworld.to", chosen_name["link"], {}))
                    return
                else:
                    print("Invalid selection. Try again.")
            except ValueError:
                print("Invalid input. Try again.")

    def updateFromReinit(self, folder, counter):
        """Scan callback: advance the rescan progress bar by one item.

        NOTE(review): relies on self.progress/self.task1 created in run()'s
        'i' branch — only valid while a rescan is in progress.
        """
        self.progress.update(self.task1, advance=1)

    def run(self):
        """Main function to run the app."""
        while True:
            action = input("\nChoose action ('s' for search, 'i' for init or 'd' for download): ").strip().lower()
            if action == "s":
                self.search_mode()
            if action == "i":
                print("\nRescanning series...\n")
                self.progress = Progress()
                # NOTE(review): total=300 is a hard-coded estimate of the item count.
                self.task1 = self.progress.add_task("[red]items processed...", total=300)
                self.progress.start()
                self.SerieScanner.Reinit()
                self.SerieScanner.Scan(self.updateFromReinit)
                # Rebuild the series list from disk after the rescan.
                self.List = SerieList(self.directory_to_search)
                self.__InitList__()
                self.progress.stop()
                self.progress = None
            elif action == "d":
                selected_series = self.get_user_selection()
                if selected_series:
                    self.download_series(selected_series)
# Run the app
if __name__ == "__main__":
    # Read the base directory from an environment variable
    # (falls back to a hard-coded sshfs UNC path when ANIME_DIRECTORY is unset).
    directory_to_search = os.getenv("ANIME_DIRECTORY", "\\\\sshfs.r\\ubuntu@192.168.178.43\\media\\serien\\Serien")
    app = SeriesApp(directory_to_search)
import sys
import os
import logging
from ..core.providers import aniworld_provider
from rich.progress import Progress
from ..core.entities import SerieList
from ..core.SerieScanner import SerieScanner
from ..core.providers.provider_factory import Loaders
from ..core.entities.series import Serie
import time
# Configure logging
# Baseline config: root logger at FATAL with a timestamped format.
logging.basicConfig(level=logging.FATAL, format='%(asctime)s - %(levelname)s - %(funcName)s - %(message)s')
# NOTE(review): console_handler is built and configured below but never
# attached to any logger, so it currently has no effect — confirm intent.
console_handler = logging.StreamHandler()
console_handler.setLevel(logging.ERROR)
console_handler.setFormatter(logging.Formatter(
    "%(asctime)s - %(levelname)s - %(funcName)s - %(message)s")
)
# Strip the handler(s) installed by basicConfig so nothing is echoed to the
# console. NOTE(review): removing while iterating the live handler list can
# skip entries; consider iterating over a copy (list(logging.root.handlers)).
for h in logging.root.handlers:
    logging.root.removeHandler(h)
# Silence chatty third-party loggers.
logging.getLogger("urllib3.connectionpool").setLevel(logging.ERROR)
logging.getLogger('charset_normalizer').setLevel(logging.ERROR)
logging.getLogger().setLevel(logging.ERROR)
# Same mutate-while-iterating caveat as above.
for h in logging.getLogger().handlers:
    logging.getLogger().removeHandler(h)
class NoKeyFoundException(Exception):
    """Raised when no key can be located for an anime/series."""
class MatchNotFoundError(Exception):
    """Exception raised when no matching series can be found.

    Note: the previous docstring was copy-pasted from NoKeyFoundException
    and described the wrong condition; this class signals a failed match,
    not a missing key.
    """
    pass
class SeriesApp:
    """Interactive console front-end for scanning a local series library,
    searching the aniworld.to provider and downloading missing episodes."""

    # Class-wide counter: how many SeriesApp instances have been created.
    _initialization_count = 0  # Track how many times initialization has been called

    def __init__(self, directory_to_search: str):
        """Wire up the loader, scanner and series list for *directory_to_search*."""
        SeriesApp._initialization_count += 1
        # Only show initialization message for the first instance
        if SeriesApp._initialization_count <= 1:
            print("Please wait while initializing...")
        # Active rich Progress instance; None while no progress bar is shown.
        self.progress = None
        self.directory_to_search = directory_to_search
        self.Loaders = Loaders()
        loader = self.Loaders.GetLoader(key="aniworld.to")
        self.SerieScanner = SerieScanner(directory_to_search, loader)
        self.List = SerieList(self.directory_to_search)
        self.__InitList__()

    def __InitList__(self):
        """Refresh the cached list of series that still have missing episodes."""
        self.series_list = self.List.GetMissingEpisode()

    def display_series(self):
        """Print all series with assigned numbers."""
        print("\nCurrent result:")
        for i, serie in enumerate(self.series_list, 1):
            name = serie.name  # Access the property on the instance
            # Fall back to the folder name when the series has no usable name.
            if name is None or str(name).strip() == "":
                print(f"{i}. {serie.folder}")
            else:
                print(f"{i}. {serie.name}")

    def search(self, words :str) -> list:
        """Query the aniworld.to provider and return its raw result list."""
        loader = self.Loaders.GetLoader(key="aniworld.to")
        return loader.Search(words)

    def get_user_selection(self):
        """Handle user input for selecting series.

        Returns the selected series objects, or None when the user exits or
        makes no valid selection.
        """
        self.display_series()
        while True:
            selection = input(
                "\nSelect series by number (e.g. '1', '1,2' or 'all') or type 'exit' to return: ").strip().lower()
            if selection == "exit":
                return None
            selected_series = []
            if selection == "all":
                selected_series = self.series_list
            else:
                try:
                    # Convert 1-based user numbers to 0-based indexes;
                    # out-of-range entries are silently dropped.
                    indexes = [int(num) - 1 for num in selection.split(",")]
                    selected_series = [self.series_list[i] for i in indexes if 0 <= i < len(self.series_list)]
                except ValueError:
                    print("Invalid selection. Going back to the result display.")
                    self.display_series()
                    continue
            if selected_series:
                return selected_series
            else:
                print("No valid series selected. Going back to the result display.")
                return None

    def retry(self, func, max_retries=3, delay=2, *args, **kwargs):
        """Call *func(*args, **kwargs)* up to *max_retries* times, sleeping
        *delay* seconds after each failure.

        Returns True on the first successful call, False when all attempts
        raised.
        """
        for attempt in range(1, max_retries + 1):
            try:
                func(*args, **kwargs)
                return True
            except Exception as e:
                # Best-effort: report the error and try again.
                print(e)
                time.sleep(delay)
        return False

    def download_series(self, series):
        """Simulate the downloading process with a progress bar."""
        total_downloaded = 0
        total_episodes = sum(sum(len(ep) for ep in serie.episodeDict.values()) for serie in series)
        self.progress = Progress()
        task1 = self.progress.add_task("[red]Processing...", total=total_episodes)
        task2 = self.progress.add_task(f"[green]...", total=0)
        # total=100 so this task can display a download percentage.
        self.task3 = self.progress.add_task(f"[Gray]...", total=100)
        self.progress.start()
        for serie in series:
            serie_episodes = sum(len(ep) for ep in serie.episodeDict.values())
            self.progress.update(task2, description=f"[green]{serie.folder}", total=serie_episodes)
            downloaded = 0
            for season, episodes in serie.episodeDict.items():
                for episode in episodes:
                    loader = self.Loaders.GetLoader(key="aniworld.to")
                    # Only download episodes available in the requested language.
                    if loader.IsLanguage(season, episode, serie.key):
                        self.retry(loader.Download, 3, 1, self.directory_to_search, serie.folder, season, episode, serie.key, "German Dub",self.print_Download_Progress)
                    downloaded += 1
                    total_downloaded += 1
                    self.progress.update(task1, advance=1)
                    self.progress.update(task2, advance=1)
                    time.sleep(0.02)
        self.progress.stop()
        self.progress = None

    def print_Download_Progress(self, d):
        """Progress-hook callback mirroring per-file download progress onto
        task3 (d looks like a yt-dlp status dict — confirm against the
        loader's Download implementation)."""
        # Uses self.progress and self.task3 for the progress display.
        if self.progress is None or not hasattr(self, 'task3'):
            return
        if d['status'] == 'downloading':
            total = d.get('total_bytes') or d.get('total_bytes_estimate')
            downloaded = d.get('downloaded_bytes', 0)
            if total:
                percent = downloaded / total * 100
                self.progress.update(self.task3, completed=percent, description=f"[gray]Download: {percent:.1f}%")
            else:
                # No total known: show the absolute amount downloaded instead.
                self.progress.update(self.task3, description=f"[gray]{downloaded/1024/1024:.2f}MB geladen")
        elif d['status'] == 'finished':
            self.progress.update(self.task3, completed=100, description="[gray]Download abgeschlossen.")

    def search_mode(self):
        """Search for a series and allow user to select an option."""
        search_string = input("Enter search string: ").strip()
        results = self.search(search_string)
        if not results:
            print("No results found. Returning to start.")
            return
        print("\nSearch results:")
        for i, result in enumerate(results, 1):
            print(f"{i}. {result.get('name')}")
        while True:
            selection = input("\nSelect an option by number or type '<enter>' to return: ").strip().lower()
            if selection == "":
                return
            try:
                index = int(selection) - 1
                if 0 <= index < len(results):
                    chosen_name = results[index]
                    # Persist the chosen result; the link is passed both as
                    # the first argument and as the key (4th argument).
                    self.List.add(Serie(chosen_name["link"], chosen_name["name"], "aniworld.to", chosen_name["link"], {}))
                    return
                else:
                    print("Invalid selection. Try again.")
            except ValueError:
                print("Invalid input. Try again.")

    def updateFromReinit(self, folder, counter):
        """Scan callback: advance the rescan progress bar by one item.

        NOTE(review): relies on self.progress/self.task1 created in run()'s
        'i' branch — only valid while a rescan is in progress.
        """
        self.progress.update(self.task1, advance=1)

    def run(self):
        """Main function to run the app."""
        while True:
            action = input("\nChoose action ('s' for search, 'i' for init or 'd' for download): ").strip().lower()
            if action == "s":
                self.search_mode()
            if action == "i":
                print("\nRescanning series...\n")
                self.progress = Progress()
                # NOTE(review): total=300 is a hard-coded estimate of the item count.
                self.task1 = self.progress.add_task("[red]items processed...", total=300)
                self.progress.start()
                self.SerieScanner.Reinit()
                self.SerieScanner.Scan(self.updateFromReinit)
                # Rebuild the series list from disk after the rescan.
                self.List = SerieList(self.directory_to_search)
                self.__InitList__()
                self.progress.stop()
                self.progress = None
            elif action == "d":
                selected_series = self.get_user_selection()
                if selected_series:
                    self.download_series(selected_series)
# Run the app
if __name__ == "__main__":
    # Read the base directory from an environment variable
    # (falls back to a hard-coded sshfs UNC path when ANIME_DIRECTORY is unset).
    directory_to_search = os.getenv("ANIME_DIRECTORY", "\\\\sshfs.r\\ubuntu@192.168.178.43\\media\\serien\\Serien")
    app = SeriesApp(directory_to_search)
    app.run()

View File

@@ -1,491 +1,491 @@
2025-09-29 12:38:25 - INFO - __main__ - <module> - Enhanced logging system initialized
2025-09-29 12:38:25 - INFO - __main__ - <module> - Starting Aniworld Flask server...
2025-09-29 12:38:25 - INFO - __main__ - <module> - Anime directory: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien
2025-09-29 12:38:25 - INFO - __main__ - <module> - Log level: INFO
2025-09-29 12:38:25 - INFO - __main__ - <module> - Scheduled operations disabled
2025-09-29 12:38:25 - INFO - __main__ - <module> - Server will be available at http://localhost:5000
2025-09-29 12:38:30 - INFO - __main__ - <module> - Enhanced logging system initialized
2025-09-29 12:38:30 - INFO - __main__ - <module> - Starting Aniworld Flask server...
2025-09-29 12:38:30 - INFO - __main__ - <module> - Anime directory: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien
2025-09-29 12:38:30 - INFO - __main__ - <module> - Log level: INFO
2025-09-29 12:38:30 - INFO - __main__ - <module> - Scheduled operations disabled
2025-09-29 12:38:30 - INFO - __main__ - <module> - Server will be available at http://localhost:5000
2025-09-29 12:38:30 - WARNING - werkzeug - _log - * Debugger is active!
2025-09-29 12:38:40 - INFO - root - __init__ - Initialized Loader with base path: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien
2025-09-29 12:38:40 - INFO - root - load_series - Scanning anime folders in: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien
2025-09-29 12:38:40 - WARNING - root - load_series - Skipping .deletedByTMM - No data folder found
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\2.5 Dimensional Seduction (2024)\data
2025-09-29 12:38:40 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\2.5 Dimensional Seduction (2024)\data for 2.5 Dimensional Seduction (2024)
2025-09-29 12:38:40 - WARNING - root - load_series - Skipping 25-dimensional-seduction - No data folder found
2025-09-29 12:38:40 - WARNING - root - load_series - Skipping 25-sai no Joshikousei (2018) - No data folder found
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\7th Time Loop The Villainess Enjoys a Carefree Life Married to Her Worst Enemy! (2024)\data
2025-09-29 12:38:40 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\7th Time Loop The Villainess Enjoys a Carefree Life Married to Her Worst Enemy! (2024)\data for 7th Time Loop The Villainess Enjoys a Carefree Life Married to Her Worst Enemy! (2024)
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\9-nine-rulers-crown\data
2025-09-29 12:38:40 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\9-nine-rulers-crown\data for 9-nine-rulers-crown
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\A Couple of Cuckoos (2022)\data
2025-09-29 12:38:40 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\A Couple of Cuckoos (2022)\data for A Couple of Cuckoos (2022)
2025-09-29 12:38:40 - WARNING - root - load_series - Skipping A Time Called You (2023) - No data folder found
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\A.I.C.O. Incarnation (2018)\data
2025-09-29 12:38:40 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\A.I.C.O. Incarnation (2018)\data for A.I.C.O. Incarnation (2018)
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Aesthetica of a Rogue Hero (2012)\data
2025-09-29 12:38:40 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Aesthetica of a Rogue Hero (2012)\data for Aesthetica of a Rogue Hero (2012)
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Alya Sometimes Hides Her Feelings in Russian (2024)\data
2025-09-29 12:38:40 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Alya Sometimes Hides Her Feelings in Russian (2024)\data for Alya Sometimes Hides Her Feelings in Russian (2024)
2025-09-29 12:38:40 - WARNING - root - load_series - Skipping American Horror Story (2011) - No data folder found
2025-09-29 12:38:40 - WARNING - root - load_series - Skipping Andor (2022) - No data folder found
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Angels of Death (2018)\data
2025-09-29 12:38:40 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Angels of Death (2018)\data for Angels of Death (2018)
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Aokana Four Rhythm Across the Blue (2016)\data
2025-09-29 12:38:40 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Aokana Four Rhythm Across the Blue (2016)\data for Aokana Four Rhythm Across the Blue (2016)
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Arifureta (2019)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Arifureta (2019)\data for Arifureta (2019)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\As a Reincarnated Aristocrat, I'll Use My Appraisal Skill to Rise in the World (2024)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\As a Reincarnated Aristocrat, I'll Use My Appraisal Skill to Rise in the World (2024)\data for As a Reincarnated Aristocrat, I'll Use My Appraisal Skill to Rise in the World (2024)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\BOFURI I Don't Want to Get Hurt, so I'll Max Out My Defense. (2020)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\BOFURI I Don't Want to Get Hurt, so I'll Max Out My Defense. (2020)\data for BOFURI I Don't Want to Get Hurt, so I'll Max Out My Defense. (2020)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Black Butler (2008)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Black Butler (2008)\data for Black Butler (2008)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Black Clover (2017)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Black Clover (2017)\data for Black Clover (2017)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blast of Tempest (2012)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blast of Tempest (2012)\data for Blast of Tempest (2012)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blood Lad (2013)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blood Lad (2013)\data for Blood Lad (2013)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blue Box (2024)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blue Box (2024)\data for Blue Box (2024)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blue Exorcist (2011)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blue Exorcist (2011)\data for Blue Exorcist (2011)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Bogus Skill Fruitmaster About That Time I Became Able to Eat Unlimited Numbers of Skill Fruits (That Kill You) (2025)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Bogus Skill Fruitmaster About That Time I Became Able to Eat Unlimited Numbers of Skill Fruits (That Kill You) (2025)\data for Bogus Skill Fruitmaster About That Time I Became Able to Eat Unlimited Numbers of Skill Fruits (That Kill You) (2025)
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Boys Over Flowers (2009) - No data folder found
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Burst Angel (2004)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Burst Angel (2004)\data for Burst Angel (2004)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\By the Grace of the Gods (2020)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\By the Grace of the Gods (2020)\data for By the Grace of the Gods (2020)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Call of the Night (2022)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Call of the Night (2022)\data for Call of the Night (2022)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Campfire Cooking in Another World with My Absurd Skill (2023)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Campfire Cooking in Another World with My Absurd Skill (2023)\data for Campfire Cooking in Another World with My Absurd Skill (2023)
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Celebrity (2023) - No data folder found
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Chainsaw Man (2022)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Chainsaw Man (2022)\data for Chainsaw Man (2022)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Charlotte (2015)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Charlotte (2015)\data for Charlotte (2015)
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Cherish the Day (2020) - No data folder found
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Chernobyl (2019) - No data folder found
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Chillin in Another World with Level 2 Super Cheat Powers (2024)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Chillin in Another World with Level 2 Super Cheat Powers (2024)\data for Chillin in Another World with Level 2 Super Cheat Powers (2024)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Clannad (2007)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Clannad (2007)\data for Clannad (2007)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Classroom of the Elite (2017)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Classroom of the Elite (2017)\data for Classroom of the Elite (2017)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Clevatess (2025)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Clevatess (2025)\data for Clevatess (2025)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\DAN DA DAN (2024)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\DAN DA DAN (2024)\data for DAN DA DAN (2024)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Danmachi Is It Wrong to Try to Pick Up Girls in a Dungeon (2015)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Danmachi Is It Wrong to Try to Pick Up Girls in a Dungeon (2015)\data for Danmachi Is It Wrong to Try to Pick Up Girls in a Dungeon (2015)
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Das Buch von Boba Fett (2021) - No data folder found
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Date a Live (2013)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Date a Live (2013)\data for Date a Live (2013)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dead Mount Death Play (2023)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dead Mount Death Play (2023)\data for Dead Mount Death Play (2023)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Deadman Wonderland (2011)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Deadman Wonderland (2011)\data for Deadman Wonderland (2011)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dealing with Mikadono Sisters Is a Breeze (2025)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dealing with Mikadono Sisters Is a Breeze (2025)\data for Dealing with Mikadono Sisters Is a Breeze (2025)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Delicious in Dungeon (2024)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Delicious in Dungeon (2024)\data for Delicious in Dungeon (2024)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Demon Lord, Retry! (2019)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Demon Lord, Retry! (2019)\data for Demon Lord, Retry! (2019)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Demon Slave - The Chained Soldier (2024)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Demon Slave - The Chained Soldier (2024)\data for Demon Slave - The Chained Soldier (2024)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Demon Slayer Kimetsu no Yaiba (2019)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Demon Slayer Kimetsu no Yaiba (2019)\data for Demon Slayer Kimetsu no Yaiba (2019)
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Der Herr der Ringe Die Ringe der Macht (2022) - No data folder found
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Devil in Ohio (2022) - No data folder found
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Die Bibel (2013) - No data folder found
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Die Tagebücher der Apothekerin (2023)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Die Tagebücher der Apothekerin (2023)\data for Die Tagebücher der Apothekerin (2023)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Domestic Girlfriend (2019)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Domestic Girlfriend (2019)\data for Domestic Girlfriend (2019)
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Doona! (2023) - No data folder found
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dr. STONE (2019)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dr. STONE (2019)\data for Dr. STONE (2019)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dragonball Super (2015)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dragonball Super (2015)\data for Dragonball Super (2015)
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Failure Frame I Became the Strongest and Annihilated Everything With Low-Level Spells (2024) - No data folder found
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Fallout (2024) - No data folder found
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Farming Life in Another World (2023)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Farming Life in Another World (2023)\data for Farming Life in Another World (2023)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Frieren - Nach dem Ende der Reise (2023)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Frieren - Nach dem Ende der Reise (2023)\data for Frieren - Nach dem Ende der Reise (2023)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Fruits Basket (2019)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Fruits Basket (2019)\data for Fruits Basket (2019)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Gachiakuta (2025)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Gachiakuta (2025)\data for Gachiakuta (2025)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Gate (2015)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Gate (2015)\data for Gate (2015)
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Generation der Verdammten (2014) - No data folder found
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Girls und Panzer (2012)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Girls und Panzer (2012)\data for Girls und Panzer (2012)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Gleipnir (2020)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Gleipnir (2020)\data for Gleipnir (2020)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Golden Time (2013)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Golden Time (2013)\data for Golden Time (2013)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Grimgar, Ashes and Illusions (2016)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Grimgar, Ashes and Illusions (2016)\data for Grimgar, Ashes and Illusions (2016)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Harem in the Labyrinth of Another World (2022)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Harem in the Labyrinth of Another World (2022)\data for Harem in the Labyrinth of Another World (2022)
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Highschool D×D (2012) - No data folder found
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Hinamatsuri (2018)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Hinamatsuri (2018)\data for Hinamatsuri (2018)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I Got a Cheat Skill in Another World and Became Unrivaled in The Real World Too (2023)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I Got a Cheat Skill in Another World and Became Unrivaled in The Real World Too (2023)\data for I Got a Cheat Skill in Another World and Became Unrivaled in The Real World Too (2023)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I Parry Everything What Do You Mean Im the Strongest Im Not Even an Adventurer Yet! (2024)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I Parry Everything What Do You Mean Im the Strongest Im Not Even an Adventurer Yet! (2024)\data for I Parry Everything What Do You Mean Im the Strongest Im Not Even an Adventurer Yet! (2024)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I'm the Evil Lord of an Intergalactic Empire! (2025)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I'm the Evil Lord of an Intergalactic Empire! (2025)\data for I'm the Evil Lord of an Intergalactic Empire! (2025)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I've Been Killing Slimes for 300 Years and Maxed Out My Level (2021)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I've Been Killing Slimes for 300 Years and Maxed Out My Level (2021)\data for I've Been Killing Slimes for 300 Years and Maxed Out My Level (2021)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\In the Land of Leadale (2022)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\In the Land of Leadale (2022)\data for In the Land of Leadale (2022)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Ishura (2024)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Ishura (2024)\data for Ishura (2024)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Ill Become a Villainess Who Goes Down in History (2024)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Ill Become a Villainess Who Goes Down in History (2024)\data for Ill Become a Villainess Who Goes Down in History (2024)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\JUJUTSU KAISEN (2020)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\JUJUTSU KAISEN (2020)\data for JUJUTSU KAISEN (2020)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kaguya-sama Love is War (2019)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kaguya-sama Love is War (2019)\data for Kaguya-sama Love is War (2019)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kaiju No. 8 (20200)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kaiju No. 8 (20200)\data for Kaiju No. 8 (20200)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\KamiKatsu Meine Arbeit als Missionar in einer gottlosen Welt (2023)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\KamiKatsu Meine Arbeit als Missionar in einer gottlosen Welt (2023)\data for KamiKatsu Meine Arbeit als Missionar in einer gottlosen Welt (2023)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Knight's & Magic (2017)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Knight's & Magic (2017)\data for Knight's & Magic (2017)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kombattanten werden entsandt! (2021)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kombattanten werden entsandt! (2021)\data for Kombattanten werden entsandt! (2021)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\KonoSuba An Explosion on This Wonderful World! (2023)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\KonoSuba An Explosion on This Wonderful World! (2023)\data for KonoSuba An Explosion on This Wonderful World! (2023)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Konosuba God's Blessing on This Wonderful World! (2016)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Konosuba God's Blessing on This Wonderful World! (2016)\data for Konosuba God's Blessing on This Wonderful World! (2016)
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Krieg der Welten (2019) - No data folder found
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kuma Kuma Kuma Bear (2020)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kuma Kuma Kuma Bear (2020)\data for Kuma Kuma Kuma Bear (2020)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Log Horizon (2013)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Log Horizon (2013)\data for Log Horizon (2013)
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Loki (2021) - No data folder found
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Loner Life in Another World (2024)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Loner Life in Another World (2024)\data for Loner Life in Another World (2024)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Lord of Mysteries (2025)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Lord of Mysteries (2025)\data for Lord of Mysteries (2025)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Lycoris Recoil (2022)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Lycoris Recoil (2022)\data for Lycoris Recoil (2022)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Magic Maker How to Make Magic in Another World (2025)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Magic Maker How to Make Magic in Another World (2025)\data for Magic Maker How to Make Magic in Another World (2025)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Magical Girl Site (2018)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Magical Girl Site (2018)\data for Magical Girl Site (2018)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Management of a Novice Alchemist (2022)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Management of a Novice Alchemist (2022)\data for Management of a Novice Alchemist (2022)
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Marianne (2019) - No data folder found
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Meine Wiedergeburt als Schleim in einer anderen Welt (2018)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Meine Wiedergeburt als Schleim in einer anderen Welt (2018)\data for Meine Wiedergeburt als Schleim in einer anderen Welt (2018)
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Midnight Mass (2021) - No data folder found
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Mirai Nikki (2011)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Mirai Nikki (2011)\data for Mirai Nikki (2011)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Miss Kobayashi's Dragon Maid (2017)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Miss Kobayashi's Dragon Maid (2017)\data for Miss Kobayashi's Dragon Maid (2017)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Mob Psycho 100 (2016)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Mob Psycho 100 (2016)\data for Mob Psycho 100 (2016)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\More than a Married Couple, but Not Lovers (2022)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\More than a Married Couple, but Not Lovers (2022)\data for More than a Married Couple, but Not Lovers (2022)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Mushoku Tensei Jobless Reincarnation (2021)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Mushoku Tensei Jobless Reincarnation (2021)\data for Mushoku Tensei Jobless Reincarnation (2021)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Hero Academia Vigilantes (2025)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Hero Academia Vigilantes (2025)\data for My Hero Academia Vigilantes (2025)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Instant Death Ability Is So Overpowered, No One in This Other World Stands a Chance Against Me! (2024)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Instant Death Ability Is So Overpowered, No One in This Other World Stands a Chance Against Me! (2024)\data for My Instant Death Ability Is So Overpowered, No One in This Other World Stands a Chance Against Me! (2024)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Isekai Life (2022)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Isekai Life (2022)\data for My Isekai Life (2022)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Life as Inukai-san's Dog (2023)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Life as Inukai-san's Dog (2023)\data for My Life as Inukai-san's Dog (2023)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Unique Skill Makes Me OP even at Level 1 (2023)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Unique Skill Makes Me OP even at Level 1 (2023)\data for My Unique Skill Makes Me OP even at Level 1 (2023)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\New Saga (2025)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\New Saga (2025)\data for New Saga (2025)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Nina the Starry Bride (2024)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Nina the Starry Bride (2024)\data for Nina the Starry Bride (2024)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Nisekoi Liebe, Lügen & Yakuza (2014)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Nisekoi Liebe, Lügen & Yakuza (2014)\data for Nisekoi Liebe, Lügen & Yakuza (2014)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\No Game No Life (2014)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\No Game No Life (2014)\data for No Game No Life (2014)
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Obi-Wan Kenobi (2022) - No data folder found
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Orange (2016)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Orange (2016)\data for Orange (2016)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Peach Boy Riverside (2021)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Peach Boy Riverside (2021)\data for Peach Boy Riverside (2021)
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Penny Dreadful (2014) - No data folder found
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Planet Erde II Eine Erde - viele Welten (2016) - No data folder found
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Plastic Memories (2015)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Plastic Memories (2015)\data for Plastic Memories (2015)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Ragna Crimson (2023)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Ragna Crimson (2023)\data for Ragna Crimson (2023)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Rascal Does Not Dream of Bunny Girl Senpai (2018)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Rascal Does Not Dream of Bunny Girl Senpai (2018)\data for Rascal Does Not Dream of Bunny Girl Senpai (2018)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\ReMonster (2024)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\ReMonster (2024)\data for ReMonster (2024)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\ReZERO - Starting Life in Another World (2016)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\ReZERO - Starting Life in Another World (2016)\data for ReZERO - Starting Life in Another World (2016)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Reborn as a Vending Machine, I Now Wander the Dungeon (2023)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Reborn as a Vending Machine, I Now Wander the Dungeon (2023)\data for Reborn as a Vending Machine, I Now Wander the Dungeon (2023)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Redo of Healer (2021)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Redo of Healer (2021)\data for Redo of Healer (2021)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Rick and Morty (2013)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Rick and Morty (2013)\data for Rick and Morty (2013)
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Rocket & Groot (2017) - No data folder found
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Romulus (2020) - No data folder found
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Saga of Tanya the Evil (2017)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Saga of Tanya the Evil (2017)\data for Saga of Tanya the Evil (2017)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Seirei Gensouki Spirit Chronicles (2021)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Seirei Gensouki Spirit Chronicles (2021)\data for Seirei Gensouki Spirit Chronicles (2021)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Shangri-La Frontier (2023)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Shangri-La Frontier (2023)\data for Shangri-La Frontier (2023)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\She Professed Herself Pupil of the Wise Man (2022)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\She Professed Herself Pupil of the Wise Man (2022)\data for She Professed Herself Pupil of the Wise Man (2022)
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping She-Hulk Die Anwältin (2022) - No data folder found
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Solo Leveling (2024)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Solo Leveling (2024)\data for Solo Leveling (2024)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Spice and Wolf (2008)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Spice and Wolf (2008)\data for Spice and Wolf (2008)
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Star Trek Discovery (2017) - No data folder found
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Stargate (1997) - No data folder found
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Stargate Atlantis (2004) - No data folder found
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Steins;Gate (2011)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Steins;Gate (2011)\data for Steins;Gate (2011)
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Sweet Tooth (2021) - No data folder found
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Sword of the Demon Hunter Kijin Gen (2025)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Sword of the Demon Hunter Kijin Gen (2025)\data for Sword of the Demon Hunter Kijin Gen (2025)
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Tales from the Loop (2020) - No data folder found
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tamako Market (2013)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tamako Market (2013)\data for Tamako Market (2013)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Ancient Magus' Bride (2017)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Ancient Magus' Bride (2017)\data for The Ancient Magus' Bride (2017)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Demon Sword Master of Excalibur Academy (2023)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Demon Sword Master of Excalibur Academy (2023)\data for The Demon Sword Master of Excalibur Academy (2023)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Devil is a Part-Timer! (2013)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Devil is a Part-Timer! (2013)\data for The Devil is a Part-Timer! (2013)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Dreaming Boy is a Realist (2023)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Dreaming Boy is a Realist (2023)\data for The Dreaming Boy is a Realist (2023)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Dungeon of Black Company (2021)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Dungeon of Black Company (2021)\data for The Dungeon of Black Company (2021)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Eminence in Shadow (2022)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Eminence in Shadow (2022)\data for The Eminence in Shadow (2022)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Familiar of Zero (2006)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Familiar of Zero (2006)\data for The Familiar of Zero (2006)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Faraway Paladin (2021)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Faraway Paladin (2021)\data for The Faraway Paladin (2021)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Gorilla Gods Go-To Girl (2025)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Gorilla Gods Go-To Girl (2025)\data for The Gorilla Gods Go-To Girl (2025)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Hidden Dungeon Only I Can Enter (2021)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Hidden Dungeon Only I Can Enter (2021)\data for The Hidden Dungeon Only I Can Enter (2021)
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping The Last of Us (2023) - No data folder found
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping The Man in the High Castle (2015) - No data folder found
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping The Mandalorian (2019) - No data folder found
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Quintessential Quintuplets (2019)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Quintessential Quintuplets (2019)\data for The Quintessential Quintuplets (2019)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Saints Magic Power is Omnipotent (2021)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Saints Magic Power is Omnipotent (2021)\data for The Saints Magic Power is Omnipotent (2021)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Too-Perfect Saint Tossed Aside by My Fiance and Sold to Another Kingdom (2025)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Too-Perfect Saint Tossed Aside by My Fiance and Sold to Another Kingdom (2025)\data for The Too-Perfect Saint Tossed Aside by My Fiance and Sold to Another Kingdom (2025)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Unaware Atelier Meister (2025)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Unaware Atelier Meister (2025)\data for The Unaware Atelier Meister (2025)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Weakest Tamer Began a Journey to Pick Up Trash (2024)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Weakest Tamer Began a Journey to Pick Up Trash (2024)\data for The Weakest Tamer Began a Journey to Pick Up Trash (2024)
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping The Witcher (2019) - No data folder found
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping The World's Finest Assassin Gets Reincarnated in Another World as an Aristocrat (2021) - No data folder found
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\To Your Eternity (2021)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\To Your Eternity (2021)\data for To Your Eternity (2021)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tomo-chan Is a Girl! (2023)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tomo-chan Is a Girl! (2023)\data for Tomo-chan Is a Girl! (2023)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tonikawa Over the Moon for You (2020)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tonikawa Over the Moon for You (2020)\data for Tonikawa Over the Moon for You (2020)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tsukimichi Moonlit Fantasy (2021)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tsukimichi Moonlit Fantasy (2021)\data for Tsukimichi Moonlit Fantasy (2021)
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Unidentified - Die wahren X-Akten (2019) - No data folder found
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Unnamed Memory (2024)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Unnamed Memory (2024)\data for Unnamed Memory (2024)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Vom Landei zum Schwertheiligen (2025)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Vom Landei zum Schwertheiligen (2025)\data for Vom Landei zum Schwertheiligen (2025)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\WIND BREAKER (2024)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\WIND BREAKER (2024)\data for WIND BREAKER (2024)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\WITCH WATCH (2025)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\WITCH WATCH (2025)\data for WITCH WATCH (2025)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Wolf Girl & Black Prince (2014)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Wolf Girl & Black Prince (2014)\data for Wolf Girl & Black Prince (2014)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Worlds End Harem (2022)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Worlds End Harem (2022)\data for Worlds End Harem (2022)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Zom 100 Bucket List of the Dead (2023)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Zom 100 Bucket List of the Dead (2023)\data for Zom 100 Bucket List of the Dead (2023)
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping a-couple-of-cuckoos - No data folder found
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\a-ninja-and-an-assassin-under-one-roof\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\a-ninja-and-an-assassin-under-one-roof\data for a-ninja-and-an-assassin-under-one-roof
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\a-nobodys-way-up-to-an-exploration-hero\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\a-nobodys-way-up-to-an-exploration-hero\data for a-nobodys-way-up-to-an-exploration-hero
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping a-silent-voice - No data folder found
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\am-i-actually-the-strongest\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\am-i-actually-the-strongest\data for am-i-actually-the-strongest
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\anne-shirley\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\anne-shirley\data for anne-shirley
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\apocalypse-bringer-mynoghra\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\apocalypse-bringer-mynoghra\data for apocalypse-bringer-mynoghra
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\banished-from-the-heros-party-i-decided-to-live-a-quiet-life-in-the-countryside\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\banished-from-the-heros-party-i-decided-to-live-a-quiet-life-in-the-countryside\data for banished-from-the-heros-party-i-decided-to-live-a-quiet-life-in-the-countryside
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\beheneko the elf girls cat is secretly an s ranked monster (2025) (2025)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\beheneko the elf girls cat is secretly an s ranked monster (2025) (2025)\data for beheneko the elf girls cat is secretly an s ranked monster (2025) (2025)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\berserk-of-gluttony\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\berserk-of-gluttony\data for berserk-of-gluttony
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\black-summoner\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\black-summoner\data for black-summoner
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\boarding-school-juliet\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\boarding-school-juliet\data for boarding-school-juliet
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\buddy-daddies\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\buddy-daddies\data for buddy-daddies
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\can-a-boy-girl-friendship-survive\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\can-a-boy-girl-friendship-survive\data for can-a-boy-girl-friendship-survive
2025-09-29 12:38:43 - WARNING - root - load_series - Skipping chillin-in-another-world-with-level-2-super-cheat-powers - No data folder found
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\chillin-in-my-30s-after-getting-fired-from-the-demon-kings-army\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\chillin-in-my-30s-after-getting-fired-from-the-demon-kings-army\data for chillin-in-my-30s-after-getting-fired-from-the-demon-kings-army
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\choujin koukousei tachi wa isekai de mo yoyuu de ikinuku you desu\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\choujin koukousei tachi wa isekai de mo yoyuu de ikinuku you desu\data for choujin koukousei tachi wa isekai de mo yoyuu de ikinuku you desu
2025-09-29 12:38:43 - WARNING - root - load_series - Skipping clevatess - No data folder found
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\compass-20-animation-project\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\compass-20-animation-project\data for compass-20-animation-project
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\dragon-raja-the-blazing-dawn\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\dragon-raja-the-blazing-dawn\data for dragon-raja-the-blazing-dawn
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\dragonar-academy\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\dragonar-academy\data for dragonar-academy
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\drugstore-in-another-world-the-slow-life-of-a-cheat-pharmacist\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\drugstore-in-another-world-the-slow-life-of-a-cheat-pharmacist\data for drugstore-in-another-world-the-slow-life-of-a-cheat-pharmacist
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\fluffy-paradise\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\fluffy-paradise\data for fluffy-paradise
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\food-for-the-soul\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\food-for-the-soul\data for food-for-the-soul
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\handyman-saitou-in-another-world\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\handyman-saitou-in-another-world\data for handyman-saitou-in-another-world
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\i-shall-survive-using-potions\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\i-shall-survive-using-potions\data for i-shall-survive-using-potions
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\im-giving-the-disgraced-noble-lady-i-rescued-a-crash-course-in-naughtiness\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\im-giving-the-disgraced-noble-lady-i-rescued-a-crash-course-in-naughtiness\data for im-giving-the-disgraced-noble-lady-i-rescued-a-crash-course-in-naughtiness
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\killing-bites\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\killing-bites\data for killing-bites
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\love-flops\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\love-flops\data for love-flops
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\magic-maker-how-to-make-magic-in-another-world\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\magic-maker-how-to-make-magic-in-another-world\data for magic-maker-how-to-make-magic-in-another-world
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\muhyo-rojis-bureau-of-supernatural-investigation\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\muhyo-rojis-bureau-of-supernatural-investigation\data for muhyo-rojis-bureau-of-supernatural-investigation
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\my-roommate-is-a-cat\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\my-roommate-is-a-cat\data for my-roommate-is-a-cat
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\nukitashi-the-animation\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\nukitashi-the-animation\data for nukitashi-the-animation
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\outbreak-company\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\outbreak-company\data for outbreak-company
2025-09-29 12:38:43 - WARNING - root - load_series - Skipping plastic-memories - No data folder found
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\pseudo-harem\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\pseudo-harem\data for pseudo-harem
2025-09-29 12:38:43 - WARNING - root - load_series - Skipping rent-a-girlfriend - No data folder found
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\sasaki-and-peeps\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\sasaki-and-peeps\data for sasaki-and-peeps
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\scooped-up-by-an-s-rank-adventurer\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\scooped-up-by-an-s-rank-adventurer\data for scooped-up-by-an-s-rank-adventurer
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\secrets-of-the-silent-witch\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\secrets-of-the-silent-witch\data for secrets-of-the-silent-witch
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\seton-academy-join-the-pack\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\seton-academy-join-the-pack\data for seton-academy-join-the-pack
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\shachibato-president-its-time-for-battle\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\shachibato-president-its-time-for-battle\data for shachibato-president-its-time-for-battle
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\skeleton-knight-in-another-world\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\skeleton-knight-in-another-world\data for skeleton-knight-in-another-world
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\sugar-apple-fairy-tale\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\sugar-apple-fairy-tale\data for sugar-apple-fairy-tale
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\summer-pockets\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\summer-pockets\data for summer-pockets
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\suppose-a-kid-from-the-last-dungeon-boonies-moved-to-a-starter-town\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\suppose-a-kid-from-the-last-dungeon-boonies-moved-to-a-starter-town\data for suppose-a-kid-from-the-last-dungeon-boonies-moved-to-a-starter-town
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-beginning-after-the-end\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-beginning-after-the-end\data for the-beginning-after-the-end
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-brilliant-healers-new-life-in-the-shadows\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-brilliant-healers-new-life-in-the-shadows\data for the-brilliant-healers-new-life-in-the-shadows
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-daily-life-of-a-middle-aged-online-shopper-in-another-world\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-daily-life-of-a-middle-aged-online-shopper-in-another-world\data for the-daily-life-of-a-middle-aged-online-shopper-in-another-world
2025-09-29 12:38:43 - WARNING - root - load_series - Skipping the-familiar-of-zero - No data folder found
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-fragrant-flower-blooms-with-dignity\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-fragrant-flower-blooms-with-dignity\data for the-fragrant-flower-blooms-with-dignity
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-great-cleric\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-great-cleric\data for the-great-cleric
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-new-chronicles-of-extraordinary-beings-preface\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-new-chronicles-of-extraordinary-beings-preface\data for the-new-chronicles-of-extraordinary-beings-preface
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-shiunji-family-children\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-shiunji-family-children\data for the-shiunji-family-children
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-shy-hero-and-the-assassin-princesses\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-shy-hero-and-the-assassin-princesses\data for the-shy-hero-and-the-assassin-princesses
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-testament-of-sister-new-devil\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-testament-of-sister-new-devil\data for the-testament-of-sister-new-devil
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-unwanted-undead-adventurer\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-unwanted-undead-adventurer\data for the-unwanted-undead-adventurer
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-water-magician\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-water-magician\data for the-water-magician
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-worlds-finest-assassin-gets-reincarnated-in-another-world-as-an-aristocrat\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-worlds-finest-assassin-gets-reincarnated-in-another-world-as-an-aristocrat\data for the-worlds-finest-assassin-gets-reincarnated-in-another-world-as-an-aristocrat
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-wrong-way-to-use-healing-magic\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-wrong-way-to-use-healing-magic\data for the-wrong-way-to-use-healing-magic
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\theres-no-freaking-way-ill-be-your-lover-unless\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\theres-no-freaking-way-ill-be-your-lover-unless\data for theres-no-freaking-way-ill-be-your-lover-unless
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\to-be-hero-x\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\to-be-hero-x\data for to-be-hero-x
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\tougen-anki\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\tougen-anki\data for tougen-anki
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\uglymug-epicfighter\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\uglymug-epicfighter\data for uglymug-epicfighter
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\valkyrie-drive-mermaid\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\valkyrie-drive-mermaid\data for valkyrie-drive-mermaid
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\wandering-witch-the-journey-of-elaina\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\wandering-witch-the-journey-of-elaina\data for wandering-witch-the-journey-of-elaina
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\war-god-system-im-counting-on-you\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\war-god-system-im-counting-on-you\data for war-god-system-im-counting-on-you
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\welcome-to-japan-ms-elf\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\welcome-to-japan-ms-elf\data for welcome-to-japan-ms-elf
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\welcome-to-the-outcasts-restaurant\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\welcome-to-the-outcasts-restaurant\data for welcome-to-the-outcasts-restaurant
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\yandere-dark-elf-she-chased-me-all-the-way-from-another-world\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\yandere-dark-elf-she-chased-me-all-the-way-from-another-world\data for yandere-dark-elf-she-chased-me-all-the-way-from-another-world
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Übel Blatt (2025)\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Übel Blatt (2025)\data for Übel Blatt (2025)
2025-09-29 20:23:13 - INFO - __main__ - <module> - Enhanced logging system initialized
2025-09-29 20:23:13 - INFO - __main__ - <module> - Starting Aniworld Flask server...
2025-09-29 20:23:13 - INFO - __main__ - <module> - Anime directory: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien
2025-09-29 20:23:13 - INFO - __main__ - <module> - Log level: INFO
2025-09-29 20:23:13 - INFO - __main__ - <module> - Scheduled operations disabled
2025-09-29 20:23:13 - INFO - __main__ - <module> - Server will be available at http://localhost:5000
2025-09-29 20:23:16 - INFO - __main__ - <module> - Enhanced logging system initialized
2025-09-29 20:23:16 - INFO - root - __init__ - Initialized Loader with base path: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien
2025-09-29 20:23:16 - INFO - root - load_series - Scanning anime folders in: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien
2025-09-29 20:23:16 - ERROR - root - init_series_app - Error initializing SeriesApp:
Traceback (most recent call last):
File "D:\repo\Aniworld/src/server/app.py", line 145, in init_series_app
series_app = SeriesApp(directory_to_search)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\repo\Aniworld\src\Main.py", line 54, in __init__
self.List = SerieList(self.directory_to_search)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\repo\Aniworld\src\server\core\entities\SerieList.py", line 9, in __init__
self.load_series()
File "D:\repo\Aniworld\src\server\core\entities\SerieList.py", line 29, in load_series
for anime_folder in os.listdir(self.directory):
^^^^^^^^^^^^^^^^^^^^^^^^^^
FileNotFoundError: [WinError 53] Der Netzwerkpfad wurde nicht gefunden: '\\\\sshfs.r\\ubuntu@192.168.178.43\\media\\serien\\Serien'
2025-09-29 20:23:16 - WARNING - werkzeug - _log - * Debugger is active!
2025-09-29 20:33:06 - DEBUG - schedule - clear - Deleting *all* jobs
2025-09-29 20:33:06 - INFO - application.services.scheduler_service - stop_scheduler - Scheduled operations stopped
2025-09-29 20:33:06 - INFO - __main__ - <module> - Scheduler stopped
2025-09-29 12:38:25 - INFO - __main__ - <module> - Enhanced logging system initialized
2025-09-29 12:38:25 - INFO - __main__ - <module> - Starting Aniworld Flask server...
2025-09-29 12:38:25 - INFO - __main__ - <module> - Anime directory: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien
2025-09-29 12:38:25 - INFO - __main__ - <module> - Log level: INFO
2025-09-29 12:38:25 - INFO - __main__ - <module> - Scheduled operations disabled
2025-09-29 12:38:25 - INFO - __main__ - <module> - Server will be available at http://localhost:5000
2025-09-29 12:38:30 - INFO - __main__ - <module> - Enhanced logging system initialized
2025-09-29 12:38:30 - INFO - __main__ - <module> - Starting Aniworld Flask server...
2025-09-29 12:38:30 - INFO - __main__ - <module> - Anime directory: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien
2025-09-29 12:38:30 - INFO - __main__ - <module> - Log level: INFO
2025-09-29 12:38:30 - INFO - __main__ - <module> - Scheduled operations disabled
2025-09-29 12:38:30 - INFO - __main__ - <module> - Server will be available at http://localhost:5000
2025-09-29 12:38:30 - WARNING - werkzeug - _log - * Debugger is active!
2025-09-29 12:38:40 - INFO - root - __init__ - Initialized Loader with base path: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien
2025-09-29 12:38:40 - INFO - root - load_series - Scanning anime folders in: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien
2025-09-29 12:38:40 - WARNING - root - load_series - Skipping .deletedByTMM - No data folder found
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\2.5 Dimensional Seduction (2024)\data
2025-09-29 12:38:40 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\2.5 Dimensional Seduction (2024)\data for 2.5 Dimensional Seduction (2024)
2025-09-29 12:38:40 - WARNING - root - load_series - Skipping 25-dimensional-seduction - No data folder found
2025-09-29 12:38:40 - WARNING - root - load_series - Skipping 25-sai no Joshikousei (2018) - No data folder found
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\7th Time Loop The Villainess Enjoys a Carefree Life Married to Her Worst Enemy! (2024)\data
2025-09-29 12:38:40 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\7th Time Loop The Villainess Enjoys a Carefree Life Married to Her Worst Enemy! (2024)\data for 7th Time Loop The Villainess Enjoys a Carefree Life Married to Her Worst Enemy! (2024)
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\9-nine-rulers-crown\data
2025-09-29 12:38:40 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\9-nine-rulers-crown\data for 9-nine-rulers-crown
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\A Couple of Cuckoos (2022)\data
2025-09-29 12:38:40 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\A Couple of Cuckoos (2022)\data for A Couple of Cuckoos (2022)
2025-09-29 12:38:40 - WARNING - root - load_series - Skipping A Time Called You (2023) - No data folder found
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\A.I.C.O. Incarnation (2018)\data
2025-09-29 12:38:40 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\A.I.C.O. Incarnation (2018)\data for A.I.C.O. Incarnation (2018)
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Aesthetica of a Rogue Hero (2012)\data
2025-09-29 12:38:40 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Aesthetica of a Rogue Hero (2012)\data for Aesthetica of a Rogue Hero (2012)
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Alya Sometimes Hides Her Feelings in Russian (2024)\data
2025-09-29 12:38:40 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Alya Sometimes Hides Her Feelings in Russian (2024)\data for Alya Sometimes Hides Her Feelings in Russian (2024)
2025-09-29 12:38:40 - WARNING - root - load_series - Skipping American Horror Story (2011) - No data folder found
2025-09-29 12:38:40 - WARNING - root - load_series - Skipping Andor (2022) - No data folder found
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Angels of Death (2018)\data
2025-09-29 12:38:40 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Angels of Death (2018)\data for Angels of Death (2018)
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Aokana Four Rhythm Across the Blue (2016)\data
2025-09-29 12:38:40 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Aokana Four Rhythm Across the Blue (2016)\data for Aokana Four Rhythm Across the Blue (2016)
2025-09-29 12:38:40 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Arifureta (2019)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Arifureta (2019)\data for Arifureta (2019)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\As a Reincarnated Aristocrat, I'll Use My Appraisal Skill to Rise in the World (2024)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\As a Reincarnated Aristocrat, I'll Use My Appraisal Skill to Rise in the World (2024)\data for As a Reincarnated Aristocrat, I'll Use My Appraisal Skill to Rise in the World (2024)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\BOFURI I Don't Want to Get Hurt, so I'll Max Out My Defense. (2020)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\BOFURI I Don't Want to Get Hurt, so I'll Max Out My Defense. (2020)\data for BOFURI I Don't Want to Get Hurt, so I'll Max Out My Defense. (2020)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Black Butler (2008)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Black Butler (2008)\data for Black Butler (2008)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Black Clover (2017)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Black Clover (2017)\data for Black Clover (2017)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blast of Tempest (2012)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blast of Tempest (2012)\data for Blast of Tempest (2012)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blood Lad (2013)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blood Lad (2013)\data for Blood Lad (2013)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blue Box (2024)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blue Box (2024)\data for Blue Box (2024)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blue Exorcist (2011)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Blue Exorcist (2011)\data for Blue Exorcist (2011)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Bogus Skill Fruitmaster About That Time I Became Able to Eat Unlimited Numbers of Skill Fruits (That Kill You) (2025)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Bogus Skill Fruitmaster About That Time I Became Able to Eat Unlimited Numbers of Skill Fruits (That Kill You) (2025)\data for Bogus Skill Fruitmaster About That Time I Became Able to Eat Unlimited Numbers of Skill Fruits (That Kill You) (2025)
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Boys Over Flowers (2009) - No data folder found
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Burst Angel (2004)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Burst Angel (2004)\data for Burst Angel (2004)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\By the Grace of the Gods (2020)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\By the Grace of the Gods (2020)\data for By the Grace of the Gods (2020)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Call of the Night (2022)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Call of the Night (2022)\data for Call of the Night (2022)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Campfire Cooking in Another World with My Absurd Skill (2023)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Campfire Cooking in Another World with My Absurd Skill (2023)\data for Campfire Cooking in Another World with My Absurd Skill (2023)
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Celebrity (2023) - No data folder found
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Chainsaw Man (2022)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Chainsaw Man (2022)\data for Chainsaw Man (2022)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Charlotte (2015)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Charlotte (2015)\data for Charlotte (2015)
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Cherish the Day (2020) - No data folder found
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Chernobyl (2019) - No data folder found
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Chillin in Another World with Level 2 Super Cheat Powers (2024)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Chillin in Another World with Level 2 Super Cheat Powers (2024)\data for Chillin in Another World with Level 2 Super Cheat Powers (2024)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Clannad (2007)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Clannad (2007)\data for Clannad (2007)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Classroom of the Elite (2017)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Classroom of the Elite (2017)\data for Classroom of the Elite (2017)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Clevatess (2025)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Clevatess (2025)\data for Clevatess (2025)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\DAN DA DAN (2024)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\DAN DA DAN (2024)\data for DAN DA DAN (2024)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Danmachi Is It Wrong to Try to Pick Up Girls in a Dungeon (2015)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Danmachi Is It Wrong to Try to Pick Up Girls in a Dungeon (2015)\data for Danmachi Is It Wrong to Try to Pick Up Girls in a Dungeon (2015)
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Das Buch von Boba Fett (2021) - No data folder found
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Date a Live (2013)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Date a Live (2013)\data for Date a Live (2013)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dead Mount Death Play (2023)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dead Mount Death Play (2023)\data for Dead Mount Death Play (2023)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Deadman Wonderland (2011)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Deadman Wonderland (2011)\data for Deadman Wonderland (2011)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dealing with Mikadono Sisters Is a Breeze (2025)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dealing with Mikadono Sisters Is a Breeze (2025)\data for Dealing with Mikadono Sisters Is a Breeze (2025)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Delicious in Dungeon (2024)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Delicious in Dungeon (2024)\data for Delicious in Dungeon (2024)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Demon Lord, Retry! (2019)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Demon Lord, Retry! (2019)\data for Demon Lord, Retry! (2019)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Demon Slave - The Chained Soldier (2024)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Demon Slave - The Chained Soldier (2024)\data for Demon Slave - The Chained Soldier (2024)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Demon Slayer Kimetsu no Yaiba (2019)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Demon Slayer Kimetsu no Yaiba (2019)\data for Demon Slayer Kimetsu no Yaiba (2019)
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Der Herr der Ringe Die Ringe der Macht (2022) - No data folder found
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Devil in Ohio (2022) - No data folder found
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Die Bibel (2013) - No data folder found
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Die Tagebücher der Apothekerin (2023)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Die Tagebücher der Apothekerin (2023)\data for Die Tagebücher der Apothekerin (2023)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Domestic Girlfriend (2019)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Domestic Girlfriend (2019)\data for Domestic Girlfriend (2019)
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Doona! (2023) - No data folder found
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dr. STONE (2019)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dr. STONE (2019)\data for Dr. STONE (2019)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dragonball Super (2015)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Dragonball Super (2015)\data for Dragonball Super (2015)
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Failure Frame I Became the Strongest and Annihilated Everything With Low-Level Spells (2024) - No data folder found
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Fallout (2024) - No data folder found
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Farming Life in Another World (2023)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Farming Life in Another World (2023)\data for Farming Life in Another World (2023)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Frieren - Nach dem Ende der Reise (2023)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Frieren - Nach dem Ende der Reise (2023)\data for Frieren - Nach dem Ende der Reise (2023)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Fruits Basket (2019)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Fruits Basket (2019)\data for Fruits Basket (2019)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Gachiakuta (2025)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Gachiakuta (2025)\data for Gachiakuta (2025)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Gate (2015)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Gate (2015)\data for Gate (2015)
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Generation der Verdammten (2014) - No data folder found
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Girls und Panzer (2012)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Girls und Panzer (2012)\data for Girls und Panzer (2012)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Gleipnir (2020)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Gleipnir (2020)\data for Gleipnir (2020)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Golden Time (2013)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Golden Time (2013)\data for Golden Time (2013)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Grimgar, Ashes and Illusions (2016)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Grimgar, Ashes and Illusions (2016)\data for Grimgar, Ashes and Illusions (2016)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Harem in the Labyrinth of Another World (2022)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Harem in the Labyrinth of Another World (2022)\data for Harem in the Labyrinth of Another World (2022)
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Highschool D×D (2012) - No data folder found
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Hinamatsuri (2018)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Hinamatsuri (2018)\data for Hinamatsuri (2018)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I Got a Cheat Skill in Another World and Became Unrivaled in The Real World Too (2023)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I Got a Cheat Skill in Another World and Became Unrivaled in The Real World Too (2023)\data for I Got a Cheat Skill in Another World and Became Unrivaled in The Real World Too (2023)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I Parry Everything What Do You Mean Im the Strongest Im Not Even an Adventurer Yet! (2024)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I Parry Everything What Do You Mean Im the Strongest Im Not Even an Adventurer Yet! (2024)\data for I Parry Everything What Do You Mean Im the Strongest Im Not Even an Adventurer Yet! (2024)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I'm the Evil Lord of an Intergalactic Empire! (2025)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I'm the Evil Lord of an Intergalactic Empire! (2025)\data for I'm the Evil Lord of an Intergalactic Empire! (2025)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I've Been Killing Slimes for 300 Years and Maxed Out My Level (2021)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\I've Been Killing Slimes for 300 Years and Maxed Out My Level (2021)\data for I've Been Killing Slimes for 300 Years and Maxed Out My Level (2021)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\In the Land of Leadale (2022)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\In the Land of Leadale (2022)\data for In the Land of Leadale (2022)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Ishura (2024)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Ishura (2024)\data for Ishura (2024)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Ill Become a Villainess Who Goes Down in History (2024)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Ill Become a Villainess Who Goes Down in History (2024)\data for Ill Become a Villainess Who Goes Down in History (2024)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\JUJUTSU KAISEN (2020)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\JUJUTSU KAISEN (2020)\data for JUJUTSU KAISEN (2020)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kaguya-sama Love is War (2019)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kaguya-sama Love is War (2019)\data for Kaguya-sama Love is War (2019)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kaiju No. 8 (20200)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kaiju No. 8 (20200)\data for Kaiju No. 8 (20200)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\KamiKatsu Meine Arbeit als Missionar in einer gottlosen Welt (2023)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\KamiKatsu Meine Arbeit als Missionar in einer gottlosen Welt (2023)\data for KamiKatsu Meine Arbeit als Missionar in einer gottlosen Welt (2023)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Knight's & Magic (2017)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Knight's & Magic (2017)\data for Knight's & Magic (2017)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kombattanten werden entsandt! (2021)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kombattanten werden entsandt! (2021)\data for Kombattanten werden entsandt! (2021)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\KonoSuba An Explosion on This Wonderful World! (2023)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\KonoSuba An Explosion on This Wonderful World! (2023)\data for KonoSuba An Explosion on This Wonderful World! (2023)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Konosuba God's Blessing on This Wonderful World! (2016)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Konosuba God's Blessing on This Wonderful World! (2016)\data for Konosuba God's Blessing on This Wonderful World! (2016)
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Krieg der Welten (2019) - No data folder found
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kuma Kuma Kuma Bear (2020)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Kuma Kuma Kuma Bear (2020)\data for Kuma Kuma Kuma Bear (2020)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Log Horizon (2013)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Log Horizon (2013)\data for Log Horizon (2013)
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Loki (2021) - No data folder found
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Loner Life in Another World (2024)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Loner Life in Another World (2024)\data for Loner Life in Another World (2024)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Lord of Mysteries (2025)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Lord of Mysteries (2025)\data for Lord of Mysteries (2025)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Lycoris Recoil (2022)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Lycoris Recoil (2022)\data for Lycoris Recoil (2022)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Magic Maker How to Make Magic in Another World (2025)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Magic Maker How to Make Magic in Another World (2025)\data for Magic Maker How to Make Magic in Another World (2025)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Magical Girl Site (2018)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Magical Girl Site (2018)\data for Magical Girl Site (2018)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Management of a Novice Alchemist (2022)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Management of a Novice Alchemist (2022)\data for Management of a Novice Alchemist (2022)
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Marianne (2019) - No data folder found
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Meine Wiedergeburt als Schleim in einer anderen Welt (2018)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Meine Wiedergeburt als Schleim in einer anderen Welt (2018)\data for Meine Wiedergeburt als Schleim in einer anderen Welt (2018)
2025-09-29 12:38:41 - WARNING - root - load_series - Skipping Midnight Mass (2021) - No data folder found
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Mirai Nikki (2011)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Mirai Nikki (2011)\data for Mirai Nikki (2011)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Miss Kobayashi's Dragon Maid (2017)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Miss Kobayashi's Dragon Maid (2017)\data for Miss Kobayashi's Dragon Maid (2017)
2025-09-29 12:38:41 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Mob Psycho 100 (2016)\data
2025-09-29 12:38:41 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Mob Psycho 100 (2016)\data for Mob Psycho 100 (2016)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\More than a Married Couple, but Not Lovers (2022)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\More than a Married Couple, but Not Lovers (2022)\data for More than a Married Couple, but Not Lovers (2022)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Mushoku Tensei Jobless Reincarnation (2021)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Mushoku Tensei Jobless Reincarnation (2021)\data for Mushoku Tensei Jobless Reincarnation (2021)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Hero Academia Vigilantes (2025)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Hero Academia Vigilantes (2025)\data for My Hero Academia Vigilantes (2025)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Instant Death Ability Is So Overpowered, No One in This Other World Stands a Chance Against Me! (2024)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Instant Death Ability Is So Overpowered, No One in This Other World Stands a Chance Against Me! (2024)\data for My Instant Death Ability Is So Overpowered, No One in This Other World Stands a Chance Against Me! (2024)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Isekai Life (2022)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Isekai Life (2022)\data for My Isekai Life (2022)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Life as Inukai-san's Dog (2023)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Life as Inukai-san's Dog (2023)\data for My Life as Inukai-san's Dog (2023)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Unique Skill Makes Me OP even at Level 1 (2023)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\My Unique Skill Makes Me OP even at Level 1 (2023)\data for My Unique Skill Makes Me OP even at Level 1 (2023)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\New Saga (2025)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\New Saga (2025)\data for New Saga (2025)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Nina the Starry Bride (2024)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Nina the Starry Bride (2024)\data for Nina the Starry Bride (2024)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Nisekoi Liebe, Lügen & Yakuza (2014)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Nisekoi Liebe, Lügen & Yakuza (2014)\data for Nisekoi Liebe, Lügen & Yakuza (2014)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\No Game No Life (2014)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\No Game No Life (2014)\data for No Game No Life (2014)
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Obi-Wan Kenobi (2022) - No data folder found
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Orange (2016)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Orange (2016)\data for Orange (2016)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Peach Boy Riverside (2021)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Peach Boy Riverside (2021)\data for Peach Boy Riverside (2021)
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Penny Dreadful (2014) - No data folder found
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Planet Erde II Eine Erde - viele Welten (2016) - No data folder found
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Plastic Memories (2015)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Plastic Memories (2015)\data for Plastic Memories (2015)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Ragna Crimson (2023)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Ragna Crimson (2023)\data for Ragna Crimson (2023)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Rascal Does Not Dream of Bunny Girl Senpai (2018)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Rascal Does Not Dream of Bunny Girl Senpai (2018)\data for Rascal Does Not Dream of Bunny Girl Senpai (2018)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\ReMonster (2024)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\ReMonster (2024)\data for ReMonster (2024)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\ReZERO - Starting Life in Another World (2016)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\ReZERO - Starting Life in Another World (2016)\data for ReZERO - Starting Life in Another World (2016)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Reborn as a Vending Machine, I Now Wander the Dungeon (2023)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Reborn as a Vending Machine, I Now Wander the Dungeon (2023)\data for Reborn as a Vending Machine, I Now Wander the Dungeon (2023)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Redo of Healer (2021)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Redo of Healer (2021)\data for Redo of Healer (2021)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Rick and Morty (2013)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Rick and Morty (2013)\data for Rick and Morty (2013)
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Rocket & Groot (2017) - No data folder found
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Romulus (2020) - No data folder found
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Saga of Tanya the Evil (2017)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Saga of Tanya the Evil (2017)\data for Saga of Tanya the Evil (2017)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Seirei Gensouki Spirit Chronicles (2021)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Seirei Gensouki Spirit Chronicles (2021)\data for Seirei Gensouki Spirit Chronicles (2021)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Shangri-La Frontier (2023)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Shangri-La Frontier (2023)\data for Shangri-La Frontier (2023)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\She Professed Herself Pupil of the Wise Man (2022)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\She Professed Herself Pupil of the Wise Man (2022)\data for She Professed Herself Pupil of the Wise Man (2022)
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping She-Hulk Die Anwältin (2022) - No data folder found
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Solo Leveling (2024)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Solo Leveling (2024)\data for Solo Leveling (2024)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Spice and Wolf (2008)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Spice and Wolf (2008)\data for Spice and Wolf (2008)
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Star Trek Discovery (2017) - No data folder found
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Stargate (1997) - No data folder found
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Stargate Atlantis (2004) - No data folder found
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Steins;Gate (2011)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Steins;Gate (2011)\data for Steins;Gate (2011)
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Sweet Tooth (2021) - No data folder found
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Sword of the Demon Hunter Kijin Gen (2025)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Sword of the Demon Hunter Kijin Gen (2025)\data for Sword of the Demon Hunter Kijin Gen (2025)
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Tales from the Loop (2020) - No data folder found
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tamako Market (2013)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tamako Market (2013)\data for Tamako Market (2013)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Ancient Magus' Bride (2017)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Ancient Magus' Bride (2017)\data for The Ancient Magus' Bride (2017)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Demon Sword Master of Excalibur Academy (2023)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Demon Sword Master of Excalibur Academy (2023)\data for The Demon Sword Master of Excalibur Academy (2023)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Devil is a Part-Timer! (2013)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Devil is a Part-Timer! (2013)\data for The Devil is a Part-Timer! (2013)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Dreaming Boy is a Realist (2023)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Dreaming Boy is a Realist (2023)\data for The Dreaming Boy is a Realist (2023)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Dungeon of Black Company (2021)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Dungeon of Black Company (2021)\data for The Dungeon of Black Company (2021)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Eminence in Shadow (2022)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Eminence in Shadow (2022)\data for The Eminence in Shadow (2022)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Familiar of Zero (2006)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Familiar of Zero (2006)\data for The Familiar of Zero (2006)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Faraway Paladin (2021)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Faraway Paladin (2021)\data for The Faraway Paladin (2021)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Gorilla Gods Go-To Girl (2025)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Gorilla Gods Go-To Girl (2025)\data for The Gorilla Gods Go-To Girl (2025)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Hidden Dungeon Only I Can Enter (2021)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Hidden Dungeon Only I Can Enter (2021)\data for The Hidden Dungeon Only I Can Enter (2021)
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping The Last of Us (2023) - No data folder found
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping The Man in the High Castle (2015) - No data folder found
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping The Mandalorian (2019) - No data folder found
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Quintessential Quintuplets (2019)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Quintessential Quintuplets (2019)\data for The Quintessential Quintuplets (2019)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Saints Magic Power is Omnipotent (2021)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Saints Magic Power is Omnipotent (2021)\data for The Saints Magic Power is Omnipotent (2021)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Too-Perfect Saint Tossed Aside by My Fiance and Sold to Another Kingdom (2025)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Too-Perfect Saint Tossed Aside by My Fiance and Sold to Another Kingdom (2025)\data for The Too-Perfect Saint Tossed Aside by My Fiance and Sold to Another Kingdom (2025)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Unaware Atelier Meister (2025)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Unaware Atelier Meister (2025)\data for The Unaware Atelier Meister (2025)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Weakest Tamer Began a Journey to Pick Up Trash (2024)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\The Weakest Tamer Began a Journey to Pick Up Trash (2024)\data for The Weakest Tamer Began a Journey to Pick Up Trash (2024)
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping The Witcher (2019) - No data folder found
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping The World's Finest Assassin Gets Reincarnated in Another World as an Aristocrat (2021) - No data folder found
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\To Your Eternity (2021)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\To Your Eternity (2021)\data for To Your Eternity (2021)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tomo-chan Is a Girl! (2023)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tomo-chan Is a Girl! (2023)\data for Tomo-chan Is a Girl! (2023)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tonikawa Over the Moon for You (2020)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tonikawa Over the Moon for You (2020)\data for Tonikawa Over the Moon for You (2020)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tsukimichi Moonlit Fantasy (2021)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Tsukimichi Moonlit Fantasy (2021)\data for Tsukimichi Moonlit Fantasy (2021)
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping Unidentified - Die wahren X-Akten (2019) - No data folder found
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Unnamed Memory (2024)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Unnamed Memory (2024)\data for Unnamed Memory (2024)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Vom Landei zum Schwertheiligen (2025)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Vom Landei zum Schwertheiligen (2025)\data for Vom Landei zum Schwertheiligen (2025)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\WIND BREAKER (2024)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\WIND BREAKER (2024)\data for WIND BREAKER (2024)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\WITCH WATCH (2025)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\WITCH WATCH (2025)\data for WITCH WATCH (2025)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Wolf Girl & Black Prince (2014)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Wolf Girl & Black Prince (2014)\data for Wolf Girl & Black Prince (2014)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Worlds End Harem (2022)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Worlds End Harem (2022)\data for Worlds End Harem (2022)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Zom 100 Bucket List of the Dead (2023)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Zom 100 Bucket List of the Dead (2023)\data for Zom 100 Bucket List of the Dead (2023)
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping a-couple-of-cuckoos - No data folder found
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\a-ninja-and-an-assassin-under-one-roof\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\a-ninja-and-an-assassin-under-one-roof\data for a-ninja-and-an-assassin-under-one-roof
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\a-nobodys-way-up-to-an-exploration-hero\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\a-nobodys-way-up-to-an-exploration-hero\data for a-nobodys-way-up-to-an-exploration-hero
2025-09-29 12:38:42 - WARNING - root - load_series - Skipping a-silent-voice - No data folder found
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\am-i-actually-the-strongest\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\am-i-actually-the-strongest\data for am-i-actually-the-strongest
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\anne-shirley\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\anne-shirley\data for anne-shirley
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\apocalypse-bringer-mynoghra\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\apocalypse-bringer-mynoghra\data for apocalypse-bringer-mynoghra
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\banished-from-the-heros-party-i-decided-to-live-a-quiet-life-in-the-countryside\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\banished-from-the-heros-party-i-decided-to-live-a-quiet-life-in-the-countryside\data for banished-from-the-heros-party-i-decided-to-live-a-quiet-life-in-the-countryside
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\beheneko the elf girls cat is secretly an s ranked monster (2025) (2025)\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\beheneko the elf girls cat is secretly an s ranked monster (2025) (2025)\data for beheneko the elf girls cat is secretly an s ranked monster (2025) (2025)
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\berserk-of-gluttony\data
2025-09-29 12:38:42 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\berserk-of-gluttony\data for berserk-of-gluttony
2025-09-29 12:38:42 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\black-summoner\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\black-summoner\data for black-summoner
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\boarding-school-juliet\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\boarding-school-juliet\data for boarding-school-juliet
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\buddy-daddies\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\buddy-daddies\data for buddy-daddies
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\can-a-boy-girl-friendship-survive\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\can-a-boy-girl-friendship-survive\data for can-a-boy-girl-friendship-survive
2025-09-29 12:38:43 - WARNING - root - load_series - Skipping chillin-in-another-world-with-level-2-super-cheat-powers - No data folder found
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\chillin-in-my-30s-after-getting-fired-from-the-demon-kings-army\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\chillin-in-my-30s-after-getting-fired-from-the-demon-kings-army\data for chillin-in-my-30s-after-getting-fired-from-the-demon-kings-army
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\choujin koukousei tachi wa isekai de mo yoyuu de ikinuku you desu\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\choujin koukousei tachi wa isekai de mo yoyuu de ikinuku you desu\data for choujin koukousei tachi wa isekai de mo yoyuu de ikinuku you desu
2025-09-29 12:38:43 - WARNING - root - load_series - Skipping clevatess - No data folder found
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\compass-20-animation-project\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\compass-20-animation-project\data for compass-20-animation-project
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\dragon-raja-the-blazing-dawn\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\dragon-raja-the-blazing-dawn\data for dragon-raja-the-blazing-dawn
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\dragonar-academy\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\dragonar-academy\data for dragonar-academy
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\drugstore-in-another-world-the-slow-life-of-a-cheat-pharmacist\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\drugstore-in-another-world-the-slow-life-of-a-cheat-pharmacist\data for drugstore-in-another-world-the-slow-life-of-a-cheat-pharmacist
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\fluffy-paradise\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\fluffy-paradise\data for fluffy-paradise
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\food-for-the-soul\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\food-for-the-soul\data for food-for-the-soul
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\handyman-saitou-in-another-world\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\handyman-saitou-in-another-world\data for handyman-saitou-in-another-world
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\i-shall-survive-using-potions\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\i-shall-survive-using-potions\data for i-shall-survive-using-potions
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\im-giving-the-disgraced-noble-lady-i-rescued-a-crash-course-in-naughtiness\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\im-giving-the-disgraced-noble-lady-i-rescued-a-crash-course-in-naughtiness\data for im-giving-the-disgraced-noble-lady-i-rescued-a-crash-course-in-naughtiness
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\killing-bites\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\killing-bites\data for killing-bites
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\love-flops\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\love-flops\data for love-flops
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\magic-maker-how-to-make-magic-in-another-world\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\magic-maker-how-to-make-magic-in-another-world\data for magic-maker-how-to-make-magic-in-another-world
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\muhyo-rojis-bureau-of-supernatural-investigation\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\muhyo-rojis-bureau-of-supernatural-investigation\data for muhyo-rojis-bureau-of-supernatural-investigation
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\my-roommate-is-a-cat\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\my-roommate-is-a-cat\data for my-roommate-is-a-cat
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\nukitashi-the-animation\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\nukitashi-the-animation\data for nukitashi-the-animation
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\outbreak-company\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\outbreak-company\data for outbreak-company
2025-09-29 12:38:43 - WARNING - root - load_series - Skipping plastic-memories - No data folder found
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\pseudo-harem\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\pseudo-harem\data for pseudo-harem
2025-09-29 12:38:43 - WARNING - root - load_series - Skipping rent-a-girlfriend - No data folder found
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\sasaki-and-peeps\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\sasaki-and-peeps\data for sasaki-and-peeps
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\scooped-up-by-an-s-rank-adventurer\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\scooped-up-by-an-s-rank-adventurer\data for scooped-up-by-an-s-rank-adventurer
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\secrets-of-the-silent-witch\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\secrets-of-the-silent-witch\data for secrets-of-the-silent-witch
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\seton-academy-join-the-pack\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\seton-academy-join-the-pack\data for seton-academy-join-the-pack
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\shachibato-president-its-time-for-battle\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\shachibato-president-its-time-for-battle\data for shachibato-president-its-time-for-battle
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\skeleton-knight-in-another-world\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\skeleton-knight-in-another-world\data for skeleton-knight-in-another-world
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\sugar-apple-fairy-tale\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\sugar-apple-fairy-tale\data for sugar-apple-fairy-tale
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\summer-pockets\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\summer-pockets\data for summer-pockets
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\suppose-a-kid-from-the-last-dungeon-boonies-moved-to-a-starter-town\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\suppose-a-kid-from-the-last-dungeon-boonies-moved-to-a-starter-town\data for suppose-a-kid-from-the-last-dungeon-boonies-moved-to-a-starter-town
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-beginning-after-the-end\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-beginning-after-the-end\data for the-beginning-after-the-end
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-brilliant-healers-new-life-in-the-shadows\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-brilliant-healers-new-life-in-the-shadows\data for the-brilliant-healers-new-life-in-the-shadows
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-daily-life-of-a-middle-aged-online-shopper-in-another-world\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-daily-life-of-a-middle-aged-online-shopper-in-another-world\data for the-daily-life-of-a-middle-aged-online-shopper-in-another-world
2025-09-29 12:38:43 - WARNING - root - load_series - Skipping the-familiar-of-zero - No data folder found
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-fragrant-flower-blooms-with-dignity\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-fragrant-flower-blooms-with-dignity\data for the-fragrant-flower-blooms-with-dignity
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-great-cleric\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-great-cleric\data for the-great-cleric
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-new-chronicles-of-extraordinary-beings-preface\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-new-chronicles-of-extraordinary-beings-preface\data for the-new-chronicles-of-extraordinary-beings-preface
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-shiunji-family-children\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-shiunji-family-children\data for the-shiunji-family-children
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-shy-hero-and-the-assassin-princesses\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-shy-hero-and-the-assassin-princesses\data for the-shy-hero-and-the-assassin-princesses
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-testament-of-sister-new-devil\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-testament-of-sister-new-devil\data for the-testament-of-sister-new-devil
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-unwanted-undead-adventurer\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-unwanted-undead-adventurer\data for the-unwanted-undead-adventurer
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-water-magician\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-water-magician\data for the-water-magician
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-worlds-finest-assassin-gets-reincarnated-in-another-world-as-an-aristocrat\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-worlds-finest-assassin-gets-reincarnated-in-another-world-as-an-aristocrat\data for the-worlds-finest-assassin-gets-reincarnated-in-another-world-as-an-aristocrat
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-wrong-way-to-use-healing-magic\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\the-wrong-way-to-use-healing-magic\data for the-wrong-way-to-use-healing-magic
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\theres-no-freaking-way-ill-be-your-lover-unless\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\theres-no-freaking-way-ill-be-your-lover-unless\data for theres-no-freaking-way-ill-be-your-lover-unless
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\to-be-hero-x\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\to-be-hero-x\data for to-be-hero-x
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\tougen-anki\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\tougen-anki\data for tougen-anki
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\uglymug-epicfighter\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\uglymug-epicfighter\data for uglymug-epicfighter
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\valkyrie-drive-mermaid\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\valkyrie-drive-mermaid\data for valkyrie-drive-mermaid
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\wandering-witch-the-journey-of-elaina\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\wandering-witch-the-journey-of-elaina\data for wandering-witch-the-journey-of-elaina
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\war-god-system-im-counting-on-you\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\war-god-system-im-counting-on-you\data for war-god-system-im-counting-on-you
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\welcome-to-japan-ms-elf\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\welcome-to-japan-ms-elf\data for welcome-to-japan-ms-elf
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\welcome-to-the-outcasts-restaurant\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\welcome-to-the-outcasts-restaurant\data for welcome-to-the-outcasts-restaurant
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\yandere-dark-elf-she-chased-me-all-the-way-from-another-world\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\yandere-dark-elf-she-chased-me-all-the-way-from-another-world\data for yandere-dark-elf-she-chased-me-all-the-way-from-another-world
2025-09-29 12:38:43 - INFO - root - load_series - Found data folder: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Übel Blatt (2025)\data
2025-09-29 12:38:43 - INFO - root - load_data - Successfully loaded \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien\Übel Blatt (2025)\data for Übel Blatt (2025)
2025-09-29 20:23:13 - INFO - __main__ - <module> - Enhanced logging system initialized
2025-09-29 20:23:13 - INFO - __main__ - <module> - Starting Aniworld Flask server...
2025-09-29 20:23:13 - INFO - __main__ - <module> - Anime directory: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien
2025-09-29 20:23:13 - INFO - __main__ - <module> - Log level: INFO
2025-09-29 20:23:13 - INFO - __main__ - <module> - Scheduled operations disabled
2025-09-29 20:23:13 - INFO - __main__ - <module> - Server will be available at http://localhost:5000
2025-09-29 20:23:16 - INFO - __main__ - <module> - Enhanced logging system initialized
2025-09-29 20:23:16 - INFO - root - __init__ - Initialized Loader with base path: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien
2025-09-29 20:23:16 - INFO - root - load_series - Scanning anime folders in: \\sshfs.r\ubuntu@192.168.178.43\media\serien\Serien
2025-09-29 20:23:16 - ERROR - root - init_series_app - Error initializing SeriesApp:
Traceback (most recent call last):
File "D:\repo\Aniworld/src/server/app.py", line 145, in init_series_app
series_app = SeriesApp(directory_to_search)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\repo\Aniworld\src\Main.py", line 54, in __init__
self.List = SerieList(self.directory_to_search)
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
File "D:\repo\Aniworld\src\server\core\entities\SerieList.py", line 9, in __init__
self.load_series()
File "D:\repo\Aniworld\src\server\core\entities\SerieList.py", line 29, in load_series
for anime_folder in os.listdir(self.directory):
^^^^^^^^^^^^^^^^^^^^^^^^^^
FileNotFoundError: [WinError 53] Der Netzwerkpfad wurde nicht gefunden: '\\\\sshfs.r\\ubuntu@192.168.178.43\\media\\serien\\Serien'
2025-09-29 20:23:16 - WARNING - werkzeug - _log - * Debugger is active!
2025-09-29 20:33:06 - DEBUG - schedule - clear - Deleting *all* jobs
2025-09-29 20:33:06 - INFO - application.services.scheduler_service - stop_scheduler - Scheduled operations stopped
2025-09-29 20:33:06 - INFO - __main__ - <module> - Scheduler stopped

View File

@@ -1,131 +1,131 @@
import os
import re
import logging
from .entities.series import Serie
import traceback
from ..infrastructure.logging.GlobalLogger import error_logger, noKeyFound_logger
from .exceptions.Exceptions import NoKeyFoundException, MatchNotFoundError
from .providers.base_provider import Loader
class SerieScanner:
    """Walks the library directory, determines the missing episodes for each
    series via the provider loader, and persists the result into every
    folder's 'data' file."""

    def __init__(self, basePath: str, loader: Loader):
        self.directory = basePath
        self.folderDict: dict[str, Serie] = {}  # keyed by serie.key after Scan()
        self.loader = loader
        logging.info(f"Initialized Loader with base path: {self.directory}")

    def Reinit(self):
        """Discard the results of a previous scan."""
        self.folderDict: dict[str, Serie] = {}

    def is_null_or_whitespace(self, s):
        """Return True when *s* is None, empty or whitespace-only."""
        return s is None or s.strip() == ""

    def GetTotalToScan(self):
        """Number of series folders a Scan() will visit (for progress bars)."""
        return sum(1 for _ in self.__find_mp4_files())

    def Scan(self, callback):
        """Scan every series folder for missing episodes.

        ``callback(folder, counter)`` is invoked once per folder so the caller
        can report progress.  Per-folder failures are logged and the scan
        continues with the next folder.
        """
        logging.info("Starting process to load missing episodes")
        counter = 0
        for folder, mp4_files in self.__find_mp4_files():
            try:
                counter += 1
                callback(folder, counter)
                serie = self.__ReadDataFromFile(folder)
                if serie is not None and not self.is_null_or_whitespace(serie.key):
                    missings, site = self.__GetMissingEpisodesAndSeason(serie.key, mp4_files)
                    serie.episodeDict = missings
                    serie.folder = folder
                    serie.save_to_file(os.path.join(self.directory, folder, 'data'))
                    if serie.key in self.folderDict:
                        # BUG FIX: logging.ERROR is an int level constant, not
                        # callable; the original call raised TypeError here.
                        logging.error(f"dublication found: {serie.key}")
                    self.folderDict[serie.key] = serie
                    noKeyFound_logger.info(f"Saved Serie: '{str(serie)}'")
            except NoKeyFoundException as nkfe:
                # BUG FIX: the original called NoKeyFoundException.error(),
                # which does not exist (AttributeError would abort the scan);
                # route the message to the dedicated logger instead.
                noKeyFound_logger.error(f"Error processing folder '{folder}': {nkfe}")
            except Exception as e:
                error_logger.error(f"Folder: '{folder}' - Unexpected error processing folder '{folder}': {e} \n {traceback.format_exc()}")
                continue

    def __find_mp4_files(self):
        """Yield (folder_name, [mp4 paths]) for every sub-directory."""
        logging.info("Scanning for .mp4 files")
        for anime_name in os.listdir(self.directory):
            anime_path = os.path.join(self.directory, anime_name)
            if os.path.isdir(anime_path):
                mp4_files = []
                for root, _, files in os.walk(anime_path):
                    for file in files:
                        if file.endswith(".mp4"):
                            mp4_files.append(os.path.join(root, file))
                # When no files were found mp4_files is already [], so the
                # original 'if has_files else []' ternary was redundant.
                yield anime_name, mp4_files

    def __remove_year(self, input_string: str):
        """Strip a trailing '(YYYY)' year marker from a title."""
        cleaned_string = re.sub(r'\(\d{4}\)', '', input_string).strip()
        logging.debug(f"Removed year from '{input_string}' -> '{cleaned_string}'")
        return cleaned_string

    def __ReadDataFromFile(self, folder_name: str):
        """Build a Serie from a folder's 'key' or 'data' file; None if neither exists."""
        folder_path = os.path.join(self.directory, folder_name)
        key_file = os.path.join(folder_path, 'key')
        serie_file = os.path.join(folder_path, 'data')
        if os.path.exists(key_file):
            with open(key_file, 'r') as file:
                key = file.read().strip()
            logging.info(f"Key found for folder '{folder_name}': {key}")
            return Serie(key, "", "aniworld.to", folder_name, dict())
        if os.path.exists(serie_file):
            # FIX: the original opened the file in "rb" mode only to discard
            # the handle; Serie.load_from_file opens the path itself.
            logging.info(f"load serie_file from '{folder_name}': {serie_file}")
            return Serie.load_from_file(serie_file)
        return None

    def __GetEpisodeAndSeason(self, filename: str):
        """Extract (season, episode) ints from an 'SxxEyy' file name.

        Raises MatchNotFoundError when the pattern is absent.
        """
        match = re.search(r'S(\d+)E(\d+)', filename)
        if match:
            season = match.group(1)
            episode = match.group(2)
            # BUG FIX: the log messages printed the literal '(unknown)'
            # instead of the file name being parsed.
            logging.debug(f"Extracted season {season}, episode {episode} from '{filename}'")
            return int(season), int(episode)
        logging.error(f"Failed to find season/episode pattern in '{filename}'")
        raise MatchNotFoundError("Season and episode pattern not found in the filename.")

    def __GetEpisodesAndSeasons(self, mp4_files: list):
        """Group existing episode numbers by season: {season: [episodes]}."""
        episodes_dict = {}
        for file in mp4_files:
            season, episode = self.__GetEpisodeAndSeason(file)
            episodes_dict.setdefault(season, []).append(episode)
        return episodes_dict

    def __GetMissingEpisodesAndSeason(self, key: str, mp4_files: list):
        """Return ({season: [missing episode numbers]}, site).

        Compares the provider's expected per-season episode counts with the
        files present on disk; only episodes available in the configured
        language count as missing.
        """
        expected_dict = self.loader.get_season_episode_count(key)  # {season: episode count}
        filedict = self.__GetEpisodesAndSeasons(mp4_files)
        episodes_dict = {}
        for season, expected_count in expected_dict.items():
            existing_episodes = filedict.get(season, [])
            missing_episodes = [ep for ep in range(1, expected_count + 1)
                                if ep not in existing_episodes and self.loader.IsLanguage(season, ep, key)]
            if missing_episodes:
                episodes_dict[season] = missing_episodes
        return episodes_dict, "aniworld.to"
import os
import re
import logging
from .entities.series import Serie
import traceback
from ..infrastructure.logging.GlobalLogger import error_logger, noKeyFound_logger
from .exceptions.Exceptions import NoKeyFoundException, MatchNotFoundError
from .providers.base_provider import Loader
class SerieScanner:
    """Directory scanner: pairs each series folder with the episodes that are
    still missing according to the provider loader."""

    def __init__(self, basePath: str, loader: Loader):
        self.directory = basePath
        self.folderDict: dict[str, Serie] = {}  # filled by Scan(), keyed by serie.key
        self.loader = loader
        logging.info(f"Initialized Loader with base path: {self.directory}")

    def Reinit(self):
        # Forget everything from the previous scan.
        self.folderDict: dict[str, Serie] = {}

    def is_null_or_whitespace(self, s):
        # Equivalent of C#'s string.IsNullOrWhiteSpace.
        if s is None:
            return True
        return s.strip() == ""

    def GetTotalToScan(self):
        # Count the folders a full scan would visit.
        total = 0
        for _ in self.__find_mp4_files():
            total += 1
        return total

    def Scan(self, callback):
        """Visit every folder, compute and persist its missing episodes.

        ``callback(folder, counter)`` reports progress; individual folder
        errors are logged and the scan moves on.
        """
        logging.info("Starting process to load missing episodes")
        position = 0
        for folder, mp4_files in self.__find_mp4_files():
            try:
                position += 1
                callback(folder, position)
                serie = self.__ReadDataFromFile(folder)
                if serie is None or self.is_null_or_whitespace(serie.key):
                    continue
                missings, site = self.__GetMissingEpisodesAndSeason(serie.key, mp4_files)
                serie.episodeDict = missings
                serie.folder = folder
                serie.save_to_file(os.path.join(os.path.join(self.directory, folder), 'data'))
                if serie.key in self.folderDict:
                    # NOTE(review): logging.ERROR is an int constant, so this
                    # call raises TypeError which the broad except below
                    # swallows — looks like a bug; preserved as-is.
                    logging.ERROR(f"dublication found: {serie.key}")
                self.folderDict[serie.key] = serie
                noKeyFound_logger.info(f"Saved Serie: '{str(serie)}'")
            except NoKeyFoundException as nkfe:
                # NOTE(review): NoKeyFoundException has no .error attribute, so
                # this handler itself raises AttributeError; preserved as-is.
                NoKeyFoundException.error(f"Error processing folder '{folder}': {nkfe}")
            except Exception as e:
                error_logger.error(f"Folder: '{folder}' - Unexpected error processing folder '{folder}': {e} \n {traceback.format_exc()}")
                continue

    def __find_mp4_files(self):
        # Generator of (folder_name, list of .mp4 paths below that folder).
        logging.info("Scanning for .mp4 files")
        for entry in os.listdir(self.directory):
            series_path = os.path.join(self.directory, entry)
            if not os.path.isdir(series_path):
                continue
            found = [
                os.path.join(root, name)
                for root, _, names in os.walk(series_path)
                for name in names
                if name.endswith(".mp4")
            ]
            yield entry, found

    def __remove_year(self, input_string: str):
        # Drop a '(YYYY)' suffix such as 'Title (2020)'.
        cleaned_string = re.sub(r'\(\d{4}\)', '', input_string).strip()
        logging.debug(f"Removed year from '{input_string}' -> '{cleaned_string}'")
        return cleaned_string

    def __ReadDataFromFile(self, folder_name: str):
        # Prefer a plain-text 'key' file; fall back to a serialized 'data' file.
        folder_path = os.path.join(self.directory, folder_name)
        key_file = os.path.join(folder_path, 'key')
        serie_file = os.path.join(folder_path, 'data')
        if os.path.exists(key_file):
            with open(key_file, 'r') as handle:
                found_key = handle.read().strip()
            logging.info(f"Key found for folder '{folder_name}': {found_key}")
            return Serie(found_key, "", "aniworld.to", folder_name, dict())
        if os.path.exists(serie_file):
            with open(serie_file, "rb") as handle:
                logging.info(f"load serie_file from '{folder_name}': {serie_file}")
                return Serie.load_from_file(serie_file)
        return None

    def __GetEpisodeAndSeason(self, filename: str):
        # 'SxxEyy' anywhere in the name -> (season, episode) as ints.
        hit = re.search(r'S(\d+)E(\d+)', filename)
        if not hit:
            logging.error(f"Failed to find season/episode pattern in '(unknown)'")
            raise MatchNotFoundError("Season and episode pattern not found in the filename.")
        season = hit.group(1)
        episode = hit.group(2)
        logging.debug(f"Extracted season {season}, episode {episode} from '(unknown)'")
        return int(season), int(episode)

    def __GetEpisodesAndSeasons(self, mp4_files: []):
        # {season: [episode, ...]} for the files present on disk.
        seen = {}
        for path in mp4_files:
            season, episode = self.__GetEpisodeAndSeason(path)
            seen.setdefault(season, []).append(episode)
        return seen

    def __GetMissingEpisodesAndSeason(self, key: str, mp4_files: []):
        # Compare provider-side episode counts with what exists on disk.
        expected = self.loader.get_season_episode_count(key)
        on_disk = self.__GetEpisodesAndSeasons(mp4_files)
        missing = {}
        for season, count in expected.items():
            present = on_disk.get(season, [])
            gaps = [
                ep for ep in range(1, count + 1)
                if ep not in present and self.loader.IsLanguage(season, ep, key)
            ]
            if gaps:
                missing[season] = gaps
        return missing, "aniworld.to"

View File

@@ -1,38 +1,38 @@
from src.core.entities.SerieList import SerieList
from src.core.providers.provider_factory import Loaders
from src.core.SerieScanner import SerieScanner
class SeriesApp:
    """Application facade wiring the provider loader, the scanner and the
    series list together."""

    # How many instances have been created; only the first prints a hint.
    _initialization_count = 0

    def __init__(self, directory_to_search: str):
        SeriesApp._initialization_count += 1
        # Only show the initialization message for the first instance.
        if SeriesApp._initialization_count <= 1:
            print("Please wait while initializing...")
        self.progress = None
        self.directory_to_search = directory_to_search
        self.Loaders = Loaders()
        self.loader = self.Loaders.GetLoader(key="aniworld.to")
        self.SerieScanner = SerieScanner(directory_to_search, self.loader)
        self.List = SerieList(self.directory_to_search)
        self.__InitList__()

    def __InitList__(self):
        """Refresh the cached list of series that still miss episodes."""
        self.series_list = self.List.GetMissingEpisode()

    def search(self, words: str) -> list:
        """Run a provider search for *words*."""
        return self.loader.Search(words)

    def download(self, serieFolder: str, season: int, episode: int, key: str, callback) -> bool:
        """Download one German-Dub episode into the library.

        BUG FIX: the signature promises bool but the method always returned
        None; report completion explicitly (Download raises on failure).
        """
        self.loader.Download(self.directory_to_search, serieFolder, season, episode, key, "German Dub", callback)
        return True

    def ReScan(self, callback):
        """Re-scan the library from scratch and rebuild the missing list."""
        self.SerieScanner.Reinit()
        self.SerieScanner.Scan(callback)
        self.List = SerieList(self.directory_to_search)
        self.__InitList__()
from src.core.entities.SerieList import SerieList
from src.core.providers.provider_factory import Loaders
from src.core.SerieScanner import SerieScanner
class SeriesApp:
    """Facade tying together the provider loader, scanner and series list."""

    _initialization_count = 0  # instances created so far

    def __init__(self, directory_to_search: str):
        SeriesApp._initialization_count += 1
        if SeriesApp._initialization_count <= 1:
            # Only the very first instance prints the start-up hint.
            print("Please wait while initializing...")
        self.progress = None
        self.directory_to_search = directory_to_search
        self.Loaders = Loaders()
        self.loader = self.Loaders.GetLoader(key="aniworld.to")
        self.SerieScanner = SerieScanner(directory_to_search, self.loader)
        self.List = SerieList(self.directory_to_search)
        self.__InitList__()

    def __InitList__(self):
        # Cache the series that still miss episodes.
        self.series_list = self.List.GetMissingEpisode()

    def search(self, words: str) -> list:
        # Delegate straight to the provider.
        return self.loader.Search(words)

    def download(self, serieFolder: str, season: int, episode: int, key: str, callback) -> bool:
        # NOTE(review): annotated -> bool but no value is returned — confirm
        # whether callers rely on the (falsy) None; preserved as-is.
        self.loader.Download(self.directory_to_search, serieFolder, season, episode, key, "German Dub", callback)

    def ReScan(self, callback):
        # Full rescan: reset scanner state, walk the library, reload the list.
        self.SerieScanner.Reinit()
        self.SerieScanner.Scan(callback)
        self.List = SerieList(self.directory_to_search)
        self.__InitList__()

View File

@@ -1,12 +1,12 @@
"""
Core module for AniWorld application.
Contains domain entities, interfaces, application services, and exceptions.
"""
from . import entities
from . import exceptions
from . import interfaces
from . import application
from . import providers
"""
Core module for AniWorld application.
Contains domain entities, interfaces, application services, and exceptions.
"""
from . import entities
from . import exceptions
from . import interfaces
from . import application
from . import providers
__all__ = ['entities', 'exceptions', 'interfaces', 'application', 'providers']

View File

@@ -1,56 +1,56 @@
import os
import json
import logging
from .series import Serie
class SerieList:
    """In-memory collection of Serie objects loaded from the per-folder
    'data' files under a base directory."""

    def __init__(self, basePath: str):
        self.directory = basePath
        self.folderDict: dict[str, Serie] = {}  # keyed by folder name
        self.load_series()

    def add(self, serie: Serie):
        """Register *serie*, creating its folder and persisting 'data' if new."""
        if not self.contains(serie.key):
            dataPath = os.path.join(self.directory, serie.folder, "data")
            animePath = os.path.join(self.directory, serie.folder)
            os.makedirs(animePath, exist_ok=True)
            if not os.path.isfile(dataPath):
                serie.save_to_file(dataPath)
            self.folderDict[serie.folder] = serie

    def contains(self, key: str) -> bool:
        """True if any stored serie carries this provider key."""
        return any(value.key == key for value in self.folderDict.values())

    def load_series(self):
        """Scan sub-folders and load every 'data' file found.

        BUG FIX: an unreachable or missing library directory previously
        raised FileNotFoundError from os.listdir() and aborted application
        start-up (see the server traceback); it is now logged and the list
        is simply left empty.
        """
        logging.info(f"Scanning anime folders in: {self.directory}")
        if not os.path.isdir(self.directory):
            logging.error(f"Series directory not found or not accessible: {self.directory}")
            return
        for anime_folder in os.listdir(self.directory):
            anime_path = os.path.join(self.directory, anime_folder, "data")
            if os.path.isfile(anime_path):
                logging.debug(f"Found data folder: {anime_path}")
                self.load_data(anime_folder, anime_path)
            else:
                logging.warning(f"Skipping {anime_folder} - No data folder found")

    def load_data(self, anime_folder, data_path):
        """Deserialize one 'data' file; failures are logged, not raised."""
        try:
            self.folderDict[anime_folder] = Serie.load_from_file(data_path)
            logging.debug(f"Successfully loaded {data_path} for {anime_folder}")
        except Exception as e:
            logging.error(f"Failed to load {data_path} in {anime_folder}: {e}")

    def GetMissingEpisode(self):
        """All series that still have missing episodes recorded."""
        return [serie for serie in self.folderDict.values() if len(serie.episodeDict) > 0]

    def GetList(self):
        """Every loaded serie."""
        return list(self.folderDict.values())
#k = AnimeList("\\\\sshfs.r\\ubuntu@192.168.178.43\\media\\serien\\Serien")
#bbabab = k.GetMissingEpisode()
import os
import json
import logging
from .series import Serie
class SerieList:
    """Collection of Serie objects discovered under a base directory."""

    def __init__(self, basePath: str):
        self.directory = basePath
        self.folderDict: dict[str, Serie] = {}  # folder name -> Serie
        self.load_series()

    def add(self, serie: Serie):
        # Persist and register a serie unless its key is already known.
        if self.contains(serie.key):
            return
        target_dir = os.path.join(self.directory, serie.folder)
        data_file = os.path.join(target_dir, "data")
        os.makedirs(target_dir, exist_ok=True)
        if not os.path.isfile(data_file):
            serie.save_to_file(data_file)
        self.folderDict[serie.folder] = serie

    def contains(self, key: str) -> bool:
        # Linear search: provider keys live on the values, not the dict keys.
        for candidate in self.folderDict.values():
            if candidate.key == key:
                return True
        return False

    def load_series(self):
        """Scan folders and load data files."""
        logging.info(f"Scanning anime folders in: {self.directory}")
        for entry in os.listdir(self.directory):
            data_file = os.path.join(self.directory, entry, "data")
            if not os.path.isfile(data_file):
                logging.warning(f"Skipping {entry} - No data folder found")
                continue
            logging.debug(f"Found data folder: {data_file}")
            self.load_data(entry, data_file)

    def load_data(self, anime_folder, data_path):
        """Deserialize one 'data' file; log (don't raise) on failure."""
        try:
            self.folderDict[anime_folder] = Serie.load_from_file(data_path)
            logging.debug(f"Successfully loaded {data_path} for {anime_folder}")
        except Exception as e:
            logging.error(f"Failed to load {data_path} in {anime_folder}: {e}")

    def GetMissingEpisode(self):
        """Series that still have entries in their episodeDict."""
        return [s for s in self.folderDict.values() if len(s.episodeDict) > 0]

    def GetList(self):
        """Every serie currently held by the list."""
        return list(self.folderDict.values())
#k = AnimeList("\\\\sshfs.r\\ubuntu@192.168.178.43\\media\\serien\\Serien")
#bbabab = k.GetMissingEpisode()
#print(bbabab)

View File

@@ -1,82 +1,82 @@
import json
class Serie:
    """Value object describing one tracked series: provider key, display
    name, source site, library folder, and missing episodes per season."""

    def __init__(self, key: str, name: str, site: str, folder: str, episodeDict: dict[int, list[int]]):
        self._key = key
        self._name = name
        self._site = site
        self._folder = folder
        self._episodeDict = episodeDict

    def __str__(self):
        """String representation of Serie object."""
        return f"Serie(key='{self.key}', name='{self.name}', site='{self.site}', folder='{self.folder}', episodeDict={self.episodeDict})"

    @property
    def key(self) -> str:
        return self._key

    @key.setter
    def key(self, value: str):
        self._key = value

    @property
    def name(self) -> str:
        return self._name

    @name.setter
    def name(self, value: str):
        self._name = value

    @property
    def site(self) -> str:
        return self._site

    @site.setter
    def site(self, value: str):
        self._site = value

    @property
    def folder(self) -> str:
        return self._folder

    @folder.setter
    def folder(self, value: str):
        self._folder = value

    @property
    def episodeDict(self) -> dict[int, list[int]]:
        return self._episodeDict

    @episodeDict.setter
    def episodeDict(self, value: dict[int, list[int]]):
        self._episodeDict = value

    def to_dict(self):
        """Convert Serie object to a JSON-serializable dict (season keys -> str)."""
        return {
            "key": self.key,
            "name": self.name,
            "site": self.site,
            "folder": self.folder,
            "episodeDict": {str(k): list(v) for k, v in self.episodeDict.items()}
        }

    @staticmethod
    def from_dict(data: dict):
        """Create a Serie from a dict produced by to_dict()."""
        episode_dict = {int(k): v for k, v in data["episodeDict"].items()}  # keys back to int
        return Serie(data["key"], data["name"], data["site"], data["folder"], episode_dict)

    def save_to_file(self, filename: str):
        """Save Serie object to a JSON file."""
        with open(filename, "w") as file:
            json.dump(self.to_dict(), file, indent=4)

    @classmethod
    def load_from_file(cls, filename: str) -> "Serie":
        """Load Serie object from a JSON file.

        BUG FIX: this copy of the method dropped the final return statement,
        so it always returned None; return the constructed instance.
        """
        with open(filename, "r") as file:
            data = json.load(file)
        return cls.from_dict(data)
import json
class Serie:
    """Plain, JSON-serializable data holder for one tracked series."""

    def __init__(self, key: str, name: str, site: str, folder: str, episodeDict: dict[int, list[int]]):
        self._key = key
        self._name = name
        self._site = site
        self._folder = folder
        self._episodeDict = episodeDict

    def __str__(self):
        # Keep this exact format: it ends up in log files.
        return (
            f"Serie(key='{self.key}', name='{self.name}', site='{self.site}', "
            f"folder='{self.folder}', episodeDict={self.episodeDict})"
        )

    @property
    def key(self) -> str:
        return self._key

    @key.setter
    def key(self, value: str):
        self._key = value

    @property
    def name(self) -> str:
        return self._name

    @name.setter
    def name(self, value: str):
        self._name = value

    @property
    def site(self) -> str:
        return self._site

    @site.setter
    def site(self, value: str):
        self._site = value

    @property
    def folder(self) -> str:
        return self._folder

    @folder.setter
    def folder(self, value: str):
        self._folder = value

    @property
    def episodeDict(self) -> dict[int, list[int]]:
        return self._episodeDict

    @episodeDict.setter
    def episodeDict(self, value: dict[int, list[int]]):
        self._episodeDict = value

    def to_dict(self):
        """JSON-ready dict; season keys become strings."""
        serialized_episodes = {}
        for season, episodes in self.episodeDict.items():
            serialized_episodes[str(season)] = list(episodes)
        return {
            "key": self.key,
            "name": self.name,
            "site": self.site,
            "folder": self.folder,
            "episodeDict": serialized_episodes,
        }

    @staticmethod
    def from_dict(data: dict):
        """Inverse of to_dict(); season keys are converted back to int."""
        episodes = {}
        for season, eps in data["episodeDict"].items():
            episodes[int(season)] = eps
        return Serie(data["key"], data["name"], data["site"], data["folder"], episodes)

    def save_to_file(self, filename: str):
        """Write this serie as pretty-printed JSON."""
        with open(filename, "w") as handle:
            json.dump(self.to_dict(), handle, indent=4)

    @classmethod
    def load_from_file(cls, filename: str) -> "Serie":
        """Read a serie back from a JSON file written by save_to_file()."""
        with open(filename, "r") as handle:
            return cls.from_dict(json.load(handle))

View File

@@ -1,7 +1,7 @@
class NoKeyFoundException(Exception):
    """Exception raised when an anime key cannot be found."""
    pass


class MatchNotFoundError(Exception):
    """Raised when a filename does not contain the expected 'SxxEyy'
    season/episode pattern (see SerieScanner).

    BUG FIX (docs only): the docstring was a copy-paste of
    NoKeyFoundException's and described the wrong condition.
    """
    pass
class NoKeyFoundException(Exception):
    """Raised when an anime key cannot be found."""


class MatchNotFoundError(Exception):
    """Raised when the season/episode pattern cannot be matched in a filename."""

View File

@@ -1,11 +1,11 @@
from ..providers.streaming.Provider import Provider
from ..providers.streaming.voe import VOE
class Providers:
    """Registry of streaming-host implementations, addressed by name."""

    def __init__(self):
        # Only VOE is currently wired up.
        self.dict = {"VOE": VOE()}

    def GetProvider(self, key: str) -> Provider:
        """Return the provider registered under *key* (KeyError if unknown)."""
        return self.dict[key]
from ..providers.streaming.Provider import Provider
from ..providers.streaming.voe import VOE
class Providers:
    """Name -> streaming-provider lookup table."""

    def __init__(self):
        self.dict = {"VOE": VOE()}  # extend here when more hosts are supported

    def GetProvider(self, key: str) -> Provider:
        # Raises KeyError for unregistered provider names.
        return self.dict[key]

View File

@@ -1,343 +1,343 @@
import os
import re
import logging
import json
import requests
import html
from urllib.parse import quote
from bs4 import BeautifulSoup
from fake_useragent import UserAgent
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry
from .base_provider import Loader
from ..interfaces.providers import Providers
from yt_dlp import YoutubeDL
import shutil
# Read timeout from environment variable, default to 600 seconds (10 minutes)
timeout = int(os.getenv("DOWNLOAD_TIMEOUT", 600))

download_error_logger = logging.getLogger("DownloadErrors")
download_error_handler = logging.FileHandler("../../download_errors.log")
download_error_handler.setLevel(logging.ERROR)
# BUG FIX: the handler was created but never attached to the logger, so
# download errors were silently dropped.
download_error_logger.addHandler(download_error_handler)

noKeyFound_logger = logging.getLogger("NoKeyFound")
noKeyFound_handler = logging.FileHandler("../../NoKeyFound.log")
noKeyFound_handler.setLevel(logging.ERROR)
# BUG FIX: same as above — attach the handler so records are written.
noKeyFound_logger.addHandler(noKeyFound_handler)
class AniworldLoader(Loader):
    """aniworld.to provider: series search, episode metadata, and episode
    download via yt-dlp using the VOE streaming host."""

    def __init__(self):
        self.SUPPORTED_PROVIDERS = ["VOE", "Doodstream", "Vidmoly", "Vidoza", "SpeedFiles", "Streamtape", "Luluvdo"]
        # Browser-like headers for regular page fetches.
        self.AniworldHeaders = {
            "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8",
            "accept-encoding": "gzip, deflate, br, zstd",
            "accept-language": "de,de-DE;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6",
            "cache-control": "max-age=0",
            "priority": "u=0, i",
            "sec-ch-ua": '"Chromium";v="136", "Microsoft Edge";v="136", "Not.A/Brand";v="99"',
            "sec-ch-ua-mobile": "?0",
            "sec-ch-ua-platform": '"Windows"',
            "sec-fetch-dest": "document",
            "sec-fetch-mode": "navigate",
            "sec-fetch-site": "none",
            "sec-fetch-user": "?1",
            "upgrade-insecure-requests": "1",
            "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36 Edg/136.0.0.0"
        }
        # Characters that must not appear in Windows file names.
        self.INVALID_PATH_CHARS = ['<', '>', ':', '"', '/', '\\', '|', '?', '*', '&']
        self.RANDOM_USER_AGENT = UserAgent().random
        self.LULUVDO_USER_AGENT = "Mozilla/5.0 (Android 15; Mobile; rv:132.0) Gecko/132.0 Firefox/132.0"
        # Extra headers some hosts require before serving the stream.
        self.PROVIDER_HEADERS = {
            "Vidmoly": ['Referer: "https://vidmoly.to"'],
            "Doodstream": ['Referer: "https://dood.li/"'],
            "VOE": [f'User-Agent: {self.RANDOM_USER_AGENT}'],
            "Luluvdo": [
                f'User-Agent: {self.LULUVDO_USER_AGENT}',
                'Accept-Language: de-DE,de;q=0.9,en-US;q=0.8,en;q=0.7',
                'Origin: "https://luluvdo.com"',
                'Referer: "https://luluvdo.com/"'
            ]}
        self.ANIWORLD_TO = "https://aniworld.to"
        self.session = requests.Session()
        # Retry transient server errors with exponential backoff (1s, 2s, 4s, ...).
        retries = Retry(
            total=5,
            backoff_factor=1,
            status_forcelist=[500, 502, 503, 504],
            allowed_methods=["GET"]
        )
        self.session.mount("https://", HTTPAdapter(max_retries=retries))
        self.DEFAULT_REQUEST_TIMEOUT = 30
        self._KeyHTMLDict = {}       # series page cache: key -> Response
        self._EpisodeHTMLDict = {}   # episode page cache: (key, season, episode) -> Response
        self.Providers = Providers()

    def ClearCache(self):
        """Drop all cached series and episode pages."""
        self._KeyHTMLDict = {}
        self._EpisodeHTMLDict = {}

    def RemoveFromCache(self):
        """Drop cached episode pages only (series pages are kept)."""
        self._EpisodeHTMLDict = {}

    def Search(self, word: str) -> list:
        """Query aniworld's series search endpoint for *word*."""
        search_url = f"{self.ANIWORLD_TO}/ajax/seriesSearch?keyword={quote(word)}"
        return self.fetch_anime_list(search_url)

    def fetch_anime_list(self, url: str) -> list:
        """Fetch and decode the JSON search response, tolerating bad encodings.

        Raises ValueError when the payload cannot be parsed at all.
        """
        response = self.session.get(url, timeout=self.DEFAULT_REQUEST_TIMEOUT)
        response.raise_for_status()
        clean_text = response.text.strip()
        try:
            decoded_data = json.loads(html.unescape(clean_text))
            return decoded_data if isinstance(decoded_data, list) else []
        except json.JSONDecodeError:
            try:
                # Remove BOM, then control characters that break json.loads.
                clean_text = clean_text.encode('utf-8').decode('utf-8-sig')
                clean_text = re.sub(r'[\x00-\x1F\x7F-\x9F]', '', clean_text)
                decoded_data = json.loads(clean_text)
                return decoded_data if isinstance(decoded_data, list) else []
            except (requests.RequestException, json.JSONDecodeError) as exc:
                raise ValueError("Could not get valid anime: ") from exc

    def _GetLanguageKey(self, language: str) -> int:
        """Map a language label to aniworld's numeric code (0 = unknown)."""
        if language == "German Dub":
            return 1
        if language == "English Sub":
            return 2
        if language == "German Sub":
            return 3
        return 0

    def IsLanguage(self, season: int, episode: int, key: str, language: str = "German Dub") -> bool:
        """True when the episode page offers *language*.

        Language codes: 1 = German Dub, 2 = English Sub, 3 = German Sub.
        """
        languageCode = self._GetLanguageKey(language)
        episode_soup = BeautifulSoup(self._GetEpisodeHTML(season, episode, key).content, 'html.parser')
        change_language_box_div = episode_soup.find('div', class_='changeLanguageBox')
        languages = []
        if change_language_box_div:
            for img in change_language_box_div.find_all('img'):
                lang_key = img.get('data-lang-key')
                if lang_key and lang_key.isdigit():
                    languages.append(int(lang_key))
        return languageCode in languages

    def Download(self, baseDirectory: str, serieFolder: str, season: int, episode: int, key: str, language: str = "German Dub", progress_callback: callable = None) -> bool:
        """Download one episode into '<base>/<folder>/Season <n>/'.

        The file is first written to ./Temp/ and only copied into the library
        when the download finished, so aborted downloads never leave partial
        files behind.  Returns True when a file was produced.
        """
        sanitized_anime_title = ''.join(
            char for char in self.GetTitle(key) if char not in self.INVALID_PATH_CHARS
        )
        if season == 0:
            # Season 0 is used for movies.
            output_file = (
                f"{sanitized_anime_title} - "
                f"Movie {episode:02} - "
                f"({language}).mp4"
            )
        else:
            output_file = (
                f"{sanitized_anime_title} - "
                f"S{season:02}E{episode:03} - "
                f"({language}).mp4"
            )
        folderPath = os.path.join(baseDirectory, serieFolder, f"Season {season}")
        output_path = os.path.join(folderPath, output_file)
        os.makedirs(os.path.dirname(output_path), exist_ok=True)
        temp_dir = "./Temp/"
        os.makedirs(os.path.dirname(temp_dir), exist_ok=True)
        temp_Path = os.path.join(temp_dir, output_file)
        success = False
        for provider in self.SUPPORTED_PROVIDERS:
            # NOTE(review): `provider` is unused — resolution below always goes
            # through VOE; loop kept so the retry count matches the list.
            direct = self._get_direct_link_from_provider(season, episode, key, language)
            if direct is None:
                # BUG FIX: a missing link previously caused a TypeError when
                # unpacking None; skip and retry instead.
                continue
            link, header = direct
            ydl_opts = {
                'fragment_retries': float('inf'),
                'outtmpl': temp_Path,
                'quiet': True,
                'no_warnings': True,
                'progress_with_newline': False,
                'nocheckcertificate': True,
            }
            if header:
                ydl_opts['http_headers'] = header
            if progress_callback:
                ydl_opts['progress_hooks'] = [progress_callback]
            with YoutubeDL(ydl_opts) as ydl:
                ydl.download([link])
            if os.path.exists(temp_Path):
                shutil.copy(temp_Path, output_path)
                os.remove(temp_Path)
                success = True
                break
        self.ClearCache()
        # BUG FIX: declared -> bool but previously always returned None.
        return success

    def GetSiteKey(self) -> str:
        """Identifier of the site this loader serves."""
        return "aniworld.to"

    def GetTitle(self, key: str) -> str:
        """Series display title from the series page ('' when not found)."""
        soup = BeautifulSoup(self._GetKeyHTML(key).content, 'html.parser')
        title_div = soup.find('div', class_='series-title')
        if title_div:
            return title_div.find('h1').find('span').text
        return ""

    def _GetKeyHTML(self, key: str):
        """Fetch (and cache) the series overview page for *key*."""
        if key not in self._KeyHTMLDict:
            self._KeyHTMLDict[key] = self.session.get(
                f"{self.ANIWORLD_TO}/anime/stream/{key}",
                timeout=self.DEFAULT_REQUEST_TIMEOUT
            )
        return self._KeyHTMLDict[key]

    def _GetEpisodeHTML(self, season: int, episode: int, key: str):
        """Fetch (and cache) a single episode page."""
        cache_key = (key, season, episode)
        # BUG FIX: the cache was probed with `key` alone although entries are
        # stored under (key, season, episode), so every call re-fetched.
        if cache_key not in self._EpisodeHTMLDict:
            link = (
                f"{self.ANIWORLD_TO}/anime/stream/{key}/"
                f"staffel-{season}/episode-{episode}"
            )
            # Renamed local (was `html`) — it shadowed the imported html module.
            response = self.session.get(link, timeout=self.DEFAULT_REQUEST_TIMEOUT)
            self._EpisodeHTMLDict[cache_key] = response
        return self._EpisodeHTMLDict[cache_key]

    def _get_provider_from_html(self, season: int, episode: int, key: str) -> dict:
        """Parse the episode page into {provider: {lang_code: redirect_url}}.

        Example:
            {'VOE': {1: 'https://aniworld.to/redirect/1766412',
                     2: 'https://aniworld.to/redirect/1766405'}, ...}
        """
        soup = BeautifulSoup(self._GetEpisodeHTML(season, episode, key).content, 'html.parser')
        providers = {}
        episode_links = soup.find_all(
            'li', class_=lambda x: x and x.startswith('episodeLink')
        )
        for link in episode_links:
            provider_name_tag = link.find('h4')
            provider_name = provider_name_tag.text.strip() if provider_name_tag else None
            redirect_link_tag = link.find('a', class_='watchEpisode')
            redirect_link = redirect_link_tag['href'] if redirect_link_tag else None
            lang_key = link.get('data-lang-key')
            lang_key = int(lang_key) if lang_key and lang_key.isdigit() else None
            if provider_name and redirect_link and lang_key:
                providers.setdefault(provider_name, {})[lang_key] = f"{self.ANIWORLD_TO}{redirect_link}"
        return providers

    def _get_redirect_link(self, season: int, episode: int, key: str, language: str = "German Dub"):
        """Return (redirect_url, provider_name) for the first provider that
        offers *language*, or None when unavailable.

        BUG FIX: removed an unreachable `break` after the return and fixed
        the misleading -> str annotation (a tuple is returned).
        """
        languageCode = self._GetLanguageKey(language)
        if self.IsLanguage(season, episode, key, language):
            for provider_name, lang_dict in self._get_provider_from_html(season, episode, key).items():
                if languageCode in lang_dict:
                    return lang_dict[languageCode], provider_name
        return None

    def _get_embeded_link(self, season: int, episode: int, key: str, language: str = "German Dub"):
        """Resolve the aniworld redirect to the host's embed URL (None if none)."""
        redirect = self._get_redirect_link(season, episode, key, language)
        if redirect is None:
            # BUG FIX: unpacking None raised TypeError; now the None check in
            # _get_direct_link_from_provider is actually reachable.
            return None
        redirect_link, _provider_name = redirect
        return self.session.get(
            redirect_link, timeout=self.DEFAULT_REQUEST_TIMEOUT,
            headers={'User-Agent': self.RANDOM_USER_AGENT}).url

    def _get_direct_link_from_provider(self, season: int, episode: int, key: str, language: str = "German Dub"):
        """Turn an embed URL into a direct media link via the VOE extractor.

        Returns None when no embed link could be resolved.
        """
        embeded_link = self._get_embeded_link(season, episode, key, language)
        if embeded_link is None:
            return None
        return self.Providers.GetProvider("VOE").GetLink(embeded_link, self.DEFAULT_REQUEST_TIMEOUT)

    def get_season_episode_count(self, slug: str) -> dict:
        """{season: episode count} scraped from the per-season pages.

        Consistency FIX: uses self.session (which carries the retry policy)
        instead of bare requests.get.
        """
        base_url = f"{self.ANIWORLD_TO}/anime/stream/{slug}/"
        response = self.session.get(base_url, timeout=self.DEFAULT_REQUEST_TIMEOUT)
        soup = BeautifulSoup(response.content, 'html.parser')
        season_meta = soup.find('meta', itemprop='numberOfSeasons')
        number_of_seasons = int(season_meta['content']) if season_meta else 0
        episode_counts = {}
        for season in range(1, number_of_seasons + 1):
            season_url = f"{base_url}staffel-{season}"
            response = self.session.get(season_url, timeout=self.DEFAULT_REQUEST_TIMEOUT)
            soup = BeautifulSoup(response.content, 'html.parser')
            unique_links = set(
                link['href']
                for link in soup.find_all('a', href=True)
                if f"staffel-{season}/episode-" in link['href']
            )
            episode_counts[season] = len(unique_links)
        return episode_counts
import os
import re
import logging
import json
import requests
import html
from urllib.parse import quote
from bs4 import BeautifulSoup
from fake_useragent import UserAgent
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry
from .base_provider import Loader
from ..interfaces.providers import Providers
from yt_dlp import YoutubeDL
import shutil
# Download timeout in seconds; override via the DOWNLOAD_TIMEOUT env var
# (default: 600 = 10 minutes).
timeout = int(os.getenv("DOWNLOAD_TIMEOUT", 600))

download_error_logger = logging.getLogger("DownloadErrors")
download_error_handler = logging.FileHandler("../../download_errors.log")
download_error_handler.setLevel(logging.ERROR)

noKeyFound_logger = logging.getLogger("NoKeyFound")
noKeyFound_handler = logging.FileHandler("../../NoKeyFound.log")
noKeyFound_handler.setLevel(logging.ERROR)
# NOTE(review): neither handler is ever attached to its logger here, so
# records go nowhere — looks like a bug; preserved as-is.
class AniworldLoader(Loader):
    """Loader implementation for aniworld.to.

    Handles series search, language availability checks, per-episode HTML
    caching, provider/redirect resolution and episode downloads via yt-dlp.
    """

    def __init__(self):
        # Providers attempted (in order) by Download().
        self.SUPPORTED_PROVIDERS = ["VOE", "Doodstream", "Vidmoly", "Vidoza", "SpeedFiles", "Streamtape", "Luluvdo"]
        # Browser-like headers for aniworld.to requests.
        self.AniworldHeaders = {
            "accept": "text/html,application/xhtml+xml,application/xml;q=0.9,image/avif,image/webp,image/apng,*/*;q=0.8",
            "accept-encoding": "gzip, deflate, br, zstd",
            "accept-language": "de,de-DE;q=0.9,en;q=0.8,en-GB;q=0.7,en-US;q=0.6",
            "cache-control": "max-age=0",
            "priority": "u=0, i",
            "sec-ch-ua": '"Chromium";v="136", "Microsoft Edge";v="136", "Not.A/Brand";v="99"',
            "sec-ch-ua-mobile": "?0",
            "sec-ch-ua-platform": '"Windows"',
            "sec-fetch-dest": "document",
            "sec-fetch-mode": "navigate",
            "sec-fetch-site": "none",
            "sec-fetch-user": "?1",
            "upgrade-insecure-requests": "1",
            "user-agent": "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/136.0.0.0 Safari/537.36 Edg/136.0.0.0"
        }
        # Characters stripped from titles when building file names.
        self.INVALID_PATH_CHARS = ['<', '>', ':', '"', '/', '\\', '|', '?', '*', '&']
        self.RANDOM_USER_AGENT = UserAgent().random
        self.LULUVDO_USER_AGENT = "Mozilla/5.0 (Android 15; Mobile; rv:132.0) Gecko/132.0 Firefox/132.0"
        # Extra HTTP headers yt-dlp needs per provider.
        self.PROVIDER_HEADERS = {
            "Vidmoly": ['Referer: "https://vidmoly.to"'],
            "Doodstream": ['Referer: "https://dood.li/"'],
            "VOE": [f'User-Agent: {self.RANDOM_USER_AGENT}'],
            "Luluvdo": [
                f'User-Agent: {self.LULUVDO_USER_AGENT}',
                'Accept-Language: de-DE,de;q=0.9,en-US;q=0.8,en;q=0.7',
                'Origin: "https://luluvdo.com"',
                'Referer: "https://luluvdo.com/"'
            ]}
        self.ANIWORLD_TO = "https://aniworld.to"
        self.session = requests.Session()
        # Configure retries with backoff
        retries = Retry(
            total=5,  # Number of retries
            backoff_factor=1,  # Delay multiplier (1s, 2s, 4s, ...)
            status_forcelist=[500, 502, 503, 504],  # Retry for specific HTTP errors
            allowed_methods=["GET"]
        )
        adapter = HTTPAdapter(max_retries=retries)
        self.session.mount("https://", adapter)
        self.DEFAULT_REQUEST_TIMEOUT = 30
        # HTML caches: series page by key, episode pages by (key, season, episode).
        self._KeyHTMLDict = {}
        self._EpisodeHTMLDict = {}
        self.Providers = Providers()

    def ClearCache(self):
        """Drop all cached series and episode HTML."""
        self._KeyHTMLDict = {}
        self._EpisodeHTMLDict = {}

    def RemoveFromCache(self):
        """Drop only the cached episode HTML (series pages stay cached)."""
        self._EpisodeHTMLDict = {}

    def Search(self, word: str) -> list:
        """Search aniworld.to for *word* and return the raw result list."""
        search_url = f"{self.ANIWORLD_TO}/ajax/seriesSearch?keyword={quote(word)}"
        anime_list = self.fetch_anime_list(search_url)
        return anime_list

    def fetch_anime_list(self, url: str) -> list:
        """Fetch and JSON-decode the search endpoint, tolerating a BOM and
        stray control characters in the response.

        Raises ValueError when the payload cannot be decoded at all.
        """
        response = self.session.get(url, timeout=self.DEFAULT_REQUEST_TIMEOUT)
        response.raise_for_status()
        clean_text = response.text.strip()
        try:
            decoded_data = json.loads(html.unescape(clean_text))
            return decoded_data if isinstance(decoded_data, list) else []
        except json.JSONDecodeError:
            try:
                # Remove BOM and problematic characters
                clean_text = clean_text.encode('utf-8').decode('utf-8-sig')
                # Remove problematic characters
                clean_text = re.sub(r'[\x00-\x1F\x7F-\x9F]', '', clean_text)
                # Parse the new text
                decoded_data = json.loads(clean_text)
                return decoded_data if isinstance(decoded_data, list) else []
            except (requests.RequestException, json.JSONDecodeError) as exc:
                raise ValueError("Could not get valid anime: ") from exc

    def _GetLanguageKey(self, language: str) -> int:
        """Map a language name to aniworld's numeric code (0 = unknown)."""
        languageCode = 0
        if (language == "German Dub"):
            languageCode = 1
        if (language == "English Sub"):
            languageCode = 2
        if (language == "German Sub"):
            languageCode = 3
        return languageCode

    def IsLanguage(self, season: int, episode: int, key: str, language: str = "German Dub") -> bool:
        """Return True if the episode page offers *language*.

        Language Codes:
        1: German Dub
        2: English Sub
        3: German Sub
        """
        languageCode = self._GetLanguageKey(language)
        episode_soup = BeautifulSoup(self._GetEpisodeHTML(season, episode, key).content, 'html.parser')
        change_language_box_div = episode_soup.find(
            'div', class_='changeLanguageBox')
        languages = []
        if change_language_box_div:
            img_tags = change_language_box_div.find_all('img')
            for img in img_tags:
                lang_key = img.get('data-lang-key')
                if lang_key and lang_key.isdigit():
                    languages.append(int(lang_key))
        return languageCode in languages

    def Download(self, baseDirectory: str, serieFolder: str, season: int, episode: int, key: str, language: str = "German Dub", progress_callback: callable = None) -> bool:
        """Download one episode into baseDirectory/serieFolder/Season <n>/.

        Tries the supported providers in order until one yields a file.
        Returns True when the output file was written, False otherwise.
        """
        sanitized_anime_title = ''.join(
            char for char in self.GetTitle(key) if char not in self.INVALID_PATH_CHARS
        )
        if season == 0:
            # Season 0 is used for movies.
            output_file = (
                f"{sanitized_anime_title} - "
                f"Movie {episode:02} - "
                f"({language}).mp4"
            )
        else:
            output_file = (
                f"{sanitized_anime_title} - "
                f"S{season:02}E{episode:03} - "
                f"({language}).mp4"
            )
        folderPath = os.path.join(os.path.join(baseDirectory, serieFolder), f"Season {season}")
        output_path = os.path.join(folderPath, output_file)
        os.makedirs(os.path.dirname(output_path), exist_ok=True)
        temp_dir = "./Temp/"
        os.makedirs(os.path.dirname(temp_dir), exist_ok=True)
        temp_Path = os.path.join(temp_dir, output_file)
        success = False
        for provider in self.SUPPORTED_PROVIDERS:
            result = self._get_direct_link_from_provider(season, episode, key, language)
            # BUG FIX: the result was unpacked unconditionally before;
            # a missing language/provider returns None and crashed the unpack.
            if result is None:
                continue
            link, header = result
            if link is None:
                continue
            ydl_opts = {
                'fragment_retries': float('inf'),
                'outtmpl': temp_Path,
                'quiet': True,
                'no_warnings': True,
                'progress_with_newline': False,
                'nocheckcertificate': True,
            }
            if header:
                ydl_opts['http_headers'] = header
            if progress_callback:
                ydl_opts['progress_hooks'] = [progress_callback]
            with YoutubeDL(ydl_opts) as ydl:
                ydl.download([link])
            if (os.path.exists(temp_Path)):
                # Copy+remove instead of move so cross-device temp dirs work.
                shutil.copy(temp_Path, output_path)
                os.remove(temp_Path)
                success = True
                break
        self.ClearCache()
        # BUG FIX: the method was annotated -> bool but never returned a value.
        return success

    def GetSiteKey(self) -> str:
        """Unique identifier of this loader's site."""
        return "aniworld.to"

    def GetTitle(self, key: str) -> str:
        """Return the series title for *key*, or "" when not found."""
        soup = BeautifulSoup(self._GetKeyHTML(key).content, 'html.parser')
        title_div = soup.find('div', class_='series-title')
        if title_div:
            return title_div.find('h1').find('span').text
        return ""

    def _GetKeyHTML(self, key: str):
        """Fetch (and cache) the series page response for *key*."""
        if key in self._KeyHTMLDict:
            return self._KeyHTMLDict[key]
        self._KeyHTMLDict[key] = self.session.get(
            f"{self.ANIWORLD_TO}/anime/stream/{key}",
            timeout=self.DEFAULT_REQUEST_TIMEOUT
        )
        return self._KeyHTMLDict[key]

    def _GetEpisodeHTML(self, season: int, episode: int, key: str):
        """Fetch (and cache) the episode page response."""
        cache_key = (key, season, episode)
        # BUG FIX: the lookup previously tested `key in self._EpisodeHTMLDict`
        # while entries are stored under the (key, season, episode) tuple,
        # so the cache never produced a hit and every call re-fetched.
        if cache_key in self._EpisodeHTMLDict:
            return self._EpisodeHTMLDict[cache_key]
        link = (
            f"{self.ANIWORLD_TO}/anime/stream/{key}/"
            f"staffel-{season}/episode-{episode}"
        )
        html = self.session.get(link, timeout=self.DEFAULT_REQUEST_TIMEOUT)
        self._EpisodeHTMLDict[cache_key] = html
        return self._EpisodeHTMLDict[cache_key]

    def _get_provider_from_html(self, season: int, episode: int, key: str) -> dict:
        """
        Parses the HTML content to extract streaming providers,
        their language keys, and redirect links.
        Returns a dictionary with provider names as keys
        and language key-to-redirect URL mappings as values.
        Example:
        {
            'VOE': {1: 'https://aniworld.to/redirect/1766412',
                    2: 'https://aniworld.to/redirect/1766405'},
            'Doodstream': {1: 'https://aniworld.to/redirect/1987922',
                           2: 'https://aniworld.to/redirect/2700342'},
            ...
        }
        Access redirect link with:
        print(self.provider["VOE"][2])
        """
        soup = BeautifulSoup(self._GetEpisodeHTML(season, episode, key).content, 'html.parser')
        providers = {}
        episode_links = soup.find_all(
            'li', class_=lambda x: x and x.startswith('episodeLink')
        )
        if not episode_links:
            return providers
        for link in episode_links:
            provider_name_tag = link.find('h4')
            provider_name = provider_name_tag.text.strip() if provider_name_tag else None
            redirect_link_tag = link.find('a', class_='watchEpisode')
            redirect_link = redirect_link_tag['href'] if redirect_link_tag else None
            lang_key = link.get('data-lang-key')
            lang_key = int(
                lang_key) if lang_key and lang_key.isdigit() else None
            if provider_name and redirect_link and lang_key:
                if provider_name not in providers:
                    providers[provider_name] = {}
                providers[provider_name][lang_key] = f"{self.ANIWORLD_TO}{redirect_link}"
        return providers

    def _get_redirect_link(self, season: int, episode: int, key: str, language: str = "German Dub"):
        """Return (redirect_url, provider_name) for the first provider that
        offers *language*, or None when the language is unavailable."""
        languageCode = self._GetLanguageKey(language)
        if (self.IsLanguage(season, episode, key, language)):
            # BUG FIX: a stray `break` stopped the scan after the first
            # provider entry; every provider is now checked.
            for provider_name, lang_dict in self._get_provider_from_html(season, episode, key).items():
                if languageCode in lang_dict:
                    return (lang_dict[languageCode], provider_name)
        return None

    def _get_embeded_link(self, season: int, episode: int, key: str, language: str = "German Dub"):
        """Follow the aniworld redirect to the hoster's embed URL, or None."""
        redirect = self._get_redirect_link(season, episode, key, language)
        # BUG FIX: _get_redirect_link returns None when the language is
        # unavailable; the old unconditional tuple unpack raised TypeError.
        if redirect is None:
            return None
        redirect_link, provider_name = redirect
        embeded_link = self.session.get(
            redirect_link, timeout=self.DEFAULT_REQUEST_TIMEOUT,
            headers={'User-Agent': self.RANDOM_USER_AGENT}).url
        return embeded_link

    def _get_direct_link_from_provider(self, season: int, episode: int, key: str, language: str = "German Dub"):
        """Resolve the episode's embed link to (direct_link, headers), or None.

        NOTE(review): despite the provider table in SUPPORTED_PROVIDERS this
        currently always resolves through the "VOE" provider implementation.
        """
        embeded_link = self._get_embeded_link(season, episode, key, language)
        if embeded_link is None:
            return None
        return self.Providers.GetProvider("VOE").GetLink(embeded_link, self.DEFAULT_REQUEST_TIMEOUT)

    def get_season_episode_count(self, slug: str) -> dict:
        """Return a mapping of season number -> episode count for *slug*."""
        base_url = f"{self.ANIWORLD_TO}/anime/stream/{slug}/"
        # CONSISTENCY FIX: use the retrying session like every other request
        # in this class instead of a bare requests.get.
        response = self.session.get(base_url, timeout=self.DEFAULT_REQUEST_TIMEOUT)
        soup = BeautifulSoup(response.content, 'html.parser')
        season_meta = soup.find('meta', itemprop='numberOfSeasons')
        number_of_seasons = int(season_meta['content']) if season_meta else 0
        episode_counts = {}
        for season in range(1, number_of_seasons + 1):
            season_url = f"{base_url}staffel-{season}"
            response = self.session.get(season_url, timeout=self.DEFAULT_REQUEST_TIMEOUT)
            soup = BeautifulSoup(response.content, 'html.parser')
            episode_links = soup.find_all('a', href=True)
            unique_links = set(
                link['href']
                for link in episode_links
                if f"staffel-{season}/episode-" in link['href']
            )
            episode_counts[season] = len(unique_links)
        return episode_counts

View File

@@ -1,27 +1,27 @@
from abc import ABC, abstractmethod


class Loader(ABC):
    """Abstract interface every streaming-site loader must implement."""

    @abstractmethod
    def Search(self, word: str) -> list:
        """Search the site for *word* and return a list of matches."""

    @abstractmethod
    def IsLanguage(self, season: int, episode: int, key: str, language: str = "German Dub") -> bool:
        """Return True if the episode is available in *language*."""

    @abstractmethod
    def Download(self, baseDirectory: str, serieFolder: str, season: int, episode: int, key: str, progress_callback: callable = None) -> bool:
        """Download one episode into the series folder below *baseDirectory*."""

    @abstractmethod
    def GetSiteKey(self) -> str:
        """Return the unique key identifying the site (e.g. 'aniworld.to')."""

    @abstractmethod
    def GetTitle(self) -> str:
        """Return the display title for a series."""

    @abstractmethod
    def get_season_episode_count(self, slug: str) -> dict:
        """Return a mapping of season number to episode count."""
        # BUG FIX: this abstract method previously had no body at all;
        # the docstring now serves as its body.
from abc import ABC, abstractmethod


class Loader(ABC):
    """Contract for site-specific loaders: search, language check, download."""

    @abstractmethod
    def Search(self, word: str) -> list:
        """Find series matching *word* on the site."""
        ...

    @abstractmethod
    def IsLanguage(self, season: int, episode: int, key: str, language: str = "German Dub") -> bool:
        """Check whether the given episode exists in *language*."""
        ...

    @abstractmethod
    def Download(self, baseDirectory: str, serieFolder: str, season: int, episode: int, key: str, progress_callback: callable = None) -> bool:
        """Fetch one episode into the series folder below *baseDirectory*."""
        ...

    @abstractmethod
    def GetSiteKey(self) -> str:
        """Unique identifier of the backing site."""
        ...

    @abstractmethod
    def GetTitle(self) -> str:
        """Human-readable series title."""
        ...

    @abstractmethod
    def get_season_episode_count(self, slug: str) -> dict:
        """Map each season number to its episode count."""
        ...

File diff suppressed because it is too large Load Diff

View File

@@ -1,10 +1,10 @@
from .aniworld_provider import AniworldLoader
from .base_provider import Loader
class Loaders:
    """Registry mapping site keys to their Loader implementations."""

    def __init__(self):
        # Known site loaders, keyed by site identifier.
        registry = {}
        registry["aniworld.to"] = AniworldLoader()
        self.dict = registry

    def GetLoader(self, key: str) -> Loader:
        """Return the Loader registered for *key* (KeyError if unknown)."""
        registry = self.dict
        return registry[key]
from .aniworld_provider import AniworldLoader
from .base_provider import Loader
class Loaders:
    """Look-up table of available site loaders."""

    def __init__(self):
        # One entry per supported streaming site.
        self.dict = {
            "aniworld.to": AniworldLoader(),
        }

    def GetLoader(self, key: str) -> Loader:
        """Fetch the loader for site *key*; raises KeyError when missing."""
        return self.dict[key]

View File

@@ -1,7 +1,7 @@
from abc import ABC, abstractmethod
class Provider(ABC):
    """Abstract interface for hoster-specific direct-link resolvers."""

    @abstractmethod
    def GetLink(self, embededLink: str, DEFAULT_REQUEST_TIMEOUT: int) -> "tuple[str, list[str]]":
        """Resolve an embed URL to (direct_link, extra_request_headers).

        FIX: the original annotation ``(str, [str])`` built a throwaway
        tuple at definition time and is not a valid type expression; the
        string annotation documents the same contract without evaluation.
        """
from abc import ABC, abstractmethod
class Provider(ABC):
    """Interface implemented by every video-hoster resolver."""

    @abstractmethod
    def GetLink(self, embededLink: str, DEFAULT_REQUEST_TIMEOUT: int) -> "tuple[str, list[str]]":
        """Turn a hoster embed URL into (direct_link, extra_headers).

        FIX: replaced the invalid ``(str, [str])`` annotation (a literal
        tuple evaluated at def time) with a string type annotation.
        """

View File

@@ -1,59 +1,59 @@
import re
import random
import time
from fake_useragent import UserAgent
import requests
from .Provider import Provider
class Doodstream(Provider):
    """Direct-link resolver for the Doodstream hoster."""

    def __init__(self):
        # Randomized UA so repeated requests look less bot-like.
        self.RANDOM_USER_AGENT = UserAgent().random

    def GetLink(self, embededLink: str, DEFAULT_REQUEST_TIMEOUT: int) -> str:
        """Resolve a Doodstream embed URL to a time-limited direct video URL.

        Raises ValueError when the pass_md5 URL or the token cannot be
        extracted from the embed page.
        """
        headers = {
            'User-Agent': self.RANDOM_USER_AGENT,
            'Referer': 'https://dood.li/'
        }

        def extract_data(pattern, content):
            # First capture group of *pattern* in *content*, or None.
            match = re.search(pattern, content)
            return match.group(1) if match else None

        def generate_random_string(length=10):
            # Doodstream expects a random suffix appended to the base URL.
            characters = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789'
            return ''.join(random.choice(characters) for _ in range(length))

        response = requests.get(
            embededLink,
            headers=headers,
            timeout=DEFAULT_REQUEST_TIMEOUT,
            verify=False
        )
        response.raise_for_status()
        pass_md5_pattern = r"\$\.get\('([^']*\/pass_md5\/[^']*)'"
        pass_md5_url = extract_data(pass_md5_pattern, response.text)
        if not pass_md5_url:
            raise ValueError(
                f'pass_md5 URL not found using {embededLink}.')
        full_md5_url = f"https://dood.li{pass_md5_url}"
        token_pattern = r"token=([a-zA-Z0-9]+)"
        token = extract_data(token_pattern, response.text)
        if not token:
            raise ValueError(f'Token not found using {embededLink}.')
        md5_response = requests.get(
            full_md5_url, headers=headers, timeout=DEFAULT_REQUEST_TIMEOUT, verify=False)
        md5_response.raise_for_status()
        video_base_url = md5_response.text.strip()
        random_string = generate_random_string(10)
        expiry = int(time.time())
        direct_link = f"{video_base_url}{random_string}?token={token}&expiry={expiry}"
        # BUG FIX: the computed link was built but never returned,
        # so every call yielded None.
        return direct_link
import re
import random
import time
from fake_useragent import UserAgent
import requests
from .Provider import Provider
class Doodstream(Provider):
    """Resolves Doodstream embed pages to direct, time-limited video URLs."""

    def __init__(self):
        # Random UA per instance to look less bot-like.
        self.RANDOM_USER_AGENT = UserAgent().random

    def GetLink(self, embededLink: str, DEFAULT_REQUEST_TIMEOUT: int) -> str:
        """Fetch the embed page, extract the pass_md5 URL and token,
        then assemble the final direct link.

        Raises ValueError when either piece cannot be found.
        """
        request_headers = {
            'User-Agent': self.RANDOM_USER_AGENT,
            'Referer': 'https://dood.li/'
        }

        def first_group(pattern, text):
            # First capture group of *pattern* in *text*, or None.
            found = re.search(pattern, text)
            return found.group(1) if found else None

        def random_suffix(size=10):
            alphabet = 'ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789'
            return ''.join(random.choice(alphabet) for _ in range(size))

        embed_page = requests.get(
            embededLink,
            headers=request_headers,
            timeout=DEFAULT_REQUEST_TIMEOUT,
            verify=False
        )
        embed_page.raise_for_status()
        page_text = embed_page.text
        md5_path = first_group(r"\$\.get\('([^']*\/pass_md5\/[^']*)'", page_text)
        if not md5_path:
            raise ValueError(
                f'pass_md5 URL not found using {embededLink}.')
        token = first_group(r"token=([a-zA-Z0-9]+)", page_text)
        if not token:
            raise ValueError(f'Token not found using {embededLink}.')
        md5_reply = requests.get(
            f"https://dood.li{md5_path}", headers=request_headers,
            timeout=DEFAULT_REQUEST_TIMEOUT, verify=False)
        md5_reply.raise_for_status()
        video_base_url = md5_reply.text.strip()
        suffix = random_suffix(10)
        expiry = int(time.time())
        return f"{video_base_url}{suffix}?token={token}&expiry={expiry}"

View File

@@ -1,51 +1,51 @@
import re
import requests
# import jsbeautifier.unpackers.packer as packer
from aniworld import config
# Precompiled patterns: iframe redirect, obfuscated script block, m3u8 URL.
REDIRECT_REGEX = re.compile(
    r'<iframe *(?:[^>]+ )?src=(?:\'([^\']+)\'|"([^"]+)")[^>]*>')
SCRIPT_REGEX = re.compile(
    r'(?s)<script\s+[^>]*?data-cfasync=["\']?false["\']?[^>]*>(.+?)</script>')
VIDEO_URL_REGEX = re.compile(r'file:\s*"([^"]+\.m3u8[^"]*)"')
# TODO Implement this script fully
def get_direct_link_from_filemoon(embeded_filemoon_link: str):
    """Resolve a Filemoon embed link to its m3u8 URL (incomplete).

    NOTE(review): ``packer`` is referenced below but its import is commented
    out at the top of this file, so reaching the eval-unpacking branch raises
    NameError at runtime. Left as-is per the TODO above.
    """
    session = requests.Session()
    session.verify = False
    headers = {
        "User-Agent": config.RANDOM_USER_AGENT,
        "Referer": embeded_filemoon_link,
    }
    response = session.get(embeded_filemoon_link, headers=headers)
    source = response.text
    # Follow one level of iframe redirection when the embed page wraps the player.
    match = REDIRECT_REGEX.search(source)
    if match:
        redirect_url = match.group(1) or match.group(2)
        response = session.get(redirect_url, headers=headers)
        source = response.text
    # Scan obfuscated scripts for an eval-packed payload containing the URL.
    for script_match in SCRIPT_REGEX.finditer(source):
        script_content = script_match.group(1).strip()
        if not script_content.startswith("eval("):
            continue
        if packer.detect(script_content):
            unpacked = packer.unpack(script_content)
            video_match = VIDEO_URL_REGEX.search(unpacked)
            if video_match:
                return video_match.group(1)
    raise Exception("No Video link found!")
if __name__ == '__main__':
    url = input("Enter Filemoon Link: ")
    print(get_direct_link_from_filemoon(url))
import re
import requests
# import jsbeautifier.unpackers.packer as packer
from aniworld import config
# Precompiled patterns: iframe redirect, obfuscated script block, m3u8 URL.
REDIRECT_REGEX = re.compile(
    r'<iframe *(?:[^>]+ )?src=(?:\'([^\']+)\'|"([^"]+)")[^>]*>')
SCRIPT_REGEX = re.compile(
    r'(?s)<script\s+[^>]*?data-cfasync=["\']?false["\']?[^>]*>(.+?)</script>')
VIDEO_URL_REGEX = re.compile(r'file:\s*"([^"]+\.m3u8[^"]*)"')
# TODO Implement this script fully
def get_direct_link_from_filemoon(embeded_filemoon_link: str):
    """Resolve a Filemoon embed link to its m3u8 URL (incomplete).

    NOTE(review): ``packer`` is referenced below but its import is commented
    out at the top of this file, so reaching the eval-unpacking branch raises
    NameError at runtime. Left as-is per the TODO above.
    """
    session = requests.Session()
    session.verify = False
    headers = {
        "User-Agent": config.RANDOM_USER_AGENT,
        "Referer": embeded_filemoon_link,
    }
    response = session.get(embeded_filemoon_link, headers=headers)
    source = response.text
    # Follow one level of iframe redirection when the embed page wraps the player.
    match = REDIRECT_REGEX.search(source)
    if match:
        redirect_url = match.group(1) or match.group(2)
        response = session.get(redirect_url, headers=headers)
        source = response.text
    # Scan obfuscated scripts for an eval-packed payload containing the URL.
    for script_match in SCRIPT_REGEX.finditer(source):
        script_content = script_match.group(1).strip()
        if not script_content.startswith("eval("):
            continue
        if packer.detect(script_content):
            unpacked = packer.unpack(script_content)
            video_match = VIDEO_URL_REGEX.search(unpacked)
            if video_match:
                return video_match.group(1)
    raise Exception("No Video link found!")
if __name__ == '__main__':
    url = input("Enter Filemoon Link: ")
    print(get_direct_link_from_filemoon(url))

View File

@@ -1,90 +1,90 @@
import re
import json
import sys
import requests
from aniworld.config import DEFAULT_REQUEST_TIMEOUT
def fetch_page_content(url):
    """Fetch *url* and return the response body, or None on any request error."""
    try:
        response = requests.get(url, timeout=DEFAULT_REQUEST_TIMEOUT)
        response.raise_for_status()
        return response.text
    except requests.exceptions.RequestException as e:
        print(f"Failed to fetch the page content: {e}")
        return None
def extract_video_data(page_content):
    """Locate the ``videos_manifest`` line and parse its embedded JSON object.

    Raises ValueError when no manifest line is present.
    NOTE(review): callers pass fetch_page_content()'s result directly; when
    that is None, re.search raises TypeError here — verify upstream handling.
    """
    match = re.search(r'^.*videos_manifest.*$', page_content, re.MULTILINE)
    if not match:
        raise ValueError("Failed to extract video manifest from the response.")
    # Slice from the first '{' to the last '}' of the matched line.
    json_str = match.group(0)[match.group(0).find(
        '{'):match.group(0).rfind('}') + 1]
    return json.loads(json_str)
def get_streams(url):
    """Return {'name': ..., 'streams': [...]} for a hanime.tv video page."""
    page_content = fetch_page_content(url)
    data = extract_video_data(page_content)
    video_info = data['state']['data']['video']
    name = video_info['hentai_video']['name']
    # Only the first server's stream list is used.
    streams = video_info['videos_manifest']['servers'][0]['streams']
    return {"name": name, "streams": streams}
def display_streams(streams):
    """Print a numbered list of stream qualities; premium-only ones are tagged."""
    if not streams:
        print("No streams available.")
        return
    print("Available qualities:")
    for i, stream in enumerate(streams, 1):
        premium_tag = "(Premium)" if not stream['is_guest_allowed'] else ""
        print(
            f"{i}. {stream['width']}x{stream['height']}\t"
            f"({stream['filesize_mbs']}MB) {premium_tag}")
def get_user_selection(streams):
    """Prompt for a 1-based stream number; return its 0-based index or None."""
    try:
        selected_index = int(input("Select a stream: ").strip()) - 1
        if 0 <= selected_index < len(streams):
            return selected_index
        print("Invalid selection.")
        return None
    except ValueError:
        print("Invalid input.")
        return None
def get_direct_link_from_hanime(url=None):
    """Interactively resolve and print the M3U8 URL for a hanime.tv video.

    The URL is taken from the argument, then argv[1], then an input prompt.
    Ctrl-C aborts silently.
    """
    try:
        if url is None:
            if len(sys.argv) > 1:
                url = sys.argv[1]
            else:
                url = input("Please enter the hanime.tv video URL: ").strip()
        try:
            video_data = get_streams(url)
            print(f"Video: {video_data['name']}")
            print('*' * 40)
            display_streams(video_data['streams'])
            # Re-prompt until a valid selection is made.
            selected_index = None
            while selected_index is None:
                selected_index = get_user_selection(video_data['streams'])
            print(f"M3U8 URL: {video_data['streams'][selected_index]['url']}")
        except ValueError as e:
            print(f"Error: {e}")
    except KeyboardInterrupt:
        pass
if __name__ == "__main__":
    get_direct_link_from_hanime()
import re
import json
import sys
import requests
from aniworld.config import DEFAULT_REQUEST_TIMEOUT
def fetch_page_content(url):
    """Return the body of *url*, or None when the request fails."""
    try:
        reply = requests.get(url, timeout=DEFAULT_REQUEST_TIMEOUT)
        reply.raise_for_status()
        return reply.text
    except requests.exceptions.RequestException as err:
        # Best-effort: report and signal failure with None.
        print(f"Failed to fetch the page content: {err}")
        return None
def extract_video_data(page_content):
    """Parse the JSON object embedded in the page's videos_manifest line.

    Raises ValueError when the manifest line cannot be found.
    """
    manifest_line = re.search(r'^.*videos_manifest.*$', page_content, re.MULTILINE)
    if manifest_line is None:
        raise ValueError("Failed to extract video manifest from the response.")
    line = manifest_line.group(0)
    # The JSON object spans from the first '{' to the last '}' on the line.
    start = line.find('{')
    stop = line.rfind('}') + 1
    return json.loads(line[start:stop])
def get_streams(url):
    """Return {'name': ..., 'streams': [...]} for a hanime.tv video page."""
    manifest = extract_video_data(fetch_page_content(url))
    video_info = manifest['state']['data']['video']
    # Only the first server's stream list is exposed.
    return {
        "name": video_info['hentai_video']['name'],
        "streams": video_info['videos_manifest']['servers'][0]['streams'],
    }
def display_streams(streams):
    """Print a numbered list of stream qualities; premium-only ones are tagged."""
    if not streams:
        print("No streams available.")
        return
    print("Available qualities:")
    for number, stream in enumerate(streams, 1):
        premium_tag = "" if stream['is_guest_allowed'] else "(Premium)"
        print(
            f"{number}. {stream['width']}x{stream['height']}\t"
            f"({stream['filesize_mbs']}MB) {premium_tag}")
def get_user_selection(streams):
    """Prompt for a 1-based stream number; return its 0-based index or None."""
    try:
        choice = int(input("Select a stream: ").strip()) - 1
    except ValueError:
        print("Invalid input.")
        return None
    if 0 <= choice < len(streams):
        return choice
    print("Invalid selection.")
    return None
def get_direct_link_from_hanime(url=None):
    """Interactively resolve and print the M3U8 URL for a hanime.tv video.

    The URL comes from the argument, then argv[1], then an input prompt.
    Ctrl-C aborts silently.
    """
    try:
        if url is None:
            # Fall back to the CLI argument, then to an interactive prompt.
            url = sys.argv[1] if len(sys.argv) > 1 else input(
                "Please enter the hanime.tv video URL: ").strip()
        try:
            video_data = get_streams(url)
            print(f"Video: {video_data['name']}")
            print('*' * 40)
            display_streams(video_data['streams'])
            # Re-prompt until the user picks a valid stream.
            selected_index = None
            while selected_index is None:
                selected_index = get_user_selection(video_data['streams'])
            print(f"M3U8 URL: {video_data['streams'][selected_index]['url']}")
        except ValueError as e:
            print(f"Error: {e}")
    except KeyboardInterrupt:
        pass


if __name__ == "__main__":
    get_direct_link_from_hanime()

View File

@@ -1,35 +1,35 @@
import requests
import json
from urllib.parse import urlparse
# TODO Doesn't work on download yet and has to be implemented
def get_direct_link_from_loadx(embeded_loadx_link: str):
    """Resolve a LoadX embed link to its direct video URL.

    Follows redirects to discover the player host and id hash, then asks
    the player endpoint for the video source.

    Raises ValueError on an unexpected URL path or missing video source.
    """
    # HEAD with allow_redirects resolves the canonical player URL cheaply.
    response = requests.head(
        embeded_loadx_link, allow_redirects=True, verify=False)
    parsed_url = urlparse(response.url)
    path_parts = parsed_url.path.split("/")
    if len(path_parts) < 3:
        raise ValueError("Invalid path!")
    id_hash = path_parts[2]
    host = parsed_url.netloc
    post_url = f"https://{host}/player/index.php?data={id_hash}&do=getVideo"
    # The endpoint only answers AJAX-style requests.
    headers = {"X-Requested-With": "XMLHttpRequest"}
    response = requests.post(post_url, headers=headers, verify=False)
    data = json.loads(response.text)
    # FIX: removed stray debug `print(data)` that leaked the raw payload.
    video_url = data.get("videoSource")
    if not video_url:
        raise ValueError("No Video link found!")
    return video_url


if __name__ == '__main__':
    url = input("Enter Loadx Link: ")
    print(get_direct_link_from_loadx(url))
import requests
import json
from urllib.parse import urlparse
# TODO Doesn't work on download yet and has to be implemented
def get_direct_link_from_loadx(embeded_loadx_link: str):
    """Resolve a LoadX embed link to its direct video URL.

    Follows redirects to discover the player host and id hash, then asks
    the player endpoint for the video source.

    Raises ValueError on an unexpected URL path or missing video source.
    """
    # HEAD with allow_redirects resolves the canonical player URL cheaply.
    response = requests.head(
        embeded_loadx_link, allow_redirects=True, verify=False)
    parsed_url = urlparse(response.url)
    path_parts = parsed_url.path.split("/")
    if len(path_parts) < 3:
        raise ValueError("Invalid path!")
    id_hash = path_parts[2]
    host = parsed_url.netloc
    post_url = f"https://{host}/player/index.php?data={id_hash}&do=getVideo"
    # The endpoint only answers AJAX-style requests.
    headers = {"X-Requested-With": "XMLHttpRequest"}
    response = requests.post(post_url, headers=headers, verify=False)
    data = json.loads(response.text)
    # FIX: removed stray debug `print(data)` that leaked the raw payload.
    video_url = data.get("videoSource")
    if not video_url:
        raise ValueError("No Video link found!")
    return video_url


if __name__ == '__main__':
    url = input("Enter Loadx Link: ")
    print(get_direct_link_from_loadx(url))

View File

@@ -1,39 +1,39 @@
import re
import requests
from aniworld import config
def get_direct_link_from_luluvdo(embeded_luluvdo_link, arguments=None):
    """Resolve a Luluvdo embed link to the direct stream URL.

    arguments: optional CLI namespace; when its ``action`` is "Download",
    an Accept-Language header is added.

    Raises ValueError when no file URL is found in the response.
    """
    luluvdo_id = embeded_luluvdo_link.split('/')[-1]
    filelink = (
        f"https://luluvdo.com/dl?op=embed&file_code={luluvdo_id}&embed=1&referer=luluvdo.com&adb=0"
    )
    # The User-Agent needs to be the same as the direct-link ones to work
    headers = {
        "Origin": "https://luluvdo.com",
        "Referer": "https://luluvdo.com/",
        "User-Agent": config.LULUVDO_USER_AGENT
    }
    # BUG FIX: `arguments` defaults to None (and the __main__ path passes
    # nothing), so dereferencing arguments.action unconditionally raised
    # AttributeError.
    if arguments is not None and arguments.action == "Download":
        headers["Accept-Language"] = "de-DE,de;q=0.9,en-US;q=0.8,en;q=0.7"
    response = requests.get(filelink, headers=headers,
                            timeout=config.DEFAULT_REQUEST_TIMEOUT)
    if response.status_code == 200:
        pattern = r'file:\s*"([^"]+)"'
        matches = re.findall(pattern, str(response.text))
        if matches:
            return matches[0]
    raise ValueError("No match found")


if __name__ == '__main__':
    url = input("Enter Luluvdo Link: ")
    print(get_direct_link_from_luluvdo(url))
import re
import requests
from aniworld import config
def get_direct_link_from_luluvdo(embeded_luluvdo_link, arguments=None):
    """Resolve a Luluvdo embed link to the direct stream URL.

    arguments: optional CLI namespace; when its ``action`` is "Download",
    an Accept-Language header is added.

    Raises ValueError when no file URL is found in the response.
    """
    luluvdo_id = embeded_luluvdo_link.split('/')[-1]
    filelink = (
        f"https://luluvdo.com/dl?op=embed&file_code={luluvdo_id}&embed=1&referer=luluvdo.com&adb=0"
    )
    # The User-Agent needs to be the same as the direct-link ones to work
    headers = {
        "Origin": "https://luluvdo.com",
        "Referer": "https://luluvdo.com/",
        "User-Agent": config.LULUVDO_USER_AGENT
    }
    # BUG FIX: `arguments` defaults to None (and the __main__ path passes
    # nothing), so dereferencing arguments.action unconditionally raised
    # AttributeError.
    if arguments is not None and arguments.action == "Download":
        headers["Accept-Language"] = "de-DE,de;q=0.9,en-US;q=0.8,en;q=0.7"
    response = requests.get(filelink, headers=headers,
                            timeout=config.DEFAULT_REQUEST_TIMEOUT)
    if response.status_code == 200:
        pattern = r'file:\s*"([^"]+)"'
        matches = re.findall(pattern, str(response.text))
        if matches:
            return matches[0]
    raise ValueError("No match found")


if __name__ == '__main__':
    url = input("Enter Luluvdo Link: ")
    print(get_direct_link_from_luluvdo(url))

View File

@@ -1,43 +1,43 @@
import re
import base64
import requests
from aniworld.config import DEFAULT_REQUEST_TIMEOUT, RANDOM_USER_AGENT
# Matches the obfuscated payload variable embedded in the embed page's JS.
SPEEDFILES_PATTERN = re.compile(r'var _0x5opu234 = "(?P<encoded_data>.*?)";')
def get_direct_link_from_speedfiles(embeded_speedfiles_link):
    """Resolve a SpeedFiles embed link by reversing its obfuscation chain.

    Raises ValueError when the server is down or the obfuscated payload
    cannot be located in the page.
    """
    response = requests.get(
        embeded_speedfiles_link,
        timeout=DEFAULT_REQUEST_TIMEOUT,
        headers={'User-Agent': RANDOM_USER_AGENT}
    )
    if "<span class=\"inline-block\">Web server is down</span>" in response.text:
        raise ValueError(
            "The SpeedFiles server is currently down.\n"
            "Please try again later or choose a different hoster."
        )
    match = SPEEDFILES_PATTERN.search(response.text)
    if not match:
        raise ValueError("Pattern not found in the response.")
    encoded_data = match.group("encoded_data")
    # De-obfuscation chain (order matters; mirrors the site's JS):
    # b64 -> swapcase+reverse -> b64 -> reverse -> hex pairs -> shift -3
    # -> swapcase+reverse -> b64.
    decoded = base64.b64decode(encoded_data).decode()
    decoded = decoded.swapcase()[::-1]
    decoded = base64.b64decode(decoded).decode()[::-1]
    # Interpret the text as consecutive two-char hex byte values.
    decoded_hex = ''.join(chr(int(decoded[i:i + 2], 16))
                          for i in range(0, len(decoded), 2))
    # Shift every character back by 3 code points.
    shifted = ''.join(chr(ord(char) - 3) for char in decoded_hex)
    result = base64.b64decode(shifted.swapcase()[::-1]).decode()
    return result
if __name__ == '__main__':
    speedfiles_link = input("Enter Speedfiles Link: ")
    print(get_direct_link_from_speedfiles(
        embeded_speedfiles_link=speedfiles_link))
import re
import base64
import requests
from aniworld.config import DEFAULT_REQUEST_TIMEOUT, RANDOM_USER_AGENT
# Matches the obfuscated payload variable embedded in the embed page's JS.
SPEEDFILES_PATTERN = re.compile(r'var _0x5opu234 = "(?P<encoded_data>.*?)";')
def get_direct_link_from_speedfiles(embeded_speedfiles_link):
    """Resolve a SpeedFiles embed link by reversing its obfuscation chain.

    Raises ValueError when the server is down or the obfuscated payload
    cannot be located in the page.
    """
    response = requests.get(
        embeded_speedfiles_link,
        timeout=DEFAULT_REQUEST_TIMEOUT,
        headers={'User-Agent': RANDOM_USER_AGENT}
    )
    if "<span class=\"inline-block\">Web server is down</span>" in response.text:
        raise ValueError(
            "The SpeedFiles server is currently down.\n"
            "Please try again later or choose a different hoster."
        )
    match = SPEEDFILES_PATTERN.search(response.text)
    if not match:
        raise ValueError("Pattern not found in the response.")
    encoded_data = match.group("encoded_data")
    # De-obfuscation chain (order matters; mirrors the site's JS):
    # b64 -> swapcase+reverse -> b64 -> reverse -> hex pairs -> shift -3
    # -> swapcase+reverse -> b64.
    decoded = base64.b64decode(encoded_data).decode()
    decoded = decoded.swapcase()[::-1]
    decoded = base64.b64decode(decoded).decode()[::-1]
    # Interpret the text as consecutive two-char hex byte values.
    decoded_hex = ''.join(chr(int(decoded[i:i + 2], 16))
                          for i in range(0, len(decoded), 2))
    # Shift every character back by 3 code points.
    shifted = ''.join(chr(ord(char) - 3) for char in decoded_hex)
    result = base64.b64decode(shifted.swapcase()[::-1]).decode()
    return result
if __name__ == '__main__':
    speedfiles_link = input("Enter Speedfiles Link: ")
    print(get_direct_link_from_speedfiles(
        embeded_speedfiles_link=speedfiles_link))

View File

@@ -1,2 +1,2 @@
def get_direct_link_from_streamtape(embeded_streamtape_link: str) -> str:
    # TODO: unimplemented stub — always returns None, so the Streamtape
    # provider is effectively disabled.
    pass
def get_direct_link_from_streamtape(embeded_streamtape_link: str) -> str:
    # TODO: unimplemented stub — always returns None, so the Streamtape
    # provider is effectively disabled.
    pass

View File

@@ -1,34 +1,34 @@
import re
import requests
from bs4 import BeautifulSoup
from aniworld.config import DEFAULT_REQUEST_TIMEOUT, RANDOM_USER_AGENT
def get_direct_link_from_vidmoly(embeded_vidmoly_link: str):
    """Scan the Vidmoly embed page's inline scripts for the stream URL.

    Raises ValueError when no script contains a file link.
    """
    page = requests.get(
        embeded_vidmoly_link,
        headers={'User-Agent': RANDOM_USER_AGENT},
        timeout=DEFAULT_REQUEST_TIMEOUT
    )
    document = BeautifulSoup(page.text, 'html.parser')
    pattern = r'file:\s*"(https?://.*?)"'
    # The stream URL lives inside one of the inline <script> blocks.
    for script_tag in document.find_all('script'):
        body = script_tag.string
        if not body:
            continue
        hit = re.search(pattern, body)
        if hit:
            return hit.group(1)
    raise ValueError("No direct link found.")


if __name__ == '__main__':
    link = input("Enter Vidmoly Link: ")
    print('Note: --referer "https://vidmoly.to"')
    print(get_direct_link_from_vidmoly(embeded_vidmoly_link=link))
import re
import requests
from bs4 import BeautifulSoup
from aniworld.config import DEFAULT_REQUEST_TIMEOUT, RANDOM_USER_AGENT
def get_direct_link_from_vidmoly(embeded_vidmoly_link: str):
    """Extract the https file URL from the Vidmoly embed page's scripts.

    Raises ValueError when no script contains a file link.
    """
    reply = requests.get(
        embeded_vidmoly_link,
        headers={'User-Agent': RANDOM_USER_AGENT},
        timeout=DEFAULT_REQUEST_TIMEOUT
    )
    markup = BeautifulSoup(reply.text, 'html.parser')
    link_pattern = r'file:\s*"(https?://.*?)"'
    for tag in markup.find_all('script'):
        # Only script tags with a plain-text body can hold the player config.
        if tag.string:
            found = re.search(link_pattern, tag.string)
            if found:
                return found.group(1)
    raise ValueError("No direct link found.")


if __name__ == '__main__':
    link = input("Enter Vidmoly Link: ")
    print('Note: --referer "https://vidmoly.to"')
    print(get_direct_link_from_vidmoly(embeded_vidmoly_link=link))

View File

@@ -1,29 +1,29 @@
import re
import requests
from bs4 import BeautifulSoup
from aniworld.config import DEFAULT_REQUEST_TIMEOUT, RANDOM_USER_AGENT
def get_direct_link_from_vidoza(embeded_vidoza_link: str) -> str:
    """Extract the direct video src from a Vidoza embed page's scripts.

    Raises ValueError when no sourcesCode block contains a src URL.
    """
    page = requests.get(
        embeded_vidoza_link,
        headers={'User-Agent': RANDOM_USER_AGENT},
        timeout=DEFAULT_REQUEST_TIMEOUT
    )
    markup = BeautifulSoup(page.content, "html.parser")
    for script_tag in markup.find_all('script'):
        # Only the script defining the player sources is of interest.
        if 'sourcesCode:' not in script_tag.text:
            continue
        hit = re.search(r'src: "(.*?)"', script_tag.text)
        if hit:
            return hit.group(1)
    raise ValueError("No direct link found.")


if __name__ == '__main__':
    link = input("Enter Vidoza Link: ")
    print(get_direct_link_from_vidoza(embeded_vidoza_link=link))
import re
import requests
from bs4 import BeautifulSoup
from aniworld.config import DEFAULT_REQUEST_TIMEOUT, RANDOM_USER_AGENT
def get_direct_link_from_vidoza(embeded_vidoza_link: str) -> str:
    """Pull the video src URL out of the Vidoza embed page.

    Raises ValueError when no sourcesCode block contains a src URL.
    """
    reply = requests.get(
        embeded_vidoza_link,
        headers={'User-Agent': RANDOM_USER_AGENT},
        timeout=DEFAULT_REQUEST_TIMEOUT
    )
    document = BeautifulSoup(reply.content, "html.parser")
    candidates = (t for t in document.find_all('script') if 'sourcesCode:' in t.text)
    for script_tag in candidates:
        found = re.search(r'src: "(.*?)"', script_tag.text)
        if found:
            return found.group(1)
    raise ValueError("No direct link found.")


if __name__ == '__main__':
    link = input("Enter Vidoza Link: ")
    print(get_direct_link_from_vidoza(embeded_vidoza_link=link))

View File

@@ -1,113 +1,113 @@
import re
import base64
import json
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry
import requests
from bs4 import BeautifulSoup
from fake_useragent import UserAgent
from .Provider import Provider
# Compile regex patterns once for better performance
REDIRECT_PATTERN = re.compile(r"https?://[^'\"<>]+")  # first absolute URL in the embed page
B64_PATTERN = re.compile(r"var a168c='([^']+)'")  # base64 payload variable in the player JS
HLS_PATTERN = re.compile(r"'hls': '(?P<hls>[^']+)'")  # base64-encoded HLS source entry
class VOE(Provider):
    """Resolves VOE embed links to direct stream URLs.

    Three extraction strategies are tried in order:
      1. JSON blob inside a ``<script type="application/json">`` tag.
      2. Reversed base64 JSON stored in the ``a168c`` variable.
      3. Base64-encoded ``'hls'`` source in the player config.
    """

    def __init__(self):
        # Random UA per instance to reduce the chance of being blocked.
        self.RANDOM_USER_AGENT = UserAgent().random
        self.Header = {
            "User-Agent": self.RANDOM_USER_AGENT
        }

    def GetLink(self, embededLink: str, DEFAULT_REQUEST_TIMEOUT: int) -> "tuple[str, dict]":
        """Resolve *embededLink* to ``(direct_url, request_headers)``.

        Raises:
            ValueError: if no redirect URL or no stream source can be found.
        """
        self.session = requests.Session()
        # Retry transient server errors with exponential backoff (1s, 2s, 4s, ...).
        retries = Retry(
            total=5,
            backoff_factor=1,
            status_forcelist=[500, 502, 503, 504],
            allowed_methods=["GET"]
        )
        self.session.mount("https://", HTTPAdapter(max_retries=retries))
        # Honor the caller-supplied timeout; only fall back to 30s when none
        # was given. (Previously the argument was unconditionally overwritten.)
        if not DEFAULT_REQUEST_TIMEOUT:
            DEFAULT_REQUEST_TIMEOUT = 30
        response = self.session.get(
            embededLink,
            headers={'User-Agent': self.RANDOM_USER_AGENT},
            timeout=DEFAULT_REQUEST_TIMEOUT
        )
        # Use the module-level precompiled pattern instead of recompiling.
        redirect = REDIRECT_PATTERN.search(response.text)
        if not redirect:
            raise ValueError("No redirect found.")
        redirect_url = redirect.group(0)
        parts = redirect_url.strip().split("/")
        self.Header["Referer"] = f"{parts[0]}//{parts[2]}/"
        response = self.session.get(
            redirect_url,
            headers={'User-Agent': self.RANDOM_USER_AGENT},
            # Previously missing: without a timeout this request could hang forever.
            timeout=DEFAULT_REQUEST_TIMEOUT
        )
        html = response.content
        # Method 1: script-tag JSON. Guarded so a missing/undecodable tag no
        # longer aborts the remaining strategies (previously an AttributeError
        # here made Methods 2 and 3 unreachable).
        try:
            extracted = self.extract_voe_from_script(html)
            if extracted:
                return extracted, self.Header
        except Exception:
            pass
        htmlText = html.decode('utf-8')
        # Method 2: reversed base64 JSON in the `a168c` variable.
        b64_match = B64_PATTERN.search(htmlText)
        if b64_match:
            decoded = base64.b64decode(b64_match.group(1)).decode()[::-1]
            source = json.loads(decoded).get("source")
            if source:
                return source, self.Header
        # Method 3: base64-encoded HLS source.
        hls_match = HLS_PATTERN.search(htmlText)
        if hls_match:
            return base64.b64decode(hls_match.group("hls")).decode(), self.Header
        # Previously fell through returning None, which broke tuple unpacking
        # at the caller; fail loudly instead.
        raise ValueError("No VOE source found.")

    def shift_letters(self, input_str):
        """ROT13 the ASCII letters of *input_str*; other characters pass through."""
        result = ''
        for c in input_str:
            code = ord(c)
            if 65 <= code <= 90:
                code = (code - 65 + 13) % 26 + 65
            elif 97 <= code <= 122:
                code = (code - 97 + 13) % 26 + 97
            result += chr(code)
        return result

    def replace_junk(self, input_str):
        """Replace each known junk separator sequence with an underscore."""
        junk_parts = ['@$', '^^', '~@', '%?', '*~', '!!', '#&']
        for part in junk_parts:
            # Plain string replace; the parts are literals, regex is unnecessary.
            input_str = input_str.replace(part, '_')
        return input_str

    def shift_back(self, s, n):
        """Shift every character code in *s* down by *n*."""
        return ''.join(chr(ord(c) - n) for c in s)

    def decode_voe_string(self, encoded):
        """Undo VOE's obfuscation pipeline and return the decoded JSON object.

        Pipeline: ROT13 -> strip junk separators -> base64 decode ->
        char-shift by 3 -> reverse -> base64 decode -> JSON parse.
        """
        step1 = self.shift_letters(encoded)
        step2 = self.replace_junk(step1).replace('_', '')
        step3 = base64.b64decode(step2).decode()
        step4 = self.shift_back(step3, 3)
        step5 = base64.b64decode(step4[::-1]).decode()
        return json.loads(step5)

    def extract_voe_from_script(self, html):
        """Pull the 'source' URL from the page's application/json script tag.

        Returns None when the tag is absent (instead of raising AttributeError).
        """
        soup = BeautifulSoup(html, "html.parser")
        script = soup.find("script", type="application/json")
        if script is None:
            return None
        return self.decode_voe_string(script.text[2:-2])["source"]
import re
import base64
import json
from requests.adapters import HTTPAdapter
from urllib3.util.retry import Retry
import requests
from bs4 import BeautifulSoup
from fake_useragent import UserAgent
from .Provider import Provider
# Compile regex patterns once for better performance
# First absolute URL in the embed page (the redirect target).
REDIRECT_PATTERN = re.compile(r"https?://[^'\"<>]+")
# Base64 payload stored in the obfuscated `a168c` JS variable.
B64_PATTERN = re.compile(r"var a168c='([^']+)'")
# Base64-encoded HLS source in the player config.
HLS_PATTERN = re.compile(r"'hls': '(?P<hls>[^']+)'")
class VOE(Provider):
    """Resolves VOE embed links to direct stream URLs.

    Three extraction strategies are tried in order:
      1. JSON blob inside a ``<script type="application/json">`` tag.
      2. Reversed base64 JSON stored in the ``a168c`` variable.
      3. Base64-encoded ``'hls'`` source in the player config.
    """

    def __init__(self):
        # Random UA per instance to reduce the chance of being blocked.
        self.RANDOM_USER_AGENT = UserAgent().random
        self.Header = {
            "User-Agent": self.RANDOM_USER_AGENT
        }

    def GetLink(self, embededLink: str, DEFAULT_REQUEST_TIMEOUT: int) -> "tuple[str, dict]":
        """Resolve *embededLink* to ``(direct_url, request_headers)``.

        Raises:
            ValueError: if no redirect URL or no stream source can be found.
        """
        self.session = requests.Session()
        # Retry transient server errors with exponential backoff (1s, 2s, 4s, ...).
        retries = Retry(
            total=5,
            backoff_factor=1,
            status_forcelist=[500, 502, 503, 504],
            allowed_methods=["GET"]
        )
        self.session.mount("https://", HTTPAdapter(max_retries=retries))
        # Honor the caller-supplied timeout; only fall back to 30s when none
        # was given. (Previously the argument was unconditionally overwritten.)
        if not DEFAULT_REQUEST_TIMEOUT:
            DEFAULT_REQUEST_TIMEOUT = 30
        response = self.session.get(
            embededLink,
            headers={'User-Agent': self.RANDOM_USER_AGENT},
            timeout=DEFAULT_REQUEST_TIMEOUT
        )
        # Use the module-level precompiled pattern instead of recompiling.
        redirect = REDIRECT_PATTERN.search(response.text)
        if not redirect:
            raise ValueError("No redirect found.")
        redirect_url = redirect.group(0)
        parts = redirect_url.strip().split("/")
        self.Header["Referer"] = f"{parts[0]}//{parts[2]}/"
        response = self.session.get(
            redirect_url,
            headers={'User-Agent': self.RANDOM_USER_AGENT},
            # Previously missing: without a timeout this request could hang forever.
            timeout=DEFAULT_REQUEST_TIMEOUT
        )
        html = response.content
        # Method 1: script-tag JSON. Guarded so a missing/undecodable tag no
        # longer aborts the remaining strategies (previously an AttributeError
        # here made Methods 2 and 3 unreachable).
        try:
            extracted = self.extract_voe_from_script(html)
            if extracted:
                return extracted, self.Header
        except Exception:
            pass
        htmlText = html.decode('utf-8')
        # Method 2: reversed base64 JSON in the `a168c` variable.
        b64_match = B64_PATTERN.search(htmlText)
        if b64_match:
            decoded = base64.b64decode(b64_match.group(1)).decode()[::-1]
            source = json.loads(decoded).get("source")
            if source:
                return source, self.Header
        # Method 3: base64-encoded HLS source.
        hls_match = HLS_PATTERN.search(htmlText)
        if hls_match:
            return base64.b64decode(hls_match.group("hls")).decode(), self.Header
        # Previously fell through returning None, which broke tuple unpacking
        # at the caller; fail loudly instead.
        raise ValueError("No VOE source found.")

    def shift_letters(self, input_str):
        """ROT13 the ASCII letters of *input_str*; other characters pass through."""
        result = ''
        for c in input_str:
            code = ord(c)
            if 65 <= code <= 90:
                code = (code - 65 + 13) % 26 + 65
            elif 97 <= code <= 122:
                code = (code - 97 + 13) % 26 + 97
            result += chr(code)
        return result

    def replace_junk(self, input_str):
        """Replace each known junk separator sequence with an underscore."""
        junk_parts = ['@$', '^^', '~@', '%?', '*~', '!!', '#&']
        for part in junk_parts:
            # Plain string replace; the parts are literals, regex is unnecessary.
            input_str = input_str.replace(part, '_')
        return input_str

    def shift_back(self, s, n):
        """Shift every character code in *s* down by *n*."""
        return ''.join(chr(ord(c) - n) for c in s)

    def decode_voe_string(self, encoded):
        """Undo VOE's obfuscation pipeline and return the decoded JSON object.

        Pipeline: ROT13 -> strip junk separators -> base64 decode ->
        char-shift by 3 -> reverse -> base64 decode -> JSON parse.
        """
        step1 = self.shift_letters(encoded)
        step2 = self.replace_junk(step1).replace('_', '')
        step3 = base64.b64decode(step2).decode()
        step4 = self.shift_back(step3, 3)
        step5 = base64.b64decode(step4[::-1]).decode()
        return json.loads(step5)

    def extract_voe_from_script(self, html):
        """Pull the 'source' URL from the page's application/json script tag.

        Returns None when the tag is absent (instead of raising AttributeError).
        """
        soup = BeautifulSoup(html, "html.parser")
        script = soup.find("script", type="application/json")
        if script is None:
            return None
        return self.decode_voe_string(script.text[2:-2])["source"]

View File

@@ -1,6 +1,6 @@
"""
Infrastructure package for the Aniworld server.
This package contains repository implementations, database connections,
caching, and other infrastructure concerns.
"""
Infrastructure package for the Aniworld server.
This package contains repository implementations, database connections,
caching, and other infrastructure concerns.
"""

File diff suppressed because it is too large Load Diff

View File

@@ -1,40 +1,40 @@
import logging

# Handlers/loggers are created once and cached in these module-level slots so
# repeated setupLogger() calls do not attach duplicate handlers.
console_handler = None
error_logger = None
noKeyFound_logger = None
noGerFound_logger = None


def setupLogger():
    """Configure root/console logging and three file-backed error loggers.

    Idempotent: each handler/logger is only created on the first call.
    """
    global console_handler, error_logger, noKeyFound_logger, noGerFound_logger

    fmt = "%(asctime)s - %(levelname)s - %(funcName)s - %(message)s"
    logging.basicConfig(level=logging.INFO, format=fmt)

    if console_handler is None:
        console_handler = logging.StreamHandler()
        console_handler.setLevel(logging.INFO)
        console_handler.setFormatter(logging.Formatter(fmt))
        logging.getLogger().addHandler(console_handler)
        # Quieten chatty third-party loggers down to INFO as well.
        logging.getLogger("urllib3.connectionpool").setLevel(logging.INFO)
        logging.getLogger('charset_normalizer').setLevel(logging.INFO)
        logging.getLogger().setLevel(logging.INFO)

    if error_logger is None:
        error_logger = logging.getLogger("ErrorLog")
        file_handler = logging.FileHandler("../errors.log")
        file_handler.setLevel(logging.ERROR)
        error_logger.addHandler(file_handler)

    if noKeyFound_logger is None:
        noKeyFound_logger = logging.getLogger("NoKeyFound")
        file_handler = logging.FileHandler("../NoKeyFound.log")
        file_handler.setLevel(logging.ERROR)
        noKeyFound_logger.addHandler(file_handler)

    if noGerFound_logger is None:
        noGerFound_logger = logging.getLogger("noGerFound")
        file_handler = logging.FileHandler("../noGerFound.log")
        file_handler.setLevel(logging.ERROR)
        noGerFound_logger.addHandler(file_handler)
import logging

# Handlers/loggers are created once and cached in these module-level slots so
# repeated setupLogger() calls do not attach duplicate handlers.
console_handler = None
error_logger = None
noKeyFound_logger = None
noGerFound_logger = None


def setupLogger():
    """Configure root/console logging and three file-backed error loggers.

    Idempotent: each handler/logger is only created on the first call.
    """
    global console_handler, error_logger, noKeyFound_logger, noGerFound_logger

    fmt = "%(asctime)s - %(levelname)s - %(funcName)s - %(message)s"
    logging.basicConfig(level=logging.INFO, format=fmt)

    if console_handler is None:
        console_handler = logging.StreamHandler()
        console_handler.setLevel(logging.INFO)
        console_handler.setFormatter(logging.Formatter(fmt))
        logging.getLogger().addHandler(console_handler)
        # Quieten chatty third-party loggers down to INFO as well.
        logging.getLogger("urllib3.connectionpool").setLevel(logging.INFO)
        logging.getLogger('charset_normalizer').setLevel(logging.INFO)
        logging.getLogger().setLevel(logging.INFO)

    if error_logger is None:
        error_logger = logging.getLogger("ErrorLog")
        file_handler = logging.FileHandler("../errors.log")
        file_handler.setLevel(logging.ERROR)
        error_logger.addHandler(file_handler)

    if noKeyFound_logger is None:
        noKeyFound_logger = logging.getLogger("NoKeyFound")
        file_handler = logging.FileHandler("../NoKeyFound.log")
        file_handler.setLevel(logging.ERROR)
        noKeyFound_logger.addHandler(file_handler)

    if noGerFound_logger is None:
        noGerFound_logger = logging.getLogger("noGerFound")
        file_handler = logging.FileHandler("../noGerFound.log")
        file_handler.setLevel(logging.ERROR)
        noGerFound_logger.addHandler(file_handler)


# Configure logging as a side effect of importing this module.
setupLogger()

View File

@@ -1,6 +1,6 @@
"""
Repository package for data access layer.
This package contains repository implementations following the Repository pattern
for clean separation of data access logic from business logic.
"""
Repository package for data access layer.
This package contains repository implementations following the Repository pattern
for clean separation of data access logic from business logic.
"""

View File

@@ -1,24 +1,24 @@
# AniWorld FastAPI Server Configuration
# Authentication Configuration
JWT_SECRET_KEY=your-super-secure-jwt-secret-key-change-this-in-production
PASSWORD_SALT=c3149a46648b4394410b415ea654c31731b988ee59fc91b8fb8366a0b32ef0c1
MASTER_PASSWORD=admin123
# MASTER_PASSWORD_HASH=bb202031f646922388567de96a784074272efbbba9eb5d2259e23af04686d2a5
SESSION_TIMEOUT_HOURS=24
# Application Configuration
ANIME_DIRECTORY=\\\\sshfs.r\\ubuntu@192.168.178.43\\media\\serien\\Serien
LOG_LEVEL=INFO
# Database Configuration (if needed)
DATABASE_URL=sqlite:///./aniworld.db
# Security Configuration
CORS_ORIGINS=*
API_RATE_LIMIT=100
# Provider Configuration
DEFAULT_PROVIDER=aniworld.to
PROVIDER_TIMEOUT=30
# AniWorld FastAPI Server Configuration
# Authentication Configuration
JWT_SECRET_KEY=your-super-secure-jwt-secret-key-change-this-in-production
PASSWORD_SALT=c3149a46648b4394410b415ea654c31731b988ee59fc91b8fb8366a0b32ef0c1
MASTER_PASSWORD=admin123
# MASTER_PASSWORD_HASH=bb202031f646922388567de96a784074272efbbba9eb5d2259e23af04686d2a5
SESSION_TIMEOUT_HOURS=24
# Application Configuration
ANIME_DIRECTORY=\\\\sshfs.r\\ubuntu@192.168.178.43\\media\\serien\\Serien
LOG_LEVEL=INFO
# Database Configuration (if needed)
DATABASE_URL=sqlite:///./aniworld.db
# Security Configuration
CORS_ORIGINS=*
API_RATE_LIMIT=100
# Provider Configuration
DEFAULT_PROVIDER=aniworld.to
PROVIDER_TIMEOUT=30
RETRY_ATTEMPTS=3

View File

@@ -1,257 +1,257 @@
# AniWorld FastAPI Server
A comprehensive FastAPI-based server implementation for AniWorld following the project instructions.
## 🚀 Features
### ✅ Authentication System (Completed)
- **Simple Master Password Authentication**: Single master password for the entire application
- **JWT Token Management**: Stateless authentication using JWT tokens
- **Environment Configuration**: Secure password hash stored in environment variables
- **Session Management**: Configurable token expiry (default: 24 hours)
- **Security Features**: SHA-256 password hashing with salt
### ✅ API Endpoints (Implemented)
#### Authentication Endpoints
- `POST /auth/login` - Login with master password and receive JWT token
- `GET /auth/verify` - Verify JWT token validity (protected)
- `POST /auth/logout` - Logout endpoint (stateless - client removes token)
#### System Endpoints
- `GET /` - Root endpoint with API information
- `GET /health` - Health check endpoint
- `GET /api/system/config` - System configuration (protected)
- `GET /api/system/database/health` - Database health check (protected)
#### Anime & Episode Endpoints (Protected)
- `GET /api/anime/search` - Search anime by title with pagination
- `GET /api/anime/{anime_id}` - Get specific anime details
- `GET /api/anime/{anime_id}/episodes` - Get all episodes for anime
- `GET /api/episodes/{episode_id}` - Get specific episode details
### 🔧 Technical Features
- **FastAPI Framework**: Modern, fast (high-performance) web framework
- **OpenAPI Documentation**: Automatic API documentation at `/docs`
- **CORS Support**: Configurable cross-origin resource sharing
- **Request Validation**: Pydantic models for request/response validation
- **Error Handling**: Centralized error handling with proper HTTP status codes
- **Logging**: Comprehensive logging system with file and console output
- **Environment Configuration**: Secure configuration via environment variables
## 🛠️ Installation & Setup
### Prerequisites
- Python 3.11+ (AniWorld conda environment)
- Conda package manager
### 1. Activate AniWorld Environment
```bash
conda activate AniWorld
```
### 2. Install Dependencies
```bash
cd src/server
pip install -r requirements_fastapi.txt
```
### 3. Configure Environment
Create or update `.env` file:
```env
# Authentication
JWT_SECRET_KEY=your-super-secure-jwt-secret-key
PASSWORD_SALT=your-secure-salt
MASTER_PASSWORD=admin123
SESSION_TIMEOUT_HOURS=24
# Application
ANIME_DIRECTORY=your-anime-directory-path
LOG_LEVEL=INFO
# Optional
DATABASE_URL=sqlite:///./aniworld.db
CORS_ORIGINS=*
```
### 4. Start the Server
#### Option 1: Direct Python Execution
```bash
cd src/server
python fastapi_app.py  # run with the AniWorld environment's Python interpreter
```
#### Option 2: Using Batch Script (Windows)
```cmd
cd src/server
run_and_test.bat
```
#### Option 3: Using Shell Script (Linux/Mac)
```bash
cd src/server
chmod +x start_fastapi_server.sh
./start_fastapi_server.sh
```
## 📖 API Usage
### 1. Access Documentation
Visit: http://localhost:8000/docs
### 2. Authentication Flow
#### Step 1: Login
```bash
curl -X POST "http://localhost:8000/auth/login" \
-H "Content-Type: application/json" \
-d '{"password": "admin123"}'
```
Response:
```json
{
"success": true,
"message": "Authentication successful",
"token": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9...",
"expires_at": "2025-10-06T18:19:24.710065"
}
```
#### Step 2: Use Token for Protected Endpoints
```bash
curl -X GET "http://localhost:8000/api/anime/search?query=naruto&limit=5" \
-H "Authorization: Bearer YOUR_JWT_TOKEN"
```
### 3. Example API Calls
#### Health Check
```bash
curl "http://localhost:8000/health"
```
#### Search Anime
```bash
curl -H "Authorization: Bearer YOUR_TOKEN" \
"http://localhost:8000/api/anime/search?query=naruto&limit=10"
```
#### Get Anime Details
```bash
curl -H "Authorization: Bearer YOUR_TOKEN" \
"http://localhost:8000/api/anime/anime_123"
```
## 🧪 Testing
### Automated Testing
```bash
cd src/server
C:\Users\lukas\anaconda3\envs\AniWorld\python.exe test_fastapi.py
```
### Manual Testing
1. Start the server
2. Visit http://localhost:8000/docs
3. Use the interactive API documentation
4. Test authentication with password: `admin123`
## 📁 Project Structure
```
src/server/
├── fastapi_app.py # Main FastAPI application
├── .env # Environment configuration
├── requirements_fastapi.txt # Python dependencies
├── test_fastapi.py # Test script
├── start_fastapi_server.bat # Windows startup script
├── start_fastapi_server.sh # Linux/Mac startup script
├── run_and_test.bat # Windows test runner
└── logs/ # Log files
```
## 🔐 Security
### Authentication
- Master password authentication (no user registration required)
- JWT tokens with configurable expiry
- Secure password hashing (SHA-256 + salt)
- Environment-based secret management
### API Security
- All anime/episode endpoints require authentication
- CORS protection
- Input validation using Pydantic
- Error handling without sensitive data exposure
## 🔧 Configuration
### Environment Variables
- `JWT_SECRET_KEY`: Secret key for JWT token signing
- `PASSWORD_SALT`: Salt for password hashing
- `MASTER_PASSWORD`: Master password (development only)
- `MASTER_PASSWORD_HASH`: Hashed master password (production)
- `SESSION_TIMEOUT_HOURS`: JWT token expiry time
- `ANIME_DIRECTORY`: Path to anime files
- `LOG_LEVEL`: Logging level (DEBUG, INFO, WARNING, ERROR)
### Production Configuration
1. Set `MASTER_PASSWORD_HASH` instead of `MASTER_PASSWORD`
2. Use a strong `JWT_SECRET_KEY`
3. Set appropriate `CORS_ORIGINS`
4. Configure proper logging levels
## 📊 API Status
| Endpoint Category | Status | Coverage |
|------------------|--------|----------|
| Authentication | ✅ Complete | 100% |
| Health/System | ✅ Complete | 100% |
| Anime Search | ✅ Implemented | Mock data |
| Episode Management | ✅ Implemented | Mock data |
| Database Integration | 🔄 Placeholder | Todo |
| Real Data Provider | 🔄 Placeholder | Todo |
## 🚧 Future Enhancements
### High Priority
- [ ] Connect to actual anime database/provider
- [ ] Implement real anime search functionality
- [ ] Add episode streaming capabilities
- [ ] Database connection pooling
### Medium Priority
- [ ] Redis caching layer
- [ ] Rate limiting middleware
- [ ] Background task processing
- [ ] WebSocket support
### Low Priority
- [ ] Advanced search filters
- [ ] User preferences (multi-user support)
- [ ] Download progress tracking
- [ ] Statistics and analytics
## 📝 License
This project follows the AniWorld project licensing terms.
## 🤝 Contributing
1. Follow the coding standards in `.github/copilot-instructions.md`
2. Use type hints and Pydantic models
3. Add comprehensive logging
4. Include tests for new features
5. Update documentation
## 📞 Support
- API Documentation: http://localhost:8000/docs
- Health Check: http://localhost:8000/health
- Logs: Check `logs/aniworld.log` for detailed information
---
# AniWorld FastAPI Server
A comprehensive FastAPI-based server implementation for AniWorld following the project instructions.
## 🚀 Features
### ✅ Authentication System (Completed)
- **Simple Master Password Authentication**: Single master password for the entire application
- **JWT Token Management**: Stateless authentication using JWT tokens
- **Environment Configuration**: Secure password hash stored in environment variables
- **Session Management**: Configurable token expiry (default: 24 hours)
- **Security Features**: SHA-256 password hashing with salt
### ✅ API Endpoints (Implemented)
#### Authentication Endpoints
- `POST /auth/login` - Login with master password and receive JWT token
- `GET /auth/verify` - Verify JWT token validity (protected)
- `POST /auth/logout` - Logout endpoint (stateless - client removes token)
#### System Endpoints
- `GET /` - Root endpoint with API information
- `GET /health` - Health check endpoint
- `GET /api/system/config` - System configuration (protected)
- `GET /api/system/database/health` - Database health check (protected)
#### Anime & Episode Endpoints (Protected)
- `GET /api/anime/search` - Search anime by title with pagination
- `GET /api/anime/{anime_id}` - Get specific anime details
- `GET /api/anime/{anime_id}/episodes` - Get all episodes for anime
- `GET /api/episodes/{episode_id}` - Get specific episode details
### 🔧 Technical Features
- **FastAPI Framework**: Modern, fast (high-performance) web framework
- **OpenAPI Documentation**: Automatic API documentation at `/docs`
- **CORS Support**: Configurable cross-origin resource sharing
- **Request Validation**: Pydantic models for request/response validation
- **Error Handling**: Centralized error handling with proper HTTP status codes
- **Logging**: Comprehensive logging system with file and console output
- **Environment Configuration**: Secure configuration via environment variables
## 🛠️ Installation & Setup
### Prerequisites
- Python 3.11+ (AniWorld conda environment)
- Conda package manager
### 1. Activate AniWorld Environment
```bash
conda activate AniWorld
```
### 2. Install Dependencies
```bash
cd src/server
pip install -r requirements_fastapi.txt
```
### 3. Configure Environment
Create or update `.env` file:
```env
# Authentication
JWT_SECRET_KEY=your-super-secure-jwt-secret-key
PASSWORD_SALT=your-secure-salt
MASTER_PASSWORD=admin123
SESSION_TIMEOUT_HOURS=24
# Application
ANIME_DIRECTORY=your-anime-directory-path
LOG_LEVEL=INFO
# Optional
DATABASE_URL=sqlite:///./aniworld.db
CORS_ORIGINS=*
```
### 4. Start the Server
#### Option 1: Direct Python Execution
```bash
cd src/server
python fastapi_app.py  # run with the AniWorld environment's Python interpreter
```
#### Option 2: Using Batch Script (Windows)
```cmd
cd src/server
run_and_test.bat
```
#### Option 3: Using Shell Script (Linux/Mac)
```bash
cd src/server
chmod +x start_fastapi_server.sh
./start_fastapi_server.sh
```
## 📖 API Usage
### 1. Access Documentation
Visit: http://localhost:8000/docs
### 2. Authentication Flow
#### Step 1: Login
```bash
curl -X POST "http://localhost:8000/auth/login" \
-H "Content-Type: application/json" \
-d '{"password": "admin123"}'
```
Response:
```json
{
"success": true,
"message": "Authentication successful",
"token": "eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9...",
"expires_at": "2025-10-06T18:19:24.710065"
}
```
#### Step 2: Use Token for Protected Endpoints
```bash
curl -X GET "http://localhost:8000/api/anime/search?query=naruto&limit=5" \
-H "Authorization: Bearer YOUR_JWT_TOKEN"
```
### 3. Example API Calls
#### Health Check
```bash
curl "http://localhost:8000/health"
```
#### Search Anime
```bash
curl -H "Authorization: Bearer YOUR_TOKEN" \
"http://localhost:8000/api/anime/search?query=naruto&limit=10"
```
#### Get Anime Details
```bash
curl -H "Authorization: Bearer YOUR_TOKEN" \
"http://localhost:8000/api/anime/anime_123"
```
## 🧪 Testing
### Automated Testing
```bash
cd src/server
C:\Users\lukas\anaconda3\envs\AniWorld\python.exe test_fastapi.py
```
### Manual Testing
1. Start the server
2. Visit http://localhost:8000/docs
3. Use the interactive API documentation
4. Test authentication with password: `admin123`
## 📁 Project Structure
```
src/server/
├── fastapi_app.py # Main FastAPI application
├── .env # Environment configuration
├── requirements_fastapi.txt # Python dependencies
├── test_fastapi.py # Test script
├── start_fastapi_server.bat # Windows startup script
├── start_fastapi_server.sh # Linux/Mac startup script
├── run_and_test.bat # Windows test runner
└── logs/ # Log files
```
## 🔐 Security
### Authentication
- Master password authentication (no user registration required)
- JWT tokens with configurable expiry
- Secure password hashing (SHA-256 + salt)
- Environment-based secret management
### API Security
- All anime/episode endpoints require authentication
- CORS protection
- Input validation using Pydantic
- Error handling without sensitive data exposure
## 🔧 Configuration
### Environment Variables
- `JWT_SECRET_KEY`: Secret key for JWT token signing
- `PASSWORD_SALT`: Salt for password hashing
- `MASTER_PASSWORD`: Master password (development only)
- `MASTER_PASSWORD_HASH`: Hashed master password (production)
- `SESSION_TIMEOUT_HOURS`: JWT token expiry time
- `ANIME_DIRECTORY`: Path to anime files
- `LOG_LEVEL`: Logging level (DEBUG, INFO, WARNING, ERROR)
### Production Configuration
1. Set `MASTER_PASSWORD_HASH` instead of `MASTER_PASSWORD`
2. Use a strong `JWT_SECRET_KEY`
3. Set appropriate `CORS_ORIGINS`
4. Configure proper logging levels
## 📊 API Status
| Endpoint Category | Status | Coverage |
|------------------|--------|----------|
| Authentication | ✅ Complete | 100% |
| Health/System | ✅ Complete | 100% |
| Anime Search | ✅ Implemented | Mock data |
| Episode Management | ✅ Implemented | Mock data |
| Database Integration | 🔄 Placeholder | Todo |
| Real Data Provider | 🔄 Placeholder | Todo |
## 🚧 Future Enhancements
### High Priority
- [ ] Connect to actual anime database/provider
- [ ] Implement real anime search functionality
- [ ] Add episode streaming capabilities
- [ ] Database connection pooling
### Medium Priority
- [ ] Redis caching layer
- [ ] Rate limiting middleware
- [ ] Background task processing
- [ ] WebSocket support
### Low Priority
- [ ] Advanced search filters
- [ ] User preferences (multi-user support)
- [ ] Download progress tracking
- [ ] Statistics and analytics
## 📝 License
This project follows the AniWorld project licensing terms.
## 🤝 Contributing
1. Follow the coding standards in `.github/copilot-instructions.md`
2. Use type hints and Pydantic models
3. Add comprehensive logging
4. Include tests for new features
5. Update documentation
## 📞 Support
- API Documentation: http://localhost:8000/docs
- Health Check: http://localhost:8000/health
- Logs: Check `logs/aniworld.log` for detailed information
---
**Note**: This FastAPI implementation provides a solid foundation following the project instructions. The authentication system is complete and production-ready, while anime/episode endpoints currently return mock data pending integration with the actual data providers.

File diff suppressed because it is too large Load Diff

View File

@@ -1,10 +1,10 @@
"""
Configuration package for the Aniworld server.
This package provides configuration management and environment
variable handling for secure application deployment.
"""
from .env_config import EnvironmentConfig, env_config
"""
Configuration package for the Aniworld server.
This package provides configuration management and environment
variable handling for secure application deployment.
"""
from .env_config import EnvironmentConfig, env_config
__all__ = ['EnvironmentConfig', 'env_config']

View File

@@ -1,217 +1,217 @@
"""
Environment configuration for secure handling of sensitive data.
This module provides secure environment variable handling and configuration
management for the Aniworld server application.
"""
import os
import secrets
from typing import Optional, Dict, Any
from dotenv import load_dotenv
import logging
logger = logging.getLogger(__name__)
# Load environment variables from .env file
load_dotenv()
class EnvironmentConfig:
    """Manages environment variables and secure configuration.

    All attributes are read from the process environment once, when this
    class body executes at import time; later changes to os.environ are
    not picked up.
    """
    # Security
    # NOTE(review): when unset, these default to freshly generated random
    # values, so each process restart produces new secrets (invalidating
    # existing JWTs/sessions) and validate_config() below can never report
    # them as missing — confirm this is intended.
    SECRET_KEY: str = os.getenv('SECRET_KEY', secrets.token_urlsafe(32))
    JWT_SECRET_KEY: str = os.getenv('JWT_SECRET_KEY', secrets.token_urlsafe(32))
    PASSWORD_SALT: str = os.getenv('PASSWORD_SALT', secrets.token_hex(32))
    # Database
    DATABASE_URL: str = os.getenv('DATABASE_URL', 'sqlite:///data/aniworld.db')
    DATABASE_PASSWORD: Optional[str] = os.getenv('DATABASE_PASSWORD')
    # Redis (for caching and sessions)
    REDIS_URL: str = os.getenv('REDIS_URL', 'redis://localhost:6379/0')
    REDIS_PASSWORD: Optional[str] = os.getenv('REDIS_PASSWORD')
    # API Keys and External Services
    ANIME_PROVIDER_API_KEY: Optional[str] = os.getenv('ANIME_PROVIDER_API_KEY')
    TMDB_API_KEY: Optional[str] = os.getenv('TMDB_API_KEY')
    # Email Configuration (for password reset)
    SMTP_SERVER: str = os.getenv('SMTP_SERVER', 'localhost')
    # NOTE(review): int(...) raises ValueError at import time if the env var
    # holds a non-numeric value; same applies to the other int conversions below.
    SMTP_PORT: int = int(os.getenv('SMTP_PORT', '587'))
    SMTP_USERNAME: Optional[str] = os.getenv('SMTP_USERNAME')
    SMTP_PASSWORD: Optional[str] = os.getenv('SMTP_PASSWORD')
    SMTP_USE_TLS: bool = os.getenv('SMTP_USE_TLS', 'true').lower() == 'true'
    FROM_EMAIL: str = os.getenv('FROM_EMAIL', 'noreply@aniworld.local')
    # Security Settings
    SESSION_TIMEOUT_HOURS: int = int(os.getenv('SESSION_TIMEOUT_HOURS', '24'))
    MAX_FAILED_LOGIN_ATTEMPTS: int = int(os.getenv('MAX_FAILED_LOGIN_ATTEMPTS', '5'))
    LOCKOUT_DURATION_MINUTES: int = int(os.getenv('LOCKOUT_DURATION_MINUTES', '30'))
    # Rate Limiting
    RATE_LIMIT_PER_MINUTE: int = int(os.getenv('RATE_LIMIT_PER_MINUTE', '60'))
    API_RATE_LIMIT_PER_MINUTE: int = int(os.getenv('API_RATE_LIMIT_PER_MINUTE', '100'))
    # Application Settings
    DEBUG: bool = os.getenv('DEBUG', 'false').lower() == 'true'
    HOST: str = os.getenv('HOST', '127.0.0.1')
    PORT: int = int(os.getenv('PORT', '5000'))
    # Anime Directory and Download Settings
    ANIME_DIRECTORY: str = os.getenv('ANIME_DIRECTORY', './downloads')
    MAX_CONCURRENT_DOWNLOADS: int = int(os.getenv('MAX_CONCURRENT_DOWNLOADS', '3'))
    # 0 (or unset) normalizes to None, meaning "no speed limit".
    DOWNLOAD_SPEED_LIMIT: Optional[int] = int(os.getenv('DOWNLOAD_SPEED_LIMIT', '0')) or None
    # Logging
    LOG_LEVEL: str = os.getenv('LOG_LEVEL', 'INFO')
    LOG_FILE: str = os.getenv('LOG_FILE', './logs/aniworld.log')

    @classmethod
    def get_database_config(cls) -> Dict[str, Any]:
        """Get database configuration.

        Pool settings are read from the environment on every call, unlike
        the class-level attributes which are captured at import time.
        """
        return {
            'url': cls.DATABASE_URL,
            'password': cls.DATABASE_PASSWORD,
            'pool_size': int(os.getenv('DATABASE_POOL_SIZE', '10')),
            'max_overflow': int(os.getenv('DATABASE_MAX_OVERFLOW', '20')),
            'pool_timeout': int(os.getenv('DATABASE_POOL_TIMEOUT', '30')),
            'pool_recycle': int(os.getenv('DATABASE_POOL_RECYCLE', '3600'))
        }

    @classmethod
    def get_redis_config(cls) -> Dict[str, Any]:
        """Get Redis configuration."""
        return {
            'url': cls.REDIS_URL,
            'password': cls.REDIS_PASSWORD,
            'max_connections': int(os.getenv('REDIS_MAX_CONNECTIONS', '10')),
            'retry_on_timeout': True,
            'socket_timeout': int(os.getenv('REDIS_SOCKET_TIMEOUT', '5'))
        }

    @classmethod
    def get_email_config(cls) -> Dict[str, Any]:
        """Get email configuration."""
        return {
            'server': cls.SMTP_SERVER,
            'port': cls.SMTP_PORT,
            'username': cls.SMTP_USERNAME,
            'password': cls.SMTP_PASSWORD,
            'use_tls': cls.SMTP_USE_TLS,
            'from_email': cls.FROM_EMAIL
        }

    @classmethod
    def get_security_config(cls) -> Dict[str, Any]:
        """Get security configuration."""
        return {
            'secret_key': cls.SECRET_KEY,
            'jwt_secret_key': cls.JWT_SECRET_KEY,
            'password_salt': cls.PASSWORD_SALT,
            'session_timeout_hours': cls.SESSION_TIMEOUT_HOURS,
            'max_failed_attempts': cls.MAX_FAILED_LOGIN_ATTEMPTS,
            'lockout_duration_minutes': cls.LOCKOUT_DURATION_MINUTES,
            'rate_limit_per_minute': cls.RATE_LIMIT_PER_MINUTE,
            'api_rate_limit_per_minute': cls.API_RATE_LIMIT_PER_MINUTE
        }

    @classmethod
    def validate_config(cls) -> bool:
        """Validate that required configuration is present.

        Returns False and logs the missing names if any required attribute
        is falsy. NOTE(review): the three checked attributes default to
        generated random values above, so this check can only fail if they
        are explicitly set to empty strings — confirm this is intended.
        """
        required_vars = [
            'SECRET_KEY',
            'JWT_SECRET_KEY',
            'PASSWORD_SALT'
        ]
        missing_vars = []
        for var in required_vars:
            if not getattr(cls, var):
                missing_vars.append(var)
        if missing_vars:
            logger.error(f"Missing required environment variables: {missing_vars}")
            return False
        return True

    @classmethod
    def generate_env_template(cls, file_path: str = '.env.template') -> bool:
        """Generate a template .env file with all available configuration options.

        Returns True on success, False (with a logged error) on any failure.
        """
        try:
            template_content = """# Aniworld Server Environment Configuration
# Copy this file to .env and fill in your values
# Security (REQUIRED - Generate secure random values)
SECRET_KEY=your_secret_key_here
JWT_SECRET_KEY=your_jwt_secret_here
PASSWORD_SALT=your_password_salt_here
# Database Configuration
DATABASE_URL=sqlite:///data/aniworld.db
# DATABASE_PASSWORD=your_db_password_here
DATABASE_POOL_SIZE=10
DATABASE_MAX_OVERFLOW=20
DATABASE_POOL_TIMEOUT=30
DATABASE_POOL_RECYCLE=3600
# Redis Configuration (for caching and sessions)
REDIS_URL=redis://localhost:6379/0
# REDIS_PASSWORD=your_redis_password_here
REDIS_MAX_CONNECTIONS=10
REDIS_SOCKET_TIMEOUT=5
# Email Configuration (for password reset emails)
SMTP_SERVER=localhost
SMTP_PORT=587
# SMTP_USERNAME=your_smtp_username
# SMTP_PASSWORD=your_smtp_password
SMTP_USE_TLS=true
FROM_EMAIL=noreply@aniworld.local
# External API Keys
# ANIME_PROVIDER_API_KEY=your_anime_provider_api_key
# TMDB_API_KEY=your_tmdb_api_key
# Security Settings
SESSION_TIMEOUT_HOURS=24
MAX_FAILED_LOGIN_ATTEMPTS=5
LOCKOUT_DURATION_MINUTES=30
# Rate Limiting
RATE_LIMIT_PER_MINUTE=60
API_RATE_LIMIT_PER_MINUTE=100
# Application Settings
DEBUG=false
HOST=127.0.0.1
PORT=5000
# Anime and Download Settings
ANIME_DIRECTORY=./downloads
MAX_CONCURRENT_DOWNLOADS=3
# DOWNLOAD_SPEED_LIMIT=1000000 # bytes per second
# Logging
LOG_LEVEL=INFO
LOG_FILE=./logs/aniworld.log
"""
            with open(file_path, 'w', encoding='utf-8') as f:
                f.write(template_content)
            logger.info(f"Environment template created at {file_path}")
            return True
        except Exception as e:
            logger.error(f"Error creating environment template: {e}")
            return False
# Create global instance
env_config = EnvironmentConfig()
# Validate configuration on import
if not env_config.validate_config():
"""
Environment configuration for secure handling of sensitive data.
This module provides secure environment variable handling and configuration
management for the Aniworld server application.
"""
import os
import secrets
from typing import Optional, Dict, Any
from dotenv import load_dotenv
import logging
logger = logging.getLogger(__name__)
# Load environment variables from .env file
load_dotenv()
class EnvironmentConfig:
    """Manages environment variables and secure configuration.

    All values are read from the process environment at class-body
    evaluation time (i.e. when this module is first imported); later
    changes to os.environ are not picked up.
    """
    # Security
    # NOTE(review): these defaults generate a NEW random secret on every
    # process start, so sessions/tokens signed with them are invalidated
    # across restarts unless the variables are set explicitly — confirm
    # this is intended for production deployments.
    SECRET_KEY: str = os.getenv('SECRET_KEY', secrets.token_urlsafe(32))
    JWT_SECRET_KEY: str = os.getenv('JWT_SECRET_KEY', secrets.token_urlsafe(32))
    PASSWORD_SALT: str = os.getenv('PASSWORD_SALT', secrets.token_hex(32))
    # Database
    DATABASE_URL: str = os.getenv('DATABASE_URL', 'sqlite:///data/aniworld.db')
    DATABASE_PASSWORD: Optional[str] = os.getenv('DATABASE_PASSWORD')
    # Redis (for caching and sessions)
    REDIS_URL: str = os.getenv('REDIS_URL', 'redis://localhost:6379/0')
    REDIS_PASSWORD: Optional[str] = os.getenv('REDIS_PASSWORD')
    # API Keys and External Services
    ANIME_PROVIDER_API_KEY: Optional[str] = os.getenv('ANIME_PROVIDER_API_KEY')
    TMDB_API_KEY: Optional[str] = os.getenv('TMDB_API_KEY')
    # Email Configuration (for password reset)
    SMTP_SERVER: str = os.getenv('SMTP_SERVER', 'localhost')
    SMTP_PORT: int = int(os.getenv('SMTP_PORT', '587'))
    SMTP_USERNAME: Optional[str] = os.getenv('SMTP_USERNAME')
    SMTP_PASSWORD: Optional[str] = os.getenv('SMTP_PASSWORD')
    SMTP_USE_TLS: bool = os.getenv('SMTP_USE_TLS', 'true').lower() == 'true'
    FROM_EMAIL: str = os.getenv('FROM_EMAIL', 'noreply@aniworld.local')
    # Security Settings
    SESSION_TIMEOUT_HOURS: int = int(os.getenv('SESSION_TIMEOUT_HOURS', '24'))
    MAX_FAILED_LOGIN_ATTEMPTS: int = int(os.getenv('MAX_FAILED_LOGIN_ATTEMPTS', '5'))
    LOCKOUT_DURATION_MINUTES: int = int(os.getenv('LOCKOUT_DURATION_MINUTES', '30'))
    # Rate Limiting
    RATE_LIMIT_PER_MINUTE: int = int(os.getenv('RATE_LIMIT_PER_MINUTE', '60'))
    API_RATE_LIMIT_PER_MINUTE: int = int(os.getenv('API_RATE_LIMIT_PER_MINUTE', '100'))
    # Application Settings
    DEBUG: bool = os.getenv('DEBUG', 'false').lower() == 'true'
    HOST: str = os.getenv('HOST', '127.0.0.1')
    PORT: int = int(os.getenv('PORT', '5000'))
    # Anime Directory and Download Settings
    ANIME_DIRECTORY: str = os.getenv('ANIME_DIRECTORY', './downloads')
    MAX_CONCURRENT_DOWNLOADS: int = int(os.getenv('MAX_CONCURRENT_DOWNLOADS', '3'))
    # '0' (the default) is falsy after int(), so an unset variable yields
    # None, which downstream code treats as "no speed limit".
    DOWNLOAD_SPEED_LIMIT: Optional[int] = int(os.getenv('DOWNLOAD_SPEED_LIMIT', '0')) or None
    # Logging
    LOG_LEVEL: str = os.getenv('LOG_LEVEL', 'INFO')
    LOG_FILE: str = os.getenv('LOG_FILE', './logs/aniworld.log')

    @classmethod
    def get_database_config(cls) -> Dict[str, Any]:
        """Return database connection/pool settings as a plain dict.

        Pool values are re-read from the environment on every call, unlike
        the class attributes above.
        """
        return {
            'url': cls.DATABASE_URL,
            'password': cls.DATABASE_PASSWORD,
            'pool_size': int(os.getenv('DATABASE_POOL_SIZE', '10')),
            'max_overflow': int(os.getenv('DATABASE_MAX_OVERFLOW', '20')),
            'pool_timeout': int(os.getenv('DATABASE_POOL_TIMEOUT', '30')),
            'pool_recycle': int(os.getenv('DATABASE_POOL_RECYCLE', '3600'))
        }

    @classmethod
    def get_redis_config(cls) -> Dict[str, Any]:
        """Return Redis connection settings as a plain dict."""
        return {
            'url': cls.REDIS_URL,
            'password': cls.REDIS_PASSWORD,
            'max_connections': int(os.getenv('REDIS_MAX_CONNECTIONS', '10')),
            'retry_on_timeout': True,
            'socket_timeout': int(os.getenv('REDIS_SOCKET_TIMEOUT', '5'))
        }

    @classmethod
    def get_email_config(cls) -> Dict[str, Any]:
        """Return SMTP settings as a plain dict (used for password-reset mail)."""
        return {
            'server': cls.SMTP_SERVER,
            'port': cls.SMTP_PORT,
            'username': cls.SMTP_USERNAME,
            'password': cls.SMTP_PASSWORD,
            'use_tls': cls.SMTP_USE_TLS,
            'from_email': cls.FROM_EMAIL
        }

    @classmethod
    def get_security_config(cls) -> Dict[str, Any]:
        """Return secrets and security-policy settings as a plain dict."""
        return {
            'secret_key': cls.SECRET_KEY,
            'jwt_secret_key': cls.JWT_SECRET_KEY,
            'password_salt': cls.PASSWORD_SALT,
            'session_timeout_hours': cls.SESSION_TIMEOUT_HOURS,
            'max_failed_attempts': cls.MAX_FAILED_LOGIN_ATTEMPTS,
            'lockout_duration_minutes': cls.LOCKOUT_DURATION_MINUTES,
            'rate_limit_per_minute': cls.RATE_LIMIT_PER_MINUTE,
            'api_rate_limit_per_minute': cls.API_RATE_LIMIT_PER_MINUTE
        }

    @classmethod
    def validate_config(cls) -> bool:
        """Validate that required configuration is present.

        Returns False (and logs) when any required attribute is falsy.

        NOTE(review): SECRET_KEY / JWT_SECRET_KEY / PASSWORD_SALT all fall
        back to freshly generated random values above, so as written this
        check can never fail — confirm whether it should inspect os.environ
        directly instead of the class attributes.
        """
        required_vars = [
            'SECRET_KEY',
            'JWT_SECRET_KEY',
            'PASSWORD_SALT'
        ]
        missing_vars = []
        for var in required_vars:
            if not getattr(cls, var):
                missing_vars.append(var)
        if missing_vars:
            logger.error(f"Missing required environment variables: {missing_vars}")
            return False
        return True

    @classmethod
    def generate_env_template(cls, file_path: str = '.env.template') -> bool:
        """Generate a template .env file with all available configuration options.

        Args:
            file_path: Destination path; overwritten if it exists.

        Returns:
            bool: True on success, False if writing failed (error is logged).
        """
        try:
            template_content = """# Aniworld Server Environment Configuration
# Copy this file to .env and fill in your values
# Security (REQUIRED - Generate secure random values)
SECRET_KEY=your_secret_key_here
JWT_SECRET_KEY=your_jwt_secret_here
PASSWORD_SALT=your_password_salt_here
# Database Configuration
DATABASE_URL=sqlite:///data/aniworld.db
# DATABASE_PASSWORD=your_db_password_here
DATABASE_POOL_SIZE=10
DATABASE_MAX_OVERFLOW=20
DATABASE_POOL_TIMEOUT=30
DATABASE_POOL_RECYCLE=3600
# Redis Configuration (for caching and sessions)
REDIS_URL=redis://localhost:6379/0
# REDIS_PASSWORD=your_redis_password_here
REDIS_MAX_CONNECTIONS=10
REDIS_SOCKET_TIMEOUT=5
# Email Configuration (for password reset emails)
SMTP_SERVER=localhost
SMTP_PORT=587
# SMTP_USERNAME=your_smtp_username
# SMTP_PASSWORD=your_smtp_password
SMTP_USE_TLS=true
FROM_EMAIL=noreply@aniworld.local
# External API Keys
# ANIME_PROVIDER_API_KEY=your_anime_provider_api_key
# TMDB_API_KEY=your_tmdb_api_key
# Security Settings
SESSION_TIMEOUT_HOURS=24
MAX_FAILED_LOGIN_ATTEMPTS=5
LOCKOUT_DURATION_MINUTES=30
# Rate Limiting
RATE_LIMIT_PER_MINUTE=60
API_RATE_LIMIT_PER_MINUTE=100
# Application Settings
DEBUG=false
HOST=127.0.0.1
PORT=5000
# Anime and Download Settings
ANIME_DIRECTORY=./downloads
MAX_CONCURRENT_DOWNLOADS=3
# DOWNLOAD_SPEED_LIMIT=1000000  # bytes per second
# Logging
LOG_LEVEL=INFO
LOG_FILE=./logs/aniworld.log
"""
            with open(file_path, 'w', encoding='utf-8') as f:
                f.write(template_content)
            logger.info(f"Environment template created at {file_path}")
            return True
        except Exception as e:
            logger.error(f"Error creating environment template: {e}")
            return False
# Create global instance
# Module-level singleton: importing this module reads the environment and
# runs the validation below as a side effect.
env_config = EnvironmentConfig()
# Validate configuration on import
if not env_config.validate_config():
    logger.warning("Invalid environment configuration detected. Please check your .env file.")

File diff suppressed because it is too large Load Diff

View File

@@ -1,248 +1,248 @@
"""
Application Flow Middleware for FastAPI.
This middleware enforces the application flow priorities:
1. Setup page (if setup is not complete)
2. Authentication page (if user is not authenticated)
3. Main application (for authenticated users with completed setup)
The middleware redirects users to the appropriate page based on their current state
and the state of the application setup.
"""
import logging
from typing import Optional
from fastapi import Request
from fastapi.responses import RedirectResponse
from starlette.middleware.base import BaseHTTPMiddleware
# Import the setup service
try:
    from ..services.setup_service import SetupService
except ImportError:
    # Handle case where service is not available.
    # Fallback stub: reports setup as complete so the middleware never
    # blocks requests when the real service cannot be imported.
    class SetupService:
        def is_setup_complete(self):
            return True
logger = logging.getLogger(__name__)
class ApplicationFlowMiddleware(BaseHTTPMiddleware):
    """
    Middleware to enforce application flow: setup → auth → main application.

    This middleware:
    1. Checks if setup is complete
    2. Validates authentication status
    3. Redirects to appropriate page based on state
    4. Allows API endpoints and static files to pass through
    """

    def __init__(self, app, setup_service: Optional[SetupService] = None):
        """
        Initialize the application flow middleware.

        Args:
            app: FastAPI application instance
            setup_service: Setup service instance (optional, will create if not provided)
        """
        super().__init__(app)
        self.setup_service = setup_service or SetupService()
        # Paths that should bypass flow enforcement entirely (prefix-matched).
        self.bypass_paths = {
            "/static",        # Static files
            "/favicon.ico",   # Browser favicon requests
            "/robots.txt",    # Robots.txt
            "/health",        # Health check endpoints
            "/docs",          # OpenAPI documentation
            "/redoc",         # ReDoc documentation
            "/openapi.json"   # OpenAPI spec
        }
        # API paths that should bypass flow but may require their own auth.
        self.api_paths = {
            "/api",
            "/auth"
        }
        # Pages that are part of the flow and should be accessible.
        self.flow_pages = {
            "/setup",
            "/login",
            "/app"
        }

    async def dispatch(self, request: Request, call_next):
        """
        Process the request and enforce application flow.

        Args:
            request: Incoming HTTP request
            call_next: Next middleware/handler in chain

        Returns:
            Response: Either a redirect response or the result of call_next
        """
        try:
            # Get the request path
            path = request.url.path
            # Skip flow enforcement for certain paths
            if self._should_bypass_flow(path):
                return await call_next(request)
            # Check application setup status
            setup_complete = self.setup_service.is_setup_complete()
            # Check authentication status
            is_authenticated = await self._is_user_authenticated(request)
            # Determine the appropriate action
            redirect_response = self._determine_redirect(path, setup_complete, is_authenticated)
            if redirect_response:
                logger.info(f"Redirecting {path} to {redirect_response.headers.get('location')}")
                return redirect_response
            # Continue with the request
            return await call_next(request)
        except Exception as e:
            logger.error(f"Error in ApplicationFlowMiddleware: {e}", exc_info=True)
            # Fail open: in case of error, allow the request to continue.
            return await call_next(request)

    def _should_bypass_flow(self, path: str) -> bool:
        """
        Check if the given path should bypass flow enforcement.

        Args:
            path: Request path

        Returns:
            bool: True if path should bypass flow enforcement

        Note: matching is startswith-based, so e.g. "/staticx" also matches
        the "/static" prefix.
        """
        # Check exact bypass paths
        for bypass_path in self.bypass_paths:
            if path.startswith(bypass_path):
                return True
        # API paths bypass flow enforcement (but may have their own auth)
        for api_path in self.api_paths:
            if path.startswith(api_path):
                return True
        return False

    async def _is_user_authenticated(self, request: Request) -> bool:
        """
        Check if the user is authenticated by validating the JWT token in
        the Authorization header. Any failure (missing header, bad token,
        import error) is treated as "not authenticated".

        Args:
            request: HTTP request object

        Returns:
            bool: True if user is authenticated, False otherwise
        """
        try:
            # Check for Authorization header
            auth_header = request.headers.get("authorization")
            if not auth_header or not auth_header.startswith("Bearer "):
                return False
            # Extract and validate token
            token = auth_header.split(" ")[1]
            # Import lazily to avoid a circular import with the app module.
            try:
                from ..fastapi_app import verify_jwt_token
                payload = verify_jwt_token(token)
                return payload is not None
            except ImportError:
                # Fallback if import fails
                logger.warning("Could not import JWT verification function")
                return False
        except Exception as e:
            logger.error(f"Error checking authentication: {e}")
            return False

    def _determine_redirect(self, path: str, setup_complete: bool, is_authenticated: bool) -> Optional[RedirectResponse]:
        """
        Determine if a redirect is needed based on current state.

        Args:
            path: Current request path
            setup_complete: Whether application setup is complete
            is_authenticated: Whether user is authenticated

        Returns:
            Optional[RedirectResponse]: Redirect response if needed, None otherwise
        """
        # If setup is not complete, funnel everything to the setup page.
        if not setup_complete:
            # Allow access to setup page
            if path == "/setup":
                return None
            # Redirect everything else to setup
            return RedirectResponse(url="/setup", status_code=302)
        # Setup is complete, check authentication
        if not is_authenticated:
            # Allow access to login page
            if path == "/login":
                return None
            # Redirect unauthenticated users to login (only flow pages and root).
            if path in self.flow_pages or path == "/":
                return RedirectResponse(url="/login", status_code=302)
        # User is authenticated and setup is complete
        else:
            # Redirect from setup/login pages to main app
            if path in ["/setup", "/login", "/"]:
                return RedirectResponse(url="/app", status_code=302)
        # No redirect needed
        return None

    def get_flow_status(self, request: Request) -> dict:
        """
        Get current flow status for debugging/monitoring.

        Args:
            request: HTTP request object

        Returns:
            dict: Current flow status information
        """
        try:
            setup_complete = self.setup_service.is_setup_complete()
            # NOTE(review): _is_user_authenticated is async but is called
            # here without await, so this binds a coroutine object (which is
            # always truthy) and the coroutine is never awaited. The
            # 'authenticated' field below is therefore unreliable — confirm
            # and fix (e.g. make this method async or expose a sync check).
            is_authenticated = self._is_user_authenticated(request)
            return {
                "setup_complete": setup_complete,
                "authenticated": is_authenticated,
                "path": request.url.path,
                "should_bypass": self._should_bypass_flow(request.url.path)
            }
        except Exception as e:
            return {
                "error": str(e),
                "path": request.url.path
            }
def create_application_flow_middleware(setup_service: Optional[SetupService] = None) -> ApplicationFlowMiddleware:
"""
Factory function to create application flow middleware.
Args:
setup_service: Setup service instance (optional)
Returns:
ApplicationFlowMiddleware: Configured middleware instance
"""
"""
Application Flow Middleware for FastAPI.
This middleware enforces the application flow priorities:
1. Setup page (if setup is not complete)
2. Authentication page (if user is not authenticated)
3. Main application (for authenticated users with completed setup)
The middleware redirects users to the appropriate page based on their current state
and the state of the application setup.
"""
import logging
from typing import Optional
from fastapi import Request
from fastapi.responses import RedirectResponse
from starlette.middleware.base import BaseHTTPMiddleware
# Import the setup service
try:
    from ...core.application.services.setup_service import SetupService
except ImportError:
    # Handle case where service is not available.
    # Fallback stub: reports setup as complete so the middleware never
    # blocks requests when the real service cannot be imported.
    class SetupService:
        def is_setup_complete(self):
            return True
logger = logging.getLogger(__name__)
class ApplicationFlowMiddleware(BaseHTTPMiddleware):
    """
    Middleware to enforce application flow: setup → auth → main application.

    This middleware:
    1. Checks if setup is complete
    2. Validates authentication status
    3. Redirects to appropriate page based on state
    4. Allows API endpoints and static files to pass through
    """

    def __init__(self, app, setup_service: Optional[SetupService] = None):
        """
        Initialize the application flow middleware.

        Args:
            app: FastAPI application instance
            setup_service: Setup service instance (optional, will create if not provided)
        """
        super().__init__(app)
        self.setup_service = setup_service or SetupService()
        # Paths that should bypass flow enforcement entirely (prefix-matched).
        self.bypass_paths = {
            "/static",        # Static files
            "/favicon.ico",   # Browser favicon requests
            "/robots.txt",    # Robots.txt
            "/health",        # Health check endpoints
            "/docs",          # OpenAPI documentation
            "/redoc",         # ReDoc documentation
            "/openapi.json"   # OpenAPI spec
        }
        # API paths that should bypass flow but may require their own auth.
        self.api_paths = {
            "/api",
            "/auth"
        }
        # Pages that are part of the flow and should be accessible.
        self.flow_pages = {
            "/setup",
            "/login",
            "/app"
        }

    async def dispatch(self, request: Request, call_next):
        """
        Process the request and enforce application flow.

        Args:
            request: Incoming HTTP request
            call_next: Next middleware/handler in chain

        Returns:
            Response: Either a redirect response or the result of call_next
        """
        try:
            # Get the request path
            path = request.url.path
            # Skip flow enforcement for certain paths
            if self._should_bypass_flow(path):
                return await call_next(request)
            # Check application setup status
            setup_complete = self.setup_service.is_setup_complete()
            # Check authentication status
            is_authenticated = await self._is_user_authenticated(request)
            # Determine the appropriate action
            redirect_response = self._determine_redirect(path, setup_complete, is_authenticated)
            if redirect_response:
                logger.info(f"Redirecting {path} to {redirect_response.headers.get('location')}")
                return redirect_response
            # Continue with the request
            return await call_next(request)
        except Exception as e:
            logger.error(f"Error in ApplicationFlowMiddleware: {e}", exc_info=True)
            # Fail open: in case of error, allow the request to continue.
            return await call_next(request)

    def _should_bypass_flow(self, path: str) -> bool:
        """
        Check if the given path should bypass flow enforcement.

        Args:
            path: Request path

        Returns:
            bool: True if path should bypass flow enforcement

        Note: matching is startswith-based, so e.g. "/staticx" also matches
        the "/static" prefix.
        """
        # Check exact bypass paths
        for bypass_path in self.bypass_paths:
            if path.startswith(bypass_path):
                return True
        # API paths bypass flow enforcement (but may have their own auth)
        for api_path in self.api_paths:
            if path.startswith(api_path):
                return True
        return False

    async def _is_user_authenticated(self, request: Request) -> bool:
        """
        Check if the user is authenticated by validating the JWT token in
        the Authorization header. Any failure (missing header, bad token,
        import error) is treated as "not authenticated".

        Args:
            request: HTTP request object

        Returns:
            bool: True if user is authenticated, False otherwise
        """
        try:
            # Check for Authorization header
            auth_header = request.headers.get("authorization")
            if not auth_header or not auth_header.startswith("Bearer "):
                return False
            # Extract and validate token
            token = auth_header.split(" ")[1]
            # Import lazily to avoid a circular import with the app module.
            try:
                from ..fastapi_app import verify_jwt_token
                payload = verify_jwt_token(token)
                return payload is not None
            except ImportError:
                # Fallback if import fails
                logger.warning("Could not import JWT verification function")
                return False
        except Exception as e:
            logger.error(f"Error checking authentication: {e}")
            return False

    def _determine_redirect(self, path: str, setup_complete: bool, is_authenticated: bool) -> Optional[RedirectResponse]:
        """
        Determine if a redirect is needed based on current state.

        Args:
            path: Current request path
            setup_complete: Whether application setup is complete
            is_authenticated: Whether user is authenticated

        Returns:
            Optional[RedirectResponse]: Redirect response if needed, None otherwise
        """
        # If setup is not complete, funnel everything to the setup page.
        if not setup_complete:
            # Allow access to setup page
            if path == "/setup":
                return None
            # Redirect everything else to setup
            return RedirectResponse(url="/setup", status_code=302)
        # Setup is complete, check authentication
        if not is_authenticated:
            # Allow access to login page
            if path == "/login":
                return None
            # Redirect unauthenticated users to login (only flow pages and root).
            if path in self.flow_pages or path == "/":
                return RedirectResponse(url="/login", status_code=302)
        # User is authenticated and setup is complete
        else:
            # Redirect from setup/login pages to main app
            if path in ["/setup", "/login", "/"]:
                return RedirectResponse(url="/app", status_code=302)
        # No redirect needed
        return None

    def get_flow_status(self, request: Request) -> dict:
        """
        Get current flow status for debugging/monitoring.

        Args:
            request: HTTP request object

        Returns:
            dict: Current flow status information
        """
        try:
            setup_complete = self.setup_service.is_setup_complete()
            # NOTE(review): _is_user_authenticated is async but is called
            # here without await, so this binds a coroutine object (which is
            # always truthy) and the coroutine is never awaited. The
            # 'authenticated' field below is therefore unreliable — confirm
            # and fix (e.g. make this method async or expose a sync check).
            is_authenticated = self._is_user_authenticated(request)
            return {
                "setup_complete": setup_complete,
                "authenticated": is_authenticated,
                "path": request.url.path,
                "should_bypass": self._should_bypass_flow(request.url.path)
            }
        except Exception as e:
            return {
                "error": str(e),
                "path": request.url.path
            }
def create_application_flow_middleware(setup_service: Optional[SetupService] = None) -> ApplicationFlowMiddleware:
    """
    Factory function to create application flow middleware.

    Args:
        setup_service: Setup service instance (optional)

    Returns:
        ApplicationFlowMiddleware: Configured middleware instance

    NOTE(review): app=None is passed to BaseHTTPMiddleware here; Starlette
    normally injects the app when middleware is registered via
    add_middleware. Confirm the returned instance is re-bound to an app
    before it dispatches requests.
    """
    return ApplicationFlowMiddleware(app=None, setup_service=setup_service)

View File

@@ -1,41 +1,41 @@
# FastAPI and ASGI server
fastapi==0.118.0
uvicorn[standard]==0.37.0
python-multipart==0.0.12
# Authentication and security
pyjwt==2.10.1
passlib[bcrypt]==1.7.4
python-jose[cryptography]==3.3.0
# Configuration and environment
pydantic==2.11.10
pydantic-settings==2.11.0
python-dotenv==1.1.1
# Database (if needed)
sqlalchemy==2.0.43
alembic==1.16.5
# HTTP client
httpx==0.28.1
aiofiles==24.1.0
# Utilities
python-dateutil==2.9.0.post0
pytz==2024.2
# Development and testing
pytest==8.4.2
pytest-asyncio==1.2.0
pytest-cov==7.0.0
pytest-mock==3.15.1
# Code quality
black==25.9.0
isort==6.1.0
flake8==7.3.0
mypy==1.18.2
# Logging
# FastAPI and ASGI server
fastapi==0.118.0
uvicorn[standard]==0.37.0
python-multipart==0.0.12
# Authentication and security
pyjwt==2.10.1
passlib[bcrypt]==1.7.4
python-jose[cryptography]==3.3.0
# Configuration and environment
pydantic==2.11.10
pydantic-settings==2.11.0
python-dotenv==1.1.1
# Database (if needed)
sqlalchemy==2.0.43
alembic==1.16.5
# HTTP client
httpx==0.28.1
aiofiles==24.1.0
# Utilities
python-dateutil==2.9.0.post0
pytz==2024.2
# Development and testing
pytest==8.4.2
pytest-asyncio==1.2.0
pytest-cov==7.0.0
pytest-mock==3.15.1
# Code quality
black==25.9.0
isort==6.1.0
flake8==7.3.0
mypy==1.18.2
# Logging
structlog==25.1.0

View File

@@ -1,303 +1,303 @@
from flask import Blueprint, render_template, request, jsonify
from web.controllers.auth_controller import optional_auth
import threading
import time
from datetime import datetime, timedelta
# Create blueprint for download queue management
download_queue_bp = Blueprint('download_queue', __name__)
# Global download queue state.
# Shared, in-process mutable state; all readers/writers below acquire
# 'queue_lock' before touching the lists or statistics.
download_queue_state = {
    'active_downloads': [],      # items currently downloading
    'pending_queue': [],         # items waiting to start (FIFO, high-priority at front)
    'completed_downloads': [],   # finished successfully
    'failed_downloads': [],      # finished with an error
    'queue_lock': threading.Lock(),
    'statistics': {
        'total_items': 0,
        'completed_items': 0,
        'failed_items': 0,
        'estimated_time_remaining': None,
        'current_speed': '0 MB/s',
        'average_speed': '0 MB/s'
    }
}
@download_queue_bp.route('/queue')
@optional_auth
def queue_page():
    """Render the download queue management page (queue.html template)."""
    return render_template('queue.html')
@download_queue_bp.route('/api/queue/status')
@optional_auth
def get_queue_status():
    """Get detailed download queue status.

    Returns JSON with active/pending downloads, the 10 most recent
    completed and failed entries, and aggregate statistics including a
    rough ETA derived from the first active download's speed.
    """
    with download_queue_state['queue_lock']:
        # Calculate ETA from the first active download's current speed.
        eta = None
        if download_queue_state['active_downloads']:
            active_download = download_queue_state['active_downloads'][0]
            if 'progress' in active_download and active_download['progress'].get('speed_mbps', 0) > 0:
                remaining_items = len(download_queue_state['pending_queue'])
                avg_speed = active_download['progress']['speed_mbps']
                # Rough estimation: assume 500MB per episode
                estimated_mb_remaining = remaining_items * 500
                eta_seconds = estimated_mb_remaining / avg_speed if avg_speed > 0 else None
                if eta_seconds:
                    eta = datetime.now() + timedelta(seconds=eta_seconds)
        return jsonify({
            'active_downloads': download_queue_state['active_downloads'],
            'pending_queue': download_queue_state['pending_queue'],
            'completed_downloads': download_queue_state['completed_downloads'][-10:],  # Last 10
            'failed_downloads': download_queue_state['failed_downloads'][-10:],  # Last 10
            'statistics': {
                **download_queue_state['statistics'],
                'eta': eta.isoformat() if eta else None
            }
        })
@download_queue_bp.route('/api/queue/clear', methods=['POST'])
@optional_auth
def clear_queue():
    """Clear finished entries (completed and/or failed) from the queue state.

    The JSON body may carry {"type": "completed" | "failed" | "all"};
    when absent, only the completed list is cleared.
    """
    try:
        payload = request.get_json() or {}
        queue_type = payload.get('type', 'completed')  # 'completed', 'failed', or 'all'
        with download_queue_state['queue_lock']:
            if queue_type in ('completed', 'all'):
                download_queue_state['completed_downloads'].clear()
            if queue_type in ('failed', 'all'):
                download_queue_state['failed_downloads'].clear()
        return jsonify({
            'status': 'success',
            'message': f'Cleared {queue_type} downloads'
        })
    except Exception as e:
        return jsonify({
            'status': 'error',
            'message': str(e)
        }), 500
@download_queue_bp.route('/api/queue/retry', methods=['POST'])
@optional_auth
def retry_failed_download():
    """Move a failed download back into the pending queue for another attempt."""
    try:
        payload = request.get_json()
        download_id = payload.get('id')
        if not download_id:
            return jsonify({
                'status': 'error',
                'message': 'Download ID is required'
            }), 400
        with download_queue_state['queue_lock']:
            failed_list = download_queue_state['failed_downloads']
            # Locate the first failed entry with this id.
            index = next(
                (pos for pos, entry in enumerate(failed_list) if entry['id'] == download_id),
                None
            )
            if index is None:
                return jsonify({
                    'status': 'error',
                    'message': 'Failed download not found'
                }), 404
            failed_download = failed_list.pop(index)
            # Reset status fields so the worker treats it as a fresh item.
            failed_download['status'] = 'queued'
            failed_download['error'] = None
            failed_download['retry_count'] = failed_download.get('retry_count', 0) + 1
            download_queue_state['pending_queue'].append(failed_download)
        return jsonify({
            'status': 'success',
            'message': 'Download added back to queue'
        })
    except Exception as e:
        return jsonify({
            'status': 'error',
            'message': str(e)
        }), 500
@download_queue_bp.route('/api/queue/remove', methods=['POST'])
@optional_auth
def remove_from_queue():
    """Delete a pending (not yet started) download from the queue."""
    try:
        payload = request.get_json()
        download_id = payload.get('id')
        if not download_id:
            return jsonify({
                'status': 'error',
                'message': 'Download ID is required'
            }), 400
        with download_queue_state['queue_lock']:
            pending = download_queue_state['pending_queue']
            matches = [pos for pos, entry in enumerate(pending) if entry['id'] == download_id]
            if not matches:
                return jsonify({
                    'status': 'error',
                    'message': 'Download not found in queue'
                }), 404
            # Remove only the first match, mirroring a linear scan with break.
            pending.pop(matches[0])
        return jsonify({
            'status': 'success',
            'message': 'Download removed from queue'
        })
    except Exception as e:
        return jsonify({
            'status': 'error',
            'message': str(e)
        }), 500
@download_queue_bp.route('/api/queue/reorder', methods=['POST'])
@optional_auth
def reorder_queue():
    """Reorder items in the pending queue.

    Expects JSON {"order": [<download id>, ...]}. Queue items are rearranged
    to match the given id order; unknown ids are ignored, queued items not
    mentioned keep their relative order at the tail, and duplicate ids in
    the request are applied only once (previously a duplicated id inserted
    the same item twice).
    """
    try:
        data = request.get_json()
        new_order = data.get('order')  # Array of download IDs in new order
        if not new_order or not isinstance(new_order, list):
            return jsonify({
                'status': 'error',
                'message': 'Valid order array is required'
            }), 400
        with download_queue_state['queue_lock']:
            # Index the current queue by id: one O(n) pass instead of the
            # previous O(n^2) scan, with membership tracked by id rather
            # than by whole-dict equality.
            old_queue = download_queue_state['pending_queue']
            by_id = {item['id']: item for item in old_queue}
            new_queue = []
            placed = set()
            # Place items in the requested order (first occurrence wins).
            for download_id in new_order:
                if download_id in by_id and download_id not in placed:
                    new_queue.append(by_id[download_id])
                    placed.add(download_id)
            # Append any queued items not mentioned in the new order,
            # preserving their existing relative order.
            for item in old_queue:
                if item['id'] not in placed:
                    new_queue.append(item)
            download_queue_state['pending_queue'] = new_queue
        return jsonify({
            'status': 'success',
            'message': 'Queue reordered successfully'
        })
    except Exception as e:
        return jsonify({
            'status': 'error',
            'message': str(e)
        }), 500
# Helper functions for queue management
def add_to_download_queue(serie_name, episode_info, priority='normal'):
    """Add a download to the queue.

    Args:
        serie_name: Name of the series the episode belongs to.
        episode_info: Opaque episode descriptor stored on the queue item.
        priority: 'high' inserts at the front of the queue; any other
            value appends to the end.

    Returns:
        str: The generated uuid4 id of the new queue item.
    """
    import uuid
    download_item = {
        'id': str(uuid.uuid4()),
        'serie_name': serie_name,
        'episode': episode_info,
        'status': 'queued',
        'priority': priority,
        'added_at': datetime.now().isoformat(),
        'started_at': None,
        'completed_at': None,
        'error': None,
        'retry_count': 0,
        'progress': {
            'percent': 0,
            'downloaded_mb': 0,
            'total_mb': 0,
            'speed_mbps': 0,
            'eta_seconds': None
        }
    }
    with download_queue_state['queue_lock']:
        # Insert based on priority
        if priority == 'high':
            download_queue_state['pending_queue'].insert(0, download_item)
        else:
            download_queue_state['pending_queue'].append(download_item)
        download_queue_state['statistics']['total_items'] += 1
    return download_item['id']
def update_download_progress(download_id, progress_data):
    """Merge new progress fields into the matching active download.

    Unknown ids are ignored silently. When a 'speed_mbps' reading is
    present, the global 'current_speed' statistic is refreshed too.
    """
    with download_queue_state['queue_lock']:
        active = download_queue_state['active_downloads']
        target = next((entry for entry in active if entry['id'] == download_id), None)
        if target is None:
            return
        target['progress'].update(progress_data)
        if 'speed_mbps' in progress_data:
            speed = progress_data['speed_mbps']
            download_queue_state['statistics']['current_speed'] = f"{speed:.1f} MB/s"
def move_download_to_completed(download_id, success=True, error=None):
    """Move download from active to completed/failed.

    Pops the matching entry from 'active_downloads', stamps its completion
    time, and files it under 'completed_downloads' or 'failed_downloads'
    while updating the aggregate counters. Unknown ids are a no-op.

    Args:
        download_id: Id of the active download to finalize.
        success: True files it as completed, False as failed.
        error: Error description stored on the item when success is False.
    """
    with download_queue_state['queue_lock']:
        download = None
        for i, item in enumerate(download_queue_state['active_downloads']):
            if item['id'] == download_id:
                download = download_queue_state['active_downloads'].pop(i)
                break
        if download:
            download['completed_at'] = datetime.now().isoformat()
            if success:
                download['status'] = 'completed'
                download['progress']['percent'] = 100
                download_queue_state['completed_downloads'].append(download)
                download_queue_state['statistics']['completed_items'] += 1
            else:
                download['status'] = 'failed'
                download['error'] = error
                download_queue_state['failed_downloads'].append(download)
                download_queue_state['statistics']['failed_items'] += 1
def start_next_download():
    """Move next queued download to active state.

    Returns:
        The promoted queue item, or None when the queue is empty or the
        concurrency cap is already reached.

    NOTE(review): the concurrency limit (3) is hard-coded here — confirm
    whether it should follow a configurable setting instead.
    """
    with download_queue_state['queue_lock']:
        if download_queue_state['pending_queue'] and len(download_queue_state['active_downloads']) < 3:  # Max 3 concurrent
            download = download_queue_state['pending_queue'].pop(0)
            download['status'] = 'downloading'
            download['started_at'] = datetime.now().isoformat()
            download_queue_state['active_downloads'].append(download)
            return download
        return None
def get_queue_statistics():
"""Get current queue statistics."""
with download_queue_state['queue_lock']:
from flask import Blueprint, render_template, request, jsonify
from web.controllers.auth_controller import optional_auth
import threading
import time
from datetime import datetime, timedelta
# Create blueprint for download queue management
download_queue_bp = Blueprint('download_queue', __name__)
# Global download queue state.
# Shared, in-process mutable state; all readers/writers below acquire
# 'queue_lock' before touching the lists or statistics.
download_queue_state = {
    'active_downloads': [],      # items currently downloading
    'pending_queue': [],         # items waiting to start (FIFO, high-priority at front)
    'completed_downloads': [],   # finished successfully
    'failed_downloads': [],      # finished with an error
    'queue_lock': threading.Lock(),
    'statistics': {
        'total_items': 0,
        'completed_items': 0,
        'failed_items': 0,
        'estimated_time_remaining': None,
        'current_speed': '0 MB/s',
        'average_speed': '0 MB/s'
    }
}
@download_queue_bp.route('/queue')
@optional_auth
def queue_page():
    """Render the download queue management page (queue.html template)."""
    return render_template('queue.html')
@download_queue_bp.route('/api/queue/status')
@optional_auth
def get_queue_status():
    """Get detailed download queue status.

    Returns JSON with active/pending downloads, the 10 most recent
    completed and failed entries, and aggregate statistics including a
    rough ETA derived from the first active download's speed.
    """
    with download_queue_state['queue_lock']:
        # Calculate ETA from the first active download's current speed.
        eta = None
        if download_queue_state['active_downloads']:
            active_download = download_queue_state['active_downloads'][0]
            if 'progress' in active_download and active_download['progress'].get('speed_mbps', 0) > 0:
                remaining_items = len(download_queue_state['pending_queue'])
                avg_speed = active_download['progress']['speed_mbps']
                # Rough estimation: assume 500MB per episode
                estimated_mb_remaining = remaining_items * 500
                eta_seconds = estimated_mb_remaining / avg_speed if avg_speed > 0 else None
                if eta_seconds:
                    eta = datetime.now() + timedelta(seconds=eta_seconds)
        return jsonify({
            'active_downloads': download_queue_state['active_downloads'],
            'pending_queue': download_queue_state['pending_queue'],
            'completed_downloads': download_queue_state['completed_downloads'][-10:],  # Last 10
            'failed_downloads': download_queue_state['failed_downloads'][-10:],  # Last 10
            'statistics': {
                **download_queue_state['statistics'],
                'eta': eta.isoformat() if eta else None
            }
        })
@download_queue_bp.route('/api/queue/clear', methods=['POST'])
@optional_auth
def clear_queue():
    """Clear finished entries (completed and/or failed) from the queue state.

    The JSON body may carry {"type": "completed" | "failed" | "all"};
    when absent, only the completed list is cleared.
    """
    try:
        payload = request.get_json() or {}
        queue_type = payload.get('type', 'completed')  # 'completed', 'failed', or 'all'
        with download_queue_state['queue_lock']:
            if queue_type in ('completed', 'all'):
                download_queue_state['completed_downloads'].clear()
            if queue_type in ('failed', 'all'):
                download_queue_state['failed_downloads'].clear()
        return jsonify({
            'status': 'success',
            'message': f'Cleared {queue_type} downloads'
        })
    except Exception as e:
        return jsonify({
            'status': 'error',
            'message': str(e)
        }), 500
@download_queue_bp.route('/api/queue/retry', methods=['POST'])
@optional_auth
def retry_failed_download():
    """Move a failed download back into the pending queue for another attempt."""
    try:
        payload = request.get_json()
        download_id = payload.get('id')
        if not download_id:
            return jsonify({
                'status': 'error',
                'message': 'Download ID is required'
            }), 400
        with download_queue_state['queue_lock']:
            failed_list = download_queue_state['failed_downloads']
            # Locate the first failed entry with this id.
            index = next(
                (pos for pos, entry in enumerate(failed_list) if entry['id'] == download_id),
                None
            )
            if index is None:
                return jsonify({
                    'status': 'error',
                    'message': 'Failed download not found'
                }), 404
            failed_download = failed_list.pop(index)
            # Reset status fields so the worker treats it as a fresh item.
            failed_download['status'] = 'queued'
            failed_download['error'] = None
            failed_download['retry_count'] = failed_download.get('retry_count', 0) + 1
            download_queue_state['pending_queue'].append(failed_download)
        return jsonify({
            'status': 'success',
            'message': 'Download added back to queue'
        })
    except Exception as e:
        return jsonify({
            'status': 'error',
            'message': str(e)
        }), 500
@download_queue_bp.route('/api/queue/remove', methods=['POST'])
@optional_auth
def remove_from_queue():
    """Remove an item from the pending queue.

    JSON body must contain {"id": <download_id>}; responds 404 when the
    ID is not queued.
    """
    try:
        payload = request.get_json()
        download_id = payload.get('id')
        if not download_id:
            return jsonify({
                'status': 'error',
                'message': 'Download ID is required'
            }), 400
        with download_queue_state['queue_lock']:
            pending = download_queue_state['pending_queue']
            for index, item in enumerate(pending):
                if item['id'] == download_id:
                    del pending[index]
                    break
            else:
                # Loop completed without a match: nothing to remove.
                return jsonify({
                    'status': 'error',
                    'message': 'Download not found in queue'
                }), 404
        return jsonify({
            'status': 'success',
            'message': 'Download removed from queue'
        })
    except Exception as exc:
        return jsonify({
            'status': 'error',
            'message': str(exc)
        }), 500
@download_queue_bp.route('/api/queue/reorder', methods=['POST'])
@optional_auth
def reorder_queue():
    """Reorder items in the pending queue.

    Expects JSON body {"order": [<download_id>, ...]}. Queued items are
    rearranged to match the given order; items not mentioned keep their
    relative order and are appended at the end. Unknown or duplicate IDs
    are ignored.
    """
    try:
        data = request.get_json()
        new_order = data.get('order')  # Array of download IDs in new order
        if not new_order or not isinstance(new_order, list):
            return jsonify({
                'status': 'error',
                'message': 'Valid order array is required'
            }), 400
        with download_queue_state['queue_lock']:
            old_queue = download_queue_state['pending_queue']
            # Index by id for O(1) lookup instead of a nested O(n^2) scan.
            by_id = {item['id']: item for item in old_queue}
            new_queue = []
            placed = set()
            for download_id in new_order:
                # Skip unknown IDs; skip duplicates so a repeated ID cannot
                # insert the same download item twice.
                if download_id in by_id and download_id not in placed:
                    new_queue.append(by_id[download_id])
                    placed.add(download_id)
            # Preserve any queued items that weren't in the new order.
            for item in old_queue:
                if item['id'] not in placed:
                    new_queue.append(item)
            download_queue_state['pending_queue'] = new_queue
        return jsonify({
            'status': 'success',
            'message': 'Queue reordered successfully'
        })
    except Exception as e:
        return jsonify({
            'status': 'error',
            'message': str(e)
        }), 500
# Helper functions for queue management
def add_to_download_queue(serie_name, episode_info, priority='normal'):
    """Add a download to the queue.

    Builds a fresh queue entry for the given series/episode, inserts it
    into the pending queue ('high' priority goes to the front, everything
    else to the back), bumps the total-items statistic, and returns the
    generated download ID.
    """
    import uuid
    entry = {
        'id': str(uuid.uuid4()),
        'serie_name': serie_name,
        'episode': episode_info,
        'status': 'queued',
        'priority': priority,
        'added_at': datetime.now().isoformat(),
        'started_at': None,
        'completed_at': None,
        'error': None,
        'retry_count': 0,
        'progress': {
            'percent': 0,
            'downloaded_mb': 0,
            'total_mb': 0,
            'speed_mbps': 0,
            'eta_seconds': None
        }
    }
    with download_queue_state['queue_lock']:
        pending = download_queue_state['pending_queue']
        # High-priority items jump straight to the head of the queue.
        position = 0 if priority == 'high' else len(pending)
        pending.insert(position, entry)
        download_queue_state['statistics']['total_items'] += 1
    return entry['id']
def update_download_progress(download_id, progress_data):
    """Update progress for an active download.

    Merges *progress_data* into the matching active download's progress
    dict and mirrors any reported speed into the global statistics.
    """
    with download_queue_state['queue_lock']:
        active = download_queue_state['active_downloads']
        match = next((d for d in active if d['id'] == download_id), None)
        if match is not None:
            match['progress'].update(progress_data)
            # Keep the headline speed statistic in sync with this download.
            if 'speed_mbps' in progress_data:
                download_queue_state['statistics']['current_speed'] = f"{progress_data['speed_mbps']:.1f} MB/s"
def move_download_to_completed(download_id, success=True, error=None):
    """Move download from active to completed/failed.

    Pops the matching entry out of the active list, stamps its completion
    time, and files it under completed or failed (updating statistics).
    Does nothing when the ID is not currently active.
    """
    with download_queue_state['queue_lock']:
        active = download_queue_state['active_downloads']
        finished = None
        for index, entry in enumerate(active):
            if entry['id'] == download_id:
                finished = active.pop(index)
                break
        if finished is None:
            return
        finished['completed_at'] = datetime.now().isoformat()
        stats = download_queue_state['statistics']
        if success:
            finished['status'] = 'completed'
            finished['progress']['percent'] = 100
            download_queue_state['completed_downloads'].append(finished)
            stats['completed_items'] += 1
        else:
            finished['status'] = 'failed'
            finished['error'] = error
            download_queue_state['failed_downloads'].append(finished)
            stats['failed_items'] += 1
def start_next_download(max_concurrent=3):
    """Move next queued download to active state.

    Args:
        max_concurrent: Maximum number of simultaneously active downloads
            (defaults to 3, the previously hard-coded limit).

    Returns:
        The download dict that was moved to the active list, or None when
        the queue is empty or the concurrency limit is already reached.
    """
    with download_queue_state['queue_lock']:
        pending = download_queue_state['pending_queue']
        active = download_queue_state['active_downloads']
        if not pending or len(active) >= max_concurrent:
            return None
        download = pending.pop(0)
        download['status'] = 'downloading'
        download['started_at'] = datetime.now().isoformat()
        active.append(download)
        return download
def get_queue_statistics():
    """Get current queue statistics.

    Returns a shallow copy so callers cannot mutate shared state.
    """
    with download_queue_state['queue_lock']:
        return dict(download_queue_state['statistics'])

View File

@@ -1,252 +1,252 @@
import threading
import time
import schedule
from datetime import datetime, timedelta
from typing import Optional, Callable, Dict, Any
import logging
from shared.utils.process_utils import (with_process_lock, RESCAN_LOCK,
ProcessLockError, is_process_running)
logger = logging.getLogger(__name__)
class ScheduledOperations:
    """Handle scheduled operations like automatic rescans and downloads."""

    def __init__(self, config_manager, socketio=None):
        # config_manager supplies persisted settings; socketio (optional) is
        # used to push status events to connected web clients.
        self.config = config_manager
        self.socketio = socketio
        # Background thread running _scheduler_loop; None until started.
        self.scheduler_thread = None
        self.running = False
        self.rescan_callback: Optional[Callable] = None
        self.download_callback: Optional[Callable] = None
        self.last_scheduled_rescan: Optional[datetime] = None
        # Load scheduled rescan settings (fall back to defaults when the
        # config object does not define them).
        self.scheduled_rescan_enabled = getattr(self.config, 'scheduled_rescan_enabled', False)
        self.scheduled_rescan_time = getattr(self.config, 'scheduled_rescan_time', '03:00')
        self.auto_download_after_rescan = getattr(self.config, 'auto_download_after_rescan', False)

    def set_rescan_callback(self, callback: Callable):
        """Set callback function for performing rescan operations."""
        self.rescan_callback = callback

    def set_download_callback(self, callback: Callable):
        """Set callback function for performing download operations."""
        self.download_callback = callback

    def start_scheduler(self):
        """Start the background scheduler thread."""
        if self.running:
            logger.warning("Scheduler is already running")
            return
        self.running = True
        # Daemon thread so the scheduler never blocks interpreter shutdown.
        self.scheduler_thread = threading.Thread(target=self._scheduler_loop, daemon=True)
        self.scheduler_thread.start()
        logger.info("Scheduled operations started")

    def stop_scheduler(self):
        """Stop the background scheduler."""
        self.running = False
        schedule.clear()
        if self.scheduler_thread and self.scheduler_thread.is_alive():
            self.scheduler_thread.join(timeout=5)
        logger.info("Scheduled operations stopped")

    def _scheduler_loop(self):
        """Main scheduler loop that runs in background thread."""
        self._setup_scheduled_jobs()
        while self.running:
            try:
                schedule.run_pending()
                time.sleep(60)  # Check every minute
            except Exception as e:
                logger.error(f"Scheduler error: {e}")
                time.sleep(60)

    def _setup_scheduled_jobs(self):
        """Setup all scheduled jobs based on configuration."""
        schedule.clear()
        if self.scheduled_rescan_enabled and self.scheduled_rescan_time:
            try:
                schedule.every().day.at(self.scheduled_rescan_time).do(self._perform_scheduled_rescan)
                logger.info(f"Scheduled daily rescan at {self.scheduled_rescan_time}")
            except Exception as e:
                logger.error(f"Error setting up scheduled rescan: {e}")

    def _perform_scheduled_rescan(self):
        """Perform the scheduled rescan operation."""
        try:
            logger.info("Starting scheduled rescan...")
            # Emit scheduled rescan started event
            if self.socketio:
                self.socketio.emit('scheduled_rescan_started')
            # Check if rescan is already running
            if is_process_running(RESCAN_LOCK):
                logger.warning("Rescan is already running, skipping scheduled rescan")
                if self.socketio:
                    self.socketio.emit('scheduled_rescan_skipped', {
                        'reason': 'Rescan already in progress'
                    })
                return
            # Perform the rescan using process lock
            @with_process_lock(RESCAN_LOCK, timeout_minutes=180)
            def perform_rescan():
                self.last_scheduled_rescan = datetime.now()
                if self.rescan_callback:
                    result = self.rescan_callback()
                    logger.info("Scheduled rescan completed successfully")
                    if self.socketio:
                        self.socketio.emit('scheduled_rescan_completed', {
                            'timestamp': self.last_scheduled_rescan.isoformat(),
                            'result': result
                        })
                    # Auto-start download if configured
                    if self.auto_download_after_rescan and self.download_callback:
                        logger.info("Starting auto-download after scheduled rescan")
                        threading.Thread(
                            target=self._perform_auto_download,
                            daemon=True
                        ).start()
                else:
                    logger.warning("No rescan callback configured")
            # NOTE(review): _locked_by is presumably consumed by
            # with_process_lock, not by perform_rescan itself — confirm.
            perform_rescan(_locked_by='scheduled_operation')
        except ProcessLockError:
            logger.warning("Could not acquire rescan lock for scheduled operation")
            if self.socketio:
                self.socketio.emit('scheduled_rescan_error', {
                    'error': 'Could not acquire rescan lock'
                })
        except Exception as e:
            logger.error(f"Scheduled rescan failed: {e}")
            if self.socketio:
                self.socketio.emit('scheduled_rescan_error', {
                    'error': str(e)
                })

    def _perform_auto_download(self):
        """Perform automatic download after scheduled rescan."""
        try:
            # Wait a bit after rescan to let UI update
            time.sleep(10)
            if self.download_callback:
                # Find series with missing episodes and start download
                logger.info("Starting auto-download of missing episodes")
                result = self.download_callback()
                if self.socketio:
                    self.socketio.emit('auto_download_started', {
                        'timestamp': datetime.now().isoformat(),
                        'result': result
                    })
            else:
                logger.warning("No download callback configured for auto-download")
        except Exception as e:
            logger.error(f"Auto-download after scheduled rescan failed: {e}")
            if self.socketio:
                self.socketio.emit('auto_download_error', {
                    'error': str(e)
                })

    def update_scheduled_rescan_config(self, enabled: bool, time_str: str, auto_download: bool = False):
        """Update scheduled rescan configuration.

        Validates the HH:MM time format, persists the new settings on the
        config object, and re-registers the scheduled job when running.
        Raises ValueError on a malformed time string.
        """
        try:
            # Validate time format
            if enabled and time_str:
                datetime.strptime(time_str, '%H:%M')
            # Update configuration
            self.scheduled_rescan_enabled = enabled
            self.scheduled_rescan_time = time_str
            self.auto_download_after_rescan = auto_download
            # Save to config
            self.config.scheduled_rescan_enabled = enabled
            self.config.scheduled_rescan_time = time_str
            self.config.auto_download_after_rescan = auto_download
            self.config.save_config()
            # Restart scheduler with new settings
            if self.running:
                self._setup_scheduled_jobs()
            logger.info(f"Updated scheduled rescan config: enabled={enabled}, time={time_str}, auto_download={auto_download}")
            return True
        except ValueError as e:
            logger.error(f"Invalid time format: {time_str}")
            raise ValueError(f"Invalid time format. Use HH:MM format.")
        except Exception as e:
            logger.error(f"Error updating scheduled rescan config: {e}")
            raise

    def get_scheduled_rescan_config(self) -> Dict[str, Any]:
        """Get current scheduled rescan configuration."""
        next_run = None
        if self.scheduled_rescan_enabled and self.scheduled_rescan_time:
            try:
                # Calculate next run time
                now = datetime.now()
                today_run = datetime.strptime(f"{now.strftime('%Y-%m-%d')} {self.scheduled_rescan_time}", '%Y-%m-%d %H:%M')
                if now > today_run:
                    # Next run is tomorrow
                    next_run = today_run + timedelta(days=1)
                else:
                    # Next run is today
                    next_run = today_run
            except Exception as e:
                logger.error(f"Error calculating next run time: {e}")
        return {
            'enabled': self.scheduled_rescan_enabled,
            'time': self.scheduled_rescan_time,
            'auto_download_after_rescan': self.auto_download_after_rescan,
            'next_run': next_run.isoformat() if next_run else None,
            'last_run': self.last_scheduled_rescan.isoformat() if self.last_scheduled_rescan else None,
            'is_running': self.running
        }

    def trigger_manual_scheduled_rescan(self):
        """Manually trigger a scheduled rescan (for testing purposes)."""
        logger.info("Manually triggering scheduled rescan")
        threading.Thread(target=self._perform_scheduled_rescan, daemon=True).start()

    def get_next_scheduled_jobs(self) -> list:
        """Get list of all scheduled jobs with their next run times."""
        jobs = []
        for job in schedule.jobs:
            jobs.append({
                'job_func': job.job_func.__name__ if hasattr(job.job_func, '__name__') else str(job.job_func),
                'next_run': job.next_run.isoformat() if job.next_run else None,
                'interval': str(job.interval),
                'unit': job.unit
            })
        return jobs
# Global scheduler instance
scheduled_operations = None


def init_scheduler(config_manager, socketio=None):
    """Create the module-wide ScheduledOperations instance and return it."""
    global scheduled_operations
    scheduled_operations = ScheduledOperations(config_manager, socketio)
    return scheduled_operations
def get_scheduler():
"""Get the global scheduler instance."""
import threading
import time
import schedule
from datetime import datetime, timedelta
from typing import Optional, Callable, Dict, Any
import logging
from shared.utils.process_utils import (with_process_lock, RESCAN_LOCK,
ProcessLockError, is_process_running)
logger = logging.getLogger(__name__)
class ScheduledOperations:
    """Handle scheduled operations like automatic rescans and downloads."""

    def __init__(self, config_manager, socketio=None):
        # config_manager supplies persisted settings; socketio (optional) is
        # used to push status events to connected web clients.
        self.config = config_manager
        self.socketio = socketio
        # Background thread running _scheduler_loop; None until started.
        self.scheduler_thread = None
        self.running = False
        self.rescan_callback: Optional[Callable] = None
        self.download_callback: Optional[Callable] = None
        self.last_scheduled_rescan: Optional[datetime] = None
        # Load scheduled rescan settings (fall back to defaults when the
        # config object does not define them).
        self.scheduled_rescan_enabled = getattr(self.config, 'scheduled_rescan_enabled', False)
        self.scheduled_rescan_time = getattr(self.config, 'scheduled_rescan_time', '03:00')
        self.auto_download_after_rescan = getattr(self.config, 'auto_download_after_rescan', False)

    def set_rescan_callback(self, callback: Callable):
        """Set callback function for performing rescan operations."""
        self.rescan_callback = callback

    def set_download_callback(self, callback: Callable):
        """Set callback function for performing download operations."""
        self.download_callback = callback

    def start_scheduler(self):
        """Start the background scheduler thread."""
        if self.running:
            logger.warning("Scheduler is already running")
            return
        self.running = True
        # Daemon thread so the scheduler never blocks interpreter shutdown.
        self.scheduler_thread = threading.Thread(target=self._scheduler_loop, daemon=True)
        self.scheduler_thread.start()
        logger.info("Scheduled operations started")

    def stop_scheduler(self):
        """Stop the background scheduler."""
        self.running = False
        schedule.clear()
        if self.scheduler_thread and self.scheduler_thread.is_alive():
            self.scheduler_thread.join(timeout=5)
        logger.info("Scheduled operations stopped")

    def _scheduler_loop(self):
        """Main scheduler loop that runs in background thread."""
        self._setup_scheduled_jobs()
        while self.running:
            try:
                schedule.run_pending()
                time.sleep(60)  # Check every minute
            except Exception as e:
                logger.error(f"Scheduler error: {e}")
                time.sleep(60)

    def _setup_scheduled_jobs(self):
        """Setup all scheduled jobs based on configuration."""
        schedule.clear()
        if self.scheduled_rescan_enabled and self.scheduled_rescan_time:
            try:
                schedule.every().day.at(self.scheduled_rescan_time).do(self._perform_scheduled_rescan)
                logger.info(f"Scheduled daily rescan at {self.scheduled_rescan_time}")
            except Exception as e:
                logger.error(f"Error setting up scheduled rescan: {e}")

    def _perform_scheduled_rescan(self):
        """Perform the scheduled rescan operation."""
        try:
            logger.info("Starting scheduled rescan...")
            # Emit scheduled rescan started event
            if self.socketio:
                self.socketio.emit('scheduled_rescan_started')
            # Check if rescan is already running
            if is_process_running(RESCAN_LOCK):
                logger.warning("Rescan is already running, skipping scheduled rescan")
                if self.socketio:
                    self.socketio.emit('scheduled_rescan_skipped', {
                        'reason': 'Rescan already in progress'
                    })
                return
            # Perform the rescan using process lock
            @with_process_lock(RESCAN_LOCK, timeout_minutes=180)
            def perform_rescan():
                self.last_scheduled_rescan = datetime.now()
                if self.rescan_callback:
                    result = self.rescan_callback()
                    logger.info("Scheduled rescan completed successfully")
                    if self.socketio:
                        self.socketio.emit('scheduled_rescan_completed', {
                            'timestamp': self.last_scheduled_rescan.isoformat(),
                            'result': result
                        })
                    # Auto-start download if configured
                    if self.auto_download_after_rescan and self.download_callback:
                        logger.info("Starting auto-download after scheduled rescan")
                        threading.Thread(
                            target=self._perform_auto_download,
                            daemon=True
                        ).start()
                else:
                    logger.warning("No rescan callback configured")
            # NOTE(review): _locked_by is presumably consumed by
            # with_process_lock, not by perform_rescan itself — confirm.
            perform_rescan(_locked_by='scheduled_operation')
        except ProcessLockError:
            logger.warning("Could not acquire rescan lock for scheduled operation")
            if self.socketio:
                self.socketio.emit('scheduled_rescan_error', {
                    'error': 'Could not acquire rescan lock'
                })
        except Exception as e:
            logger.error(f"Scheduled rescan failed: {e}")
            if self.socketio:
                self.socketio.emit('scheduled_rescan_error', {
                    'error': str(e)
                })

    def _perform_auto_download(self):
        """Perform automatic download after scheduled rescan."""
        try:
            # Wait a bit after rescan to let UI update
            time.sleep(10)
            if self.download_callback:
                # Find series with missing episodes and start download
                logger.info("Starting auto-download of missing episodes")
                result = self.download_callback()
                if self.socketio:
                    self.socketio.emit('auto_download_started', {
                        'timestamp': datetime.now().isoformat(),
                        'result': result
                    })
            else:
                logger.warning("No download callback configured for auto-download")
        except Exception as e:
            logger.error(f"Auto-download after scheduled rescan failed: {e}")
            if self.socketio:
                self.socketio.emit('auto_download_error', {
                    'error': str(e)
                })

    def update_scheduled_rescan_config(self, enabled: bool, time_str: str, auto_download: bool = False):
        """Update scheduled rescan configuration.

        Validates the HH:MM time format, persists the new settings on the
        config object, and re-registers the scheduled job when running.
        Raises ValueError on a malformed time string.
        """
        try:
            # Validate time format
            if enabled and time_str:
                datetime.strptime(time_str, '%H:%M')
            # Update configuration
            self.scheduled_rescan_enabled = enabled
            self.scheduled_rescan_time = time_str
            self.auto_download_after_rescan = auto_download
            # Save to config
            self.config.scheduled_rescan_enabled = enabled
            self.config.scheduled_rescan_time = time_str
            self.config.auto_download_after_rescan = auto_download
            self.config.save_config()
            # Restart scheduler with new settings
            if self.running:
                self._setup_scheduled_jobs()
            logger.info(f"Updated scheduled rescan config: enabled={enabled}, time={time_str}, auto_download={auto_download}")
            return True
        except ValueError as e:
            logger.error(f"Invalid time format: {time_str}")
            raise ValueError(f"Invalid time format. Use HH:MM format.")
        except Exception as e:
            logger.error(f"Error updating scheduled rescan config: {e}")
            raise

    def get_scheduled_rescan_config(self) -> Dict[str, Any]:
        """Get current scheduled rescan configuration."""
        next_run = None
        if self.scheduled_rescan_enabled and self.scheduled_rescan_time:
            try:
                # Calculate next run time
                now = datetime.now()
                today_run = datetime.strptime(f"{now.strftime('%Y-%m-%d')} {self.scheduled_rescan_time}", '%Y-%m-%d %H:%M')
                if now > today_run:
                    # Next run is tomorrow
                    next_run = today_run + timedelta(days=1)
                else:
                    # Next run is today
                    next_run = today_run
            except Exception as e:
                logger.error(f"Error calculating next run time: {e}")
        return {
            'enabled': self.scheduled_rescan_enabled,
            'time': self.scheduled_rescan_time,
            'auto_download_after_rescan': self.auto_download_after_rescan,
            'next_run': next_run.isoformat() if next_run else None,
            'last_run': self.last_scheduled_rescan.isoformat() if self.last_scheduled_rescan else None,
            'is_running': self.running
        }

    def trigger_manual_scheduled_rescan(self):
        """Manually trigger a scheduled rescan (for testing purposes)."""
        logger.info("Manually triggering scheduled rescan")
        threading.Thread(target=self._perform_scheduled_rescan, daemon=True).start()

    def get_next_scheduled_jobs(self) -> list:
        """Get list of all scheduled jobs with their next run times."""
        jobs = []
        for job in schedule.jobs:
            jobs.append({
                'job_func': job.job_func.__name__ if hasattr(job.job_func, '__name__') else str(job.job_func),
                'next_run': job.next_run.isoformat() if job.next_run else None,
                'interval': str(job.interval),
                'unit': job.unit
            })
        return jobs
# Global scheduler instance
scheduled_operations = None


def init_scheduler(config_manager, socketio=None):
    """Create the module-wide ScheduledOperations instance and return it."""
    global scheduled_operations
    scheduled_operations = ScheduledOperations(config_manager, socketio)
    return scheduled_operations
def get_scheduler():
    """Return the module-wide scheduler set by init_scheduler (or None)."""
    return scheduled_operations

View File

@@ -1,268 +1,268 @@
"""
Setup service for detecting and managing application setup state.
This service determines if the application is properly configured and set up,
following the application flow pattern: setup → auth → main application.
"""
import json
import logging
import sqlite3
from datetime import datetime
from pathlib import Path
from typing import Any, Dict, List, Optional
logger = logging.getLogger(__name__)
class SetupService:
    """Service for managing application setup detection and configuration."""

    def __init__(self, config_path: str = "data/config.json", db_path: str = "data/aniworld.db"):
        """Initialize the setup service with configuration and database paths."""
        self.config_path = Path(config_path)
        self.db_path = Path(db_path)
        # Lazily-populated cache of the parsed config; invalidated on save.
        self._config_cache: Optional[Dict[str, Any]] = None

    def is_setup_complete(self) -> bool:
        """
        Check if the application setup is complete.
        Setup is considered complete if:
        1. Configuration file exists and is valid
        2. Database exists and is accessible
        3. Master password is configured
        4. Setup completion flag is set (if present)
        Returns:
            bool: True if setup is complete, False otherwise
        """
        try:
            # Check if configuration file exists and is valid
            if not self._is_config_valid():
                logger.info("Setup incomplete: Configuration file is missing or invalid")
                return False
            # Check if database exists and is accessible
            if not self._is_database_accessible():
                logger.info("Setup incomplete: Database is not accessible")
                return False
            # Check if master password is configured
            if not self._is_master_password_configured():
                logger.info("Setup incomplete: Master password is not configured")
                return False
            # Check for explicit setup completion flag
            # (only an explicit False blocks completion; a missing flag passes)
            config = self.get_config()
            if config and config.get("setup", {}).get("completed") is False:
                logger.info("Setup incomplete: Setup completion flag is False")
                return False
            logger.debug("Setup validation complete: All checks passed")
            return True
        except Exception as e:
            logger.error(f"Error checking setup completion: {e}")
            return False

    def _is_config_valid(self) -> bool:
        """Check if the configuration file exists and contains valid JSON."""
        try:
            if not self.config_path.exists():
                return False
            config = self.get_config()
            return config is not None and isinstance(config, dict)
        except Exception as e:
            logger.error(f"Configuration validation error: {e}")
            return False

    def _is_database_accessible(self) -> bool:
        """Check if the database exists and is accessible."""
        try:
            if not self.db_path.exists():
                return False
            # Try to connect and perform a simple query
            with sqlite3.connect(str(self.db_path)) as conn:
                cursor = conn.cursor()
                cursor.execute("SELECT name FROM sqlite_master WHERE type='table' LIMIT 1")
                return True
        except Exception as e:
            logger.error(f"Database accessibility check failed: {e}")
            return False

    def _is_master_password_configured(self) -> bool:
        """Check if master password is properly configured."""
        try:
            config = self.get_config()
            if not config:
                return False
            security_config = config.get("security", {})
            # Check if password hash exists
            password_hash = security_config.get("master_password_hash")
            salt = security_config.get("salt")
            return bool(password_hash and salt and len(password_hash) > 0 and len(salt) > 0)
        except Exception as e:
            logger.error(f"Master password configuration check failed: {e}")
            return False

    def get_config(self, force_reload: bool = False) -> Optional[Dict[str, Any]]:
        """
        Get the configuration data from the config file.
        Args:
            force_reload: If True, reload config from file even if cached
        Returns:
            dict: Configuration data or None if not accessible
        """
        try:
            if self._config_cache is None or force_reload:
                if not self.config_path.exists():
                    return None
                with open(self.config_path, 'r', encoding='utf-8') as f:
                    self._config_cache = json.load(f)
            return self._config_cache
        except Exception as e:
            logger.error(f"Error loading configuration: {e}")
            return None

    def mark_setup_complete(self, config_updates: Optional[Dict[str, Any]] = None) -> bool:
        """
        Mark the setup as completed and optionally update configuration.
        Args:
            config_updates: Additional configuration updates to apply
        Returns:
            bool: True if successful, False otherwise
        """
        try:
            config = self.get_config() or {}
            # Update configuration with any provided updates
            if config_updates:
                config.update(config_updates)
            # Set setup completion flag
            if "setup" not in config:
                config["setup"] = {}
            config["setup"]["completed"] = True
            # NOTE(review): datetime.utcnow() is deprecated since Python 3.12;
            # consider datetime.now(timezone.utc) when next touching this.
            config["setup"]["completed_at"] = str(datetime.utcnow())
            # Save updated configuration
            return self._save_config(config)
        except Exception as e:
            logger.error(f"Error marking setup as complete: {e}")
            return False

    def reset_setup(self) -> bool:
        """
        Reset the setup completion status (for development/testing).
        Returns:
            bool: True if successful, False otherwise
        """
        try:
            config = self.get_config()
            if not config:
                return False
            # Remove or set setup completion flag to false
            if "setup" in config:
                config["setup"]["completed"] = False
            return self._save_config(config)
        except Exception as e:
            logger.error(f"Error resetting setup: {e}")
            return False

    def _save_config(self, config: Dict[str, Any]) -> bool:
        """Save configuration to file."""
        try:
            # Ensure directory exists
            self.config_path.parent.mkdir(parents=True, exist_ok=True)
            # Save configuration
            with open(self.config_path, 'w', encoding='utf-8') as f:
                json.dump(config, f, indent=4, ensure_ascii=False)
            # Clear cache to force reload on next access
            self._config_cache = None
            logger.info(f"Configuration saved to {self.config_path}")
            return True
        except Exception as e:
            logger.error(f"Error saving configuration: {e}")
            return False

    def get_setup_requirements(self) -> Dict[str, bool]:
        """
        Get detailed breakdown of setup requirements and their status.
        Returns:
            dict: Dictionary with requirement names and their completion status
        """
        config = self.get_config()
        return {
            "config_file_exists": self.config_path.exists(),
            "config_file_valid": self._is_config_valid(),
            "database_exists": self.db_path.exists(),
            "database_accessible": self._is_database_accessible(),
            "master_password_configured": self._is_master_password_configured(),
            # A missing "completed" flag counts as complete (default True).
            "setup_marked_complete": bool(config and config.get("setup", {}).get("completed", True))
        }

    def get_missing_requirements(self) -> List[str]:
        """
        Get list of missing setup requirements.
        Returns:
            list: List of missing requirement descriptions
        """
        requirements = self.get_setup_requirements()
        missing = []
        if not requirements["config_file_exists"]:
            missing.append("Configuration file is missing")
        elif not requirements["config_file_valid"]:
            missing.append("Configuration file is invalid or corrupted")
        if not requirements["database_exists"]:
            missing.append("Database file is missing")
        elif not requirements["database_accessible"]:
            missing.append("Database is not accessible or corrupted")
        if not requirements["master_password_configured"]:
            missing.append("Master password is not configured")
        if not requirements["setup_marked_complete"]:
            missing.append("Setup process was not completed")
        return missing
# Convenience functions for easy import
def is_setup_complete() -> bool:
    """Convenience wrapper: run the setup check with default paths."""
    return SetupService().is_setup_complete()
def get_setup_service() -> SetupService:
    """Build a SetupService using the default config and database paths."""
    return SetupService()
"""
Setup service for detecting and managing application setup state.
This service determines if the application is properly configured and set up,
following the application flow pattern: setup → auth → main application.
"""
import json
import logging
import sqlite3
from datetime import datetime
from pathlib import Path
from typing import Any, Dict, List, Optional
logger = logging.getLogger(__name__)
class SetupService:
"""Service for managing application setup detection and configuration."""
def __init__(self, config_path: str = "data/config.json", db_path: str = "data/aniworld.db"):
"""Initialize the setup service with configuration and database paths."""
self.config_path = Path(config_path)
self.db_path = Path(db_path)
self._config_cache: Optional[Dict[str, Any]] = None
def is_setup_complete(self) -> bool:
"""
Check if the application setup is complete.
Setup is considered complete if:
1. Configuration file exists and is valid
2. Database exists and is accessible
3. Master password is configured
4. Setup completion flag is set (if present)
Returns:
bool: True if setup is complete, False otherwise
"""
try:
# Check if configuration file exists and is valid
if not self._is_config_valid():
logger.info("Setup incomplete: Configuration file is missing or invalid")
return False
# Check if database exists and is accessible
if not self._is_database_accessible():
logger.info("Setup incomplete: Database is not accessible")
return False
# Check if master password is configured
if not self._is_master_password_configured():
logger.info("Setup incomplete: Master password is not configured")
return False
# Check for explicit setup completion flag
config = self.get_config()
if config and config.get("setup", {}).get("completed") is False:
logger.info("Setup incomplete: Setup completion flag is False")
return False
logger.debug("Setup validation complete: All checks passed")
return True
except Exception as e:
logger.error(f"Error checking setup completion: {e}")
return False
def _is_config_valid(self) -> bool:
"""Check if the configuration file exists and contains valid JSON."""
try:
if not self.config_path.exists():
return False
config = self.get_config()
return config is not None and isinstance(config, dict)
except Exception as e:
logger.error(f"Configuration validation error: {e}")
return False
def _is_database_accessible(self) -> bool:
"""Check if the database exists and is accessible."""
try:
if not self.db_path.exists():
return False
# Try to connect and perform a simple query
with sqlite3.connect(str(self.db_path)) as conn:
cursor = conn.cursor()
cursor.execute("SELECT name FROM sqlite_master WHERE type='table' LIMIT 1")
return True
except Exception as e:
logger.error(f"Database accessibility check failed: {e}")
return False
def _is_master_password_configured(self) -> bool:
"""Check if master password is properly configured."""
try:
config = self.get_config()
if not config:
return False
security_config = config.get("security", {})
# Check if password hash exists
password_hash = security_config.get("master_password_hash")
salt = security_config.get("salt")
return bool(password_hash and salt and len(password_hash) > 0 and len(salt) > 0)
except Exception as e:
logger.error(f"Master password configuration check failed: {e}")
return False
def get_config(self, force_reload: bool = False) -> Optional[Dict[str, Any]]:
"""
Get the configuration data from the config file.
Args:
force_reload: If True, reload config from file even if cached
Returns:
dict: Configuration data or None if not accessible
"""
try:
if self._config_cache is None or force_reload:
if not self.config_path.exists():
return None
with open(self.config_path, 'r', encoding='utf-8') as f:
self._config_cache = json.load(f)
return self._config_cache
except Exception as e:
logger.error(f"Error loading configuration: {e}")
return None
def mark_setup_complete(self, config_updates: Optional[Dict[str, Any]] = None) -> bool:
"""
Mark the setup as completed and optionally update configuration.
Args:
config_updates: Additional configuration updates to apply
Returns:
bool: True if successful, False otherwise
"""
try:
config = self.get_config() or {}
# Update configuration with any provided updates
if config_updates:
config.update(config_updates)
# Set setup completion flag
if "setup" not in config:
config["setup"] = {}
config["setup"]["completed"] = True
config["setup"]["completed_at"] = str(datetime.utcnow())
# Save updated configuration
return self._save_config(config)
except Exception as e:
logger.error(f"Error marking setup as complete: {e}")
return False
def reset_setup(self) -> bool:
    """Clear the setup-completed flag (intended for development/testing).

    Returns:
        True when the updated configuration was saved; False when there is
        no configuration file or saving failed.
    """
    try:
        cfg = self.get_config()
        if not cfg:
            return False
        setup_section = cfg.get("setup")
        # Only touch the flag when a setup section already exists.
        if setup_section is not None:
            setup_section["completed"] = False
        return self._save_config(cfg)
    except Exception as exc:
        logger.error(f"Error resetting setup: {exc}")
        return False
def _save_config(self, config: Dict[str, Any]) -> bool:
    """Write *config* to disk as pretty-printed UTF-8 JSON.

    Also invalidates the in-memory cache so the next ``get_config()``
    call re-reads the file.
    """
    try:
        self.config_path.parent.mkdir(parents=True, exist_ok=True)
        with open(self.config_path, 'w', encoding='utf-8') as handle:
            json.dump(config, handle, indent=4, ensure_ascii=False)
        self._config_cache = None  # force reload on next access
        logger.info(f"Configuration saved to {self.config_path}")
        return True
    except Exception as exc:
        logger.error(f"Error saving configuration: {exc}")
        return False
def get_setup_requirements(self) -> Dict[str, bool]:
    """
    Get detailed breakdown of setup requirements and their status.

    Returns:
        dict: Mapping of requirement name -> completion status (bool).
    """
    config = self.get_config()
    return {
        "config_file_exists": self.config_path.exists(),
        "config_file_valid": self._is_config_valid(),
        "database_exists": self.db_path.exists(),
        "database_accessible": self._is_database_accessible(),
        "master_password_configured": self._is_master_password_configured(),
        # BUG FIX: a missing "setup" section previously defaulted to True,
        # which made get_missing_requirements() never report "Setup process
        # was not completed" for configs that mark_setup_complete() had
        # never touched. A missing flag must mean "not complete".
        "setup_marked_complete": bool(config and config.get("setup", {}).get("completed", False))
    }
def get_missing_requirements(self) -> List[str]:
    """Describe which setup requirements are still unmet.

    Returns:
        A human-readable message for every missing requirement; empty when
        setup is fully complete.
    """
    status = self.get_setup_requirements()
    missing: List[str] = []
    # Config: distinguish "file absent" from "file present but invalid".
    if not status["config_file_exists"]:
        missing.append("Configuration file is missing")
    elif not status["config_file_valid"]:
        missing.append("Configuration file is invalid or corrupted")
    # Database: same two-level distinction.
    if not status["database_exists"]:
        missing.append("Database file is missing")
    elif not status["database_accessible"]:
        missing.append("Database is not accessible or corrupted")
    if not status["master_password_configured"]:
        missing.append("Master password is not configured")
    if not status["setup_marked_complete"]:
        missing.append("Setup process was not completed")
    return missing
# Convenience functions for easy import
def is_setup_complete() -> bool:
    """Convenience wrapper: check setup completion via a fresh SetupService."""
    return SetupService().is_setup_complete()
def get_setup_service() -> SetupService:
    """Return a new, ready-to-use SetupService instance."""
    return SetupService()

View File

@@ -1,782 +0,0 @@
"""
Anime Management API Endpoints
This module provides REST API endpoints for anime CRUD operations,
including creation, reading, updating, deletion, and search functionality.
"""
import uuid
from typing import Any, Dict, List, Optional
from fastapi import APIRouter, Depends, HTTPException, Query, status
from pydantic import BaseModel, Field
# Import SeriesApp for business logic
from src.core.SeriesApp import SeriesApp
# FastAPI dependencies and models
from src.server.fastapi_app import get_current_user, settings
# Pydantic models for requests
class AnimeSearchRequest(BaseModel):
    """Request model for anime search."""
    # Free-text search term; required, 1-100 characters.
    query: str = Field(..., min_length=1, max_length=100)
    # Optional lifecycle filter restricted to the known status values.
    status: Optional[str] = Field(None, pattern="^(ongoing|completed|planned|dropped|paused)$")
    genre: Optional[str] = None
    # Release year bounds are a sanity check, not a business rule.
    year: Optional[int] = Field(None, ge=1900, le=2100)
class AnimeResponse(BaseModel):
    """Response model for anime data."""
    # Stable identifier (a UUID string is generated when the entity has none).
    id: str
    title: str
    description: Optional[str] = None
    status: str = "Unknown"
    # Filesystem folder the series lives in, when known.
    folder: Optional[str] = None
    episodes: int = 0
class AnimeCreateRequest(BaseModel):
    """Request model for creating anime entries."""
    name: str = Field(..., min_length=1, max_length=255)
    # Target folder is mandatory: it is the unique key used for conflict checks.
    folder: str = Field(..., min_length=1)
    description: Optional[str] = None
    # New entries default to "planned" until episodes are tracked.
    status: str = Field(default="planned", pattern="^(ongoing|completed|planned|dropped|paused)$")
    genre: Optional[str] = None
    year: Optional[int] = Field(None, ge=1900, le=2100)
class AnimeUpdateRequest(BaseModel):
    """Request model for updating anime entries.

    All fields are optional; only supplied fields are changed.
    """
    name: Optional[str] = Field(None, min_length=1, max_length=255)
    folder: Optional[str] = None
    description: Optional[str] = None
    status: Optional[str] = Field(None, pattern="^(ongoing|completed|planned|dropped|paused)$")
    genre: Optional[str] = None
    year: Optional[int] = Field(None, ge=1900, le=2100)
class PaginatedAnimeResponse(BaseModel):
    """Paginated response model for anime lists."""
    success: bool = True
    data: List[AnimeResponse]
    # page / per_page / total / pages / has_next / has_prev metadata.
    pagination: Dict[str, Any]
class AnimeSearchResponse(BaseModel):
    """Response model for anime search results."""
    success: bool = True
    data: List[AnimeResponse]
    pagination: Dict[str, Any]
    # Echo of the query plus total_results, for client display.
    search: Dict[str, Any]
class RescanResponse(BaseModel):
    """Response model for rescan operations."""
    success: bool
    message: str
    # Number of series known after the rescan finished.
    total_series: int
# Dependency to get SeriesApp instance
def get_series_app() -> SeriesApp:
    """FastAPI dependency that builds a SeriesApp for business-logic calls.

    Raises:
        HTTPException: 503 when no anime directory has been configured.
    """
    directory = settings.anime_directory
    if directory:
        return SeriesApp(directory)
    raise HTTPException(
        status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
        detail="Anime directory not configured"
    )
# Create FastAPI router for anime management endpoints (mounted under /api/v1/anime)
router = APIRouter(prefix='/api/v1/anime', tags=['anime'])
@router.get('', response_model=PaginatedAnimeResponse)
async def list_anime(
    status: Optional[str] = Query(None, pattern="^(ongoing|completed|planned|dropped|paused)$"),
    genre: Optional[str] = Query(None),
    year: Optional[int] = Query(None, ge=1900, le=2100),
    search: Optional[str] = Query(None),
    page: int = Query(1, ge=1),
    per_page: int = Query(50, ge=1, le=1000),
    current_user: Optional[Dict] = Depends(get_current_user),
    series_app: SeriesApp = Depends(get_series_app)
) -> PaginatedAnimeResponse:
    """
    Get all anime with optional filtering and pagination.

    NOTE(review): only ``search`` is actually applied below; the ``status``,
    ``genre`` and ``year`` parameters are validated but never filter the
    result — confirm intent before advertising them as working filters.

    Query Parameters:
    - status: Filter by anime status (ongoing, completed, planned, dropped, paused)
    - genre: Filter by genre
    - year: Filter by release year
    - search: Search in name and description
    - page: Page number (default: 1)
    - per_page: Items per page (default: 50, max: 1000)

    Returns:
        Paginated list of anime with metadata
    """
    try:
        # Get the in-memory series list maintained by SeriesApp
        anime_list = series_app.series_list
        # Convert to list of AnimeResponse objects
        anime_responses = []
        for series_item in anime_list:
            # getattr with defaults: series entities may lack these fields
            anime_response = AnimeResponse(
                id=getattr(series_item, 'id', str(uuid.uuid4())),
                title=getattr(series_item, 'name', 'Unknown'),
                folder=getattr(series_item, 'folder', ''),
                description=getattr(series_item, 'description', ''),
                status='ongoing',  # Default status
                episodes=getattr(series_item, 'total_episodes', 0)
            )
            # Apply search filter if provided (case-insensitive substring)
            if search:
                if search.lower() not in anime_response.title.lower():
                    continue
            anime_responses.append(anime_response)
        # Apply pagination
        total = len(anime_responses)
        start_idx = (page - 1) * per_page
        end_idx = start_idx + per_page
        paginated_anime = anime_responses[start_idx:end_idx]
        return PaginatedAnimeResponse(
            data=paginated_anime,
            pagination={
                "page": page,
                "per_page": per_page,
                "total": total,
                "pages": (total + per_page - 1) // per_page,
                "has_next": end_idx < total,
                "has_prev": page > 1
            }
        )
    except Exception as e:
        # BUG FIX: the ``status`` query parameter shadows fastapi's ``status``
        # module inside this function, so the original
        # ``status.HTTP_500_INTERNAL_SERVER_ERROR`` raised AttributeError
        # instead of producing a 500 response. Use the numeric code directly.
        raise HTTPException(
            status_code=500,
            detail=f"Error retrieving anime list: {str(e)}"
        )
@anime_bp.route('/<int:anime_id>', methods=['GET'])
@handle_api_errors
@validate_id_parameter('anime_id')
@optional_auth
def get_anime(anime_id: int) -> Dict[str, Any]:
    """
    Get specific anime by ID.

    NOTE(review): legacy Flask code — ``anime_bp``, ``anime_repository`` and
    the decorator/helper names used here are not imported anywhere in this
    module; confirm whether this endpoint is still wired up or should be
    ported to the FastAPI router above.

    Args:
        anime_id: Unique identifier for the anime
    Returns:
        Anime details with episodes summary
    """
    if not anime_repository:
        raise APIException("Anime repository not available", 503)
    anime = anime_repository.get_anime_by_id(anime_id)
    if not anime:
        raise NotFoundError("Anime not found")
    # Format anime data
    anime_data = format_anime_response(anime.__dict__)
    # Attach per-episode counts for the detail view
    episodes_summary = anime_repository.get_episodes_summary(anime_id)
    anime_data['episodes_summary'] = episodes_summary
    return create_success_response(anime_data)
@anime_bp.route('', methods=['POST'])
@handle_api_errors
@validate_json_input(
    required_fields=['name', 'folder'],
    optional_fields=['key', 'description', 'genres', 'release_year', 'status', 'total_episodes', 'poster_url', 'custom_metadata'],
    field_types={
        'name': str,
        'folder': str,
        'key': str,
        'description': str,
        'genres': list,
        'release_year': int,
        'status': str,
        'total_episodes': int,
        'poster_url': str,
        'custom_metadata': dict
    }
)
@require_auth
def create_anime() -> Dict[str, Any]:
    """
    Create a new anime record.

    NOTE(review): legacy Flask code — ``anime_bp``, ``request``,
    ``anime_repository`` and ``AnimeMetadata`` are not imported in this
    module; verify this endpoint is still in use.

    Required Fields:
    - name: Anime name
    - folder: Folder path where anime files are stored
    Optional Fields:
    - key: Unique key identifier
    - description: Anime description
    - genres: List of genres
    - release_year: Year of release
    - status: Status (ongoing, completed, planned, dropped, paused)
    - total_episodes: Total number of episodes
    - poster_url: URL to poster image
    - custom_metadata: Additional metadata as key-value pairs
    Returns:
        Created anime details with generated ID
    """
    if not anime_repository:
        raise APIException("Anime repository not available", 503)
    data = request.get_json()
    # Validate status if provided
    if 'status' in data and data['status'] not in ['ongoing', 'completed', 'planned', 'dropped', 'paused']:
        raise ValidationError("Status must be one of: ongoing, completed, planned, dropped, paused")
    # Folder acts as a uniqueness key: reject duplicates up front
    existing_anime = anime_repository.get_anime_by_folder(data['folder'])
    if existing_anime:
        raise ValidationError("Anime with this folder already exists")
    # Create anime metadata object
    try:
        anime = AnimeMetadata(
            anime_id=str(uuid.uuid4()),
            name=data['name'],
            folder=data['folder'],
            key=data.get('key'),
            description=data.get('description'),
            genres=data.get('genres', []),
            release_year=data.get('release_year'),
            status=data.get('status', 'planned'),
            total_episodes=data.get('total_episodes'),
            poster_url=data.get('poster_url'),
            custom_metadata=data.get('custom_metadata', {})
        )
    except Exception as e:
        raise ValidationError(f"Invalid anime data: {str(e)}")
    # Save to database
    success = anime_repository.create_anime(anime)
    if not success:
        raise APIException("Failed to create anime", 500)
    # Return created anime
    anime_data = format_anime_response(anime.__dict__)
    return create_success_response(
        data=anime_data,
        message="Anime created successfully",
        status_code=201
    )
@anime_bp.route('/<int:anime_id>', methods=['PUT'])
@handle_api_errors
@validate_id_parameter('anime_id')
@validate_json_input(
    optional_fields=['name', 'folder', 'key', 'description', 'genres', 'release_year', 'status', 'total_episodes', 'poster_url', 'custom_metadata'],
    field_types={
        'name': str,
        'folder': str,
        'key': str,
        'description': str,
        'genres': list,
        'release_year': int,
        'status': str,
        'total_episodes': int,
        'poster_url': str,
        'custom_metadata': dict
    }
)
@require_auth
def update_anime(anime_id: int) -> Dict[str, Any]:
    """
    Update an existing anime record.

    NOTE(review): legacy Flask code — the blueprint, helpers and repository
    referenced here are not imported in this module.

    Args:
        anime_id: Unique identifier for the anime
    Optional Fields: name, folder, key, description, genres, release_year,
        status, total_episodes, poster_url, custom_metadata
    Returns:
        Updated anime details
    """
    if not anime_repository:
        raise APIException("Anime repository not available", 503)
    data = request.get_json()
    # Get existing anime
    existing_anime = anime_repository.get_anime_by_id(anime_id)
    if not existing_anime:
        raise NotFoundError("Anime not found")
    # Validate status if provided
    if 'status' in data and data['status'] not in ['ongoing', 'completed', 'planned', 'dropped', 'paused']:
        raise ValidationError("Status must be one of: ongoing, completed, planned, dropped, paused")
    # Folder is unique: a changed folder must not collide with another record
    if 'folder' in data and data['folder'] != existing_anime.folder:
        conflicting_anime = anime_repository.get_anime_by_folder(data['folder'])
        if conflicting_anime and conflicting_anime.anime_id != anime_id:
            raise ValidationError("Another anime with this folder already exists")
    # Collect only the whitelisted fields that were actually supplied
    update_fields = {}
    for field in ['name', 'folder', 'key', 'description', 'genres', 'release_year', 'status', 'total_episodes', 'poster_url']:
        if field in data:
            update_fields[field] = data[field]
    # Handle custom metadata update (merge instead of replace)
    if 'custom_metadata' in data:
        existing_metadata = existing_anime.custom_metadata or {}
        existing_metadata.update(data['custom_metadata'])
        update_fields['custom_metadata'] = existing_metadata
    # Perform update
    success = anime_repository.update_anime(anime_id, update_fields)
    if not success:
        raise APIException("Failed to update anime", 500)
    # Re-read so the response reflects the persisted state
    updated_anime = anime_repository.get_anime_by_id(anime_id)
    anime_data = format_anime_response(updated_anime.__dict__)
    return create_success_response(
        data=anime_data,
        message="Anime updated successfully"
    )
@anime_bp.route('/<int:anime_id>', methods=['DELETE'])
@handle_api_errors
@validate_id_parameter('anime_id')
@require_auth
def delete_anime(anime_id: int) -> Dict[str, Any]:
    """
    Delete an anime record and all related data.

    NOTE(review): legacy Flask code — blueprint/repository names are not
    imported in this module.

    Args:
        anime_id: Unique identifier for the anime
    Query Parameters:
    - force: Set to 'true' to force deletion even if episodes exist
    Returns:
        Deletion confirmation
    """
    if not anime_repository:
        raise APIException("Anime repository not available", 503)
    # Check if anime exists
    existing_anime = anime_repository.get_anime_by_id(anime_id)
    if not existing_anime:
        raise NotFoundError("Anime not found")
    # Guard against accidental data loss unless ?force=true was passed
    force_delete = request.args.get('force', 'false').lower() == 'true'
    if not force_delete:
        episode_count = anime_repository.get_episode_count(anime_id)
        if episode_count > 0:
            raise ValidationError(
                f"Cannot delete anime with {episode_count} episodes. "
                "Use ?force=true to force deletion or delete episodes first."
            )
    # Perform deletion (this should cascade to episodes, downloads, etc.)
    success = anime_repository.delete_anime(anime_id)
    if not success:
        raise APIException("Failed to delete anime", 500)
    return create_success_response(
        message=f"Anime '{existing_anime.name}' deleted successfully"
    )
@router.get('/search', response_model=AnimeSearchResponse)
async def search_anime(
    q: str = Query(..., min_length=2, description="Search query"),
    page: int = Query(1, ge=1),
    per_page: int = Query(20, ge=1, le=100),
    current_user: Optional[Dict] = Depends(get_current_user),
    series_app: SeriesApp = Depends(get_series_app)
) -> AnimeSearchResponse:
    """
    Search anime by name using SeriesApp.

    Query Parameters:
    - q: Search query (required, min 2 characters)
    - page: Page number (default: 1)
    - per_page: Items per page (default: 20, max: 100)

    Returns:
        Paginated search results
    """
    # FIX: removed the unreachable leftover Flask code that followed this
    # function's try/except (it referenced undefined names such as
    # ``formatted_results``, ``create_paginated_response`` and
    # ``search_term`` from the old blueprint implementation).
    try:
        # Use SeriesApp to perform search
        search_results = series_app.search(q)
        # Convert search results to AnimeResponse objects; getattr defaults
        # cope with provider results that lack some attributes.
        anime_responses = []
        for result in search_results:
            anime_response = AnimeResponse(
                id=getattr(result, 'id', str(uuid.uuid4())),
                title=getattr(result, 'name', getattr(result, 'title', 'Unknown')),
                description=getattr(result, 'description', ''),
                status='available',
                episodes=getattr(result, 'episodes', 0),
                folder=getattr(result, 'key', '')
            )
            anime_responses.append(anime_response)
        # Apply pagination
        total = len(anime_responses)
        start_idx = (page - 1) * per_page
        end_idx = start_idx + per_page
        paginated_results = anime_responses[start_idx:end_idx]
        return AnimeSearchResponse(
            data=paginated_results,
            pagination={
                "page": page,
                "per_page": per_page,
                "total": total,
                "pages": (total + per_page - 1) // per_page,
                "has_next": end_idx < total,
                "has_prev": page > 1
            },
            search={
                "query": q,
                "total_results": total
            }
        )
    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Search failed: {str(e)}"
        )
@anime_bp.route('/<int:anime_id>/episodes', methods=['GET'])
@handle_api_errors
@validate_id_parameter('anime_id')
@validate_pagination_params
@optional_auth
def get_anime_episodes(anime_id: int) -> Dict[str, Any]:
    """
    Get all episodes for a specific anime.

    NOTE(review): legacy Flask code — blueprint/repository/helper names are
    not imported in this module.

    Args:
        anime_id: Unique identifier for the anime
    Query Parameters:
    - status: Filter by episode status
    - downloaded: Filter by download status (true/false)
    - page: Page number (default: 1)
    - per_page: Items per page (default: 50, max: 1000)
    Returns:
        Paginated list of episodes for the anime
    """
    if not anime_repository:
        raise APIException("Anime repository not available", 503)
    # Check if anime exists
    anime = anime_repository.get_anime_by_id(anime_id)
    if not anime:
        raise NotFoundError("Anime not found")
    # Get filters
    status_filter = request.args.get('status')
    downloaded_filter = request.args.get('downloaded')
    # Validate downloaded filter
    if downloaded_filter and downloaded_filter.lower() not in ['true', 'false']:
        raise ValidationError("Downloaded filter must be 'true' or 'false'")
    # Get pagination parameters
    page, per_page = extract_pagination_params()
    # Repository expects a tri-state downloaded filter: True/False/None
    episodes = anime_repository.get_episodes_for_anime(
        anime_id=anime_id,
        status_filter=status_filter,
        downloaded_filter=downloaded_filter.lower() == 'true' if downloaded_filter else None
    )
    # Format episodes (this would use episode formatting from episodes.py)
    formatted_episodes = []
    for episode in episodes:
        formatted_episodes.append({
            'id': episode.id,
            'episode_number': episode.episode_number,
            'title': episode.title,
            'url': episode.url,
            'status': episode.status,
            'is_downloaded': episode.is_downloaded,
            'file_path': episode.file_path,
            'file_size': episode.file_size,
            'created_at': episode.created_at.isoformat() if episode.created_at else None,
            'updated_at': episode.updated_at.isoformat() if episode.updated_at else None
        })
    # Apply pagination
    total = len(formatted_episodes)
    start_idx = (page - 1) * per_page
    end_idx = start_idx + per_page
    paginated_episodes = formatted_episodes[start_idx:end_idx]
    return create_paginated_response(
        data=paginated_episodes,
        page=page,
        per_page=per_page,
        total=total,
        endpoint='anime.get_anime_episodes',
        anime_id=anime_id
    )
@anime_bp.route('/bulk', methods=['POST'])
@handle_api_errors
@validate_json_input(
    required_fields=['action', 'anime_ids'],
    optional_fields=['data'],
    field_types={
        'action': str,
        'anime_ids': list,
        'data': dict
    }
)
@require_auth
def bulk_anime_operation() -> Dict[str, Any]:
    """
    Perform bulk operations on multiple anime.

    NOTE(review): legacy Flask code — blueprint/repository names are not
    imported in this module. Also note that 'update_genres' is listed in
    valid_actions but has no handler branch below; such requests succeed
    for no items — confirm intent.

    Required Fields:
    - action: Operation to perform (update_status, delete, update_metadata)
    - anime_ids: List of anime IDs to operate on
    Optional Fields:
    - data: Additional data for the operation
    Returns:
        Results of the bulk operation
    """
    if not anime_repository:
        raise APIException("Anime repository not available", 503)
    data = request.get_json()
    action = data['action']
    anime_ids = data['anime_ids']
    operation_data = data.get('data', {})
    # Validate action
    valid_actions = ['update_status', 'delete', 'update_metadata', 'update_genres']
    if action not in valid_actions:
        raise ValidationError(f"Invalid action. Must be one of: {', '.join(valid_actions)}")
    # Validate anime_ids
    if not isinstance(anime_ids, list) or not anime_ids:
        raise ValidationError("anime_ids must be a non-empty list")
    # Hard cap protects the repository from unbounded bulk requests
    if len(anime_ids) > 100:
        raise ValidationError("Cannot operate on more than 100 anime at once")
    # Validate anime IDs are integers
    try:
        anime_ids = [int(aid) for aid in anime_ids]
    except ValueError:
        raise ValidationError("All anime_ids must be valid integers")
    # Perform bulk operation; failures are collected per item, not raised
    successful_items = []
    failed_items = []
    for anime_id in anime_ids:
        try:
            if action == 'update_status':
                if 'status' not in operation_data:
                    raise ValueError("Status is required for update_status action")
                success = anime_repository.update_anime(anime_id, {'status': operation_data['status']})
                if success:
                    successful_items.append({'anime_id': anime_id, 'action': 'status_updated'})
                else:
                    failed_items.append({'anime_id': anime_id, 'error': 'Update failed'})
            elif action == 'delete':
                success = anime_repository.delete_anime(anime_id)
                if success:
                    successful_items.append({'anime_id': anime_id, 'action': 'deleted'})
                else:
                    failed_items.append({'anime_id': anime_id, 'error': 'Deletion failed'})
            elif action == 'update_metadata':
                success = anime_repository.update_anime(anime_id, operation_data)
                if success:
                    successful_items.append({'anime_id': anime_id, 'action': 'metadata_updated'})
                else:
                    failed_items.append({'anime_id': anime_id, 'error': 'Metadata update failed'})
        except Exception as e:
            failed_items.append({'anime_id': anime_id, 'error': str(e)})
    # Create batch response
    from ...shared.response_helpers import create_batch_response
    return create_batch_response(
        successful_items=successful_items,
        failed_items=failed_items,
        message=f"Bulk {action} operation completed"
    )
@router.post('/rescan', response_model=RescanResponse)
async def rescan_anime_directory(
    current_user: Dict = Depends(get_current_user),
    series_app: SeriesApp = Depends(get_series_app)
) -> RescanResponse:
    """
    Rescan the anime directory for new episodes and series.

    Returns:
        Status of the rescan operation
    """
    try:
        # Use SeriesApp to perform rescan with a simple callback
        def progress_callback(progress_info):
            # Progress is currently discarded; it could be pushed over a
            # WebSocket or stored for polling in a later iteration.
            pass
        series_app.ReScan(progress_callback)
        # hasattr guard: series_list may not exist on every SeriesApp build
        return RescanResponse(
            success=True,
            message="Anime directory rescanned successfully",
            total_series=len(series_app.series_list) if hasattr(series_app, 'series_list') else 0
        )
    except Exception as e:
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=f"Rescan failed: {str(e)}"
        )
# Additional endpoints for legacy API compatibility
class AddSeriesRequest(BaseModel):
    """Request model for adding a new series."""
    # Provider URL identifying the series to add.
    link: str = Field(..., min_length=1)
    name: str = Field(..., min_length=1, max_length=255)
class AddSeriesResponse(BaseModel):
    """Response model for add series operation."""
    # "success" or "error" (legacy API contract).
    status: str
    message: str
class DownloadRequest(BaseModel):
    """Request model for downloading series."""
    # Pydantic v2 renamed ``min_items`` to ``min_length`` for list fields;
    # the rest of this module already uses v2-style ``pattern=``, so use the
    # v2 spelling and avoid the deprecation warning.
    folders: List[str] = Field(..., min_length=1)
class DownloadResponse(BaseModel):
    """Response model for download operation."""
    # "success" or "error" (legacy API contract).
    status: str
    message: str
@router.post('/add_series', response_model=AddSeriesResponse)
async def add_series(
    request_data: AddSeriesRequest,
    current_user: Dict = Depends(get_current_user),
    series_app: SeriesApp = Depends(get_series_app)
) -> AddSeriesResponse:
    """Add a new series to the collection.

    Args:
        request_data: Link and display name of the series to add.

    Returns:
        AddSeriesResponse describing success or failure.
    """
    try:
        # Placeholder implementation: persistence through SeriesApp is not
        # wired up yet, so simply acknowledge the request.
        return AddSeriesResponse(
            status="success",
            message=f"Series '{request_data.name}' added successfully"
        )
    except Exception as exc:
        return AddSeriesResponse(
            status="error",
            message=f"Failed to add series: {str(exc)}"
        )
@router.post('/download', response_model=DownloadResponse)
async def download_series(
    request_data: DownloadRequest,
    current_user: Dict = Depends(get_current_user),
    series_app: SeriesApp = Depends(get_series_app)
) -> DownloadResponse:
    """Start downloading the selected series folders.

    Args:
        request_data: List of folder names to download.

    Returns:
        DownloadResponse describing success or failure.
    """
    try:
        # Placeholder implementation: actual downloads via SeriesApp are not
        # wired up yet, so simply acknowledge the request.
        folder_count = len(request_data.folders)
        return DownloadResponse(
            status="success",
            message=f"Download started for {folder_count} series"
        )
    except Exception as exc:
        return DownloadResponse(
            status="error",
            message=f"Failed to start download: {str(exc)}"
        )

View File

@@ -1,773 +0,0 @@
"""
Authentication API endpoints.
This module handles all authentication-related operations including:
- User authentication
- Session management
- Password management
- API key management
"""
from flask import Blueprint, request, session, jsonify
from typing import Dict, List, Any, Optional, Tuple
import logging
import hashlib
import secrets
import time
from datetime import datetime, timedelta
# Import shared utilities
try:
    from src.server.web.controllers.shared.auth_decorators import require_auth, optional_auth
    from src.server.web.controllers.shared.error_handlers import handle_api_errors
    from src.server.web.controllers.shared.validators import (
        validate_json_input, validate_query_params, is_valid_email, sanitize_string
    )
    from src.server.web.controllers.shared.response_helpers import (
        create_success_response, create_error_response, format_user_data
    )
except ImportError:
    # Fallback imports for development.
    # WARNING: these stubs disable authentication (require_auth is a
    # pass-through) and validation — they must never be active in production.
    def require_auth(f): return f
    def optional_auth(f): return f
    def handle_api_errors(f): return f
    def validate_json_input(**kwargs): return lambda f: f
    def validate_query_params(**kwargs): return lambda f: f
    def is_valid_email(email): return '@' in email
    def sanitize_string(s): return str(s).strip()
    def create_success_response(msg, code=200, data=None): return jsonify({'success': True, 'message': msg, 'data': data}), code
    def create_error_response(msg, code=400, details=None): return jsonify({'error': msg, 'details': details}), code
    # BUG FIX: endpoints below call format_user_data(user, include_sensitive=False);
    # the original stub took only (data) and raised TypeError in fallback mode.
    def format_user_data(data, include_sensitive=True): return data
# Import authentication components
try:
    from src.data.user_manager import UserManager
    from src.data.session_manager import SessionManager
    from src.data.api_key_manager import APIKeyManager
except ImportError:
    # Fallback stubs for development: they mirror the real managers' call
    # signatures so the endpoints below can run without the data layer.
    class UserManager:
        def authenticate_user(self, username, password): return None
        def get_user_by_id(self, id): return None
        def get_user_by_username(self, username): return None
        def get_user_by_email(self, email): return None
        def create_user(self, **kwargs): return 1
        def update_user(self, id, **kwargs): return True
        def delete_user(self, id): return True
        def change_password(self, id, new_password): return True
        def reset_password(self, email): return 'reset_token'
        def verify_reset_token(self, token): return None
        def get_user_sessions(self, user_id): return []
        def get_user_activity(self, user_id): return []
    class SessionManager:
        # BUG FIX: login() calls create_session(user_id, extended=remember_me);
        # the original stub omitted ``extended`` and raised TypeError in
        # fallback mode.
        def create_session(self, user_id, extended=False): return 'session_token'
        def validate_session(self, token): return None
        def destroy_session(self, token): return True
        def destroy_all_sessions(self, user_id): return True
        def get_session_info(self, token): return None
        def update_session_activity(self, token): return True
    class APIKeyManager:
        def create_api_key(self, user_id, name): return {'id': 1, 'key': 'api_key', 'name': name}
        def get_user_api_keys(self, user_id): return []
        def revoke_api_key(self, key_id): return True
        def validate_api_key(self, key): return None
# Create blueprint
auth_bp = Blueprint('auth', __name__)
# Initialize managers (module-level singletons shared by every endpoint)
user_manager = UserManager()
session_manager = SessionManager()
api_key_manager = APIKeyManager()
logger = logging.getLogger(__name__)
@auth_bp.route('/auth/login', methods=['POST'])
@handle_api_errors
@validate_json_input(
    required_fields=['username', 'password'],
    optional_fields=['remember_me'],
    field_types={'username': str, 'password': str, 'remember_me': bool}
)
def login() -> Tuple[Any, int]:
    """
    Authenticate user and create session.

    Request Body:
    - username: Username or email
    - password: User password
    - remember_me: Extend session duration (optional)
    Returns:
        JSON response with authentication result
    """
    data = request.get_json()
    username = sanitize_string(data['username'])
    password = data['password']
    remember_me = data.get('remember_me', False)
    try:
        # Authenticate user
        user = user_manager.authenticate_user(username, password)
        if not user:
            # Deliberately vague error: do not reveal which part was wrong
            logger.warning(f"Failed login attempt for username: {username}")
            return create_error_response("Invalid username or password", 401)
        # Create session
        session_token = session_manager.create_session(
            user['id'],
            extended=remember_me
        )
        # Mirror the session state into Flask's cookie-backed session
        session['user_id'] = user['id']
        session['username'] = user['username']
        session['session_token'] = session_token
        session.permanent = remember_me
        # Format user data (exclude sensitive information)
        user_data = format_user_data(user, include_sensitive=False)
        response_data = {
            'user': user_data,
            'session_token': session_token,
            # NOTE(review): expiry is hard-coded to 30/7 days here — confirm
            # it matches the SessionManager's actual session lifetime.
            'expires_at': (datetime.now() + timedelta(days=30 if remember_me else 7)).isoformat()
        }
        logger.info(f"User {user['username']} (ID: {user['id']}) logged in successfully")
        return create_success_response("Login successful", 200, response_data)
    except Exception as e:
        logger.error(f"Error during login for username {username}: {str(e)}")
        return create_error_response("Login failed", 500)
@auth_bp.route('/auth/logout', methods=['POST'])
@require_auth
@handle_api_errors
def logout() -> Tuple[Any, int]:
    """Destroy the current session and clear Flask session state.

    Returns:
        JSON response confirming logout, or a 500 error response.
    """
    try:
        token = session.get('session_token')
        user_id = session.get('user_id')
        # Remove the persisted session first, then the cookie-backed one.
        if token:
            session_manager.destroy_session(token)
        session.clear()
        logger.info(f"User ID {user_id} logged out successfully")
        return create_success_response("Logout successful")
    except Exception as e:
        logger.error(f"Error during logout: {str(e)}")
        return create_error_response("Logout failed", 500)
@auth_bp.route('/auth/register', methods=['POST'])
@handle_api_errors
@validate_json_input(
    required_fields=['username', 'email', 'password'],
    optional_fields=['full_name'],
    field_types={'username': str, 'email': str, 'password': str, 'full_name': str}
)
def register() -> Tuple[Any, int]:
    """
    Register new user account.

    Request Body:
    - username: Unique username (min 3 characters)
    - email: User email address
    - password: User password (min 8 characters)
    - full_name: User's full name (optional)
    Returns:
        JSON response with registration result
    """
    data = request.get_json()
    username = sanitize_string(data['username'])
    email = sanitize_string(data['email'])
    password = data['password']
    full_name = sanitize_string(data.get('full_name', ''))
    # Validate input before touching the data layer
    if len(username) < 3:
        return create_error_response("Username must be at least 3 characters long", 400)
    if len(password) < 8:
        return create_error_response("Password must be at least 8 characters long", 400)
    if not is_valid_email(email):
        return create_error_response("Invalid email address", 400)
    try:
        # Check if username already exists
        existing_user = user_manager.get_user_by_username(username)
        if existing_user:
            return create_error_response("Username already exists", 409)
        # Check if email already exists
        existing_email = user_manager.get_user_by_email(email)
        if existing_email:
            return create_error_response("Email already registered", 409)
        # Create user (password hashing is the user manager's responsibility)
        user_id = user_manager.create_user(
            username=username,
            email=email,
            password=password,
            full_name=full_name
        )
        # Re-read so the response reflects the persisted record
        user = user_manager.get_user_by_id(user_id)
        user_data = format_user_data(user, include_sensitive=False)
        logger.info(f"New user registered: {username} (ID: {user_id})")
        return create_success_response("Registration successful", 201, user_data)
    except Exception as e:
        logger.error(f"Error during registration for username {username}: {str(e)}")
        return create_error_response("Registration failed", 500)
@auth_bp.route('/auth/me', methods=['GET'])
@require_auth
@handle_api_errors
def get_current_user() -> Tuple[Any, int]:
    """Return the authenticated user's profile data.

    Returns:
        JSON response with the user record (sensitive fields excluded).
    """
    try:
        user = user_manager.get_user_by_id(session.get('user_id'))
        if not user:
            return create_error_response("User not found", 404)
        profile = format_user_data(user, include_sensitive=False)
        return create_success_response("User information retrieved", 200, profile)
    except Exception as e:
        logger.error(f"Error getting current user: {str(e)}")
        return create_error_response("Failed to get user information", 500)
@auth_bp.route('/auth/me', methods=['PUT'])
@require_auth
@handle_api_errors
@validate_json_input(
    optional_fields=['email', 'full_name'],
    field_types={'email': str, 'full_name': str}
)
def update_current_user() -> Tuple[Any, int]:
    """
    Update current user information.
    Request Body:
        - email: New email address (optional)
        - full_name: New full name (optional)
    Returns:
        JSON response with update result
    """
    payload = request.get_json()
    uid = session.get('user_id')
    # Reject a malformed email before touching the user store.
    if 'email' in payload and not is_valid_email(payload['email']):
        return create_error_response("Invalid email address", 400)
    try:
        # An email may only be set if no *other* account already owns it.
        if 'email' in payload:
            holder = user_manager.get_user_by_email(payload['email'])
            if holder and holder['id'] != uid:
                return create_error_response("Email already registered", 409)
        if not user_manager.update_user(uid, **payload):
            return create_error_response("Failed to update profile", 500)
        refreshed = user_manager.get_user_by_id(uid)
        logger.info(f"User {uid} updated their profile")
        return create_success_response(
            "Profile updated successfully", 200,
            format_user_data(refreshed, include_sensitive=False),
        )
    except Exception as e:
        logger.error(f"Error updating user {uid}: {str(e)}")
        return create_error_response("Failed to update profile", 500)
@auth_bp.route('/auth/change-password', methods=['PUT'])
@require_auth
@handle_api_errors
@validate_json_input(
    required_fields=['current_password', 'new_password'],
    field_types={'current_password': str, 'new_password': str}
)
def change_password() -> Tuple[Any, int]:
    """
    Change user password.
    Request Body:
        - current_password: Current password
        - new_password: New password
    Returns:
        JSON response with change result (400 on weak password, 401 on wrong
        current password, 404 when the session's user no longer exists)
    """
    data = request.get_json()
    user_id = session.get('user_id')
    current_password = data['current_password']
    new_password = data['new_password']
    # Enforce the same minimum length used at registration.
    if len(new_password) < 8:
        return create_error_response("New password must be at least 8 characters long", 400)
    try:
        # Get user. Guard against a stale session pointing at a deleted account:
        # previously a missing user crashed on user['username'] and surfaced as a
        # generic 500 instead of a 404.
        user = user_manager.get_user_by_id(user_id)
        if not user:
            return create_error_response("User not found", 404)
        # Verify current password before allowing the change.
        authenticated_user = user_manager.authenticate_user(user['username'], current_password)
        if not authenticated_user:
            return create_error_response("Current password is incorrect", 401)
        # Change password
        success = user_manager.change_password(user_id, new_password)
        if success:
            logger.info(f"User {user_id} changed their password")
            return create_success_response("Password changed successfully")
        else:
            return create_error_response("Failed to change password", 500)
    except Exception as e:
        logger.error(f"Error changing password for user {user_id}: {str(e)}")
        return create_error_response("Failed to change password", 500)
@auth_bp.route('/auth/forgot-password', methods=['POST'])
@handle_api_errors
@validate_json_input(
    required_fields=['email'],
    field_types={'email': str}
)
def forgot_password() -> Tuple[Any, int]:
    """
    Request password reset.
    Request Body:
        - email: User email address
    Returns:
        JSON response with reset result
    """
    email = sanitize_string(request.get_json()['email'])
    if not is_valid_email(email):
        return create_error_response("Invalid email address", 400)
    try:
        account = user_manager.get_user_by_email(email)
        if account:
            # Generate a token; a real deployment would deliver it via email.
            user_manager.reset_password(email)
            logger.info(f"Password reset requested for user {account['id']} (email: {email})")
        else:
            # Log for ops, but never reveal to the caller whether the address exists.
            logger.warning(f"Password reset requested for non-existent email: {email}")
        # Identical response either way so the endpoint cannot be used to probe emails.
        return create_success_response("If the email exists, a reset link has been sent")
    except Exception as e:
        logger.error(f"Error processing password reset for email {email}: {str(e)}")
        return create_error_response("Failed to process password reset", 500)
@auth_bp.route('/auth/reset-password', methods=['POST'])
@handle_api_errors
@validate_json_input(
    required_fields=['token', 'new_password'],
    field_types={'token': str, 'new_password': str}
)
def reset_password() -> Tuple[Any, int]:
    """
    Reset password using token.
    Request Body:
        - token: Password reset token
        - new_password: New password
    Returns:
        JSON response with reset result
    """
    body = request.get_json()
    reset_token = body['token']
    replacement = body['new_password']
    # Same minimum length as registration.
    if len(replacement) < 8:
        return create_error_response("New password must be at least 8 characters long", 400)
    try:
        # A valid, unexpired token resolves to the owning user record.
        owner = user_manager.verify_reset_token(reset_token)
        if not owner:
            return create_error_response("Invalid or expired reset token", 400)
        if user_manager.change_password(owner['id'], replacement):
            logger.info(f"Password reset completed for user {owner['id']}")
            return create_success_response("Password reset successfully")
        return create_error_response("Failed to reset password", 500)
    except Exception as e:
        logger.error(f"Error resetting password with token: {str(e)}")
        return create_error_response("Failed to reset password", 500)
@auth_bp.route('/auth/sessions', methods=['GET'])
@require_auth
@handle_api_errors
def get_user_sessions() -> Tuple[Any, int]:
    """
    Get user's active sessions.
    Returns:
        JSON response with user sessions
    """
    try:
        current_user_id = session.get('user_id')
        user_sessions = user_manager.get_user_sessions(current_user_id)
        return create_success_response("Sessions retrieved successfully", 200, user_sessions)
    except Exception as e:
        logger.error(f"Error getting user sessions: {str(e)}")
        return create_error_response("Failed to get sessions", 500)
@auth_bp.route('/auth/sessions', methods=['DELETE'])
@require_auth
@handle_api_errors
def destroy_all_sessions() -> Tuple[Any, int]:
    """
    Destroy all user sessions except current one.
    Returns:
        JSON response with operation result
    """
    try:
        uid = session.get('user_id')
        token_in_use = session.get('session_token')
        # Keep the caller's own session alive; drop every other one.
        if session_manager.destroy_all_sessions(uid, except_token=token_in_use):
            logger.info(f"All sessions destroyed for user {uid}")
            return create_success_response("All other sessions destroyed successfully")
        return create_error_response("Failed to destroy sessions", 500)
    except Exception as e:
        logger.error(f"Error destroying sessions: {str(e)}")
        return create_error_response("Failed to destroy sessions", 500)
@auth_bp.route('/auth/api-keys', methods=['GET'])
@require_auth
@handle_api_errors
def get_api_keys() -> Tuple[Any, int]:
    """
    Get user's API keys.
    Returns:
        JSON response with API keys
    """
    try:
        owner_id = session.get('user_id')
        keys = api_key_manager.get_user_api_keys(owner_id)
        return create_success_response("API keys retrieved successfully", 200, keys)
    except Exception as e:
        logger.error(f"Error getting API keys: {str(e)}")
        return create_error_response("Failed to get API keys", 500)
@auth_bp.route('/auth/api-keys', methods=['POST'])
@require_auth
@handle_api_errors
@validate_json_input(
    required_fields=['name'],
    optional_fields=['description'],
    field_types={'name': str, 'description': str}
)
def create_api_key() -> Tuple[Any, int]:
    """
    Create new API key.
    Request Body:
        - name: API key name
        - description: API key description (optional)
    Returns:
        JSON response with created API key
    """
    body = request.get_json()
    owner_id = session.get('user_id')
    # Sanitize free-text fields before storing them.
    key_name = sanitize_string(body['name'])
    key_description = sanitize_string(body.get('description', ''))
    try:
        new_key = api_key_manager.create_api_key(
            user_id=owner_id,
            name=key_name,
            description=key_description,
        )
        logger.info(f"API key created for user {owner_id}: {key_name}")
        return create_success_response("API key created successfully", 201, new_key)
    except Exception as e:
        logger.error(f"Error creating API key for user {owner_id}: {str(e)}")
        return create_error_response("Failed to create API key", 500)
@auth_bp.route('/auth/api-keys/<int:key_id>', methods=['DELETE'])
@require_auth
@handle_api_errors
def revoke_api_key(key_id: int) -> Tuple[Any, int]:
    """
    Revoke API key.
    Args:
        key_id: API key ID
    Returns:
        JSON response with revocation result
    """
    try:
        owner_id = session.get('user_id')
        # The manager enforces ownership, so another user's key id yields the
        # same 404 as a missing key and does not leak its existence.
        if api_key_manager.revoke_api_key(key_id, owner_id):
            logger.info(f"API key {key_id} revoked by user {owner_id}")
            return create_success_response("API key revoked successfully")
        return create_error_response("API key not found or access denied", 404)
    except Exception as e:
        logger.error(f"Error revoking API key {key_id}: {str(e)}")
        return create_error_response("Failed to revoke API key", 500)
@auth_bp.route('/auth/password-reset', methods=['POST'])
@handle_api_errors
@validate_json_input(
    required_fields=['email'],
    field_types={'email': str}
)
def request_password_reset() -> Tuple[Any, int]:
    """
    Request password reset for user email.
    Request Body:
        - email: User email address
    Returns:
        JSON response with password reset request result
    """
    email = sanitize_string(request.get_json()['email'])
    try:
        # Validate email format
        if not is_valid_email(email):
            return create_error_response("Invalid email format", 400)
        account = user_manager.get_user_by_email(email)
        if not account:
            # Never reveal to the caller whether an address is registered.
            logger.warning(f"Password reset requested for non-existent email: {email}")
        else:
            token = user_manager.create_password_reset_token(account['id'])
            # A real deployment would email the token instead of logging it.
            logger.info(f"Password reset token generated for user {account['id']}: {token}")
        # Identical response for both branches by design.
        return create_success_response("If the email exists, a password reset link has been sent")
    except Exception as e:
        logger.error(f"Error during password reset request for {email}: {str(e)}")
        return create_error_response("Failed to process password reset request", 500)
@auth_bp.route('/auth/password-reset/confirm', methods=['POST'])
@handle_api_errors
@validate_json_input(
    required_fields=['token', 'new_password'],
    field_types={'token': str, 'new_password': str}
)
def confirm_password_reset() -> Tuple[Any, int]:
    """
    Confirm password reset with token.
    Request Body:
        - token: Password reset token
        - new_password: New password
    Returns:
        JSON response with password reset confirmation result
    """
    body = request.get_json()
    token_value = body['token']
    replacement = body['new_password']
    try:
        # Validate password strength
        if len(replacement) < 8:
            return create_error_response("Password must be at least 8 characters long", 400)
        # Here the token resolves to a bare user id (unlike /auth/reset-password,
        # which gets a full record back).
        uid = user_manager.verify_reset_token(token_value)
        if not uid:
            return create_error_response("Invalid or expired reset token", 400)
        if not user_manager.change_password(uid, replacement):
            return create_error_response("Failed to update password", 500)
        # Force re-login everywhere after a successful reset.
        session_manager.destroy_all_sessions(uid)
        logger.info(f"Password reset completed for user ID {uid}")
        return create_success_response("Password has been successfully reset")
    except Exception as e:
        logger.error(f"Error during password reset confirmation: {str(e)}")
        return create_error_response("Failed to reset password", 500)
@auth_bp.route('/auth/refresh', methods=['POST'])
@handle_api_errors
def refresh_token() -> Tuple[Any, int]:
    """
    Refresh authentication token.
    Rotates the session token: validates the current server-side session,
    creates a fresh token, destroys the old one, then updates the cookie
    session. The statement order is deliberate - the new session exists
    before the old one is destroyed.
    Returns:
        JSON response with new token
    """
    try:
        # Get current session token
        session_token = session.get('session_token')
        if not session_token:
            return create_error_response("No active session found", 401)
        # Validate current session; missing info is treated as expired.
        session_info = session_manager.get_session_info(session_token)
        if not session_info or session_info.get('expired', True):
            session.clear()
            return create_error_response("Session expired", 401)
        # Create new session token
        user_id = session_info['user_id']
        new_session_token = session_manager.create_session(user_id)
        # Destroy old session (only after the replacement exists)
        session_manager.destroy_session(session_token)
        # Update session data
        session['session_token'] = new_session_token
        session_manager.update_session_activity(new_session_token)
        # Get user data
        user = user_manager.get_user_by_id(user_id)
        user_data = format_user_data(user, include_sensitive=False)
        response_data = {
            'user': user_data,
            'session_token': new_session_token,
            # NOTE(review): expiry is hard-coded to 7 days and uses a naive
            # datetime - presumably mirroring session_manager's TTL; confirm.
            'expires_at': (datetime.now() + timedelta(days=7)).isoformat()
        }
        logger.info(f"Token refreshed for user ID {user_id}")
        return create_success_response("Token refreshed successfully", 200, response_data)
    except Exception as e:
        logger.error(f"Error during token refresh: {str(e)}")
        return create_error_response("Failed to refresh token", 500)
@auth_bp.route('/auth/activity', methods=['GET'])
@require_auth
@handle_api_errors
@validate_query_params(
    allowed_params=['limit', 'offset'],
    param_types={'limit': int, 'offset': int}
)
def get_user_activity() -> Tuple[Any, int]:
    """
    Get user activity log.
    Query Parameters:
        - limit: Number of activities to return (default: 50, max: 200, min: 1)
        - offset: Number of activities to skip (default: 0, negative clamped to 0)
    Returns:
        JSON response with user activity
    """
    # Clamp pagination to sane bounds. Previously only the upper limit was
    # enforced, so a negative limit/offset was forwarded to the data layer.
    limit = max(1, min(request.args.get('limit', 50, type=int), 200))
    offset = max(0, request.args.get('offset', 0, type=int))
    try:
        user_id = session.get('user_id')
        activity = user_manager.get_user_activity(user_id, limit=limit, offset=offset)
        return create_success_response("User activity retrieved successfully", 200, activity)
    except Exception as e:
        logger.error(f"Error getting user activity: {str(e)}")
        return create_error_response("Failed to get user activity", 500)

View File

@@ -1,649 +0,0 @@
"""
Backup Management API Endpoints
This module provides REST API endpoints for database backup operations,
including backup creation, restoration, and cleanup functionality.
"""
from flask import Blueprint, request, send_file
from typing import Dict, List, Any, Optional
import os
from datetime import datetime
from ...shared.auth_decorators import require_auth, optional_auth
from ...shared.error_handlers import handle_api_errors, APIException, NotFoundError, ValidationError
from ...shared.validators import validate_json_input, validate_id_parameter, validate_pagination_params
from ...shared.response_helpers import (
create_success_response, create_paginated_response, extract_pagination_params
)
# Import backup components (these imports would need to be adjusted based on actual structure)
try:
    from database_manager import backup_manager, BackupInfo
except ImportError:
    # Fallback for development/testing: every endpoint below checks for a
    # falsy manager and answers 503 instead of failing at import time.
    backup_manager = None
    BackupInfo = None
# Blueprint for backup management endpoints, mounted under /api/v1/backups.
backups_bp = Blueprint('backups', __name__, url_prefix='/api/v1/backups')
@backups_bp.route('', methods=['GET'])
@handle_api_errors
@validate_pagination_params
@optional_auth
def list_backups() -> Dict[str, Any]:
    """
    List all available backups with optional filtering.
    Query Parameters:
        - backup_type: Filter by backup type (full, metadata_only, incremental)
        - date_from: Filter from date (ISO format)
        - date_to: Filter to date (ISO format)
        - min_size_mb: Minimum backup size in MB
        - max_size_mb: Maximum backup size in MB
        - page: Page number (default: 1)
        - per_page: Items per page (default: 50, max: 1000)
    Returns:
        Paginated list of backups
    """
    if not backup_manager:
        raise APIException("Backup manager not available", 503)
    # Extract filters (all arrive as strings or None from the query string)
    backup_type_filter = request.args.get('backup_type')
    date_from = request.args.get('date_from')
    date_to = request.args.get('date_to')
    min_size_mb = request.args.get('min_size_mb')
    max_size_mb = request.args.get('max_size_mb')
    # Validate filters
    valid_types = ['full', 'metadata_only', 'incremental']
    if backup_type_filter and backup_type_filter not in valid_types:
        raise ValidationError(f"backup_type must be one of: {', '.join(valid_types)}")
    # Validate dates. 'Z' is normalized to '+00:00' because fromisoformat()
    # on older Python versions does not accept the Zulu designator.
    if date_from:
        try:
            datetime.fromisoformat(date_from.replace('Z', '+00:00'))
        except ValueError:
            raise ValidationError("date_from must be in ISO format")
    if date_to:
        try:
            datetime.fromisoformat(date_to.replace('Z', '+00:00'))
        except ValueError:
            raise ValidationError("date_to must be in ISO format")
    # Validate size filters. NOTE: the variables are rebound from query-string
    # str to float here and reused as numbers further down.
    if min_size_mb:
        try:
            min_size_mb = float(min_size_mb)
            if min_size_mb < 0:
                raise ValueError()
        except ValueError:
            raise ValidationError("min_size_mb must be a non-negative number")
    if max_size_mb:
        try:
            max_size_mb = float(max_size_mb)
            if max_size_mb < 0:
                raise ValueError()
        except ValueError:
            raise ValidationError("max_size_mb must be a non-negative number")
    # Get pagination parameters
    page, per_page = extract_pagination_params()
    # Get backups with filters (sizes converted MB -> bytes for the manager)
    backups = backup_manager.list_backups(
        backup_type=backup_type_filter,
        date_from=date_from,
        date_to=date_to,
        min_size_bytes=int(min_size_mb * 1024 * 1024) if min_size_mb else None,
        max_size_bytes=int(max_size_mb * 1024 * 1024) if max_size_mb else None
    )
    # Format backup data for the JSON response
    backup_data = []
    for backup in backups:
        backup_data.append({
            'backup_id': backup.backup_id,
            'backup_type': backup.backup_type,
            'created_at': backup.created_at.isoformat(),
            'size_mb': round(backup.size_bytes / (1024 * 1024), 2),
            'size_bytes': backup.size_bytes,
            'description': backup.description,
            'tables_included': backup.tables_included,
            'backup_path': backup.backup_path,
            'is_compressed': backup.is_compressed,
            'checksum': backup.checksum,
            'status': backup.status
        })
    # Apply pagination in memory over the already-fetched list
    total = len(backup_data)
    start_idx = (page - 1) * per_page
    end_idx = start_idx + per_page
    paginated_backups = backup_data[start_idx:end_idx]
    return create_paginated_response(
        data=paginated_backups,
        page=page,
        per_page=per_page,
        total=total,
        endpoint='backups.list_backups'
    )
@backups_bp.route('/<backup_id>', methods=['GET'])
@handle_api_errors
@validate_id_parameter('backup_id')
@optional_auth
def get_backup(backup_id: str) -> Dict[str, Any]:
    """
    Get detailed information about a specific backup.
    Args:
        backup_id: Unique identifier for the backup
    Returns:
        Detailed backup information
    """
    if not backup_manager:
        raise APIException("Backup manager not available", 503)
    record = backup_manager.get_backup_by_id(backup_id)
    if not record:
        raise NotFoundError("Backup not found")
    # Stored metadata plus live checks (file presence, validation status).
    details = {
        'backup_id': record.backup_id,
        'backup_type': record.backup_type,
        'created_at': record.created_at.isoformat(),
        'size_mb': round(record.size_bytes / (1024 * 1024), 2),
        'size_bytes': record.size_bytes,
        'description': record.description,
        'tables_included': record.tables_included,
        'backup_path': record.backup_path,
        'is_compressed': record.is_compressed,
        'checksum': record.checksum,
        'status': record.status,
        'creation_duration_seconds': record.creation_duration_seconds,
        'file_exists': os.path.exists(record.backup_path),
        'validation_status': backup_manager.validate_backup(backup_id),
    }
    return create_success_response(details)
@backups_bp.route('', methods=['POST'])
@handle_api_errors
@validate_json_input(
    required_fields=['backup_type'],
    optional_fields=['description', 'tables', 'compress', 'encryption_key'],
    field_types={
        'backup_type': str,
        'description': str,
        'tables': list,
        'compress': bool,
        'encryption_key': str
    }
)
@require_auth
def create_backup() -> Dict[str, Any]:
    """
    Create a new database backup.
    Required Fields:
        - backup_type: Type of backup (full, metadata_only, incremental)
    Optional Fields:
        - description: Backup description
        - tables: Specific tables to backup (triggers a selective backup)
        - compress: Whether to compress the backup (default: true)
        - encryption_key: Key for backup encryption
    Returns:
        Created backup information
    Raises:
        ValidationError: On unknown backup_type or invalid/unknown tables.
        APIException: When the manager is unavailable or creation fails.
    """
    if not backup_manager:
        raise APIException("Backup manager not available", 503)
    data = request.get_json()
    backup_type = data['backup_type']
    # Validate backup type
    valid_types = ['full', 'metadata_only', 'incremental']
    if backup_type not in valid_types:
        raise ValidationError(f"backup_type must be one of: {', '.join(valid_types)}")
    description = data.get('description')
    tables = data.get('tables')
    compress = data.get('compress', True)
    encryption_key = data.get('encryption_key')
    # Validate tables if provided
    if tables:
        if not isinstance(tables, list) or not all(isinstance(t, str) for t in tables):
            raise ValidationError("tables must be a list of table names")
        # Validate table names exist
        valid_tables = backup_manager.get_available_tables()
        invalid_tables = [t for t in tables if t not in valid_tables]
        if invalid_tables:
            raise ValidationError(f"Invalid tables: {', '.join(invalid_tables)}")
    try:
        # Dispatch on request shape. BUG FIX: the selective branch used to be
        # dead code - backup_type was already validated against the three
        # standard types, so a supplied 'tables' list was validated and then
        # silently ignored. 'tables' now takes precedence, as documented.
        if tables:
            backup_info = backup_manager.create_selective_backup(
                tables=tables,
                description=description,
                compress=compress,
                encryption_key=encryption_key
            )
        elif backup_type == 'full':
            backup_info = backup_manager.create_full_backup(
                description=description,
                compress=compress,
                encryption_key=encryption_key
            )
        elif backup_type == 'metadata_only':
            backup_info = backup_manager.create_metadata_backup(
                description=description,
                compress=compress,
                encryption_key=encryption_key
            )
        else:  # 'incremental' - the only remaining validated type
            backup_info = backup_manager.create_incremental_backup(
                description=description,
                compress=compress,
                encryption_key=encryption_key
            )
        if not backup_info:
            raise APIException("Failed to create backup", 500)
        backup_data = {
            'backup_id': backup_info.backup_id,
            'backup_type': backup_info.backup_type,
            'size_mb': round(backup_info.size_bytes / (1024 * 1024), 2),
            'created_at': backup_info.created_at.isoformat(),
            'description': backup_info.description,
            'tables_included': backup_info.tables_included,
            'is_compressed': backup_info.is_compressed,
            'checksum': backup_info.checksum
        }
        return create_success_response(
            data=backup_data,
            message=f"{backup_type.title()} backup created successfully",
            status_code=201
        )
    except Exception as e:
        raise APIException(f"Failed to create backup: {str(e)}", 500)
@backups_bp.route('/<backup_id>/restore', methods=['POST'])
@handle_api_errors
@validate_id_parameter('backup_id')
@validate_json_input(
    optional_fields=['confirm', 'tables', 'target_database', 'restore_data', 'restore_schema'],
    field_types={
        'confirm': bool,
        'tables': list,
        'target_database': str,
        'restore_data': bool,
        'restore_schema': bool
    }
)
@require_auth
def restore_backup(backup_id: str) -> Dict[str, Any]:
    """
    Restore from a backup.
    Args:
        backup_id: Unique identifier for the backup
    Optional Fields:
        - confirm: Confirmation flag (required for production)
        - tables: Specific tables to restore
        - target_database: Target database path (for restore to different location)
        - restore_data: Whether to restore data (default: true)
        - restore_schema: Whether to restore schema (default: true)
    Returns:
        Restoration results
    """
    if not backup_manager:
        raise APIException("Backup manager not available", 503)
    # Body may legitimately be absent; every field is optional.
    data = request.get_json() or {}
    # Check if backup exists
    backup = backup_manager.get_backup_by_id(backup_id)
    if not backup:
        raise NotFoundError("Backup not found")
    # Validate the file still exists on disk (metadata can outlive the file)
    if not os.path.exists(backup.backup_path):
        raise APIException("Backup file not found", 404)
    # Require confirmation for production environments
    confirm = data.get('confirm', False)
    if not confirm:
        # Imported lazily so other code paths don't depend on config at import time.
        from config import config
        if hasattr(config, 'environment') and config.environment == 'production':
            raise ValidationError("Confirmation required for restore operation in production")
    tables = data.get('tables')
    target_database = data.get('target_database')
    restore_data = data.get('restore_data', True)
    restore_schema = data.get('restore_schema', True)
    # Validate tables if provided
    if tables:
        if not isinstance(tables, list) or not all(isinstance(t, str) for t in tables):
            raise ValidationError("tables must be a list of table names")
    try:
        # Perform restoration; the manager reports success via the result object.
        restore_result = backup_manager.restore_backup(
            backup_id=backup_id,
            tables=tables,
            target_database=target_database,
            restore_data=restore_data,
            restore_schema=restore_schema
        )
        if restore_result.success:
            return create_success_response(
                data={
                    'backup_id': backup_id,
                    'restore_time': restore_result.restore_time.isoformat(),
                    'restored_tables': restore_result.restored_tables,
                    'restored_records': restore_result.restored_records,
                    'duration_seconds': restore_result.duration_seconds
                },
                message="Backup restored successfully"
            )
        else:
            raise APIException(f"Restore failed: {restore_result.error_message}", 500)
    except Exception as e:
        # NOTE: this also re-wraps the APIException raised just above, so the
        # client sees "Failed to restore backup: Restore failed: ...".
        raise APIException(f"Failed to restore backup: {str(e)}", 500)
@backups_bp.route('/<backup_id>/download', methods=['GET'])
@handle_api_errors
@validate_id_parameter('backup_id')
@require_auth
def download_backup(backup_id: str):
    """
    Download a backup file.
    Args:
        backup_id: Unique identifier for the backup
    Returns:
        Backup file download
    """
    if not backup_manager:
        raise APIException("Backup manager not available", 503)
    record = backup_manager.get_backup_by_id(backup_id)
    if not record:
        raise NotFoundError("Backup not found")
    if not os.path.exists(record.backup_path):
        raise NotFoundError("Backup file not found")
    # Descriptive attachment name: type + creation timestamp + short id.
    stamp = record.created_at.strftime('%Y%m%d_%H%M%S')
    attachment_name = f"backup_{record.backup_type}_{stamp}_{backup_id[:8]}.db"
    if record.is_compressed:
        attachment_name += ".gz"
    try:
        return send_file(
            record.backup_path,
            as_attachment=True,
            download_name=attachment_name,
            mimetype='application/octet-stream',
        )
    except Exception as e:
        raise APIException(f"Failed to download backup: {str(e)}", 500)
@backups_bp.route('/<backup_id>/validate', methods=['POST'])
@handle_api_errors
@validate_id_parameter('backup_id')
@optional_auth
def validate_backup(backup_id: str) -> Dict[str, Any]:
    """
    Validate a backup file integrity.
    Args:
        backup_id: Unique identifier for the backup
    Returns:
        Validation results
    """
    if not backup_manager:
        raise APIException("Backup manager not available", 503)
    # 404 before attempting validation on an unknown id.
    if not backup_manager.get_backup_by_id(backup_id):
        raise NotFoundError("Backup not found")
    try:
        outcome = backup_manager.validate_backup(backup_id)
        report = {
            'backup_id': backup_id,
            'is_valid': outcome.is_valid,
            'file_exists': outcome.file_exists,
            'checksum_valid': outcome.checksum_valid,
            'database_readable': outcome.database_readable,
            'tables_count': outcome.tables_count,
            'records_count': outcome.records_count,
            'validation_errors': outcome.errors,
            'validated_at': datetime.utcnow().isoformat(),
        }
        return create_success_response(data=report)
    except Exception as e:
        raise APIException(f"Failed to validate backup: {str(e)}", 500)
@backups_bp.route('/<backup_id>', methods=['DELETE'])
@handle_api_errors
@validate_id_parameter('backup_id')
@require_auth
def delete_backup(backup_id: str) -> Dict[str, Any]:
    """
    Delete a backup.
    Args:
        backup_id: Unique identifier for the backup
    Query Parameters:
        - delete_file: Set to 'true' to also delete the backup file
    Returns:
        Deletion confirmation
    """
    if not backup_manager:
        raise APIException("Backup manager not available", 503)
    if not backup_manager.get_backup_by_id(backup_id):
        raise NotFoundError("Backup not found")
    # File removal defaults to on; anything other than 'true' keeps the file.
    remove_file = request.args.get('delete_file', 'true').lower() == 'true'
    try:
        if backup_manager.delete_backup(backup_id, delete_file=remove_file):
            note = f"Backup {backup_id} deleted successfully"
            if remove_file:
                note += " (including file)"
            return create_success_response(message=note)
        # Caught below and re-wrapped, matching the original control flow.
        raise APIException("Failed to delete backup", 500)
    except Exception as e:
        raise APIException(f"Failed to delete backup: {str(e)}", 500)
@backups_bp.route('/cleanup', methods=['POST'])
@handle_api_errors
@validate_json_input(
    optional_fields=['keep_days', 'keep_count', 'backup_types', 'dry_run'],
    field_types={
        'keep_days': int,
        'keep_count': int,
        'backup_types': list,
        'dry_run': bool
    }
)
@require_auth
def cleanup_backups() -> Dict[str, Any]:
    """
    Clean up old backup files based on retention policy.
    Optional Fields:
        - keep_days: Keep backups newer than this many days (default: 30)
        - keep_count: Keep at least this many backups (default: 10)
        - backup_types: Types of backups to clean up (default: all)
        - dry_run: Preview what would be deleted without actually deleting
    Returns:
        Cleanup results
    """
    if not backup_manager:
        raise APIException("Backup manager not available", 503)
    # Body is optional; every retention knob has a default.
    data = request.get_json() or {}
    keep_days = data.get('keep_days', 30)
    keep_count = data.get('keep_count', 10)
    backup_types = data.get('backup_types', ['full', 'metadata_only', 'incremental'])
    dry_run = data.get('dry_run', False)
    # Validate parameters
    if keep_days < 1:
        raise ValidationError("keep_days must be at least 1")
    if keep_count < 1:
        raise ValidationError("keep_count must be at least 1")
    valid_types = ['full', 'metadata_only', 'incremental']
    if not all(bt in valid_types for bt in backup_types):
        raise ValidationError(f"backup_types must contain only: {', '.join(valid_types)}")
    try:
        # dry_run is forwarded so the manager can report without deleting.
        cleanup_result = backup_manager.cleanup_old_backups(
            keep_days=keep_days,
            keep_count=keep_count,
            backup_types=backup_types,
            dry_run=dry_run
        )
        return create_success_response(
            data={
                'dry_run': dry_run,
                'deleted_count': cleanup_result.deleted_count,
                'deleted_backups': cleanup_result.deleted_backups,
                'space_freed_mb': round(cleanup_result.space_freed_bytes / (1024 * 1024), 2),
                'kept_count': cleanup_result.kept_count,
                # Echo the effective policy back so the caller can audit it.
                'retention_policy': {
                    'keep_days': keep_days,
                    'keep_count': keep_count,
                    'backup_types': backup_types
                }
            },
            message=f"Backup cleanup {'simulated' if dry_run else 'completed'}"
        )
    except Exception as e:
        raise APIException(f"Failed to cleanup backups: {str(e)}", 500)
@backups_bp.route('/schedule', methods=['GET'])
@handle_api_errors
@optional_auth
def get_backup_schedule() -> Dict[str, Any]:
    """
    Get current backup schedule configuration.
    Returns:
        Backup schedule information
    """
    if not backup_manager:
        raise APIException("Backup manager not available", 503)
    try:
        # The manager owns the schedule; this endpoint is a thin read-through.
        current = backup_manager.get_backup_schedule()
        return create_success_response(data=current)
    except Exception as e:
        raise APIException(f"Failed to get backup schedule: {str(e)}", 500)
@backups_bp.route('/schedule', methods=['PUT'])
@handle_api_errors
@validate_json_input(
    optional_fields=['enabled', 'full_backup_interval', 'incremental_interval', 'retention_days', 'cleanup_enabled'],
    field_types={
        'enabled': bool,
        'full_backup_interval': str,
        'incremental_interval': str,
        'retention_days': int,
        'cleanup_enabled': bool
    }
)
@require_auth
def update_backup_schedule() -> Dict[str, Any]:
    """
    Update backup schedule configuration.
    Optional Fields:
        - enabled: Enable/disable automatic backups
        - full_backup_interval: Cron expression for full backups
        - incremental_interval: Cron expression for incremental backups
        - retention_days: Number of days to keep backups
        - cleanup_enabled: Enable/disable automatic cleanup
    Returns:
        Updated schedule configuration
    """
    if not backup_manager:
        raise APIException("Backup manager not available", 503)
    changes = request.get_json()
    try:
        # The manager merges partial changes and returns the effective config.
        effective = backup_manager.update_backup_schedule(changes)
        return create_success_response(
            data=effective,
            message="Backup schedule updated successfully"
        )
    except Exception as e:
        raise APIException(f"Failed to update backup schedule: {str(e)}", 500)

View File

@@ -1,341 +0,0 @@
"""
Bulk Operations API endpoints
Provides REST API for bulk series management operations.
"""
from flask import Blueprint, request, jsonify, send_file
import asyncio
import threading
from typing import Dict, Any
import uuid
import io
from bulk_operations import bulk_operations_manager
bulk_api_bp = Blueprint('bulk_api', __name__, url_prefix='/api/bulk')
# Store active operations
active_operations = {}
@bulk_api_bp.route('/download', methods=['POST'])
def bulk_download():
    """Start bulk download operation.

    Expects a JSON body with 'series_ids' (list) and an optional
    'operation_id'. The work runs on a background thread with its own
    asyncio event loop; state lives in the module-level
    ``active_operations`` dict and can be polled via
    ``/api/bulk/status/<task_id>``.
    """
    try:
        data = request.get_json()
        operation_id = data.get('operation_id')
        series_ids = data.get('series_ids', [])
        if not series_ids:
            return jsonify({'success': False, 'error': 'No series IDs provided'}), 400
        # Create task ID
        task_id = str(uuid.uuid4())
        # Store operation info
        active_operations[task_id] = {
            'id': operation_id,
            'type': 'download',
            'status': 'running',
            'progress': {
                'completed': 0,
                'total': len(series_ids),
                'message': 'Starting download...'
            }
        }
        # Start async operation
        def run_bulk_download():
            # Worker thread: a dedicated event loop is required because the
            # Flask request thread has no running asyncio loop of its own.
            loop = asyncio.new_event_loop()
            asyncio.set_event_loop(loop)
            try:
                result = loop.run_until_complete(
                    bulk_operations_manager.bulk_download(series_ids, operation_id)
                )
                active_operations[task_id]['status'] = 'completed'
                active_operations[task_id]['result'] = result
            except Exception as e:
                active_operations[task_id]['status'] = 'failed'
                active_operations[task_id]['error'] = str(e)
            finally:
                loop.close()
        thread = threading.Thread(target=run_bulk_download)
        thread.start()
        # Respond immediately; the caller polls the status endpoint.
        return jsonify({'success': True, 'task_id': task_id})
    except Exception as e:
        return jsonify({'success': False, 'error': str(e)}), 500
@bulk_api_bp.route('/update', methods=['POST'])
def bulk_update():
    """Start bulk update operation.

    Same contract as bulk_download: JSON body with 'series_ids' and an
    optional 'operation_id'; returns a 'task_id' pollable via the status
    endpoint while the work runs on a background thread.
    """
    try:
        data = request.get_json()
        operation_id = data.get('operation_id')
        series_ids = data.get('series_ids', [])
        if not series_ids:
            return jsonify({'success': False, 'error': 'No series IDs provided'}), 400
        task_id = str(uuid.uuid4())
        active_operations[task_id] = {
            'id': operation_id,
            'type': 'update',
            'status': 'running',
            'progress': {
                'completed': 0,
                'total': len(series_ids),
                'message': 'Starting update...'
            }
        }
        def run_bulk_update():
            # Background worker with a private asyncio loop (no loop exists
            # on a plain Flask worker thread).
            loop = asyncio.new_event_loop()
            asyncio.set_event_loop(loop)
            try:
                result = loop.run_until_complete(
                    bulk_operations_manager.bulk_update(series_ids, operation_id)
                )
                active_operations[task_id]['status'] = 'completed'
                active_operations[task_id]['result'] = result
            except Exception as e:
                active_operations[task_id]['status'] = 'failed'
                active_operations[task_id]['error'] = str(e)
            finally:
                loop.close()
        thread = threading.Thread(target=run_bulk_update)
        thread.start()
        return jsonify({'success': True, 'task_id': task_id})
    except Exception as e:
        return jsonify({'success': False, 'error': str(e)}), 500
@bulk_api_bp.route('/organize', methods=['POST'])
def bulk_organize():
    """Start bulk organize operation.

    JSON body: 'series_ids' (list), optional 'operation_id', and optional
    'options' dict forwarded verbatim to the organizer. Returns a
    pollable 'task_id'.
    """
    try:
        data = request.get_json()
        operation_id = data.get('operation_id')
        series_ids = data.get('series_ids', [])
        options = data.get('options', {})
        if not series_ids:
            return jsonify({'success': False, 'error': 'No series IDs provided'}), 400
        task_id = str(uuid.uuid4())
        active_operations[task_id] = {
            'id': operation_id,
            'type': 'organize',
            'status': 'running',
            'progress': {
                'completed': 0,
                'total': len(series_ids),
                'message': 'Starting organization...'
            }
        }
        def run_bulk_organize():
            # Background worker with a private asyncio loop.
            loop = asyncio.new_event_loop()
            asyncio.set_event_loop(loop)
            try:
                result = loop.run_until_complete(
                    bulk_operations_manager.bulk_organize(series_ids, options, operation_id)
                )
                active_operations[task_id]['status'] = 'completed'
                active_operations[task_id]['result'] = result
            except Exception as e:
                active_operations[task_id]['status'] = 'failed'
                active_operations[task_id]['error'] = str(e)
            finally:
                loop.close()
        thread = threading.Thread(target=run_bulk_organize)
        thread.start()
        return jsonify({'success': True, 'task_id': task_id})
    except Exception as e:
        return jsonify({'success': False, 'error': str(e)}), 500
@bulk_api_bp.route('/delete', methods=['DELETE'])
def bulk_delete():
    """Start bulk delete operation.

    JSON body with 'series_ids' (list) and optional 'operation_id';
    returns a pollable 'task_id'. Deletion runs asynchronously on a
    background thread.
    """
    try:
        data = request.get_json()
        operation_id = data.get('operation_id')
        series_ids = data.get('series_ids', [])
        if not series_ids:
            return jsonify({'success': False, 'error': 'No series IDs provided'}), 400
        task_id = str(uuid.uuid4())
        active_operations[task_id] = {
            'id': operation_id,
            'type': 'delete',
            'status': 'running',
            'progress': {
                'completed': 0,
                'total': len(series_ids),
                'message': 'Starting deletion...'
            }
        }
        def run_bulk_delete():
            # Background worker with a private asyncio loop.
            loop = asyncio.new_event_loop()
            asyncio.set_event_loop(loop)
            try:
                result = loop.run_until_complete(
                    bulk_operations_manager.bulk_delete(series_ids, operation_id)
                )
                active_operations[task_id]['status'] = 'completed'
                active_operations[task_id]['result'] = result
            except Exception as e:
                active_operations[task_id]['status'] = 'failed'
                active_operations[task_id]['error'] = str(e)
            finally:
                loop.close()
        thread = threading.Thread(target=run_bulk_delete)
        thread.start()
        return jsonify({'success': True, 'task_id': task_id})
    except Exception as e:
        return jsonify({'success': False, 'error': str(e)}), 500
@bulk_api_bp.route('/export', methods=['POST'])
def bulk_export():
    """Export series data.

    JSON body: 'series_ids' (list) and optional 'format'
    ('json' | 'csv' | 'xml', default 'json'). Unlike the other bulk
    endpoints this runs synchronously in the request thread and streams
    the generated file straight back to the client.
    """
    try:
        data = request.get_json()
        series_ids = data.get('series_ids', [])
        format_type = data.get('format', 'json')
        if not series_ids:
            return jsonify({'success': False, 'error': 'No series IDs provided'}), 400
        # Generate export data
        # A temporary event loop drives the async exporter from this
        # synchronous handler; it is always closed afterwards.
        loop = asyncio.new_event_loop()
        asyncio.set_event_loop(loop)
        try:
            export_data = loop.run_until_complete(
                bulk_operations_manager.export_series_data(series_ids, format_type)
            )
        finally:
            loop.close()
        # Determine content type and filename
        content_types = {
            'json': 'application/json',
            'csv': 'text/csv',
            'xml': 'application/xml'
        }
        content_type = content_types.get(format_type, 'application/octet-stream')
        filename = f'series_export_{len(series_ids)}_items.{format_type}'
        # Serve entirely from memory; nothing is written to disk.
        return send_file(
            io.BytesIO(export_data),
            mimetype=content_type,
            as_attachment=True,
            download_name=filename
        )
    except Exception as e:
        return jsonify({'success': False, 'error': str(e)}), 500
@bulk_api_bp.route('/status/<task_id>', methods=['GET'])
def get_operation_status(task_id):
    """Report the current status and progress payload for a bulk task."""
    try:
        op = active_operations.get(task_id)
        if op is None:
            return jsonify({'error': 'Task not found'}), 404
        state = op['status']
        payload = {
            'complete': state in ['completed', 'failed'],
            'success': state == 'completed',
            'status': state,
        }
        # Flatten progress counters into the top-level response.
        payload.update(op.get('progress', {}))
        # Attach terminal details only when present on the record.
        for key in ('error', 'result'):
            if key in op:
                payload[key] = op[key]
        return jsonify(payload)
    except Exception as exc:
        return jsonify({'error': str(exc)}), 500
@bulk_api_bp.route('/cancel/<task_id>', methods=['POST'])
def cancel_operation(task_id):
    """Flag a running bulk task as cancelled (cooperative, not forced)."""
    try:
        record = active_operations.get(task_id)
        if record is None:
            return jsonify({'error': 'Task not found'}), 404
        # The worker thread is not interrupted; the record is marked so
        # that pollers observe the cancelled state.
        record['status'] = 'cancelled'
        return jsonify({'success': True, 'message': 'Operation cancelled'})
    except Exception as exc:
        return jsonify({'error': str(exc)}), 500
@bulk_api_bp.route('/history', methods=['GET'])
def get_operation_history():
    """Get history of bulk operations.

    Only finished tasks (completed/failed/cancelled) are reported;
    running tasks are excluded.
    """
    try:
        # Return completed/failed operations
        history = []
        for task_id, operation in active_operations.items():
            if operation['status'] in ['completed', 'failed', 'cancelled']:
                history.append({
                    'task_id': task_id,
                    'operation_id': operation['id'],
                    'type': operation['type'],
                    'status': operation['status'],
                    'progress': operation.get('progress', {}),
                    'error': operation.get('error'),
                    'result': operation.get('result')
                })
        # Sort by most recent first
        # NOTE(review): no timestamp is stored on the records, so this
        # actually sorts by the 'completed' item count — only a rough
        # proxy for recency. Confirm whether a created_at field is wanted.
        history.sort(key=lambda x: x.get('progress', {}).get('completed', 0), reverse=True)
        return jsonify({'history': history})
    except Exception as e:
        return jsonify({'error': str(e)}), 500
@bulk_api_bp.route('/cleanup', methods=['POST'])
def cleanup_completed_operations():
    """Drop finished (completed/failed/cancelled) task records."""
    try:
        # Collect first, then delete — never mutate a dict while iterating.
        finished = [
            tid for tid, op in active_operations.items()
            if op['status'] in ['completed', 'failed', 'cancelled']
        ]
        for tid in finished:
            del active_operations[tid]
        return jsonify({
            'success': True,
            'cleaned_up': len(finished),
            'message': f'Cleaned up {len(finished)} completed operations'
        })
    except Exception as exc:
        return jsonify({'error': str(exc)}), 500

View File

@@ -1,454 +0,0 @@
"""
API endpoints for configuration management.
Provides comprehensive configuration management with validation, backup, and restore functionality.
"""
import json
import logging
import os
from datetime import datetime
from typing import Any, Dict, Optional
from fastapi import APIRouter, Depends, File, Form, HTTPException, UploadFile, status
from fastapi.responses import FileResponse
from pydantic import BaseModel
# Import SeriesApp for business logic
from src.core.SeriesApp import SeriesApp
# FastAPI dependencies and models
from src.server.fastapi_app import get_current_user, settings
logger = logging.getLogger(__name__)
# Create FastAPI router for config management endpoints
router = APIRouter(prefix='/api/v1/config', tags=['config'])
# Pydantic models for requests and responses
class ConfigResponse(BaseModel):
    """Response model for configuration data."""
    success: bool = True  # always True; failures raise HTTPException instead
    config: Dict[str, Any]  # configuration payload (sensitive values excluded)
    # NOTE(review): the name 'schema' shadows pydantic's BaseModel.schema()
    # classmethod — confirm schema generation still works for this model.
    schema: Optional[Dict[str, Any]] = None
class ConfigUpdateRequest(BaseModel):
    """Request model for configuration updates."""
    config: Dict[str, Any]  # new configuration values to apply
    # NOTE(review): 'validate' shadows pydantic v1's BaseModel.validate()
    # classmethod — verify this does not conflict with the pydantic version in use.
    validate: bool = True  # when True, validate before saving
class ConfigImportResponse(BaseModel):
    """Response model for configuration import operations."""
    success: bool  # whether the import/update succeeded
    message: str  # human-readable outcome description
    imported_keys: Optional[list] = None  # top-level keys that were applied
    skipped_keys: Optional[list] = None  # top-level keys that were ignored
# Dependency to get SeriesApp instance
def get_series_app() -> SeriesApp:
    """Get SeriesApp instance for business logic operations.

    Raises:
        HTTPException: 503 when no anime directory is configured.

    NOTE(review): this constructs a fresh SeriesApp on every request;
    SeriesApp.__init__ scans the library directory, which may be slow —
    consider caching a single shared instance.
    """
    if not settings.anime_directory:
        raise HTTPException(
            status_code=status.HTTP_503_SERVICE_UNAVAILABLE,
            detail="Anime directory not configured"
        )
    return SeriesApp(settings.anime_directory)
@router.get('/', response_model=ConfigResponse)
async def get_full_config(
    current_user: Optional[Dict] = Depends(get_current_user)
) -> ConfigResponse:
    """Get complete configuration (without sensitive data).

    Currently returns a placeholder structure plus a minimal schema
    description; the real config backend is not wired in yet (see TODO).
    """
    try:
        # For now, return a basic config structure
        # TODO: Replace with actual config management logic
        config_data = {
            "anime_directory": settings.anime_directory if hasattr(settings, 'anime_directory') else None,
            "download_settings": {},
            "display_settings": {},
            "security_settings": {}
        }
        schema = {
            "anime_directory": {"type": "string", "required": True},
            "download_settings": {"type": "object"},
            "display_settings": {"type": "object"},
            "security_settings": {"type": "object"}
        }
        return ConfigResponse(
            success=True,
            config=config_data,
            schema=schema
        )
    except Exception as e:
        logger.error(f"Error getting configuration: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=str(e)
        )
@router.post('/', response_model=ConfigImportResponse)
async def update_config(
    config_update: ConfigUpdateRequest,
    current_user: Optional[Dict] = Depends(get_current_user)
) -> ConfigImportResponse:
    """Update configuration with validation.

    NOTE(review): this is currently a stub — it acknowledges the request
    and echoes the submitted keys without persisting anything (see TODO).
    """
    try:
        # For now, just return success
        # TODO: Replace with actual config management logic
        logger.info("Configuration updated successfully")
        return ConfigImportResponse(
            success=True,
            message="Configuration updated successfully",
            imported_keys=list(config_update.config.keys()),
            skipped_keys=[]
        )
    except Exception as e:
        logger.error(f"Error updating configuration: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=str(e)
        )
@config_bp.route('/validate', methods=['POST'])
@require_auth
def validate_config():
    """Run configuration validation on the posted JSON without persisting it."""
    try:
        payload = request.get_json() or {}
        outcome = config.validate_config(payload)
        return jsonify({'success': True, 'validation': outcome})
    except Exception as exc:
        logger.error(f"Error validating configuration: {exc}")
        return jsonify({'success': False, 'error': str(exc)}), 500
@config_bp.route('/section/<section_name>', methods=['GET'])
@require_auth
def get_config_section(section_name):
    """Return a single named section of the configuration ({} if absent)."""
    try:
        return jsonify({
            'success': True,
            'section': section_name,
            'config': config.get(section_name, {}),
        })
    except Exception as exc:
        logger.error(f"Error getting config section {section_name}: {exc}")
        return jsonify({'success': False, 'error': str(exc)}), 500
@config_bp.route('/section/<section_name>', methods=['POST'])
@require_auth
def update_config_section(section_name):
    """Update specific configuration section.

    Round-trips the full configuration: exports everything (including
    sensitive values so they survive the write-back), replaces the one
    section, then re-imports with validation.
    """
    try:
        data = request.get_json() or {}
        # Get current config
        current_config = config.export_config(include_sensitive=True)
        # Update the specific section
        # Note: this replaces the whole section rather than merging keys.
        current_config[section_name] = data
        # Validate and save
        result = config.import_config(current_config, validate=True)
        if result['success']:
            logger.info(f"Configuration section '{section_name}' updated successfully")
            return jsonify({
                'success': True,
                'message': f'Configuration section "{section_name}" updated successfully',
                'warnings': result.get('warnings', [])
            })
        else:
            return jsonify({
                'success': False,
                'error': 'Configuration validation failed',
                'errors': result['errors'],
                'warnings': result.get('warnings', [])
            }), 400
    except Exception as e:
        logger.error(f"Error updating config section {section_name}: {e}")
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500
@config_bp.route('/backup', methods=['POST'])
@require_auth
def create_backup():
    """Create configuration backup.

    JSON body may contain 'name'; it is sanitized and embedded in the
    generated ``config_backup_*_<timestamp>.json`` filename.
    """
    try:
        data = request.get_json() or {}
        backup_name = data.get('name', '')
        # Generate backup filename
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        if backup_name:
            # Sanitize backup name
            # secure_filename strips path separators and unsafe characters.
            backup_name = secure_filename(backup_name)
            filename = f"config_backup_{backup_name}_{timestamp}.json"
        else:
            filename = f"config_backup_{timestamp}.json"
        backup_path = config.backup_config(filename)
        logger.info(f"Configuration backup created: {backup_path}")
        return jsonify({
            'success': True,
            'message': 'Backup created successfully',
            'backup_path': backup_path,
            'filename': filename
        })
    except Exception as e:
        logger.error(f"Error creating backup: {e}")
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500
@config_bp.route('/backups', methods=['GET'])
@require_auth
def list_backups():
    """List available configuration backups.

    NOTE(review): scans the process's current working directory for
    ``config_backup_*.json`` files — results depend on where the server
    was started from.
    """
    try:
        backups = []
        # Scan current directory for backup files
        for filename in os.listdir('.'):
            if filename.startswith('config_backup_') and filename.endswith('.json'):
                file_path = os.path.abspath(filename)
                file_size = os.path.getsize(filename)
                file_modified = datetime.fromtimestamp(os.path.getmtime(filename))
                backups.append({
                    'filename': filename,
                    'path': file_path,
                    'size': file_size,
                    'size_kb': round(file_size / 1024, 2),
                    'modified': file_modified.isoformat(),
                    'modified_display': file_modified.strftime('%Y-%m-%d %H:%M:%S')
                })
        # Sort by modification date (newest first)
        backups.sort(key=lambda x: x['modified'], reverse=True)
        return jsonify({
            'success': True,
            'backups': backups
        })
    except Exception as e:
        logger.error(f"Error listing backups: {e}")
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500
@config_bp.route('/backup/<filename>/restore', methods=['POST'])
@require_auth
def restore_backup(filename):
    """Restore configuration from a named backup file.

    Args:
        filename: Backup file name; must match the
            ``config_backup_*.json`` pattern produced by create_backup.

    Returns:
        JSON success payload, or an error with status 400/404/500.
    """
    try:
        # Security: Only allow config backup files. The prefix check also
        # rejects path-traversal names such as '../evil.json'.
        if not filename.startswith('config_backup_') or not filename.endswith('.json'):
            return jsonify({
                'success': False,
                'error': 'Invalid backup file'
            }), 400
        # Security: Check if file exists
        if not os.path.exists(filename):
            return jsonify({
                'success': False,
                'error': 'Backup file not found'
            }), 404
        success = config.restore_config(filename)
        if success:
            # Bug fix: this log line previously printed the literal text
            # "(unknown)" instead of the restored file's name.
            logger.info(f"Configuration restored from backup: {filename}")
            return jsonify({
                'success': True,
                'message': 'Configuration restored successfully'
            })
        else:
            return jsonify({
                'success': False,
                'error': 'Failed to restore configuration'
            }), 500
    except Exception as e:
        # Bug fix: include the filename in the error log (was "(unknown)").
        logger.error(f"Error restoring backup {filename}: {e}")
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500
@config_bp.route('/backup/<filename>/download', methods=['GET'])
@require_auth
def download_backup(filename):
    """Download configuration backup file.

    Args:
        filename: Backup file name; must match the
            ``config_backup_*.json`` pattern produced by create_backup.

    Returns:
        File attachment response, or a JSON error with status 400/404/500.
    """
    try:
        # Security: Only allow config backup files. The prefix check also
        # rejects path-traversal names such as '../evil.json'.
        if not filename.startswith('config_backup_') or not filename.endswith('.json'):
            return jsonify({
                'success': False,
                'error': 'Invalid backup file'
            }), 400
        # Security: Check if file exists
        if not os.path.exists(filename):
            return jsonify({
                'success': False,
                'error': 'Backup file not found'
            }), 404
        return send_file(
            filename,
            as_attachment=True,
            download_name=filename
        )
    except Exception as e:
        # Bug fix: include the filename in the error log (the message
        # previously printed the literal text "(unknown)").
        logger.error(f"Error downloading backup {filename}: {e}")
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500
@config_bp.route('/export', methods=['POST'])
@require_auth
def export_config():
    """Export current configuration to JSON.

    JSON body may contain 'include_sensitive' (bool, default False).

    Returns:
        A JSON file attachment named ``aniworld_config_export_<ts>.json``.
    """
    import io  # local import: only needed for the in-memory download buffer
    try:
        data = request.get_json() or {}
        include_sensitive = data.get('include_sensitive', False)
        config_data = config.export_config(include_sensitive=include_sensitive)
        # Create filename with timestamp
        timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
        filename = f"aniworld_config_export_{timestamp}.json"
        # Bug fix: the export was previously written to a temp file in the
        # working directory that was never deleted (one leaked file per
        # request). Stream it from memory instead, like bulk_export does.
        buffer = io.BytesIO(json.dumps(config_data, indent=4).encode('utf-8'))
        return send_file(
            buffer,
            as_attachment=True,
            download_name=filename,
            mimetype='application/json'
        )
    except Exception as e:
        logger.error(f"Error exporting configuration: {e}")
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500
@router.post('/import', response_model=ConfigImportResponse)
async def import_config(
    config_file: UploadFile = File(...),
    current_user: Optional[Dict] = Depends(get_current_user)
) -> ConfigImportResponse:
    """Import configuration from uploaded JSON file.

    Validates the upload (non-empty filename, .json extension, parseable
    JSON). NOTE(review): persistence is stubbed out — the parsed keys are
    echoed back but nothing is saved yet (see TODO).
    """
    try:
        # Validate file type
        if not config_file.filename:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="No file selected"
            )
        if not config_file.filename.endswith('.json'):
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail="Invalid file type. Only JSON files are allowed."
            )
        # Read and parse JSON
        try:
            content = await config_file.read()
            config_data = json.loads(content.decode('utf-8'))
        except json.JSONDecodeError as e:
            raise HTTPException(
                status_code=status.HTTP_400_BAD_REQUEST,
                detail=f"Invalid JSON format: {e}"
            )
        # For now, just return success with the keys that would be imported
        # TODO: Replace with actual config management logic
        logger.info(f"Configuration imported from file: {config_file.filename}")
        return ConfigImportResponse(
            success=True,
            message="Configuration imported successfully",
            imported_keys=list(config_data.keys()) if isinstance(config_data, dict) else [],
            skipped_keys=[]
        )
    except HTTPException:
        # Re-raise intentional HTTP errors untouched; only unexpected
        # failures fall through to the generic 500 below.
        raise
    except Exception as e:
        logger.error(f"Error importing configuration: {e}")
        raise HTTPException(
            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
            detail=str(e)
        )
@config_bp.route('/reset', methods=['POST'])
@require_auth
def reset_config():
    """Reset configuration to defaults (preserves security settings).

    JSON body may contain 'preserve_security' (bool, default True); when
    True the current 'security' section is carried over to the defaults.

    NOTE(review): reaches into the private ``config._config`` attribute —
    consider adding a public reset method to the config object instead.
    """
    try:
        data = request.get_json() or {}
        preserve_security = data.get('preserve_security', True)
        # Get current security settings
        current_security = config.get('security', {}) if preserve_security else {}
        # Reset to defaults
        config._config = config.default_config.copy()
        # Restore security settings if requested
        if preserve_security and current_security:
            config._config['security'] = current_security
        success = config.save_config()
        if success:
            logger.info("Configuration reset to defaults")
            return jsonify({
                'success': True,
                'message': 'Configuration reset to defaults'
            })
        else:
            return jsonify({
                'success': False,
                'error': 'Failed to save configuration'
            }), 500
    except Exception as e:
        logger.error(f"Error resetting configuration: {e}")
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500

View File

@@ -1,649 +0,0 @@
"""
Database & Storage Management API Endpoints
This module provides REST API endpoints for database operations,
backup management, and storage monitoring.
"""
from flask import Blueprint, request, jsonify, send_file
from auth import require_auth, optional_auth
from error_handler import handle_api_errors, RetryableError, NonRetryableError
from database_manager import (
database_manager, anime_repository, backup_manager, storage_manager,
AnimeMetadata
)
import uuid
from datetime import datetime
import os
# Blueprint for database management endpoints
database_bp = Blueprint('database', __name__)
# Database Information Endpoints
@database_bp.route('/api/database/info')
@handle_api_errors
@optional_auth
def get_database_info():
    """Get database information and statistics.

    Returns schema version, database file path/size, and row counts for
    the main tables in a single response.
    """
    try:
        # Get schema version
        schema_version = database_manager.get_current_version()
        # Get table statistics
        # Single round-trip: all counts are gathered via scalar subqueries.
        stats_query = """
            SELECT
                (SELECT COUNT(*) FROM anime_metadata) as anime_count,
                (SELECT COUNT(*) FROM episode_metadata) as episode_count,
                (SELECT COUNT(*) FROM episode_metadata WHERE is_downloaded = 1) as downloaded_count,
                (SELECT COUNT(*) FROM download_history) as download_history_count
        """
        results = database_manager.execute_query(stats_query)
        stats = dict(results[0]) if results else {}
        # Get database file size
        db_size = os.path.getsize(database_manager.db_path) if os.path.exists(database_manager.db_path) else 0
        return jsonify({
            'status': 'success',
            'data': {
                'schema_version': schema_version,
                'database_path': database_manager.db_path,
                'database_size_mb': round(db_size / (1024 * 1024), 2),
                'statistics': {
                    'anime_count': stats.get('anime_count', 0),
                    'episode_count': stats.get('episode_count', 0),
                    'downloaded_count': stats.get('downloaded_count', 0),
                    'download_history_count': stats.get('download_history_count', 0)
                }
            }
        })
    except Exception as e:
        raise RetryableError(f"Failed to get database info: {e}")
# Anime Metadata Endpoints
@database_bp.route('/api/database/anime')
@handle_api_errors
@optional_auth
def get_all_anime():
    """Get all anime from database.

    Query params:
        status: optional filter forwarded to the repository.
    """
    try:
        status_filter = request.args.get('status')
        anime_list = anime_repository.get_all_anime(status_filter)
        # Convert to serializable format
        anime_data = []
        for anime in anime_list:
            anime_data.append({
                'anime_id': anime.anime_id,
                'name': anime.name,
                'folder': anime.folder,
                'key': anime.key,
                'description': anime.description,
                'genres': anime.genres,
                'release_year': anime.release_year,
                'status': anime.status,
                'total_episodes': anime.total_episodes,
                'poster_url': anime.poster_url,
                'last_updated': anime.last_updated.isoformat(),
                'created_at': anime.created_at.isoformat(),
                'custom_metadata': anime.custom_metadata
            })
        return jsonify({
            'status': 'success',
            'data': {
                'anime': anime_data,
                'count': len(anime_data)
            }
        })
    except Exception as e:
        raise RetryableError(f"Failed to get anime list: {e}")
@database_bp.route('/api/database/anime/<anime_id>')
@handle_api_errors
@optional_auth
def get_anime_by_id(anime_id):
    """Get specific anime by ID.

    Returns 404 when no row matches; otherwise the raw DB row fields.
    Note: timestamps come straight from the row here (not re-serialized
    via isoformat as in get_all_anime).
    """
    try:
        # Parameterized query — anime_id is never interpolated into SQL.
        query = "SELECT * FROM anime_metadata WHERE anime_id = ?"
        results = database_manager.execute_query(query, (anime_id,))
        if not results:
            return jsonify({
                'status': 'error',
                'message': 'Anime not found'
            }), 404
        row = results[0]
        anime_data = {
            'anime_id': row['anime_id'],
            'name': row['name'],
            'folder': row['folder'],
            'key': row['key'],
            'description': row['description'],
            'genres': row['genres'],
            'release_year': row['release_year'],
            'status': row['status'],
            'total_episodes': row['total_episodes'],
            'poster_url': row['poster_url'],
            'last_updated': row['last_updated'],
            'created_at': row['created_at'],
            'custom_metadata': row['custom_metadata']
        }
        return jsonify({
            'status': 'success',
            'data': anime_data
        })
    except Exception as e:
        raise RetryableError(f"Failed to get anime: {e}")
@database_bp.route('/api/database/anime', methods=['POST'])
@handle_api_errors
@require_auth
def create_anime():
    """Create new anime record.

    Required JSON fields: 'name', 'folder'. All other metadata fields
    are optional and default as shown below. A fresh UUID becomes the
    anime_id and is returned on success (201).
    """
    try:
        data = request.get_json()
        # Validate required fields
        required_fields = ['name', 'folder']
        for field in required_fields:
            if field not in data:
                return jsonify({
                    'status': 'error',
                    'message': f'Missing required field: {field}'
                }), 400
        # Create anime metadata
        anime = AnimeMetadata(
            anime_id=str(uuid.uuid4()),
            name=data['name'],
            folder=data['folder'],
            key=data.get('key'),
            description=data.get('description'),
            genres=data.get('genres', []),
            release_year=data.get('release_year'),
            status=data.get('status', 'ongoing'),
            total_episodes=data.get('total_episodes'),
            poster_url=data.get('poster_url'),
            custom_metadata=data.get('custom_metadata', {})
        )
        success = anime_repository.create_anime(anime)
        if success:
            return jsonify({
                'status': 'success',
                'message': 'Anime created successfully',
                'data': {
                    'anime_id': anime.anime_id
                }
            }), 201
        else:
            return jsonify({
                'status': 'error',
                'message': 'Failed to create anime'
            }), 500
    except Exception as e:
        raise RetryableError(f"Failed to create anime: {e}")
@database_bp.route('/api/database/anime/<anime_id>', methods=['PUT'])
@handle_api_errors
@require_auth
def update_anime(anime_id):
    """Update anime metadata.

    Only fields present in the JSON body are changed; 'custom_metadata'
    is merged (dict.update) rather than replaced.

    NOTE(review): the existing record is looked up by the 'folder' value
    from the request body, not by anime_id — a request without a correct
    'folder' field always 404s even for a valid anime_id. Confirm whether
    a get-by-id repository method should be used instead.
    """
    try:
        data = request.get_json()
        # Get existing anime
        existing = anime_repository.get_anime_by_folder(data.get('folder', ''))
        if not existing or existing.anime_id != anime_id:
            return jsonify({
                'status': 'error',
                'message': 'Anime not found'
            }), 404
        # Update fields
        if 'name' in data:
            existing.name = data['name']
        if 'key' in data:
            existing.key = data['key']
        if 'description' in data:
            existing.description = data['description']
        if 'genres' in data:
            existing.genres = data['genres']
        if 'release_year' in data:
            existing.release_year = data['release_year']
        if 'status' in data:
            existing.status = data['status']
        if 'total_episodes' in data:
            existing.total_episodes = data['total_episodes']
        if 'poster_url' in data:
            existing.poster_url = data['poster_url']
        if 'custom_metadata' in data:
            existing.custom_metadata.update(data['custom_metadata'])
        success = anime_repository.update_anime(existing)
        if success:
            return jsonify({
                'status': 'success',
                'message': 'Anime updated successfully'
            })
        else:
            return jsonify({
                'status': 'error',
                'message': 'Failed to update anime'
            }), 500
    except Exception as e:
        raise RetryableError(f"Failed to update anime: {e}")
@database_bp.route('/api/database/anime/<anime_id>', methods=['DELETE'])
@handle_api_errors
@require_auth
def delete_anime(anime_id):
    """Remove an anime record (and its related data) from the database."""
    try:
        # The repository reports False when no matching row existed.
        if anime_repository.delete_anime(anime_id):
            return jsonify({
                'status': 'success',
                'message': 'Anime deleted successfully'
            })
        return jsonify({
            'status': 'error',
            'message': 'Anime not found'
        }), 404
    except Exception as e:
        raise RetryableError(f"Failed to delete anime: {e}")
@database_bp.route('/api/database/anime/search')
@handle_api_errors
@optional_auth
def search_anime():
    """Search anime by name or description.

    Query params:
        q: search term (required, whitespace-trimmed).

    Returns a reduced projection of each match (no timestamps or
    custom metadata).
    """
    try:
        search_term = request.args.get('q', '').strip()
        if not search_term:
            return jsonify({
                'status': 'error',
                'message': 'Search term is required'
            }), 400
        results = anime_repository.search_anime(search_term)
        # Convert to serializable format
        anime_data = []
        for anime in results:
            anime_data.append({
                'anime_id': anime.anime_id,
                'name': anime.name,
                'folder': anime.folder,
                'key': anime.key,
                'description': anime.description,
                'genres': anime.genres,
                'release_year': anime.release_year,
                'status': anime.status
            })
        return jsonify({
            'status': 'success',
            'data': {
                'results': anime_data,
                'count': len(anime_data),
                'search_term': search_term
            }
        })
    except Exception as e:
        raise RetryableError(f"Failed to search anime: {e}")
# Backup Management Endpoints
@database_bp.route('/api/database/backups')
@handle_api_errors
@optional_auth
def list_backups():
    """List all available backups.

    Sizes are converted from bytes to MB; timestamps are ISO-formatted.
    """
    try:
        backups = backup_manager.list_backups()
        backup_data = []
        for backup in backups:
            backup_data.append({
                'backup_id': backup.backup_id,
                'backup_type': backup.backup_type,
                'created_at': backup.created_at.isoformat(),
                'size_mb': round(backup.size_bytes / (1024 * 1024), 2),
                'description': backup.description,
                'tables_included': backup.tables_included
            })
        return jsonify({
            'status': 'success',
            'data': {
                'backups': backup_data,
                'count': len(backup_data)
            }
        })
    except Exception as e:
        raise RetryableError(f"Failed to list backups: {e}")
@database_bp.route('/api/database/backups/create', methods=['POST'])
@handle_api_errors
@require_auth
def create_backup():
    """Create a new database backup.

    JSON body: optional 'backup_type' ('full' | 'metadata_only',
    default 'full') and optional 'description'. Returns 201 with the
    new backup's summary on success.
    """
    try:
        data = request.get_json() or {}
        backup_type = data.get('backup_type', 'full')
        description = data.get('description')
        if backup_type not in ['full', 'metadata_only']:
            return jsonify({
                'status': 'error',
                'message': 'Backup type must be "full" or "metadata_only"'
            }), 400
        # Dispatch to the matching backup strategy.
        if backup_type == 'full':
            backup_info = backup_manager.create_full_backup(description)
        else:
            backup_info = backup_manager.create_metadata_backup(description)
        if backup_info:
            return jsonify({
                'status': 'success',
                'message': f'{backup_type.title()} backup created successfully',
                'data': {
                    'backup_id': backup_info.backup_id,
                    'backup_type': backup_info.backup_type,
                    'size_mb': round(backup_info.size_bytes / (1024 * 1024), 2),
                    'created_at': backup_info.created_at.isoformat()
                }
            }), 201
        else:
            return jsonify({
                'status': 'error',
                'message': 'Failed to create backup'
            }), 500
    except Exception as e:
        raise RetryableError(f"Failed to create backup: {e}")
@database_bp.route('/api/database/backups/<backup_id>/restore', methods=['POST'])
@handle_api_errors
@require_auth
def restore_backup(backup_id):
    """Restore the database from the backup identified by backup_id."""
    try:
        # The manager returns a boolean; map it onto the HTTP outcome.
        if backup_manager.restore_backup(backup_id):
            return jsonify({
                'status': 'success',
                'message': 'Backup restored successfully'
            })
        return jsonify({
            'status': 'error',
            'message': 'Failed to restore backup'
        }), 500
    except Exception as e:
        raise RetryableError(f"Failed to restore backup: {e}")
@database_bp.route('/api/database/backups/<backup_id>/download')
@handle_api_errors
@require_auth
def download_backup(backup_id):
    """Stream the backup file for backup_id back as an attachment."""
    try:
        # Locate the matching backup record, if any.
        target_backup = next(
            (b for b in backup_manager.list_backups() if b.backup_id == backup_id),
            None,
        )
        if target_backup is None:
            return jsonify({
                'status': 'error',
                'message': 'Backup not found'
            }), 404
        # The record may outlive its file on disk; verify before sending.
        if not os.path.exists(target_backup.backup_path):
            return jsonify({
                'status': 'error',
                'message': 'Backup file not found'
            }), 404
        return send_file(
            target_backup.backup_path,
            as_attachment=True,
            download_name=os.path.basename(target_backup.backup_path),
        )
    except Exception as e:
        raise RetryableError(f"Failed to download backup: {e}")
@database_bp.route('/api/database/backups/cleanup', methods=['POST'])
@handle_api_errors
@require_auth
def cleanup_backups():
    """Prune old backups, keeping at most keep_count / keep_days worth."""
    try:
        params = request.get_json() or {}
        keep_days = params.get('keep_days', 30)
        keep_count = params.get('keep_count', 10)
        # Both retention knobs must be positive.
        if keep_days < 1 or keep_count < 1:
            return jsonify({
                'status': 'error',
                'message': 'keep_days and keep_count must be positive integers'
            }), 400
        backup_manager.cleanup_old_backups(keep_days, keep_count)
        return jsonify({
            'status': 'success',
            'message': f'Backup cleanup completed (keeping {keep_count} backups, max {keep_days} days old)'
        })
    except Exception as e:
        raise RetryableError(f"Failed to cleanup backups: {e}")
# Storage Management Endpoints
@database_bp.route('/api/database/storage/summary')
@handle_api_errors
@optional_auth
def get_storage_summary():
    """Return the aggregate storage-usage summary from the storage manager."""
    try:
        return jsonify({
            'status': 'success',
            'data': storage_manager.get_storage_summary()
        })
    except Exception as e:
        raise RetryableError(f"Failed to get storage summary: {e}")
@database_bp.route('/api/database/storage/locations')
@handle_api_errors
@optional_auth
def get_storage_locations():
    """Get all active storage locations, joined with their anime name.

    Returns:
        JSON listing each location with space figures converted from
        bytes to GB and a derived usage_percent (None when the space
        columns are unknown/NULL).
    """
    try:
        query = """
            SELECT sl.*, am.name as anime_name
            FROM storage_locations sl
            LEFT JOIN anime_metadata am ON sl.anime_id = am.anime_id
            WHERE sl.is_active = 1
            ORDER BY sl.location_type, sl.path
        """
        results = database_manager.execute_query(query)
        locations = []
        for row in results:
            total = row['total_space_bytes']
            free = row['free_space_bytes']
            # Bug fix: the previous truthiness checks turned a legitimate
            # free space of 0 bytes into None and suppressed usage_percent
            # exactly when a disk is full; compare against None explicitly.
            locations.append({
                'location_id': row['location_id'],
                'anime_id': row['anime_id'],
                'anime_name': row['anime_name'],
                'path': row['path'],
                'location_type': row['location_type'],
                'free_space_gb': (free / (1024**3)) if free is not None else None,
                'total_space_gb': (total / (1024**3)) if total is not None else None,
                'usage_percent': ((total - free) / total * 100) if total and free is not None else None,
                'last_checked': row['last_checked']
            })
        return jsonify({
            'status': 'success',
            'data': {
                'locations': locations,
                'count': len(locations)
            }
        })
    except Exception as e:
        raise RetryableError(f"Failed to get storage locations: {e}")
@database_bp.route('/api/database/storage/locations', methods=['POST'])
@handle_api_errors
@require_auth
def add_storage_location():
    """Register a new storage location from the posted JSON body."""
    try:
        payload = request.get_json()
        path = payload.get('path')
        location_type = payload.get('location_type', 'primary')
        anime_id = payload.get('anime_id')
        # A filesystem path is mandatory.
        if not path:
            return jsonify({
                'status': 'error',
                'message': 'Path is required'
            }), 400
        # Only the three known location roles are accepted.
        if location_type not in ('primary', 'backup', 'cache'):
            return jsonify({
                'status': 'error',
                'message': 'Location type must be primary, backup, or cache'
            }), 400
        new_location_id = storage_manager.add_storage_location(path, location_type, anime_id)
        return jsonify({
            'status': 'success',
            'message': 'Storage location added successfully',
            'data': {'location_id': new_location_id}
        }), 201
    except Exception as exc:
        raise RetryableError(f"Failed to add storage location: {exc}")
@database_bp.route('/api/database/storage/locations/<location_id>/update', methods=['POST'])
@handle_api_errors
@require_auth
def update_storage_location(location_id):
    """Refresh the cached disk statistics for one storage location."""
    try:
        storage_manager.update_storage_stats(location_id)
        response = {
            'status': 'success',
            'message': 'Storage statistics updated successfully',
        }
        return jsonify(response)
    except Exception as exc:
        raise RetryableError(f"Failed to update storage location: {exc}")
# Database Maintenance Endpoints
@database_bp.route('/api/database/maintenance/vacuum', methods=['POST'])
@handle_api_errors
@require_auth
def vacuum_database():
    """Run SQLite VACUUM to rebuild the database file and reclaim space."""
    try:
        with database_manager.get_connection() as conn:
            conn.execute("VACUUM")
        response = {
            'status': 'success',
            'message': 'Database vacuum completed successfully',
        }
        return jsonify(response)
    except Exception as exc:
        raise RetryableError(f"Failed to vacuum database: {exc}")
@database_bp.route('/api/database/maintenance/analyze', methods=['POST'])
@handle_api_errors
@require_auth
def analyze_database():
    """Run SQLite ANALYZE to refresh the query planner's statistics."""
    try:
        with database_manager.get_connection() as conn:
            conn.execute("ANALYZE")
        response = {
            'status': 'success',
            'message': 'Database analysis completed successfully',
        }
        return jsonify(response)
    except Exception as exc:
        raise RetryableError(f"Failed to analyze database: {exc}")
@database_bp.route('/api/database/maintenance/integrity-check', methods=['POST'])
@handle_api_errors
@require_auth
def integrity_check():
    """Run PRAGMA integrity_check and report whether the database is intact."""
    try:
        with database_manager.get_connection() as conn:
            rows = conn.execute("PRAGMA integrity_check").fetchall()
        messages = [row[0] for row in rows]
        # SQLite emits a single row containing 'ok' when nothing is wrong;
        # anything else is a list of corruption descriptions.
        return jsonify({
            'status': 'success',
            'data': {
                'integrity_ok': messages == ['ok'],
                'results': messages
            }
        })
    except Exception as exc:
        raise RetryableError(f"Failed to check database integrity: {exc}")
# Export the blueprint
__all__ = ['database_bp']

View File

@@ -1,581 +0,0 @@
"""
Diagnostics API endpoints.
This module handles all diagnostic and monitoring operations including:
- System health checks
- Performance monitoring
- Error reporting
- Network diagnostics
"""
from flask import Blueprint, request, jsonify
from typing import Dict, List, Any, Optional, Tuple
import logging
import psutil
import socket
import requests
import time
import platform
import sys
import os
from datetime import datetime, timedelta
# Import shared utilities
try:
    from src.server.web.controllers.shared.auth_decorators import require_auth, optional_auth
    from src.server.web.controllers.shared.error_handlers import handle_api_errors
    from src.server.web.controllers.shared.validators import validate_query_params
    from src.server.web.controllers.shared.response_helpers import (
        create_success_response, create_error_response, format_datetime, format_file_size
    )
except ImportError:
    # Fallback imports for development
    # These stand-ins keep the module importable outside the full application:
    # the auth/error decorators pass the wrapped function through unchanged,
    # and the response helpers emit (payload, status) tuples shaped like the
    # real implementations' output.
    # NOTE(review): the fallbacks silently disable authentication — confirm
    # this module is never served in production without the real package.
    def require_auth(f): return f
    def optional_auth(f): return f
    def handle_api_errors(f): return f
    def validate_query_params(**kwargs): return lambda f: f
    def create_success_response(msg, code=200, data=None): return jsonify({'success': True, 'message': msg, 'data': data}), code
    def create_error_response(msg, code=400, details=None): return jsonify({'error': msg, 'details': details}), code
    def format_datetime(dt): return str(dt) if dt else None
    def format_file_size(size): return f"{size} bytes"
# Import diagnostic components
try:
    from src.server.data.error_manager import ErrorManager
    from src.server.data.performance_manager import PerformanceManager
    from src.server.data.system_manager import SystemManager
except ImportError:
    # Fallback for development
    # Inert stubs exposing the same method surface as the real managers so the
    # endpoints below degrade to empty lists/dicts instead of failing at
    # import time.
    class ErrorManager:
        def get_recent_errors(self, **kwargs): return []
        def get_error_stats(self): return {}
        def clear_errors(self): return True
        def report_error(self, **kwargs): return 1
    class PerformanceManager:
        def get_performance_metrics(self): return {}
        def get_performance_history(self, **kwargs): return []
        def record_metric(self, **kwargs): return True
    class SystemManager:
        def get_system_info(self): return {}
        def get_disk_usage(self): return {}
        def get_network_status(self): return {}
        def test_network_connectivity(self, url): return {'success': True, 'response_time': 0.1}
# Create blueprint
diagnostics_bp = Blueprint('diagnostics', __name__)
# Initialize managers
# Module-level singletons shared by every request handler in this blueprint.
error_manager = ErrorManager()
performance_manager = PerformanceManager()
system_manager = SystemManager()
logger = logging.getLogger(__name__)
@diagnostics_bp.route('/diagnostics/health', methods=['GET'])
@optional_auth
@handle_api_errors
def health_check() -> Tuple[Any, int]:
    """
    Perform comprehensive system health check.

    Samples CPU, memory and disk usage via psutil, probes outbound network
    connectivity, and folds the per-check statuses into an overall status
    plus a 0-100 score.

    Returns:
    JSON response with system health status
    """
    try:
        health_status = {
            'status': 'healthy',
            'timestamp': datetime.now().isoformat(),
            'checks': {},
            'overall_score': 100
        }
        # System resource checks
        # cpu_percent(interval=1) blocks for one second to take a real sample.
        cpu_percent = psutil.cpu_percent(interval=1)
        memory = psutil.virtual_memory()
        disk = psutil.disk_usage('/')
        # CPU check — thresholds: <80% healthy, <95% warning, else critical.
        health_status['checks']['cpu'] = {
            'status': 'healthy' if cpu_percent < 80 else 'warning' if cpu_percent < 95 else 'critical',
            'usage_percent': cpu_percent,
            'details': f"CPU usage: {cpu_percent}%"
        }
        # Memory check (same 80/95 thresholds as CPU)
        memory_percent = memory.percent
        health_status['checks']['memory'] = {
            'status': 'healthy' if memory_percent < 80 else 'warning' if memory_percent < 95 else 'critical',
            'usage_percent': memory_percent,
            'total': format_file_size(memory.total),
            'available': format_file_size(memory.available),
            'details': f"Memory usage: {memory_percent}%"
        }
        # Disk check (root filesystem only)
        disk_percent = disk.percent
        health_status['checks']['disk'] = {
            'status': 'healthy' if disk_percent < 80 else 'warning' if disk_percent < 95 else 'critical',
            'usage_percent': disk_percent,
            'total': format_file_size(disk.total),
            'free': format_file_size(disk.free),
            'details': f"Disk usage: {disk_percent}%"
        }
        # Database connectivity check
        # NOTE(review): this block never actually opens a connection, so the
        # 'critical' branch is unreachable and the check always reports
        # healthy — wire in a real probe before trusting it.
        try:
            # This would test actual database connection
            health_status['checks']['database'] = {
                'status': 'healthy',
                'details': 'Database connection successful'
            }
        except Exception as e:
            health_status['checks']['database'] = {
                'status': 'critical',
                'details': f'Database connection failed: {str(e)}'
            }
        # Network connectivity check — any failure is only a 'warning'
        # because the service can keep operating offline.
        try:
            response = requests.get('https://httpbin.org/status/200', timeout=5)
            if response.status_code == 200:
                health_status['checks']['network'] = {
                    'status': 'healthy',
                    'details': 'Internet connectivity available'
                }
            else:
                health_status['checks']['network'] = {
                    'status': 'warning',
                    'details': f'Network response: {response.status_code}'
                }
        except Exception as e:
            health_status['checks']['network'] = {
                'status': 'warning',
                'details': f'Network connectivity issues: {str(e)}'
            }
        # Calculate overall health score
        # Criticals dominate: each costs 30 points (plus 10 per warning);
        # with no criticals each warning costs 15, floored at 50.
        check_statuses = [check['status'] for check in health_status['checks'].values()]
        critical_count = check_statuses.count('critical')
        warning_count = check_statuses.count('warning')
        if critical_count > 0:
            health_status['status'] = 'critical'
            health_status['overall_score'] = max(0, 100 - (critical_count * 30) - (warning_count * 10))
        elif warning_count > 0:
            health_status['status'] = 'warning'
            health_status['overall_score'] = max(50, 100 - (warning_count * 15))
        return create_success_response("Health check completed", 200, health_status)
    except Exception as e:
        logger.error(f"Error during health check: {str(e)}")
        return create_error_response("Health check failed", 500)
@diagnostics_bp.route('/diagnostics/system', methods=['GET'])
@require_auth
@handle_api_errors
def get_system_info() -> Tuple[Any, int]:
    """
    Get detailed system information.

    Gathers platform/interpreter details plus live psutil readings for CPU,
    memory, disk, network, the current process, and system uptime.

    Returns:
    JSON response with system information
    """
    try:
        system_info = {
            'platform': {
                'system': platform.system(),
                'release': platform.release(),
                'version': platform.version(),
                'machine': platform.machine(),
                'processor': platform.processor(),
                'architecture': platform.architecture()
            },
            'python': {
                'version': sys.version,
                'executable': sys.executable,
                'path': sys.path[:5]  # First 5 paths only
            },
            'resources': {
                'cpu': {
                    'count_logical': psutil.cpu_count(logical=True),
                    'count_physical': psutil.cpu_count(logical=False),
                    # cpu_freq() can be None on some platforms (e.g. in VMs).
                    'frequency': psutil.cpu_freq()._asdict() if psutil.cpu_freq() else None,
                    # Each cpu_percent(interval=1) call blocks for one second.
                    'usage_percent': psutil.cpu_percent(interval=1),
                    'usage_per_cpu': psutil.cpu_percent(interval=1, percpu=True)
                },
                'memory': {
                    **psutil.virtual_memory()._asdict(),
                    'swap': psutil.swap_memory()._asdict()
                },
                'disk': {
                    'usage': psutil.disk_usage('/')._asdict(),
                    # disk_io_counters() is None on diskless/containerized hosts.
                    'io_counters': psutil.disk_io_counters()._asdict() if psutil.disk_io_counters() else None
                },
                'network': {
                    'io_counters': psutil.net_io_counters()._asdict(),
                    'connections': len(psutil.net_connections()),
                    # BUG FIX: net_if_addrs() maps each interface name to a
                    # *list* of addresses; calling ._asdict() on that list
                    # raised AttributeError and made this endpoint always 500.
                    # Iterate the list as test_network_connectivity does.
                    'interfaces': {
                        name: [addr._asdict() for addr in addrs]
                        for name, addrs in psutil.net_if_addrs().items()
                    }
                }
            },
            'process': {
                'pid': os.getpid(),
                'memory_info': psutil.Process().memory_info()._asdict(),
                'cpu_percent': psutil.Process().cpu_percent(),
                'num_threads': psutil.Process().num_threads(),
                'create_time': format_datetime(datetime.fromtimestamp(psutil.Process().create_time())),
                'open_files': len(psutil.Process().open_files())
            },
            'uptime': {
                'boot_time': format_datetime(datetime.fromtimestamp(psutil.boot_time())),
                'uptime_seconds': time.time() - psutil.boot_time()
            }
        }
        return create_success_response("System information retrieved", 200, system_info)
    except Exception as e:
        logger.error(f"Error getting system info: {str(e)}")
        return create_error_response("Failed to get system information", 500)
@diagnostics_bp.route('/diagnostics/performance', methods=['GET'])
@require_auth
@handle_api_errors
@validate_query_params(
    allowed_params=['hours', 'metric'],
    param_types={'hours': int}
)
def get_performance_metrics() -> Tuple[Any, int]:
    """
    Get performance metrics and history.
    Query Parameters:
    - hours: Hours of history to retrieve (default: 24, max: 168)
    - metric: Specific metric to retrieve (optional)
    Returns:
    JSON response with performance metrics
    """
    hours = min(request.args.get('hours', 24, type=int), 168)  # Max 1 week
    metric = request.args.get('metric')
    try:
        # Sample each psutil counter exactly once so the reported numbers form
        # a consistent snapshot (previously net_io_counters() was re-sampled
        # four times and virtual_memory()/disk_usage() twice, mixing values
        # from different instants and doing redundant work).
        memory = psutil.virtual_memory()
        disk = psutil.disk_usage('/')
        net_io = psutil.net_io_counters()
        current_metrics = {
            'timestamp': datetime.now().isoformat(),
            'cpu': {
                # Blocks for one second to take a real CPU sample.
                'usage_percent': psutil.cpu_percent(interval=1),
                # getloadavg is unavailable on Windows.
                'load_average': os.getloadavg() if hasattr(os, 'getloadavg') else None
            },
            'memory': {
                'usage_percent': memory.percent,
                'available_gb': memory.available / (1024**3)
            },
            'disk': {
                'usage_percent': disk.percent,
                'free_gb': disk.free / (1024**3)
            },
            'network': {
                'bytes_sent': net_io.bytes_sent,
                'bytes_recv': net_io.bytes_recv,
                'packets_sent': net_io.packets_sent,
                'packets_recv': net_io.packets_recv
            }
        }
        # Historical data
        historical_data = performance_manager.get_performance_history(
            hours=hours,
            metric=metric
        )
        response_data = {
            'current': current_metrics,
            'history': historical_data,
            'summary': {
                'period_hours': hours,
                'data_points': len(historical_data),
                'metric_filter': metric
            }
        }
        return create_success_response("Performance metrics retrieved", 200, response_data)
    except Exception as e:
        logger.error(f"Error getting performance metrics: {str(e)}")
        return create_error_response("Failed to get performance metrics", 500)
@diagnostics_bp.route('/diagnostics/errors', methods=['GET'])
@require_auth
@handle_api_errors
@validate_query_params(
    allowed_params=['hours', 'level', 'limit'],
    param_types={'hours': int, 'limit': int}
)
def get_recent_errors() -> Tuple[Any, int]:
    """
    Get recent errors and error statistics.
    Query Parameters:
    - hours: Hours of errors to retrieve (default: 24, max: 168)
    - level: Error level filter (error, warning, critical)
    - limit: Maximum number of errors to return (default: 100, max: 1000)
    Returns:
    JSON response with recent errors
    """
    # Clamp the requested window and page size to sane maxima.
    window_hours = min(request.args.get('hours', 24, type=int), 168)
    level_filter = request.args.get('level')
    max_results = min(request.args.get('limit', 100, type=int), 1000)
    try:
        recent = error_manager.get_recent_errors(
            hours=window_hours,
            level=level_filter,
            limit=max_results,
        )
        stats = error_manager.get_error_stats()
        payload = {
            'errors': recent,
            'statistics': stats,
            'summary': {
                'period_hours': window_hours,
                'level_filter': level_filter,
                'total_returned': len(recent),
                'limit': max_results,
            },
        }
        return create_success_response("Recent errors retrieved", 200, payload)
    except Exception as e:
        logger.error(f"Error getting recent errors: {str(e)}")
        return create_error_response("Failed to get recent errors", 500)
@diagnostics_bp.route('/diagnostics/errors', methods=['DELETE'])
@require_auth
@handle_api_errors
def clear_errors() -> Tuple[Any, int]:
    """
    Clear error log.
    Returns:
    JSON response with clear operation result
    """
    try:
        if not error_manager.clear_errors():
            return create_error_response("Failed to clear error log", 500)
        logger.info("Error log cleared")
        return create_success_response("Error log cleared successfully")
    except Exception as e:
        logger.error(f"Error clearing error log: {str(e)}")
        return create_error_response("Failed to clear error log", 500)
@diagnostics_bp.route('/diagnostics/network', methods=['GET'])
@require_auth
@handle_api_errors
def test_network_connectivity() -> Tuple[Any, int]:
    """
    Test network connectivity to various services.
    Returns:
    JSON response with network connectivity results
    """
    def _probe(url: str) -> Dict[str, Any]:
        # One HTTP GET per URL; timeouts are reported separately from
        # other failures.
        try:
            started = time.time()
            response = requests.get(url, timeout=10)
            elapsed = time.time() - started
            return {
                'url': url,
                'status': 'success',
                'status_code': response.status_code,
                'response_time_ms': round(elapsed * 1000, 2),
                'accessible': response.status_code == 200
            }
        except requests.exceptions.Timeout:
            return {
                'url': url,
                'status': 'timeout',
                'error': 'Request timed out',
                'accessible': False
            }
        except Exception as e:
            return {
                'url': url,
                'status': 'error',
                'error': str(e),
                'accessible': False
            }
    try:
        test_urls = [
            'https://google.com',
            'https://github.com',
            'https://pypi.org',
            'https://httpbin.org/status/200'
        ]
        results = [_probe(url) for url in test_urls]
        # Network interface information
        interfaces = {
            name: [addr._asdict() for addr in addresses]
            for name, addresses in psutil.net_if_addrs().items()
        }
        # Network I/O statistics
        net_io = psutil.net_io_counters()._asdict()
        reachable = [r for r in results if r['accessible']]
        response_data = {
            'connectivity_tests': results,
            'interfaces': interfaces,
            'io_statistics': net_io,
            'summary': {
                'total_tests': len(results),
                'successful': len(reachable),
                'failed': len(results) - len(reachable)
            }
        }
        return create_success_response("Network connectivity test completed", 200, response_data)
    except Exception as e:
        logger.error(f"Error testing network connectivity: {str(e)}")
        return create_error_response("Failed to test network connectivity", 500)
@diagnostics_bp.route('/diagnostics/logs', methods=['GET'])
@require_auth
@handle_api_errors
@validate_query_params(
    allowed_params=['lines', 'level', 'component'],
    param_types={'lines': int}
)
def get_application_logs() -> Tuple[Any, int]:
    """
    Get recent application logs.
    Query Parameters:
    - lines: Number of log lines to retrieve (default: 100, max: 1000)
    - level: Log level filter (debug, info, warning, error, critical)
    - component: Component filter (optional)
    Returns:
    JSON response with application logs
    """
    requested_lines = min(request.args.get('lines', 100, type=int), 1000)
    level_filter = request.args.get('level')
    component_filter = request.args.get('component')
    try:
        # This would read from actual log files
        # Log collection is not implemented yet; an empty list is returned so
        # clients can already rely on the response shape.
        log_entries: List[Any] = []
        response_data = {
            'logs': log_entries,
            'summary': {
                'lines_requested': requested_lines,
                'level_filter': level_filter,
                'component_filter': component_filter,
                'total_returned': len(log_entries)
            }
        }
        return create_success_response("Application logs retrieved", 200, response_data)
    except Exception as e:
        logger.error(f"Error getting application logs: {str(e)}")
        return create_error_response("Failed to get application logs", 500)
@diagnostics_bp.route('/diagnostics/report', methods=['POST'])
@require_auth
@handle_api_errors
def generate_diagnostic_report() -> Tuple[Any, int]:
    """
    Generate comprehensive diagnostic report.

    Aggregates system facts, current resource usage, error statistics,
    performance metrics and network counters into one timestamped document
    identified by a unix-epoch-based report_id.

    Returns:
    JSON response with diagnostic report
    """
    try:
        report = {
            'generated_at': datetime.now().isoformat(),
            # Second-resolution epoch id; two reports in the same second collide.
            'report_id': f"diag_{int(time.time())}",
            'sections': {}
        }
        # System information
        report['sections']['system'] = {
            'platform': platform.platform(),
            'python_version': sys.version,
            'cpu_count': psutil.cpu_count(),
            'memory_total_gb': round(psutil.virtual_memory().total / (1024**3), 2),
            'disk_total_gb': round(psutil.disk_usage('/').total / (1024**3), 2)
        }
        # Current resource usage
        report['sections']['resources'] = {
            # Blocks for one second to take a real CPU sample.
            'cpu_percent': psutil.cpu_percent(interval=1),
            'memory_percent': psutil.virtual_memory().percent,
            'disk_percent': psutil.disk_usage('/').percent,
            # getloadavg is unavailable on Windows.
            'load_average': os.getloadavg() if hasattr(os, 'getloadavg') else None
        }
        # Error summary
        error_stats = error_manager.get_error_stats()
        report['sections']['errors'] = error_stats
        # Performance summary
        performance_metrics = performance_manager.get_performance_metrics()
        report['sections']['performance'] = performance_metrics
        # Network status
        report['sections']['network'] = {
            'interfaces_count': len(psutil.net_if_addrs()),
            'connections_count': len(psutil.net_connections()),
            'bytes_sent': psutil.net_io_counters().bytes_sent,
            'bytes_recv': psutil.net_io_counters().bytes_recv
        }
        logger.info(f"Diagnostic report generated: {report['report_id']}")
        return create_success_response("Diagnostic report generated", 200, report)
    except Exception as e:
        logger.error(f"Error generating diagnostic report: {str(e)}")
        return create_error_response("Failed to generate diagnostic report", 500)
@diagnostics_bp.route('/diagnostics/ping', methods=['GET'])
@optional_auth
@handle_api_errors
def ping() -> Tuple[Any, int]:
    """
    Simple ping endpoint for health monitoring.
    Returns:
    JSON response with ping result
    """
    payload = {
        'timestamp': datetime.now().isoformat(),
        'status': 'alive',
    }
    return create_success_response("pong", 200, payload)

View File

@@ -1,640 +0,0 @@
"""
Download Management API Endpoints
This module provides REST API endpoints for download operations,
including queue management, progress tracking, and download history.
"""
from flask import Blueprint, request
from typing import Dict, List, Any, Optional
import uuid
from datetime import datetime
from ...shared.auth_decorators import require_auth, optional_auth
from ...shared.error_handlers import handle_api_errors, APIException, NotFoundError, ValidationError
from ...shared.validators import validate_json_input, validate_id_parameter, validate_pagination_params
from ...shared.response_helpers import (
create_success_response, create_paginated_response, format_download_response,
extract_pagination_params, create_batch_response
)
# Import download components (these imports would need to be adjusted based on actual structure)
try:
    from download_manager import download_queue, download_manager, DownloadItem
    from database_manager import episode_repository, anime_repository
except ImportError:
    # Fallback for development/testing
    # When the download subsystem is absent every component is None; the
    # endpoints below check for this and answer 503 instead of crashing.
    download_queue = None
    download_manager = None
    DownloadItem = None
    episode_repository = None
    anime_repository = None
# Blueprint for download management endpoints
downloads_bp = Blueprint('downloads', __name__, url_prefix='/api/v1/downloads')
@downloads_bp.route('', methods=['GET'])
@handle_api_errors
@validate_pagination_params
@optional_auth
def list_downloads() -> Dict[str, Any]:
    """
    Get all downloads with optional filtering and pagination.
    Query Parameters:
    - status: Filter by download status (pending, downloading, completed, failed, paused)
    - anime_id: Filter by anime ID
    - episode_id: Filter by episode ID
    - active_only: Show only active downloads (true/false)
    - page: Page number (default: 1)
    - per_page: Items per page (default: 50, max: 1000)
    Returns:
    Paginated list of downloads
    """
    # 503 when the download subsystem fallback left the manager as None.
    if not download_manager:
        raise APIException("Download manager not available", 503)
    # Extract filters
    status_filter = request.args.get('status')
    anime_id = request.args.get('anime_id')
    episode_id = request.args.get('episode_id')
    active_only = request.args.get('active_only', 'false').lower() == 'true'
    # Validate filters
    valid_statuses = ['pending', 'downloading', 'completed', 'failed', 'paused', 'cancelled']
    if status_filter and status_filter not in valid_statuses:
        raise ValidationError(f"Status must be one of: {', '.join(valid_statuses)}")
    # Query-string values arrive as strings; coerce ids to int or reject.
    if anime_id:
        try:
            anime_id = int(anime_id)
        except ValueError:
            raise ValidationError("anime_id must be a valid integer")
    if episode_id:
        try:
            episode_id = int(episode_id)
        except ValueError:
            raise ValidationError("episode_id must be a valid integer")
    # Get pagination parameters
    page, per_page = extract_pagination_params()
    # Get downloads with filters
    downloads = download_manager.get_downloads(
        status_filter=status_filter,
        anime_id=anime_id,
        episode_id=episode_id,
        active_only=active_only
    )
    # Format download data
    formatted_downloads = [format_download_response(download.__dict__) for download in downloads]
    # Apply pagination
    # NOTE: pagination happens in memory after fetching everything —
    # acceptable while queues are small, worth pushing into the manager
    # if they grow.
    total = len(formatted_downloads)
    start_idx = (page - 1) * per_page
    end_idx = start_idx + per_page
    paginated_downloads = formatted_downloads[start_idx:end_idx]
    return create_paginated_response(
        data=paginated_downloads,
        page=page,
        per_page=per_page,
        total=total,
        endpoint='downloads.list_downloads'
    )
@downloads_bp.route('/<int:download_id>', methods=['GET'])
@handle_api_errors
@validate_id_parameter('download_id')
@optional_auth
def get_download(download_id: int) -> Dict[str, Any]:
    """
    Get specific download by ID.
    Args:
    download_id: Unique identifier for the download
    Returns:
    Download details with progress information
    """
    if not download_manager:
        raise APIException("Download manager not available", 503)
    record = download_manager.get_download_by_id(download_id)
    if not record:
        raise NotFoundError("Download not found")
    payload = format_download_response(record.__dict__)
    # Attach fine-grained progress when the manager can provide it.
    details = download_manager.get_download_progress(download_id)
    if details:
        payload['progress_details'] = details
    return create_success_response(payload)
@downloads_bp.route('', methods=['POST'])
@handle_api_errors
@validate_json_input(
    required_fields=['episode_id'],
    optional_fields=['priority', 'quality', 'subtitle_language', 'download_path'],
    field_types={
        'episode_id': int,
        'priority': int,
        'quality': str,
        'subtitle_language': str,
        'download_path': str
    }
)
@require_auth
def create_download() -> Dict[str, Any]:
    """
    Create a new download request.
    Required Fields:
    - episode_id: ID of the episode to download
    Optional Fields:
    - priority: Download priority (1-10, higher is more priority)
    - quality: Preferred quality (720p, 1080p, etc.)
    - subtitle_language: Preferred subtitle language
    - download_path: Custom download path
    Returns:
    Created download details
    """
    # Guard against the development fallback where ALL download components are
    # None: the original check missed DownloadItem and download_queue, which
    # this handler also uses, so requests crashed (500) instead of getting a
    # clean 503.
    if (not download_manager or not episode_repository
            or not download_queue or DownloadItem is None):
        raise APIException("Download manager not available", 503)
    data = request.get_json()
    episode_id = data['episode_id']
    # Validate episode exists
    episode = episode_repository.get_episode_by_id(episode_id)
    if not episode:
        raise ValidationError("Episode not found")
    # Check if episode is already downloaded
    if episode.status == 'downloaded':
        raise ValidationError("Episode is already downloaded")
    # Check if download already exists for this episode
    existing_download = download_manager.get_download_by_episode(episode_id)
    if existing_download and existing_download.status in ['pending', 'downloading']:
        raise ValidationError("Download already in progress for this episode")
    # Validate priority
    priority = data.get('priority', 5)
    if not 1 <= priority <= 10:
        raise ValidationError("Priority must be between 1 and 10")
    # Create download item
    try:
        download_item = DownloadItem(
            download_id=str(uuid.uuid4()),
            episode_id=episode_id,
            anime_id=episode.anime_id,
            priority=priority,
            quality=data.get('quality'),
            subtitle_language=data.get('subtitle_language'),
            download_path=data.get('download_path'),
            status='pending',
            # NOTE(review): utcnow() is naive and deprecated in 3.12; confirm
            # consumers before switching to datetime.now(timezone.utc).
            created_at=datetime.utcnow()
        )
    except Exception as e:
        raise ValidationError(f"Invalid download data: {str(e)}")
    # Add to download queue
    success = download_queue.add_download(download_item)
    if not success:
        raise APIException("Failed to create download", 500)
    # Return created download
    download_data = format_download_response(download_item.__dict__)
    return create_success_response(
        data=download_data,
        message="Download queued successfully",
        status_code=201
    )
@downloads_bp.route('/<int:download_id>/pause', methods=['POST'])
@handle_api_errors
@validate_id_parameter('download_id')
@require_auth
def pause_download(download_id: int) -> Dict[str, Any]:
    """
    Pause a download.
    Args:
    download_id: Unique identifier for the download
    Returns:
    Updated download status
    """
    if not download_manager:
        raise APIException("Download manager not available", 503)
    record = download_manager.get_download_by_id(download_id)
    if not record:
        raise NotFoundError("Download not found")
    # Only queued or running downloads can be paused.
    if record.status not in ('pending', 'downloading'):
        raise ValidationError(f"Cannot pause download with status '{record.status}'")
    if not download_manager.pause_download(download_id):
        raise APIException("Failed to pause download", 500)
    # Re-read so the response reflects the post-pause state.
    refreshed = download_manager.get_download_by_id(download_id)
    return create_success_response(
        data=format_download_response(refreshed.__dict__),
        message="Download paused successfully"
    )
@downloads_bp.route('/<int:download_id>/resume', methods=['POST'])
@handle_api_errors
@validate_id_parameter('download_id')
@require_auth
def resume_download(download_id: int) -> Dict[str, Any]:
    """
    Resume a paused download.
    Args:
    download_id: Unique identifier for the download
    Returns:
    Updated download status
    """
    if not download_manager:
        raise APIException("Download manager not available", 503)
    record = download_manager.get_download_by_id(download_id)
    if not record:
        raise NotFoundError("Download not found")
    # Resuming only makes sense for downloads that were previously paused.
    if record.status != 'paused':
        raise ValidationError(f"Cannot resume download with status '{record.status}'")
    if not download_manager.resume_download(download_id):
        raise APIException("Failed to resume download", 500)
    # Re-read so the response reflects the post-resume state.
    refreshed = download_manager.get_download_by_id(download_id)
    return create_success_response(
        data=format_download_response(refreshed.__dict__),
        message="Download resumed successfully"
    )
@downloads_bp.route('/<int:download_id>/cancel', methods=['POST'])
@handle_api_errors
@validate_id_parameter('download_id')
@require_auth
def cancel_download(download_id: int) -> Dict[str, Any]:
    """
    Cancel a download.
    Args:
    download_id: Unique identifier for the download
    Query Parameters:
    - delete_partial: Set to 'true' to delete partially downloaded files
    Returns:
    Cancellation confirmation
    """
    if not download_manager:
        raise APIException("Download manager not available", 503)
    record = download_manager.get_download_by_id(download_id)
    if not record:
        raise NotFoundError("Download not found")
    # Finished or already-cancelled downloads cannot be cancelled again.
    if record.status in ('completed', 'cancelled'):
        raise ValidationError(f"Cannot cancel download with status '{record.status}'")
    delete_partial = request.args.get('delete_partial', 'false').lower() == 'true'
    if not download_manager.cancel_download(download_id, delete_partial=delete_partial):
        raise APIException("Failed to cancel download", 500)
    message = "Download cancelled successfully"
    if delete_partial:
        message += " (partial files deleted)"
    return create_success_response(message=message)
@downloads_bp.route('/<int:download_id>/retry', methods=['POST'])
@handle_api_errors
@validate_id_parameter('download_id')
@require_auth
def retry_download(download_id: int) -> Dict[str, Any]:
    """
    Retry a failed download.
    Args:
    download_id: Unique identifier for the download
    Returns:
    Updated download status
    """
    if not download_manager:
        raise APIException("Download manager not available", 503)
    record = download_manager.get_download_by_id(download_id)
    if not record:
        raise NotFoundError("Download not found")
    # Only downloads that actually failed may be retried.
    if record.status != 'failed':
        raise ValidationError(f"Cannot retry download with status '{record.status}'")
    if not download_manager.retry_download(download_id):
        raise APIException("Failed to retry download", 500)
    # Re-read so the response reflects the re-queued state.
    refreshed = download_manager.get_download_by_id(download_id)
    return create_success_response(
        data=format_download_response(refreshed.__dict__),
        message="Download queued for retry"
    )
@downloads_bp.route('/bulk', methods=['POST'])
@handle_api_errors
@validate_json_input(
    required_fields=['action', 'download_ids'],
    optional_fields=['delete_partial'],
    field_types={
        'action': str,
        'download_ids': list,
        'delete_partial': bool
    }
)
@require_auth
def bulk_download_operation() -> Dict[str, Any]:
    """
    Perform bulk operations on multiple downloads.
    Required Fields:
    - action: Operation to perform (pause, resume, cancel, retry)
    - download_ids: List of download IDs to operate on
    Optional Fields:
    - delete_partial: For cancel action, whether to delete partial files
    Returns:
    Results of the bulk operation
    """
    if not download_manager:
        raise APIException("Download manager not available", 503)
    data = request.get_json()
    action = data['action']
    download_ids = data['download_ids']
    # delete_partial only affects the 'cancel' action.
    delete_partial = data.get('delete_partial', False)
    # Validate action
    valid_actions = ['pause', 'resume', 'cancel', 'retry']
    if action not in valid_actions:
        raise ValidationError(f"Invalid action. Must be one of: {', '.join(valid_actions)}")
    # Validate download_ids
    if not isinstance(download_ids, list) or not download_ids:
        raise ValidationError("download_ids must be a non-empty list")
    # Hard cap keeps a single request from monopolizing the manager.
    if len(download_ids) > 50:
        raise ValidationError("Cannot operate on more than 50 downloads at once")
    # Validate download IDs are integers
    try:
        download_ids = [int(did) for did in download_ids]
    except ValueError:
        raise ValidationError("All download_ids must be valid integers")
    # Perform bulk operation
    # Best-effort semantics: each id is attempted independently and failures
    # are collected rather than aborting the whole batch.
    successful_items = []
    failed_items = []
    for download_id in download_ids:
        try:
            if action == 'pause':
                success = download_manager.pause_download(download_id)
            elif action == 'resume':
                success = download_manager.resume_download(download_id)
            elif action == 'cancel':
                success = download_manager.cancel_download(download_id, delete_partial=delete_partial)
            elif action == 'retry':
                success = download_manager.retry_download(download_id)
            if success:
                successful_items.append({'download_id': download_id, 'action': action})
            else:
                failed_items.append({'download_id': download_id, 'error': 'Operation failed'})
        except Exception as e:
            failed_items.append({'download_id': download_id, 'error': str(e)})
    return create_batch_response(
        successful_items=successful_items,
        failed_items=failed_items,
        message=f"Bulk {action} operation completed"
    )
@downloads_bp.route('/queue', methods=['GET'])
@handle_api_errors
@optional_auth
def get_download_queue() -> Dict[str, Any]:
    """
    Report the current state of the download queue.

    Returns:
        Queue statistics (sizes, active/paused/failed counts) plus the
        individual queue items.
    """
    if not download_queue:
        raise APIException("Download queue not available", 503)

    status = download_queue.get_queue_status()
    # Pull each statistic defensively so a missing key falls back to a
    # sensible default.
    fields = [
        ('queue_size', 0),
        ('active_downloads', 0),
        ('max_concurrent', 0),
        ('paused_downloads', 0),
        ('failed_downloads', 0),
        ('completed_today', 0),
        ('queue_items', []),
    ]
    return create_success_response(
        data={name: status.get(name, default) for name, default in fields}
    )
@downloads_bp.route('/queue/pause', methods=['POST'])
@handle_api_errors
@require_auth
def pause_download_queue() -> Dict[str, Any]:
    """
    Pause the entire download queue.

    Returns:
        Confirmation message once the queue has been paused.

    Raises:
        APIException: If the queue is unavailable or refuses to pause.
    """
    if not download_queue:
        raise APIException("Download queue not available", 503)
    if not download_queue.pause_queue():
        raise APIException("Failed to pause download queue", 500)
    return create_success_response(message="Download queue paused")
@downloads_bp.route('/queue/resume', methods=['POST'])
@handle_api_errors
@require_auth
def resume_download_queue() -> Dict[str, Any]:
    """
    Resume the download queue.

    Returns:
        Confirmation message once the queue has been resumed.

    Raises:
        APIException: If the queue is unavailable or refuses to resume.
    """
    if not download_queue:
        raise APIException("Download queue not available", 503)
    if not download_queue.resume_queue():
        raise APIException("Failed to resume download queue", 500)
    return create_success_response(message="Download queue resumed")
@downloads_bp.route('/queue/clear', methods=['POST'])
@handle_api_errors
@require_auth
def clear_download_queue() -> Dict[str, Any]:
    """
    Remove completed (and optionally failed) downloads from the queue.

    Query Parameters:
        include_failed: 'true' to also clear failed downloads.

    Returns:
        Confirmation with the number of cleared entries.
    """
    if not download_queue:
        raise APIException("Download queue not available", 503)

    clear_failed = request.args.get('include_failed', 'false').lower() == 'true'
    removed = download_queue.clear_completed(include_failed=clear_failed)

    note = f"Cleared {removed} completed downloads"
    if clear_failed:
        note = note + " and failed downloads"
    return create_success_response(
        data={'cleared_count': removed},
        message=note
    )
@downloads_bp.route('/history', methods=['GET'])
@handle_api_errors
@validate_pagination_params
@optional_auth
def get_download_history() -> Dict[str, Any]:
    """
    Return the paginated download history, optionally filtered.

    Query Parameters:
        status: 'completed' or 'failed'.
        anime_id: Restrict to a single anime.
        date_from / date_to: ISO-format date bounds.
        page / per_page: Pagination controls.

    Returns:
        Paginated list of formatted download records.
    """
    if not download_manager:
        raise APIException("Download manager not available", 503)

    args = request.args
    status_filter = args.get('status')
    anime_id = args.get('anime_id')
    date_from = args.get('date_from')
    date_to = args.get('date_to')

    if status_filter and status_filter not in ('completed', 'failed'):
        raise ValidationError("Status filter must be 'completed' or 'failed'")

    if anime_id:
        try:
            anime_id = int(anime_id)
        except ValueError:
            raise ValidationError("anime_id must be a valid integer")

    # Both date bounds must parse as ISO timestamps (a trailing 'Z' is
    # normalized to an explicit UTC offset first).
    for label, value in (('date_from', date_from), ('date_to', date_to)):
        if not value:
            continue
        try:
            datetime.fromisoformat(value.replace('Z', '+00:00'))
        except ValueError:
            raise ValidationError(f"{label} must be in ISO format")

    page, per_page = extract_pagination_params()

    records = download_manager.get_download_history(
        status_filter=status_filter,
        anime_id=anime_id,
        date_from=date_from,
        date_to=date_to
    )
    formatted = [format_download_response(record.__dict__) for record in records]

    # Slice the in-memory result set down to the requested page.
    total = len(formatted)
    offset = (page - 1) * per_page
    window = formatted[offset:offset + per_page]

    return create_paginated_response(
        data=window,
        page=page,
        per_page=per_page,
        total=total,
        endpoint='downloads.get_download_history'
    )

View File

@@ -1,584 +0,0 @@
"""
Episode Management API Endpoints
This module provides REST API endpoints for episode CRUD operations,
including episode status management and metadata operations.
"""
from flask import Blueprint, request
from typing import Dict, List, Any, Optional
import uuid
from ...shared.auth_decorators import require_auth, optional_auth
from ...shared.error_handlers import handle_api_errors, APIException, NotFoundError, ValidationError
from ...shared.validators import validate_json_input, validate_id_parameter, validate_pagination_params
from ...shared.response_helpers import (
create_success_response, create_paginated_response, format_episode_response,
extract_pagination_params, create_batch_response
)
# Import database components (these imports would need to be adjusted based on actual structure)
try:
from database_manager import episode_repository, anime_repository, EpisodeMetadata
except ImportError:
# Fallback for development/testing
episode_repository = None
anime_repository = None
EpisodeMetadata = None
# Blueprint for episode management endpoints
episodes_bp = Blueprint('episodes', __name__, url_prefix='/api/v1/episodes')
@episodes_bp.route('', methods=['GET'])
@handle_api_errors
@validate_pagination_params
@optional_auth
def list_episodes() -> Dict[str, Any]:
    """
    Get all episodes with optional filtering and pagination.

    Query Parameters:
        anime_id: Filter by anime ID.
        status: Filter by episode status.
        downloaded: Filter by download status ('true'/'false').
        episode_number: Filter by episode number (must be >= 1).
        search: Search in episode title.
        page / per_page: Pagination controls.

    Returns:
        Paginated list of formatted episodes.

    Raises:
        ValidationError: On malformed filter values.
        APIException: When the episode repository is unavailable.
    """
    if not episode_repository:
        raise APIException("Episode repository not available", 503)

    # Extract filters
    anime_id = request.args.get('anime_id')
    status_filter = request.args.get('status')
    downloaded_filter = request.args.get('downloaded')
    episode_number = request.args.get('episode_number')
    search_term = request.args.get('search', '').strip()

    # Validate filters
    if anime_id:
        try:
            anime_id = int(anime_id)
        except ValueError:
            raise ValidationError("anime_id must be a valid integer")

    if downloaded_filter and downloaded_filter.lower() not in ['true', 'false']:
        raise ValidationError("downloaded filter must be 'true' or 'false'")

    if episode_number:
        try:
            episode_number = int(episode_number)
        except ValueError:
            raise ValidationError("episode_number must be a valid integer")
        # Checked *outside* the try/except: if ValidationError subclasses
        # ValueError, raising it inside the try would be swallowed and
        # replaced by the "valid integer" message.
        if episode_number < 1:
            raise ValidationError("episode_number must be positive")

    # Get pagination parameters
    page, per_page = extract_pagination_params()

    # Get episodes with filters
    episodes = episode_repository.get_all_episodes(
        anime_id=anime_id,
        status_filter=status_filter,
        downloaded_filter=downloaded_filter.lower() == 'true' if downloaded_filter else None,
        episode_number=episode_number,
        search_term=search_term
    )

    # Format episode data
    formatted_episodes = [format_episode_response(episode.__dict__) for episode in episodes]

    # Apply in-memory pagination over the full result set.
    total = len(formatted_episodes)
    start_idx = (page - 1) * per_page
    end_idx = start_idx + per_page
    paginated_episodes = formatted_episodes[start_idx:end_idx]

    return create_paginated_response(
        data=paginated_episodes,
        page=page,
        per_page=per_page,
        total=total,
        endpoint='episodes.list_episodes'
    )
@episodes_bp.route('/<int:episode_id>', methods=['GET'])
@handle_api_errors
@validate_id_parameter('episode_id')
@optional_auth
def get_episode(episode_id: int) -> Dict[str, Any]:
    """
    Fetch a single episode by its ID.

    Args:
        episode_id: Unique identifier for the episode.

    Returns:
        Formatted episode details; 'download_info' is attached when the
        repository has download data for the episode.

    Raises:
        NotFoundError: If no episode matches the ID.
        APIException: When the episode repository is unavailable.
    """
    if not episode_repository:
        raise APIException("Episode repository not available", 503)

    record = episode_repository.get_episode_by_id(episode_id)
    if not record:
        raise NotFoundError("Episode not found")

    payload = format_episode_response(record.__dict__)
    extra = episode_repository.get_download_info(episode_id)
    if extra:
        payload['download_info'] = extra
    return create_success_response(payload)
@episodes_bp.route('', methods=['POST'])
@handle_api_errors
@validate_json_input(
    required_fields=['anime_id', 'episode_number', 'title', 'url'],
    optional_fields=['description', 'status', 'duration', 'air_date', 'custom_metadata'],
    field_types={
        'anime_id': int,
        'episode_number': int,
        'title': str,
        'url': str,
        'description': str,
        'status': str,
        'duration': int,
        'air_date': str,
        'custom_metadata': dict
    }
)
@require_auth
def create_episode() -> Dict[str, Any]:
    """
    Create a new episode record.

    Required Fields:
        - anime_id: ID of the anime this episode belongs to
        - episode_number: Episode number (must be >= 1)
        - title: Episode title
        - url: Episode URL

    Optional Fields:
        - description, status, duration, air_date, custom_metadata

    Returns:
        Created episode details (HTTP 201).

    Raises:
        ValidationError: For unknown anime, invalid status, duplicate or
            non-positive episode numbers, or malformed episode data.
        APIException: When required repositories are unavailable or the
            insert fails.
    """
    # EpisodeMetadata is None when the database layer failed to import
    # (see the module's try/except fallback); fail fast with a clear 503
    # instead of a confusing TypeError when it is called below.
    if not episode_repository or not anime_repository or EpisodeMetadata is None:
        raise APIException("Episode repository not available", 503)
    data = request.get_json()

    # Run cheap local validations before any database round-trips.
    if data['episode_number'] < 1:
        raise ValidationError("Episode number must be positive")
    valid_statuses = ['available', 'unavailable', 'coming_soon', 'downloaded']
    if 'status' in data and data['status'] not in valid_statuses:
        raise ValidationError(f"Status must be one of: {', '.join(valid_statuses)}")

    # Validate anime exists
    anime = anime_repository.get_anime_by_id(data['anime_id'])
    if not anime:
        raise ValidationError("Anime not found")

    # Reject duplicates for the same anime/episode pair.
    existing_episode = episode_repository.get_episode_by_anime_and_number(
        data['anime_id'], data['episode_number']
    )
    if existing_episode:
        raise ValidationError(f"Episode {data['episode_number']} already exists for this anime")

    # Build the metadata record; a generated UUID serves as the episode key.
    try:
        episode = EpisodeMetadata(
            episode_id=str(uuid.uuid4()),
            anime_id=data['anime_id'],
            episode_number=data['episode_number'],
            title=data['title'],
            url=data['url'],
            description=data.get('description'),
            status=data.get('status', 'available'),
            duration=data.get('duration'),
            air_date=data.get('air_date'),
            custom_metadata=data.get('custom_metadata', {})
        )
    except Exception as e:
        raise ValidationError(f"Invalid episode data: {str(e)}")

    # Save to database
    success = episode_repository.create_episode(episode)
    if not success:
        raise APIException("Failed to create episode", 500)

    # Return created episode
    episode_data = format_episode_response(episode.__dict__)
    return create_success_response(
        data=episode_data,
        message="Episode created successfully",
        status_code=201
    )
@episodes_bp.route('/<int:episode_id>', methods=['PUT'])
@handle_api_errors
@validate_id_parameter('episode_id')
@validate_json_input(
    optional_fields=['title', 'url', 'description', 'status', 'duration', 'air_date', 'custom_metadata'],
    field_types={
        'title': str,
        'url': str,
        'description': str,
        'status': str,
        'duration': int,
        'air_date': str,
        'custom_metadata': dict
    }
)
@require_auth
def update_episode(episode_id: int) -> Dict[str, Any]:
    """
    Update an existing episode record.

    Args:
        episode_id: Unique identifier for the episode.

    Optional Fields:
        title, url, description, status, duration, air_date and
        custom_metadata (merged into the existing metadata, not replaced).

    Returns:
        Updated episode details.

    Raises:
        NotFoundError: If the episode does not exist.
        ValidationError: If an invalid status is supplied.
        APIException: If the repository is unavailable or the update fails.
    """
    if not episode_repository:
        raise APIException("Episode repository not available", 503)
    data = request.get_json()

    # Get existing episode
    existing_episode = episode_repository.get_episode_by_id(episode_id)
    if not existing_episode:
        raise NotFoundError("Episode not found")

    # Validate status if provided
    valid_statuses = ['available', 'unavailable', 'coming_soon', 'downloaded']
    if 'status' in data and data['status'] not in valid_statuses:
        raise ValidationError(f"Status must be one of: {', '.join(valid_statuses)}")

    # Collect simple scalar updates.
    update_fields = {}
    for field in ['title', 'url', 'description', 'status', 'duration', 'air_date']:
        if field in data:
            update_fields[field] = data[field]

    # Merge custom metadata into a *copy*: the original code updated
    # existing_episode.custom_metadata in place, which mutates the fetched
    # entity (and anything caching it) before the update is even persisted.
    if 'custom_metadata' in data:
        merged_metadata = dict(existing_episode.custom_metadata or {})
        merged_metadata.update(data['custom_metadata'])
        update_fields['custom_metadata'] = merged_metadata

    # Perform update
    success = episode_repository.update_episode(episode_id, update_fields)
    if not success:
        raise APIException("Failed to update episode", 500)

    # Re-read so the response reflects exactly what was persisted.
    updated_episode = episode_repository.get_episode_by_id(episode_id)
    episode_data = format_episode_response(updated_episode.__dict__)
    return create_success_response(
        data=episode_data,
        message="Episode updated successfully"
    )
@episodes_bp.route('/<int:episode_id>', methods=['DELETE'])
@handle_api_errors
@validate_id_parameter('episode_id')
@require_auth
def delete_episode(episode_id: int) -> Dict[str, Any]:
    """
    Delete an episode record, optionally removing the downloaded file.

    Args:
        episode_id: Unique identifier for the episode.

    Query Parameters:
        delete_file: 'true' to also delete the downloaded file.

    Returns:
        Deletion confirmation message.

    Raises:
        NotFoundError: If the episode does not exist.
        APIException: If the repository is unavailable or the delete fails.
    """
    if not episode_repository:
        raise APIException("Episode repository not available", 503)

    target = episode_repository.get_episode_by_id(episode_id)
    if not target:
        raise NotFoundError("Episode not found")

    remove_file = request.args.get('delete_file', 'false').lower() == 'true'
    if not episode_repository.delete_episode(episode_id, delete_file=remove_file):
        raise APIException("Failed to delete episode", 500)

    confirmation = f"Episode {target.episode_number} deleted successfully"
    if remove_file:
        confirmation += " (including downloaded file)"
    return create_success_response(message=confirmation)
@episodes_bp.route('/bulk/status', methods=['PUT'])
@handle_api_errors
@validate_json_input(
    required_fields=['episode_ids', 'status'],
    field_types={
        'episode_ids': list,
        'status': str
    }
)
@require_auth
def bulk_update_status() -> Dict[str, Any]:
    """
    Set a new status on up to 100 episodes in one request.

    Required JSON fields:
        episode_ids: List of episode IDs to update.
        status: New status applied to every episode.

    Returns:
        Batch response with per-episode success/failure details.
    """
    if not episode_repository:
        raise APIException("Episode repository not available", 503)

    body = request.get_json()
    raw_ids = body['episode_ids']
    new_status = body['status']

    valid_statuses = ['available', 'unavailable', 'coming_soon', 'downloaded']
    if new_status not in valid_statuses:
        raise ValidationError(f"Status must be one of: {', '.join(valid_statuses)}")

    if not isinstance(raw_ids, list) or not raw_ids:
        raise ValidationError("episode_ids must be a non-empty list")
    if len(raw_ids) > 100:
        raise ValidationError("Cannot operate on more than 100 episodes at once")

    try:
        ids = [int(raw) for raw in raw_ids]
    except ValueError:
        raise ValidationError("All episode_ids must be valid integers")

    succeeded = []
    failed = []
    for eid in ids:
        try:
            if episode_repository.update_episode(eid, {'status': new_status}):
                succeeded.append({'episode_id': eid, 'new_status': new_status})
            else:
                failed.append({'episode_id': eid, 'error': 'Episode not found'})
        except Exception as e:
            failed.append({'episode_id': eid, 'error': str(e)})

    return create_batch_response(
        successful_items=succeeded,
        failed_items=failed,
        message=f"Bulk status update to '{new_status}' completed"
    )
@episodes_bp.route('/anime/<int:anime_id>/sync', methods=['POST'])
@handle_api_errors
@validate_id_parameter('anime_id')
@require_auth
def sync_anime_episodes(anime_id: int) -> Dict[str, Any]:
    """
    Synchronize episodes for an anime by scanning the source.

    Args:
        anime_id: Unique identifier for the anime.

    Returns:
        Counts of episodes found/added/updated/removed by the sync.

    Raises:
        NotFoundError: If the anime does not exist.
        APIException: If repositories are unavailable or the sync fails.
    """
    if not episode_repository or not anime_repository:
        raise APIException("Episode repository not available", 503)

    anime = anime_repository.get_anime_by_id(anime_id)
    if not anime:
        raise NotFoundError("Anime not found")

    # Any failure while syncing or assembling the result is surfaced as a
    # single 500-level APIException.
    try:
        outcome = episode_repository.sync_episodes_for_anime(anime_id)
        counters = {
            key: outcome.get(key, 0)
            for key in ('episodes_found', 'episodes_added',
                        'episodes_updated', 'episodes_removed')
        }
        return create_success_response(
            data={'anime_id': anime_id, **counters},
            message=f"Episode sync completed for '{anime.name}'"
        )
    except Exception as e:
        raise APIException(f"Failed to sync episodes: {str(e)}", 500)
@episodes_bp.route('/<int:episode_id>/download', methods=['POST'])
@handle_api_errors
@validate_id_parameter('episode_id')
@require_auth
def queue_episode_download(episode_id: int) -> Dict[str, Any]:
    """
    Queue an episode for download.

    Args:
        episode_id: Unique identifier for the episode.

    Returns:
        Confirmation carrying the new download ID.

    Raises:
        NotFoundError: If the episode does not exist.
        ValidationError: If the episode is already downloaded or not in an
            'available' state.
        APIException: If the repository is unavailable or queueing fails.
    """
    if not episode_repository:
        raise APIException("Episode repository not available", 503)

    episode = episode_repository.get_episode_by_id(episode_id)
    if not episode:
        raise NotFoundError("Episode not found")

    if episode.status == 'downloaded':
        raise ValidationError("Episode is already downloaded")
    if episode.status not in ['available']:
        raise ValidationError(f"Episode status '{episode.status}' is not available for download")

    try:
        # Imported lazily so this module still loads when the download
        # subsystem is unavailable.
        from ...download_manager import download_queue
        new_download_id = download_queue.add_episode_download(episode_id)
        return create_success_response(
            data={'download_id': new_download_id},
            message=f"Episode {episode.episode_number} queued for download"
        )
    except Exception as e:
        raise APIException(f"Failed to queue download: {str(e)}", 500)
@episodes_bp.route('/search', methods=['GET'])
@handle_api_errors
@validate_pagination_params
@optional_auth
def search_episodes() -> Dict[str, Any]:
    """
    Search episodes by title or other criteria.

    Query Parameters:
        q: Search query (required, at least 2 characters).
        anime_id: Limit search to a specific anime.
        status: Filter by episode status.
        page / per_page: Pagination controls.

    Returns:
        Paginated search results plus a 'search' metadata section.
    """
    if not episode_repository:
        raise APIException("Episode repository not available", 503)

    query = request.args.get('q', '').strip()
    if not query:
        raise ValidationError("Search term 'q' is required")
    if len(query) < 2:
        raise ValidationError("Search term must be at least 2 characters long")

    anime_id = request.args.get('anime_id')
    status_filter = request.args.get('status')
    if anime_id:
        try:
            anime_id = int(anime_id)
        except ValueError:
            raise ValidationError("anime_id must be a valid integer")

    page, per_page = extract_pagination_params()

    matches = episode_repository.search_episodes(
        search_term=query,
        anime_id=anime_id,
        status_filter=status_filter
    )
    shaped = [format_episode_response(match.__dict__) for match in matches]

    # Page the in-memory result list.
    total = len(shaped)
    offset = (page - 1) * per_page
    page_items = shaped[offset:offset + per_page]

    response = create_paginated_response(
        data=page_items,
        page=page,
        per_page=per_page,
        total=total,
        endpoint='episodes.search_episodes',
        q=query
    )
    # Attach search metadata alongside the standard pagination envelope.
    response['search'] = {
        'query': query,
        'total_results': total,
        'filters': {
            'anime_id': anime_id,
            'status': status_filter
        }
    }
    return response

View File

@@ -1,436 +0,0 @@
"""
Health Check Endpoints
This module provides basic health check endpoints for monitoring
the AniWorld application's status.
"""
from flask import Blueprint, jsonify
import time
import os
import psutil
from datetime import datetime
# Blueprint for health check endpoints
health_bp = Blueprint('health_check', __name__, url_prefix='/api/health')
@health_bp.route('/status')
def get_basic_health():
    """Get basic application health status.

    Returns memory/disk usage and process uptime as JSON, or a 500 payload
    carrying the error message when metric collection fails.
    """
    try:
        # Basic system metrics
        memory = psutil.virtual_memory()
        disk = psutil.disk_usage('/')
        # Report real process uptime: the previous code returned the raw
        # epoch timestamp (time.time()), which is not an uptime at all.
        uptime_seconds = time.time() - psutil.Process().create_time()
        return jsonify({
            'status': 'healthy',
            'timestamp': datetime.now().isoformat(),
            'system': {
                'memory_usage_percent': memory.percent,
                'disk_usage_percent': disk.percent,
                'uptime': uptime_seconds
            },
            'application': {
                'status': 'running',
                'version': '1.0.0'
            }
        })
    except Exception as e:
        return jsonify({
            'status': 'error',
            'message': str(e),
            'timestamp': datetime.now().isoformat()
        }), 500
@health_bp.route('/ping')
def ping():
    """Liveness ping: respond with 'ok' and the current timestamp."""
    payload = {
        'status': 'ok',
        'timestamp': datetime.now().isoformat(),
    }
    return jsonify(payload)
@health_bp.route('/api/health')
def basic_health():
    """Basic health check endpoint for load balancers.

    NOTE(review): the blueprint already carries url_prefix='/api/health',
    so this handler is served at /api/health/api/health — confirm the
    doubled prefix is intentional.
    """
    payload = {
        'status': 'healthy',
        'timestamp': datetime.utcnow().isoformat(),
        'service': 'aniworld-web'
    }
    return jsonify(payload)
@health_bp.route('/api/health/system')
def system_health():
    """Comprehensive system health check.

    Returns CPU, memory, disk and process metrics as JSON; on collection
    failure the payload reports status 'unhealthy' with the error message.

    NOTE(review): get_cached_health_data is not imported in this module's
    visible header — confirm it is defined elsewhere in the file.
    NOTE(review): with the blueprint's url_prefix '/api/health', this route
    resolves to /api/health/api/health/system — confirm intended.
    """
    def check_system_health():
        # Collector callable handed to the cache layer; builds the payload.
        try:
            # System metrics
            # cpu_percent(interval=1) blocks for one second while sampling.
            cpu_percent = psutil.cpu_percent(interval=1)
            memory = psutil.virtual_memory()
            disk = psutil.disk_usage('/')
            # Process metrics for this server process.
            process = psutil.Process()
            process_memory = process.memory_info()
            return {
                'status': 'healthy',
                'timestamp': datetime.utcnow().isoformat(),
                'system': {
                    'cpu_percent': cpu_percent,
                    'memory': {
                        'total_mb': memory.total / 1024 / 1024,
                        'available_mb': memory.available / 1024 / 1024,
                        'percent': memory.percent
                    },
                    'disk': {
                        'total_gb': disk.total / 1024 / 1024 / 1024,
                        'free_gb': disk.free / 1024 / 1024 / 1024,
                        'percent': (disk.used / disk.total) * 100
                    }
                },
                'process': {
                    'memory_mb': process_memory.rss / 1024 / 1024,
                    'threads': process.num_threads(),
                    'cpu_percent': process.cpu_percent()
                }
            }
        except Exception as e:
            # Metric-collection failures degrade to an 'unhealthy' payload
            # instead of a 500 response.
            return {
                'status': 'unhealthy',
                'error': str(e),
                'timestamp': datetime.utcnow().isoformat()
            }
    return jsonify(get_cached_health_data('system', check_system_health))
@health_bp.route('/api/health/database')
def database_health():
    """Database connectivity and health check.

    Probes the database with a trivial query, then reports connection
    latency, on-disk size, schema version and table counts. Cached for
    60 seconds via get_cached_health_data.

    NOTE(review): database_manager and get_cached_health_data are not
    imported in this module's visible header — confirm they are defined
    elsewhere in the file.
    """
    def check_database_health():
        # Collector callable handed to the cache layer.
        try:
            # Test database connection with a trivial round-trip query.
            start_time = time.time()
            with database_manager.get_connection() as conn:
                cursor = conn.execute("SELECT 1")
                result = cursor.fetchone()
            # Latency in milliseconds, measured around the whole round-trip.
            connection_time = (time.time() - start_time) * 1000  # ms
            # On-disk database size (0 when the file does not exist yet).
            db_size = os.path.getsize(database_manager.db_path) if os.path.exists(database_manager.db_path) else 0
            # Check schema version
            schema_version = database_manager.get_current_version()
            # Row counts from the two main tables, on a fresh connection.
            with database_manager.get_connection() as conn:
                anime_count = conn.execute("SELECT COUNT(*) FROM anime_metadata").fetchone()[0]
                episode_count = conn.execute("SELECT COUNT(*) FROM episode_metadata").fetchone()[0]
            return {
                'status': 'healthy',
                'timestamp': datetime.utcnow().isoformat(),
                'database': {
                    'connected': True,
                    'connection_time_ms': connection_time,
                    'size_mb': db_size / 1024 / 1024,
                    'schema_version': schema_version,
                    'tables': {
                        'anime_count': anime_count,
                        'episode_count': episode_count
                    }
                }
            }
        except Exception as e:
            # Any failure is reported as a disconnected/unhealthy database.
            return {
                'status': 'unhealthy',
                'timestamp': datetime.utcnow().isoformat(),
                'database': {
                    'connected': False,
                    'error': str(e)
                }
            }
    return jsonify(get_cached_health_data('database', check_database_health, ttl=60))
@health_bp.route('/api/health/dependencies')
def dependencies_health():
    """Check health of external dependencies.

    Verifies filesystem read/write access to the anime directory and basic
    outbound network connectivity. Overall status is 'degraded' as soon as
    any single dependency check fails. Cached for 120 seconds.

    NOTE(review): config and get_cached_health_data are not imported in
    this module's visible header — confirm they are defined elsewhere.
    """
    def check_dependencies():
        # Accumulator payload; individual checks mutate 'dependencies' and
        # may downgrade the top-level 'status'.
        dependencies = {
            'status': 'healthy',
            'timestamp': datetime.utcnow().isoformat(),
            'dependencies': {}
        }
        # Check filesystem access
        try:
            anime_directory = getattr(config, 'anime_directory', '/app/data')
            if os.path.exists(anime_directory):
                # Test read/write access by creating and removing a probe file.
                test_file = os.path.join(anime_directory, '.health_check')
                with open(test_file, 'w') as f:
                    f.write('test')
                os.remove(test_file)
                dependencies['dependencies']['filesystem'] = {
                    'status': 'healthy',
                    'path': anime_directory,
                    'accessible': True
                }
            else:
                dependencies['dependencies']['filesystem'] = {
                    'status': 'unhealthy',
                    'path': anime_directory,
                    'accessible': False,
                    'error': 'Directory does not exist'
                }
                dependencies['status'] = 'degraded'
        except Exception as e:
            dependencies['dependencies']['filesystem'] = {
                'status': 'unhealthy',
                'error': str(e)
            }
            dependencies['status'] = 'degraded'
        # Check network connectivity (basic): TCP to a public DNS resolver.
        try:
            import socket
            socket.create_connection(("8.8.8.8", 53), timeout=3)
            dependencies['dependencies']['network'] = {
                'status': 'healthy',
                'connectivity': True
            }
        except Exception as e:
            dependencies['dependencies']['network'] = {
                'status': 'unhealthy',
                'connectivity': False,
                'error': str(e)
            }
            dependencies['status'] = 'degraded'
        return dependencies
    return jsonify(get_cached_health_data('dependencies', check_dependencies, ttl=120))
@health_bp.route('/api/health/performance')
def performance_health():
    """Performance metrics and health indicators.

    Reports memory usage, thread count and load average, downgrading the
    status to 'degraded' (with warnings) when thresholds are exceeded.
    Cached for 10 seconds.

    NOTE(review): memory_monitor and get_cached_health_data are not
    imported in this module's visible header — confirm they are defined
    elsewhere in the file.
    """
    def check_performance():
        # Collector callable handed to the cache layer.
        try:
            # Memory usage; memory_monitor may be falsy, in which case
            # usage defaults to 0 / not-high.
            memory_usage = memory_monitor.get_current_memory_usage() if memory_monitor else 0
            is_memory_high = memory_monitor.is_memory_usage_high() if memory_monitor else False
            # Thread count of this server process.
            process = psutil.Process()
            thread_count = process.num_threads()
            # Load average (if available)
            load_avg = None
            try:
                load_avg = os.getloadavg()
            except (AttributeError, OSError):
                # Not available on all platforms (e.g. Windows).
                pass
            # Check if performance is within acceptable limits; each breach
            # downgrades to 'degraded' and adds a human-readable warning.
            performance_status = 'healthy'
            warnings = []
            if is_memory_high:
                performance_status = 'degraded'
                warnings.append('High memory usage detected')
            if thread_count > 100:  # Arbitrary threshold
                performance_status = 'degraded'
                warnings.append(f'High thread count: {thread_count}')
            if load_avg and load_avg[0] > 4:  # Load average > 4
                performance_status = 'degraded'
                warnings.append(f'High system load: {load_avg[0]:.2f}')
            return {
                'status': performance_status,
                'timestamp': datetime.utcnow().isoformat(),
                'performance': {
                    'memory_usage_mb': memory_usage,
                    'memory_high': is_memory_high,
                    'thread_count': thread_count,
                    'load_average': load_avg,
                    'warnings': warnings
                }
            }
        except Exception as e:
            return {
                'status': 'error',
                'timestamp': datetime.utcnow().isoformat(),
                'error': str(e)
            }
    return jsonify(get_cached_health_data('performance', check_performance, ttl=10))
@health_bp.route('/api/health/detailed')
def detailed_health():
    """Comprehensive health check combining all metrics.

    Aggregates the system, database, dependencies and performance checks
    into one payload and derives an overall status: any 'unhealthy'/'error'
    wins over 'degraded', which wins over 'healthy'.

    NOTE(review): each lambda re-enters the corresponding route handler and
    reads Response.json from its return value, layering a second cache
    lookup on top of the one inside that handler — confirm this double
    caching is intended.
    """
    def check_detailed_health():
        try:
            # Get all health checks (each is itself cached by key).
            system = get_cached_health_data('system', lambda: system_health().json)
            database = get_cached_health_data('database', lambda: database_health().json)
            dependencies = get_cached_health_data('dependencies', lambda: dependencies_health().json)
            performance = get_cached_health_data('performance', lambda: performance_health().json)
            # Determine overall status from the component statuses.
            statuses = [
                system.get('status', 'unknown'),
                database.get('status', 'unknown'),
                dependencies.get('status', 'unknown'),
                performance.get('status', 'unknown')
            ]
            if 'unhealthy' in statuses or 'error' in statuses:
                overall_status = 'unhealthy'
            elif 'degraded' in statuses:
                overall_status = 'degraded'
            else:
                overall_status = 'healthy'
            return {
                'status': overall_status,
                'timestamp': datetime.utcnow().isoformat(),
                'components': {
                    'system': system,
                    'database': database,
                    'dependencies': dependencies,
                    'performance': performance
                }
            }
        except Exception as e:
            return {
                'status': 'error',
                'timestamp': datetime.utcnow().isoformat(),
                'error': str(e)
            }
    # Don't cache detailed health - always get fresh data
    return jsonify(check_detailed_health())
@health_bp.route('/api/health/ready')
def readiness_probe():
    """Kubernetes readiness probe endpoint.

    Returns 200 'ready' only when the database answers a trivial query and
    the anime directory exists; otherwise 503 'not_ready' with the error.

    NOTE(review): database_manager and config are not imported in this
    module's visible header — confirm they are defined elsewhere.
    """
    try:
        # Check critical dependencies: database must answer a round-trip.
        with database_manager.get_connection() as conn:
            conn.execute("SELECT 1")
        # Check if anime directory is accessible
        anime_directory = getattr(config, 'anime_directory', '/app/data')
        if not os.path.exists(anime_directory):
            raise Exception(f"Anime directory not accessible: {anime_directory}")
        return jsonify({
            'status': 'ready',
            'timestamp': datetime.utcnow().isoformat()
        })
    except Exception as e:
        # 503 tells the orchestrator to keep traffic away from this pod.
        return jsonify({
            'status': 'not_ready',
            'timestamp': datetime.utcnow().isoformat(),
            'error': str(e)
        }), 503
@health_bp.route('/api/health/live')
def liveness_probe():
    """Kubernetes liveness probe endpoint.

    Returns 'alive' with process uptime; a 503 'dead' payload is only
    produced if even this trivial handler fails.
    """
    try:
        # Uptime is wall-clock time elapsed since this process started.
        uptime = time.time() - psutil.Process().create_time()
        body = {
            'status': 'alive',
            'timestamp': datetime.utcnow().isoformat(),
            'uptime_seconds': uptime
        }
        return jsonify(body)
    except Exception as exc:
        body = {
            'status': 'dead',
            'timestamp': datetime.utcnow().isoformat(),
            'error': str(exc)
        }
        return jsonify(body), 503
@health_bp.route('/api/health/metrics')
def prometheus_metrics():
    """Prometheus-compatible metrics endpoint.

    Emits system, database and process metrics in the Prometheus text
    exposition format. Database metrics are best-effort and silently
    skipped when the database is unreachable.
    """
    try:
        metrics = []
        # System metrics (f-prefixes removed from constant lines — F541).
        cpu_percent = psutil.cpu_percent()
        memory = psutil.virtual_memory()
        disk = psutil.disk_usage('/')
        metrics.extend([
            "# HELP aniworld_cpu_usage_percent CPU usage percentage",
            "# TYPE aniworld_cpu_usage_percent gauge",
            f"aniworld_cpu_usage_percent {cpu_percent}",
            "",
            "# HELP aniworld_memory_usage_percent Memory usage percentage",
            "# TYPE aniworld_memory_usage_percent gauge",
            f"aniworld_memory_usage_percent {memory.percent}",
            "",
            "# HELP aniworld_disk_usage_percent Disk usage percentage",
            "# TYPE aniworld_disk_usage_percent gauge",
            f"aniworld_disk_usage_percent {(disk.used / disk.total) * 100}",
            "",
        ])
        # Database metrics. Declared as gauges: these row counts can
        # decrease when records are deleted, and Prometheus counters must
        # be monotonically increasing.
        try:
            with database_manager.get_connection() as conn:
                anime_count = conn.execute("SELECT COUNT(*) FROM anime_metadata").fetchone()[0]
                episode_count = conn.execute("SELECT COUNT(*) FROM episode_metadata").fetchone()[0]
            metrics.extend([
                "# HELP aniworld_anime_total Total number of anime in database",
                "# TYPE aniworld_anime_total gauge",
                f"aniworld_anime_total {anime_count}",
                "",
                "# HELP aniworld_episodes_total Total number of episodes in database",
                "# TYPE aniworld_episodes_total gauge",
                f"aniworld_episodes_total {episode_count}",
                "",
            ])
        except Exception:
            # Best-effort: omit database metrics when the DB is unavailable.
            pass
        # Process metrics
        process = psutil.Process()
        metrics.extend([
            "# HELP aniworld_process_threads Number of threads in process",
            "# TYPE aniworld_process_threads gauge",
            f"aniworld_process_threads {process.num_threads()}",
            "",
            "# HELP aniworld_process_memory_bytes Memory usage in bytes",
            "# TYPE aniworld_process_memory_bytes gauge",
            f"aniworld_process_memory_bytes {process.memory_info().rss}",
            "",
        ])
        return "\n".join(metrics), 200, {'Content-Type': 'text/plain; charset=utf-8'}
    except Exception as e:
        return f"# Error generating metrics: {e}", 500, {'Content-Type': 'text/plain'}
# Export the blueprint
__all__ = ['health_bp']

View File

@@ -1,701 +0,0 @@
"""
Integrations API endpoints.
This module handles all external integration operations including:
- API key management
- Webhook configuration
- External service integrations
- Third-party API management
"""
from flask import Blueprint, request, jsonify
from typing import Dict, List, Any, Optional, Tuple
import logging
import requests
import json
import hmac
import hashlib
import time
from datetime import datetime, timedelta
# Import shared utilities.
# NOTE(review): when the shared helpers cannot be imported (e.g. running this
# module in isolation during development), no-op stand-ins are installed below
# so the route decorators and response helpers still resolve at import time.
try:
    from src.server.web.controllers.shared.auth_decorators import require_auth, optional_auth
    from src.server.web.controllers.shared.error_handlers import handle_api_errors
    from src.server.web.controllers.shared.validators import (
        validate_json_input, validate_query_params, validate_pagination_params,
        validate_id_parameter, is_valid_url
    )
    from src.server.web.controllers.shared.response_helpers import (
        create_success_response, create_error_response, create_paginated_response
    )
except ImportError:
    # Fallback imports for development: plain decorators become identity
    # functions, decorator factories return pass-through decorators, and the
    # response helpers build minimal (body, status) tuples with jsonify.
    def require_auth(f): return f
    def optional_auth(f): return f
    def handle_api_errors(f): return f
    def validate_json_input(**kwargs): return lambda f: f
    def validate_query_params(**kwargs): return lambda f: f
    def validate_pagination_params(f): return f
    def validate_id_parameter(param): return lambda f: f
    def is_valid_url(url): return url.startswith(('http://', 'https://'))
    def create_success_response(msg, code=200, data=None): return jsonify({'success': True, 'message': msg, 'data': data}), code
    def create_error_response(msg, code=400, details=None): return jsonify({'error': msg, 'details': details}), code
    def create_paginated_response(items, page, per_page, total, endpoint=None): return jsonify({'data': items, 'pagination': {'page': page, 'per_page': per_page, 'total': total}}), 200
# Import integration components.
# NOTE(review): the stub classes below return canned values so the endpoints
# stay importable and testable in development without the real data layer.
# (The `id` parameter names shadow the builtin; kept as-is for byte-parity.)
try:
    from src.server.data.integration_manager import IntegrationManager
    from src.server.data.webhook_manager import WebhookManager
    from src.data.api_key_manager import APIKeyManager
except ImportError:
    # Fallback for development
    class IntegrationManager:
        def get_all_integrations(self, **kwargs): return []
        def get_integrations_count(self, **kwargs): return 0
        def get_integration_by_id(self, id): return None
        def create_integration(self, **kwargs): return 1
        def update_integration(self, id, **kwargs): return True
        def delete_integration(self, id): return True
        def test_integration(self, id): return {'success': True, 'response_time': 0.1}
        def get_integration_logs(self, id, **kwargs): return []
        def trigger_integration(self, id, data): return {'success': True}
    class WebhookManager:
        def get_all_webhooks(self, **kwargs): return []
        def get_webhooks_count(self, **kwargs): return 0
        def get_webhook_by_id(self, id): return None
        def create_webhook(self, **kwargs): return 1
        def update_webhook(self, id, **kwargs): return True
        def delete_webhook(self, id): return True
        def test_webhook(self, id): return {'success': True, 'response_time': 0.1}
        def get_webhook_deliveries(self, id, **kwargs): return []
        def redeliver_webhook(self, delivery_id): return True
        def trigger_webhook(self, event, data): return True
    class APIKeyManager:
        def get_external_api_keys(self, **kwargs): return []
        def get_external_api_key_by_id(self, id): return None
        def create_external_api_key(self, **kwargs): return 1
        def update_external_api_key(self, id, **kwargs): return True
        def delete_external_api_key(self, id): return True
        def test_external_api_key(self, id): return {'success': True}
        def rotate_external_api_key(self, id): return {'new_key': 'new_api_key'}
# Create blueprint
integrations_bp = Blueprint('integrations', __name__)
# Initialize managers (module-level singletons shared by every request).
integration_manager = IntegrationManager()
webhook_manager = WebhookManager()
api_key_manager = APIKeyManager()
logger = logging.getLogger(__name__)
@integrations_bp.route('/integrations', methods=['GET'])
@require_auth
@handle_api_errors
@validate_query_params(
    allowed_params=['page', 'per_page', 'type', 'status', 'sort_by', 'sort_order'],
    param_types={'page': int, 'per_page': int}
)
@validate_pagination_params
def list_integrations() -> Tuple[Any, int]:
    """Return a paginated, optionally filtered list of integrations.

    Query Parameters:
        page: Page number (default 1).
        per_page: Items per page (default 20, capped at 100).
        type: Optional integration-type filter.
        status: Optional status filter.
        sort_by: Sort field (default ``created_at``).
        sort_order: ``asc`` or ``desc`` (default ``desc``).

    Returns:
        Paginated JSON response of integrations.
    """
    args = request.args
    page = args.get('page', 1, type=int)
    per_page = min(args.get('per_page', 20, type=int), 100)
    # The same filters are used for both the page query and the total count.
    filters = {
        'integration_type': args.get('type'),
        'status': args.get('status'),
    }
    records = integration_manager.get_all_integrations(
        offset=(page - 1) * per_page,
        limit=per_page,
        sort_by=args.get('sort_by', 'created_at'),
        sort_order=args.get('sort_order', 'desc'),
        **filters,
    )
    total = integration_manager.get_integrations_count(**filters)
    return create_paginated_response(
        records,
        page,
        per_page,
        total,
        endpoint='/api/v1/integrations'
    )
@integrations_bp.route('/integrations/<int:integration_id>', methods=['GET'])
@require_auth
@handle_api_errors
@validate_id_parameter('integration_id')
def get_integration(integration_id: int) -> Tuple[Any, int]:
    """Fetch a single integration by its ID.

    Args:
        integration_id: Integration ID from the URL path.

    Returns:
        JSON success response with the integration, or a 404 error response
        when no such integration exists.
    """
    record = integration_manager.get_integration_by_id(integration_id)
    if not record:
        return create_error_response("Integration not found", 404)
    return create_success_response("Integration retrieved successfully", 200, record)
@integrations_bp.route('/integrations', methods=['POST'])
@require_auth
@handle_api_errors
@validate_json_input(
    required_fields=['name', 'type', 'config'],
    optional_fields=['description', 'enabled'],
    field_types={'name': str, 'type': str, 'config': dict, 'description': str, 'enabled': bool}
)
def create_integration() -> Tuple[Any, int]:
    """Create a new integration from the JSON request body.

    Request Body:
        name: Integration name (required).
        type: One of webhook/api/discord/slack/email/custom (required).
        config: Type-specific configuration dict (required).
        description: Optional description.
        enabled: Optional flag, defaults to True.

    Returns:
        201 with the created integration, 400 on validation failure,
        500 when persistence fails.
    """
    payload = request.get_json()
    # Guard clause: reject unknown integration types up front.
    allowed_types = ['webhook', 'api', 'discord', 'slack', 'email', 'custom']
    if payload['type'] not in allowed_types:
        return create_error_response(f"Invalid integration type. Must be one of: {', '.join(allowed_types)}", 400)
    # Type-specific config validation before touching the data layer.
    config_errors = _validate_integration_config(payload['type'], payload['config'])
    if config_errors:
        return create_error_response("Configuration validation failed", 400, config_errors)
    try:
        new_id = integration_manager.create_integration(
            name=payload['name'],
            integration_type=payload['type'],
            config=payload['config'],
            description=payload.get('description', ''),
            enabled=payload.get('enabled', True)
        )
        created = integration_manager.get_integration_by_id(new_id)
        logger.info(f"Created integration {new_id}: {payload['name']} ({payload['type']})")
        return create_success_response("Integration created successfully", 201, created)
    except Exception as e:
        logger.error(f"Error creating integration: {str(e)}")
        return create_error_response("Failed to create integration", 500)
@integrations_bp.route('/integrations/<int:integration_id>', methods=['PUT'])
@require_auth
@handle_api_errors
@validate_id_parameter('integration_id')
@validate_json_input(
    optional_fields=['name', 'config', 'description', 'enabled'],
    field_types={'name': str, 'config': dict, 'description': str, 'enabled': bool}
)
def update_integration(integration_id: int) -> Tuple[Any, int]:
    """Partially update an existing integration.

    Any of ``name``, ``config``, ``description`` and ``enabled`` may appear in
    the JSON body; a supplied ``config`` is validated against the stored
    integration type before being applied.

    Returns:
        200 with the refreshed integration, 404 for an unknown ID, 400 on
        config validation failure, 500 when the update fails.
    """
    existing = integration_manager.get_integration_by_id(integration_id)
    if not existing:
        return create_error_response("Integration not found", 404)
    payload = request.get_json()
    if 'config' in payload:
        # The integration type is immutable here, so validate against it.
        config_errors = _validate_integration_config(existing['type'], payload['config'])
        if config_errors:
            return create_error_response("Configuration validation failed", 400, config_errors)
    try:
        if not integration_manager.update_integration(integration_id, **payload):
            return create_error_response("Failed to update integration", 500)
        refreshed = integration_manager.get_integration_by_id(integration_id)
        logger.info(f"Updated integration {integration_id}")
        return create_success_response("Integration updated successfully", 200, refreshed)
    except Exception as e:
        logger.error(f"Error updating integration {integration_id}: {str(e)}")
        return create_error_response("Failed to update integration", 500)
@integrations_bp.route('/integrations/<int:integration_id>', methods=['DELETE'])
@require_auth
@handle_api_errors
@validate_id_parameter('integration_id')
def delete_integration(integration_id: int) -> Tuple[Any, int]:
    """Delete an integration by ID.

    Returns:
        Success response on deletion, 404 for an unknown ID, 500 when the
        data layer reports failure or raises.
    """
    record = integration_manager.get_integration_by_id(integration_id)
    if not record:
        return create_error_response("Integration not found", 404)
    try:
        if integration_manager.delete_integration(integration_id):
            logger.info(f"Deleted integration {integration_id}: {record['name']}")
            return create_success_response("Integration deleted successfully")
        return create_error_response("Failed to delete integration", 500)
    except Exception as e:
        logger.error(f"Error deleting integration {integration_id}: {str(e)}")
        return create_error_response("Failed to delete integration", 500)
@integrations_bp.route('/integrations/<int:integration_id>/test', methods=['POST'])
@require_auth
@handle_api_errors
@validate_id_parameter('integration_id')
def test_integration(integration_id: int) -> Tuple[Any, int]:
    """Run the data layer's connectivity test for one integration.

    Returns:
        200 with the test result, 404 for an unknown ID, 500 on failure.
    """
    if not integration_manager.get_integration_by_id(integration_id):
        return create_error_response("Integration not found", 404)
    try:
        outcome = integration_manager.test_integration(integration_id)
    except Exception as e:
        logger.error(f"Error testing integration {integration_id}: {str(e)}")
        return create_error_response("Failed to test integration", 500)
    logger.info(f"Tested integration {integration_id}: {outcome}")
    return create_success_response("Integration test completed", 200, outcome)
@integrations_bp.route('/integrations/<int:integration_id>/trigger', methods=['POST'])
@require_auth
@handle_api_errors
@validate_id_parameter('integration_id')
@validate_json_input(
    optional_fields=['data'],
    field_types={'data': dict}
)
def trigger_integration(integration_id: int) -> Tuple[Any, int]:
    """Manually fire an integration, optionally with a custom payload.

    Request Body:
        data: Optional dict forwarded to the integration.

    Returns:
        200 with the trigger result, 404 for an unknown ID, 400 when the
        integration is disabled, 500 on failure.
    """
    record = integration_manager.get_integration_by_id(integration_id)
    if not record:
        return create_error_response("Integration not found", 404)
    if not record['enabled']:
        return create_error_response("Integration is disabled", 400)
    body = request.get_json() or {}
    try:
        outcome = integration_manager.trigger_integration(integration_id, body.get('data', {}))
        logger.info(f"Triggered integration {integration_id}")
        return create_success_response("Integration triggered successfully", 200, outcome)
    except Exception as e:
        logger.error(f"Error triggering integration {integration_id}: {str(e)}")
        return create_error_response("Failed to trigger integration", 500)
@integrations_bp.route('/integrations/<int:integration_id>/logs', methods=['GET'])
@require_auth
@handle_api_errors
@validate_id_parameter('integration_id')
@validate_query_params(
    allowed_params=['page', 'per_page', 'level'],
    param_types={'page': int, 'per_page': int}
)
@validate_pagination_params
def get_integration_logs(integration_id: int) -> Tuple[Any, int]:
    """Return paginated execution logs for one integration.

    Query Parameters:
        page: Page number (default 1).
        per_page: Items per page (default 50, capped at 200).
        level: Optional log-level filter.

    Returns:
        Paginated JSON response of log entries, 404 for an unknown ID,
        500 when the data layer fails.
    """
    if not integration_manager.get_integration_by_id(integration_id):
        return create_error_response("Integration not found", 404)
    page = request.args.get('page', 1, type=int)
    per_page = min(request.args.get('per_page', 50, type=int), 200)
    level = request.args.get('level')
    try:
        entries = integration_manager.get_integration_logs(
            integration_id,
            offset=(page - 1) * per_page,
            limit=per_page,
            level=level
        )
        # NOTE: the total is approximated by the page size until the manager
        # exposes a dedicated count method.
        return create_paginated_response(
            entries,
            page,
            per_page,
            len(entries),
            endpoint=f'/api/v1/integrations/{integration_id}/logs'
        )
    except Exception as e:
        logger.error(f"Error getting integration logs for {integration_id}: {str(e)}")
        return create_error_response("Failed to get integration logs", 500)
@integrations_bp.route('/webhooks', methods=['GET'])
@require_auth
@handle_api_errors
@validate_query_params(
    allowed_params=['page', 'per_page', 'event', 'status'],
    param_types={'page': int, 'per_page': int}
)
@validate_pagination_params
def list_webhooks() -> Tuple[Any, int]:
    """Return a paginated, optionally filtered list of webhooks.

    Query Parameters:
        page: Page number (default 1).
        per_page: Items per page (default 20, capped at 100).
        event: Optional event-type filter.
        status: Optional status filter.

    Returns:
        Paginated JSON response of webhooks.
    """
    args = request.args
    page = args.get('page', 1, type=int)
    per_page = min(args.get('per_page', 20, type=int), 100)
    # Shared by the page query and the count query.
    filters = {'event': args.get('event'), 'status': args.get('status')}
    items = webhook_manager.get_all_webhooks(
        offset=(page - 1) * per_page,
        limit=per_page,
        **filters,
    )
    total = webhook_manager.get_webhooks_count(**filters)
    return create_paginated_response(
        items,
        page,
        per_page,
        total,
        endpoint='/api/v1/webhooks'
    )
@integrations_bp.route('/webhooks', methods=['POST'])
@require_auth
@handle_api_errors
@validate_json_input(
    required_fields=['url', 'events'],
    optional_fields=['name', 'secret', 'enabled', 'retry_config'],
    field_types={'url': str, 'events': list, 'name': str, 'secret': str, 'enabled': bool, 'retry_config': dict}
)
def create_webhook() -> Tuple[Any, int]:
    """Register a new webhook subscription.

    Request Body:
        url: Delivery URL (required).
        events: Event names to subscribe to (required).
        name/secret/enabled/retry_config: Optional settings.

    Returns:
        201 with the created webhook, 400 on validation failure,
        500 when persistence fails.
    """
    payload = request.get_json()
    # Guard clause: the delivery URL must be well-formed.
    if not is_valid_url(payload['url']):
        return create_error_response("Invalid webhook URL", 400)
    # Every requested event must be in the supported catalogue.
    allowed_events = [
        'anime.created', 'anime.updated', 'anime.deleted',
        'episode.created', 'episode.updated', 'episode.deleted',
        'download.started', 'download.completed', 'download.failed',
        'backup.created', 'backup.restored', 'system.error'
    ]
    invalid_events = [event for event in payload['events'] if event not in allowed_events]
    if invalid_events:
        return create_error_response(f"Invalid events: {', '.join(invalid_events)}", 400)
    try:
        new_id = webhook_manager.create_webhook(
            url=payload['url'],
            events=payload['events'],
            name=payload.get('name', ''),
            secret=payload.get('secret', ''),
            enabled=payload.get('enabled', True),
            retry_config=payload.get('retry_config', {})
        )
        created = webhook_manager.get_webhook_by_id(new_id)
        logger.info(f"Created webhook {new_id}: {payload['url']}")
        return create_success_response("Webhook created successfully", 201, created)
    except Exception as e:
        logger.error(f"Error creating webhook: {str(e)}")
        return create_error_response("Failed to create webhook", 500)
@integrations_bp.route('/webhooks/<int:webhook_id>/test', methods=['POST'])
@require_auth
@handle_api_errors
@validate_id_parameter('webhook_id')
def test_webhook(webhook_id: int) -> Tuple[Any, int]:
    """Send a test delivery for one webhook.

    Returns:
        200 with the test result, 404 for an unknown ID, 500 on failure.
    """
    if not webhook_manager.get_webhook_by_id(webhook_id):
        return create_error_response("Webhook not found", 404)
    try:
        outcome = webhook_manager.test_webhook(webhook_id)
    except Exception as e:
        logger.error(f"Error testing webhook {webhook_id}: {str(e)}")
        return create_error_response("Failed to test webhook", 500)
    logger.info(f"Tested webhook {webhook_id}: {outcome}")
    return create_success_response("Webhook test completed", 200, outcome)
@integrations_bp.route('/api-keys/external', methods=['GET'])
@require_auth
@handle_api_errors
@validate_pagination_params
def list_external_api_keys() -> Tuple[Any, int]:
    """Return all stored external API keys (values are not exposed).

    Returns:
        200 with the key list, 500 when the data layer fails.
    """
    try:
        keys = api_key_manager.get_external_api_keys()
    except Exception as e:
        logger.error(f"Error getting external API keys: {str(e)}")
        return create_error_response("Failed to get external API keys", 500)
    return create_success_response("External API keys retrieved successfully", 200, keys)
@integrations_bp.route('/api-keys/external', methods=['POST'])
@require_auth
@handle_api_errors
@validate_json_input(
    required_fields=['service', 'key'],
    optional_fields=['name', 'description'],
    field_types={'service': str, 'key': str, 'name': str, 'description': str}
)
def create_external_api_key() -> Tuple[Any, int]:
    """Store an API key for an external service.

    Request Body:
        service: Service name (required).
        key: Secret key value (required; never echoed back).
        name/description: Optional metadata.

    Returns:
        201 with the stored key's metadata, 500 when persistence fails.
    """
    payload = request.get_json()
    try:
        key_id = api_key_manager.create_external_api_key(
            service=payload['service'],
            key=payload['key'],
            name=payload.get('name', ''),
            description=payload.get('description', '')
        )
        # Re-read through the manager so the secret value is not exposed.
        stored = api_key_manager.get_external_api_key_by_id(key_id)
        logger.info(f"Created external API key {key_id} for service: {payload['service']}")
        return create_success_response("External API key created successfully", 201, stored)
    except Exception as e:
        logger.error(f"Error creating external API key: {str(e)}")
        return create_error_response("Failed to create external API key", 500)
def _validate_integration_config(integration_type: str, config: Dict[str, Any]) -> List[str]:
    """Validate an integration's configuration dict for its type.

    Args:
        integration_type: One of the supported integration types.
        config: The type-specific configuration to check.

    Returns:
        A list of human-readable validation errors; empty when valid.
    """
    errors: List[str] = []
    if integration_type == 'webhook':
        if 'url' not in config:
            errors.append("Webhook URL is required")
        elif not is_valid_url(config['url']):
            errors.append("Invalid webhook URL")
    elif integration_type in ('discord', 'slack'):
        # Both chat services share the same shape: a webhook_url that must
        # carry the provider-specific prefix.
        prefix = ('https://discord.com/api/webhooks/' if integration_type == 'discord'
                  else 'https://hooks.slack.com/')
        label = integration_type.capitalize()
        if 'webhook_url' not in config:
            errors.append(f"{label} webhook URL is required")
        elif not config['webhook_url'].startswith(prefix):
            errors.append(f"Invalid {label} webhook URL")
    elif integration_type == 'email':
        for field in ('smtp_host', 'smtp_port', 'from_email'):
            if field not in config:
                errors.append(f"{field} is required for email integration")
    elif integration_type == 'api':
        if 'base_url' not in config:
            errors.append("Base URL is required for API integration")
        elif not is_valid_url(config['base_url']):
            errors.append("Invalid API base URL")
    return errors

View File

@@ -1,268 +0,0 @@
"""
API endpoints for logging configuration and management.
"""
from flask import Blueprint, jsonify, request, send_file
from web.controllers.auth_controller import require_auth
from config import config
import logging
import os
from datetime import datetime
logger = logging.getLogger(__name__)
logging_bp = Blueprint('logging', __name__, url_prefix='/api/logging')
@logging_bp.route('/config', methods=['GET'])
@require_auth
def get_logging_config():
    """Return the current logging configuration as JSON."""
    try:
        # Imported lazily to avoid circular imports; a failed import falls
        # through to the generic error response below.
        from src.infrastructure.logging.GlobalLogger import error_logger  # noqa: F401
        return jsonify({
            'success': True,
            'config': {
                'log_level': config.log_level,
                'enable_console_logging': config.enable_console_logging,
                'enable_console_progress': config.enable_console_progress,
                'enable_fail2ban_logging': config.enable_fail2ban_logging,
                'log_files': [
                    './logs/aniworld.log',
                    './logs/auth_failures.log',
                    './logs/downloads.log'
                ]
            }
        })
    except Exception as e:
        logger.error(f"Error getting logging config: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
@logging_bp.route('/config', methods=['POST'])
@require_auth
def update_logging_config():
    """Update logging configuration from the JSON request body.

    Accepted keys: ``log_level`` (one of DEBUG/INFO/WARNING/ERROR/CRITICAL;
    invalid values are silently ignored) and the booleans
    ``enable_console_logging``, ``enable_console_progress`` and
    ``enable_fail2ban_logging``.  Settings are persisted via
    ``config.save_config()`` and the root logger level is updated in place.

    Returns:
        JSON response echoing the effective configuration, or a 500 error
        response on failure.
    """
    try:
        data = request.get_json() or {}
        # Update log level (invalid values keep the current level).
        log_level = data.get('log_level', config.log_level)
        if log_level in ['DEBUG', 'INFO', 'WARNING', 'ERROR', 'CRITICAL']:
            config.log_level = log_level
        # Update console logging settings
        if 'enable_console_logging' in data:
            config.enable_console_logging = bool(data['enable_console_logging'])
        if 'enable_console_progress' in data:
            config.enable_console_progress = bool(data['enable_console_progress'])
        if 'enable_fail2ban_logging' in data:
            config.enable_fail2ban_logging = bool(data['enable_fail2ban_logging'])
        # Save configuration
        config.save_config()
        # Apply the new level to the root logger.  BUG FIX: the previous
        # try/except ImportError ran byte-identical code in both branches;
        # the GlobalLogger import is kept only as a best-effort probe (it may
        # configure handlers as a side effect), and the level update now
        # happens unconditionally.
        try:
            from src.infrastructure.logging.GlobalLogger import error_logger  # noqa: F401
        except ImportError:
            pass
        numeric_level = getattr(logging, config.log_level.upper(), logging.INFO)
        logging.getLogger().setLevel(numeric_level)
        logger.info(f"Logging configuration updated: level={config.log_level}, console={config.enable_console_logging}")
        return jsonify({
            'success': True,
            'message': 'Logging configuration updated successfully',
            'config': {
                'log_level': config.log_level,
                'enable_console_logging': config.enable_console_logging,
                'enable_console_progress': config.enable_console_progress,
                'enable_fail2ban_logging': config.enable_fail2ban_logging
            }
        })
    except Exception as e:
        logger.error(f"Error updating logging config: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
@logging_bp.route('/files', methods=['GET'])
@require_auth
def list_log_files():
    """Return the list of known log file paths."""
    try:
        # Lazy import kept as an availability probe; failure yields a 500.
        from src.infrastructure.logging.GlobalLogger import error_logger  # noqa: F401
        return jsonify({
            'success': True,
            'files': [
                './logs/aniworld.log',
                './logs/auth_failures.log',
                './logs/downloads.log'
            ]
        })
    except Exception as e:
        logger.error(f"Error listing log files: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
@logging_bp.route('/files/<filename>/download', methods=['GET'])
@require_auth
def download_log_file(filename):
    """Download a specific log file as an attachment.

    Only ``.log`` files that resolve inside the ``logs`` directory can be
    downloaded; anything else is rejected with 400/404.
    """
    try:
        # Security: Only allow log files
        if not filename.endswith('.log'):
            return jsonify({
                'success': False,
                'error': 'Invalid file type'
            }), 400
        log_directory = "logs"
        file_path = os.path.join(log_directory, filename)
        # Security: the resolved path must live inside the log directory.
        # Appending os.sep avoids the prefix pitfall of a bare startswith()
        # (e.g. a sibling directory named "logs-old" would match "logs").
        abs_root = os.path.abspath(log_directory) + os.sep
        if not os.path.exists(file_path) or not os.path.abspath(file_path).startswith(abs_root):
            return jsonify({
                'success': False,
                'error': 'File not found'
            }), 404
        # BUG FIX: the download name and the error log previously contained
        # the literal "(unknown)" instead of the requested filename.
        return send_file(
            file_path,
            as_attachment=True,
            download_name=f"{filename}_{datetime.now().strftime('%Y%m%d_%H%M%S')}"
        )
    except Exception as e:
        logger.error(f"Error downloading log file {filename}: {e}")
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500
@logging_bp.route('/files/<filename>/tail', methods=['GET'])
@require_auth
def tail_log_file(filename):
    """Return the last N lines of a log file.

    Query Parameters:
        lines: Number of lines to return (default 100, clamped to 1..1000).

    Only ``.log`` files that resolve inside the ``logs`` directory are
    readable; anything else is rejected with 400/404.
    """
    try:
        from collections import deque  # stdlib; local import keeps module imports unchanged
        # Security: Only allow log files
        if not filename.endswith('.log'):
            return jsonify({
                'success': False,
                'error': 'Invalid file type'
            }), 400
        # BUG FIX: clamp to 1..1000 — the old code only capped the upper
        # bound, so 0 or negative values produced nonsensical slices.
        lines = int(request.args.get('lines', 100))
        lines = max(1, min(lines, 1000))
        log_directory = "logs"
        file_path = os.path.join(log_directory, filename)
        # Security: the resolved path must live inside the log directory.
        # Appending os.sep avoids matching sibling dirs like "logs-old".
        abs_root = os.path.abspath(log_directory) + os.sep
        if not os.path.exists(file_path) or not os.path.abspath(file_path).startswith(abs_root):
            return jsonify({
                'success': False,
                'error': 'File not found'
            }), 404
        # Stream the file once, keeping only the last N lines in memory
        # (the previous readlines() call materialized the whole file).
        total_lines = 0
        tail_lines = deque(maxlen=lines)
        with open(file_path, 'r', encoding='utf-8') as f:
            for line in f:
                total_lines += 1
                tail_lines.append(line)
        return jsonify({
            'success': True,
            'lines': [line.rstrip('\n\r') for line in tail_lines],
            'total_lines': total_lines,
            'showing_lines': len(tail_lines)
        })
    except Exception as e:
        # BUG FIX: the error log previously contained the literal "(unknown)"
        # instead of the requested filename.
        logger.error(f"Error tailing log file {filename}: {e}")
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500
@logging_bp.route('/cleanup', methods=['POST'])
@require_auth
def cleanup_logs():
    """Clean up old log files (currently a stub that removes nothing)."""
    try:
        payload = request.get_json() or {}
        # Clamp the retention window to 1..365 days.
        days = max(1, min(int(payload.get('days', 30)), 365))
        from src.infrastructure.logging.GlobalLogger import error_logger  # noqa: F401
        # log_config.cleanup_old_logs() does not exist yet, so the cleanup is
        # simulated and an empty result is reported.
        cleaned_files = []
        logger.info(f"Cleaned up {len(cleaned_files)} old log files (older than {days} days)")
        return jsonify({
            'success': True,
            'message': f'Cleaned up {len(cleaned_files)} log files',
            'cleaned_files': cleaned_files
        })
    except Exception as e:
        logger.error(f"Error cleaning up logs: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
@logging_bp.route('/test', methods=['POST'])
@require_auth
def test_logging():
    """Emit one test message at each standard log level.

    Returns:
        JSON success response, or a 500 error response if logging fails.
    """
    try:
        test_message = "Test log message from web interface"
        # Test different log levels
        logger.debug(f"DEBUG: {test_message}")
        logger.info(f"INFO: {test_message}")
        logger.warning(f"WARNING: {test_message}")
        logger.error(f"ERROR: {test_message}")
        # CLEANUP: the original code probed the GlobalLogger import twice
        # (once for fail2ban, once for download progress) with identical
        # try/import/pass bodies; a single best-effort probe is equivalent.
        try:
            from src.infrastructure.logging.GlobalLogger import error_logger  # noqa: F401
            # log_auth_failure / log_download_progress would be exercised here.
        except ImportError:
            pass
        return jsonify({
            'success': True,
            'message': 'Test messages logged successfully'
        })
    except Exception as e:
        logger.error(f"Error testing logging: {e}")
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500

View File

@@ -1,656 +0,0 @@
"""
Maintenance API endpoints.
This module handles all system maintenance operations including:
- Database maintenance
- System optimization
- Cleanup operations
- Scheduled maintenance tasks
"""
from flask import Blueprint, request, jsonify
from typing import Dict, List, Any, Optional, Tuple
import logging
import os
import time
import sqlite3
from datetime import datetime, timedelta
# Import shared utilities.
# NOTE(review): falls back to no-op stand-ins when the shared helpers are not
# importable (e.g. when this module is exercised in isolation).
try:
    from src.server.web.controllers.shared.auth_decorators import require_auth
    from src.server.web.controllers.shared.error_handlers import handle_api_errors
    from src.server.web.controllers.shared.validators import validate_json_input, validate_query_params
    from src.server.web.controllers.shared.response_helpers import (
        create_success_response, create_error_response, format_file_size, format_datetime
    )
except ImportError:
    # Fallback imports for development: identity decorators, pass-through
    # decorator factories, and minimal jsonify-based response/format helpers.
    def require_auth(f): return f
    def handle_api_errors(f): return f
    def validate_json_input(**kwargs): return lambda f: f
    def validate_query_params(**kwargs): return lambda f: f
    def create_success_response(msg, code=200, data=None): return jsonify({'success': True, 'message': msg, 'data': data}), code
    def create_error_response(msg, code=400, details=None): return jsonify({'error': msg, 'details': details}), code
    def format_file_size(size): return f"{size} bytes"
    def format_datetime(dt): return str(dt) if dt else None
# Import maintenance components.
# NOTE(review): the stub classes below return canned values so the endpoints
# stay importable and testable in development without the real data layer.
try:
    from src.server.data.database_manager import DatabaseManager
    from src.server.data.cleanup_manager import CleanupManager
    from src.server.data.scheduler_manager import SchedulerManager
except ImportError:
    # Fallback for development
    class DatabaseManager:
        def vacuum_database(self): return {'size_before': 1000000, 'size_after': 800000, 'time_taken': 5.2}
        def analyze_database(self): return {'tables_analyzed': 10, 'time_taken': 2.1}
        def integrity_check(self): return {'status': 'ok', 'errors': [], 'warnings': []}
        def reindex_database(self): return {'indexes_rebuilt': 15, 'time_taken': 3.5}
        def get_database_stats(self): return {'size': 10000000, 'tables': 10, 'indexes': 15}
        def optimize_database(self): return {'optimizations': ['vacuum', 'analyze', 'reindex'], 'time_taken': 10.7}
        def backup_database(self, path): return {'backup_file': path, 'size': 5000000}
        def get_slow_queries(self, **kwargs): return []
    class CleanupManager:
        def cleanup_temp_files(self): return {'files_deleted': 50, 'space_freed': 1048576}
        def cleanup_logs(self, **kwargs): return {'logs_deleted': 100, 'space_freed': 2097152}
        def cleanup_downloads(self, **kwargs): return {'downloads_cleaned': 25, 'space_freed': 5242880}
        def cleanup_cache(self): return {'cache_cleared': True, 'space_freed': 10485760}
        def cleanup_old_backups(self, **kwargs): return {'backups_deleted': 5, 'space_freed': 52428800}
        def get_cleanup_stats(self): return {'temp_files': 100, 'log_files': 200, 'cache_size': 50000000}
    class SchedulerManager:
        def get_scheduled_tasks(self): return []
        def create_scheduled_task(self, **kwargs): return 1
        def update_scheduled_task(self, id, **kwargs): return True
        def delete_scheduled_task(self, id): return True
        def get_task_history(self, **kwargs): return []
# Create blueprint
maintenance_bp = Blueprint('maintenance', __name__)
# Initialize managers (module-level singletons shared by every request).
database_manager = DatabaseManager()
cleanup_manager = CleanupManager()
scheduler_manager = SchedulerManager()
logger = logging.getLogger(__name__)
@maintenance_bp.route('/maintenance/database/vacuum', methods=['POST'])
@require_auth
@handle_api_errors
def vacuum_database() -> Tuple[Any, int]:
    """Run a database vacuum and report timing and space savings.

    Returns:
        200 with the vacuum results (including ``operation_time`` and a
        human-readable ``space_saved``), or 500 on failure.
    """
    try:
        logger.info("Starting database vacuum operation")
        started = time.time()
        result = database_manager.vacuum_database()
        elapsed = time.time() - started
        result['operation_time'] = round(elapsed, 2)
        space_saved = result.get('size_before', 0) - result.get('size_after', 0)
        result['space_saved'] = format_file_size(space_saved)
        logger.info(f"Database vacuum completed in {elapsed:.2f} seconds, saved {space_saved} bytes")
        return create_success_response("Database vacuum completed successfully", 200, result)
    except Exception as e:
        logger.error(f"Error during database vacuum: {str(e)}")
        return create_error_response("Database vacuum failed", 500)
@maintenance_bp.route('/maintenance/database/analyze', methods=['POST'])
@require_auth
@handle_api_errors
def analyze_database() -> Tuple[Any, int]:
    """Run ANALYZE to refresh the query planner's statistics.

    Returns:
        200 with the analyze results (including ``operation_time``),
        or 500 on failure.
    """
    try:
        logger.info("Starting database analyze operation")
        started = time.time()
        result = database_manager.analyze_database()
        elapsed = time.time() - started
        result['operation_time'] = round(elapsed, 2)
        logger.info(f"Database analyze completed in {elapsed:.2f} seconds")
        return create_success_response("Database analyze completed successfully", 200, result)
    except Exception as e:
        logger.error(f"Error during database analyze: {str(e)}")
        return create_error_response("Database analyze failed", 500)
@maintenance_bp.route('/maintenance/database/integrity-check', methods=['POST'])
@require_auth
@handle_api_errors
def integrity_check() -> Tuple[Any, int]:
    """
    Perform database integrity check.
    Returns:
        JSON response with integrity check results
    """
    try:
        logger.info("Starting database integrity check")
        start_time = time.time()
        result = database_manager.integrity_check()
        operation_time = time.time() - start_time
        result['operation_time'] = round(operation_time, 2)
        result['timestamp'] = datetime.now().isoformat()
        # Both branches return HTTP 200; the payload carries the verdict.
        if result['status'] == 'ok':
            logger.info("Database integrity check passed in %.2f seconds", operation_time)
            return create_success_response("Database integrity check passed", 200, result)
        else:
            logger.warning("Database integrity check found issues: %s", result['errors'])
            return create_success_response("Database integrity check completed with issues", 200, result)
    except Exception as e:
        logger.error("Error during database integrity check: %s", e)
        return create_error_response("Database integrity check failed", 500)
@maintenance_bp.route('/maintenance/database/reindex', methods=['POST'])
@require_auth
@handle_api_errors
def reindex_database() -> Tuple[Any, int]:
    """
    Rebuild database indexes for optimal performance.
    Returns:
        JSON response with reindex operation results
    """
    try:
        logger.info("Starting database reindex operation")
        start_time = time.time()
        result = database_manager.reindex_database()
        operation_time = time.time() - start_time
        result['operation_time'] = round(operation_time, 2)
        # Lazy %-args instead of eager f-string formatting.
        logger.info("Database reindex completed in %.2f seconds, rebuilt %s indexes", operation_time, result.get('indexes_rebuilt', 0))
        return create_success_response("Database reindex completed successfully", 200, result)
    except Exception as e:
        logger.error("Error during database reindex: %s", e)
        return create_error_response("Database reindex failed", 500)
@maintenance_bp.route('/maintenance/database/optimize', methods=['POST'])
@require_auth
@handle_api_errors
@validate_json_input(
    optional_fields=['operations', 'force'],
    field_types={'operations': list, 'force': bool}
)
def optimize_database() -> Tuple[Any, int]:
    """
    Perform comprehensive database optimization.
    Request Body:
        - operations: List of operations to perform (optional, default: all)
        - force: Force optimization even if recently performed (optional, default: false)
    Returns:
        JSON response with optimization results
    """
    data = request.get_json() or {}
    operations = data.get('operations', ['vacuum', 'analyze', 'reindex'])
    force = data.get('force', False)
    # Validate operations
    allowed_operations = ['vacuum', 'analyze', 'reindex', 'integrity_check']
    invalid_operations = [op for op in operations if op not in allowed_operations]
    if invalid_operations:
        return create_error_response(f"Invalid operations: {', '.join(invalid_operations)}", 400)
    try:
        logger.info("Starting database optimization with operations: %s", operations)
        start_time = time.time()
        result = database_manager.optimize_database(
            operations=operations,
            force=force
        )
        operation_time = time.time() - start_time
        result['operation_time'] = round(operation_time, 2)
        result['timestamp'] = datetime.now().isoformat()
        logger.info("Database optimization completed in %.2f seconds", operation_time)
        return create_success_response("Database optimization completed successfully", 200, result)
    except Exception as e:
        logger.error("Error during database optimization: %s", e)
        return create_error_response("Database optimization failed", 500)
@maintenance_bp.route('/maintenance/database/stats', methods=['GET'])
@require_auth
@handle_api_errors
def get_database_stats() -> Tuple[Any, int]:
    """
    Get database statistics and health information.
    Returns:
        JSON response with database statistics
    """
    try:
        stats = database_manager.get_database_stats()
        # Add formatted values
        if 'size' in stats:
            stats['size_formatted'] = format_file_size(stats['size'])
        # Attach the ten slowest recorded queries for diagnostics.
        slow_queries = database_manager.get_slow_queries(limit=10)
        stats['slow_queries'] = slow_queries
        return create_success_response("Database statistics retrieved successfully", 200, stats)
    except Exception as e:
        logger.error("Error getting database stats: %s", e)
        return create_error_response("Failed to get database statistics", 500)
@maintenance_bp.route('/maintenance/cleanup/temp-files', methods=['POST'])
@require_auth
@handle_api_errors
def cleanup_temp_files() -> Tuple[Any, int]:
    """
    Clean up temporary files.
    Returns:
        JSON response with cleanup results
    """
    try:
        logger.info("Starting temporary files cleanup")
        result = cleanup_manager.cleanup_temp_files()
        result['space_freed_formatted'] = format_file_size(result.get('space_freed', 0))
        result['timestamp'] = datetime.now().isoformat()
        # Lazy %-args instead of eager f-string formatting.
        logger.info("Temporary files cleanup completed: %s files deleted, %s bytes freed", result['files_deleted'], result['space_freed'])
        return create_success_response("Temporary files cleanup completed", 200, result)
    except Exception as e:
        logger.error("Error during temp files cleanup: %s", e)
        return create_error_response("Temporary files cleanup failed", 500)
@maintenance_bp.route('/maintenance/cleanup/logs', methods=['POST'])
@require_auth
@handle_api_errors
@validate_json_input(
    optional_fields=['older_than_days', 'keep_recent'],
    field_types={'older_than_days': int, 'keep_recent': int}
)
def cleanup_logs() -> Tuple[Any, int]:
    """
    Clean up old log files.
    Request Body:
        - older_than_days: Delete logs older than this many days (optional, default: 30)
        - keep_recent: Number of recent log files to keep (optional, default: 10)
    Returns:
        JSON response with cleanup results
    """
    data = request.get_json() or {}
    older_than_days = data.get('older_than_days', 30)
    keep_recent = data.get('keep_recent', 10)
    try:
        logger.info("Starting log cleanup: older than %s days, keep %s recent", older_than_days, keep_recent)
        result = cleanup_manager.cleanup_logs(
            older_than_days=older_than_days,
            keep_recent=keep_recent
        )
        result['space_freed_formatted'] = format_file_size(result.get('space_freed', 0))
        result['timestamp'] = datetime.now().isoformat()
        logger.info("Log cleanup completed: %s logs deleted, %s bytes freed", result['logs_deleted'], result['space_freed'])
        return create_success_response("Log cleanup completed", 200, result)
    except Exception as e:
        logger.error("Error during log cleanup: %s", e)
        return create_error_response("Log cleanup failed", 500)
@maintenance_bp.route('/maintenance/cleanup/downloads', methods=['POST'])
@require_auth
@handle_api_errors
@validate_json_input(
    optional_fields=['remove_failed', 'remove_incomplete', 'older_than_days'],
    field_types={'remove_failed': bool, 'remove_incomplete': bool, 'older_than_days': int}
)
def cleanup_downloads() -> Tuple[Any, int]:
    """
    Clean up download files and records.
    Request Body:
        - remove_failed: Remove failed downloads (optional, default: true)
        - remove_incomplete: Remove incomplete downloads (optional, default: false)
        - older_than_days: Remove downloads older than this many days (optional)
    Returns:
        JSON response with cleanup results
    """
    data = request.get_json() or {}
    remove_failed = data.get('remove_failed', True)
    remove_incomplete = data.get('remove_incomplete', False)
    # None means "no age limit"; passed through as-is.
    older_than_days = data.get('older_than_days')
    try:
        logger.info("Starting download cleanup: failed=%s, incomplete=%s, older_than=%s", remove_failed, remove_incomplete, older_than_days)
        result = cleanup_manager.cleanup_downloads(
            remove_failed=remove_failed,
            remove_incomplete=remove_incomplete,
            older_than_days=older_than_days
        )
        result['space_freed_formatted'] = format_file_size(result.get('space_freed', 0))
        result['timestamp'] = datetime.now().isoformat()
        logger.info("Download cleanup completed: %s downloads cleaned, %s bytes freed", result['downloads_cleaned'], result['space_freed'])
        return create_success_response("Download cleanup completed", 200, result)
    except Exception as e:
        logger.error("Error during download cleanup: %s", e)
        return create_error_response("Download cleanup failed", 500)
@maintenance_bp.route('/maintenance/cleanup/cache', methods=['POST'])
@require_auth
@handle_api_errors
def cleanup_cache() -> Tuple[Any, int]:
    """
    Clear application cache.
    Returns:
        JSON response with cleanup results
    """
    try:
        logger.info("Starting cache cleanup")
        result = cleanup_manager.cleanup_cache()
        result['space_freed_formatted'] = format_file_size(result.get('space_freed', 0))
        result['timestamp'] = datetime.now().isoformat()
        # Lazy %-args instead of eager f-string formatting.
        logger.info("Cache cleanup completed: %s bytes freed", result['space_freed'])
        return create_success_response("Cache cleanup completed", 200, result)
    except Exception as e:
        logger.error("Error during cache cleanup: %s", e)
        return create_error_response("Cache cleanup failed", 500)
@maintenance_bp.route('/maintenance/cleanup/backups', methods=['POST'])
@require_auth
@handle_api_errors
@validate_json_input(
    optional_fields=['keep_count', 'older_than_days'],
    field_types={'keep_count': int, 'older_than_days': int}
)
def cleanup_old_backups() -> Tuple[Any, int]:
    """
    Clean up old backup files.
    Request Body:
        - keep_count: Number of recent backups to keep (optional, default: 10)
        - older_than_days: Delete backups older than this many days (optional, default: 90)
    Returns:
        JSON response with cleanup results
    """
    data = request.get_json() or {}
    keep_count = data.get('keep_count', 10)
    older_than_days = data.get('older_than_days', 90)
    try:
        logger.info("Starting backup cleanup: keep %s backups, older than %s days", keep_count, older_than_days)
        result = cleanup_manager.cleanup_old_backups(
            keep_count=keep_count,
            older_than_days=older_than_days
        )
        result['space_freed_formatted'] = format_file_size(result.get('space_freed', 0))
        result['timestamp'] = datetime.now().isoformat()
        logger.info("Backup cleanup completed: %s backups deleted, %s bytes freed", result['backups_deleted'], result['space_freed'])
        return create_success_response("Backup cleanup completed", 200, result)
    except Exception as e:
        logger.error("Error during backup cleanup: %s", e)
        return create_error_response("Backup cleanup failed", 500)
@maintenance_bp.route('/maintenance/cleanup/stats', methods=['GET'])
@require_auth
@handle_api_errors
def get_cleanup_stats() -> Tuple[Any, int]:
    """
    Get cleanup statistics and recommendations.
    Returns:
        JSON response with cleanup statistics
    """
    try:
        stats = cleanup_manager.get_cleanup_stats()
        # Add human-readable sizes alongside the raw byte counts.
        for key in ['temp_files_size', 'log_files_size', 'cache_size', 'old_backups_size']:
            if key in stats:
                stats[f"{key}_formatted"] = format_file_size(stats[key])
        # Heuristic recommendations based on fixed thresholds.
        recommendations = []
        if stats.get('temp_files', 0) > 100:
            recommendations.append("Consider cleaning temporary files")
        if stats.get('log_files_size', 0) > 100 * 1024 * 1024:  # 100MB
            recommendations.append("Consider cleaning old log files")
        if stats.get('cache_size', 0) > 500 * 1024 * 1024:  # 500MB
            recommendations.append("Consider clearing cache")
        stats['recommendations'] = recommendations
        return create_success_response("Cleanup statistics retrieved successfully", 200, stats)
    except Exception as e:
        logger.error("Error getting cleanup stats: %s", e)
        return create_error_response("Failed to get cleanup statistics", 500)
@maintenance_bp.route('/maintenance/scheduled-tasks', methods=['GET'])
@require_auth
@handle_api_errors
def get_scheduled_tasks() -> Tuple[Any, int]:
    """
    Get scheduled maintenance tasks.
    Returns:
        JSON response with scheduled tasks
    """
    try:
        tasks = scheduler_manager.get_scheduled_tasks()
        return create_success_response("Scheduled tasks retrieved successfully", 200, tasks)
    except Exception as e:
        # Lazy %-args instead of eager f-string formatting.
        logger.error("Error getting scheduled tasks: %s", e)
        return create_error_response("Failed to get scheduled tasks", 500)
@maintenance_bp.route('/maintenance/scheduled-tasks', methods=['POST'])
@require_auth
@handle_api_errors
@validate_json_input(
    required_fields=['name', 'task_type', 'schedule'],
    optional_fields=['config', 'enabled'],
    field_types={'name': str, 'task_type': str, 'schedule': str, 'config': dict, 'enabled': bool}
)
def create_scheduled_task() -> Tuple[Any, int]:
    """
    Create a new scheduled maintenance task.
    Request Body:
        - name: Task name (required)
        - task_type: Type of task (required)
        - schedule: Cron-style schedule (required)
        - config: Task configuration (optional)
        - enabled: Whether task is enabled (optional, default: true)
    Returns:
        JSON response with created task
    """
    # validate_json_input guarantees the required fields are present.
    data = request.get_json()
    # Validate task type against the closed set of supported jobs.
    allowed_task_types = [
        'database_vacuum', 'database_analyze', 'cleanup_temp_files',
        'cleanup_logs', 'cleanup_downloads', 'cleanup_cache', 'backup_database'
    ]
    if data['task_type'] not in allowed_task_types:
        return create_error_response(f"Invalid task type. Must be one of: {', '.join(allowed_task_types)}", 400)
    try:
        task_id = scheduler_manager.create_scheduled_task(
            name=data['name'],
            task_type=data['task_type'],
            schedule=data['schedule'],
            config=data.get('config', {}),
            enabled=data.get('enabled', True)
        )
        logger.info("Created scheduled task %s: %s (%s)", task_id, data['name'], data['task_type'])
        return create_success_response("Scheduled task created successfully", 201, {'id': task_id})
    except Exception as e:
        logger.error("Error creating scheduled task: %s", e)
        return create_error_response("Failed to create scheduled task", 500)
@maintenance_bp.route('/maintenance/scheduled-tasks/<int:task_id>', methods=['PUT'])
@require_auth
@handle_api_errors
@validate_json_input(
    optional_fields=['name', 'schedule', 'config', 'enabled'],
    field_types={'name': str, 'schedule': str, 'config': dict, 'enabled': bool}
)
def update_scheduled_task(task_id: int) -> Tuple[Any, int]:
    """
    Update a scheduled maintenance task.
    Args:
        task_id: Task ID
    Request Body:
        - name: Task name (optional)
        - schedule: Cron-style schedule (optional)
        - config: Task configuration (optional)
        - enabled: Whether task is enabled (optional)
    Returns:
        JSON response with update result
    """
    # All fields are optional, so get_json() can return None for an
    # empty body; fall back to {} so the **data expansion below is safe.
    data = request.get_json() or {}
    try:
        success = scheduler_manager.update_scheduled_task(task_id, **data)
        if success:
            logger.info("Updated scheduled task %s", task_id)
            return create_success_response("Scheduled task updated successfully")
        else:
            return create_error_response("Scheduled task not found", 404)
    except Exception as e:
        logger.error("Error updating scheduled task %s: %s", task_id, e)
        return create_error_response("Failed to update scheduled task", 500)
@maintenance_bp.route('/maintenance/scheduled-tasks/<int:task_id>', methods=['DELETE'])
@require_auth
@handle_api_errors
def delete_scheduled_task(task_id: int) -> Tuple[Any, int]:
    """
    Delete a scheduled maintenance task.
    Args:
        task_id: Task ID
    Returns:
        JSON response with deletion result
    """
    try:
        success = scheduler_manager.delete_scheduled_task(task_id)
        if success:
            logger.info("Deleted scheduled task %s", task_id)
            return create_success_response("Scheduled task deleted successfully")
        else:
            return create_error_response("Scheduled task not found", 404)
    except Exception as e:
        logger.error("Error deleting scheduled task %s: %s", task_id, e)
        return create_error_response("Failed to delete scheduled task", 500)
@maintenance_bp.route('/maintenance/history', methods=['GET'])
@require_auth
@handle_api_errors
@validate_query_params(
    allowed_params=['task_type', 'days', 'limit'],
    param_types={'days': int, 'limit': int}
)
def get_maintenance_history() -> Tuple[Any, int]:
    """
    Get maintenance task execution history.
    Query Parameters:
        - task_type: Filter by task type (optional)
        - days: Number of days of history (optional, default: 30)
        - limit: Maximum number of records (optional, default: 100)
    Returns:
        JSON response with maintenance history
    """
    task_type = request.args.get('task_type')
    days = request.args.get('days', 30, type=int)
    limit = request.args.get('limit', 100, type=int)
    try:
        history = scheduler_manager.get_task_history(
            task_type=task_type,
            days=days,
            limit=limit
        )
        return create_success_response("Maintenance history retrieved successfully", 200, history)
    except Exception as e:
        # Lazy %-args instead of eager f-string formatting.
        logger.error("Error getting maintenance history: %s", e)
        return create_error_response("Failed to get maintenance history", 500)

View File

@@ -1,406 +0,0 @@
"""
Performance Optimization API Endpoints
This module provides REST API endpoints for performance monitoring
and optimization features.
"""
from flask import Blueprint, request, jsonify
from auth import require_auth, optional_auth
from error_handler import handle_api_errors, RetryableError
from performance_optimizer import (
speed_limiter, download_cache, memory_monitor,
download_manager, resume_manager, DownloadTask
)
import uuid
from datetime import datetime
# Blueprint for performance optimization endpoints
performance_bp = Blueprint('performance', __name__)
@performance_bp.route('/api/performance/speed-limit', methods=['GET'])
@handle_api_errors
@optional_auth
def get_speed_limit():
    """Get current download speed limit."""
    try:
        payload = {
            'speed_limit_mbps': speed_limiter.max_speed_mbps,
            'current_speed_mbps': speed_limiter.get_current_speed(),
        }
        return jsonify({'status': 'success', 'data': payload})
    except Exception as e:
        raise RetryableError(f"Failed to get speed limit: {e}")
@performance_bp.route('/api/performance/speed-limit', methods=['POST'])
@handle_api_errors
@require_auth
def set_speed_limit():
    """Set download speed limit.

    Request body: {"speed_mbps": <number>} where 0 means unlimited.
    """
    try:
        # get_json() returns None for an empty/non-JSON body; fall back
        # to {} so .get() below cannot raise AttributeError.
        data = request.get_json() or {}
        speed_mbps = data.get('speed_mbps', 0)
        if speed_mbps < 0:
            return jsonify({
                'status': 'error',
                'message': 'Speed limit must be non-negative (0 = unlimited)'
            }), 400
        speed_limiter.set_speed_limit(speed_mbps)
        return jsonify({
            'status': 'success',
            'message': f'Speed limit set to {speed_mbps} MB/s' if speed_mbps > 0 else 'Speed limit removed',
            'data': {
                'speed_limit_mbps': speed_mbps
            }
        })
    except Exception as e:
        raise RetryableError(f"Failed to set speed limit: {e}")
@performance_bp.route('/api/performance/cache/stats')
@handle_api_errors
@optional_auth
def get_cache_stats():
    """Get cache statistics."""
    try:
        return jsonify({'status': 'success', 'data': download_cache.get_stats()})
    except Exception as e:
        raise RetryableError(f"Failed to get cache stats: {e}")
@performance_bp.route('/api/performance/cache/clear', methods=['POST'])
@handle_api_errors
@require_auth
def clear_cache():
    """Clear download cache."""
    try:
        download_cache.clear()
        response = {'status': 'success', 'message': 'Cache cleared successfully'}
        return jsonify(response)
    except Exception as e:
        raise RetryableError(f"Failed to clear cache: {e}")
@performance_bp.route('/api/performance/memory/stats')
@handle_api_errors
@optional_auth
def get_memory_stats():
    """Get memory usage statistics."""
    try:
        return jsonify({'status': 'success', 'data': memory_monitor.get_memory_stats()})
    except Exception as e:
        raise RetryableError(f"Failed to get memory stats: {e}")
@performance_bp.route('/api/performance/memory/gc', methods=['POST'])
@handle_api_errors
@require_auth
def force_garbage_collection():
    """Force garbage collection to free memory."""
    try:
        memory_monitor.force_garbage_collection()
        # Report post-collection memory figures so the caller can see the effect.
        return jsonify({
            'status': 'success',
            'message': 'Garbage collection completed',
            'data': memory_monitor.get_memory_stats(),
        })
    except Exception as e:
        raise RetryableError(f"Failed to force garbage collection: {e}")
@performance_bp.route('/api/performance/downloads/workers', methods=['GET'])
@handle_api_errors
@optional_auth
def get_worker_count():
    """Get current number of download workers."""
    try:
        worker_info = {
            'max_workers': download_manager.max_workers,
            'active_tasks': len(download_manager.active_tasks),
        }
        return jsonify({'status': 'success', 'data': worker_info})
    except Exception as e:
        raise RetryableError(f"Failed to get worker count: {e}")
@performance_bp.route('/api/performance/downloads/workers', methods=['POST'])
@handle_api_errors
@require_auth
def set_worker_count():
    """Set number of download workers.

    Request body: {"max_workers": <int between 1 and 10>}.
    """
    try:
        # get_json() returns None for an empty/non-JSON body; fall back
        # to {} so .get() below cannot raise AttributeError.
        data = request.get_json() or {}
        max_workers = data.get('max_workers', 3)
        if not isinstance(max_workers, int) or max_workers < 1 or max_workers > 10:
            return jsonify({
                'status': 'error',
                'message': 'Worker count must be between 1 and 10'
            }), 400
        download_manager.set_max_workers(max_workers)
        return jsonify({
            'status': 'success',
            'message': f'Worker count set to {max_workers}',
            'data': {
                'max_workers': max_workers
            }
        })
    except Exception as e:
        raise RetryableError(f"Failed to set worker count: {e}")
@performance_bp.route('/api/performance/downloads/stats')
@handle_api_errors
@optional_auth
def get_download_stats():
    """Get download manager statistics."""
    try:
        return jsonify({'status': 'success', 'data': download_manager.get_statistics()})
    except Exception as e:
        raise RetryableError(f"Failed to get download stats: {e}")
@performance_bp.route('/api/performance/downloads/tasks')
@handle_api_errors
@optional_auth
def get_all_download_tasks():
    """Get all download tasks."""
    try:
        return jsonify({'status': 'success', 'data': download_manager.get_all_tasks()})
    except Exception as e:
        raise RetryableError(f"Failed to get download tasks: {e}")
@performance_bp.route('/api/performance/downloads/tasks/<task_id>')
@handle_api_errors
@optional_auth
def get_download_task(task_id):
    """Get specific download task status."""
    try:
        status_info = download_manager.get_task_status(task_id)
        # Falsy status (unknown id) maps to a 404 response.
        if not status_info:
            return jsonify({'status': 'error', 'message': 'Task not found'}), 404
        return jsonify({'status': 'success', 'data': status_info})
    except Exception as e:
        raise RetryableError(f"Failed to get task status: {e}")
@performance_bp.route('/api/performance/downloads/add-task', methods=['POST'])
@handle_api_errors
@require_auth
def add_download_task():
    """Add a new download task to the queue.

    Requires serie_name, season, episode, key, output_path and
    temp_path in the JSON body; language and priority are optional.
    """
    try:
        # get_json() returns None for an empty/non-JSON body; fall back
        # to {} so the field checks below report 400 instead of crashing.
        data = request.get_json() or {}
        required_fields = ['serie_name', 'season', 'episode', 'key', 'output_path', 'temp_path']
        for field in required_fields:
            if field not in data:
                return jsonify({
                    'status': 'error',
                    'message': f'Missing required field: {field}'
                }), 400
        # Create download task
        task = DownloadTask(
            task_id=str(uuid.uuid4()),
            serie_name=data['serie_name'],
            season=int(data['season']),
            episode=int(data['episode']),
            key=data['key'],
            language=data.get('language', 'German Dub'),
            output_path=data['output_path'],
            temp_path=data['temp_path'],
            priority=data.get('priority', 0)
        )
        task_id = download_manager.add_task(task)
        return jsonify({
            'status': 'success',
            'message': 'Download task added successfully',
            'data': {
                'task_id': task_id
            }
        })
    except Exception as e:
        raise RetryableError(f"Failed to add download task: {e}")
@performance_bp.route('/api/performance/resume/tasks')
@handle_api_errors
@optional_auth
def get_resumable_tasks():
    """Get list of tasks that can be resumed."""
    try:
        # Collect detailed resume info for each candidate; entries with
        # no stored info are skipped.
        tasks_info = []
        for tid in resume_manager.get_resumable_tasks():
            info = resume_manager.load_resume_info(tid)
            if info:
                tasks_info.append({'task_id': tid, 'resume_info': info})
        return jsonify({
            'status': 'success',
            'data': {
                'resumable_tasks': tasks_info,
                'count': len(tasks_info),
            },
        })
    except Exception as e:
        raise RetryableError(f"Failed to get resumable tasks: {e}")
@performance_bp.route('/api/performance/resume/clear/<task_id>', methods=['POST'])
@handle_api_errors
@require_auth
def clear_resume_info(task_id):
    """Clear resume information for a specific task."""
    try:
        resume_manager.clear_resume_info(task_id)
        return jsonify({
            'status': 'success',
            'message': f'Resume information cleared for task: {task_id}',
        })
    except Exception as e:
        raise RetryableError(f"Failed to clear resume info: {e}")
@performance_bp.route('/api/performance/system/optimize', methods=['POST'])
@handle_api_errors
@require_auth
def optimize_system():
    """Perform system optimization tasks.

    Runs three best-effort steps and reports each: forced garbage
    collection, cache expiry cleanup, and removal of resume files
    older than 7 days.
    """
    import os
    import time
    try:
        optimization_results = {}
        # Force garbage collection
        memory_monitor.force_garbage_collection()
        memory_stats = memory_monitor.get_memory_stats()
        optimization_results['memory_gc'] = {
            'completed': True,
            'memory_mb': memory_stats.get('rss_mb', 0)
        }
        # Clean up cache expired entries
        download_cache._cleanup_expired()
        cache_stats = download_cache.get_stats()
        optimization_results['cache_cleanup'] = {
            'completed': True,
            'entries': cache_stats.get('entry_count', 0),
            'size_mb': cache_stats.get('total_size_mb', 0)
        }
        # Clean up old resume files (older than 7 days)
        resume_dir = resume_manager.resume_dir
        cleaned_files = 0
        try:
            for filename in os.listdir(resume_dir):
                file_path = os.path.join(resume_dir, filename)
                if os.path.isfile(file_path):
                    file_age = time.time() - os.path.getmtime(file_path)
                    if file_age > 7 * 24 * 3600:  # 7 days in seconds
                        os.remove(file_path)
                        cleaned_files += 1
        except OSError:
            # Best-effort: a missing directory or a file removed by
            # another process must not fail the whole request.
            pass
        optimization_results['resume_cleanup'] = {
            'completed': True,
            'files_removed': cleaned_files
        }
        return jsonify({
            'status': 'success',
            'message': 'System optimization completed',
            'data': optimization_results
        })
    except Exception as e:
        raise RetryableError(f"System optimization failed: {e}")
@performance_bp.route('/api/performance/config')
@handle_api_errors
@optional_auth
def get_performance_config():
    """Get current performance configuration."""
    try:
        speed_cfg = {
            'current_mbps': speed_limiter.max_speed_mbps,
            'unlimited': speed_limiter.max_speed_mbps == 0,
        }
        downloads_cfg = {
            'max_workers': download_manager.max_workers,
            'active_tasks': len(download_manager.active_tasks),
        }
        # Live stats are merged on top of the static limits for both
        # the cache and memory sections.
        cache_cfg = {'max_size_mb': download_cache.max_size_bytes / (1024 * 1024)}
        cache_cfg.update(download_cache.get_stats())
        memory_cfg = {
            'warning_threshold_mb': memory_monitor.warning_threshold / (1024 * 1024),
            'critical_threshold_mb': memory_monitor.critical_threshold / (1024 * 1024),
        }
        memory_cfg.update(memory_monitor.get_memory_stats())
        config = {
            'speed_limit': speed_cfg,
            'downloads': downloads_cfg,
            'cache': cache_cfg,
            'memory': memory_cfg,
        }
        return jsonify({'status': 'success', 'data': config})
    except Exception as e:
        raise RetryableError(f"Failed to get performance config: {e}")
# Export the blueprint
# Only the blueprint is part of this module's public API.
__all__ = ['performance_bp']

View File

@@ -1,280 +0,0 @@
from flask import Blueprint, jsonify, request
# Project-local: auth decorator and the shared process-lock utilities
# (lock manager singleton, lock-name constants, and the episode
# queue deduplicator) used by every route below.
from web.controllers.auth_controller import require_auth
from shared.utils.process_utils import (
    process_lock_manager,
    RESCAN_LOCK,
    DOWNLOAD_LOCK,
    SEARCH_LOCK,
    check_process_locks,
    get_process_status,
    update_process_progress,
    is_process_running,
    episode_deduplicator,
    ProcessLockError
)
import logging
# Module logger, named after this module per logging convention.
logger = logging.getLogger(__name__)
process_bp = Blueprint('process', __name__, url_prefix='/api/process')
@process_bp.route('/locks/status', methods=['GET'])
@require_auth
def get_all_locks_status():
    """Get status of all process locks, including queue deduplication info."""
    try:
        # Clean up expired locks first so the report reflects reality.
        cleaned = check_process_locks()
        if cleaned > 0:
            # Lazy %-args instead of eager f-string formatting.
            logger.info("Cleaned up %s expired locks", cleaned)
        status = process_lock_manager.get_all_locks_status()
        # Add queue deduplication info
        status['queue_info'] = {
            'active_episodes': episode_deduplicator.get_count(),
            'episodes': episode_deduplicator.get_active_episodes()
        }
        return jsonify({
            'success': True,
            'locks': status
        })
    except Exception as e:
        logger.error("Error getting locks status: %s", e)
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500
@process_bp.route('/locks/<lock_name>/status', methods=['GET'])
@require_auth
def get_lock_status(lock_name):
    """Get status of a specific process lock."""
    try:
        # Only the three well-known lock names are accepted.
        if lock_name not in (RESCAN_LOCK, DOWNLOAD_LOCK, SEARCH_LOCK):
            return jsonify({
                'success': False,
                'error': 'Invalid lock name'
            }), 400
        status = get_process_status(lock_name)
        return jsonify({
            'success': True,
            'status': status
        })
    except Exception as e:
        logger.error("Error getting lock status for %s: %s", lock_name, e)
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500
@process_bp.route('/locks/<lock_name>/acquire', methods=['POST'])
@require_auth
def acquire_lock(lock_name):
    """Manually acquire a process lock.

    Optional body: locked_by (default 'manual'), timeout_minutes (default 60).
    """
    try:
        if lock_name not in (RESCAN_LOCK, DOWNLOAD_LOCK, SEARCH_LOCK):
            return jsonify({
                'success': False,
                'error': 'Invalid lock name'
            }), 400
        data = request.get_json() or {}
        locked_by = data.get('locked_by', 'manual')
        timeout_minutes = data.get('timeout_minutes', 60)
        success = process_lock_manager.acquire_lock(lock_name, locked_by, timeout_minutes)
        if success:
            return jsonify({
                'success': True,
                'message': f'Lock {lock_name} acquired successfully'
            })
        else:
            # 409 Conflict: somebody else already holds this lock.
            return jsonify({
                'success': False,
                'error': f'Lock {lock_name} is already held'
            }), 409
    except Exception as e:
        logger.error("Error acquiring lock %s: %s", lock_name, e)
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500
@process_bp.route('/locks/<lock_name>/release', methods=['POST'])
@require_auth
def release_lock(lock_name):
    """Manually release a process lock."""
    try:
        if lock_name not in (RESCAN_LOCK, DOWNLOAD_LOCK, SEARCH_LOCK):
            return jsonify({
                'success': False,
                'error': 'Invalid lock name'
            }), 400
        success = process_lock_manager.release_lock(lock_name)
        if success:
            return jsonify({
                'success': True,
                'message': f'Lock {lock_name} released successfully'
            })
        else:
            # 404: the lock was not held, so there was nothing to release.
            return jsonify({
                'success': False,
                'error': f'Lock {lock_name} was not held'
            }), 404
    except Exception as e:
        logger.error("Error releasing lock %s: %s", lock_name, e)
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500
@process_bp.route('/locks/cleanup', methods=['POST'])
@require_auth
def cleanup_expired_locks():
    """Manually clean up expired locks."""
    try:
        cleaned = check_process_locks()
        return jsonify({
            'success': True,
            'cleaned_count': cleaned,
            'message': f'Cleaned up {cleaned} expired locks'
        })
    except Exception as e:
        # Lazy %-args instead of eager f-string formatting.
        logger.error("Error cleaning up locks: %s", e)
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500
@process_bp.route('/locks/force-release-all', methods=['POST'])
@require_auth
def force_release_all_locks():
    """Force release all process locks (emergency use).

    Requires {"confirm": true} in the body as a safety interlock.
    """
    try:
        data = request.get_json() or {}
        confirm = data.get('confirm', False)
        if not confirm:
            return jsonify({
                'success': False,
                'error': 'Confirmation required for force release'
            }), 400
        released = process_lock_manager.force_release_all()
        # Also clear queue deduplication
        episode_deduplicator.clear_all()
        return jsonify({
            'success': True,
            'released_count': released,
            'message': f'Force released {released} locks and cleared queue deduplication'
        })
    except Exception as e:
        logger.error("Error force releasing locks: %s", e)
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500
@process_bp.route('/locks/<lock_name>/progress', methods=['POST'])
@require_auth
def update_lock_progress(lock_name):
    """Update progress for a running process.

    Body: {"progress": {...}} — stored for the named running process.
    """
    try:
        if lock_name not in (RESCAN_LOCK, DOWNLOAD_LOCK, SEARCH_LOCK):
            return jsonify({
                'success': False,
                'error': 'Invalid lock name'
            }), 400
        # Progress only makes sense for a process that is actually running.
        if not is_process_running(lock_name):
            return jsonify({
                'success': False,
                'error': f'Process {lock_name} is not running'
            }), 404
        data = request.get_json() or {}
        progress_data = data.get('progress', {})
        update_process_progress(lock_name, progress_data)
        return jsonify({
            'success': True,
            'message': 'Progress updated successfully'
        })
    except Exception as e:
        logger.error("Error updating progress for %s: %s", lock_name, e)
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500
@process_bp.route('/queue/deduplication', methods=['GET'])
@require_auth
def get_queue_deduplication():
    """Get current queue deduplication status."""
    try:
        return jsonify({
            'success': True,
            'deduplication': {
                'active_count': episode_deduplicator.get_count(),
                'active_episodes': episode_deduplicator.get_active_episodes()
            }
        })
    except Exception as e:
        # Lazy %-args instead of eager f-string formatting.
        logger.error("Error getting queue deduplication: %s", e)
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500
@process_bp.route('/queue/deduplication/clear', methods=['POST'])
@require_auth
def clear_queue_deduplication():
    """Clear all queue deduplication entries."""
    try:
        episode_deduplicator.clear_all()
        return jsonify({
            'success': True,
            'message': 'Queue deduplication cleared successfully'
        })
    except Exception as e:
        # Lazy %-args instead of eager f-string formatting.
        logger.error("Error clearing queue deduplication: %s", e)
        return jsonify({
            'success': False,
            'error': str(e)
        }), 500
@process_bp.route('/is-running/<process_name>', methods=['GET'])
@require_auth
def check_if_process_running(process_name):
    """Quick check if a specific process is running."""
    try:
        if process_name not in (RESCAN_LOCK, DOWNLOAD_LOCK, SEARCH_LOCK):
            return jsonify({'success': False, 'error': 'Invalid process name'}), 400
        return jsonify({
            'success': True,
            'is_running': is_process_running(process_name),
            'process_name': process_name
        })
    except Exception as e:
        logger.error(f"Error checking if process {process_name} is running: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500

View File

@@ -1,187 +0,0 @@
from flask import Blueprint, jsonify, request
from web.controllers.auth_controller import require_auth
from application.services.scheduler_service import get_scheduler
import logging
logger = logging.getLogger(__name__)
scheduler_bp = Blueprint('scheduler', __name__, url_prefix='/api/scheduler')
@scheduler_bp.route('/config', methods=['GET'])
@require_auth
def get_scheduler_config():
    """Get current scheduler configuration."""
    try:
        scheduler = get_scheduler()
        if scheduler is None:
            return jsonify({'success': False, 'error': 'Scheduler not initialized'}), 500
        return jsonify({
            'success': True,
            'config': scheduler.get_scheduled_rescan_config()
        })
    except Exception as e:
        logger.error(f"Error getting scheduler config: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
@scheduler_bp.route('/config', methods=['POST'])
@require_auth
def update_scheduler_config():
    """Update scheduler configuration.

    Accepts ``enabled``, ``time`` (HH:MM, default '03:00') and
    ``auto_download_after_rescan`` in the JSON body. Invalid values
    surfaced by the scheduler as ValueError are returned as 400.
    """
    try:
        payload = request.get_json() or {}
        enabled = payload.get('enabled', False)
        time_str = payload.get('time', '03:00')
        auto_download = payload.get('auto_download_after_rescan', False)
        # An enabled schedule is meaningless without a run time.
        if enabled and not time_str:
            return jsonify({
                'success': False,
                'error': 'Time is required when scheduling is enabled'
            }), 400
        scheduler = get_scheduler()
        if scheduler is None:
            return jsonify({'success': False, 'error': 'Scheduler not initialized'}), 500
        scheduler.update_scheduled_rescan_config(enabled, time_str, auto_download)
        return jsonify({
            'success': True,
            'message': 'Scheduler configuration updated successfully',
            'config': scheduler.get_scheduled_rescan_config()
        })
    except ValueError as e:
        # Scheduler rejects invalid config values (e.g. malformed time).
        return jsonify({'success': False, 'error': str(e)}), 400
    except Exception as e:
        logger.error(f"Error updating scheduler config: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
@scheduler_bp.route('/status', methods=['GET'])
@require_auth
def get_scheduler_status():
    """Get current scheduler status and next jobs."""
    try:
        scheduler = get_scheduler()
        if scheduler is None:
            return jsonify({'success': False, 'error': 'Scheduler not initialized'}), 500
        config = scheduler.get_scheduled_rescan_config()
        status = {
            'running': config['is_running'],
            'config': config,
            'scheduled_jobs': scheduler.get_next_scheduled_jobs()
        }
        return jsonify({'success': True, 'status': status})
    except Exception as e:
        logger.error(f"Error getting scheduler status: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
@scheduler_bp.route('/start', methods=['POST'])
@require_auth
def start_scheduler():
    """Start the scheduler."""
    try:
        scheduler = get_scheduler()
        if scheduler is None:
            return jsonify({'success': False, 'error': 'Scheduler not initialized'}), 500
        scheduler.start_scheduler()
        return jsonify({'success': True, 'message': 'Scheduler started successfully'})
    except Exception as e:
        logger.error(f"Error starting scheduler: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
@scheduler_bp.route('/stop', methods=['POST'])
@require_auth
def stop_scheduler():
    """Stop the scheduler."""
    try:
        scheduler = get_scheduler()
        if scheduler is None:
            return jsonify({'success': False, 'error': 'Scheduler not initialized'}), 500
        scheduler.stop_scheduler()
        return jsonify({'success': True, 'message': 'Scheduler stopped successfully'})
    except Exception as e:
        logger.error(f"Error stopping scheduler: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500
@scheduler_bp.route('/trigger-rescan', methods=['POST'])
@require_auth
def trigger_manual_rescan():
    """Manually trigger a scheduled rescan for testing."""
    try:
        scheduler = get_scheduler()
        if scheduler is None:
            return jsonify({'success': False, 'error': 'Scheduler not initialized'}), 500
        scheduler.trigger_manual_scheduled_rescan()
        return jsonify({'success': True, 'message': 'Manual scheduled rescan triggered'})
    except Exception as e:
        logger.error(f"Error triggering manual rescan: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500

View File

@@ -1,637 +0,0 @@
"""
Search API Endpoints
This module provides REST API endpoints for advanced search functionality
across anime, episodes, and other content.
"""
from flask import Blueprint, request
from typing import Dict, List, Any, Optional
import re
from ...shared.auth_decorators import require_auth, optional_auth
from ...shared.error_handlers import handle_api_errors, APIException, ValidationError
from ...shared.validators import validate_pagination_params
from ...shared.response_helpers import (
create_success_response, create_paginated_response, format_anime_response,
format_episode_response, extract_pagination_params
)
# Import search components (these imports would need to be adjusted based on actual structure)
try:
from search_manager import search_engine, SearchResult
from database_manager import anime_repository, episode_repository
except ImportError:
# Fallback for development/testing
search_engine = None
SearchResult = None
anime_repository = None
episode_repository = None
# Blueprint for search endpoints
search_bp = Blueprint('search', __name__, url_prefix='/api/v1/search')
@search_bp.route('', methods=['GET'])
@handle_api_errors
@validate_pagination_params
@optional_auth
def global_search() -> Dict[str, Any]:
    """
    Perform a global search across all content types.
    Query Parameters:
        - q: Search query (required)
        - types: Comma-separated list of content types (anime,episodes,all)
        - categories: Comma-separated list of categories to search
        - min_score: Minimum relevance score (0.0-1.0)
        - page: Page number (default: 1)
        - per_page: Items per page (default: 50, max: 1000)
    Returns:
        Paginated search results grouped by content type
    Raises:
        APIException: 503 when the search engine is unavailable.
        ValidationError: for missing/short query or invalid min_score.
    """
    if not search_engine:
        raise APIException("Search engine not available", 503)
    search_query = request.args.get('q', '').strip()
    if not search_query:
        raise ValidationError("Search query 'q' is required")
    if len(search_query) < 2:
        raise ValidationError("Search query must be at least 2 characters long")
    # Parse search types; unknown entries are silently dropped, and 'all'
    # (or an empty result after filtering) expands to every concrete type.
    search_types = request.args.get('types', 'all').split(',')
    valid_types = ['anime', 'episodes', 'all']
    search_types = [t.strip() for t in search_types if t.strip() in valid_types]
    if not search_types or 'all' in search_types:
        search_types = ['anime', 'episodes']
    # Parse categories (empty entries removed)
    categories = request.args.get('categories', '').split(',')
    categories = [c.strip() for c in categories if c.strip()]
    # Parse minimum score; rebinds the query-string value to a float
    min_score = request.args.get('min_score', '0.0')
    try:
        min_score = float(min_score)
        if not 0.0 <= min_score <= 1.0:
            raise ValueError()
    except ValueError:
        raise ValidationError("min_score must be a number between 0.0 and 1.0")
    # Get pagination parameters
    page, per_page = extract_pagination_params()
    # Perform search
    search_results = search_engine.search_all(
        query=search_query,
        content_types=search_types,
        categories=categories,
        min_score=min_score
    )
    # Group results by type
    grouped_results = {
        'anime': [],
        'episodes': [],
        'total_results': 0
    }
    for result in search_results:
        if result.content_type == 'anime':
            grouped_results['anime'].append({
                'id': result.content_id,
                'type': 'anime',
                'title': result.title,
                'description': result.description,
                'score': result.relevance_score,
                'data': format_anime_response(result.content_data)
            })
        elif result.content_type == 'episode':
            grouped_results['episodes'].append({
                'id': result.content_id,
                'type': 'episode',
                'title': result.title,
                'description': result.description,
                'score': result.relevance_score,
                'data': format_episode_response(result.content_data)
            })
        # NOTE(review): counts every result, including content types that
        # match neither branch above — confirm this is intended.
        grouped_results['total_results'] += 1
    # Apply pagination to combined results
    # NOTE(review): all results are materialized and sorted in memory before
    # slicing — acceptable for modest result sets; confirm for large ones.
    all_results = []
    for result_type in ['anime', 'episodes']:
        all_results.extend(grouped_results[result_type])
    # Sort by relevance score
    all_results.sort(key=lambda x: x['score'], reverse=True)
    total = len(all_results)
    start_idx = (page - 1) * per_page
    end_idx = start_idx + per_page
    paginated_results = all_results[start_idx:end_idx]
    response = create_paginated_response(
        data=paginated_results,
        page=page,
        per_page=per_page,
        total=total,
        endpoint='search.global_search',
        q=search_query
    )
    # Add search metadata
    response['search'] = {
        'query': search_query,
        'types': search_types,
        'categories': categories,
        'min_score': min_score,
        'results_by_type': {
            'anime': len(grouped_results['anime']),
            'episodes': len(grouped_results['episodes'])
        }
    }
    return response
@search_bp.route('/anime', methods=['GET'])
@handle_api_errors
@validate_pagination_params
@optional_auth
def search_anime() -> Dict[str, Any]:
    """
    Search anime with advanced filters.
    Query Parameters:
        - q: Search query (required)
        - genres: Comma-separated list of genres
        - status: Anime status filter
        - year_from: Starting year filter
        - year_to: Ending year filter
        - min_episodes: Minimum episode count
        - max_episodes: Maximum episode count
        - sort_by: Sort field (name, year, episodes, relevance)
        - sort_order: Sort order (asc, desc)
        - page: Page number (default: 1)
        - per_page: Items per page (default: 50, max: 1000)
    Returns:
        Paginated anime search results
    """
    if not anime_repository:
        raise APIException("Anime repository not available", 503)
    search_query = request.args.get('q', '').strip()
    if not search_query:
        raise ValidationError("Search query 'q' is required")
    # Parse filters
    genres = request.args.get('genres', '').split(',')
    genres = [g.strip() for g in genres if g.strip()]
    status_filter = request.args.get('status')
    # Parse year filters
    # NOTE(review): each bound is validated individually; year_from > year_to
    # is not rejected here and falls through to the repository — confirm.
    year_from = request.args.get('year_from')
    year_to = request.args.get('year_to')
    if year_from:
        try:
            year_from = int(year_from)
            if year_from < 1900 or year_from > 2100:
                raise ValueError()
        except ValueError:
            raise ValidationError("year_from must be a valid year between 1900 and 2100")
    if year_to:
        try:
            year_to = int(year_to)
            if year_to < 1900 or year_to > 2100:
                raise ValueError()
        except ValueError:
            raise ValidationError("year_to must be a valid year between 1900 and 2100")
    # Parse episode count filters (same caveat: min > max is not rejected)
    min_episodes = request.args.get('min_episodes')
    max_episodes = request.args.get('max_episodes')
    if min_episodes:
        try:
            min_episodes = int(min_episodes)
            if min_episodes < 0:
                raise ValueError()
        except ValueError:
            raise ValidationError("min_episodes must be a non-negative integer")
    if max_episodes:
        try:
            max_episodes = int(max_episodes)
            if max_episodes < 0:
                raise ValueError()
        except ValueError:
            raise ValidationError("max_episodes must be a non-negative integer")
    # Parse sorting
    sort_by = request.args.get('sort_by', 'relevance')
    sort_order = request.args.get('sort_order', 'desc')
    valid_sort_fields = ['name', 'year', 'episodes', 'relevance', 'created_at']
    if sort_by not in valid_sort_fields:
        raise ValidationError(f"sort_by must be one of: {', '.join(valid_sort_fields)}")
    if sort_order not in ['asc', 'desc']:
        raise ValidationError("sort_order must be 'asc' or 'desc'")
    # Get pagination parameters
    page, per_page = extract_pagination_params()
    # Perform advanced search
    search_results = anime_repository.advanced_search(
        query=search_query,
        genres=genres,
        status=status_filter,
        year_from=year_from,
        year_to=year_to,
        min_episodes=min_episodes,
        max_episodes=max_episodes,
        sort_by=sort_by,
        sort_order=sort_order
    )
    # Format results
    formatted_results = []
    for anime in search_results:
        anime_data = format_anime_response(anime.__dict__)
        # Add search relevance score if available
        if hasattr(anime, 'relevance_score'):
            anime_data['relevance_score'] = anime.relevance_score
        formatted_results.append(anime_data)
    # Apply pagination in memory over the fully materialized result set
    total = len(formatted_results)
    start_idx = (page - 1) * per_page
    end_idx = start_idx + per_page
    paginated_results = formatted_results[start_idx:end_idx]
    response = create_paginated_response(
        data=paginated_results,
        page=page,
        per_page=per_page,
        total=total,
        endpoint='search.search_anime',
        q=search_query
    )
    # Add search metadata
    response['search'] = {
        'query': search_query,
        'filters': {
            'genres': genres,
            'status': status_filter,
            'year_from': year_from,
            'year_to': year_to,
            'min_episodes': min_episodes,
            'max_episodes': max_episodes
        },
        'sorting': {
            'sort_by': sort_by,
            'sort_order': sort_order
        }
    }
    return response
@search_bp.route('/episodes', methods=['GET'])
@handle_api_errors
@validate_pagination_params
@optional_auth
def search_episodes() -> Dict[str, Any]:
    """
    Search episodes with advanced filters.
    Query Parameters:
        - q: Search query (required)
        - anime_id: Filter by anime ID
        - status: Episode status filter
        - downloaded: Filter by download status (true/false)
        - episode_range: Episode range filter (e.g., "1-10", "5+")
        - duration_min: Minimum duration in minutes
        - duration_max: Maximum duration in minutes
        - sort_by: Sort field (episode_number, title, duration, relevance)
        - sort_order: Sort order (asc, desc)
        - page: Page number (default: 1)
        - per_page: Items per page (default: 50, max: 1000)
    Returns:
        Paginated episode search results
    """
    if not episode_repository:
        raise APIException("Episode repository not available", 503)
    search_query = request.args.get('q', '').strip()
    if not search_query:
        raise ValidationError("Search query 'q' is required")
    # Parse filters
    anime_id = request.args.get('anime_id')
    if anime_id:
        try:
            anime_id = int(anime_id)
        except ValueError:
            raise ValidationError("anime_id must be a valid integer")
    status_filter = request.args.get('status')
    downloaded_filter = request.args.get('downloaded')
    if downloaded_filter and downloaded_filter.lower() not in ['true', 'false']:
        raise ValidationError("downloaded filter must be 'true' or 'false'")
    # Parse episode range: "N" = exactly N, "N-M" = inclusive range,
    # "N+" = N with no upper bound.
    episode_range = request.args.get('episode_range')
    episode_min = None
    episode_max = None
    if episode_range:
        range_pattern = r'^(\d+)(?:-(\d+)|\+)?$'
        match = re.match(range_pattern, episode_range)
        if not match:
            raise ValidationError("episode_range must be in format 'N', 'N-M', or 'N+'")
        episode_min = int(match.group(1))
        if match.group(2):
            episode_max = int(match.group(2))
        elif episode_range.endswith('+'):
            episode_max = None  # No upper limit
        else:
            episode_max = episode_min  # Single episode
    # Parse duration filters
    duration_min = request.args.get('duration_min')
    duration_max = request.args.get('duration_max')
    if duration_min:
        try:
            duration_min = int(duration_min)
            if duration_min < 0:
                raise ValueError()
        except ValueError:
            raise ValidationError("duration_min must be a non-negative integer")
    if duration_max:
        try:
            duration_max = int(duration_max)
            if duration_max < 0:
                raise ValueError()
        except ValueError:
            raise ValidationError("duration_max must be a non-negative integer")
    # Parse sorting
    sort_by = request.args.get('sort_by', 'relevance')
    sort_order = request.args.get('sort_order', 'desc')
    valid_sort_fields = ['episode_number', 'title', 'duration', 'relevance', 'created_at']
    if sort_by not in valid_sort_fields:
        raise ValidationError(f"sort_by must be one of: {', '.join(valid_sort_fields)}")
    if sort_order not in ['asc', 'desc']:
        raise ValidationError("sort_order must be 'asc' or 'desc'")
    # Get pagination parameters
    page, per_page = extract_pagination_params()
    # Perform advanced search
    search_results = episode_repository.advanced_search(
        query=search_query,
        anime_id=anime_id,
        status=status_filter,
        # downloaded arrives as a string; convert to bool, None when absent
        downloaded=downloaded_filter.lower() == 'true' if downloaded_filter else None,
        episode_min=episode_min,
        episode_max=episode_max,
        duration_min=duration_min,
        duration_max=duration_max,
        sort_by=sort_by,
        sort_order=sort_order
    )
    # Format results
    formatted_results = []
    for episode in search_results:
        episode_data = format_episode_response(episode.__dict__)
        # Add search relevance score if available
        if hasattr(episode, 'relevance_score'):
            episode_data['relevance_score'] = episode.relevance_score
        formatted_results.append(episode_data)
    # Apply pagination in memory over the fully materialized result set
    total = len(formatted_results)
    start_idx = (page - 1) * per_page
    end_idx = start_idx + per_page
    paginated_results = formatted_results[start_idx:end_idx]
    response = create_paginated_response(
        data=paginated_results,
        page=page,
        per_page=per_page,
        total=total,
        endpoint='search.search_episodes',
        q=search_query
    )
    # Add search metadata
    response['search'] = {
        'query': search_query,
        'filters': {
            'anime_id': anime_id,
            'status': status_filter,
            'downloaded': downloaded_filter,
            'episode_range': episode_range,
            'duration_min': duration_min,
            'duration_max': duration_max
        },
        'sorting': {
            'sort_by': sort_by,
            'sort_order': sort_order
        }
    }
    return response
@search_bp.route('/suggestions', methods=['GET'])
@handle_api_errors
@optional_auth
def get_search_suggestions() -> Dict[str, Any]:
    """
    Get search suggestions based on partial query.
    Query Parameters:
        - q: Partial search query (required)
        - type: Content type (anime, episodes, all)
        - limit: Maximum suggestions to return (default: 10, max: 50)
    Returns:
        List of search suggestions
    Raises:
        APIException: 503 when the search engine is unavailable.
        ValidationError: for a missing query or invalid type/limit.
    """
    if not search_engine:
        raise APIException("Search engine not available", 503)
    query = request.args.get('q', '').strip()
    if not query:
        raise ValidationError("Query 'q' is required")
    # (Removed unreachable `len(query) < 1` early return: the query is
    # stripped and already rejected above when empty, so it can never be
    # empty at this point.)
    content_type = request.args.get('type', 'all')
    if content_type not in ['anime', 'episodes', 'all']:
        raise ValidationError("type must be 'anime', 'episodes', or 'all'")
    limit = request.args.get('limit', '10')
    try:
        limit = int(limit)
        if limit < 1 or limit > 50:
            raise ValueError()
    except ValueError:
        raise ValidationError("limit must be an integer between 1 and 50")
    # Delegate to the search engine for ranked suggestions
    suggestions = search_engine.get_suggestions(
        query=query,
        content_type=content_type,
        limit=limit
    )
    return create_success_response(
        data={
            'suggestions': suggestions,
            'query': query,
            'count': len(suggestions)
        }
    )
@search_bp.route('/autocomplete', methods=['GET'])
@handle_api_errors
@optional_auth
def autocomplete() -> Dict[str, Any]:
    """
    Get autocomplete suggestions for search fields.
    Query Parameters:
        - field: Field to autocomplete (name, genre, status)
        - q: Partial value
        - limit: Maximum suggestions (default: 10, max: 20)
    Returns:
        List of autocomplete suggestions
    """
    field = request.args.get('field', '').strip()
    query = request.args.get('q', '').strip()
    if not field:
        raise ValidationError("Field parameter is required")
    if field not in ['name', 'genre', 'status', 'year']:
        raise ValidationError("field must be one of: name, genre, status, year")
    try:
        limit = int(request.args.get('limit', '10'))
        if not 1 <= limit <= 20:
            raise ValueError()
    except ValueError:
        raise ValidationError("limit must be an integer between 1 and 20")
    suggestions: List[str] = []
    if field == 'status':
        # Static vocabulary: filter locally, case-insensitively.
        valid_statuses = ['ongoing', 'completed', 'planned', 'dropped', 'paused']
        suggestions = [s for s in valid_statuses if query.lower() in s.lower()][:limit]
    elif anime_repository:
        # Repository-backed fields; silently empty when no repository is wired.
        if field == 'name':
            suggestions.extend(anime_repository.get_name_suggestions(query, limit))
        elif field == 'genre':
            suggestions.extend(anime_repository.get_genre_suggestions(query, limit))
        elif field == 'year':
            suggestions.extend(anime_repository.get_year_suggestions(query, limit))
    return create_success_response(
        data={
            'suggestions': suggestions,
            'field': field,
            'query': query,
            'count': len(suggestions)
        }
    )
@search_bp.route('/trending', methods=['GET'])
@handle_api_errors
@optional_auth
def get_trending_searches() -> Dict[str, Any]:
    """
    Get trending search queries.
    Query Parameters:
        - period: Time period (day, week, month)
        - type: Content type (anime, episodes, all)
        - limit: Maximum results (default: 10, max: 50)
    Returns:
        List of trending search queries
    """
    if not search_engine:
        raise APIException("Search engine not available", 503)
    period = request.args.get('period', 'week')
    content_type = request.args.get('type', 'all')
    if period not in ('day', 'week', 'month'):
        raise ValidationError("period must be 'day', 'week', or 'month'")
    if content_type not in ('anime', 'episodes', 'all'):
        raise ValidationError("type must be 'anime', 'episodes', or 'all'")
    try:
        limit = int(request.args.get('limit', '10'))
        if not 1 <= limit <= 50:
            raise ValueError()
    except ValueError:
        raise ValidationError("limit must be an integer between 1 and 50")
    trending = search_engine.get_trending_searches(
        period=period,
        content_type=content_type,
        limit=limit
    )
    return create_success_response(
        data={
            'trending': trending,
            'period': period,
            'type': content_type,
            'count': len(trending)
        }
    )

View File

@@ -1,332 +0,0 @@
"""
Simple Master Password Authentication Controller for AniWorld.
This module implements a simple authentication system using:
- Single master password (no user registration)
- JWT tokens for session management
- Environment-based configuration
- No email system required
"""
import os
import hashlib
import jwt
from datetime import datetime, timedelta
from flask import Blueprint, request, jsonify
from functools import wraps
import logging
from typing import Dict, Any, Optional, Tuple
# Configure logging
logger = logging.getLogger(__name__)
# Create blueprint
simple_auth_bp = Blueprint('simple_auth', __name__)
# Configuration from environment
JWT_SECRET_KEY = os.getenv('JWT_SECRET_KEY', 'default_jwt_secret')
PASSWORD_SALT = os.getenv('PASSWORD_SALT', 'default_salt')
MASTER_PASSWORD_HASH = os.getenv('MASTER_PASSWORD_HASH')
TOKEN_EXPIRY_HOURS = int(os.getenv('SESSION_TIMEOUT_HOURS', '24'))
def hash_password(password: str) -> str:
    """Hash password with salt using SHA-256.

    NOTE(review): a single SHA-256 pass is weak for password storage;
    consider hashlib.pbkdf2_hmac or scrypt — confirm before changing,
    since stored MASTER_PASSWORD_HASH values would need regenerating.
    """
    return hashlib.sha256((password + PASSWORD_SALT).encode()).hexdigest()
def verify_master_password(password: str) -> bool:
    """Verify password against master password hash.

    Falls back to the plaintext MASTER_PASSWORD environment variable when
    no hash is configured (development only).

    Args:
        password: Candidate password supplied by the client.

    Returns:
        True if the password matches the configured master credential.
    """
    import hmac  # local import: constant-time comparison helper
    if not MASTER_PASSWORD_HASH:
        # If no hash is set, check against environment variable (development only)
        dev_password = os.getenv('MASTER_PASSWORD')
        if dev_password:
            # compare_digest avoids leaking match position via timing.
            return hmac.compare_digest(password.encode(), dev_password.encode())
        return False
    password_hash = hash_password(password)
    # Constant-time digest comparison for the same reason.
    return hmac.compare_digest(password_hash.encode(), MASTER_PASSWORD_HASH.encode())
def generate_jwt_token() -> str:
    """Generate a signed JWT for the master session.

    Uses timezone-aware UTC timestamps: datetime.utcnow() is deprecated
    since Python 3.12, and PyJWT converts aware datetimes to the same
    integer epoch claims, so token contents are unchanged.

    Returns:
        The encoded HS256 JWT as a string.
    """
    from datetime import timezone  # local import keeps module imports untouched
    now = datetime.now(timezone.utc)
    payload = {
        'user': 'master',
        'exp': now + timedelta(hours=TOKEN_EXPIRY_HOURS),
        'iat': now,
        'iss': 'aniworld-server'
    }
    return jwt.encode(payload, JWT_SECRET_KEY, algorithm='HS256')
def verify_jwt_token(token: str) -> Optional[Dict[str, Any]]:
    """Decode a JWT and return its payload, or None when invalid/expired."""
    try:
        return jwt.decode(token, JWT_SECRET_KEY, algorithms=['HS256'])
    except jwt.ExpiredSignatureError:
        logger.warning("Token has expired")
    except jwt.InvalidTokenError as e:
        logger.warning(f"Invalid token: {str(e)}")
    return None
def require_auth(f):
    """Decorator to require a valid Bearer JWT for API endpoints.

    Expects an ``Authorization: Bearer <token>`` header. On success the
    decoded payload is attached as ``request.current_user``; on failure a
    401 JSON error with a machine-readable ``code`` is returned.
    """
    @wraps(f)
    def decorated_function(*args, **kwargs):
        auth_header = request.headers.get('Authorization')
        if not auth_header:
            return jsonify({
                'success': False,
                'error': 'Authorization header required',
                'code': 'AUTH_REQUIRED'
            }), 401
        # Enforce the documented "Bearer <token>" shape; previously any
        # two-word header was accepted regardless of scheme.
        parts = auth_header.split(' ')
        if len(parts) != 2 or parts[0].lower() != 'bearer':
            return jsonify({
                'success': False,
                'error': 'Invalid authorization header format',
                'code': 'INVALID_AUTH_FORMAT'
            }), 401
        payload = verify_jwt_token(parts[1])
        if not payload:
            return jsonify({
                'success': False,
                'error': 'Invalid or expired token',
                'code': 'INVALID_TOKEN'
            }), 401
        # Add user info to request context
        request.current_user = payload
        return f(*args, **kwargs)
    return decorated_function
# Auth endpoints
@simple_auth_bp.route('/auth/login', methods=['POST'])
def login() -> Tuple[Any, int]:
    """
    Authenticate with master password and receive JWT token.
    Request Body:
        {
            "password": "master_password"
        }
    Response:
        {
            "success": true,
            "message": "Login successful",
            "data": {
                "token": "jwt_token_here",
                "expires_at": "2025-01-01T00:00:00Z",
                "user": "master"
            }
        }
    """
    try:
        data = request.get_json()
        if not data:
            return jsonify({
                'success': False,
                'error': 'JSON body required',
                'code': 'MISSING_JSON'
            }), 400
        password = data.get('password')
        if not password:
            return jsonify({
                'success': False,
                'error': 'Password required',
                'code': 'MISSING_PASSWORD'
            }), 400
        # Verify master password; failures are logged with the caller's IP
        if not verify_master_password(password):
            logger.warning(f"Failed login attempt from IP: {request.remote_addr}")
            return jsonify({
                'success': False,
                'error': 'Invalid master password',
                'code': 'INVALID_CREDENTIALS'
            }), 401
        # Generate JWT token
        token = generate_jwt_token()
        # NOTE(review): this expiry is computed independently of the token's
        # own 'exp' claim, so the two can drift by microseconds; also
        # datetime.utcnow() is deprecated since Python 3.12 — confirm.
        expires_at = datetime.utcnow() + timedelta(hours=TOKEN_EXPIRY_HOURS)
        logger.info(f"Successful login from IP: {request.remote_addr}")
        return jsonify({
            'success': True,
            'message': 'Login successful',
            'data': {
                'token': token,
                'expires_at': expires_at.isoformat() + 'Z',
                'user': 'master',
                'token_type': 'Bearer'
            }
        }), 200
    except Exception as e:
        logger.error(f"Login error: {str(e)}")
        return jsonify({
            'success': False,
            'error': 'Internal server error',
            'code': 'SERVER_ERROR'
        }), 500
@simple_auth_bp.route('/auth/verify', methods=['GET'])
@require_auth
def verify_token() -> Tuple[Any, int]:
    """
    Verify if the current JWT token is valid.
    Headers:
        Authorization: Bearer <token>
    Response:
        {
            "success": true,
            "message": "Token is valid",
            "data": {
                "user": "master",
                "expires_at": "2025-01-01T00:00:00Z",
                "issued_at": "2025-01-01T00:00:00Z"
            }
        }
    """
    try:
        # Payload was decoded and attached by the @require_auth decorator.
        payload = request.current_user
        # NOTE(review): datetime.utcfromtimestamp() is deprecated since
        # Python 3.12; switching to fromtimestamp(..., tz=timezone.utc)
        # would change isoformat() output (adds +00:00 before the appended
        # 'Z') — confirm a format-preserving migration before changing.
        return jsonify({
            'success': True,
            'message': 'Token is valid',
            'data': {
                'user': payload.get('user'),
                'expires_at': datetime.utcfromtimestamp(payload.get('exp')).isoformat() + 'Z',
                'issued_at': datetime.utcfromtimestamp(payload.get('iat')).isoformat() + 'Z',
                'issuer': payload.get('iss')
            }
        }), 200
    except Exception as e:
        logger.error(f"Token verification error: {str(e)}")
        return jsonify({
            'success': False,
            'error': 'Internal server error',
            'code': 'SERVER_ERROR'
        }), 500
@simple_auth_bp.route('/auth/logout', methods=['POST'])
@require_auth
def logout() -> Tuple[Any, int]:
    """
    Logout (client-side token clearing).
    JWTs are stateless, so logout is performed by the client discarding
    its token; this endpoint only acknowledges the action and logs it.
    Headers:
        Authorization: Bearer <token>
    """
    try:
        logger.info(f"User logged out from IP: {request.remote_addr}")
        response = {
            'success': True,
            'message': 'Logout successful. Please remove the token on client side.',
            'data': {'action': 'clear_token'}
        }
        return jsonify(response), 200
    except Exception as e:
        logger.error(f"Logout error: {str(e)}")
        return jsonify({
            'success': False,
            'error': 'Internal server error',
            'code': 'SERVER_ERROR'
        }), 500
@simple_auth_bp.route('/auth/status', methods=['GET'])
def auth_status() -> Tuple[Any, int]:
    """
    Check authentication system status.
    Reports whether a master credential is configured (hash or dev
    plaintext) and the token lifetime; never reveals the credential.
    """
    try:
        has_password = bool(MASTER_PASSWORD_HASH or os.getenv('MASTER_PASSWORD'))
        return jsonify({
            'success': True,
            'message': 'Authentication system status',
            'data': {
                'auth_type': 'master_password',
                'jwt_enabled': True,
                'password_configured': has_password,
                'token_expiry_hours': TOKEN_EXPIRY_HOURS
            }
        }), 200
    except Exception as e:
        logger.error(f"Auth status error: {str(e)}")
        return jsonify({
            'success': False,
            'error': 'Internal server error',
            'code': 'SERVER_ERROR'
        }), 500
# Utility function to set master password hash
def set_master_password(password: str) -> str:
    """Return the salted hash of *password* for MASTER_PASSWORD_HASH.

    Operator helper: run once and store the returned value in the
    environment so the plaintext never needs to be configured.

    Args:
        password: The master password to hash.

    Returns:
        The hashed password to store in the environment.
    """
    return hash_password(password)
# Health check endpoint
@simple_auth_bp.route('/auth/health', methods=['GET'])
def health_check() -> Tuple[Any, int]:
    """Health check for auth system."""
    body = {
        'success': True,
        'message': 'Auth system is healthy',
        'timestamp': datetime.utcnow().isoformat() + 'Z'
    }
    return jsonify(body), 200

View File

@@ -1,661 +0,0 @@
"""
Storage Management API Endpoints
This module provides REST API endpoints for storage management operations,
including storage monitoring, location management, and disk usage tracking.
"""
from flask import Blueprint, request
from typing import Dict, List, Any, Optional
import os
import shutil
from datetime import datetime
from ...shared.auth_decorators import require_auth, optional_auth
from ...shared.error_handlers import handle_api_errors, APIException, NotFoundError, ValidationError
from ...shared.validators import validate_json_input, validate_id_parameter, validate_pagination_params
from ...shared.response_helpers import (
create_success_response, create_paginated_response, extract_pagination_params
)
# Import storage components (these imports would need to be adjusted based on actual structure)
try:
from database_manager import storage_manager, database_manager, StorageLocation
except ImportError:
# Fallback for development/testing
storage_manager = None
database_manager = None
StorageLocation = None
# Blueprint for storage management endpoints
storage_bp = Blueprint('storage', __name__, url_prefix='/api/v1/storage')
@storage_bp.route('/summary', methods=['GET'])
@handle_api_errors
@optional_auth
def get_storage_summary() -> Dict[str, Any]:
    """
    Get overall storage usage summary.
    Returns:
        Storage summary with usage statistics
    Raises:
        APIException: 503 when the storage manager is unavailable,
        500 when the backend summary call fails.
    """
    if not storage_manager:
        raise APIException("Storage manager not available", 503)
    try:
        summary = storage_manager.get_storage_summary()
        # Byte counts are converted to GiB (1024**3) and rounded for display;
        # missing keys default to 0.
        return create_success_response(
            data={
                'total_storage_gb': round(summary.get('total_bytes', 0) / (1024**3), 2),
                'used_storage_gb': round(summary.get('used_bytes', 0) / (1024**3), 2),
                'free_storage_gb': round(summary.get('free_bytes', 0) / (1024**3), 2),
                'usage_percentage': summary.get('usage_percentage', 0),
                'anime_storage_gb': round(summary.get('anime_bytes', 0) / (1024**3), 2),
                'backup_storage_gb': round(summary.get('backup_bytes', 0) / (1024**3), 2),
                'cache_storage_gb': round(summary.get('cache_bytes', 0) / (1024**3), 2),
                'temp_storage_gb': round(summary.get('temp_bytes', 0) / (1024**3), 2),
                'location_count': summary.get('location_count', 0),
                'active_locations': summary.get('active_locations', 0),
                # NOTE(review): assumes 'last_updated' is a datetime (falls
                # back to utcnow()); a preformatted string here would raise
                # on .isoformat() — confirm the backend's type.
                'last_updated': summary.get('last_updated', datetime.utcnow()).isoformat()
            }
        )
    except Exception as e:
        # Re-wrap any backend failure as a 500 APIException for the handler.
        raise APIException(f"Failed to get storage summary: {str(e)}", 500)
@storage_bp.route('/locations', methods=['GET'])
@handle_api_errors
@validate_pagination_params
@optional_auth
def get_storage_locations() -> Dict[str, Any]:
    """
    Get all storage locations with optional filtering.
    Query Parameters:
        - location_type: Filter by location type (primary, backup, cache, temp)
        - anime_id: Filter by anime ID
        - status: Filter by status (active, inactive, error)
        - min_free_gb: Minimum free space in GB
        - max_usage_percent: Maximum usage percentage
        - page: Page number (default: 1)
        - per_page: Items per page (default: 50, max: 1000)
    Returns:
        Paginated list of storage locations
    Raises:
        ValidationError: On malformed filter values.
        APIException: When the storage backend is unavailable or the query fails.
    """
    if not storage_manager or not database_manager:
        raise APIException("Storage manager not available", 503)
    # Extract filters (raw strings or None at this point).
    location_type_filter = request.args.get('location_type')
    anime_id = request.args.get('anime_id')
    status_filter = request.args.get('status')
    min_free_gb = request.args.get('min_free_gb')
    max_usage_percent = request.args.get('max_usage_percent')
    # Validate and normalize filters: absent/empty params become None so the
    # filtering below can distinguish "not requested" from a 0 value.
    valid_types = ['primary', 'backup', 'cache', 'temp']
    if location_type_filter and location_type_filter not in valid_types:
        raise ValidationError(f"location_type must be one of: {', '.join(valid_types)}")
    if anime_id:
        try:
            anime_id = int(anime_id)
        except ValueError:
            raise ValidationError("anime_id must be a valid integer")
    else:
        anime_id = None
    valid_statuses = ['active', 'inactive', 'error']
    if status_filter and status_filter not in valid_statuses:
        raise ValidationError(f"status must be one of: {', '.join(valid_statuses)}")
    if min_free_gb:
        try:
            min_free_gb = float(min_free_gb)
            if min_free_gb < 0:
                raise ValueError()
        except ValueError:
            raise ValidationError("min_free_gb must be a non-negative number")
    else:
        min_free_gb = None
    if max_usage_percent:
        try:
            max_usage_percent = float(max_usage_percent)
            if not 0 <= max_usage_percent <= 100:
                raise ValueError()
        except ValueError:
            raise ValidationError("max_usage_percent must be between 0 and 100")
    else:
        max_usage_percent = None
    # Get pagination parameters
    page, per_page = extract_pagination_params()
    try:
        # Query storage locations
        query = """
            SELECT sl.*, am.name as anime_name
            FROM storage_locations sl
            LEFT JOIN anime_metadata am ON sl.anime_id = am.anime_id
            WHERE 1=1
        """
        params = []
        if location_type_filter:
            query += " AND sl.location_type = ?"
            params.append(location_type_filter)
        # BUGFIX: compare against None rather than truthiness so anime_id=0
        # is still applied as a filter.
        if anime_id is not None:
            query += " AND sl.anime_id = ?"
            params.append(anime_id)
        if status_filter:
            query += " AND sl.status = ?"
            params.append(status_filter)
        query += " ORDER BY sl.location_type, sl.path"
        results = database_manager.execute_query(query, params)
        # Format and filter results
        locations = []
        for row in results:
            free_space_gb = (row['free_space_bytes'] / (1024**3)) if row['free_space_bytes'] else None
            total_space_gb = (row['total_space_bytes'] / (1024**3)) if row['total_space_bytes'] else None
            usage_percent = None
            if row['total_space_bytes'] and row['free_space_bytes']:
                usage_percent = ((row['total_space_bytes'] - row['free_space_bytes']) / row['total_space_bytes'] * 100)
            # BUGFIX: use "is not None" so 0-valued filters are honored;
            # previously max_usage_percent=0 (only empty locations) was
            # silently ignored because 0.0 is falsy.
            if min_free_gb is not None and (free_space_gb is None or free_space_gb < min_free_gb):
                continue
            if max_usage_percent is not None and (usage_percent is None or usage_percent > max_usage_percent):
                continue
            location_data = {
                'location_id': row['location_id'],
                'anime_id': row['anime_id'],
                'anime_name': row['anime_name'],
                'path': row['path'],
                'location_type': row['location_type'],
                'status': row['status'],
                'free_space_gb': free_space_gb,
                'total_space_gb': total_space_gb,
                'used_space_gb': (total_space_gb - free_space_gb) if (total_space_gb and free_space_gb) else None,
                'usage_percent': usage_percent,
                'last_checked': row['last_checked'],
                'created_at': row['created_at'],
                'is_active': row['is_active'],
                'mount_point': row.get('mount_point'),
                'filesystem': row.get('filesystem')
            }
            locations.append(location_data)
        # Paginate in memory (post-filtering, so totals reflect the filters).
        total = len(locations)
        start_idx = (page - 1) * per_page
        end_idx = start_idx + per_page
        paginated_locations = locations[start_idx:end_idx]
        return create_paginated_response(
            data=paginated_locations,
            page=page,
            per_page=per_page,
            total=total,
            endpoint='storage.get_storage_locations'
        )
    except Exception as e:
        raise APIException(f"Failed to get storage locations: {str(e)}", 500)
@storage_bp.route('/locations', methods=['POST'])
@handle_api_errors
@validate_json_input(
    required_fields=['path', 'location_type'],
    optional_fields=['anime_id', 'description', 'mount_point', 'auto_create'],
    field_types={
        'path': str,
        'location_type': str,
        'anime_id': int,
        'description': str,
        'mount_point': str,
        'auto_create': bool
    }
)
@require_auth
def add_storage_location() -> Dict[str, Any]:
    """
    Add a new storage location.
    Required Fields:
        - path: Storage path
        - location_type: Type of storage (primary, backup, cache, temp)
    Optional Fields:
        - anime_id: Associated anime ID (for anime-specific storage)
        - description: Location description
        - mount_point: Mount point information
        - auto_create: Automatically create directory if it doesn't exist
    Returns:
        Created storage location information (HTTP 201)
    Raises:
        ValidationError: On invalid type/path, duplicate path, or missing directory.
        APIException: When the storage backend is unavailable or creation fails.
    """
    if not storage_manager:
        raise APIException("Storage manager not available", 503)
    payload = request.get_json()
    path = payload['path']
    location_type = payload['location_type']
    anime_id = payload.get('anime_id')
    description = payload.get('description')
    mount_point = payload.get('mount_point')
    auto_create = payload.get('auto_create', False)
    # Validate location type
    valid_types = ['primary', 'backup', 'cache', 'temp']
    if location_type not in valid_types:
        raise ValidationError(f"location_type must be one of: {', '.join(valid_types)}")
    # Validate path
    if not path or not isinstance(path, str):
        raise ValidationError("path must be a valid string")
    # Normalize to an absolute path so duplicate detection is reliable.
    path = os.path.abspath(path)
    # Reject paths already registered as storage locations.
    if storage_manager.get_location_by_path(path):
        raise ValidationError("Storage location with this path already exists")
    # Ensure the directory exists (optionally creating it on request).
    if not os.path.exists(path):
        if not auto_create:
            raise ValidationError("Directory does not exist. Set auto_create=true to create it.")
        try:
            os.makedirs(path, exist_ok=True)
        except Exception as e:
            raise ValidationError(f"Failed to create directory: {str(e)}")
    # The path must be a writable directory.
    if not os.path.isdir(path):
        raise ValidationError("Path must be a directory")
    if not os.access(path, os.W_OK):
        raise ValidationError("Directory is not writable")
    try:
        location_id = storage_manager.add_storage_location(
            path=path,
            location_type=location_type,
            anime_id=anime_id,
            description=description,
            mount_point=mount_point
        )
        # Re-read the persisted record to echo it back to the caller.
        location = storage_manager.get_location_by_id(location_id)
        location_data = {
            field: getattr(location, field)
            for field in (
                'location_id', 'path', 'location_type', 'anime_id',
                'description', 'mount_point', 'status', 'is_active',
            )
        }
        location_data['created_at'] = location.created_at.isoformat()
        return create_success_response(
            data=location_data,
            message="Storage location added successfully",
            status_code=201
        )
    except Exception as e:
        raise APIException(f"Failed to add storage location: {str(e)}", 500)
@storage_bp.route('/locations/<int:location_id>', methods=['GET'])
@handle_api_errors
@validate_id_parameter('location_id')
@optional_auth
def get_storage_location(location_id: int) -> Dict[str, Any]:
    """
    Get detailed information about a specific storage location.
    Args:
        location_id: Unique identifier for the storage location
    Returns:
        Detailed storage location information
    Raises:
        NotFoundError: If the location does not exist.
        APIException: When the storage backend is unavailable or stats fail.
    """
    if not storage_manager:
        raise APIException("Storage manager not available", 503)
    location = storage_manager.get_location_by_id(location_id)
    if not location:
        raise NotFoundError("Storage location not found")
    try:
        # Fetch the live usage snapshot for this location.
        stats = storage_manager.get_location_stats(location_id)
        # Static record fields copied straight off the location object.
        details = {
            field: getattr(location, field)
            for field in (
                'location_id', 'path', 'location_type', 'anime_id',
                'description', 'mount_point', 'status', 'is_active',
            )
        }
        details['created_at'] = location.created_at.isoformat()
        details['last_checked'] = location.last_checked.isoformat() if location.last_checked else None
        # Live usage numbers come from the stats snapshot.
        details.update({
            'free_space_gb': round(stats.get('free_bytes', 0) / (1024**3), 2),
            'total_space_gb': round(stats.get('total_bytes', 0) / (1024**3), 2),
            'used_space_gb': round(stats.get('used_bytes', 0) / (1024**3), 2),
            'usage_percent': stats.get('usage_percentage', 0),
            'file_count': stats.get('file_count', 0),
            'directory_count': stats.get('directory_count', 0),
            'largest_file_mb': round(stats.get('largest_file_bytes', 0) / (1024**2), 2),
            'filesystem': stats.get('filesystem'),
            'mount_options': stats.get('mount_options'),
            'health_status': stats.get('health_status', 'unknown')
        })
        return create_success_response(details)
    except Exception as e:
        raise APIException(f"Failed to get storage location: {str(e)}", 500)
@storage_bp.route('/locations/<int:location_id>', methods=['PUT'])
@handle_api_errors
@validate_id_parameter('location_id')
@validate_json_input(
    optional_fields=['description', 'location_type', 'is_active', 'mount_point'],
    field_types={
        'description': str,
        'location_type': str,
        'is_active': bool,
        'mount_point': str
    }
)
@require_auth
def update_storage_location(location_id: int) -> Dict[str, Any]:
    """
    Update a storage location.
    Args:
        location_id: Unique identifier for the storage location
    Optional Fields:
        - description: Updated description
        - location_type: Updated location type
        - is_active: Active status
        - mount_point: Mount point information
    Returns:
        Updated storage location information
    Raises:
        NotFoundError: If the location does not exist.
        ValidationError: If location_type is invalid.
        APIException: When the storage backend is unavailable or update fails.
    """
    if not storage_manager:
        raise APIException("Storage manager not available", 503)
    data = request.get_json()
    # Check if location exists
    location = storage_manager.get_location_by_id(location_id)
    if not location:
        raise NotFoundError("Storage location not found")
    # Validate location type if provided
    if 'location_type' in data:
        valid_types = ['primary', 'backup', 'cache', 'temp']
        if data['location_type'] not in valid_types:
            raise ValidationError(f"location_type must be one of: {', '.join(valid_types)}")
    try:
        # Update location
        success = storage_manager.update_location(location_id, data)
        if not success:
            raise APIException("Failed to update storage location", 500)
        # Get updated location
        updated_location = storage_manager.get_location_by_id(location_id)
        location_data = {
            'location_id': updated_location.location_id,
            'path': updated_location.path,
            'location_type': updated_location.location_type,
            'anime_id': updated_location.anime_id,
            'description': updated_location.description,
            'mount_point': updated_location.mount_point,
            'status': updated_location.status,
            'is_active': updated_location.is_active,
            'updated_at': datetime.utcnow().isoformat()
        }
        return create_success_response(
            data=location_data,
            message="Storage location updated successfully"
        )
    except APIException:
        # BUGFIX: don't re-wrap our own APIException; the generic handler
        # below was burying its message inside a second 500 error.
        raise
    except Exception as e:
        raise APIException(f"Failed to update storage location: {str(e)}", 500)
@storage_bp.route('/locations/<int:location_id>', methods=['DELETE'])
@handle_api_errors
@validate_id_parameter('location_id')
@require_auth
def delete_storage_location(location_id: int) -> Dict[str, Any]:
    """
    Delete a storage location.
    Args:
        location_id: Unique identifier for the storage location
    Query Parameters:
        - force: Force deletion even if location contains files
        - delete_files: Also delete files in the location
    Returns:
        Deletion confirmation
    Raises:
        NotFoundError: If the location does not exist.
        ValidationError: If the location still contains files and force is not set.
        APIException: When the storage backend is unavailable or deletion fails.
    """
    if not storage_manager:
        raise APIException("Storage manager not available", 503)
    # Check if location exists
    location = storage_manager.get_location_by_id(location_id)
    if not location:
        raise NotFoundError("Storage location not found")
    # Boolean query flags arrive as strings; only the literal "true" enables them.
    force = request.args.get('force', 'false').lower() == 'true'
    delete_files = request.args.get('delete_files', 'false').lower() == 'true'
    try:
        # Check if location has files (unless force is used)
        if not force:
            stats = storage_manager.get_location_stats(location_id)
            if stats.get('file_count', 0) > 0:
                raise ValidationError(
                    f"Storage location contains {stats['file_count']} files. "
                    "Use force=true to delete anyway."
                )
        # Delete location
        success = storage_manager.delete_location(location_id, delete_files=delete_files)
        if not success:
            raise APIException("Failed to delete storage location", 500)
        message = "Storage location deleted successfully"
        if delete_files:
            message += " (including all files)"
        return create_success_response(message=message)
    except (ValidationError, APIException):
        # BUGFIX: let domain errors propagate unchanged; the generic handler
        # below was converting the 400 "contains files" validation error
        # into an opaque 500 response.
        raise
    except Exception as e:
        raise APIException(f"Failed to delete storage location: {str(e)}", 500)
@storage_bp.route('/locations/<int:location_id>/refresh', methods=['POST'])
@handle_api_errors
@validate_id_parameter('location_id')
@require_auth
def refresh_storage_location(location_id: int) -> Dict[str, Any]:
    """
    Refresh storage statistics for a location.
    Args:
        location_id: Unique identifier for the storage location
    Returns:
        Updated storage statistics
    Raises:
        NotFoundError: If the location does not exist.
        APIException: When the storage backend is unavailable or refresh fails.
    """
    if not storage_manager:
        raise APIException("Storage manager not available", 503)
    # The location must exist before we try to refresh its statistics.
    if not storage_manager.get_location_by_id(location_id):
        raise NotFoundError("Storage location not found")
    try:
        # Recompute and persist fresh statistics for this location.
        stats = storage_manager.update_location_stats(location_id)

        def to_gb(byte_count):
            # Convert raw bytes to GB, rounded to 2 decimals.
            return round(byte_count / (1024**3), 2)

        return create_success_response(
            data={
                'location_id': location_id,
                'free_space_gb': to_gb(stats.get('free_bytes', 0)),
                'total_space_gb': to_gb(stats.get('total_bytes', 0)),
                'used_space_gb': to_gb(stats.get('used_bytes', 0)),
                'usage_percent': stats.get('usage_percentage', 0),
                'file_count': stats.get('file_count', 0),
                'directory_count': stats.get('directory_count', 0),
                'last_updated': datetime.utcnow().isoformat()
            },
            message="Storage statistics updated successfully"
        )
    except Exception as e:
        raise APIException(f"Failed to refresh storage location: {str(e)}", 500)
@storage_bp.route('/cleanup', methods=['POST'])
@handle_api_errors
@validate_json_input(
    optional_fields=['location_type', 'target_usage_percent', 'cleanup_temp', 'cleanup_cache', 'dry_run'],
    field_types={
        'location_type': str,
        'target_usage_percent': float,
        'cleanup_temp': bool,
        'cleanup_cache': bool,
        'dry_run': bool
    }
)
@require_auth
def cleanup_storage() -> Dict[str, Any]:
    """
    Perform storage cleanup operations.
    Optional Fields:
        - location_type: Type of locations to clean (temp, cache, backup)
        - target_usage_percent: Target usage percentage after cleanup
        - cleanup_temp: Clean temporary files
        - cleanup_cache: Clean cache files
        - dry_run: Preview what would be cleaned without actually doing it
    Returns:
        Cleanup results
    Raises:
        ValidationError: On invalid location_type or target_usage_percent.
        APIException: When the storage backend is unavailable or cleanup fails.
    """
    if not storage_manager:
        raise APIException("Storage manager not available", 503)
    # Resolve request options against their defaults (body may be absent).
    options = request.get_json() or {}
    location_type = options.get('location_type', 'temp')
    target_usage_percent = options.get('target_usage_percent', 80.0)
    cleanup_temp = options.get('cleanup_temp', True)
    cleanup_cache = options.get('cleanup_cache', False)
    dry_run = options.get('dry_run', False)
    # Validate parameters
    valid_types = ['temp', 'cache', 'backup']
    if location_type not in valid_types:
        raise ValidationError(f"location_type must be one of: {', '.join(valid_types)}")
    if not 0 <= target_usage_percent <= 100:
        raise ValidationError("target_usage_percent must be between 0 and 100")
    try:
        cleanup_result = storage_manager.cleanup_storage(
            location_type=location_type,
            target_usage_percent=target_usage_percent,
            cleanup_temp=cleanup_temp,
            cleanup_cache=cleanup_cache,
            dry_run=dry_run
        )
        response_data = {
            'dry_run': dry_run,
            'location_type': location_type,
            'files_deleted': cleanup_result.get('files_deleted', 0),
            'directories_deleted': cleanup_result.get('directories_deleted', 0),
            'space_freed_gb': round(cleanup_result.get('space_freed_bytes', 0) / (1024**3), 2),
            'cleanup_summary': cleanup_result.get('summary', {}),
            'target_usage_percent': target_usage_percent,
            'final_usage_percent': cleanup_result.get('final_usage_percent')
        }
        return create_success_response(
            data=response_data,
            message=f"Storage cleanup {'simulated' if dry_run else 'completed'}"
        )
    except Exception as e:
        raise APIException(f"Failed to cleanup storage: {str(e)}", 500)
@storage_bp.route('/health', methods=['GET'])
@handle_api_errors
@optional_auth
def get_storage_health() -> Dict[str, Any]:
    """
    Get storage health status across all locations.
    Returns:
        Storage health information aggregated over every registered location.
    Raises:
        APIException: When the storage backend is unavailable or the check fails.
    """
    if not storage_manager:
        raise APIException("Storage manager not available", 503)
    try:
        health_status = storage_manager.get_storage_health()
        # Copy the known health fields across, substituting a default for
        # anything the backend did not report.
        defaults = (
            ('overall_status', 'unknown'),
            ('total_locations', 0),
            ('healthy_locations', 0),
            ('warning_locations', 0),
            ('error_locations', 0),
            ('average_usage_percent', 0),
            ('locations_near_full', []),
            ('locations_with_errors', []),
            ('recommendations', []),
        )
        data = {key: health_status.get(key, fallback) for key, fallback in defaults}
        data['last_check'] = health_status.get('last_check', datetime.utcnow()).isoformat()
        return create_success_response(data=data)
    except Exception as e:
        raise APIException(f"Failed to get storage health: {str(e)}", 500)

View File

@@ -1,352 +0,0 @@
"""
Base controller with common functionality for all controllers.
This module provides a base controller class that eliminates common duplications
across different controller modules by providing standardized error handling,
validation, and response formatting.
"""
from abc import ABC
from typing import Any, Dict, Optional, List, Union, Tuple, Callable
try:
    from flask import jsonify, request
    from werkzeug.exceptions import HTTPException
except ImportError:
    # Fallback for environments without Flask
    # NOTE(review): this stub returns a JSON *string*, not a Flask Response
    # object, so "(jsonify(...), status)" tuples behave differently when
    # Flask is absent — confirm callers tolerate that in fallback mode.
    def jsonify(data):
        import json
        return json.dumps(data)
    # Minimal stand-in mirroring the (status_code, detail) constructor shape
    # used by handle_error() below.
    class HTTPException(Exception):
        def __init__(self, status_code, detail):
            self.status_code = status_code
            self.detail = detail
            super().__init__(detail)
    # Request stub exposing only the attributes this module reads
    # (is_json, get_json, headers, args, form), all empty/False so the
    # validation decorators short-circuit harmlessly without Flask.
    class request:
        is_json = False
        @staticmethod
        def get_json():
            return {}
        headers = {}
        args = {}
        form = {}
try:
    from pydantic import BaseModel
except ImportError:
    # Fallback BaseModel: bare stand-in so type annotations still resolve
    # when pydantic is not installed.
    class BaseModel:
        pass
import logging
import functools
class BaseController(ABC):
    """Base controller with common functionality for all controllers."""

    def __init__(self):
        # Each concrete controller logs under its own class name.
        self.logger = logging.getLogger(self.__class__.__name__)

    def handle_error(self, error: Exception, status_code: int = 500) -> HTTPException:
        """
        Standardized error handling across all controllers.
        Args:
            error: The exception that occurred
            status_code: HTTP status code to return
        Returns:
            HTTPException with standardized format
        """
        self.logger.error(f"Controller error: {str(error)}", exc_info=True)
        return HTTPException(status_code, str(error))

    def validate_request(self, data: BaseModel) -> bool:
        """
        Common validation logic for request data.
        Args:
            data: Pydantic model to validate
        Returns:
            True if validation passes
        Raises:
            ValidationError if validation fails
        """
        try:
            # Pydantic models validate on instantiation, so a model that
            # reached this method is already known to be valid.
            return True
        except Exception as e:
            self.logger.warning(f"Validation failed: {str(e)}")
            raise

    def format_response(self, data: Any, message: str = "Success") -> Dict[str, Any]:
        """
        Standardized response format for successful operations.
        Args:
            data: Data to include in response
            message: Success message
        Returns:
            Standardized success response dictionary
        """
        return dict(status="success", message=message, data=data)

    def format_error_response(self, message: str, status_code: int = 400, details: Any = None) -> Tuple[Dict[str, Any], int]:
        """
        Standardized error response format.
        Args:
            message: Error message
            status_code: HTTP status code
            details: Additional error details
        Returns:
            Tuple of (error_response_dict, status_code)
        """
        payload: Dict[str, Any] = {
            "status": "error",
            "message": message,
            "error_code": status_code,
        }
        if details:
            payload["details"] = details
        return payload, status_code

    def create_success_response(
        self,
        data: Any = None,
        message: str = "Operation successful",
        status_code: int = 200,
        pagination: Optional[Dict[str, Any]] = None,
        meta: Optional[Dict[str, Any]] = None
    ) -> Tuple[Dict[str, Any], int]:
        """
        Create a standardized success response.
        Args:
            data: Data to include in response
            message: Success message
            status_code: HTTP status code
            pagination: Pagination information
            meta: Additional metadata
        Returns:
            Tuple of (response_dict, status_code)
        """
        body: Dict[str, Any] = {'status': 'success', 'message': message}
        # data may legitimately be 0/[]/"" — only None means "omit".
        if data is not None:
            body['data'] = data
        for key, value in (('pagination', pagination), ('meta', meta)):
            if value:
                body[key] = value
        return body, status_code

    def create_error_response(
        self,
        message: str,
        status_code: int = 400,
        details: Any = None,
        error_code: Optional[str] = None
    ) -> Tuple[Dict[str, Any], int]:
        """
        Create a standardized error response.
        Args:
            message: Error message
            status_code: HTTP status code
            details: Additional error details
            error_code: Specific error code
        Returns:
            Tuple of (response_dict, status_code)
        """
        body: Dict[str, Any] = {
            'status': 'error',
            'message': message,
            # Fall back to the numeric status when no symbolic code is given.
            'error_code': error_code or status_code,
        }
        if details:
            body['details'] = details
        return body, status_code
def handle_api_errors(f: Callable) -> Callable:
    """
    Decorator for standardized API error handling.
    This decorator should be used on all API endpoints to ensure
    consistent error handling and response formatting.
    """
    def _error_payload(message, details, code):
        # Shared shape for every error response this decorator produces.
        return jsonify({
            'status': 'error',
            'message': message,
            'details': details,
            'error_code': code
        }), code

    @functools.wraps(f)
    def decorated_function(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        except HTTPException:
            # Already properly formatted — let it propagate as-is.
            raise
        except ValueError as e:
            return _error_payload('Invalid input data', str(e), 400)
        except PermissionError as e:
            return _error_payload('Access denied', str(e), 403)
        except FileNotFoundError as e:
            return _error_payload('Resource not found', str(e), 404)
        except Exception as e:
            logging.getLogger(__name__).error(f"Unhandled error in {f.__name__}: {str(e)}", exc_info=True)
            # Only expose the raw message when debug logging is enabled.
            details = str(e) if logging.getLogger().isEnabledFor(logging.DEBUG) else 'An unexpected error occurred'
            return _error_payload('Internal server error', details, 500)
    return decorated_function
def require_auth(f: Callable) -> Callable:
    """
    Decorator to require authentication for API endpoints.
    This decorator should be applied to endpoints that require
    user authentication.
    NOTE: currently a pass-through placeholder — hook your real
    authentication check in here, e.g.::
        auth_header = request.headers.get('Authorization')
        if not auth_header or not validate_auth_token(auth_header):
            return jsonify({'status': 'error',
                            'message': 'Authentication required',
                            'error_code': 401}), 401
    """
    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        # Placeholder: no check performed yet; delegates straight through.
        return f(*args, **kwargs)
    return wrapper
def optional_auth(f: Callable) -> Callable:
    """
    Decorator for optional authentication.
    This decorator allows endpoints to work with or without authentication,
    but provides additional functionality when authenticated.
    NOTE: currently a pass-through placeholder — when an auth system is
    wired in, it should set user context if authenticated without ever
    rejecting the request.
    """
    @functools.wraps(f)
    def wrapper(*args, **kwargs):
        # Placeholder: no user context is established yet.
        return f(*args, **kwargs)
    return wrapper
def validate_json_input(
    required_fields: Optional[List[str]] = None,
    optional_fields: Optional[List[str]] = None,
    **field_validators
) -> Callable:
    """
    Decorator for JSON input validation.
    Args:
        required_fields: List of required field names
        optional_fields: List of optional field names (documentation only;
            unknown fields are not rejected)
        **field_validators: Field-specific validation functions; each maps a
            field name to a callable returning True when the value is valid
    Returns:
        Decorator function
    """
    def decorator(f: Callable) -> Callable:
        @functools.wraps(f)
        def decorated_function(*args, **kwargs):
            if not request.is_json:
                return jsonify({
                    'status': 'error',
                    'message': 'Request must contain JSON data',
                    'error_code': 400
                }), 400
            data = request.get_json()
            # BUGFIX: an empty JSON object ({}) is valid input and was being
            # rejected by the previous truthiness check; only reject payloads
            # that failed to parse (None) or are not JSON objects, since the
            # field checks below assume a mapping.
            if data is None or not isinstance(data, dict):
                return jsonify({
                    'status': 'error',
                    'message': 'Invalid JSON data',
                    'error_code': 400
                }), 400
            # Check required fields
            if required_fields:
                missing_fields = [field for field in required_fields if field not in data]
                if missing_fields:
                    return jsonify({
                        'status': 'error',
                        'message': f'Missing required fields: {", ".join(missing_fields)}',
                        'error_code': 400
                    }), 400
            # Apply field validators to the fields that are present.
            for field, validator in field_validators.items():
                if field in data:
                    try:
                        if not validator(data[field]):
                            return jsonify({
                                'status': 'error',
                                'message': f'Invalid value for field: {field}',
                                'error_code': 400
                            }), 400
                    except Exception as e:
                        # A validator blowing up is reported as a 400, not a 500.
                        return jsonify({
                            'status': 'error',
                            'message': f'Validation error for field {field}: {str(e)}',
                            'error_code': 400
                        }), 400
            return f(*args, **kwargs)
        return decorated_function
    return decorator

View File

@@ -1 +0,0 @@
"""Shared utilities and helpers for web controllers."""

View File

@@ -1,150 +0,0 @@
"""
Authentication decorators and utilities for API endpoints.
This module provides authentication decorators that can be used across
all controller modules for consistent authentication handling.
"""
import logging
from functools import wraps
from typing import Optional, Dict, Any, Callable
from flask import session, request, jsonify, redirect, url_for
# Import session manager from auth controller
from ..auth_controller import session_manager
def require_auth(f: Callable) -> Callable:
    """
    Decorator to require authentication for Flask routes.
    Args:
        f: The function to decorate
    Returns:
        Decorated function that requires authentication
    Usage:
        @require_auth
        def protected_endpoint():
            return "This requires authentication"
    """
    @wraps(f)
    def decorated_function(*args, **kwargs):
        if session_manager.is_authenticated():
            return f(*args, **kwargs)
        # JSON/XHR/fetch callers and anything under /api/ get a 401 JSON
        # body; browser navigation is redirected to the login page instead.
        wants_json = (
            request.is_json
            or request.headers.get('X-Requested-With') == 'XMLHttpRequest'
            or request.headers.get('Accept', '').startswith('application/json')
            or '/api/' in request.path
        )
        if wants_json:
            return jsonify({
                'status': 'error',
                'message': 'Authentication required',
                'code': 'AUTH_REQUIRED'
            }), 401
        return redirect(url_for('auth.login'))
    return decorated_function
def optional_auth(f: Callable) -> Callable:
    """
    Decorator that checks auth but doesn't require it.
    Authentication is only enforced when a master password has been
    configured in the system; otherwise the endpoint is served openly.
    Args:
        f: The function to decorate
    Returns:
        Decorated function that optionally requires authentication
    Usage:
        @optional_auth
        def maybe_protected_endpoint():
            return "This may require authentication"
    """
    @wraps(f)
    def decorated_function(*args, **kwargs):
        # Import config here to avoid circular imports
        from config import config
        if not config.has_master_password():
            # No master password configured -> the endpoint is open.
            return f(*args, **kwargs)
        if session_manager.is_authenticated():
            return f(*args, **kwargs)
        # Unauthenticated: JSON/XHR/fetch and /api/ callers get a 401 JSON
        # body; browser navigation is redirected to the login page.
        wants_json = (
            request.is_json
            or request.headers.get('X-Requested-With') == 'XMLHttpRequest'
            or request.headers.get('Accept', '').startswith('application/json')
            or '/api/' in request.path
        )
        if wants_json:
            return jsonify({
                'status': 'error',
                'message': 'Authentication required',
                'code': 'AUTH_REQUIRED'
            }), 401
        return redirect(url_for('auth.login'))
    return decorated_function
def get_current_user() -> Optional[Dict[str, Any]]:
    """
    Get current authenticated user information.
    Returns:
        Dictionary containing user information if authenticated, None otherwise
    """
    if not session_manager.is_authenticated():
        return None
    return session_manager.get_session_info()
def get_client_ip() -> str:
    """
    Get client IP address with proxy support.
    Returns:
        Client IP address as string
    """
    # Behind a reverse proxy the real client is in X-Forwarded-For; the
    # header may hold a comma-separated chain, first entry is the client.
    forwarded_chain = request.headers.get('X-Forwarded-For')
    if forwarded_chain:
        return forwarded_chain.split(',')[0].strip()
    # Some proxies set a single-value X-Real-IP instead.
    return request.headers.get('X-Real-IP') or request.remote_addr or 'unknown'
def is_authenticated() -> bool:
    """
    Check if current request is from an authenticated user.
    Returns:
        True if authenticated, False otherwise
    """
    # Thin module-level convenience wrapper over the shared session manager.
    return session_manager.is_authenticated()
def logout_current_user() -> bool:
    """
    Logout the current user.
    Returns:
        True if logout was successful, False otherwise
    """
    # Delegates directly to the shared session manager.
    return session_manager.logout()

View File

@@ -1,286 +0,0 @@
"""
Error handling decorators and utilities for API endpoints.
This module provides standardized error handling decorators and utilities
that can be used across all controller modules for consistent error responses.
"""
import logging
import traceback
from functools import wraps
from typing import Dict, Any, Callable, Tuple, Optional, Union
from flask import jsonify, request
logger = logging.getLogger(__name__)
def handle_api_errors(f: Callable) -> Callable:
    """
    Decorator to handle API errors consistently across all endpoints.
    This decorator catches exceptions and returns standardized error responses
    with appropriate HTTP status codes.
    Args:
        f: The function to decorate
    Returns:
        Decorated function with error handling
    Usage:
        @handle_api_errors
        def my_endpoint():
            # This will automatically handle any exceptions
            return {"data": "success"}
    """
    @wraps(f)
    def decorated_function(*args, **kwargs):
        try:
            outcome = f(*args, **kwargs)
            # Normalize whatever the endpoint returned:
            # 1) an existing Response object passes through untouched;
            if hasattr(outcome, 'status_code'):
                return outcome
            # 2) a (data, status_code) tuple gets a status field stamped in;
            if isinstance(outcome, tuple) and len(outcome) == 2:
                data, status_code = outcome
                if isinstance(data, dict) and 'status' not in data:
                    data['status'] = 'success' if 200 <= status_code < 300 else 'error'
                return jsonify(data), status_code
            # 3) a bare dict is marked successful;
            if isinstance(outcome, dict):
                outcome.setdefault('status', 'success')
                return jsonify(outcome)
            # 4) anything else is wrapped as the data of a success envelope.
            return jsonify({
                'status': 'success',
                'data': outcome
            })
        except ValueError as e:
            logger.warning(f"Validation error in {f.__name__}: {str(e)}")
            return create_error_response(
                message=str(e),
                status_code=400,
                error_code='VALIDATION_ERROR'
            )
        except PermissionError as e:
            logger.warning(f"Permission error in {f.__name__}: {str(e)}")
            return create_error_response(
                message="Access denied",
                status_code=403,
                error_code='ACCESS_DENIED'
            )
        except FileNotFoundError as e:
            logger.warning(f"File not found in {f.__name__}: {str(e)}")
            return create_error_response(
                message="Resource not found",
                status_code=404,
                error_code='NOT_FOUND'
            )
        except Exception as e:
            logger.error(f"Unexpected error in {f.__name__}: {str(e)}")
            logger.error(f"Traceback: {traceback.format_exc()}")
            # Don't expose internal errors in production
            return create_error_response(
                message="Internal server error",
                status_code=500,
                error_code='INTERNAL_ERROR'
            )
    return decorated_function
def handle_database_errors(f: Callable) -> Callable:
    """
    Decorator specifically for database-related operations.
    Args:
        f: The function to decorate
    Returns:
        Decorated function with database error handling
    """
    @wraps(f)
    def wrapper(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        except Exception as e:
            # Every failure is reported uniformly; details stay in the log.
            logger.error(f"Database error in {f.__name__}: {str(e)}")
            return create_error_response(
                message="Database operation failed",
                status_code=500,
                error_code='DATABASE_ERROR'
            )
    return wrapper
def handle_file_operations(f: Callable) -> Callable:
    """
    Decorator for file operation error handling.
    Args:
        f: The function to decorate
    Returns:
        Decorated function with file operation error handling
    """
    @wraps(f)
    def wrapper(*args, **kwargs):
        try:
            return f(*args, **kwargs)
        except OSError as e:
            # FileNotFoundError and PermissionError are OSError subclasses;
            # dispatch on the concrete type for the right status code.
            if isinstance(e, FileNotFoundError):
                logger.warning(f"File not found in {f.__name__}: {str(e)}")
                return create_error_response(
                    message="File not found",
                    status_code=404,
                    error_code='FILE_NOT_FOUND'
                )
            if isinstance(e, PermissionError):
                logger.warning(f"File permission error in {f.__name__}: {str(e)}")
                return create_error_response(
                    message="Permission denied",
                    status_code=403,
                    error_code='PERMISSION_DENIED'
                )
            logger.error(f"File system error in {f.__name__}: {str(e)}")
            return create_error_response(
                message="File system error",
                status_code=500,
                error_code='FILE_SYSTEM_ERROR'
            )
    return wrapper
def create_error_response(
    message: str,
    status_code: int = 400,
    error_code: Optional[str] = None,
    errors: Optional[list] = None,
    data: Optional[Dict[str, Any]] = None
) -> Tuple[Dict[str, Any], int]:
    """Build the standard error envelope.

    Args:
        message: Error message to display.
        status_code: HTTP status code.
        error_code: Optional machine-readable error code.
        errors: Optional list of detailed errors.
        data: Optional additional data.
    Returns:
        Tuple of (response_dict, status_code).
    """
    payload: Dict[str, Any] = {'status': 'error', 'message': message}
    # Optional sections are included only when truthy, so empty lists/dicts
    # are omitted just like None.
    for key, value in (('error_code', error_code), ('errors', errors), ('data', data)):
        if value:
            payload[key] = value
    return payload, status_code
def create_success_response(
    data: Any = None,
    message: str = "Operation successful",
    status_code: int = 200
) -> Tuple[Dict[str, Any], int]:
    """Build the standard success envelope.

    Args:
        data: Data to include; any non-None value (including 0, [], '') is kept.
        message: Success message.
        status_code: HTTP status code.
    Returns:
        Tuple of (response_dict, status_code).
    """
    extra = {} if data is None else {'data': data}
    return {'status': 'success', 'message': message, **extra}, status_code
def log_request_info():
    """Log the current request's method and path at INFO, plus the JSON body
    and query args at DEBUG, for troubleshooting."""
    logger.info(f"Request: {request.method} {request.path}")
    # Body and query details go to DEBUG so routine INFO logs stay terse.
    if request.is_json:
        logger.debug(f"Request JSON: {request.get_json()}")
    if request.args:
        logger.debug(f"Request args: {dict(request.args)}")
class APIException(Exception):
    """Base exception carrying everything needed to build an HTTP error
    response: a message, an HTTP status code, an optional machine-readable
    error code, and an optional list of detailed errors."""

    def __init__(
        self,
        message: str,
        status_code: int = 400,
        error_code: Optional[str] = None,
        errors: Optional[list] = None
    ):
        super().__init__(message)
        self.message = message
        self.status_code = status_code
        self.error_code = error_code
        self.errors = errors
class ValidationError(APIException):
    """Raised when request payload validation fails (HTTP 400,
    error code VALIDATION_ERROR)."""

    def __init__(self, message: str, errors: Optional[list] = None):
        super().__init__(message=message, status_code=400,
                         error_code='VALIDATION_ERROR', errors=errors)
class NotFoundError(APIException):
    """Raised when a requested resource does not exist (HTTP 404,
    error code NOT_FOUND)."""

    def __init__(self, message: str = "Resource not found"):
        super().__init__(message=message, status_code=404,
                         error_code='NOT_FOUND')
class PermissionError(APIException):
    """Exception for permission errors (HTTP 403, error code ACCESS_DENIED).

    WARNING(review): this class shadows the builtin ``PermissionError``.
    Once this module is loaded, ``except PermissionError`` clauses in this
    module (e.g. in ``handle_file_operations``) resolve to this class, so
    OS-level permission failures from file operations are no longer caught
    by that clause. Consider renaming (e.g. ``AccessDeniedError``) — TODO
    confirm impact on callers before changing.
    """
    def __init__(self, message: str = "Access denied"):
        super().__init__(
            message=message,
            status_code=403,
            error_code='ACCESS_DENIED'
        )

View File

@@ -1,406 +0,0 @@
"""
Response formatting utilities for API endpoints.
This module provides utilities for creating consistent response formats
across all controller modules.
"""
from typing import Any, Dict, List, Optional, Union, Tuple
from flask import jsonify, url_for, request
import math
def create_success_response(
    data: Any = None,
    message: str = "Operation successful",
    status_code: int = 200,
    pagination: Optional[Dict[str, Any]] = None,
    meta: Optional[Dict[str, Any]] = None
) -> Tuple[Dict[str, Any], int]:
    """Build the standard success envelope with optional data, pagination
    and meta sections.

    Args:
        data: Included for any non-None value (0, [] and '' are legal data).
        message: Success message.
        status_code: HTTP status code.
        pagination: Included only when truthy.
        meta: Included only when truthy.
    Returns:
        Tuple of (response_dict, status_code).
    """
    body: Dict[str, Any] = {'status': 'success', 'message': message}
    if data is not None:
        body['data'] = data
    for section, value in (('pagination', pagination), ('meta', meta)):
        if value:
            body[section] = value
    return body, status_code
def create_error_response(
    message: str,
    status_code: int = 400,
    error_code: Optional[str] = None,
    errors: Optional[List[str]] = None,
    data: Optional[Dict[str, Any]] = None
) -> Tuple[Dict[str, Any], int]:
    """Build the standard error envelope.

    Args:
        message: Error message to display.
        status_code: HTTP status code.
        error_code: Optional machine-readable error code.
        errors: Optional list of detailed error strings.
        data: Optional additional data.
    Returns:
        Tuple of (response_dict, status_code).
    """
    payload: Dict[str, Any] = {'status': 'error', 'message': message}
    # Optional sections are added only when truthy; empty containers are
    # omitted just like None.
    for key, value in (('error_code', error_code), ('errors', errors), ('data', data)):
        if value:
            payload[key] = value
    return payload, status_code
def create_paginated_response(
    data: List[Any],
    page: int,
    per_page: int,
    total: int,
    endpoint: Optional[str] = None,
    **kwargs
) -> Dict[str, Any]:
    """
    Create a paginated response with navigation links.

    Args:
        data: List of data items for the current page
        page: Current page number (1-based)
        per_page: Items per page
        total: Total number of items
        endpoint: Flask endpoint name for pagination links; when None,
            no links are generated and no request context is needed
        **kwargs: Additional parameters forwarded to url_for for each link
    Returns:
        Dictionary containing 'status', 'data' and 'pagination'
    """
    # Guard against division by zero when per_page is 0 or negative.
    total_pages = math.ceil(total / per_page) if per_page > 0 else 1
    pagination_info = {
        'page': page,
        'per_page': per_page,
        'total': total,
        'total_pages': total_pages,
        'has_next': page < total_pages,
        'has_prev': page > 1
    }
    # Add navigation links if endpoint is provided.
    # Fix: removed the unused `base_url = request.url_root...` local — it was
    # dead code that needlessly touched the request context; links are built
    # entirely with url_for.
    if endpoint:
        pagination_info['current_url'] = url_for(endpoint, page=page, per_page=per_page, **kwargs)
        pagination_info['first_url'] = url_for(endpoint, page=1, per_page=per_page, **kwargs)
        pagination_info['last_url'] = url_for(endpoint, page=total_pages, per_page=per_page, **kwargs)
        if pagination_info['has_prev']:
            pagination_info['prev_url'] = url_for(endpoint, page=page-1, per_page=per_page, **kwargs)
        if pagination_info['has_next']:
            pagination_info['next_url'] = url_for(endpoint, page=page+1, per_page=per_page, **kwargs)
    return {
        'status': 'success',
        'data': data,
        'pagination': pagination_info
    }
def paginate_query_results(
    items: List[Any],
    page: Optional[int] = None,
    per_page: Optional[int] = None,
    default_per_page: int = 50,
    max_per_page: int = 1000
) -> Tuple[List[Any], int, int, int]:
    """Slice *items* into a single page.

    When page/per_page are None they are read from the Flask request query
    string. Parsed values are clamped to page >= 1 and
    1 <= per_page <= max_per_page.

    Args:
        items: Full list of items to paginate.
        page: Page number; falls back to the 'page' query param.
        per_page: Page size; falls back to the 'per_page' query param.
        default_per_page: Default page size.
        max_per_page: Upper bound on page size.
    Returns:
        Tuple of (paginated_items, page, per_page, total).
    """
    total = len(items)
    # Fall back to query-string parameters only when not supplied explicitly.
    if page is None:
        page = int(request.args.get('page', 1))
    if per_page is None:
        per_page = int(request.args.get('per_page', default_per_page))
    page = max(1, page)
    per_page = min(max(1, per_page), max_per_page)
    start = (page - 1) * per_page
    return items[start:start + per_page], page, per_page, total
def format_anime_response(anime_data: Dict[str, Any]) -> Dict[str, Any]:
    """Project raw anime DB data onto the public API shape, dropping keys
    whose value is None. 'status' defaults to 'planned' when absent.

    Args:
        anime_data: Raw anime data from the database.
    Returns:
        Formatted anime data.
    """
    defaults = {'status': 'planned'}
    keys = ('id', 'name', 'url', 'description', 'episodes',
            'status', 'created_at', 'updated_at')
    shaped = {k: anime_data.get(k, defaults.get(k)) for k in keys}
    return {k: v for k, v in shaped.items() if v is not None}
def format_episode_response(episode_data: Dict[str, Any]) -> Dict[str, Any]:
    """Project raw episode DB data onto the public API shape, dropping keys
    whose value is None. 'status' defaults to 'available' when absent.

    Args:
        episode_data: Raw episode data from the database.
    Returns:
        Formatted episode data.
    """
    defaults = {'status': 'available'}
    keys = ('id', 'anime_id', 'episode_number', 'title', 'url', 'status',
            'download_path', 'file_size', 'created_at', 'updated_at')
    shaped = {k: episode_data.get(k, defaults.get(k)) for k in keys}
    return {k: v for k, v in shaped.items() if v is not None}
def format_download_response(download_data: Dict[str, Any]) -> Dict[str, Any]:
    """Project raw download data onto the public API shape, dropping keys
    whose value is None. 'status' defaults to 'pending' and 'progress' to 0
    (0 survives the None-filter).

    Args:
        download_data: Raw download data.
    Returns:
        Formatted download data.
    """
    defaults = {'status': 'pending', 'progress': 0}
    keys = ('id', 'anime_id', 'episode_id', 'status', 'progress', 'speed',
            'eta', 'error_message', 'started_at', 'completed_at')
    shaped = {k: download_data.get(k, defaults.get(k)) for k in keys}
    return {k: v for k, v in shaped.items() if v is not None}
def format_bulk_operation_response(operation_data: Dict[str, Any]) -> Dict[str, Any]:
    """Project raw bulk-operation data onto the public API shape, dropping
    keys whose value is None. Count fields default to 0 and 'status' to
    'pending'; zeros survive the None-filter.

    Args:
        operation_data: Raw bulk operation data.
    Returns:
        Formatted bulk operation data.
    """
    defaults = {'status': 'pending', 'total_items': 0, 'completed_items': 0,
                'failed_items': 0, 'progress_percentage': 0}
    keys = ('id', 'type', 'status', 'total_items', 'completed_items',
            'failed_items', 'progress_percentage', 'started_at',
            'completed_at', 'error_message')
    shaped = {k: operation_data.get(k, defaults.get(k)) for k in keys}
    return {k: v for k, v in shaped.items() if v is not None}
def format_health_response(health_data: Dict[str, Any]) -> Dict[str, Any]:
    """Project raw health-check data onto the public API shape, dropping keys
    whose value is None. 'status' defaults to 'unknown' and 'components' to
    an empty dict ({} survives the None-filter).

    Args:
        health_data: Raw health check data.
    Returns:
        Formatted health data.
    """
    defaults: Dict[str, Any] = {'status': 'unknown', 'components': {}}
    keys = ('status', 'uptime', 'version', 'components', 'timestamp')
    shaped = {k: health_data.get(k, defaults.get(k)) for k in keys}
    return {k: v for k, v in shaped.items() if v is not None}
def add_resource_links(data: Dict[str, Any], resource_type: str, resource_id: Any) -> Dict[str, Any]:
    """Attach HATEOAS-style '_links' (self + collection) to *data* in place
    and return it.

    Endpoint names are derived by convention: 'api.get_<type>' and
    'api.list_<type>s'.

    Args:
        data: Resource data to mutate.
        resource_type: Type of resource (anime, episode, etc.).
        resource_id: Resource identifier.
    Returns:
        The same dict with '_links' populated.
    """
    links = data.setdefault('_links', {})
    links['self'] = url_for(f'api.get_{resource_type}', id=resource_id)
    links['collection'] = url_for(f'api.list_{resource_type}s')
    return data
def create_batch_response(
    successful_items: List[Dict[str, Any]],
    failed_items: List[Dict[str, Any]],
    message: Optional[str] = None
) -> Dict[str, Any]:
    """Summarize a batch operation.

    Overall status is 'success' when nothing failed, otherwise
    'partial_success'. Includes per-bucket counts and the items themselves.

    Args:
        successful_items: Items processed successfully.
        failed_items: Items that failed, with their errors.
        message: Optional message; a default summary is generated otherwise.
    Returns:
        Batch operation response dict.
    """
    ok, bad = len(successful_items), len(failed_items)
    total = ok + bad
    return {
        'status': 'partial_success' if bad else 'success',
        'message': message or f"Processed {ok}/{total} items successfully",
        'summary': {'total': total, 'successful': ok, 'failed': bad},
        'data': {'successful': successful_items, 'failed': failed_items},
    }
def extract_pagination_params(
    default_page: int = 1,
    default_per_page: int = 50,
    max_per_page: int = 1000
) -> Tuple[int, int]:
    """Read and clamp 'page'/'per_page' from the request query string.

    Unparseable values silently fall back to the defaults (unclamped);
    parsed values are clamped to page >= 1 and 1 <= per_page <= max_per_page.

    Args:
        default_page: Fallback page number.
        default_per_page: Fallback page size.
        max_per_page: Upper bound on page size.
    Returns:
        Tuple of (page, per_page).
    """
    try:
        page = max(1, int(request.args.get('page', default_page)))
    except (ValueError, TypeError):
        page = default_page
    try:
        per_page = min(max(1, int(request.args.get('per_page', default_per_page))), max_per_page)
    except (ValueError, TypeError):
        per_page = default_per_page
    return page, per_page

View File

@@ -1,446 +0,0 @@
"""
Input validation utilities for API endpoints.
This module provides validation functions and decorators for consistent
input validation across all controller modules.
"""
import re
import os
from typing import Any, Dict, List, Optional, Union, Callable, Tuple
from functools import wraps
from flask import request, jsonify
from .error_handlers import ValidationError, create_error_response
def validate_json_input(required_fields: Optional[List[str]] = None,
                       optional_fields: Optional[List[str]] = None,
                       field_types: Optional[Dict[str, type]] = None) -> Callable:
    """
    Decorator to validate JSON input for API endpoints.

    Validation runs in order: content type -> JSON parse -> non-empty body ->
    required fields present and non-None -> declared field types -> no fields
    outside required+optional. The first failing stage short-circuits with a
    400 error response; the wrapped view runs only when everything passes.

    Args:
        required_fields: List of required field names
        optional_fields: List of optional field names
        field_types: Dictionary mapping field names to expected types
    Returns:
        Decorator function
    Usage:
        @validate_json_input(
            required_fields=['name', 'url'],
            optional_fields=['description'],
            field_types={'name': str, 'url': str, 'episodes': int}
        )
        def create_anime():
            data = request.get_json()
            # data is now validated
    """
    def decorator(f: Callable) -> Callable:
        @wraps(f)
        def decorated_function(*args, **kwargs):
            # Stage 1: the Content-Type header must declare JSON.
            if not request.is_json:
                return create_error_response(
                    message="Request must be JSON",
                    status_code=400,
                    error_code='INVALID_CONTENT_TYPE'
                )
            # Stage 2: the body must actually parse as JSON.
            try:
                data = request.get_json()
            except Exception:
                return create_error_response(
                    message="Invalid JSON format",
                    status_code=400,
                    error_code='INVALID_JSON'
                )
            if data is None:
                return create_error_response(
                    message="Request body cannot be empty",
                    status_code=400,
                    error_code='EMPTY_BODY'
                )
            # Stage 3: required fields must be present and not None.
            if required_fields:
                missing_fields = []
                for field in required_fields:
                    if field not in data or data[field] is None:
                        missing_fields.append(field)
                if missing_fields:
                    return create_error_response(
                        message=f"Missing required fields: {', '.join(missing_fields)}",
                        status_code=400,
                        error_code='MISSING_FIELDS',
                        errors=missing_fields
                    )
            # Stage 4: declared types are enforced only for present, non-None values.
            if field_types:
                type_errors = []
                for field, expected_type in field_types.items():
                    if field in data and data[field] is not None:
                        if not isinstance(data[field], expected_type):
                            type_errors.append(f"{field} must be of type {expected_type.__name__}")
                if type_errors:
                    return create_error_response(
                        message="Type validation failed",
                        status_code=400,
                        error_code='TYPE_ERROR',
                        errors=type_errors
                    )
            # Stage 5: reject fields outside the declared schema; skipped
            # entirely when neither required nor optional fields were declared.
            all_allowed = (required_fields or []) + (optional_fields or [])
            if all_allowed:
                unexpected_fields = [field for field in data.keys() if field not in all_allowed]
                if unexpected_fields:
                    return create_error_response(
                        message=f"Unexpected fields: {', '.join(unexpected_fields)}",
                        status_code=400,
                        error_code='UNEXPECTED_FIELDS',
                        errors=unexpected_fields
                    )
            return f(*args, **kwargs)
        return decorated_function
    return decorator
def validate_query_params(allowed_params: Optional[List[str]] = None,
                         required_params: Optional[List[str]] = None,
                         param_types: Optional[Dict[str, type]] = None) -> Callable:
    """
    Decorator to validate query parameters.

    Checks run in order: required params present -> only allowed params
    given -> declared types parse. The first failing check short-circuits
    with a 400 error response.

    Args:
        allowed_params: List of allowed parameter names
        required_params: List of required parameter names
        param_types: Dictionary mapping parameter names to expected types
            (int, float and bool are checked by attempted conversion, since
            query values always arrive as strings)
    Returns:
        Decorator function
    """
    def decorator(f: Callable) -> Callable:
        @wraps(f)
        def decorated_function(*args, **kwargs):
            # Check required parameters
            if required_params:
                missing_params = []
                for param in required_params:
                    if param not in request.args:
                        missing_params.append(param)
                if missing_params:
                    return create_error_response(
                        message=f"Missing required parameters: {', '.join(missing_params)}",
                        status_code=400,
                        error_code='MISSING_PARAMS'
                    )
            # Check allowed parameters
            if allowed_params:
                unexpected_params = [param for param in request.args.keys() if param not in allowed_params]
                if unexpected_params:
                    return create_error_response(
                        message=f"Unexpected parameters: {', '.join(unexpected_params)}",
                        status_code=400,
                        error_code='UNEXPECTED_PARAMS'
                    )
            # Validate parameter types by attempting the conversion; bool
            # accepts the literal strings true/false/1/0 (case-insensitive).
            if param_types:
                type_errors = []
                for param, expected_type in param_types.items():
                    if param in request.args:
                        value = request.args.get(param)
                        try:
                            if expected_type == int:
                                int(value)
                            elif expected_type == float:
                                float(value)
                            elif expected_type == bool:
                                if value.lower() not in ['true', 'false', '1', '0']:
                                    raise ValueError()
                        except ValueError:
                            type_errors.append(f"{param} must be of type {expected_type.__name__}")
                if type_errors:
                    return create_error_response(
                        message="Parameter type validation failed",
                        status_code=400,
                        error_code='PARAM_TYPE_ERROR',
                        errors=type_errors
                    )
            return f(*args, **kwargs)
        return decorated_function
    return decorator
def validate_pagination_params(f: Callable) -> Callable:
    """
    Decorator to validate pagination parameters (page, per_page, limit, offset).

    All four parameters are optional; each, when present, must be an integer
    in range (page/per_page/limit >= 1, per_page/limit <= 1000, offset >= 0).
    Errors are collected across all parameters and returned together as a
    single 400 response. Values are only validated, not coerced — the wrapped
    view still parses them itself.

    Args:
        f: The function to decorate
    Returns:
        Decorated function with pagination validation
    """
    @wraps(f)
    def decorated_function(*args, **kwargs):
        errors = []
        # Validate page parameter
        page = request.args.get('page')
        if page is not None:
            try:
                page_int = int(page)
                if page_int < 1:
                    errors.append("page must be greater than 0")
            except ValueError:
                errors.append("page must be an integer")
        # Validate per_page parameter
        per_page = request.args.get('per_page')
        if per_page is not None:
            try:
                per_page_int = int(per_page)
                if per_page_int < 1:
                    errors.append("per_page must be greater than 0")
                elif per_page_int > 1000:
                    errors.append("per_page cannot exceed 1000")
            except ValueError:
                errors.append("per_page must be an integer")
        # Validate limit parameter (same bounds as per_page)
        limit = request.args.get('limit')
        if limit is not None:
            try:
                limit_int = int(limit)
                if limit_int < 1:
                    errors.append("limit must be greater than 0")
                elif limit_int > 1000:
                    errors.append("limit cannot exceed 1000")
            except ValueError:
                errors.append("limit must be an integer")
        # Validate offset parameter (zero is allowed)
        offset = request.args.get('offset')
        if offset is not None:
            try:
                offset_int = int(offset)
                if offset_int < 0:
                    errors.append("offset must be greater than or equal to 0")
            except ValueError:
                errors.append("offset must be an integer")
        if errors:
            return create_error_response(
                message="Pagination parameter validation failed",
                status_code=400,
                error_code='PAGINATION_ERROR',
                errors=errors
            )
        return f(*args, **kwargs)
    return decorated_function
def validate_anime_data(data: Dict[str, Any]) -> List[str]:
    """Validate an anime payload and return a list of problem messages
    (empty list means valid).

    Checks: 'name' and 'url' present and truthy; name is a non-empty string
    of at most 500 chars; url is a string passing is_valid_url; optional
    description is a string of at most 2000 chars; optional episodes is a
    non-negative int; optional status is one of the known values.

    Args:
        data: Dictionary containing anime data.
    Returns:
        List of validation errors (empty if valid).
    """
    problems: List[str] = []
    # Required fields: missing OR falsy values both count as missing.
    for required in ('name', 'url'):
        if not data.get(required):
            problems.append(f"Missing required field: {required}")
    if 'name' in data:
        name = data['name']
        if not isinstance(name, str):
            problems.append("name must be a string")
        elif not name.strip():
            problems.append("name cannot be empty")
        elif len(name) > 500:
            problems.append("name cannot exceed 500 characters")
    if 'url' in data:
        url = data['url']
        if not isinstance(url, str):
            problems.append("url must be a string")
        elif not is_valid_url(url):
            problems.append("url must be a valid URL")
    # Optional fields: only validated when present and not None.
    description = data.get('description')
    if 'description' in data and description is not None:
        if not isinstance(description, str):
            problems.append("description must be a string")
        elif len(description) > 2000:
            problems.append("description cannot exceed 2000 characters")
    episodes = data.get('episodes')
    if 'episodes' in data and episodes is not None:
        if not isinstance(episodes, int):
            problems.append("episodes must be an integer")
        elif episodes < 0:
            problems.append("episodes must be non-negative")
    status = data.get('status')
    if 'status' in data and status is not None:
        allowed = ['ongoing', 'completed', 'planned', 'dropped', 'paused']
        if status not in allowed:
            problems.append(f"status must be one of: {', '.join(allowed)}")
    return problems
def validate_file_upload(file, allowed_extensions: Optional[List[str]] = None,
                        max_size_mb: Optional[int] = None) -> List[str]:
    """Validate an uploaded file object; return a list of problems
    (empty list means valid).

    Checks, in order: a file was provided, a filename was selected, the
    extension is allowed, and (only when the object exposes a truthy
    content_length) the size is within limit.

    Args:
        file: Uploaded file object.
        allowed_extensions: List of allowed file extensions (without dots).
        max_size_mb: Maximum file size in MB.
    Returns:
        List of validation errors (empty if valid).
    """
    if not file:
        return ["No file provided"]
    if file.filename == '':
        return ["No file selected"]
    problems: List[str] = []
    if allowed_extensions:
        suffix = os.path.splitext(file.filename)[1].lower()
        permitted = [f".{ext.lower()}" for ext in allowed_extensions]
        if suffix not in permitted:
            problems.append(f"File type not allowed. Allowed: {', '.join(allowed_extensions)}")
    # Size can only be checked when the file object exposes content_length.
    if max_size_mb and getattr(file, 'content_length', None):
        if file.content_length > max_size_mb * 1024 * 1024:
            problems.append(f"File size exceeds maximum of {max_size_mb}MB")
    return problems
# Compiled once at import time; matches http(s) URLs with a dotted domain,
# 'localhost' or a dotted-quad IP, an optional port and an optional path.
_URL_RE = re.compile(
    r'^https?://'  # http:// or https://
    r'(?:(?:[A-Z0-9](?:[A-Z0-9-]{0,61}[A-Z0-9])?\.)+[A-Z]{2,6}\.?|'  # domain...
    r'localhost|'  # localhost...
    r'\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3})'  # ...or ip
    r'(?::\d+)?'  # optional port
    r'(?:/?|[/?]\S+)$', re.IGNORECASE)


def is_valid_url(url: str) -> bool:
    """Return True when *url* matches the http(s) URL pattern used across
    the API validators.

    Args:
        url: URL string to validate.
    Returns:
        True if valid URL, False otherwise.
    """
    return _URL_RE.match(url) is not None
# Simple single-pass pattern: local part @ domain . TLD (2+ letters).
_EMAIL_RE = re.compile(r'^[a-zA-Z0-9._%+-]+@[a-zA-Z0-9.-]+\.[a-zA-Z]{2,}$')


def is_valid_email(email: str) -> bool:
    """Return True when *email* is a syntactically plausible address.

    Args:
        email: Email string to validate.
    Returns:
        True if valid email, False otherwise.
    """
    return _EMAIL_RE.match(email) is not None
def sanitize_string(value: str, max_length: Optional[int] = None) -> str:
    """Strip control characters (keeping tab/newline/CR), trim surrounding
    whitespace, and optionally truncate. Non-strings are stringified and
    returned untouched.

    Args:
        value: String to sanitize.
        max_length: Maximum allowed length (falsy values, including 0,
            disable truncation — original truthiness semantics preserved).
    Returns:
        Sanitized string.
    """
    if not isinstance(value, str):
        return str(value)
    cleaned = ''.join(ch for ch in value if ord(ch) >= 32 or ch in '\t\n\r').strip()
    if max_length and len(cleaned) > max_length:
        cleaned = cleaned[:max_length]
    return cleaned
def validate_id_parameter(param_name: str = 'id') -> Callable:
    """Decorator factory: validate and coerce a positive-integer URL
    parameter before the view runs.

    When the named kwarg is present, it is converted with int(); values that
    don't parse or are <= 0 produce a 400 INVALID_ID response, otherwise the
    coerced int replaces the raw value in kwargs.

    Args:
        param_name: Name of the ID parameter.
    Returns:
        Decorator function.
    """
    def decorator(f: Callable) -> Callable:
        @wraps(f)
        def wrapper(*args, **kwargs):
            if param_name in kwargs:
                try:
                    parsed = int(kwargs[param_name])
                except ValueError:
                    return create_error_response(
                        message=f"{param_name} must be an integer",
                        status_code=400,
                        error_code='INVALID_ID'
                    )
                if parsed <= 0:
                    return create_error_response(
                        message=f"{param_name} must be a positive integer",
                        status_code=400,
                        error_code='INVALID_ID'
                    )
                kwargs[param_name] = parsed
            return f(*args, **kwargs)
        return wrapper
    return decorator

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -1,236 +1,236 @@
/**
* Localization support for AniWorld Manager
* Implements resource-based text management for easy translation
*/
class Localization {
    /**
     * Initialize with English as both the active and fallback language,
     * then load the bundled translation tables and activate the saved
     * or browser-detected language.
     */
    constructor() {
        this.currentLanguage = 'en';
        this.fallbackLanguage = 'en';
        this.translations = {};
        this.loadTranslations();
    }
    /**
     * Populate the in-memory translation tables (English and German) and
     * activate the saved or browser-detected language.
     * NOTE(review): tables are hard-coded here rather than fetched, so
     * adding a language means editing this method.
     */
    loadTranslations() {
        // English (default)
        this.translations.en = {
            // Header
            'config-title': 'Configuration',
            'toggle-theme': 'Toggle theme',
            'rescan': 'Rescan',
            // Search
            'search-placeholder': 'Search for anime...',
            'search-results': 'Search Results',
            'no-results': 'No results found',
            'add': 'Add',
            // Series
            'series-collection': 'Series Collection',
            'select-all': 'Select All',
            'deselect-all': 'Deselect All',
            'download-selected': 'Download Selected',
            'missing-episodes': 'missing episodes',
            // Configuration
            'anime-directory': 'Anime Directory',
            'series-count': 'Series Count',
            'connection-status': 'Connection Status',
            'connected': 'Connected',
            'disconnected': 'Disconnected',
            // Download controls
            'pause': 'Pause',
            'resume': 'Resume',
            'cancel': 'Cancel',
            'downloading': 'Downloading',
            'paused': 'Paused',
            // Download queue
            'download-queue': 'Download Queue',
            'currently-downloading': 'Currently Downloading',
            'queued-series': 'Queued Series',
            // Status messages
            'connected-server': 'Connected to server',
            'disconnected-server': 'Disconnected from server',
            'scan-started': 'Scan started',
            'scan-completed': 'Scan completed successfully',
            'download-started': 'Download started',
            'download-completed': 'Download completed successfully',
            'series-added': 'Series added successfully',
            // Error messages
            'search-failed': 'Search failed',
            'download-failed': 'Download failed',
            'scan-failed': 'Scan failed',
            'connection-failed': 'Connection failed',
            // General
            'loading': 'Loading...',
            'close': 'Close',
            'ok': 'OK',
            'cancel-action': 'Cancel'
        };
        // German
        this.translations.de = {
            // Header
            'config-title': 'Konfiguration',
            'toggle-theme': 'Design wechseln',
            'rescan': 'Neu scannen',
            // Search
            'search-placeholder': 'Nach Anime suchen...',
            'search-results': 'Suchergebnisse',
            'no-results': 'Keine Ergebnisse gefunden',
            'add': 'Hinzufügen',
            // Series
            'series-collection': 'Serien-Sammlung',
            'select-all': 'Alle auswählen',
            'deselect-all': 'Alle abwählen',
            'download-selected': 'Ausgewählte herunterladen',
            'missing-episodes': 'fehlende Episoden',
            // Configuration
            'anime-directory': 'Anime-Verzeichnis',
            'series-count': 'Anzahl Serien',
            'connection-status': 'Verbindungsstatus',
            'connected': 'Verbunden',
            'disconnected': 'Getrennt',
            // Download controls
            'pause': 'Pausieren',
            'resume': 'Fortsetzen',
            'cancel': 'Abbrechen',
            'downloading': 'Herunterladen',
            'paused': 'Pausiert',
            // Download queue
            'download-queue': 'Download-Warteschlange',
            'currently-downloading': 'Wird heruntergeladen',
            'queued-series': 'Warteschlange',
            // Status messages
            'connected-server': 'Mit Server verbunden',
            'disconnected-server': 'Verbindung zum Server getrennt',
            'scan-started': 'Scan gestartet',
            'scan-completed': 'Scan erfolgreich abgeschlossen',
            'download-started': 'Download gestartet',
            'download-completed': 'Download erfolgreich abgeschlossen',
            'series-added': 'Serie erfolgreich hinzugefügt',
            // Error messages
            'search-failed': 'Suche fehlgeschlagen',
            'download-failed': 'Download fehlgeschlagen',
            'scan-failed': 'Scan fehlgeschlagen',
            'connection-failed': 'Verbindung fehlgeschlagen',
            // General
            'loading': 'Wird geladen...',
            'close': 'Schließen',
            'ok': 'OK',
            'cancel-action': 'Abbrechen'
        };
        // Load saved language preference, falling back to browser detection.
        const savedLanguage = localStorage.getItem('language') || this.detectLanguage();
        this.setLanguage(savedLanguage);
    }
    /**
     * Pick the UI language from the browser locale; fall back to the
     * configured fallback language when no translation table exists for it.
     * @returns {string} two-letter language code
     */
    detectLanguage() {
        const browserLang = navigator.language || navigator.userLanguage;
        const langCode = browserLang.split('-')[0];
        return this.translations[langCode] ? langCode : this.fallbackLanguage;
    }
    /**
     * Activate a language (only if a translation table exists for it),
     * persist the choice to localStorage and re-render translatable texts.
     * @param {string} langCode - two-letter language code
     */
    setLanguage(langCode) {
        if (this.translations[langCode]) {
            this.currentLanguage = langCode;
            localStorage.setItem('language', langCode);
            this.updatePageTexts();
        }
    }
    /**
     * Look up a translation key in the active language, then the fallback
     * language; return the supplied fallback text (defaults to the key
     * itself) when neither table contains it.
     * @param {string} key - translation key
     * @param {string} [fallback=key] - text returned when the key is unknown
     * @returns {string}
     */
    getText(key, fallback = key) {
        const translation = this.translations[this.currentLanguage];
        if (translation && translation[key]) {
            return translation[key];
        }
        // Try fallback language
        const fallbackTranslation = this.translations[this.fallbackLanguage];
        if (fallbackTranslation && fallbackTranslation[key]) {
            return fallbackTranslation[key];
        }
        return fallback;
    }
    /**
     * Re-apply translations to every element carrying a data-text attribute:
     * text inputs receive the translation as placeholder, all other elements
     * as textContent. Also refreshes special-cased dynamic texts.
     */
    updatePageTexts() {
        // Update all elements with data-text attributes
        document.querySelectorAll('[data-text]').forEach(element => {
            const key = element.getAttribute('data-text');
            const text = this.getText(key);
            if (element.tagName === 'INPUT' && element.type === 'text') {
                element.placeholder = text;
            } else {
                element.textContent = text;
            }
        });
        // Update specific elements that need special handling
        this.updateSearchPlaceholder();
        this.updateDynamicTexts();
    }
    /** Refresh the search box placeholder for the active language. */
    updateSearchPlaceholder() {
        const searchInput = document.getElementById('search-input');
        if (searchInput) {
            searchInput.placeholder = this.getText('search-placeholder');
        }
    }
    /**
     * Refresh dynamically generated texts (the select/deselect-all button);
     * reads selection state from the global window.app when present.
     */
    updateDynamicTexts() {
        // Update any dynamically generated content
        const selectAllBtn = document.getElementById('select-all');
        if (selectAllBtn && window.app) {
            const selectedCount = window.app.selectedSeries ? window.app.selectedSeries.size : 0;
            const totalCount = window.app.seriesData ? window.app.seriesData.length : 0;
            if (selectedCount === totalCount && totalCount > 0) {
                selectAllBtn.innerHTML = `<i class="fas fa-times"></i><span>${this.getText('deselect-all')}</span>`;
            } else {
                selectAllBtn.innerHTML = `<i class="fas fa-check-double"></i><span>${this.getText('select-all')}</span>`;
            }
        }
    }
    /**
     * List the languages that have translation tables.
     * @returns {{code: string, name: string}[]} codes with display names
     */
    getAvailableLanguages() {
        return Object.keys(this.translations).map(code => ({
            code: code,
            name: this.getLanguageName(code)
        }));
    }
    /**
     * Map a language code to its human-readable name; unknown codes are
     * returned uppercased.
     * @param {string} code
     * @returns {string}
     */
    getLanguageName(code) {
        const names = {
            'en': 'English',
            'de': 'Deutsch'
        };
        return names[code] || code.toUpperCase();
    }
    /**
     * Get a translation and substitute positional {0}, {1}, ... placeholders
     * with the given arguments.
     * @param {string} key - translation key
     * @param {...*} args - values substituted into the placeholders
     * @returns {string}
     */
    formatMessage(key, ...args) {
        let message = this.getText(key);
        args.forEach((arg, index) => {
            message = message.replace(`{${index}}`, arg);
        });
        return message;
    }
}
// Export for use in other modules
/**
* Localization support for AniWorld Manager
* Implements resource-based text management for easy translation
*/
class Localization {
constructor() {
this.currentLanguage = 'en';
this.fallbackLanguage = 'en';
this.translations = {};
this.loadTranslations();
}
loadTranslations() {
// English (default)
this.translations.en = {
// Header
'config-title': 'Configuration',
'toggle-theme': 'Toggle theme',
'rescan': 'Rescan',
// Search
'search-placeholder': 'Search for anime...',
'search-results': 'Search Results',
'no-results': 'No results found',
'add': 'Add',
// Series
'series-collection': 'Series Collection',
'select-all': 'Select All',
'deselect-all': 'Deselect All',
'download-selected': 'Download Selected',
'missing-episodes': 'missing episodes',
// Configuration
'anime-directory': 'Anime Directory',
'series-count': 'Series Count',
'connection-status': 'Connection Status',
'connected': 'Connected',
'disconnected': 'Disconnected',
// Download controls
'pause': 'Pause',
'resume': 'Resume',
'cancel': 'Cancel',
'downloading': 'Downloading',
'paused': 'Paused',
// Download queue
'download-queue': 'Download Queue',
'currently-downloading': 'Currently Downloading',
'queued-series': 'Queued Series',
// Status messages
'connected-server': 'Connected to server',
'disconnected-server': 'Disconnected from server',
'scan-started': 'Scan started',
'scan-completed': 'Scan completed successfully',
'download-started': 'Download started',
'download-completed': 'Download completed successfully',
'series-added': 'Series added successfully',
// Error messages
'search-failed': 'Search failed',
'download-failed': 'Download failed',
'scan-failed': 'Scan failed',
'connection-failed': 'Connection failed',
// General
'loading': 'Loading...',
'close': 'Close',
'ok': 'OK',
'cancel-action': 'Cancel'
};
// German
this.translations.de = {
// Header
'config-title': 'Konfiguration',
'toggle-theme': 'Design wechseln',
'rescan': 'Neu scannen',
// Search
'search-placeholder': 'Nach Anime suchen...',
'search-results': 'Suchergebnisse',
'no-results': 'Keine Ergebnisse gefunden',
'add': 'Hinzufügen',
// Series
'series-collection': 'Serien-Sammlung',
'select-all': 'Alle auswählen',
'deselect-all': 'Alle abwählen',
'download-selected': 'Ausgewählte herunterladen',
'missing-episodes': 'fehlende Episoden',
// Configuration
'anime-directory': 'Anime-Verzeichnis',
'series-count': 'Anzahl Serien',
'connection-status': 'Verbindungsstatus',
'connected': 'Verbunden',
'disconnected': 'Getrennt',
// Download controls
'pause': 'Pausieren',
'resume': 'Fortsetzen',
'cancel': 'Abbrechen',
'downloading': 'Herunterladen',
'paused': 'Pausiert',
// Download queue
'download-queue': 'Download-Warteschlange',
'currently-downloading': 'Wird heruntergeladen',
'queued-series': 'Warteschlange',
// Status messages
'connected-server': 'Mit Server verbunden',
'disconnected-server': 'Verbindung zum Server getrennt',
'scan-started': 'Scan gestartet',
'scan-completed': 'Scan erfolgreich abgeschlossen',
'download-started': 'Download gestartet',
'download-completed': 'Download erfolgreich abgeschlossen',
'series-added': 'Serie erfolgreich hinzugefügt',
// Error messages
'search-failed': 'Suche fehlgeschlagen',
'download-failed': 'Download fehlgeschlagen',
'scan-failed': 'Scan fehlgeschlagen',
'connection-failed': 'Verbindung fehlgeschlagen',
// General
'loading': 'Wird geladen...',
'close': 'Schließen',
'ok': 'OK',
'cancel-action': 'Abbrechen'
};
// Load saved language preference
const savedLanguage = localStorage.getItem('language') || this.detectLanguage();
this.setLanguage(savedLanguage);
}
detectLanguage() {
const browserLang = navigator.language || navigator.userLanguage;
const langCode = browserLang.split('-')[0];
return this.translations[langCode] ? langCode : this.fallbackLanguage;
}
setLanguage(langCode) {
if (this.translations[langCode]) {
this.currentLanguage = langCode;
localStorage.setItem('language', langCode);
this.updatePageTexts();
}
}
getText(key, fallback = key) {
const translation = this.translations[this.currentLanguage];
if (translation && translation[key]) {
return translation[key];
}
// Try fallback language
const fallbackTranslation = this.translations[this.fallbackLanguage];
if (fallbackTranslation && fallbackTranslation[key]) {
return fallbackTranslation[key];
}
return fallback;
}
updatePageTexts() {
// Update all elements with data-text attributes
document.querySelectorAll('[data-text]').forEach(element => {
const key = element.getAttribute('data-text');
const text = this.getText(key);
if (element.tagName === 'INPUT' && element.type === 'text') {
element.placeholder = text;
} else {
element.textContent = text;
}
});
// Update specific elements that need special handling
this.updateSearchPlaceholder();
this.updateDynamicTexts();
}
updateSearchPlaceholder() {
const searchInput = document.getElementById('search-input');
if (searchInput) {
searchInput.placeholder = this.getText('search-placeholder');
}
}
updateDynamicTexts() {
// Update any dynamically generated content
const selectAllBtn = document.getElementById('select-all');
if (selectAllBtn && window.app) {
const selectedCount = window.app.selectedSeries ? window.app.selectedSeries.size : 0;
const totalCount = window.app.seriesData ? window.app.seriesData.length : 0;
if (selectedCount === totalCount && totalCount > 0) {
selectAllBtn.innerHTML = `<i class="fas fa-times"></i><span>${this.getText('deselect-all')}</span>`;
} else {
selectAllBtn.innerHTML = `<i class="fas fa-check-double"></i><span>${this.getText('select-all')}</span>`;
}
}
}
getAvailableLanguages() {
return Object.keys(this.translations).map(code => ({
code: code,
name: this.getLanguageName(code)
}));
}
getLanguageName(code) {
const names = {
'en': 'English',
'de': 'Deutsch'
};
return names[code] || code.toUpperCase();
}
formatMessage(key, ...args) {
let message = this.getText(key);
args.forEach((arg, index) => {
message = message.replace(`{${index}}`, arg);
});
return message;
}
}
// Export for use in other modules
// NOTE: exposed on the global window object (rather than an ES module
// export) so the plain <script src> tags in the HTML pages can access it
// without a bundler.
window.Localization = Localization;

File diff suppressed because it is too large Load Diff

View File

@@ -1,480 +1,480 @@
<!DOCTYPE html>
<html lang="en" data-theme="light">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>AniWorld Manager</title>
<link rel="stylesheet" href="/static/css/styles.css">
<link href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.0.0/css/all.min.css" rel="stylesheet">
<!-- UX Enhancement and Mobile & Accessibility CSS -->
<link rel="stylesheet" href="/static/css/ux_features.css">
</head>
<body>
<div class="app-container">
<!-- Header -->
<header class="header">
<div class="header-content">
<div class="header-title">
<i class="fas fa-play-circle"></i>
<h1>AniWorld Manager</h1>
</div>
<div class="header-actions">
<!-- Process Status Indicators -->
<div class="process-status" id="process-status">
<div class="status-indicator" id="rescan-status" title="Scan is idle">
<i class="fas fa-sync-alt"></i>
<div class="status-dot idle"></div>
</div>
<div class="status-indicator" id="download-status" title="Download is idle">
<i class="fas fa-download"></i>
<div class="status-dot idle"></div>
</div>
</div>
<a href="/queue" class="btn btn-secondary" title="Download Queue">
<i class="fas fa-list-alt"></i>
<span data-text="queue">Queue</span>
</a>
<button id="logout-btn" class="btn btn-secondary" title="Logout" style="display: none;">
<i class="fas fa-sign-out-alt"></i>
<span data-text="logout">Logout</span>
</button>
<button id="config-btn" class="btn btn-secondary" title="Show configuration">
<i class="fas fa-cog"></i>
<span data-text="config-title">Config</span>
</button>
<button id="theme-toggle" class="btn btn-icon" title="Toggle theme" data-title="toggle-theme">
<i class="fas fa-moon"></i>
</button>
<button id="rescan-btn" class="btn btn-primary">
<i class="fas fa-sync-alt"></i>
<span data-text="rescan">Rescan</span>
</button>
</div>
</div>
</header>
<!-- Main content -->
<main class="main-content">
<!-- Search section -->
<section class="search-section">
<div class="search-container">
<div class="search-input-group">
<input type="text" id="search-input" data-text="search-placeholder"
placeholder="Search for anime..." class="search-input">
<button id="search-btn" class="btn btn-primary">
<i class="fas fa-search"></i>
</button>
<button id="clear-search" class="btn btn-secondary">
<i class="fas fa-times"></i>
</button>
</div>
</div>
<!-- Search results -->
<div id="search-results" class="search-results hidden">
<h3>Search Results</h3>
<div id="search-results-list" class="search-results-list"></div>
</div>
</section>
<!-- Download Queue Section -->
<section id="download-queue-section" class="download-queue-section hidden">
<div class="queue-header">
<h2>
<i class="fas fa-download"></i>
<span data-text="download-queue">Download Queue</span>
</h2>
<div class="queue-stats">
<span id="queue-progress" class="queue-progress">0/0 series</span>
</div>
</div>
<!-- Current Download -->
<div id="current-download" class="current-download hidden">
<div class="current-download-header">
<h3 data-text="currently-downloading">Currently Downloading</h3>
</div>
<div class="current-download-item">
<div class="download-info">
<div id="current-serie-name" class="serie-name">-</div>
<div id="current-episode" class="episode-info">-</div>
</div>
<div class="download-progress">
<div class="progress-bar-mini">
<div id="current-progress-fill" class="progress-fill-mini"></div>
</div>
<div id="current-progress-text" class="progress-text-mini">0%</div>
</div>
</div>
</div>
<!-- Queue List -->
<div id="queue-list-container" class="queue-list-container">
<h3 data-text="queued-series">Queued Series</h3>
<div id="queue-list" class="queue-list">
<!-- Queue items will be populated here -->
</div>
</div>
</section>
<!-- Series management section -->
<section class="series-section">
<div class="series-header">
<h2 data-text="series-collection">Series Collection</h2>
<div class="series-filters">
<button id="show-missing-only" class="btn btn-secondary" data-active="false">
<i class="fas fa-filter"></i>
<span data-text="show-missing-only">Missing Episodes Only</span>
</button>
<button id="sort-alphabetical" class="btn btn-secondary" data-active="false">
<i class="fas fa-sort-alpha-down"></i>
<span data-text="sort-alphabetical">A-Z Sort</span>
</button>
</div>
<div class="series-actions">
<button id="select-all" class="btn btn-secondary">
<i class="fas fa-check-double"></i>
<span data-text="select-all">Select All</span>
</button>
<button id="download-selected" class="btn btn-success" disabled>
<i class="fas fa-download"></i>
<span data-text="download-selected">Download Selected</span>
</button>
</div>
</div>
<!-- Series grid -->
<div id="series-grid" class="series-grid">
<!-- Series cards will be populated here -->
</div>
</section>
</main>
<!-- Status panel -->
<div id="status-panel" class="status-panel hidden">
<div class="status-header">
<h3 id="status-title">Status</h3>
<button id="close-status" class="btn btn-icon">
<i class="fas fa-times"></i>
</button>
</div>
<div class="status-content">
<div id="status-message" class="status-message"></div>
<div id="progress-container" class="progress-container hidden">
<div class="progress-bar">
<div id="progress-fill" class="progress-fill"></div>
</div>
<div id="progress-text" class="progress-text">0%</div>
</div>
<div id="download-controls" class="download-controls hidden">
<button id="pause-download" class="btn btn-secondary btn-small">
<i class="fas fa-pause"></i>
<span data-text="pause">Pause</span>
</button>
<button id="resume-download" class="btn btn-primary btn-small hidden">
<i class="fas fa-play"></i>
<span data-text="resume">Resume</span>
</button>
<button id="cancel-download" class="btn btn-small"
style="background-color: var(--color-error); color: white;">
<i class="fas fa-stop"></i>
<span data-text="cancel">Cancel</span>
</button>
</div>
</div>
</div>
<!-- Configuration Modal -->
<div id="config-modal" class="modal hidden">
<div class="modal-overlay"></div>
<div class="modal-content">
<div class="modal-header">
<h3 data-text="config-title">Configuration</h3>
<button id="close-config" class="btn btn-icon">
<i class="fas fa-times"></i>
</button>
</div>
<div class="modal-body">
<div class="config-item">
<label for="anime-directory-input" data-text="anime-directory">Anime Directory:</label>
<div class="input-group">
<input type="text" id="anime-directory-input" class="input-field"
placeholder="Enter anime directory path...">
<button id="browse-directory" class="btn btn-secondary">
<i class="fas fa-folder"></i>
</button>
</div>
</div>
<div class="config-item">
<label for="series-count-input" data-text="series-count">Series Count:</label>
<input type="number" id="series-count-input" class="input-field" readonly
title="This value is automatically calculated">
</div>
<div class="config-item">
<label data-text="connection-status">Connection Status:</label>
<div id="connection-status-display" class="config-value">
<span class="status-indicator"></span>
<span class="status-text">Disconnected</span>
</div>
<button id="test-connection" class="btn btn-secondary">
<i class="fas fa-network-wired"></i>
<span data-text="test-connection">Test Connection</span>
</button>
</div>
<!-- Main Configuration Actions -->
<div class="config-actions">
<button id="save-main-config" class="btn btn-primary">
<i class="fas fa-save"></i>
<span data-text="save-main-config">Save Configuration</span>
</button>
<button id="reset-main-config" class="btn btn-secondary">
<i class="fas fa-undo"></i>
<span data-text="reset-main-config">Reset</span>
</button>
</div>
<!-- Scheduler Configuration -->
<div class="config-section">
<h4 data-text="scheduler-config">Scheduled Operations</h4>
<div class="config-item">
<label class="checkbox-label">
<input type="checkbox" id="scheduled-rescan-enabled">
<span class="checkbox-custom"></span>
<span data-text="enable-scheduled-rescan">Enable Daily Rescan</span>
</label>
</div>
<div class="config-item" id="rescan-time-config">
<label for="scheduled-rescan-time" data-text="rescan-time">Rescan Time (24h format):</label>
<input type="time" id="scheduled-rescan-time" value="03:00" class="input-field">
</div>
<div class="config-item">
<label class="checkbox-label">
<input type="checkbox" id="auto-download-after-rescan">
<span class="checkbox-custom"></span>
<span data-text="auto-download-after-rescan">Auto-download missing episodes after
rescan</span>
</label>
</div>
<div class="config-item scheduler-status" id="scheduler-status">
<div class="scheduler-info">
<div class="info-row">
<span data-text="next-rescan">Next Scheduled Rescan:</span>
<span id="next-rescan-time" class="info-value">-</span>
</div>
<div class="info-row">
<span data-text="last-rescan">Last Scheduled Rescan:</span>
<span id="last-rescan-time" class="info-value">-</span>
</div>
<div class="info-row">
<span data-text="scheduler-running">Scheduler Status:</span>
<span id="scheduler-running-status" class="info-value status-badge">Stopped</span>
</div>
</div>
</div>
<div class="config-actions">
<button id="save-scheduler-config" class="btn btn-primary">
<i class="fas fa-save"></i>
<span data-text="save-config">Save Configuration</span>
</button>
<button id="test-scheduled-rescan" class="btn btn-secondary">
<i class="fas fa-play"></i>
<span data-text="test-rescan">Test Rescan Now</span>
</button>
</div>
</div>
<!-- Logging Configuration -->
<div class="config-section">
<h4 data-text="logging-config">Logging Configuration</h4>
<div class="config-item">
<label for="log-level" data-text="log-level">Log Level:</label>
<select id="log-level" class="input-field">
<option value="DEBUG">DEBUG</option>
<option value="INFO">INFO</option>
<option value="WARNING">WARNING</option>
<option value="ERROR">ERROR</option>
<option value="CRITICAL">CRITICAL</option>
</select>
</div>
<div class="config-item">
<div class="checkbox-container">
<input type="checkbox" id="enable-console-logging">
<label for="enable-console-logging">
<span data-text="enable-console-logging">Enable Console Logging</span>
</label>
</div>
</div>
<div class="config-item">
<div class="checkbox-container">
<input type="checkbox" id="enable-console-progress">
<label for="enable-console-progress">
<span data-text="enable-console-progress">Show Progress Bars in Console</span>
</label>
</div>
</div>
<div class="config-item">
<div class="checkbox-container">
<input type="checkbox" id="enable-fail2ban-logging">
<label for="enable-fail2ban-logging">
<span data-text="enable-fail2ban-logging">Enable Fail2Ban Logging</span>
</label>
</div>
</div>
<div class="config-item">
<h5 data-text="log-files">Log Files</h5>
<div id="log-files-list" class="log-files-container">
<!-- Log files will be populated here -->
</div>
</div>
<div class="config-actions">
<button id="save-logging-config" class="btn btn-primary">
<i class="fas fa-save"></i>
<span data-text="save-logging-config">Save Logging Config</span>
</button>
<button id="test-logging" class="btn btn-secondary">
<i class="fas fa-bug"></i>
<span data-text="test-logging">Test Logging</span>
</button>
<button id="refresh-log-files" class="btn btn-secondary">
<i class="fas fa-refresh"></i>
<span data-text="refresh-logs">Refresh Log Files</span>
</button>
<button id="cleanup-logs" class="btn btn-warning">
<i class="fas fa-trash"></i>
<span data-text="cleanup-logs">Cleanup Old Logs</span>
</button>
</div>
</div>
<!-- Configuration Management -->
<div class="config-section">
<h4 data-text="config-management">Configuration Management</h4>
<div class="config-item">
<h5 data-text="config-backup-restore">Backup & Restore</h5>
<p class="config-description" data-text="backup-description">
Create backups of your configuration or restore from previous backups.
</p>
<div class="config-actions">
<button id="create-config-backup" class="btn btn-secondary">
<i class="fas fa-save"></i>
<span data-text="create-backup">Create Backup</span>
</button>
<button id="view-config-backups" class="btn btn-secondary">
<i class="fas fa-history"></i>
<span data-text="view-backups">View Backups</span>
</button>
<button id="export-config" class="btn btn-secondary">
<i class="fas fa-download"></i>
<span data-text="export-config">Export Config</span>
</button>
</div>
</div>
<div class="config-item">
<h5 data-text="config-validation">Configuration Validation</h5>
<p class="config-description" data-text="validation-description">
Validate your current configuration for errors and warnings.
</p>
<div id="validation-results" class="validation-results hidden">
<!-- Validation results will be displayed here -->
</div>
<div class="config-actions">
<button id="validate-config" class="btn btn-primary">
<i class="fas fa-check"></i>
<span data-text="validate-config">Validate Configuration</span>
</button>
<button id="reset-config" class="btn btn-warning">
<i class="fas fa-undo"></i>
<span data-text="reset-config">Reset to Defaults</span>
</button>
</div>
</div>
<div class="config-item">
<h5 data-text="advanced-config">Advanced Settings</h5>
<label for="max-concurrent-downloads" data-text="max-downloads">Max Concurrent
Downloads:</label>
<input type="number" id="max-concurrent-downloads" min="1" max="20" value="3"
class="input-field">
<label for="provider-timeout" data-text="provider-timeout">Provider Timeout
(seconds):</label>
<input type="number" id="provider-timeout" min="5" max="300" value="30" class="input-field">
<div class="checkbox-container">
<input type="checkbox" id="enable-debug-mode">
<label for="enable-debug-mode">
<span data-text="enable-debug">Enable Debug Mode</span>
</label>
</div>
<div class="config-actions">
<button id="save-advanced-config" class="btn btn-primary">
<i class="fas fa-save"></i>
<span data-text="save-advanced">Save Advanced Settings</span>
</button>
</div>
</div>
</div>
</div>
</div>
</div>
<!-- Toast notifications -->
<div id="toast-container" class="toast-container"></div>
</div>
<!-- Loading overlay -->
<div id="loading-overlay" class="loading-overlay hidden">
<div class="loading-spinner">
<i class="fas fa-spinner fa-spin"></i>
<p>Loading...</p>
</div>
</div>
<!-- Scripts -->
<script src="https://cdnjs.cloudflare.com/ajax/libs/socket.io/4.0.1/socket.io.js"></script>
<script src="/static/js/localization.js"></script>
<!-- UX Enhancement Scripts -->
<script src="/static/js/keyboard_shortcuts.js"></script>
<script src="/static/js/drag_drop.js"></script>
<script src="/static/js/bulk_operations.js"></script>
<script src="/static/js/user_preferences.js"></script>
<script src="/static/js/advanced_search.js"></script>
<script src="/static/js/undo_redo.js"></script>
<!-- Mobile & Accessibility Scripts -->
<script src="/static/js/mobile_responsive.js"></script>
<script src="/static/js/touch_gestures.js"></script>
<script src="/static/js/accessibility_features.js"></script>
<script src="/static/js/screen_reader_support.js"></script>
<script src="/static/js/color_contrast_compliance.js"></script>
<script src="/static/js/multi_screen_support.js"></script>
<script src="/static/js/app.js"></script>
</body>
</html>
<!DOCTYPE html>
<html lang="en" data-theme="light">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>AniWorld Manager</title>
<link rel="stylesheet" href="/static/css/styles.css">
<link href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.0.0/css/all.min.css" rel="stylesheet">
<!-- UX Enhancement and Mobile & Accessibility CSS -->
<link rel="stylesheet" href="/static/css/ux_features.css">
</head>
<body>
<div class="app-container">
<!-- Header -->
<header class="header">
<div class="header-content">
<div class="header-title">
<i class="fas fa-play-circle"></i>
<h1>AniWorld Manager</h1>
</div>
<div class="header-actions">
<!-- Process Status Indicators -->
<div class="process-status" id="process-status">
<div class="status-indicator" id="rescan-status" title="Scan is idle">
<i class="fas fa-sync-alt"></i>
<div class="status-dot idle"></div>
</div>
<div class="status-indicator" id="download-status" title="Download is idle">
<i class="fas fa-download"></i>
<div class="status-dot idle"></div>
</div>
</div>
<a href="/queue" class="btn btn-secondary" title="Download Queue">
<i class="fas fa-list-alt"></i>
<span data-text="queue">Queue</span>
</a>
<button id="logout-btn" class="btn btn-secondary" title="Logout" style="display: none;">
<i class="fas fa-sign-out-alt"></i>
<span data-text="logout">Logout</span>
</button>
<button id="config-btn" class="btn btn-secondary" title="Show configuration">
<i class="fas fa-cog"></i>
<span data-text="config-title">Config</span>
</button>
<button id="theme-toggle" class="btn btn-icon" title="Toggle theme" data-title="toggle-theme">
<i class="fas fa-moon"></i>
</button>
<button id="rescan-btn" class="btn btn-primary">
<i class="fas fa-sync-alt"></i>
<span data-text="rescan">Rescan</span>
</button>
</div>
</div>
</header>
<!-- Main content -->
<main class="main-content">
<!-- Search section -->
<section class="search-section">
<div class="search-container">
<div class="search-input-group">
<input type="text" id="search-input" data-text="search-placeholder"
placeholder="Search for anime..." class="search-input">
<button id="search-btn" class="btn btn-primary">
<i class="fas fa-search"></i>
</button>
<button id="clear-search" class="btn btn-secondary">
<i class="fas fa-times"></i>
</button>
</div>
</div>
<!-- Search results -->
<div id="search-results" class="search-results hidden">
<h3>Search Results</h3>
<div id="search-results-list" class="search-results-list"></div>
</div>
</section>
<!-- Download Queue Section -->
<section id="download-queue-section" class="download-queue-section hidden">
<div class="queue-header">
<h2>
<i class="fas fa-download"></i>
<span data-text="download-queue">Download Queue</span>
</h2>
<div class="queue-stats">
<span id="queue-progress" class="queue-progress">0/0 series</span>
</div>
</div>
<!-- Current Download -->
<div id="current-download" class="current-download hidden">
<div class="current-download-header">
<h3 data-text="currently-downloading">Currently Downloading</h3>
</div>
<div class="current-download-item">
<div class="download-info">
<div id="current-serie-name" class="serie-name">-</div>
<div id="current-episode" class="episode-info">-</div>
</div>
<div class="download-progress">
<div class="progress-bar-mini">
<div id="current-progress-fill" class="progress-fill-mini"></div>
</div>
<div id="current-progress-text" class="progress-text-mini">0%</div>
</div>
</div>
</div>
<!-- Queue List -->
<div id="queue-list-container" class="queue-list-container">
<h3 data-text="queued-series">Queued Series</h3>
<div id="queue-list" class="queue-list">
<!-- Queue items will be populated here -->
</div>
</div>
</section>
<!-- Series management section -->
<section class="series-section">
<div class="series-header">
<h2 data-text="series-collection">Series Collection</h2>
<div class="series-filters">
<button id="show-missing-only" class="btn btn-secondary" data-active="false">
<i class="fas fa-filter"></i>
<span data-text="show-missing-only">Missing Episodes Only</span>
</button>
<button id="sort-alphabetical" class="btn btn-secondary" data-active="false">
<i class="fas fa-sort-alpha-down"></i>
<span data-text="sort-alphabetical">A-Z Sort</span>
</button>
</div>
<div class="series-actions">
<button id="select-all" class="btn btn-secondary">
<i class="fas fa-check-double"></i>
<span data-text="select-all">Select All</span>
</button>
<button id="download-selected" class="btn btn-success" disabled>
<i class="fas fa-download"></i>
<span data-text="download-selected">Download Selected</span>
</button>
</div>
</div>
<!-- Series grid -->
<div id="series-grid" class="series-grid">
<!-- Series cards will be populated here -->
</div>
</section>
</main>
<!-- Status panel -->
<div id="status-panel" class="status-panel hidden">
<div class="status-header">
<h3 id="status-title">Status</h3>
<button id="close-status" class="btn btn-icon">
<i class="fas fa-times"></i>
</button>
</div>
<div class="status-content">
<div id="status-message" class="status-message"></div>
<div id="progress-container" class="progress-container hidden">
<div class="progress-bar">
<div id="progress-fill" class="progress-fill"></div>
</div>
<div id="progress-text" class="progress-text">0%</div>
</div>
<div id="download-controls" class="download-controls hidden">
<button id="pause-download" class="btn btn-secondary btn-small">
<i class="fas fa-pause"></i>
<span data-text="pause">Pause</span>
</button>
<button id="resume-download" class="btn btn-primary btn-small hidden">
<i class="fas fa-play"></i>
<span data-text="resume">Resume</span>
</button>
<button id="cancel-download" class="btn btn-small"
style="background-color: var(--color-error); color: white;">
<i class="fas fa-stop"></i>
<span data-text="cancel">Cancel</span>
</button>
</div>
</div>
</div>
<!-- Configuration Modal -->
<div id="config-modal" class="modal hidden">
<div class="modal-overlay"></div>
<div class="modal-content">
<div class="modal-header">
<h3 data-text="config-title">Configuration</h3>
<button id="close-config" class="btn btn-icon">
<i class="fas fa-times"></i>
</button>
</div>
<div class="modal-body">
<div class="config-item">
<label for="anime-directory-input" data-text="anime-directory">Anime Directory:</label>
<div class="input-group">
<input type="text" id="anime-directory-input" class="input-field"
placeholder="Enter anime directory path...">
<button id="browse-directory" class="btn btn-secondary">
<i class="fas fa-folder"></i>
</button>
</div>
</div>
<div class="config-item">
<label for="series-count-input" data-text="series-count">Series Count:</label>
<input type="number" id="series-count-input" class="input-field" readonly
title="This value is automatically calculated">
</div>
<div class="config-item">
<label data-text="connection-status">Connection Status:</label>
<div id="connection-status-display" class="config-value">
<span class="status-indicator"></span>
<span class="status-text">Disconnected</span>
</div>
<button id="test-connection" class="btn btn-secondary">
<i class="fas fa-network-wired"></i>
<span data-text="test-connection">Test Connection</span>
</button>
</div>
<!-- Main Configuration Actions -->
<div class="config-actions">
<button id="save-main-config" class="btn btn-primary">
<i class="fas fa-save"></i>
<span data-text="save-main-config">Save Configuration</span>
</button>
<button id="reset-main-config" class="btn btn-secondary">
<i class="fas fa-undo"></i>
<span data-text="reset-main-config">Reset</span>
</button>
</div>
<!-- Scheduler Configuration -->
<div class="config-section">
<h4 data-text="scheduler-config">Scheduled Operations</h4>
<div class="config-item">
<label class="checkbox-label">
<input type="checkbox" id="scheduled-rescan-enabled">
<span class="checkbox-custom"></span>
<span data-text="enable-scheduled-rescan">Enable Daily Rescan</span>
</label>
</div>
<div class="config-item" id="rescan-time-config">
<label for="scheduled-rescan-time" data-text="rescan-time">Rescan Time (24h format):</label>
<input type="time" id="scheduled-rescan-time" value="03:00" class="input-field">
</div>
<div class="config-item">
<label class="checkbox-label">
<input type="checkbox" id="auto-download-after-rescan">
<span class="checkbox-custom"></span>
<span data-text="auto-download-after-rescan">Auto-download missing episodes after
rescan</span>
</label>
</div>
<div class="config-item scheduler-status" id="scheduler-status">
<div class="scheduler-info">
<div class="info-row">
<span data-text="next-rescan">Next Scheduled Rescan:</span>
<span id="next-rescan-time" class="info-value">-</span>
</div>
<div class="info-row">
<span data-text="last-rescan">Last Scheduled Rescan:</span>
<span id="last-rescan-time" class="info-value">-</span>
</div>
<div class="info-row">
<span data-text="scheduler-running">Scheduler Status:</span>
<span id="scheduler-running-status" class="info-value status-badge">Stopped</span>
</div>
</div>
</div>
<div class="config-actions">
<button id="save-scheduler-config" class="btn btn-primary">
<i class="fas fa-save"></i>
<span data-text="save-config">Save Configuration</span>
</button>
<button id="test-scheduled-rescan" class="btn btn-secondary">
<i class="fas fa-play"></i>
<span data-text="test-rescan">Test Rescan Now</span>
</button>
</div>
</div>
<!-- Logging Configuration -->
<div class="config-section">
<h4 data-text="logging-config">Logging Configuration</h4>
<div class="config-item">
<label for="log-level" data-text="log-level">Log Level:</label>
<select id="log-level" class="input-field">
<option value="DEBUG">DEBUG</option>
<option value="INFO">INFO</option>
<option value="WARNING">WARNING</option>
<option value="ERROR">ERROR</option>
<option value="CRITICAL">CRITICAL</option>
</select>
</div>
<div class="config-item">
<div class="checkbox-container">
<input type="checkbox" id="enable-console-logging">
<label for="enable-console-logging">
<span data-text="enable-console-logging">Enable Console Logging</span>
</label>
</div>
</div>
<div class="config-item">
<div class="checkbox-container">
<input type="checkbox" id="enable-console-progress">
<label for="enable-console-progress">
<span data-text="enable-console-progress">Show Progress Bars in Console</span>
</label>
</div>
</div>
<div class="config-item">
<div class="checkbox-container">
<input type="checkbox" id="enable-fail2ban-logging">
<label for="enable-fail2ban-logging">
<span data-text="enable-fail2ban-logging">Enable Fail2Ban Logging</span>
</label>
</div>
</div>
<div class="config-item">
<h5 data-text="log-files">Log Files</h5>
<div id="log-files-list" class="log-files-container">
<!-- Log files will be populated here -->
</div>
</div>
<div class="config-actions">
<button id="save-logging-config" class="btn btn-primary">
<i class="fas fa-save"></i>
<span data-text="save-logging-config">Save Logging Config</span>
</button>
<button id="test-logging" class="btn btn-secondary">
<i class="fas fa-bug"></i>
<span data-text="test-logging">Test Logging</span>
</button>
<button id="refresh-log-files" class="btn btn-secondary">
<i class="fas fa-refresh"></i>
<span data-text="refresh-logs">Refresh Log Files</span>
</button>
<button id="cleanup-logs" class="btn btn-warning">
<i class="fas fa-trash"></i>
<span data-text="cleanup-logs">Cleanup Old Logs</span>
</button>
</div>
</div>
<!-- Configuration Management -->
<div class="config-section">
<h4 data-text="config-management">Configuration Management</h4>
<div class="config-item">
<h5 data-text="config-backup-restore">Backup & Restore</h5>
<p class="config-description" data-text="backup-description">
Create backups of your configuration or restore from previous backups.
</p>
<div class="config-actions">
<button id="create-config-backup" class="btn btn-secondary">
<i class="fas fa-save"></i>
<span data-text="create-backup">Create Backup</span>
</button>
<button id="view-config-backups" class="btn btn-secondary">
<i class="fas fa-history"></i>
<span data-text="view-backups">View Backups</span>
</button>
<button id="export-config" class="btn btn-secondary">
<i class="fas fa-download"></i>
<span data-text="export-config">Export Config</span>
</button>
</div>
</div>
<div class="config-item">
<h5 data-text="config-validation">Configuration Validation</h5>
<p class="config-description" data-text="validation-description">
Validate your current configuration for errors and warnings.
</p>
<div id="validation-results" class="validation-results hidden">
<!-- Validation results will be displayed here -->
</div>
<div class="config-actions">
<button id="validate-config" class="btn btn-primary">
<i class="fas fa-check"></i>
<span data-text="validate-config">Validate Configuration</span>
</button>
<button id="reset-config" class="btn btn-warning">
<i class="fas fa-undo"></i>
<span data-text="reset-config">Reset to Defaults</span>
</button>
</div>
</div>
<div class="config-item">
<h5 data-text="advanced-config">Advanced Settings</h5>
<label for="max-concurrent-downloads" data-text="max-downloads">Max Concurrent
Downloads:</label>
<input type="number" id="max-concurrent-downloads" min="1" max="20" value="3"
class="input-field">
<label for="provider-timeout" data-text="provider-timeout">Provider Timeout
(seconds):</label>
<input type="number" id="provider-timeout" min="5" max="300" value="30" class="input-field">
<div class="checkbox-container">
<input type="checkbox" id="enable-debug-mode">
<label for="enable-debug-mode">
<span data-text="enable-debug">Enable Debug Mode</span>
</label>
</div>
<div class="config-actions">
<button id="save-advanced-config" class="btn btn-primary">
<i class="fas fa-save"></i>
<span data-text="save-advanced">Save Advanced Settings</span>
</button>
</div>
</div>
</div>
</div>
</div>
</div>
<!-- Toast notifications -->
<div id="toast-container" class="toast-container"></div>
</div>
<!-- Loading overlay -->
<div id="loading-overlay" class="loading-overlay hidden">
<div class="loading-spinner">
<i class="fas fa-spinner fa-spin"></i>
<p>Loading...</p>
</div>
</div>
<!-- Scripts -->
<script src="https://cdnjs.cloudflare.com/ajax/libs/socket.io/4.0.1/socket.io.js"></script>
<script src="/static/js/localization.js"></script>
<!-- UX Enhancement Scripts -->
<script src="/static/js/keyboard_shortcuts.js"></script>
<script src="/static/js/drag_drop.js"></script>
<script src="/static/js/bulk_operations.js"></script>
<script src="/static/js/user_preferences.js"></script>
<script src="/static/js/advanced_search.js"></script>
<script src="/static/js/undo_redo.js"></script>
<!-- Mobile & Accessibility Scripts -->
<script src="/static/js/mobile_responsive.js"></script>
<script src="/static/js/touch_gestures.js"></script>
<script src="/static/js/accessibility_features.js"></script>
<script src="/static/js/screen_reader_support.js"></script>
<script src="/static/js/color_contrast_compliance.js"></script>
<script src="/static/js/multi_screen_support.js"></script>
<script src="/static/js/app.js"></script>
</body>
</html>

View File

@@ -1,380 +1,380 @@
<!DOCTYPE html>
<html lang="en" data-theme="light">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>AniWorld Manager - Login</title>
<link rel="stylesheet" href="/static/css/styles.css">
<link href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.0.0/css/all.min.css" rel="stylesheet">
<style>
.login-container {
min-height: 100vh;
display: flex;
align-items: center;
justify-content: center;
background: linear-gradient(135deg, var(--color-primary-light) 0%, var(--color-primary) 100%);
padding: 1rem;
}
.login-card {
background: var(--color-surface);
border-radius: 16px;
padding: 2rem;
box-shadow: 0 8px 32px rgba(0, 0, 0, 0.1);
width: 100%;
max-width: 400px;
border: 1px solid var(--color-border);
}
.login-header {
text-align: center;
margin-bottom: 2rem;
}
.login-header .logo {
font-size: 3rem;
color: var(--color-primary);
margin-bottom: 0.5rem;
}
.login-header h1 {
margin: 0;
color: var(--color-text);
font-size: 1.5rem;
font-weight: 600;
}
.login-header p {
margin: 0.5rem 0 0 0;
color: var(--color-text-secondary);
font-size: 0.9rem;
}
.login-form {
display: flex;
flex-direction: column;
gap: 1.5rem;
}
.form-group {
display: flex;
flex-direction: column;
gap: 0.5rem;
}
.form-label {
font-weight: 500;
color: var(--color-text);
font-size: 0.9rem;
}
.password-input-group {
position: relative;
}
.password-input {
width: 100%;
padding: 0.75rem 3rem 0.75rem 1rem;
border: 2px solid var(--color-border);
border-radius: 8px;
font-size: 1rem;
background: var(--color-background);
color: var(--color-text);
transition: all 0.2s ease;
box-sizing: border-box;
}
.password-input:focus {
outline: none;
border-color: var(--color-primary);
box-shadow: 0 0 0 3px rgba(var(--color-primary-rgb), 0.1);
}
.password-toggle {
position: absolute;
right: 0.75rem;
top: 50%;
transform: translateY(-50%);
background: none;
border: none;
color: var(--color-text-secondary);
cursor: pointer;
padding: 0.25rem;
border-radius: 4px;
transition: color 0.2s ease;
}
.password-toggle:hover {
color: var(--color-primary);
}
.login-button {
width: 100%;
padding: 0.75rem;
background: var(--color-primary);
color: white;
border: none;
border-radius: 8px;
font-size: 1rem;
font-weight: 500;
cursor: pointer;
transition: all 0.2s ease;
display: flex;
align-items: center;
justify-content: center;
gap: 0.5rem;
}
.login-button:hover:not(:disabled) {
background: var(--color-primary-dark);
transform: translateY(-1px);
box-shadow: 0 4px 12px rgba(var(--color-primary-rgb), 0.3);
}
.login-button:disabled {
opacity: 0.6;
cursor: not-allowed;
transform: none;
box-shadow: none;
}
.error-message {
background: var(--color-error-light);
color: var(--color-error);
padding: 0.75rem;
border-radius: 8px;
border: 1px solid var(--color-error);
font-size: 0.9rem;
text-align: center;
}
.success-message {
background: var(--color-success-light);
color: var(--color-success);
padding: 0.75rem;
border-radius: 8px;
border: 1px solid var(--color-success);
font-size: 0.9rem;
text-align: center;
}
.theme-toggle {
position: absolute;
top: 1rem;
right: 1rem;
background: rgba(255, 255, 255, 0.1);
border: 1px solid rgba(255, 255, 255, 0.2);
color: white;
padding: 0.5rem;
border-radius: 50%;
cursor: pointer;
transition: all 0.2s ease;
width: 2.5rem;
height: 2.5rem;
display: flex;
align-items: center;
justify-content: center;
}
.theme-toggle:hover {
background: rgba(255, 255, 255, 0.2);
transform: scale(1.1);
}
.security-info {
margin-top: 1.5rem;
padding: 1rem;
background: var(--color-info-light);
border: 1px solid var(--color-info);
border-radius: 8px;
font-size: 0.8rem;
color: var(--color-text-secondary);
text-align: center;
}
.loading-spinner {
width: 1rem;
height: 1rem;
border: 2px solid transparent;
border-top: 2px solid currentColor;
border-radius: 50%;
animation: spin 1s linear infinite;
}
@keyframes spin {
to {
transform: rotate(360deg);
}
}
</style>
</head>
<body>
<div class="login-container">
<button class="theme-toggle" id="theme-toggle" title="Toggle theme">
<i class="fas fa-moon"></i>
</button>
<div class="login-card">
<div class="login-header">
<div class="logo">
<i class="fas fa-play-circle"></i>
</div>
<h1>AniWorld Manager</h1>
<p>Please enter your master password to continue</p>
</div>
<form class="login-form" id="login-form">
<div class="form-group">
<label for="password" class="form-label">Master Password</label>
<div class="password-input-group">
<input
type="password"
id="password"
name="password"
class="password-input"
placeholder="Enter your password"
required
autocomplete="current-password"
autofocus>
<button type="button" class="password-toggle" id="password-toggle" tabindex="-1">
<i class="fas fa-eye"></i>
</button>
</div>
</div>
<div id="message-container"></div>
<button type="submit" class="login-button" id="login-button">
<i class="fas fa-sign-in-alt"></i>
<span>Login</span>
</button>
</form>
<div class="security-info">
<i class="fas fa-shield-alt"></i>
Your session will expire after {{ session_timeout }} hours of inactivity.
<br>
After {{ max_attempts }} failed attempts, this IP will be locked for {{ lockout_duration }} minutes.
</div>
</div>
</div>
<script>
// Theme toggle functionality
const themeToggle = document.getElementById('theme-toggle');
const htmlElement = document.documentElement;
// Load saved theme
const savedTheme = localStorage.getItem('theme') || 'light';
htmlElement.setAttribute('data-theme', savedTheme);
updateThemeIcon(savedTheme);
themeToggle.addEventListener('click', () => {
const currentTheme = htmlElement.getAttribute('data-theme');
const newTheme = currentTheme === 'dark' ? 'light' : 'dark';
htmlElement.setAttribute('data-theme', newTheme);
localStorage.setItem('theme', newTheme);
updateThemeIcon(newTheme);
});
function updateThemeIcon(theme) {
const icon = themeToggle.querySelector('i');
icon.className = theme === 'dark' ? 'fas fa-sun' : 'fas fa-moon';
}
// Password visibility toggle
const passwordToggle = document.getElementById('password-toggle');
const passwordInput = document.getElementById('password');
passwordToggle.addEventListener('click', () => {
const type = passwordInput.getAttribute('type');
const newType = type === 'password' ? 'text' : 'password';
const icon = passwordToggle.querySelector('i');
passwordInput.setAttribute('type', newType);
icon.className = newType === 'password' ? 'fas fa-eye' : 'fas fa-eye-slash';
});
// Form submission
const loginForm = document.getElementById('login-form');
const loginButton = document.getElementById('login-button');
const messageContainer = document.getElementById('message-container');
loginForm.addEventListener('submit', async (e) => {
e.preventDefault();
const password = passwordInput.value.trim();
if (!password) {
showMessage('Please enter your password', 'error');
return;
}
setLoading(true);
try {
const response = await fetch('/api/auth/login', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({ password })
});
const data = await response.json();
if (data.status === 'success') {
showMessage(data.message, 'success');
setTimeout(() => {
window.location.href = '/';
}, 1000);
} else {
showMessage(data.message, 'error');
passwordInput.value = '';
passwordInput.focus();
}
} catch (error) {
showMessage('Connection error. Please try again.', 'error');
console.error('Login error:', error);
} finally {
setLoading(false);
}
});
function showMessage(message, type) {
messageContainer.innerHTML = `
<div class="${type}-message">
${message}
</div>
`;
}
function setLoading(loading) {
loginButton.disabled = loading;
const buttonText = loginButton.querySelector('span');
const buttonIcon = loginButton.querySelector('i');
if (loading) {
buttonIcon.className = 'loading-spinner';
buttonText.textContent = 'Logging in...';
} else {
buttonIcon.className = 'fas fa-sign-in-alt';
buttonText.textContent = 'Login';
}
}
// Clear message on input
passwordInput.addEventListener('input', () => {
messageContainer.innerHTML = '';
});
// Enter key on password toggle
passwordToggle.addEventListener('keydown', (e) => {
if (e.key === 'Enter' || e.key === ' ') {
e.preventDefault();
passwordToggle.click();
}
});
</script>
</body>
<!DOCTYPE html>
<html lang="en" data-theme="light">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>AniWorld Manager - Login</title>
<link rel="stylesheet" href="/static/css/styles.css">
<link href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.0.0/css/all.min.css" rel="stylesheet">
<style>
.login-container {
min-height: 100vh;
display: flex;
align-items: center;
justify-content: center;
background: linear-gradient(135deg, var(--color-primary-light) 0%, var(--color-primary) 100%);
padding: 1rem;
}
.login-card {
background: var(--color-surface);
border-radius: 16px;
padding: 2rem;
box-shadow: 0 8px 32px rgba(0, 0, 0, 0.1);
width: 100%;
max-width: 400px;
border: 1px solid var(--color-border);
}
.login-header {
text-align: center;
margin-bottom: 2rem;
}
.login-header .logo {
font-size: 3rem;
color: var(--color-primary);
margin-bottom: 0.5rem;
}
.login-header h1 {
margin: 0;
color: var(--color-text);
font-size: 1.5rem;
font-weight: 600;
}
.login-header p {
margin: 0.5rem 0 0 0;
color: var(--color-text-secondary);
font-size: 0.9rem;
}
.login-form {
display: flex;
flex-direction: column;
gap: 1.5rem;
}
.form-group {
display: flex;
flex-direction: column;
gap: 0.5rem;
}
.form-label {
font-weight: 500;
color: var(--color-text);
font-size: 0.9rem;
}
.password-input-group {
position: relative;
}
.password-input {
width: 100%;
padding: 0.75rem 3rem 0.75rem 1rem;
border: 2px solid var(--color-border);
border-radius: 8px;
font-size: 1rem;
background: var(--color-background);
color: var(--color-text);
transition: all 0.2s ease;
box-sizing: border-box;
}
.password-input:focus {
outline: none;
border-color: var(--color-primary);
box-shadow: 0 0 0 3px rgba(var(--color-primary-rgb), 0.1);
}
.password-toggle {
position: absolute;
right: 0.75rem;
top: 50%;
transform: translateY(-50%);
background: none;
border: none;
color: var(--color-text-secondary);
cursor: pointer;
padding: 0.25rem;
border-radius: 4px;
transition: color 0.2s ease;
}
.password-toggle:hover {
color: var(--color-primary);
}
.login-button {
width: 100%;
padding: 0.75rem;
background: var(--color-primary);
color: white;
border: none;
border-radius: 8px;
font-size: 1rem;
font-weight: 500;
cursor: pointer;
transition: all 0.2s ease;
display: flex;
align-items: center;
justify-content: center;
gap: 0.5rem;
}
.login-button:hover:not(:disabled) {
background: var(--color-primary-dark);
transform: translateY(-1px);
box-shadow: 0 4px 12px rgba(var(--color-primary-rgb), 0.3);
}
.login-button:disabled {
opacity: 0.6;
cursor: not-allowed;
transform: none;
box-shadow: none;
}
.error-message {
background: var(--color-error-light);
color: var(--color-error);
padding: 0.75rem;
border-radius: 8px;
border: 1px solid var(--color-error);
font-size: 0.9rem;
text-align: center;
}
.success-message {
background: var(--color-success-light);
color: var(--color-success);
padding: 0.75rem;
border-radius: 8px;
border: 1px solid var(--color-success);
font-size: 0.9rem;
text-align: center;
}
.theme-toggle {
position: absolute;
top: 1rem;
right: 1rem;
background: rgba(255, 255, 255, 0.1);
border: 1px solid rgba(255, 255, 255, 0.2);
color: white;
padding: 0.5rem;
border-radius: 50%;
cursor: pointer;
transition: all 0.2s ease;
width: 2.5rem;
height: 2.5rem;
display: flex;
align-items: center;
justify-content: center;
}
.theme-toggle:hover {
background: rgba(255, 255, 255, 0.2);
transform: scale(1.1);
}
.security-info {
margin-top: 1.5rem;
padding: 1rem;
background: var(--color-info-light);
border: 1px solid var(--color-info);
border-radius: 8px;
font-size: 0.8rem;
color: var(--color-text-secondary);
text-align: center;
}
.loading-spinner {
width: 1rem;
height: 1rem;
border: 2px solid transparent;
border-top: 2px solid currentColor;
border-radius: 50%;
animation: spin 1s linear infinite;
}
@keyframes spin {
to {
transform: rotate(360deg);
}
}
</style>
</head>
<body>
<div class="login-container">
<button class="theme-toggle" id="theme-toggle" title="Toggle theme">
<i class="fas fa-moon"></i>
</button>
<div class="login-card">
<div class="login-header">
<div class="logo">
<i class="fas fa-play-circle"></i>
</div>
<h1>AniWorld Manager</h1>
<p>Please enter your master password to continue</p>
</div>
<form class="login-form" id="login-form">
<div class="form-group">
<label for="password" class="form-label">Master Password</label>
<div class="password-input-group">
<input
type="password"
id="password"
name="password"
class="password-input"
placeholder="Enter your password"
required
autocomplete="current-password"
autofocus>
<button type="button" class="password-toggle" id="password-toggle" tabindex="-1">
<i class="fas fa-eye"></i>
</button>
</div>
</div>
<div id="message-container"></div>
<button type="submit" class="login-button" id="login-button">
<i class="fas fa-sign-in-alt"></i>
<span>Login</span>
</button>
</form>
<div class="security-info">
<i class="fas fa-shield-alt"></i>
Your session will expire after {{ session_timeout }} hours of inactivity.
<br>
After {{ max_attempts }} failed attempts, this IP will be locked for {{ lockout_duration }} minutes.
</div>
</div>
</div>
<script>
// Theme toggle functionality
const themeToggle = document.getElementById('theme-toggle');
const htmlElement = document.documentElement;
// Load saved theme
const savedTheme = localStorage.getItem('theme') || 'light';
htmlElement.setAttribute('data-theme', savedTheme);
updateThemeIcon(savedTheme);
themeToggle.addEventListener('click', () => {
const currentTheme = htmlElement.getAttribute('data-theme');
const newTheme = currentTheme === 'dark' ? 'light' : 'dark';
htmlElement.setAttribute('data-theme', newTheme);
localStorage.setItem('theme', newTheme);
updateThemeIcon(newTheme);
});
function updateThemeIcon(theme) {
const icon = themeToggle.querySelector('i');
icon.className = theme === 'dark' ? 'fas fa-sun' : 'fas fa-moon';
}
// Password visibility toggle
const passwordToggle = document.getElementById('password-toggle');
const passwordInput = document.getElementById('password');
passwordToggle.addEventListener('click', () => {
const type = passwordInput.getAttribute('type');
const newType = type === 'password' ? 'text' : 'password';
const icon = passwordToggle.querySelector('i');
passwordInput.setAttribute('type', newType);
icon.className = newType === 'password' ? 'fas fa-eye' : 'fas fa-eye-slash';
});
// Form submission
const loginForm = document.getElementById('login-form');
const loginButton = document.getElementById('login-button');
const messageContainer = document.getElementById('message-container');
loginForm.addEventListener('submit', async (e) => {
e.preventDefault();
const password = passwordInput.value.trim();
if (!password) {
showMessage('Please enter your password', 'error');
return;
}
setLoading(true);
try {
const response = await fetch('/api/auth/login', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
},
body: JSON.stringify({ password })
});
const data = await response.json();
if (data.status === 'success') {
showMessage(data.message, 'success');
setTimeout(() => {
window.location.href = '/';
}, 1000);
} else {
showMessage(data.message, 'error');
passwordInput.value = '';
passwordInput.focus();
}
} catch (error) {
showMessage('Connection error. Please try again.', 'error');
console.error('Login error:', error);
} finally {
setLoading(false);
}
});
function showMessage(message, type) {
messageContainer.innerHTML = `
<div class="${type}-message">
${message}
</div>
`;
}
function setLoading(loading) {
loginButton.disabled = loading;
const buttonText = loginButton.querySelector('span');
const buttonIcon = loginButton.querySelector('i');
if (loading) {
buttonIcon.className = 'loading-spinner';
buttonText.textContent = 'Logging in...';
} else {
buttonIcon.className = 'fas fa-sign-in-alt';
buttonText.textContent = 'Login';
}
}
// Clear message on input
passwordInput.addEventListener('input', () => {
messageContainer.innerHTML = '';
});
// Enter key on password toggle
passwordToggle.addEventListener('keydown', (e) => {
if (e.key === 'Enter' || e.key === ' ') {
e.preventDefault();
passwordToggle.click();
}
});
</script>
</body>
</html>

View File

@@ -1,252 +1,252 @@
<!DOCTYPE html>
<html lang="en" data-theme="light">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Download Queue - AniWorld Manager</title>
<link rel="stylesheet" href="/static/css/styles.css">
<link href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.0.0/css/all.min.css" rel="stylesheet">
</head>
<body>
<div class="app-container">
<!-- Header -->
<header class="header">
<div class="header-content">
<div class="header-title">
<i class="fas fa-download"></i>
<h1>Download Queue</h1>
</div>
<div class="header-actions">
<a href="/" class="btn btn-secondary">
<i class="fas fa-arrow-left"></i>
<span>Back to Main</span>
</a>
<button id="theme-toggle" class="btn btn-icon" title="Toggle theme">
<i class="fas fa-moon"></i>
</button>
<button id="logout-btn" class="btn btn-secondary" title="Logout" style="display: none;">
<i class="fas fa-sign-out-alt"></i>
<span>Logout</span>
</button>
</div>
</div>
</header>
<!-- Main content -->
<main class="main-content">
<!-- Queue Statistics -->
<section class="queue-stats-section">
<div class="stats-grid">
<div class="stat-card">
<div class="stat-icon">
<i class="fas fa-download text-primary"></i>
</div>
<div class="stat-info">
<div class="stat-value" id="total-items">0</div>
<div class="stat-label">Total Items</div>
</div>
</div>
<div class="stat-card">
<div class="stat-icon">
<i class="fas fa-clock text-warning"></i>
</div>
<div class="stat-info">
<div class="stat-value" id="pending-items">0</div>
<div class="stat-label">In Queue</div>
</div>
</div>
<div class="stat-card">
<div class="stat-icon">
<i class="fas fa-check-circle text-success"></i>
</div>
<div class="stat-info">
<div class="stat-value" id="completed-items">0</div>
<div class="stat-label">Completed</div>
</div>
</div>
<div class="stat-card">
<div class="stat-icon">
<i class="fas fa-exclamation-triangle text-error"></i>
</div>
<div class="stat-info">
<div class="stat-value" id="failed-items">0</div>
<div class="stat-label">Failed</div>
</div>
</div>
</div>
<!-- Speed and ETA -->
<div class="speed-eta-section">
<div class="speed-info">
<div class="speed-current">
<span class="label">Current Speed:</span>
<span class="value" id="current-speed">0 MB/s</span>
</div>
<div class="speed-average">
<span class="label">Average Speed:</span>
<span class="value" id="average-speed">0 MB/s</span>
</div>
</div>
<div class="eta-info">
<span class="label">Estimated Time Remaining:</span>
<span class="value" id="eta-time">--:--</span>
</div>
</div>
</section>
<!-- Active Downloads -->
<section class="active-downloads-section">
<div class="section-header">
<h2>
<i class="fas fa-play-circle"></i>
Active Downloads
</h2>
<div class="section-actions">
<button id="pause-all-btn" class="btn btn-secondary" disabled>
<i class="fas fa-pause"></i>
Pause All
</button>
<button id="resume-all-btn" class="btn btn-primary" disabled style="display: none;">
<i class="fas fa-play"></i>
Resume All
</button>
</div>
</div>
<div class="active-downloads-list" id="active-downloads">
<div class="empty-state">
<i class="fas fa-pause-circle"></i>
<p>No active downloads</p>
</div>
</div>
</section>
<!-- Pending Queue -->
<section class="pending-queue-section">
<div class="section-header">
<h2>
<i class="fas fa-clock"></i>
Download Queue
</h2>
<div class="section-actions">
<button id="start-queue-btn" class="btn btn-primary" disabled>
<i class="fas fa-play"></i>
Start Downloads
</button>
<button id="stop-queue-btn" class="btn btn-secondary" disabled style="display: none;">
<i class="fas fa-stop"></i>
Stop Downloads
</button>
<button id="clear-queue-btn" class="btn btn-secondary" disabled>
<i class="fas fa-trash"></i>
Clear Queue
</button>
<button id="reorder-queue-btn" class="btn btn-secondary" disabled>
<i class="fas fa-sort"></i>
Reorder
</button>
</div>
</div>
<div class="pending-queue-list" id="pending-queue">
<div class="empty-state">
<i class="fas fa-list"></i>
<p>No items in queue</p>
</div>
</div>
</section>
<!-- Completed Downloads -->
<section class="completed-downloads-section">
<div class="section-header">
<h2>
<i class="fas fa-check-circle"></i>
Recent Completed
</h2>
<div class="section-actions">
<button id="clear-completed-btn" class="btn btn-secondary">
<i class="fas fa-broom"></i>
Clear Completed
</button>
</div>
</div>
<div class="completed-downloads-list" id="completed-downloads">
<div class="empty-state">
<i class="fas fa-check-circle"></i>
<p>No completed downloads</p>
</div>
</div>
</section>
<!-- Failed Downloads -->
<section class="failed-downloads-section">
<div class="section-header">
<h2>
<i class="fas fa-exclamation-triangle"></i>
Failed Downloads
</h2>
<div class="section-actions">
<button id="retry-all-btn" class="btn btn-warning" disabled>
<i class="fas fa-redo"></i>
Retry All
</button>
<button id="clear-failed-btn" class="btn btn-secondary">
<i class="fas fa-trash"></i>
Clear Failed
</button>
</div>
</div>
<div class="failed-downloads-list" id="failed-downloads">
<div class="empty-state">
<i class="fas fa-check-circle text-success"></i>
<p>No failed downloads</p>
</div>
</div>
</section>
</main>
<!-- Toast notifications -->
<div id="toast-container" class="toast-container"></div>
</div>
<!-- Loading overlay -->
<div id="loading-overlay" class="loading-overlay hidden">
<div class="loading-spinner">
<i class="fas fa-spinner fa-spin"></i>
<p>Loading...</p>
</div>
</div>
<!-- Confirmation Modal -->
<div id="confirm-modal" class="modal hidden">
<div class="modal-overlay"></div>
<div class="modal-content">
<div class="modal-header">
<h3 id="confirm-title">Confirm Action</h3>
<button id="close-confirm" class="btn btn-icon">
<i class="fas fa-times"></i>
</button>
</div>
<div class="modal-body">
<p id="confirm-message">Are you sure you want to perform this action?</p>
</div>
<div class="modal-footer">
<button id="confirm-cancel" class="btn btn-secondary">Cancel</button>
<button id="confirm-ok" class="btn btn-primary">Confirm</button>
</div>
</div>
</div>
<!-- Scripts -->
<script src="https://cdnjs.cloudflare.com/ajax/libs/socket.io/4.0.1/socket.io.js"></script>
<script src="/static/js/queue.js"></script>
</body>
<!DOCTYPE html>
<html lang="en" data-theme="light">
<head>
<meta charset="UTF-8">
<meta name="viewport" content="width=device-width, initial-scale=1.0">
<title>Download Queue - AniWorld Manager</title>
<link rel="stylesheet" href="/static/css/styles.css">
<link href="https://cdnjs.cloudflare.com/ajax/libs/font-awesome/6.0.0/css/all.min.css" rel="stylesheet">
</head>
<body>
<div class="app-container">
<!-- Header -->
<header class="header">
<div class="header-content">
<div class="header-title">
<i class="fas fa-download"></i>
<h1>Download Queue</h1>
</div>
<div class="header-actions">
<a href="/" class="btn btn-secondary">
<i class="fas fa-arrow-left"></i>
<span>Back to Main</span>
</a>
<button id="theme-toggle" class="btn btn-icon" title="Toggle theme">
<i class="fas fa-moon"></i>
</button>
<button id="logout-btn" class="btn btn-secondary" title="Logout" style="display: none;">
<i class="fas fa-sign-out-alt"></i>
<span>Logout</span>
</button>
</div>
</div>
</header>
<!-- Main content -->
<main class="main-content">
<!-- Queue Statistics -->
<section class="queue-stats-section">
<div class="stats-grid">
<div class="stat-card">
<div class="stat-icon">
<i class="fas fa-download text-primary"></i>
</div>
<div class="stat-info">
<div class="stat-value" id="total-items">0</div>
<div class="stat-label">Total Items</div>
</div>
</div>
<div class="stat-card">
<div class="stat-icon">
<i class="fas fa-clock text-warning"></i>
</div>
<div class="stat-info">
<div class="stat-value" id="pending-items">0</div>
<div class="stat-label">In Queue</div>
</div>
</div>
<div class="stat-card">
<div class="stat-icon">
<i class="fas fa-check-circle text-success"></i>
</div>
<div class="stat-info">
<div class="stat-value" id="completed-items">0</div>
<div class="stat-label">Completed</div>
</div>
</div>
<div class="stat-card">
<div class="stat-icon">
<i class="fas fa-exclamation-triangle text-error"></i>
</div>
<div class="stat-info">
<div class="stat-value" id="failed-items">0</div>
<div class="stat-label">Failed</div>
</div>
</div>
</div>
<!-- Speed and ETA -->
<div class="speed-eta-section">
<div class="speed-info">
<div class="speed-current">
<span class="label">Current Speed:</span>
<span class="value" id="current-speed">0 MB/s</span>
</div>
<div class="speed-average">
<span class="label">Average Speed:</span>
<span class="value" id="average-speed">0 MB/s</span>
</div>
</div>
<div class="eta-info">
<span class="label">Estimated Time Remaining:</span>
<span class="value" id="eta-time">--:--</span>
</div>
</div>
</section>
<!-- Active Downloads -->
<section class="active-downloads-section">
<div class="section-header">
<h2>
<i class="fas fa-play-circle"></i>
Active Downloads
</h2>
<div class="section-actions">
<button id="pause-all-btn" class="btn btn-secondary" disabled>
<i class="fas fa-pause"></i>
Pause All
</button>
<button id="resume-all-btn" class="btn btn-primary" disabled style="display: none;">
<i class="fas fa-play"></i>
Resume All
</button>
</div>
</div>
<div class="active-downloads-list" id="active-downloads">
<div class="empty-state">
<i class="fas fa-pause-circle"></i>
<p>No active downloads</p>
</div>
</div>
</section>
<!-- Pending Queue -->
<section class="pending-queue-section">
<div class="section-header">
<h2>
<i class="fas fa-clock"></i>
Download Queue
</h2>
<div class="section-actions">
<button id="start-queue-btn" class="btn btn-primary" disabled>
<i class="fas fa-play"></i>
Start Downloads
</button>
<button id="stop-queue-btn" class="btn btn-secondary" disabled style="display: none;">
<i class="fas fa-stop"></i>
Stop Downloads
</button>
<button id="clear-queue-btn" class="btn btn-secondary" disabled>
<i class="fas fa-trash"></i>
Clear Queue
</button>
<button id="reorder-queue-btn" class="btn btn-secondary" disabled>
<i class="fas fa-sort"></i>
Reorder
</button>
</div>
</div>
<div class="pending-queue-list" id="pending-queue">
<div class="empty-state">
<i class="fas fa-list"></i>
<p>No items in queue</p>
</div>
</div>
</section>
<!-- Completed Downloads -->
<section class="completed-downloads-section">
<div class="section-header">
<h2>
<i class="fas fa-check-circle"></i>
Recent Completed
</h2>
<div class="section-actions">
<button id="clear-completed-btn" class="btn btn-secondary">
<i class="fas fa-broom"></i>
Clear Completed
</button>
</div>
</div>
<div class="completed-downloads-list" id="completed-downloads">
<div class="empty-state">
<i class="fas fa-check-circle"></i>
<p>No completed downloads</p>
</div>
</div>
</section>
<!-- Failed Downloads -->
<section class="failed-downloads-section">
<div class="section-header">
<h2>
<i class="fas fa-exclamation-triangle"></i>
Failed Downloads
</h2>
<div class="section-actions">
<button id="retry-all-btn" class="btn btn-warning" disabled>
<i class="fas fa-redo"></i>
Retry All
</button>
<button id="clear-failed-btn" class="btn btn-secondary">
<i class="fas fa-trash"></i>
Clear Failed
</button>
</div>
</div>
<div class="failed-downloads-list" id="failed-downloads">
<div class="empty-state">
<i class="fas fa-check-circle text-success"></i>
<p>No failed downloads</p>
</div>
</div>
</section>
</main>
<!-- Toast notifications -->
<div id="toast-container" class="toast-container"></div>
</div>
<!-- Loading overlay -->
<div id="loading-overlay" class="loading-overlay hidden">
<div class="loading-spinner">
<i class="fas fa-spinner fa-spin"></i>
<p>Loading...</p>
</div>
</div>
<!-- Confirmation Modal -->
<div id="confirm-modal" class="modal hidden">
<div class="modal-overlay"></div>
<div class="modal-content">
<div class="modal-header">
<h3 id="confirm-title">Confirm Action</h3>
<button id="close-confirm" class="btn btn-icon">
<i class="fas fa-times"></i>
</button>
</div>
<div class="modal-body">
<p id="confirm-message">Are you sure you want to perform this action?</p>
</div>
<div class="modal-footer">
<button id="confirm-cancel" class="btn btn-secondary">Cancel</button>
<button id="confirm-ok" class="btn btn-primary">Confirm</button>
</div>
</div>
</div>
<!-- Scripts -->
<script src="https://cdnjs.cloudflare.com/ajax/libs/socket.io/4.0.1/socket.io.js"></script>
<script src="/static/js/queue.js"></script>
</body>
</html>

File diff suppressed because it is too large Load Diff

File diff suppressed because it is too large Load Diff

View File

@@ -1,293 +1,293 @@
import threading
import time
from datetime import datetime, timedelta
from typing import Dict, Optional, Callable
import logging
logger = logging.getLogger(__name__)
class ProcessLock:
    """Thread-safe process lock for preventing duplicate operations.

    A lock auto-expires after ``timeout_minutes`` so that a crashed or hung
    holder cannot block the operation forever; the next acquirer reclaims it.
    Progress data may be attached to a held lock and forwarded to an optional
    callback.
    """

    def __init__(self, name: str, timeout_minutes: int = 60):
        """
        Args:
            name: Identifier used in log messages and error reporting.
            timeout_minutes: Age after which a held lock is treated as stale.
        """
        self.name = name
        self.timeout_minutes = timeout_minutes
        # RLock: public methods are free to call one another while holding it.
        self.lock = threading.RLock()
        self.locked_at: Optional[datetime] = None
        self.locked_by: Optional[str] = None
        self.progress_callback: Optional[Callable] = None
        self.is_locked = False
        self.progress_data: Dict = {}

    def _is_expired(self) -> bool:
        """Return True if the lock is held but older than its timeout.

        Caller must already hold ``self.lock``.
        """
        return bool(
            self.is_locked
            and self.locked_at
            and datetime.now() - self.locked_at
            > timedelta(minutes=self.timeout_minutes)
        )

    def acquire(self, locked_by: str = "system",
                progress_callback: Optional[Callable] = None) -> bool:
        """
        Attempt to acquire the lock.

        Args:
            locked_by: Name of the acquiring component (for status/logging).
            progress_callback: Optional callable invoked with each progress
                update while the lock is held.

        Returns:
            True if the lock was acquired, False if it is already held.
        """
        with self.lock:
            # Reclaim a stale lock left behind by a crashed/hung holder.
            if self._is_expired():
                logger.warning(f"Process lock '{self.name}' expired, releasing...")
                self._release_internal()
            if self.is_locked:
                return False
            self.is_locked = True
            self.locked_at = datetime.now()
            self.locked_by = locked_by
            self.progress_callback = progress_callback
            self.progress_data = {}
            logger.info(f"Process lock '{self.name}' acquired by '{locked_by}'")
            return True

    def release(self) -> bool:
        """Release the lock. Returns False if it was not held."""
        with self.lock:
            if not self.is_locked:
                return False
            self._release_internal()
            logger.info(f"Process lock '{self.name}' released")
            return True

    def _release_internal(self):
        """Reset all holder state without logging (caller holds ``self.lock``)."""
        self.is_locked = False
        self.locked_at = None
        self.locked_by = None
        self.progress_callback = None
        self.progress_data = {}

    def is_locked_by_other(self, requester: str) -> bool:
        """Check if the lock is held by someone other than *requester*."""
        with self.lock:
            return self.is_locked and self.locked_by != requester

    def get_status(self) -> Dict:
        """Return a snapshot of the lock state (safe to expose via an API)."""
        with self.lock:
            return {
                'is_locked': self.is_locked,
                'locked_by': self.locked_by,
                'locked_at': self.locked_at.isoformat() if self.locked_at else None,
                'progress': self.progress_data.copy(),
                'timeout_minutes': self.timeout_minutes
            }

    def update_progress(self, progress_data: Dict):
        """Merge *progress_data* into the lock's progress and notify the callback.

        No-op when the lock is not currently held. Callback errors are logged
        and swallowed so a faulty observer cannot break the locked operation.
        """
        with self.lock:
            if self.is_locked:
                self.progress_data.update(progress_data)
                if self.progress_callback:
                    try:
                        self.progress_callback(progress_data)
                    except Exception as e:
                        logger.error(f"Progress callback error: {e}")

    def __enter__(self):
        """Context manager entry; raises ProcessLockError if already held."""
        if not self.acquire():
            raise ProcessLockError(f"Could not acquire lock '{self.name}'")
        return self

    def __exit__(self, exc_type, exc_val, exc_tb):
        """Context manager exit: always release, even on exception."""
        self.release()
class ProcessLockError(Exception):
    """Raised when a process-lock operation (acquire/release) fails."""
class ProcessLockManager:
    """Global manager for all process locks.

    Maintains a name -> ProcessLock registry; registry access is guarded by
    manager_lock, while each ProcessLock guards its own internal state.
    """
    def __init__(self):
        self.locks: Dict[str, ProcessLock] = {}
        self.manager_lock = threading.RLock()
    def get_lock(self, name: str, timeout_minutes: int = 60) -> ProcessLock:
        """Get or create a process lock (timeout applies only on creation)."""
        with self.manager_lock:
            if name not in self.locks:
                self.locks[name] = ProcessLock(name, timeout_minutes)
            return self.locks[name]
    def acquire_lock(self, name: str, locked_by: str = "system",
                    timeout_minutes: int = 60, progress_callback: Callable = None) -> bool:
        """Acquire a named lock. Returns True on success."""
        lock = self.get_lock(name, timeout_minutes)
        return lock.acquire(locked_by, progress_callback)
    def release_lock(self, name: str) -> bool:
        """Release a named lock; False when it does not exist or is not held."""
        with self.manager_lock:
            if name in self.locks:
                return self.locks[name].release()
            return False
    def is_locked(self, name: str) -> bool:
        """Check if a named lock is currently held."""
        with self.manager_lock:
            if name in self.locks:
                return self.locks[name].is_locked
            return False
    def get_all_locks_status(self) -> Dict:
        """Get status of all locks."""
        with self.manager_lock:
            return {
                name: lock.get_status()
                for name, lock in self.locks.items()
            }
    def cleanup_expired_locks(self) -> int:
        """Clean up any expired locks. Returns number of locks cleaned up."""
        cleaned_count = 0
        with self.manager_lock:
            for lock in self.locks.values():
                # NOTE(review): reads is_locked/locked_at and calls
                # _release_internal without taking lock.lock — racy against a
                # concurrent acquire()/release() on that lock; confirm intended.
                if lock.is_locked and lock.locked_at:
                    if datetime.now() - lock.locked_at > timedelta(minutes=lock.timeout_minutes):
                        lock._release_internal()
                        cleaned_count += 1
                        logger.info(f"Cleaned up expired lock: {lock.name}")
        return cleaned_count
    def force_release_all(self) -> int:
        """Force release all locks. Returns number of locks released."""
        released_count = 0
        with self.manager_lock:
            for lock in self.locks.values():
                # NOTE(review): same unsynchronized per-lock access as above.
                if lock.is_locked:
                    lock._release_internal()
                    released_count += 1
                    logger.warning(f"Force released lock: {lock.name}")
        return released_count
# Global instance shared by the module-level helper functions below
process_lock_manager = ProcessLockManager()
# Predefined lock names for common operations
RESCAN_LOCK = "rescan"
DOWNLOAD_LOCK = "download"
SEARCH_LOCK = "search"
CONFIG_LOCK = "config"
def with_process_lock(lock_name: str, timeout_minutes: int = 60):
    """Decorator that serializes calls to the wrapped function behind a named lock.

    The wrapped function accepts two optional keyword controls that are
    stripped before the call:
      _locked_by         -- owner label recorded on the lock (default: func name)
      _progress_callback -- callable receiving progress-data dicts

    Raises:
        ProcessLockError: when the named lock is already held.
    """
    import functools  # local import keeps the decorator self-contained

    def decorator(func):
        @functools.wraps(func)  # fix: preserve name/docstring of the wrapped function
        def wrapper(*args, **kwargs):
            locked_by = kwargs.pop('_locked_by', func.__name__)
            progress_callback = kwargs.pop('_progress_callback', None)
            if not process_lock_manager.acquire_lock(lock_name, locked_by, timeout_minutes, progress_callback):
                raise ProcessLockError(f"Process '{lock_name}' is already running")
            try:
                return func(*args, **kwargs)
            finally:
                # Always release, even when func raises.
                process_lock_manager.release_lock(lock_name)
        return wrapper
    return decorator
def check_process_locks():
    """Check and clean up any expired process locks.

    Returns the number of locks that were cleaned up.
    """
    return process_lock_manager.cleanup_expired_locks()
def get_process_status(lock_name: str) -> Dict:
    """Get status of a specific process lock (the lock is created if absent)."""
    lock = process_lock_manager.get_lock(lock_name)
    return lock.get_status()
def update_process_progress(lock_name: str, progress_data: Dict):
    """Update progress for a specific process; no-op when it is not locked."""
    if process_lock_manager.is_locked(lock_name):
        lock = process_lock_manager.get_lock(lock_name)
        lock.update_progress(progress_data)
def is_process_running(lock_name: str) -> bool:
    """Check if a specific process is currently running (its lock is held)."""
    return process_lock_manager.is_locked(lock_name)
class QueueDeduplicator:
    """Prevent duplicate episodes in download queue.

    Episodes are keyed by (serie_name, season, episode); all access to the
    shared set is serialized through an RLock.
    """
    def __init__(self):
        self.active_items = set()  # Set of (serie_name, season, episode) tuples
        self.lock = threading.RLock()
    def add_episode(self, serie_name: str, season: int, episode: int) -> bool:
        """
        Add episode to active set if not already present.
        Returns True if added, False if duplicate.
        """
        with self.lock:
            episode_key = (serie_name, season, episode)
            if episode_key in self.active_items:
                return False
            self.active_items.add(episode_key)
            return True
    def remove_episode(self, serie_name: str, season: int, episode: int):
        """Remove episode from active set (no error when absent — uses discard)."""
        with self.lock:
            episode_key = (serie_name, season, episode)
            self.active_items.discard(episode_key)
    def is_episode_active(self, serie_name: str, season: int, episode: int) -> bool:
        """Check if episode is currently being processed."""
        with self.lock:
            episode_key = (serie_name, season, episode)
            return episode_key in self.active_items
    def get_active_episodes(self) -> list:
        """Get list of all active episodes (snapshot copy)."""
        with self.lock:
            return list(self.active_items)
    def clear_all(self):
        """Clear all active episodes."""
        with self.lock:
            self.active_items.clear()
    def get_count(self) -> int:
        """Get number of active episodes."""
        with self.lock:
            return len(self.active_items)
# Global deduplicator instance shared by the queue helper functions below
episode_deduplicator = QueueDeduplicator()
def add_episode_to_queue_safe(serie_name: str, season: int, episode: int) -> bool:
    """
    Safely add episode to queue with deduplication.
    Returns True if added, False if duplicate.
    """
    return episode_deduplicator.add_episode(serie_name, season, episode)
def remove_episode_from_queue(serie_name: str, season: int, episode: int):
    """Remove episode from deduplication tracking (no-op when untracked)."""
    episode_deduplicator.remove_episode(serie_name, season, episode)
def is_episode_in_queue(serie_name: str, season: int, episode: int) -> bool:
"""Check if episode is already in queue/being processed."""
import threading
import time
from datetime import datetime, timedelta
from typing import Dict, Optional, Callable
import logging
logger = logging.getLogger(__name__)
class ProcessLock:
    """Thread-safe process lock for preventing duplicate operations.

    Unlike a bare threading lock, this records who holds it, when it was
    taken, and arbitrary progress data, and it auto-expires after
    ``timeout_minutes`` so a crashed operation cannot wedge it forever.
    All times use naive local ``datetime.now()``.
    """
    def __init__(self, name: str, timeout_minutes: int = 60):
        self.name = name
        self.timeout_minutes = timeout_minutes
        # Internal RLock guarding every mutable attribute below.
        self.lock = threading.RLock()
        self.locked_at: Optional[datetime] = None
        self.locked_by: Optional[str] = None
        self.progress_callback: Optional[Callable] = None
        self.is_locked = False
        self.progress_data = {}
    def acquire(self, locked_by: str = "system", progress_callback: Callable = None) -> bool:
        """
        Attempt to acquire the lock.
        Returns True if lock was acquired, False if already locked.
        """
        with self.lock:
            # Check if lock has expired
            if self.is_locked and self.locked_at:
                if datetime.now() - self.locked_at > timedelta(minutes=self.timeout_minutes):
                    logger.warning(f"Process lock '{self.name}' expired, releasing...")
                    self._release_internal()
            if self.is_locked:
                return False
            self.is_locked = True
            self.locked_at = datetime.now()
            self.locked_by = locked_by
            self.progress_callback = progress_callback
            self.progress_data = {}  # progress starts fresh for each holder
            logger.info(f"Process lock '{self.name}' acquired by '{locked_by}'")
            return True
    def release(self) -> bool:
        """Release the lock. Returns False when it was not held."""
        with self.lock:
            if not self.is_locked:
                return False
            self._release_internal()
            logger.info(f"Process lock '{self.name}' released")
            return True
    def _release_internal(self):
        """Internal method to release lock without logging."""
        self.is_locked = False
        self.locked_at = None
        self.locked_by = None
        self.progress_callback = None
        self.progress_data = {}
    def is_locked_by_other(self, requester: str) -> bool:
        """Check if lock is held by someone other than requester."""
        with self.lock:
            return self.is_locked and self.locked_by != requester
    def get_status(self) -> Dict:
        """Get current lock status as a plain (JSON-friendly) dict."""
        with self.lock:
            return {
                'is_locked': self.is_locked,
                'locked_by': self.locked_by,
                'locked_at': self.locked_at.isoformat() if self.locked_at else None,
                'progress': self.progress_data.copy(),  # copy: callers must not mutate internal state
                'timeout_minutes': self.timeout_minutes
            }
    def update_progress(self, progress_data: Dict):
        """Update progress data for this lock; no-op when unlocked."""
        with self.lock:
            if self.is_locked:
                self.progress_data.update(progress_data)
                if self.progress_callback:
                    try:
                        # Callback errors are logged, never propagated.
                        self.progress_callback(progress_data)
                    except Exception as e:
                        logger.error(f"Progress callback error: {e}")
    def __enter__(self):
        """Context manager entry; raises ProcessLockError when already held."""
        if not self.acquire():
            raise ProcessLockError(f"Could not acquire lock '{self.name}'")
        return self
    def __exit__(self, exc_type, exc_val, exc_tb):
        """Context manager exit; always releases the lock."""
        self.release()
class ProcessLockError(Exception):
    """Raised when a process-lock operation fails (e.g. acquiring a held lock)."""
class ProcessLockManager:
    """Global manager for all process locks.

    Maintains a name -> ProcessLock registry; registry access is guarded by
    manager_lock, while each ProcessLock guards its own internal state.
    """
    def __init__(self):
        # name -> ProcessLock; guarded by manager_lock.
        self.locks: Dict[str, ProcessLock] = {}
        self.manager_lock = threading.RLock()
    def get_lock(self, name: str, timeout_minutes: int = 60) -> ProcessLock:
        """Get or create a process lock (timeout applies only on creation)."""
        with self.manager_lock:
            if name not in self.locks:
                self.locks[name] = ProcessLock(name, timeout_minutes)
            return self.locks[name]
    def acquire_lock(self, name: str, locked_by: str = "system",
                    timeout_minutes: int = 60, progress_callback: Callable = None) -> bool:
        """Acquire a named lock. Returns True on success."""
        lock = self.get_lock(name, timeout_minutes)
        return lock.acquire(locked_by, progress_callback)
    def release_lock(self, name: str) -> bool:
        """Release a named lock; False when it does not exist or is not held."""
        with self.manager_lock:
            if name in self.locks:
                return self.locks[name].release()
            return False
    def is_locked(self, name: str) -> bool:
        """Check if a named lock is currently held."""
        with self.manager_lock:
            if name in self.locks:
                return self.locks[name].is_locked
            return False
    def get_all_locks_status(self) -> Dict:
        """Get status of all locks, keyed by lock name."""
        with self.manager_lock:
            return {
                name: lock.get_status()
                for name, lock in self.locks.items()
            }
    def cleanup_expired_locks(self) -> int:
        """Clean up any expired locks. Returns number of locks cleaned up."""
        cleaned_count = 0
        with self.manager_lock:
            for lock in self.locks.values():
                # Fix: take the per-lock RLock before inspecting/releasing —
                # is_locked/locked_at were previously read unsynchronized
                # against concurrent acquire()/release() on that lock.
                # Lock order manager_lock -> lock.lock is never reversed
                # elsewhere, so this cannot deadlock.
                with lock.lock:
                    if lock.is_locked and lock.locked_at:
                        if datetime.now() - lock.locked_at > timedelta(minutes=lock.timeout_minutes):
                            lock._release_internal()
                            cleaned_count += 1
                            logger.info(f"Cleaned up expired lock: {lock.name}")
        return cleaned_count
    def force_release_all(self) -> int:
        """Force release all locks. Returns number of locks released."""
        released_count = 0
        with self.manager_lock:
            for lock in self.locks.values():
                # Same synchronization fix as cleanup_expired_locks().
                with lock.lock:
                    if lock.is_locked:
                        lock._release_internal()
                        released_count += 1
                        logger.warning(f"Force released lock: {lock.name}")
        return released_count
# Global instance shared by the module-level helper functions below
process_lock_manager = ProcessLockManager()
# Predefined lock names for common operations
RESCAN_LOCK = "rescan"
DOWNLOAD_LOCK = "download"
SEARCH_LOCK = "search"
CONFIG_LOCK = "config"
def with_process_lock(lock_name: str, timeout_minutes: int = 60):
    """Decorator that serializes calls to the wrapped function behind a named lock.

    The wrapped function accepts two optional keyword controls that are
    stripped before the call:
      _locked_by         -- owner label recorded on the lock (default: func name)
      _progress_callback -- callable receiving progress-data dicts

    Raises:
        ProcessLockError: when the named lock is already held.
    """
    import functools  # local import keeps the decorator self-contained

    def decorator(func):
        @functools.wraps(func)  # fix: preserve name/docstring of the wrapped function
        def wrapper(*args, **kwargs):
            locked_by = kwargs.pop('_locked_by', func.__name__)
            progress_callback = kwargs.pop('_progress_callback', None)
            if not process_lock_manager.acquire_lock(lock_name, locked_by, timeout_minutes, progress_callback):
                raise ProcessLockError(f"Process '{lock_name}' is already running")
            try:
                return func(*args, **kwargs)
            finally:
                # Always release, even when func raises.
                process_lock_manager.release_lock(lock_name)
        return wrapper
    return decorator
def check_process_locks():
    """Sweep all registered process locks and release any whose timeout elapsed.

    Returns the number of locks that were cleaned up.
    """
    expired = process_lock_manager.cleanup_expired_locks()
    return expired
def get_process_status(lock_name: str) -> Dict:
    """Return the status dict of a specific process lock (creating it if absent)."""
    return process_lock_manager.get_lock(lock_name).get_status()
def update_process_progress(lock_name: str, progress_data: Dict):
    """Update progress for a specific process; silently ignored when not locked."""
    if process_lock_manager.is_locked(lock_name):
        lock = process_lock_manager.get_lock(lock_name)
        lock.update_progress(progress_data)
def is_process_running(lock_name: str) -> bool:
    """Return True while the named process lock is held."""
    running = process_lock_manager.is_locked(lock_name)
    return running
class QueueDeduplicator:
    """Track in-flight download-queue episodes to prevent duplicates.

    Episodes are identified by a (serie_name, season, episode) tuple; all
    access to the shared set is serialized through a reentrant lock.
    """
    def __init__(self):
        # (serie_name, season, episode) tuples currently in flight.
        self.active_items = set()
        self.lock = threading.RLock()
    def add_episode(self, serie_name: str, season: int, episode: int) -> bool:
        """Register an episode as active.

        Returns True when newly added, False when already tracked.
        """
        key = (serie_name, season, episode)
        with self.lock:
            if key in self.active_items:
                return False
            self.active_items.add(key)
            return True
    def remove_episode(self, serie_name: str, season: int, episode: int):
        """Stop tracking an episode (no error when it was not tracked)."""
        with self.lock:
            self.active_items.discard((serie_name, season, episode))
    def is_episode_active(self, serie_name: str, season: int, episode: int) -> bool:
        """Return True while the episode is being processed."""
        with self.lock:
            return (serie_name, season, episode) in self.active_items
    def get_active_episodes(self) -> list:
        """Return a snapshot list of all tracked episode keys."""
        with self.lock:
            return list(self.active_items)
    def clear_all(self):
        """Drop every tracked episode."""
        with self.lock:
            self.active_items.clear()
    def get_count(self) -> int:
        """Return the number of episodes currently tracked."""
        with self.lock:
            return len(self.active_items)
# Global deduplicator instance shared by the queue helper functions below
episode_deduplicator = QueueDeduplicator()
def add_episode_to_queue_safe(serie_name: str, season: int, episode: int) -> bool:
    """Register an episode with the global deduplicator.

    Returns True when it was added, False when it is already tracked.
    """
    added = episode_deduplicator.add_episode(serie_name, season, episode)
    return added
def remove_episode_from_queue(serie_name: str, season: int, episode: int):
    """Drop an episode from deduplication tracking (no-op when untracked)."""
    episode_deduplicator.remove_episode(serie_name, season, episode)
def is_episode_in_queue(serie_name: str, season: int, episode: int) -> bool:
    """Return True when the episode is already queued or being processed."""
    active = episode_deduplicator.is_episode_active(serie_name, season, episode)
    return active

View File

@@ -1,146 +0,0 @@
"""
Pytest configuration file for AniWorld application tests.
"""
import os
import sys
from unittest.mock import Mock
import pytest
# Add source directory to path
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..'))
@pytest.fixture(scope="session")
def test_config():
    """Test configuration settings shared across the whole test session."""
    # session scope: one dict instance is reused by every test.
    return {
        "jwt_secret_key": "test-secret-key",
        "password_salt": "test-salt",
        "master_password": "test_password",
        "master_password_hash": "hashed_test_password",
        "token_expiry_hours": 1,
        "database_url": "sqlite:///:memory:",
        "anime_directory": "./test_data",
        "log_level": "DEBUG"
    }
@pytest.fixture
def mock_settings(test_config):
    """Mock settings object exposing each test_config entry as an attribute."""
    # Fix: removed redundant local `from unittest.mock import Mock` — Mock is
    # already imported at the top of this module.
    settings = Mock()
    for key, value in test_config.items():
        setattr(settings, key, value)
    return settings
@pytest.fixture
def mock_database():
    """Mock database connection (bare Mock; configure per-test as needed)."""
    return Mock()
@pytest.fixture
def mock_logger():
    """Mock logger for testing (records calls instead of emitting output)."""
    return Mock()
@pytest.fixture
def sample_anime_data():
    """Sample anime data for testing: one series with two season-1 episodes."""
    return {
        "id": 1,
        "title": "Test Anime",
        "genre": "Action",
        "year": 2023,
        "episodes": [
            {"id": 1, "title": "Episode 1", "season": 1, "episode": 1},
            {"id": 2, "title": "Episode 2", "season": 1, "episode": 2}
        ]
    }
@pytest.fixture
def sample_episode_data():
    """Sample episode data for testing (matches sample_anime_data's anime_id 1)."""
    return {
        "id": 1,
        "title": "Test Episode",
        "season": 1,
        "episode": 1,
        "anime_id": 1,
        "download_url": "https://example.com/episode1.mp4"
    }
@pytest.fixture
def valid_jwt_token():
    """Valid JWT token for testing (signed with the test secret, expires in 1h)."""
    from datetime import datetime, timedelta, timezone
    import jwt
    payload = {
        "user": "test_user",
        # Fix: timezone-aware now(); datetime.utcnow() is deprecated since
        # Python 3.12 and PyJWT handles aware datetimes for "exp" correctly.
        "exp": datetime.now(timezone.utc) + timedelta(hours=1)
    }
    return jwt.encode(payload, "test-secret-key", algorithm="HS256")
@pytest.fixture
def expired_jwt_token():
    """Expired JWT token for testing (exp set one hour in the past)."""
    from datetime import datetime, timedelta, timezone
    import jwt
    payload = {
        "user": "test_user",
        # Fix: timezone-aware now(); datetime.utcnow() is deprecated since
        # Python 3.12 and PyJWT handles aware datetimes for "exp" correctly.
        "exp": datetime.now(timezone.utc) - timedelta(hours=1)
    }
    return jwt.encode(payload, "test-secret-key", algorithm="HS256")
@pytest.fixture
def mock_request():
    """Mock FastAPI request object with empty headers and a localhost client."""
    request = Mock()
    request.headers = {}
    request.client = Mock()
    request.client.host = "127.0.0.1"
    return request
@pytest.fixture
def mock_file_system():
    """Mock file system operations (bare Mock; configure per-test as needed)."""
    return Mock()
# Pytest configuration
def pytest_configure(config):
    """Configure pytest with custom markers.

    Registering markers here keeps pytest from warning about unknown marks
    (unit/integration/e2e/slow) used throughout the suite.
    """
    config.addinivalue_line(
        "markers", "unit: marks tests as unit tests"
    )
    config.addinivalue_line(
        "markers", "integration: marks tests as integration tests"
    )
    config.addinivalue_line(
        "markers", "e2e: marks tests as end-to-end tests"
    )
    config.addinivalue_line(
        "markers", "slow: marks tests as slow running"
    )
# Test collection configuration:
# keep pytest from trying to collect the PowerShell (.ps1) helper scripts.
collect_ignore = [
    "test_auth.ps1",
    "test_auth_flow.ps1",
    "test_database.ps1"
]

View File

@@ -1,232 +0,0 @@
"""
End-to-end tests for authentication flow.
Tests complete user authentication scenarios including login/logout flow
and session management.
"""
import os
import sys
from unittest.mock import patch
import pytest
from fastapi.testclient import TestClient
# Add source directory to path
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..'))
# Import after path setup
from src.server.fastapi_app import app # noqa: E402
@pytest.fixture
def client():
    """Test client for E2E authentication tests (in-process FastAPI app)."""
    return TestClient(app)
@pytest.mark.e2e
class TestAuthenticationE2E:
    """End-to-end authentication tests.

    Each test drives the app through the TestClient exactly as an API client
    would: login, token verification, protected access, logout, and the
    corresponding error paths. The app settings are patched per-test.
    """
    def test_full_authentication_workflow(self, client, mock_settings):
        """Test complete authentication workflow from user perspective."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            # Scenario: User wants to access protected resource
            # Step 1: Try to access protected endpoint without authentication
            protected_response = client.get("/api/anime/search?query=test")
            assert protected_response.status_code in [401, 403]  # Should be unauthorized
            # Step 2: User logs in with correct password
            login_response = client.post(
                "/auth/login",
                json={"password": "test_password"}
            )
            assert login_response.status_code == 200
            login_data = login_response.json()
            assert login_data["success"] is True
            token = login_data["token"]
            # Step 3: Verify token is working
            verify_response = client.get(
                "/auth/verify",
                headers={"Authorization": f"Bearer {token}"}
            )
            assert verify_response.status_code == 200
            assert verify_response.json()["valid"] is True
            # Step 4: Access protected resource with token
            # Note: This test assumes anime search endpoint exists and requires auth
            protected_response_with_auth = client.get(
                "/api/anime/search?query=test",
                headers={"Authorization": f"Bearer {token}"}
            )
            # Should not be 401/403 (actual response depends on implementation)
            assert protected_response_with_auth.status_code != 403
            # Step 5: User logs out
            logout_response = client.post(
                "/auth/logout",
                headers={"Authorization": f"Bearer {token}"}
            )
            assert logout_response.status_code == 200
            assert logout_response.json()["success"] is True
            # Step 6: Verify token behavior after logout
            # Note: This depends on implementation - some systems invalidate tokens,
            # others rely on expiry
            # Just verify the logout endpoint worked
            assert logout_response.json()["success"] is True
    def test_authentication_with_wrong_password_flow(self, client, mock_settings):
        """Test authentication flow with wrong password."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            # Step 1: User tries to login with wrong password
            login_response = client.post(
                "/auth/login",
                json={"password": "wrong_password"}
            )
            assert login_response.status_code == 401
            login_data = login_response.json()
            assert login_data["success"] is False
            assert "token" not in login_data
            # Step 2: User tries to access protected resource without valid token
            protected_response = client.get("/api/anime/search?query=test")
            assert protected_response.status_code in [401, 403]
            # Step 3: User tries again with correct password
            correct_login_response = client.post(
                "/auth/login",
                json={"password": "test_password"}
            )
            assert correct_login_response.status_code == 200
            assert correct_login_response.json()["success"] is True
    def test_session_expiry_simulation(self, client, mock_settings):
        """Test session expiry behavior."""
        # Set very short token expiry for testing
        mock_settings.token_expiry_hours = 0.001  # About 3.6 seconds
        with patch('src.server.fastapi_app.settings', mock_settings):
            # Login to get token
            login_response = client.post(
                "/auth/login",
                json={"password": "test_password"}
            )
            assert login_response.status_code == 200
            token = login_response.json()["token"]
            # Token should be valid immediately
            verify_response = client.get(
                "/auth/verify",
                headers={"Authorization": f"Bearer {token}"}
            )
            assert verify_response.status_code == 200
            # Wait for token to expire (in real implementation)
            # For testing, we'll just verify the token structure is correct
            import jwt
            payload = jwt.decode(token, options={"verify_signature": False})
            assert "exp" in payload
            assert payload["exp"] > 0
    def test_multiple_session_management(self, client, mock_settings):
        """Test managing multiple concurrent sessions."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            # Create multiple sessions (simulate multiple browser tabs/devices)
            sessions = []
            for i in range(3):
                login_response = client.post(
                    "/auth/login",
                    json={"password": "test_password"}
                )
                assert login_response.status_code == 200
                sessions.append(login_response.json()["token"])
            # All sessions should be valid
            for token in sessions:
                verify_response = client.get(
                    "/auth/verify",
                    headers={"Authorization": f"Bearer {token}"}
                )
                assert verify_response.status_code == 200
            # Logout from one session
            logout_response = client.post(
                "/auth/logout",
                headers={"Authorization": f"Bearer {sessions[0]}"}
            )
            assert logout_response.status_code == 200
            # Other sessions should still be valid (depending on implementation)
            for token in sessions[1:]:
                verify_response = client.get(
                    "/auth/verify",
                    headers={"Authorization": f"Bearer {token}"}
                )
                # Should still be valid unless implementation invalidates all sessions
                assert verify_response.status_code == 200
    def test_authentication_error_handling(self, client, mock_settings):
        """Test error handling in authentication flow."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            # Test various error scenarios
            # Invalid JSON
            invalid_json_response = client.post(
                "/auth/login",
                data="invalid json",
                headers={"Content-Type": "application/json"}
            )
            assert invalid_json_response.status_code == 422
            # Missing password field
            missing_field_response = client.post(
                "/auth/login",
                json={}
            )
            assert missing_field_response.status_code == 422
            # Empty password
            empty_password_response = client.post(
                "/auth/login",
                json={"password": ""}
            )
            assert empty_password_response.status_code == 422
            # Malformed authorization header
            malformed_auth_response = client.get(
                "/auth/verify",
                headers={"Authorization": "InvalidFormat"}
            )
            assert malformed_auth_response.status_code == 403
    def test_security_headers_and_responses(self, client, mock_settings):
        """Test security-related headers and response formats."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            # Test login response format
            login_response = client.post(
                "/auth/login",
                json={"password": "test_password"}
            )
            # Check response doesn't leak sensitive information
            login_data = login_response.json()
            assert "password" not in str(login_data)
            assert "secret" not in str(login_data).lower()
            # Test error responses don't leak sensitive information
            error_response = client.post(
                "/auth/login",
                json={"password": "wrong_password"}
            )
            error_data = error_response.json()
            assert "password" not in str(error_data)
            assert "hash" not in str(error_data).lower()
            assert "secret" not in str(error_data).lower()

View File

@@ -1,440 +0,0 @@
"""
End-to-End tests for bulk download and export flows.
This module tests complete user workflows for bulk operations including
download flows, export processes, and error handling scenarios.
"""
import asyncio
import time
from unittest.mock import AsyncMock, patch
import pytest
from fastapi.testclient import TestClient
from src.server.fastapi_app import app
@pytest.fixture
def client():
    """Create a test client for the FastAPI application (in-process)."""
    return TestClient(app)
@pytest.fixture
def auth_headers(client):
    """Provide authentication headers for protected endpoints.

    Returns an empty dict when login fails, so dependent tests degrade to
    unauthenticated requests instead of erroring in the fixture.
    """
    # Login to get token
    login_data = {"password": "testpassword"}
    # NOTE(review): patch() replaces the attribute with a Mock and only sets
    # .return_value — if the app reads master_password_hash as a plain
    # attribute (not a callable), this patch never takes effect; confirm.
    with patch('src.server.fastapi_app.settings.master_password_hash') as mock_hash:
        mock_hash.return_value = "5e884898da28047151d0e56f8dc6292773603d0d6aabbdd62a11ef721d1542d8"  # 'password' hash
        response = client.post("/auth/login", json=login_data)
        if response.status_code == 200:
            token = response.json()["access_token"]
            return {"Authorization": f"Bearer {token}"}
    return {}
class TestBulkDownloadFlow:
    """End-to-end tests for bulk download workflows.

    NOTE(review): assertions also accept 404 because the bulk endpoints are
    not implemented yet (per inline comments) — tighten once the API lands.
    """
    @patch('src.server.fastapi_app.get_current_user')
    def test_complete_bulk_download_workflow(self, mock_user, client):
        """Test complete bulk download workflow from search to completion."""
        mock_user.return_value = {"user_id": "test_user"}
        # Step 1: Search for anime to download
        search_response = client.get("/api/anime/search?q=test&limit=5")
        if search_response.status_code == 200:
            anime_list = search_response.json()
            anime_ids = [anime["id"] for anime in anime_list[:3]]  # Select first 3
        else:
            # Mock anime IDs if search endpoint not working
            anime_ids = ["anime1", "anime2", "anime3"]
        # Step 2: Initiate bulk download
        download_request = {
            "anime_ids": anime_ids,
            "quality": "1080p",
            "format": "mp4",
            "include_subtitles": True,
            "organize_by": "series"
        }
        download_response = client.post("/api/bulk/download", json=download_request)
        # Expected 404 since bulk endpoints not implemented yet
        assert download_response.status_code in [200, 202, 404]
        if download_response.status_code in [200, 202]:
            download_data = download_response.json()
            task_id = download_data.get("task_id")
            # Step 3: Monitor download progress
            if task_id:
                progress_response = client.get(f"/api/bulk/download/{task_id}/status")
                assert progress_response.status_code in [200, 404]
                if progress_response.status_code == 200:
                    progress_data = progress_response.json()
                    assert "status" in progress_data
                    assert "progress_percent" in progress_data
    @patch('src.server.fastapi_app.get_current_user')
    def test_bulk_download_with_retry_logic(self, mock_user, client):
        """Test bulk download with retry logic for failed items."""
        mock_user.return_value = {"user_id": "test_user"}
        # Start bulk download
        download_request = {
            "anime_ids": ["anime1", "anime2", "anime3"],
            "quality": "720p",
            "retry_failed": True,
            "max_retries": 3
        }
        download_response = client.post("/api/bulk/download", json=download_request)
        assert download_response.status_code in [200, 202, 404]
        if download_response.status_code in [200, 202]:
            task_id = download_response.json().get("task_id")
            # Simulate checking for failed items and retrying
            if task_id:
                failed_response = client.get(f"/api/bulk/download/{task_id}/failed")
                assert failed_response.status_code in [200, 404]
                if failed_response.status_code == 200:
                    failed_data = failed_response.json()
                    if failed_data.get("failed_items"):
                        # Retry failed items
                        retry_response = client.post(f"/api/bulk/download/{task_id}/retry")
                        assert retry_response.status_code in [200, 404]
    @patch('src.server.fastapi_app.get_current_user')
    def test_bulk_download_cancellation(self, mock_user, client):
        """Test cancelling an ongoing bulk download."""
        mock_user.return_value = {"user_id": "test_user"}
        # Start bulk download
        download_request = {
            "anime_ids": ["anime1", "anime2", "anime3", "anime4", "anime5"],
            "quality": "1080p"
        }
        download_response = client.post("/api/bulk/download", json=download_request)
        assert download_response.status_code in [200, 202, 404]
        if download_response.status_code in [200, 202]:
            task_id = download_response.json().get("task_id")
            if task_id:
                # Cancel the download
                cancel_response = client.post(f"/api/bulk/download/{task_id}/cancel")
                assert cancel_response.status_code in [200, 404]
                if cancel_response.status_code == 200:
                    cancel_data = cancel_response.json()
                    assert cancel_data.get("status") == "cancelled"
    @patch('src.server.fastapi_app.get_current_user')
    def test_bulk_download_with_insufficient_space(self, mock_user, client):
        """Test bulk download when there's insufficient disk space."""
        mock_user.return_value = {"user_id": "test_user"}
        # Try to download large amount of content
        download_request = {
            "anime_ids": [f"anime{i}" for i in range(100)],  # Large number
            "quality": "1080p",
            "check_disk_space": True
        }
        download_response = client.post("/api/bulk/download", json=download_request)
        # Should either work or return appropriate error
        assert download_response.status_code in [200, 202, 400, 404, 507]  # 507 = Insufficient Storage
class TestBulkExportFlow:
    """End-to-end tests for bulk export workflows (JSON/CSV, with filters)."""
    @patch('src.server.fastapi_app.get_current_user')
    def test_complete_bulk_export_workflow(self, mock_user, client):
        """Test complete bulk export workflow."""
        mock_user.return_value = {"user_id": "test_user"}
        # Step 1: Get list of available anime for export
        anime_response = client.get("/api/anime/search?limit=10")
        if anime_response.status_code == 200:
            anime_list = anime_response.json()
            anime_ids = [anime["id"] for anime in anime_list[:5]]
        else:
            anime_ids = ["anime1", "anime2", "anime3"]
        # Step 2: Request bulk export
        export_request = {
            "anime_ids": anime_ids,
            "format": "json",
            "include_metadata": True,
            "include_episode_info": True,
            "include_download_history": False
        }
        export_response = client.post("/api/bulk/export", json=export_request)
        assert export_response.status_code in [200, 202, 404]
        if export_response.status_code in [200, 202]:
            export_data = export_response.json()
            # Step 3: Check export status or get download URL
            if "export_id" in export_data:
                export_id = export_data["export_id"]
                status_response = client.get(f"/api/bulk/export/{export_id}/status")
                assert status_response.status_code in [200, 404]
            elif "download_url" in export_data:
                # Direct download available
                download_url = export_data["download_url"]
                assert download_url.startswith("http")
    @patch('src.server.fastapi_app.get_current_user')
    def test_bulk_export_csv_format(self, mock_user, client):
        """Test bulk export in CSV format."""
        mock_user.return_value = {"user_id": "test_user"}
        export_request = {
            "anime_ids": ["anime1", "anime2"],
            "format": "csv",
            "include_metadata": True,
            "csv_options": {
                "delimiter": ",",
                "include_headers": True,
                "encoding": "utf-8"
            }
        }
        export_response = client.post("/api/bulk/export", json=export_request)
        assert export_response.status_code in [200, 202, 404]
        if export_response.status_code == 200:
            # Check if response is CSV content or redirect
            content_type = export_response.headers.get("content-type", "")
            assert "csv" in content_type or "json" in content_type
    @patch('src.server.fastapi_app.get_current_user')
    def test_bulk_export_with_filters(self, mock_user, client):
        """Test bulk export with filtering options."""
        mock_user.return_value = {"user_id": "test_user"}
        export_request = {
            "anime_ids": ["anime1", "anime2", "anime3"],
            "format": "json",
            "filters": {
                "completed_only": True,
                "include_watched": False,
                "min_rating": 7.0,
                "genres": ["Action", "Adventure"]
            },
            "include_metadata": True
        }
        export_response = client.post("/api/bulk/export", json=export_request)
        assert export_response.status_code in [200, 202, 404]
class TestBulkOrganizeFlow:
    """End-to-end tests for bulk organize workflows (by genre/year, dry-run)."""
    @patch('src.server.fastapi_app.get_current_user')
    def test_bulk_organize_by_genre(self, mock_user, client):
        """Test bulk organizing anime by genre."""
        mock_user.return_value = {"user_id": "test_user"}
        organize_request = {
            "anime_ids": ["anime1", "anime2", "anime3"],
            "organize_by": "genre",
            "create_subdirectories": True,
            "move_files": True,
            "update_database": True
        }
        organize_response = client.post("/api/bulk/organize", json=organize_request)
        assert organize_response.status_code in [200, 202, 404]
        if organize_response.status_code in [200, 202]:
            organize_data = organize_response.json()
            if "task_id" in organize_data:
                task_id = organize_data["task_id"]
                # Monitor organization progress
                status_response = client.get(f"/api/bulk/organize/{task_id}/status")
                assert status_response.status_code in [200, 404]
    @patch('src.server.fastapi_app.get_current_user')
    def test_bulk_organize_by_year(self, mock_user, client):
        """Test bulk organizing anime by release year."""
        mock_user.return_value = {"user_id": "test_user"}
        organize_request = {
            "anime_ids": ["anime1", "anime2"],
            "organize_by": "year",
            "year_format": "YYYY",
            "create_subdirectories": True,
            "dry_run": True  # Test without actually moving files
        }
        organize_response = client.post("/api/bulk/organize", json=organize_request)
        assert organize_response.status_code in [200, 404]
        if organize_response.status_code == 200:
            organize_data = organize_response.json()
            # Dry run should return what would be moved
            assert "preview" in organize_data or "operations" in organize_data
class TestBulkDeleteFlow:
    """End-to-end tests for bulk delete workflows."""

    @patch('src.server.fastapi_app.get_current_user')
    def test_bulk_delete_with_confirmation(self, mock_user, client):
        """Test bulk delete with proper confirmation flow."""
        mock_user.return_value = {"user_id": "test_user"}
        # Step 1: Request deletion (should require confirmation)
        delete_request = {
            "anime_ids": ["anime_to_delete1", "anime_to_delete2"],
            "delete_files": True,
            "confirm": False  # First request without confirmation
        }
        # Fix: the httpx-based TestClient's .delete() helper does not accept
        # a json= keyword (DELETE bodies are unusual), so send the body via
        # the generic .request() method instead.
        delete_response = client.request("DELETE", "/api/bulk/delete", json=delete_request)
        # Should require confirmation
        assert delete_response.status_code in [400, 404, 422]
        # Step 2: Confirm deletion
        delete_request["confirm"] = True
        confirmed_response = client.request("DELETE", "/api/bulk/delete", json=delete_request)
        assert confirmed_response.status_code in [200, 404]

    @patch('src.server.fastapi_app.get_current_user')
    def test_bulk_delete_database_only(self, mock_user, client):
        """Test bulk delete from database only (keep files)."""
        mock_user.return_value = {"user_id": "test_user"}
        delete_request = {
            "anime_ids": ["anime1", "anime2"],
            "delete_files": False,  # Keep files, remove from database only
            "confirm": True
        }
        # Same as above: DELETE with a JSON body requires client.request().
        delete_response = client.request("DELETE", "/api/bulk/delete", json=delete_request)
        assert delete_response.status_code in [200, 404]
class TestBulkOperationsErrorHandling:
    """End-to-end tests for error handling in bulk operations."""

    @patch('src.server.fastapi_app.get_current_user')
    def test_bulk_operation_with_mixed_results(self, mock_user, client):
        """Test bulk operation where some items succeed and others fail.

        With ``continue_on_error`` set, the endpoint is expected to process
        valid IDs and report failures instead of aborting the whole batch.
        """
        mock_user.return_value = {"user_id": "test_user"}
        # Mix of valid and invalid anime IDs
        download_request = {
            "anime_ids": ["valid_anime1", "invalid_anime", "valid_anime2"],
            "quality": "1080p",
            "continue_on_error": True
        }
        download_response = client.post("/api/bulk/download", json=download_request)
        # 404 is accepted so the test passes when the endpoint is not implemented.
        assert download_response.status_code in [200, 202, 404]
        if download_response.status_code in [200, 202]:
            result_data = download_response.json()
            # Should have information about successes and failures
            if "partial_success" in result_data:
                assert "successful" in result_data
                assert "failed" in result_data

    @patch('src.server.fastapi_app.get_current_user')
    def test_bulk_operation_timeout_handling(self, mock_user, client):
        """Test handling of bulk operation timeouts."""
        mock_user.return_value = {"user_id": "test_user"}
        # Large operation that might timeout
        large_request = {
            "anime_ids": [f"anime{i}" for i in range(50)],
            "quality": "1080p",
            "timeout_seconds": 30
        }
        download_response = client.post("/api/bulk/download", json=large_request)
        # Should either succeed, be accepted for background processing, or timeout
        # (408 = Request Timeout, 504 = Gateway Timeout).
        assert download_response.status_code in [200, 202, 404, 408, 504]

    @patch('src.server.fastapi_app.get_current_user')
    def test_concurrent_bulk_operations(self, mock_user, client):
        """Test handling of concurrent bulk operations."""
        mock_user.return_value = {"user_id": "test_user"}
        # Start first operation
        first_request = {
            "anime_ids": ["anime1", "anime2"],
            "quality": "1080p"
        }
        first_response = client.post("/api/bulk/download", json=first_request)
        # Start second operation while first is running
        # NOTE(review): TestClient calls are synchronous, so the first request
        # has already returned here; "concurrent" only holds if the server
        # queues work in the background — confirm against the endpoint.
        second_request = {
            "anime_ids": ["anime3", "anime4"],
            "quality": "720p"
        }
        second_response = client.post("/api/bulk/download", json=second_request)
        # Both operations should be handled appropriately
        assert first_response.status_code in [200, 202, 404]
        assert second_response.status_code in [200, 202, 404, 429]  # 429 = Too Many Requests
class TestBulkOperationsPerformance:
    """Performance-oriented checks for the bulk operation endpoints."""

    @patch('src.server.fastapi_app.get_current_user')
    def test_bulk_operation_response_time(self, mock_user, client):
        """Test that bulk operations respond within reasonable time."""
        mock_user.return_value = {"user_id": "test_user"}
        started = time.time()
        payload = {
            "anime_ids": ["anime1", "anime2", "anime3"],
            "quality": "1080p",
        }
        response = client.post("/api/bulk/download", json=payload)
        elapsed = time.time() - started
        # Even when processing continues in the background, the API itself
        # must answer promptly (< 5 seconds).
        assert elapsed < 5.0
        assert response.status_code in (200, 202, 404)

    @patch('src.server.fastapi_app.get_current_user')
    def test_bulk_operation_memory_usage(self, mock_user, client):
        """Test bulk operations don't cause excessive memory usage."""
        mock_user.return_value = {"user_id": "test_user"}
        # Deliberately large batch; real memory monitoring would be needed
        # for an actual measurement.
        payload = {
            "anime_ids": [f"anime{i}" for i in range(100)],
            "quality": "1080p",
        }
        response = client.post("/api/bulk/download", json=payload)
        # 413 = Payload Too Large
        assert response.status_code in (200, 202, 404, 413)
# Allow running this test module directly (outside the pytest CLI).
if __name__ == "__main__":
    pytest.main([__file__, "-v"])

View File

@@ -1,407 +0,0 @@
"""
End-to-end tests for CLI flows.
Tests complete CLI workflows including progress bar functionality,
retry logic, user interactions, and error scenarios.
"""
import os
import sys
import tempfile
from unittest.mock import Mock, patch
import pytest
# Add source directory to path
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..'))
# Import after path setup
from src.cli.Main import SeriesApp # noqa: E402
@pytest.fixture
def temp_directory():
    """Yield a temporary directory path that is cleaned up after the test."""
    with tempfile.TemporaryDirectory() as tmp_path:
        yield tmp_path
@pytest.mark.e2e
class TestCLICompleteWorkflows:
    """Test complete CLI workflows from user perspective.

    All project collaborators (Loaders, SerieScanner, SerieList) are patched
    so only the SeriesApp orchestration logic itself is exercised.
    """
    def test_search_and_download_workflow(self, temp_directory):
        """Test complete search -> select -> download workflow."""
        with patch('src.cli.Main.Loaders'), \
             patch('src.cli.Main.SerieScanner'), \
             patch('src.cli.Main.SerieList'):
            app = SeriesApp(temp_directory)
            # Mock search results
            mock_search_results = [
                {"name": "Test Anime", "link": "test_link"}
            ]
            # Mock series for download
            # episodeDict maps season -> list of episode numbers.
            mock_episode_dict = {1: [1, 2, 3], 2: [1, 2]}
            mock_series = Mock(
                episodeDict=mock_episode_dict,
                folder="test_anime",
                key="test_key"
            )
            app.series_list = [mock_series]
            # Mock loader
            mock_loader = Mock()
            mock_loader.Search.return_value = mock_search_results
            mock_loader.IsLanguage.return_value = True
            mock_loader.Download.return_value = None
            app.Loaders.GetLoader.return_value = mock_loader
            # Test search workflow: user types a query then picks result 1.
            with patch('builtins.input', side_effect=['test query', '1']), \
                 patch('builtins.print'):
                app.search_mode()
                # Should have called search and add
                mock_loader.Search.assert_called_with('test query')
                app.List.add.assert_called_once()
            # Test download workflow
            with patch('rich.progress.Progress') as mock_progress_class, \
                 patch('time.sleep'), \
                 patch('builtins.input', return_value='1'):
                mock_progress = Mock()
                mock_progress_class.return_value = mock_progress
                selected_series = app.get_user_selection()
                assert selected_series is not None
                app.download_series(selected_series)
                # Should have set up progress tracking
                mock_progress.start.assert_called_once()
                mock_progress.stop.assert_called_once()
                # Should have attempted downloads for all episodes
                expected_downloads = sum(len(episodes) for episodes in mock_episode_dict.values())
                assert mock_loader.Download.call_count == expected_downloads

    def test_init_and_rescan_workflow(self, temp_directory):
        """Test initialization and rescanning workflow."""
        with patch('src.cli.Main.Loaders'), \
             patch('src.cli.Main.SerieScanner') as mock_scanner_class, \
             patch('src.cli.Main.SerieList') as mock_list_class:
            mock_scanner = Mock()
            mock_scanner_class.return_value = mock_scanner
            mock_list = Mock()
            mock_list_class.return_value = mock_list
            app = SeriesApp(temp_directory)
            app.SerieScanner = mock_scanner
            # Test rescan workflow
            with patch('rich.progress.Progress') as mock_progress_class, \
                 patch('builtins.print'):
                mock_progress = Mock()
                mock_progress_class.return_value = mock_progress
                # Simulate init action
                app.progress = mock_progress
                app.task1 = "task1_id"
                # Call reinit workflow
                app.SerieScanner.Reinit()
                app.SerieScanner.Scan(app.updateFromReinit)
                # Should have called scanner methods
                mock_scanner.Reinit.assert_called_once()
                mock_scanner.Scan.assert_called_once()

    def test_error_recovery_workflow(self, temp_directory):
        """Test error recovery in CLI workflows."""
        with patch('src.cli.Main.Loaders'), \
             patch('src.cli.Main.SerieScanner'), \
             patch('src.cli.Main.SerieList'):
            app = SeriesApp(temp_directory)
            # Test retry mechanism with eventual success
            mock_func = Mock(side_effect=[
                Exception("First failure"),
                Exception("Second failure"),
                None  # Success on third try
            ])
            with patch('time.sleep'), patch('builtins.print'):
                # retry() returns True on eventual success, False otherwise.
                result = app.retry(mock_func, max_retries=3, delay=0)
                assert result is True
                assert mock_func.call_count == 3
            # Test retry mechanism with persistent failure
            mock_func_fail = Mock(side_effect=Exception("Persistent error"))
            with patch('time.sleep'), patch('builtins.print'):
                result = app.retry(mock_func_fail, max_retries=2, delay=0)
                assert result is False
                assert mock_func_fail.call_count == 2
@pytest.mark.e2e
class TestCLIUserInteractionFlows:
    """Test CLI user interaction flows."""

    def test_user_selection_validation_flow(self, temp_directory):
        """Test user selection with various invalid inputs before success."""
        with patch('src.cli.Main.Loaders'), \
             patch('src.cli.Main.SerieScanner'), \
             patch('src.cli.Main.SerieList'):
            app = SeriesApp(temp_directory)
            # Fix: Mock(name=...) does NOT set the .name attribute — the
            # constructor kwarg only names the mock for its repr — so the
            # final assertion would compare a child Mock to a string.
            # Assign .name explicitly after construction instead.
            anime_one = Mock(folder="anime1")
            anime_one.name = "Anime 1"
            anime_two = Mock(folder="anime2")
            anime_two.name = "Anime 2"
            app.series_list = [anime_one, anime_two]
            # Test sequence: invalid text -> invalid number -> valid selection
            input_sequence = ['invalid_text', '999', '1']
            with patch('builtins.input', side_effect=input_sequence), \
                 patch('builtins.print'):
                selected = app.get_user_selection()
                assert selected is not None
                assert len(selected) == 1
                assert selected[0].name == "Anime 1"

    def test_search_interaction_flow(self, temp_directory):
        """Test search interaction with various user inputs."""
        with patch('src.cli.Main.Loaders'), \
             patch('src.cli.Main.SerieScanner'), \
             patch('src.cli.Main.SerieList'):
            app = SeriesApp(temp_directory)
            mock_search_results = [
                {"name": "Result 1", "link": "link1"},
                {"name": "Result 2", "link": "link2"}
            ]
            mock_loader = Mock()
            mock_loader.Search.return_value = mock_search_results
            app.Loaders.GetLoader.return_value = mock_loader
            # Test sequence: search -> invalid selection -> valid selection
            with patch('builtins.input', side_effect=['test search', '999', '1']), \
                 patch('builtins.print'):
                app.search_mode()
                # Should have added the selected item
                app.List.add.assert_called_once()

    def test_main_loop_interaction_flow(self, temp_directory):
        """Test main application loop with user interactions."""
        with patch('src.cli.Main.Loaders'), \
             patch('src.cli.Main.SerieScanner'), \
             patch('src.cli.Main.SerieList'):
            app = SeriesApp(temp_directory)
            # Same Mock(name=...) pitfall as above: set .name explicitly.
            placeholder = Mock(folder="test")
            placeholder.name = "Test Anime"
            app.series_list = [placeholder]
            # Mock various components
            with patch.object(app, 'search_mode') as mock_search, \
                 patch.object(app, 'get_user_selection', return_value=[Mock()]), \
                 patch.object(app, 'download_series') as mock_download, \
                 patch('rich.progress.Progress'), \
                 patch('builtins.print'):
                # Test sequence: search -> download -> exit
                with patch('builtins.input', side_effect=['s', 'd', KeyboardInterrupt()]):
                    try:
                        app.run()
                    except KeyboardInterrupt:
                        pass
                mock_search.assert_called_once()
                mock_download.assert_called_once()
@pytest.mark.e2e
class TestCLIProgressAndFeedback:
    """Test CLI progress indicators and user feedback."""

    def test_download_progress_flow(self, temp_directory):
        """Test download progress tracking throughout workflow."""
        with patch('src.cli.Main.Loaders'), \
             patch('src.cli.Main.SerieScanner'), \
             patch('src.cli.Main.SerieList'):
            app = SeriesApp(temp_directory)
            # Mock series with episodes (season -> episode numbers)
            mock_series = [
                Mock(
                    episodeDict={1: [1, 2], 2: [1]},
                    folder="anime1",
                    key="key1"
                )
            ]
            # Mock loader
            mock_loader = Mock()
            mock_loader.IsLanguage.return_value = True
            mock_loader.Download.return_value = None
            app.Loaders.GetLoader.return_value = mock_loader
            with patch('rich.progress.Progress') as mock_progress_class, \
                 patch('time.sleep'):
                mock_progress = Mock()
                mock_progress_class.return_value = mock_progress
                app.download_series(mock_series)
                # Verify progress setup
                assert mock_progress.add_task.call_count >= 3  # At least 3 tasks
                mock_progress.start.assert_called_once()
                mock_progress.stop.assert_called_once()
                # Verify progress updates
                assert mock_progress.update.call_count > 0

    def test_progress_callback_integration(self, temp_directory):
        """Test progress callback integration with download system.

        Feeds yt-dlp-style progress dicts into print_Download_Progress and
        checks that each one produces a progress-bar update.
        """
        with patch('src.cli.Main.Loaders'), \
             patch('src.cli.Main.SerieScanner'), \
             patch('src.cli.Main.SerieList'):
            app = SeriesApp(temp_directory)
            app.progress = Mock()
            app.task3 = "download_task"
            # Test various progress states
            progress_states = [
                {
                    'status': 'downloading',
                    'total_bytes': 1000000,
                    'downloaded_bytes': 250000
                },
                {
                    'status': 'downloading',
                    'total_bytes': 1000000,
                    'downloaded_bytes': 750000
                },
                {
                    'status': 'finished'
                }
            ]
            for state in progress_states:
                app.print_Download_Progress(state)
            # Should have updated progress for each state
            assert app.progress.update.call_count == len(progress_states)
            # Last call should indicate completion
            # (the 'finished' state maps to completed=100)
            last_call = app.progress.update.call_args_list[-1]
            assert last_call[1].get('completed') == 100

    def test_scan_progress_integration(self, temp_directory):
        """Test scanning progress integration."""
        with patch('src.cli.Main.Loaders'), \
             patch('src.cli.Main.SerieScanner'), \
             patch('src.cli.Main.SerieList'):
            app = SeriesApp(temp_directory)
            app.progress = Mock()
            app.task1 = "scan_task"
            # Simulate scan progress updates
            for i in range(5):
                app.updateFromReinit("folder", i)
            # Should have updated progress for each folder
            assert app.progress.update.call_count == 5
            # Each call should advance by 1
            for call in app.progress.update.call_args_list:
                assert call[1].get('advance') == 1
@pytest.mark.e2e
class TestCLIErrorScenarios:
    """Exercise CLI error paths: retries, bad input, and interrupts."""

    def test_network_error_recovery(self, temp_directory):
        """Recovery from network errors during operations."""
        with patch('src.cli.Main.Loaders'), \
             patch('src.cli.Main.SerieScanner'), \
             patch('src.cli.Main.SerieList'):
            cli_app = SeriesApp(temp_directory)
            # Fail twice, then succeed on the third attempt.
            failure = Exception("Network connection failed")
            flaky_call = Mock(side_effect=[failure, failure, None])
            with patch('time.sleep'), patch('builtins.print'):
                outcome = cli_app.retry(flaky_call, max_retries=3, delay=0)
                assert outcome is True
                assert flaky_call.call_count == 3

    def test_invalid_directory_handling(self):
        """An invalid directory path must not break initialization."""
        bogus_path = "/nonexistent/path/that/does/not/exist"
        with patch('src.cli.Main.Loaders'), \
             patch('src.cli.Main.SerieScanner'), \
             patch('src.cli.Main.SerieList'):
            # Constructing the app should not raise.
            cli_app = SeriesApp(bogus_path)
            assert cli_app.directory_to_search == bogus_path

    def test_empty_search_results_handling(self, temp_directory):
        """An empty result set produces a 'No results found' message."""
        with patch('src.cli.Main.Loaders'), \
             patch('src.cli.Main.SerieScanner'), \
             patch('src.cli.Main.SerieList'):
            cli_app = SeriesApp(temp_directory)
            # Loader that finds nothing for any query.
            empty_loader = Mock()
            empty_loader.Search.return_value = []
            cli_app.Loaders.GetLoader.return_value = empty_loader
            with patch('builtins.input', return_value='nonexistent anime'), \
                 patch('builtins.print') as fake_print:
                cli_app.search_mode()
                printed_lines = [call[0][0] for call in fake_print.call_args_list]
                assert any("No results found" in line for line in printed_lines)

    def test_keyboard_interrupt_handling(self, temp_directory):
        """KeyboardInterrupt raised at the prompt propagates out of run()."""
        with patch('src.cli.Main.Loaders'), \
             patch('src.cli.Main.SerieScanner'), \
             patch('src.cli.Main.SerieList'):
            cli_app = SeriesApp(temp_directory)
            with patch('builtins.input', side_effect=KeyboardInterrupt()):
                with pytest.raises(KeyboardInterrupt):
                    cli_app.run()

View File

@@ -1,550 +0,0 @@
"""
End-to-End tests for user preferences workflows and UI response verification.
This module tests complete user workflows for changing preferences and verifying
that the UI responds appropriately to preference changes.
"""
import time
from unittest.mock import patch
import pytest
from fastapi.testclient import TestClient
from src.server.fastapi_app import app
@pytest.fixture
def client():
    """Create a test client for the FastAPI application."""
    # TestClient drives the ASGI app in-process; no server is started.
    return TestClient(app)
@pytest.fixture
def auth_headers(client):
    """Provide authentication headers for protected endpoints.

    Falls back to empty headers if login fails, so dependent tests degrade
    to unauthenticated requests rather than erroring in the fixture.
    """
    # Login to get token
    login_data = {"password": "testpassword"}
    with patch('src.server.fastapi_app.settings.master_password_hash') as mock_hash:
        # NOTE(review): this constant is the SHA-256 of the string 'password',
        # yet the payload above submits 'testpassword' — if the server hashes
        # the submitted password and compares, login never succeeds and this
        # fixture silently returns {}. Also, setting .return_value on a
        # patched *attribute* only matters if the attribute is called —
        # confirm how master_password_hash is consumed by the app.
        mock_hash.return_value = "5e884898da28047151d0e56f8dc6292773603d0d6aabbdd62a11ef721d1542d8"  # 'password' hash
        response = client.post("/auth/login", json=login_data)
        if response.status_code == 200:
            token = response.json()["access_token"]
            return {"Authorization": f"Bearer {token}"}
    return {}
class TestThemeChangeWorkflow:
    """End-to-end tests for theme changing workflows."""

    @patch('src.server.fastapi_app.get_current_user')
    def test_complete_theme_change_workflow(self, mock_user, client):
        """Test complete workflow of changing theme and verifying UI updates."""
        mock_user.return_value = {"user_id": "test_user"}
        # Step 1: Get current theme
        current_theme_response = client.get("/api/preferences/themes/current")
        initial_theme = None
        if current_theme_response.status_code == 200:
            initial_theme = current_theme_response.json().get("theme", {}).get("name")
        # Step 2: Get available themes
        themes_response = client.get("/api/preferences/themes")
        available_themes = []
        if themes_response.status_code == 200:
            available_themes = [theme["name"] for theme in themes_response.json().get("themes", [])]
        # Step 3: Change to different theme
        new_theme = "dark" if initial_theme != "dark" else "light"
        if not available_themes:
            available_themes = ["light", "dark"]  # Default themes
        if new_theme in available_themes:
            theme_change_data = {"theme_name": new_theme}
            change_response = client.post("/api/preferences/themes/set", json=theme_change_data)
            if change_response.status_code == 200:
                # Step 4: Verify theme was changed
                updated_theme_response = client.get("/api/preferences/themes/current")
                if updated_theme_response.status_code == 200:
                    updated_theme = updated_theme_response.json().get("theme", {}).get("name")
                    assert updated_theme == new_theme
                # Step 5: Verify UI reflects theme change (mock check)
                ui_response = client.get("/api/preferences/ui")
                if ui_response.status_code == 200:
                    ui_data = ui_response.json()
                    # UI should reflect the theme change
                    assert "theme" in str(ui_data).lower() or "current" in str(ui_data).lower()
        # Test passes if endpoints respond appropriately (200 or 404)
        assert themes_response.status_code in [200, 404]

    @patch('src.server.fastapi_app.get_current_user')
    def test_custom_theme_creation_and_application(self, mock_user, client):
        """Test creating custom theme and applying it."""
        mock_user.return_value = {"user_id": "test_user"}
        # Step 1: Create custom theme
        custom_theme_data = {
            "name": "my_test_theme",
            "display_name": "My Test Theme",
            "colors": {
                "primary": "#007acc",
                "secondary": "#6c757d",
                "background": "#ffffff",
                "text": "#333333",
                "accent": "#28a745"
            },
            "is_dark": False
        }
        create_response = client.post("/api/preferences/themes/custom", json=custom_theme_data)
        if create_response.status_code == 201:
            theme_data = create_response.json()
            # NOTE(review): theme_id is captured but never used afterwards.
            theme_id = theme_data.get("theme_id")
            # Step 2: Apply the custom theme
            apply_data = {"theme_name": "my_test_theme"}
            apply_response = client.post("/api/preferences/themes/set", json=apply_data)
            if apply_response.status_code == 200:
                # Step 3: Verify custom theme is active
                current_response = client.get("/api/preferences/themes/current")
                if current_response.status_code == 200:
                    current_theme = current_response.json().get("theme", {})
                    assert current_theme.get("name") == "my_test_theme"
        # Test endpoints exist and respond appropriately
        assert create_response.status_code in [201, 404]

    @patch('src.server.fastapi_app.get_current_user')
    def test_theme_persistence_across_sessions(self, mock_user, client):
        """Test that theme preference persists across sessions."""
        mock_user.return_value = {"user_id": "test_user"}
        # Set theme
        theme_data = {"theme_name": "dark"}
        set_response = client.post("/api/preferences/themes/set", json=theme_data)
        if set_response.status_code == 200:
            # Simulate new session by getting current theme
            current_response = client.get("/api/preferences/themes/current")
            if current_response.status_code == 200:
                current_theme = current_response.json().get("theme", {}).get("name")
                assert current_theme == "dark"
        assert set_response.status_code in [200, 404]
class TestLanguageChangeWorkflow:
    """End-to-end tests for language changing workflows."""

    @patch('src.server.fastapi_app.get_current_user')
    def test_complete_language_change_workflow(self, mock_user, client):
        """Test complete workflow of changing language and verifying UI updates."""
        mock_user.return_value = {"user_id": "test_user"}
        # Step 1: Get available languages
        languages_response = client.get("/api/preferences/languages")
        available_languages = []
        if languages_response.status_code == 200:
            available_languages = [lang["code"] for lang in languages_response.json().get("languages", [])]
        # Step 2: Get current language
        current_response = client.get("/api/preferences/languages/current")
        current_language = None
        if current_response.status_code == 200:
            current_language = current_response.json().get("language", {}).get("code")
        # Step 3: Change to different language
        new_language = "de" if current_language != "de" else "en"
        if not available_languages:
            available_languages = ["en", "de", "fr", "es"]  # Default languages
        if new_language in available_languages:
            language_data = {"language_code": new_language}
            change_response = client.post("/api/preferences/languages/set", json=language_data)
            if change_response.status_code == 200:
                # Step 4: Verify language was changed
                updated_response = client.get("/api/preferences/languages/current")
                if updated_response.status_code == 200:
                    updated_language = updated_response.json().get("language", {}).get("code")
                    assert updated_language == new_language
        # Step 5: Verify UI text reflects language change (mock check)
        # In real implementation, this would check translated text
        ui_response = client.get("/")  # Main page
        assert ui_response.status_code in [200, 404]
        assert languages_response.status_code in [200, 404]

    @patch('src.server.fastapi_app.get_current_user')
    def test_language_fallback_behavior(self, mock_user, client):
        """Test language fallback when preferred language is unavailable."""
        mock_user.return_value = {"user_id": "test_user"}
        # Try to set unsupported language
        unsupported_language_data = {"language_code": "xyz"}  # Non-existent language
        change_response = client.post("/api/preferences/languages/set", json=unsupported_language_data)
        # Should either reject or fallback to default
        assert change_response.status_code in [400, 404, 422]
        # Verify fallback to default language
        current_response = client.get("/api/preferences/languages/current")
        if current_response.status_code == 200:
            current_language = current_response.json().get("language", {}).get("code")
            # Fix: the original `assert len(x) >= 2 if x else True` put the
            # conditional inside the assert, silently asserting True whenever
            # current_language was falsy; a guard clause makes the intent
            # explicit: only validate the code when one is present.
            if current_language:
                # Should be a valid language code (en, de, etc.)
                assert len(current_language) >= 2
class TestAccessibilityWorkflow:
    """End-to-end tests for accessibility settings workflows."""

    @patch('src.server.fastapi_app.get_current_user')
    def test_accessibility_settings_workflow(self, mock_user, client):
        """Test complete accessibility settings workflow."""
        mock_user.return_value = {"user_id": "test_user"}
        # Step 1: Get current accessibility settings
        current_response = client.get("/api/preferences/accessibility")
        # NOTE(review): initial_settings is captured but never used afterwards.
        initial_settings = {}
        if current_response.status_code == 200:
            initial_settings = current_response.json()
        # Step 2: Update accessibility settings
        new_settings = {
            "high_contrast": True,
            "large_text": True,
            "reduced_motion": False,
            "screen_reader_support": True,
            "keyboard_navigation": True,
            "font_size_multiplier": 1.5
        }
        update_response = client.put("/api/preferences/accessibility", json=new_settings)
        if update_response.status_code == 200:
            # Step 3: Verify settings were updated
            updated_response = client.get("/api/preferences/accessibility")
            if updated_response.status_code == 200:
                updated_settings = updated_response.json()
                # Check that key settings were updated
                for key, value in new_settings.items():
                    if key in updated_settings:
                        assert updated_settings[key] == value
        # Step 4: Verify UI reflects accessibility changes
        # Check main page with accessibility features
        main_page_response = client.get("/app")
        if main_page_response.status_code == 200:
            # In real implementation, would check for accessibility features
            assert main_page_response.status_code == 200
        assert current_response.status_code in [200, 404]

    @patch('src.server.fastapi_app.get_current_user')
    def test_high_contrast_mode_workflow(self, mock_user, client):
        """Test high contrast mode workflow."""
        mock_user.return_value = {"user_id": "test_user"}
        # Enable high contrast mode
        accessibility_data = {
            "high_contrast": True,
            "large_text": True
        }
        update_response = client.put("/api/preferences/accessibility", json=accessibility_data)
        if update_response.status_code == 200:
            # Verify theme reflects high contrast
            theme_response = client.get("/api/preferences/themes/current")
            if theme_response.status_code == 200:
                theme_data = theme_response.json()
                # High contrast should influence theme colors
                assert "theme" in theme_data
        assert update_response.status_code in [200, 404]
class TestUISettingsWorkflow:
    """End-to-end tests for UI settings workflows."""

    @patch('src.server.fastapi_app.get_current_user')
    def test_view_mode_change_workflow(self, mock_user, client):
        """Test changing view mode from grid to list and back."""
        mock_user.return_value = {"user_id": "test_user"}
        # Step 1: Get current UI settings
        ui_response = client.get("/api/preferences/ui")
        current_view_mode = None
        if ui_response.status_code == 200:
            current_view_mode = ui_response.json().get("view_mode")
        # Step 2: Change view mode (toggle away from the current one)
        new_view_mode = "list" if current_view_mode != "list" else "grid"
        view_data = {
            "view_mode": new_view_mode,
            "show_thumbnails": True if new_view_mode == "grid" else False
        }
        if new_view_mode == "grid":
            view_data["grid_columns"] = 4
        change_response = client.post("/api/preferences/ui/view-mode", json=view_data)
        if change_response.status_code == 200:
            # Step 3: Verify view mode changed
            updated_response = client.get("/api/preferences/ui")
            if updated_response.status_code == 200:
                updated_ui = updated_response.json()
                assert updated_ui.get("view_mode") == new_view_mode
            # Step 4: Verify anime list reflects view mode
            anime_response = client.get("/api/anime/search?limit=5")
            if anime_response.status_code == 200:
                # In real implementation, response format might differ based on view mode
                assert anime_response.status_code == 200
        assert ui_response.status_code in [200, 404]

    @patch('src.server.fastapi_app.get_current_user')
    def test_ui_density_change_workflow(self, mock_user, client):
        """Test changing UI density settings."""
        mock_user.return_value = {"user_id": "test_user"}
        # Test different density settings
        density_options = ["compact", "comfortable", "spacious"]
        for density in density_options:
            density_data = {
                "density": density,
                "compact_mode": density == "compact"
            }
            density_response = client.post("/api/preferences/ui/density", json=density_data)
            if density_response.status_code == 200:
                # Verify density was set
                ui_response = client.get("/api/preferences/ui")
                if ui_response.status_code == 200:
                    ui_data = ui_response.json()
                    assert ui_data.get("density") == density
            # All density changes should be valid
            assert density_response.status_code in [200, 404]
class TestKeyboardShortcutsWorkflow:
    """End-to-end tests for keyboard shortcuts workflows."""

    @patch('src.server.fastapi_app.get_current_user')
    def test_keyboard_shortcuts_customization(self, mock_user, client):
        """Test customizing keyboard shortcuts."""
        mock_user.return_value = {"user_id": "test_user"}
        # Step 1: Get current shortcuts
        shortcuts_response = client.get("/api/preferences/shortcuts")
        if shortcuts_response.status_code == 200:
            # NOTE(review): current_shortcuts is captured but never used.
            current_shortcuts = shortcuts_response.json().get("shortcuts", {})
        # Step 2: Update a shortcut
        shortcut_data = {
            "action": "search",
            "shortcut": "Ctrl+Shift+F",
            "description": "Global search"
        }
        update_response = client.put("/api/preferences/shortcuts", json=shortcut_data)
        if update_response.status_code == 200:
            # Step 3: Verify shortcut was updated
            updated_response = client.get("/api/preferences/shortcuts")
            if updated_response.status_code == 200:
                updated_shortcuts = updated_response.json().get("shortcuts", {})
                if "search" in updated_shortcuts:
                    assert updated_shortcuts["search"]["shortcut"] == "Ctrl+Shift+F"
        assert shortcuts_response.status_code in [200, 404]

    @patch('src.server.fastapi_app.get_current_user')
    def test_shortcuts_reset_workflow(self, mock_user, client):
        """Test resetting shortcuts to defaults."""
        mock_user.return_value = {"user_id": "test_user"}
        # Step 1: Modify some shortcuts
        custom_shortcut = {
            "action": "download",
            "shortcut": "Ctrl+Alt+D"
        }
        # NOTE(review): modify_response is never asserted on; the modify step
        # is best-effort setup for the reset below.
        modify_response = client.put("/api/preferences/shortcuts", json=custom_shortcut)
        # Step 2: Reset to defaults
        reset_response = client.post("/api/preferences/shortcuts/reset")
        if reset_response.status_code == 200:
            # Step 3: Verify shortcuts were reset
            shortcuts_response = client.get("/api/preferences/shortcuts")
            if shortcuts_response.status_code == 200:
                shortcuts = shortcuts_response.json().get("shortcuts", {})
                # Should have default shortcuts
                assert len(shortcuts) > 0
        assert reset_response.status_code in [200, 404]
class TestPreferencesIntegrationWorkflow:
    """End-to-end tests for integrated preferences workflows."""

    @patch('src.server.fastapi_app.get_current_user')
    def test_complete_preferences_setup_workflow(self, mock_user, client):
        """Test complete new user preferences setup workflow."""
        mock_user.return_value = {"user_id": "test_user"}
        # Step 1: Set theme
        theme_data = {"theme_name": "dark"}
        theme_response = client.post("/api/preferences/themes/set", json=theme_data)
        # Step 2: Set language
        language_data = {"language_code": "en"}
        language_response = client.post("/api/preferences/languages/set", json=language_data)
        # Step 3: Configure accessibility
        accessibility_data = {
            "high_contrast": False,
            "large_text": False,
            "reduced_motion": True
        }
        accessibility_response = client.put("/api/preferences/accessibility", json=accessibility_data)
        # Step 4: Set UI preferences
        ui_data = {
            "view_mode": "grid",
            "grid_columns": 4,
            "show_thumbnails": True
        }
        ui_response = client.post("/api/preferences/ui/view-mode", json=ui_data)
        # Step 5: Verify all preferences were set
        all_prefs_response = client.get("/api/preferences")
        if all_prefs_response.status_code == 200:
            prefs_data = all_prefs_response.json()
            # Should contain all preference sections
            expected_sections = ["theme", "language", "accessibility", "ui_settings"]
            for section in expected_sections:
                if section in prefs_data:
                    assert prefs_data[section] is not None
        # All steps should complete successfully or return 404 (not implemented)
        responses = [theme_response, language_response, accessibility_response, ui_response]
        for response in responses:
            assert response.status_code in [200, 404]

    @patch('src.server.fastapi_app.get_current_user')
    def test_preferences_export_import_workflow(self, mock_user, client):
        """Test exporting and importing preferences.

        Full round-trip: set -> export -> reset -> import -> verify; each
        step only runs if the previous endpoint actually exists (200).
        """
        mock_user.return_value = {"user_id": "test_user"}
        # Step 1: Set some preferences
        preferences_data = {
            "theme": {"name": "dark"},
            "language": {"code": "de"},
            "ui_settings": {"view_mode": "list", "density": "compact"}
        }
        bulk_response = client.put("/api/preferences", json=preferences_data)
        if bulk_response.status_code == 200:
            # Step 2: Export preferences
            export_response = client.get("/api/preferences/export")
            if export_response.status_code == 200:
                exported_data = export_response.json()
                # Step 3: Reset preferences
                reset_response = client.post("/api/preferences/reset")
                if reset_response.status_code == 200:
                    # Step 4: Import preferences back
                    import_response = client.post("/api/preferences/import", json=exported_data)
                    if import_response.status_code == 200:
                        # Step 5: Verify preferences were restored
                        final_response = client.get("/api/preferences")
                        if final_response.status_code == 200:
                            final_prefs = final_response.json()
                            # Should match original preferences
                            assert final_prefs is not None
        # Test that export/import endpoints exist
        export_test_response = client.get("/api/preferences/export")
        assert export_test_response.status_code in [200, 404]
class TestPreferencesPerformance:
    """Performance tests for preferences workflows.

    These are coarse smoke checks with generous wall-clock budgets, not
    real benchmarks.
    """
    @patch('src.server.fastapi_app.get_current_user')
    def test_preferences_response_time(self, mock_user, client):
        """Test that preference changes respond quickly."""
        mock_user.return_value = {"user_id": "test_user"}
        # NOTE(review): time.time() is wall-clock; time.monotonic() would be
        # immune to clock adjustments -- consider switching.
        start_time = time.time()
        # Quick preference change
        theme_data = {"theme_name": "light"}
        response = client.post("/api/preferences/themes/set", json=theme_data)
        response_time = time.time() - start_time
        # Should respond quickly (< 2 seconds)
        assert response_time < 2.0
        assert response.status_code in [200, 404]
    @patch('src.server.fastapi_app.get_current_user')
    def test_bulk_preferences_update_performance(self, mock_user, client):
        """Test performance of bulk preferences update."""
        mock_user.return_value = {"user_id": "test_user"}
        start_time = time.time()
        # Large preferences update
        bulk_data = {
            "theme": {"name": "dark", "custom_colors": {"primary": "#007acc"}},
            "language": {"code": "en"},
            "accessibility": {
                "high_contrast": True,
                "large_text": True,
                "reduced_motion": False,
                "font_size_multiplier": 1.2
            },
            "ui_settings": {
                "view_mode": "grid",
                "grid_columns": 6,
                "density": "comfortable",
                "show_thumbnails": True
            },
            "shortcuts": {
                "search": {"shortcut": "Ctrl+K"},
                "download": {"shortcut": "Ctrl+D"}
            }
        }
        response = client.put("/api/preferences", json=bulk_data)
        response_time = time.time() - start_time
        # Should handle bulk update efficiently (< 3 seconds)
        assert response_time < 3.0
        assert response.status_code in [200, 404]
# Allow running this test module directly (outside the pytest runner).
if __name__ == "__main__":
    pytest.main([__file__, "-v"])

View File

@@ -1,402 +0,0 @@
"""
Integration tests for anime and episode management API endpoints.
Tests anime search, anime details, episode retrieval with pagination,
valid/invalid IDs, and search filtering functionality.
"""
import os
import sys
from unittest.mock import patch
import pytest
from fastapi.testclient import TestClient
# Add source directory to path
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..'))
# Import after path setup
from src.server.fastapi_app import app # noqa: E402
@pytest.fixture
def client():
    """Yield-free fixture providing a test client for the anime API app."""
    test_client = TestClient(app)
    return test_client
@pytest.mark.integration
class TestAnimeSearchEndpoint:
    """Test anime search API endpoint (/api/anime/search)."""
    def test_anime_search_requires_auth(self, client):
        """Test anime search endpoint requires authentication."""
        response = client.get("/api/anime/search?query=test")
        # 403 is what FastAPI's HTTPBearer returns for a missing header.
        assert response.status_code == 403  # Should require authentication
    def test_anime_search_with_auth(self, client, mock_settings, valid_jwt_token):
        """Test anime search with valid authentication."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            response = client.get(
                "/api/anime/search?query=sample",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )
            assert response.status_code == 200
            data = response.json()
            assert isinstance(data, list)
            # Every hit must carry the full result schema and match the query.
            for anime in data:
                assert "id" in anime
                assert "title" in anime
                assert "description" in anime
                assert "episodes" in anime
                assert "status" in anime
                assert "sample" in anime["title"].lower()
    def test_anime_search_pagination(self, client, mock_settings, valid_jwt_token):
        """Test anime search with pagination parameters."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            # Test with limit and offset
            response = client.get(
                "/api/anime/search?query=anime&limit=5&offset=0",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )
            assert response.status_code == 200
            data = response.json()
            assert isinstance(data, list)
            assert len(data) <= 5  # Should respect limit
    def test_anime_search_invalid_params(self, client, mock_settings, valid_jwt_token):
        """Test anime search with invalid parameters."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            # Test missing query parameter
            response = client.get(
                "/api/anime/search",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )
            assert response.status_code == 422  # Validation error
            # Test invalid limit (too high)
            # NOTE(review): assumes the endpoint caps limit below 200 --
            # confirm against the route's Query(le=...) constraint.
            response = client.get(
                "/api/anime/search?query=test&limit=200",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )
            assert response.status_code == 422
            # Test negative offset
            response = client.get(
                "/api/anime/search?query=test&offset=-1",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )
            assert response.status_code == 422
    def test_anime_search_empty_query(self, client, mock_settings, valid_jwt_token):
        """Test anime search with empty query."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            response = client.get(
                "/api/anime/search?query=",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )
            # Empty query should be rejected due to min_length validation
            assert response.status_code == 422
    def test_anime_search_no_results(self, client, mock_settings, valid_jwt_token):
        """Test anime search with query that returns no results."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            response = client.get(
                "/api/anime/search?query=nonexistent_anime_title_xyz",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )
            assert response.status_code == 200
            data = response.json()
            assert isinstance(data, list)
            assert len(data) == 0  # Should return empty list
@pytest.mark.integration
class TestAnimeDetailsEndpoint:
    """Test anime details API endpoint (/api/anime/{anime_id})."""
    def test_get_anime_requires_auth(self, client):
        """Test anime details endpoint requires authentication."""
        response = client.get("/api/anime/test_anime_id")
        assert response.status_code == 403
    def test_get_anime_with_auth(self, client, mock_settings, valid_jwt_token):
        """Test anime details with valid authentication."""
        anime_id = "test_anime_123"
        with patch('src.server.fastapi_app.settings', mock_settings):
            response = client.get(
                f"/api/anime/{anime_id}",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )
            assert response.status_code == 200
            data = response.json()
            # The response echoes the requested id and carries the full schema.
            assert data["id"] == anime_id
            assert "title" in data
            assert "description" in data
            assert "episodes" in data
            assert "status" in data
            assert isinstance(data["episodes"], int)
    def test_get_anime_invalid_id(self, client, mock_settings, valid_jwt_token):
        """Test anime details with various ID formats."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            # Test with special characters in ID
            response = client.get(
                "/api/anime/anime@#$%",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )
            # Should still return 200 since it's just an ID string
            # NOTE(review): '#' starts a URL fragment; the path the server
            # actually sees is "anime@" -- confirm this is intended.
            assert response.status_code == 200
    def test_get_anime_empty_id(self, client, mock_settings, valid_jwt_token):
        """Test anime details with empty ID."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            # Empty ID should result in 404 or 422
            response = client.get(
                "/api/anime/",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )
            assert response.status_code in [404, 405]  # Method not allowed or not found
@pytest.mark.integration
class TestEpisodeEndpoints:
    """Test episode-related API endpoints.

    Covers /api/anime/{id}/episodes and /api/episodes/{id}.
    """
    def test_get_anime_episodes_requires_auth(self, client):
        """Test anime episodes endpoint requires authentication."""
        response = client.get("/api/anime/test_anime/episodes")
        assert response.status_code == 403
    def test_get_anime_episodes_with_auth(self, client, mock_settings, valid_jwt_token):
        """Test anime episodes with valid authentication."""
        anime_id = "test_anime_456"
        with patch('src.server.fastapi_app.settings', mock_settings):
            response = client.get(
                f"/api/anime/{anime_id}/episodes",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )
            assert response.status_code == 200
            data = response.json()
            assert isinstance(data, list)
            # Each episode must belong to the requested anime and carry
            # a positive 1-based episode number.
            for episode in data:
                assert "id" in episode
                assert "anime_id" in episode
                assert "episode_number" in episode
                assert "title" in episode
                assert "description" in episode
                assert "duration" in episode
                assert episode["anime_id"] == anime_id
                assert isinstance(episode["episode_number"], int)
                assert episode["episode_number"] > 0
    def test_get_episode_details_requires_auth(self, client):
        """Test episode details endpoint requires authentication."""
        response = client.get("/api/episodes/test_episode_id")
        assert response.status_code == 403
    def test_get_episode_details_with_auth(self, client, mock_settings, valid_jwt_token):
        """Test episode details with valid authentication."""
        episode_id = "test_episode_789"
        with patch('src.server.fastapi_app.settings', mock_settings):
            response = client.get(
                f"/api/episodes/{episode_id}",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )
            assert response.status_code == 200
            data = response.json()
            assert data["id"] == episode_id
            assert "anime_id" in data
            assert "episode_number" in data
            assert "title" in data
            assert "description" in data
            assert "duration" in data
            assert isinstance(data["episode_number"], int)
            assert isinstance(data["duration"], int)
    def test_episode_endpoints_with_invalid_auth(self, client):
        """Test episode endpoints with invalid authentication."""
        invalid_token = "invalid.token.here"
        endpoints = [
            "/api/anime/test/episodes",
            "/api/episodes/test_episode"
        ]
        # A present-but-invalid Bearer token yields 401 (vs 403 when absent).
        for endpoint in endpoints:
            response = client.get(
                endpoint,
                headers={"Authorization": f"Bearer {invalid_token}"}
            )
            assert response.status_code == 401
@pytest.mark.integration
class TestAnimeAPIErrorHandling:
    """Test error handling in anime API endpoints."""
    def test_anime_endpoints_malformed_auth(self, client):
        """Test anime endpoints with malformed authorization headers."""
        malformed_headers = [
            {"Authorization": "Bearer"},  # Missing token
            {"Authorization": "Basic token"},  # Wrong type
            {"Authorization": "token"},  # Missing Bearer
        ]
        endpoints = [
            "/api/anime/search?query=test",
            "/api/anime/test_id",
            "/api/anime/test_id/episodes",
            "/api/episodes/test_id"
        ]
        # Both 401 and 403 are accepted because HTTPBearer distinguishes
        # missing/malformed credentials (403) from invalid tokens (401).
        for headers in malformed_headers:
            for endpoint in endpoints:
                response = client.get(endpoint, headers=headers)
                assert response.status_code in [401, 403]
    def test_anime_search_parameter_validation(self, client, mock_settings, valid_jwt_token):
        """Test anime search parameter validation."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            # Test various invalid parameter combinations
            invalid_params = [
                "query=test&limit=0",  # limit too low
                "query=test&limit=101",  # limit too high
                "query=test&offset=-5",  # negative offset
                "query=&limit=10",  # empty query
            ]
            for params in invalid_params:
                response = client.get(
                    f"/api/anime/search?{params}",
                    headers={"Authorization": f"Bearer {valid_jwt_token}"}
                )
                assert response.status_code == 422
    def test_anime_endpoints_content_type_handling(self, client, mock_settings, valid_jwt_token):
        """Test anime endpoints with different content types."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            # Test with different Accept headers
            accept_headers = [
                "application/json",
                "application/xml",
                "text/plain",
                "*/*"
            ]
            for accept_header in accept_headers:
                response = client.get(
                    "/api/anime/search?query=test",
                    headers={
                        "Authorization": f"Bearer {valid_jwt_token}",
                        "Accept": accept_header
                    }
                )
                # Should always return JSON regardless of Accept header
                assert response.status_code == 200
                assert response.headers.get("content-type", "").startswith("application/json")
@pytest.mark.integration
class TestAnimeAPIDataIntegrity:
    """Test data integrity and consistency in anime API responses."""
    def test_anime_search_response_structure(self, client, mock_settings, valid_jwt_token):
        """Test anime search response has consistent structure."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            response = client.get(
                "/api/anime/search?query=anime",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )
            assert response.status_code == 200
            data = response.json()
            required_fields = ["id", "title", "description", "episodes", "status"]
            for anime in data:
                for field in required_fields:
                    assert field in anime, f"Missing field {field} in anime response"
                # Validate field types
                assert isinstance(anime["id"], str)
                assert isinstance(anime["title"], str)
                assert isinstance(anime["episodes"], int)
                assert isinstance(anime["status"], str)
                assert anime["episodes"] >= 0
    def test_episode_response_structure(self, client, mock_settings, valid_jwt_token):
        """Test episode response has consistent structure."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            response = client.get(
                "/api/anime/test_anime/episodes",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )
            assert response.status_code == 200
            data = response.json()
            required_fields = ["id", "anime_id", "episode_number", "title", "description", "duration"]
            for episode in data:
                for field in required_fields:
                    assert field in episode, f"Missing field {field} in episode response"
                # Validate field types and ranges
                assert isinstance(episode["id"], str)
                assert isinstance(episode["anime_id"], str)
                assert isinstance(episode["episode_number"], int)
                assert isinstance(episode["title"], str)
                assert isinstance(episode["duration"], int)
                assert episode["episode_number"] > 0
                assert episode["duration"] > 0
    def test_episode_numbering_consistency(self, client, mock_settings, valid_jwt_token):
        """Test episode numbering is consistent and sequential."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            response = client.get(
                "/api/anime/test_anime/episodes",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )
            assert response.status_code == 200
            episodes = response.json()
            if len(episodes) > 1:
                # Check that episode numbers are sequential
                # (sorted first, so the order of the response list itself
                # is deliberately not asserted -- only gap-freeness).
                episode_numbers = [ep["episode_number"] for ep in episodes]
                episode_numbers.sort()
                for i in range(len(episode_numbers) - 1):
                    assert episode_numbers[i + 1] == episode_numbers[i] + 1, \
                        "Episode numbers should be sequential"

View File

@@ -1,314 +0,0 @@
"""
Integration tests for authentication API endpoints.
Tests POST /auth/login, GET /auth/verify, POST /auth/logout endpoints
with valid/invalid credentials and tokens.
"""
import os
import sys
from unittest.mock import Mock, patch
import pytest
from fastapi.testclient import TestClient
# Add source directory to path
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..'))
# Import after path setup
from src.server.fastapi_app import app # noqa: E402
@pytest.fixture
def client():
    """Build and return a synchronous test client for the FastAPI app."""
    app_client = TestClient(app)
    return app_client
@pytest.fixture
def mock_auth_settings():
    """Return a Mock standing in for application settings in auth tests."""
    mocked = Mock()
    # Configure all attributes the auth endpoints read in one call.
    mocked.configure_mock(
        jwt_secret_key="test-secret-key",
        password_salt="test-salt",
        master_password="test_password",
        master_password_hash=None,
        token_expiry_hours=1,
    )
    return mocked
@pytest.mark.integration
class TestAuthLogin:
    """Test authentication login endpoint (POST /auth/login)."""
    def test_login_valid_credentials(self, client, mock_auth_settings):
        """Test login with valid credentials."""
        with patch('src.server.fastapi_app.settings', mock_auth_settings):
            response = client.post(
                "/auth/login",
                json={"password": "test_password"}
            )
            assert response.status_code == 200
            data = response.json()
            # Successful login returns the JWT under the "token" key.
            assert data["success"] is True
            assert "token" in data
            assert "expires_at" in data
            assert data["message"] == "Login successful"
    def test_login_invalid_credentials(self, client, mock_auth_settings):
        """Test login with invalid credentials."""
        with patch('src.server.fastapi_app.settings', mock_auth_settings):
            response = client.post(
                "/auth/login",
                json={"password": "wrong_password"}
            )
            assert response.status_code == 401
            data = response.json()
            assert data["success"] is False
            # No token must be leaked on a failed login.
            assert "token" not in data
            assert "Invalid password" in data["message"]
    def test_login_missing_password(self, client):
        """Test login with missing password field."""
        response = client.post(
            "/auth/login",
            json={}
        )
        assert response.status_code == 422  # Validation error
    def test_login_empty_password(self, client, mock_auth_settings):
        """Test login with empty password."""
        with patch('src.server.fastapi_app.settings', mock_auth_settings):
            response = client.post(
                "/auth/login",
                json={"password": ""}
            )
            assert response.status_code == 422  # Validation error (min_length=1)
    def test_login_invalid_json(self, client):
        """Test login with invalid JSON payload."""
        response = client.post(
            "/auth/login",
            data="invalid json",
            headers={"Content-Type": "application/json"}
        )
        assert response.status_code == 422
    def test_login_wrong_content_type(self, client):
        """Test login with wrong content type."""
        # Form-encoded body where JSON is expected -> validation error.
        response = client.post(
            "/auth/login",
            data="password=test_password"
        )
        assert response.status_code == 422
@pytest.mark.integration
class TestAuthVerify:
    """Test authentication token verification endpoint (GET /auth/verify)."""
    def test_verify_valid_token(self, client, mock_auth_settings, valid_jwt_token):
        """Test token verification with valid token."""
        with patch('src.server.fastapi_app.settings', mock_auth_settings):
            response = client.get(
                "/auth/verify",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )
            assert response.status_code == 200
            data = response.json()
            assert data["valid"] is True
            # NOTE(review): assumes the valid_jwt_token fixture encodes
            # user "test_user" -- confirm in conftest.
            assert data["user"] == "test_user"
            assert "expires_at" in data
    def test_verify_expired_token(self, client, mock_auth_settings, expired_jwt_token):
        """Test token verification with expired token."""
        with patch('src.server.fastapi_app.settings', mock_auth_settings):
            response = client.get(
                "/auth/verify",
                headers={"Authorization": f"Bearer {expired_jwt_token}"}
            )
            assert response.status_code == 401
            data = response.json()
            assert data["valid"] is False
            assert "expired" in data["message"].lower()
    def test_verify_invalid_token(self, client, mock_auth_settings):
        """Test token verification with invalid token."""
        with patch('src.server.fastapi_app.settings', mock_auth_settings):
            response = client.get(
                "/auth/verify",
                headers={"Authorization": "Bearer invalid.token.here"}
            )
            assert response.status_code == 401
            data = response.json()
            assert data["valid"] is False
    def test_verify_missing_token(self, client):
        """Test token verification without token."""
        response = client.get("/auth/verify")
        assert response.status_code == 403  # Forbidden - no credentials
    def test_verify_malformed_header(self, client):
        """Test token verification with malformed authorization header."""
        response = client.get(
            "/auth/verify",
            headers={"Authorization": "InvalidFormat token"}
        )
        assert response.status_code == 403
    def test_verify_empty_token(self, client):
        """Test token verification with empty token."""
        # A "Bearer " prefix with no token parses as empty credentials.
        response = client.get(
            "/auth/verify",
            headers={"Authorization": "Bearer "}
        )
        assert response.status_code == 401
@pytest.mark.integration
class TestAuthLogout:
    """Exercise POST /auth/logout with valid, invalid, absent and expired tokens."""
    @staticmethod
    def _logout(client, token=None):
        """Issue a logout request, attaching a Bearer header when a token is given."""
        headers = {} if token is None else {"Authorization": f"Bearer {token}"}
        return client.post("/auth/logout", headers=headers)
    def test_logout_valid_token(self, client, mock_auth_settings, valid_jwt_token):
        """A valid token logs out successfully."""
        with patch('src.server.fastapi_app.settings', mock_auth_settings):
            response = self._logout(client, valid_jwt_token)
        assert response.status_code == 200
        payload = response.json()
        assert payload["success"] is True
        assert "logged out" in payload["message"].lower()
    def test_logout_invalid_token(self, client, mock_auth_settings):
        """A garbage token is rejected with 401."""
        with patch('src.server.fastapi_app.settings', mock_auth_settings):
            response = self._logout(client, "invalid.token")
        assert response.status_code == 401
    def test_logout_missing_token(self, client):
        """Logging out without any credentials yields 403."""
        response = self._logout(client)
        assert response.status_code == 403
    def test_logout_expired_token(self, client, mock_auth_settings, expired_jwt_token):
        """An expired token is rejected with 401."""
        with patch('src.server.fastapi_app.settings', mock_auth_settings):
            response = self._logout(client, expired_jwt_token)
        assert response.status_code == 401
@pytest.mark.integration
class TestAuthFlow:
    """Test complete authentication flow (login -> verify -> logout)."""
    def test_complete_login_verify_logout_flow(self, client, mock_auth_settings):
        """Test complete authentication flow: login -> verify -> logout."""
        with patch('src.server.fastapi_app.settings', mock_auth_settings):
            # Step 1: Login
            login_response = client.post(
                "/auth/login",
                json={"password": "test_password"}
            )
            assert login_response.status_code == 200
            login_data = login_response.json()
            token = login_data["token"]
            # Step 2: Verify token
            verify_response = client.get(
                "/auth/verify",
                headers={"Authorization": f"Bearer {token}"}
            )
            assert verify_response.status_code == 200
            verify_data = verify_response.json()
            assert verify_data["valid"] is True
            # Step 3: Logout
            logout_response = client.post(
                "/auth/logout",
                headers={"Authorization": f"Bearer {token}"}
            )
            assert logout_response.status_code == 200
            logout_data = logout_response.json()
            assert logout_data["success"] is True
    def test_multiple_login_attempts(self, client, mock_auth_settings):
        """Test multiple login attempts with rate limiting consideration."""
        with patch('src.server.fastapi_app.settings', mock_auth_settings):
            # Multiple successful logins should work
            for _ in range(3):
                response = client.post(
                    "/auth/login",
                    json={"password": "test_password"}
                )
                assert response.status_code == 200
            # Failed login attempts
            # NOTE(review): asserts 401 (not 429), i.e. no rate limiting is
            # expected after three failures -- confirm against the server.
            for _ in range(3):
                response = client.post(
                    "/auth/login",
                    json={"password": "wrong_password"}
                )
                assert response.status_code == 401
    def test_concurrent_sessions(self, client, mock_auth_settings):
        """Test that multiple valid tokens can exist simultaneously."""
        with patch('src.server.fastapi_app.settings', mock_auth_settings):
            # Get first token
            response1 = client.post(
                "/auth/login",
                json={"password": "test_password"}
            )
            token1 = response1.json()["token"]
            # Get second token
            response2 = client.post(
                "/auth/login",
                json={"password": "test_password"}
            )
            token2 = response2.json()["token"]
            # Both tokens should be valid
            verify1 = client.get(
                "/auth/verify",
                headers={"Authorization": f"Bearer {token1}"}
            )
            verify2 = client.get(
                "/auth/verify",
                headers={"Authorization": f"Bearer {token2}"}
            )
            assert verify1.status_code == 200
            assert verify2.status_code == 200

View File

@@ -1,277 +0,0 @@
"""
Integration tests for bulk operations API endpoints.
This module tests the bulk operation endpoints for download, update, organize, delete, and export.
Tests include authentication, validation, and error handling.
"""
import json
from unittest.mock import Mock, patch
import pytest
from fastapi.testclient import TestClient
from src.server.fastapi_app import app
@pytest.fixture
def client():
    """Construct a test client wrapping the FastAPI application."""
    wrapped = TestClient(app)
    return wrapped
@pytest.fixture
def auth_headers(client):
    """Provide Bearer authentication headers for protected endpoints.

    Returns a header dict on successful login, or an empty dict when login
    fails (callers then exercise the unauthenticated paths).
    """
    # SHA-256 of the string "password".
    # NOTE(review): assumes the server stores an unsalted SHA-256 hash --
    # confirm against the auth implementation (settings.password_salt exists).
    password_hash = "5e884898da28047151d0e56f8dc6292773603d0d6aabbdd62a11ef721d1542d8"
    # Patch the attribute with the hash *value* directly. The previous code
    # patched it with a Mock and set .return_value, which left the attribute
    # itself a Mock that can never compare equal to a computed hash.
    with patch('src.server.fastapi_app.settings.master_password_hash', password_hash):
        # Use the password that matches the hash above (the old code sent
        # "testpassword" against the hash of "password").
        response = client.post("/auth/login", json={"password": "password"})
    if response.status_code == 200:
        payload = response.json()
        # The login endpoint returns the JWT under "token" (see the auth
        # integration tests); keep "access_token" as a defensive fallback.
        token = payload.get("token") or payload.get("access_token")
        if token:
            return {"Authorization": f"Bearer {token}"}
    return {}
class TestBulkDownloadEndpoint:
    """Test cases for /api/bulk/download endpoint."""
    def test_bulk_download_requires_auth(self, client):
        """Test that bulk download requires authentication."""
        response = client.post("/api/bulk/download", json={"anime_ids": ["1", "2"]})
        assert response.status_code == 401
    @patch('src.server.fastapi_app.get_current_user')
    def test_bulk_download_valid_request(self, mock_user, client):
        """Test bulk download with valid request."""
        mock_user.return_value = {"user_id": "test_user"}
        download_data = {
            "anime_ids": ["anime1", "anime2"],
            "quality": "1080p",
            "format": "mp4"
        }
        # NOTE(review): patching bulk_download_service assumes that
        # attribute exists on fastapi_app; patch() raises AttributeError
        # otherwise -- confirm, or pass create=True.
        with patch('src.server.fastapi_app.bulk_download_service') as mock_service:
            mock_service.start_bulk_download.return_value = {
                "task_id": "bulk_task_123",
                "status": "started",
                "anime_count": 2
            }
            response = client.post("/api/bulk/download", json=download_data)
            # Note: This test assumes the endpoint will be implemented
            # Currently returns 404 since endpoint doesn't exist
            assert response.status_code in [200, 404]
    def test_bulk_download_invalid_data(self, client, auth_headers):
        """Test bulk download with invalid data."""
        invalid_data = {"anime_ids": []}  # Empty list
        response = client.post("/api/bulk/download", json=invalid_data, headers=auth_headers)
        # Expected 404 since endpoint not implemented yet
        assert response.status_code in [400, 404, 422]
    def test_bulk_download_missing_anime_ids(self, client, auth_headers):
        """Test bulk download without anime_ids field."""
        invalid_data = {"quality": "1080p"}
        response = client.post("/api/bulk/download", json=invalid_data, headers=auth_headers)
        assert response.status_code in [400, 404, 422]
class TestBulkUpdateEndpoint:
    """Integration tests covering the /api/bulk/update endpoint."""
    def test_bulk_update_requires_auth(self, client):
        """An unauthenticated bulk update must be rejected."""
        payload = {"anime_ids": ["1", "2"]}
        result = client.post("/api/bulk/update", json=payload)
        assert result.status_code == 401
    @patch('src.server.fastapi_app.get_current_user')
    def test_bulk_update_metadata(self, mock_user, client):
        """A metadata-update request succeeds, or 404s until implemented."""
        mock_user.return_value = {"user_id": "test_user"}
        payload = {
            "anime_ids": ["anime1", "anime2"],
            "operation": "update_metadata",
        }
        result = client.post("/api/bulk/update", json=payload)
        # The endpoint may not exist yet, so 404 is tolerated beside 200.
        assert result.status_code in [200, 404]
    def test_bulk_update_invalid_operation(self, client, auth_headers):
        """An unknown operation name must be rejected."""
        payload = {
            "anime_ids": ["anime1"],
            "operation": "invalid_operation",
        }
        result = client.post("/api/bulk/update", json=payload, headers=auth_headers)
        assert result.status_code in [400, 404, 422]
class TestBulkOrganizeEndpoint:
    """Integration tests covering the /api/bulk/organize endpoint."""
    def test_bulk_organize_requires_auth(self, client):
        """An unauthenticated organize request must be rejected."""
        result = client.post("/api/bulk/organize", json={"anime_ids": ["1", "2"]})
        assert result.status_code == 401
    @patch('src.server.fastapi_app.get_current_user')
    def test_bulk_organize_by_genre(self, mock_user, client):
        """Organizing by genre with subdirectories succeeds or 404s."""
        mock_user.return_value = {"user_id": "test_user"}
        payload = {
            "anime_ids": ["anime1", "anime2"],
            "organize_by": "genre",
            "create_subdirectories": True,
        }
        result = client.post("/api/bulk/organize", json=payload)
        # Endpoint may not be implemented yet -> 404 tolerated.
        assert result.status_code in [200, 404]
    def test_bulk_organize_by_year(self, client, auth_headers):
        """Organizing by year without subdirectories succeeds or 404s."""
        payload = {
            "anime_ids": ["anime1", "anime2"],
            "organize_by": "year",
            "create_subdirectories": False,
        }
        result = client.post("/api/bulk/organize", json=payload, headers=auth_headers)
        assert result.status_code in [200, 404]
class TestBulkDeleteEndpoint:
    """Test cases for /api/bulk/delete endpoint.

    DELETE requests here carry a JSON body, so they are issued via
    ``client.request("DELETE", ...)``: the httpx-based TestClient's
    ``client.delete()`` helper accepts no ``json=`` keyword and raised
    a TypeError in the previous version of these tests.
    """
    def test_bulk_delete_requires_auth(self, client):
        """Test that bulk delete requires authentication."""
        response = client.request(
            "DELETE", "/api/bulk/delete", json={"anime_ids": ["1", "2"]}
        )
        assert response.status_code == 401
    @patch('src.server.fastapi_app.get_current_user')
    def test_bulk_delete_with_confirmation(self, mock_user, client):
        """Test bulk delete with confirmation."""
        mock_user.return_value = {"user_id": "test_user"}
        delete_data = {
            "anime_ids": ["anime1", "anime2"],
            "confirm": True,
            "delete_files": True
        }
        response = client.request("DELETE", "/api/bulk/delete", json=delete_data)
        # Expected 404 since endpoint not implemented yet
        assert response.status_code in [200, 404]
    def test_bulk_delete_without_confirmation(self, client, auth_headers):
        """Test bulk delete without confirmation should fail."""
        delete_data = {
            "anime_ids": ["anime1", "anime2"],
            "confirm": False
        }
        response = client.request(
            "DELETE", "/api/bulk/delete", json=delete_data, headers=auth_headers
        )
        assert response.status_code in [400, 404, 422]
class TestBulkExportEndpoint:
    """Integration tests covering the /api/bulk/export endpoint."""
    def test_bulk_export_requires_auth(self, client):
        """An unauthenticated export request must be rejected."""
        result = client.post("/api/bulk/export", json={"anime_ids": ["1", "2"]})
        assert result.status_code == 401
    @patch('src.server.fastapi_app.get_current_user')
    def test_bulk_export_to_json(self, mock_user, client):
        """Exporting to JSON with metadata succeeds or 404s until implemented."""
        mock_user.return_value = {"user_id": "test_user"}
        payload = {
            "anime_ids": ["anime1", "anime2"],
            "format": "json",
            "include_metadata": True,
        }
        result = client.post("/api/bulk/export", json=payload)
        # Endpoint may not exist yet -> 404 tolerated alongside 200.
        assert result.status_code in [200, 404]
    def test_bulk_export_to_csv(self, client, auth_headers):
        """Exporting to CSV without metadata succeeds or 404s."""
        payload = {
            "anime_ids": ["anime1", "anime2"],
            "format": "csv",
            "include_metadata": False,
        }
        result = client.post("/api/bulk/export", json=payload, headers=auth_headers)
        assert result.status_code in [200, 404]
    def test_bulk_export_invalid_format(self, client, auth_headers):
        """An unsupported export format must be rejected."""
        payload = {
            "anime_ids": ["anime1"],
            "format": "invalid_format",
        }
        result = client.post("/api/bulk/export", json=payload, headers=auth_headers)
        assert result.status_code in [400, 404, 422]
class TestBulkOperationsEdgeCases:
    """Test edge cases shared by all bulk-operation endpoints."""
    def test_empty_anime_ids_list(self, client, auth_headers):
        """An empty anime_ids list must be rejected by every bulk endpoint."""
        empty_data = {"anime_ids": []}
        # Bug fix: "/api/bulk/delete" was missing from this list, which made
        # the DELETE branch below dead code and left that endpoint untested.
        endpoints = [
            "/api/bulk/download",
            "/api/bulk/update",
            "/api/bulk/organize",
            "/api/bulk/export",
            "/api/bulk/delete"
        ]
        for endpoint in endpoints:
            if endpoint == "/api/bulk/delete":
                # DELETE with a JSON body requires client.request(); the
                # httpx TestClient's delete() helper takes no json= kwarg.
                response = client.request(
                    "DELETE", endpoint, json=empty_data, headers=auth_headers
                )
            else:
                response = client.post(endpoint, json=empty_data, headers=auth_headers)
            assert response.status_code in [400, 404, 422]
    def test_large_anime_ids_list(self, client, auth_headers):
        """Test bulk operations with large anime_ids list."""
        large_data = {"anime_ids": [f"anime_{i}" for i in range(1000)]}
        response = client.post("/api/bulk/download", json=large_data, headers=auth_headers)
        # Endpoint should handle large requests or return appropriate error
        assert response.status_code in [200, 400, 404, 413]
    @patch('src.server.fastapi_app.get_current_user')
    def test_bulk_operations_concurrent_requests(self, mock_user, client):
        """Test multiple concurrent bulk operations."""
        mock_user.return_value = {"user_id": "test_user"}
        # This test would need actual implementation to test concurrency
        # For now, just verify endpoints exist
        data = {"anime_ids": ["anime1"]}
        response = client.post("/api/bulk/download", json=data)
        assert response.status_code in [200, 404]
# Allow running this test module directly (outside the pytest runner).
if __name__ == "__main__":
    pytest.main([__file__, "-v"])

View File

@@ -1,350 +0,0 @@
"""
Integration tests for database and storage management API endpoints.
Tests database info, maintenance operations (vacuum, analyze, integrity-check,
reindex, optimize, stats), and storage management functionality.
"""
import os
import sys
from unittest.mock import patch
import pytest
from fastapi.testclient import TestClient
# Add source directory to path
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..'))
# Import after path setup
from src.server.fastapi_app import app # noqa: E402
@pytest.fixture
def client():
    """Test client for database API tests."""
    # A fresh client per test keeps request state isolated between cases.
    test_client = TestClient(app)
    return test_client
@pytest.mark.integration
class TestDatabaseInfoEndpoints:
    """Test database information endpoints."""

    def test_database_health_requires_auth(self, client):
        """Test database health endpoint requires authentication."""
        # No Authorization header at all -> 403.
        resp = client.get("/api/system/database/health")
        assert resp.status_code == 403

    def test_database_health_with_auth(self, client, mock_settings, valid_jwt_token):
        """Test database health with valid authentication."""
        auth = {"Authorization": f"Bearer {valid_jwt_token}"}
        with patch('src.server.fastapi_app.settings', mock_settings):
            resp = client.get("/api/system/database/health", headers=auth)
        assert resp.status_code == 200
        payload = resp.json()
        for key in ("status", "connection_pool", "response_time_ms", "last_check"):
            assert key in payload
        assert payload["status"] == "healthy"
        assert isinstance(payload["response_time_ms"], (int, float))
        assert payload["response_time_ms"] > 0

    def test_database_info_endpoint(self, client, mock_settings, valid_jwt_token):
        """Test /api/database/info endpoint (to be implemented)."""
        auth = {"Authorization": f"Bearer {valid_jwt_token}"}
        with patch('src.server.fastapi_app.settings', mock_settings):
            resp = client.get("/api/database/info", headers=auth)
        # Endpoint may not be implemented yet.
        assert resp.status_code in [200, 404]
        if resp.status_code == 200:
            payload = resp.json()
            # Only type-check fields that are actually present.
            for field in ("database_type", "version", "size", "tables"):
                if field in payload:
                    assert isinstance(payload[field], (str, int, float, dict, list))
@pytest.mark.integration
class TestDatabaseMaintenanceEndpoints:
    """Test database maintenance operation endpoints.

    Every test tolerates 404 so the suite passes while the maintenance
    API is still unimplemented; once an endpoint answers 200, its payload
    is sanity-checked for the expected fields.
    """

    def test_database_vacuum_endpoint(self, client, mock_settings, valid_jwt_token):
        """Test /maintenance/database/vacuum endpoint."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            response = client.post(
                "/maintenance/database/vacuum",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )
            # Endpoint may not be implemented yet
            assert response.status_code in [200, 404]
            if response.status_code == 200:
                data = response.json()
                assert "success" in data or "status" in data

    def test_database_analyze_endpoint(self, client, mock_settings, valid_jwt_token):
        """Test /maintenance/database/analyze endpoint."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            response = client.post(
                "/maintenance/database/analyze",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )
            assert response.status_code in [200, 404]
            if response.status_code == 200:
                data = response.json()
                expected_fields = ["tables_analyzed", "statistics_updated", "duration_ms"]
                # Check if any expected fields are present
                assert any(field in data for field in expected_fields)

    def test_database_integrity_check_endpoint(self, client, mock_settings, valid_jwt_token):
        """Test /maintenance/database/integrity-check endpoint."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            response = client.post(
                "/maintenance/database/integrity-check",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )
            assert response.status_code in [200, 404]
            if response.status_code == 200:
                data = response.json()
                assert "integrity_status" in data or "status" in data
                if "integrity_status" in data:
                    # Only these three integrity verdicts are considered valid.
                    assert data["integrity_status"] in ["ok", "error", "warning"]

    def test_database_reindex_endpoint(self, client, mock_settings, valid_jwt_token):
        """Test /maintenance/database/reindex endpoint."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            response = client.post(
                "/maintenance/database/reindex",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )
            assert response.status_code in [200, 404]
            if response.status_code == 200:
                data = response.json()
                expected_fields = ["indexes_rebuilt", "duration_ms", "status"]
                assert any(field in data for field in expected_fields)

    def test_database_optimize_endpoint(self, client, mock_settings, valid_jwt_token):
        """Test /maintenance/database/optimize endpoint."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            response = client.post(
                "/maintenance/database/optimize",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )
            assert response.status_code in [200, 404]
            if response.status_code == 200:
                data = response.json()
                assert "optimization_status" in data or "status" in data

    def test_database_stats_endpoint(self, client, mock_settings, valid_jwt_token):
        """Test /maintenance/database/stats endpoint."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            # Stats is the only read-only endpoint in this class: GET, not POST.
            response = client.get(
                "/maintenance/database/stats",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )
            assert response.status_code in [200, 404]
            if response.status_code == 200:
                data = response.json()
                expected_stats = ["table_count", "record_count", "database_size", "index_size"]
                # At least some stats should be present
                assert any(stat in data for stat in expected_stats)
@pytest.mark.integration
class TestDatabaseEndpointAuthentication:
    """Test authentication requirements for database endpoints."""

    def test_database_endpoints_require_auth(self, client):
        """Test that database endpoints require authentication."""
        database_endpoints = [
            "/api/database/info",
            "/api/system/database/health",
            "/maintenance/database/vacuum",
            "/maintenance/database/analyze",
            "/maintenance/database/integrity-check",
            "/maintenance/database/reindex",
            "/maintenance/database/optimize",
            "/maintenance/database/stats",
        ]
        for endpoint in database_endpoints:
            # Read-only endpoints are queried with GET, maintenance
            # actions with POST.
            is_read_only = (
                "info" in endpoint or "health" in endpoint or "stats" in endpoint
            )
            resp = client.get(endpoint) if is_read_only else client.post(endpoint)
            # Either auth is enforced (403) or the route does not exist (404).
            assert resp.status_code in [403, 404]

    def test_database_endpoints_with_invalid_auth(self, client):
        """Test database endpoints with invalid authentication."""
        bad_headers = {"Authorization": "Bearer invalid.token.here"}
        for endpoint, method in (
            ("/api/system/database/health", "GET"),
            ("/maintenance/database/vacuum", "POST"),
            ("/maintenance/database/analyze", "POST"),
        ):
            caller = client.get if method == "GET" else client.post
            resp = caller(endpoint, headers=bad_headers)
            # Unauthorized (401) or route not found (404).
            assert resp.status_code in [401, 404]
@pytest.mark.integration
class TestDatabaseMaintenanceOperations:
    """Test database maintenance operation workflows."""

    def test_maintenance_operation_sequence(self, client, mock_settings, valid_jwt_token):
        """Test sequence of maintenance operations."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            # Test sequence: analyze -> vacuum -> reindex -> optimize
            maintenance_sequence = [
                "/maintenance/database/analyze",
                "/maintenance/database/vacuum",
                "/maintenance/database/reindex",
                "/maintenance/database/optimize"
            ]
            for endpoint in maintenance_sequence:
                response = client.post(
                    endpoint,
                    headers={"Authorization": f"Bearer {valid_jwt_token}"}
                )
                # Should either work (200) or not be implemented (404)
                assert response.status_code in [200, 404]
                if response.status_code == 200:
                    data = response.json()
                    # Should return some kind of status or success indication
                    assert isinstance(data, dict)

    def test_maintenance_operation_parameters(self, client, mock_settings, valid_jwt_token):
        """Test maintenance operations with parameters."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            # Test vacuum with parameters
            # 422 is accepted in case the implementation rejects the query
            # parameter as invalid.
            response = client.post(
                "/maintenance/database/vacuum?full=true",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )
            assert response.status_code in [200, 404, 422]
            # Test analyze with table parameter
            response = client.post(
                "/maintenance/database/analyze?tables=anime,episodes",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )
            assert response.status_code in [200, 404, 422]

    def test_concurrent_maintenance_operations(self, client, mock_settings, valid_jwt_token):
        """Test behavior of concurrent maintenance operations."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            # Simulate starting multiple operations
            # In real implementation, this should be handled properly
            # NOTE(review): TestClient sends these sequentially, so this is
            # only a smoke test, not true concurrency.
            # Start first operation
            response1 = client.post(
                "/maintenance/database/vacuum",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )
            # Try to start second operation while first might be running
            response2 = client.post(
                "/maintenance/database/analyze",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )
            # Both should either work or not be implemented
            assert response1.status_code in [200, 404, 409]  # 409 for conflict
            assert response2.status_code in [200, 404, 409]
@pytest.mark.integration
class TestDatabaseErrorHandling:
    """Test error handling in database operations."""

    def test_database_connection_errors(self, client, mock_settings, valid_jwt_token):
        """Test handling of database connection errors."""
        auth = {"Authorization": f"Bearer {valid_jwt_token}"}
        with patch('src.server.fastapi_app.settings', mock_settings):
            resp = client.get("/api/system/database/health", headers=auth)
        # The health check should still answer even when the DB is down:
        # healthy (200) or service unavailable (503).
        assert resp.status_code in [200, 503]
        if resp.status_code == 503:
            payload = resp.json()
            assert "error" in payload or "status" in payload

    def test_maintenance_operation_errors(self, client, mock_settings, valid_jwt_token):
        """Test error handling in maintenance operations."""
        auth = {"Authorization": f"Bearer {valid_jwt_token}"}
        bad_requests = (
            ("/maintenance/database/vacuum", {"invalid": "data"}),
            ("/maintenance/database/analyze", {"tables": ""}),
        )
        with patch('src.server.fastapi_app.settings', mock_settings):
            for endpoint, body in bad_requests:
                resp = client.post(endpoint, json=body, headers=auth)
                # Malformed input must be handled gracefully.
                assert resp.status_code in [200, 400, 404, 422]

    def test_database_timeout_handling(self, client, mock_settings, valid_jwt_token):
        """Test handling of database operation timeouts."""
        auth = {"Authorization": f"Bearer {valid_jwt_token}"}
        with patch('src.server.fastapi_app.settings', mock_settings):
            # Very short timeout to simulate a timeout on a long-running
            # operation (full vacuum); depends on implementation details.
            resp = client.post(
                "/maintenance/database/vacuum?full=true",
                headers=auth,
                timeout=1
            )
        # Completes quickly, unimplemented, or times out (408/504).
        assert resp.status_code in [200, 404, 408, 504]

View File

@@ -1,336 +0,0 @@
"""
Integration tests for diagnostics API endpoints.
This module tests the diagnostics endpoints for error reporting and system diagnostics.
"""
import os
import tempfile
from unittest.mock import Mock, patch
import pytest
from fastapi.testclient import TestClient
from src.server.fastapi_app import app
@pytest.fixture
def client():
    """Create a test client for the FastAPI application."""
    test_client = TestClient(app)
    return test_client
@pytest.fixture
def auth_headers(client):
    """Provide authentication headers for protected endpoints.

    Logs in with the master password and returns a Bearer-token header
    dict; returns an empty dict when login fails, so dependent tests
    degrade to exercising the unauthenticated code paths.
    """
    # sha256("password") — must correspond to the password actually sent.
    password_hash = (
        "5e884898da28047151d0e56f8dc6292773603d0d6aabbdd62a11ef721d1542d8"
    )
    # BUG FIX: the original patched the attribute with a MagicMock and set
    # `.return_value`, which never equals a computed hash, and it sent
    # "testpassword" while the stored hash was for "password". Patch the
    # attribute with the literal hash and send the matching password.
    with patch(
        'src.server.fastapi_app.settings.master_password_hash',
        new=password_hash,
    ):
        response = client.post("/auth/login", json={"password": "password"})
        if response.status_code == 200:
            token = response.json()["access_token"]
            return {"Authorization": f"Bearer {token}"}
    return {}
class TestDiagnosticsReportEndpoint:
    """Test cases for /diagnostics/report endpoint."""

    def test_diagnostics_report_requires_auth(self, client):
        """Test that diagnostics report requires authentication."""
        response = client.get("/diagnostics/report")
        # NOTE(review): other suites expect 403 for a missing credential;
        # confirm whether this endpoint really answers 401 here.
        assert response.status_code == 401

    @patch('src.server.fastapi_app.get_current_user')
    def test_get_diagnostics_report(self, mock_user, client):
        """Test getting diagnostics report."""
        # NOTE(review): patching the module attribute does not override a
        # FastAPI dependency; verify auth is actually bypassed this way.
        mock_user.return_value = {"user_id": "test_user"}
        response = client.get("/diagnostics/report")
        # Expected 404 since endpoint not implemented yet
        assert response.status_code in [200, 404]
        if response.status_code == 200:
            data = response.json()
            expected_fields = [
                "system_info", "memory_usage", "disk_usage",
                "error_summary", "performance_metrics", "timestamp"
            ]
            for field in expected_fields:
                assert field in data

    @patch('src.server.fastapi_app.get_current_user')
    def test_get_diagnostics_report_with_filters(self, mock_user, client):
        """Test getting diagnostics report with time filters."""
        mock_user.return_value = {"user_id": "test_user"}
        # Test with time range
        response = client.get("/diagnostics/report?since=2023-01-01&until=2023-12-31")
        assert response.status_code in [200, 404]
        # Test with severity filter
        response = client.get("/diagnostics/report?severity=error")
        assert response.status_code in [200, 404]

    @patch('src.server.fastapi_app.get_current_user')
    def test_generate_diagnostics_report(self, mock_user, client):
        """Test generating new diagnostics report."""
        mock_user.return_value = {"user_id": "test_user"}
        report_options = {
            "include_logs": True,
            "include_system_info": True,
            "include_performance": True,
            "time_range_hours": 24
        }
        response = client.post("/diagnostics/report", json=report_options)
        # Expected 404 since endpoint not implemented yet
        assert response.status_code in [200, 404]
        if response.status_code == 200:
            data = response.json()
            assert "report_id" in data
            assert "status" in data

    def test_diagnostics_report_invalid_params(self, client, auth_headers):
        """Test diagnostics report with invalid parameters."""
        # Each query string carries one deliberately invalid value.
        invalid_params = [
            "?since=invalid-date",
            "?severity=invalid-severity",
            "?time_range_hours=-1"
        ]
        for param in invalid_params:
            response = client.get(f"/diagnostics/report{param}", headers=auth_headers)
            assert response.status_code in [400, 404, 422]
class TestDiagnosticsErrorReporting:
    """Test cases for error reporting functionality."""

    @patch('src.server.fastapi_app.get_current_user')
    def test_get_error_statistics(self, mock_user, client):
        """Test getting error statistics."""
        mock_user.return_value = {"user_id": "test_user"}
        resp = client.get("/diagnostics/errors/stats")
        # 404 is accepted while the endpoint is not implemented yet.
        assert resp.status_code in [200, 404]
        if resp.status_code == 200:
            payload = resp.json()
            for field in (
                "total_errors",
                "errors_by_type",
                "errors_by_severity",
                "recent_errors",
                "error_trends",
            ):
                assert field in payload

    @patch('src.server.fastapi_app.get_current_user')
    def test_get_recent_errors(self, mock_user, client):
        """Test getting recent errors."""
        mock_user.return_value = {"user_id": "test_user"}
        resp = client.get("/diagnostics/errors/recent")
        assert resp.status_code in [200, 404]
        if resp.status_code == 200:
            payload = resp.json()
            assert "errors" in payload
            assert isinstance(payload["errors"], list)

    @patch('src.server.fastapi_app.get_current_user')
    def test_clear_error_logs(self, mock_user, client):
        """Test clearing error logs."""
        mock_user.return_value = {"user_id": "test_user"}
        resp = client.delete("/diagnostics/errors/clear")
        assert resp.status_code in [200, 404]
        if resp.status_code == 200:
            assert "cleared_count" in resp.json()
class TestDiagnosticsSystemHealth:
    """Test cases for system health diagnostics."""

    @patch('src.server.fastapi_app.get_current_user')
    def test_get_system_health_overview(self, mock_user, client):
        """Test getting system health overview."""
        mock_user.return_value = {"user_id": "test_user"}
        resp = client.get("/diagnostics/system/health")
        assert resp.status_code in [200, 404]
        if resp.status_code == 200:
            payload = resp.json()
            for field in (
                "overall_status",
                "cpu_usage",
                "memory_usage",
                "disk_usage",
                "network_status",
                "service_status",
            ):
                assert field in payload

    @patch('src.server.fastapi_app.get_current_user')
    def test_run_system_diagnostics(self, mock_user, client):
        """Test running system diagnostics."""
        mock_user.return_value = {"user_id": "test_user"}
        options = {
            "check_disk": True,
            "check_memory": True,
            "check_network": True,
            "check_database": True,
        }
        resp = client.post("/diagnostics/system/run", json=options)
        assert resp.status_code in [200, 404]
        if resp.status_code == 200:
            payload = resp.json()
            assert "diagnostic_id" in payload
            assert "status" in payload
class TestDiagnosticsLogManagement:
    """Test cases for log management diagnostics."""

    @patch('src.server.fastapi_app.get_current_user')
    def test_get_log_file_info(self, mock_user, client):
        """Test getting log file information."""
        mock_user.return_value = {"user_id": "test_user"}
        response = client.get("/diagnostics/logs/info")
        # 404 is accepted while the endpoint is not implemented yet.
        assert response.status_code in [200, 404]
        if response.status_code == 200:
            data = response.json()
            expected_fields = [
                "log_files", "total_size_bytes", "oldest_entry",
                "newest_entry", "rotation_status"
            ]
            for field in expected_fields:
                assert field in data

    @patch('src.server.fastapi_app.get_current_user')
    def test_get_log_entries(self, mock_user, client):
        """Test getting log entries."""
        mock_user.return_value = {"user_id": "test_user"}
        response = client.get("/diagnostics/logs/entries")
        assert response.status_code in [200, 404]
        # Test with filters
        response = client.get("/diagnostics/logs/entries?level=ERROR&limit=100")
        assert response.status_code in [200, 404]

    @patch('src.server.fastapi_app.get_current_user')
    def test_export_logs(self, mock_user, client):
        """Test exporting logs."""
        mock_user.return_value = {"user_id": "test_user"}
        export_options = {
            "format": "json",
            "include_levels": ["ERROR", "WARNING", "INFO"],
            "time_range_hours": 24
        }
        response = client.post("/diagnostics/logs/export", json=export_options)
        assert response.status_code in [200, 404]

    @patch('src.server.fastapi_app.get_current_user')
    def test_rotate_logs(self, mock_user, client):
        """Test log rotation."""
        mock_user.return_value = {"user_id": "test_user"}
        response = client.post("/diagnostics/logs/rotate")
        assert response.status_code in [200, 404]
        if response.status_code == 200:
            data = response.json()
            assert "rotated_files" in data
            assert "status" in data
class TestDiagnosticsIntegration:
    """Integration tests for diagnostics functionality."""

    @patch('src.server.fastapi_app.get_current_user')
    def test_diagnostics_workflow(self, mock_user, client):
        """Test typical diagnostics workflow."""
        mock_user.return_value = {"user_id": "test_user"}
        # Walk the typical sequence: health overview, error stats, full
        # report, then log-file status.
        workflow = [
            "/diagnostics/system/health",
            "/diagnostics/errors/stats",
            "/diagnostics/report",
            "/diagnostics/logs/info",
        ]
        for endpoint in workflow:
            resp = client.get(endpoint)
            assert resp.status_code in [200, 404]

    def test_diagnostics_error_handling(self, client, auth_headers):
        """Test error handling across diagnostics endpoints."""
        for endpoint in (
            "/diagnostics/report",
            "/diagnostics/errors/stats",
            "/diagnostics/system/health",
            "/diagnostics/logs/info",
        ):
            resp = client.get(endpoint, headers=auth_headers)
            assert resp.status_code in [200, 404]

    @patch('src.server.fastapi_app.get_current_user')
    def test_diagnostics_concurrent_requests(self, mock_user, client):
        """Test handling of concurrent diagnostics requests."""
        mock_user.return_value = {"user_id": "test_user"}
        # Placeholder: a single request stands in for real concurrency.
        resp = client.get("/diagnostics/report")
        assert resp.status_code in [200, 404]
class TestDiagnosticsEdgeCases:
    """Test edge cases for diagnostics functionality."""

    def test_diagnostics_with_missing_log_files(self, client, auth_headers):
        """Test diagnostics when log files are missing."""
        resp = client.get("/diagnostics/logs/info", headers=auth_headers)
        # Missing log files must not crash the endpoint.
        assert resp.status_code in [200, 404, 500]

    def test_diagnostics_with_large_log_files(self, client, auth_headers):
        """Test diagnostics with very large log files."""
        # A small `limit` keeps the response bounded for huge files.
        resp = client.get("/diagnostics/logs/entries?limit=10", headers=auth_headers)
        assert resp.status_code in [200, 404]

    @patch('src.server.fastapi_app.get_current_user')
    def test_diagnostics_export_formats(self, mock_user, client):
        """Test different export formats for diagnostics."""
        mock_user.return_value = {"user_id": "test_user"}
        for fmt in ("json", "csv", "txt"):
            resp = client.post("/diagnostics/logs/export", json={"format": fmt})
            assert resp.status_code in [200, 404, 400]
# Allow running this test module directly (outside the pytest CLI).
if __name__ == "__main__":
    pytest.main([__file__, "-v"])

View File

@@ -1,286 +0,0 @@
"""
Integration tests for health and system monitoring API endpoints.
Tests /health, /api/health/* endpoints including system metrics,
database health, dependencies, performance, and monitoring.
"""
import os
import sys
from datetime import datetime
from unittest.mock import patch
import pytest
from fastapi.testclient import TestClient
# Add source directory to path
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..'))
# Import after path setup
from src.server.fastapi_app import app # noqa: E402
@pytest.fixture
def client():
    """Test client for health API tests."""
    health_client = TestClient(app)
    return health_client
@pytest.mark.integration
class TestBasicHealthEndpoints:
    """Test basic health check endpoints."""

    def test_health_endpoint_structure(self, client):
        """Test basic health endpoint returns correct structure."""
        resp = client.get("/health")
        assert resp.status_code == 200
        payload = resp.json()
        for key in ("status", "timestamp", "version", "services"):
            assert key in payload
        assert payload["status"] == "healthy"
        assert payload["version"] == "1.0.0"
        assert isinstance(payload["services"], dict)

    def test_health_endpoint_services(self, client):
        """Test health endpoint returns service status."""
        resp = client.get("/health")
        assert resp.status_code == 200
        services = resp.json()["services"]
        # Core services must be reported and online.
        for service in ("authentication", "anime_service", "episode_service"):
            assert service in services
            assert services[service] == "online"

    def test_health_endpoint_timestamp_format(self, client):
        """Test health endpoint timestamp is valid."""
        resp = client.get("/health")
        assert resp.status_code == 200
        timestamp_str = resp.json()["timestamp"]
        # fromisoformat() raises ValueError on a malformed stamp; a trailing
        # 'Z' is normalized to an explicit UTC offset first.
        parsed = datetime.fromisoformat(timestamp_str.replace('Z', '+00:00'))
        assert isinstance(parsed, datetime)

    def test_database_health_requires_auth(self, client):
        """Test database health endpoint requires authentication."""
        resp = client.get("/api/system/database/health")
        assert resp.status_code == 403  # Should require authentication

    def test_database_health_with_auth(self, client, mock_settings, valid_jwt_token):
        """Test database health endpoint with authentication."""
        auth = {"Authorization": f"Bearer {valid_jwt_token}"}
        with patch('src.server.fastapi_app.settings', mock_settings):
            resp = client.get("/api/system/database/health", headers=auth)
        assert resp.status_code == 200
        payload = resp.json()
        for key in ("status", "connection_pool", "response_time_ms", "last_check"):
            assert key in payload
        assert payload["status"] == "healthy"
@pytest.mark.integration
class TestSystemHealthEndpoints:
    """Test system health monitoring endpoints (to be implemented).

    Each test tolerates 404 so the suite passes while these endpoints are
    still unimplemented; once an endpoint answers 200, its payload is
    sanity-checked.
    """

    def test_api_health_endpoint(self, client, mock_settings, valid_jwt_token):
        """Test /api/health endpoint."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            # This endpoint might not exist yet, so we test expected behavior
            response = client.get(
                "/api/health",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )
            # If not implemented, should return 404
            # If implemented, should return 200 with health data
            assert response.status_code in [200, 404]
            if response.status_code == 200:
                data = response.json()
                assert "status" in data

    def test_system_health_endpoint(self, client, mock_settings, valid_jwt_token):
        """Test /api/health/system endpoint for CPU, memory, disk metrics."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            response = client.get(
                "/api/health/system",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )
            # Endpoint may not be implemented yet
            assert response.status_code in [200, 404]
            if response.status_code == 200:
                data = response.json()
                expected_metrics = ["cpu_usage", "memory_usage", "disk_usage"]
                for metric in expected_metrics:
                    assert metric in data

    def test_dependencies_health_endpoint(self, client, mock_settings, valid_jwt_token):
        """Test /api/health/dependencies endpoint."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            response = client.get(
                "/api/health/dependencies",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )
            assert response.status_code in [200, 404]
            if response.status_code == 200:
                data = response.json()
                assert isinstance(data, dict)

    def test_performance_health_endpoint(self, client, mock_settings, valid_jwt_token):
        """Test /api/health/performance endpoint."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            response = client.get(
                "/api/health/performance",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )
            assert response.status_code in [200, 404]
            if response.status_code == 200:
                data = response.json()
                performance_metrics = ["response_time", "throughput", "error_rate"]
                # At least some performance metrics should be present
                assert any(metric in data for metric in performance_metrics)

    def test_metrics_health_endpoint(self, client, mock_settings, valid_jwt_token):
        """Test /api/health/metrics endpoint."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            response = client.get(
                "/api/health/metrics",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )
            assert response.status_code in [200, 404]
            if response.status_code == 200:
                data = response.json()
                assert isinstance(data, (dict, list))

    def test_ready_health_endpoint(self, client, mock_settings, valid_jwt_token):
        """Test /api/health/ready endpoint for readiness probe."""
        with patch('src.server.fastapi_app.settings', mock_settings):
            response = client.get(
                "/api/health/ready",
                headers={"Authorization": f"Bearer {valid_jwt_token}"}
            )
            # 503 signals "not ready" for a readiness probe.
            assert response.status_code in [200, 404, 503]
            if response.status_code in [200, 503]:
                data = response.json()
                assert "ready" in data or "status" in data
@pytest.mark.integration
class TestHealthEndpointAuthentication:
    """Test authentication requirements for health endpoints."""

    # Endpoints under /api/health that must never be publicly accessible.
    _PROTECTED_ENDPOINTS = (
        "/api/health",
        "/api/health/system",
        "/api/health/database",
        "/api/health/dependencies",
        "/api/health/performance",
        "/api/health/metrics",
        "/api/health/ready",
    )

    def test_health_endpoints_without_auth(self, client):
        """Test which health endpoints require authentication."""
        # The basic health probe is public.
        assert client.get("/health").status_code == 200
        # System endpoints must require auth (403) or not exist (404).
        for endpoint in self._PROTECTED_ENDPOINTS:
            resp = client.get(endpoint)
            assert resp.status_code in [403, 404]

    def test_health_endpoints_with_invalid_auth(self, client):
        """Test health endpoints with invalid authentication."""
        bad_headers = {"Authorization": "Bearer invalid.token.here"}
        for endpoint in self._PROTECTED_ENDPOINTS:
            resp = client.get(endpoint, headers=bad_headers)
            # Unauthorized (401) or route not found (404).
            assert resp.status_code in [401, 404]
@pytest.mark.integration
class TestHealthEndpointErrorHandling:
    """Test error handling in health endpoints."""

    def test_health_endpoint_resilience(self, client):
        """Test health endpoint handles errors gracefully."""
        # Unusual content-negotiation headers must not break /health.
        for headers in (
            {"Content-Type": "application/xml"},
            {"Accept": "text/plain"},
        ):
            resp = client.get("/health", headers=headers)
            assert resp.status_code == 200

    def test_database_health_error_handling(self, client, mock_settings):
        """Test database health endpoint error handling."""
        # A JWT whose exp claim is 0, i.e. long expired.
        expired_token = (
            "eyJ0eXAiOiJKV1QiLCJhbGciOiJIUzI1NiJ9."
            "eyJ1c2VyIjoidGVzdCIsImV4cCI6MH0"
        )
        with patch('src.server.fastapi_app.settings', mock_settings):
            resp = client.get(
                "/api/system/database/health",
                headers={"Authorization": f"Bearer {expired_token}"},
            )
        assert resp.status_code == 401

    def test_health_endpoint_malformed_auth_header(self, client):
        """Test health endpoints with malformed authorization headers."""
        for headers in (
            {"Authorization": "Bearer"},       # missing token
            {"Authorization": "Basic token"},  # wrong scheme
            {"Authorization": "token"},        # missing Bearer prefix
        ):
            resp = client.get("/api/system/database/health", headers=headers)
            assert resp.status_code in [401, 403]

View File

@@ -1,440 +0,0 @@
"""
Integration tests for API key management, webhooks, and third-party integrations.
This module tests the integration endpoints for managing API keys, webhook configurations,
and third-party service integrations.
"""
import json
import uuid
from unittest.mock import Mock, patch
import pytest
from fastapi.testclient import TestClient
from src.server.fastapi_app import app
@pytest.fixture
def client():
    """Create a test client for the FastAPI application."""
    fastapi_client = TestClient(app)
    return fastapi_client
@pytest.fixture
def auth_headers(client):
    """Provide authentication headers for protected endpoints.

    Logs in with the master password and returns a Bearer-token header
    dict; returns an empty dict when login fails, so dependent tests
    degrade to exercising the unauthenticated code paths.
    """
    # sha256("password") — must correspond to the password actually sent.
    password_hash = (
        "5e884898da28047151d0e56f8dc6292773603d0d6aabbdd62a11ef721d1542d8"
    )
    # BUG FIX: the original patched the attribute with a MagicMock and set
    # `.return_value`, which never equals a computed hash, and it sent
    # "testpassword" while the stored hash was for "password". Patch the
    # attribute with the literal hash and send the matching password.
    with patch(
        'src.server.fastapi_app.settings.master_password_hash',
        new=password_hash,
    ):
        response = client.post("/auth/login", json={"password": "password"})
        if response.status_code == 200:
            token = response.json()["access_token"]
            return {"Authorization": f"Bearer {token}"}
    return {}
class TestAPIKeyManagement:
    """Test cases for API key management endpoints.

    Every authenticated test tolerates 404 because the endpoints are not
    implemented yet; once they exist, the stricter branch of each assertion
    applies automatically.
    """
    def test_list_api_keys_requires_auth(self, client):
        """Test that listing API keys requires authentication."""
        response = client.get("/api/integrations/api-keys")
        assert response.status_code == 401
    def test_create_api_key_requires_auth(self, client):
        """Test that creating API keys requires authentication."""
        response = client.post("/api/integrations/api-keys", json={"name": "test_key"})
        assert response.status_code == 401
    @patch('src.server.fastapi_app.get_current_user')
    def test_list_api_keys(self, mock_user, client):
        """Test listing API keys."""
        # NOTE(review): patching the function object does not override a
        # FastAPI dependency; if the route uses Depends(get_current_user),
        # app.dependency_overrides is required instead — confirm once the
        # endpoint is implemented (a 401 here would fail the [200, 404] check).
        mock_user.return_value = {"user_id": "test_user"}
        response = client.get("/api/integrations/api-keys")
        # Expected 404 since endpoint not implemented yet
        assert response.status_code in [200, 404]
        if response.status_code == 200:
            data = response.json()
            assert "api_keys" in data
            assert isinstance(data["api_keys"], list)
    @patch('src.server.fastapi_app.get_current_user')
    def test_create_api_key(self, mock_user, client):
        """Test creating new API key."""
        mock_user.return_value = {"user_id": "test_user"}
        key_data = {
            "name": "test_integration_key",
            "description": "Key for testing integrations",
            "permissions": ["read", "write"],
            "expires_at": "2024-12-31T23:59:59Z"
        }
        response = client.post("/api/integrations/api-keys", json=key_data)
        # Expected 404 since endpoint not implemented yet
        assert response.status_code in [201, 404]
        if response.status_code == 201:
            data = response.json()
            assert "api_key_id" in data
            assert "api_key" in data
            assert "created_at" in data
    @patch('src.server.fastapi_app.get_current_user')
    def test_get_api_key_details(self, mock_user, client):
        """Test getting API key details."""
        mock_user.return_value = {"user_id": "test_user"}
        key_id = "test_key_123"
        response = client.get(f"/api/integrations/api-keys/{key_id}")
        assert response.status_code in [200, 404]
        if response.status_code == 200:
            data = response.json()
            assert "api_key_id" in data
            assert "name" in data
            assert "permissions" in data
            assert "created_at" in data
    @patch('src.server.fastapi_app.get_current_user')
    def test_revoke_api_key(self, mock_user, client):
        """Test revoking API key."""
        mock_user.return_value = {"user_id": "test_user"}
        key_id = "test_key_123"
        response = client.delete(f"/api/integrations/api-keys/{key_id}")
        assert response.status_code in [200, 404]
        if response.status_code == 200:
            data = response.json()
            assert "status" in data
            assert data["status"] == "revoked"
    def test_create_api_key_invalid_data(self, client, auth_headers):
        """Test creating API key with invalid data (expects 400/422 once implemented)."""
        invalid_data_sets = [
            {},  # Empty data
            {"name": ""},  # Empty name
            {"name": "test", "permissions": []},  # Empty permissions
            {"name": "test", "expires_at": "invalid_date"},  # Invalid date
        ]
        for invalid_data in invalid_data_sets:
            response = client.post("/api/integrations/api-keys", json=invalid_data, headers=auth_headers)
            assert response.status_code in [400, 404, 422]
    @patch('src.server.fastapi_app.get_current_user')
    def test_update_api_key_permissions(self, mock_user, client):
        """Test updating API key permissions."""
        mock_user.return_value = {"user_id": "test_user"}
        key_id = "test_key_123"
        update_data = {
            "permissions": ["read"],
            "description": "Updated description"
        }
        response = client.patch(f"/api/integrations/api-keys/{key_id}", json=update_data)
        assert response.status_code in [200, 404]
class TestWebhookManagement:
    """Test cases for webhook configuration endpoints.

    404 is tolerated throughout because the webhook routes are placeholders;
    the structural assertions activate once the endpoints return real data.
    """
    def test_list_webhooks_requires_auth(self, client):
        """Test that listing webhooks requires authentication."""
        response = client.get("/api/integrations/webhooks")
        assert response.status_code == 401
    @patch('src.server.fastapi_app.get_current_user')
    def test_list_webhooks(self, mock_user, client):
        """Test listing configured webhooks."""
        mock_user.return_value = {"user_id": "test_user"}
        response = client.get("/api/integrations/webhooks")
        # Expected 404 since endpoint not implemented yet
        assert response.status_code in [200, 404]
        if response.status_code == 200:
            data = response.json()
            assert "webhooks" in data
            assert isinstance(data["webhooks"], list)
    @patch('src.server.fastapi_app.get_current_user')
    def test_create_webhook(self, mock_user, client):
        """Test creating new webhook."""
        mock_user.return_value = {"user_id": "test_user"}
        webhook_data = {
            "name": "download_complete_webhook",
            "url": "https://example.com/webhook",
            "events": ["download_complete", "download_failed"],
            "secret": "webhook_secret_123",
            "active": True
        }
        response = client.post("/api/integrations/webhooks", json=webhook_data)
        # Expected 404 since endpoint not implemented yet
        assert response.status_code in [201, 404]
        if response.status_code == 201:
            data = response.json()
            assert "webhook_id" in data
            assert "created_at" in data
    @patch('src.server.fastapi_app.get_current_user')
    def test_test_webhook(self, mock_user, client):
        """Test firing a webhook's test-delivery endpoint."""
        mock_user.return_value = {"user_id": "test_user"}
        webhook_id = "webhook_123"
        test_data = {
            "event_type": "test",
            "test_payload": {"message": "test webhook"}
        }
        response = client.post(f"/api/integrations/webhooks/{webhook_id}/test", json=test_data)
        assert response.status_code in [200, 404]
        if response.status_code == 200:
            data = response.json()
            assert "status" in data
            assert "response_time_ms" in data
    @patch('src.server.fastapi_app.get_current_user')
    def test_update_webhook(self, mock_user, client):
        """Test updating webhook configuration."""
        mock_user.return_value = {"user_id": "test_user"}
        webhook_id = "webhook_123"
        update_data = {
            "active": False,
            "events": ["download_complete"]
        }
        response = client.patch(f"/api/integrations/webhooks/{webhook_id}", json=update_data)
        assert response.status_code in [200, 404]
    @patch('src.server.fastapi_app.get_current_user')
    def test_delete_webhook(self, mock_user, client):
        """Test deleting webhook."""
        mock_user.return_value = {"user_id": "test_user"}
        webhook_id = "webhook_123"
        response = client.delete(f"/api/integrations/webhooks/{webhook_id}")
        assert response.status_code in [200, 404]
    def test_create_webhook_invalid_url(self, client, auth_headers):
        """Test creating webhook with invalid URL."""
        invalid_webhook_data = {
            "name": "invalid_webhook",
            "url": "not_a_valid_url",
            "events": ["download_complete"]
        }
        response = client.post("/api/integrations/webhooks", json=invalid_webhook_data, headers=auth_headers)
        assert response.status_code in [400, 404, 422]
class TestThirdPartyIntegrations:
    """Test cases for third-party service integrations.

    All service-specific tests exercise the 'discord' service as a
    representative integration; 404 is tolerated until the routes exist.
    """
    def test_list_integrations_requires_auth(self, client):
        """Test that listing integrations requires authentication."""
        response = client.get("/api/integrations/services")
        assert response.status_code == 401
    @patch('src.server.fastapi_app.get_current_user')
    def test_list_available_integrations(self, mock_user, client):
        """Test listing available third-party integrations."""
        mock_user.return_value = {"user_id": "test_user"}
        response = client.get("/api/integrations/services")
        # Expected 404 since endpoint not implemented yet
        assert response.status_code in [200, 404]
        if response.status_code == 200:
            data = response.json()
            assert "services" in data
            assert isinstance(data["services"], list)
    @patch('src.server.fastapi_app.get_current_user')
    def test_configure_integration(self, mock_user, client):
        """Test configuring third-party integration."""
        mock_user.return_value = {"user_id": "test_user"}
        service_name = "discord"
        config_data = {
            "webhook_url": "https://discord.com/api/webhooks/...",
            "notifications": ["download_complete", "series_added"],
            "enabled": True
        }
        response = client.post(f"/api/integrations/services/{service_name}/configure", json=config_data)
        assert response.status_code in [200, 404]
    @patch('src.server.fastapi_app.get_current_user')
    def test_test_integration(self, mock_user, client):
        """Test sending a test notification through an integration."""
        mock_user.return_value = {"user_id": "test_user"}
        service_name = "discord"
        test_data = {
            "message": "Test notification from AniWorld"
        }
        response = client.post(f"/api/integrations/services/{service_name}/test", json=test_data)
        assert response.status_code in [200, 404]
        if response.status_code == 200:
            data = response.json()
            assert "status" in data
            assert "response" in data
    @patch('src.server.fastapi_app.get_current_user')
    def test_get_integration_status(self, mock_user, client):
        """Test getting integration status."""
        mock_user.return_value = {"user_id": "test_user"}
        service_name = "discord"
        response = client.get(f"/api/integrations/services/{service_name}/status")
        assert response.status_code in [200, 404]
        if response.status_code == 200:
            data = response.json()
            assert "service" in data
            assert "status" in data
            assert "last_tested" in data
    @patch('src.server.fastapi_app.get_current_user')
    def test_disable_integration(self, mock_user, client):
        """Test disabling integration."""
        mock_user.return_value = {"user_id": "test_user"}
        service_name = "discord"
        response = client.post(f"/api/integrations/services/{service_name}/disable")
        assert response.status_code in [200, 404]
class TestIntegrationEvents:
    """Test cases for integration event handling (listing, triggering, history)."""
    @patch('src.server.fastapi_app.get_current_user')
    def test_list_integration_events(self, mock_user, client):
        """Test listing integration events."""
        mock_user.return_value = {"user_id": "test_user"}
        response = client.get("/api/integrations/events")
        assert response.status_code in [200, 404]
        if response.status_code == 200:
            data = response.json()
            assert "events" in data
            assert isinstance(data["events"], list)
    @patch('src.server.fastapi_app.get_current_user')
    def test_trigger_test_event(self, mock_user, client):
        """Test triggering test integration event."""
        mock_user.return_value = {"user_id": "test_user"}
        event_data = {
            "event_type": "download_complete",
            "payload": {
                "anime_id": "test_anime",
                "episode_count": 12,
                "download_time": "2023-01-01T12:00:00Z"
            }
        }
        response = client.post("/api/integrations/events/trigger", json=event_data)
        assert response.status_code in [200, 404]
    @patch('src.server.fastapi_app.get_current_user')
    def test_get_event_history(self, mock_user, client):
        """Test getting integration event history (expects a paginated payload)."""
        mock_user.return_value = {"user_id": "test_user"}
        response = client.get("/api/integrations/events/history")
        assert response.status_code in [200, 404]
        if response.status_code == 200:
            data = response.json()
            assert "events" in data
            assert "pagination" in data
class TestIntegrationSecurity:
    """Test cases for integration security features (key/signature validation, rate limits)."""
    @patch('src.server.fastapi_app.get_current_user')
    def test_api_key_validation(self, mock_user, client):
        """Test API key validation."""
        mock_user.return_value = {"user_id": "test_user"}
        # Test with valid API key format: "ak_test_" prefix + 32-hex UUID body
        validation_data = {
            "api_key": "ak_test_" + str(uuid.uuid4()).replace("-", "")
        }
        response = client.post("/api/integrations/validate-key", json=validation_data)
        assert response.status_code in [200, 404]
    @patch('src.server.fastapi_app.get_current_user')
    def test_webhook_signature_validation(self, mock_user, client):
        """Test webhook signature validation."""
        mock_user.return_value = {"user_id": "test_user"}
        signature_data = {
            "payload": {"test": "data"},
            "signature": "sha256=test_signature",
            "secret": "webhook_secret"
        }
        response = client.post("/api/integrations/validate-signature", json=signature_data)
        assert response.status_code in [200, 404]
    def test_integration_rate_limiting(self, client, auth_headers):
        """Test rate limiting for integration endpoints."""
        # Make multiple rapid requests to test rate limiting
        for i in range(10):
            response = client.get("/api/integrations/api-keys", headers=auth_headers)
            # Should either work or be rate limited (429)
            assert response.status_code in [200, 404, 429]
class TestIntegrationErrorHandling:
    """Test cases for integration error handling (bad names, malformed payloads, outages)."""
    def test_invalid_service_name(self, client, auth_headers):
        """Test handling of invalid service names."""
        response = client.get("/api/integrations/services/invalid_service/status", headers=auth_headers)
        assert response.status_code in [400, 404]
    def test_malformed_webhook_payload(self, client, auth_headers):
        """Test handling of malformed webhook payloads."""
        malformed_data = {
            "url": "https://example.com",
            "events": "not_a_list"  # Should be a list
        }
        response = client.post("/api/integrations/webhooks", json=malformed_data, headers=auth_headers)
        assert response.status_code in [400, 404, 422]
    @patch('src.server.fastapi_app.get_current_user')
    def test_integration_service_unavailable(self, mock_user, client):
        """Test handling when integration service is unavailable (503 tolerated)."""
        mock_user.return_value = {"user_id": "test_user"}
        # This would test actual service connectivity in real implementation
        response = client.post("/api/integrations/services/discord/test", json={"message": "test"})
        assert response.status_code in [200, 404, 503]
if __name__ == "__main__":
    # Allow running this test module directly with verbose pytest output.
    pytest.main([__file__, "-v"])

View File

@@ -1,522 +0,0 @@
"""
Integration tests for miscellaneous components.
Tests configuration system integration, error handling pipelines,
and modular architecture component interactions.
"""
import json
import os
import sys
import tempfile
from pathlib import Path
from unittest.mock import Mock
import pytest
# Make the project root (three levels up) importable when this test
# module is executed from its own directory rather than via the runner.
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '..', '..', '..'))
@pytest.mark.integration
class TestConfigurationIntegration:
    """Test configuration system integration.

    These tests are self-contained: they define throwaway loader/validator/
    observer implementations inline rather than importing the real ones, so
    they exercise the intended integration *contract*, not the production code.
    """
    def test_config_loading_chain(self):
        """Test complete configuration loading chain (user config overrides defaults)."""
        # Create temporary config files
        with tempfile.TemporaryDirectory() as temp_dir:
            # Create default config
            default_config = {
                "anime_directory": "/default/path",
                "log_level": "INFO",
                "provider_timeout": 30
            }
            # Create user config that overrides some values
            user_config = {
                "anime_directory": "/user/path",
                "log_level": "DEBUG"
            }
            default_file = Path(temp_dir) / "default.json"
            user_file = Path(temp_dir) / "user.json"
            with open(default_file, 'w') as f:
                json.dump(default_config, f)
            with open(user_file, 'w') as f:
                json.dump(user_config, f)
            # Mock configuration loader: dict.update gives user values precedence
            def load_configuration(default_path, user_path):
                """Load configuration with precedence."""
                config = {}
                # Load default config
                if os.path.exists(default_path):
                    with open(default_path, 'r') as f:
                        config.update(json.load(f))
                # Load user config (overrides defaults)
                if os.path.exists(user_path):
                    with open(user_path, 'r') as f:
                        config.update(json.load(f))
                return config
            # Test configuration loading
            config = load_configuration(str(default_file), str(user_file))
            # Verify precedence
            assert config["anime_directory"] == "/user/path"  # User override
            assert config["log_level"] == "DEBUG"  # User override
            assert config["provider_timeout"] == 30  # Default value
    def test_config_validation_integration(self):
        """Test configuration validation integration."""
        def validate_config(config):
            """Validate configuration values; return a list of error strings."""
            errors = []
            # Validate required fields
            required_fields = ["anime_directory", "log_level"]
            for field in required_fields:
                if field not in config:
                    errors.append(f"Missing required field: {field}")
            # Validate specific values
            if "log_level" in config:
                valid_levels = ["DEBUG", "INFO", "WARNING", "ERROR", "FATAL"]
                if config["log_level"] not in valid_levels:
                    errors.append(f"Invalid log level: {config['log_level']}")
            if "provider_timeout" in config:
                if config["provider_timeout"] <= 0:
                    errors.append("Provider timeout must be positive")
            return errors
        # Test valid configuration
        valid_config = {
            "anime_directory": "/valid/path",
            "log_level": "INFO",
            "provider_timeout": 30
        }
        errors = validate_config(valid_config)
        assert len(errors) == 0
        # Test invalid configuration
        invalid_config = {
            "log_level": "INVALID",
            "provider_timeout": -5
        }
        errors = validate_config(invalid_config)
        assert len(errors) == 3  # Missing anime_directory, invalid log level, negative timeout
        assert "Missing required field: anime_directory" in errors
        assert "Invalid log level: INVALID" in errors
        assert "Provider timeout must be positive" in errors
    def test_config_change_propagation(self):
        """Test configuration change propagation to components via observers."""
        class ConfigurableComponent:
            # Registers itself as an observer and mirrors changes locally.
            def __init__(self, config_manager):
                self.config_manager = config_manager
                self.current_config = {}
                self.config_manager.add_observer(self.on_config_change)
            def on_config_change(self, key, old_value, new_value):
                self.current_config[key] = new_value
                # React to specific config changes
                if key == "log_level":
                    self.update_log_level(new_value)
                elif key == "provider_timeout":
                    self.update_timeout(new_value)
            def update_log_level(self, level):
                self.log_level_changed = level
            def update_timeout(self, timeout):
                self.timeout_changed = timeout
        # Mock config manager: notifies all observers on every set()
        class ConfigManager:
            def __init__(self):
                self.config = {}
                self.observers = []
            def add_observer(self, observer):
                self.observers.append(observer)
            def set(self, key, value):
                old_value = self.config.get(key)
                self.config[key] = value
                for observer in self.observers:
                    observer(key, old_value, value)
        # Test configuration change propagation
        config_manager = ConfigManager()
        component = ConfigurableComponent(config_manager)
        # Change configuration
        config_manager.set("log_level", "DEBUG")
        config_manager.set("provider_timeout", 60)
        # Verify changes propagated
        assert component.current_config["log_level"] == "DEBUG"
        assert component.current_config["provider_timeout"] == 60
        assert component.log_level_changed == "DEBUG"
        assert component.timeout_changed == 60
@pytest.mark.integration
class TestErrorHandlingIntegration:
    """Test error handling system integration (propagation and retry recovery)."""
    def test_error_propagation_chain(self):
        """Test error propagation through component layers.

        Errors raised in the data layer are handled (not re-raised) at the
        service layer, so the API layer sees an error *dict*, not an exception.
        """
        class DataLayer:
            def fetch_data(self, raise_error=False):
                if raise_error:
                    raise ConnectionError("Database connection failed")
                return {"data": "test"}
        class ServiceLayer:
            def __init__(self, data_layer, error_handler):
                self.data_layer = data_layer
                self.error_handler = error_handler
            def get_data(self, raise_error=False):
                try:
                    return self.data_layer.fetch_data(raise_error)
                except Exception as e:
                    # Converts the exception into an error-info dict.
                    return self.error_handler.handle_error(e, context="service_layer")
        class ApiLayer:
            def __init__(self, service_layer, error_handler):
                self.service_layer = service_layer
                self.error_handler = error_handler
            def api_get_data(self, raise_error=False):
                try:
                    result = self.service_layer.get_data(raise_error)
                    if result.get("error"):
                        return {"success": False, "error": result["error"]}
                    return {"success": True, "data": result}
                except Exception as e:
                    error_response = self.error_handler.handle_error(e, context="api_layer")
                    return {"success": False, "error": error_response["error"]}
        # Mock error handler: records every handled error for later inspection
        class ErrorHandler:
            def __init__(self):
                self.handled_errors = []
            def handle_error(self, error, context=None):
                error_info = {
                    "error_type": type(error).__name__,
                    "error": str(error),
                    "context": context,
                    "handled": True
                }
                self.handled_errors.append(error_info)
                return error_info
        # Set up components
        error_handler = ErrorHandler()
        data_layer = DataLayer()
        service_layer = ServiceLayer(data_layer, error_handler)
        api_layer = ApiLayer(service_layer, error_handler)
        # Test successful execution
        result = api_layer.api_get_data(raise_error=False)
        assert result["success"] is True
        assert result["data"]["data"] == "test"
        # Test error propagation
        result = api_layer.api_get_data(raise_error=True)
        assert result["success"] is False
        assert "Database connection failed" in result["error"]
        # Verify error was handled at service layer (never reached api_layer's except)
        assert len(error_handler.handled_errors) == 1
        assert error_handler.handled_errors[0]["context"] == "service_layer"
        assert error_handler.handled_errors[0]["error_type"] == "ConnectionError"
    def test_error_recovery_integration(self):
        """Test error recovery integration across components."""
        class RetryableService:
            # NOTE(review): max_retries stored here is never read —
            # unreliable_operation always succeeds on its 3rd call regardless.
            # Retry limits are enforced only by execute_with_retry below.
            def __init__(self, max_retries=3):
                self.max_retries = max_retries
                self.attempt_count = 0
            def unreliable_operation(self):
                self.attempt_count += 1
                if self.attempt_count < 3:
                    raise ConnectionError(f"Attempt {self.attempt_count} failed")
                return f"Success on attempt {self.attempt_count}"
        def execute_with_retry(service, operation_name, max_retries=3):
            """Execute operation with retry logic; re-raise after the last attempt."""
            last_error = None
            for attempt in range(max_retries):
                try:
                    operation = getattr(service, operation_name)
                    return operation()
                except Exception as e:
                    last_error = e
                    if attempt == max_retries - 1:
                        raise e
            raise last_error
        # Test successful retry
        service = RetryableService()
        result = execute_with_retry(service, "unreliable_operation")
        assert "Success on attempt 3" in result
        # Test failure after max retries (only 2 attempts allowed, needs 3)
        service = RetryableService(max_retries=10)  # Will fail more than 3 times
        with pytest.raises(ConnectionError):
            execute_with_retry(service, "unreliable_operation", max_retries=2)
@pytest.mark.integration
class TestModularArchitectureIntegration:
    """Test modular architecture integration.

    Each test builds a miniature in-memory version of one architectural
    pattern used by the project (provider factory with fallback, repository
    + caching service, event bus) and verifies the pattern's contract.
    """
    def test_provider_system_integration(self):
        """Test complete provider system integration."""
        # Mock provider implementations
        class BaseProvider:
            def search(self, query):
                raise NotImplementedError
        class AniworldProvider(BaseProvider):
            def search(self, query):
                return [{"title": f"Aniworld: {query}", "source": "aniworld"}]
        class BackupProvider(BaseProvider):
            def search(self, query):
                return [{"title": f"Backup: {query}", "source": "backup"}]
        # Provider factory: maps names to provider classes, instantiates on create()
        class ProviderFactory:
            def __init__(self):
                self.providers = {}
            def register(self, name, provider_class):
                self.providers[name] = provider_class
            def create(self, name):
                if name not in self.providers:
                    raise ValueError(f"Provider {name} not found")
                return self.providers[name]()
        # Provider service with fallback
        class ProviderService:
            def __init__(self, factory, primary_provider, fallback_providers=None):
                self.factory = factory
                self.primary_provider = primary_provider
                self.fallback_providers = fallback_providers or []
            def search(self, query):
                # Try primary provider
                try:
                    provider = self.factory.create(self.primary_provider)
                    return provider.search(query)
                except Exception:
                    # Try fallback providers
                    for fallback_name in self.fallback_providers:
                        try:
                            provider = self.factory.create(fallback_name)
                            return provider.search(query)
                        except Exception:
                            continue
                    raise Exception("All providers failed")
        # Set up provider system
        factory = ProviderFactory()
        factory.register("aniworld", AniworldProvider)
        factory.register("backup", BackupProvider)
        service = ProviderService(
            factory,
            primary_provider="aniworld",
            fallback_providers=["backup"]
        )
        # Test primary provider success
        results = service.search("test anime")
        assert len(results) == 1
        assert results[0]["source"] == "aniworld"
        # Test fallback when primary fails: create() returns None, so
        # None.search(...) raises AttributeError and triggers the fallback path
        factory.register("failing", lambda: None)  # Will fail on search
        service_with_failing_primary = ProviderService(
            factory,
            primary_provider="failing",
            fallback_providers=["backup"]
        )
        results = service_with_failing_primary.search("test anime")
        assert len(results) == 1
        assert results[0]["source"] == "backup"
    def test_repository_service_integration(self):
        """Test repository and service layer integration (provider results are cached)."""
        # Mock repository: in-memory dict keyed by auto-incremented id
        class AnimeRepository:
            def __init__(self):
                self.data = {}
                self.next_id = 1
            def save(self, anime):
                anime_id = self.next_id
                self.next_id += 1
                anime_data = {**anime, "id": anime_id}
                self.data[anime_id] = anime_data
                return anime_data
            def find_by_id(self, anime_id):
                return self.data.get(anime_id)
            def find_all(self):
                return list(self.data.values())
            def find_by_title(self, title):
                # Case-insensitive substring match against stored titles.
                return [anime for anime in self.data.values() if title.lower() in anime["title"].lower()]
        # Service layer
        class AnimeService:
            def __init__(self, repository, provider_service):
                self.repository = repository
                self.provider_service = provider_service
            def search_and_cache(self, query):
                # Check cache first
                cached = self.repository.find_by_title(query)
                if cached:
                    return {"source": "cache", "results": cached}
                # Search using provider
                results = self.provider_service.search(query)
                # Cache results
                cached_results = []
                for result in results:
                    saved = self.repository.save(result)
                    cached_results.append(saved)
                return {"source": "provider", "results": cached_results}
        # Mock provider service
        mock_provider = Mock()
        mock_provider.search.return_value = [
            {"title": "Test Anime", "genre": "Action"}
        ]
        # Set up service
        repository = AnimeRepository()
        service = AnimeService(repository, mock_provider)
        # First search should use provider
        result1 = service.search_and_cache("Test")
        assert result1["source"] == "provider"
        assert len(result1["results"]) == 1
        assert result1["results"][0]["id"] == 1
        # Second search should use cache
        result2 = service.search_and_cache("Test")
        assert result2["source"] == "cache"
        assert len(result2["results"]) == 1
        assert result2["results"][0]["id"] == 1
        # Verify provider was only called once
        mock_provider.search.assert_called_once_with("Test")
    def test_event_driven_integration(self):
        """Test event-driven component integration via a simple pub/sub bus."""
        # Event bus
        class EventBus:
            def __init__(self):
                self.subscribers = {}
            def subscribe(self, event_type, handler):
                if event_type not in self.subscribers:
                    self.subscribers[event_type] = []
                self.subscribers[event_type].append(handler)
            def publish(self, event_type, data):
                if event_type in self.subscribers:
                    for handler in self.subscribers[event_type]:
                        handler(data)
        # Components that publish/subscribe to events
        class DownloadService:
            def __init__(self, event_bus):
                self.event_bus = event_bus
            def download_anime(self, anime_id):
                # Simulate download
                self.event_bus.publish("download_started", {"anime_id": anime_id})
                # Simulate completion
                self.event_bus.publish("download_completed", {
                    "anime_id": anime_id,
                    "status": "success"
                })
        class NotificationService:
            def __init__(self, event_bus):
                self.event_bus = event_bus
                self.notifications = []
                # Subscribe to events
                self.event_bus.subscribe("download_started", self.on_download_started)
                self.event_bus.subscribe("download_completed", self.on_download_completed)
            def on_download_started(self, data):
                self.notifications.append(f"Download started for anime {data['anime_id']}")
            def on_download_completed(self, data):
                self.notifications.append(f"Download completed for anime {data['anime_id']}")
        class StatisticsService:
            def __init__(self, event_bus):
                self.event_bus = event_bus
                self.download_count = 0
                self.completed_count = 0
                # Subscribe to events
                self.event_bus.subscribe("download_started", self.on_download_started)
                self.event_bus.subscribe("download_completed", self.on_download_completed)
            def on_download_started(self, data):
                self.download_count += 1
            def on_download_completed(self, data):
                self.completed_count += 1
        # Set up event-driven system
        event_bus = EventBus()
        download_service = DownloadService(event_bus)
        notification_service = NotificationService(event_bus)
        stats_service = StatisticsService(event_bus)
        # Trigger download
        download_service.download_anime(123)
        # Verify events were handled by both independent subscribers
        assert len(notification_service.notifications) == 2
        assert "Download started for anime 123" in notification_service.notifications
        assert "Download completed for anime 123" in notification_service.notifications
        assert stats_service.download_count == 1
        assert stats_service.completed_count == 1

View File

@@ -1,332 +0,0 @@
"""
Integration tests for performance optimization API endpoints.
This module tests the performance-related endpoints for speed limiting, cache management,
memory management, and download task handling.
"""
import time
from unittest.mock import Mock, patch
import pytest
from fastapi.testclient import TestClient
from src.server.fastapi_app import app
@pytest.fixture
def client():
    """Build a ``TestClient`` bound to the FastAPI application under test."""
    test_client = TestClient(app)
    return test_client
@pytest.fixture
def auth_headers(client):
    """Log in against a mocked master-password hash and return Bearer headers.

    Returns an empty dict when login fails, so dependent tests degrade to
    unauthenticated requests instead of erroring during fixture setup.
    """
    # Fix: the mocked hash below is SHA-256 of the literal string 'password'
    # (as the trailing comment states), so the submitted credential must be
    # 'password' too.  The previous payload sent 'testpassword', which could
    # never match the hash — the fixture always fell through to `return {}`
    # and every auth_headers-based test silently ran unauthenticated.
    login_data = {"password": "password"}
    with patch('src.server.fastapi_app.settings.master_password_hash') as mock_hash:
        # NOTE(review): patch() swaps the attribute for a Mock; return_value
        # only matters if the app *calls* it.  If the hash is read as a plain
        # attribute, patch with `new="…"` instead — confirm against the auth
        # implementation in fastapi_app.
        mock_hash.return_value = "5e884898da28047151d0e56f8dc6292773603d0d6aabbdd62a11ef721d1542d8"  # sha256('password')
        response = client.post("/auth/login", json=login_data)
        if response.status_code == 200:
            token = response.json()["access_token"]
            return {"Authorization": f"Bearer {token}"}
    return {}
class TestSpeedLimitEndpoint:
    """Test cases for /api/performance/speed-limit endpoint.

    404 is tolerated in the authenticated tests because the endpoint is a
    placeholder; the payload assertions activate once it is implemented.
    """
    def test_get_speed_limit_requires_auth(self, client):
        """Test that getting speed limit requires authentication."""
        response = client.get("/api/performance/speed-limit")
        assert response.status_code == 401
    def test_set_speed_limit_requires_auth(self, client):
        """Test that setting speed limit requires authentication."""
        response = client.post("/api/performance/speed-limit", json={"limit_mbps": 10})
        assert response.status_code == 401
    @patch('src.server.fastapi_app.get_current_user')
    def test_get_current_speed_limit(self, mock_user, client):
        """Test getting current speed limit."""
        mock_user.return_value = {"user_id": "test_user"}
        response = client.get("/api/performance/speed-limit")
        # Expected 404 since endpoint not implemented yet
        assert response.status_code in [200, 404]
        if response.status_code == 200:
            data = response.json()
            assert "limit_mbps" in data
            assert "current_usage_mbps" in data
    @patch('src.server.fastapi_app.get_current_user')
    def test_set_speed_limit_valid(self, mock_user, client):
        """Test setting valid speed limit."""
        mock_user.return_value = {"user_id": "test_user"}
        limit_data = {"limit_mbps": 50}
        response = client.post("/api/performance/speed-limit", json=limit_data)
        # Expected 404 since endpoint not implemented yet
        assert response.status_code in [200, 404]
    def test_set_speed_limit_invalid(self, client, auth_headers):
        """Test setting invalid speed limit."""
        invalid_limits = [
            {"limit_mbps": -1},  # Negative
            {"limit_mbps": 0},  # Zero
            {"limit_mbps": "invalid"},  # Non-numeric
        ]
        for limit_data in invalid_limits:
            response = client.post("/api/performance/speed-limit", json=limit_data, headers=auth_headers)
            assert response.status_code in [400, 404, 422]
class TestCacheStatsEndpoint:
    """Tests for the /api/performance/cache/stats route.

    A 404 is accepted in the authenticated cases because the route is not
    implemented yet; the payload checks kick in once it returns 200.
    """
    def test_cache_stats_requires_auth(self, client):
        """An unauthenticated request to the cache stats route is rejected."""
        resp = client.get("/api/performance/cache/stats")
        assert resp.status_code == 401
    @patch('src.server.fastapi_app.get_current_user')
    def test_get_cache_stats(self, mock_auth, client):
        """An authenticated request yields the documented statistics payload."""
        mock_auth.return_value = {"user_id": "test_user"}
        resp = client.get("/api/performance/cache/stats")
        # Placeholder endpoint: tolerate 404 until implemented.
        assert resp.status_code in (200, 404)
        if resp.status_code == 200:
            payload = resp.json()
            for required in ("hit_rate", "miss_rate", "size_bytes", "entries_count", "evictions"):
                assert required in payload
    @patch('src.server.fastapi_app.get_current_user')
    def test_clear_cache(self, mock_auth, client):
        """DELETE on the stats route clears the cache."""
        mock_auth.return_value = {"user_id": "test_user"}
        resp = client.delete("/api/performance/cache/stats")
        # Placeholder endpoint: tolerate 404 until implemented.
        assert resp.status_code in (200, 404)
class TestMemoryStatsEndpoint:
    """Tests for the /api/performance/memory/stats route.

    A 404 is accepted in the authenticated case because the route is not
    implemented yet; the payload check kicks in once it returns 200.
    """
    def test_memory_stats_requires_auth(self, client):
        """An unauthenticated request to the memory stats route is rejected."""
        resp = client.get("/api/performance/memory/stats")
        assert resp.status_code == 401
    @patch('src.server.fastapi_app.get_current_user')
    def test_get_memory_stats(self, mock_auth, client):
        """An authenticated request yields the documented memory payload."""
        mock_auth.return_value = {"user_id": "test_user"}
        resp = client.get("/api/performance/memory/stats")
        # Placeholder endpoint: tolerate 404 until implemented.
        assert resp.status_code in (200, 404)
        if resp.status_code == 200:
            payload = resp.json()
            for required in ("used_bytes", "available_bytes", "percent_used", "process_memory"):
                assert required in payload
class TestMemoryGCEndpoint:
    """Test cases for /api/performance/memory/gc endpoint (manual GC trigger)."""
    def test_memory_gc_requires_auth(self, client):
        """Test that memory garbage collection requires authentication."""
        response = client.post("/api/performance/memory/gc")
        assert response.status_code == 401
    @patch('src.server.fastapi_app.get_current_user')
    def test_trigger_garbage_collection(self, mock_user, client):
        """Test triggering garbage collection."""
        mock_user.return_value = {"user_id": "test_user"}
        response = client.post("/api/performance/memory/gc")
        # Expected 404 since endpoint not implemented yet
        assert response.status_code in [200, 404]
        if response.status_code == 200:
            data = response.json()
            assert "collected_objects" in data
            assert "memory_freed_bytes" in data
class TestDownloadTasksEndpoint:
    """Test cases for /api/performance/downloads/tasks endpoint."""
    def test_download_tasks_requires_auth(self, client):
        """Test that download tasks requires authentication."""
        response = client.get("/api/performance/downloads/tasks")
        assert response.status_code == 401
    @patch('src.server.fastapi_app.get_current_user')
    def test_get_download_tasks(self, mock_user, client):
        """Test getting download tasks."""
        mock_user.return_value = {"user_id": "test_user"}
        response = client.get("/api/performance/downloads/tasks")
        # Expected 404 since endpoint not implemented yet
        assert response.status_code in [200, 404]
        if response.status_code == 200:
            data = response.json()
            assert "tasks" in data
            assert isinstance(data["tasks"], list)
    @patch('src.server.fastapi_app.get_current_user')
    def test_get_download_tasks_with_status_filter(self, mock_user, client):
        """Test getting download tasks with the ``status`` query filter."""
        mock_user.return_value = {"user_id": "test_user"}
        response = client.get("/api/performance/downloads/tasks?status=active")
        assert response.status_code in [200, 404]
        response = client.get("/api/performance/downloads/tasks?status=completed")
        assert response.status_code in [200, 404]
class TestAddDownloadTaskEndpoint:
    """Tests for the POST /api/performance/downloads/add-task endpoint."""

    def test_add_download_task_requires_auth(self, client):
        """Queueing a task without credentials must return 401."""
        resp = client.post("/api/performance/downloads/add-task", json={"anime_id": "test"})
        assert resp.status_code == 401

    @patch('src.server.fastapi_app.get_current_user')
    def test_add_download_task_valid(self, user_stub, client):
        """A well-formed task submission yields a task id and status."""
        user_stub.return_value = {"user_id": "test_user"}
        payload = {
            "anime_id": "anime123",
            "episode_range": {"start": 1, "end": 12},
            "quality": "1080p",
            "priority": "normal",
        }
        resp = client.post("/api/performance/downloads/add-task", json=payload)
        # Endpoint may not be implemented yet, so 404 is tolerated.
        assert resp.status_code in (200, 404)
        if resp.status_code == 200:
            body = resp.json()
            assert "task_id" in body
            assert "status" in body

    def test_add_download_task_invalid(self, client, auth_headers):
        """Malformed submissions are rejected with a client error."""
        bad_payloads = (
            {},  # no fields at all
            {"anime_id": ""},  # blank identifier
            {"anime_id": "test", "episode_range": {"start": 5, "end": 2}},  # reversed range
        )
        for payload in bad_payloads:
            resp = client.post("/api/performance/downloads/add-task", json=payload, headers=auth_headers)
            assert resp.status_code in (400, 404, 422)
class TestResumeTasksEndpoint:
    """Tests for the POST /api/performance/resume/tasks endpoint."""

    def test_resume_tasks_requires_auth(self, client):
        """Resuming tasks without credentials must return 401."""
        resp = client.post("/api/performance/resume/tasks")
        assert resp.status_code == 401

    @patch('src.server.fastapi_app.get_current_user')
    def test_resume_all_tasks(self, user_stub, client):
        """Resuming every paused task reports how many were resumed."""
        user_stub.return_value = {"user_id": "test_user"}
        resp = client.post("/api/performance/resume/tasks")
        # Endpoint may not be implemented yet, so 404 is tolerated.
        assert resp.status_code in (200, 404)
        if resp.status_code == 200:
            assert "resumed_count" in resp.json()

    @patch('src.server.fastapi_app.get_current_user')
    def test_resume_specific_task(self, user_stub, client):
        """A single task can be resumed by its identifier."""
        user_stub.return_value = {"user_id": "test_user"}
        resp = client.post("/api/performance/resume/tasks", json={"task_id": "task123"})
        assert resp.status_code in (200, 404)
class TestPerformanceEndpointsIntegration:
    """Cross-endpoint checks for the performance API."""

    @patch('src.server.fastapi_app.get_current_user')
    def test_performance_workflow(self, user_stub, client):
        """Walk a typical monitoring sequence: memory, cache, tasks, then GC."""
        user_stub.return_value = {"user_id": "test_user"}
        for path in ("/api/performance/memory/stats",
                     "/api/performance/cache/stats",
                     "/api/performance/downloads/tasks"):
            assert client.get(path).status_code in (200, 404)
        # Finally, request an explicit garbage-collection pass.
        assert client.post("/api/performance/memory/gc").status_code in (200, 404)

    def test_performance_endpoints_error_handling(self, client, auth_headers):
        """Every endpoint either works (200) or is not implemented yet (404)."""
        targets = (
            ("GET", "/api/performance/memory/stats"),
            ("GET", "/api/performance/cache/stats"),
            ("GET", "/api/performance/downloads/tasks"),
            ("POST", "/api/performance/memory/gc"),
            ("POST", "/api/performance/resume/tasks"),
        )
        for verb, path in targets:
            caller = client.get if verb == "GET" else client.post
            resp = caller(path, headers=auth_headers)
            assert resp.status_code in (200, 404)

    @patch('src.server.fastapi_app.get_current_user')
    def test_concurrent_performance_requests(self, user_stub, client):
        """Placeholder for concurrency: only verifies the endpoint is reachable."""
        user_stub.return_value = {"user_id": "test_user"}
        resp = client.get("/api/performance/memory/stats")
        assert resp.status_code in (200, 404)
# Allow running this module directly; delegates to pytest's CLI in verbose mode.
if __name__ == "__main__":
    pytest.main([__file__, "-v"])

View File

@@ -1,514 +0,0 @@
"""
Integration tests for user preferences and UI settings API endpoints.
This module tests the user preferences endpoints for theme management, language selection,
accessibility settings, keyboard shortcuts, and UI density configurations.
"""
from unittest.mock import patch
import pytest
from fastapi.testclient import TestClient
from src.server.fastapi_app import app
@pytest.fixture
def client():
    """Create a test client for the FastAPI application."""
    # TestClient drives the ASGI app in-process; no real server is started.
    return TestClient(app)
@pytest.fixture
def auth_headers(client):
    """Provide authentication headers for protected endpoints.

    Returns a Bearer-token header dict on successful login, otherwise {}.
    Tests using this fixture must therefore tolerate unauthenticated calls.
    """
    # Login to get token
    # NOTE(review): the password sent here is "testpassword", but the mocked
    # hash below is SHA-256 of the string "password" — confirm which the app
    # actually verifies against.
    login_data = {"password": "testpassword"}
    # NOTE(review): patch() swaps the attribute for a MagicMock; assigning
    # .return_value only matters if the app *calls* master_password_hash.
    # If it is compared as a plain value, this login fails and the fixture
    # silently returns {} — verify against the login handler.
    with patch('src.server.fastapi_app.settings.master_password_hash') as mock_hash:
        mock_hash.return_value = "5e884898da28047151d0e56f8dc6292773603d0d6aabbdd62a11ef721d1542d8"  # SHA-256 of 'password'
        response = client.post("/auth/login", json=login_data)
        if response.status_code == 200:
            token = response.json()["access_token"]
            return {"Authorization": f"Bearer {token}"}
    return {}
class TestThemeManagement:
    """Tests for the theme endpoints under /api/preferences/themes."""

    def test_get_themes_requires_auth(self, client):
        """Listing themes without credentials must return 401."""
        resp = client.get("/api/preferences/themes")
        assert resp.status_code == 401

    @patch('src.server.fastapi_app.get_current_user')
    def test_get_available_themes(self, user_stub, client):
        """The theme catalogue contains at least one stock theme."""
        user_stub.return_value = {"user_id": "test_user"}
        resp = client.get("/api/preferences/themes")
        # Endpoint may not be implemented yet, so 404 is tolerated.
        assert resp.status_code in (200, 404)
        if resp.status_code == 200:
            body = resp.json()
            assert "themes" in body
            assert isinstance(body["themes"], list)
            names = [entry["name"] for entry in body["themes"]]
            # A sane default set includes light and/or dark.
            assert "light" in names or "dark" in names

    @patch('src.server.fastapi_app.get_current_user')
    def test_get_current_theme(self, user_stub, client):
        """The current theme exposes its name and color palette."""
        user_stub.return_value = {"user_id": "test_user"}
        resp = client.get("/api/preferences/themes/current")
        assert resp.status_code in (200, 404)
        if resp.status_code == 200:
            body = resp.json()
            assert "theme" in body
            assert "name" in body["theme"]
            assert "colors" in body["theme"]

    @patch('src.server.fastapi_app.get_current_user')
    def test_set_theme(self, user_stub, client):
        """Selecting a stock theme with color overrides succeeds."""
        user_stub.return_value = {"user_id": "test_user"}
        payload = {
            "theme_name": "dark",
            "custom_colors": {
                "primary": "#007acc",
                "secondary": "#6c757d",
                "background": "#1a1a1a",
            },
        }
        resp = client.post("/api/preferences/themes/set", json=payload)
        assert resp.status_code in (200, 404)
        if resp.status_code == 200:
            body = resp.json()
            assert "status" in body
            assert body["status"] == "success"

    @patch('src.server.fastapi_app.get_current_user')
    def test_create_custom_theme(self, user_stub, client):
        """A fully specified custom theme can be created (201)."""
        user_stub.return_value = {"user_id": "test_user"}
        payload = {
            "name": "my_custom_theme",
            "display_name": "My Custom Theme",
            "colors": {
                "primary": "#ff6b6b",
                "secondary": "#4ecdc4",
                "background": "#2c3e50",
                "text": "#ecf0f1",
                "accent": "#e74c3c",
            },
            "is_dark": True,
        }
        resp = client.post("/api/preferences/themes/custom", json=payload)
        assert resp.status_code in (201, 404)
        if resp.status_code == 201:
            body = resp.json()
            assert "theme_id" in body
            assert "name" in body

    def test_set_invalid_theme(self, client, auth_headers):
        """An unknown theme name is rejected with a client error."""
        resp = client.post(
            "/api/preferences/themes/set",
            json={"theme_name": "nonexistent_theme"},
            headers=auth_headers,
        )
        assert resp.status_code in (400, 404, 422)
class TestLanguageSelection:
    """Tests for the language selection endpoints."""

    def test_get_languages_requires_auth(self, client):
        """Listing languages without credentials must return 401."""
        resp = client.get("/api/preferences/languages")
        assert resp.status_code == 401

    @patch('src.server.fastapi_app.get_current_user')
    def test_get_available_languages(self, user_stub, client):
        """The language catalogue includes at least English."""
        user_stub.return_value = {"user_id": "test_user"}
        resp = client.get("/api/preferences/languages")
        # Endpoint may not be implemented yet, so 404 is tolerated.
        assert resp.status_code in (200, 404)
        if resp.status_code == 200:
            body = resp.json()
            assert "languages" in body
            assert isinstance(body["languages"], list)
            codes = [entry["code"] for entry in body["languages"]]
            assert "en" in codes

    @patch('src.server.fastapi_app.get_current_user')
    def test_get_current_language(self, user_stub, client):
        """The current language exposes both its code and display name."""
        user_stub.return_value = {"user_id": "test_user"}
        resp = client.get("/api/preferences/languages/current")
        assert resp.status_code in (200, 404)
        if resp.status_code == 200:
            body = resp.json()
            assert "language" in body
            assert "code" in body["language"]
            assert "name" in body["language"]

    @patch('src.server.fastapi_app.get_current_user')
    def test_set_language(self, user_stub, client):
        """Switching to a known language code succeeds."""
        user_stub.return_value = {"user_id": "test_user"}
        resp = client.post("/api/preferences/languages/set", json={"language_code": "de"})
        assert resp.status_code in (200, 404)
        if resp.status_code == 200:
            body = resp.json()
            assert "status" in body
            assert "language" in body

    def test_set_invalid_language(self, client, auth_headers):
        """An unknown language code is rejected with a client error."""
        resp = client.post(
            "/api/preferences/languages/set",
            json={"language_code": "invalid_lang"},
            headers=auth_headers,
        )
        assert resp.status_code in (400, 404, 422)
class TestAccessibilitySettings:
    """Tests for the accessibility settings endpoints."""

    def test_get_accessibility_requires_auth(self, client):
        """Reading accessibility settings without credentials returns 401."""
        resp = client.get("/api/preferences/accessibility")
        assert resp.status_code == 401

    @patch('src.server.fastapi_app.get_current_user')
    def test_get_accessibility_settings(self, user_stub, client):
        """All documented accessibility toggles are present in the payload."""
        user_stub.return_value = {"user_id": "test_user"}
        resp = client.get("/api/preferences/accessibility")
        # Endpoint may not be implemented yet, so 404 is tolerated.
        assert resp.status_code in (200, 404)
        if resp.status_code == 200:
            body = resp.json()
            for key in ("high_contrast", "large_text", "reduced_motion",
                        "screen_reader_support", "keyboard_navigation"):
                assert key in body

    @patch('src.server.fastapi_app.get_current_user')
    def test_update_accessibility_settings(self, user_stub, client):
        """A full settings update is acknowledged with the changed fields."""
        user_stub.return_value = {"user_id": "test_user"}
        payload = {
            "high_contrast": True,
            "large_text": True,
            "reduced_motion": False,
            "screen_reader_support": True,
            "keyboard_navigation": True,
            "font_size_multiplier": 1.2,
        }
        resp = client.put("/api/preferences/accessibility", json=payload)
        assert resp.status_code in (200, 404)
        if resp.status_code == 200:
            body = resp.json()
            assert "status" in body
            assert "updated_settings" in body

    @patch('src.server.fastapi_app.get_current_user')
    def test_reset_accessibility_settings(self, user_stub, client):
        """Settings can be reverted to their defaults."""
        user_stub.return_value = {"user_id": "test_user"}
        resp = client.post("/api/preferences/accessibility/reset")
        assert resp.status_code in (200, 404)
        if resp.status_code == 200:
            body = resp.json()
            assert "status" in body
            assert body["status"] == "reset"
class TestKeyboardShortcuts:
    """Tests for the keyboard shortcut endpoints."""

    def test_get_shortcuts_requires_auth(self, client):
        """Listing shortcuts without credentials must return 401."""
        resp = client.get("/api/preferences/shortcuts")
        assert resp.status_code == 401

    @patch('src.server.fastapi_app.get_current_user')
    def test_get_keyboard_shortcuts(self, user_stub, client):
        """The shortcut map is returned as a dictionary."""
        user_stub.return_value = {"user_id": "test_user"}
        resp = client.get("/api/preferences/shortcuts")
        # Endpoint may not be implemented yet, so 404 is tolerated.
        assert resp.status_code in (200, 404)
        if resp.status_code == 200:
            body = resp.json()
            assert "shortcuts" in body
            assert isinstance(body["shortcuts"], dict)

    @patch('src.server.fastapi_app.get_current_user')
    def test_update_keyboard_shortcut(self, user_stub, client):
        """A single key binding can be reassigned."""
        user_stub.return_value = {"user_id": "test_user"}
        payload = {
            "action": "search",
            "shortcut": "Ctrl+K",
            "description": "Open search",
        }
        resp = client.put("/api/preferences/shortcuts", json=payload)
        assert resp.status_code in (200, 404)
        if resp.status_code == 200:
            body = resp.json()
            assert "status" in body
            assert "shortcut" in body

    @patch('src.server.fastapi_app.get_current_user')
    def test_reset_shortcuts_to_default(self, user_stub, client):
        """All bindings can be restored to defaults."""
        user_stub.return_value = {"user_id": "test_user"}
        resp = client.post("/api/preferences/shortcuts/reset")
        assert resp.status_code in (200, 404)

    def test_invalid_shortcut_format(self, client, auth_headers):
        """A malformed key combination is rejected with a client error."""
        resp = client.put(
            "/api/preferences/shortcuts",
            json={"action": "search", "shortcut": "InvalidKey++"},
            headers=auth_headers,
        )
        assert resp.status_code in (400, 404, 422)
class TestUIDensitySettings:
    """Tests for UI density and view configuration endpoints."""

    def test_get_ui_settings_requires_auth(self, client):
        """Reading UI settings without credentials must return 401."""
        resp = client.get("/api/preferences/ui")
        assert resp.status_code == 401

    @patch('src.server.fastapi_app.get_current_user')
    def test_get_ui_density_settings(self, user_stub, client):
        """All documented UI settings are present in the payload."""
        user_stub.return_value = {"user_id": "test_user"}
        resp = client.get("/api/preferences/ui")
        # Endpoint may not be implemented yet, so 404 is tolerated.
        assert resp.status_code in (200, 404)
        if resp.status_code == 200:
            body = resp.json()
            for key in ("density", "view_mode", "grid_columns",
                        "show_thumbnails", "compact_mode"):
                assert key in body

    @patch('src.server.fastapi_app.get_current_user')
    def test_set_view_mode(self, user_stub, client):
        """Grid view with a column count and thumbnails can be selected."""
        user_stub.return_value = {"user_id": "test_user"}
        payload = {"view_mode": "grid", "grid_columns": 4, "show_thumbnails": True}
        resp = client.post("/api/preferences/ui/view-mode", json=payload)
        assert resp.status_code in (200, 404)
        if resp.status_code == 200:
            body = resp.json()
            assert "status" in body
            assert "view_mode" in body

    @patch('src.server.fastapi_app.get_current_user')
    def test_set_ui_density(self, user_stub, client):
        """Density can be set (compact, comfortable, spacious)."""
        user_stub.return_value = {"user_id": "test_user"}
        payload = {"density": "comfortable", "compact_mode": False}
        resp = client.post("/api/preferences/ui/density", json=payload)
        assert resp.status_code in (200, 404)

    @patch('src.server.fastapi_app.get_current_user')
    def test_update_grid_settings(self, user_stub, client):
        """Grid layout details can be updated in one PUT."""
        user_stub.return_value = {"user_id": "test_user"}
        payload = {
            "columns": 6,
            "thumbnail_size": "medium",
            "show_titles": True,
            "show_episode_count": True,
        }
        resp = client.put("/api/preferences/ui/grid", json=payload)
        assert resp.status_code in (200, 404)

    def test_invalid_view_mode(self, client, auth_headers):
        """An unknown view mode is rejected with a client error."""
        resp = client.post(
            "/api/preferences/ui/view-mode",
            json={"view_mode": "invalid_mode"},
            headers=auth_headers,
        )
        assert resp.status_code in (400, 404, 422)
class TestPreferencesIntegration:
    """Integration-level checks for the preferences API as a whole."""

    @patch('src.server.fastapi_app.get_current_user')
    def test_get_all_preferences(self, user_stub, client):
        """The combined preferences document contains every section."""
        user_stub.return_value = {"user_id": "test_user"}
        resp = client.get("/api/preferences")
        # Endpoint may not be implemented yet, so 404 is tolerated.
        assert resp.status_code in (200, 404)
        if resp.status_code == 200:
            body = resp.json()
            for section in ("theme", "language", "accessibility",
                            "shortcuts", "ui_settings"):
                assert section in body

    @patch('src.server.fastapi_app.get_current_user')
    def test_bulk_update_preferences(self, user_stub, client):
        """Several sections can be updated in a single request."""
        user_stub.return_value = {"user_id": "test_user"}
        payload = {
            "theme": {"name": "dark"},
            "language": {"code": "en"},
            "accessibility": {"high_contrast": True},
            "ui_settings": {"view_mode": "list", "density": "compact"},
        }
        resp = client.put("/api/preferences", json=payload)
        assert resp.status_code in (200, 404)
        if resp.status_code == 200:
            body = resp.json()
            assert "status" in body
            assert "updated_sections" in body

    @patch('src.server.fastapi_app.get_current_user')
    def test_export_preferences(self, user_stub, client):
        """Export yields either JSON or a downloadable file."""
        user_stub.return_value = {"user_id": "test_user"}
        resp = client.get("/api/preferences/export")
        assert resp.status_code in (200, 404)
        if resp.status_code == 200:
            assert resp.headers.get("content-type") in (
                "application/json",
                "application/octet-stream",
            )

    @patch('src.server.fastapi_app.get_current_user')
    def test_import_preferences(self, user_stub, client):
        """A previously exported preferences document can be imported."""
        user_stub.return_value = {"user_id": "test_user"}
        payload = {
            "theme": {"name": "light"},
            "language": {"code": "de"},
            "ui_settings": {"view_mode": "grid"},
        }
        resp = client.post("/api/preferences/import", json=payload)
        assert resp.status_code in (200, 404)

    @patch('src.server.fastapi_app.get_current_user')
    def test_reset_all_preferences(self, user_stub, client):
        """All preferences can be reverted to defaults."""
        user_stub.return_value = {"user_id": "test_user"}
        resp = client.post("/api/preferences/reset")
        assert resp.status_code in (200, 404)
        if resp.status_code == 200:
            body = resp.json()
            assert "status" in body
            assert body["status"] == "reset"
class TestPreferencesValidation:
    """Validation checks for malformed preference payloads."""

    def test_theme_validation(self, client, auth_headers):
        """Non-color values in a custom theme are rejected."""
        payload = {
            "colors": {
                "primary": "not_a_color",  # not a color at all
                "background": "#xyz",  # malformed hex triplet
            }
        }
        resp = client.post("/api/preferences/themes/custom", json=payload, headers=auth_headers)
        assert resp.status_code in (400, 404, 422)

    def test_accessibility_validation(self, client, auth_headers):
        """Out-of-range and wrongly typed accessibility values are rejected."""
        payload = {
            "font_size_multiplier": -1,  # must be positive
            "high_contrast": "not_boolean",  # must be a bool
        }
        resp = client.put("/api/preferences/accessibility", json=payload, headers=auth_headers)
        assert resp.status_code in (400, 404, 422)

    def test_ui_settings_validation(self, client, auth_headers):
        """Zero columns and unknown density values are rejected."""
        payload = {
            "grid_columns": 0,  # at least one column required
            "density": "invalid_density",  # not in the allowed set
        }
        resp = client.post("/api/preferences/ui/density", json=payload, headers=auth_headers)
        assert resp.status_code in (400, 404, 422)
# Allow running this module directly; delegates to pytest's CLI in verbose mode.
if __name__ == "__main__":
    pytest.main([__file__, "-v"])

View File

@@ -1,3 +0,0 @@
# Smoke test: fails at import time if the middleware module is broken.
from src.server.web.middleware.fastapi_auth_middleware_new import AuthMiddleware
print("Success importing AuthMiddleware")

View File

@@ -1,378 +0,0 @@
"""
Test application flow and setup functionality.
Tests for the application flow enforcement: setup → auth → main application.
"""
import json
import os
# Add parent directories to path for imports
import sys
from pathlib import Path
from unittest.mock import MagicMock, patch
import pytest
from fastapi.testclient import TestClient
# Deliberate E402 below: the repository root must be on sys.path before the
# src.* imports can resolve when the tests run from this directory.
sys.path.insert(0, os.path.join(os.path.dirname(__file__), '../..'))
from src.server.fastapi_app import app
from src.server.services.setup_service import SetupService
class TestApplicationFlow:
    """Test cases for application flow enforcement (setup → auth → main)."""

    def setup_method(self):
        """Set up test environment before each test."""
        # follow_redirects=False so the 302 responses under test stay visible.
        self.client = TestClient(app, follow_redirects=False)
        self.test_config_path = "test_config.json"
        self.test_db_path = "test_db.db"

    def teardown_method(self):
        """Clean up after each test."""
        # Remove test files
        for path in [self.test_config_path, self.test_db_path]:
            if os.path.exists(path):
                os.unlink(path)

    def test_setup_page_displayed_when_configuration_missing(self):
        """Test that setup page is displayed when configuration is missing."""
        with patch.object(SetupService, 'is_setup_complete', return_value=False):
            response = self.client.get("/")
            assert response.status_code == 302
            assert response.headers["location"] == "/setup"

    def test_setup_page_form_submission_creates_valid_configuration(self):
        """Test that setup page form submission creates valid configuration."""
        setup_data = {
            "password": "test_password_123",
            "directory": "/test/anime/directory"
        }
        # Path.mkdir / is_absolute are patched so no real directories are touched.
        with patch.object(SetupService, 'is_setup_complete', return_value=False), \
             patch.object(SetupService, 'mark_setup_complete', return_value=True), \
             patch('pathlib.Path.mkdir'), \
             patch('pathlib.Path.is_absolute', return_value=True):
            response = self.client.post("/api/auth/setup", json=setup_data)
            assert response.status_code == 200
            data = response.json()
            assert data["status"] == "success"
            assert data["message"] == "Setup completed successfully"
            assert data["redirect_url"] == "/login"

    def test_setup_page_redirects_to_auth_after_successful_setup(self):
        """Test that setup page redirects to auth page after successful setup."""
        setup_data = {
            "password": "test_password_123",
            "directory": "/test/anime/directory"
        }
        with patch.object(SetupService, 'is_setup_complete', return_value=False), \
             patch.object(SetupService, 'mark_setup_complete', return_value=True), \
             patch('pathlib.Path.mkdir'), \
             patch('pathlib.Path.is_absolute', return_value=True):
            response = self.client.post("/api/auth/setup", json=setup_data)
            data = response.json()
            assert data["redirect_url"] == "/login"

    def test_setup_page_validation_for_required_fields(self):
        """Test that setup page validates required fields."""
        # Test missing password
        response = self.client.post("/api/auth/setup", json={"directory": "/test"})
        assert response.status_code == 422  # Validation error
        # Test missing directory
        response = self.client.post("/api/auth/setup", json={"password": "test123"})
        assert response.status_code == 422  # Validation error
        # Test password too short
        response = self.client.post("/api/auth/setup", json={
            "password": "short",
            "directory": "/test"
        })
        assert response.status_code == 422  # Validation error

    def test_setup_page_handles_database_connection_errors_gracefully(self):
        """Test that setup page handles database connection errors gracefully."""
        setup_data = {
            "password": "test_password_123",
            "directory": "/test/anime/directory"
        }
        # mark_setup_complete returning False models a failed config save.
        with patch.object(SetupService, 'is_setup_complete', return_value=False), \
             patch.object(SetupService, 'mark_setup_complete', return_value=False), \
             patch('pathlib.Path.mkdir'), \
             patch('pathlib.Path.is_absolute', return_value=True):
            response = self.client.post("/api/auth/setup", json=setup_data)
            assert response.status_code == 200
            data = response.json()
            assert data["status"] == "error"
            assert "Failed to save configuration" in data["message"]

    def test_setup_completion_flag_properly_set(self):
        """Test that setup completion flag is properly set in configuration."""
        service = SetupService("test_config.json", "test_db.db")
        # Create mock config data
        config_data = {"test": "data"}
        with patch.object(service, 'get_config', return_value=config_data), \
             patch.object(service, '_save_config', return_value=True) as mock_save:
            result = service.mark_setup_complete()
            assert result is True
            # Verify save was called with setup completion data
            mock_save.assert_called_once()
            saved_config = mock_save.call_args[0][0]
            assert saved_config["setup"]["completed"] is True
            assert "completed_at" in saved_config["setup"]
class TestAuthenticationFlow:
    """Test cases for authentication flow."""

    def setup_method(self):
        """Set up test environment before each test."""
        # follow_redirects=False keeps the 302 responses visible to assertions.
        self.client = TestClient(app, follow_redirects=False)

    def test_auth_page_displayed_when_token_invalid(self):
        """Test that auth page is displayed when authentication token is invalid."""
        with patch.object(SetupService, 'is_setup_complete', return_value=True):
            # Request with invalid token
            headers = {"Authorization": "Bearer invalid_token"}
            response = self.client.get("/app", headers=headers)
            # Should redirect to login due to invalid token
            assert response.status_code == 302
            assert response.headers["location"] == "/login"

    def test_auth_page_displayed_when_token_missing(self):
        """Test that auth page is displayed when authentication token is missing."""
        with patch.object(SetupService, 'is_setup_complete', return_value=True):
            response = self.client.get("/app")
            # Should redirect to login due to missing token
            assert response.status_code == 302
            assert response.headers["location"] == "/login"

    def test_successful_login_creates_valid_token(self):
        """Test that successful login creates a valid authentication token."""
        login_data = {"password": "test_password"}
        with patch('src.server.fastapi_app.verify_master_password', return_value=True):
            response = self.client.post("/auth/login", json=login_data)
            assert response.status_code == 200
            data = response.json()
            assert data["success"] is True
            assert "token" in data
            assert data["token"] is not None
            assert "expires_at" in data

    def test_failed_login_shows_error_message(self):
        """Test that failed login shows appropriate error messages."""
        login_data = {"password": "wrong_password"}
        with patch('src.server.fastapi_app.verify_master_password', return_value=False):
            response = self.client.post("/auth/login", json=login_data)
            assert response.status_code == 401
            data = response.json()
            assert "Invalid master password" in data["detail"]

    def test_auth_page_redirects_to_main_after_authentication(self):
        """Test that auth page redirects to main application after successful authentication."""
        with patch.object(SetupService, 'is_setup_complete', return_value=True):
            # Simulate authenticated request
            with patch('src.server.fastapi_app.verify_jwt_token') as mock_verify:
                mock_verify.return_value = {"user": "master", "exp": 9999999999}
                response = self.client.get("/login", headers={"Authorization": "Bearer valid_token"})
                assert response.status_code == 302
                assert response.headers["location"] == "/app"

    def test_token_validation_middleware_correctly_identifies_tokens(self):
        """Test that token validation middleware correctly identifies valid/invalid tokens."""
        # Test valid token
        with patch('src.server.fastapi_app.verify_jwt_token') as mock_verify:
            mock_verify.return_value = {"user": "master", "exp": 9999999999}
            response = self.client.get("/auth/verify", headers={"Authorization": "Bearer valid_token"})
            assert response.status_code == 200
            data = response.json()
            assert data["valid"] is True
            assert data["user"] == "master"
        # Test invalid token
        with patch('src.server.fastapi_app.verify_jwt_token') as mock_verify:
            mock_verify.return_value = None
            response = self.client.get("/auth/verify", headers={"Authorization": "Bearer invalid_token"})
            assert response.status_code == 401

    def test_token_refresh_functionality(self):
        """Test token refresh functionality."""
        # This would test automatic token refresh if implemented
        # For now, just test that tokens have expiration
        login_data = {"password": "test_password"}
        with patch('src.server.fastapi_app.verify_master_password', return_value=True):
            response = self.client.post("/auth/login", json=login_data)
            data = response.json()
            assert "expires_at" in data
            assert data["expires_at"] is not None

    def test_session_expiration_handling(self):
        """Test session expiration handling."""
        # Test with expired token
        with patch('src.server.fastapi_app.verify_jwt_token') as mock_verify:
            mock_verify.return_value = None  # Simulates expired token
            response = self.client.get("/auth/verify", headers={"Authorization": "Bearer expired_token"})
            assert response.status_code == 401
class TestMainApplicationAccess:
"""Test cases for main application access."""
def setup_method(self):
"""Set up test environment before each test."""
self.client = TestClient(app, follow_redirects=False)
def test_index_served_when_authentication_valid(self):
"""Test that index.html is served when authentication is valid."""
with patch.object(SetupService, 'is_setup_complete', return_value=True), \
patch('src.server.fastapi_app.verify_jwt_token') as mock_verify:
mock_verify.return_value = {"user": "master", "exp": 9999999999}
response = self.client.get("/app", headers={"Authorization": "Bearer valid_token"})
assert response.status_code == 200
assert "text/html" in response.headers.get("content-type", "")
def test_unauthenticated_users_redirected_to_auth(self):
"""Test that unauthenticated users are redirected to auth page."""
with patch.object(SetupService, 'is_setup_complete', return_value=True):
response = self.client.get("/app")
assert response.status_code == 302
assert response.headers["location"] == "/login"
def test_users_without_setup_redirected_to_setup(self):
"""Test that users without completed setup are redirected to setup page."""
with patch.object(SetupService, 'is_setup_complete', return_value=False):
response = self.client.get("/app")
assert response.status_code == 302
assert response.headers["location"] == "/setup"
def test_middleware_enforces_correct_flow_priority(self):
"""Test that middleware enforces correct flow priority (setup → auth → main)."""
# Test setup takes priority over auth
with patch.object(SetupService, 'is_setup_complete', return_value=False):
response = self.client.get("/app", headers={"Authorization": "Bearer valid_token"})
assert response.status_code == 302
assert response.headers["location"] == "/setup"
# Test auth required when setup complete but not authenticated
with patch.object(SetupService, 'is_setup_complete', return_value=True):
response = self.client.get("/app")
assert response.status_code == 302
assert response.headers["location"] == "/login"
def test_authenticated_user_session_persistence(self):
"""Test authenticated user session persistence."""
with patch.object(SetupService, 'is_setup_complete', return_value=True), \
patch('src.server.fastapi_app.verify_jwt_token') as mock_verify:
mock_verify.return_value = {"user": "master", "exp": 9999999999}
# Multiple requests with same token should work
headers = {"Authorization": "Bearer valid_token"}
response1 = self.client.get("/app", headers=headers)
assert response1.status_code == 200
response2 = self.client.get("/app", headers=headers)
assert response2.status_code == 200
def test_graceful_token_expiration_during_session(self):
    """Requests made after a token expires should bounce back to /login."""
    with patch.object(SetupService, 'is_setup_complete', return_value=True), \
            patch('src.server.fastapi_app.verify_jwt_token') as token_check:
        # While the token verifies, the app is served normally.
        token_check.return_value = {"user": "master", "exp": 9999999999}
        fresh = self.client.get("/app", headers={"Authorization": "Bearer valid_token"})
        assert fresh.status_code == 200

        # Once verification fails, the middleware redirects to the login page.
        token_check.return_value = None
        stale = self.client.get("/app", headers={"Authorization": "Bearer expired_token"})
        assert stale.status_code == 302
        assert stale.headers["location"] == "/login"
class TestSetupStatusAPI:
    """Tests for the /api/auth/setup/status endpoint."""

    def setup_method(self):
        """Create a fresh, non-redirecting test client for every test."""
        self.client = TestClient(app, follow_redirects=False)

    def test_setup_status_api_returns_correct_status(self):
        """A fully configured system reports setup_complete with no gaps."""
        fulfilled = {
            "config_file_exists": True,
            "config_file_valid": True,
            "database_exists": True,
            "database_accessible": True,
            "master_password_configured": True,
            "setup_marked_complete": True,
        }
        with patch.object(SetupService, 'is_setup_complete', return_value=True), \
                patch.object(SetupService, 'get_setup_requirements',
                             return_value=fulfilled), \
                patch.object(SetupService, 'get_missing_requirements',
                             return_value=[]):
            reply = self.client.get("/api/auth/setup/status")
        assert reply.status_code == 200
        payload = reply.json()
        assert payload["setup_complete"] is True
        assert payload["requirements"]["config_file_exists"] is True
        assert len(payload["missing_requirements"]) == 0

    def test_setup_status_shows_missing_requirements(self):
        """An incomplete setup must enumerate every unmet requirement."""
        unmet = [
            "Configuration file is missing",
            "Master password is not configured",
        ]
        with patch.object(SetupService, 'is_setup_complete', return_value=False), \
                patch.object(SetupService, 'get_setup_requirements',
                             return_value={"config_file_exists": False,
                                           "master_password_configured": False}), \
                patch.object(SetupService, 'get_missing_requirements',
                             return_value=unmet):
            reply = self.client.get("/api/auth/setup/status")
        assert reply.status_code == 200
        payload = reply.json()
        assert payload["setup_complete"] is False
        for message in unmet:
            assert message in payload["missing_requirements"]
if __name__ == "__main__":
    # Allow running this test module directly: invokes pytest verbosely on this file.
    pytest.main([__file__, "-v"])

View File

@@ -1,14 +0,0 @@
# Manual smoke test for the authenticated anime-search endpoint.
# NOTE(review): assumes the server runs at 127.0.0.1:8000 with master password
# "admin123" — confirm against the local dev configuration.
# Step 1: log in and extract the bearer token from the JSON response.
$loginResponse = Invoke-WebRequest -Uri "http://127.0.0.1:8000/auth/login" -Method POST -ContentType "application/json" -Body '{"password": "admin123"}'
$loginData = $loginResponse.Content | ConvertFrom-Json
$token = $loginData.token
Write-Host "Token: $token"
# Test the anime search with authentication
$headers = @{
    "Authorization" = "Bearer $token"
    "Content-Type" = "application/json"
}
$searchResponse = Invoke-WebRequest -Uri "http://127.0.0.1:8000/api/anime/search?query=naruto" -Headers $headers
Write-Host "Search Response:"
Write-Host $searchResponse.Content

View File

@@ -1,35 +0,0 @@
# Test complete authentication flow
# NOTE(review): assumes the server runs at 127.0.0.1:8000 with master password
# "admin123" — confirm against the local dev configuration.
# Step 1: Login
Write-Host "=== Testing Login ==="
$loginResponse = Invoke-WebRequest -Uri "http://127.0.0.1:8000/auth/login" -Method POST -ContentType "application/json" -Body '{"password": "admin123"}'
$loginData = $loginResponse.Content | ConvertFrom-Json
$token = $loginData.token
Write-Host "Login successful. Token received: $($token.Substring(0,20))..."
# Step 2: Verify token
Write-Host "`n=== Testing Token Verification ==="
$headers = @{ "Authorization" = "Bearer $token" }
$verifyResponse = Invoke-WebRequest -Uri "http://127.0.0.1:8000/auth/verify" -Headers $headers
Write-Host "Token verification response: $($verifyResponse.Content)"
# Step 3: Test protected endpoint
Write-Host "`n=== Testing Protected Endpoint ==="
$authStatusResponse = Invoke-WebRequest -Uri "http://127.0.0.1:8000/api/auth/status" -Headers $headers
Write-Host "Auth status response: $($authStatusResponse.Content)"
# Step 4: Logout
Write-Host "`n=== Testing Logout ==="
$logoutResponse = Invoke-WebRequest -Uri "http://127.0.0.1:8000/auth/logout" -Method POST -Headers $headers
Write-Host "Logout response: $($logoutResponse.Content)"
# Step 5: Test expired/invalid token
# Invoke-WebRequest throws on non-success status codes, so rejection is
# observed via the catch block rather than by inspecting the status code.
Write-Host "`n=== Testing Invalid Token ==="
try {
    $invalidResponse = Invoke-WebRequest -Uri "http://127.0.0.1:8000/auth/verify" -Headers @{ "Authorization" = "Bearer invalid_token" }
    Write-Host "Invalid token response: $($invalidResponse.Content)"
} catch {
    Write-Host "Invalid token correctly rejected: $($_.Exception.Message)"
}
Write-Host "`n=== Authentication Flow Test Complete ==="

View File

@@ -1,17 +0,0 @@
# Test database connectivity
# NOTE(review): assumes the server runs at 127.0.0.1:8000 with master password
# "admin123" — confirm against the local dev configuration.
# Get token
$loginResponse = Invoke-WebRequest -Uri "http://127.0.0.1:8000/auth/login" -Method POST -ContentType "application/json" -Body '{"password": "admin123"}'
$loginData = $loginResponse.Content | ConvertFrom-Json
$token = $loginData.token
# Test database health
$headers = @{ "Authorization" = "Bearer $token" }
$dbHealthResponse = Invoke-WebRequest -Uri "http://127.0.0.1:8000/api/system/database/health" -Headers $headers
Write-Host "Database Health Response:"
Write-Host $dbHealthResponse.Content
# Test system config
$configResponse = Invoke-WebRequest -Uri "http://127.0.0.1:8000/api/system/config" -Headers $headers
Write-Host "`nSystem Config Response:"
Write-Host $configResponse.Content

View File

@@ -1,15 +0,0 @@
import os
import sys

# Make the repository root importable when the script is run from the repo root.
sys.path.insert(0, os.path.abspath('.'))

try:
    from src.server.fastapi_app import app
except Exception as e:
    # Report the failure and the full traceback, but keep the script alive.
    print(f"✗ Error importing FastAPI app: {e}")
    import traceback
    traceback.print_exc()
else:
    print("✓ FastAPI app imported successfully")

print("Test completed.")

View File

@@ -1,22 +0,0 @@
#!/usr/bin/env python3
"""Smoke-check that each FastAPI middleware class can be imported.

The original script repeated the same try/except block three times; the
checks are now driven by a single table, so adding a middleware is a
one-line change. Printed messages are unchanged.
"""
from importlib import import_module

# (module path, class name, human-readable label) for every middleware checked.
_MIDDLEWARES = [
    ("src.server.web.middleware.fastapi_auth_middleware",
     "AuthMiddleware", "Auth middleware"),
    ("src.server.web.middleware.fastapi_logging_middleware",
     "EnhancedLoggingMiddleware", "Logging middleware"),
    ("src.server.web.middleware.fastapi_validation_middleware",
     "ValidationMiddleware", "Validation middleware"),
]

for module_path, class_name, label in _MIDDLEWARES:
    try:
        # getattr raises AttributeError if the class is missing from the
        # module, mirroring the original "from module import Class" failure.
        getattr(import_module(module_path), class_name)
        print(f"{label} imported successfully")
    except Exception as e:
        print(f"Error importing {label.lower()}: {e}")

Some files were not shown because too many files have changed in this diff Show More