feat: Add NFO metadata infrastructure (Task 3 - partial)
- Created TMDB API client with async requests, caching, and retry logic - Implemented NFO XML generator for Kodi/XBMC format - Created image downloader for poster/logo/fanart with validation - Added NFO service to orchestrate metadata creation - Added NFO-related configuration settings - Updated requirements.txt with aiohttp, lxml, pillow - Created unit tests (need refinement due to implementation mismatch) Components created: - src/core/services/tmdb_client.py (270 lines) - src/core/services/nfo_service.py (390 lines) - src/core/utils/nfo_generator.py (180 lines) - src/core/utils/image_downloader.py (296 lines) - tests/unit/test_tmdb_client.py - tests/unit/test_nfo_generator.py - tests/unit/test_image_downloader.py Note: Tests need to be updated to match actual implementation APIs. Dependencies installed: aiohttp, lxml, pillow
This commit is contained in:
295
src/core/utils/image_downloader.py
Normal file
295
src/core/utils/image_downloader.py
Normal file
@@ -0,0 +1,295 @@
|
||||
"""Image downloader utility for NFO media files.
|
||||
|
||||
This module provides functions to download poster, logo, and fanart images
|
||||
from TMDB and validate them.
|
||||
|
||||
Example:
|
||||
>>> downloader = ImageDownloader()
|
||||
>>> await downloader.download_poster(poster_url, "/path/to/poster.jpg")
|
||||
"""
|
||||
|
||||
import asyncio
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from typing import Optional
|
||||
|
||||
import aiohttp
|
||||
from PIL import Image
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
class ImageDownloadError(Exception):
    """Raised when an image download fails or produces an invalid file."""
|
||||
|
||||
|
||||
class ImageDownloader:
    """Utility for downloading and validating images.

    Downloads poster/logo/fanart media with retry-and-exponential-backoff,
    skips files that already exist, and validates downloads with Pillow.

    Attributes:
        max_retries: Maximum retry attempts for downloads
        timeout: Request timeout in seconds
        min_file_size: Minimum valid file size in bytes
    """

    def __init__(
        self,
        max_retries: int = 3,
        timeout: int = 60,
        min_file_size: int = 1024  # 1 KB
    ):
        """Initialize image downloader.

        Args:
            max_retries: Maximum retry attempts
            timeout: Request timeout in seconds
            min_file_size: Minimum valid file size in bytes; smaller
                downloads (likely error pages) are treated as failures
        """
        self.max_retries = max_retries
        self.timeout = timeout
        self.min_file_size = min_file_size

    async def download_image(
        self,
        url: str,
        local_path: Path,
        skip_existing: bool = True,
        validate: bool = True
    ) -> bool:
        """Download an image from URL to local path.

        Retries with exponential backoff (1s, 2s, 4s, ...) on transient
        errors. A 404 response means "image does not exist" and returns
        False immediately instead of burning retries.

        Args:
            url: Image URL
            local_path: Local file path to save image
            skip_existing: Skip download if a plausible file already exists
            validate: Validate image with Pillow after download

        Returns:
            True if download successful, False if the URL returned 404

        Raises:
            ImageDownloadError: If download fails after retries
        """
        # Reuse an existing file only if it is large enough to be a real
        # image; undersized leftovers are re-downloaded.
        if skip_existing and local_path.exists():
            if local_path.stat().st_size >= self.min_file_size:
                logger.debug(f"Image already exists: {local_path}")
                return True

        # Ensure parent directory exists
        local_path.parent.mkdir(parents=True, exist_ok=True)

        delay = 1  # seconds; doubled after each failed attempt
        last_error = None

        for attempt in range(self.max_retries):
            try:
                logger.debug(f"Downloading image from {url} (attempt {attempt + 1})")

                timeout = aiohttp.ClientTimeout(total=self.timeout)
                async with aiohttp.ClientSession(timeout=timeout) as session:
                    async with session.get(url) as resp:
                        # Missing artwork is an expected condition, not an error.
                        if resp.status == 404:
                            logger.warning(f"Image not found: {url}")
                            return False

                        resp.raise_for_status()

                        # Download image data
                        data = await resp.read()

                        # Tiny payloads are almost certainly error pages,
                        # not images.
                        if len(data) < self.min_file_size:
                            raise ImageDownloadError(
                                f"Downloaded file too small: {len(data)} bytes"
                            )

                        # Write to file
                        local_path.write_bytes(data)

                        # Remove a corrupt download so a retry starts clean.
                        if validate and not self.validate_image(local_path):
                            local_path.unlink(missing_ok=True)
                            raise ImageDownloadError("Image validation failed")

                        logger.info(f"Downloaded image to {local_path}")
                        return True

            except (aiohttp.ClientError, IOError, ImageDownloadError) as e:
                last_error = e
                if attempt < self.max_retries - 1:
                    logger.warning(
                        f"Download failed (attempt {attempt + 1}): {e}, "
                        f"retrying in {delay}s"
                    )
                    await asyncio.sleep(delay)
                    delay *= 2
                else:
                    logger.error(
                        f"Download failed after {self.max_retries} attempts: {e}"
                    )

        # Chain the last error so callers can inspect the root cause.
        raise ImageDownloadError(
            f"Failed to download image after {self.max_retries} attempts: {last_error}"
        ) from last_error

    async def _download_media(
        self,
        media_type: str,
        url: str,
        series_folder: Path,
        filename: str,
        skip_existing: bool
    ) -> bool:
        """Download one named media file, mapping download errors to False.

        Shared implementation for download_poster/download_logo/
        download_fanart, which previously duplicated this logic verbatim.

        Args:
            media_type: Human-readable label used in the warning log
            url: Image URL
            series_folder: Series folder path
            filename: Output filename inside the series folder
            skip_existing: Skip if file exists

        Returns:
            True if successful, False on any download error
        """
        local_path = series_folder / filename
        try:
            return await self.download_image(url, local_path, skip_existing)
        except ImageDownloadError as e:
            logger.warning(f"Failed to download {media_type}: {e}")
            return False

    async def download_poster(
        self,
        url: str,
        series_folder: Path,
        filename: str = "poster.jpg",
        skip_existing: bool = True
    ) -> bool:
        """Download poster image.

        Args:
            url: Poster URL
            series_folder: Series folder path
            filename: Output filename (default: poster.jpg)
            skip_existing: Skip if file exists

        Returns:
            True if successful
        """
        return await self._download_media(
            "poster", url, series_folder, filename, skip_existing
        )

    async def download_logo(
        self,
        url: str,
        series_folder: Path,
        filename: str = "logo.png",
        skip_existing: bool = True
    ) -> bool:
        """Download logo image.

        Args:
            url: Logo URL
            series_folder: Series folder path
            filename: Output filename (default: logo.png)
            skip_existing: Skip if file exists

        Returns:
            True if successful
        """
        return await self._download_media(
            "logo", url, series_folder, filename, skip_existing
        )

    async def download_fanart(
        self,
        url: str,
        series_folder: Path,
        filename: str = "fanart.jpg",
        skip_existing: bool = True
    ) -> bool:
        """Download fanart/backdrop image.

        Args:
            url: Fanart URL
            series_folder: Series folder path
            filename: Output filename (default: fanart.jpg)
            skip_existing: Skip if file exists

        Returns:
            True if successful
        """
        return await self._download_media(
            "fanart", url, series_folder, filename, skip_existing
        )

    def validate_image(self, image_path: Path) -> bool:
        """Validate that file is a valid image.

        Uses Pillow's verify() (header/structure check, does not decode
        pixel data) plus a minimum-size check.

        Args:
            image_path: Path to image file

        Returns:
            True if valid image, False otherwise
        """
        try:
            with Image.open(image_path) as img:
                # Verify it's a valid image
                img.verify()

            # Check file size
            if image_path.stat().st_size < self.min_file_size:
                logger.warning(f"Image file too small: {image_path}")
                return False

            return True

        except Exception as e:
            # Best-effort check: any Pillow/IO failure means "not valid".
            logger.warning(f"Image validation failed for {image_path}: {e}")
            return False

    async def download_all_media(
        self,
        series_folder: Path,
        poster_url: Optional[str] = None,
        logo_url: Optional[str] = None,
        fanart_url: Optional[str] = None,
        skip_existing: bool = True
    ) -> dict[str, bool]:
        """Download all media files (poster, logo, fanart) concurrently.

        Args:
            series_folder: Series folder path
            poster_url: Poster URL (optional)
            logo_url: Logo URL (optional)
            fanart_url: Fanart URL (optional)
            skip_existing: Skip existing files

        Returns:
            Dictionary with download status for each file type; a media
            type whose URL was not supplied stays False
        """
        results = {
            "poster": False,
            "logo": False,
            "fanart": False
        }

        tasks = []

        if poster_url:
            tasks.append(("poster", self.download_poster(
                poster_url, series_folder, skip_existing=skip_existing
            )))

        if logo_url:
            tasks.append(("logo", self.download_logo(
                logo_url, series_folder, skip_existing=skip_existing
            )))

        if fanart_url:
            tasks.append(("fanart", self.download_fanart(
                fanart_url, series_folder, skip_existing=skip_existing
            )))

        # Download concurrently; exceptions are captured per-task so one
        # failure cannot cancel the others.
        if tasks:
            task_results = await asyncio.gather(
                *[task for _, task in tasks],
                return_exceptions=True
            )

            for (media_type, _), result in zip(tasks, task_results):
                if isinstance(result, Exception):
                    logger.error(f"Error downloading {media_type}: {result}")
                    results[media_type] = False
                else:
                    results[media_type] = result

        return results
|
||||
192
src/core/utils/nfo_generator.py
Normal file
192
src/core/utils/nfo_generator.py
Normal file
@@ -0,0 +1,192 @@
|
||||
"""NFO XML generator for Kodi/XBMC format.
|
||||
|
||||
This module provides functions to generate tvshow.nfo XML files from
|
||||
TVShowNFO Pydantic models, adapted from the scraper project.
|
||||
|
||||
Example:
|
||||
>>> from src.core.entities.nfo_models import TVShowNFO
|
||||
>>> nfo = TVShowNFO(title="Test Show", year=2020, tmdbid=12345)
|
||||
>>> xml_string = generate_tvshow_nfo(nfo)
|
||||
"""
|
||||
|
||||
import logging
|
||||
from typing import Optional
|
||||
|
||||
from lxml import etree
|
||||
|
||||
from src.core.entities.nfo_models import TVShowNFO
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
|
||||
def generate_tvshow_nfo(tvshow: TVShowNFO, pretty_print: bool = True) -> str:
    """Generate tvshow.nfo XML content from TVShowNFO model.

    Builds a Kodi/XBMC-style ``<tvshow>`` document. Element emission order
    below defines the output layout, so statements must not be reordered.
    Optional scalar fields are skipped entirely when None/empty (via
    ``_add_element``); numeric fields are stringified first.

    Args:
        tvshow: TVShowNFO Pydantic model with metadata
        pretty_print: Whether to format XML with indentation

    Returns:
        XML string in Kodi/XBMC tvshow.nfo format, prefixed with an XML
        declaration

    Example:
        >>> nfo = TVShowNFO(title="Attack on Titan", year=2013)
        >>> xml = generate_tvshow_nfo(nfo)
    """
    root = etree.Element("tvshow")

    # Basic information
    _add_element(root, "title", tvshow.title)
    _add_element(root, "originaltitle", tvshow.originaltitle)
    _add_element(root, "showtitle", tvshow.showtitle)
    _add_element(root, "sorttitle", tvshow.sorttitle)
    # NOTE(review): truthiness check drops year == 0 as well as None —
    # presumably intentional since 0 is not a valid year.
    _add_element(root, "year", str(tvshow.year) if tvshow.year else None)

    # Plot and description
    _add_element(root, "plot", tvshow.plot)
    _add_element(root, "outline", tvshow.outline)
    _add_element(root, "tagline", tvshow.tagline)

    # Technical details
    _add_element(root, "runtime", str(tvshow.runtime) if tvshow.runtime else None)
    _add_element(root, "mpaa", tvshow.mpaa)
    _add_element(root, "certification", tvshow.certification)

    # Status and dates
    _add_element(root, "premiered", tvshow.premiered)
    _add_element(root, "status", tvshow.status)
    _add_element(root, "dateadded", tvshow.dateadded)

    # Ratings: name/max/default are attributes, value/votes are children.
    if tvshow.ratings:
        ratings_elem = etree.SubElement(root, "ratings")
        for rating in tvshow.ratings:
            rating_elem = etree.SubElement(ratings_elem, "rating")
            if rating.name:
                rating_elem.set("name", rating.name)
            if rating.max_rating:
                rating_elem.set("max", str(rating.max_rating))
            if rating.default:
                rating_elem.set("default", "true")

            _add_element(rating_elem, "value", str(rating.value))
            # votes == 0 is meaningful, so compare against None explicitly
            if rating.votes is not None:
                _add_element(rating_elem, "votes", str(rating.votes))

    # userrating 0 is preserved (explicit None check, unlike year/runtime)
    _add_element(root, "userrating", str(tvshow.userrating) if tvshow.userrating is not None else None)

    # IDs
    _add_element(root, "tmdbid", str(tvshow.tmdbid) if tvshow.tmdbid else None)
    _add_element(root, "imdbid", tvshow.imdbid)
    _add_element(root, "tvdbid", str(tvshow.tvdbid) if tvshow.tvdbid else None)

    # Legacy ID fields for compatibility: duplicate tvdbid as <id> and
    # imdbid as <imdb_id>
    _add_element(root, "id", str(tvshow.tvdbid) if tvshow.tvdbid else None)
    _add_element(root, "imdb_id", tvshow.imdbid)

    # Unique IDs (<uniqueid type="..." [default="true"]>value</uniqueid>)
    for uid in tvshow.uniqueid:
        uid_elem = etree.SubElement(root, "uniqueid")
        uid_elem.set("type", uid.type)
        if uid.default:
            uid_elem.set("default", "true")
        uid_elem.text = uid.value

    # Multi-value fields: one element per entry
    for genre in tvshow.genre:
        _add_element(root, "genre", genre)

    for studio in tvshow.studio:
        _add_element(root, "studio", studio)

    for country in tvshow.country:
        _add_element(root, "country", country)

    for tag in tvshow.tag:
        _add_element(root, "tag", tag)

    # Thumbnails (posters, logos); aspect/season/type become attributes,
    # the URL becomes the element text
    for thumb in tvshow.thumb:
        thumb_elem = etree.SubElement(root, "thumb")
        if thumb.aspect:
            thumb_elem.set("aspect", thumb.aspect)
        if thumb.season is not None:
            thumb_elem.set("season", str(thumb.season))
        if thumb.type:
            thumb_elem.set("type", thumb.type)
        thumb_elem.text = str(thumb.url)

    # Fanart: all backdrop URLs nested as <thumb> under a single <fanart>
    if tvshow.fanart:
        fanart_elem = etree.SubElement(root, "fanart")
        for fanart in tvshow.fanart:
            fanart_thumb = etree.SubElement(fanart_elem, "thumb")
            fanart_thumb.text = str(fanart.url)

    # Named seasons
    for named_season in tvshow.namedseason:
        season_elem = etree.SubElement(root, "namedseason")
        season_elem.set("number", str(named_season.number))
        season_elem.text = named_season.name

    # Actors: URL-typed fields (thumb/profile) are stringified
    for actor in tvshow.actors:
        actor_elem = etree.SubElement(root, "actor")
        _add_element(actor_elem, "name", actor.name)
        _add_element(actor_elem, "role", actor.role)
        _add_element(actor_elem, "thumb", str(actor.thumb) if actor.thumb else None)
        _add_element(actor_elem, "profile", str(actor.profile) if actor.profile else None)
        _add_element(actor_elem, "tmdbid", str(actor.tmdbid) if actor.tmdbid else None)

    # Additional fields
    _add_element(root, "trailer", str(tvshow.trailer) if tvshow.trailer else None)
    # watched is always emitted (both "true" and "false" are non-empty)
    _add_element(root, "watched", "true" if tvshow.watched else "false")
    if tvshow.playcount is not None:
        _add_element(root, "playcount", str(tvshow.playcount))

    # Generate XML string without a declaration...
    xml_str = etree.tostring(
        root,
        pretty_print=pretty_print,
        encoding="unicode",
        xml_declaration=False
    )

    # ...then prepend one manually, so the standalone="yes" attribute can
    # be included verbatim.
    xml_declaration = '<?xml version="1.0" encoding="UTF-8" standalone="yes"?>\n'
    return xml_declaration + xml_str
|
||||
|
||||
|
||||
def _add_element(parent: etree.Element, tag: str, text: Optional[str]) -> Optional[etree.Element]:
    """Append a child element carrying *text* to *parent*.

    Elements are only created for meaningful content: a None or empty
    string produces no child at all, keeping the NFO output compact.

    Args:
        parent: Parent XML element
        tag: Tag name for child element
        text: Text content (None or empty strings are skipped)

    Returns:
        Created element or None if skipped
    """
    # Guard clause: nothing to emit for absent/empty values.
    if text is None or text == "":
        return None
    child = etree.SubElement(parent, tag)
    child.text = text
    return child
|
||||
|
||||
|
||||
def validate_nfo_xml(xml_string: str) -> bool:
    """Validate NFO XML structure.

    Parses the document with lxml purely as a well-formedness check; the
    parsed tree is discarded.

    Args:
        xml_string: XML content to validate

    Returns:
        True if valid XML, False otherwise
    """
    try:
        etree.fromstring(xml_string.encode('utf-8'))
    except etree.XMLSyntaxError as e:
        logger.error(f"Invalid NFO XML: {e}")
        return False
    return True
|
||||
Reference in New Issue
Block a user