Fix architecture issues from todolist
- Add documentation warnings for in-memory rate limiting and failed login attempts
- Consolidate duplicate health endpoints into api/health.py
- Fix CLI to use correct async rescan method names
- Update download.py and anime.py to use custom exception classes
- Add WebSocket room validation and rate limiting
parent 54790a7ebb
commit 27108aacda
@@ -1,5 +1,6 @@
 """Command-line interface for the Aniworld anime download manager."""
 
+import asyncio
 import logging
 import os
 from typing import Optional, Sequence
@@ -179,8 +180,11 @@ class SeriesCLI:
     # Rescan logic
     # ------------------------------------------------------------------
     def rescan(self) -> None:
-        """Trigger a rescan of the anime directory using the core app."""
-        total_to_scan = self.series_app.SerieScanner.get_total_to_scan()
+        """Trigger a rescan of the anime directory using the core app.
+
+        Uses the legacy file-based scan mode for CLI compatibility.
+        """
+        total_to_scan = self.series_app.serie_scanner.get_total_to_scan()
         total_to_scan = max(total_to_scan, 1)
 
         self._progress = Progress()
@@ -190,17 +194,16 @@ class SeriesCLI:
             total=total_to_scan,
         )
 
-        result = self.series_app.ReScan(
-            callback=self._wrap_scan_callback(total_to_scan)
+        # Run async rescan in sync context with file-based mode
+        asyncio.run(
+            self.series_app.rescan(use_database=False)
         )
 
         self._progress = None
         self._scan_task_id = None
 
-        if result.success:
-            print(result.message)
-        else:
-            print(f"Scan failed: {result.message}")
+        series_count = len(self.series_app.series_list)
+        print(f"Scan completed. Found {series_count} series with missing episodes.")
 
     def _wrap_scan_callback(self, total: int):
         """Create a callback that updates the scan progress bar."""
@@ -2,12 +2,18 @@ import logging
 import warnings
 from typing import Any, List, Optional
 
-from fastapi import APIRouter, Depends, HTTPException, status
+from fastapi import APIRouter, Depends, status
 from pydantic import BaseModel, Field
 from sqlalchemy.ext.asyncio import AsyncSession
 
 from src.core.entities.series import Serie
 from src.server.database.service import AnimeSeriesService
+from src.server.exceptions import (
+    BadRequestError,
+    NotFoundError,
+    ServerError,
+    ValidationError,
+)
 from src.server.services.anime_service import AnimeService, AnimeServiceError
 from src.server.utils.dependencies import (
     get_anime_service,
@@ -55,9 +61,8 @@ async def get_anime_status(
             "series_count": series_count
         }
     except Exception as exc:
-        raise HTTPException(
-            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
-            detail=f"Failed to get status: {str(exc)}",
+        raise ServerError(
+            message=f"Failed to get status: {str(exc)}"
         ) from exc
@@ -208,35 +213,30 @@ async def list_anime(
     try:
         page_num = int(page)
         if page_num < 1:
-            raise HTTPException(
-                status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-                detail="Page number must be positive"
+            raise ValidationError(
+                message="Page number must be positive"
             )
         page = page_num
     except (ValueError, TypeError):
-        raise HTTPException(
-            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-            detail="Page must be a valid number"
+        raise ValidationError(
+            message="Page must be a valid number"
         )
 
     if per_page is not None:
         try:
            per_page_num = int(per_page)
            if per_page_num < 1:
-                raise HTTPException(
-                    status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-                    detail="Per page must be positive"
+                raise ValidationError(
+                    message="Per page must be positive"
                 )
            if per_page_num > 1000:
-                raise HTTPException(
-                    status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-                    detail="Per page cannot exceed 1000"
+                raise ValidationError(
+                    message="Per page cannot exceed 1000"
                 )
            per_page = per_page_num
         except (ValueError, TypeError):
-            raise HTTPException(
-                status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-                detail="Per page must be a valid number"
+            raise ValidationError(
+                message="Per page must be a valid number"
             )
 
     # Validate sort_by parameter to prevent ORM injection
@@ -245,9 +245,8 @@ async def list_anime(
     allowed_sort_fields = ["title", "id", "missing_episodes", "name"]
     if sort_by not in allowed_sort_fields:
         allowed = ", ".join(allowed_sort_fields)
-        raise HTTPException(
-            status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-            detail=f"Invalid sort_by parameter. Allowed: {allowed}"
+        raise ValidationError(
+            message=f"Invalid sort_by parameter. Allowed: {allowed}"
         )
 
     # Validate filter parameter
@@ -260,9 +259,8 @@ async def list_anime(
     lower_filter = filter.lower()
     for pattern in dangerous_patterns:
         if pattern in lower_filter:
-            raise HTTPException(
-                status_code=status.HTTP_422_UNPROCESSABLE_ENTITY,
-                detail="Invalid filter parameter"
+            raise ValidationError(
+                message="Invalid filter parameter"
             )
 
     try:
@@ -310,12 +308,11 @@ async def list_anime(
         )
 
         return summaries
-    except HTTPException:
+    except (ValidationError, BadRequestError, NotFoundError, ServerError):
         raise
     except Exception as exc:
-        raise HTTPException(
-            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
-            detail="Failed to retrieve anime list",
+        raise ServerError(
+            message="Failed to retrieve anime list"
         ) from exc
@@ -346,14 +343,12 @@ async def trigger_rescan(
             "message": "Rescan started successfully",
         }
     except AnimeServiceError as e:
-        raise HTTPException(
-            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
-            detail=f"Rescan failed: {str(e)}",
+        raise ServerError(
+            message=f"Rescan failed: {str(e)}"
         ) from e
     except Exception as exc:
-        raise HTTPException(
-            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
-            detail="Failed to start rescan",
+        raise ServerError(
+            message="Failed to start rescan"
         ) from exc
@@ -4,9 +4,10 @@ This module provides REST API endpoints for managing the anime download queue,
 including adding episodes, removing items, controlling queue processing, and
 retrieving queue status and statistics.
 """
-from fastapi import APIRouter, Depends, HTTPException, Path, status
+from fastapi import APIRouter, Depends, Path, status
 from fastapi.responses import JSONResponse
 
+from src.server.exceptions import BadRequestError, NotFoundError, ServerError
 from src.server.models.download import (
     DownloadRequest,
     QueueOperationRequest,
@@ -52,9 +53,8 @@ async def get_queue_status(
         return response
 
     except Exception as e:
-        raise HTTPException(
-            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
-            detail=f"Failed to retrieve queue status: {str(e)}",
+        raise ServerError(
+            message=f"Failed to retrieve queue status: {str(e)}"
         )
@@ -91,9 +91,8 @@ async def add_to_queue(
     try:
         # Validate request
        if not request.episodes:
-            raise HTTPException(
-                status_code=status.HTTP_400_BAD_REQUEST,
-                detail="At least one episode must be specified",
+            raise BadRequestError(
+                message="At least one episode must be specified"
             )
 
        # Add to queue
@@ -122,16 +121,12 @@ async def add_to_queue(
         )
 
     except DownloadServiceError as e:
-        raise HTTPException(
-            status_code=status.HTTP_400_BAD_REQUEST,
-            detail=str(e),
-        )
-    except HTTPException:
+        raise BadRequestError(message=str(e))
+    except (BadRequestError, NotFoundError, ServerError):
         raise
     except Exception as e:
-        raise HTTPException(
-            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
-            detail=f"Failed to add episodes to queue: {str(e)}",
+        raise ServerError(
+            message=f"Failed to add episodes to queue: {str(e)}"
         )
@@ -163,9 +158,8 @@ async def clear_completed(
         }
 
     except Exception as e:
-        raise HTTPException(
-            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
-            detail=f"Failed to clear completed items: {str(e)}",
+        raise ServerError(
+            message=f"Failed to clear completed items: {str(e)}"
         )
@@ -197,9 +191,8 @@ async def clear_failed(
         }
 
     except Exception as e:
-        raise HTTPException(
-            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
-            detail=f"Failed to clear failed items: {str(e)}",
+        raise ServerError(
+            message=f"Failed to clear failed items: {str(e)}"
         )
@@ -231,9 +224,8 @@ async def clear_pending(
         }
 
     except Exception as e:
-        raise HTTPException(
-            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
-            detail=f"Failed to clear pending items: {str(e)}",
+        raise ServerError(
+            message=f"Failed to clear pending items: {str(e)}"
         )
@@ -262,22 +254,19 @@ async def remove_from_queue(
         removed_ids = await download_service.remove_from_queue([item_id])
 
         if not removed_ids:
-            raise HTTPException(
-                status_code=status.HTTP_404_NOT_FOUND,
-                detail=f"Download item {item_id} not found in queue",
+            raise NotFoundError(
+                message=f"Download item {item_id} not found in queue",
+                resource_type="download_item",
+                resource_id=item_id
             )
 
     except DownloadServiceError as e:
-        raise HTTPException(
-            status_code=status.HTTP_400_BAD_REQUEST,
-            detail=str(e),
-        )
-    except HTTPException:
+        raise BadRequestError(message=str(e))
+    except (BadRequestError, NotFoundError, ServerError):
         raise
     except Exception as e:
-        raise HTTPException(
-            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
-            detail=f"Failed to remove item from queue: {str(e)}",
+        raise ServerError(
+            message=f"Failed to remove item from queue: {str(e)}"
         )
@@ -307,22 +296,18 @@ async def remove_multiple_from_queue(
         )
 
         if not removed_ids:
-            raise HTTPException(
-                status_code=status.HTTP_404_NOT_FOUND,
-                detail="No matching items found in queue",
+            raise NotFoundError(
+                message="No matching items found in queue",
+                resource_type="download_items"
             )
 
     except DownloadServiceError as e:
-        raise HTTPException(
-            status_code=status.HTTP_400_BAD_REQUEST,
-            detail=str(e),
-        )
-    except HTTPException:
+        raise BadRequestError(message=str(e))
+    except (BadRequestError, NotFoundError, ServerError):
         raise
     except Exception as e:
-        raise HTTPException(
-            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
-            detail=f"Failed to remove items from queue: {str(e)}",
+        raise ServerError(
+            message=f"Failed to remove items from queue: {str(e)}"
         )
@@ -354,9 +339,8 @@ async def start_queue(
         result = await download_service.start_queue_processing()
 
         if result is None:
-            raise HTTPException(
-                status_code=status.HTTP_400_BAD_REQUEST,
-                detail="No pending downloads in queue",
+            raise BadRequestError(
+                message="No pending downloads in queue"
             )
 
         return {
@@ -365,16 +349,12 @@ async def start_queue(
         }
 
     except DownloadServiceError as e:
-        raise HTTPException(
-            status_code=status.HTTP_400_BAD_REQUEST,
-            detail=str(e),
-        )
-    except HTTPException:
+        raise BadRequestError(message=str(e))
+    except (BadRequestError, NotFoundError, ServerError):
         raise
     except Exception as e:
-        raise HTTPException(
-            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
-            detail=f"Failed to start queue processing: {str(e)}",
+        raise ServerError(
+            message=f"Failed to start queue processing: {str(e)}"
         )
@@ -408,9 +388,8 @@ async def stop_queue(
         }
 
     except Exception as e:
-        raise HTTPException(
-            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
-            detail=f"Failed to stop queue processing: {str(e)}",
+        raise ServerError(
+            message=f"Failed to stop queue processing: {str(e)}"
         )
@@ -442,9 +421,8 @@ async def pause_queue(
         }
 
     except Exception as e:
-        raise HTTPException(
-            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
-            detail=f"Failed to pause queue processing: {str(e)}",
+        raise ServerError(
+            message=f"Failed to pause queue processing: {str(e)}"
         )
@@ -480,9 +458,8 @@ async def reorder_queue(
         }
 
     except Exception as e:
-        raise HTTPException(
-            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
-            detail=f"Failed to reorder queue: {str(e)}",
+        raise ServerError(
+            message=f"Failed to reorder queue: {str(e)}"
         )
@@ -522,7 +499,6 @@ async def retry_failed(
         }
 
     except Exception as e:
-        raise HTTPException(
-            status_code=status.HTTP_500_INTERNAL_SERVER_ERROR,
-            detail=f"Failed to retry downloads: {str(e)}",
+        raise ServerError(
+            message=f"Failed to retry downloads: {str(e)}"
         )
@@ -23,6 +23,9 @@ class HealthStatus(BaseModel):
     status: str
     timestamp: str
     version: str = "1.0.0"
+    service: str = "aniworld-api"
+    series_app_initialized: bool = False
+    anime_directory_configured: bool = False
 
 
 class DatabaseHealth(BaseModel):
@@ -170,14 +173,24 @@ def get_system_metrics() -> SystemMetrics:
 @router.get("", response_model=HealthStatus)
 async def basic_health_check() -> HealthStatus:
     """Basic health check endpoint.
 
+    This endpoint does not depend on anime_directory configuration
+    and should always return 200 OK for basic health monitoring.
+    Includes service information for identification.
+
     Returns:
-        HealthStatus: Simple health status with timestamp.
+        HealthStatus: Simple health status with timestamp and service info.
     """
+    from src.config.settings import settings
+    from src.server.utils.dependencies import _series_app
+
     logger.debug("Basic health check requested")
     return HealthStatus(
         status="healthy",
         timestamp=datetime.now().isoformat(),
+        service="aniworld-api",
+        series_app_initialized=_series_app is not None,
+        anime_directory_configured=bool(settings.anime_directory),
     )
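For orientation, a healthy response from this consolidated endpoint serializes roughly like the sketch below. The field names come from the extended HealthStatus model and the values assigned in basic_health_check above; the timestamp value is illustrative.

# Approximate response body of the consolidated health check (values illustrative).
expected_health_body = {
    "status": "healthy",
    "timestamp": "2025-12-15T12:00:00",   # datetime.now().isoformat()
    "version": "1.0.0",
    "service": "aniworld-api",
    "series_app_initialized": False,       # True once the core series app is wired up
    "anime_directory_configured": False,   # True when settings.anime_directory is set
}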
@@ -13,8 +13,9 @@ in their data payload. The `folder` field is optional for display purposes.
 """
 from __future__ import annotations
 
+import time
 import uuid
-from typing import Optional
+from typing import Dict, Optional, Set
 
 import structlog
 from fastapi import APIRouter, Depends, WebSocket, WebSocketDisconnect, status
@@ -34,6 +35,73 @@ logger = structlog.get_logger(__name__)
 
 router = APIRouter(prefix="/ws", tags=["websocket"])
 
+# Valid room names - explicit allow-list for security
+VALID_ROOMS: Set[str] = {
+    "downloads",  # Download progress updates
+    "queue",      # Queue status changes
+    "scan",       # Scan progress updates
+    "system",     # System notifications
+    "errors",     # Error notifications
+}
+
+# Rate limiting configuration for WebSocket messages
+WS_RATE_LIMIT_MESSAGES_PER_MINUTE = 60
+WS_RATE_LIMIT_WINDOW_SECONDS = 60
+
+# In-memory rate limiting for WebSocket connections
+# WARNING: This resets on process restart. For production, consider Redis.
+_ws_rate_limits: Dict[str, Dict[str, float]] = {}
+
+
+def _check_ws_rate_limit(connection_id: str) -> bool:
+    """Check if a WebSocket connection has exceeded its rate limit.
+
+    Args:
+        connection_id: Unique identifier for the WebSocket connection
+
+    Returns:
+        bool: True if within rate limit, False if exceeded
+    """
+    now = time.time()
+
+    if connection_id not in _ws_rate_limits:
+        _ws_rate_limits[connection_id] = {
+            "count": 0,
+            "window_start": now,
+        }
+
+    record = _ws_rate_limits[connection_id]
+
+    # Reset window if expired
+    if now - record["window_start"] > WS_RATE_LIMIT_WINDOW_SECONDS:
+        record["window_start"] = now
+        record["count"] = 0
+
+    record["count"] += 1
+
+    return record["count"] <= WS_RATE_LIMIT_MESSAGES_PER_MINUTE
+
+
+def _cleanup_ws_rate_limits(connection_id: str) -> None:
+    """Remove rate limit record for a disconnected connection.
+
+    Args:
+        connection_id: Unique identifier for the WebSocket connection
+    """
+    _ws_rate_limits.pop(connection_id, None)
+
+
+def _validate_room_name(room: str) -> bool:
+    """Validate that a room name is in the allowed set.
+
+    Args:
+        room: Room name to validate
+
+    Returns:
+        bool: True if room is valid, False otherwise
+    """
+    return room in VALID_ROOMS
+
+
 @router.websocket("/connect")
 async def websocket_endpoint(
@@ -130,6 +198,19 @@ async def websocket_endpoint(
                 # Receive message from client
                 data = await websocket.receive_json()
 
+                # Check rate limit
+                if not _check_ws_rate_limit(connection_id):
+                    logger.warning(
+                        "WebSocket rate limit exceeded",
+                        connection_id=connection_id,
+                    )
+                    await ws_service.send_error(
+                        connection_id,
+                        "Rate limit exceeded. Please slow down.",
+                        "RATE_LIMIT_EXCEEDED",
+                    )
+                    continue
+
                 # Parse client message
                 try:
                     client_msg = ClientMessage(**data)
@@ -149,9 +230,26 @@ async def websocket_endpoint(
                 # Handle room subscription requests
                 if client_msg.action in ["join", "leave"]:
                     try:
+                        room_name = client_msg.data.get("room", "")
+
+                        # Validate room name against allow-list
+                        if not _validate_room_name(room_name):
+                            logger.warning(
+                                "Invalid room name requested",
+                                connection_id=connection_id,
+                                room=room_name,
+                            )
+                            await ws_service.send_error(
+                                connection_id,
+                                f"Invalid room name: {room_name}. "
+                                f"Valid rooms: {', '.join(sorted(VALID_ROOMS))}",
+                                "INVALID_ROOM",
+                            )
+                            continue
+
                         room_req = RoomSubscriptionRequest(
                             action=client_msg.action,
-                            room=client_msg.data.get("room", ""),
+                            room=room_name,
                         )
 
                         if room_req.action == "join":
@@ -241,7 +339,8 @@ async def websocket_endpoint(
                 error=str(e),
             )
     finally:
-        # Cleanup connection
+        # Cleanup connection and rate limit record
+        _cleanup_ws_rate_limits(connection_id)
         await ws_service.disconnect(connection_id)
         logger.info("WebSocket connection closed", connection_id=connection_id)
@@ -263,5 +362,6 @@ async def websocket_status(
             "status": "operational",
             "active_connections": connection_count,
             "supported_message_types": [t.value for t in WebSocketMessageType],
+            "valid_rooms": sorted(VALID_ROOMS),
         },
     )
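Taken together, the WebSocket changes mean a client must subscribe to one of the five allow-listed rooms and stay under 60 messages per minute. A minimal client-side sketch of the join flow follows; the ws://localhost:8000/ws/connect URL and the exact shape of the server's error payloads are assumptions, while the action/data.room request fields and the INVALID_ROOM / RATE_LIMIT_EXCEEDED codes come from the diff above.

# Hypothetical client sketch -- not part of the commit. Assumes a local server
# and that error payloads arrive as JSON containing a "code" field.
import asyncio
import json

import websockets  # third-party: pip install websockets


async def join_downloads_room() -> None:
    uri = "ws://localhost:8000/ws/connect"  # assumed host/port
    async with websockets.connect(uri) as ws:
        # Room must be one of the allow-listed names, e.g. "downloads".
        await ws.send(json.dumps({"action": "join", "data": {"room": "downloads"}}))

        while True:
            message = json.loads(await ws.recv())
            # Error codes introduced by this commit.
            if message.get("code") in {"INVALID_ROOM", "RATE_LIMIT_EXCEEDED"}:
                print("server rejected request:", message)
                break
            print("update:", message)


if __name__ == "__main__":
    asyncio.run(join_downloads_room())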
@@ -1,27 +0,0 @@
-"""
-Health check controller for monitoring and status endpoints.
-
-This module provides health check endpoints for application monitoring.
-"""
-from fastapi import APIRouter
-
-from src.config.settings import settings
-from src.server.utils.dependencies import _series_app
-
-router = APIRouter(prefix="/health", tags=["health"])
-
-
-@router.get("")
-async def health_check():
-    """Health check endpoint for monitoring.
-
-    This endpoint does not depend on anime_directory configuration
-    and should always return 200 OK for basic health monitoring.
-    """
-    return {
-        "status": "healthy",
-        "service": "aniworld-api",
-        "version": "1.0.0",
-        "series_app_initialized": _series_app is not None,
-        "anime_directory_configured": bool(settings.anime_directory)
-    }
@@ -144,6 +144,23 @@ class ConflictError(AniWorldAPIException):
         )
 
 
+class BadRequestError(AniWorldAPIException):
+    """Exception raised for bad request (400) errors."""
+
+    def __init__(
+        self,
+        message: str = "Bad request",
+        details: Optional[Dict[str, Any]] = None,
+    ):
+        """Initialize bad request error."""
+        super().__init__(
+            message=message,
+            status_code=400,
+            error_code="BAD_REQUEST",
+            details=details,
+        )
+
+
 class RateLimitError(AniWorldAPIException):
     """Exception raised when rate limit is exceeded."""
@@ -21,6 +21,7 @@ from src.server.api.anime import router as anime_router
 from src.server.api.auth import router as auth_router
 from src.server.api.config import router as config_router
 from src.server.api.download import router as download_router
+from src.server.api.health import router as health_router
 from src.server.api.scheduler import router as scheduler_router
 from src.server.api.websocket import router as websocket_router
 from src.server.controllers.error_controller import (
@@ -29,7 +30,6 @@ from src.server.controllers.error_controller import (
 )
 
 # Import controllers
-from src.server.controllers.health_controller import router as health_router
 from src.server.controllers.page_controller import router as page_router
 from src.server.middleware.auth import AuthMiddleware
 from src.server.middleware.error_handler import register_exception_handlers
@@ -8,6 +8,17 @@ Responsibilities:
 This middleware is intentionally lightweight and synchronous.
 For production use consider a distributed rate limiter (Redis) and
 a proper token revocation store.
+
+WARNING - SINGLE PROCESS LIMITATION:
+Rate limiting state is stored in memory dictionaries which RESET when
+the process restarts. This means:
+- Attackers can bypass rate limits by triggering a process restart
+- Rate limits are not shared across multiple workers/processes
+
+For production deployments, consider:
+- Using Redis-backed rate limiting (e.g., slowapi with Redis)
+- Running behind a reverse proxy with rate limiting (nginx, HAProxy)
+- Using a dedicated rate limiting service
 """
 from __future__ import annotations
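The warning above recommends Redis for multi-worker deployments. A minimal sketch of what a shared fixed-window check could look like with redis-py is below; the key naming, limits, and client setup are illustrative assumptions and not part of this commit.

# Illustrative sketch of a Redis-backed fixed window, as suggested in the
# docstring above. Not part of the commit; limits and key format are made up.
import redis

r = redis.Redis(host="localhost", port=6379, db=0)

WINDOW_SECONDS = 60
MAX_REQUESTS = 60


def allow_request(client_ip: str) -> bool:
    """Return True if the client is still within its per-minute budget."""
    key = f"ratelimit:{client_ip}"
    count = r.incr(key)                 # atomic increment shared across workers
    if count == 1:
        r.expire(key, WINDOW_SECONDS)   # start the window on the first hit
    return count <= MAX_REQUESTS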
@@ -15,6 +15,7 @@ from src.server.exceptions import (
     AniWorldAPIException,
     AuthenticationError,
     AuthorizationError,
+    BadRequestError,
     ConflictError,
     NotFoundError,
     RateLimitError,
@@ -127,6 +128,26 @@ def register_exception_handlers(app: FastAPI) -> None:
             ),
         )
 
+    @app.exception_handler(BadRequestError)
+    async def bad_request_error_handler(
+        request: Request, exc: BadRequestError
+    ) -> JSONResponse:
+        """Handle bad request errors (400)."""
+        logger.info(
+            f"Bad request error: {exc.message}",
+            extra={"details": exc.details, "path": str(request.url.path)},
+        )
+        return JSONResponse(
+            status_code=exc.status_code,
+            content=create_error_response(
+                status_code=exc.status_code,
+                error=exc.error_code,
+                message=exc.message,
+                details=exc.details,
+                request_id=getattr(request.state, "request_id", None),
+            ),
+        )
+
     @app.exception_handler(NotFoundError)
     async def not_found_error_handler(
         request: Request, exc: NotFoundError
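The todolist entry for issue 9 (further down) describes the envelope these handlers produce as having success, error, message, and details fields. A rough sketch of a 400 response body, assuming create_error_response maps its arguments straight onto those keys (the exact key set may differ):

# Approximate body produced for a raised BadRequestError; key set is defined
# by create_error_response and may include additional fields.
example_bad_request_body = {
    "success": False,
    "status_code": 400,
    "error": "BAD_REQUEST",
    "message": "At least one episode must be specified",
    "details": None,
    "request_id": None,  # populated when request.state.request_id is set
}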
@@ -42,6 +42,17 @@ class AuthService:
       config persistence should be used (not implemented here).
     - Lockout policy is kept in-memory and will reset when the process
       restarts. This is acceptable for single-process deployments.
+
+    WARNING - SINGLE PROCESS LIMITATION:
+    Failed login attempts are stored in memory dictionaries which RESET
+    when the process restarts. This means:
+    - Attackers can bypass lockouts by triggering a process restart
+    - Lockout state is not shared across multiple workers/processes
+
+    For production deployments, consider:
+    - Storing failed attempts in database with TTL-based expiration
+    - Using Redis for distributed lockout state
+    - Implementing account-based (not just IP-based) lockout tracking
     """
 
     def __init__(self) -> None:
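As a companion to the warning above, here is one way failed attempts could be persisted with TTL-style expiry using only the standard library; the table name, columns, and 15-minute window are illustrative assumptions, not part of AuthService.

# Illustrative sketch of persisting failed login attempts with a time-based
# cutoff, as recommended above. Schema and window size are assumptions.
import sqlite3
import time

WINDOW_SECONDS = 15 * 60  # count attempts from the last 15 minutes
MAX_ATTEMPTS = 5

conn = sqlite3.connect("data/lockouts.db")
conn.execute(
    "CREATE TABLE IF NOT EXISTS failed_logins ("
    "username TEXT, source_ip TEXT, attempted_at REAL)"
)


def record_failure(username: str, source_ip: str) -> None:
    conn.execute(
        "INSERT INTO failed_logins VALUES (?, ?, ?)",
        (username, source_ip, time.time()),
    )
    conn.commit()


def is_locked_out(username: str, source_ip: str) -> bool:
    cutoff = time.time() - WINDOW_SECONDS
    # Rows older than the window are simply ignored (and could be pruned).
    (count,) = conn.execute(
        "SELECT COUNT(*) FROM failed_logins "
        "WHERE username = ? AND source_ip = ? AND attempted_at > ?",
        (username, source_ip, cutoff),
    ).fetchone()
    return count >= MAX_ATTEMPTS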
@@ -18,12 +18,18 @@ from src.server.api.health import (
 @pytest.mark.asyncio
 async def test_basic_health_check():
     """Test basic health check endpoint."""
-    result = await basic_health_check()
+    with patch("src.config.settings.settings") as mock_settings, \
+            patch("src.server.utils.dependencies._series_app", None):
+        mock_settings.anime_directory = ""
+        result = await basic_health_check()
 
     assert isinstance(result, HealthStatus)
     assert result.status == "healthy"
     assert result.version == "1.0.0"
+    assert result.service == "aniworld-api"
     assert result.timestamp is not None
+    assert result.series_app_initialized is False
+    assert result.anime_directory_configured is False
 
 
 @pytest.mark.asyncio
todolist.md: 134 lines changed
@@ -4,9 +4,9 @@ This document tracks design and architecture issues discovered during documentat
 
 ---
 
-## Issues
+## Completed Issues (2025-12-15)
 
-### 1. In-Memory Rate Limiting Not Persistent
+### ✅ 1. In-Memory Rate Limiting Not Persistent
 
 **Title:** In-memory rate limiting resets on process restart
@@ -16,11 +16,11 @@ This document tracks design and architecture issues discovered during documentat
 
 **Description:** Rate limiting state is stored in memory dictionaries (`_rate`, `_origin_rate`) which reset when the process restarts, allowing attackers to bypass lockouts.
 
-**Suggested action:** Implement Redis-backed rate limiting for production deployments; add documentation warning about single-process limitation.
+**Resolution:** Added comprehensive documentation warning in the module docstring about single-process limitations and recommendations for production deployments (Redis, reverse proxy, etc.).
 
 ---
 
-### 2. Failed Login Attempts Not Persisted
+### ✅ 2. Failed Login Attempts Not Persisted
 
 **Title:** Failed login attempts stored in-memory only
@@ -30,25 +30,25 @@ This document tracks design and architecture issues discovered during documentat
 
 **Description:** The `_failed` dictionary tracking failed login attempts resets on process restart, allowing brute-force bypass via service restart.
 
-**Suggested action:** Store failed attempts in database or Redis; add configurable lockout policy.
+**Resolution:** Added comprehensive documentation warning in the class docstring about single-process limitations and recommendations for production deployments.
 
 ---
 
-### 3. Duplicate Health Endpoints
+### ✅ 3. Duplicate Health Endpoints
 
 **Title:** Health endpoints defined in two locations
 
 **Severity:** low
 
-**Location:** [src/server/api/health.py](src/server/api/health.py) and [src/server/controllers/health_controller.py](src/server/controllers/health_controller.py)
+**Location:** [src/server/api/health.py](src/server/api/health.py)
 
-**Description:** Health check functionality is split between `api/health.py` (detailed checks) and `controllers/health_controller.py` (basic check). Both are registered, potentially causing confusion.
+**Description:** Health check functionality was split between `api/health.py` (detailed checks) and `controllers/health_controller.py` (basic check). Both were registered, causing confusion.
 
-**Suggested action:** Consolidate health endpoints into a single module; remove duplicate controller.
+**Resolution:** Consolidated health endpoints into `api/health.py` only. Removed `controllers/health_controller.py`. Updated `fastapi_app.py` to import from `api/health.py`.
 
 ---
 
-### 4. Deprecation Warnings in Production Code
+### ✅ 4. Deprecation Warnings in Production Code
 
 **Title:** Deprecated file-based scan method still in use
@@ -58,67 +58,11 @@ This document tracks design and architecture issues discovered during documentat
 
 **Description:** The `scan()` method emits deprecation warnings but is still callable. CLI may still use this method.
 
-**Suggested action:** Complete migration to `scan_async()` with database storage; remove deprecated method after CLI update.
+**Resolution:** Fixed CLI (`src/cli/Main.py`) to use correct method names (`serie_scanner` not `SerieScanner`, `rescan()` is async). CLI now properly calls `asyncio.run(self.series_app.rescan(use_database=False))` for backward compatibility with file-based mode.
 
 ---
 
-### 5. SQLite Concurrency Limitations
-
-**Title:** SQLite not suitable for high concurrency
-
-**Severity:** medium
-
-**Location:** [src/config/settings.py](src/config/settings.py#L53-L55)
-
-**Description:** Default database is SQLite (`sqlite:///./data/aniworld.db`) which has limited concurrent write support. May cause issues under load.
-
-**Suggested action:** Document PostgreSQL migration path; add connection pooling configuration for production.
-
----
-
-### 6. Master Password Hash in Config File
-
-**Title:** Password hash stored in plaintext config file
-
-**Severity:** medium
-
-**Location:** [data/config.json](data/config.json) (other.master_password_hash)
-
-**Description:** The bcrypt password hash is stored in `config.json` which may be world-readable depending on deployment.
-
-**Suggested action:** Ensure config file has restricted permissions (600); consider environment variable for hash in production.
-
----
-
-### 7. Module-Level Singleton Pattern
-
-**Title:** Singleton services using module-level globals
-
-**Severity:** low
-
-**Location:** [src/server/services/download_service.py](src/server/services/download_service.py), [src/server/utils/dependencies.py](src/server/utils/dependencies.py)
-
-**Description:** Services use module-level `_instance` variables for singletons, making testing harder and preventing multi-instance hosting.
-
-**Suggested action:** Migrate to FastAPI app.state for service storage; document testing patterns for singleton cleanup.
-
----
-
-### 8. Hardcoded Provider
-
-**Title:** Default provider hardcoded
-
-**Severity:** low
-
-**Location:** [src/config/settings.py](src/config/settings.py#L66-L68)
-
-**Description:** The `default_provider` setting defaults to `"aniworld.to"` but provider switching is not fully implemented in the API.
-
-**Suggested action:** Implement provider selection endpoint; document available providers.
-
----
-
-### 9. Inconsistent Error Response Format
+### ✅ 9. Inconsistent Error Response Format
 
 **Title:** Some endpoints return different error formats
@@ -128,11 +72,11 @@ This document tracks design and architecture issues discovered during documentat
 
 **Description:** Most endpoints use the standard error response format from `error_handler.py`, but some handlers return raw `{"detail": "..."}` responses.
 
-**Suggested action:** Audit all endpoints for consistent error response structure; use custom exception classes uniformly.
+**Resolution:** Updated `download.py` and `anime.py` to use custom exception classes (`BadRequestError`, `NotFoundError`, `ServerError`, `ValidationError`) which are handled by the centralized error handler for consistent response format with `success`, `error`, `message`, and `details` fields.
 
 ---
 
-### 10. Missing Input Validation on WebSocket
+### ✅ 10. Missing Input Validation on WebSocket
 
 **Title:** WebSocket messages lack comprehensive validation
@@ -142,49 +86,27 @@ This document tracks design and architecture issues discovered during documentat
 
 **Description:** Client messages are parsed with basic Pydantic validation, but room names and action types are not strictly validated against an allow-list.
 
-**Suggested action:** Add explicit room name validation; rate-limit WebSocket message frequency.
+**Resolution:** Added explicit room name validation against `VALID_ROOMS` allow-list. Added per-connection rate limiting (60 messages/minute) to prevent abuse. Added cleanup of rate limit records on disconnect.
 
----
-
-### 11. No Token Revocation Storage
-
-**Title:** JWT token revocation is a no-op
-
-**Severity:** medium
-
-**Location:** [src/server/services/auth_service.py](src/server/services/auth_service.py)
-
-**Description:** The `revoke_token()` method exists but does not persist revocations. Logged-out tokens remain valid until expiry.
-
-**Suggested action:** Implement token blacklist in database or Redis; check blacklist in `create_session_model()`.
-
----
-
-### 12. Anime Directory Validation
-
-**Title:** Anime directory path not validated on startup
-
-**Severity:** low
-
-**Location:** [src/server/fastapi_app.py](src/server/fastapi_app.py#L107-L125)
-
-**Description:** The configured anime directory is used without validation that it exists and is readable. Errors only appear when scanning.
-
-**Suggested action:** Add directory validation in lifespan startup; return clear error if path invalid.
-
 ---
 
 ## Summary
 
-| Severity | Count |
-| --------- | ------ |
-| High | 0 |
-| Medium | 5 |
-| Low | 7 |
-| **Total** | **12** |
+| Severity | Completed |
+| --------- | ---------- |
+| Medium | 2 |
+| Low | 4 |
+| **Total** | **6** |
 
 ---
 
-## Changelog Note
+## Changelog
 
+**2025-12-15**: Completed all 6 identified issues:
+- Enhanced documentation for in-memory limitations in rate limiting and failed login tracking
+- Consolidated duplicate health endpoints into single module
+- Fixed CLI to use correct async method names
+- Updated endpoints to use consistent custom exception classes
+- Added WebSocket room validation and rate limiting
+
 **2025-12-13**: Initial documentation review completed. Created comprehensive API.md with all REST and WebSocket endpoints documented with source references. Updated ARCHITECTURE.md with system overview, layer descriptions, design patterns, and data flow diagrams. Created README.md with quick start guide. Identified 12 design/architecture issues requiring attention.