# Changelog: implement notification service (email, webhook, in-app);
# add security headers middleware (CORS, CSP, HSTS, XSS protection);
# create comprehensive audit logging service for security events;
# add data validation utilities with Pydantic validators;
# implement cache service with in-memory and Redis backend support.
# All 714 tests passing.
"""
|
|
Audit Service for AniWorld.
|
|
|
|
This module provides comprehensive audit logging for security-critical
|
|
operations including authentication, configuration changes, and downloads.
|
|
"""
|
|
|
|
import json
|
|
import logging
|
|
from datetime import datetime, timedelta
|
|
from enum import Enum
|
|
from pathlib import Path
|
|
from typing import Any, Dict, List, Optional
|
|
|
|
from pydantic import BaseModel, Field
|
|
|
|
logger = logging.getLogger(__name__)
|
|
|
|
|
|
class AuditEventType(str, Enum):
    """Closed set of auditable event categories.

    Values use a dotted ``domain.action[.outcome]`` naming scheme so logs
    can be filtered by prefix (e.g. everything under ``auth.``).
    """

    # -- Authentication lifecycle --
    AUTH_SETUP = "auth.setup"
    AUTH_LOGIN_SUCCESS = "auth.login.success"
    AUTH_LOGIN_FAILURE = "auth.login.failure"
    AUTH_LOGOUT = "auth.logout"
    AUTH_TOKEN_REFRESH = "auth.token.refresh"
    AUTH_TOKEN_INVALID = "auth.token.invalid"

    # -- Configuration management --
    CONFIG_READ = "config.read"
    CONFIG_UPDATE = "config.update"
    CONFIG_BACKUP = "config.backup"
    CONFIG_RESTORE = "config.restore"
    CONFIG_DELETE = "config.delete"

    # -- Download lifecycle --
    DOWNLOAD_ADDED = "download.added"
    DOWNLOAD_STARTED = "download.started"
    DOWNLOAD_COMPLETED = "download.completed"
    DOWNLOAD_FAILED = "download.failed"
    DOWNLOAD_CANCELLED = "download.cancelled"
    DOWNLOAD_REMOVED = "download.removed"

    # -- Queue control --
    QUEUE_STARTED = "queue.started"
    QUEUE_STOPPED = "queue.stopped"
    QUEUE_PAUSED = "queue.paused"
    QUEUE_RESUMED = "queue.resumed"
    QUEUE_CLEARED = "queue.cleared"

    # -- System lifecycle --
    SYSTEM_STARTUP = "system.startup"
    SYSTEM_SHUTDOWN = "system.shutdown"
    SYSTEM_ERROR = "system.error"
|
|
|
|
|
|
class AuditEventSeverity(str, Enum):
    """Severity ladder for audit events, mirroring the stdlib logging levels."""

    DEBUG = "debug"
    INFO = "info"
    WARNING = "warning"
    ERROR = "error"
    CRITICAL = "critical"
|
|
|
|
|
|
class AuditEvent(BaseModel):
    """A single audit record written to and read back from storage.

    Only ``event_type`` and ``message`` are required; everything else is
    optional context captured when available.
    """

    # Naive UTC timestamp of when the event object was created.
    # NOTE(review): datetime.utcnow is deprecated since Python 3.12 and
    # produces naive datetimes — confirm a migration plan to aware UTC.
    timestamp: datetime = Field(default_factory=datetime.utcnow)
    # What happened (see AuditEventType).
    event_type: AuditEventType
    # How important it is; high severities are mirrored to the app logger.
    severity: AuditEventSeverity = AuditEventSeverity.INFO
    # Who did it / from where, when known.
    user_id: Optional[str] = None
    ip_address: Optional[str] = None
    user_agent: Optional[str] = None
    # What was acted on and how.
    resource: Optional[str] = None
    action: Optional[str] = None
    # Outcome: "success" by default; callers pass "failure"/"error" otherwise.
    status: str = "success"
    # Required human-readable description.
    message: str
    # Free-form structured context (reasons, file paths, changes, ...).
    details: Optional[Dict[str, Any]] = None
    session_id: Optional[str] = None

    class Config:
        """Pydantic config.

        NOTE(review): class-based Config and json_encoders are deprecated
        under Pydantic v2 (which this module targets via model_dump_json);
        kept as-is for compatibility — confirm before migrating to
        model_config = ConfigDict(...).
        """

        json_encoders = {datetime: lambda v: v.isoformat()}
|
|
|
|
|
|
class AuditLogStorage:
    """Abstract interface for audit log storage backends.

    Subclasses (e.g. a file- or database-backed store) must implement all
    three coroutines; this base class only defines the contract.
    """

    async def write_event(self, event: AuditEvent) -> None:
        """Persist a single audit event.

        Args:
            event: Audit event to write

        Raises:
            NotImplementedError: always, on the base class.
        """
        raise NotImplementedError

    async def read_events(
        self,
        start_time: Optional[datetime] = None,
        end_time: Optional[datetime] = None,
        event_types: Optional[List[AuditEventType]] = None,
        user_id: Optional[str] = None,
        limit: int = 100,
    ) -> List[AuditEvent]:
        """Query stored audit events.

        Args:
            start_time: Start of time range
            end_time: End of time range
            event_types: Filter by event types
            user_id: Filter by user ID
            limit: Maximum number of events to return

        Returns:
            Matching audit events.

        Raises:
            NotImplementedError: always, on the base class.
        """
        raise NotImplementedError

    async def cleanup_old_events(self, days: int = 90) -> int:
        """Delete audit events older than the retention window.

        Args:
            days: Number of days to retain

        Returns:
            Count of deleted items (backend-defined unit).

        Raises:
            NotImplementedError: always, on the base class.
        """
        raise NotImplementedError
|
|
|
|
|
|
class FileAuditLogStorage(AuditLogStorage):
    """Audit log storage backed by daily JSON-Lines files.

    Each calendar day's events are appended to
    ``<log_directory>/audit_YYYY-MM-DD.jsonl``, one JSON document per line.
    """

    def __init__(self, log_directory: str = "logs/audit"):
        """
        Initialize file-based audit log storage.

        Args:
            log_directory: Directory to store audit logs (created if missing)
        """
        self.log_directory = Path(log_directory)
        self.log_directory.mkdir(parents=True, exist_ok=True)
        # Reserved for future file-handle caching; currently unused.
        self._current_date: Optional[str] = None
        self._current_file: Optional[Path] = None

    def _get_log_file(self, date: datetime) -> Path:
        """
        Get the log file path for a specific date (one file per day).

        Args:
            date: Date for log file

        Returns:
            Path to the day's JSONL log file
        """
        date_str = date.strftime("%Y-%m-%d")
        return self.log_directory / f"audit_{date_str}.jsonl"

    async def write_event(self, event: AuditEvent) -> None:
        """
        Append an audit event as one JSON line to the day's file.

        Write failures are logged but never propagated: auditing is
        best-effort and must not crash the operation being audited.

        Args:
            event: Audit event to write
        """
        log_file = self._get_log_file(event.timestamp)

        try:
            with open(log_file, "a", encoding="utf-8") as f:
                f.write(event.model_dump_json() + "\n")
        except Exception as e:
            logger.error("Failed to write audit event to file: %s", e)

    async def read_events(
        self,
        start_time: Optional[datetime] = None,
        end_time: Optional[datetime] = None,
        event_types: Optional[List[AuditEventType]] = None,
        user_id: Optional[str] = None,
        limit: int = 100,
    ) -> List[AuditEvent]:
        """
        Read audit events from the daily log files, newest first.

        Args:
            start_time: Start of time range (default: 7 days ago, naive UTC)
            end_time: End of time range (default: now, naive UTC)
            event_types: Filter by event types
            user_id: Filter by user ID
            limit: Maximum number of events to return

        Returns:
            Up to ``limit`` matching events, sorted newest-first.
        """
        if start_time is None:
            start_time = datetime.utcnow() - timedelta(days=7)
        if end_time is None:
            end_time = datetime.utcnow()

        events: List[AuditEvent] = []
        current_date = start_time.date()
        end_date = end_time.date()

        # Collect ALL matching events in the range before sorting.
        # BUG FIX: the previous version stopped scanning as soon as `limit`
        # events had been seen chronologically, then sorted descending —
        # returning the *oldest* `limit` events despite the newest-first
        # ordering the sort clearly intends. The cost is holding all
        # in-range matches in memory; ranges are bounded by the retention
        # window, so this is acceptable for an audit trail.
        while current_date <= end_date:
            log_file = self._get_log_file(
                datetime.combine(current_date, datetime.min.time())
            )

            if log_file.exists():
                try:
                    with open(log_file, "r", encoding="utf-8") as f:
                        for line in f:
                            try:
                                event_data = json.loads(line.strip())
                                event = AuditEvent(**event_data)
                            except (json.JSONDecodeError, ValueError) as e:
                                # Skip corrupt lines rather than abort the query.
                                logger.warning("Failed to parse audit event: %s", e)
                                continue

                            # Apply filters.
                            if event.timestamp < start_time or event.timestamp > end_time:
                                continue
                            if event_types and event.event_type not in event_types:
                                continue
                            if user_id and event.user_id != user_id:
                                continue

                            events.append(event)

                except Exception as e:
                    logger.error("Failed to read audit log file %s: %s", log_file, e)

            current_date += timedelta(days=1)

        # Sort by timestamp descending and keep the newest `limit`.
        events.sort(key=lambda e: e.timestamp, reverse=True)
        return events[:limit]

    async def cleanup_old_events(self, days: int = 90) -> int:
        """
        Delete whole daily log files older than the retention window.

        Args:
            days: Number of days to retain

        Returns:
            Number of files deleted
        """
        cutoff_date = datetime.utcnow() - timedelta(days=days)
        deleted_count = 0

        for log_file in self.log_directory.glob("audit_*.jsonl"):
            try:
                # Recover the date from the "audit_YYYY-MM-DD" stem.
                date_str = log_file.stem.replace("audit_", "")
                file_date = datetime.strptime(date_str, "%Y-%m-%d")

                if file_date < cutoff_date:
                    log_file.unlink()
                    deleted_count += 1
                    logger.info("Deleted old audit log: %s", log_file)

            except (ValueError, OSError) as e:
                # Unparseable filename or filesystem error: skip this file.
                logger.warning("Failed to process audit log file %s: %s", log_file, e)

        return deleted_count
|
|
|
|
|
|
class AuditService:
    """Main audit service for logging security events.

    Thin facade over an ``AuditLogStorage`` backend: ``log_event`` builds
    and persists an ``AuditEvent``; the ``log_*`` convenience methods wrap
    it with the right event type, message, and details for each scenario.
    """

    def __init__(self, storage: Optional[AuditLogStorage] = None):
        """
        Initialize the audit service.

        Args:
            storage: Storage backend for audit logs
                (defaults to file-based storage under logs/audit)
        """
        self.storage = storage or FileAuditLogStorage()

    async def log_event(
        self,
        event_type: AuditEventType,
        message: str,
        severity: AuditEventSeverity = AuditEventSeverity.INFO,
        user_id: Optional[str] = None,
        ip_address: Optional[str] = None,
        user_agent: Optional[str] = None,
        resource: Optional[str] = None,
        action: Optional[str] = None,
        status: str = "success",
        details: Optional[Dict[str, Any]] = None,
        session_id: Optional[str] = None,
    ) -> None:
        """
        Log an audit event.

        Args:
            event_type: Type of event
            message: Human-readable message
            severity: Event severity
            user_id: User identifier
            ip_address: Client IP address
            user_agent: Client user agent
            resource: Resource being accessed
            action: Action performed
            status: Operation status
            details: Additional details
            session_id: Session identifier
        """
        event = AuditEvent(
            event_type=event_type,
            severity=severity,
            user_id=user_id,
            ip_address=ip_address,
            user_agent=user_agent,
            resource=resource,
            action=action,
            status=status,
            message=message,
            details=details,
            session_id=session_id,
        )

        await self.storage.write_event(event)

        # Mirror WARNING+ events to the application logger so they are
        # visible even when the audit files themselves are not monitored.
        # Lazy %-style args avoid formatting work when the level is disabled.
        if severity in (AuditEventSeverity.ERROR, AuditEventSeverity.CRITICAL):
            logger.error("Audit: %s", message, extra={"audit_event": event.model_dump()})
        elif severity == AuditEventSeverity.WARNING:
            logger.warning("Audit: %s", message, extra={"audit_event": event.model_dump()})

    async def log_auth_setup(
        self, user_id: str, ip_address: Optional[str] = None
    ) -> None:
        """Log initial authentication setup."""
        await self.log_event(
            event_type=AuditEventType.AUTH_SETUP,
            message=f"Authentication configured by user {user_id}",
            user_id=user_id,
            ip_address=ip_address,
            action="setup",
        )

    async def log_login_success(
        self,
        user_id: str,
        ip_address: Optional[str] = None,
        user_agent: Optional[str] = None,
        session_id: Optional[str] = None,
    ) -> None:
        """Log successful login."""
        await self.log_event(
            event_type=AuditEventType.AUTH_LOGIN_SUCCESS,
            message=f"User {user_id} logged in successfully",
            user_id=user_id,
            ip_address=ip_address,
            user_agent=user_agent,
            session_id=session_id,
            action="login",
        )

    async def log_login_failure(
        self,
        user_id: Optional[str] = None,
        ip_address: Optional[str] = None,
        user_agent: Optional[str] = None,
        reason: str = "Invalid credentials",
    ) -> None:
        """Log failed login attempt (WARNING severity, status="failure")."""
        await self.log_event(
            event_type=AuditEventType.AUTH_LOGIN_FAILURE,
            message=f"Login failed for user {user_id or 'unknown'}: {reason}",
            severity=AuditEventSeverity.WARNING,
            user_id=user_id,
            ip_address=ip_address,
            user_agent=user_agent,
            status="failure",
            action="login",
            details={"reason": reason},
        )

    async def log_logout(
        self,
        user_id: str,
        ip_address: Optional[str] = None,
        session_id: Optional[str] = None,
    ) -> None:
        """Log user logout."""
        await self.log_event(
            event_type=AuditEventType.AUTH_LOGOUT,
            message=f"User {user_id} logged out",
            user_id=user_id,
            ip_address=ip_address,
            session_id=session_id,
            action="logout",
        )

    async def log_config_update(
        self,
        user_id: str,
        changes: Dict[str, Any],
        ip_address: Optional[str] = None,
    ) -> None:
        """Log configuration update, recording the changed keys/values."""
        await self.log_event(
            event_type=AuditEventType.CONFIG_UPDATE,
            message=f"Configuration updated by user {user_id}",
            user_id=user_id,
            ip_address=ip_address,
            resource="config",
            action="update",
            details={"changes": changes},
        )

    async def log_config_backup(
        self, user_id: str, backup_file: str, ip_address: Optional[str] = None
    ) -> None:
        """Log configuration backup, recording the backup file path."""
        await self.log_event(
            event_type=AuditEventType.CONFIG_BACKUP,
            message=f"Configuration backed up by user {user_id}",
            user_id=user_id,
            ip_address=ip_address,
            resource="config",
            action="backup",
            details={"backup_file": backup_file},
        )

    async def log_config_restore(
        self, user_id: str, backup_file: str, ip_address: Optional[str] = None
    ) -> None:
        """Log configuration restore, recording the backup file path."""
        await self.log_event(
            event_type=AuditEventType.CONFIG_RESTORE,
            message=f"Configuration restored by user {user_id}",
            user_id=user_id,
            ip_address=ip_address,
            resource="config",
            action="restore",
            details={"backup_file": backup_file},
        )

    async def log_download_added(
        self,
        user_id: str,
        series_name: str,
        episodes: List[str],
        ip_address: Optional[str] = None,
    ) -> None:
        """Log download added to queue, recording the episode list."""
        await self.log_event(
            event_type=AuditEventType.DOWNLOAD_ADDED,
            message=f"Download added by user {user_id}: {series_name}",
            user_id=user_id,
            ip_address=ip_address,
            resource=series_name,
            action="add",
            details={"episodes": episodes},
        )

    async def log_download_completed(
        self, series_name: str, episode: str, file_path: str
    ) -> None:
        """Log completed download (system-initiated; no user context)."""
        await self.log_event(
            event_type=AuditEventType.DOWNLOAD_COMPLETED,
            message=f"Download completed: {series_name} - {episode}",
            resource=series_name,
            action="download",
            details={"episode": episode, "file_path": file_path},
        )

    async def log_download_failed(
        self, series_name: str, episode: str, error: str
    ) -> None:
        """Log failed download (ERROR severity, status="failure")."""
        await self.log_event(
            event_type=AuditEventType.DOWNLOAD_FAILED,
            message=f"Download failed: {series_name} - {episode}",
            severity=AuditEventSeverity.ERROR,
            resource=series_name,
            action="download",
            status="failure",
            details={"episode": episode, "error": error},
        )

    async def log_queue_operation(
        self,
        user_id: str,
        operation: str,
        ip_address: Optional[str] = None,
        details: Optional[Dict[str, Any]] = None,
    ) -> None:
        """Log a queue operation ("start", "stop", "pause", "resume", "clear").

        NOTE: an unrecognized operation string is recorded as SYSTEM_ERROR
        rather than rejected — the event is still written.
        """
        event_type_map = {
            "start": AuditEventType.QUEUE_STARTED,
            "stop": AuditEventType.QUEUE_STOPPED,
            "pause": AuditEventType.QUEUE_PAUSED,
            "resume": AuditEventType.QUEUE_RESUMED,
            "clear": AuditEventType.QUEUE_CLEARED,
        }

        event_type = event_type_map.get(operation, AuditEventType.SYSTEM_ERROR)
        await self.log_event(
            event_type=event_type,
            message=f"Queue {operation} by user {user_id}",
            user_id=user_id,
            ip_address=ip_address,
            resource="queue",
            action=operation,
            details=details,
        )

    async def log_system_error(
        self, error: str, details: Optional[Dict[str, Any]] = None
    ) -> None:
        """Log system error (ERROR severity, status="error")."""
        await self.log_event(
            event_type=AuditEventType.SYSTEM_ERROR,
            message=f"System error: {error}",
            severity=AuditEventSeverity.ERROR,
            status="error",
            details=details,
        )

    async def get_events(
        self,
        start_time: Optional[datetime] = None,
        end_time: Optional[datetime] = None,
        event_types: Optional[List[AuditEventType]] = None,
        user_id: Optional[str] = None,
        limit: int = 100,
    ) -> List[AuditEvent]:
        """
        Get audit events with filters (delegates to the storage backend).

        Args:
            start_time: Start of time range
            end_time: End of time range
            event_types: Filter by event types
            user_id: Filter by user ID
            limit: Maximum number of events to return

        Returns:
            List of audit events
        """
        return await self.storage.read_events(
            start_time=start_time,
            end_time=end_time,
            event_types=event_types,
            user_id=user_id,
            limit=limit,
        )

    async def cleanup_old_events(self, days: int = 90) -> int:
        """
        Clean up old audit events (delegates to the storage backend).

        Args:
            days: Number of days to retain

        Returns:
            Number of events deleted (backend-defined unit)
        """
        return await self.storage.cleanup_old_events(days)
|
|
|
|
|
|
# Process-wide singleton, created lazily by get_audit_service() or
# replaced explicitly via configure_audit_service().
_audit_service: Optional[AuditService] = None
|
|
|
|
|
|
def get_audit_service() -> AuditService:
    """
    Return the process-wide audit service, creating it on first use.

    Returns:
        The lazily-initialized singleton AuditService (file-backed by
        default until configure_audit_service() installs another backend).
    """
    global _audit_service
    if _audit_service is None:
        _audit_service = AuditService()
    return _audit_service
|
|
|
|
|
|
def configure_audit_service(storage: Optional[AuditLogStorage] = None) -> AuditService:
    """
    Replace the global audit service with one using the given backend.

    Args:
        storage: Custom storage backend (None selects the default
            file-based storage)

    Returns:
        The newly configured AuditService instance.
    """
    global _audit_service
    _audit_service = AuditService(storage=storage)
    return _audit_service
|