Add GET /api/dashboard/bans/trend endpoint

Implement time-bucketed ban aggregation for dashboard trend charts:

- Add BanTrendBucket / BanTrendResponse Pydantic models and
  BUCKET_SECONDS / BUCKET_SIZE_LABEL / bucket_count helpers to ban.py
- Add ban_service.ban_trend(): queries the fail2ban DB with SQL bucket
  grouping, fills zero-count buckets, and respects the origin filter
- Add GET /api/dashboard/bans/trend route in dashboard.py
- 20 new tests (10 service, 10 router); 480 total pass, 83% coverage
- ruff + mypy --strict clean
This commit is contained in:
@@ -302,9 +302,10 @@ class TestListBansBatchGeoEnrichment:
|
||||
self, f2b_db_path: str
|
||||
) -> None:
|
||||
"""Geo fields are populated via lookup_batch when http_session is given."""
|
||||
from app.services.geo_service import GeoInfo
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
from app.services.geo_service import GeoInfo
|
||||
|
||||
fake_session = MagicMock()
|
||||
fake_geo_map = {
|
||||
"1.2.3.4": GeoInfo(country_code="DE", country_name="Germany", asn="AS3320", org="Deutsche Telekom"),
|
||||
@@ -357,9 +358,10 @@ class TestListBansBatchGeoEnrichment:
|
||||
self, f2b_db_path: str
|
||||
) -> None:
|
||||
"""When both http_session and geo_enricher are provided, batch wins."""
|
||||
from app.services.geo_service import GeoInfo
|
||||
from unittest.mock import MagicMock
|
||||
|
||||
from app.services.geo_service import GeoInfo
|
||||
|
||||
fake_session = MagicMock()
|
||||
fake_geo_map = {
|
||||
"1.2.3.4": GeoInfo(country_code="DE", country_name="Germany", asn=None, org=None),
|
||||
@@ -610,3 +612,167 @@ class TestOriginFilter:
|
||||
)
|
||||
|
||||
assert result.total == 3
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# ban_trend
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestBanTrend:
    """Verify ban_service.ban_trend() behaviour."""

    @staticmethod
    def _patched_db_path(path: str):
        """Return a patch() context manager that redirects the service's
        fail2ban DB-path lookup to *path*.

        Extracted because the identical four-line patch() block was repeated
        in every test below. patch() returns a fresh context manager per
        call, so behaviour is unchanged.
        """
        return patch(
            "app.services.ban_service._get_fail2ban_db_path",
            new=AsyncMock(return_value=path),
        )

    async def test_24h_returns_24_buckets(self, empty_f2b_db_path: str) -> None:
        """``range_='24h'`` always yields exactly 24 buckets."""
        with self._patched_db_path(empty_f2b_db_path):
            result = await ban_service.ban_trend("/fake/sock", "24h")

        assert len(result.buckets) == 24
        assert result.bucket_size == "1h"

    async def test_7d_returns_28_buckets(self, empty_f2b_db_path: str) -> None:
        """``range_='7d'`` yields 28 six-hour buckets."""
        with self._patched_db_path(empty_f2b_db_path):
            result = await ban_service.ban_trend("/fake/sock", "7d")

        assert len(result.buckets) == 28
        assert result.bucket_size == "6h"

    async def test_30d_returns_30_buckets(self, empty_f2b_db_path: str) -> None:
        """``range_='30d'`` yields 30 daily buckets."""
        with self._patched_db_path(empty_f2b_db_path):
            result = await ban_service.ban_trend("/fake/sock", "30d")

        assert len(result.buckets) == 30
        assert result.bucket_size == "1d"

    async def test_365d_bucket_size_label(self, empty_f2b_db_path: str) -> None:
        """``range_='365d'`` uses '7d' as the bucket size label."""
        with self._patched_db_path(empty_f2b_db_path):
            result = await ban_service.ban_trend("/fake/sock", "365d")

        assert result.bucket_size == "7d"
        assert len(result.buckets) > 0

    async def test_empty_db_all_buckets_zero(self, empty_f2b_db_path: str) -> None:
        """All bucket counts are zero when the database has no bans."""
        with self._patched_db_path(empty_f2b_db_path):
            result = await ban_service.ban_trend("/fake/sock", "24h")

        assert all(b.count == 0 for b in result.buckets)

    async def test_buckets_are_time_ordered(self, empty_f2b_db_path: str) -> None:
        """Buckets are ordered chronologically (ascending timestamps)."""
        with self._patched_db_path(empty_f2b_db_path):
            result = await ban_service.ban_trend("/fake/sock", "7d")

        timestamps = [b.timestamp for b in result.buckets]
        assert timestamps == sorted(timestamps)

    async def test_bans_counted_in_correct_bucket(self, tmp_path: Path) -> None:
        """A ban at a known time appears in the expected bucket."""
        import time as _time

        now = int(_time.time())
        # Place a ban exactly 30 minutes ago — should land in bucket 0 of a 24h range
        # (the most recent hour bucket when 'since' is ~24 h ago).
        thirty_min_ago = now - 1800
        path = str(tmp_path / "test_bucket.sqlite3")
        await _create_f2b_db(
            path,
            [{"jail": "sshd", "ip": "1.2.3.4", "timeofban": thirty_min_ago}],
        )

        with self._patched_db_path(path):
            result = await ban_service.ban_trend("/fake/sock", "24h")

        # Total ban count across all buckets must be exactly 1.
        assert sum(b.count for b in result.buckets) == 1

    async def test_origin_filter_blocklist(self, tmp_path: Path) -> None:
        """``origin='blocklist'`` counts only blocklist-import bans."""
        import time as _time

        now = int(_time.time())
        one_hour_ago = now - 3600
        path = str(tmp_path / "test_trend_origin.sqlite3")
        await _create_f2b_db(
            path,
            [
                {"jail": "blocklist-import", "ip": "10.0.0.1", "timeofban": one_hour_ago},
                {"jail": "sshd", "ip": "10.0.0.2", "timeofban": one_hour_ago},
            ],
        )

        with self._patched_db_path(path):
            result = await ban_service.ban_trend(
                "/fake/sock", "24h", origin="blocklist"
            )

        # Only the blocklist-import row may be counted.
        assert sum(b.count for b in result.buckets) == 1

    async def test_origin_filter_selfblock(self, tmp_path: Path) -> None:
        """``origin='selfblock'`` excludes blocklist-import bans."""
        import time as _time

        now = int(_time.time())
        one_hour_ago = now - 3600
        path = str(tmp_path / "test_trend_selfblock.sqlite3")
        await _create_f2b_db(
            path,
            [
                {"jail": "blocklist-import", "ip": "10.0.0.1", "timeofban": one_hour_ago},
                {"jail": "sshd", "ip": "10.0.0.2", "timeofban": one_hour_ago},
                {"jail": "nginx", "ip": "10.0.0.3", "timeofban": one_hour_ago},
            ],
        )

        with self._patched_db_path(path):
            result = await ban_service.ban_trend(
                "/fake/sock", "24h", origin="selfblock"
            )

        # sshd + nginx count; blocklist-import is excluded.
        assert sum(b.count for b in result.buckets) == 2

    async def test_each_bucket_has_iso_timestamp(self, empty_f2b_db_path: str) -> None:
        """Every bucket timestamp is a valid ISO 8601 string."""
        from datetime import datetime

        with self._patched_db_path(empty_f2b_db_path):
            result = await ban_service.ban_trend("/fake/sock", "24h")

        for bucket in result.buckets:
            # datetime.fromisoformat raises ValueError on invalid input.
            parsed = datetime.fromisoformat(bucket.timestamp)
            assert parsed.tzinfo is not None  # Must be timezone-aware (UTC)
|
||||
|
||||
|
||||
Reference in New Issue
Block a user