Make geo lookups non-blocking with bulk DB writes and background tasks
This commit is contained in:
@@ -767,3 +767,147 @@ class TestErrorLogging:
|
||||
assert event["exc_type"] == "_EmptyMessageError"
|
||||
assert "_EmptyMessageError" in event["error"]
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# lookup_cached_only (Task 3)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestLookupCachedOnly:
    """lookup_cached_only() serves cache hits without ever touching the API."""

    def test_returns_cached_ips(self) -> None:
        """IPs already in the cache are returned in the geo_map."""
        # Seed the positive cache directly, bypassing any lookup path.
        geo_service._cache["1.1.1.1"] = GeoInfo(  # type: ignore[attr-defined]
            country_code="AU", country_name="Australia", asn="AS13335", org="Cloudflare"
        )

        hits, misses = geo_service.lookup_cached_only(["1.1.1.1"])

        assert "1.1.1.1" in hits
        assert hits["1.1.1.1"].country_code == "AU"
        assert misses == []

    def test_returns_uncached_ips(self) -> None:
        """IPs not in the cache appear in the uncached list."""
        hits, misses = geo_service.lookup_cached_only(["9.9.9.9"])

        assert "9.9.9.9" not in hits
        assert "9.9.9.9" in misses

    def test_neg_cached_ips_excluded_from_uncached(self) -> None:
        """IPs in the negative cache within TTL are not re-queued as uncached."""
        from time import monotonic

        # A fresh timestamp keeps the negative entry inside its TTL window.
        geo_service._neg_cache["10.0.0.1"] = monotonic()  # type: ignore[attr-defined]

        hits, misses = geo_service.lookup_cached_only(["10.0.0.1"])

        assert "10.0.0.1" not in hits
        assert "10.0.0.1" not in misses

    def test_expired_neg_cache_requeued(self) -> None:
        """IPs whose neg-cache entry has expired are listed as uncached."""
        # Monotonic epoch 0 predates any plausible TTL, so the entry is stale.
        geo_service._neg_cache["10.0.0.2"] = 0.0  # type: ignore[attr-defined]

        _hits, misses = geo_service.lookup_cached_only(["10.0.0.2"])

        assert "10.0.0.2" in misses

    def test_mixed_ips(self) -> None:
        """A mix of cached, neg-cached, and unknown IPs is split correctly."""
        from time import monotonic

        geo_service._cache["1.2.3.4"] = GeoInfo(  # type: ignore[attr-defined]
            country_code="DE", country_name="Germany", asn=None, org=None
        )
        geo_service._neg_cache["5.5.5.5"] = monotonic()  # type: ignore[attr-defined]

        hits, misses = geo_service.lookup_cached_only(["1.2.3.4", "5.5.5.5", "9.9.9.9"])

        # Only the positively cached IP surfaces; the neg-cached one is
        # suppressed entirely and the unknown one is queued for lookup.
        assert list(hits.keys()) == ["1.2.3.4"]
        assert misses == ["9.9.9.9"]

    def test_deduplication(self) -> None:
        """Duplicate IPs in the input appear at most once in the output."""
        geo_service._cache["1.2.3.4"] = GeoInfo(  # type: ignore[attr-defined]
            country_code="US", country_name="United States", asn=None, org=None
        )

        hits, misses = geo_service.lookup_cached_only(
            ["9.9.9.9", "9.9.9.9", "1.2.3.4", "1.2.3.4"]
        )

        assert len([ip for ip in hits if ip == "1.2.3.4"]) == 1
        assert misses.count("9.9.9.9") == 1
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Bulk DB writes via executemany (Task 3)
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestLookupBatchBulkWrites:
    """lookup_batch() persists results with executemany, never per-IP execute."""

    async def test_executemany_called_for_successful_ips(self) -> None:
        """When multiple IPs resolve successfully, a single executemany write occurs."""
        targets = ["1.1.1.1", "2.2.2.2", "3.3.3.3"]
        payload = [
            {
                "query": ip,
                "status": "success",
                "countryCode": "DE",
                "country": "Germany",
                "as": "AS3320",
                "org": "Telekom",
            }
            for ip in targets
        ]
        session = _make_batch_session(payload)
        db = _make_async_db()

        await geo_service.lookup_batch(targets, session, db=db)  # type: ignore[arg-type]

        # All positive rows land in one bulk write...
        assert db.executemany.await_count >= 1
        # ...and no row goes through the per-IP execute() path.
        db.execute.assert_not_awaited()

    async def test_executemany_called_for_failed_ips(self) -> None:
        """When IPs fail resolution, a single executemany write covers neg entries."""
        targets = ["10.0.0.1", "10.0.0.2"]
        payload = [
            {"query": ip, "status": "fail", "message": "private range"}
            for ip in targets
        ]
        session = _make_batch_session(payload)
        db = _make_async_db()

        await geo_service.lookup_batch(targets, session, db=db)  # type: ignore[arg-type]

        assert db.executemany.await_count >= 1
        db.execute.assert_not_awaited()

    async def test_mixed_results_two_executemany_calls(self) -> None:
        """A mix of successful and failed IPs produces two executemany calls."""
        targets = ["1.1.1.1", "10.0.0.1"]
        payload = [
            {
                "query": "1.1.1.1",
                "status": "success",
                "countryCode": "AU",
                "country": "Australia",
                "as": "AS13335",
                "org": "Cloudflare",
            },
            {"query": "10.0.0.1", "status": "fail", "message": "private range"},
        ]
        session = _make_batch_session(payload)
        db = _make_async_db()

        await geo_service.lookup_batch(targets, session, db=db)  # type: ignore[arg-type]

        # Exactly two bulk writes: one for positives, one for negatives.
        assert db.executemany.await_count == 2
        db.execute.assert_not_awaited()
|
||||
|
||||
|
||||
Reference in New Issue
Block a user