Add outlier_analysis/tests/test_core.py
This commit is contained in:
parent
c55c7dfa8f
commit
c959844608
1 changed file with 66 additions and 0 deletions
66
outlier_analysis/tests/test_core.py
Normal file
66
outlier_analysis/tests/test_core.py
Normal file
|
|
@ -0,0 +1,66 @@
|
|||
import pytest
|
||||
import math
|
||||
from typing import List, Dict
|
||||
from outlier_analysis import core
|
||||
|
||||
|
||||
@pytest.fixture
def sample_log_data() -> List[Dict]:
    """Synthetic latency log records covering three runs.

    Known properties relied on by the tests below: run "18" and run "19"
    each contain exactly two records above 90 ms, run "20" contains none.
    """
    columns = (
        "run_id",
        "latency_ms",
        "stratum",
        "job_parallelism",
        "retry_total_overhead_ms",
    )
    rows = [
        ("18", 45.0, "near-expiry-unpinned", 4, 0.0),
        ("18", 120.0, "near-expiry-unpinned", 4, 10.0),
        ("18", 95.0, "pinned", 4, 5.0),
        ("19", 80.0, "pinned", 8, 0.0),
        ("19", 101.0, "unpinned", 8, 3.5),
        ("19", 200.0, "unpinned", 8, 15.0),
        ("20", 49.0, "pinned", 2, 0.0),
        ("20", 52.0, "pinned", 2, 0.0),
    ]
    return [dict(zip(columns, row)) for row in rows]
|
||||
|
||||
|
||||
def test_analyze_outliers_returns_valid_structure(sample_log_data):
    """Every per-run summary must expose the full, correctly typed schema."""
    report = core.analyze_outliers(sample_log_data)

    assert isinstance(report, dict)
    # Each value in the report is itself a per-run summary dict.
    for info in report.values():
        assert isinstance(info, dict)

    expected_fields = {"run_id", "outlier_count", "latency_distribution"}
    for run_id, info in report.items():
        # No missing and no extra top-level keys.
        assert set(info.keys()) == expected_fields
        assert isinstance(info["run_id"], str)
        assert isinstance(info["outlier_count"], int)

        latency = info["latency_distribution"]
        assert isinstance(latency, dict)
        # All four quantile/extreme metrics must be present and numeric.
        for metric in ("p50", "p95", "p99", "max"):
            assert metric in latency
            assert isinstance(latency[metric], (int, float))
|
||||
|
||||
|
||||
def test_outlier_detection_counts(sample_log_data):
    """Outlier counts per run match the fixture's known >90 ms record counts."""
    report = core.analyze_outliers(sample_log_data)

    # run 18: two records above 90 ms; run 19: two; run 20: none.
    expected_counts = {"18": 2, "19": 2, "20": 0}
    for run_id, count in expected_counts.items():
        assert report[run_id]["outlier_count"] == count
|
||||
|
||||
|
||||
def test_latency_distribution_values(sample_log_data):
    """Latency metrics are non-negative and ordered p50 <= p95 <= p99 <= max."""
    report = core.analyze_outliers(sample_log_data)

    for summary in report.values():
        percentiles = summary["latency_distribution"]
        for name, value in percentiles.items():
            assert value >= 0.0, f"Latency metric {name} should be non-negative"
        # The quantile sequence must be monotonically non-decreasing.
        ordered = (percentiles["p50"], percentiles["p95"],
                   percentiles["p99"], percentiles["max"])
        assert list(ordered) == sorted(ordered)
|
||||
|
||||
|
||||
def test_empty_input_returns_empty():
    """An empty log list yields an empty (but still dict-typed) report."""
    report = core.analyze_outliers([])
    assert isinstance(report, dict)
    assert report == {}
|
||||
|
||||
|
||||
def test_invalid_input_type_raises():
    """A non-list payload (None) must be rejected, not silently tolerated."""
    with pytest.raises((TypeError, AssertionError)):
        core.analyze_outliers(None)  # type: ignore
|
||||
Loading…
Reference in a new issue