Add generate_report/tests/test_core.py
parent 64042606c5
commit 5cb9e58f28

1 changed file with 72 additions and 0 deletions
generate_report/tests/test_core.py (new file, +72)
@@ -0,0 +1,72 @@
import pytest
from datetime import datetime
from generate_report import core


def example_latency_results():
    return [
        {
            "batch_id": "b1",
            "metric": "p50",
            "value_ms": 120.5,
            "timestamp": datetime(2024, 1, 1, 12, 0, 0).isoformat(),
        },
        {
            "batch_id": "b1",
            "metric": "p95",
            "value_ms": 145.2,
            "timestamp": datetime(2024, 1, 1, 12, 0, 10).isoformat(),
        },
    ]


def example_analysis_summary():
    return {
        "batch_count": 1,
        "avg_latency": 132.85,
        "max_latency": 145.2,
        "drift_detected": False,
    }


def test_create_report_nominal():
    latency_results = example_latency_results()
    analysis_summary = example_analysis_summary()

    report = core.create_report(latency_results, analysis_summary)

    assert isinstance(report, dict)
    assert "title" in report
    assert "created_at" in report
    assert "summary_text" in report
    assert "metrics" in report

    assert report["title"] == "Batch Latency Measurement Report"
    assert any(m["metric"] == "p50" for m in report["metrics"])


def test_create_report_empty_inputs():
    report = core.create_report([], {})
    assert isinstance(report, dict)
    assert report["metrics"] == []


def test_create_report_type_validation():
    with pytest.raises((TypeError, ValueError)):
        core.create_report("not_a_list", example_analysis_summary())
    with pytest.raises((TypeError, ValueError)):
        core.create_report(example_latency_results(), "not_a_dict")


def test_create_report_summary_content():
    latency_results = example_latency_results()
    analysis_summary = example_analysis_summary()

    report = core.create_report(latency_results, analysis_summary)
    summary_text = report.get("summary_text", "")

    assert str(analysis_summary["avg_latency"]) in summary_text
    if analysis_summary["drift_detected"]:
        assert "drift" in summary_text.lower()
    else:
        assert "drift" not in summary_text.lower()
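
Note: generate_report/core.py itself is not part of this commit. For context, a minimal sketch of a create_report implementation that would satisfy the assertions above could look like the following; the field names, the title string, and the validation behavior are taken directly from the tests, while the exact summary wording and timestamp handling are assumptions.

# Hypothetical sketch of generate_report/core.py (not in this commit).
# Field names and validation behavior are inferred from the tests above;
# summary wording and timestamp format are assumptions.
from datetime import datetime, timezone


def create_report(latency_results, analysis_summary):
    # test_create_report_type_validation expects TypeError/ValueError
    # for non-list / non-dict inputs.
    if not isinstance(latency_results, list):
        raise TypeError("latency_results must be a list")
    if not isinstance(analysis_summary, dict):
        raise TypeError("analysis_summary must be a dict")

    avg_latency = analysis_summary.get("avg_latency")
    drift_detected = analysis_summary.get("drift_detected", False)

    # The summary text must contain str(avg_latency) and mention drift
    # only when drift was detected (test_create_report_summary_content).
    summary_text = f"Average latency: {avg_latency} ms."
    if drift_detected:
        summary_text += " Latency drift detected."

    return {
        "title": "Batch Latency Measurement Report",
        "created_at": datetime.now(timezone.utc).isoformat(),
        "summary_text": summary_text,
        "metrics": list(latency_results),
    }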