Add metrics_reporting/tests/test_core.py
This commit is contained in:
parent
6797229364
commit
1b88ea8e50
1 changed file with 92 additions and 0 deletions
92
metrics_reporting/tests/test_core.py
Normal file
92
metrics_reporting/tests/test_core.py
Normal file
|
|
@ -0,0 +1,92 @@
|
|||
import json
|
||||
import pytest
|
||||
from pathlib import Path
|
||||
from unittest import mock
|
||||
|
||||
import metrics_reporting.core as core
|
||||
|
||||
@pytest.fixture()
def temp_output_file(tmp_path):
    """Provide the path of a (not yet created) metrics report inside a temp dir."""
    return tmp_path / "metrics_report.json"
|
||||
|
||||
@pytest.mark.parametrize(
    "run_number, warn_rate, unknown_rate, unpin_delta_t",
    [
        (1, 0.1, 0.05, 0.02),
        (5, 0.0, 0.0, 0.0),
        (10, 0.25, 0.33, 0.1),
    ],
)
def test_report_metrics_creates_valid_json(monkeypatch, tmp_path, run_number, warn_rate, unknown_rate, unpin_delta_t):
    """report_metrics must serialize the given metrics via json.dump as a dict.

    File I/O and serialization are both mocked so no real file is written;
    the test inspects the data structure handed to json.dump.
    """
    # NOTE(review): the original also computed an unused `output_path`
    # local; removed as dead code.
    mocked_open = mock.mock_open()
    with mock.patch("builtins.open", mocked_open), mock.patch("json.dump") as mock_dump:
        core.report_metrics(run_number, warn_rate, unknown_rate, unpin_delta_t)

    # Verify json.dump received the expected data structure.
    assert mock_dump.called, "json.dump should have been called"
    args, kwargs = mock_dump.call_args
    data = args[0]
    assert isinstance(data, dict)
    assert data["run_number"] == run_number
    # approx guards against float representation noise.
    assert pytest.approx(data["warn_rate"]) == warn_rate
    assert pytest.approx(data["unknown_rate"]) == unknown_rate
    assert pytest.approx(data["unpin_delta_t"]) == unpin_delta_t
|
||||
|
||||
|
||||
def test_invalid_inputs_raise_exception():
    """report_metrics must reject non-numeric arguments with TypeError or ValueError."""
    bad_calls = (
        ("not_int", 0.1, 0.05, 0.02),  # run_number is not an int
        (1, "x", 0.05, 0.02),          # warn_rate is not numeric
        (1, 0.1, None, 0.02),          # unknown_rate is None
        (1, 0.1, 0.05, "y"),           # unpin_delta_t is not numeric
    )

    for run_number, warn_rate, unknown_rate, unpin_delta_t in bad_calls:
        with pytest.raises((TypeError, ValueError)):
            core.report_metrics(run_number, warn_rate, unknown_rate, unpin_delta_t)
|
||||
|
||||
|
||||
def test_report_metrics_writes_file(tmp_path):
    """report_metrics must open an output file and dump JSON into it.

    The original test patched pathlib.Path with itself (a no-op) and then
    asserted `len(created_files) >= 0`, which is a tautology — the test could
    never fail. Replaced with assertions on the observable file-writing
    behavior via mocked open/json.dump, so no real file is needed.
    """
    mocked_open = mock.mock_open()
    with mock.patch("builtins.open", mocked_open), mock.patch("json.dump") as mock_dump:
        core.report_metrics(1, 0.1, 0.05, 0.02)

    assert mocked_open.called, "report_metrics should open an output file"
    assert mock_dump.called, "report_metrics should serialize the report via json.dump"
|
||||
|
||||
|
||||
def test_report_metrics_json_structure(monkeypatch):
    """The dict handed to json.dump must contain all required top-level keys."""
    run_number, wr, ur, dt = 2, 0.15, 0.07, 0.03

    with mock.patch("json.dump") as mock_dump:
        core.report_metrics(run_number, wr, ur, dt)
    (report_data, *_), _ = mock_dump.call_args

    # Validate key set — extra keys are tolerated, missing keys are not.
    required = {"run_number", "warn_rate", "unknown_rate", "unpin_delta_t", "timestamp"}
    assert required <= set(report_data.keys())
|
||||
|
||||
|
||||
def test_report_metrics_handles_edge_values(monkeypatch):
    """Zero and negative metric values must pass through to the report unchanged."""
    with mock.patch("json.dump") as patched_dump:
        core.report_metrics(0, 0.0, 0.0, 0.0)
    zero_report = patched_dump.call_args[0][0]
    assert zero_report["warn_rate"] == 0.0

    with mock.patch("json.dump") as patched_dump:
        core.report_metrics(1, -0.1, -0.05, -0.02)
    negative_report = patched_dump.call_args[0][0]
    assert negative_report["run_number"] == 1
    assert negative_report["warn_rate"] == -0.1
|
||||
Loading…
Reference in a new issue