Add input_validation_script/tests/test_core.py
parent 04f23b90a4
commit b4ae2d529d
1 changed file with 81 additions and 0 deletions
input_validation_script/tests/test_core.py (new file, 81 lines)
@@ -0,0 +1,81 @@
import json
from pathlib import Path

import pytest

from input_validation_script import core


@pytest.fixture
def valid_schema():
    # JSON Schema describing the expected shape of a drift report.
    return {
        "type": "object",
        "properties": {
            "policy_hash": {"type": "string"},
            "run_id": {"type": "string"},
            "pinned_flag": {"type": "boolean"},
            "mischfenster_p95": {"type": "number"},
            "metrics": {"type": "object"},
            "unknown_label": {"type": "string"}
        },
        "required": ["policy_hash", "run_id", "pinned_flag", "mischfenster_p95", "metrics"]
    }


@pytest.fixture
def valid_drift_report():
    # A report that satisfies valid_schema in full.
    return {
        "policy_hash": "abc123",
        "run_id": "run_001",
        "pinned_flag": True,
        "mischfenster_p95": 95.5,
        "metrics": {"accuracy": 0.98},
        "unknown_label": "none"
    }


def test_validate_json_with_valid_input(valid_drift_report, valid_schema, tmp_path):
    # A well-formed report read from disk validates cleanly.
    file_path = tmp_path / "valid.json"
    file_path.write_text(json.dumps(valid_drift_report))
    result = core.validate_json(str(file_path), valid_schema)
    assert isinstance(result, dict)
    assert result.get("valid") is True
    assert result.get("errors") == []


def test_validate_json_with_missing_field(valid_drift_report, valid_schema, tmp_path):
    # Dropping a required key must surface a "required property" error.
    invalid = dict(valid_drift_report)
    invalid.pop("policy_hash")
    p = tmp_path / "missing.json"
    p.write_text(json.dumps(invalid))
    result = core.validate_json(str(p), valid_schema)
    assert result["valid"] is False
    assert any("required property" in e for e in result["errors"])


def test_validate_json_with_invalid_type(valid_drift_report, valid_schema, tmp_path):
    # A string where the schema expects a boolean must fail type checking.
    invalid = dict(valid_drift_report)
    invalid["pinned_flag"] = "not_bool"
    p = tmp_path / "invalid_type.json"
    p.write_text(json.dumps(invalid))
    result = core.validate_json(str(p), valid_schema)
    assert result["valid"] is False
    assert any("type" in e.lower() for e in result["errors"])


def test_validate_json_with_parse_error(valid_schema, tmp_path):
    # Unparseable JSON is reported as a parse error, not a schema error.
    bad_json_path = tmp_path / "bad.json"
    bad_json_path.write_text('{bad json}')
    result = core.validate_json(str(bad_json_path), valid_schema)
    assert result["valid"] is False
    assert result.get("unknown_reason") == "parse_error"


def test_validate_json_with_missing_file(valid_schema):
    # A nonexistent path is flagged as a missing artefact.
    result = core.validate_json("/non/existent/path.json", valid_schema)
    assert result["valid"] is False
    assert result.get("unknown_reason") == "artefact_missing"


def test_validate_json_with_dict_input(valid_drift_report, valid_schema):
    # validate_json also accepts an in-memory dict instead of a file path.
    result = core.validate_json(valid_drift_report, valid_schema)
    assert result["valid"] is True
    assert result["errors"] == []
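Taken together, the tests pin down the contract for core.validate_json: it accepts either a path to a JSON file or an in-memory dict, always returns a dict with "valid" and "errors" keys, and reports inputs it could not read at all via an "unknown_reason" of "artefact_missing" or "parse_error". The core module itself is not part of this commit, so the following is a rough sketch only of an implementation that would satisfy these tests, assuming the jsonschema package is available (its error messages for missing required properties and type mismatches contain the substrings the assertions search for):

# Hypothetical sketch, not the committed module: one possible
# core.validate_json consistent with the tests above.
import json
from pathlib import Path

import jsonschema


def validate_json(source, schema):
    """Validate `source` (a dict or a path to a JSON file) against `schema`."""
    if isinstance(source, dict):
        data = source
    else:
        path = Path(source)
        if not path.exists():
            # Matches test_validate_json_with_missing_file.
            return {"valid": False, "errors": [], "unknown_reason": "artefact_missing"}
        try:
            data = json.loads(path.read_text())
        except json.JSONDecodeError:
            # Matches test_validate_json_with_parse_error.
            return {"valid": False, "errors": [], "unknown_reason": "parse_error"}

    # jsonschema's messages include "required property" and "is not of
    # type ...", which is exactly what the test assertions grep for.
    errors = [e.message for e in jsonschema.Draft7Validator(schema).iter_errors(data)]
    return {"valid": not errors, "errors": errors}

Run the suite with pytest input_validation_script/tests/test_core.py from the repository root.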