Add artifact.preflight_checker/tests/test_core.py
This commit is contained in:
parent
22fdb193fc
commit
dc3a73ea3a
1 changed file with 69 additions and 0 deletions
69
artifact.preflight_checker/tests/test_core.py
Normal file
69
artifact.preflight_checker/tests/test_core.py
Normal file
|
|
@ -0,0 +1,69 @@
|
|||
"""Tests for artifact_preflight_checker.core.run_preflight_check.

The module is skipped entirely when the package under test is not
importable, so these tests never hard-fail in environments where the
artifact is absent.
"""
import pytest
import types
import sys
from pathlib import Path

# `find_spec` lives in the `importlib.util` submodule.  A bare
# `import importlib` does not guarantee that `importlib.util` is bound
# as an attribute, so import the submodule explicitly.
import importlib
import importlib.util

# Probe for the package core without importing it; skip the whole test
# module if it is not available.
spec = importlib.util.find_spec('artifact_preflight_checker.core')
if spec is None:
    pytest.skip('artifact_preflight_checker.core module not available', allow_module_level=True)

core = importlib.import_module('artifact_preflight_checker.core')

run_preflight_check = getattr(core, 'run_preflight_check', None)
# Without this guard every test would crash with
# "TypeError: 'NoneType' object is not callable" instead of skipping.
if run_preflight_check is None:
    pytest.skip('run_preflight_check not available in core', allow_module_level=True)
|
||||
|
||||
@pytest.fixture
def sample_runs_valid():
    """Three runs whose near_expiry_unpinned values are within tolerance."""
    measurements = (0.1, 0.15, 0.2)
    return [
        {"run_id": index, "near_expiry_unpinned": value}
        for index, value in enumerate(measurements, start=1)
    ]
|
||||
|
||||
|
||||
@pytest.fixture
def sample_runs_invalid():
    """Two runs whose near_expiry_unpinned values fall outside tolerance."""
    return [
        {"run_id": run_id, "near_expiry_unpinned": value}
        for run_id, value in ((1, 0.5), (2, 0.7))
    ]
|
||||
|
||||
|
||||
def test_run_preflight_check_nominal(sample_runs_valid):
    """Result is a dict with exactly the freeze_* keys and expected value types."""
    outcome = run_preflight_check(sample_runs_valid)
    assert isinstance(outcome, dict)
    assert set(outcome.keys()) == {"freeze_ok", "freeze_target", "freeze_tol"}
    # Each field carries its documented type.
    assert isinstance(outcome["freeze_target"], float)
    assert isinstance(outcome["freeze_tol"], float)
    assert isinstance(outcome["freeze_ok"], bool)
|
||||
|
||||
|
||||
def test_run_preflight_check_within_tolerance(sample_runs_valid):
    """Runs whose values sit inside the tolerance band report freeze_ok True."""
    outcome = run_preflight_check(sample_runs_valid)
    assert outcome["freeze_ok"] is True, "Expected freeze_ok True for values within tolerance"
|
||||
|
||||
|
||||
def test_run_preflight_check_outside_tolerance(sample_runs_invalid):
    """Runs whose values leave the tolerance band report freeze_ok False."""
    outcome = run_preflight_check(sample_runs_invalid)
    assert outcome["freeze_ok"] is False, "Expected freeze_ok False for values outside tolerance"
|
||||
|
||||
|
||||
def test_run_preflight_check_empty_list():
    """An empty run list must be rejected with an exception."""
    no_runs = []
    with pytest.raises((ValueError, AssertionError, TypeError)):
        run_preflight_check(no_runs)
|
||||
|
||||
|
||||
def test_run_preflight_check_missing_field():
    """Records lacking the near_expiry_unpinned field must raise."""
    malformed = [{"id": 1, "temp": 0.2}]
    with pytest.raises((KeyError, ValueError, AssertionError)):
        run_preflight_check(malformed)
|
||||
|
||||
|
||||
def test_run_preflight_check_type_validation(sample_runs_valid):
    """Stringified measurement values must be rejected."""
    corrupted = []
    for run in sample_runs_valid:
        copy = dict(run)
        # Replace the float with its string form to trip type validation.
        copy["near_expiry_unpinned"] = str(copy["near_expiry_unpinned"])
        corrupted.append(copy)
    with pytest.raises((TypeError, AssertionError, ValueError)):
        run_preflight_check(corrupted)
|
||||
Loading…
Reference in a new issue