"""Tests for ``artifact_preflight_checker.core.run_preflight_check``.

The whole module is skipped when the package under test (or the function
it exports) is not importable, so these tests are safe to collect in
environments where the project is not installed.
"""

import importlib
import importlib.util  # explicit: `import importlib` alone does not expose `.util`

import pytest

# Skip the entire module when the package under test is unavailable.
if importlib.util.find_spec('artifact_preflight_checker.core') is None:
    pytest.skip('artifact_preflight_checker.core module not available',
                allow_module_level=True)

core = importlib.import_module('artifact_preflight_checker.core')

run_preflight_check = getattr(core, 'run_preflight_check', None)
# Fail fast with a clear skip rather than a confusing "NoneType is not
# callable" in every test below.
if run_preflight_check is None:
    pytest.skip('run_preflight_check not exported by core',
                allow_module_level=True)


@pytest.fixture
def sample_runs_valid():
    """Runs whose ``near_expiry_unpinned`` values are expected to pass."""
    return [
        {"run_id": 1, "near_expiry_unpinned": 0.1},
        {"run_id": 2, "near_expiry_unpinned": 0.15},
        {"run_id": 3, "near_expiry_unpinned": 0.2},
    ]


@pytest.fixture
def sample_runs_invalid():
    """Runs whose ``near_expiry_unpinned`` values are expected to fail."""
    return [
        {"run_id": 1, "near_expiry_unpinned": 0.5},
        {"run_id": 2, "near_expiry_unpinned": 0.7},
    ]


def test_run_preflight_check_nominal(sample_runs_valid):
    """Result is a dict with exactly the three documented keys and types."""
    result = run_preflight_check(sample_runs_valid)
    assert isinstance(result, dict)
    assert set(result.keys()) == {"freeze_ok", "freeze_target", "freeze_tol"}
    assert isinstance(result["freeze_target"], float)
    assert isinstance(result["freeze_tol"], float)
    assert isinstance(result["freeze_ok"], bool)


def test_run_preflight_check_within_tolerance(sample_runs_valid):
    result = run_preflight_check(sample_runs_valid)
    assert result["freeze_ok"] is True, "Expected freeze_ok True for values within tolerance"


def test_run_preflight_check_outside_tolerance(sample_runs_invalid):
    result = run_preflight_check(sample_runs_invalid)
    assert result["freeze_ok"] is False, "Expected freeze_ok False for values outside tolerance"


def test_run_preflight_check_empty_list():
    """An empty run list must be rejected, not silently accepted."""
    with pytest.raises((ValueError, AssertionError, TypeError)):
        run_preflight_check([])


def test_run_preflight_check_missing_field():
    """Records lacking the near_expiry_unpinned field must be rejected."""
    bad_data = [{"id": 1, "temp": 0.2}]
    with pytest.raises((KeyError, ValueError, AssertionError)):
        run_preflight_check(bad_data)


def test_run_preflight_check_type_validation(sample_runs_valid):
    """Stringified metric values must be rejected by type validation."""
    runs = [{**r, "near_expiry_unpinned": str(r["near_expiry_unpinned"])}
            for r in sample_runs_valid]
    with pytest.raises((TypeError, AssertionError, ValueError)):
        run_preflight_check(runs)