From 1c2ed54b685c688b2289466a8e160bb468b5cc88 Mon Sep 17 00:00:00 2001
From: Mika
Date: Mon, 16 Mar 2026 13:59:13 +0000
Subject: [PATCH] Add simulation_tool/tests/test_core.py

---
 simulation_tool/tests/test_core.py | 60 ++++++++++++++++++++++++++++++
 1 file changed, 60 insertions(+)
 create mode 100644 simulation_tool/tests/test_core.py

diff --git a/simulation_tool/tests/test_core.py b/simulation_tool/tests/test_core.py
new file mode 100644
index 0000000..ad87161
--- /dev/null
+++ b/simulation_tool/tests/test_core.py
@@ -0,0 +1,60 @@
+"""Unit tests for simulation_tool.core.simulate_scheduling and its results."""
+import pytest
+import numpy as np
+from simulation_tool.core import simulate_scheduling
+from simulation_tool.models import SimulationResults
+
+@pytest.fixture
+def basic_schedule_params():
+    """Minimal scheduling configuration: small FIFO pool, no CPU affinity."""
+    return {"threads": 4, "queue_type": "fifo", "affinity": False}
+
+@pytest.fixture
+def complex_schedule_params():
+    """Larger priority-queue configuration with CPU affinity enabled."""
+    return {"threads": 8, "queue_type": "priority", "affinity": True}
+
+def test_simulate_scheduling_returns_expected_type(basic_schedule_params):
+    """The simulator returns a SimulationResults with correctly typed fields."""
+    result = simulate_scheduling(basic_schedule_params)
+    assert isinstance(result, SimulationResults)
+    assert isinstance(result.time_distributions, list)
+    assert isinstance(result.outlier_occurrences, int)
+
+def test_simulate_scheduling_time_distributions_are_valid(basic_schedule_params):
+    """All simulated times are non-negative floats."""
+    result = simulate_scheduling(basic_schedule_params)
+    assert all(isinstance(v, float) for v in result.time_distributions)
+    assert all(v >= 0.0 for v in result.time_distributions)
+
+def test_simulate_scheduling_outliers_count_coherence(basic_schedule_params):
+    """outlier_occurrences roughly matches an independent mean + 3*sigma recount."""
+    result = simulate_scheduling(basic_schedule_params)
+    mean_val = np.mean(result.time_distributions)
+    std_val = np.std(result.time_distributions)
+    threshold = mean_val + 3 * std_val
+    counted_outliers = sum(1 for v in result.time_distributions if v > threshold)
+    # tolerate small randomness: the simulator may apply a slightly different cutoff
+    assert abs(counted_outliers - result.outlier_occurrences) <= 2
+
+def test_simulate_scheduling_with_complex_params(complex_schedule_params):
+    """The simulator also produces non-empty output for the larger configuration."""
+    result = simulate_scheduling(complex_schedule_params)
+    assert isinstance(result.time_distributions, list)
+    assert len(result.time_distributions) > 0
+    assert result.outlier_occurrences >= 0
+
+def test_to_json_roundtrip(basic_schedule_params):
+    """to_json() exposes exactly the two public fields with plain JSON types."""
+    result = simulate_scheduling(basic_schedule_params)
+    json_obj = result.to_json()
+    assert set(json_obj.keys()) == {"time_distributions", "outlier_occurrences"}
+    assert isinstance(json_obj["time_distributions"], list)
+    assert isinstance(json_obj["outlier_occurrences"], int)
+
+def test_invalid_input_raises_error():
+    """Bad parameter payloads are rejected with TypeError or ValueError."""
+    with pytest.raises((TypeError, ValueError)):
+        simulate_scheduling(None)
+    with pytest.raises((TypeError, ValueError)):
+        simulate_scheduling({"threads": "NaN"})