Add simulation_tool/tests/test_core.py
This commit is contained in:
parent
591e55928a
commit
1c2ed54b68
1 changed files with 51 additions and 0 deletions
51
simulation_tool/tests/test_core.py
Normal file
51
simulation_tool/tests/test_core.py
Normal file
|
|
@@ -0,0 +1,51 @@
|
||||||
|
import pytest
|
||||||
|
import math
|
||||||
|
import numpy as np
|
||||||
|
from simulation_tool.core import simulate_scheduling
|
||||||
|
from simulation_tool.models import SimulationResults
|
||||||
|
|
||||||
|
@pytest.fixture
def basic_schedule_params():
    """Minimal scheduling configuration: 4 threads, FIFO queue, no affinity."""
    params = dict(threads=4, queue_type="fifo", affinity=False)
    return params
|
||||||
|
|
||||||
|
@pytest.fixture
def complex_schedule_params():
    """Heavier scheduling configuration: 8 threads, priority queue, affinity on."""
    params = dict(threads=8, queue_type="priority", affinity=True)
    return params
|
||||||
|
|
||||||
|
def test_simulate_scheduling_returns_expected_type(basic_schedule_params):
    """A simulation run must yield a SimulationResults with list/int fields."""
    outcome = simulate_scheduling(basic_schedule_params)
    assert isinstance(outcome, SimulationResults)
    # The two public fields must carry the documented container/scalar types.
    assert isinstance(outcome.time_distributions, list)
    assert isinstance(outcome.outlier_occurrences, int)
|
||||||
|
|
||||||
|
def test_simulate_scheduling_time_distributions_are_valid(basic_schedule_params):
    """Every sampled time must be a non-negative float."""
    outcome = simulate_scheduling(basic_schedule_params)
    for sample in outcome.time_distributions:
        assert isinstance(sample, float)
        assert sample >= 0.0
|
||||||
|
|
||||||
|
def test_simulate_scheduling_outliers_count_coherence(basic_schedule_params):
    """outlier_occurrences should roughly agree with a 3-sigma recount.

    Recomputes the outlier count from the raw samples using a
    mean + 3*std threshold and checks it is within a small tolerance of
    the count reported by the simulation itself.
    """
    result = simulate_scheduling(basic_schedule_params)
    # Guard: np.mean/np.std of an empty list produce NaN (plus a RuntimeWarning),
    # which would make the final comparison fail opaquely. Fail fast with a
    # clear message instead.
    assert result.time_distributions, "simulation produced no timing samples"
    mean_val = np.mean(result.time_distributions)
    std_val = np.std(result.time_distributions)
    threshold = mean_val + 3 * std_val
    counted_outliers = sum(1 for v in result.time_distributions if v > threshold)
    # Tolerate small randomness / implementation differences in outlier counting.
    assert abs(counted_outliers - result.outlier_occurrences) <= 2
|
||||||
|
|
||||||
|
def test_simulate_scheduling_with_complex_params(complex_schedule_params):
    """The heavier configuration still produces well-formed, non-empty results."""
    outcome = simulate_scheduling(complex_schedule_params)
    samples = outcome.time_distributions
    assert isinstance(samples, list)
    assert len(samples) > 0
    # Outlier counts are cardinalities and can never go negative.
    assert outcome.outlier_occurrences >= 0
|
||||||
|
|
||||||
|
def test_to_json_roundtrip(basic_schedule_params):
    """to_json() must expose exactly the two result fields with JSON-friendly types."""
    outcome = simulate_scheduling(basic_schedule_params)
    payload = outcome.to_json()
    expected_keys = {"time_distributions", "outlier_occurrences"}
    assert set(payload.keys()) == expected_keys
    assert isinstance(payload["time_distributions"], list)
    assert isinstance(payload["outlier_occurrences"], int)
|
||||||
|
|
||||||
|
def test_invalid_input_raises_error():
    """simulate_scheduling must reject None and non-numeric thread counts."""
    bad_inputs = (None, {"threads": "NaN"})
    for bad in bad_inputs:
        with pytest.raises((TypeError, ValueError)):
            simulate_scheduling(bad)
|
||||||
Loading…
Reference in a new issue