Add latency_calculator/tests/test_core.py
This commit is contained in:
parent
ed492a5e27
commit
7082c2e822
1 changed files with 85 additions and 0 deletions
85
latency_calculator/tests/test_core.py
Normal file
85
latency_calculator/tests/test_core.py
Normal file
|
|
@ -0,0 +1,85 @@
|
|||
import pytest
|
||||
from datetime import datetime, timedelta
|
||||
from latency_calculator import core
|
||||
|
||||
|
||||
def create_timestamp_entry(delta_publish_to_gate: float, delta_gate_to_visible: float):
    """Build one pipeline-timestamp record anchored at a fixed base time.

    Publish time is pinned to 2024-01-01 12:00:00; the gate-read and
    index-visible stages follow it by the given offsets (in seconds).
    All three values are returned as ISO-8601 strings.
    """
    publish_at = datetime(2024, 1, 1, 12, 0, 0)
    gate_read_at = publish_at + timedelta(seconds=delta_publish_to_gate)
    index_visible_at = gate_read_at + timedelta(seconds=delta_gate_to_visible)
    return {
        't_publish': publish_at.isoformat(),
        't_gate_read': gate_read_at.isoformat(),
        't_index_visible': index_visible_at.isoformat(),
    }
|
||||
|
||||
|
||||
def test_calculate_latency_typical_case():
    """Five well-formed entries yield the expected stats keys, all non-negative."""
    offsets = [(2, 3), (1, 1), (4, 2), (3, 4), (0.5, 0.5)]
    entries = [create_timestamp_entry(pub, vis) for pub, vis in offsets]

    result = core.calculate_latency(entries)

    assert isinstance(result, dict)
    assert set(result.keys()) == {'p50', 'p95', 'max'}

    # Latency values should be positive numbers
    for key in result:
        assert result[key] >= 0
|
||||
|
||||
|
||||
def test_calculate_latency_edge_case_single_entry():
    """With exactly one entry, every percentile must collapse to the same value.

    Fix: the original bound ``expected = entry[0]`` and never used it (dead
    local, removed), and only compared p50 against max — p95 must coincide
    as well for a one-element sample, so that check is added.
    """
    entry = [create_timestamp_entry(1, 1)]
    result = core.calculate_latency(entry)
    assert 'p50' in result and 'p95' in result and 'max' in result
    # All order statistics of a single-element sample are identical.
    assert result['p50'] == pytest.approx(result['max'], rel=1e-9)
    assert result['p95'] == pytest.approx(result['max'], rel=1e-9)
|
||||
|
||||
|
||||
def test_calculate_latency_invalid_input():
    """An entry lacking the required timestamp keys must raise."""
    # Missing required keys
    incomplete = [{ 't_publish': '2024-01-01T00:00:00' }]
    with pytest.raises((KeyError, ValueError, TypeError)):
        core.calculate_latency(incomplete)
|
||||
|
||||
|
||||
def test_calculate_latency_mixed_timestamp_formats():
    """Epoch-float and ISO-string timestamps may be mixed across entries."""
    base_time = datetime(2024, 1, 1, 12, 0, 0)

    def at(seconds):
        # Offset from the shared base time, in seconds.
        return base_time + timedelta(seconds=seconds)

    # First entry encodes every stage as a POSIX epoch float.
    entry_1 = {
        't_publish': at(0).timestamp(),
        't_gate_read': at(2).timestamp(),
        't_index_visible': at(5).timestamp(),
    }
    # Second entry encodes the same stages as ISO-8601 strings.
    entry_2 = {
        't_publish': at(10).isoformat(),
        't_gate_read': at(12).isoformat(),
        't_index_visible': at(16).isoformat(),
    }

    result = core.calculate_latency([entry_1, entry_2])

    assert isinstance(result, dict)
    assert result['p50'] > 0
    assert result['max'] >= result['p95'] >= result['p50']
|
||||
|
||||
|
||||
def test_calculate_latency_outlier_influence():
    """One slow entry should drive the max but leave the median unaffected."""
    entries = [create_timestamp_entry(1, 1) for _ in range(9)]
    entries.append(create_timestamp_entry(10, 20))
    result = core.calculate_latency(entries)

    # 10 s to the gate plus 20 s to visibility -> 30 s end-to-end maximum.
    assert result['max'] == pytest.approx(30.0, rel=1e-9)
    # median should remain near normal values
    assert result['p50'] < 5.0
|
||||
|
||||
|
||||
def test_calculate_latency_assertions_strict_types():
    """Entries that are not dicts must be rejected with a type-related error."""
    # Passing list instead of dict should fail
    malformed = [['wrong_type']]
    with pytest.raises((TypeError, ValueError)):
        core.calculate_latency(malformed)
|
||||
Loading…
Reference in a new issue