Add 1.logging_analysis/tests/test_core.py
This commit is contained in:
parent
8a609e6b29
commit
e71fe39343
1 changed files with 94 additions and 0 deletions
94
1.logging_analysis/tests/test_core.py
Normal file
94
1.logging_analysis/tests/test_core.py
Normal file
|
|
@ -0,0 +1,94 @@
|
|||
import json
from datetime import datetime, timezone
from pathlib import Path

import pandas as pd
import pytest

from src.logging_analysis import core
|
||||
|
||||
|
||||
@pytest.fixture
def sample_log_file(tmp_path):
    """Write a three-entry JSON log (present / missing / delayed) and return its path."""
    field_names = ("timestamp", "expected_artifact_path", "artifact_key", "status")
    rows = [
        ("2024-03-01T10:00:00", "/data/a.bin", "A1", "present"),
        ("2024-03-01T10:01:00", "/data/b.bin", "B2", "missing"),
        ("2024-03-01T10:01:30", "/data/c.bin", "C3", "delayed"),
    ]
    # Key order inside each dict matches the original literal form, so the
    # serialized JSON bytes are identical.
    entries = [dict(zip(field_names, row)) for row in rows]
    log_path = tmp_path / "logs.json"
    log_path.write_text(json.dumps(entries))
    return log_path
|
||||
|
||||
|
||||
def test_logentry_creation_valid_fields():
    """LogEntry stores its four fields verbatim and rejects all-None input."""
    # datetime.utcnow() is deprecated (Python 3.12+) and returns a naive
    # datetime; use an aware UTC timestamp instead. The test only round-trips
    # the string, so the extra "+00:00" offset does not affect the assertions.
    ts = datetime.now(timezone.utc).isoformat()
    entry = core.LogEntry(
        timestamp=ts,
        expected_artifact_path="/tmp/x.bin",
        artifact_key="K123",
        status="present",
    )
    assert entry.timestamp == ts
    assert entry.expected_artifact_path == "/tmp/x.bin"
    assert entry.artifact_key == "K123"
    assert entry.status == "present"

    # Edge case: invalid (None) field values should raise TypeError or ValueError.
    with pytest.raises((TypeError, ValueError)):
        core.LogEntry(timestamp=None, expected_artifact_path=None, artifact_key=None, status=None)
|
||||
|
||||
|
||||
def test_analyze_log_computation(sample_log_file):
    """analyze_log returns rate metrics and a human-readable summary for a mixed log."""
    result = core.analyze_log(str(sample_log_file))

    assert isinstance(result, dict)
    assert 'missing_rate' in result
    assert 'timing_rate' in result
    assert 'summary_text' in result

    # pytest.approx conventionally wraps the EXPECTED value and is compared
    # against the actual one; 0.001 is passed as the keyword `rel` rather
    # than a bare positional, per the pytest.approx API.
    # 1 "missing" entry out of 3 total.
    assert result['missing_rate'] == pytest.approx(1 / 3, rel=0.001)
    # 1 "delayed" entry out of 3 counts as a timing issue.
    assert result['timing_rate'] == pytest.approx(1 / 3, rel=0.001)

    summary = result['summary_text'].lower()
    assert 'missing' in summary
    assert 'timing' in summary
|
||||
|
||||
|
||||
def test_analyze_log_invalid_file(tmp_path):
    """A path that does not exist must propagate FileNotFoundError."""
    missing = tmp_path / "nonexistent.json"
    with pytest.raises(FileNotFoundError):
        core.analyze_log(str(missing))
|
||||
|
||||
|
||||
def test_analyze_log_empty_file(tmp_path):
    """An empty log yields zero rates rather than a division error."""
    empty_path = tmp_path / "empty.json"
    # "[]" is byte-identical to json.dumps([]).
    empty_path.write_text("[]")

    result = core.analyze_log(str(empty_path))

    assert isinstance(result, dict)
    assert result['missing_rate'] == 0.0
    assert result['timing_rate'] == 0.0
|
||||
|
||||
|
||||
def test_analyze_log_malformed_json(tmp_path):
    """Syntactically invalid JSON content surfaces as json.JSONDecodeError."""
    bad_path = tmp_path / "bad.json"
    # The trailing comma makes this invalid JSON.
    bad_path.write_text('{"timestamp": 123,}')
    with pytest.raises(json.JSONDecodeError):
        core.analyze_log(str(bad_path))
|
||||
Loading…
Reference in a new issue