Add artifact_1/src/artifact_1/core.py
This commit is contained in:
commit
c91700a123
1 changed files with 83 additions and 0 deletions
83
artifact_1/src/artifact_1/core.py
Normal file
83
artifact_1/src/artifact_1/core.py
Normal file
|
|
@ -0,0 +1,83 @@
|
||||||
|
import json
|
||||||
|
import time
|
||||||
|
import datetime
|
||||||
|
import os
|
||||||
|
from pathlib import Path
|
||||||
|
from dataclasses import dataclass, asdict
|
||||||
|
from typing import Any
|
||||||
|
|
||||||
|
|
||||||
|
class ValidationError(Exception):
    """Raised when a log entry carries malformed or out-of-range data."""
|
||||||
|
|
||||||
|
|
||||||
|
@dataclass
class LogEntry:
    """Represents a single timestamp log entry.

    Fields:
        epoch_ms: Wall-clock time in milliseconds since the Unix epoch (>= 0).
        monotonic_ns: Monotonic clock reading in nanoseconds (>= 0).
        tz_offset_minutes: Local offset from UTC in minutes (may be negative).
        run_id: Non-empty identifier for the run producing this entry.
        step_id: Non-empty identifier for the step within the run.
    """

    epoch_ms: int
    monotonic_ns: int
    tz_offset_minutes: int
    run_id: str
    step_id: str

    def validate(self) -> None:
        """Validate field types and ranges.

        Raises:
            ValidationError: If any field has the wrong type, a negative
                timestamp value, or an empty identifier.
        """

        def _plain_int(value: object) -> bool:
            # bool is a subclass of int, so isinstance(True, int) is True;
            # exclude it explicitly — True/False are never valid timestamps
            # or offsets.
            return isinstance(value, int) and not isinstance(value, bool)

        if not _plain_int(self.epoch_ms) or self.epoch_ms < 0:
            raise ValidationError(f"Invalid epoch_ms: {self.epoch_ms}")
        if not _plain_int(self.monotonic_ns) or self.monotonic_ns < 0:
            raise ValidationError(f"Invalid monotonic_ns: {self.monotonic_ns}")
        if not _plain_int(self.tz_offset_minutes):
            raise ValidationError(f"Invalid tz_offset_minutes: {self.tz_offset_minutes}")
        if not isinstance(self.run_id, str) or not self.run_id:
            raise ValidationError(f"Invalid run_id: {self.run_id}")
        if not isinstance(self.step_id, str) or not self.step_id:
            raise ValidationError(f"Invalid step_id: {self.step_id}")
|
||||||
|
|
||||||
|
|
||||||
|
# Default destination for the JSON timestamp log, relative to the process CWD.
OUTPUT_PATH = Path("output/timestamps.json")
|
||||||
|
|
||||||
|
|
||||||
|
def _ensure_output_dir(path: Path) -> None:
|
||||||
|
path.parent.mkdir(parents=True, exist_ok=True)
|
||||||
|
|
||||||
|
|
||||||
|
def _save_log_entry(entry: LogEntry, path: Path) -> None:
|
||||||
|
_ensure_output_dir(path)
|
||||||
|
|
||||||
|
log_data = []
|
||||||
|
if path.exists():
|
||||||
|
try:
|
||||||
|
with open(path, "r", encoding="utf-8") as f:
|
||||||
|
contents = f.read().strip()
|
||||||
|
if contents:
|
||||||
|
log_data = json.loads(contents)
|
||||||
|
except (json.JSONDecodeError, OSError):
|
||||||
|
# Reset log if file is corrupted
|
||||||
|
log_data = []
|
||||||
|
|
||||||
|
log_data.append(asdict(entry))
|
||||||
|
|
||||||
|
with open(path, "w", encoding="utf-8") as f:
|
||||||
|
json.dump(log_data, f, indent=2)
|
||||||
|
|
||||||
|
|
||||||
|
# Public API function
def log_timestamps(epoch_ms: int, monotonic_ns: int, tz_offset_minutes: int, run_id: str, step_id: str) -> None:
    """Logs timestamps including epoch, monotonic, timezone offset as JSON."""
    record = LogEntry(epoch_ms, monotonic_ns, tz_offset_minutes, run_id, step_id)
    record.validate()
    _save_log_entry(record, OUTPUT_PATH)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
    # For manual debugging: log current times.
    epoch_ms = int(time.time() * 1000)
    monotonic_ns = time.monotonic_ns()
    # Derive the local UTC offset from a single timezone-aware datetime.
    # The previous form subtracted two separately-sampled naive datetimes
    # (now() - utcnow()), which is jitter-prone, and datetime.utcnow() is
    # deprecated since Python 3.12.
    local_offset = datetime.datetime.now().astimezone().utcoffset()
    offset = int(local_offset.total_seconds() // 60)
    log_timestamps(epoch_ms, monotonic_ns, offset, "manual_run", "step0")
|
||||||
Loading…
Reference in a new issue