Add queue_separation/src/queue_separation/cli.py
This commit is contained in:
parent
f1ecf3d5eb
commit
137bb41399
1 changed files with 82 additions and 0 deletions
82
queue_separation/src/queue_separation/cli.py
Normal file
82
queue_separation/src/queue_separation/cli.py
Normal file
|
|
@ -0,0 +1,82 @@
|
|||
import argparse
|
||||
import json
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from typing import Any, Dict, List
|
||||
|
||||
from queue_separation.main import simulate_queue_separation
|
||||
|
||||
|
||||
def _load_json_file(path: Path) -> Any:
|
||||
if not path.exists() or not path.is_file():
|
||||
raise FileNotFoundError(f"Eingabedatei nicht gefunden: {path}")
|
||||
with path.open("r", encoding="utf-8") as f:
|
||||
try:
|
||||
return json.load(f)
|
||||
except json.JSONDecodeError as e:
|
||||
raise ValueError(f"Ungültiges JSON in Datei {path}: {e}") from e
|
||||
|
||||
|
||||
def _validate_jobs(jobs: Any) -> List[Dict[str, Any]]:
|
||||
if not isinstance(jobs, list):
|
||||
raise TypeError("Job-Daten müssen eine Liste sein.")
|
||||
for job in jobs:
|
||||
if not isinstance(job, dict):
|
||||
raise TypeError("Jeder Job muss ein Dictionary sein.")
|
||||
return jobs
|
||||
|
||||
|
||||
def _split_jobs(jobs: List[Dict[str, Any]]) -> tuple[List[Dict[str, Any]], List[Dict[str, Any]]]:
|
||||
hotspot_queue = [j for j in jobs if j.get("type") == "hotspot"]
|
||||
main_queue = [j for j in jobs if j.get("type") != "hotspot"]
|
||||
return hotspot_queue, main_queue
|
||||
|
||||
|
||||
def main() -> None:
    """CLI entry point for the queue-separation simulation.

    Loads job definitions from the ``--input`` JSON file, splits them
    into a hotspot queue and a main queue, runs
    ``simulate_queue_separation``, and writes the resulting metrics
    dictionary to ``--output`` as JSON.

    On any failure the error is printed to stderr and the process exits
    with status 1.
    """
    parser = argparse.ArgumentParser(
        description="Führt eine Simulation der Queue-Entkopplung durch und erzeugt Performance-Metriken."
    )
    parser.add_argument(
        "--input",
        required=True,
        help="Pfad zur JSON-Datei mit Job-Definitionen."
    )
    parser.add_argument(
        "--output",
        required=False,
        default="output/performance_metrics.json",
        help="Pfad für die Resultate der Performance-Metriken."
    )

    args = parser.parse_args()

    input_path = Path(args.input)
    output_path = Path(args.output)

    try:
        raw_jobs = _load_json_file(input_path)
        jobs = _validate_jobs(raw_jobs)
        hotspot_queue, main_queue = _split_jobs(jobs)

        metrics = simulate_queue_separation(
            jobs=jobs,
            hotspot_queue=hotspot_queue,
            main_queue=main_queue,
        )

        # Guard against a misbehaving simulation backend before writing.
        if not isinstance(metrics, dict):
            raise ValueError("simulate_queue_separation muss ein Dictionary zurückgeben.")

        output_path.parent.mkdir(parents=True, exist_ok=True)
        with output_path.open("w", encoding="utf-8") as f:
            # ensure_ascii=False: keep non-ASCII characters (e.g. German
            # umlauts in metric labels) readable in the output file
            # instead of \uXXXX escape sequences.
            json.dump(metrics, f, indent=2, ensure_ascii=False)

        print(f"Simulation abgeschlossen. Ergebnisse gespeichert unter: {output_path}")

    except Exception as e:
        # Top-level CLI boundary: report once, exit non-zero. Individual
        # helpers raise specific exception types (FileNotFoundError,
        # ValueError, TypeError) with user-facing messages.
        print(f"Fehler: {e}", file=sys.stderr)
        sys.exit(1)
|
||||
|
||||
|
||||
# Script entry point: run the CLI only when executed directly,
# not when imported as a module.
if __name__ == "__main__":
    main()
|
||||
Loading…
Reference in a new issue