Add scheduling_analysis/src/scheduling_analysis/cli.py
This commit is contained in:
parent
160a765744
commit
759b898f72
1 changed files with 84 additions and 0 deletions
84
scheduling_analysis/src/scheduling_analysis/cli.py
Normal file
84
scheduling_analysis/src/scheduling_analysis/cli.py
Normal file
|
|
@ -0,0 +1,84 @@
|
|||
import argparse
|
||||
import json
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
import pandas as pd
|
||||
|
||||
from scheduling_analysis.core import analyze_scheduling_effects
|
||||
|
||||
|
||||
# Configure the root logger once at import time; every logging.* call in
# this module relies on this level and format.
# NOTE(review): basicConfig at import time affects any program that imports
# this module, not just CLI runs — confirm that is intended.
logging.basicConfig(
    level=logging.INFO,
    format='[%(asctime)s] %(levelname)s: %(message)s'
)
|
||||
|
||||
|
||||
def _validate_input_file(file_path: Path) -> None:
    """Ensure *file_path* names an existing regular ``.csv`` file.

    Args:
        file_path: Candidate input path.

    Raises:
        FileNotFoundError: If the path does not exist.
        ValueError: If the path is not a regular file, or its suffix
            (case-insensitive) is not ``.csv``.
    """
    # A missing path is reported first, with the more specific exception.
    if not file_path.exists():
        raise FileNotFoundError(f'Eingabedatei nicht gefunden: {file_path}')
    # Directories and other non-regular entries are rejected next.
    if not file_path.is_file():
        raise ValueError(f'Pfad ist keine Datei: {file_path}')
    # Finally, only the .csv extension is accepted (compared case-insensitively).
    suffix = file_path.suffix.lower()
    if suffix != '.csv':
        raise ValueError(f'Ungültiges Dateiformat (erwartet .csv): {file_path}')
|
||||
|
||||
|
||||
def _read_experiment_data(csv_path: Path) -> list[dict[str, Any]]:
    """Load experiment measurements from *csv_path* as a list of row dicts.

    Args:
        csv_path: Path to a CSV file containing at least the columns
            ``run_id``, ``metric_name``, ``metric_value`` and ``mechanism``.

    Returns:
        One dict per CSV row, keyed by column name.

    Raises:
        ValueError: If required columns are absent.
        Exception: Whatever :func:`pandas.read_csv` raises on a bad file
            (logged, then re-raised unchanged).
    """
    try:
        frame = pd.read_csv(csv_path)
    except Exception as exc:
        # Surface the parse failure in the log, but let the caller decide.
        logging.error('Fehler beim Einlesen der CSV-Datei: %s', exc)
        raise

    # The downstream analysis needs exactly these columns.
    expected = {'run_id', 'metric_name', 'metric_value', 'mechanism'}
    missing_cols = expected.difference(frame.columns)
    if missing_cols:
        raise ValueError(f'CSV-Datei fehlt Spalten: {missing_cols}')

    rows = frame.to_dict(orient='records')
    # Defensive check kept from the original implementation; to_dict with
    # orient='records' is documented to return a list.
    if not isinstance(rows, list):
        raise ValueError('CSV konnte nicht korrekt in Datensätze umgewandelt werden.')
    return rows
|
||||
|
||||
|
||||
def _write_results_json(output_path: Path, results: dict[str, Any]) -> None:
    """Serialize *results* as pretty-printed UTF-8 JSON at *output_path*.

    Missing parent directories are created; an existing file is overwritten.

    Args:
        output_path: Destination file for the JSON document.
        results: JSON-serializable analysis results.
    """
    output_path.parent.mkdir(parents=True, exist_ok=True)
    # indent=2 / ensure_ascii=False matches the original on-disk format
    # (human-readable, non-ASCII characters written verbatim).
    payload = json.dumps(results, indent=2, ensure_ascii=False)
    output_path.write_text(payload, encoding='utf-8')
    logging.info('Analyseergebnisse gespeichert in: %s', output_path)
|
||||
|
||||
|
||||
def main() -> None:
    """Command-line entry point.

    Parses ``--input``/``--output``, validates and reads the measurement
    CSV, runs :func:`analyze_scheduling_effects`, and stores the resulting
    dict as JSON.

    Raises:
        TypeError: If the analysis function does not return a dict.
    """
    arg_parser = argparse.ArgumentParser(
        description='Analyse der Scheduling-Effekte auf Resonanzband und Max-Outlier.'
    )
    arg_parser.add_argument('--input', required=True, help='Pfad zur CSV-Datei mit Messdaten.')
    arg_parser.add_argument(
        '--output',
        required=False,
        default='output/results.json',
        help='Pfad zur Ausgabe der JSON-Ergebnisse (Standard: output/results.json).'
    )
    options = arg_parser.parse_args()

    # Resolve both paths up front so log messages show absolute locations.
    source = Path(options.input).resolve()
    destination = Path(options.output).resolve()

    _validate_input_file(source)
    logging.info('Lese CSV-Datei: %s', source)
    records = _read_experiment_data(source)

    logging.info('Starte Analyse der Scheduling-Effekte...')
    analysis = analyze_scheduling_effects(records)

    # Guard against an ill-behaved analysis implementation before writing.
    if not isinstance(analysis, dict):
        raise TypeError('Analysefunktion gibt kein Dictionary zurück.')

    _write_results_json(destination, analysis)
    logging.info('Analyse erfolgreich abgeschlossen.')
|
||||
|
||||
|
||||
# Run the CLI only when this module is executed as a script.
if __name__ == '__main__':
    main()
|
||||
Loading…
Reference in a new issue