diff --git a/mess_log_processing/src/mess_log_processing/cli.py b/mess_log_processing/src/mess_log_processing/cli.py
new file mode 100644
index 0000000..02931ee
--- /dev/null
+++ b/mess_log_processing/src/mess_log_processing/cli.py
@@ -0,0 +1,80 @@
+import argparse
+import logging
+import sys
+from pathlib import Path
+
+import pandas as pd
+
+from mess_log_processing.core import process_logs
+
+
+def _configure_logging(level: int = logging.INFO) -> None:
+    """Configure simple logging for CLI execution."""
+    logging.basicConfig(
+        level=level,
+        format="%(asctime)s [%(levelname)s] %(message)s",
+        datefmt="%Y-%m-%d %H:%M:%S",
+    )
+
+
+def main(argv: list[str] | None = None) -> None:
+    """Command-line interface for processing measurement logs.
+
+    Uses argparse to receive JSON and CSV input file paths and an optional output path.
+    """
+    parser = argparse.ArgumentParser(
+        description="Analysis and aggregation of measurement data (pinned/unpinned) with p99 statistics."
+    )
+    parser.add_argument(
+        "--json",
+        required=True,
+        help="Path to the input JSONL file containing measurement data.",
+    )
+    parser.add_argument(
+        "--csv",
+        required=True,
+        help="Path to the input CSV file containing measurement data.",
+    )
+    parser.add_argument(
+        "--out",
+        required=False,
+        help="Path to the output file for the aggregated statistics (CSV).",
+    )
+
+    args = parser.parse_args(argv)
+
+    _configure_logging()
+    logger = logging.getLogger(__name__)
+
+    json_path = Path(args.json)
+    csv_path = Path(args.csv)
+    output_path = Path(args.out) if args.out else None
+
+    # Validate input paths before handing them to the core processing step.
+    if not json_path.is_file():
+        logger.error("JSON input file not found or invalid: %s", json_path)
+        sys.exit(1)
+    if not csv_path.is_file():
+        logger.error("CSV input file not found or invalid: %s", csv_path)
+        sys.exit(1)
+
+    try:
+        logger.info("Processing logs: %s and %s", json_path, csv_path)
+        result_df: pd.DataFrame = process_logs(str(json_path), str(csv_path))
+        logger.info("Processing complete. %d records aggregated.", len(result_df))
+    except Exception as exc:  # noqa: BLE001
+        logger.exception("Error during log processing: %s", exc)
+        sys.exit(1)
+
+    if output_path:
+        try:
+            output_path.parent.mkdir(parents=True, exist_ok=True)
+            result_df.to_csv(output_path, index=False)
+            logger.info("Statistics saved to: %s", output_path)
+        except Exception as exc:  # noqa: BLE001
+            logger.exception("Failed to write output CSV: %s", exc)
+            sys.exit(1)
+
+
+if __name__ == "__main__":
+    main()
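
For reference, a minimal usage sketch of the new entry point (not part of the patch): it drives main() with an explicit argv list, assuming the package is installed so the import resolves; all file paths below are hypothetical placeholders.

# Hypothetical usage sketch: call the CLI entry point programmatically.
# Paths are placeholders; nonexistent inputs cause a logged error and exit(1).
from mess_log_processing.cli import main

main([
    "--json", "data/measurements.jsonl",  # hypothetical input path
    "--csv", "data/measurements.csv",     # hypothetical input path
    "--out", "results/p99_stats.csv",     # hypothetical output path
])

The equivalent shell invocation would be "python -m mess_log_processing.cli --json ... --csv ... --out ...", since the module only runs main() under the "if __name__ == '__main__'" guard.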