@@ -0,0 +1,197 @@
+import logging
+import os
+import re
+import signal
+import sys
+import time
+from datetime import datetime
+from os import path
+
+import numpy
+import pyarrow
+from pyarrow import parquet, Schema, Field, Table
+from tqdm import tqdm
+
+import context
+import metadata
+
+PARQUET_FILE_EXTENSION: str = ".parquet"
+POI_FILE_PREPEND: str = "poi"
+STOP: bool = False
+
+
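+# Storage layout inferred from the path construction in this module (a sketch, not a
+# guarantee): traces live in "<system or model>/<amplitude>/<sampling_ratio>.parquet",
+# POI tables in "<system>/<amplitude>/poi<sampling_ratio>.parquet", with one
+# metadata-tagged column (pair) per recorded trace time.
+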
+def query_experiment_trace_time(test_frequency: float,
+                                test_amplitude: float,
+                                sampling_ratio: int) \
+        -> float | None:
+    # Measured (experiment) traces are stored under the first entry of context.SYSTEMS.
+    return query_trace_time_from_file(test_frequency, test_amplitude, sampling_ratio,
+                                      context.SYSTEMS[0] + "/" + str(test_amplitude) + "/"
+                                      + str(sampling_ratio) + PARQUET_FILE_EXTENSION)
+
+
+def query_simulation_trace_time(test_frequency: float,
+                                test_amplitude: float,
+                                sampling_ratio: int,
+                                model_name: str) \
+        -> float | None:
+    # Simulated traces are stored under the directory named after the model.
+    return query_trace_time_from_file(test_frequency, test_amplitude, sampling_ratio,
+                                      model_name + "/" + str(test_amplitude) + "/"
+                                      + str(sampling_ratio) + PARQUET_FILE_EXTENSION)
+
+
+def query_poi_trace_time(test_frequency: float,
+                         test_amplitude: float,
+                         sampling_ratio: int,
+                         system: str) \
+        -> float | None:
+    # POI files hold a single column per trace, so every column is inspected (skip=1).
+    return query_trace_time_from_file(test_frequency, test_amplitude, sampling_ratio,
+                                      system + "/" + str(test_amplitude) + "/" + POI_FILE_PREPEND
+                                      + str(sampling_ratio) + PARQUET_FILE_EXTENSION, skip=1)
+
+
+def query_trace_time_from_file(test_frequency: float,
+                               test_amplitude: float,
+                               sampling_ratio: int,
+                               file_name: str,
+                               skip: int = 2) \
+        -> float | None:
+    # Scan the column metadata of the parquet file for a column matching the test
+    # parameters and return the trace time stored in it; trace files keep output/input
+    # column pairs, hence the default skip of 2.
+    if path.isfile(file_name):
+        schema: Schema = parquet.read_schema(file_name)
+        for field_index in tqdm(range(0, len(schema.names), skip),
+                                desc="querying trace times in " + file_name, leave=False):
+            field: Field = schema.field(field_index)
+            if metadata.check_metadata_equality(field.metadata, test_frequency, test_amplitude, sampling_ratio):
+                return metadata.get_time_from_field(field)
+    return None
+
+
+def query_trace(test_frequency: float,
+                test_amplitude: float,
+                sampling_ratio: int,
+                system: str) \
+        -> list:
+    # Return [trace_time, 2xN array with the output samples in row 0 and the input
+    # samples in row 1], or [None, None] if no matching trace is stored.
+    file_name: str = system + "/" + str(test_amplitude) + "/" + str(sampling_ratio) + PARQUET_FILE_EXTENSION
+    trace_time: float | None = query_trace_time_from_file(test_frequency, test_amplitude, sampling_ratio, file_name)
+    if trace_time is not None:
+        table: Table = parquet.read_table(file_name)
+        return [trace_time,
+                numpy.array([table.column(metadata.TRACE_COLUMN_NAME_TYPE_PREPEND[b"output"] + str(trace_time)).to_numpy(),
+                             table.column(metadata.TRACE_COLUMN_NAME_TYPE_PREPEND[b"input"] + str(trace_time)).to_numpy()])]
+    return [None, None]
+
+
+def save_experiment_trace(samples: numpy.ndarray[float],
+                          test_frequency: float,
+                          test_amplitude: float,
+                          sampling_ratio: int) \
+        -> float:
+    # Store a measured trace under the experiment directory (context.SYSTEMS[0]).
+    return save_trace_in_file(samples, test_frequency, test_amplitude, sampling_ratio,
+                              context.SYSTEMS[0] + "/" + str(test_amplitude) + "/"
+                              + str(sampling_ratio) + PARQUET_FILE_EXTENSION)
+
+
+def save_simulation_trace(samples: numpy.ndarray[float],
+                          test_frequency: float,
+                          test_amplitude: float,
+                          sampling_ratio: int,
+                          model_name: str) \
+        -> float:
+    # Store a simulated trace under the directory named after the model.
+    return save_trace_in_file(samples, test_frequency, test_amplitude, sampling_ratio,
+                              model_name + "/" + str(test_amplitude) + "/"
+                              + str(sampling_ratio) + PARQUET_FILE_EXTENSION)
+
+
+def save_trace_in_file(samples: numpy.ndarray[float],
+                       test_frequency: float,
+                       test_amplitude: float,
+                       sampling_ratio: int,
+                       file_name: str) \
+        -> float:
+    # Append the output/input sample pair as two metadata-tagged columns keyed by the
+    # current timestamp; SIGINT/SIGTERM only set the STOP flag so the parquet file is
+    # not written after a termination request.
+
+    def interrupt_handler(signum, frame):
+        global STOP
+        STOP = True
+        logging.warning("termination signal encountered")
+    signal.signal(signal.SIGINT, interrupt_handler)
+    signal.signal(signal.SIGTERM, interrupt_handler)
+
+    trace_time: float = datetime.timestamp(datetime.now())
+    output_field, input_field = metadata.create_trace_fields(test_frequency, test_amplitude, sampling_ratio, trace_time)
+    data_table: Table
+    if path.isfile(file_name):
+        data_table = parquet.read_table(file_name)
+        data_table = data_table.append_column(output_field, [samples[0]]).append_column(input_field, [samples[1]])
+    else:
+        data_table = pyarrow.Table.from_arrays([samples[0], samples[1]],
+                                               schema=pyarrow.schema([output_field, input_field],
+                                                                     metadata=metadata.TRACE_TABLE_METADATA))
+    global STOP
+    if not STOP:
+        upload_start_time: float = time.time()
+        parquet.write_table(data_table, file_name)
+        upload_end_time: float = time.time()
+        logging.info("data upload time: " + str(upload_end_time - upload_start_time))
+    else:
+        # A termination signal arrived while the table was being built: clean up the
+        # Modelica build artifacts and exit without writing a partial file.
+        delete_modelica_build_files()
+        sys.exit()
+    return trace_time
+
+
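+# Illustrative round trip (a sketch with made-up parameter values; context.SYSTEMS[0]
+# is assumed to name the experiment data directory):
+#     trace_time = save_experiment_trace(samples, test_frequency=50.0,
+#                                        test_amplitude=1.0, sampling_ratio=10)
+#     stored_time, trace = query_trace(50.0, 1.0, 10, context.SYSTEMS[0])
+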
+def query_pois(test_frequency: float,
+               test_amplitude: float,
+               sampling_ratio: int,
+               system: str,
+               return_poi: str | None = None,
+               return_poi_methods: list[str] | None = None) \
+        -> numpy.ndarray[float] | None:
+    # Load the stored POIs for the matching trace; optionally reduce them to a single
+    # POI evaluated with the requested methods.
+    trace_time: float | None = query_poi_trace_time(test_frequency, test_amplitude, sampling_ratio, system)
+    if trace_time is not None:
+        table: Table = parquet.read_table(system + "/" + str(test_amplitude) + "/" + POI_FILE_PREPEND
+                                          + str(sampling_ratio) + PARQUET_FILE_EXTENSION)
+        all_pois: numpy.ndarray[float] = table.column(metadata.POI_COLUMN_NAME_PREPEND + str(trace_time)).to_numpy()
+        if return_poi is not None:
+            return select_pois(all_pois, return_poi, return_poi_methods)
+        return all_pois
+    return None
+
+
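+# Illustrative POI lookup (a sketch; "overshoot" and the method label are hypothetical
+# names, the valid values being whatever metadata.get_poi_index accepts):
+#     pois = query_pois(50.0, 1.0, 10, context.SYSTEMS[0],
+#                       return_poi="overshoot", return_poi_methods=["default"])
+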
+def select_pois(all_pois: numpy.ndarray[float],
+                poi: str,
+                poi_methods: list[str]) \
+        -> numpy.ndarray[float]:
+    # Pick the requested POI as evaluated by each method, using the index mapping
+    # defined in the metadata module.
+    required_pois: list[float] = []
+    for poi_method in poi_methods:
+        poi_index: int = metadata.get_poi_index(poi, poi_method)
+        required_pois.append(all_pois.item(poi_index))
+    return numpy.array(required_pois)
+
+
+def save_pois(all_pois: numpy.ndarray[float],
+              test_frequency: float,
+              test_amplitude: float,
+              sampling_ratio: int,
+              trace_time: float,
+              system: str) \
+        -> None:
+    # Append the POIs of one trace as a single metadata-tagged column of the system's
+    # POI parquet file, guarded by the same termination handling as save_trace_in_file.
+
+    def interrupt_handler(signum, frame):
+        global STOP
+        STOP = True
+        logging.warning("termination signal encountered")
+    signal.signal(signal.SIGINT, interrupt_handler)
+    signal.signal(signal.SIGTERM, interrupt_handler)
+
+    poi_metadata: dict = metadata.create_column_metadata(test_frequency, test_amplitude, sampling_ratio, trace_time)
+    poi_field: Field = pyarrow.field(metadata.POI_COLUMN_NAME_PREPEND + str(trace_time), pyarrow.float64(), False,
+                                     metadata=poi_metadata)
+    file_name: str = system + "/" + str(test_amplitude) + "/" + POI_FILE_PREPEND + str(sampling_ratio) + PARQUET_FILE_EXTENSION
+    data_table: Table
+    if path.isfile(file_name):
+        data_table = parquet.read_table(file_name)
+        data_table = data_table.append_column(poi_field, [all_pois])
+    else:
+        data_table = pyarrow.Table.from_arrays([all_pois], schema=pyarrow.schema([poi_field],
+                                                                                 metadata=metadata.POI_TABLE_METADATA))
+    global STOP
+    if not STOP:
+        parquet.write_table(data_table, file_name)
+    else:
+        delete_modelica_build_files()
+        sys.exit()
+
+
+def delete_modelica_build_files() -> None:
+    # Remove leftover Modelica build artifacts from the working directory.
+    logging.info("Cleaning Modelica build files")
+    for file_name in os.listdir():
+        if re.search("NotchFilter.*NotchFilter.*", file_name):
+            os.remove(file_name)