@@ -285,7 +285,10 @@ def run_perf_test(files, parameters, optimization_level, n_iterations=1):
         total_time = end_time - start_time
         test_runtimes.append(test_time)
         total_runtimes.append(total_time)
-    return mean(test_runtimes), mean(total_runtimes)
+    return {
+        TEST_TIME_QUANTITY: mean(test_runtimes),
+        TOTAL_TIME_QUANTITY: mean(total_runtimes)
+    }
 
 def get_expectation_checks(expected_values):
     """Converts the given sequence of expected values to a sequence of functions which tell
@@ -383,12 +386,10 @@ def write_perf_entry_to_stream(
 def write_perf_to_file(
         test_name, optimization_level, runtimes, file_name=DEFAULT_PERF_FILE_NAME):
     """Writes performance data to a file."""
-    test_runtime, total_runtime = runtimes
     with open(file_name, "a") as perf_file:
-        write_perf_entry_to_stream(
-            test_name, optimization_level, TEST_TIME_QUANTITY, test_runtime, perf_file)
-        write_perf_entry_to_stream(
-            test_name, optimization_level, TOTAL_TIME_QUANTITY, total_runtime, perf_file)
+        for quantity, data_point in runtimes.items():
+            write_perf_entry_to_stream(
+                test_name, optimization_level, quantity, data_point, perf_file)
 
 def write_total_runtime_to_file(
         test_name, optimization_level, total_runtime, file_name=DEFAULT_PERF_FILE_NAME):
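For context, a minimal self-contained sketch (not part of the patch) of the data flow these two hunks establish: run_perf_test now returns a dict keyed by quantity label, and write_perf_to_file simply iterates over whatever quantities that dict contains. The constant values, function names, and the line format below are assumptions for illustration only, not the module's real definitions.

from statistics import mean
import sys

# Assumed labels; the real module defines TEST_TIME_QUANTITY / TOTAL_TIME_QUANTITY elsewhere.
TEST_TIME_QUANTITY = "test_time"
TOTAL_TIME_QUANTITY = "total_time"

def summarize_runtimes(test_runtimes, total_runtimes):
    # Same shape as the new run_perf_test return value: quantity label -> mean runtime.
    return {
        TEST_TIME_QUANTITY: mean(test_runtimes),
        TOTAL_TIME_QUANTITY: mean(total_runtimes),
    }

def write_runtimes(test_name, optimization_level, runtimes, stream):
    # Same loop shape as the new write_perf_to_file body; the output format here is made up.
    for quantity, data_point in runtimes.items():
        stream.write(f"{test_name},{optimization_level},{quantity},{data_point:.6f}\n")

if __name__ == "__main__":
    runtimes = summarize_runtimes([0.12, 0.11, 0.13], [0.40, 0.38, 0.41])
    write_runtimes("example_test", 2, runtimes, sys.stdout)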