diff --git a/port/linux/ci_benchmark.sh b/port/linux/ci_benchmark.sh
index 74ef97b63..6271b1cd1 100644
--- a/port/linux/ci_benchmark.sh
+++ b/port/linux/ci_benchmark.sh
@@ -1,4 +1,7 @@
-cd Release && make -j
-cd ..
-bash ci_benchmark_run.sh
-python3 ci_benchmark_calibration.py
+bash ci_benchmark_times.sh 2
+python3 ci_benchmark_select.py
+
+# clear intermediate artifacts
+rm -f benchmark_result_*.json
+rm -f performance_data.json
+echo
diff --git a/port/linux/ci_benchmark_calibration.py b/port/linux/ci_benchmark_calibration.py
index 972ffb2b9..06cb29d3b 100644
--- a/port/linux/ci_benchmark_calibration.py
+++ b/port/linux/ci_benchmark_calibration.py
@@ -1,6 +1,12 @@
 #!/usr/bin/python3
 import json
-with open('benchmark_result.json', 'r', encoding='utf8') as json_in:
+import sys
+import fcntl
+
+# benchmark result file to calibrate, passed by ci_benchmark_times.sh
+benchmark_result_file_path = sys.argv[1]
+
+with open(benchmark_result_file_path, 'r', encoding='utf8') as json_in:
     json_data = dict(json.load(json_in))
 
 benchmarks_data = list(json_data['benchmarks'])
@@ -31,11 +37,22 @@
 benchmarks_data[0]['cpu_time'] = performance_point_res
 benchmarks_data[0]['time_unit'] = 'Point'
 print('---------------------------------------------')
-print('Perfomance point:', int(performance_point_res))
+print('Performance point:', int(performance_point_res), '\n')
 
 # update json_data
 json_data['benchmarks'] = benchmarks_data
 
 # save json
-with open('benchmark_result.json', 'w') as json_out:
-    json.dump(json_data, json_out)
+with open(benchmark_result_file_path, 'w') as benchmark_result_file:
+    json.dump(json_data, benchmark_result_file)
+
+# append this run's point to the shared performance_data.json; an exclusive
+# flock serialises concurrent runs so the read-modify-write below cannot
+# lose updates (lock and write share one file descriptor)
+with open('performance_data.json', 'r+') as perf_json_file:
+    fcntl.flock(perf_json_file.fileno(), fcntl.LOCK_EX)
+    perf_json_data: list = json.load(perf_json_file)
+    perf_json_data.append(performance_point_res)
+    perf_json_file.seek(0)
+    perf_json_file.truncate()
+    json.dump(perf_json_data, perf_json_file)
diff --git a/port/linux/ci_benchmark_run.sh b/port/linux/ci_benchmark_run.sh
index df48e9b16..9732d6952 100644
--- a/port/linux/ci_benchmark_run.sh
+++ b/port/linux/ci_benchmark_run.sh
@@ -1 +1,2 @@
-Release/benchmark/pikascript_benchmark --benchmark_format=json | tee benchmark_result.json
+echo write benchmark result to $1
+Release/benchmark/pikascript_benchmark --benchmark_format=json | tee "$1"
diff --git a/port/linux/ci_benchmark_select.py b/port/linux/ci_benchmark_select.py
new file mode 100644
index 000000000..09f05fdbd
--- /dev/null
+++ b/port/linux/ci_benchmark_select.py
@@ -0,0 +1,22 @@
+#!/usr/bin/python3
+# Pick the median run out of benchmark_result_<i>.json (by performance
+# point) and promote it to benchmark_result.json.
+import json
+import shutil
+
+with open('performance_data.json', 'r') as perf_json_file:
+    perf_json_data: list = json.load(perf_json_file)
+
+# pair each performance point with its 1-based run index
+perf_data_with_index = [{'index': i + 1, 'value': value}
+                        for i, value in enumerate(perf_json_data)]
+
+# median by value: robust against a single outlier run
+perf_data_with_index_sorted = sorted(
+    perf_data_with_index, key=lambda e: e['value'])
+selected = perf_data_with_index_sorted[len(perf_json_data) // 2]
+print('=============================================')
+print('selected: ', selected)
+
+shutil.copy('benchmark_result_' +
+            str(selected['index']) + '.json', 'benchmark_result.json')
diff --git a/port/linux/ci_benchmark_times.sh b/port/linux/ci_benchmark_times.sh
new file mode 100644
index 000000000..209245a8b
--- /dev/null
+++ b/port/linux/ci_benchmark_times.sh
@@ -0,0 +1,17 @@
+cd Release && make -j
+cd ..
+
+RUN_TIMES=$1
+# init: clear stale results (-f ignores missing files on the first run)
+rm -f performance_data*
+rm -f benchmark*.json
+touch performance_data.json
+echo '[]' > performance_data.json
+
+for ((i=1; i<=RUN_TIMES; i++))
+do
+    OUT_FILE=benchmark_result_$i.json
+    # main
+    bash ci_benchmark_run.sh $OUT_FILE > /dev/null && \
+    python3 ci_benchmark_calibration.py $OUT_FILE
+done