|
| 1 | +#!/usr/bin/python3 |
| 2 | + |
| 3 | +import sys |
| 4 | +import json |
| 5 | +import re |
| 6 | +import os |
| 7 | +import pandas as pd |
| 8 | +import csv |
| 9 | + |
| 10 | + |
def listJson(path, filterStr):
    """Return paths of all '.json' files in *path* whose name contains *filterStr*.

    Args:
        path: Directory to scan (non-recursive).
        filterStr: Substring that must appear in the file name; '' matches all.

    Returns:
        List of file paths (directory joined with file name) for matching files.
    """
    jsonFiles = [f for f in os.listdir(path)
                 if f.endswith('.json') and filterStr in f]
    # os.path.join handles a missing trailing separator on `path`; the previous
    # `path + f` produced broken paths like '/some/dirfile.json' in that case.
    return [os.path.join(path, f) for f in jsonFiles]
| 14 | + |
| 15 | + |
def getMetrics(files, cbCols, sysCols):
    """Collect the requested metrics from a set of JSON result files.

    Args:
        files: Iterable of paths to JSON result files.
        cbCols: Metric names read from each file's 'cachebench_metrics' section.
        sysCols: Metric names read from each file's 'system_metrics' section.

    Returns:
        Dict mapping test name (file name minus its final extension) to a
        dict of {metric name: value} covering every requested column.
    """
    results = {}
    for filePath in files:
        with open(filePath, 'r') as fh:
            parsed = json.load(fh)
        # Test name is the file name with only the last extension stripped.
        testName = os.path.basename(filePath).rsplit(".", 1)[0]
        row = {}
        row.update((col, parsed['cachebench_metrics'][col]) for col in cbCols)
        row.update((col, parsed['system_metrics'][col]) for col in sysCols)
        results[testName] = row
    return results
| 28 | + |
| 29 | + |
def main():
    """Entry point: gather metrics from JSON result files into one CSV summary.

    Usage: script <path> [filter-string]
        path           directory containing the per-test .json result files
        filter-string  optional substring a file name must contain (default: all)

    Writes <path>/metrics.<filter>.csv with one row per test, sorted by test
    name, and prints the output location.
    """
    args = sys.argv[1:]
    if len(args) < 1 or len(args) > 2:
        print("Invalid Args. Required : path, filter-string")
        # Exit non-zero so shell scripts can detect misuse; bare exit()
        # returned status 0 and relies on the optional `site` module.
        sys.exit(1)

    path = args[0]
    filterStr = args[1] if len(args) == 2 else ''
    files = listJson(path, filterStr)

    # Latency percentiles reported in the 'cachebench_metrics' JSON section.
    cbCols = [
        'cache_allocate_api_latency_p90_in_ns',
        'cache_allocate_api_latency_p99_in_ns',
        'cache_find_api_latency_p90_in_ns',
        'cache_find_api_latency_p99_in_ns',
        'cache_background_eviction_latency_p90_in_ns',
        'cache_background_eviction_latency_p99_in_ns',
        'cache_evict_dml_large_item_wait_latency_p90_in_ns',
        'cache_evict_dml_large_item_wait_latency_p99_in_ns',
        'cache_evict_dml_small_item_wait_latency_p90_in_ns',
        'cache_evict_dml_small_item_wait_latency_p99_in_ns',
        'cache_background_promotion_latency_p90_in_ns',
        'cache_background_promotion_latency_p99_in_ns',
        'cache_promote_dml_large_item_wait_latency_p90_in_ns',
        'cache_promote_dml_large_item_wait_latency_p99_in_ns',
        'cache_promote_dml_small_item_wait_latency_p90_in_ns',
        'cache_promote_dml_small_item_wait_latency_p99_in_ns'
    ]

    # Counters from the 'system_metrics' JSON section.
    # NOTE(review): the 'dsa0/...' keys look like DSA perf-event selectors —
    # confirm against the harness that produces these JSON files.
    sysCols = [
        'dsa0/event=0x1,event_category=0x0/',
        'dsa0/event=0x10,event_category=0x1/',
        'dsa0/event=0x2,event_category=0x3/',
        'time_elapsed_in_secs',
        'user_time_seconds',
        'percent_of_cpu_this_job_got'
    ]
    metrics = getMetrics(files, cbCols, sysCols)

    ''' Save metrics to csv '''
    fields = ['test'] + cbCols + sysCols
    csvFile = os.path.join(path, 'metrics.' + filterStr + '.csv')
    # newline='' is required by the csv module; without it the writer emits
    # an extra blank line after every row on Windows.
    with open(csvFile, 'w', newline='') as f:
        w = csv.DictWriter(f, fields)
        w.writeheader()
        for key, val in sorted(metrics.items()):
            row = {'test': key}
            row.update(val)
            w.writerow(row)
    print("Filter: {0} ; Results gathered in {1}".format(filterStr, csvFile))
| 80 | + |
| 81 | + |
# Run only when executed as a script, not when imported as a module.
if __name__ == '__main__':
    main()
0 commit comments