
Commit: Latency int bug (#1193)
* Fix a bug where the latency metrics were getting converted to int.

* Attempted a refactor of the incremental latency metrics; tested by sending data to the test DB.

* Refactor res_parser to simplify the code and allow more reuse.

* Revert the code to send performance results to prod, since the tests already passed.

* Reset the PSS credentials in Jenkins.
bialesdaniel authored Sep 21, 2020
1 parent e7a93c1 commit 3a71293
Showing 4 changed files with 17 additions and 19 deletions.
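The bug named in the commit title is the first bullet: the reporting code coerced latency values to int before publishing them, so sub-millisecond latencies collapsed to 0 and everything else lost its fractional part. The fix keeps the values as floats rounded to four significant digits. A minimal sketch of the difference, using illustrative values that are not from the commit:

```python
# Illustrative latency values in milliseconds (made up, not from the commit).
samples = [0.8321, 2.4567, 12.3456]

# Old behavior: int() truncates, so sub-millisecond latencies become 0.
old = [int(v) for v in samples]                     # [0, 2, 12]

# New behavior: keep a float, rounded to 4 significant digits via "{:.4}".
new = [float("{:.4}".format(v)) for v in samples]   # [0.8321, 2.457, 12.35]

print(old)
print(new)
```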
script/testing/oltpbench/reporting/constants.py (2 changes: 1 addition & 1 deletion)

```diff
@@ -3,4 +3,4 @@
 UNKNOWN_RESULT = 'unknown'
 LATENCY_ATTRIBUTE_MAPPING = [
     ('l_25','25'),('l_75','75'),('l_90','90'), ('l_95','95'), ('l_99','99'),
-    ('avg','average'),('median','median'),('min','minimum'), ('max','maximum')]
+    ('avg','av'),('median','median'),('min','min'), ('max','max')]
```
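The second element of each tuple is a pattern matched against the field names of the source data via the repository's get_value_by_pattern helper. Shortening 'average'/'minimum'/'maximum' to 'av'/'min'/'max' lets the same mapping resolve both the .res column headers (e.g. ' avg_lat(ms)', ' min_lat(ms)') and the summary-file keys. A rough, self-contained sketch of the idea, using a hypothetical substring-matching stand-in for get_value_by_pattern (the real helper lives in oltpbench.reporting.utils and may differ):

```python
LATENCY_ATTRIBUTE_MAPPING = [
    ('l_25', '25'), ('l_75', '75'), ('l_90', '90'), ('l_95', '95'), ('l_99', '99'),
    ('avg', 'av'), ('median', 'median'), ('min', 'min'), ('max', 'max')]

def get_value_by_pattern_sketch(row, pattern, default):
    """Hypothetical stand-in for oltpbench.reporting.utils.get_value_by_pattern:
    return the first value whose key contains the pattern."""
    for key, value in row.items():
        if pattern in key.lower():
            return value
    return default

# One row of a .res file as csv.DictReader would yield it (column names taken
# from the removed code below; values are made up).
row = {'time(sec)': '5', ' throughput(req/sec)': '1204.6',
       ' min_lat(ms)': '0.8321', ' median_lat(ms)': '2.1',
       ' avg_lat(ms)': '2.4567', ' max_lat(ms)': '12.3456'}

latency = {}
for key, pattern in LATENCY_ATTRIBUTE_MAPPING:
    value = get_value_by_pattern_sketch(row, pattern, None)
    # The CSV values are strings here, so convert before rounding to 4
    # significant digits (the real helper may already return numbers).
    latency[key] = float("{:.4}".format(float(value))) if value else value

print(latency)
# {'l_25': None, 'l_75': None, 'l_90': None, 'l_95': None, 'l_99': None,
#  'avg': 2.457, 'median': 2.1, 'min': 0.8321, 'max': 12.35}
```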
script/testing/oltpbench/reporting/parsers/res_parser.py (28 changes: 13 additions & 15 deletions)

```diff
@@ -1,6 +1,8 @@
 import csv
 import json
 
+from oltpbench.reporting.utils import get_value_by_pattern
+from oltpbench.reporting.constants import LATENCY_ATTRIBUTE_MAPPING
 
 def parse_res_file(path):
     """Read data from file ends with ".res".
@@ -12,22 +14,18 @@ def parse_res_file(path):
         incremental_metrics (list, json array): The throughput at different time.
     """
-    time, throughput, min_lat, lat_25th, median_lat, avg_lat, lat_75th, lat_90th, lat_95th, lat_99th, max_lat = [
-    ], [], [], [], [], [], [], [], [], [], []
     with open(path) as csvfile:
         reader = csv.DictReader(csvfile, delimiter=',')
+        incremental_metrics = []
         for row in reader:
-            time.append(float(row['time(sec)']))
-            throughput.append(float(row[' throughput(req/sec)']))
-            min_lat.append(float(row[' min_lat(ms)']))
-            lat_25th.append(float(row[' 25th_lat(ms)']))
-            median_lat.append(float(row[' median_lat(ms)']))
-            avg_lat.append(float(row[' avg_lat(ms)']))
-            lat_75th.append(float(row[' 75th_lat(ms)']))
-            lat_90th.append(float(row[' 90th_lat(ms)']))
-            lat_95th.append(float(row[' 95th_lat(ms)']))
-            lat_99th.append(float(row[' 99th_lat(ms)']))
-            max_lat.append(float(row[' max_lat(ms)']))
-    incremental_metrics = [{"time": t, "throughput": tp, "latency":{"min": ml, "l_25": l25, "median": mel, "avg": al, "l_75": l75, "l_90": l90, "l_95": l95, "l_99": l99, "max": mal}}
-                           for t, tp, ml, l25, mel, al, l75, l90, l95, l99, mal in zip(time, throughput, min_lat, lat_25th, median_lat, avg_lat, lat_75th, lat_90th, lat_95th, lat_99th, max_lat)]
+            metrics_instance = {
+                "time": float(get_value_by_pattern(row,'time',None)),
+                "throughput": float(get_value_by_pattern(row, 'throughput', None))
+            }
+            latency = {}
+            for key, pattern in LATENCY_ATTRIBUTE_MAPPING:
+                value = get_value_by_pattern(row, pattern, None)
+                latency[key] = float("{:.4}".format(value)) if value else value
+            metrics_instance['latency'] = latency
+            incremental_metrics.append(metrics_instance)
     return incremental_metrics
```
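For context, a sketch of how the refactored parser might be invoked from the test harness and the shape of each emitted entry. The module path follows the file layout above (assuming script/testing is on the import path); the .res path and all numbers are made up.

```python
# Hypothetical usage inside the test harness; parse_res_file is the function
# changed above, everything else here is illustrative.
from oltpbench.reporting.parsers.res_parser import parse_res_file

# A .res file is a CSV whose columns (per the removed code) are:
# time(sec), throughput(req/sec), min_lat(ms), 25th_lat(ms), median_lat(ms),
# avg_lat(ms), 75th_lat(ms), 90th_lat(ms), 95th_lat(ms), 99th_lat(ms), max_lat(ms)
incremental_metrics = parse_res_file('oltp_result/tpcc.res')  # made-up path

# Each entry is now a dict of floats rather than ints, e.g.:
# {'time': 5.0, 'throughput': 1204.6,
#  'latency': {'l_25': 1.2, 'l_75': 3.4, 'l_90': 5.6, 'l_95': 7.8, 'l_99': 9.9,
#              'avg': 2.457, 'median': 2.1, 'min': 0.8321, 'max': 12.35}}
```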
Third changed file (path not shown above):

```diff
@@ -70,5 +70,5 @@ def parse_latency_data(latency_dict):
     latency = {}
     for key, pattern in LATENCY_ATTRIBUTE_MAPPING:
         value = get_value_by_pattern(latency_dict, pattern, None)
-        latency[key] = int(value) if value else value
+        latency[key] = float("{:.4}".format(value)) if value else value
     return latency
```
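One detail of the replacement expression: with a float argument, "{:.4}" is Python's general float format, so the 4 counts significant digits rather than decimal places, and very large values switch to scientific notation, which float() still parses back. A quick illustration with assumed sample values:

```python
# Assumed sample latencies in ms, chosen to show the rounding behavior.
for v in [0.8321, 2.4567, 12.3456, 123456.78]:
    s = "{:.4}".format(v)   # 4 significant digits, 'g'-style
    print(v, '->', s, '->', float(s))

# 0.8321    -> '0.8321'    -> 0.8321
# 2.4567    -> '2.457'     -> 2.457
# 12.3456   -> '12.35'     -> 12.35
# 123456.78 -> '1.235e+05' -> 123500.0
```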
script/testing/oltpbench/test_case_oltp.py (4 changes: 2 additions & 2 deletions)

```diff
@@ -119,8 +119,8 @@ def run_post_test(self):
 
         # publish results
         if self.publish_results:
-            report(self.publish_results, self.server_data,os.path.join(
-                os.getcwd(), "oltp_result",self.filename_suffix),self.publish_username,self.publish_password,self.query_mode)
+            report(self.publish_results, self.server_data, os.path.join(
+                os.getcwd(), "oltp_result",self.filename_suffix), self.publish_username, self.publish_password, self.query_mode)
 
     def create_result_dir(self):
         if not os.path.exists(self.test_result_dir):
```
