use fio's job_start parameter to compute start and end time of fio jobs. (#1457)

* testing changes

testing changes

testing changes

testing changes

testing changes

testing changes

testing changes

testing changes

testing changes

testing changes

testing changes

revert seq_rand_read_write.fio

clean up PR

* revert local testing changes

* remove unused files

* clean up pr

* review comments

* review comments

* added unit tests
ashmeenkaur authored and gargnitingoogle committed Nov 1, 2023
1 parent 93a9272 commit bd49633
Showing 11 changed files with 410 additions and 44 deletions.
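In brief, the change stops walking the jobs in reverse from the final timestamp and instead derives each job's window from the job_start field that fio reports per job. Below is a minimal sketch of the new calculation; it is illustrative rather than the repository's actual implementation, and it assumes fio's JSON field names ('job_start' in epoch milliseconds, a per-mode 'runtime' in milliseconds, and a seconds-style 'ramp_time' option under 'global options' or 'job options').

# Illustrative sketch of the new start/end-time derivation (assumed field names,
# not the repository's exact code).
import json
from typing import List, Tuple


def _ramp_ms(opts: dict) -> int:
  """Parses a ramp_time option such as '10s' into milliseconds (seconds assumed)."""
  ramp = opts.get('ramp_time')
  return int(str(ramp).rstrip('s')) * 1000 if ramp else 0


def compute_job_windows(fio_json_path: str) -> List[Tuple[int, int]]:
  """Returns (start_s, end_s) per job: start is job_start, end adds runtime and ramp time."""
  with open(fio_json_path) as f:
    out = json.load(f)
  global_ramp_ms = _ramp_ms(out.get('global options', {}))
  windows = []
  for job in out['jobs']:
    ramp_ms = _ramp_ms(job.get('job options', {})) or global_ramp_ms
    runtime_ms = job['read']['runtime']      # 'write' for write jobs; 'read' assumed here
    start_ms = job['job_start']              # epoch milliseconds reported by fio
    end_ms = start_ms + runtime_ms + ramp_ms
    windows.append((start_ms // 1000, round(end_ms / 1000)))
  return windows

The diff below applies the same idea inside _get_start_end_times and factors the ramp-time lookups into _get_global_ramp_time and _get_job_ramp_time helpers.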
1 change: 1 addition & 0 deletions perfmetrics/scripts/fio/constants.py
@@ -16,6 +16,7 @@
START_TIME = 'start_time'
END_TIME = 'end_time'
RW = 'rw'
JOB_START = 'job_start'
READ = 'read'
WRITE = 'write'
METRICS = 'metrics'
71 changes: 31 additions & 40 deletions perfmetrics/scripts/fio/fio_metrics.py
@@ -189,6 +189,23 @@ def _load_file_dict(self, filepath) -> Dict[str, Any]:
      raise NoValuesError(f'JSON file {filepath} returned empty object')
    return fio_out

  def _get_global_ramp_time(self, out_json):
    global_ramptime_ms = 0
    if consts.GLOBAL_OPTS in out_json:
      if consts.RAMPTIME in out_json[consts.GLOBAL_OPTS]:
        global_ramptime_ms = _convert_value(
            out_json[consts.GLOBAL_OPTS][consts.RAMPTIME],
            consts.TIME_TO_MS_CONVERSION, 's')
    return global_ramptime_ms

  def _get_job_ramp_time(self, job):
    ramptime_ms = 0
    if consts.JOB_OPTS in job:
      if consts.RAMPTIME in job[consts.JOB_OPTS]:
        ramptime_ms = _convert_value(job[consts.JOB_OPTS][consts.RAMPTIME],
                                     consts.TIME_TO_MS_CONVERSION, 's')
    return ramptime_ms

  def _get_start_end_times(self, out_json, job_params) -> List[Tuple[int]]:
    """Returns start and end times of each job as a list.
@@ -204,54 +221,28 @@ def _get_start_end_times(self, out_json, job_params) -> List[Tuple[int]]:
      KeyError: If RW is not present in any dict in job_params
    """
    # Creating a list of just the 'rw' job parameter. Later, we will
    # loop through the jobs from the end, therefore we are creating
    # reversed rw list for easy access
    rw_rev_list = [job_param[consts.RW] for job_param in reversed(job_params)]
    # Creating a list of just the 'rw' job parameter.
    rw_list = [job_param[consts.RW] for job_param in job_params]

    global_ramptime_ms = 0
    global_startdelay_ms = 0
    if consts.GLOBAL_OPTS in out_json:
      if consts.RAMPTIME in out_json[consts.GLOBAL_OPTS]:
        global_ramptime_ms = _convert_value(
            out_json[consts.GLOBAL_OPTS][consts.RAMPTIME],
            consts.TIME_TO_MS_CONVERSION, 's')
      if consts.STARTDELAY in out_json[consts.GLOBAL_OPTS]:
        global_startdelay_ms = _convert_value(
            out_json[consts.GLOBAL_OPTS][consts.STARTDELAY],
            consts.TIME_TO_MS_CONVERSION, 's')

    next_end_time_ms = 0
    rev_start_end_times = []
    # Looping from end since the given time is the final end time
    for i, job in enumerate(list(reversed(out_json[consts.JOBS]))):
      rw = rw_rev_list[i]
    global_ramptime_ms = self._get_global_ramp_time(out_json)
    start_end_times = []
    for i, job in enumerate(list(out_json[consts.JOBS])):
      rw = rw_list[i]
      job_rw = job[_get_rw(rw)]
      ramptime_ms = 0
      startdelay_ms = 0
      if consts.JOB_OPTS in job:
        if consts.RAMPTIME in job[consts.JOB_OPTS]:
          ramptime_ms = _convert_value(job[consts.JOB_OPTS][consts.RAMPTIME],
                                       consts.TIME_TO_MS_CONVERSION, 's')
      ramptime_ms = self._get_job_ramp_time(job)

      if ramptime_ms == 0:
        ramptime_ms = global_ramptime_ms
      if startdelay_ms == 0:
        startdelay_ms = global_startdelay_ms

      # for multiple jobs, end time of one job = start time of next job
      end_time_ms = next_end_time_ms if next_end_time_ms > 0 else out_json[
          consts.TIMESTAMP_MS]
      # job start time = job end time - job runtime - ramp time
      start_time_ms = end_time_ms - job_rw[consts.RUNTIME] - ramptime_ms
      next_end_time_ms = start_time_ms - startdelay_ms
      start_time_ms = job[consts.JOB_START]
      end_time_ms = start_time_ms + job_rw[consts.RUNTIME] + ramptime_ms

      # converting start and end time to seconds
      start_time_s = start_time_ms // 1000
      end_time_s = round(end_time_ms/1000)
      rev_start_end_times.append((start_time_s, end_time_s))
      end_time_s = round(end_time_ms / 1000)
      start_end_times.append((start_time_s, end_time_s))

    return list(reversed(rev_start_end_times))
    return list(start_end_times)

def _get_job_params(self, out_json):
"""Returns parameter values of each job.
@@ -435,8 +426,8 @@ def _add_to_gsheet(self, jobs, worksheet_name):
    gsheet.write_to_google_sheet(worksheet_name, values)

  def get_metrics(self,
      filepath,
      worksheet_name=None) -> List[Dict[str, Any]]:
                  filepath,
                  worksheet_name=None) -> List[Dict[str, Any]]:
    """Returns job metrics obtained from given filepath and writes to gsheets.
    Args:
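For intuition, a small worked example: using the job_start value from good_out_job.json, the 10 s global ramp time that the new unit tests expect, and a hypothetical 30 s runtime (the fixture's runtime is not shown in this diff), the computed window is:

# Worked example; runtime_ms is hypothetical, the other two values come from the
# test data and the new unit-test expectations.
job_start_ms = 1653027084555     # "job_start" field in good_out_job.json
ramp_ms = 10000                  # global ramp_time of 10s -> 10000 ms
runtime_ms = 30000               # hypothetical 30 s job runtime

start_s = job_start_ms // 1000                                # 1653027084
end_s = round((job_start_ms + runtime_ms + ramp_ms) / 1000)   # 1653027125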
56 changes: 55 additions & 1 deletion perfmetrics/scripts/fio/fio_metrics_test.py
@@ -3,8 +3,10 @@
Usage from perfmetrics/scripts folder: python3 -m fio.fio_metrics_test
"""
import unittest
import json
from unittest import mock
from fio import fio_metrics
from fio import constants as consts

TEST_PATH = './fio/testdata/'
GOOD_FILE = 'good_out_job.json'
@@ -16,6 +18,7 @@
BAD_FORMAT_FILE = 'bad_format.json'
MULTIPLE_JOBS_GLOBAL_OPTIONS_FILE = 'multiple_jobs_global_options.json'
MULTIPLE_JOBS_JOB_OPTIONS_FILE = 'multiple_jobs_job_options.json'
NO_GLOBAL_RAMP_TIME = 'no_global_ramp_time.json'

SPREADSHEET_ID = '1kvHv1OBCzr9GnFxRu9RTJC7jjQjc9M4rAiDnhyak2Sg'
WORKSHEET_NAME = 'fio_metrics'
@@ -63,6 +66,7 @@ def test_load_file_dict_good_file(self):
        },
        'jobs': [{
            'jobname': '1_thread',
            "job_start": 1653027084555,
            'groupid': 0,
            'error': 0,
            'eta': 0,
@@ -401,6 +405,56 @@ def test_get_start_end_times_no_rw_raises_key_error(self):
    with self.assertRaises(KeyError):
      _ = self.fio_metrics_obj._get_start_end_times({}, extracted_job_params)

  def test_get_global_ramp_time_when_global_ramp_time_is_present(self):
    fio_out = {}
    f = open(get_full_filepath(GOOD_FILE), 'r')
    fio_out = json.load(f)
    f.close()
    expected_global_ramp_time = 10000

    extracted_global_ramp_time = self.fio_metrics_obj \
        ._get_global_ramp_time(fio_out)

    self.assertEqual(expected_global_ramp_time, extracted_global_ramp_time)

  def test_get_global_ramp_time_when_global_ramp_time_is_not_present(self):
    fio_out = {}
    f = open(get_full_filepath(NO_GLOBAL_RAMP_TIME), 'r')
    fio_out = json.load(f)
    f.close()
    expected_global_ramp_time = 0

    extracted_global_ramp_time = self.fio_metrics_obj._get_global_ramp_time(
        fio_out)

    self.assertEqual(expected_global_ramp_time, extracted_global_ramp_time)

  def test_get_job_ramp_time_when_job_ramp_time_is_present(self):
    fio_out = {}
    f = open(get_full_filepath(NO_GLOBAL_RAMP_TIME), 'r')
    fio_out = json.load(f)
    f.close()
    job = list(fio_out[consts.JOBS])[0]
    expected_job_ramp_time = 20000

    extracted_job_ramp_time = self.fio_metrics_obj \
        ._get_job_ramp_time(job)

    self.assertEqual(expected_job_ramp_time, extracted_job_ramp_time)

  def test_get_job_ramp_time_when_job_ramp_time_is_not_present(self):
    fio_out = {}
    f = open(get_full_filepath(GOOD_FILE), 'r')
    fio_out = json.load(f)
    f.close()
    job = list(fio_out[consts.JOBS])[0]
    expected_job_ramp_time = 0

    extracted_job_ramp_time = self.fio_metrics_obj._get_job_ramp_time(
        job)

    self.assertEqual(expected_job_ramp_time, extracted_job_ramp_time)

  def test_extract_metrics_from_good_file(self):
    json_obj = self.fio_metrics_obj._load_file_dict(
        get_full_filepath(GOOD_FILE))
@@ -673,7 +727,7 @@ def test_get_metrics_for_multiple_jobs_global_options(self):
            },
            range='{}!A2'.format(WORKSHEET_NAME))
    ]

    with mock.patch.object(fio_metrics.gsheet, '_get_sheets_service_client'
                           ) as get_sheets_service_client_mock:
      get_sheets_service_client_mock.return_value = sheets_service_mock
1 change: 1 addition & 0 deletions perfmetrics/scripts/fio/testdata/good_out_job.json
@@ -26,6 +26,7 @@
"jobs" : [
{
"jobname" : "1_thread",
"job_start" : 1653027084555,
"groupid" : 0,
"error" : 0,
"eta" : 0,
1 change: 1 addition & 0 deletions perfmetrics/scripts/fio/testdata/missing_metric_key.json
@@ -26,6 +26,7 @@
"jobs" : [
{
"jobname" : "1_thread",
"job_start" : 1653027084555,
"groupid" : 0,
"error" : 0,
"eta" : 0,
perfmetrics/scripts/fio/testdata/… (file name not captured)
@@ -29,6 +29,7 @@
"jobs" : [
{
"jobname" : "1_thread",
"job_start" : 1653381667555,
"groupid" : 0,
"error" : 0,
"eta" : 0,
@@ -288,9 +289,10 @@
"latency_percentile" : 100.000000,
"latency_window" : 0
},

{
"jobname" : "2_thread",
"job_start" : 1653381757234,
"groupid" : 1,
"error" : 0,
"eta" : 0,
perfmetrics/scripts/fio/testdata/… (file name not captured)
@@ -25,6 +25,7 @@
"jobs" : [
{
"jobname" : "1_thread",
"job_start" : 1653596980555,
"groupid" : 0,
"error" : 0,
"eta" : 0,
@@ -289,6 +290,7 @@
},
{
"jobname" : "2_thread",
"job_start" : 1653597076112,
"groupid" : 1,
"error" : 0,
"eta" : 0,
