Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Use fio's job_start parameter to compute start and end times of fio jobs. #1457

Merged
Merged 7 commits on Oct 20, 2023
Merged
Show file tree
Hide file tree
Changes from 5 commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
1 change: 1 addition & 0 deletions perfmetrics/scripts/fio/constants.py
Original file line number Diff line number Diff line change
Expand Up @@ -16,6 +16,7 @@
START_TIME = 'start_time'
END_TIME = 'end_time'
RW = 'rw'
JOB_START = 'job_start'
READ = 'read'
WRITE = 'write'
METRICS = 'metrics'
Expand Down
40 changes: 12 additions & 28 deletions perfmetrics/scripts/fio/fio_metrics.py
Original file line number Diff line number Diff line change
Expand Up @@ -204,54 +204,38 @@ def _get_start_end_times(self, out_json, job_params) -> List[Tuple[int]]:
KeyError: If RW is not present in any dict in job_params

"""
# Creating a list of just the 'rw' job parameter. Later, we will
# loop through the jobs from the end, therefore we are creating
# reversed rw list for easy access
rw_rev_list = [job_param[consts.RW] for job_param in reversed(job_params)]
# Creating a list of just the 'rw' job parameter.
rw_list = [job_param[consts.RW] for job_param in job_params]

global_ramptime_ms = 0
global_startdelay_ms = 0
if consts.GLOBAL_OPTS in out_json:
if consts.RAMPTIME in out_json[consts.GLOBAL_OPTS]:
global_ramptime_ms = _convert_value(
out_json[consts.GLOBAL_OPTS][consts.RAMPTIME],
consts.TIME_TO_MS_CONVERSION, 's')
if consts.STARTDELAY in out_json[consts.GLOBAL_OPTS]:
global_startdelay_ms = _convert_value(
out_json[consts.GLOBAL_OPTS][consts.STARTDELAY],
consts.TIME_TO_MS_CONVERSION, 's')

next_end_time_ms = 0
rev_start_end_times = []
# Looping from end since the given time is the final end time
for i, job in enumerate(list(reversed(out_json[consts.JOBS]))):
rw = rw_rev_list[i]
start_end_times = []
for i, job in enumerate(list(out_json[consts.JOBS])):
rw = rw_list[i]
job_rw = job[_get_rw(rw)]
ramptime_ms = 0
startdelay_ms = 0
if consts.JOB_OPTS in job:
ashmeenkaur marked this conversation as resolved.
Show resolved Hide resolved
if consts.RAMPTIME in job[consts.JOB_OPTS]:
ramptime_ms = _convert_value(job[consts.JOB_OPTS][consts.RAMPTIME],
consts.TIME_TO_MS_CONVERSION, 's')

if ramptime_ms == 0:
ramptime_ms = global_ramptime_ms
if startdelay_ms == 0:
startdelay_ms = global_startdelay_ms

# for multiple jobs, end time of one job = start time of next job
end_time_ms = next_end_time_ms if next_end_time_ms > 0 else out_json[
consts.TIMESTAMP_MS]
# job start time = job end time - job runtime - ramp time
start_time_ms = end_time_ms - job_rw[consts.RUNTIME] - ramptime_ms
next_end_time_ms = start_time_ms - startdelay_ms
start_time_ms = job[consts.JOB_START]
ashmeenkaur marked this conversation as resolved.
Show resolved Hide resolved
end_time_ms = start_time_ms + job_rw[consts.RUNTIME] + ramptime_ms

# converting start and end time to seconds
start_time_s = start_time_ms // 1000
end_time_s = round(end_time_ms/1000)
rev_start_end_times.append((start_time_s, end_time_s))
end_time_s = round(end_time_ms / 1000)
start_end_times.append((start_time_s, end_time_s))

return list(reversed(rev_start_end_times))
return list(start_end_times)

def _get_job_params(self, out_json):
"""Returns parameter values of each job.
Expand Down Expand Up @@ -435,8 +419,8 @@ def _add_to_gsheet(self, jobs, worksheet_name):
gsheet.write_to_google_sheet(worksheet_name, values)

def get_metrics(self,
filepath,
worksheet_name=None) -> List[Dict[str, Any]]:
filepath,
worksheet_name=None) -> List[Dict[str, Any]]:
"""Returns job metrics obtained from given filepath and writes to gsheets.

Args:
Expand Down
3 changes: 2 additions & 1 deletion perfmetrics/scripts/fio/fio_metrics_test.py
Original file line number Diff line number Diff line change
Expand Up @@ -63,6 +63,7 @@ def test_load_file_dict_good_file(self):
},
'jobs': [{
'jobname': '1_thread',
"job_start": 1653027084555,
'groupid': 0,
'error': 0,
'eta': 0,
Expand Down Expand Up @@ -673,7 +674,7 @@ def test_get_metrics_for_multiple_jobs_global_options(self):
},
range='{}!A2'.format(WORKSHEET_NAME))
]

with mock.patch.object(fio_metrics.gsheet, '_get_sheets_service_client'
) as get_sheets_service_client_mock:
get_sheets_service_client_mock.return_value = sheets_service_mock
Expand Down
1 change: 1 addition & 0 deletions perfmetrics/scripts/fio/testdata/good_out_job.json
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
"jobs" : [
{
"jobname" : "1_thread",
"job_start" : 1653027084555,
"groupid" : 0,
"error" : 0,
"eta" : 0,
Expand Down
1 change: 1 addition & 0 deletions perfmetrics/scripts/fio/testdata/missing_metric_key.json
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
"jobs" : [
{
"jobname" : "1_thread",
"job_start" : 1653027084555,
"groupid" : 0,
"error" : 0,
"eta" : 0,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -29,6 +29,7 @@
"jobs" : [
{
"jobname" : "1_thread",
"job_start" : 1653381667555,
"groupid" : 0,
"error" : 0,
"eta" : 0,
Expand Down Expand Up @@ -288,9 +289,10 @@
"latency_percentile" : 100.000000,
"latency_window" : 0
},

{
"jobname" : "2_thread",
"job_start" : 1653381757234,
"groupid" : 1,
"error" : 0,
"eta" : 0,
Expand Down
Original file line number Diff line number Diff line change
Expand Up @@ -25,6 +25,7 @@
"jobs" : [
{
"jobname" : "1_thread",
"job_start" : 1653596980555,
"groupid" : 0,
"error" : 0,
"eta" : 0,
Expand Down Expand Up @@ -289,6 +290,7 @@
},
{
"jobname" : "2_thread",
"job_start" : 1653597076112,
"groupid" : 1,
"error" : 0,
"eta" : 0,
Expand Down
1 change: 1 addition & 0 deletions perfmetrics/scripts/fio/testdata/no_metrics.json
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
"jobs" : [
{
"jobname" : "1_thread",
"job_start" : 1653027084555,
"groupid" : 0,
"error" : 0,
"eta" : 0,
Expand Down
2 changes: 2 additions & 0 deletions perfmetrics/scripts/fio/testdata/partial_metrics.json
Original file line number Diff line number Diff line change
Expand Up @@ -26,6 +26,7 @@
"jobs" : [
{
"jobname" : "1_thread",
"job_start" : 1653027084555,
"groupid" : 0,
"error" : 0,
"eta" : 0,
Expand Down Expand Up @@ -307,6 +308,7 @@
},
{
"jobname" : "2_thread",
"job_start" : 1653027084555,
"groupid" : 0,
"error" : 0,
"eta" : 0,
Expand Down
16 changes: 14 additions & 2 deletions perfmetrics/scripts/run_load_test_and_fetch_metrics.sh
Original file line number Diff line number Diff line change
Expand Up @@ -14,10 +14,22 @@
# limitations under the License.

set -e
echo "Installing fio"
sudo apt-get install fio -y

echo "Installing pip"
sudo apt-get install pip -y
echo "Installing fio"
# install libaio as fio has a dependency on libaio
sudo apt-get install libaio-dev
# We are building fio from source because of issue: https://github.com/axboe/fio/issues/1640.
# The fix is not currently released in a package as of 20th Oct, 2023.
# TODO: install fio via package when release > 3.35 is available.
sudo rm -rf "${KOKORO_ARTIFACTS_DIR}/github/fio"
git clone https://github.com/axboe/fio.git "${KOKORO_ARTIFACTS_DIR}/github/fio"
cd "${KOKORO_ARTIFACTS_DIR}/github/fio" && \
git checkout c5d8ce3fc736210ded83b126c71e3225c7ffd7c9 && \
./configure && make && sudo make install

cd "${KOKORO_ARTIFACTS_DIR}/github/gcsfuse/perfmetrics/scripts"
echo Print the time when FIO tests start
date
echo Running fio test..
Expand Down
Loading