Skip to content

Commit d1db000

Browse files
Authored commit: "small cleanup in microbenchmark scripts" (grpc#28886)
1 parent: e0a5c31 · commit: d1db000

7 files changed

+25
-91
lines changed

tools/internal_ci/linux/grpc_performance_profile_daily.sh

+1-9
Original file line numberDiff line numberDiff line change
@@ -24,12 +24,4 @@ CPUS=`python3 -c 'import multiprocessing; print(multiprocessing.cpu_count())'`
2424

2525
./tools/run_tests/start_port_server.py || true
2626

27-
tools/run_tests/run_microbenchmark.py --collect summary --bigquery_upload || FAILED="true"
28-
29-
# kill port_server.py to prevent the build from freezing
30-
ps aux | grep port_server\\.py | awk '{print $2}' | xargs kill -9
31-
32-
if [ "$FAILED" != "" ]
33-
then
34-
exit 1
35-
fi
27+
tools/run_tests/run_microbenchmark.py --collect summary --bq_result_table microbenchmarks.microbenchmarks

tools/internal_ci/linux/grpc_performance_profile_master.sh

+3-8
Original file line numberDiff line numberDiff line change
@@ -20,13 +20,8 @@ cd $(dirname $0)/../../..
2020

2121
source tools/internal_ci/helper_scripts/prepare_build_linux_perf_rc
2222

23-
tools/internal_ci/linux/run_performance_profile_hourly.sh || FAILED="true"
23+
CPUS=`python3 -c 'import multiprocessing; print(multiprocessing.cpu_count())'`
2424

25-
# kill port_server.py to prevent the build from freezing
26-
ps aux | grep port_server\\.py | awk '{print $2}' | xargs kill -9
27-
28-
if [ "$FAILED" != "" ]
29-
then
30-
exit 1
31-
fi
25+
./tools/run_tests/start_port_server.py || true
3226

27+
tools/run_tests/run_microbenchmark.py --collect summary --bq_result_table microbenchmarks.microbenchmarks

tools/internal_ci/linux/run_performance_profile_daily.sh

-34
This file was deleted.

tools/internal_ci/linux/run_performance_profile_hourly.sh

-24
This file was deleted.

tools/profiling/microbenchmarks/bm_json.py

+6-2
Original file line numberDiff line numberDiff line change
@@ -198,9 +198,13 @@ def expand_json(js, js2=None):
198198
labels = dict(labels_list)
199199
else:
200200
labels = {}
201+
# TODO(jtattermusch): grabbing kokoro env values shouldn't be buried
202+
# deep in the JSON conversion logic.
203+
# Link the data to a kokoro job run by adding
204+
# well known kokoro env variables as metadata for each row
201205
row = {
202-
'jenkins_build': os.environ.get('BUILD_NUMBER', ''),
203-
'jenkins_job': os.environ.get('JOB_NAME', ''),
206+
'jenkins_build': os.environ.get('KOKORO_BUILD_NUMBER', ''),
207+
'jenkins_job': os.environ.get('KOKORO_JOB_NAME', ''),
204208
}
205209
row.update(context)
206210
row.update(bm)

tools/run_tests/run_microbenchmark.py

+11-10
Original file line numberDiff line numberDiff line change
@@ -207,18 +207,18 @@ def collect_summary(bm_name, args):
207207
text(run_summary(bm_name, 'opt', bm_name))
208208
heading('Summary: %s [with counters]' % bm_name)
209209
text(run_summary(bm_name, 'counters', bm_name))
210-
if args.bigquery_upload:
210+
if args.bq_result_table:
211211
with open('%s.csv' % bm_name, 'w') as f:
212212
f.write(
213213
subprocess.check_output([
214214
'tools/profiling/microbenchmarks/bm2bq.py',
215215
'%s.counters.json' % bm_name,
216216
'%s.opt.json' % bm_name
217217
]).decode('UTF-8'))
218-
subprocess.check_call([
219-
'bq', 'load', 'microbenchmarks.microbenchmarks',
220-
'%s.csv' % bm_name
221-
])
218+
subprocess.check_call(
219+
['bq', 'load',
220+
'%s' % args.bq_result_table,
221+
'%s.csv' % bm_name])
222222

223223

224224
collectors = {
@@ -241,11 +241,12 @@ def collect_summary(bm_name, args):
241241
nargs='+',
242242
type=str,
243243
help='Which microbenchmarks should be run')
244-
argp.add_argument('--bigquery_upload',
245-
default=False,
246-
action='store_const',
247-
const=True,
248-
help='Upload results from summary collection to bigquery')
244+
argp.add_argument(
245+
'--bq_result_table',
246+
default='',
247+
type=str,
248+
help='Upload results from summary collection to a specified bigquery table.'
249+
)
249250
argp.add_argument(
250251
'--summary_time',
251252
default=None,

tools/run_tests/run_performance_tests.py

+4-4
Original file line numberDiff line numberDiff line change
@@ -169,12 +169,12 @@ def create_netperf_jobspec(server_host='localhost',
169169
# If netperf is running remotely, the env variables populated by Jenkins
170170
# won't be available on the client, but we need them for uploading results
171171
# to BigQuery.
172-
jenkins_job_name = os.getenv('JOB_NAME')
172+
jenkins_job_name = os.getenv('KOKORO_JOB_NAME')
173173
if jenkins_job_name:
174-
cmd += 'JOB_NAME="%s" ' % jenkins_job_name
175-
jenkins_build_number = os.getenv('BUILD_NUMBER')
174+
cmd += 'KOKORO_JOB_NAME="%s" ' % jenkins_job_name
175+
jenkins_build_number = os.getenv('KOKORO_BUILD_NUMBER')
176176
if jenkins_build_number:
177-
cmd += 'BUILD_NUMBER="%s" ' % jenkins_build_number
177+
cmd += 'KOKORO_BUILD_NUMBER="%s" ' % jenkins_build_number
178178

179179
cmd += 'tools/run_tests/performance/run_netperf.sh'
180180
if client_host:

0 commit comments

Comments (0)