From 541d5d7ae2745a9e3dc55348df3168c0f51d382e Mon Sep 17 00:00:00 2001 From: Jan Tattermusch Date: Thu, 5 May 2016 16:08:49 -0700 Subject: increase qps_worker max lifetime --- tools/run_tests/run_performance_tests.py | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) (limited to 'tools/run_tests/run_performance_tests.py') diff --git a/tools/run_tests/run_performance_tests.py b/tools/run_tests/run_performance_tests.py index 5519666e84..8b67d921f2 100755 --- a/tools/run_tests/run_performance_tests.py +++ b/tools/run_tests/run_performance_tests.py @@ -89,7 +89,7 @@ def create_qpsworker_job(language, shortname=None, jobspec = jobset.JobSpec( cmdline=cmdline, shortname=shortname, - timeout_seconds=30*60) + timeout_seconds=2*60*60) return QpsWorkerJob(jobspec, language, host_and_port) -- cgit v1.2.3 From a21c7e9207490d9bcdd7205c8a5857320a3a450d Mon Sep 17 00:00:00 2001 From: Jan Tattermusch Date: Thu, 5 May 2016 17:31:52 -0700 Subject: add support for scenario categories --- test/cpp/qps/gen_build_yaml.py | 10 +++++++--- tools/run_tests/performance/scenario_config.py | 13 ++++++++++++- tools/run_tests/run_performance_tests.py | 6 +++--- 3 files changed, 22 insertions(+), 7 deletions(-) (limited to 'tools/run_tests/run_performance_tests.py') diff --git a/test/cpp/qps/gen_build_yaml.py b/test/cpp/qps/gen_build_yaml.py index 9d6bf2ab73..6b3329b046 100755 --- a/test/cpp/qps/gen_build_yaml.py +++ b/test/cpp/qps/gen_build_yaml.py @@ -43,12 +43,16 @@ sys.path.append(run_tests_root) import performance.scenario_config as scenario_config +def _scenario_json_string(scenario_json): + return json.dumps(scenario_config.remove_nonproto_fields(scenario_json)) + print yaml.dump({ 'tests': [ { 'name': 'json_run_localhost', - 'shortname': 'json_run_localhost:%s' % js['name'], - 'args': ['--scenario_json', pipes.quote(json.dumps(js))], + 'shortname': 'json_run_localhost:%s' % scenario_json['name'], + 'args': ['--scenario_json', + pipes.quote(_scenario_json_string(scenario_json))], 'ci_platforms': ['linux', 'mac', 'posix', 'windows'], 'platforms': ['linux', 'mac', 'posix', 'windows'], 'flaky': False, @@ -58,6 +62,6 @@ print yaml.dump({ 'cpu_cost': 1000.0, 'exclude_configs': [] } - for js in scenario_config.CXXLanguage().scenarios() + for scenario_json in scenario_config.CXXLanguage().scenarios() ] }) diff --git a/tools/run_tests/performance/scenario_config.py b/tools/run_tests/performance/scenario_config.py index 52efe8b86b..a5b0d59d02 100644 --- a/tools/run_tests/performance/scenario_config.py +++ b/tools/run_tests/performance/scenario_config.py @@ -76,6 +76,14 @@ def _get_secargs(is_secure): return None +def remove_nonproto_fields(scenario): + """Remove special-purpose that contains some extra info about the scenario + but don't belong to the ScenarioConfig protobuf message""" + scenario.pop('CATEGORIES', None) + scenario.pop('SERVER_LANGUAGE', None) + return scenario + + def _ping_pong_scenario(name, rpc_type, client_type, server_type, secure=True, @@ -84,7 +92,8 @@ def _ping_pong_scenario(name, rpc_type, server_language=None, server_core_limit=0, async_server_threads=0, - warmup_seconds=WARMUP_SECONDS): + warmup_seconds=WARMUP_SECONDS, + categories=[]): """Creates a basic ping pong scenario.""" scenario = { 'name': name, @@ -135,6 +144,8 @@ def _ping_pong_scenario(name, rpc_type, if server_language: # the SERVER_LANGUAGE field is recognized by run_performance_tests.py scenario['SERVER_LANGUAGE'] = server_language + if categories: + scenario['CATEGORIES'] = categories return scenario diff --git 
a/tools/run_tests/run_performance_tests.py b/tools/run_tests/run_performance_tests.py index 8b67d921f2..c8c1a19783 100755 --- a/tools/run_tests/run_performance_tests.py +++ b/tools/run_tests/run_performance_tests.py @@ -255,9 +255,9 @@ def create_scenarios(languages, workers_by_lang, remote_host=None, regex='.*', if re.search(args.regex, scenario_json['name']): workers = workers_by_lang[str(language)] # 'SERVER_LANGUAGE' is an indicator for this script to pick - # a server in different language. It doesn't belong to the Scenario - # schema, so we also need to remove it. - custom_server_lang = scenario_json.pop('SERVER_LANGUAGE', None) + # a server in different language. + custom_server_lang = scenario_json.get('SERVER_LANGUAGE', None) + scenario_json = scenario_config.remove_nonproto_fields(scenario_json) if custom_server_lang: if not workers_by_lang.get(custom_server_lang, []): print 'Warning: Skipping scenario %s as' % scenario_json['name'] -- cgit v1.2.3 From 427699b638b2f17219f6b5d33bd571e42eb287fb Mon Sep 17 00:00:00 2001 From: Jan Tattermusch Date: Thu, 5 May 2016 18:10:14 -0700 Subject: mark some scenarios as smoketest and add a cmdline flag --- tools/run_tests/performance/scenario_config.py | 58 ++++++++++++++++++-------- tools/run_tests/run_performance_tests.py | 48 +++++++++++---------- 2 files changed, 67 insertions(+), 39 deletions(-) (limited to 'tools/run_tests/run_performance_tests.py') diff --git a/tools/run_tests/performance/scenario_config.py b/tools/run_tests/performance/scenario_config.py index a5b0d59d02..8b23995149 100644 --- a/tools/run_tests/performance/scenario_config.py +++ b/tools/run_tests/performance/scenario_config.py @@ -34,6 +34,8 @@ WARMUP_SECONDS=5 JAVA_WARMUP_SECONDS=15 # Java needs more warmup time for JIT to kick in. 
BENCHMARK_SECONDS=30 +SMOKETEST='smoketest' + SECURE_SECARGS = {'use_test_ca': True, 'server_host_override': 'foo.test.google.fr'} @@ -164,12 +166,14 @@ class CXXLanguage: # TODO(ctiller): add 70% load latency test for secure in [True, False]: secstr = 'secure' if secure else 'insecure' + smoketest_categories = [SMOKETEST] if secure else None yield _ping_pong_scenario( 'cpp_generic_async_streaming_ping_pong_%s' % secstr, rpc_type='STREAMING', client_type='ASYNC_CLIENT', server_type='ASYNC_GENERIC_SERVER', use_generic_payload=True, server_core_limit=1, async_server_threads=1, - secure=secure) + secure=secure, + categories=smoketest_categories) yield _ping_pong_scenario( 'cpp_protobuf_async_streaming_ping_pong_%s' % secstr, rpc_type='STREAMING', @@ -181,7 +185,8 @@ class CXXLanguage: 'cpp_protobuf_async_unary_ping_pong_%s' % secstr, rpc_type='UNARY', client_type='ASYNC_CLIENT', server_type='ASYNC_SERVER', server_core_limit=1, async_server_threads=1, - secure=secure) + secure=secure, + categories=smoketest_categories) yield _ping_pong_scenario( 'cpp_protobuf_sync_unary_ping_pong_%s' % secstr, rpc_type='UNARY', @@ -194,7 +199,8 @@ class CXXLanguage: client_type='ASYNC_CLIENT', server_type='ASYNC_SERVER', server_core_limit=SINGLE_MACHINE_CORES/2, use_unconstrained_client=True, - secure=secure) + secure=secure, + categories=smoketest_categories) yield _ping_pong_scenario( 'cpp_protobuf_async_streaming_qps_unconstrained_%s' % secstr, rpc_type='STREAMING', @@ -208,7 +214,8 @@ class CXXLanguage: client_type='ASYNC_CLIENT', server_type='ASYNC_GENERIC_SERVER', use_unconstrained_client=True, use_generic_payload=True, server_core_limit=SINGLE_MACHINE_CORES/2, - secure=secure) + secure=secure, + categories=smoketest_categories) yield _ping_pong_scenario( 'cpp_generic_async_streaming_qps_one_server_core_%s' % secstr, rpc_type='STREAMING', @@ -236,7 +243,8 @@ class CSharpLanguage: yield _ping_pong_scenario( 'csharp_generic_async_streaming_ping_pong', rpc_type='STREAMING', client_type='ASYNC_CLIENT', server_type='ASYNC_GENERIC_SERVER', - use_generic_payload=True) + use_generic_payload=True, + categories=[SMOKETEST]) yield _ping_pong_scenario( 'csharp_protobuf_async_streaming_ping_pong', rpc_type='STREAMING', @@ -244,7 +252,8 @@ class CSharpLanguage: yield _ping_pong_scenario( 'csharp_protobuf_async_unary_ping_pong', rpc_type='UNARY', - client_type='ASYNC_CLIENT', server_type='ASYNC_SERVER') + client_type='ASYNC_CLIENT', server_type='ASYNC_SERVER', + categories=[SMOKETEST]) yield _ping_pong_scenario( 'csharp_protobuf_sync_to_async_unary_ping_pong', rpc_type='UNARY', @@ -253,7 +262,8 @@ class CSharpLanguage: yield _ping_pong_scenario( 'csharp_protobuf_async_unary_qps_unconstrained', rpc_type='UNARY', client_type='ASYNC_CLIENT', server_type='ASYNC_SERVER', - use_unconstrained_client=True) + use_unconstrained_client=True, + categories=[SMOKETEST]) yield _ping_pong_scenario( 'csharp_protobuf_async_streaming_qps_unconstrained', rpc_type='STREAMING', @@ -263,7 +273,8 @@ class CSharpLanguage: yield _ping_pong_scenario( 'csharp_to_cpp_protobuf_sync_unary_ping_pong', rpc_type='UNARY', client_type='SYNC_CLIENT', server_type='SYNC_SERVER', - server_language='c++', server_core_limit=1, async_server_threads=1) + server_language='c++', server_core_limit=1, async_server_threads=1, + categories=[SMOKETEST]) yield _ping_pong_scenario( 'csharp_to_cpp_protobuf_async_streaming_ping_pong', rpc_type='STREAMING', @@ -300,12 +311,14 @@ class NodeLanguage: yield _ping_pong_scenario( 'node_protobuf_unary_ping_pong', 
rpc_type='UNARY', - client_type='ASYNC_CLIENT', server_type='ASYNC_SERVER') + client_type='ASYNC_CLIENT', server_type='ASYNC_SERVER', + categories=[SMOKETEST]) yield _ping_pong_scenario( 'node_protobuf_async_unary_qps_unconstrained', rpc_type='UNARY', client_type='ASYNC_CLIENT', server_type='ASYNC_SERVER', - use_unconstrained_client=True) + use_unconstrained_client=True, + categories=[SMOKETEST]) # TODO(jtattermusch): make this scenario work #yield _ping_pong_scenario( @@ -344,7 +357,8 @@ class PythonLanguage: yield _ping_pong_scenario( 'python_generic_async_streaming_ping_pong', rpc_type='STREAMING', client_type='ASYNC_CLIENT', server_type='ASYNC_GENERIC_SERVER', - use_generic_payload=True) + use_generic_payload=True, + categories=[SMOKETEST]) # TODO(jtattermusch): make this scenario work #yield _ping_pong_scenario( @@ -358,7 +372,8 @@ class PythonLanguage: yield _ping_pong_scenario( 'python_protobuf_sync_unary_ping_pong', rpc_type='UNARY', - client_type='SYNC_CLIENT', server_type='SYNC_SERVER') + client_type='SYNC_CLIENT', server_type='SYNC_SERVER', + categories=[SMOKETEST]) # TODO(jtattermusch): make this scenario work #yield _ping_pong_scenario( @@ -375,7 +390,8 @@ class PythonLanguage: yield _ping_pong_scenario( 'python_to_cpp_protobuf_sync_unary_ping_pong', rpc_type='UNARY', client_type='SYNC_CLIENT', server_type='SYNC_SERVER', - server_language='c++', server_core_limit=1, async_server_threads=1) + server_language='c++', server_core_limit=1, async_server_threads=1, + categories=[SMOKETEST]) # TODO(jtattermusch): make this scenario work #yield _ping_pong_scenario( @@ -401,11 +417,13 @@ class RubyLanguage: def scenarios(self): yield _ping_pong_scenario( 'ruby_protobuf_sync_streaming_ping_pong', rpc_type='STREAMING', - client_type='SYNC_CLIENT', server_type='SYNC_SERVER') + client_type='SYNC_CLIENT', server_type='SYNC_SERVER', + categories=[SMOKETEST]) yield _ping_pong_scenario( 'ruby_protobuf_unary_ping_pong', rpc_type='UNARY', - client_type='SYNC_CLIENT', server_type='SYNC_SERVER') + client_type='SYNC_CLIENT', server_type='SYNC_SERVER', + categories=[SMOKETEST]) # TODO: scenario reports QPS of 0.0 #yield _ping_pong_scenario( @@ -448,12 +466,14 @@ class JavaLanguage: def scenarios(self): for secure in [True, False]: secstr = 'secure' if secure else 'insecure' + smoketest_categories = [SMOKETEST] if secure else None yield _ping_pong_scenario( 'java_generic_async_streaming_ping_pong_%s' % secstr, rpc_type='STREAMING', client_type='ASYNC_CLIENT', server_type='ASYNC_GENERIC_SERVER', use_generic_payload=True, async_server_threads=1, - secure=secure, warmup_seconds=JAVA_WARMUP_SECONDS) + secure=secure, warmup_seconds=JAVA_WARMUP_SECONDS, + categories=smoketest_categories) yield _ping_pong_scenario( 'java_protobuf_async_streaming_ping_pong_%s' % secstr, rpc_type='STREAMING', @@ -465,7 +485,8 @@ class JavaLanguage: 'java_protobuf_async_unary_ping_pong_%s' % secstr, rpc_type='UNARY', client_type='ASYNC_CLIENT', server_type='ASYNC_SERVER', async_server_threads=1, - secure=secure, warmup_seconds=JAVA_WARMUP_SECONDS) + secure=secure, warmup_seconds=JAVA_WARMUP_SECONDS, + categories=smoketest_categories) yield _ping_pong_scenario( 'java_protobuf_unary_ping_pong_%s' % secstr, rpc_type='UNARY', @@ -477,7 +498,8 @@ class JavaLanguage: 'java_protobuf_async_unary_qps_unconstrained_%s' % secstr, rpc_type='UNARY', client_type='ASYNC_CLIENT', server_type='ASYNC_SERVER', use_unconstrained_client=True, - secure=secure, warmup_seconds=JAVA_WARMUP_SECONDS) + secure=secure, warmup_seconds=JAVA_WARMUP_SECONDS, + 
categories=smoketest_categories) yield _ping_pong_scenario( 'java_protobuf_async_streaming_qps_unconstrained_%s' % secstr, rpc_type='STREAMING', diff --git a/tools/run_tests/run_performance_tests.py b/tools/run_tests/run_performance_tests.py index c8c1a19783..b1f5889e54 100755 --- a/tools/run_tests/run_performance_tests.py +++ b/tools/run_tests/run_performance_tests.py @@ -244,7 +244,7 @@ def start_qpsworkers(languages, worker_hosts): def create_scenarios(languages, workers_by_lang, remote_host=None, regex='.*', - bq_result_table=None): + category='all', bq_result_table=None): """Create jobspecs for scenarios to run.""" all_workers = [worker for workers in workers_by_lang.values() @@ -253,25 +253,26 @@ def create_scenarios(languages, workers_by_lang, remote_host=None, regex='.*', for language in languages: for scenario_json in language.scenarios(): if re.search(args.regex, scenario_json['name']): - workers = workers_by_lang[str(language)] - # 'SERVER_LANGUAGE' is an indicator for this script to pick - # a server in different language. - custom_server_lang = scenario_json.get('SERVER_LANGUAGE', None) - scenario_json = scenario_config.remove_nonproto_fields(scenario_json) - if custom_server_lang: - if not workers_by_lang.get(custom_server_lang, []): - print 'Warning: Skipping scenario %s as' % scenario_json['name'] - print('SERVER_LANGUAGE is set to %s yet the language has ' - 'not been selected with -l' % custom_server_lang) - continue - for idx in range(0, scenario_json['num_servers']): - # replace first X workers by workers of a different language - workers[idx] = workers_by_lang[custom_server_lang][idx] - scenario = create_scenario_jobspec(scenario_json, - workers, - remote_host=remote_host, - bq_result_table=bq_result_table) - scenarios.append(scenario) + if category in scenario_json.get('CATEGORIES', []) or category == 'all': + workers = workers_by_lang[str(language)] + # 'SERVER_LANGUAGE' is an indicator for this script to pick + # a server in different language. + custom_server_lang = scenario_json.get('SERVER_LANGUAGE', None) + scenario_json = scenario_config.remove_nonproto_fields(scenario_json) + if custom_server_lang: + if not workers_by_lang.get(custom_server_lang, []): + print 'Warning: Skipping scenario %s as' % scenario_json['name'] + print('SERVER_LANGUAGE is set to %s yet the language has ' + 'not been selected with -l' % custom_server_lang) + continue + for idx in range(0, scenario_json['num_servers']): + # replace first X workers by workers of a different language + workers[idx] = workers_by_lang[custom_server_lang][idx] + scenario = create_scenario_jobspec(scenario_json, + workers, + remote_host=remote_host, + bq_result_table=bq_result_table) + scenarios.append(scenario) # the very last scenario requests shutting down the workers. 
scenarios.append(create_quit_jobspec(all_workers, remote_host=remote_host)) @@ -298,7 +299,7 @@ argp = argparse.ArgumentParser(description='Run performance tests.') argp.add_argument('-l', '--language', choices=['all'] + sorted(scenario_config.LANGUAGES.keys()), nargs='+', - default=['all'], + required=True, help='Languages to benchmark.') argp.add_argument('--remote_driver_host', default=None, @@ -311,6 +312,10 @@ argp.add_argument('-r', '--regex', default='.*', type=str, help='Regex to select scenarios to run.') argp.add_argument('--bq_result_table', default=None, type=str, help='Bigquery "dataset.table" to upload results to.') +argp.add_argument('--category', + choices=['smoketest','all'], + default='smoketest', + help='Select a category of tests to run. Smoketest runs by default.') args = argp.parse_args() @@ -354,6 +359,7 @@ try: workers_by_lang=worker_addresses, remote_host=args.remote_driver_host, regex=args.regex, + category=args.category, bq_result_table=args.bq_result_table) if not scenarios: raise Exception('No scenarios to run') -- cgit v1.2.3 From 4de2c3254bf9af853249d3ffc905dea8e982453c Mon Sep 17 00:00:00 2001 From: Jan Tattermusch Date: Tue, 10 May 2016 14:33:07 -0700 Subject: add support for running netperf scenario --- tools/gce/create_linux_performance_worker.sh | 2 +- tools/run_tests/performance/bq_upload_result.py | 43 +++++++++++++++++++--- tools/run_tests/performance/run_netperf.sh | 45 +++++++++++++++++++++++ tools/run_tests/run_performance_tests.py | 47 +++++++++++++++++++++++-- 4 files changed, 130 insertions(+), 7 deletions(-) create mode 100755 tools/run_tests/performance/run_netperf.sh (limited to 'tools/run_tests/run_performance_tests.py') diff --git a/tools/gce/create_linux_performance_worker.sh b/tools/gce/create_linux_performance_worker.sh index 96d5558d9a..c9a0ffa4e1 100755 --- a/tools/gce/create_linux_performance_worker.sh +++ b/tools/gce/create_linux_performance_worker.sh @@ -50,7 +50,7 @@ gcloud compute instances create $INSTANCE_NAME \ --machine-type $MACHINE_TYPE \ --image ubuntu-15-10 \ --boot-disk-size 300 \ - --scope https://www.googleapis.com/auth/bigquery + --scopes https://www.googleapis.com/auth/bigquery echo 'Created GCE instance, waiting 60 seconds for it to come online.' sleep 60 diff --git a/tools/run_tests/performance/bq_upload_result.py b/tools/run_tests/performance/bq_upload_result.py index ebd28f7591..fbccf3bdca 100755 --- a/tools/run_tests/performance/bq_upload_result.py +++ b/tools/run_tests/performance/bq_upload_result.py @@ -48,20 +48,47 @@ import big_query_utils _PROJECT_ID='grpc-testing' -def _upload_scenario_result_to_bigquery(dataset_id, table_id, result_file): +def _upload_netperf_latency_csv_to_bigquery(dataset_id, table_id, result_file): + with open(result_file, 'r') as f: + (col1, col2, col3) = f.read().split(',') + latency50 = float(col1.strip()) * 1000 + latency90 = float(col2.strip()) * 1000 + latency99 = float(col3.strip()) * 1000 + + scenario_result = { + 'scenario': { + 'name': 'netperf_tcp_rr' + }, + 'summary': { + 'latency50': latency50, + 'latency90': latency90, + 'latency99': latency99 + } + } + bq = big_query_utils.create_big_query() _create_results_table(bq, dataset_id, table_id) + if not _insert_result(bq, dataset_id, table_id, scenario_result, flatten=False): + print 'Error uploading result to bigquery.' 
+ sys.exit(1) + + +def _upload_scenario_result_to_bigquery(dataset_id, table_id, result_file): with open(result_file, 'r') as f: scenario_result = json.loads(f.read()) + bq = big_query_utils.create_big_query() + _create_results_table(bq, dataset_id, table_id) + if not _insert_result(bq, dataset_id, table_id, scenario_result): print 'Error uploading result to bigquery.' sys.exit(1) -def _insert_result(bq, dataset_id, table_id, scenario_result): - _flatten_result_inplace(scenario_result) +def _insert_result(bq, dataset_id, table_id, scenario_result, flatten=True): + if flatten: + _flatten_result_inplace(scenario_result) _populate_metadata_inplace(scenario_result) row = big_query_utils.make_row(str(uuid.uuid4()), scenario_result) return big_query_utils.insert_rows(bq, @@ -127,9 +154,17 @@ argp.add_argument('--bq_result_table', required=True, default=None, type=str, help='Bigquery "dataset.table" to upload results to.') argp.add_argument('--file_to_upload', default='scenario_result.json', type=str, help='Report file to upload.') +argp.add_argument('--file_format', + choices=['scenario_result','netperf_latency_csv'], + default='scenario_result', + help='Format of the file to upload.') args = argp.parse_args() dataset_id, table_id = args.bq_result_table.split('.', 2) -_upload_scenario_result_to_bigquery(dataset_id, table_id, args.file_to_upload) + +if args.file_format == 'netperf_latency_csv': + _upload_netperf_latency_csv_to_bigquery(dataset_id, table_id, args.file_to_upload) +else: + _upload_scenario_result_to_bigquery(dataset_id, table_id, args.file_to_upload) print 'Successfully uploaded %s to BigQuery.\n' % args.file_to_upload diff --git a/tools/run_tests/performance/run_netperf.sh b/tools/run_tests/performance/run_netperf.sh new file mode 100755 index 0000000000..55a0c31bd2 --- /dev/null +++ b/tools/run_tests/performance/run_netperf.sh @@ -0,0 +1,45 @@ +#!/bin/bash +# Copyright 2015, Google Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +set -ex + +cd $(dirname $0)/../../.. 
+ +netperf >netperf_latency.txt -P 0 -t TCP_RR -H "$NETPERF_SERVER_HOST" -- -o P50_LATENCY,P90_LATENCY,P99_LATENCY + +cat netperf_latency.txt + +if [ "$BQ_RESULT_TABLE" != "" ] +then + tools/run_tests/performance/bq_upload_result.py \ + --file_to_upload=netperf_latency.txt \ + --file_format=netperf_latency_csv \ + --bq_result_table="$BQ_RESULT_TABLE" +fi diff --git a/tools/run_tests/run_performance_tests.py b/tools/run_tests/run_performance_tests.py index b1f5889e54..674d864539 100755 --- a/tools/run_tests/run_performance_tests.py +++ b/tools/run_tests/run_performance_tests.py @@ -131,6 +131,25 @@ def create_quit_jobspec(workers, remote_host=None): verbose_success=True) +def create_netperf_jobspec(server_host='localhost', client_host=None, + bq_result_table=None): + """Runs netperf benchmark.""" + cmd = 'NETPERF_SERVER_HOST="%s" ' % server_host + if bq_result_table: + cmd += 'BQ_RESULT_TABLE="%s" ' % bq_result_table + cmd += 'tools/run_tests/performance/run_netperf.sh' + if client_host: + user_at_host = '%s@%s' % (_REMOTE_HOST_USERNAME, client_host) + cmd = 'ssh %s "cd ~/performance_workspace/grpc/ && "%s' % (user_at_host, pipes.quote(cmd)) + + return jobset.JobSpec( + cmdline=[cmd], + shortname='netperf', + timeout_seconds=60, + shell=True, + verbose_success=True) + + def archive_repo(languages): """Archives local version of repo including submodules.""" cmdline=['tar', '-cf', '../grpc.tar', '../grpc/'] @@ -244,12 +263,28 @@ def start_qpsworkers(languages, worker_hosts): def create_scenarios(languages, workers_by_lang, remote_host=None, regex='.*', - category='all', bq_result_table=None): + category='all', bq_result_table=None, + netperf=False, netperf_hosts=[]): """Create jobspecs for scenarios to run.""" all_workers = [worker for workers in workers_by_lang.values() for worker in workers] scenarios = [] + + if netperf: + if not netperf_hosts: + netperf_server='localhost' + netperf_client=None + elif len(netperf_hosts) == 1: + netperf_server=netperf_hosts[0] + netperf_client=netperf_hosts[0] + else: + netperf_server=netperf_hosts[0] + netperf_client=netperf_hosts[1] + scenarios.append(create_netperf_jobspec(server_host=netperf_server, + client_host=netperf_client, + bq_result_table=bq_result_table)) + for language in languages: for scenario_json in language.scenarios(): if re.search(args.regex, scenario_json['name']): @@ -316,6 +351,11 @@ argp.add_argument('--category', choices=['smoketest','all'], default='smoketest', help='Select a category of tests to run. Smoketest runs by default.') +argp.add_argument('--netperf', + default=False, + action='store_const', + const=True, + help='Run netperf benchmark as one of the scenarios.') args = argp.parse_args() @@ -360,7 +400,10 @@ try: remote_host=args.remote_driver_host, regex=args.regex, category=args.category, - bq_result_table=args.bq_result_table) + bq_result_table=args.bq_result_table, + netperf=args.netperf, + netperf_hosts=args.remote_worker_host) + if not scenarios: raise Exception('No scenarios to run') -- cgit v1.2.3
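
The thread running through the scenario-config commits above is that a scenario dict may carry extra uppercase keys (CATEGORIES, SERVER_LANGUAGE) that only the test driver understands; run_performance_tests.py reads them and then strips them with remove_nonproto_fields() before the dict is serialized as a ScenarioConfig proto, and gen_build_yaml.py does the same stripping in _scenario_json_string() before pipes.quote()-ing the JSON for --scenario_json. Below is a minimal sketch of that selection-and-stripping flow, assuming scenario dicts shaped like the ones scenario_config.py yields; the example scenario at the bottom and the helper names select_scenarios/example are illustrative, not copied from the repo.

import json

SMOKETEST = 'smoketest'

def remove_nonproto_fields(scenario):
    """Drop keys that drive the test runner but are not ScenarioConfig fields."""
    scenario.pop('CATEGORIES', None)
    scenario.pop('SERVER_LANGUAGE', None)
    return scenario

def select_scenarios(scenario_jsons, category='all'):
    """Yield (proto-ready dict, server-language override) pairs for one category."""
    for scenario_json in scenario_jsons:
        # 'all' bypasses the filter; otherwise the scenario must carry the tag.
        if category != 'all' and category not in scenario_json.get('CATEGORIES', []):
            continue
        # Read the override before stripping it, as create_scenarios() does.
        custom_server_lang = scenario_json.get('SERVER_LANGUAGE', None)
        # Copy so the caller's dict stays intact (the real script mutates in place).
        yield remove_nonproto_fields(dict(scenario_json)), custom_server_lang

# Illustrative input; field values are made up for the example.
example = {
    'name': 'cpp_protobuf_async_unary_ping_pong_secure',
    'num_servers': 1,
    'CATEGORIES': [SMOKETEST],
    'SERVER_LANGUAGE': 'c++',
}
for proto_json, server_lang in select_scenarios([example], category='smoketest'):
    print('%s %s' % (json.dumps(proto_json), server_lang))

Note that the smoketest commit also makes -l/--language required and defaults --category to 'smoketest', so reproducing the previous run-everything behaviour needs an explicit --category=all.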
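
The netperf commit takes a different path from the scenario driver: run_netperf.sh writes a single 'p50,p90,p99' CSV line to netperf_latency.txt, and bq_upload_result.py (invoked with --file_format=netperf_latency_csv) wraps those three numbers in a minimal scenario_result dict before inserting the row without flattening. A sketch of that CSV-to-row step, assuming the three-column output requested via -o P50_LATENCY,P90_LATENCY,P99_LATENCY; the *1000 scaling mirrors the patch, and the resulting unit depends on what netperf reports.

import os

def netperf_csv_to_scenario_result(result_file):
    """Turn netperf's one-line 'p50,p90,p99' CSV into a result dict for BigQuery."""
    with open(result_file, 'r') as f:
        col1, col2, col3 = f.read().split(',')
    # Scale each percentile by 1000, as the upload script does.
    return {
        'scenario': {'name': 'netperf_tcp_rr'},
        'summary': {
            'latency50': float(col1.strip()) * 1000,
            'latency90': float(col2.strip()) * 1000,
            'latency99': float(col3.strip()) * 1000,
        },
    }

# Hypothetical usage; the real script hands this dict to _insert_result(...,
# flatten=False), which skips _flatten_result_inplace() before the BigQuery insert.
if os.path.exists('netperf_latency.txt'):
    row = netperf_csv_to_scenario_result('netperf_latency.txt')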