author:    Craig Tiller <ctiller@google.com>  2016-09-26 07:37:28 -0700
committer: Craig Tiller <ctiller@google.com>  2016-09-26 07:37:28 -0700
commit:    677966a2574d6ab887758687b2231278e7b132ce (patch)
tree:      def7fd34e7f929d502ccdc6c2d7863bce98612bf /tools
parent:    ef5a2eb2678d38d35ff79c4d692bf48603e1c439 (diff)
Be explicit as to which tests to run
Diffstat (limited to 'tools')
-rwxr-xr-x  tools/jenkins/run_full_performance.sh          |  3
-rw-r--r--  tools/run_tests/performance/scenario_config.py | 26
-rwxr-xr-x  tools/run_tests/run_performance_tests.py       | 45
3 files changed, 42 insertions, 32 deletions
diff --git a/tools/jenkins/run_full_performance.sh b/tools/jenkins/run_full_performance.sh
index 3feda866f2..7631db8844 100755
--- a/tools/jenkins/run_full_performance.sh
+++ b/tools/jenkins/run_full_performance.sh
@@ -38,7 +38,7 @@ cd $(dirname $0)/../..
 tools/run_tests/run_performance_tests.py \
     -l c++ csharp node ruby java python go \
     --netperf \
-    --category all \
+    --category scalable \
     --bq_result_table performance_test.performance_experiment \
     --remote_worker_host grpc-performance-server-8core grpc-performance-client-8core grpc-performance-client2-8core \
     || EXIT_CODE=1
@@ -53,4 +53,3 @@ tools/run_tests/run_performance_tests.py \
     || EXIT_CODE=1
 
 exit $EXIT_CODE
-
diff --git a/tools/run_tests/performance/scenario_config.py b/tools/run_tests/performance/scenario_config.py
index f4ccfead0c..f8a54aa144 100644
--- a/tools/run_tests/performance/scenario_config.py
+++ b/tools/run_tests/performance/scenario_config.py
@@ -190,7 +190,7 @@ class CXXLanguage:
     # TODO(ctiller): add 70% load latency test
     for secure in [True, False]:
       secstr = 'secure' if secure else 'insecure'
-      smoketest_categories = [SMOKETEST] if secure else []
+      smoketest_categories = ([SMOKETEST] if secure else []) + [SCALABLE]
 
       yield _ping_pong_scenario(
           'cpp_generic_async_streaming_ping_pong_%s' % secstr,
@@ -287,7 +287,7 @@ class CSharpLanguage:
         'csharp_generic_async_streaming_ping_pong', rpc_type='STREAMING',
         client_type='ASYNC_CLIENT', server_type='ASYNC_GENERIC_SERVER',
         use_generic_payload=True,
-        categories=[SMOKETEST])
+        categories=[SMOKETEST, SCALABLE])
 
     yield _ping_pong_scenario(
         'csharp_protobuf_async_streaming_ping_pong', rpc_type='STREAMING',
@@ -296,7 +296,7 @@ class CSharpLanguage:
     yield _ping_pong_scenario(
         'csharp_protobuf_async_unary_ping_pong', rpc_type='UNARY',
         client_type='ASYNC_CLIENT', server_type='ASYNC_SERVER',
-        categories=[SMOKETEST])
+        categories=[SMOKETEST, SCALABLE])
 
     yield _ping_pong_scenario(
         'csharp_protobuf_sync_to_async_unary_ping_pong', rpc_type='UNARY',
@@ -318,7 +318,7 @@ class CSharpLanguage:
         'csharp_to_cpp_protobuf_sync_unary_ping_pong', rpc_type='UNARY',
         client_type='SYNC_CLIENT', server_type='SYNC_SERVER',
         server_language='c++', server_core_limit=1, async_server_threads=1,
-        categories=[SMOKETEST])
+        categories=[SMOKETEST, SCALABLE])
 
     yield _ping_pong_scenario(
         'csharp_to_cpp_protobuf_async_streaming_ping_pong', rpc_type='STREAMING',
@@ -375,13 +375,13 @@ class NodeLanguage:
     yield _ping_pong_scenario(
         'node_protobuf_unary_ping_pong', rpc_type='UNARY',
         client_type='ASYNC_CLIENT', server_type='ASYNC_SERVER',
-        categories=[SMOKETEST])
+        categories=[SCALABLE, SMOKETEST])
 
     yield _ping_pong_scenario(
         'node_protobuf_async_unary_qps_unconstrained', rpc_type='UNARY',
         client_type='ASYNC_CLIENT', server_type='ASYNC_SERVER',
         unconstrained_client='async',
-        categories=[SMOKETEST])
+        categories=[SCALABLE, SMOKETEST])
 
     # TODO(jtattermusch): make this scenario work
     #yield _ping_pong_scenario(
@@ -420,7 +420,7 @@ class PythonLanguage:
         'python_generic_sync_streaming_ping_pong', rpc_type='STREAMING',
         client_type='SYNC_CLIENT', server_type='ASYNC_GENERIC_SERVER',
         use_generic_payload=True,
-        categories=[SMOKETEST])
+        categories=[SMOKETEST, SCALABLE])
 
     yield _ping_pong_scenario(
         'python_protobuf_sync_streaming_ping_pong', rpc_type='STREAMING',
@@ -433,7 +433,7 @@ class PythonLanguage:
     yield _ping_pong_scenario(
         'python_protobuf_sync_unary_ping_pong', rpc_type='UNARY',
         client_type='SYNC_CLIENT', server_type='ASYNC_SERVER',
-        categories=[SMOKETEST])
+        categories=[SMOKETEST, SCALABLE])
 
     yield _ping_pong_scenario(
         'python_protobuf_sync_unary_qps_unconstrained', rpc_type='UNARY',
@@ -449,7 +449,7 @@ class PythonLanguage:
         'python_to_cpp_protobuf_sync_unary_ping_pong', rpc_type='UNARY',
         client_type='SYNC_CLIENT', server_type='ASYNC_SERVER',
         server_language='c++', server_core_limit=1, async_server_threads=1,
-        categories=[SMOKETEST])
+        categories=[SMOKETEST, SCALABLE])
 
     yield _ping_pong_scenario(
         'python_to_cpp_protobuf_sync_streaming_ping_pong', rpc_type='STREAMING',
@@ -475,12 +475,12 @@ class RubyLanguage:
     yield _ping_pong_scenario(
         'ruby_protobuf_sync_streaming_ping_pong', rpc_type='STREAMING',
         client_type='SYNC_CLIENT', server_type='SYNC_SERVER',
-        categories=[SMOKETEST])
+        categories=[SMOKETEST, SCALABLE])
 
     yield _ping_pong_scenario(
         'ruby_protobuf_unary_ping_pong', rpc_type='UNARY',
         client_type='SYNC_CLIENT', server_type='SYNC_SERVER',
-        categories=[SMOKETEST])
+        categories=[SMOKETEST, SCALABLE])
 
     yield _ping_pong_scenario(
         'ruby_protobuf_sync_unary_qps_unconstrained', rpc_type='UNARY',
@@ -521,7 +521,7 @@ class JavaLanguage:
   def scenarios(self):
     for secure in [True, False]:
       secstr = 'secure' if secure else 'insecure'
-      smoketest_categories = [SMOKETEST] if secure else []
+      smoketest_categories = ([SMOKETEST] if secure else []) + [SCALABLE]
 
       yield _ping_pong_scenario(
           'java_generic_async_streaming_ping_pong_%s' % secstr, rpc_type='STREAMING',
@@ -598,7 +598,7 @@ class GoLanguage:
   def scenarios(self):
     for secure in [True, False]:
       secstr = 'secure' if secure else 'insecure'
-      smoketest_categories = [SMOKETEST] if secure else []
+      smoketest_categories = ([SMOKETEST] if secure else []) + [SCALABLE]
 
       # ASYNC_GENERIC_SERVER for Go actually uses a sync streaming server,
       # but that's mostly because of lack of better name of the enum value.
diff --git a/tools/run_tests/run_performance_tests.py b/tools/run_tests/run_performance_tests.py
index 3f028f177b..92149115fe 100755
--- a/tools/run_tests/run_performance_tests.py
+++ b/tools/run_tests/run_performance_tests.py
@@ -280,7 +280,7 @@ def create_qpsworkers(languages, worker_hosts):
           for worker_idx, worker in enumerate(workers)]
 
 
-Scenario = collections.namedtuple('Scenario', 'jobspec workers')
+Scenario = collections.namedtuple('Scenario', 'jobspec workers name')
 
 
 def create_scenarios(languages, workers_by_lang, remote_host=None, regex='.*',
@@ -307,7 +307,7 @@ def create_scenarios(languages, workers_by_lang, remote_host=None, regex='.*',
         create_netperf_jobspec(server_host=netperf_server,
                                client_host=netperf_client,
                                bq_result_table=bq_result_table),
-        _NO_WORKERS))
+        _NO_WORKERS, 'netperf'))
 
   for language in languages:
     for scenario_json in language.scenarios():
@@ -347,7 +347,8 @@ def create_scenarios(languages, workers_by_lang, remote_host=None, regex='.*',
                                     [w.host_and_port for w in workers],
                                     remote_host=remote_host,
                                     bq_result_table=bq_result_table),
-            workers)
+            workers,
+            scenario_json['name'])
         scenarios.append(scenario)
 
   return scenarios
@@ -382,6 +383,11 @@ argp.add_argument('--remote_worker_host',
                   nargs='+',
                   default=[],
                   help='Worker hosts where to start QPS workers.')
+argp.add_argument('--dry_run',
+                  default=False,
+                  action='store_const',
+                  const=True,
+                  help='Just list scenarios to be run, but don\'t run them.')
 argp.add_argument('-r', '--regex', default='.*', type=str,
                   help='Regex to select scenarios to run.')
 argp.add_argument('--bq_result_table', default=None, type=str,
@@ -412,16 +418,18 @@ if args.remote_worker_host:
 if args.remote_driver_host:
   remote_hosts.add(args.remote_driver_host)
 
-if remote_hosts:
-  archive_repo(languages=[str(l) for l in languages])
-  prepare_remote_hosts(remote_hosts, prepare_local=True)
-else:
-  prepare_remote_hosts([], prepare_local=True)
+if not args.dry_run:
+  if remote_hosts:
+    archive_repo(languages=[str(l) for l in languages])
+    prepare_remote_hosts(remote_hosts, prepare_local=True)
+  else:
+    prepare_remote_hosts([], prepare_local=True)
 
 build_local = False
 if not args.remote_driver_host:
   build_local = True
-build_on_remote_hosts(remote_hosts, languages=[str(l) for l in languages], build_local=build_local)
+if not args.dry_run:
+  build_on_remote_hosts(remote_hosts, languages=[str(l) for l in languages], build_local=build_local)
 
 qpsworker_jobs = create_qpsworkers(languages, args.remote_worker_host)
 
@@ -443,11 +451,14 @@ if not scenarios:
   raise Exception('No scenarios to run')
 
 for scenario in scenarios:
-  try:
-    for worker in scenario.workers:
-      worker.start()
-    jobset.run([scenario.jobspec,
-                create_quit_jobspec(scenario.workers, remote_host=args.remote_driver_host)],
-               newline_on_success=True, maxjobs=1)
-  finally:
-    finish_qps_workers(scenario.workers)
+  if args.dry_run:
+    print(scenario.name)
+  else:
+    try:
+      for worker in scenario.workers:
+        worker.start()
+      jobset.run([scenario.jobspec,
+                  create_quit_jobspec(scenario.workers, remote_host=args.remote_driver_host)],
+                 newline_on_success=True, maxjobs=1)
+    finally:
+      finish_qps_workers(scenario.workers)
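For context, a hypothetical invocation of the updated driver is sketched below. The language list and regex are illustrative placeholders, not values taken from this commit; only the `--dry_run` flag and the `scalable` category are introduced by the change above.

```sh
# Illustrative sketch: list the 'scalable' scenarios matching a regex without
# preparing remote hosts, building, or starting any QPS workers.
# The language selection (-l) and the regex are example values.
tools/run_tests/run_performance_tests.py \
    -l c++ go \
    --category scalable \
    --regex '.*qps_unconstrained.*' \
    --dry_run
```

Because both the host-preparation and build steps are now guarded by `if not args.dry_run:`, such a listing would not require any worker hosts to be reachable.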