author    | Craig Tiller <ctiller@google.com> | 2016-10-31 14:42:53 -0700
committer | Craig Tiller <ctiller@google.com> | 2016-10-31 14:42:53 -0700
commit    | 41fd9f2343ad39fc6db1b417ee403c976fc4b00f (patch)
tree      | bdf50dacd00594b55af953fe200fe72a2ef422d1 /tools
parent    | c650fb3810ae6b9ee12526cc55d69ba12c0632d7 (diff)
parent    | ccc6a9cbf264655ae6b60727cd86b987a62977c9 (diff)
Merge github.com:grpc/grpc into grpc_slice
Diffstat (limited to 'tools')
-rw-r--r-- | tools/doxygen/Doxyfile.c++.internal                     | 2
-rw-r--r-- | tools/run_tests/artifact_targets.py                     | 4
-rw-r--r-- | tools/run_tests/build_artifact_node.bat                 | 2
-rwxr-xr-x | tools/run_tests/build_artifact_python.sh                | 11
-rwxr-xr-x | tools/run_tests/build_python.sh                         | 9
-rwxr-xr-x | tools/run_tests/performance/bq_upload_result.py         | 1
-rw-r--r-- | tools/run_tests/performance/scenario_result_schema.json | 5
-rw-r--r-- | tools/run_tests/pre_build_node.bat                      | 5
-rwxr-xr-x | tools/run_tests/pre_build_node.sh                       | 4
-rw-r--r-- | tools/run_tests/report_utils.py                         | 15
-rwxr-xr-x | tools/run_tests/run_interop_tests.py                    | 7
-rwxr-xr-x | tools/run_tests/run_tests.py                            | 4
-rwxr-xr-x | tools/run_tests/run_tests_in_workspace.sh               | 8
-rwxr-xr-x | tools/run_tests/run_tests_matrix.py                     | 67
-rw-r--r-- | tools/run_tests/sources_and_headers.json                | 22
-rw-r--r-- | tools/run_tests/tests.json                              | 21
16 files changed, 137 insertions, 50 deletions
diff --git a/tools/doxygen/Doxyfile.c++.internal b/tools/doxygen/Doxyfile.c++.internal
index 252bdb7ed1..6c2b475ed0 100644
--- a/tools/doxygen/Doxyfile.c++.internal
+++ b/tools/doxygen/Doxyfile.c++.internal
@@ -864,6 +864,7 @@ src/cpp/client/create_channel_internal.h \
 src/cpp/common/channel_filter.h \
 src/cpp/server/dynamic_thread_pool.h \
 src/cpp/server/thread_pool_interface.h \
+src/cpp/thread_manager/thread_manager.h \
 src/cpp/client/insecure_credentials.cc \
 src/cpp/client/secure_credentials.cc \
 src/cpp/common/auth_property_iterator.cc \
@@ -893,6 +894,7 @@ src/cpp/server/server_cc.cc \
 src/cpp/server/server_context.cc \
 src/cpp/server/server_credentials.cc \
 src/cpp/server/server_posix.cc \
+src/cpp/thread_manager/thread_manager.cc \
 src/cpp/util/byte_buffer_cc.cc \
 src/cpp/util/slice_cc.cc \
 src/cpp/util/status.cc \
diff --git a/tools/run_tests/artifact_targets.py b/tools/run_tests/artifact_targets.py
index d36f963a7c..65d34e17e1 100644
--- a/tools/run_tests/artifact_targets.py
+++ b/tools/run_tests/artifact_targets.py
@@ -109,8 +109,8 @@ class PythonArtifact:
       # TODO(atash) get better platform-detection support in core so we don't
       # need to do this manually...
       environ['CFLAGS'] = '-DGPR_MANYLINUX1=1'
-      environ['BUILD_HEALTH_CHECKING'] = 'TRUE'
-      environ['BUILD_MANYLINUX_WHEEL'] = 'TRUE'
+      environ['GRPC_BUILD_GRPCIO_TOOLS_DEPENDENTS'] = 'TRUE'
+      environ['GRPC_BUILD_MANYLINUX_WHEEL'] = 'TRUE'
       return create_docker_jobspec(self.name,
           'tools/dockerfile/grpc_artifact_python_manylinux_%s' % self.arch,
           'tools/run_tests/build_artifact_python.sh',
diff --git a/tools/run_tests/build_artifact_node.bat b/tools/run_tests/build_artifact_node.bat
index c5bd726db7..57d55ef19e 100644
--- a/tools/run_tests/build_artifact_node.bat
+++ b/tools/run_tests/build_artifact_node.bat
@@ -52,4 +52,4 @@ if %errorlevel% neq 0 exit /b %errorlevel%
 goto :EOF
 
 :error
-exit /b 1
\ No newline at end of file
+exit /b 1
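The two renamed environment variables above are consumed by build_artifact_python.sh (next file). A minimal sketch of setting them by hand, assuming a host with the needed Python/auditwheel toolchain rather than the manylinux Docker image that artifact_targets.py normally uses:

```sh
# Sketch only: artifact_targets.py normally sets these inside the manylinux
# Docker container; exporting them manually is an assumption for local runs.
export GRPC_BUILD_GRPCIO_TOOLS_DEPENDENTS=TRUE   # also build packages that depend on grpcio-tools
export GRPC_BUILD_MANYLINUX_WHEEL=TRUE           # run auditwheel repair on the built wheels
export CFLAGS=-DGPR_MANYLINUX1=1
tools/run_tests/build_artifact_python.sh
```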
diff --git a/tools/run_tests/build_artifact_python.sh b/tools/run_tests/build_artifact_python.sh
index 9fed7c5028..2a1d41fd68 100755
--- a/tools/run_tests/build_artifact_python.sh
+++ b/tools/run_tests/build_artifact_python.sh
@@ -66,7 +66,7 @@ ${SETARCH_CMD} ${PYTHON} tools/distrib/python/grpcio_tools/setup.py sdist
 # Build gRPC tools package binary distribution
 ${SETARCH_CMD} ${PYTHON} tools/distrib/python/grpcio_tools/setup.py bdist_wheel
 
-if [ "$BUILD_MANYLINUX_WHEEL" != "" ]
+if [ "$GRPC_BUILD_MANYLINUX_WHEEL" != "" ]
 then
   for wheel in dist/*.whl; do
     ${AUDITWHEEL} repair $wheel -w "$ARTIFACT_DIR"
@@ -82,16 +82,21 @@ fi
 # Wheels are not supported by setup_requires/dependency_links, so we
 # manually install the dependency.  Note we should only do this if we
 # are in a docker image or in a virtualenv.
-if [ "$BUILD_HEALTH_CHECKING" != "" ]
+if [ "$GRPC_BUILD_GRPCIO_TOOLS_DEPENDENTS" != "" ]
 then
   ${PIP} install -rrequirements.txt
   ${PIP} install grpcio --no-index --find-links "file://$ARTIFACT_DIR/"
   ${PIP} install grpcio-tools --no-index --find-links "file://$ARTIFACT_DIR/"
 
-  # Build gRPC health check source distribution
+  # Build gRPC health-checking source distribution
   ${SETARCH_CMD} ${PYTHON} src/python/grpcio_health_checking/setup.py \
       preprocess build_package_protos sdist
   cp -r src/python/grpcio_health_checking/dist/* "$ARTIFACT_DIR"
+
+  # Build gRPC reflection source distribution
+  ${SETARCH_CMD} ${PYTHON} src/python/grpcio_reflection/setup.py \
+      preprocess build_package_protos sdist
+  cp -r src/python/grpcio_reflection/dist/* "$ARTIFACT_DIR"
 fi
 
 cp -r dist/* "$ARTIFACT_DIR"
diff --git a/tools/run_tests/build_python.sh b/tools/run_tests/build_python.sh
index 54e2fe5347..fb884ad166 100755
--- a/tools/run_tests/build_python.sh
+++ b/tools/run_tests/build_python.sh
@@ -180,9 +180,18 @@ pip_install_dir $ROOT/tools/distrib/python/grpcio_tools
 # TODO(atash) figure out namespace packages and grpcio-tools and auditwheel
 # etc...
 pip_install_dir $ROOT
+
+# Build/install health checking
 $VENV_PYTHON $ROOT/src/python/grpcio_health_checking/setup.py preprocess
 $VENV_PYTHON $ROOT/src/python/grpcio_health_checking/setup.py build_package_protos
 pip_install_dir $ROOT/src/python/grpcio_health_checking
+
+# Build/install reflection
+$VENV_PYTHON $ROOT/src/python/grpcio_reflection/setup.py preprocess
+$VENV_PYTHON $ROOT/src/python/grpcio_reflection/setup.py build_package_protos
+pip_install_dir $ROOT/src/python/grpcio_reflection
+
+# Build/install tests
 $VENV_PYTHON $ROOT/src/python/grpcio_tests/setup.py preprocess
 $VENV_PYTHON $ROOT/src/python/grpcio_tests/setup.py build_package_protos
 pip_install_dir $ROOT/src/python/grpcio_tests
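build_python.sh now builds and installs the new grpcio_reflection package the same way it already handles grpcio_health_checking. Outside the script, the rough manual equivalent would be the following sketch, assuming an activated virtualenv that already has grpcio and grpcio-tools installed:

```sh
# Rough manual equivalent of the new grpcio_reflection steps; the virtualenv
# setup and install order are assumptions, the commands mirror the diff above.
python src/python/grpcio_reflection/setup.py preprocess
python src/python/grpcio_reflection/setup.py build_package_protos
pip install src/python/grpcio_reflection
```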
diff --git a/tools/run_tests/performance/bq_upload_result.py b/tools/run_tests/performance/bq_upload_result.py
index 2a99499843..9b817b5896 100755
--- a/tools/run_tests/performance/bq_upload_result.py
+++ b/tools/run_tests/performance/bq_upload_result.py
@@ -120,6 +120,7 @@ def _flatten_result_inplace(scenario_result):
   scenario_result['serverCores'] = json.dumps(scenario_result['serverCores'])
   scenario_result['clientSuccess'] = json.dumps(scenario_result['clientSuccess'])
   scenario_result['serverSuccess'] = json.dumps(scenario_result['serverSuccess'])
+  scenario_result['requestResults'] = json.dumps(scenario_result.get('requestResults', []))
 
 
 def _populate_metadata_inplace(scenario_result):
diff --git a/tools/run_tests/performance/scenario_result_schema.json b/tools/run_tests/performance/scenario_result_schema.json
index 6bec21df39..3285f212d7 100644
--- a/tools/run_tests/performance/scenario_result_schema.json
+++ b/tools/run_tests/performance/scenario_result_schema.json
@@ -208,5 +208,10 @@
     "name": "serverSuccess",
     "type": "STRING",
     "mode": "NULLABLE"
+  },
+  {
+    "name": "requestResults",
+    "type": "STRING",
+    "mode": "NULLABLE"
   }
 ]
diff --git a/tools/run_tests/pre_build_node.bat b/tools/run_tests/pre_build_node.bat
index a29456f9ed..addb01a2a4 100644
--- a/tools/run_tests/pre_build_node.bat
+++ b/tools/run_tests/pre_build_node.bat
@@ -29,6 +29,5 @@
 
 set PATH=%PATH%;C:\Program Files\nodejs\;%APPDATA%\npm
 
-@rem Expire cache after 1 week
-call npm update --cache-min 604800
-
+@rem Expire cache after 1 day
+call npm update --cache-min 86400
diff --git a/tools/run_tests/pre_build_node.sh b/tools/run_tests/pre_build_node.sh
index 4879e7ad9b..e63be9da52 100755
--- a/tools/run_tests/pre_build_node.sh
+++ b/tools/run_tests/pre_build_node.sh
@@ -37,8 +37,8 @@ set -ex
 
 export GRPC_CONFIG=${CONFIG:-opt}
 
-# Expire cache after 1 week
-npm update --cache-min 604800
+# Expire cache after 1 day
+npm update --cache-min 86400
 
 npm install node-gyp-install
 ./node_modules/.bin/node-gyp-install
diff --git a/tools/run_tests/report_utils.py b/tools/run_tests/report_utils.py
index efe5dc999d..3e18f36510 100644
--- a/tools/run_tests/report_utils.py
+++ b/tools/run_tests/report_utils.py
@@ -36,7 +36,7 @@ try:
   from mako.template import Template
   from mako import exceptions
 except (ImportError):
-  pass  # Mako not installed but it is ok.
+  pass  # Mako not installed but it is ok.
 import os
 import string
 import xml.etree.cElementTree as ET
@@ -63,9 +63,9 @@ def render_junit_xml_report(resultset, xml_report, suite_package='grpc',
   root = ET.Element('testsuites')
   testsuite = ET.SubElement(root, 'testsuite', id='1', package=suite_package,
                             name=suite_name)
-  for shortname, results in resultset.items():
+  for shortname, results in resultset.iteritems():
     for result in results:
-      xml_test = ET.SubElement(testsuite, 'testcase', name=shortname)
+      xml_test = ET.SubElement(testsuite, 'testcase', name=shortname)
       if result.elapsed_time:
         xml_test.set('time', str(result.elapsed_time))
       ET.SubElement(xml_test, 'system-out').text = _filter_msg(result.message,
@@ -79,7 +79,7 @@ def render_junit_xml_report(resultset, xml_report, suite_package='grpc',
 
 
 def render_interop_html_report(
-  client_langs, server_langs, test_cases, auth_test_cases, http2_cases,
+  client_langs, server_langs, test_cases, auth_test_cases, http2_cases,
   resultset, num_failures, cloud_to_prod, prod_servers, http2_interop):
   """Generate HTML report for interop tests."""
   template_file = 'tools/run_tests/interop_html_report.template'
@@ -99,7 +99,7 @@ def render_interop_html_report(
   sorted_server_langs = sorted(server_langs)
   sorted_prod_servers = sorted(prod_servers)
 
-  args = {'client_langs': sorted_client_langs,
+  args = {'client_langs': sorted_client_langs,
           'server_langs': sorted_server_langs,
           'test_cases': sorted_test_cases,
           'auth_test_cases': sorted_auth_test_cases,
@@ -110,9 +110,9 @@ def render_interop_html_report(
           'prod_servers': sorted_prod_servers,
           'http2_interop': http2_interop}
 
-  html_report_out_dir = 'reports'
+  html_report_out_dir = 'reports'
   if not os.path.exists(html_report_out_dir):
-    os.mkdir(html_report_out_dir)
+    os.mkdir(html_report_out_dir)
   html_file_path = os.path.join(html_report_out_dir, 'index.html')
   try:
     with open(html_file_path, 'w') as output_file:
@@ -120,4 +120,3 @@ def render_interop_html_report(
   except:
     print(exceptions.text_error_template().render())
     raise
-
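The pre-build scripts shorten the npm cache expiry from one week to one day; --cache-min is expressed in seconds, so the numbers work out as follows:

```sh
# --cache-min takes seconds:
#   7 days = 7 * 24 * 3600 = 604800
#   1 day  =     24 * 3600 =  86400
npm update --cache-min 86400
```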
diff --git a/tools/run_tests/run_interop_tests.py b/tools/run_tests/run_interop_tests.py
index 0c6efda1f4..c73de6b717 100755
--- a/tools/run_tests/run_interop_tests.py
+++ b/tools/run_tests/run_interop_tests.py
@@ -64,7 +64,9 @@ _SKIP_SERVER_COMPRESSION = ['server_compressed_unary',
 
 _SKIP_COMPRESSION = _SKIP_CLIENT_COMPRESSION + _SKIP_SERVER_COMPRESSION
 
-_SKIP_ADVANCED_GO = ['custom_metadata', 'unimplemented_method']
+_SKIP_ADVANCED_GO = ['custom_metadata',
+                     'unimplemented_method',
+                     'unimplemented_service']
 
 _SKIP_ADVANCED = _SKIP_ADVANCED_GO + ['status_code_and_message']
 
@@ -416,7 +418,8 @@ _TEST_CASES = ['large_unary', 'empty_unary', 'ping_pong',
                'timeout_on_sleeping_server', 'custom_metadata',
                'status_code_and_message', 'unimplemented_method',
                'client_compressed_unary', 'server_compressed_unary',
-               'client_compressed_streaming', 'server_compressed_streaming']
+               'client_compressed_streaming', 'server_compressed_streaming',
+               'unimplemented_service']
 
 _AUTH_TEST_CASES = ['compute_engine_creds', 'jwt_token_creds',
                     'oauth2_auth_token', 'per_rpc_creds']
diff --git a/tools/run_tests/run_tests.py b/tools/run_tests/run_tests.py
index 2886870d38..911843e9f3 100755
--- a/tools/run_tests/run_tests.py
+++ b/tools/run_tests/run_tests.py
@@ -363,7 +363,8 @@ class NodeLanguage(object):
     self.config = config
     self.args = args
     _check_compiler(self.args.compiler, ['default', 'node0.12',
-                                         'node4', 'node5', 'node6'])
+                                         'node4', 'node5', 'node6',
+                                         'node7'])
     if self.args.compiler == 'default':
       self.node_version = '4'
     else:
@@ -1064,6 +1065,7 @@ argp.add_argument('--compiler',
                            'clang3.4', 'clang3.5', 'clang3.6', 'clang3.7',
                            'vs2010', 'vs2013', 'vs2015',
                            'python2.7', 'python3.4', 'python3.5', 'python3.6', 'pypy', 'pypy3',
+                           'node0.12', 'node4', 'node5', 'node6', 'node7',
                            'coreclr'],
                   default='default',
                   help='Selects compiler to use. Allowed values depend on the platform and language.')
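With node7 now accepted by the --compiler check, a Node 7 run can be requested explicitly. A sketch, assuming the Docker-based path so no local Node 7 toolchain is needed:

```sh
# Run the Node test suite against Node 7 (newly allowed by --compiler);
# --use_docker avoids requiring Node 7 on the host.
python tools/run_tests/run_tests.py -l node --compiler node7 --use_docker
```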
diff --git a/tools/run_tests/run_tests_in_workspace.sh b/tools/run_tests/run_tests_in_workspace.sh
index 98ef3566db..9c6c5b76e0 100755
--- a/tools/run_tests/run_tests_in_workspace.sh
+++ b/tools/run_tests/run_tests_in_workspace.sh
@@ -35,11 +35,13 @@ set -ex
 
 cd $(dirname $0)/../..
+export repo_root=$(pwd)
 
 rm -rf "${WORKSPACE_NAME}"
 
-# TODO(jtattermusch): clone --recursive fetches the submodules from github.
-# Try avoiding that to save time and network capacity.
-git clone --recursive . "${WORKSPACE_NAME}"
+git clone . "${WORKSPACE_NAME}"
+# clone gRPC submodules, use data from locally cloned submodules where possible
+git submodule foreach 'cd "${repo_root}/${WORKSPACE_NAME}" \
+        && git submodule update --init --reference ${repo_root}/${name} ${name}'
 
 echo "Running run_tests.py in workspace ${WORKSPACE_NAME}"
 python "${WORKSPACE_NAME}/tools/run_tests/run_tests.py" $@
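The workspace script now clones without --recursive and initializes each submodule with --reference pointing at the already-checked-out copy, so objects are borrowed locally instead of being fetched again from GitHub. A standalone sketch of the same pattern, with illustrative paths and a single submodule picked as an example:

```sh
# Illustrative only: reuse objects from an existing local clone when
# initializing a submodule, instead of downloading them again.
git clone /path/to/existing/grpc grpc-workspace        # plain clone, no --recursive
cd grpc-workspace
git submodule update --init \
    --reference /path/to/existing/grpc/third_party/protobuf third_party/protobuf
```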
diff --git a/tools/run_tests/run_tests_matrix.py b/tools/run_tests/run_tests_matrix.py
index b65010ad8b..2656f1ac5d 100755
--- a/tools/run_tests/run_tests_matrix.py
+++ b/tools/run_tests/run_tests_matrix.py
@@ -46,16 +46,16 @@ os.chdir(_ROOT)
 _RUNTESTS_TIMEOUT = 4*60*60
 
 # Number of jobs assigned to each run_tests.py instance
-_INNER_JOBS = 2
+_DEFAULT_INNER_JOBS = 2
 
 
-def _docker_jobspec(name, runtests_args=[]):
+def _docker_jobspec(name, runtests_args=[], inner_jobs=_DEFAULT_INNER_JOBS):
   """Run a single instance of run_tests.py in a docker container"""
   test_job = jobset.JobSpec(
           cmdline=['python', 'tools/run_tests/run_tests.py',
                    '--use_docker',
                    '-t',
-                   '-j', str(_INNER_JOBS),
+                   '-j', str(inner_jobs),
                    '-x', 'report_%s.xml' % name,
                    '--report_suite_name', '%s' % name] + runtests_args,
          shortname='run_tests_%s' % name,
@@ -63,7 +63,7 @@ def _docker_jobspec(name, runtests_args=[]):
   return test_job
 
 
-def _workspace_jobspec(name, runtests_args=[], workspace_name=None):
+def _workspace_jobspec(name, runtests_args=[], workspace_name=None, inner_jobs=_DEFAULT_INNER_JOBS):
   """Run a single instance of run_tests.py in a separate workspace"""
   if not workspace_name:
     workspace_name = 'workspace_%s' % name
@@ -71,7 +71,7 @@ def _workspace_jobspec(name, runtests_args=[], workspace_name=None):
   test_job = jobset.JobSpec(
           cmdline=['tools/run_tests/run_tests_in_workspace.sh',
                    '-t',
-                   '-j', str(_INNER_JOBS),
+                   '-j', str(inner_jobs),
                    '-x', '../report_%s.xml' % name,
                    '--report_suite_name', '%s' % name] + runtests_args,
           environ=env,
@@ -82,7 +82,8 @@ def _workspace_jobspec(name, runtests_args=[], workspace_name=None):
 
 def _generate_jobs(languages, configs, platforms,
                    arch=None, compiler=None,
-                   labels=[], extra_args=[]):
+                   labels=[], extra_args=[],
+                   inner_jobs=_DEFAULT_INNER_JOBS):
   result = []
   for language in languages:
     for platform in platforms:
@@ -97,68 +98,75 @@ def _generate_jobs(languages, configs, platforms,
 
         runtests_args += extra_args
         if platform == 'linux':
-          job = _docker_jobspec(name=name, runtests_args=runtests_args)
+          job = _docker_jobspec(name=name, runtests_args=runtests_args, inner_jobs=inner_jobs)
         else:
-          job = _workspace_jobspec(name=name, runtests_args=runtests_args)
+          job = _workspace_jobspec(name=name, runtests_args=runtests_args, inner_jobs=inner_jobs)
 
         job.labels = [platform, config, language] + labels
         result.append(job)
   return result
 
 
-def _create_test_jobs(extra_args=[]):
+def _create_test_jobs(extra_args=[], inner_jobs=_DEFAULT_INNER_JOBS):
   test_jobs = []
   # supported on linux only
   test_jobs += _generate_jobs(languages=['sanity', 'php7'],
                               configs=['dbg', 'opt'],
                               platforms=['linux'],
                               labels=['basictests'],
-                              extra_args=extra_args)
+                              extra_args=extra_args,
+                              inner_jobs=inner_jobs)
 
   # supported on all platforms.
   test_jobs += _generate_jobs(languages=['c', 'csharp', 'node', 'python'],
                               configs=['dbg', 'opt'],
                               platforms=['linux', 'macos', 'windows'],
                               labels=['basictests'],
-                              extra_args=extra_args)
+                              extra_args=extra_args,
+                              inner_jobs=inner_jobs)
 
   # supported on linux and mac.
   test_jobs += _generate_jobs(languages=['c++', 'ruby', 'php'],
                               configs=['dbg', 'opt'],
                               platforms=['linux', 'macos'],
                               labels=['basictests'],
-                              extra_args=extra_args)
+                              extra_args=extra_args,
+                              inner_jobs=inner_jobs)
 
   # supported on mac only.
   test_jobs += _generate_jobs(languages=['objc'],
                               configs=['dbg', 'opt'],
                               platforms=['macos'],
                               labels=['basictests'],
-                              extra_args=extra_args)
+                              extra_args=extra_args,
+                              inner_jobs=inner_jobs)
 
   # sanitizers
   test_jobs += _generate_jobs(languages=['c'],
                               configs=['msan', 'asan', 'tsan'],
                               platforms=['linux'],
                               labels=['sanitizers'],
-                              extra_args=extra_args)
+                              extra_args=extra_args,
+                              inner_jobs=inner_jobs)
   test_jobs += _generate_jobs(languages=['c++'],
                               configs=['asan', 'tsan'],
                               platforms=['linux'],
                               labels=['sanitizers'],
-                              extra_args=extra_args)
+                              extra_args=extra_args,
+                              inner_jobs=inner_jobs)
 
   # libuv tests
   test_jobs += _generate_jobs(languages=['c'],
                               configs=['dbg', 'opt'],
                               platforms=['linux'],
                               labels=['libuv'],
-                              extra_args=extra_args + ['--iomgr_platform=uv'])
+                              extra_args=extra_args + ['--iomgr_platform=uv'],
+                              inner_jobs=inner_jobs)
 
   return test_jobs
 
 
-def _create_portability_test_jobs(extra_args=[]):
+def _create_portability_test_jobs(extra_args=[], inner_jobs=_DEFAULT_INNER_JOBS):
   test_jobs = []
   # portability C x86
   test_jobs += _generate_jobs(languages=['c'],
@@ -167,7 +175,8 @@ def _create_portability_test_jobs(extra_args=[]):
                               arch='x86',
                               compiler='default',
                               labels=['portability'],
-                              extra_args=extra_args)
+                              extra_args=extra_args,
+                              inner_jobs=inner_jobs)
 
   # portability C and C++ on x64
   for compiler in ['gcc4.4', 'gcc4.6', 'gcc5.3',
@@ -178,7 +187,8 @@ def _create_portability_test_jobs(extra_args=[]):
                                 arch='x64',
                                 compiler=compiler,
                                 labels=['portability'],
-                                extra_args=extra_args)
+                                extra_args=extra_args,
+                                inner_jobs=inner_jobs)
 
   # portability C on Windows
   for arch in ['x86', 'x64']:
@@ -189,7 +199,8 @@ def _create_portability_test_jobs(extra_args=[]):
                                 arch=arch,
                                 compiler=compiler,
                                 labels=['portability'],
-                                extra_args=extra_args)
+                                extra_args=extra_args,
+                                inner_jobs=inner_jobs)
 
   test_jobs += _generate_jobs(languages=['python'],
                               configs=['dbg'],
@@ -197,7 +208,8 @@ def _create_portability_test_jobs(extra_args=[]):
                               arch='default',
                               compiler='python3.4',
                               labels=['portability'],
-                              extra_args=extra_args)
+                              extra_args=extra_args,
+                              inner_jobs=inner_jobs)
 
   test_jobs += _generate_jobs(languages=['csharp'],
                               configs=['dbg'],
@@ -205,7 +217,8 @@ def _create_portability_test_jobs(extra_args=[]):
                               arch='default',
                               compiler='coreclr',
                               labels=['portability'],
-                              extra_args=extra_args)
+                              extra_args=extra_args,
+                              inner_jobs=inner_jobs)
 
   return test_jobs
 
@@ -220,7 +233,7 @@ def _allowed_labels():
 
 argp = argparse.ArgumentParser(description='Run a matrix of run_tests.py tests.')
 argp.add_argument('-j', '--jobs',
-                  default=multiprocessing.cpu_count()/_INNER_JOBS,
+                  default=multiprocessing.cpu_count()/_DEFAULT_INNER_JOBS,
                   type=int,
                   help='Number of concurrent run_tests.py instances.')
 argp.add_argument('-f', '--filter',
@@ -249,15 +262,21 @@ argp.add_argument('--base_branch',
                   default='origin/master',
                   type=str,
                   help='Branch that pull request is requesting to merge into')
+argp.add_argument('--inner_jobs',
+                  default=_DEFAULT_INNER_JOBS,
+                  type=int,
+                  help='Number of jobs in each run_tests.py instance')
 args = argp.parse_args()
+
 extra_args = []
 if args.build_only:
   extra_args.append('--build_only')
 if args.force_default_poller:
   extra_args.append('--force_default_poller')
 
-all_jobs = _create_test_jobs(extra_args=extra_args) + _create_portability_test_jobs(extra_args=extra_args)
+all_jobs = _create_test_jobs(extra_args=extra_args, inner_jobs=args.inner_jobs) + \
+           _create_portability_test_jobs(extra_args=extra_args, inner_jobs=args.inner_jobs)
 
 jobs = []
 for job in all_jobs:
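The new --inner_jobs flag controls how many parallel jobs each spawned run_tests.py instance gets, while -j/--jobs still controls how many run_tests.py instances run concurrently. A hedged example invocation, with filter labels taken from the matrix above and the numeric values purely illustrative:

```sh
# Run the Linux "basictests" part of the matrix with 2 concurrent
# run_tests.py instances, each using 8 inner jobs.
python tools/run_tests/run_tests_matrix.py -f basictests linux -j 2 --inner_jobs 8
```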
diff --git a/tools/run_tests/sources_and_headers.json b/tools/run_tests/sources_and_headers.json
index 222af3ebf3..57d3b68c1c 100644
--- a/tools/run_tests/sources_and_headers.json
+++ b/tools/run_tests/sources_and_headers.json
@@ -3226,6 +3226,23 @@
   {
     "deps": [
       "gpr",
+      "grpc",
+      "grpc++",
+      "grpc++_test_config"
+    ],
+    "headers": [],
+    "is_filegroup": false,
+    "language": "c++",
+    "name": "thread_manager_test",
+    "src": [
+      "test/cpp/thread_manager/thread_manager_test.cc"
+    ],
+    "third_party": false,
+    "type": "target"
+  },
+  {
+    "deps": [
+      "gpr",
       "gpr_test_util",
       "grpc",
       "grpc++",
@@ -7477,7 +7494,8 @@
       "src/cpp/client/create_channel_internal.h",
       "src/cpp/common/channel_filter.h",
       "src/cpp/server/dynamic_thread_pool.h",
-      "src/cpp/server/thread_pool_interface.h"
+      "src/cpp/server/thread_pool_interface.h",
+      "src/cpp/thread_manager/thread_manager.h"
     ],
     "is_filegroup": true,
     "language": "c++",
@@ -7556,6 +7574,8 @@
       "src/cpp/server/server_credentials.cc",
       "src/cpp/server/server_posix.cc",
       "src/cpp/server/thread_pool_interface.h",
+      "src/cpp/thread_manager/thread_manager.cc",
+      "src/cpp/thread_manager/thread_manager.h",
       "src/cpp/util/byte_buffer_cc.cc",
       "src/cpp/util/slice_cc.cc",
       "src/cpp/util/status.cc",
diff --git a/tools/run_tests/tests.json b/tools/run_tests/tests.json
index c3715995d2..8a0cb84181 100644
--- a/tools/run_tests/tests.json
+++ b/tools/run_tests/tests.json
@@ -3023,6 +3023,27 @@
       "posix",
       "windows"
     ],
+    "cpu_cost": 1.0,
+    "exclude_configs": [],
+    "flaky": false,
+    "gtest": false,
+    "language": "c++",
+    "name": "thread_manager_test",
+    "platforms": [
+      "linux",
+      "mac",
+      "posix",
+      "windows"
+    ]
+  },
+  {
+    "args": [],
+    "ci_platforms": [
+      "linux",
+      "mac",
+      "posix",
+      "windows"
+    ],
     "cpu_cost": 100,
     "exclude_configs": [],
     "exclude_iomgrs": [],
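The thread_manager_test target registered in sources_and_headers.json and tests.json can be exercised through the regular test runner; a sketch, assuming run_tests.py's -r regex filter is available in this revision:

```sh
# Build and run only the new C++ thread manager test via the standard runner
# (the -r filter and dbg config are assumptions; the invocation is illustrative).
python tools/run_tests/run_tests.py -l c++ -c dbg -r thread_manager_test
```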