Diffstat (limited to 'tools')
-rw-r--r--  tools/cmake/gRPCConfig.cmake.in | 7
-rw-r--r--  tools/cmake/gRPCConfigVersion.cmake.in | 11
-rwxr-xr-x  tools/codegen/core/gen_static_metadata.py | 7
-rwxr-xr-x  tools/distrib/check_include_guards.py | 2
-rwxr-xr-x  tools/distrib/python/docgen.py | 3
-rw-r--r--  tools/distrib/python/grpcio_tools/.gitignore | 1
-rw-r--r--  tools/distrib/python/grpcio_tools/README.rst | 40
-rw-r--r--  tools/distrib/python/grpcio_tools/grpc/tools/_protoc_compiler.pyx (renamed from tools/distrib/python/grpcio_tools/grpc/tools/protoc_compiler.pyx) | 0
-rw-r--r--  tools/distrib/python/grpcio_tools/grpc/tools/command.py | 74
-rw-r--r--  tools/distrib/python/grpcio_tools/grpc/tools/protoc.py | 14
-rw-r--r--  tools/distrib/python/grpcio_tools/grpc_version.py | 2
-rw-r--r--  tools/distrib/python/grpcio_tools/setup.py | 124
-rwxr-xr-x  tools/distrib/python/make_grpcio_tools.py | 94
-rw-r--r--  tools/dockerfile/distribtest/python_arch_x64/Dockerfile | 2
-rw-r--r--  tools/dockerfile/distribtest/python_centos6_x64/Dockerfile | 2
-rw-r--r--  tools/dockerfile/distribtest/python_centos7_x64/Dockerfile | 2
-rw-r--r--  tools/dockerfile/distribtest/python_fedora20_x64/Dockerfile | 2
-rw-r--r--  tools/dockerfile/distribtest/python_fedora21_x64/Dockerfile | 2
-rw-r--r--  tools/dockerfile/distribtest/python_fedora22_x64/Dockerfile | 1
-rw-r--r--  tools/dockerfile/distribtest/python_fedora23_x64/Dockerfile | 1
-rw-r--r--  tools/dockerfile/distribtest/python_jessie_x64/Dockerfile | 1
-rw-r--r--  tools/dockerfile/distribtest/python_jessie_x86/Dockerfile | 2
-rw-r--r--  tools/dockerfile/distribtest/python_opensuse_x64/Dockerfile | 2
-rw-r--r--  tools/dockerfile/distribtest/python_ubuntu1204_x64/Dockerfile | 2
-rw-r--r--  tools/dockerfile/distribtest/python_ubuntu1404_x64/Dockerfile | 2
-rw-r--r--  tools/dockerfile/distribtest/python_ubuntu1504_x64/Dockerfile | 2
-rw-r--r--  tools/dockerfile/distribtest/python_ubuntu1510_x64/Dockerfile | 2
-rw-r--r--  tools/dockerfile/distribtest/python_ubuntu1604_x64/Dockerfile | 2
-rw-r--r--  tools/dockerfile/distribtest/python_wheezy_x64/Dockerfile | 2
-rw-r--r--  tools/dockerfile/grpc_artifact_linux_x64/Dockerfile | 2
-rw-r--r--  tools/dockerfile/grpc_artifact_linux_x86/Dockerfile | 2
-rw-r--r--  tools/dockerfile/grpc_check_generated_pb_files/Dockerfile | 5
-rwxr-xr-x  tools/dockerfile/grpc_clang_format/clang_format_all_the_things.sh | 2
-rw-r--r--  tools/dockerfile/interoptest/grpc_interop_csharp/Dockerfile | 19
-rw-r--r--  tools/dockerfile/interoptest/grpc_interop_cxx/Dockerfile | 19
-rw-r--r--  tools/dockerfile/interoptest/grpc_interop_go/Dockerfile | 15
-rw-r--r--  tools/dockerfile/interoptest/grpc_interop_http2/Dockerfile | 15
-rw-r--r--  tools/dockerfile/interoptest/grpc_interop_java/Dockerfile | 15
-rw-r--r--  tools/dockerfile/interoptest/grpc_interop_node/Dockerfile | 19
-rw-r--r--  tools/dockerfile/interoptest/grpc_interop_php/Dockerfile | 18
-rw-r--r--  tools/dockerfile/interoptest/grpc_interop_php7/Dockerfile | 125
-rwxr-xr-x  tools/dockerfile/interoptest/grpc_interop_php7/build_interop.sh | 52
-rw-r--r--  tools/dockerfile/interoptest/grpc_interop_python/Dockerfile | 6
-rw-r--r--  tools/dockerfile/interoptest/grpc_interop_ruby/Dockerfile | 19
-rw-r--r--  tools/dockerfile/stress_test/grpc_interop_stress_csharp/Dockerfile | 15
-rw-r--r--  tools/dockerfile/stress_test/grpc_interop_stress_cxx/Dockerfile | 15
-rw-r--r--  tools/dockerfile/stress_test/grpc_interop_stress_go/Dockerfile | 15
-rw-r--r--  tools/dockerfile/stress_test/grpc_interop_stress_java/Dockerfile | 15
-rw-r--r--  tools/dockerfile/stress_test/grpc_interop_stress_node/Dockerfile | 19
-rw-r--r--  tools/dockerfile/stress_test/grpc_interop_stress_php/Dockerfile | 30
-rw-r--r--  tools/dockerfile/stress_test/grpc_interop_stress_python/Dockerfile | 2
-rw-r--r--  tools/dockerfile/stress_test/grpc_interop_stress_ruby/Dockerfile | 15
-rw-r--r--  tools/dockerfile/test/csharp_coreclr_x64/Dockerfile | 19
-rw-r--r--  tools/dockerfile/test/csharp_jessie_x64/Dockerfile | 19
-rw-r--r--  tools/dockerfile/test/cxx_jessie_x64/Dockerfile | 19
-rw-r--r--  tools/dockerfile/test/cxx_jessie_x86/Dockerfile | 19
-rw-r--r--  tools/dockerfile/test/cxx_ubuntu1404_x64/Dockerfile | 19
-rw-r--r--  tools/dockerfile/test/cxx_ubuntu1604_x64/Dockerfile | 19
-rw-r--r--  tools/dockerfile/test/cxx_wheezy_x64/Dockerfile | 19
-rw-r--r--  tools/dockerfile/test/fuzzer/Dockerfile | 19
-rw-r--r--  tools/dockerfile/test/multilang_jessie_x64/Dockerfile | 12
-rw-r--r--  tools/dockerfile/test/node_jessie_x64/Dockerfile | 19
-rw-r--r--  tools/dockerfile/test/php7_jessie_x64/Dockerfile | 105
-rw-r--r--  tools/dockerfile/test/php_jessie_x64/Dockerfile | 25
-rw-r--r--  tools/dockerfile/test/python_jessie_x64/Dockerfile | 6
-rw-r--r--  tools/dockerfile/test/python_pyenv_x64/Dockerfile | 112
-rw-r--r--  tools/dockerfile/test/ruby_jessie_x64/Dockerfile | 19
-rw-r--r--  tools/dockerfile/test/sanity/Dockerfile | 15
-rw-r--r--  tools/doxygen/Doxyfile.c++ | 2
-rw-r--r--  tools/doxygen/Doxyfile.c++.internal | 2
-rw-r--r--  tools/doxygen/Doxyfile.c++.internal.orig | 2505
-rw-r--r--  tools/doxygen/Doxyfile.core | 2
-rw-r--r--  tools/doxygen/Doxyfile.core.internal | 14
-rwxr-xr-x  tools/gce/create_linux_worker.sh | 3
-rwxr-xr-x  tools/gce/linux_performance_worker_init.sh | 2
-rwxr-xr-x  tools/gcp/stress_test/stress_test_utils.py | 2
-rwxr-xr-x  tools/profiling/latency_profile/profile_analyzer.py | 28
-rw-r--r--  tools/run_tests/artifact_targets.py | 64
-rw-r--r--  tools/run_tests/build_artifact_python.bat | 57
-rwxr-xr-x  tools/run_tests/build_artifact_python.sh | 29
-rwxr-xr-x  tools/run_tests/build_package_node.sh | 4
-rwxr-xr-x  tools/run_tests/build_python.sh | 161
-rw-r--r--  tools/run_tests/build_python_msys2.sh | 36
-rw-r--r--  tools/run_tests/distribtest_targets.py | 2
-rwxr-xr-x  tools/run_tests/dockerjob.py | 4
-rwxr-xr-x  tools/run_tests/jobset.py | 15
-rw-r--r--  tools/run_tests/package_targets.py | 2
-rwxr-xr-x  tools/run_tests/performance/bq_upload_result.py | 2
-rwxr-xr-x  tools/run_tests/performance/run_worker_python.sh | 2
-rw-r--r--  tools/run_tests/performance/scenario_config.py | 25
-rw-r--r--  tools/run_tests/performance/scenario_result_schema.json | 10
-rwxr-xr-x  tools/run_tests/port_server.py | 21
-rw-r--r--  tools/run_tests/pre_build_csharp.bat | 55
-rwxr-xr-x  tools/run_tests/pre_build_csharp.sh | 51
-rw-r--r--  tools/run_tests/report_utils.py | 8
-rwxr-xr-x  tools/run_tests/run_interop_tests.py | 72
-rwxr-xr-x  tools/run_tests/run_performance_tests.py | 12
-rwxr-xr-x  tools/run_tests/run_python.sh | 17
-rwxr-xr-x  tools/run_tests/run_stress_tests.py | 16
-rwxr-xr-x  tools/run_tests/run_tests.py | 294
-rwxr-xr-x  tools/run_tests/sanity/check_sources_and_headers.py | 3
-rwxr-xr-x  tools/run_tests/sanity/check_submodules.sh | 2
-rw-r--r--  tools/run_tests/sources_and_headers.json | 167
-rwxr-xr-x  tools/run_tests/task_runner.py | 4
-rw-r--r--  tools/run_tests/tests.json | 1570
105 files changed, 3415 insertions, 3182 deletions
diff --git a/tools/cmake/gRPCConfig.cmake.in b/tools/cmake/gRPCConfig.cmake.in
new file mode 100644
index 0000000000..48f0674579
--- /dev/null
+++ b/tools/cmake/gRPCConfig.cmake.in
@@ -0,0 +1,7 @@
+# Depend packages
+@_gRPC_FIND_ZLIB@
+@_gRPC_FIND_PROTOBUF@
+@_gRPC_FIND_SSL@
+
+# Targets
+include(${CMAKE_CURRENT_LIST_DIR}/gRPCTargets.cmake)
diff --git a/tools/cmake/gRPCConfigVersion.cmake.in b/tools/cmake/gRPCConfigVersion.cmake.in
new file mode 100644
index 0000000000..f3c19fd403
--- /dev/null
+++ b/tools/cmake/gRPCConfigVersion.cmake.in
@@ -0,0 +1,11 @@
+set(PACKAGE_VERSION "@PACKAGE_VERSION@")
+
+# Check whether the requested PACKAGE_FIND_VERSION is compatible
+if("${PACKAGE_VERSION}" VERSION_LESS "${PACKAGE_FIND_VERSION}")
+ set(PACKAGE_VERSION_COMPATIBLE FALSE)
+else()
+ set(PACKAGE_VERSION_COMPATIBLE TRUE)
+ if ("${PACKAGE_VERSION}" VERSION_EQUAL "${PACKAGE_FIND_VERSION}")
+ set(PACKAGE_VERSION_EXACT TRUE)
+ endif()
+endif()
diff --git a/tools/codegen/core/gen_static_metadata.py b/tools/codegen/core/gen_static_metadata.py
index faa83867a6..859adcbeb1 100755
--- a/tools/codegen/core/gen_static_metadata.py
+++ b/tools/codegen/core/gen_static_metadata.py
@@ -50,8 +50,8 @@ CONFIG = [
'host',
'grpc-message',
'grpc-status',
- 'census-bin',
- 'census-binary-bin',
+ 'grpc-tracing-bin',
+ 'grpc-census-bin',
'',
('grpc-status', '0'),
('grpc-status', '1'),
@@ -108,7 +108,8 @@ CONFIG = [
('if-range', ''),
('if-unmodified-since', ''),
('last-modified', ''),
- ('load-reporting', ''),
+ ('load-reporting-initial', ''),
+ ('load-reporting-trailing', ''),
('link', ''),
('location', ''),
('max-forwards', ''),
diff --git a/tools/distrib/check_include_guards.py b/tools/distrib/check_include_guards.py
index 56b2924a1a..28312813f6 100755
--- a/tools/distrib/check_include_guards.py
+++ b/tools/distrib/check_include_guards.py
@@ -200,6 +200,6 @@ validator = GuardValidator()
for filename in filename_list:
if filename in KNOWN_BAD: continue
- ok = validator.check(filename, args.fix)
+ ok = ok and validator.check(filename, args.fix)
sys.exit(0 if ok else 1)
diff --git a/tools/distrib/python/docgen.py b/tools/distrib/python/docgen.py
index 72c65ad14a..15bd8d855f 100755
--- a/tools/distrib/python/docgen.py
+++ b/tools/distrib/python/docgen.py
@@ -70,8 +70,9 @@ environment.update({
})
subprocess_arguments_list = [
- {'args': ['make'], 'cwd': PROJECT_ROOT},
{'args': ['virtualenv', VIRTUALENV_DIR], 'env': environment},
+ {'args': [VIRTUALENV_PIP_PATH, 'install', '--upgrade', 'pip'],
+ 'env': environment},
{'args': [VIRTUALENV_PIP_PATH, 'install', '-r', REQUIREMENTS_PATH],
'env': environment},
{'args': [VIRTUALENV_PYTHON_PATH, SETUP_PATH, 'build'], 'env': environment},
diff --git a/tools/distrib/python/grpcio_tools/.gitignore b/tools/distrib/python/grpcio_tools/.gitignore
index 979704d970..9f3a7360ee 100644
--- a/tools/distrib/python/grpcio_tools/.gitignore
+++ b/tools/distrib/python/grpcio_tools/.gitignore
@@ -5,3 +5,4 @@ grpc_root/
*.c
*.cpp
*.egg-info
+*.so
diff --git a/tools/distrib/python/grpcio_tools/README.rst b/tools/distrib/python/grpcio_tools/README.rst
index 8946a1d5b3..55521d17bb 100644
--- a/tools/distrib/python/grpcio_tools/README.rst
+++ b/tools/distrib/python/grpcio_tools/README.rst
@@ -122,6 +122,7 @@ Help, I ...
third_party/protobuf/src/google/protobuf/stubs/mathlimits.h:173:31: note: in expansion of macro 'SIGNED_INT_MAX'
static const Type kPosMax = SIGNED_INT_MAX(Type); \\
^
+
And your toolchain is GCC (at the time of this writing, up through at least
GCC 6.0), this is probably a bug where GCC chokes on constant expressions
when the :code:`-fwrapv` flag is specified. You should consider setting your
@@ -136,3 +137,42 @@ Given protobuf include directories :code:`$INCLUDE`, an output directory
::
$ python -m grpc.tools.protoc -I$INCLUDE --python_out=$OUTPUT --grpc_python_out=$OUTPUT $PROTO_FILES
+
+To use as a build step in distutils-based projects, you may use the provided
+command class in your :code:`setup.py`:
+
+::
+
+ setuptools.setup(
+ # ...
+ cmdclass={
+ 'build_proto_modules': grpc.tools.command.BuildPackageProtos,
+ }
+ # ...
+ )
+
+Invocation of the command will walk the project tree and transpile every
+:code:`.proto` file into a :code:`_pb2.py` file in the same directory.
+
+Note that this particular approach requires :code:`grpcio-tools` to be
+installed on the machine before the setup script is invoked (i.e. no
+combination of :code:`setup_requires` or :code:`install_requires` will provide
+access to :code:`grpc.tools.command.BuildPackageProtos` if it isn't already
+installed). One way to work around this can be found in our
+:code:`grpcio-health-checking`
+`package <https://pypi.python.org/pypi/grpcio-health-checking>`_:
+
+::
+
+ class BuildPackageProtos(setuptools.Command):
+ """Command to generate project *_pb2.py modules from proto files."""
+ # ...
+ def run(self):
+ from grpc.tools import command
+ command.build_package_protos(self.distribution.package_dir[''])
+
+Now including :code:`grpcio-tools` in :code:`setup_requires` will provide the
+command on-setup as desired.
+
+For more information on command classes, consult :code:`distutils` and
+:code:`setuptools` documentation.
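
For illustration, a minimal, self-contained :code:`setup.py` following the
lazy-import pattern the README describes; the package name :code:`mypackage`
and its layout are hypothetical:

::

  import setuptools

  class BuildPackageProtos(setuptools.Command):
      """Command to generate project *_pb2.py modules from proto files."""
      description = 'build grpc protobuf modules'
      user_options = []

      def initialize_options(self):
          pass

      def finalize_options(self):
          pass

      def run(self):
          # Deferred import: grpcio-tools only needs to be importable by the
          # time this command runs, i.e. after setup_requires installed it.
          from grpc.tools import command
          command.build_package_protos(self.distribution.package_dir[''])

  setuptools.setup(
      name='mypackage',
      packages=setuptools.find_packages('.'),
      # The command reads the '' key of package_dir, so it must be present.
      package_dir={'': '.'},
      setup_requires=['grpcio-tools'],
      cmdclass={'build_proto_modules': BuildPackageProtos},
  )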
diff --git a/tools/distrib/python/grpcio_tools/grpc/tools/protoc_compiler.pyx b/tools/distrib/python/grpcio_tools/grpc/tools/_protoc_compiler.pyx
index a6530127c0..a6530127c0 100644
--- a/tools/distrib/python/grpcio_tools/grpc/tools/protoc_compiler.pyx
+++ b/tools/distrib/python/grpcio_tools/grpc/tools/_protoc_compiler.pyx
diff --git a/tools/distrib/python/grpcio_tools/grpc/tools/command.py b/tools/distrib/python/grpcio_tools/grpc/tools/command.py
new file mode 100644
index 0000000000..2520099835
--- /dev/null
+++ b/tools/distrib/python/grpcio_tools/grpc/tools/command.py
@@ -0,0 +1,74 @@
+# Copyright 2016, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import os
+import sys
+
+import setuptools
+
+from grpc.tools import protoc
+
+
+def build_package_protos(package_root):
+ proto_files = []
+ inclusion_root = os.path.abspath(package_root)
+ for root, _, files in os.walk(inclusion_root):
+ for filename in files:
+ if filename.endswith('.proto'):
+ proto_files.append(os.path.abspath(os.path.join(root, filename)))
+
+ for proto_file in proto_files:
+ command = [
+ 'grpc.tools.protoc',
+ '--proto_path={}'.format(inclusion_root),
+ '--python_out={}'.format(inclusion_root),
+ '--grpc_python_out={}'.format(inclusion_root),
+ ] + [proto_file]
+ if protoc.main(command) != 0:
+ sys.stderr.write('warning: {} failed'.format(command))
+
+
+class BuildPackageProtos(setuptools.Command):
+ """Command to generate project *_pb2.py modules from proto files."""
+
+ description = 'build grpc protobuf modules'
+ user_options = []
+
+ def initialize_options(self):
+ pass
+
+ def finalize_options(self):
+ pass
+
+ def run(self):
+ # due to limitations of the proto generator, we require that only *one*
+ # directory is provided as an 'include' directory. We assume it's the '' key
+ # to `self.distribution.package_dir` (and get a key error if it's not
+ # there).
+ build_package_protos(self.distribution.package_dir[''])
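
The helper added above can also be invoked directly, outside of any
distutils machinery; a usage sketch (the path below is hypothetical):

::

  from grpc.tools import command

  # Regenerate *_pb2.py files for every .proto found under the package root.
  command.build_package_protos('path/to/mypackage')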
diff --git a/tools/distrib/python/grpcio_tools/grpc/tools/protoc.py b/tools/distrib/python/grpcio_tools/grpc/tools/protoc.py
index 1c69e78920..e1256a7dd9 100644
--- a/tools/distrib/python/grpcio_tools/grpc/tools/protoc.py
+++ b/tools/distrib/python/grpcio_tools/grpc/tools/protoc.py
@@ -32,10 +32,18 @@
import pkg_resources
import sys
-from grpc.tools import protoc_compiler
+from grpc.tools import _protoc_compiler
+def main(command_arguments):
+ """Run the protocol buffer compiler with the given command-line arguments.
+
+ Args:
+ command_arguments: a list of strings representing command line arguments to
+ `protoc`.
+ """
+ command_arguments = [argument.encode() for argument in command_arguments]
+ return _protoc_compiler.run_main(command_arguments)
if __name__ == '__main__':
proto_include = pkg_resources.resource_filename('grpc.tools', '_proto')
- protoc_compiler.run_main(
- sys.argv + ['-I{}'.format(proto_include)])
+ sys.exit(main(sys.argv + ['-I{}'.format(proto_include)]))
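
The new :code:`main` entry point makes programmatic invocation
straightforward; a minimal sketch mirroring the :code:`__main__` block above
(output directory and proto file name are hypothetical):

::

  import pkg_resources
  from grpc.tools import protoc

  # The bundled well-known-type protos ship inside the package as '_proto'.
  proto_include = pkg_resources.resource_filename('grpc.tools', '_proto')

  exit_code = protoc.main([
      'grpc.tools.protoc',  # argv[0] placeholder, as protoc expects
      '--proto_path=.',
      '--python_out=.',
      '--grpc_python_out=.',
      'helloworld.proto',
      '-I{}'.format(proto_include),
  ])
  assert exit_code == 0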
diff --git a/tools/distrib/python/grpcio_tools/grpc_version.py b/tools/distrib/python/grpcio_tools/grpc_version.py
index 4b1e7fcd58..79c40717dd 100644
--- a/tools/distrib/python/grpcio_tools/grpc_version.py
+++ b/tools/distrib/python/grpcio_tools/grpc_version.py
@@ -29,4 +29,4 @@
# AUTO-GENERATED FROM `$REPO_ROOT/templates/tools/distrib/python/grpcio_tools/grpc_version.py.template`!!!
-VERSION='0.16.0.dev0'
+VERSION='1.1.0.dev0'
diff --git a/tools/distrib/python/grpcio_tools/setup.py b/tools/distrib/python/grpcio_tools/setup.py
index fbe69f43d5..bb1f1cf085 100644
--- a/tools/distrib/python/grpcio_tools/setup.py
+++ b/tools/distrib/python/grpcio_tools/setup.py
@@ -28,12 +28,17 @@
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
from distutils import extension
+from distutils import util
import errno
import os
import os.path
+import pkg_resources
+import platform
+import re
import shlex
import shutil
import sys
+import sysconfig
import setuptools
from setuptools.command import build_ext
@@ -43,29 +48,83 @@ from setuptools.command import build_ext
os.chdir(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, os.path.abspath('.'))
+import protoc_lib_deps
+import grpc_version
+
+PY3 = sys.version_info.major == 3
+
+# Environment variable to determine whether or not the Cython extension should
+# *use* Cython or use the generated C files. Note that this requires the C files
+# to have been generated by building first *with* Cython support.
+BUILD_WITH_CYTHON = os.environ.get('GRPC_PYTHON_BUILD_WITH_CYTHON', False)
+
# There are some situations (like on Windows) where CC, CFLAGS, and LDFLAGS are
# entirely ignored/dropped/forgotten by distutils and its Cygwin/MinGW support.
# We use these environment variables to thus get around that without locking
# ourselves in w.r.t. the multitude of operating systems this ought to build on.
-# By default we assume a GCC-like compiler.
-EXTRA_COMPILE_ARGS = shlex.split(os.environ.get('GRPC_PYTHON_CFLAGS',
- '-fno-wrapv -frtti -std=c++11'))
-EXTRA_LINK_ARGS = shlex.split(os.environ.get('GRPC_PYTHON_LDFLAGS',
- '-lpthread'))
+# We can also use these variables as a way to inject environment-specific
+# compiler/linker flags. We assume GCC-like compilers and/or MinGW as a
+# reasonable default.
+EXTRA_ENV_COMPILE_ARGS = os.environ.get('GRPC_PYTHON_CFLAGS', None)
+EXTRA_ENV_LINK_ARGS = os.environ.get('GRPC_PYTHON_LDFLAGS', None)
+if EXTRA_ENV_COMPILE_ARGS is None:
+ EXTRA_ENV_COMPILE_ARGS = '-fno-wrapv -frtti -std=c++11'
+ if 'win32' in sys.platform:
+ # We use define flags here and don't directly add to DEFINE_MACROS below to
+ # ensure that the expert user/builder has a way of turning it off (via the
+ # envvars) without adding yet more GRPC-specific envvars.
+ # See https://sourceforge.net/p/mingw-w64/bugs/363/
+ if '32' in platform.architecture()[0]:
+ EXTRA_ENV_COMPILE_ARGS += ' -D_ftime=_ftime32 -D_timeb=__timeb32 -D_ftime_s=_ftime32_s'
+ else:
+ EXTRA_ENV_COMPILE_ARGS += ' -D_ftime=_ftime64 -D_timeb=__timeb64'
+if EXTRA_ENV_LINK_ARGS is None:
+ EXTRA_ENV_LINK_ARGS = '-lpthread'
+ if 'win32' in sys.platform:
+ # TODO(atash) check if this is actually safe to just import and call on
+ # non-Windows (to avoid breaking import style)
+ from distutils.cygwinccompiler import get_msvcr
+ msvcr = get_msvcr()[0]
+ EXTRA_ENV_LINK_ARGS += (
+ ' -static-libgcc -static-libstdc++ -mcrtdll={msvcr} '
+ '-static'.format(msvcr=msvcr))
+EXTRA_COMPILE_ARGS = shlex.split(EXTRA_ENV_COMPILE_ARGS)
+EXTRA_LINK_ARGS = shlex.split(EXTRA_ENV_LINK_ARGS)
+
+CC_FILES = [
+ os.path.normpath(cc_file) for cc_file in protoc_lib_deps.CC_FILES]
+PROTO_FILES = [
+ os.path.normpath(proto_file) for proto_file in protoc_lib_deps.PROTO_FILES]
+CC_INCLUDE = os.path.normpath(protoc_lib_deps.CC_INCLUDE)
+PROTO_INCLUDE = os.path.normpath(protoc_lib_deps.PROTO_INCLUDE)
GRPC_PYTHON_TOOLS_PACKAGE = 'grpc.tools'
GRPC_PYTHON_PROTO_RESOURCES_NAME = '_proto'
-import protoc_lib_deps
-import grpc_version
+DEFINE_MACROS = (('HAVE_PTHREAD', 1),)
+if "win32" in sys.platform and '64bit' in platform.architecture()[0]:
+ DEFINE_MACROS += (('MS_WIN64', 1),)
+
+# By default, Python3 distutils enforces compatibility of
+# c plugins (.so files) with the OSX version Python3 was built with.
+# For Python3.4, this is OSX 10.6, but we need Thread Local Support (__thread)
+if 'darwin' in sys.platform and PY3:
+ mac_target = sysconfig.get_config_var('MACOSX_DEPLOYMENT_TARGET')
+ if mac_target and (pkg_resources.parse_version(mac_target) <
+ pkg_resources.parse_version('10.9.0')):
+ os.environ['MACOSX_DEPLOYMENT_TARGET'] = '10.9'
+ os.environ['_PYTHON_HOST_PLATFORM'] = re.sub(
+ r'macosx-[0-9]+\.[0-9]+-(.+)',
+ r'macosx-10.9-\1',
+ util.get_platform())
def package_data():
tools_path = GRPC_PYTHON_TOOLS_PACKAGE.replace('.', os.path.sep)
proto_resources_path = os.path.join(tools_path,
GRPC_PYTHON_PROTO_RESOURCES_NAME)
proto_files = []
- for proto_file in protoc_lib_deps.PROTO_FILES:
- source = os.path.join(protoc_lib_deps.PROTO_INCLUDE, proto_file)
+ for proto_file in PROTO_FILES:
+ source = os.path.join(PROTO_INCLUDE, proto_file)
target = os.path.join(proto_resources_path, proto_file)
relative_target = os.path.join(GRPC_PYTHON_PROTO_RESOURCES_NAME, proto_file)
try:
@@ -79,44 +138,47 @@ def package_data():
proto_files.append(relative_target)
return {GRPC_PYTHON_TOOLS_PACKAGE: proto_files}
-def protoc_ext_module():
- plugin_sources = [
- 'grpc/tools/main.cc',
- 'grpc_root/src/compiler/python_generator.cc'] + [
- os.path.join(protoc_lib_deps.CC_INCLUDE, cc_file)
- for cc_file in protoc_lib_deps.CC_FILES]
+def extension_modules():
+ if BUILD_WITH_CYTHON:
+ plugin_sources = [os.path.join('grpc', 'tools', '_protoc_compiler.pyx')]
+ else:
+ plugin_sources = [os.path.join('grpc', 'tools', '_protoc_compiler.cpp')]
+ plugin_sources += [
+ os.path.join('grpc', 'tools', 'main.cc'),
+ os.path.join('grpc_root', 'src', 'compiler', 'python_generator.cc')] + [
+ os.path.join(CC_INCLUDE, cc_file)
+ for cc_file in CC_FILES]
plugin_ext = extension.Extension(
- name='grpc.tools.protoc_compiler',
- sources=['grpc/tools/protoc_compiler.pyx'] + plugin_sources,
+ name='grpc.tools._protoc_compiler',
+ sources=plugin_sources,
include_dirs=[
'.',
'grpc_root',
- 'grpc_root/include',
- protoc_lib_deps.CC_INCLUDE,
+ os.path.join('grpc_root', 'include'),
+ CC_INCLUDE,
],
language='c++',
- define_macros=[('HAVE_PTHREAD', 1)],
- extra_compile_args=EXTRA_COMPILE_ARGS,
- extra_link_args=EXTRA_LINK_ARGS,
+ define_macros=list(DEFINE_MACROS),
+ extra_compile_args=list(EXTRA_COMPILE_ARGS),
+ extra_link_args=list(EXTRA_LINK_ARGS),
)
- return plugin_ext
-
-def maybe_cythonize(exts):
- from Cython import Build
- return Build.cythonize(exts)
+ extensions = [plugin_ext]
+ if BUILD_WITH_CYTHON:
+ from Cython import Build
+ return Build.cythonize(extensions)
+ else:
+ return extensions
setuptools.setup(
name='grpcio_tools',
version=grpc_version.VERSION,
license='3-clause BSD',
- ext_modules=maybe_cythonize([
- protoc_ext_module(),
- ]),
+ ext_modules=extension_modules(),
packages=setuptools.find_packages('.'),
namespace_packages=['grpc'],
install_requires=[
'protobuf>=3.0.0a3',
- 'grpcio>=0.14.0',
+ 'grpcio>=0.15.0',
],
package_data=package_data(),
)
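
Since the build is now driven by environment variables
(:code:`GRPC_PYTHON_BUILD_WITH_CYTHON`, :code:`GRPC_PYTHON_CFLAGS`,
:code:`GRPC_PYTHON_LDFLAGS`), a builder can override the compiler and linker
behavior without touching :code:`setup.py`; a sketch, with example flag
values only:

::

  import os
  import subprocess

  env = dict(os.environ)
  # Build the extension from the .pyx source rather than the generated C++.
  env['GRPC_PYTHON_BUILD_WITH_CYTHON'] = '1'
  # Replace the default GCC-like flags with custom ones.
  env['GRPC_PYTHON_CFLAGS'] = '-fno-wrapv -frtti -std=c++11 -O2'
  env['GRPC_PYTHON_LDFLAGS'] = '-lpthread'

  subprocess.check_call(['pip', 'install', '.'], env=env)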
diff --git a/tools/distrib/python/make_grpcio_tools.py b/tools/distrib/python/make_grpcio_tools.py
index fd9b38b084..adf58445af 100755
--- a/tools/distrib/python/make_grpcio_tools.py
+++ b/tools/distrib/python/make_grpcio_tools.py
@@ -29,12 +29,18 @@
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+from __future__ import print_function
+
+import errno
+import filecmp
+import glob
import os
import os.path
import shutil
import subprocess
import sys
import traceback
+import uuid
DEPS_FILE_CONTENT="""
# Copyright 2016, Google Inc.
@@ -124,20 +130,88 @@ def get_deps():
proto_include=repr(GRPC_PYTHON_PROTOBUF_RELATIVE_ROOT))
return deps_file_content
+def long_path(path):
+ if os.name == 'nt':
+ return '\\\\?\\' + path
+ else:
+ return path
+
+def atomic_file_copy(src, dst):
+ """Based on the lock-free-whack-a-mole algorithm, depending on filesystem
+ renaming being atomic. Described at http://stackoverflow.com/a/28090883.
+ """
+ try:
+ if filecmp.cmp(src, dst):
+ return
+ except:
+ pass
+ dst_dir = os.path.abspath(os.path.dirname(dst))
+ dst_base = os.path.basename(dst)
+ this_id = str(uuid.uuid4()).replace('.', '-')
+ temporary_file = os.path.join(dst_dir, '{}.{}.tmp'.format(dst_base, this_id))
+ mole_file = os.path.join(dst_dir, '{}.{}.mole.tmp'.format(dst_base, this_id))
+ mole_pattern = os.path.join(dst_dir, '{}.*.mole.tmp'.format(dst_base))
+ src = long_path(src)
+ dst = long_path(dst)
+ temporary_file = long_path(temporary_file)
+ mole_file = long_path(mole_file)
+ mole_pattern = long_path(mole_pattern)
+ shutil.copy2(src, temporary_file)
+ try:
+ os.rename(temporary_file, mole_file)
+ except:
+ print('Error moving temporary file {} to {}'.format(temporary_file, mole_file), file=sys.stderr)
+ print('while trying to copy file {} to {}'.format(src, dst), file=sys.stderr)
+ raise
+ for other_file in glob.glob(mole_pattern):
+ other_id = other_file.split('.')[-3]
+ if this_id == other_id:
+ pass
+ elif this_id < other_id:
+ try:
+ os.remove(other_file)
+ except:
+ pass
+ else:
+ try:
+ os.remove(mole_file)
+ except:
+ pass
+ this_id = other_id
+ mole_file = other_file
+ try:
+ if filecmp.cmp(src, dst):
+ try:
+ os.remove(mole_file)
+ except:
+ pass
+ return
+ except:
+ pass
+ try:
+ os.rename(mole_file, dst)
+ except:
+ pass
+
def main():
os.chdir(GRPC_ROOT)
- for tree in [GRPC_PYTHON_PROTOBUF,
- GRPC_PYTHON_PROTOC_PLUGINS,
- GRPC_PYTHON_INCLUDE]:
- try:
- shutil.rmtree(tree)
- except Exception as _:
- pass
- shutil.copytree(GRPC_PROTOBUF, GRPC_PYTHON_PROTOBUF)
- shutil.copytree(GRPC_PROTOC_PLUGINS, GRPC_PYTHON_PROTOC_PLUGINS)
- shutil.copytree(GRPC_INCLUDE, GRPC_PYTHON_INCLUDE)
+ for source, target in [
+ (GRPC_PROTOBUF, GRPC_PYTHON_PROTOBUF),
+ (GRPC_PROTOC_PLUGINS, GRPC_PYTHON_PROTOC_PLUGINS),
+ (GRPC_INCLUDE, GRPC_PYTHON_INCLUDE)]:
+ for source_dir, _, files in os.walk(source):
+ target_dir = os.path.abspath(os.path.join(target, os.path.relpath(source_dir, source)))
+ try:
+ os.makedirs(target_dir)
+ except OSError as error:
+ if error.errno != errno.EEXIST:
+ raise
+ for relative_file in files:
+ source_file = os.path.abspath(os.path.join(source_dir, relative_file))
+ target_file = os.path.abspath(os.path.join(target_dir, relative_file))
+ atomic_file_copy(source_file, target_file)
try:
protoc_lib_deps_content = get_deps()
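
The copy routine above exists so that concurrent invocations of this script
cannot leave a destination file half-written. Stripped of the Windows "mole"
arbitration, the core idea is copy-then-atomic-rename, sketched here
(POSIX-only; function name is illustrative):

::

  import os
  import shutil
  import uuid

  def atomic_copy_sketch(src, dst):
      # Write to a uniquely named temporary file in the destination
      # directory, then rename it into place. On POSIX filesystems
      # os.rename is atomic, so readers of dst never observe a partial
      # copy. (On Windows, renaming over an existing file fails, which
      # is what the whack-a-mole dance in atomic_file_copy arbitrates.)
      tmp = '{}.{}.tmp'.format(dst, uuid.uuid4().hex)
      shutil.copy2(src, tmp)
      os.rename(tmp, dst)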
diff --git a/tools/dockerfile/distribtest/python_arch_x64/Dockerfile b/tools/dockerfile/distribtest/python_arch_x64/Dockerfile
index 2f79cc3017..dff72eee97 100644
--- a/tools/dockerfile/distribtest/python_arch_x64/Dockerfile
+++ b/tools/dockerfile/distribtest/python_arch_x64/Dockerfile
@@ -33,4 +33,4 @@ RUN pacman --noconfirm -Syy
RUN pacman --noconfirm -S openssl
RUN pacman --noconfirm -S python2
RUN pacman --noconfirm -S python2-pip
-
+RUN pip install virtualenv
diff --git a/tools/dockerfile/distribtest/python_centos6_x64/Dockerfile b/tools/dockerfile/distribtest/python_centos6_x64/Dockerfile
index d4f473792e..967450156c 100644
--- a/tools/dockerfile/distribtest/python_centos6_x64/Dockerfile
+++ b/tools/dockerfile/distribtest/python_centos6_x64/Dockerfile
@@ -44,3 +44,5 @@ RUN curl https://bootstrap.pypa.io/get-pip.py | python -
# "which" command required by python's run_distrib_test.sh
RUN yum install -y which
+
+RUN pip install virtualenv
diff --git a/tools/dockerfile/distribtest/python_centos7_x64/Dockerfile b/tools/dockerfile/distribtest/python_centos7_x64/Dockerfile
index ca64fa7bea..0127fe1e28 100644
--- a/tools/dockerfile/distribtest/python_centos7_x64/Dockerfile
+++ b/tools/dockerfile/distribtest/python_centos7_x64/Dockerfile
@@ -32,4 +32,4 @@ FROM centos:7
RUN yum install -y python
RUN yum install -y epel-release
RUN yum install -y python-pip
-
+RUN pip install virtualenv
diff --git a/tools/dockerfile/distribtest/python_fedora20_x64/Dockerfile b/tools/dockerfile/distribtest/python_fedora20_x64/Dockerfile
index 8b0f769c26..3d3636e43d 100644
--- a/tools/dockerfile/distribtest/python_fedora20_x64/Dockerfile
+++ b/tools/dockerfile/distribtest/python_fedora20_x64/Dockerfile
@@ -35,3 +35,5 @@ RUN yum clean all && yum update -y && yum install -y python python-pip
# Trying twice makes it work fine.
# https://github.com/docker/docker/issues/10180
RUN pip install --upgrade six || pip install --upgrade six
+
+RUN pip install virtualenv
diff --git a/tools/dockerfile/distribtest/python_fedora21_x64/Dockerfile b/tools/dockerfile/distribtest/python_fedora21_x64/Dockerfile
index fcbe053f1f..0b1b6aeb35 100644
--- a/tools/dockerfile/distribtest/python_fedora21_x64/Dockerfile
+++ b/tools/dockerfile/distribtest/python_fedora21_x64/Dockerfile
@@ -40,3 +40,5 @@ RUN yum clean all && yum update -y && yum install -y python python-pip
# Trying twice makes it work fine.
# https://github.com/docker/docker/issues/10180
RUN pip2 install --upgrade six || pip2 install --upgrade six
+
+RUN pip2 install virtualenv
diff --git a/tools/dockerfile/distribtest/python_fedora22_x64/Dockerfile b/tools/dockerfile/distribtest/python_fedora22_x64/Dockerfile
index ddcacb4257..4d75034c15 100644
--- a/tools/dockerfile/distribtest/python_fedora22_x64/Dockerfile
+++ b/tools/dockerfile/distribtest/python_fedora22_x64/Dockerfile
@@ -30,3 +30,4 @@
FROM fedora:22
RUN yum clean all && yum update -y && yum install -y python python-pip
+RUN pip install virtualenv
diff --git a/tools/dockerfile/distribtest/python_fedora23_x64/Dockerfile b/tools/dockerfile/distribtest/python_fedora23_x64/Dockerfile
index d45195e509..a1bc9ba8d6 100644
--- a/tools/dockerfile/distribtest/python_fedora23_x64/Dockerfile
+++ b/tools/dockerfile/distribtest/python_fedora23_x64/Dockerfile
@@ -30,3 +30,4 @@
FROM fedora:23
RUN yum clean all && yum update -y && yum install -y python python-pip
+RUN pip install virtualenv
diff --git a/tools/dockerfile/distribtest/python_jessie_x64/Dockerfile b/tools/dockerfile/distribtest/python_jessie_x64/Dockerfile
index 83df4ed4fa..7dc32a088e 100644
--- a/tools/dockerfile/distribtest/python_jessie_x64/Dockerfile
+++ b/tools/dockerfile/distribtest/python_jessie_x64/Dockerfile
@@ -30,3 +30,4 @@
FROM debian:jessie
RUN apt-get update && apt-get install -y python python-pip
+RUN pip install virtualenv
diff --git a/tools/dockerfile/distribtest/python_jessie_x86/Dockerfile b/tools/dockerfile/distribtest/python_jessie_x86/Dockerfile
index 19addb2912..04c1402e72 100644
--- a/tools/dockerfile/distribtest/python_jessie_x86/Dockerfile
+++ b/tools/dockerfile/distribtest/python_jessie_x86/Dockerfile
@@ -31,6 +31,8 @@ FROM 32bit/debian:jessie
RUN apt-get update && apt-get install -y python python-pip
+RUN pip install virtualenv
+
# docker is running on a 64-bit machine, so we need to
# override "uname -m" to report i686 instead of x86_64, otherwise
# python will choose a wrong binary package to install.
diff --git a/tools/dockerfile/distribtest/python_opensuse_x64/Dockerfile b/tools/dockerfile/distribtest/python_opensuse_x64/Dockerfile
index fe1406be98..27159c72e3 100644
--- a/tools/dockerfile/distribtest/python_opensuse_x64/Dockerfile
+++ b/tools/dockerfile/distribtest/python_opensuse_x64/Dockerfile
@@ -38,3 +38,5 @@ RUN zypper --non-interactive install which
# Without this, pip won't be able to connect to
# https://pypi.python.org/simple/
RUN zypper --non-interactive install ca-certificates-mozilla
+
+RUN pip install virtualenv
diff --git a/tools/dockerfile/distribtest/python_ubuntu1204_x64/Dockerfile b/tools/dockerfile/distribtest/python_ubuntu1204_x64/Dockerfile
index 4068fbe2ba..7a8c91b79b 100644
--- a/tools/dockerfile/distribtest/python_ubuntu1204_x64/Dockerfile
+++ b/tools/dockerfile/distribtest/python_ubuntu1204_x64/Dockerfile
@@ -30,3 +30,5 @@
FROM ubuntu:12.04
RUN apt-get update -y && apt-get install -y python python-pip
+
+RUN pip install virtualenv
diff --git a/tools/dockerfile/distribtest/python_ubuntu1404_x64/Dockerfile b/tools/dockerfile/distribtest/python_ubuntu1404_x64/Dockerfile
index 0858fb0c06..65189a44de 100644
--- a/tools/dockerfile/distribtest/python_ubuntu1404_x64/Dockerfile
+++ b/tools/dockerfile/distribtest/python_ubuntu1404_x64/Dockerfile
@@ -30,3 +30,5 @@
FROM ubuntu:14.04
RUN apt-get update -y && apt-get install -y python python-pip
+
+RUN pip install virtualenv
diff --git a/tools/dockerfile/distribtest/python_ubuntu1504_x64/Dockerfile b/tools/dockerfile/distribtest/python_ubuntu1504_x64/Dockerfile
index ed6ffddbec..abf36c4a24 100644
--- a/tools/dockerfile/distribtest/python_ubuntu1504_x64/Dockerfile
+++ b/tools/dockerfile/distribtest/python_ubuntu1504_x64/Dockerfile
@@ -30,3 +30,5 @@
FROM ubuntu:15.04
RUN apt-get update -y && apt-get install -y python python-pip
+
+RUN pip install virtualenv
diff --git a/tools/dockerfile/distribtest/python_ubuntu1510_x64/Dockerfile b/tools/dockerfile/distribtest/python_ubuntu1510_x64/Dockerfile
index 9e3e0c260f..6e862d203b 100644
--- a/tools/dockerfile/distribtest/python_ubuntu1510_x64/Dockerfile
+++ b/tools/dockerfile/distribtest/python_ubuntu1510_x64/Dockerfile
@@ -30,3 +30,5 @@
FROM ubuntu:15.10
RUN apt-get update -y && apt-get install -y python python-pip
+
+RUN pip install virtualenv
diff --git a/tools/dockerfile/distribtest/python_ubuntu1604_x64/Dockerfile b/tools/dockerfile/distribtest/python_ubuntu1604_x64/Dockerfile
index 5098da8a26..59f4feab55 100644
--- a/tools/dockerfile/distribtest/python_ubuntu1604_x64/Dockerfile
+++ b/tools/dockerfile/distribtest/python_ubuntu1604_x64/Dockerfile
@@ -30,3 +30,5 @@
FROM ubuntu:16.04
RUN apt-get update -y && apt-get install -y python python-pip
+
+RUN pip install virtualenv
diff --git a/tools/dockerfile/distribtest/python_wheezy_x64/Dockerfile b/tools/dockerfile/distribtest/python_wheezy_x64/Dockerfile
index 66165ee929..bc8816d305 100644
--- a/tools/dockerfile/distribtest/python_wheezy_x64/Dockerfile
+++ b/tools/dockerfile/distribtest/python_wheezy_x64/Dockerfile
@@ -30,3 +30,5 @@
FROM debian:wheezy
RUN apt-get update -y && apt-get install -y python python-pip
+
+RUN pip install virtualenv
diff --git a/tools/dockerfile/grpc_artifact_linux_x64/Dockerfile b/tools/dockerfile/grpc_artifact_linux_x64/Dockerfile
index 4ae4ebdb06..669e3557b8 100644
--- a/tools/dockerfile/grpc_artifact_linux_x64/Dockerfile
+++ b/tools/dockerfile/grpc_artifact_linux_x64/Dockerfile
@@ -76,7 +76,7 @@ RUN apt-get update && apt-get install -y \
RUN pip install pip --upgrade
RUN pip install virtualenv
-RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 tox
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2
##################
diff --git a/tools/dockerfile/grpc_artifact_linux_x86/Dockerfile b/tools/dockerfile/grpc_artifact_linux_x86/Dockerfile
index 9c2fd52eee..860b8f4fb9 100644
--- a/tools/dockerfile/grpc_artifact_linux_x86/Dockerfile
+++ b/tools/dockerfile/grpc_artifact_linux_x86/Dockerfile
@@ -76,7 +76,7 @@ RUN apt-get update && apt-get install -y \
RUN pip install pip --upgrade
RUN pip install virtualenv
-RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 tox
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2
##################
diff --git a/tools/dockerfile/grpc_check_generated_pb_files/Dockerfile b/tools/dockerfile/grpc_check_generated_pb_files/Dockerfile
index 7658991462..d19bc67120 100644
--- a/tools/dockerfile/grpc_check_generated_pb_files/Dockerfile
+++ b/tools/dockerfile/grpc_check_generated_pb_files/Dockerfile
@@ -67,11 +67,6 @@ RUN apt-get update && apt-get install -y time && apt-get clean
# C++ dependencies
RUN apt-get update && apt-get -y install libgflags-dev libgtest-dev libc++-dev clang && apt-get clean
-#======================
-# Zookeeper dependencies
-# TODO(jtattermusch): is zookeeper still needed?
-RUN apt-get install -y libzookeeper-mt-dev
-
RUN mkdir /var/local/jenkins
# Define the default command.
diff --git a/tools/dockerfile/grpc_clang_format/clang_format_all_the_things.sh b/tools/dockerfile/grpc_clang_format/clang_format_all_the_things.sh
index eab7611b3f..462c65ab5e 100755
--- a/tools/dockerfile/grpc_clang_format/clang_format_all_the_things.sh
+++ b/tools/dockerfile/grpc_clang_format/clang_format_all_the_things.sh
@@ -31,7 +31,7 @@
set -e
# directories to run against
-DIRS="src/core/lib src/core/ext src/cpp test/core test/cpp include"
+DIRS="src/core/lib src/core/ext src/cpp test/core test/cpp include src/compiler"
# file matching patterns to check
GLOB="*.h *.c *.cc"
diff --git a/tools/dockerfile/interoptest/grpc_interop_csharp/Dockerfile b/tools/dockerfile/interoptest/grpc_interop_csharp/Dockerfile
index baab2f5638..e3d52f0cb5 100644
--- a/tools/dockerfile/interoptest/grpc_interop_csharp/Dockerfile
+++ b/tools/dockerfile/interoptest/grpc_interop_csharp/Dockerfile
@@ -63,6 +63,21 @@ RUN apt-get update && apt-get install -y \
# Build profiling
RUN apt-get update && apt-get install -y time && apt-get clean
+#====================
+# Python dependencies
+
+# Install dependencies
+
+RUN apt-get update && apt-get install -y \
+ python-all-dev \
+ python3-all-dev \
+ python-pip
+
+# Install Python packages from PyPI
+RUN pip install pip --upgrade
+RUN pip install virtualenv
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0
+
#================
# C# dependencies
@@ -88,10 +103,6 @@ RUN ln -s /usr/bin/ccache /usr/local/bin/c++
RUN ln -s /usr/bin/ccache /usr/local/bin/clang
RUN ln -s /usr/bin/ccache /usr/local/bin/clang++
-#======================
-# Zookeeper dependencies
-# TODO(jtattermusch): is zookeeper still needed?
-RUN apt-get install -y libzookeeper-mt-dev
RUN mkdir /var/local/jenkins
diff --git a/tools/dockerfile/interoptest/grpc_interop_cxx/Dockerfile b/tools/dockerfile/interoptest/grpc_interop_cxx/Dockerfile
index 2bbccca9e5..aa77d5f127 100644
--- a/tools/dockerfile/interoptest/grpc_interop_cxx/Dockerfile
+++ b/tools/dockerfile/interoptest/grpc_interop_cxx/Dockerfile
@@ -63,6 +63,21 @@ RUN apt-get update && apt-get install -y \
# Build profiling
RUN apt-get update && apt-get install -y time && apt-get clean
+#====================
+# Python dependencies
+
+# Install dependencies
+
+RUN apt-get update && apt-get install -y \
+ python-all-dev \
+ python3-all-dev \
+ python-pip
+
+# Install Python packages from PyPI
+RUN pip install pip --upgrade
+RUN pip install virtualenv
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0
+
#=================
# C++ dependencies
RUN apt-get update && apt-get -y install libgflags-dev libgtest-dev libc++-dev clang && apt-get clean
@@ -75,10 +90,6 @@ RUN ln -s /usr/bin/ccache /usr/local/bin/c++
RUN ln -s /usr/bin/ccache /usr/local/bin/clang
RUN ln -s /usr/bin/ccache /usr/local/bin/clang++
-#======================
-# Zookeeper dependencies
-# TODO(jtattermusch): is zookeeper still needed?
-RUN apt-get install -y libzookeeper-mt-dev
RUN mkdir /var/local/jenkins
diff --git a/tools/dockerfile/interoptest/grpc_interop_go/Dockerfile b/tools/dockerfile/interoptest/grpc_interop_go/Dockerfile
index ec71a53c2d..05e963d1e6 100644
--- a/tools/dockerfile/interoptest/grpc_interop_go/Dockerfile
+++ b/tools/dockerfile/interoptest/grpc_interop_go/Dockerfile
@@ -32,5 +32,20 @@ FROM golang:1.5
# Using login shell removes Go from path, so we add it.
RUN ln -s /usr/local/go/bin/go /usr/local/bin
+#====================
+# Python dependencies
+
+# Install dependencies
+
+RUN apt-get update && apt-get install -y \
+ python-all-dev \
+ python3-all-dev \
+ python-pip
+
+# Install Python packages from PyPI
+RUN pip install pip --upgrade
+RUN pip install virtualenv
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0
+
# Define the default command.
CMD ["bash"]
diff --git a/tools/dockerfile/interoptest/grpc_interop_http2/Dockerfile b/tools/dockerfile/interoptest/grpc_interop_http2/Dockerfile
index ec71a53c2d..05e963d1e6 100644
--- a/tools/dockerfile/interoptest/grpc_interop_http2/Dockerfile
+++ b/tools/dockerfile/interoptest/grpc_interop_http2/Dockerfile
@@ -32,5 +32,20 @@ FROM golang:1.5
# Using login shell removes Go from path, so we add it.
RUN ln -s /usr/local/go/bin/go /usr/local/bin
+#====================
+# Python dependencies
+
+# Install dependencies
+
+RUN apt-get update && apt-get install -y \
+ python-all-dev \
+ python3-all-dev \
+ python-pip
+
+# Install Python packages from PyPI
+RUN pip install pip --upgrade
+RUN pip install virtualenv
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0
+
# Define the default command.
CMD ["bash"]
diff --git a/tools/dockerfile/interoptest/grpc_interop_java/Dockerfile b/tools/dockerfile/interoptest/grpc_interop_java/Dockerfile
index 252c9bc928..b5fe54f991 100644
--- a/tools/dockerfile/interoptest/grpc_interop_java/Dockerfile
+++ b/tools/dockerfile/interoptest/grpc_interop_java/Dockerfile
@@ -47,6 +47,21 @@ ENV JAVA_HOME /usr/lib/jvm/java-8-oracle
ENV PATH $PATH:$JAVA_HOME/bin
+#====================
+# Python dependencies
+
+# Install dependencies
+
+RUN apt-get update && apt-get install -y \
+ python-all-dev \
+ python3-all-dev \
+ python-pip
+
+# Install Python packages from PyPI
+RUN pip install pip --upgrade
+RUN pip install virtualenv
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0
+
# Trigger download of as many Gradle artifacts as possible.
RUN git clone --recursive --depth 1 https://github.com/grpc/grpc-java.git && \
diff --git a/tools/dockerfile/interoptest/grpc_interop_node/Dockerfile b/tools/dockerfile/interoptest/grpc_interop_node/Dockerfile
index 2a8d35a5dc..d9a7501829 100644
--- a/tools/dockerfile/interoptest/grpc_interop_node/Dockerfile
+++ b/tools/dockerfile/interoptest/grpc_interop_node/Dockerfile
@@ -63,6 +63,21 @@ RUN apt-get update && apt-get install -y \
# Build profiling
RUN apt-get update && apt-get install -y time && apt-get clean
+#====================
+# Python dependencies
+
+# Install dependencies
+
+RUN apt-get update && apt-get install -y \
+ python-all-dev \
+ python3-all-dev \
+ python-pip
+
+# Install Python packages from PyPI
+RUN pip install pip --upgrade
+RUN pip install virtualenv
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0
+
#==================
# Node dependencies
@@ -82,10 +97,6 @@ RUN ln -s /usr/bin/ccache /usr/local/bin/c++
RUN ln -s /usr/bin/ccache /usr/local/bin/clang
RUN ln -s /usr/bin/ccache /usr/local/bin/clang++
-#======================
-# Zookeeper dependencies
-# TODO(jtattermusch): is zookeeper still needed?
-RUN apt-get install -y libzookeeper-mt-dev
RUN mkdir /var/local/jenkins
diff --git a/tools/dockerfile/interoptest/grpc_interop_php/Dockerfile b/tools/dockerfile/interoptest/grpc_interop_php/Dockerfile
index e27a6a2301..0d6171c170 100644
--- a/tools/dockerfile/interoptest/grpc_interop_php/Dockerfile
+++ b/tools/dockerfile/interoptest/grpc_interop_php/Dockerfile
@@ -1,4 +1,4 @@
-# Copyright 2015, Google Inc.
+# Copyright 2016, Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
@@ -83,12 +83,6 @@ RUN /bin/bash -l -c "gem install bundler --no-ri --no-rdoc"
# Install dependencies
-RUN /bin/bash -l -c "echo 'deb http://packages.dotdeb.org wheezy-php55 all' \
- >> /etc/apt/sources.list.d/dotdeb.list"
-RUN /bin/bash -l -c "echo 'deb-src http://packages.dotdeb.org wheezy-php55 all' \
- >> /etc/apt/sources.list.d/dotdeb.list"
-RUN wget http://www.dotdeb.org/dotdeb.gpg -O- | apt-key add -
-
RUN apt-get update && apt-get install -y \
git php5 php5-dev phpunit unzip
@@ -100,10 +94,6 @@ RUN ln -s /usr/bin/ccache /usr/local/bin/c++
RUN ln -s /usr/bin/ccache /usr/local/bin/clang
RUN ln -s /usr/bin/ccache /usr/local/bin/clang++
-#======================
-# Zookeeper dependencies
-# TODO(jtattermusch): is zookeeper still needed?
-RUN apt-get install -y libzookeeper-mt-dev
RUN mkdir /var/local/jenkins
@@ -117,11 +107,6 @@ RUN /bin/bash -l -c "rvm all do gem install ronn rake"
RUN curl -sS https://getcomposer.org/installer | php
RUN mv composer.phar /usr/local/bin/composer
-# As an attempt to work around #4212, try to prefetch Protobuf-PHP dependency
-# into composer cache to prevent "composer install" from cloning on each build.
-RUN git clone --mirror https://github.com/stanley-cheung/Protobuf-PHP.git \
- /root/.composer/cache/vcs/git-github.com-stanley-cheung-Protobuf-PHP.git/
-
# Download the patched PHP protobuf so that PHP gRPC clients can be generated
# from proto3 schemas.
RUN git clone https://github.com/stanley-cheung/Protobuf-PHP.git /var/local/git/protobuf-php
@@ -133,3 +118,4 @@ RUN /bin/bash -l -c "rvm use ruby-2.1 \
# Define the default command.
CMD ["bash"]
+
diff --git a/tools/dockerfile/interoptest/grpc_interop_php7/Dockerfile b/tools/dockerfile/interoptest/grpc_interop_php7/Dockerfile
new file mode 100644
index 0000000000..be8f25f8ff
--- /dev/null
+++ b/tools/dockerfile/interoptest/grpc_interop_php7/Dockerfile
@@ -0,0 +1,125 @@
+# Copyright 2016, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+FROM debian:jessie
+
+#=================
+# PHP7 dependencies
+
+# Install Git and basic packages.
+RUN apt-get update && apt-get install -y \
+ autoconf \
+ automake \
+ build-essential \
+ ccache \
+ curl \
+ git \
+ libcurl4-openssl-dev \
+ libgmp-dev \
+ libgmp3-dev \
+ libssl-dev \
+ libtool \
+ libxml2-dev \
+ pkg-config \
+ re2c \
+ time \
+ unzip \
+ wget \
+ zip && apt-get clean
+
+# Install other dependencies
+RUN ln -sf /usr/include/x86_64-linux-gnu/gmp.h /usr/include/gmp.h
+RUN wget http://ftp.gnu.org/gnu/bison/bison-2.6.4.tar.gz -O /var/local/bison-2.6.4.tar.gz
+RUN cd /var/local \
+ && tar -zxvf bison-2.6.4.tar.gz \
+ && cd /var/local/bison-2.6.4 \
+ && ./configure \
+ && make \
+ && make install
+
+# Compile PHP7 from source
+RUN git clone https://github.com/php/php-src /var/local/git/php-src
+RUN cd /var/local/git/php-src \
+ && git checkout PHP-7.0.9 \
+ && ./buildconf --force \
+ && ./configure \
+ --with-gmp \
+ --with-openssl \
+ --with-zlib \
+ && make \
+ && make install
+
+#==================
+# Ruby dependencies
+
+# Install rvm
+RUN gpg --keyserver hkp://keys.gnupg.net --recv-keys 409B6B1796C275462A1703113804BB82D39DC0E3
+RUN \curl -sSL https://get.rvm.io | bash -s stable
+
+# Install Ruby 2.1
+RUN /bin/bash -l -c "rvm install ruby-2.1"
+RUN /bin/bash -l -c "rvm use --default ruby-2.1"
+RUN /bin/bash -l -c "echo 'gem: --no-ri --no-rdoc' > ~/.gemrc"
+RUN /bin/bash -l -c "echo 'export PATH=/usr/local/rvm/bin:$PATH' >> ~/.bashrc"
+RUN /bin/bash -l -c "echo 'rvm --default use ruby-2.1' >> ~/.bashrc"
+RUN /bin/bash -l -c "gem install bundler --no-ri --no-rdoc"
+
+# Prepare ccache
+RUN ln -s /usr/bin/ccache /usr/local/bin/gcc
+RUN ln -s /usr/bin/ccache /usr/local/bin/g++
+RUN ln -s /usr/bin/ccache /usr/local/bin/cc
+RUN ln -s /usr/bin/ccache /usr/local/bin/c++
+RUN ln -s /usr/bin/ccache /usr/local/bin/clang
+RUN ln -s /usr/bin/ccache /usr/local/bin/clang++
+
+
+RUN mkdir /var/local/jenkins
+
+# ronn: a ruby tool used to convert markdown to man pages, used during the
+# install of Protobuf extensions
+#
+# rake: a ruby version of make used to build the PHP Protobuf extension
+RUN /bin/bash -l -c "rvm all do gem install ronn rake"
+
+# Install composer
+RUN curl -sS https://getcomposer.org/installer | php
+RUN mv composer.phar /usr/local/bin/composer
+
+# Download the patched PHP protobuf so that PHP gRPC clients can be generated
+# from proto3 schemas.
+RUN git clone https://github.com/stanley-cheung/Protobuf-PHP.git /var/local/git/protobuf-php
+
+RUN /bin/bash -l -c "rvm use ruby-2.1 \
+ && cd /var/local/git/protobuf-php \
+ && rvm all do rake pear:package version=1.0 \
+ && pear install Protobuf-1.0.tgz"
+
+# Define the default command.
+CMD ["bash"]
+
diff --git a/tools/dockerfile/interoptest/grpc_interop_php7/build_interop.sh b/tools/dockerfile/interoptest/grpc_interop_php7/build_interop.sh
new file mode 100755
index 0000000000..261dded282
--- /dev/null
+++ b/tools/dockerfile/interoptest/grpc_interop_php7/build_interop.sh
@@ -0,0 +1,52 @@
+#!/bin/bash
+# Copyright 2016, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+#
+# Builds PHP interop server and client in a base image.
+set -ex
+
+mkdir -p /var/local/git
+git clone --recursive /var/local/jenkins/grpc /var/local/git/grpc
+
+# copy service account keys if available
+cp -r /var/local/jenkins/service_account $HOME || true
+
+cd /var/local/git/grpc
+rvm --default use ruby-2.1
+
+# gRPC core and protobuf need to be installed
+make install
+
+(cd src/php/ext/grpc && phpize && ./configure && make)
+
+(cd third_party/protobuf && make install)
+
+(cd src/php && composer install)
+
+(cd src/php && protoc-gen-php -i tests/interop/ -o tests/interop/ tests/interop/test.proto)
diff --git a/tools/dockerfile/interoptest/grpc_interop_python/Dockerfile b/tools/dockerfile/interoptest/grpc_interop_python/Dockerfile
index 071fb2c93b..10a88916ad 100644
--- a/tools/dockerfile/interoptest/grpc_interop_python/Dockerfile
+++ b/tools/dockerfile/interoptest/grpc_interop_python/Dockerfile
@@ -76,7 +76,7 @@ RUN apt-get update && apt-get install -y \
# Install Python packages from PyPI
RUN pip install pip --upgrade
RUN pip install virtualenv
-RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 tox
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0
# Prepare ccache
RUN ln -s /usr/bin/ccache /usr/local/bin/gcc
@@ -86,10 +86,6 @@ RUN ln -s /usr/bin/ccache /usr/local/bin/c++
RUN ln -s /usr/bin/ccache /usr/local/bin/clang
RUN ln -s /usr/bin/ccache /usr/local/bin/clang++
-#======================
-# Zookeeper dependencies
-# TODO(jtattermusch): is zookeeper still needed?
-RUN apt-get install -y libzookeeper-mt-dev
RUN mkdir /var/local/jenkins
diff --git a/tools/dockerfile/interoptest/grpc_interop_ruby/Dockerfile b/tools/dockerfile/interoptest/grpc_interop_ruby/Dockerfile
index df8eef5438..dae64e5c8c 100644
--- a/tools/dockerfile/interoptest/grpc_interop_ruby/Dockerfile
+++ b/tools/dockerfile/interoptest/grpc_interop_ruby/Dockerfile
@@ -63,6 +63,21 @@ RUN apt-get update && apt-get install -y \
# Build profiling
RUN apt-get update && apt-get install -y time && apt-get clean
+#====================
+# Python dependencies
+
+# Install dependencies
+
+RUN apt-get update && apt-get install -y \
+ python-all-dev \
+ python3-all-dev \
+ python-pip
+
+# Install Python packages from PyPI
+RUN pip install pip --upgrade
+RUN pip install virtualenv
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0
+
#==================
# Ruby dependencies
@@ -86,10 +101,6 @@ RUN ln -s /usr/bin/ccache /usr/local/bin/c++
RUN ln -s /usr/bin/ccache /usr/local/bin/clang
RUN ln -s /usr/bin/ccache /usr/local/bin/clang++
-#======================
-# Zookeeper dependencies
-# TODO(jtattermusch): is zookeeper still needed?
-RUN apt-get install -y libzookeeper-mt-dev
RUN mkdir /var/local/jenkins
diff --git a/tools/dockerfile/stress_test/grpc_interop_stress_csharp/Dockerfile b/tools/dockerfile/stress_test/grpc_interop_stress_csharp/Dockerfile
index 823fe948fb..81e3fdc380 100644
--- a/tools/dockerfile/stress_test/grpc_interop_stress_csharp/Dockerfile
+++ b/tools/dockerfile/stress_test/grpc_interop_stress_csharp/Dockerfile
@@ -63,6 +63,21 @@ RUN apt-get update && apt-get install -y \
# Build profiling
RUN apt-get update && apt-get install -y time && apt-get clean
+#====================
+# Python dependencies
+
+# Install dependencies
+
+RUN apt-get update && apt-get install -y \
+ python-all-dev \
+ python3-all-dev \
+ python-pip
+
+# Install Python packages from PyPI
+RUN pip install pip --upgrade
+RUN pip install virtualenv
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0
+
# Prepare ccache
RUN ln -s /usr/bin/ccache /usr/local/bin/gcc
RUN ln -s /usr/bin/ccache /usr/local/bin/g++
diff --git a/tools/dockerfile/stress_test/grpc_interop_stress_cxx/Dockerfile b/tools/dockerfile/stress_test/grpc_interop_stress_cxx/Dockerfile
index 556a26ee13..e082da648b 100644
--- a/tools/dockerfile/stress_test/grpc_interop_stress_cxx/Dockerfile
+++ b/tools/dockerfile/stress_test/grpc_interop_stress_cxx/Dockerfile
@@ -63,6 +63,21 @@ RUN apt-get update && apt-get install -y \
# Build profiling
RUN apt-get update && apt-get install -y time && apt-get clean
+#====================
+# Python dependencies
+
+# Install dependencies
+
+RUN apt-get update && apt-get install -y \
+ python-all-dev \
+ python3-all-dev \
+ python-pip
+
+# Install Python packages from PyPI
+RUN pip install pip --upgrade
+RUN pip install virtualenv
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0
+
# Prepare ccache
RUN ln -s /usr/bin/ccache /usr/local/bin/gcc
RUN ln -s /usr/bin/ccache /usr/local/bin/g++
diff --git a/tools/dockerfile/stress_test/grpc_interop_stress_go/Dockerfile b/tools/dockerfile/stress_test/grpc_interop_stress_go/Dockerfile
index 2a875f59f1..1e2b7d8c67 100644
--- a/tools/dockerfile/stress_test/grpc_interop_stress_go/Dockerfile
+++ b/tools/dockerfile/stress_test/grpc_interop_stress_go/Dockerfile
@@ -34,6 +34,21 @@ RUN apt-get update && apt-get install -y python-pip && apt-get clean
RUN pip install --upgrade google-api-python-client
+#====================
+# Python dependencies
+
+# Install dependencies
+
+RUN apt-get update && apt-get install -y \
+ python-all-dev \
+ python3-all-dev \
+ python-pip
+
+# Install Python packages from PyPI
+RUN pip install pip --upgrade
+RUN pip install virtualenv
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0
+
# Using login shell removes Go from path, so we add it.
RUN ln -s /usr/local/go/bin/go /usr/local/bin
diff --git a/tools/dockerfile/stress_test/grpc_interop_stress_java/Dockerfile b/tools/dockerfile/stress_test/grpc_interop_stress_java/Dockerfile
index 69bef1480c..0c17ff595e 100644
--- a/tools/dockerfile/stress_test/grpc_interop_stress_java/Dockerfile
+++ b/tools/dockerfile/stress_test/grpc_interop_stress_java/Dockerfile
@@ -63,6 +63,21 @@ RUN apt-get update && apt-get install -y \
# Build profiling
RUN apt-get update && apt-get install -y time && apt-get clean
+#====================
+# Python dependencies
+
+# Install dependencies
+
+RUN apt-get update && apt-get install -y \
+ python-all-dev \
+ python3-all-dev \
+ python-pip
+
+# Install Python packages from PyPI
+RUN pip install pip --upgrade
+RUN pip install virtualenv
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0
+
# Prepare ccache
RUN ln -s /usr/bin/ccache /usr/local/bin/gcc
RUN ln -s /usr/bin/ccache /usr/local/bin/g++
diff --git a/tools/dockerfile/stress_test/grpc_interop_stress_node/Dockerfile b/tools/dockerfile/stress_test/grpc_interop_stress_node/Dockerfile
index 4fd7cc29a3..0594f69a5b 100644
--- a/tools/dockerfile/stress_test/grpc_interop_stress_node/Dockerfile
+++ b/tools/dockerfile/stress_test/grpc_interop_stress_node/Dockerfile
@@ -63,6 +63,21 @@ RUN apt-get update && apt-get install -y \
# Build profiling
RUN apt-get update && apt-get install -y time && apt-get clean
+#====================
+# Python dependencies
+
+# Install dependencies
+
+RUN apt-get update && apt-get install -y \
+ python-all-dev \
+ python3-all-dev \
+ python-pip
+
+# Install Python packages from PyPI
+RUN pip install pip --upgrade
+RUN pip install virtualenv
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0
+
#==================
# Node dependencies
@@ -87,10 +102,6 @@ RUN ln -s /usr/bin/ccache /usr/local/bin/c++
RUN ln -s /usr/bin/ccache /usr/local/bin/clang
RUN ln -s /usr/bin/ccache /usr/local/bin/clang++
-#======================
-# Zookeeper dependencies
-# TODO(jtattermusch): is zookeeper still needed?
-RUN apt-get install -y libzookeeper-mt-dev
RUN mkdir /var/local/jenkins
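The same generated hunks also retire the Zookeeper dependency that the removed TODO had been questioning. One quick way to confirm the cleanup left nothing behind — a hypothetical one-liner run from the repository root, not part of the diff — is to grep the Dockerfile tree:

    # Exits 0 and lists files if any Dockerfile still mentions the package.
    grep -rl "libzookeeper-mt-dev" tools/dockerfile \
      && echo "stale zookeeper references remain" \
      || echo "zookeeper fully removed"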
diff --git a/tools/dockerfile/stress_test/grpc_interop_stress_php/Dockerfile b/tools/dockerfile/stress_test/grpc_interop_stress_php/Dockerfile
index c29aaf7c3f..0716be5a9d 100644
--- a/tools/dockerfile/stress_test/grpc_interop_stress_php/Dockerfile
+++ b/tools/dockerfile/stress_test/grpc_interop_stress_php/Dockerfile
@@ -63,6 +63,21 @@ RUN apt-get update && apt-get install -y \
# Build profiling
RUN apt-get update && apt-get install -y time && apt-get clean
+#====================
+# Python dependencies
+
+# Install dependencies
+
+RUN apt-get update && apt-get install -y \
+ python-all-dev \
+ python3-all-dev \
+ python-pip
+
+# Install Python packages from PyPI
+RUN pip install pip --upgrade
+RUN pip install virtualenv
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0
+
#==================
# Ruby dependencies
@@ -88,12 +103,6 @@ RUN pip install --upgrade google-api-python-client
# Install dependencies
-RUN /bin/bash -l -c "echo 'deb http://packages.dotdeb.org wheezy-php55 all' \
- >> /etc/apt/sources.list.d/dotdeb.list"
-RUN /bin/bash -l -c "echo 'deb-src http://packages.dotdeb.org wheezy-php55 all' \
- >> /etc/apt/sources.list.d/dotdeb.list"
-RUN wget http://www.dotdeb.org/dotdeb.gpg -O- | apt-key add -
-
RUN apt-get update && apt-get install -y \
git php5 php5-dev phpunit unzip
@@ -105,10 +114,6 @@ RUN ln -s /usr/bin/ccache /usr/local/bin/c++
RUN ln -s /usr/bin/ccache /usr/local/bin/clang
RUN ln -s /usr/bin/ccache /usr/local/bin/clang++
-#======================
-# Zookeeper dependencies
-# TODO(jtattermusch): is zookeeper still needed?
-RUN apt-get install -y libzookeeper-mt-dev
RUN mkdir /var/local/jenkins
@@ -122,11 +127,6 @@ RUN /bin/bash -l -c "rvm all do gem install ronn rake"
RUN curl -sS https://getcomposer.org/installer | php
RUN mv composer.phar /usr/local/bin/composer
-# As an attempt to work around #4212, try to prefetch Protobuf-PHP dependency
-# into composer cache to prevent "composer install" from cloning on each build.
-RUN git clone --mirror https://github.com/stanley-cheung/Protobuf-PHP.git \
- /root/.composer/cache/vcs/git-github.com-stanley-cheung-Protobuf-PHP.git/
-
# Download the patched PHP protobuf so that PHP gRPC clients can be generated
# from proto3 schemas.
RUN git clone https://github.com/stanley-cheung/Protobuf-PHP.git /var/local/git/protobuf-php
diff --git a/tools/dockerfile/stress_test/grpc_interop_stress_python/Dockerfile b/tools/dockerfile/stress_test/grpc_interop_stress_python/Dockerfile
index 606b765457..20d2d3f57b 100644
--- a/tools/dockerfile/stress_test/grpc_interop_stress_python/Dockerfile
+++ b/tools/dockerfile/stress_test/grpc_interop_stress_python/Dockerfile
@@ -93,7 +93,7 @@ RUN apt-get update && apt-get install -y \
# Install Python packages from PyPI
RUN pip install pip --upgrade
RUN pip install virtualenv
-RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 tox
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0
RUN pip install coverage
diff --git a/tools/dockerfile/stress_test/grpc_interop_stress_ruby/Dockerfile b/tools/dockerfile/stress_test/grpc_interop_stress_ruby/Dockerfile
index 36b54ddafe..f459153fe5 100644
--- a/tools/dockerfile/stress_test/grpc_interop_stress_ruby/Dockerfile
+++ b/tools/dockerfile/stress_test/grpc_interop_stress_ruby/Dockerfile
@@ -63,6 +63,21 @@ RUN apt-get update && apt-get install -y \
# Build profiling
RUN apt-get update && apt-get install -y time && apt-get clean
+#====================
+# Python dependencies
+
+# Install dependencies
+
+RUN apt-get update && apt-get install -y \
+ python-all-dev \
+ python3-all-dev \
+ python-pip
+
+# Install Python packages from PyPI
+RUN pip install pip --upgrade
+RUN pip install virtualenv
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0
+
# Prepare ccache
RUN ln -s /usr/bin/ccache /usr/local/bin/gcc
RUN ln -s /usr/bin/ccache /usr/local/bin/g++
diff --git a/tools/dockerfile/test/csharp_coreclr_x64/Dockerfile b/tools/dockerfile/test/csharp_coreclr_x64/Dockerfile
index fd7215716d..25c6fe6ec6 100644
--- a/tools/dockerfile/test/csharp_coreclr_x64/Dockerfile
+++ b/tools/dockerfile/test/csharp_coreclr_x64/Dockerfile
@@ -63,6 +63,21 @@ RUN apt-get update && apt-get install -y \
# Build profiling
RUN apt-get update && apt-get install -y time && apt-get clean
+#====================
+# Python dependencies
+
+# Install dependencies
+
+RUN apt-get update && apt-get install -y \
+ python-all-dev \
+ python3-all-dev \
+ python-pip
+
+# Install Python packages from PyPI
+RUN pip install pip --upgrade
+RUN pip install virtualenv
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0
+
#================
# C# dependencies
@@ -103,10 +118,6 @@ RUN ln -s /usr/bin/ccache /usr/local/bin/c++
RUN ln -s /usr/bin/ccache /usr/local/bin/clang
RUN ln -s /usr/bin/ccache /usr/local/bin/clang++
-#======================
-# Zookeeper dependencies
-# TODO(jtattermusch): is zookeeper still needed?
-RUN apt-get install -y libzookeeper-mt-dev
RUN mkdir /var/local/jenkins
diff --git a/tools/dockerfile/test/csharp_jessie_x64/Dockerfile b/tools/dockerfile/test/csharp_jessie_x64/Dockerfile
index baab2f5638..e3d52f0cb5 100644
--- a/tools/dockerfile/test/csharp_jessie_x64/Dockerfile
+++ b/tools/dockerfile/test/csharp_jessie_x64/Dockerfile
@@ -63,6 +63,21 @@ RUN apt-get update && apt-get install -y \
# Build profiling
RUN apt-get update && apt-get install -y time && apt-get clean
+#====================
+# Python dependencies
+
+# Install dependencies
+
+RUN apt-get update && apt-get install -y \
+ python-all-dev \
+ python3-all-dev \
+ python-pip
+
+# Install Python packages from PyPI
+RUN pip install pip --upgrade
+RUN pip install virtualenv
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0
+
#================
# C# dependencies
@@ -88,10 +103,6 @@ RUN ln -s /usr/bin/ccache /usr/local/bin/c++
RUN ln -s /usr/bin/ccache /usr/local/bin/clang
RUN ln -s /usr/bin/ccache /usr/local/bin/clang++
-#======================
-# Zookeeper dependencies
-# TODO(jtattermusch): is zookeeper still needed?
-RUN apt-get install -y libzookeeper-mt-dev
RUN mkdir /var/local/jenkins
diff --git a/tools/dockerfile/test/cxx_jessie_x64/Dockerfile b/tools/dockerfile/test/cxx_jessie_x64/Dockerfile
index 6492158929..67cee19914 100644
--- a/tools/dockerfile/test/cxx_jessie_x64/Dockerfile
+++ b/tools/dockerfile/test/cxx_jessie_x64/Dockerfile
@@ -63,6 +63,21 @@ RUN apt-get update && apt-get install -y \
# Build profiling
RUN apt-get update && apt-get install -y time && apt-get clean
+#====================
+# Python dependencies
+
+# Install dependencies
+
+RUN apt-get update && apt-get install -y \
+ python-all-dev \
+ python3-all-dev \
+ python-pip
+
+# Install Python packages from PyPI
+RUN pip install pip --upgrade
+RUN pip install virtualenv
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0
+
#=================
# C++ dependencies
RUN apt-get update && apt-get -y install libgflags-dev libgtest-dev libc++-dev clang && apt-get clean
@@ -108,10 +123,6 @@ RUN ln -s /usr/bin/ccache /usr/local/bin/c++
RUN ln -s /usr/bin/ccache /usr/local/bin/clang
RUN ln -s /usr/bin/ccache /usr/local/bin/clang++
-#======================
-# Zookeeper dependencies
-# TODO(jtattermusch): is zookeeper still needed?
-RUN apt-get install -y libzookeeper-mt-dev
RUN mkdir /var/local/jenkins
diff --git a/tools/dockerfile/test/cxx_jessie_x86/Dockerfile b/tools/dockerfile/test/cxx_jessie_x86/Dockerfile
index 92c9c4ce86..bee0849c67 100644
--- a/tools/dockerfile/test/cxx_jessie_x86/Dockerfile
+++ b/tools/dockerfile/test/cxx_jessie_x86/Dockerfile
@@ -63,6 +63,21 @@ RUN apt-get update && apt-get install -y \
# Build profiling
RUN apt-get update && apt-get install -y time && apt-get clean
+#====================
+# Python dependencies
+
+# Install dependencies
+
+RUN apt-get update && apt-get install -y \
+ python-all-dev \
+ python3-all-dev \
+ python-pip
+
+# Install Python packages from PyPI
+RUN pip install pip --upgrade
+RUN pip install virtualenv
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0
+
#=================
# C++ dependencies
RUN apt-get update && apt-get -y install libgflags-dev libgtest-dev libc++-dev clang && apt-get clean
@@ -75,10 +90,6 @@ RUN ln -s /usr/bin/ccache /usr/local/bin/c++
RUN ln -s /usr/bin/ccache /usr/local/bin/clang
RUN ln -s /usr/bin/ccache /usr/local/bin/clang++
-#======================
-# Zookeeper dependencies
-# TODO(jtattermusch): is zookeeper still needed?
-RUN apt-get install -y libzookeeper-mt-dev
RUN mkdir /var/local/jenkins
diff --git a/tools/dockerfile/test/cxx_ubuntu1404_x64/Dockerfile b/tools/dockerfile/test/cxx_ubuntu1404_x64/Dockerfile
index 5982c9783e..2b3f4af3e6 100644
--- a/tools/dockerfile/test/cxx_ubuntu1404_x64/Dockerfile
+++ b/tools/dockerfile/test/cxx_ubuntu1404_x64/Dockerfile
@@ -63,14 +63,25 @@ RUN apt-get update && apt-get install -y \
# Build profiling
RUN apt-get update && apt-get install -y time && apt-get clean
+#====================
+# Python dependencies
+
+# Install dependencies
+
+RUN apt-get update && apt-get install -y \
+ python-all-dev \
+ python3-all-dev \
+ python-pip
+
+# Install Python packages from PyPI
+RUN pip install pip --upgrade
+RUN pip install virtualenv
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0
+
#=================
# C++ dependencies
RUN apt-get update && apt-get -y install libgflags-dev libgtest-dev libc++-dev clang && apt-get clean
-#======================
-# Zookeeper dependencies
-# TODO(jtattermusch): is zookeeper still needed?
-RUN apt-get install -y libzookeeper-mt-dev
RUN mkdir /var/local/jenkins
diff --git a/tools/dockerfile/test/cxx_ubuntu1604_x64/Dockerfile b/tools/dockerfile/test/cxx_ubuntu1604_x64/Dockerfile
index d356433163..2d282276d3 100644
--- a/tools/dockerfile/test/cxx_ubuntu1604_x64/Dockerfile
+++ b/tools/dockerfile/test/cxx_ubuntu1604_x64/Dockerfile
@@ -63,6 +63,21 @@ RUN apt-get update && apt-get install -y \
# Build profiling
RUN apt-get update && apt-get install -y time && apt-get clean
+#====================
+# Python dependencies
+
+# Install dependencies
+
+RUN apt-get update && apt-get install -y \
+ python-all-dev \
+ python3-all-dev \
+ python-pip
+
+# Install Python packages from PyPI
+RUN pip install pip --upgrade
+RUN pip install virtualenv
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0
+
#=================
# C++ dependencies
RUN apt-get update && apt-get -y install libgflags-dev libgtest-dev libc++-dev clang && apt-get clean
@@ -75,10 +90,6 @@ RUN ln -s /usr/bin/ccache /usr/local/bin/c++
RUN ln -s /usr/bin/ccache /usr/local/bin/clang
RUN ln -s /usr/bin/ccache /usr/local/bin/clang++
-#======================
-# Zookeeper dependencies
-# TODO(jtattermusch): is zookeeper still needed?
-RUN apt-get install -y libzookeeper-mt-dev
RUN mkdir /var/local/jenkins
diff --git a/tools/dockerfile/test/cxx_wheezy_x64/Dockerfile b/tools/dockerfile/test/cxx_wheezy_x64/Dockerfile
index dd9a79b1ed..c25033387f 100644
--- a/tools/dockerfile/test/cxx_wheezy_x64/Dockerfile
+++ b/tools/dockerfile/test/cxx_wheezy_x64/Dockerfile
@@ -63,6 +63,21 @@ RUN apt-get update && apt-get install -y \
# Build profiling
RUN apt-get update && apt-get install -y time && apt-get clean
+#====================
+# Python dependencies
+
+# Install dependencies
+
+RUN apt-get update && apt-get install -y \
+ python-all-dev \
+ python3-all-dev \
+ python-pip
+
+# Install Python packages from PyPI
+RUN pip install pip --upgrade
+RUN pip install virtualenv
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0
+
#=================
# C++ dependencies
RUN apt-get update && apt-get -y install libgflags-dev libgtest-dev libc++-dev clang && apt-get clean
@@ -86,10 +101,6 @@ RUN ln -s /usr/bin/ccache /usr/local/bin/c++
RUN ln -s /usr/bin/ccache /usr/local/bin/clang
RUN ln -s /usr/bin/ccache /usr/local/bin/clang++
-#======================
-# Zookeeper dependencies
-# TODO(jtattermusch): is zookeeper still needed?
-RUN apt-get install -y libzookeeper-mt-dev
RUN mkdir /var/local/jenkins
diff --git a/tools/dockerfile/test/fuzzer/Dockerfile b/tools/dockerfile/test/fuzzer/Dockerfile
index 6ba31114ab..bd04f07cea 100644
--- a/tools/dockerfile/test/fuzzer/Dockerfile
+++ b/tools/dockerfile/test/fuzzer/Dockerfile
@@ -63,6 +63,21 @@ RUN apt-get update && apt-get install -y \
# Build profiling
RUN apt-get update && apt-get install -y time && apt-get clean
+#====================
+# Python dependencies
+
+# Install dependencies
+
+RUN apt-get update && apt-get install -y \
+ python-all-dev \
+ python3-all-dev \
+ python-pip
+
+# Install Python packages from PyPI
+RUN pip install pip --upgrade
+RUN pip install virtualenv
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0
+
#=================
# C++ dependencies
RUN apt-get update && apt-get -y install libgflags-dev libgtest-dev libc++-dev clang && apt-get clean
@@ -108,10 +123,6 @@ RUN ln -s /usr/bin/ccache /usr/local/bin/c++
RUN ln -s /usr/bin/ccache /usr/local/bin/clang
RUN ln -s /usr/bin/ccache /usr/local/bin/clang++
-#======================
-# Zookeeper dependencies
-# TODO(jtattermusch): is zookeeper still needed?
-RUN apt-get install -y libzookeeper-mt-dev
RUN mkdir /var/local/jenkins
diff --git a/tools/dockerfile/test/multilang_jessie_x64/Dockerfile b/tools/dockerfile/test/multilang_jessie_x64/Dockerfile
index 5c3f77405e..13f7c10f92 100644
--- a/tools/dockerfile/test/multilang_jessie_x64/Dockerfile
+++ b/tools/dockerfile/test/multilang_jessie_x64/Dockerfile
@@ -100,12 +100,6 @@ RUN /bin/bash -l -c "nvm alias default 4"
# Install dependencies
-RUN /bin/bash -l -c "echo 'deb http://packages.dotdeb.org wheezy-php55 all' \
- >> /etc/apt/sources.list.d/dotdeb.list"
-RUN /bin/bash -l -c "echo 'deb-src http://packages.dotdeb.org wheezy-php55 all' \
- >> /etc/apt/sources.list.d/dotdeb.list"
-RUN wget http://www.dotdeb.org/dotdeb.gpg -O- | apt-key add -
-
RUN apt-get update && apt-get install -y \
git php5 php5-dev phpunit unzip
@@ -137,7 +131,7 @@ RUN apt-get update && apt-get install -y \
# Install Python packages from PyPI
RUN pip install pip --upgrade
RUN pip install virtualenv
-RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 tox
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0
# Prepare ccache
RUN ln -s /usr/bin/ccache /usr/local/bin/gcc
@@ -147,10 +141,6 @@ RUN ln -s /usr/bin/ccache /usr/local/bin/c++
RUN ln -s /usr/bin/ccache /usr/local/bin/clang
RUN ln -s /usr/bin/ccache /usr/local/bin/clang++
-#======================
-# Zookeeper dependencies
-# TODO(jtattermusch): is zookeeper still needed?
-RUN apt-get install -y libzookeeper-mt-dev
RUN mkdir /var/local/jenkins
diff --git a/tools/dockerfile/test/node_jessie_x64/Dockerfile b/tools/dockerfile/test/node_jessie_x64/Dockerfile
index 2a8d35a5dc..d9a7501829 100644
--- a/tools/dockerfile/test/node_jessie_x64/Dockerfile
+++ b/tools/dockerfile/test/node_jessie_x64/Dockerfile
@@ -63,6 +63,21 @@ RUN apt-get update && apt-get install -y \
# Build profiling
RUN apt-get update && apt-get install -y time && apt-get clean
+#====================
+# Python dependencies
+
+# Install dependencies
+
+RUN apt-get update && apt-get install -y \
+ python-all-dev \
+ python3-all-dev \
+ python-pip
+
+# Install Python packages from PyPI
+RUN pip install pip --upgrade
+RUN pip install virtualenv
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0
+
#==================
# Node dependencies
@@ -82,10 +97,6 @@ RUN ln -s /usr/bin/ccache /usr/local/bin/c++
RUN ln -s /usr/bin/ccache /usr/local/bin/clang
RUN ln -s /usr/bin/ccache /usr/local/bin/clang++
-#======================
-# Zookeeper dependencies
-# TODO(jtattermusch): is zookeeper still needed?
-RUN apt-get install -y libzookeeper-mt-dev
RUN mkdir /var/local/jenkins
diff --git a/tools/dockerfile/test/php7_jessie_x64/Dockerfile b/tools/dockerfile/test/php7_jessie_x64/Dockerfile
new file mode 100644
index 0000000000..221338956e
--- /dev/null
+++ b/tools/dockerfile/test/php7_jessie_x64/Dockerfile
@@ -0,0 +1,105 @@
+# Copyright 2016, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+FROM debian:jessie
+
+#=================
+# PHP7 dependencies
+
+# Install Git and basic packages.
+RUN apt-get update && apt-get install -y \
+ autoconf \
+ automake \
+ build-essential \
+ ccache \
+ curl \
+ git \
+ libcurl4-openssl-dev \
+ libgmp-dev \
+ libgmp3-dev \
+ libssl-dev \
+ libtool \
+ libxml2-dev \
+ pkg-config \
+ re2c \
+ time \
+ unzip \
+ wget \
+ zip && apt-get clean
+
+# Install other dependencies
+RUN ln -sf /usr/include/x86_64-linux-gnu/gmp.h /usr/include/gmp.h
+RUN wget http://ftp.gnu.org/gnu/bison/bison-2.6.4.tar.gz -O /var/local/bison-2.6.4.tar.gz
+RUN cd /var/local \
+ && tar -zxvf bison-2.6.4.tar.gz \
+ && cd /var/local/bison-2.6.4 \
+ && ./configure \
+ && make \
+ && make install
+
+# Compile PHP7 from source
+RUN git clone https://github.com/php/php-src /var/local/git/php-src
+RUN cd /var/local/git/php-src \
+ && git checkout PHP-7.0.9 \
+ && ./buildconf --force \
+ && ./configure \
+ --with-gmp \
+ --with-openssl \
+ --with-zlib \
+ && make \
+ && make install
+
+#====================
+# Python dependencies
+
+# Install dependencies
+
+RUN apt-get update && apt-get install -y \
+ python-all-dev \
+ python3-all-dev \
+ python-pip
+
+# Install Python packages from PyPI
+RUN pip install pip --upgrade
+RUN pip install virtualenv
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0
+
+# Prepare ccache
+RUN ln -s /usr/bin/ccache /usr/local/bin/gcc
+RUN ln -s /usr/bin/ccache /usr/local/bin/g++
+RUN ln -s /usr/bin/ccache /usr/local/bin/cc
+RUN ln -s /usr/bin/ccache /usr/local/bin/c++
+RUN ln -s /usr/bin/ccache /usr/local/bin/clang
+RUN ln -s /usr/bin/ccache /usr/local/bin/clang++
+
+
+RUN mkdir /var/local/jenkins
+
+# Define the default command.
+CMD ["bash"]
diff --git a/tools/dockerfile/test/php_jessie_x64/Dockerfile b/tools/dockerfile/test/php_jessie_x64/Dockerfile
index d8d27846c1..17ea36b76c 100644
--- a/tools/dockerfile/test/php_jessie_x64/Dockerfile
+++ b/tools/dockerfile/test/php_jessie_x64/Dockerfile
@@ -63,17 +63,26 @@ RUN apt-get update && apt-get install -y \
# Build profiling
RUN apt-get update && apt-get install -y time && apt-get clean
+#====================
+# Python dependencies
+
+# Install dependencies
+
+RUN apt-get update && apt-get install -y \
+ python-all-dev \
+ python3-all-dev \
+ python-pip
+
+# Install Python packages from PyPI
+RUN pip install pip --upgrade
+RUN pip install virtualenv
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0
+
#=================
# PHP dependencies
# Install dependencies
-RUN /bin/bash -l -c "echo 'deb http://packages.dotdeb.org wheezy-php55 all' \
- >> /etc/apt/sources.list.d/dotdeb.list"
-RUN /bin/bash -l -c "echo 'deb-src http://packages.dotdeb.org wheezy-php55 all' \
- >> /etc/apt/sources.list.d/dotdeb.list"
-RUN wget http://www.dotdeb.org/dotdeb.gpg -O- | apt-key add -
-
RUN apt-get update && apt-get install -y \
git php5 php5-dev phpunit unzip
@@ -85,10 +94,6 @@ RUN ln -s /usr/bin/ccache /usr/local/bin/c++
RUN ln -s /usr/bin/ccache /usr/local/bin/clang
RUN ln -s /usr/bin/ccache /usr/local/bin/clang++
-#======================
-# Zookeeper dependencies
-# TODO(jtattermusch): is zookeeper still needed?
-RUN apt-get install -y libzookeeper-mt-dev
RUN mkdir /var/local/jenkins
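Dropping the dotdeb wheezy-php55 apt source works here because debian:jessie already ships php5 at 5.6.x, so `git php5 php5-dev phpunit unzip` install cleanly from the stock repositories. A hypothetical guard layer (not in the diff) could make that assumption explicit:

    # Hypothetical version guard: jessie's stock php5 should be 5.6.x.
    RUN php5 -v | grep -q "PHP 5.6"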
diff --git a/tools/dockerfile/test/python_jessie_x64/Dockerfile b/tools/dockerfile/test/python_jessie_x64/Dockerfile
index 071fb2c93b..10a88916ad 100644
--- a/tools/dockerfile/test/python_jessie_x64/Dockerfile
+++ b/tools/dockerfile/test/python_jessie_x64/Dockerfile
@@ -76,7 +76,7 @@ RUN apt-get update && apt-get install -y \
# Install Python packages from PyPI
RUN pip install pip --upgrade
RUN pip install virtualenv
-RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 tox
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0
# Prepare ccache
RUN ln -s /usr/bin/ccache /usr/local/bin/gcc
@@ -86,10 +86,6 @@ RUN ln -s /usr/bin/ccache /usr/local/bin/c++
RUN ln -s /usr/bin/ccache /usr/local/bin/clang
RUN ln -s /usr/bin/ccache /usr/local/bin/clang++
-#======================
-# Zookeeper dependencies
-# TODO(jtattermusch): is zookeeper still needed?
-RUN apt-get install -y libzookeeper-mt-dev
RUN mkdir /var/local/jenkins
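Here, as in the multilang and stress images above, `tox` drops out of the pinned set in favor of six==1.10.0, the compatibility shim the Python package relies on at runtime. A hypothetical smoke test for the exact pin, assuming the image's default python (not part of the change):

    # Hypothetical pin check: fails the build if a different six is resolved.
    RUN python -c "import six; assert six.__version__ == '1.10.0', six.__version__"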
diff --git a/tools/dockerfile/test/python_pyenv_x64/Dockerfile b/tools/dockerfile/test/python_pyenv_x64/Dockerfile
new file mode 100644
index 0000000000..abb5f3c89b
--- /dev/null
+++ b/tools/dockerfile/test/python_pyenv_x64/Dockerfile
@@ -0,0 +1,112 @@
+# Copyright 2016, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+FROM debian:jessie
+
+# Install Git and basic packages.
+RUN apt-get update && apt-get install -y \
+ autoconf \
+ autotools-dev \
+ build-essential \
+ bzip2 \
+ ccache \
+ curl \
+ gcc \
+ gcc-multilib \
+ git \
+ golang \
+ gyp \
+ lcov \
+ libc6 \
+ libc6-dbg \
+ libc6-dev \
+ libgtest-dev \
+ libtool \
+ make \
+ perl \
+ strace \
+ python-dev \
+ python-setuptools \
+ python-yaml \
+ telnet \
+ unzip \
+ wget \
+ zip && apt-get clean
+
+#================
+# Build profiling
+RUN apt-get update && apt-get install -y time && apt-get clean
+
+#====================
+# Python dependencies
+
+# Install dependencies
+
+RUN apt-get update && apt-get install -y \
+ python-all-dev \
+ python3-all-dev \
+ python-pip
+
+# Install Python packages from PyPI
+RUN pip install pip --upgrade
+RUN pip install virtualenv
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0
+
+# Install dependencies for pyenv
+RUN apt-get update && apt-get install -y \
+ libbz2-dev \
+ libncurses5-dev \
+ libncursesw5-dev \
+ libreadline-dev \
+ libsqlite3-dev \
+ libssl-dev \
+ llvm \
+ mercurial \
+ zlib1g-dev && apt-get clean
+
+# Install Pyenv and dev Python versions 3.5 and 3.6
+RUN curl -L https://raw.githubusercontent.com/yyuu/pyenv-installer/master/bin/pyenv-installer | bash
+RUN pyenv update
+RUN pyenv install 3.5-dev
+RUN pyenv install 3.6-dev
+RUN pyenv local 3.5-dev 3.6-dev
+
+# Prepare ccache
+RUN ln -s /usr/bin/ccache /usr/local/bin/gcc
+RUN ln -s /usr/bin/ccache /usr/local/bin/g++
+RUN ln -s /usr/bin/ccache /usr/local/bin/cc
+RUN ln -s /usr/bin/ccache /usr/local/bin/c++
+RUN ln -s /usr/bin/ccache /usr/local/bin/clang
+RUN ln -s /usr/bin/ccache /usr/local/bin/clang++
+
+
+RUN mkdir /var/local/jenkins
+
+# Define the default command.
+CMD ["bash"]
diff --git a/tools/dockerfile/test/ruby_jessie_x64/Dockerfile b/tools/dockerfile/test/ruby_jessie_x64/Dockerfile
index df8eef5438..dae64e5c8c 100644
--- a/tools/dockerfile/test/ruby_jessie_x64/Dockerfile
+++ b/tools/dockerfile/test/ruby_jessie_x64/Dockerfile
@@ -63,6 +63,21 @@ RUN apt-get update && apt-get install -y \
# Build profiling
RUN apt-get update && apt-get install -y time && apt-get clean
+#====================
+# Python dependencies
+
+# Install dependencies
+
+RUN apt-get update && apt-get install -y \
+ python-all-dev \
+ python3-all-dev \
+ python-pip
+
+# Install Python packages from PyPI
+RUN pip install pip --upgrade
+RUN pip install virtualenv
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0
+
#==================
# Ruby dependencies
@@ -86,10 +101,6 @@ RUN ln -s /usr/bin/ccache /usr/local/bin/c++
RUN ln -s /usr/bin/ccache /usr/local/bin/clang
RUN ln -s /usr/bin/ccache /usr/local/bin/clang++
-#======================
-# Zookeeper dependencies
-# TODO(jtattermusch): is zookeeper still needed?
-RUN apt-get install -y libzookeeper-mt-dev
RUN mkdir /var/local/jenkins
diff --git a/tools/dockerfile/test/sanity/Dockerfile b/tools/dockerfile/test/sanity/Dockerfile
index 70a32c5586..f4b4831a64 100644
--- a/tools/dockerfile/test/sanity/Dockerfile
+++ b/tools/dockerfile/test/sanity/Dockerfile
@@ -63,6 +63,21 @@ RUN apt-get update && apt-get install -y \
# Build profiling
RUN apt-get update && apt-get install -y time && apt-get clean
+#====================
+# Python dependencies
+
+# Install dependencies
+
+RUN apt-get update && apt-get install -y \
+ python-all-dev \
+ python3-all-dev \
+ python-pip
+
+# Install Python packages from PyPI
+RUN pip install pip --upgrade
+RUN pip install virtualenv
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0
+
#========================
# Sanity test dependencies
RUN apt-get update && apt-get install -y \
diff --git a/tools/doxygen/Doxyfile.c++ b/tools/doxygen/Doxyfile.c++
index de7acd7777..a2415e1217 100644
--- a/tools/doxygen/Doxyfile.c++
+++ b/tools/doxygen/Doxyfile.c++
@@ -40,7 +40,7 @@ PROJECT_NAME = "GRPC C++"
# could be handy for archiving the generated documentation or if some version
# control system is used.
-PROJECT_NUMBER = 0.16.0-dev
+PROJECT_NUMBER = 1.1.0-dev
# Using the PROJECT_BRIEF tag one can provide an optional one line description
# for a project that appears at the top of each page and should give viewer a
diff --git a/tools/doxygen/Doxyfile.c++.internal b/tools/doxygen/Doxyfile.c++.internal
index 76bb3b6c59..945298b964 100644
--- a/tools/doxygen/Doxyfile.c++.internal
+++ b/tools/doxygen/Doxyfile.c++.internal
@@ -40,7 +40,7 @@ PROJECT_NAME = "GRPC C++"
# could be handy for archiving the generated documentation or if some version
# control system is used.
-PROJECT_NUMBER = 0.16.0-dev
+PROJECT_NUMBER = 1.1.0-dev
# Using the PROJECT_BRIEF tag one can provide an optional one line description
# for a project that appears at the top of each page and should give viewer a
diff --git a/tools/doxygen/Doxyfile.c++.internal.orig b/tools/doxygen/Doxyfile.c++.internal.orig
deleted file mode 100644
index c214b3d3c8..0000000000
--- a/tools/doxygen/Doxyfile.c++.internal.orig
+++ /dev/null
@@ -1,2505 +0,0 @@
-
-
-# Doxyfile 1.8.9.1
-
-# This file describes the settings to be used by the documentation system
-# doxygen (www.doxygen.org) for a project.
-#
-# All text after a double hash (##) is considered a comment and is placed in
-# front of the TAG it is preceding.
-#
-# All text after a single hash (#) is considered a comment and will be ignored.
-# The format is:
-# TAG = value [value, ...]
-# For lists, items can also be appended using:
-# TAG += value [value, ...]
-# Values that contain spaces should be placed between quotes (\" \").
-
-#---------------------------------------------------------------------------
-# Project related configuration options
-#---------------------------------------------------------------------------
-
-# This tag specifies the encoding used for all characters in the config file
-# that follow. The default is UTF-8 which is also the encoding used for all text
-# before the first occurrence of this tag. Doxygen uses libiconv (or the iconv
-# built into libc) for the transcoding. See http://www.gnu.org/software/libiconv
-# for the list of possible encodings.
-# The default value is: UTF-8.
-
-DOXYFILE_ENCODING = UTF-8
-
-# The PROJECT_NAME tag is a single word (or a sequence of words surrounded by
-# double-quotes, unless you are using Doxywizard) that should identify the
-# project for which the documentation is generated. This name is used in the
-# title of most generated pages and in a few other places.
-# The default value is: My Project.
-
-PROJECT_NAME = "GRPC C++"
-
-# The PROJECT_NUMBER tag can be used to enter a project or revision number. This
-# could be handy for archiving the generated documentation or if some version
-# control system is used.
-
-PROJECT_NUMBER = 0.15.0-dev
-
-# Using the PROJECT_BRIEF tag one can provide an optional one line description
-# for a project that appears at the top of each page and should give viewer a
-# quick idea about the purpose of the project. Keep the description short.
-
-PROJECT_BRIEF =
-
-# With the PROJECT_LOGO tag one can specify a logo or an icon that is included
-# in the documentation. The maximum height of the logo should not exceed 55
-# pixels and the maximum width should not exceed 200 pixels. Doxygen will copy
-# the logo to the output directory.
-
-PROJECT_LOGO =
-
-# The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path
-# into which the generated documentation will be written. If a relative path is
-# entered, it will be relative to the location where doxygen was started. If
-# left blank the current directory will be used.
-
-OUTPUT_DIRECTORY = doc/ref/c++.internal
-
-# If the CREATE_SUBDIRS tag is set to YES then doxygen will create 4096 sub-
-# directories (in 2 levels) under the output directory of each output format and
-# will distribute the generated files over these directories. Enabling this
-# option can be useful when feeding doxygen a huge amount of source files, where
-# putting all generated files in the same directory would otherwise causes
-# performance problems for the file system.
-# The default value is: NO.
-
-CREATE_SUBDIRS = NO
-
-# If the ALLOW_UNICODE_NAMES tag is set to YES, doxygen will allow non-ASCII
-# characters to appear in the names of generated files. If set to NO, non-ASCII
-# characters will be escaped, for example _xE3_x81_x84 will be used for Unicode
-# U+3044.
-# The default value is: NO.
-
-ALLOW_UNICODE_NAMES = NO
-
-# The OUTPUT_LANGUAGE tag is used to specify the language in which all
-# documentation generated by doxygen is written. Doxygen will use this
-# information to generate all constant output in the proper language.
-# Possible values are: Afrikaans, Arabic, Armenian, Brazilian, Catalan, Chinese,
-# Chinese-Traditional, Croatian, Czech, Danish, Dutch, English (United States),
-# Esperanto, Farsi (Persian), Finnish, French, German, Greek, Hungarian,
-# Indonesian, Italian, Japanese, Japanese-en (Japanese with English messages),
-# Korean, Korean-en (Korean with English messages), Latvian, Lithuanian,
-# Macedonian, Norwegian, Persian (Farsi), Polish, Portuguese, Romanian, Russian,
-# Serbian, Serbian-Cyrillic, Slovak, Slovene, Spanish, Swedish, Turkish,
-# Ukrainian and Vietnamese.
-# The default value is: English.
-
-OUTPUT_LANGUAGE = English
-
-# If the BRIEF_MEMBER_DESC tag is set to YES, doxygen will include brief member
-# descriptions after the members that are listed in the file and class
-# documentation (similar to Javadoc). Set to NO to disable this.
-# The default value is: YES.
-
-BRIEF_MEMBER_DESC = YES
-
-# If the REPEAT_BRIEF tag is set to YES, doxygen will prepend the brief
-# description of a member or function before the detailed description
-#
-# Note: If both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the
-# brief descriptions will be completely suppressed.
-# The default value is: YES.
-
-REPEAT_BRIEF = YES
-
-# This tag implements a quasi-intelligent brief description abbreviator that is
-# used to form the text in various listings. Each string in this list, if found
-# as the leading text of the brief description, will be stripped from the text
-# and the result, after processing the whole list, is used as the annotated
-# text. Otherwise, the brief description is used as-is. If left blank, the
-# following values are used ($name is automatically replaced with the name of
-# the entity):The $name class, The $name widget, The $name file, is, provides,
-# specifies, contains, represents, a, an and the.
-
-ABBREVIATE_BRIEF =
-
-# If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then
-# doxygen will generate a detailed section even if there is only a brief
-# description.
-# The default value is: NO.
-
-ALWAYS_DETAILED_SEC = NO
-
-# If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all
-# inherited members of a class in the documentation of that class as if those
-# members were ordinary class members. Constructors, destructors and assignment
-# operators of the base classes will not be shown.
-# The default value is: NO.
-
-INLINE_INHERITED_MEMB = NO
-
-# If the FULL_PATH_NAMES tag is set to YES, doxygen will prepend the full path
-# before files name in the file list and in the header files. If set to NO the
-# shortest path that makes the file name unique will be used
-# The default value is: YES.
-
-FULL_PATH_NAMES = YES
-
-# The STRIP_FROM_PATH tag can be used to strip a user-defined part of the path.
-# Stripping is only done if one of the specified strings matches the left-hand
-# part of the path. The tag can be used to show relative paths in the file list.
-# If left blank the directory from which doxygen is run is used as the path to
-# strip.
-#
-# Note that you can specify absolute paths here, but also relative paths, which
-# will be relative from the directory where doxygen is started.
-# This tag requires that the tag FULL_PATH_NAMES is set to YES.
-
-STRIP_FROM_PATH =
-
-# The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of the
-# path mentioned in the documentation of a class, which tells the reader which
-# header file to include in order to use a class. If left blank only the name of
-# the header file containing the class definition is used. Otherwise one should
-# specify the list of include paths that are normally passed to the compiler
-# using the -I flag.
-
-STRIP_FROM_INC_PATH =
-
-# If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but
-# less readable) file names. This can be useful is your file systems doesn't
-# support long names like on DOS, Mac, or CD-ROM.
-# The default value is: NO.
-
-SHORT_NAMES = NO
-
-# If the JAVADOC_AUTOBRIEF tag is set to YES then doxygen will interpret the
-# first line (until the first dot) of a Javadoc-style comment as the brief
-# description. If set to NO, the Javadoc-style will behave just like regular Qt-
-# style comments (thus requiring an explicit @brief command for a brief
-# description.)
-# The default value is: NO.
-
-JAVADOC_AUTOBRIEF = YES
-
-# If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first
-# line (until the first dot) of a Qt-style comment as the brief description. If
-# set to NO, the Qt-style will behave just like regular Qt-style comments (thus
-# requiring an explicit \brief command for a brief description.)
-# The default value is: NO.
-
-QT_AUTOBRIEF = NO
-
-# The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make doxygen treat a
-# multi-line C++ special comment block (i.e. a block of //! or /// comments) as
-# a brief description. This used to be the default behavior. The new default is
-# to treat a multi-line C++ comment block as a detailed description. Set this
-# tag to YES if you prefer the old behavior instead.
-#
-# Note that setting this tag to YES also means that rational rose comments are
-# not recognized any more.
-# The default value is: NO.
-
-MULTILINE_CPP_IS_BRIEF = NO
-
-# If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the
-# documentation from any documented member that it re-implements.
-# The default value is: YES.
-
-INHERIT_DOCS = YES
-
-# If the SEPARATE_MEMBER_PAGES tag is set to YES then doxygen will produce a new
-# page for each member. If set to NO, the documentation of a member will be part
-# of the file/class/namespace that contains it.
-# The default value is: NO.
-
-SEPARATE_MEMBER_PAGES = NO
-
-# The TAB_SIZE tag can be used to set the number of spaces in a tab. Doxygen
-# uses this value to replace tabs by spaces in code fragments.
-# Minimum value: 1, maximum value: 16, default value: 4.
-
-TAB_SIZE = 2
-
-# This tag can be used to specify a number of aliases that act as commands in
-# the documentation. An alias has the form:
-# name=value
-# For example adding
-# "sideeffect=@par Side Effects:\n"
-# will allow you to put the command \sideeffect (or @sideeffect) in the
-# documentation, which will result in a user-defined paragraph with heading
-# "Side Effects:". You can put \n's in the value part of an alias to insert
-# newlines.
-
-ALIASES =
-
-# This tag can be used to specify a number of word-keyword mappings (TCL only).
-# A mapping has the form "name=value". For example adding "class=itcl::class"
-# will allow you to use the command class in the itcl::class meaning.
-
-TCL_SUBST =
-
-# Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources
-# only. Doxygen will then generate output that is more tailored for C. For
-# instance, some of the names that are used will be different. The list of all
-# members will be omitted, etc.
-# The default value is: NO.
-
-OPTIMIZE_OUTPUT_FOR_C = YES
-
-# Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java or
-# Python sources only. Doxygen will then generate output that is more tailored
-# for that language. For instance, namespaces will be presented as packages,
-# qualified scopes will look different, etc.
-# The default value is: NO.
-
-OPTIMIZE_OUTPUT_JAVA = NO
-
-# Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran
-# sources. Doxygen will then generate output that is tailored for Fortran.
-# The default value is: NO.
-
-OPTIMIZE_FOR_FORTRAN = NO
-
-# Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL
-# sources. Doxygen will then generate output that is tailored for VHDL.
-# The default value is: NO.
-
-OPTIMIZE_OUTPUT_VHDL = NO
-
-# Doxygen selects the parser to use depending on the extension of the files it
-# parses. With this tag you can assign which parser to use for a given
-# extension. Doxygen has a built-in mapping, but you can override or extend it
-# using this tag. The format is ext=language, where ext is a file extension, and
-# language is one of the parsers supported by doxygen: IDL, Java, Javascript,
-# C#, C, C++, D, PHP, Objective-C, Python, Fortran (fixed format Fortran:
-# FortranFixed, free formatted Fortran: FortranFree, unknown formatted Fortran:
-# Fortran. In the later case the parser tries to guess whether the code is fixed
-# or free formatted code, this is the default for Fortran type files), VHDL. For
-# instance to make doxygen treat .inc files as Fortran files (default is PHP),
-# and .f files as C (default is Fortran), use: inc=Fortran f=C.
-#
-# Note: For files without extension you can use no_extension as a placeholder.
-#
-# Note that for custom extensions you also need to set FILE_PATTERNS otherwise
-# the files are not read by doxygen.
-
-EXTENSION_MAPPING =
-
-# If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments
-# according to the Markdown format, which allows for more readable
-# documentation. See http://daringfireball.net/projects/markdown/ for details.
-# The output of markdown processing is further processed by doxygen, so you can
-# mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in
-# case of backward compatibilities issues.
-# The default value is: YES.
-
-MARKDOWN_SUPPORT = YES
-
-# When enabled doxygen tries to link words that correspond to documented
-# classes, or namespaces to their corresponding documentation. Such a link can
-# be prevented in individual cases by putting a % sign in front of the word or
-# globally by setting AUTOLINK_SUPPORT to NO.
-# The default value is: YES.
-
-AUTOLINK_SUPPORT = YES
-
-# If you use STL classes (i.e. std::string, std::vector, etc.) but do not want
-# to include (a tag file for) the STL sources as input, then you should set this
-# tag to YES in order to let doxygen match functions declarations and
-# definitions whose arguments contain STL classes (e.g. func(std::string);
-# versus func(std::string) {}). This also make the inheritance and collaboration
-# diagrams that involve STL classes more complete and accurate.
-# The default value is: NO.
-
-BUILTIN_STL_SUPPORT = NO
-
-# If you use Microsoft's C++/CLI language, you should set this option to YES to
-# enable parsing support.
-# The default value is: NO.
-
-CPP_CLI_SUPPORT = NO
-
-# Set the SIP_SUPPORT tag to YES if your project consists of sip (see:
-# http://www.riverbankcomputing.co.uk/software/sip/intro) sources only. Doxygen
-# will parse them like normal C++ but will assume all classes use public instead
-# of private inheritance when no explicit protection keyword is present.
-# The default value is: NO.
-
-SIP_SUPPORT = NO
-
-# For Microsoft's IDL there are propget and propput attributes to indicate
-# getter and setter methods for a property. Setting this option to YES will make
-# doxygen to replace the get and set methods by a property in the documentation.
-# This will only work if the methods are indeed getting or setting a simple
-# type. If this is not the case, or you want to show the methods anyway, you
-# should set this option to NO.
-# The default value is: YES.
-
-IDL_PROPERTY_SUPPORT = YES
-
-# If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC
-# tag is set to YES then doxygen will reuse the documentation of the first
-# member in the group (if any) for the other members of the group. By default
-# all members of a group must be documented explicitly.
-# The default value is: NO.
-
-DISTRIBUTE_GROUP_DOC = NO
-
-# Set the SUBGROUPING tag to YES to allow class member groups of the same type
-# (for instance a group of public functions) to be put as a subgroup of that
-# type (e.g. under the Public Functions section). Set it to NO to prevent
-# subgrouping. Alternatively, this can be done per class using the
-# \nosubgrouping command.
-# The default value is: YES.
-
-SUBGROUPING = YES
-
-# When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and unions
-# are shown inside the group in which they are included (e.g. using \ingroup)
-# instead of on a separate page (for HTML and Man pages) or section (for LaTeX
-# and RTF).
-#
-# Note that this feature does not work in combination with
-# SEPARATE_MEMBER_PAGES.
-# The default value is: NO.
-
-INLINE_GROUPED_CLASSES = NO
-
-# When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and unions
-# with only public data fields or simple typedef fields will be shown inline in
-# the documentation of the scope in which they are defined (i.e. file,
-# namespace, or group documentation), provided this scope is documented. If set
-# to NO, structs, classes, and unions are shown on a separate page (for HTML and
-# Man pages) or section (for LaTeX and RTF).
-# The default value is: NO.
-
-INLINE_SIMPLE_STRUCTS = NO
-
-# When TYPEDEF_HIDES_STRUCT tag is enabled, a typedef of a struct, union, or
-# enum is documented as struct, union, or enum with the name of the typedef. So
-# typedef struct TypeS {} TypeT, will appear in the documentation as a struct
-# with name TypeT. When disabled the typedef will appear as a member of a file,
-# namespace, or class. And the struct will be named TypeS. This can typically be
-# useful for C code in case the coding convention dictates that all compound
-# types are typedef'ed and only the typedef is referenced, never the tag name.
-# The default value is: NO.
-
-TYPEDEF_HIDES_STRUCT = NO
-
-# The size of the symbol lookup cache can be set using LOOKUP_CACHE_SIZE. This
-# cache is used to resolve symbols given their name and scope. Since this can be
-# an expensive process and often the same symbol appears multiple times in the
-# code, doxygen keeps a cache of pre-resolved symbols. If the cache is too small
-# doxygen will become slower. If the cache is too large, memory is wasted. The
-# cache size is given by this formula: 2^(16+LOOKUP_CACHE_SIZE). The valid range
-# is 0..9, the default is 0, corresponding to a cache size of 2^16=65536
-# symbols. At the end of a run doxygen will report the cache usage and suggest
-# the optimal cache size from a speed point of view.
-# Minimum value: 0, maximum value: 9, default value: 0.
-
-LOOKUP_CACHE_SIZE = 0
-
-#---------------------------------------------------------------------------
-# Build related configuration options
-#---------------------------------------------------------------------------
-
-# If the EXTRACT_ALL tag is set to YES, doxygen will assume all entities in
-# documentation are documented, even if no documentation was available. Private
-# class members and static file members will be hidden unless the
-# EXTRACT_PRIVATE respectively EXTRACT_STATIC tags are set to YES.
-# Note: This will also disable the warnings about undocumented members that are
-# normally produced when WARNINGS is set to YES.
-# The default value is: NO.
-
-EXTRACT_ALL = YES
-
-# If the EXTRACT_PRIVATE tag is set to YES, all private members of a class will
-# be included in the documentation.
-# The default value is: NO.
-
-EXTRACT_PRIVATE = NO
-
-# If the EXTRACT_PACKAGE tag is set to YES, all members with package or internal
-# scope will be included in the documentation.
-# The default value is: NO.
-
-EXTRACT_PACKAGE = NO
-
-# If the EXTRACT_STATIC tag is set to YES, all static members of a file will be
-# included in the documentation.
-# The default value is: NO.
-
-EXTRACT_STATIC = NO
-
-# If the EXTRACT_LOCAL_CLASSES tag is set to YES, classes (and structs) defined
-# locally in source files will be included in the documentation. If set to NO,
-# only classes defined in header files are included. Does not have any effect
-# for Java sources.
-# The default value is: YES.
-
-EXTRACT_LOCAL_CLASSES = YES
-
-# This flag is only useful for Objective-C code. If set to YES, local methods,
-# which are defined in the implementation section but not in the interface are
-# included in the documentation. If set to NO, only methods in the interface are
-# included.
-# The default value is: NO.
-
-EXTRACT_LOCAL_METHODS = NO
-
-# If this flag is set to YES, the members of anonymous namespaces will be
-# extracted and appear in the documentation as a namespace called
-# 'anonymous_namespace{file}', where file will be replaced with the base name of
-# the file that contains the anonymous namespace. By default anonymous namespace
-# are hidden.
-# The default value is: NO.
-
-EXTRACT_ANON_NSPACES = NO
-
-# If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all
-# undocumented members inside documented classes or files. If set to NO these
-# members will be included in the various overviews, but no documentation
-# section is generated. This option has no effect if EXTRACT_ALL is enabled.
-# The default value is: NO.
-
-HIDE_UNDOC_MEMBERS = NO
-
-# If the HIDE_UNDOC_CLASSES tag is set to YES, doxygen will hide all
-# undocumented classes that are normally visible in the class hierarchy. If set
-# to NO, these classes will be included in the various overviews. This option
-# has no effect if EXTRACT_ALL is enabled.
-# The default value is: NO.
-
-HIDE_UNDOC_CLASSES = NO
-
-# If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend
-# (class|struct|union) declarations. If set to NO, these declarations will be
-# included in the documentation.
-# The default value is: NO.
-
-HIDE_FRIEND_COMPOUNDS = NO
-
-# If the HIDE_IN_BODY_DOCS tag is set to YES, doxygen will hide any
-# documentation blocks found inside the body of a function. If set to NO, these
-# blocks will be appended to the function's detailed documentation block.
-# The default value is: NO.
-
-HIDE_IN_BODY_DOCS = NO
-
-# The INTERNAL_DOCS tag determines if documentation that is typed after a
-# \internal command is included. If the tag is set to NO then the documentation
-# will be excluded. Set it to YES to include the internal documentation.
-# The default value is: NO.
-
-INTERNAL_DOCS = NO
-
-# If the CASE_SENSE_NAMES tag is set to NO then doxygen will only generate file
-# names in lower-case letters. If set to YES, upper-case letters are also
-# allowed. This is useful if you have classes or files whose names only differ
-# in case and if your file system supports case sensitive file names. Windows
-# and Mac users are advised to set this option to NO.
-# The default value is: system dependent.
-
-CASE_SENSE_NAMES = NO
-
-# If the HIDE_SCOPE_NAMES tag is set to NO then doxygen will show members with
-# their full class and namespace scopes in the documentation. If set to YES, the
-# scope will be hidden.
-# The default value is: NO.
-
-HIDE_SCOPE_NAMES = NO
-
-# If the HIDE_COMPOUND_REFERENCE tag is set to NO (default) then doxygen will
-# append additional text to a page's title, such as Class Reference. If set to
-# YES the compound reference will be hidden.
-# The default value is: NO.
-
-HIDE_COMPOUND_REFERENCE= NO
-
-# If the SHOW_INCLUDE_FILES tag is set to YES then doxygen will put a list of
-# the files that are included by a file in the documentation of that file.
-# The default value is: YES.
-
-SHOW_INCLUDE_FILES = YES
-
-# If the SHOW_GROUPED_MEMB_INC tag is set to YES then Doxygen will add for each
-# grouped member an include statement to the documentation, telling the reader
-# which file to include in order to use the member.
-# The default value is: NO.
-
-SHOW_GROUPED_MEMB_INC = NO
-
-# If the FORCE_LOCAL_INCLUDES tag is set to YES then doxygen will list include
-# files with double quotes in the documentation rather than with sharp brackets.
-# The default value is: NO.
-
-FORCE_LOCAL_INCLUDES = NO
-
-# If the INLINE_INFO tag is set to YES then a tag [inline] is inserted in the
-# documentation for inline members.
-# The default value is: YES.
-
-INLINE_INFO = YES
-
-# If the SORT_MEMBER_DOCS tag is set to YES then doxygen will sort the
-# (detailed) documentation of file and class members alphabetically by member
-# name. If set to NO, the members will appear in declaration order.
-# The default value is: YES.
-
-SORT_MEMBER_DOCS = YES
-
-# If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the brief
-# descriptions of file, namespace and class members alphabetically by member
-# name. If set to NO, the members will appear in declaration order. Note that
-# this will also influence the order of the classes in the class list.
-# The default value is: NO.
-
-SORT_BRIEF_DOCS = NO
-
-# If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the
-# (brief and detailed) documentation of class members so that constructors and
-# destructors are listed first. If set to NO the constructors will appear in the
-# respective orders defined by SORT_BRIEF_DOCS and SORT_MEMBER_DOCS.
-# Note: If SORT_BRIEF_DOCS is set to NO this option is ignored for sorting brief
-# member documentation.
-# Note: If SORT_MEMBER_DOCS is set to NO this option is ignored for sorting
-# detailed member documentation.
-# The default value is: NO.
-
-SORT_MEMBERS_CTORS_1ST = NO
-
-# If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the hierarchy
-# of group names into alphabetical order. If set to NO the group names will
-# appear in their defined order.
-# The default value is: NO.
-
-SORT_GROUP_NAMES = NO
-
-# If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be sorted by
-# fully-qualified names, including namespaces. If set to NO, the class list will
-# be sorted only by class name, not including the namespace part.
-# Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES.
-# Note: This option applies only to the class list, not to the alphabetical
-# list.
-# The default value is: NO.
-
-SORT_BY_SCOPE_NAME = NO
-
-# If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper
-# type resolution of all parameters of a function it will reject a match between
-# the prototype and the implementation of a member function even if there is
-# only one candidate or it is obvious which candidate to choose by doing a
-# simple string match. By disabling STRICT_PROTO_MATCHING doxygen will still
-# accept a match between prototype and implementation in such cases.
-# The default value is: NO.
-
-STRICT_PROTO_MATCHING = NO
-
-# The GENERATE_TODOLIST tag can be used to enable (YES) or disable (NO) the todo
-# list. This list is created by putting \todo commands in the documentation.
-# The default value is: YES.
-
-GENERATE_TODOLIST = YES
-
-# The GENERATE_TESTLIST tag can be used to enable (YES) or disable (NO) the test
-# list. This list is created by putting \test commands in the documentation.
-# The default value is: YES.
-
-GENERATE_TESTLIST = YES
-
-# The GENERATE_BUGLIST tag can be used to enable (YES) or disable (NO) the bug
-# list. This list is created by putting \bug commands in the documentation.
-# The default value is: YES.
-
-GENERATE_BUGLIST = YES
-
-# The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or disable (NO)
-# the deprecated list. This list is created by putting \deprecated commands in
-# the documentation.
-# The default value is: YES.
-
-GENERATE_DEPRECATEDLIST= YES
-
-# The ENABLED_SECTIONS tag can be used to enable conditional documentation
-# sections, marked by \if <section_label> ... \endif and \cond <section_label>
-# ... \endcond blocks.
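-#
-# For example (illustrative only; "internal" is a hypothetical section label,
-# and this project enables none):
-# ENABLED_SECTIONS = internal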
-
-ENABLED_SECTIONS =
-
-# The MAX_INITIALIZER_LINES tag determines the maximum number of lines that the
-# initial value of a variable or macro / define can have for it to appear in the
-# documentation. If the initializer consists of more lines than specified here
-# it will be hidden. Use a value of 0 to hide initializers completely. The
-# appearance of the value of individual variables and macros / defines can be
-# controlled using \showinitializer or \hideinitializer command in the
-# documentation regardless of this setting.
-# Minimum value: 0, maximum value: 10000, default value: 30.
-
-MAX_INITIALIZER_LINES = 30
-
-# Set the SHOW_USED_FILES tag to NO to disable the list of files generated at
-# the bottom of the documentation of classes and structs. If set to YES, the
-# list will mention the files that were used to generate the documentation.
-# The default value is: YES.
-
-SHOW_USED_FILES = YES
-
-# Set the SHOW_FILES tag to NO to disable the generation of the Files page. This
-# will remove the Files entry from the Quick Index and from the Folder Tree View
-# (if specified).
-# The default value is: YES.
-
-SHOW_FILES = YES
-
-# Set the SHOW_NAMESPACES tag to NO to disable the generation of the Namespaces
-# page. This will remove the Namespaces entry from the Quick Index and from the
-# Folder Tree View (if specified).
-# The default value is: YES.
-
-SHOW_NAMESPACES = YES
-
-# The FILE_VERSION_FILTER tag can be used to specify a program or script that
-# doxygen should invoke to get the current version for each file (typically from
-# the version control system). Doxygen will invoke the program by executing (via
-# popen()) the command <command> <input-file>, where <command> is the value of
-# the FILE_VERSION_FILTER tag, and <input-file> is the name of an input file
-# provided by doxygen. Whatever the program writes to standard output is used
-# as the file version. For an example see the documentation.
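-#
-# For example, a git-based filter might look like this (illustrative only;
-# assumes the sources live in a git checkout):
-# FILE_VERSION_FILTER = "git log -n 1 --pretty=format:%h --"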
-
-FILE_VERSION_FILTER =
-
-# The LAYOUT_FILE tag can be used to specify a layout file which will be parsed
-# by doxygen. The layout file controls the global structure of the generated
-# output files in an output format independent way. To create the layout file
-# that represents doxygen's defaults, run doxygen with the -l option. You can
-# optionally specify a file name after the option, if omitted DoxygenLayout.xml
-# will be used as the name of the layout file.
-#
-# Note that if you run doxygen from a directory containing a file called
-# DoxygenLayout.xml, doxygen will parse it automatically even if the LAYOUT_FILE
-# tag is left empty.
-
-LAYOUT_FILE =
-
-# The CITE_BIB_FILES tag can be used to specify one or more bib files containing
-# the reference definitions. This must be a list of .bib files. The .bib
-# extension is automatically appended if omitted. This requires the bibtex tool
-# to be installed. See also http://en.wikipedia.org/wiki/BibTeX for more info.
-# For LaTeX the style of the bibliography can be controlled using
-# LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the
-# search path. See also \cite for info how to create references.
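-#
-# For example (illustrative; doc/references.bib is a hypothetical file):
-# CITE_BIB_FILES = doc/references.bib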
-
-CITE_BIB_FILES =
-
-#---------------------------------------------------------------------------
-# Configuration options related to warning and progress messages
-#---------------------------------------------------------------------------
-
-# The QUIET tag can be used to turn on/off the messages that are generated to
-# standard output by doxygen. If QUIET is set to YES this implies that the
-# messages are off.
-# The default value is: NO.
-
-QUIET = NO
-
-# The WARNINGS tag can be used to turn on/off the warning messages that are
-# generated to standard error (stderr) by doxygen. If WARNINGS is set to YES
-# this implies that the warnings are on.
-#
-# Tip: Turn warnings on while writing the documentation.
-# The default value is: YES.
-
-WARNINGS = YES
-
-# If the WARN_IF_UNDOCUMENTED tag is set to YES then doxygen will generate
-# warnings for undocumented members. If EXTRACT_ALL is set to YES then this flag
-# will automatically be disabled.
-# The default value is: YES.
-
-WARN_IF_UNDOCUMENTED = YES
-
-# If the WARN_IF_DOC_ERROR tag is set to YES, doxygen will generate warnings for
-# potential errors in the documentation, such as not documenting some parameters
-# in a documented function, or documenting parameters that don't exist or using
-# markup commands wrongly.
-# The default value is: YES.
-
-WARN_IF_DOC_ERROR = YES
-
-# The WARN_NO_PARAMDOC option can be enabled to get warnings for functions that
-# are documented, but have no documentation for their parameters or return
-# value. If set to NO, doxygen will only warn about wrong or incomplete
-# parameter documentation, but not about the absence of documentation.
-# The default value is: NO.
-
-WARN_NO_PARAMDOC = NO
-
-# The WARN_FORMAT tag determines the format of the warning messages that doxygen
-# can produce. The string should contain the $file, $line, and $text tags, which
-# will be replaced by the file and line number from which the warning originated
-# and the warning text. Optionally the format may contain $version, which will
-# be replaced by the version of the file (if it could be obtained via
-# FILE_VERSION_FILTER).
-# The default value is: $file:$line: $text.
-
-WARN_FORMAT = "$file:$line: $text"
-
-# The WARN_LOGFILE tag can be used to specify a file to which warning and error
-# messages should be written. If left blank the output is written to standard
-# error (stderr).
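-#
-# For example (illustrative file name):
-# WARN_LOGFILE = doxygen_warnings.log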
-
-WARN_LOGFILE =
-
-#---------------------------------------------------------------------------
-# Configuration options related to the input files
-#---------------------------------------------------------------------------
-
-# The INPUT tag is used to specify the files and/or directories that contain
-# documented source files. You may enter file names like myfile.cpp or
-# directories like /usr/src/myproject. Separate the files or directories with
-# spaces.
-# Note: If this tag is empty the current directory is searched.
-
-INPUT = include/grpc++/alarm.h \
-include/grpc++/channel.h \
-include/grpc++/client_context.h \
-include/grpc++/completion_queue.h \
-include/grpc++/create_channel.h \
-include/grpc++/generic/async_generic_service.h \
-include/grpc++/generic/generic_stub.h \
-include/grpc++/grpc++.h \
-include/grpc++/impl/call.h \
-include/grpc++/impl/client_unary_call.h \
-include/grpc++/impl/codegen/core_codegen.h \
-include/grpc++/impl/grpc_library.h \
-include/grpc++/impl/method_handler_impl.h \
-include/grpc++/impl/rpc_method.h \
-include/grpc++/impl/rpc_service_method.h \
-include/grpc++/impl/serialization_traits.h \
-include/grpc++/impl/server_builder_option.h \
-include/grpc++/impl/server_builder_plugin.h \
-include/grpc++/impl/server_initializer.h \
-include/grpc++/impl/service_type.h \
-include/grpc++/impl/sync.h \
-include/grpc++/impl/sync_cxx11.h \
-include/grpc++/impl/sync_no_cxx11.h \
-include/grpc++/impl/thd.h \
-include/grpc++/impl/thd_cxx11.h \
-include/grpc++/impl/thd_no_cxx11.h \
-include/grpc++/security/auth_context.h \
-include/grpc++/security/auth_metadata_processor.h \
-include/grpc++/security/credentials.h \
-include/grpc++/security/server_credentials.h \
-include/grpc++/server.h \
-include/grpc++/server_builder.h \
-include/grpc++/server_context.h \
-include/grpc++/support/async_stream.h \
-include/grpc++/support/async_unary_call.h \
-include/grpc++/support/byte_buffer.h \
-include/grpc++/support/channel_arguments.h \
-include/grpc++/support/config.h \
-include/grpc++/support/slice.h \
-include/grpc++/support/status.h \
-include/grpc++/support/status_code_enum.h \
-include/grpc++/support/string_ref.h \
-include/grpc++/support/stub_options.h \
-include/grpc++/support/sync_stream.h \
-include/grpc++/support/time.h \
-include/grpc++/impl/codegen/async_stream.h \
-include/grpc++/impl/codegen/async_unary_call.h \
-include/grpc++/impl/codegen/call.h \
-include/grpc++/impl/codegen/call_hook.h \
-include/grpc++/impl/codegen/channel_interface.h \
-include/grpc++/impl/codegen/client_context.h \
-include/grpc++/impl/codegen/client_unary_call.h \
-include/grpc++/impl/codegen/completion_queue.h \
-include/grpc++/impl/codegen/completion_queue_tag.h \
-include/grpc++/impl/codegen/config.h \
-include/grpc++/impl/codegen/core_codegen_interface.h \
-include/grpc++/impl/codegen/create_auth_context.h \
-include/grpc++/impl/codegen/grpc_library.h \
-include/grpc++/impl/codegen/method_handler_impl.h \
-include/grpc++/impl/codegen/rpc_method.h \
-include/grpc++/impl/codegen/rpc_service_method.h \
-include/grpc++/impl/codegen/security/auth_context.h \
-include/grpc++/impl/codegen/serialization_traits.h \
-include/grpc++/impl/codegen/server_context.h \
-include/grpc++/impl/codegen/server_interface.h \
-include/grpc++/impl/codegen/service_type.h \
-include/grpc++/impl/codegen/status.h \
-include/grpc++/impl/codegen/status_code_enum.h \
-include/grpc++/impl/codegen/string_ref.h \
-include/grpc++/impl/codegen/stub_options.h \
-include/grpc++/impl/codegen/sync.h \
-include/grpc++/impl/codegen/sync_cxx11.h \
-include/grpc++/impl/codegen/sync_no_cxx11.h \
-include/grpc++/impl/codegen/sync_stream.h \
-include/grpc++/impl/codegen/time.h \
-include/grpc/impl/codegen/byte_buffer.h \
-include/grpc/impl/codegen/byte_buffer_reader.h \
-include/grpc/impl/codegen/compression_types.h \
-include/grpc/impl/codegen/connectivity_state.h \
-include/grpc/impl/codegen/grpc_types.h \
-include/grpc/impl/codegen/propagation_bits.h \
-include/grpc/impl/codegen/status.h \
-include/grpc/impl/codegen/alloc.h \
-include/grpc/impl/codegen/atm.h \
-include/grpc/impl/codegen/atm_gcc_atomic.h \
-include/grpc/impl/codegen/atm_gcc_sync.h \
-include/grpc/impl/codegen/atm_windows.h \
-include/grpc/impl/codegen/log.h \
-include/grpc/impl/codegen/port_platform.h \
-include/grpc/impl/codegen/slice.h \
-include/grpc/impl/codegen/slice_buffer.h \
-include/grpc/impl/codegen/sync.h \
-include/grpc/impl/codegen/sync_generic.h \
-include/grpc/impl/codegen/sync_posix.h \
-include/grpc/impl/codegen/sync_windows.h \
-include/grpc/impl/codegen/time.h \
-include/grpc++/impl/codegen/config_protobuf.h \
-include/grpc++/support/config_protobuf.h \
-src/cpp/client/secure_credentials.h \
-src/cpp/common/secure_auth_context.h \
-src/cpp/server/secure_server_credentials.h \
-src/cpp/client/create_channel_internal.h \
-src/cpp/server/dynamic_thread_pool.h \
-src/cpp/server/thread_pool_interface.h \
-src/cpp/client/secure_credentials.cc \
-src/cpp/common/auth_property_iterator.cc \
-src/cpp/common/secure_auth_context.cc \
-src/cpp/common/secure_channel_arguments.cc \
-src/cpp/common/secure_create_auth_context.cc \
-src/cpp/server/secure_server_credentials.cc \
-src/cpp/client/channel.cc \
-src/cpp/client/client_context.cc \
-src/cpp/client/create_channel.cc \
-src/cpp/client/create_channel_internal.cc \
-src/cpp/client/credentials.cc \
-src/cpp/client/generic_stub.cc \
-src/cpp/client/insecure_credentials.cc \
-src/cpp/common/channel_arguments.cc \
-src/cpp/common/completion_queue.cc \
-src/cpp/common/core_codegen.cc \
-src/cpp/common/rpc_method.cc \
-src/cpp/server/async_generic_service.cc \
-src/cpp/server/create_default_thread_pool.cc \
-src/cpp/server/dynamic_thread_pool.cc \
-src/cpp/server/insecure_server_credentials.cc \
-src/cpp/server/server.cc \
-src/cpp/server/server_builder.cc \
-src/cpp/server/server_context.cc \
-src/cpp/server/server_credentials.cc \
-src/cpp/util/byte_buffer.cc \
-src/cpp/util/slice.cc \
-src/cpp/util/status.cc \
-src/cpp/util/string_ref.cc \
-src/cpp/util/time.cc \
-src/cpp/codegen/codegen_init.cc
-
-# This tag can be used to specify the character encoding of the source files
-# that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses
-# libiconv (or the iconv built into libc) for the transcoding. See the libiconv
-# documentation (see: http://www.gnu.org/software/libiconv) for the list of
-# possible encodings.
-# The default value is: UTF-8.
-
-INPUT_ENCODING = UTF-8
-
-# If the value of the INPUT tag contains directories, you can use the
-# FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and
-# *.h) to filter out the source-files in the directories. If left blank the
-# following patterns are tested: *.c, *.cc, *.cxx, *.cpp, *.c++, *.java, *.ii,
-# *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h, *.hh, *.hxx, *.hpp,
-# *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc, *.m, *.markdown,
-# *.md, *.mm, *.dox, *.py, *.f90, *.f, *.for, *.tcl, *.vhd, *.vhdl, *.ucf,
-# *.qsf, *.as and *.js.
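-#
-# For example (illustrative only; this configuration lists its input files
-# explicitly and leaves the tag blank):
-# FILE_PATTERNS = *.h *.cc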
-
-FILE_PATTERNS =
-
-# The RECURSIVE tag can be used to specify whether or not subdirectories should
-# be searched for input files as well.
-# The default value is: NO.
-
-RECURSIVE = NO
-
-# The EXCLUDE tag can be used to specify files and/or directories that should be
-# excluded from the INPUT source files. This way you can easily exclude a
-# subdirectory from a directory tree whose root is specified with the INPUT tag.
-#
-# Note that relative paths are relative to the directory from which doxygen is
-# run.
-
-EXCLUDE =
-
-# The EXCLUDE_SYMLINKS tag can be used to select whether or not files or
-# directories that are symbolic links (a Unix file system feature) are excluded
-# from the input.
-# The default value is: NO.
-
-EXCLUDE_SYMLINKS = NO
-
-# If the value of the INPUT tag contains directories, you can use the
-# EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude
-# certain files from those directories.
-#
-# Note that the wildcards are matched against the file with absolute path, so to
-# exclude all test directories for example use the pattern */test/*
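-#
-# For example, following the pattern mentioned above (illustrative only):
-# EXCLUDE_PATTERNS = */test/*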
-
-EXCLUDE_PATTERNS =
-
-# The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names
-# (namespaces, classes, functions, etc.) that should be excluded from the
-# output. The symbol name can be a fully qualified name, a word, or if the
-# wildcard * is used, a substring. Examples: ANamespace, AClass,
-# AClass::ANamespace, ANamespace::*Test
-#
-# Note that the wildcards are matched against the file with absolute path, so to
-# exclude all test directories use the pattern */test/*
-
-EXCLUDE_SYMBOLS =
-
-# The EXAMPLE_PATH tag can be used to specify one or more files or directories
-# that contain example code fragments that are included (see the \include
-# command).
-
-EXAMPLE_PATH =
-
-# If the value of the EXAMPLE_PATH tag contains directories, you can use the
-# EXAMPLE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and
-# *.h) to filter out the source-files in the directories. If left blank all
-# files are included.
-
-EXAMPLE_PATTERNS =
-
-# If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be
-# searched for input files to be used with the \include or \dontinclude commands
-# irrespective of the value of the RECURSIVE tag.
-# The default value is: NO.
-
-EXAMPLE_RECURSIVE = NO
-
-# The IMAGE_PATH tag can be used to specify one or more files or directories
-# that contain images that are to be included in the documentation (see the
-# \image command).
-
-IMAGE_PATH =
-
-# The INPUT_FILTER tag can be used to specify a program that doxygen should
-# invoke to filter for each input file. Doxygen will invoke the filter program
-# by executing (via popen()) the command:
-#
-# <filter> <input-file>
-#
-# where <filter> is the value of the INPUT_FILTER tag, and <input-file> is the
-# name of an input file. Doxygen will then use the output that the filter
-# program writes to standard output. If FILTER_PATTERNS is specified, this tag
-# will be ignored.
-#
-# Note that the filter must not add or remove lines; it is applied before the
-# code is scanned, but not when the output code is generated. If lines are added
-# or removed, the anchors will not be placed correctly.
-
-INPUT_FILTER =
-
-# The FILTER_PATTERNS tag can be used to specify filters on a per file pattern
-# basis. Doxygen will compare the file name with each pattern and apply the
-# filter if there is a match. The filters are a list of the form: pattern=filter
-# (like *.cpp=my_cpp_filter). See INPUT_FILTER for further information on how
-# filters are used. If the FILTER_PATTERNS tag is empty or if none of the
-# patterns match the file name, INPUT_FILTER is applied.
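-#
-# For example (illustrative; my_cpp_filter is a hypothetical script on the
-# search path):
-# FILTER_PATTERNS = *.cpp=my_cpp_filter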
-
-FILTER_PATTERNS =
-
-# If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using
-# INPUT_FILTER) will also be used to filter the input files that are used for
-# producing the source files to browse (i.e. when SOURCE_BROWSER is set to YES).
-# The default value is: NO.
-
-FILTER_SOURCE_FILES = NO
-
-# The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file
-# pattern. A pattern will override the setting for FILTER_PATTERN (if any) and
-# it is also possible to disable source filtering for a specific pattern using
-# *.ext= (so without naming a filter).
-# This tag requires that the tag FILTER_SOURCE_FILES is set to YES.
-
-FILTER_SOURCE_PATTERNS =
-
-# If the USE_MDFILE_AS_MAINPAGE tag refers to the name of a markdown file that
-# is part of the input, its contents will be placed on the main page
-# (index.html). This can be useful if you have a project on, for instance, GitHub
-# and want to reuse the introduction page also for the doxygen output.
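-#
-# For example (illustrative; the file would also have to be listed in INPUT):
-# USE_MDFILE_AS_MAINPAGE = README.md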
-
-USE_MDFILE_AS_MAINPAGE =
-
-#---------------------------------------------------------------------------
-# Configuration options related to source browsing
-#---------------------------------------------------------------------------
-
-# If the SOURCE_BROWSER tag is set to YES then a list of source files will be
-# generated. Documented entities will be cross-referenced with these sources.
-#
-# Note: To get rid of all source code in the generated output, make sure that
-# also VERBATIM_HEADERS is set to NO.
-# The default value is: NO.
-
-SOURCE_BROWSER = NO
-
-# Setting the INLINE_SOURCES tag to YES will include the body of functions,
-# classes and enums directly into the documentation.
-# The default value is: NO.
-
-INLINE_SOURCES = NO
-
-# Setting the STRIP_CODE_COMMENTS tag to YES will instruct doxygen to hide any
-# special comment blocks from generated source code fragments. Normal C, C++ and
-# Fortran comments will always remain visible.
-# The default value is: YES.
-
-STRIP_CODE_COMMENTS = YES
-
-# If the REFERENCED_BY_RELATION tag is set to YES then for each documented
-# function all documented functions referencing it will be listed.
-# The default value is: NO.
-
-REFERENCED_BY_RELATION = NO
-
-# If the REFERENCES_RELATION tag is set to YES then for each documented function
-# all documented entities called/used by that function will be listed.
-# The default value is: NO.
-
-REFERENCES_RELATION = NO
-
-# If the REFERENCES_LINK_SOURCE tag is set to YES and SOURCE_BROWSER tag is set
-# to YES then the hyperlinks from functions in REFERENCES_RELATION and
-# REFERENCED_BY_RELATION lists will link to the source code. Otherwise they will
-# link to the documentation.
-# The default value is: YES.
-
-REFERENCES_LINK_SOURCE = YES
-
-# If SOURCE_TOOLTIPS is enabled (the default) then hovering a hyperlink in the
-# source code will show a tooltip with additional information such as prototype,
-# brief description and links to the definition and documentation. Since this
-# will make the HTML file larger and loading of large files a bit slower, you
-# can opt to disable this feature.
-# The default value is: YES.
-# This tag requires that the tag SOURCE_BROWSER is set to YES.
-
-SOURCE_TOOLTIPS = YES
-
-# If the USE_HTAGS tag is set to YES then the references to source code will
-# point to the HTML generated by the htags(1) tool instead of doxygen's built-in
-# source browser. The htags tool is part of GNU's global source tagging system
-# (see http://www.gnu.org/software/global/global.html). You will need version
-# 4.8.6 or higher.
-#
-# To use it do the following:
-# - Install the latest version of global
-# - Enable SOURCE_BROWSER and USE_HTAGS in the config file
-# - Make sure the INPUT points to the root of the source tree
-# - Run doxygen as normal
-#
-# Doxygen will invoke htags (and that will in turn invoke gtags), so these
-# tools must be available from the command line (i.e. in the search path).
-#
-# The result: instead of the source browser generated by doxygen, the links to
-# source code will now point to the output of htags.
-# The default value is: NO.
-# This tag requires that the tag SOURCE_BROWSER is set to YES.
-
-USE_HTAGS = NO
-
-# If the VERBATIM_HEADERS tag is set to YES then doxygen will generate a
-# verbatim copy of the header file for each class for which an include is
-# specified. Set to NO to disable this.
-# See also: Section \class.
-# The default value is: YES.
-
-VERBATIM_HEADERS = YES
-
-#---------------------------------------------------------------------------
-# Configuration options related to the alphabetical class index
-#---------------------------------------------------------------------------
-
-# If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index of all
-# compounds will be generated. Enable this if the project contains a lot of
-# classes, structs, unions or interfaces.
-# The default value is: YES.
-
-ALPHABETICAL_INDEX = YES
-
-# The COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns in
-# which the alphabetical index list will be split.
-# Minimum value: 1, maximum value: 20, default value: 5.
-# This tag requires that the tag ALPHABETICAL_INDEX is set to YES.
-
-COLS_IN_ALPHA_INDEX = 5
-
-# In case all classes in a project start with a common prefix, all classes will
-# be put under the same header in the alphabetical index. The IGNORE_PREFIX tag
-# can be used to specify a prefix (or a list of prefixes) that should be ignored
-# while generating the index headers.
-# This tag requires that the tag ALPHABETICAL_INDEX is set to YES.
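-#
-# For example (illustrative; would group grpc_-prefixed names under the letter
-# of the remainder):
-# IGNORE_PREFIX = grpc_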
-
-IGNORE_PREFIX =
-
-#---------------------------------------------------------------------------
-# Configuration options related to the HTML output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_HTML tag is set to YES, doxygen will generate HTML output
-# The default value is: YES.
-
-GENERATE_HTML = YES
-
-# The HTML_OUTPUT tag is used to specify where the HTML docs will be put. If a
-# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
-# it.
-# The default directory is: html.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_OUTPUT = html
-
-# The HTML_FILE_EXTENSION tag can be used to specify the file extension for each
-# generated HTML page (for example: .htm, .php, .asp).
-# The default value is: .html.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_FILE_EXTENSION = .html
-
-# The HTML_HEADER tag can be used to specify a user-defined HTML header file for
-# each generated HTML page. If the tag is left blank doxygen will generate a
-# standard header.
-#
-# To get valid HTML, the header file must include any scripts and style sheets
-# that doxygen needs, which depend on the configuration options used (e.g.
-# the setting GENERATE_TREEVIEW). It is highly recommended to start with a
-# default header using
-# doxygen -w html new_header.html new_footer.html new_stylesheet.css
-# YourConfigFile
-# and then modify the file new_header.html. See also section "Doxygen usage"
-# for information on how to generate the default header that doxygen normally
-# uses.
-# Note: The header is subject to change so you typically have to regenerate the
-# default header when upgrading to a newer version of doxygen. For a description
-# of the possible markers and block names see the documentation.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_HEADER =
-
-# The HTML_FOOTER tag can be used to specify a user-defined HTML footer for each
-# generated HTML page. If the tag is left blank doxygen will generate a standard
-# footer. See HTML_HEADER for more information on how to generate a default
-# footer and what special commands can be used inside the footer. See also
-# section "Doxygen usage" for information on how to generate the default footer
-# that doxygen normally uses.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_FOOTER =
-
-# The HTML_STYLESHEET tag can be used to specify a user-defined cascading style
-# sheet that is used by each HTML page. It can be used to fine-tune the look of
-# the HTML output. If left blank doxygen will generate a default style sheet.
-# See also section "Doxygen usage" for information on how to generate the style
-# sheet that doxygen normally uses.
-# Note: It is recommended to use HTML_EXTRA_STYLESHEET instead of this tag, as
-# it is more robust and this tag (HTML_STYLESHEET) will in the future become
-# obsolete.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_STYLESHEET =
-
-# The HTML_EXTRA_STYLESHEET tag can be used to specify additional user-defined
-# cascading style sheets that are included after the standard style sheets
-# created by doxygen. Using this option one can overrule certain style aspects.
-# This is preferred over using HTML_STYLESHEET since it does not replace the
-# standard style sheet and is therefore more robust against future updates.
-# Doxygen will copy the style sheet files to the output directory.
-# Note: The order of the extra style sheet files is of importance (e.g. the last
-# style sheet in the list overrules the setting of the previous ones in the
-# list). For an example see the documentation.
-# This tag requires that the tag GENERATE_HTML is set to YES.
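-#
-# For example (illustrative; custom.css is a hypothetical file):
-# HTML_EXTRA_STYLESHEET = custom.css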
-
-HTML_EXTRA_STYLESHEET =
-
-# The HTML_EXTRA_FILES tag can be used to specify one or more extra images or
-# other source files which should be copied to the HTML output directory. Note
-# that these files will be copied to the base HTML output directory. Use the
-# $relpath^ marker in the HTML_HEADER and/or HTML_FOOTER files to load these
-# files. In the HTML_STYLESHEET file, use the file name only. Also note that the
-# files will be copied as-is; there are no commands or markers available.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_EXTRA_FILES =
-
-# The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. Doxygen
-# will adjust the colors in the style sheet and background images according to
-# this color. Hue is specified as an angle on a colorwheel, see
-# http://en.wikipedia.org/wiki/Hue for more information. For instance the value
-# 0 represents red, 60 is yellow, 120 is green, 180 is cyan, 240 is blue, 300
-# purple, and 360 is red again.
-# Minimum value: 0, maximum value: 359, default value: 220.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_COLORSTYLE_HUE = 220
-
-# The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of the colors
-# in the HTML output. For a value of 0 the output will use grayscales only. A
-# value of 255 will produce the most vivid colors.
-# Minimum value: 0, maximum value: 255, default value: 100.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_COLORSTYLE_SAT = 100
-
-# The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to the
-# luminance component of the colors in the HTML output. Values below 100
-# gradually make the output lighter, whereas values above 100 make the output
-# darker. The value divided by 100 is the actual gamma applied, so 80 represents
-# a gamma of 0.8, the value 220 represents a gamma of 2.2, and 100 does not
-# change the gamma.
-# Minimum value: 40, maximum value: 240, default value: 80.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_COLORSTYLE_GAMMA = 80
-
-# If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML
-# page will contain the date and time when the page was generated. Setting this
-# to NO can help when comparing the output of multiple runs.
-# The default value is: YES.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_TIMESTAMP = YES
-
-# If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML
-# documentation will contain sections that can be hidden and shown after the
-# page has loaded.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_DYNAMIC_SECTIONS = NO
-
-# With HTML_INDEX_NUM_ENTRIES one can control the preferred number of entries
-# shown in the various tree structured indices initially; the user can expand
-# and collapse entries dynamically later on. Doxygen will expand the tree to
-# such a level that at most the specified number of entries are visible (unless
-# a fully collapsed tree already exceeds this amount). So setting the number of
-# entries to 1 will produce a fully collapsed tree by default. 0 is a special
-# value representing an infinite number of entries and will result in a fully
-# expanded tree by default.
-# Minimum value: 0, maximum value: 9999, default value: 100.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-HTML_INDEX_NUM_ENTRIES = 100
-
-# If the GENERATE_DOCSET tag is set to YES, additional index files will be
-# generated that can be used as input for Apple's Xcode 3 integrated development
-# environment (see: http://developer.apple.com/tools/xcode/), introduced with
-# OSX 10.5 (Leopard). To create a documentation set, doxygen will generate a
-# Makefile in the HTML output directory. Running make will produce the docset in
-# that directory and running make install will install the docset in
-# ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at
-# startup. See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html
-# for more information.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-GENERATE_DOCSET = NO
-
-# This tag determines the name of the docset feed. A documentation feed provides
-# an umbrella under which multiple documentation sets from a single provider
-# (such as a company or product suite) can be grouped.
-# The default value is: Doxygen generated docs.
-# This tag requires that the tag GENERATE_DOCSET is set to YES.
-
-DOCSET_FEEDNAME = "Doxygen generated docs"
-
-# This tag specifies a string that should uniquely identify the documentation
-# set bundle. This should be a reverse domain-name style string, e.g.
-# com.mycompany.MyDocSet. Doxygen will append .docset to the name.
-# The default value is: org.doxygen.Project.
-# This tag requires that the tag GENERATE_DOCSET is set to YES.
-
-DOCSET_BUNDLE_ID = org.doxygen.Project
-
-# The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely identify
-# the documentation publisher. This should be a reverse domain-name style
-# string, e.g. com.mycompany.MyDocSet.documentation.
-# The default value is: org.doxygen.Publisher.
-# This tag requires that the tag GENERATE_DOCSET is set to YES.
-
-DOCSET_PUBLISHER_ID = org.doxygen.Publisher
-
-# The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher.
-# The default value is: Publisher.
-# This tag requires that the tag GENERATE_DOCSET is set to YES.
-
-DOCSET_PUBLISHER_NAME = Publisher
-
-# If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three
-# additional HTML index files: index.hhp, index.hhc, and index.hhk. The
-# index.hhp is a project file that can be read by Microsoft's HTML Help Workshop
-# (see: http://www.microsoft.com/en-us/download/details.aspx?id=21138) on
-# Windows.
-#
-# The HTML Help Workshop contains a compiler that can convert all HTML output
-# generated by doxygen into a single compiled HTML file (.chm). Compiled HTML
-# files are now used as the Windows 98 help format, and will replace the old
-# Windows help format (.hlp) on all Windows platforms in the future. Compressed
-# HTML files also contain an index, a table of contents, and you can search for
-# words in the documentation. The HTML workshop also contains a viewer for
-# compressed HTML files.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-GENERATE_HTMLHELP = NO
-
-# The CHM_FILE tag can be used to specify the file name of the resulting .chm
-# file. You can add a path in front of the file if the result should not be
-# written to the html output directory.
-# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
-
-CHM_FILE =
-
-# The HHC_LOCATION tag can be used to specify the location (absolute path
-# including file name) of the HTML help compiler (hhc.exe). If non-empty,
-# doxygen will try to run the HTML help compiler on the generated index.hhp.
-# The file has to be specified with full path.
-# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
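-#
-# For example (illustrative; a typical HTML Help Workshop install location):
-# HHC_LOCATION = "C:/Program Files (x86)/HTML Help Workshop/hhc.exe"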
-
-HHC_LOCATION =
-
-# The GENERATE_CHI flag controls whether a separate .chi index file is generated
-# (YES) or included in the master .chm file (NO).
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
-
-GENERATE_CHI = NO
-
-# The CHM_INDEX_ENCODING is used to encode HtmlHelp index (hhk), content (hhc)
-# and project file content.
-# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
-
-CHM_INDEX_ENCODING =
-
-# The BINARY_TOC flag controls whether a binary table of contents is generated
-# (YES) or a normal table of contents (NO) in the .chm file. Furthermore it
-# enables the Previous and Next buttons.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
-
-BINARY_TOC = NO
-
-# The TOC_EXPAND flag can be set to YES to add extra items for group members to
-# the table of contents of the HTML help documentation and to the tree view.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTMLHELP is set to YES.
-
-TOC_EXPAND = NO
-
-# If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and
-# QHP_VIRTUAL_FOLDER are set, an additional index file will be generated that
-# can be used as input for Qt's qhelpgenerator to generate a Qt Compressed Help
-# (.qch) of the generated HTML documentation.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-GENERATE_QHP = NO
-
-# If the QHG_LOCATION tag is specified, the QCH_FILE tag can be used to specify
-# the file name of the resulting .qch file. The path specified is relative to
-# the HTML output folder.
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QCH_FILE =
-
-# The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help
-# Project output. For more information please see Qt Help Project / Namespace
-# (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#namespace).
-# The default value is: org.doxygen.Project.
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QHP_NAMESPACE = org.doxygen.Project
-
-# The QHP_VIRTUAL_FOLDER tag specifies the virtual folder to use when generating
-# Help Project output. For more information please see Qt Help Project / Virtual
-# Folders (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#virtual-
-# folders).
-# The default value is: doc.
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QHP_VIRTUAL_FOLDER = doc
-
-# If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom
-# filter to add. For more information please see Qt Help Project / Custom
-# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom-
-# filters).
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QHP_CUST_FILTER_NAME =
-
-# The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the
-# custom filter to add. For more information please see Qt Help Project / Custom
-# Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom-
-# filters).
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QHP_CUST_FILTER_ATTRS =
-
-# The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this
-# project's filter section matches. See Qt Help Project / Filter Attributes
-# (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#filter-attributes).
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QHP_SECT_FILTER_ATTRS =
-
-# The QHG_LOCATION tag can be used to specify the location of Qt's
-# qhelpgenerator. If non-empty doxygen will try to run qhelpgenerator on the
-# generated .qhp file.
-# This tag requires that the tag GENERATE_QHP is set to YES.
-
-QHG_LOCATION =
-
-# If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files will be
-# generated; together with the HTML files, they form an Eclipse help plugin. To
-# install this plugin and make it available under the help contents menu in
-# Eclipse, the contents of the directory containing the HTML and XML files need
-# to be copied into the plugins directory of Eclipse. The name of the directory
-# within the plugins directory should be the same as the ECLIPSE_DOC_ID value.
-# After copying, Eclipse needs to be restarted before the help appears.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-GENERATE_ECLIPSEHELP = NO
-
-# A unique identifier for the Eclipse help plugin. When installing the plugin
-# the directory name containing the HTML and XML files should also have this
-# name. Each documentation set should have its own identifier.
-# The default value is: org.doxygen.Project.
-# This tag requires that the tag GENERATE_ECLIPSEHELP is set to YES.
-
-ECLIPSE_DOC_ID = org.doxygen.Project
-
-# If you want full control over the layout of the generated HTML pages it might
-# be necessary to disable the index and replace it with your own. The
-# DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs) at top
-# of each HTML page. A value of NO enables the index and the value YES disables
-# it. Since the tabs in the index contain the same information as the navigation
-# tree, you can set this option to YES if you also set GENERATE_TREEVIEW to YES.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-DISABLE_INDEX = NO
-
-# The GENERATE_TREEVIEW tag is used to specify whether a tree-like index
-# structure should be generated to display hierarchical information. If the tag
-# value is set to YES, a side panel will be generated containing a tree-like
-# index structure (just like the one that is generated for HTML Help). For this
-# to work a browser that supports JavaScript, DHTML, CSS and frames is required
-# (i.e. any modern browser). Windows users are probably better off using the
-# HTML help feature. Via custom style sheets (see HTML_EXTRA_STYLESHEET) one can
-# further fine-tune the look of the index. As an example, the default style
-# sheet generated by doxygen has an example that shows how to put an image at
-# the root of the tree instead of the PROJECT_NAME. Since the tree basically has
-# the same information as the tab index, you could consider setting
-# DISABLE_INDEX to YES when enabling this option.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-GENERATE_TREEVIEW = NO
-
-# The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that
-# doxygen will group on one line in the generated HTML documentation.
-#
-# Note that a value of 0 will completely suppress the enum values from appearing
-# in the overview section.
-# Minimum value: 0, maximum value: 20, default value: 4.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-ENUM_VALUES_PER_LINE = 4
-
-# If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be used
-# to set the initial width (in pixels) of the frame in which the tree is shown.
-# Minimum value: 0, maximum value: 1500, default value: 250.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-TREEVIEW_WIDTH = 250
-
-# If the EXT_LINKS_IN_WINDOW option is set to YES, doxygen will open links to
-# external symbols imported via tag files in a separate window.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-EXT_LINKS_IN_WINDOW = NO
-
-# Use this tag to change the font size of LaTeX formulas included as images in
-# the HTML documentation. When you change the font size after a successful
-# doxygen run you need to manually remove any form_*.png images from the HTML
-# output directory to force them to be regenerated.
-# Minimum value: 8, maximum value: 50, default value: 10.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-FORMULA_FONTSIZE = 10
-
-# Use the FORMULA_TRANSPARENT tag to determine whether or not the images
-# generated for formulas are transparent PNGs. Transparent PNGs are not
-# supported properly for IE 6.0, but are supported on all modern browsers.
-#
-# Note that when changing this option you need to delete any form_*.png files in
-# the HTML output directory before the changes have effect.
-# The default value is: YES.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-FORMULA_TRANSPARENT = YES
-
-# Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see
-# http://www.mathjax.org) which uses client side Javascript for the rendering
-# instead of using pre-rendered bitmaps. Use this if you do not have LaTeX
-# installed or if you want the formulas to look prettier in the HTML output. When
-# enabled you may also need to install MathJax separately and configure the path
-# to it using the MATHJAX_RELPATH option.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-USE_MATHJAX = NO
-
-# When MathJax is enabled you can set the default output format to be used for
-# the MathJax output. See the MathJax site (see:
-# http://docs.mathjax.org/en/latest/output.html) for more details.
-# Possible values are: HTML-CSS (which is slower, but has the best
-# compatibility), NativeMML (i.e. MathML) and SVG.
-# The default value is: HTML-CSS.
-# This tag requires that the tag USE_MATHJAX is set to YES.
-
-MATHJAX_FORMAT = HTML-CSS
-
-# When MathJax is enabled you need to specify the location relative to the HTML
-# output directory using the MATHJAX_RELPATH option. The destination directory
-# should contain the MathJax.js script. For instance, if the mathjax directory
-# is located at the same level as the HTML output directory, then
-# MATHJAX_RELPATH should be ../mathjax. The default value points to the MathJax
-# Content Delivery Network so you can quickly see the result without installing
-# MathJax. However, it is strongly recommended to install a local copy of
-# MathJax from http://www.mathjax.org before deployment.
-# The default value is: http://cdn.mathjax.org/mathjax/latest.
-# This tag requires that the tag USE_MATHJAX is set to YES.
-
-MATHJAX_RELPATH = http://cdn.mathjax.org/mathjax/latest
-
-# The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax
-# extension names that should be enabled during MathJax rendering. For example
-# MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols
-# This tag requires that the tag USE_MATHJAX is set to YES.
-
-MATHJAX_EXTENSIONS =
-
-# The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces
-# of code that will be used on startup of the MathJax code. See the MathJax site
-# (see: http://docs.mathjax.org/en/latest/output.html) for more details. For an
-# example see the documentation.
-# This tag requires that the tag USE_MATHJAX is set to YES.
-
-MATHJAX_CODEFILE =
-
-# When the SEARCHENGINE tag is enabled doxygen will generate a search box for
-# the HTML output. The underlying search engine uses javascript and DHTML and
-# should work on any modern browser. Note that when using HTML help
-# (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets (GENERATE_DOCSET)
-# there is already a search function so this one should typically be disabled.
-# For large projects the javascript based search engine can be slow; in that
-# case enabling SERVER_BASED_SEARCH may provide a better solution. It is
-# possible to search using the keyboard; to jump to the search box use
-# <access key> + S
-# (what the <access key> is depends on the OS and browser, but it is typically
-# <CTRL>, <ALT>/<option>, or both). Inside the search box use the <cursor down
-# key> to jump into the search results window, the results can be navigated
-# using the <cursor keys>. Press <Enter> to select an item or <escape> to cancel
-# the search. The filter options can be selected when the cursor is inside the
-# search box by pressing <Shift>+<cursor down>. Also here use the <cursor keys>
-# to select a filter and <Enter> or <escape> to activate or cancel the filter
-# option.
-# The default value is: YES.
-# This tag requires that the tag GENERATE_HTML is set to YES.
-
-SEARCHENGINE = YES
-
-# When the SERVER_BASED_SEARCH tag is enabled the search engine will be
-# implemented using a web server instead of a web client using Javascript. There
-# are two flavors of web server based searching depending on the EXTERNAL_SEARCH
-# setting. When disabled, doxygen will generate a PHP script for searching and
-# an index file used by the script. When EXTERNAL_SEARCH is enabled the indexing
-# and searching needs to be provided by external tools. See the section
-# "External Indexing and Searching" for details.
-# The default value is: NO.
-# This tag requires that the tag SEARCHENGINE is set to YES.
-
-SERVER_BASED_SEARCH = NO
-
-# When EXTERNAL_SEARCH tag is enabled doxygen will no longer generate the PHP
-# script for searching. Instead the search results are written to an XML file
-# which needs to be processed by an external indexer. Doxygen will invoke an
-# external search engine pointed to by the SEARCHENGINE_URL option to obtain the
-# search results.
-#
-# Doxygen ships with an example indexer (doxyindexer) and search engine
-# (doxysearch.cgi) which are based on the open source search engine library
-# Xapian (see: http://xapian.org/).
-#
-# See the section "External Indexing and Searching" for details.
-# The default value is: NO.
-# This tag requires that the tag SEARCHENGINE is set to YES.
-
-EXTERNAL_SEARCH = NO
-
-# The SEARCHENGINE_URL should point to a search engine hosted by a web server
-# which will return the search results when EXTERNAL_SEARCH is enabled.
-#
-# Doxygen ships with an example indexer (doxyindexer) and search engine
-# (doxysearch.cgi) which are based on the open source search engine library
-# Xapian (see: http://xapian.org/). See the section "External Indexing and
-# Searching" for details.
-# This tag requires that the tag SEARCHENGINE is set to YES.
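-#
-# For example (illustrative URL for a doxysearch.cgi deployment):
-# SEARCHENGINE_URL = http://example.com/cgi-bin/doxysearch.cgi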
-
-SEARCHENGINE_URL =
-
-# When SERVER_BASED_SEARCH and EXTERNAL_SEARCH are both enabled the unindexed
-# search data is written to a file for indexing by an external tool. With the
-# SEARCHDATA_FILE tag the name of this file can be specified.
-# The default file is: searchdata.xml.
-# This tag requires that the tag SEARCHENGINE is set to YES.
-
-SEARCHDATA_FILE = searchdata.xml
-
-# When SERVER_BASED_SEARCH and EXTERNAL_SEARCH are both enabled the
-# EXTERNAL_SEARCH_ID tag can be used as an identifier for the project. This is
-# useful in combination with EXTRA_SEARCH_MAPPINGS to search through multiple
-# projects and redirect the results back to the right project.
-# This tag requires that the tag SEARCHENGINE is set to YES.
-
-EXTERNAL_SEARCH_ID =
-
-# The EXTRA_SEARCH_MAPPINGS tag can be used to enable searching through doxygen
-# projects other than the one defined by this configuration file, but that are
-# all added to the same external search index. Each project needs to have a
-# unique id set via EXTERNAL_SEARCH_ID. The search mapping then maps the id
-# to a relative location where the documentation can be found. The format is:
-# EXTRA_SEARCH_MAPPINGS = tagname1=loc1 tagname2=loc2 ...
-# This tag requires that the tag SEARCHENGINE is set to YES.
-
-EXTRA_SEARCH_MAPPINGS =
-
-#---------------------------------------------------------------------------
-# Configuration options related to the LaTeX output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_LATEX tag is set to YES, doxygen will generate LaTeX output.
-# The default value is: YES.
-
-GENERATE_LATEX = NO
-
-# The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. If a
-# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
-# it.
-# The default directory is: latex.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_OUTPUT = latex
-
-# The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be
-# invoked.
-#
-# Note that when enabling USE_PDFLATEX this option is only used for generating
-# bitmaps for formulas in the HTML output, but not in the Makefile that is
-# written to the output directory.
-# The default file is: latex.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_CMD_NAME = latex
-
-# The MAKEINDEX_CMD_NAME tag can be used to specify the command name to generate
-# index for LaTeX.
-# The default file is: makeindex.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-MAKEINDEX_CMD_NAME = makeindex
-
-# If the COMPACT_LATEX tag is set to YES, doxygen generates more compact LaTeX
-# documents. This may be useful for small projects and may help to save some
-# trees in general.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-COMPACT_LATEX = NO
-
-# The PAPER_TYPE tag can be used to set the paper type that is used by the
-# printer.
-# Possible values are: a4 (210 x 297 mm), letter (8.5 x 11 inches), legal (8.5 x
-# 14 inches) and executive (7.25 x 10.5 inches).
-# The default value is: a4.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-PAPER_TYPE = a4
-
-# The EXTRA_PACKAGES tag can be used to specify one or more LaTeX package names
-# that should be included in the LaTeX output. To get the times font for
-# instance you can specify
-# EXTRA_PACKAGES=times
-# If left blank no extra packages will be included.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-EXTRA_PACKAGES =
-
-# The LATEX_HEADER tag can be used to specify a personal LaTeX header for the
-# generated LaTeX document. The header should contain everything until the first
-# chapter. If it is left blank doxygen will generate a standard header. See
-# section "Doxygen usage" for information on how to let doxygen write the
-# default header to a separate file.
-#
-# Note: Only use a user-defined header if you know what you are doing! The
-# following commands have a special meaning inside the header: $title,
-# $datetime, $date, $doxygenversion, $projectname, $projectnumber,
-# $projectbrief, $projectlogo. Doxygen will replace $title with the empty
-# string, for the replacement values of the other commands the user is referred
-# to HTML_HEADER.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_HEADER =
-
-# The LATEX_FOOTER tag can be used to specify a personal LaTeX footer for the
-# generated LaTeX document. The footer should contain everything after the last
-# chapter. If it is left blank doxygen will generate a standard footer. See
-# LATEX_HEADER for more information on how to generate a default footer and what
-# special commands can be used inside the footer.
-#
-# Note: Only use a user-defined footer if you know what you are doing!
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_FOOTER =
-
-# The LATEX_EXTRA_STYLESHEET tag can be used to specify additional user-defined
-# LaTeX style sheets that are included after the standard style sheets created
-# by doxygen. Using this option one can overrule certain style aspects. Doxygen
-# will copy the style sheet files to the output directory.
-# Note: The order of the extra style sheet files is of importance (e.g. the last
-# style sheet in the list overrules the setting of the previous ones in the
-# list).
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_EXTRA_STYLESHEET =
-
-# The LATEX_EXTRA_FILES tag can be used to specify one or more extra images or
-# other source files which should be copied to the LATEX_OUTPUT output
-# directory. Note that the files will be copied as-is; there are no commands or
-# markers available.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_EXTRA_FILES =
-
-# If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated is
-# prepared for conversion to PDF (using ps2pdf or pdflatex). The PDF file will
-# contain links (just like the HTML output) instead of page references. This
-# makes the output suitable for online browsing using a PDF viewer.
-# The default value is: YES.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-PDF_HYPERLINKS = YES
-
-# If the USE_PDFLATEX tag is set to YES, doxygen will use pdflatex to generate
-# the PDF file directly from the LaTeX files. Set this option to YES to get
-# higher quality PDF documentation.
-# The default value is: YES.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-USE_PDFLATEX = YES
-
-# If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \batchmode
-# command to the generated LaTeX files. This will instruct LaTeX to keep running
-# if errors occur, instead of asking the user for help. This option is also used
-# when generating formulas in HTML.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_BATCHMODE = NO
-
-# If the LATEX_HIDE_INDICES tag is set to YES then doxygen will not include the
-# index chapters (such as File Index, Compound Index, etc.) in the output.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_HIDE_INDICES = NO
-
-# If the LATEX_SOURCE_CODE tag is set to YES then doxygen will include source
-# code with syntax highlighting in the LaTeX output.
-#
-# Note that which sources are shown also depends on other settings such as
-# SOURCE_BROWSER.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_SOURCE_CODE = NO
-
-# The LATEX_BIB_STYLE tag can be used to specify the style to use for the
-# bibliography, e.g. plainnat, or ieeetr. See
-# http://en.wikipedia.org/wiki/BibTeX and \cite for more info.
-# The default value is: plain.
-# This tag requires that the tag GENERATE_LATEX is set to YES.
-
-LATEX_BIB_STYLE = plain
-
-#---------------------------------------------------------------------------
-# Configuration options related to the RTF output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_RTF tag is set to YES, doxygen will generate RTF output. The
-# RTF output is optimized for Word 97 and may not look too pretty with other RTF
-# readers/editors.
-# The default value is: NO.
-
-GENERATE_RTF = NO
-
-# The RTF_OUTPUT tag is used to specify where the RTF docs will be put. If a
-# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
-# it.
-# The default directory is: rtf.
-# This tag requires that the tag GENERATE_RTF is set to YES.
-
-RTF_OUTPUT = rtf
-
-# If the COMPACT_RTF tag is set to YES, doxygen generates more compact RTF
-# documents. This may be useful for small projects and may help to save some
-# trees in general.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_RTF is set to YES.
-
-COMPACT_RTF = NO
-
-# If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated will
-# contain hyperlink fields. The RTF file will contain links (just like the HTML
-# output) instead of page references. This makes the output suitable for online
-# browsing using Word or some other Word compatible readers that support those
-# fields.
-#
-# Note: WordPad (write) and others do not support links.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_RTF is set to YES.
-
-RTF_HYPERLINKS = NO
-
-# Load stylesheet definitions from file. Syntax is similar to doxygen's config
-# file, i.e. a series of assignments. You only have to provide replacements,
-# missing definitions are set to their default value.
-#
-# See also section "Doxygen usage" for information on how to generate the
-# default style sheet that doxygen normally uses.
-# This tag requires that the tag GENERATE_RTF is set to YES.
-
-RTF_STYLESHEET_FILE =
-
-# Set optional variables used in the generation of an RTF document. Syntax is
-# similar to doxygen's config file. A template extensions file can be generated
-# using doxygen -e rtf extensionFile.
-# This tag requires that the tag GENERATE_RTF is set to YES.
-
-RTF_EXTENSIONS_FILE =
-
-# If the RTF_SOURCE_CODE tag is set to YES then doxygen will include source code
-# with syntax highlighting in the RTF output.
-#
-# Note that which sources are shown also depends on other settings such as
-# SOURCE_BROWSER.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_RTF is set to YES.
-
-RTF_SOURCE_CODE = NO
-
-#---------------------------------------------------------------------------
-# Configuration options related to the man page output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_MAN tag is set to YES, doxygen will generate man pages for
-# classes and files.
-# The default value is: NO.
-
-GENERATE_MAN = NO
-
-# The MAN_OUTPUT tag is used to specify where the man pages will be put. If a
-# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
-# it. A directory man3 will be created inside the directory specified by
-# MAN_OUTPUT.
-# The default directory is: man.
-# This tag requires that the tag GENERATE_MAN is set to YES.
-
-MAN_OUTPUT = man
-
-# The MAN_EXTENSION tag determines the extension that is added to the generated
-# man pages. In case the manual section does not start with a number, the number
-# 3 is prepended. The dot (.) at the beginning of the MAN_EXTENSION tag is
-# optional.
-# The default value is: .3.
-# This tag requires that the tag GENERATE_MAN is set to YES.
-
-MAN_EXTENSION = .3
-
-# The MAN_SUBDIR tag determines the name of the directory created within
-# MAN_OUTPUT in which the man pages are placed. It defaults to man followed by
-# MAN_EXTENSION with the initial . removed.
-# This tag requires that the tag GENERATE_MAN is set to YES.
-
-MAN_SUBDIR =
-
-# If the MAN_LINKS tag is set to YES and doxygen generates man output, then it
-# will generate one additional man file for each entity documented in the real
-# man page(s). These additional files only source the real man page, but without
-# them the man command would be unable to find the correct page.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_MAN is set to YES.
-
-MAN_LINKS = NO
-
-#---------------------------------------------------------------------------
-# Configuration options related to the XML output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_XML tag is set to YES, doxygen will generate an XML file that
-# captures the structure of the code including all documentation.
-# The default value is: NO.
-
-GENERATE_XML = NO
-
-# The XML_OUTPUT tag is used to specify where the XML pages will be put. If a
-# relative path is entered the value of OUTPUT_DIRECTORY will be put in front of
-# it.
-# The default directory is: xml.
-# This tag requires that the tag GENERATE_XML is set to YES.
-
-XML_OUTPUT = xml
-
-# If the XML_PROGRAMLISTING tag is set to YES, doxygen will dump the program
-# listings (including syntax highlighting and cross-referencing information) to
-# the XML output. Note that enabling this will significantly increase the size
-# of the XML output.
-# The default value is: YES.
-# This tag requires that the tag GENERATE_XML is set to YES.
-
-XML_PROGRAMLISTING = YES
-
-#---------------------------------------------------------------------------
-# Configuration options related to the DOCBOOK output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_DOCBOOK tag is set to YES, doxygen will generate Docbook files
-# that can be used to generate PDF.
-# The default value is: NO.
-
-GENERATE_DOCBOOK = NO
-
-# The DOCBOOK_OUTPUT tag is used to specify where the Docbook pages will be put.
-# If a relative path is entered the value of OUTPUT_DIRECTORY will be put in
-# front of it.
-# The default directory is: docbook.
-# This tag requires that the tag GENERATE_DOCBOOK is set to YES.
-
-DOCBOOK_OUTPUT = docbook
-
-# If the DOCBOOK_PROGRAMLISTING tag is set to YES, doxygen will include the
-# program listings (including syntax highlighting and cross-referencing
-# information) to the DOCBOOK output. Note that enabling this will significantly
-# increase the size of the DOCBOOK output.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_DOCBOOK is set to YES.
-
-DOCBOOK_PROGRAMLISTING = NO
-
-#---------------------------------------------------------------------------
-# Configuration options for the AutoGen Definitions output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_AUTOGEN_DEF tag is set to YES, doxygen will generate an
-# AutoGen Definitions (see http://autogen.sf.net) file that captures the
-# structure of the code including all documentation. Note that this feature is
-# still experimental and incomplete at the moment.
-# The default value is: NO.
-
-GENERATE_AUTOGEN_DEF = NO
-
-#---------------------------------------------------------------------------
-# Configuration options related to the Perl module output
-#---------------------------------------------------------------------------
-
-# If the GENERATE_PERLMOD tag is set to YES, doxygen will generate a Perl module
-# file that captures the structure of the code including all documentation.
-#
-# Note that this feature is still experimental and incomplete at the moment.
-# The default value is: NO.
-
-GENERATE_PERLMOD = NO
-
-# If the PERLMOD_LATEX tag is set to YES, doxygen will generate the necessary
-# Makefile rules, Perl scripts and LaTeX code to be able to generate PDF and DVI
-# output from the Perl module output.
-# The default value is: NO.
-# This tag requires that the tag GENERATE_PERLMOD is set to YES.
-
-PERLMOD_LATEX = NO
-
-# If the PERLMOD_PRETTY tag is set to YES, the Perl module output will be nicely
-# formatted so it can be parsed by a human reader. This is useful if you want to
-# understand what is going on. On the other hand, if this tag is set to NO, the
-# size of the Perl module output will be much smaller and Perl will parse it
-# just the same.
-# The default value is: YES.
-# This tag requires that the tag GENERATE_PERLMOD is set to YES.
-
-PERLMOD_PRETTY = YES
-
-# The names of the make variables in the generated doxyrules.make file are
-# prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. This is useful
-# so different doxyrules.make files included by the same Makefile don't
-# overwrite each other's variables.
-# This tag requires that the tag GENERATE_PERLMOD is set to YES.
-
-PERLMOD_MAKEVAR_PREFIX =
-
-#---------------------------------------------------------------------------
-# Configuration options related to the preprocessor
-#---------------------------------------------------------------------------
-
-# If the ENABLE_PREPROCESSING tag is set to YES, doxygen will evaluate all
-# C-preprocessor directives found in the sources and include files.
-# The default value is: YES.
-
-ENABLE_PREPROCESSING = YES
-
-# If the MACRO_EXPANSION tag is set to YES, doxygen will expand all macro names
-# in the source code. If set to NO, only conditional compilation will be
-# performed. Macro expansion can be done in a controlled way by setting
-# EXPAND_ONLY_PREDEF to YES.
-# The default value is: NO.
-# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
-
-MACRO_EXPANSION = YES
-
-# If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES then
-# the macro expansion is limited to the macros specified with the PREDEFINED and
-# EXPAND_AS_DEFINED tags.
-# The default value is: NO.
-# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
-
-EXPAND_ONLY_PREDEF = NO
-
-# If the SEARCH_INCLUDES tag is set to YES, the include files in the
-# INCLUDE_PATH will be searched if a #include is found.
-# The default value is: YES.
-# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
-
-SEARCH_INCLUDES = YES
-
-# The INCLUDE_PATH tag can be used to specify one or more directories that
-# contain include files that are not input files but should be processed by the
-# preprocessor.
-# This tag requires that the tag SEARCH_INCLUDES is set to YES.
-
-INCLUDE_PATH =
-
-# You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard
-# patterns (like *.h and *.hpp) to filter out the header-files in the
-# directories. If left blank, the patterns specified with FILE_PATTERNS will be
-# used.
-# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
-
-INCLUDE_FILE_PATTERNS =
-
-# The PREDEFINED tag can be used to specify one or more macro names that are
-# defined before the preprocessor is started (similar to the -D option of e.g.
-# gcc). The argument of the tag is a list of macros of the form: name or
-# name=definition (no spaces). If the definition and the "=" are omitted, "=1"
-# is assumed. To prevent a macro definition from being undefined via #undef or
-# recursively expanded use the := operator instead of the = operator.
-# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
-
-PREDEFINED = GRPC_FINAL= GRPC_OVERIDE=
-
-# If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then this
-# tag can be used to specify a list of macro names that should be expanded. The
-# macro definition that is found in the sources will be used. Use the PREDEFINED
-# tag if you want to use a different macro definition that overrules the
-# definition found in the source code.
-# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
-
-EXPAND_AS_DEFINED =
-
-# If the SKIP_FUNCTION_MACROS tag is set to YES then doxygen's preprocessor will
-# remove all references to function-like macros that are alone on a line, have
-# an all uppercase name, and do not end with a semicolon. Such function macros
-# are typically used for boiler-plate code, and will confuse the parser if not
-# removed.
-# The default value is: YES.
-# This tag requires that the tag ENABLE_PREPROCESSING is set to YES.
-
-SKIP_FUNCTION_MACROS = YES
-
-#---------------------------------------------------------------------------
-# Configuration options related to external references
-#---------------------------------------------------------------------------
-
-# The TAGFILES tag can be used to specify one or more tag files. For each tag
-# file the location of the external documentation should be added. The format of
-# a tag file without this location is as follows:
-# TAGFILES = file1 file2 ...
-# Adding location for the tag files is done as follows:
-# TAGFILES = file1=loc1 "file2 = loc2" ...
-# where loc1 and loc2 can be relative or absolute paths or URLs. See the
-# section "Linking to external documentation" for more information about the use
-# of tag files.
-# Note: Each tag file must have a unique name (where the name does NOT include
-# the path). If a tag file is not located in the directory in which doxygen is
-# run, you must also specify the path to the tagfile here.
-
-TAGFILES =
-
-# When a file name is specified after GENERATE_TAGFILE, doxygen will create a
-# tag file that is based on the input files it reads. See section "Linking to
-# external documentation" for more information about the usage of tag files.
-
-GENERATE_TAGFILE =
-
-# If the ALLEXTERNALS tag is set to YES, all external class will be listed in
-# the class index. If set to NO, only the inherited external classes will be
-# listed.
-# The default value is: NO.
-
-ALLEXTERNALS = NO
-
-# If the EXTERNAL_GROUPS tag is set to YES, all external groups will be listed
-# in the modules index. If set to NO, only the current project's groups will be
-# listed.
-# The default value is: YES.
-
-EXTERNAL_GROUPS = YES
-
-# If the EXTERNAL_PAGES tag is set to YES, all external pages will be listed in
-# the related pages index. If set to NO, only the current project's pages will
-# be listed.
-# The default value is: YES.
-
-EXTERNAL_PAGES = YES
-
-# The PERL_PATH should be the absolute path and name of the perl script
-# interpreter (i.e. the result of 'which perl').
-# The default file (with absolute path) is: /usr/bin/perl.
-
-PERL_PATH = /usr/bin/perl
-
-#---------------------------------------------------------------------------
-# Configuration options related to the dot tool
-#---------------------------------------------------------------------------
-
-# If the CLASS_DIAGRAMS tag is set to YES, doxygen will generate a class diagram
-# (in HTML and LaTeX) for classes with base or super classes. Setting the tag to
-# NO turns the diagrams off. Note that this option also works with HAVE_DOT
-# disabled, but it is recommended to install and use dot, since it yields more
-# powerful graphs.
-# The default value is: YES.
-
-CLASS_DIAGRAMS = YES
-
-# You can define message sequence charts within doxygen comments using the \msc
-# command. Doxygen will then run the mscgen tool (see:
-# http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the
-# documentation. The MSCGEN_PATH tag allows you to specify the directory where
-# the mscgen tool resides. If left empty the tool is assumed to be found in the
-# default search path.
-
-MSCGEN_PATH =
-
-# You can include diagrams made with dia in doxygen documentation. Doxygen will
-# then run dia to produce the diagram and insert it in the documentation. The
-# DIA_PATH tag allows you to specify the directory where the dia binary resides.
-# If left empty dia is assumed to be found in the default search path.
-
-DIA_PATH =
-
-# If set to YES the inheritance and collaboration graphs will hide inheritance
-# and usage relations if the target is undocumented or is not a class.
-# The default value is: YES.
-
-HIDE_UNDOC_RELATIONS = YES
-
-# If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is
-# available from the path. This tool is part of Graphviz (see:
-# http://www.graphviz.org/), a graph visualization toolkit from AT&T and Lucent
-# Bell Labs. The other options in this section have no effect if this option is
-# set to NO
-# The default value is: NO.
-
-HAVE_DOT = YES
-
-# The DOT_NUM_THREADS specifies the number of dot invocations doxygen is allowed
-# to run in parallel. When set to 0 doxygen will base this on the number of
-# processors available in the system. You can set it explicitly to a value
-# larger than 0 to get control over the balance between CPU load and processing
-# speed.
-# Minimum value: 0, maximum value: 32, default value: 0.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_NUM_THREADS = 0
-
-# When you want a differently looking font in the dot files that doxygen
-# generates you can specify the font name using DOT_FONTNAME. You need to make
-# sure dot is able to find the font, which can be done by putting it in a
-# standard location or by setting the DOTFONTPATH environment variable or by
-# setting DOT_FONTPATH to the directory containing the font.
-# The default value is: Helvetica.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_FONTNAME = Helvetica
-
-# The DOT_FONTSIZE tag can be used to set the size (in points) of the font of
-# dot graphs.
-# Minimum value: 4, maximum value: 24, default value: 10.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_FONTSIZE = 10
-
-# By default doxygen will tell dot to use the default font as specified with
-# DOT_FONTNAME. If you specify a different font using DOT_FONTNAME you can set
-# the path where dot can find it using this tag.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_FONTPATH =
-
-# If the CLASS_GRAPH tag is set to YES then doxygen will generate a graph for
-# each documented class showing the direct and indirect inheritance relations.
-# Setting this tag to YES will force the CLASS_DIAGRAMS tag to NO.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-CLASS_GRAPH = NO
-
-# If the COLLABORATION_GRAPH tag is set to YES then doxygen will generate a
-# graph for each documented class showing the direct and indirect implementation
-# dependencies (inheritance, containment, and class references variables) of the
-# class with other documented classes.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-COLLABORATION_GRAPH = NO
-
-# If the GROUP_GRAPHS tag is set to YES then doxygen will generate a graph for
-# groups, showing the direct groups dependencies.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-GROUP_GRAPHS = NO
-
-# If the UML_LOOK tag is set to YES, doxygen will generate inheritance and
-# collaboration diagrams in a style similar to the OMG's Unified Modeling
-# Language.
-# The default value is: NO.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-UML_LOOK = NO
-
-# If the UML_LOOK tag is enabled, the fields and methods are shown inside the
-# class node. If there are many fields or methods and many nodes the graph may
-# become too big to be useful. The UML_LIMIT_NUM_FIELDS threshold limits the
-# number of items for each type to make the size more manageable. Set this to 0
-# for no limit. Note that the threshold may be exceeded by 50% before the limit
-# is enforced. So when you set the threshold to 10, up to 15 fields may appear,
-# but if the number exceeds 15, the total amount of fields shown is limited to
-# 10.
-# Minimum value: 0, maximum value: 100, default value: 10.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-UML_LIMIT_NUM_FIELDS = 10
-
-# If the TEMPLATE_RELATIONS tag is set to YES then the inheritance and
-# collaboration graphs will show the relations between templates and their
-# instances.
-# The default value is: NO.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-TEMPLATE_RELATIONS = NO
-
-# If the INCLUDE_GRAPH, ENABLE_PREPROCESSING and SEARCH_INCLUDES tags are set to
-# YES then doxygen will generate a graph for each documented file showing the
-# direct and indirect include dependencies of the file with other documented
-# files.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-INCLUDE_GRAPH = NO
-
-# If the INCLUDED_BY_GRAPH, ENABLE_PREPROCESSING and SEARCH_INCLUDES tags are
-# set to YES then doxygen will generate a graph for each documented file showing
-# the direct and indirect include dependencies of the file with other documented
-# files.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-INCLUDED_BY_GRAPH = NO
-
-# If the CALL_GRAPH tag is set to YES then doxygen will generate a call
-# dependency graph for every global function or class method.
-#
-# Note that enabling this option will significantly increase the time of a run.
-# So in most cases it will be better to enable call graphs for selected
-# functions only using the \callgraph command.
-# The default value is: NO.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-CALL_GRAPH = NO
-
-# If the CALLER_GRAPH tag is set to YES then doxygen will generate a caller
-# dependency graph for every global function or class method.
-#
-# Note that enabling this option will significantly increase the time of a run.
-# So in most cases it will be better to enable caller graphs for selected
-# functions only using the \callergraph command.
-# The default value is: NO.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-CALLER_GRAPH = NO
-
-# If the GRAPHICAL_HIERARCHY tag is set to YES then doxygen will show a
-# graphical hierarchy of all classes instead of a textual one.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-GRAPHICAL_HIERARCHY = NO
-
-# If the DIRECTORY_GRAPH tag is set to YES then doxygen will show the
-# dependencies a directory has on other directories in a graphical way. The
-# dependency relations are determined by the #include relations between the
-# files in the directories.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DIRECTORY_GRAPH = NO
-
-# The DOT_IMAGE_FORMAT tag can be used to set the image format of the images
-# generated by dot.
-# Note: If you choose svg you need to set HTML_FILE_EXTENSION to xhtml in order
-# to make the SVG files visible in IE 9+ (other browsers do not have this
-# requirement).
-# Possible values are: png, jpg, gif and svg.
-# The default value is: png.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_IMAGE_FORMAT = png
-
-# If DOT_IMAGE_FORMAT is set to svg, then this option can be set to YES to
-# enable generation of interactive SVG images that allow zooming and panning.
-#
-# Note that this requires a modern browser other than Internet Explorer. Tested
-# and working are Firefox, Chrome, Safari, and Opera.
-# Note: For IE 9+ you need to set HTML_FILE_EXTENSION to xhtml in order to make
-# the SVG files visible. Older versions of IE do not have SVG support.
-# The default value is: NO.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-INTERACTIVE_SVG = NO
-
-# The DOT_PATH tag can be used to specify the path where the dot tool can be
-# found. If left blank, it is assumed the dot tool can be found in the path.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_PATH =
-
-# The DOTFILE_DIRS tag can be used to specify one or more directories that
-# contain dot files that are included in the documentation (see the \dotfile
-# command).
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOTFILE_DIRS =
-
-# The MSCFILE_DIRS tag can be used to specify one or more directories that
-# contain msc files that are included in the documentation (see the \mscfile
-# command).
-
-MSCFILE_DIRS =
-
-# The DIAFILE_DIRS tag can be used to specify one or more directories that
-# contain dia files that are included in the documentation (see the \diafile
-# command).
-
-DIAFILE_DIRS =
-
-# When using plantuml, the PLANTUML_JAR_PATH tag should be used to specify the
-# path where java can find the plantuml.jar file. If left blank, it is assumed
-# PlantUML is not used or called during a preprocessing step. Doxygen will
-# generate a warning when it encounters a \startuml command in this case and
-# will not generate output for the diagram.
-
-PLANTUML_JAR_PATH =
-
-# When using plantuml, the specified paths are searched for files specified by
-# the !include statement in a plantuml block.
-
-PLANTUML_INCLUDE_PATH =
-
-# The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of nodes
-# that will be shown in the graph. If the number of nodes in a graph becomes
-# larger than this value, doxygen will truncate the graph, which is visualized
-# by representing a node as a red box. Note that if the number of direct
-# children of the root node in a graph is already larger than
-# DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note that
-# the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH.
-# Minimum value: 0, maximum value: 10000, default value: 50.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_GRAPH_MAX_NODES = 50
-
-# The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the graphs
-# generated by dot. A depth value of 3 means that only nodes reachable from the
-# root by following a path via at most 3 edges will be shown. Nodes that lay
-# further from the root node will be omitted. Note that setting this option to 1
-# or 2 may greatly reduce the computation time needed for large code bases. Also
-# note that the size of a graph can be further restricted by
-# DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction.
-# Minimum value: 0, maximum value: 1000, default value: 0.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-MAX_DOT_GRAPH_DEPTH = 0
-
-# Set the DOT_TRANSPARENT tag to YES to generate images with a transparent
-# background. This is disabled by default, because dot on Windows does not seem
-# to support this out of the box.
-#
-# Warning: Depending on the platform used, enabling this option may lead to
-# badly anti-aliased labels on the edges of a graph (i.e. they become hard to
-# read).
-# The default value is: NO.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_TRANSPARENT = NO
-
-# Set the DOT_MULTI_TARGETS tag to YES to allow dot to generate multiple output
-# files in one run (i.e. multiple -o and -T options on the command line). This
-# makes dot run faster, but since only newer versions of dot (>1.8.10) support
-# this, this feature is disabled by default.
-# The default value is: NO.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_MULTI_TARGETS = NO
-
-# If the GENERATE_LEGEND tag is set to YES doxygen will generate a legend page
-# explaining the meaning of the various boxes and arrows in the dot generated
-# graphs.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-GENERATE_LEGEND = YES
-
-# If the DOT_CLEANUP tag is set to YES, doxygen will remove the intermediate dot
-# files that are used to generate the various graphs.
-# The default value is: YES.
-# This tag requires that the tag HAVE_DOT is set to YES.
-
-DOT_CLEANUP = YES
-
diff --git a/tools/doxygen/Doxyfile.core b/tools/doxygen/Doxyfile.core
index 53ae4e4cf4..e631c962b3 100644
--- a/tools/doxygen/Doxyfile.core
+++ b/tools/doxygen/Doxyfile.core
@@ -40,7 +40,7 @@ PROJECT_NAME = "GRPC Core"
# could be handy for archiving the generated documentation or if some version
# control system is used.
-PROJECT_NUMBER = 0.16.0-dev
+PROJECT_NUMBER = 1.1.0-dev
# Using the PROJECT_BRIEF tag one can provide an optional one line description
# for a project that appears at the top of each page and should give viewer a
diff --git a/tools/doxygen/Doxyfile.core.internal b/tools/doxygen/Doxyfile.core.internal
index b846237689..dcb11bd933 100644
--- a/tools/doxygen/Doxyfile.core.internal
+++ b/tools/doxygen/Doxyfile.core.internal
@@ -40,7 +40,7 @@ PROJECT_NAME = "GRPC Core"
# could be handy for archiving the generated documentation or if some version
# control system is used.
-PROJECT_NUMBER = 0.16.0-dev
+PROJECT_NUMBER = 1.1.0-dev
# Using the PROJECT_BRIEF tag one can provide an optional one line description
# for a project that appears at the top of each page and should give viewer a
@@ -796,6 +796,7 @@ src/core/lib/channel/channel_stack_builder.h \
src/core/lib/channel/compress_filter.h \
src/core/lib/channel/connected_channel.h \
src/core/lib/channel/context.h \
+src/core/lib/channel/handshaker.h \
src/core/lib/channel/http_client_filter.h \
src/core/lib/channel/http_server_filter.h \
src/core/lib/compression/algorithm_metadata.h \
@@ -866,6 +867,7 @@ src/core/lib/transport/connectivity_state.h \
src/core/lib/transport/metadata.h \
src/core/lib/transport/metadata_batch.h \
src/core/lib/transport/static_metadata.h \
+src/core/lib/transport/timeout_encoding.h \
src/core/lib/transport/transport.h \
src/core/lib/transport/transport_impl.h \
src/core/ext/transport/chttp2/transport/bin_decoder.h \
@@ -887,7 +889,6 @@ src/core/ext/transport/chttp2/transport/incoming_metadata.h \
src/core/ext/transport/chttp2/transport/internal.h \
src/core/ext/transport/chttp2/transport/status_conversion.h \
src/core/ext/transport/chttp2/transport/stream_map.h \
-src/core/ext/transport/chttp2/transport/timeout_encoding.h \
src/core/ext/transport/chttp2/transport/varint.h \
src/core/ext/transport/chttp2/alpn/alpn.h \
src/core/lib/security/context/security_context.h \
@@ -930,6 +931,7 @@ src/core/ext/client_config/subchannel.h \
src/core/ext/client_config/subchannel_call_holder.h \
src/core/ext/client_config/subchannel_index.h \
src/core/ext/client_config/uri_parser.h \
+src/core/ext/lb_policy/grpclb/grpclb.h \
src/core/ext/lb_policy/grpclb/load_balancer_api.h \
src/core/ext/lb_policy/grpclb/proto/grpc/lb/v1/load_balancer.pb.h \
third_party/nanopb/pb.h \
@@ -939,11 +941,13 @@ third_party/nanopb/pb_encode.h \
src/core/ext/load_reporting/load_reporting.h \
src/core/ext/load_reporting/load_reporting_filter.h \
src/core/ext/census/aggregation.h \
+src/core/ext/census/base_resources.h \
src/core/ext/census/census_interface.h \
src/core/ext/census/census_rpc_stats.h \
src/core/ext/census/gen/census.pb.h \
src/core/ext/census/grpc_filter.h \
src/core/ext/census/mlog.h \
+src/core/ext/census/resource.h \
src/core/ext/census/rpc_metric_id.h \
src/core/lib/surface/init.c \
src/core/lib/channel/channel_args.c \
@@ -951,6 +955,7 @@ src/core/lib/channel/channel_stack.c \
src/core/lib/channel/channel_stack_builder.c \
src/core/lib/channel/compress_filter.c \
src/core/lib/channel/connected_channel.c \
+src/core/lib/channel/handshaker.c \
src/core/lib/channel/http_client_filter.c \
src/core/lib/channel/http_server_filter.c \
src/core/lib/compression/compression.c \
@@ -1031,6 +1036,7 @@ src/core/lib/transport/connectivity_state.c \
src/core/lib/transport/metadata.c \
src/core/lib/transport/metadata_batch.c \
src/core/lib/transport/static_metadata.c \
+src/core/lib/transport/timeout_encoding.c \
src/core/lib/transport/transport.c \
src/core/lib/transport/transport_op_string.c \
src/core/ext/transport/chttp2/server/secure/server_secure_chttp2.c \
@@ -1053,7 +1059,6 @@ src/core/ext/transport/chttp2/transport/parsing.c \
src/core/ext/transport/chttp2/transport/status_conversion.c \
src/core/ext/transport/chttp2/transport/stream_lists.c \
src/core/ext/transport/chttp2/transport/stream_map.c \
-src/core/ext/transport/chttp2/transport/timeout_encoding.c \
src/core/ext/transport/chttp2/transport/varint.c \
src/core/ext/transport/chttp2/transport/writing.c \
src/core/ext/transport/chttp2/alpn/alpn.c \
@@ -1109,6 +1114,7 @@ src/core/ext/transport/chttp2/server/insecure/server_chttp2.c \
src/core/ext/transport/chttp2/server/insecure/server_chttp2_posix.c \
src/core/ext/transport/chttp2/client/insecure/channel_create.c \
src/core/ext/transport/chttp2/client/insecure/channel_create_posix.c \
+src/core/ext/lb_policy/grpclb/grpclb.c \
src/core/ext/lb_policy/grpclb/load_balancer_api.c \
src/core/ext/lb_policy/grpclb/proto/grpc/lb/v1/load_balancer.pb.c \
third_party/nanopb/pb_common.c \
@@ -1120,6 +1126,7 @@ src/core/ext/resolver/dns/native/dns_resolver.c \
src/core/ext/resolver/sockaddr/sockaddr_resolver.c \
src/core/ext/load_reporting/load_reporting.c \
src/core/ext/load_reporting/load_reporting_filter.c \
+src/core/ext/census/base_resources.c \
src/core/ext/census/context.c \
src/core/ext/census/gen/census.pb.c \
src/core/ext/census/grpc_context.c \
@@ -1129,6 +1136,7 @@ src/core/ext/census/initialize.c \
src/core/ext/census/mlog.c \
src/core/ext/census/operation.c \
src/core/ext/census/placeholders.c \
+src/core/ext/census/resource.c \
src/core/ext/census/tracing.c \
src/core/plugin_registry/grpc_plugin_registry.c \
include/grpc/support/alloc.h \
diff --git a/tools/gce/create_linux_worker.sh b/tools/gce/create_linux_worker.sh
index c41e4d299b..8a2df40859 100755
--- a/tools/gce/create_linux_worker.sh
+++ b/tools/gce/create_linux_worker.sh
@@ -43,7 +43,8 @@ gcloud compute instances create $INSTANCE_NAME \
--project="$CLOUD_PROJECT" \
--zone "$ZONE" \
--machine-type n1-standard-8 \
- --image ubuntu-15-10 \
+ --image-family=ubuntu-1510 \
+ --image-project=ubuntu-os-cloud \
--boot-disk-size 1000
echo 'Created GCE instance, waiting 60 seconds for it to come online.'
diff --git a/tools/gce/linux_performance_worker_init.sh b/tools/gce/linux_performance_worker_init.sh
index 9b8d1d1eb7..23a8f082b2 100755
--- a/tools/gce/linux_performance_worker_init.sh
+++ b/tools/gce/linux_performance_worker_init.sh
@@ -90,13 +90,11 @@ sudo apt-get install -y libgflags-dev libgtest-dev libc++-dev clang
# Python dependencies
sudo pip install tabulate
sudo pip install google-api-python-client
-sudo pip install tox
curl -O https://bootstrap.pypa.io/get-pip.py
sudo pypy get-pip.py
sudo pypy -m pip install tabulate
sudo pip install google-api-python-client
-sudo pip install tox
# Node dependencies (nvm has to be installed under user jenkins)
touch .profile
diff --git a/tools/gcp/stress_test/stress_test_utils.py b/tools/gcp/stress_test/stress_test_utils.py
index b821fc8fcc..be50af3184 100755
--- a/tools/gcp/stress_test/stress_test_utils.py
+++ b/tools/gcp/stress_test/stress_test_utils.py
@@ -121,7 +121,7 @@ class BigQueryHelper:
if not page['jobComplete']:
print('TIMEOUT ERROR: The query %s timed out. Current timeout value is'
' %d msec. Returning False (i.e assuming there are no failures)'
- ) % (query, timeoout_msec)
+ ) % (query, timeout_msec)
return False
num_failures = int(page['totalRows'])
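
The fix above corrects an undefined name (timeoout_msec); note that the surrounding call still leans on Python 2 print-statement parsing, where `print (s) % args` formats the string before printing. A hedged sketch of the same message written without that quirk (query and timeout_msec stand in for the locals in BigQueryHelper):

    # Format first, then print, instead of relying on `print (s) % args`
    # statement parsing; works on Python 2 and 3 alike.
    query = 'SELECT 1'        # placeholder values, not the script's real state
    timeout_msec = 30000
    print('TIMEOUT ERROR: The query %s timed out. Current timeout value is'
          ' %d msec. Returning False (i.e assuming there are no failures)'
          % (query, timeout_msec))
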
diff --git a/tools/profiling/latency_profile/profile_analyzer.py b/tools/profiling/latency_profile/profile_analyzer.py
index dad0712d40..48b8e9b950 100755
--- a/tools/profiling/latency_profile/profile_analyzer.py
+++ b/tools/profiling/latency_profile/profile_analyzer.py
@@ -43,6 +43,7 @@ TIME_FROM_SCOPE_START = object()
TIME_TO_SCOPE_END = object()
TIME_FROM_STACK_START = object()
TIME_TO_STACK_END = object()
+TIME_FROM_LAST_IMPORTANT = object()
argp = argparse.ArgumentParser(description='Process output of basic_prof builds')
@@ -78,10 +79,14 @@ class ScopeBuilder(object):
self.call_stack_builder.lines.append(line_item)
def finish(self, line):
- assert line['tag'] == self.top_line.tag, 'expected %s, got %s; thread=%s; t0=%f t1=%f' % (self.top_line.tag, line['tag'], line['thd'], self.top_line.start_time, line['t'])
+ assert line['tag'] == self.top_line.tag, (
+ 'expected %s, got %s; thread=%s; t0=%f t1=%f' %
+ (self.top_line.tag, line['tag'], line['thd'], self.top_line.start_time,
+ line['t']))
final_time_stamp = line['t']
assert self.top_line.end_time is None
self.top_line.end_time = final_time_stamp
+ self.top_line.important = self.top_line.important or line['imp']
assert SELF_TIME not in self.top_line.times
self.top_line.times[SELF_TIME] = final_time_stamp - self.top_line.start_time
for line in self.call_stack_builder.lines[self.first_child_pos:]:
@@ -101,9 +106,14 @@ class CallStackBuilder(object):
start_time = self.lines[0].start_time
end_time = self.lines[0].end_time
self.signature = self.signature.hexdigest()
+ last_important = start_time
for line in self.lines:
line.times[TIME_FROM_STACK_START] = line.start_time - start_time
line.times[TIME_TO_STACK_END] = end_time - line.end_time
+ line.times[TIME_FROM_LAST_IMPORTANT] = line.start_time - last_important
+ if line.important:
+ last_important = line.end_time
+ last_important = end_time
def add(self, line):
line_type = line['type']
@@ -113,7 +123,9 @@ class CallStackBuilder(object):
self.stk.append(ScopeBuilder(self, line))
return False
elif line_type == '}':
- assert self.stk, 'expected non-empty stack for closing %s; thread=%s; t=%f' % (line['tag'], line['thd'], line['t'])
+ assert self.stk, (
+ 'expected non-empty stack for closing %s; thread=%s; t=%f' %
+ (line['tag'], line['thd'], line['t']))
self.stk.pop().finish(line)
if not self.stk:
self.finish()
@@ -216,9 +228,16 @@ def time_format(idx):
return ''
return ent
+BANNER = {
+ 'simple': 'Count: %(count)d',
+ 'html': '<h1>Count: %(count)d</h1>'
+}
+
FORMAT = [
('TAG', lambda line: '..'*line.indent + tidy_tag(line.tag)),
('LOC', lambda line: '%s:%d' % (line.filename[line.filename.rfind('/')+1:], line.fileline)),
+ ('IMP', lambda line: '*' if line.important else ''),
+ ('FROM_IMP', time_format(TIME_FROM_LAST_IMPORTANT)),
('FROM_STACK_START', time_format(TIME_FROM_STACK_START)),
('SELF', time_format(SELF_TIME)),
('TO_STACK_END', time_format(TIME_TO_STACK_END)),
@@ -227,11 +246,6 @@ FORMAT = [
('TO_SCOPE_END', time_format(TIME_TO_SCOPE_END)),
]
-BANNER = {
- 'simple': 'Count: %(count)d',
- 'html': '<h1>Count: %(count)d</h1>'
-}
-
if args.fmt == 'html':
print '<html>'
print '<head>'
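
The profile_analyzer.py changes above add an IMP marker and a FROM_IMP column: each line is stamped with the time elapsed since the end of the most recent line flagged important. A standalone sketch of that bookkeeping, with Line as an assumed stand-in for the builder's line items:

    from collections import namedtuple

    Line = namedtuple('Line', 'start_time end_time important times')

    def stamp_from_last_important(lines, stack_start):
        # Mirrors the loop added to CallStackBuilder.finish(): track the end
        # time of the last important line and record each line's distance
        # from it under TIME_FROM_LAST_IMPORTANT.
        last_important = stack_start
        for line in lines:
            line.times['TIME_FROM_LAST_IMPORTANT'] = line.start_time - last_important
            if line.important:
                last_important = line.end_time

    lines = [Line(0.0, 1.0, True, {}), Line(2.5, 3.0, False, {})]
    stamp_from_last_important(lines, 0.0)
    assert lines[1].times['TIME_FROM_LAST_IMPORTANT'] == 1.5  # 2.5 - 1.0
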
diff --git a/tools/run_tests/artifact_targets.py b/tools/run_tests/artifact_targets.py
index bd1269ceb7..8550ee7b84 100644
--- a/tools/run_tests/artifact_targets.py
+++ b/tools/run_tests/artifact_targets.py
@@ -30,17 +30,20 @@
"""Definition of targets to build artifacts."""
+import os.path
+import sys
+
import jobset
def create_docker_jobspec(name, dockerfile_dir, shell_command, environ={},
- flake_retries=0, timeout_retries=0):
+ flake_retries=0, timeout_retries=0, timeout_seconds=30*60):
"""Creates jobspec for a task running under docker."""
environ = environ.copy()
environ['RUN_COMMAND'] = shell_command
docker_args=[]
- for k,v in environ.iteritems():
+ for k,v in environ.items():
docker_args += ['-e', '%s=%s' % (k, v)]
docker_env = {'DOCKERFILE_DIR': dockerfile_dir,
'DOCKER_RUN_SCRIPT': 'tools/run_tests/dockerize/docker_run.sh',
@@ -49,20 +52,20 @@ def create_docker_jobspec(name, dockerfile_dir, shell_command, environ={},
cmdline=['tools/run_tests/dockerize/build_and_run_docker.sh'] + docker_args,
environ=docker_env,
shortname='build_artifact.%s' % (name),
- timeout_seconds=30*60,
+ timeout_seconds=timeout_seconds,
flake_retries=flake_retries,
timeout_retries=timeout_retries)
return jobspec
def create_jobspec(name, cmdline, environ=None, shell=False,
- flake_retries=0, timeout_retries=0):
+ flake_retries=0, timeout_retries=0, timeout_seconds=30*60):
"""Creates jobspec."""
jobspec = jobset.JobSpec(
cmdline=cmdline,
environ=environ,
shortname='build_artifact.%s' % (name),
- timeout_seconds=30*60,
+ timeout_seconds=timeout_seconds,
flake_retries=flake_retries,
timeout_retries=timeout_retries,
shell=shell)
@@ -76,27 +79,30 @@ _ARCH_FLAG_MAP = {
'x64': '-m64'
}
-python_version_arch_map = {
- 'x86': 'Python27_32bits',
- 'x64': 'Python27'
+python_windows_version_arch_map = {
+ ('x86', '2.7'): 'Python27_32bits',
+ ('x64', '2.7'): 'Python27',
+ ('x86', '3.4'): 'Python34_32bits',
+ ('x64', '3.4'): 'Python34',
}
class PythonArtifact:
"""Builds Python artifacts."""
- def __init__(self, platform, arch, manylinux_build=None):
+ def __init__(self, platform, arch, python_version, manylinux_build=None):
if manylinux_build:
- self.name = 'python_%s_%s_%s' % (platform, arch, manylinux_build)
+ self.name = 'python%s_%s_%s_%s' % (python_version, platform, arch, manylinux_build)
else:
- self.name = 'python_%s_%s' % (platform, arch)
+ self.name = 'python%s_%s_%s' % (python_version, platform, arch)
self.platform = platform
self.arch = arch
- self.labels = ['artifact', 'python', platform, arch]
- self.python_version = python_version_arch_map[arch]
+ self.labels = ['artifact', 'python', python_version, platform, arch]
+ self.python_version = python_version
+ self.python_windows_prefix = python_windows_version_arch_map[arch, python_version]
self.manylinux_build = manylinux_build
def pre_build_jobspecs(self):
- return []
+ return []
def build_jobspec(self):
environ = {}
@@ -107,26 +113,27 @@ class PythonArtifact:
# special places...
environ['PYTHON'] = '/opt/python/{}/bin/python'.format(self.manylinux_build)
environ['PIP'] = '/opt/python/{}/bin/pip'.format(self.manylinux_build)
- # Our docker image has all the prerequisites pip-installed already.
- environ['SKIP_PIP_INSTALL'] = '1'
# Platform autodetection for the manylinux1 image breaks so we set the
# defines ourselves.
# TODO(atash) get better platform-detection support in core so we don't
# need to do this manually...
environ['CFLAGS'] = '-DGPR_MANYLINUX1=1'
+ environ['BUILD_HEALTH_CHECKING'] = 'TRUE'
+ environ['BUILD_MANYLINUX_WHEEL'] = 'TRUE'
return create_docker_jobspec(self.name,
'tools/dockerfile/grpc_artifact_python_manylinux_%s' % self.arch,
'tools/run_tests/build_artifact_python.sh',
- environ=environ)
+ environ=environ,
+ timeout_seconds=60*60)
elif self.platform == 'windows':
return create_jobspec(self.name,
['tools\\run_tests\\build_artifact_python.bat',
- self.python_version,
+ self.python_windows_prefix,
'32' if self.arch == 'x86' else '64'
],
shell=True)
else:
- environ['SKIP_PIP_INSTALL'] = 'TRUE'
+ environ['PYTHON'] = 'python{}'.format(self.python_version)
return create_jobspec(self.name,
['tools/run_tests/build_artifact_python.sh'],
environ=environ)
@@ -323,13 +330,18 @@ def targets():
for Cls in (CSharpExtArtifact, NodeExtArtifact, ProtocArtifact)
for platform in ('linux', 'macos', 'windows')
for arch in ('x86', 'x64')] +
- [PythonArtifact('linux', 'x86', 'cp27-cp27m'),
- PythonArtifact('linux', 'x86', 'cp27-cp27mu'),
- PythonArtifact('linux', 'x64', 'cp27-cp27m'),
- PythonArtifact('linux', 'x64', 'cp27-cp27mu'),
- PythonArtifact('macos', 'x64'),
- PythonArtifact('windows', 'x86'),
- PythonArtifact('windows', 'x64'),
+ [PythonArtifact('linux', 'x86', '2.7', 'cp27-cp27m'),
+ PythonArtifact('linux', 'x86', '2.7', 'cp27-cp27mu'),
+ PythonArtifact('linux', 'x64', '2.7', 'cp27-cp27m'),
+ PythonArtifact('linux', 'x64', '2.7', 'cp27-cp27mu'),
+ PythonArtifact('macos', 'x64', '2.7'),
+ PythonArtifact('windows', 'x86', '2.7'),
+ PythonArtifact('windows', 'x64', '2.7'),
+ PythonArtifact('linux', 'x86', '3.4', 'cp34-cp34m'),
+ PythonArtifact('linux', 'x64', '3.4', 'cp34-cp34m'),
+ PythonArtifact('macos', 'x64', '3.4'),
+ PythonArtifact('windows', 'x86', '3.4'),
+ PythonArtifact('windows', 'x64', '3.4'),
RubyArtifact('linux', 'x86'),
RubyArtifact('linux', 'x64'),
RubyArtifact('macos', 'x64'),
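
The PythonArtifact changes key the Windows interpreter prefix on an (arch, python_version) pair rather than arch alone, which is what lets the target list above grow 3.4 entries alongside 2.7. A small sketch of the lookup, using the map verbatim from the diff:

    # Tuple keys index the full version/arch matrix in one table;
    # d[a, b] is shorthand for d[(a, b)], as used for python_windows_prefix.
    python_windows_version_arch_map = {
        ('x86', '2.7'): 'Python27_32bits',
        ('x64', '2.7'): 'Python27',
        ('x86', '3.4'): 'Python34_32bits',
        ('x64', '3.4'): 'Python34',
    }
    assert python_windows_version_arch_map['x64', '3.4'] == 'Python34'
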
diff --git a/tools/run_tests/build_artifact_python.bat b/tools/run_tests/build_artifact_python.bat
index 295347e947..074a3c6781 100644
--- a/tools/run_tests/build_artifact_python.bat
+++ b/tools/run_tests/build_artifact_python.bat
@@ -28,62 +28,33 @@
@rem OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
-set NUGET=C:\nuget\nuget.exe
-%NUGET% restore vsprojects\grpc.sln || goto :error
-
-
-@call vsprojects\build_vs2013.bat vsprojects\grpc.sln /t:grpc_dll /p:Configuration=Release /p:PlatformToolset=v120 /p:Platform=Win32 || goto :error
-@call vsprojects\build_vs2013.bat vsprojects\grpc.sln /t:grpc_dll /p:Configuration=Release /p:PlatformToolset=v120 /p:Platform=x64 || goto :error
-
-mkdir src\python\grpcio\grpc\_cython\_windows
-
-@rem TODO(atash): maybe we could avoid the grpc_c.(32|64).python shim below if
-@rem this used the right python build?
-copy /Y vsprojects\Release\grpc_dll.dll src\python\grpcio\grpc\_cython\_windows\grpc_c.32.python || goto :error
-copy /Y vsprojects\x64\Release\grpc_dll.dll src\python\grpcio\grpc\_cython\_windows\grpc_c.64.python || goto :error
-
set PATH=C:\%1;C:\%1\scripts;C:\msys64\mingw%2\bin;%PATH%
pip install --upgrade six
pip install --upgrade setuptools
pip install -rrequirements.txt
-set GRPC_PYTHON_USE_CUSTOM_BDIST=0
set GRPC_PYTHON_BUILD_WITH_CYTHON=1
-@rem Because this is windows and *everything seems to hate Windows* we have to
-@rem set all of these flags ourselves because Python won't help us (see the
-@rem setup.py of the grpcio_tools project).
-set GRPC_PYTHON_CFLAGS=-fno-wrapv -frtti -std=c++11
-@rem Further confusing things, MSYS2's mingw64 tries to dynamically link
-@rem libgcc, libstdc++, and winpthreads. We have to override this or our
-@rem extensions end up linking to MSYS2 DLLs, which the normal Python on
-@rem Windows user won't have... and ON TOP OF THIS, there's MinGW's GCC default
-@rem behavior of linking msvcrt.dll as the C runtime library, which we need to
-@rem override so that Python's distutils doesn't link us against multiple C
-@rem runtimes.
-python -c "from distutils.cygwinccompiler import get_msvcr; print(get_msvcr()[0])" > temp.txt
-set /p PYTHON_MSVCR=<temp.txt
-set GRPC_PYTHON_LDFLAGS=-static-libgcc -static-libstdc++ -mcrtdll=%PYTHON_MSVCR% -static -lpthread
+@rem Set up gRPC Python tools
+python tools\distrib\python\make_grpcio_tools.py
-@rem Build gRPC
-if %2 == 32 (
- python setup.py build_ext -c mingw32
-) else (
- python setup.py build_ext -c mingw32 -DMS_WIN64
-)
+@rem Build gRPC Python extensions
+python setup.py build_ext -c mingw32
+
+pushd tools\distrib\python\grpcio_tools
+python setup.py build_ext -c mingw32
+popd
+
+
+@rem Build gRPC Python distributions
python setup.py bdist_wheel
+pushd tools\distrib\python\grpcio_tools
+python setup.py bdist_wheel
+popd
-@rem Build gRPC Python tools
-python tools\distrib\python\make_grpcio_tools.py
-if %2 == 32 (
- python tools\distrib\python\grpcio_tools\setup.py build_ext -c mingw32
-) else (
- python tools\distrib\python\grpcio_tools\setup.py build_ext -c mingw32 -DMS_WIN64
-)
-python tools\distrib\python\grpcio_tools\setup.py bdist_wheel
mkdir artifacts
xcopy /Y /I /S dist\* artifacts\ || goto :error
diff --git a/tools/run_tests/build_artifact_python.sh b/tools/run_tests/build_artifact_python.sh
index 55f8eb634b..8f8330ef24 100755
--- a/tools/run_tests/build_artifact_python.sh
+++ b/tools/run_tests/build_artifact_python.sh
@@ -39,15 +39,6 @@ export PIP=${PIP:-pip}
export AUDITWHEEL=${AUDITWHEEL:-auditwheel}
-if [ "$SKIP_PIP_INSTALL" == "" ]
-then
- ${PIP} install --upgrade six
- # There's a bug in newer versions of setuptools (see
- # https://bitbucket.org/pypa/setuptools/issues/503/pkg_resources_vendorpackagingrequirementsi)
- ${PIP} pip install --upgrade 'setuptools==18'
- ${PIP} install -rrequirements.txt
-fi
-
# Build the source distribution first because MANIFEST.in cannot override
# exclusion of built shared objects among package resources (for some
# inexplicable reason).
@@ -71,15 +62,33 @@ CFLAGS="$CFLAGS -fno-wrapv" ${SETARCH_CMD} \
${PYTHON} tools/distrib/python/grpcio_tools/setup.py bdist_wheel
mkdir -p artifacts
-if command -v ${AUDITWHEEL}
+if [ "$BUILD_MANYLINUX_WHEEL" != "" ]
then
for wheel in dist/*.whl; do
${AUDITWHEEL} repair $wheel -w artifacts/
+ rm $wheel
done
for wheel in tools/distrib/python/grpcio_tools/dist/*.whl; do
${AUDITWHEEL} repair $wheel -w artifacts/
+ rm $wheel
done
fi
+# We need to use the built grpcio-tools/grpcio to compile the health proto
+# Wheels are not supported by setup_requires/dependency_links, so we
+# manually install the dependency. Note we should only do this if we
+# are in a docker image or in a virtualenv.
+if [ "$BUILD_HEALTH_CHECKING" != "" ]
+then
+ ${PIP} install -rrequirements.txt
+ ${PIP} install grpcio --no-index --find-links "file://${PWD}/artifacts/"
+ ${PIP} install grpcio-tools --no-index --find-links "file://${PWD}/artifacts/"
+
+ # Build gRPC health check source distribution
+ ${SETARCH_CMD} ${PYTHON} src/python/grpcio_health_checking/setup.py \
+ preprocess build_package_protos sdist
+ cp -r src/python/grpcio_health_checking/dist/* artifacts
+fi
+
cp -r dist/* artifacts
cp -r tools/distrib/python/grpcio_tools/dist/* artifacts
diff --git a/tools/run_tests/build_package_node.sh b/tools/run_tests/build_package_node.sh
index ff4cfdb8bf..ef4a10cca7 100755
--- a/tools/run_tests/build_package_node.sh
+++ b/tools/run_tests/build_package_node.sh
@@ -61,7 +61,7 @@ mkdir -p $output_dir
well_known_protos=( any api compiler/plugin descriptor duration empty field_mask source_context struct timestamp type wrappers )
for arch in {x86,x64}; do
- case arch in
+ case $arch in
x86)
node_arch=ia32
;;
@@ -70,7 +70,7 @@ for arch in {x86,x64}; do
;;
esac
for plat in {windows,linux,macos}; do
- case plat in
+ case $plat in
windows)
node_plat=win32
;;
diff --git a/tools/run_tests/build_python.sh b/tools/run_tests/build_python.sh
index b1c90df824..727b11e273 100755
--- a/tools/run_tests/build_python.sh
+++ b/tools/run_tests/build_python.sh
@@ -33,44 +33,141 @@ set -ex
# change to grpc repo root
cd $(dirname $0)/../..
-TOX_PYTHON_ENV="$1"
-PY_VERSION="${TOX_PYTHON_ENV: -2}"
+##########################
+# Portability operations #
+##########################
+
+PLATFORM=`uname -s`
+
+function is_mingw() {
+ if [ "${PLATFORM/MINGW}" != "$PLATFORM" ]; then
+ echo true
+ else
+ exit 1
+ fi
+}
+
+function is_darwin() {
+ if [ "${PLATFORM/Darwin}" != "$PLATFORM" ]; then
+ echo true
+ else
+ exit 1
+ fi
+}
+
+function is_linux() {
+ if [ "${PLATFORM/Linux}" != "$PLATFORM" ]; then
+ echo true
+ else
+ exit 1
+ fi
+}
+
+# Associated virtual environment name for the given python command.
+function venv() {
+ $1 -c "import sys; print('py{}{}'.format(*sys.version_info[:2]))"
+}
+
+# Path to python executable within a virtual environment depending on the
+# system.
+function venv_relative_python() {
+ if [ $(is_mingw) ]; then
+ echo 'Scripts/python.exe'
+ else
+ echo 'bin/python'
+ fi
+}
+
+# Distutils toolchain to use depending on the system.
+function toolchain() {
+ if [ $(is_mingw) ]; then
+ echo 'mingw32'
+ else
+ echo 'unix'
+ fi
+}
+
+# Command to invoke the linux command `realpath` or equivalent.
+function script_realpath() {
+ # Find `realpath`
+ if [ -x "$(command -v realpath)" ]; then
+ realpath "$@"
+ elif [ -x "$(command -v grealpath)" ]; then
+ grealpath "$@"
+ else
+ exit 1
+ fi
+}
+
+####################
+# Script Arguments #
+####################
+
+PYTHON=${1:-python2.7}
+VENV=${2:-$(venv $PYTHON)}
+VENV_RELATIVE_PYTHON=${3:-$(venv_relative_python)}
+TOOLCHAIN=${4:-$(toolchain)}
ROOT=`pwd`
-export LD_LIBRARY_PATH=$ROOT/libs/$CONFIG
-export DYLD_LIBRARY_PATH=$ROOT/libs/$CONFIG
-export PATH=$ROOT/bins/$CONFIG:$ROOT/bins/$CONFIG/protobuf:$PATH
-export CFLAGS="-I$ROOT/include -std=gnu99"
-export LDFLAGS="-L$ROOT/libs/$CONFIG"
+export CFLAGS="-I$ROOT/include -std=gnu99 -fno-wrapv $CFLAGS"
export GRPC_PYTHON_BUILD_WITH_CYTHON=1
-export GRPC_PYTHON_USE_PRECOMPILED_BINARIES=0
-if [ "$CONFIG" = "gcov" ]
-then
- export GRPC_PYTHON_ENABLE_CYTHON_TRACING=1
-fi
+# Default python on the host to fall back to when instantiating e.g. the
+# virtualenv.
+HOST_PYTHON=${HOST_PYTHON:-python}
-tox -e ${TOX_PYTHON_ENV} --notest
-
-# We force the .so naming convention in PEP 3149 for side by side installation support
-# Note this is the default in Python3, but explicitly disabled for Darwin, so we only
-# use this hack for our testing environment.
-if [ "$PY_VERSION" -gt "27" ]
-then
- mv $ROOT/src/python/grpcio/grpc/_cython/cygrpc.so $ROOT/src/python/grpcio/grpc/_cython/cygrpc.so.backup || true
+# If ccache is available on Linux, use it.
+if [ $(is_linux) ]; then
+ # We're not on Darwin (Mac OS X)
+ if [ -x "$(command -v ccache)" ]; then
+ if [ -x "$(command -v gcc)" ]; then
+ export CC='ccache gcc'
+ elif [ -x "$(command -v clang)" ]; then
+ export CC='ccache clang'
+ fi
+ fi
fi
-$ROOT/.tox/${TOX_PYTHON_ENV}/bin/python $ROOT/setup.py build
-$ROOT/.tox/${TOX_PYTHON_ENV}/bin/python $ROOT/setup.py build_py
-$ROOT/.tox/${TOX_PYTHON_ENV}/bin/python $ROOT/setup.py build_ext --inplace
-$ROOT/.tox/${TOX_PYTHON_ENV}/bin/python $ROOT/setup.py gather --test
+############################
+# Perform build operations #
+############################
-if [ "$PY_VERSION" -gt "27" ]
-then
- mv $ROOT/src/python/grpcio/grpc/_cython/cygrpc.so $ROOT/src/python/grpcio/grpc/_cython/cygrpc.cpython-${PY_VERSION}m.so || true
- mv $ROOT/src/python/grpcio/grpc/_cython/cygrpc.so.backup $ROOT/src/python/grpcio/grpc/_cython/cygrpc.so || true
-fi
+# Instantiate the virtualenv, preferring to do so from the relevant python
+# version. Even if these commands fail (e.g. on Windows due to name conflicts)
+# it's possible that the virtualenv is still usable and we trust the tester to
+# be able to 'figure it out' rather than have us do potentially expensive and
+# unnecessary error recovery by `rm -rf`ing the virtualenv.
+($PYTHON -m virtualenv $VENV ||
+ $HOST_PYTHON -m virtualenv -p $PYTHON $VENV ||
+ true)
+VENV_PYTHON=`script_realpath -s "$VENV/$VENV_RELATIVE_PYTHON"`
+
+# pip-installs the directory specified. Used because on MSYS the vanilla Windows
+# Python gets confused when parsing paths.
+pip_install_dir() {
+ PWD=`pwd`
+ cd $1
+ ($VENV_PYTHON setup.py build_ext -c $TOOLCHAIN || true)
+ # install the dependencies
+ $VENV_PYTHON -m pip install --upgrade .
+ # ensure that we've reinstalled the test packages
+ $VENV_PYTHON -m pip install --upgrade --force-reinstall --no-deps .
+ cd $PWD
+}
-# Build the health checker
-$ROOT/.tox/${TOX_PYTHON_ENV}/bin/python $ROOT/src/python/grpcio_health_checking/setup.py build
-$ROOT/.tox/${TOX_PYTHON_ENV}/bin/python $ROOT/src/python/grpcio_health_checking/setup.py build_py
+$VENV_PYTHON -m pip install --upgrade pip
+# TODO(https://github.com/pypa/setuptools/issues/709) get the latest setuptools
+$VENV_PYTHON -m pip install setuptools==25.1.1
+$VENV_PYTHON -m pip install cython
+pip_install_dir $ROOT
+$VENV_PYTHON $ROOT/tools/distrib/python/make_grpcio_tools.py
+pip_install_dir $ROOT/tools/distrib/python/grpcio_tools
+# TODO(atash) figure out namespace packages and grpcio-tools and auditwheel
+# etc...
+pip_install_dir $ROOT
+$VENV_PYTHON $ROOT/src/python/grpcio_health_checking/setup.py preprocess
+$VENV_PYTHON $ROOT/src/python/grpcio_health_checking/setup.py build_package_protos
+pip_install_dir $ROOT/src/python/grpcio_health_checking
+$VENV_PYTHON $ROOT/src/python/grpcio_tests/setup.py preprocess
+$VENV_PYTHON $ROOT/src/python/grpcio_tests/setup.py build_package_protos
+pip_install_dir $ROOT/src/python/grpcio_tests
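
The rewritten build_python.sh derives the virtualenv name from the interpreter itself via the venv() helper above. A sketch of the same expression run in-process (output depends on the interpreter used):

    import sys

    # venv() in the script shells out to this one-liner; 'py{}{}' plus the
    # major/minor version yields names like 'py27' or 'py34'.
    print('py{}{}'.format(*sys.version_info[:2]))
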
diff --git a/tools/run_tests/build_python_msys2.sh b/tools/run_tests/build_python_msys2.sh
new file mode 100644
index 0000000000..6e9d369018
--- /dev/null
+++ b/tools/run_tests/build_python_msys2.sh
@@ -0,0 +1,36 @@
+#!/bin/bash
+# Copyright 2016, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+set -ex
+
+BUILD_PYTHON=`realpath "$(dirname $0)/build_python.sh"`
+export MSYSTEM=$1
+shift 1
+bash --login $BUILD_PYTHON "$@"
diff --git a/tools/run_tests/distribtest_targets.py b/tools/run_tests/distribtest_targets.py
index 1a7aa0bfc8..7930f2a0a4 100644
--- a/tools/run_tests/distribtest_targets.py
+++ b/tools/run_tests/distribtest_targets.py
@@ -41,7 +41,7 @@ def create_docker_jobspec(name, dockerfile_dir, shell_command, environ={},
environ['RELATIVE_COPY_PATH'] = 'test/distrib'
docker_args=[]
- for k,v in environ.iteritems():
+ for k,v in environ.items():
docker_args += ['-e', '%s=%s' % (k, v)]
docker_env = {'DOCKERFILE_DIR': dockerfile_dir,
'DOCKER_RUN_SCRIPT': 'tools/run_tests/dockerize/docker_run.sh'}
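
This is the same iteritems() -> items() substitution made in artifact_targets.py and package_targets.py: dict.items() exists on both Python 2 and 3, so the docker -e arguments build identically under either interpreter. A minimal sketch, with placeholder values rather than the script's real environment:

    environ = {'RELATIVE_COPY_PATH': 'test/distrib'}
    docker_args = []
    # items() works on Python 2 and 3 alike (iteritems() is 2-only).
    for k, v in environ.items():
        docker_args += ['-e', '%s=%s' % (k, v)]
    assert docker_args == ['-e', 'RELATIVE_COPY_PATH=test/distrib']
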
diff --git a/tools/run_tests/dockerjob.py b/tools/run_tests/dockerjob.py
index e4ca3b7faa..4a7e61b3c4 100755
--- a/tools/run_tests/dockerjob.py
+++ b/tools/run_tests/dockerjob.py
@@ -29,6 +29,8 @@
"""Helpers to run docker instances as jobs."""
+from __future__ import print_function
+
import jobset
import tempfile
import time
@@ -95,7 +97,7 @@ def remove_image(image, skip_nonexistent=False, max_retries=10):
stderr=subprocess.STDOUT) == 0:
return True
time.sleep(2)
- print 'Failed to remove docker image %s' % image
+ print('Failed to remove docker image %s' % image)
return False
diff --git a/tools/run_tests/jobset.py b/tools/run_tests/jobset.py
index 4fe77487f9..b6fb6318e0 100755
--- a/tools/run_tests/jobset.py
+++ b/tools/run_tests/jobset.py
@@ -29,6 +29,8 @@
"""Run a group of subprocesses and then finish."""
+from __future__ import print_function
+
import multiprocessing
import os
import platform
@@ -47,6 +49,12 @@ measure_cpu_costs = False
_DEFAULT_MAX_JOBS = 16 * multiprocessing.cpu_count()
_MAX_RESULT_SIZE = 8192
+def sanitized_environment(env):
+ sanitized = {}
+ for key, value in env.items():
+ sanitized[str(key).encode()] = str(value).encode()
+ return sanitized
+
def platform_string():
if platform.system() == 'Windows':
return 'windows'
@@ -117,8 +125,8 @@ def message(tag, msg, explanatory_text=None, do_newline=False):
try:
if platform_string() == 'windows' or not sys.stdout.isatty():
if explanatory_text:
- print explanatory_text
- print '%s: %s' % (tag, msg)
+ print(explanatory_text)
+ print('%s: %s' % (tag, msg))
return
sys.stdout.write('%s%s%s\x1b[%d;%dm%s\x1b[0m: %s%s' % (
_BEGINNING_OF_LINE,
@@ -219,6 +227,7 @@ class Job(object):
env = dict(os.environ)
env.update(self._spec.environ)
env.update(self._add_env)
+ env = sanitized_environment(env)
self._start = time.time()
cmdline = self._spec.cmdline
if measure_cpu_costs:
@@ -361,7 +370,7 @@ class Jobset(object):
self._travis,
self._add_env)
self._running.add(job)
- if not self.resultset.has_key(job.GetSpec().shortname):
+ if job.GetSpec().shortname not in self.resultset:
self.resultset[job.GetSpec().shortname] = []
return True
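
sanitized_environment coerces every key and value to a byte string before the merged dict reaches subprocess; under Python 2 on Windows, an environment mixing unicode and str entries can make subprocess raise TypeError. A sketch of the call-site pattern, assuming the helper defined in the hunk above (POSIX example):

    import os
    import subprocess

    env = dict(os.environ)
    env.update({u'GRPC_POLL_STRATEGY': u'epoll'})  # unicode value sneaking in
    env = sanitized_environment(env)               # keys and values now byte strings
    subprocess.check_call(['printenv', 'GRPC_POLL_STRATEGY'], env=env)
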
diff --git a/tools/run_tests/package_targets.py b/tools/run_tests/package_targets.py
index 39a11a243d..ce3f08dfbc 100644
--- a/tools/run_tests/package_targets.py
+++ b/tools/run_tests/package_targets.py
@@ -39,7 +39,7 @@ def create_docker_jobspec(name, dockerfile_dir, shell_command, environ={},
environ['RUN_COMMAND'] = shell_command
docker_args=[]
- for k,v in environ.iteritems():
+ for k,v in environ.items():
docker_args += ['-e', '%s=%s' % (k, v)]
docker_env = {'DOCKERFILE_DIR': dockerfile_dir,
'DOCKER_RUN_SCRIPT': 'tools/run_tests/dockerize/docker_run.sh',
diff --git a/tools/run_tests/performance/bq_upload_result.py b/tools/run_tests/performance/bq_upload_result.py
index fbccf3bdca..2a99499843 100755
--- a/tools/run_tests/performance/bq_upload_result.py
+++ b/tools/run_tests/performance/bq_upload_result.py
@@ -118,6 +118,8 @@ def _flatten_result_inplace(scenario_result):
for stats in scenario_result['clientStats']:
stats['latencies'] = json.dumps(stats['latencies'])
scenario_result['serverCores'] = json.dumps(scenario_result['serverCores'])
+ scenario_result['clientSuccess'] = json.dumps(scenario_result['clientSuccess'])
+ scenario_result['serverSuccess'] = json.dumps(scenario_result['serverSuccess'])
def _populate_metadata_inplace(scenario_result):
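
clientSuccess and serverSuccess are flattened the same way as the latency and serverCores fields: JSON-encoded into a single string, matching the NULLABLE STRING columns added to the schema below. A small illustration with assumed list-of-boolean inputs:

    import json

    scenario_result = {'clientSuccess': [True, True, False],
                       'serverSuccess': [True]}
    # Encode repeated booleans as one JSON string to match the
    # NULLABLE STRING columns in scenario_result_schema.json.
    scenario_result['clientSuccess'] = json.dumps(scenario_result['clientSuccess'])
    scenario_result['serverSuccess'] = json.dumps(scenario_result['serverSuccess'])
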
diff --git a/tools/run_tests/performance/run_worker_python.sh b/tools/run_tests/performance/run_worker_python.sh
index 0da8deda58..06cf172d6f 100755
--- a/tools/run_tests/performance/run_worker_python.sh
+++ b/tools/run_tests/performance/run_worker_python.sh
@@ -32,4 +32,4 @@ set -ex
cd $(dirname $0)/../../..
-PYTHONPATH=src/python/grpcio:src/python/gens .tox/py27/bin/python src/python/grpcio/tests/qps/qps_worker.py $@
+PYTHONPATH=src/python/grpcio_tests:src/python/gens py27/bin/python src/python/grpcio_tests/tests/qps/qps_worker.py $@
diff --git a/tools/run_tests/performance/scenario_config.py b/tools/run_tests/performance/scenario_config.py
index 2d5130e1e8..4dfd01fc66 100644
--- a/tools/run_tests/performance/scenario_config.py
+++ b/tools/run_tests/performance/scenario_config.py
@@ -387,45 +387,44 @@ class PythonLanguage:
return 500
def scenarios(self):
- # TODO(issue #6522): Empty streaming requests does not work for python
- #yield _ping_pong_scenario(
- # 'python_generic_async_streaming_ping_pong', rpc_type='STREAMING',
- # client_type='ASYNC_CLIENT', server_type='ASYNC_GENERIC_SERVER',
- # use_generic_payload=True,
- # categories=[SMOKETEST])
+ yield _ping_pong_scenario(
+ 'python_generic_sync_streaming_ping_pong', rpc_type='STREAMING',
+ client_type='SYNC_CLIENT', server_type='ASYNC_GENERIC_SERVER',
+ use_generic_payload=True,
+ categories=[SMOKETEST])
yield _ping_pong_scenario(
'python_protobuf_sync_streaming_ping_pong', rpc_type='STREAMING',
- client_type='SYNC_CLIENT', server_type='SYNC_SERVER')
+ client_type='SYNC_CLIENT', server_type='ASYNC_SERVER')
yield _ping_pong_scenario(
'python_protobuf_async_unary_ping_pong', rpc_type='UNARY',
- client_type='ASYNC_CLIENT', server_type='SYNC_SERVER')
+ client_type='ASYNC_CLIENT', server_type='ASYNC_SERVER')
yield _ping_pong_scenario(
'python_protobuf_sync_unary_ping_pong', rpc_type='UNARY',
- client_type='SYNC_CLIENT', server_type='SYNC_SERVER',
+ client_type='SYNC_CLIENT', server_type='ASYNC_SERVER',
categories=[SMOKETEST])
yield _ping_pong_scenario(
'python_protobuf_sync_unary_qps_unconstrained', rpc_type='UNARY',
- client_type='SYNC_CLIENT', server_type='SYNC_SERVER',
+ client_type='SYNC_CLIENT', server_type='ASYNC_SERVER',
unconstrained_client='sync')
yield _ping_pong_scenario(
'python_protobuf_sync_streaming_qps_unconstrained', rpc_type='STREAMING',
- client_type='SYNC_CLIENT', server_type='SYNC_SERVER',
+ client_type='SYNC_CLIENT', server_type='ASYNC_SERVER',
unconstrained_client='sync')
yield _ping_pong_scenario(
'python_to_cpp_protobuf_sync_unary_ping_pong', rpc_type='UNARY',
- client_type='SYNC_CLIENT', server_type='SYNC_SERVER',
+ client_type='SYNC_CLIENT', server_type='ASYNC_SERVER',
server_language='c++', server_core_limit=1, async_server_threads=1,
categories=[SMOKETEST])
yield _ping_pong_scenario(
'python_to_cpp_protobuf_sync_streaming_ping_pong', rpc_type='STREAMING',
- client_type='SYNC_CLIENT', server_type='SYNC_SERVER',
+ client_type='SYNC_CLIENT', server_type='ASYNC_SERVER',
server_language='c++', server_core_limit=1, async_server_threads=1)
def __str__(self):
diff --git a/tools/run_tests/performance/scenario_result_schema.json b/tools/run_tests/performance/scenario_result_schema.json
index 0325414757..6bec21df39 100644
--- a/tools/run_tests/performance/scenario_result_schema.json
+++ b/tools/run_tests/performance/scenario_result_schema.json
@@ -198,5 +198,15 @@
"mode": "NULLABLE"
}
]
+ },
+ {
+ "name": "clientSuccess",
+ "type": "STRING",
+ "mode": "NULLABLE"
+ },
+ {
+ "name": "serverSuccess",
+ "type": "STRING",
+ "mode": "NULLABLE"
}
]
diff --git a/tools/run_tests/port_server.py b/tools/run_tests/port_server.py
index 14e82b601e..e9b3f7ff79 100755
--- a/tools/run_tests/port_server.py
+++ b/tools/run_tests/port_server.py
@@ -30,8 +30,10 @@
"""Manage TCP ports for unit tests; started by run_tests.py"""
+from __future__ import print_function
+
import argparse
-import BaseHTTPServer
+from six.moves import BaseHTTPServer
import hashlib
import os
import socket
@@ -42,11 +44,11 @@ import time
# increment this number whenever making a change to ensure that
# the changes are picked up by running CI servers
# note that all changes must be backwards compatible
-_MY_VERSION = 7
+_MY_VERSION = 9
if len(sys.argv) == 2 and sys.argv[1] == 'dump_version':
- print _MY_VERSION
+ print(_MY_VERSION)
sys.exit(0)
@@ -62,7 +64,7 @@ if args.logfile is not None:
sys.stderr = open(args.logfile, 'w')
sys.stdout = sys.stderr
-print 'port server running on port %d' % args.port
+print('port server running on port %d' % args.port)
pool = []
in_use = {}
@@ -70,7 +72,7 @@ in_use = {}
def refill_pool(max_timeout, req):
"""Scan for ports not marked for being in use"""
- for i in range(1025, 32767):
+ for i in range(1025, 32766):
if len(pool) > 100: break
if i in in_use:
age = time.time() - in_use[i]
@@ -110,6 +112,11 @@ keep_running = True
class Handler(BaseHTTPServer.BaseHTTPRequestHandler):
+
+ def setup(self):
+ # If the client is unreachable for 5 seconds, close the connection
+ self.timeout = 5
+ BaseHTTPServer.BaseHTTPRequestHandler.setup(self)
def do_GET(self):
global keep_running
@@ -147,7 +154,7 @@ class Handler(BaseHTTPServer.BaseHTTPRequestHandler):
self.send_header('Content-Type', 'text/plain')
self.end_headers()
now = time.time()
- self.wfile.write(yaml.dump({'pool': pool, 'in_use': dict((k, now - v) for k, v in in_use.iteritems())}))
+ self.wfile.write(yaml.dump({'pool': pool, 'in_use': dict((k, now - v) for k, v in in_use.items())}))
elif self.path == '/quitquitquit':
self.send_response(200)
self.end_headers()
@@ -159,4 +166,4 @@ while keep_running:
httpd.handle_request()
sys.stderr.flush()
-print 'done'
+print('done')
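
The version bump matters because run_tests.py polls /version_number to decide whether a running port server is stale and, if so, asks it to exit via /quitquitquit before starting a fresh one. A sketch of that client-side handshake, assuming the server listens on localhost:

    from six.moves import urllib

    def port_server_is_current(port, my_version):
        base = 'http://localhost:%d' % port
        try:
            running = int(urllib.request.urlopen(base + '/version_number',
                                                 timeout=10).read())
        except Exception:
            return False  # not running; the caller should start one
        if running < my_version:
            # Ask the stale server to exit so a fresh one can be started.
            urllib.request.urlopen(base + '/quitquitquit').read()
            return False
        return True
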
diff --git a/tools/run_tests/pre_build_csharp.bat b/tools/run_tests/pre_build_csharp.bat
index e7131d504c..580d5638fd 100644
--- a/tools/run_tests/pre_build_csharp.bat
+++ b/tools/run_tests/pre_build_csharp.bat
@@ -38,8 +38,61 @@ cd /d %~dp0\..\..
set NUGET=C:\nuget\nuget.exe
if exist %NUGET% (
+    @rem Restore gRPC packages project by project, since NuGet client 3.4.4 doesn't
+    @rem support restoring by solution.
+    @rem Move into each directory so the restores work with both NuGet 3.4 and 2.8.
%NUGET% restore vsprojects/grpc_csharp_ext.sln || goto :error
- %NUGET% restore src/csharp/Grpc.sln || goto :error
+
+ cd src/csharp
+
+ cd Grpc.Auth || goto :error
+ %NUGET% restore -PackagesDirectory ../packages || goto :error
+ cd ..
+
+ cd Grpc.Core || goto :error
+ %NUGET% restore -PackagesDirectory ../packages || goto :error
+ cd ..
+
+ cd Grpc.Core.Tests || goto :error
+ %NUGET% restore -PackagesDirectory ../packages || goto :error
+ cd ..
+
+ cd Grpc.Examples.MathClient || goto :error
+ %NUGET% restore -PackagesDirectory ../packages || goto :error
+ cd ..
+
+ cd Grpc.Examples.MathServer || goto :error
+ %NUGET% restore -PackagesDirectory ../packages || goto :error
+ cd ..
+
+ cd Grpc.Examples || goto :error
+ %NUGET% restore -PackagesDirectory ../packages || goto :error
+ cd ..
+
+ cd Grpc.HealthCheck.Tests || goto :error
+ %NUGET% restore -PackagesDirectory ../packages || goto :error
+ cd ..
+
+ cd Grpc.HealthCheck || goto :error
+ %NUGET% restore -PackagesDirectory ../packages || goto :error
+ cd ..
+
+ cd Grpc.IntegrationTesting.Client || goto :error
+ %NUGET% restore -PackagesDirectory ../packages || goto :error
+ cd ..
+
+ cd Grpc.IntegrationTesting.QpsWorker || goto :error
+ %NUGET% restore -PackagesDirectory ../packages || goto :error
+ cd ..
+
+ cd Grpc.IntegrationTesting.StressClient || goto :error
+ %NUGET% restore -PackagesDirectory ../packages || goto :error
+ cd ..
+
+ cd Grpc.IntegrationTesting || goto :error
+ %NUGET% restore -PackagesDirectory ../packages || goto :error
+
+ cd /d %~dp0\..\.. || goto :error
)
endlocal
diff --git a/tools/run_tests/pre_build_csharp.sh b/tools/run_tests/pre_build_csharp.sh
index 3ff1a4e5a8..0fd3b92a95 100755
--- a/tools/run_tests/pre_build_csharp.sh
+++ b/tools/run_tests/pre_build_csharp.sh
@@ -37,5 +37,54 @@ root=`pwd`
if [ -x "$(command -v nuget)" ]
then
- nuget restore Grpc.sln
+  # Restore NuGet packages project by project rather than by solution, because
+  # NuGet client 3.4.4 cannot restore by solution.
+  # Move into each directory so the restores work with both NuGet 3.4 and 2.8.
+ cd Grpc.Auth
+ nuget restore -PackagesDirectory ../packages
+ cd ..
+
+ cd Grpc.Core.Tests
+ nuget restore -PackagesDirectory ../packages
+ cd ..
+
+ cd Grpc.Core
+ nuget restore -PackagesDirectory ../packages
+ cd ..
+
+ cd Grpc.Examples.MathClient
+ nuget restore -PackagesDirectory ../packages
+ cd ..
+
+ cd Grpc.Examples.MathServer
+ nuget restore -PackagesDirectory ../packages
+ cd ..
+
+ cd Grpc.Examples
+ nuget restore -PackagesDirectory ../packages
+ cd ..
+
+ cd Grpc.HealthCheck.Tests
+ nuget restore -PackagesDirectory ../packages
+ cd ..
+
+ cd Grpc.HealthCheck
+ nuget restore -PackagesDirectory ../packages
+ cd ..
+
+ cd Grpc.IntegrationTesting.Client
+ nuget restore -PackagesDirectory ../packages
+ cd ..
+
+ cd Grpc.IntegrationTesting.QpsWorker
+ nuget restore -PackagesDirectory ../packages
+ cd ..
+
+ cd Grpc.IntegrationTesting.StressClient
+ nuget restore -PackagesDirectory ../packages
+ cd ..
+
+ cd Grpc.IntegrationTesting
+ nuget restore -PackagesDirectory ../packages
+ cd ..
fi
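
Both restore scripts unroll the same per-project restore because NuGet 3.4.4 cannot restore a whole solution. The repetition could equally be driven from a list; a hypothetical Python sketch over the same project names:

    import subprocess

    PROJECTS = [
        'Grpc.Auth', 'Grpc.Core', 'Grpc.Core.Tests',
        'Grpc.Examples.MathClient', 'Grpc.Examples.MathServer', 'Grpc.Examples',
        'Grpc.HealthCheck.Tests', 'Grpc.HealthCheck',
        'Grpc.IntegrationTesting.Client', 'Grpc.IntegrationTesting.QpsWorker',
        'Grpc.IntegrationTesting.StressClient', 'Grpc.IntegrationTesting',
    ]

    for project in PROJECTS:
        # Restore from inside each project directory so the command works
        # with both NuGet 3.4 and 2.8, mirroring the shell scripts above.
        subprocess.check_call(
            ['nuget', 'restore', '-PackagesDirectory', '../packages'],
            cwd='src/csharp/%s' % project)
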
diff --git a/tools/run_tests/report_utils.py b/tools/run_tests/report_utils.py
index 35dcaca3d3..5648a694cd 100644
--- a/tools/run_tests/report_utils.py
+++ b/tools/run_tests/report_utils.py
@@ -29,6 +29,8 @@
"""Generate XML and HTML test reports."""
+from __future__ import print_function
+
try:
from mako.runtime import Context
from mako.template import Template
@@ -60,7 +62,7 @@ def render_junit_xml_report(resultset, xml_report):
root = ET.Element('testsuites')
testsuite = ET.SubElement(root, 'testsuite', id='1', package='grpc',
name='tests')
- for shortname, results in resultset.iteritems():
+ for shortname, results in resultset.items():
for result in results:
xml_test = ET.SubElement(testsuite, 'testcase', name=shortname)
if result.elapsed_time:
@@ -83,10 +85,10 @@ def render_interop_html_report(
try:
mytemplate = Template(filename=template_file, format_exceptions=True)
except NameError:
- print 'Mako template is not installed. Skipping HTML report generation.'
+ print('Mako template is not installed. Skipping HTML report generation.')
return
except IOError as e:
- print 'Failed to find the template %s: %s' % (template_file, e)
+ print('Failed to find the template %s: %s' % (template_file, e))
return
sorted_test_cases = sorted(test_cases)
diff --git a/tools/run_tests/run_interop_tests.py b/tools/run_tests/run_interop_tests.py
index adf9adaf13..78096b216c 100755
--- a/tools/run_tests/run_interop_tests.py
+++ b/tools/run_tests/run_interop_tests.py
@@ -30,6 +30,8 @@
"""Run interop (cross-language) tests in parallel."""
+from __future__ import print_function
+
import argparse
import atexit
import dockerjob
@@ -266,6 +268,31 @@ class PHPLanguage:
return 'php'
+class PHP7Language:
+
+ def __init__(self):
+ self.client_cwd = None
+ self.safename = str(self)
+
+ def client_cmd(self, args):
+ return ['src/php/bin/interop_client.sh'] + args
+
+ def cloud_to_prod_env(self):
+ return {}
+
+ def global_env(self):
+ return {}
+
+ def unimplemented_test_cases(self):
+ return _SKIP_COMPRESSION
+
+ def unimplemented_test_cases_server(self):
+ return []
+
+ def __str__(self):
+ return 'php7'
+
+
class RubyLanguage:
def __init__(self):
@@ -286,7 +313,7 @@ class RubyLanguage:
return {}
def unimplemented_test_cases(self):
- return _SKIP_ADVANCED + _SKIP_COMPRESSION
+ return _SKIP_ADVANCED + _SKIP_SERVER_COMPRESSION
def unimplemented_test_cases_server(self):
return _SKIP_ADVANCED + _SKIP_COMPRESSION
@@ -304,8 +331,11 @@ class PythonLanguage:
def client_cmd(self, args):
return [
- 'tox -einterop_client --',
- ' '.join(args)
+ 'py27/bin/python',
+ 'src/python/grpcio_tests/setup.py',
+ 'run_interop',
+ '--client',
+ '--args="{}"'.format(' '.join(args))
]
def cloud_to_prod_env(self):
@@ -313,8 +343,11 @@ class PythonLanguage:
def server_cmd(self, args):
return [
- 'tox -einterop_server --',
- ' '.join(args) + ' --use_tls=true'
+ 'py27/bin/python',
+ 'src/python/grpcio_tests/setup.py',
+ 'run_interop',
+ '--server',
+ '--args="{}"'.format(' '.join(args) + ' --use_tls=true')
]
def global_env(self):
@@ -338,6 +371,7 @@ _LANGUAGES = {
'java' : JavaLanguage(),
'node' : NodeLanguage(),
'php' : PHPLanguage(),
+ 'php7' : PHP7Language(),
'ruby' : RubyLanguage(),
'python' : PythonLanguage(),
}
@@ -366,7 +400,7 @@ def docker_run_cmdline(cmdline, image, docker_args=[], cwd=None, environ=None):
# turn environ into -e docker args
if environ:
- for k,v in environ.iteritems():
+ for k,v in environ.items():
docker_cmdline += ['-e', '%s=%s' % (k,v)]
# set working directory
@@ -401,7 +435,7 @@ def auth_options(language, test_case):
default_account_arg = '--default_service_account=830293263384-compute@developer.gserviceaccount.com'
if test_case in ['jwt_token_creds', 'per_rpc_creds', 'oauth2_auth_token']:
- if language in ['csharp', 'node', 'php', 'python', 'ruby']:
+ if language in ['csharp', 'node', 'php', 'php7', 'python', 'ruby']:
env['GOOGLE_APPLICATION_CREDENTIALS'] = key_filepath
else:
cmdargs += [key_file_arg]
@@ -668,15 +702,15 @@ servers = set(s for s in itertools.chain.from_iterable(_SERVERS
if args.use_docker:
if not args.travis:
- print 'Seen --use_docker flag, will run interop tests under docker.'
- print
- print 'IMPORTANT: The changes you are testing need to be locally committed'
- print 'because only the committed changes in the current branch will be'
- print 'copied to the docker environment.'
+ print('Seen --use_docker flag, will run interop tests under docker.')
+ print('')
+ print('IMPORTANT: The changes you are testing need to be locally committed')
+ print('because only the committed changes in the current branch will be')
+ print('copied to the docker environment.')
time.sleep(5)
if not args.use_docker and servers:
- print 'Running interop servers is only supported with --use_docker option enabled.'
+ print('Running interop servers is only supported with --use_docker option enabled.')
sys.exit(1)
languages = set(_LANGUAGES[l]
@@ -762,7 +796,7 @@ try:
(server_host, server_port) = server[1].split(':')
server_addresses[server_name] = (server_host, server_port)
- for server_name, server_address in server_addresses.iteritems():
+ for server_name, server_address in server_addresses.items():
(server_host, server_port) = server_address
server_language = _LANGUAGES.get(server_name, None)
skip_server = [] # test cases unimplemented by server
@@ -794,7 +828,7 @@ try:
jobs.append(test_job)
if not jobs:
- print 'No jobs to run.'
+ print('No jobs to run.')
for image in docker_images.itervalues():
dockerjob.remove_image(image, skip_nonexistent=True)
sys.exit(1)
@@ -808,7 +842,7 @@ try:
report_utils.render_junit_xml_report(resultset, 'report.xml')
- for name, job in resultset.iteritems():
+ for name, job in resultset.items():
if "http2" in name:
job[0].http2results = aggregate_http2_results(job[0].message)
@@ -820,12 +854,12 @@ try:
finally:
# Check if servers are still running.
- for server, job in server_jobs.iteritems():
+ for server, job in server_jobs.items():
if not job.is_running():
- print 'Server "%s" has exited prematurely.' % server
+ print('Server "%s" has exited prematurely.' % server)
dockerjob.finish_jobs([j for j in server_jobs.itervalues()])
for image in docker_images.itervalues():
- print 'Removing docker image %s' % image
+ print('Removing docker image %s' % image)
dockerjob.remove_image(image)
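
The conversion in this file stops at print and iteritems(): the docker_images.itervalues() and server_jobs.itervalues() calls left in the surrounding context are still Python-2-only and would need values() (or six.itervalues) to run under Python 3. For example:

    docker_images = {'go': 'grpc_interop_go:abc123'}

    # Python 2 only: for image in docker_images.itervalues(): ...
    # Portable across Python 2 and 3:
    for image in docker_images.values():
        print('Removing docker image %s' % image)
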
diff --git a/tools/run_tests/run_performance_tests.py b/tools/run_tests/run_performance_tests.py
index 14901caf07..5fdf7a407d 100755
--- a/tools/run_tests/run_performance_tests.py
+++ b/tools/run_tests/run_performance_tests.py
@@ -30,6 +30,8 @@
"""Run performance tests locally or remotely."""
+from __future__ import print_function
+
import argparse
import itertools
import jobset
@@ -310,7 +312,7 @@ def create_scenarios(languages, workers_by_lang, remote_host=None, regex='.*',
'in the same scenario')
if custom_server_lang:
if not workers_by_lang.get(custom_server_lang, []):
- print 'Warning: Skipping scenario %s as' % scenario_json['name']
+ print('Warning: Skipping scenario %s as' % scenario_json['name'])
print('SERVER_LANGUAGE is set to %s yet the language has '
'not been selected with -l' % custom_server_lang)
continue
@@ -319,7 +321,7 @@ def create_scenarios(languages, workers_by_lang, remote_host=None, regex='.*',
workers[idx] = workers_by_lang[custom_server_lang][idx]
if custom_client_lang:
if not workers_by_lang.get(custom_client_lang, []):
- print 'Warning: Skipping scenario %s as' % scenario_json['name']
+ print('Warning: Skipping scenario %s as' % scenario_json['name'])
print('CLIENT_LANGUAGE is set to %s yet the language has '
'not been selected with -l' % custom_client_lang)
continue
@@ -344,14 +346,14 @@ def finish_qps_workers(jobs):
while any(job.is_running() for job in jobs):
for job in qpsworker_jobs:
if job.is_running():
- print 'QPS worker "%s" is still running.' % job.host_and_port
+ print('QPS worker "%s" is still running.' % job.host_and_port)
if retries > 10:
- print 'Killing all QPS workers.'
+ print('Killing all QPS workers.')
for job in jobs:
job.kill()
retries += 1
time.sleep(3)
- print 'All QPS workers finished.'
+ print('All QPS workers finished.')
argp = argparse.ArgumentParser(description='Run performance tests.')
diff --git a/tools/run_tests/run_python.sh b/tools/run_tests/run_python.sh
index 8059059d41..17e0186f2a 100755
--- a/tools/run_tests/run_python.sh
+++ b/tools/run_tests/run_python.sh
@@ -33,24 +33,11 @@ set -ex
# change to grpc repo root
cd $(dirname $0)/../..
-TOX_PYTHON_ENV="$1"
+PYTHON=`realpath -s "${1:-py27/bin/python}"`
ROOT=`pwd`
-export LD_LIBRARY_PATH=$ROOT/libs/$CONFIG
-export DYLD_LIBRARY_PATH=$ROOT/libs/$CONFIG
-export PATH=$ROOT/bins/$CONFIG:$ROOT/bins/$CONFIG/protobuf:$PATH
-export CFLAGS="-I$ROOT/include -std=c89"
-export LDFLAGS="-L$ROOT/libs/$CONFIG"
-export GRPC_PYTHON_BUILD_WITH_CYTHON=1
-export GRPC_PYTHON_USE_PRECOMPILED_BINARIES=0
-if [ "$CONFIG" = "gcov" ]
-then
- export GRPC_PYTHON_ENABLE_CYTHON_TRACING=1
- tox -e ${TOX_PYTHON_ENV}
-else
- $ROOT/.tox/${TOX_PYTHON_ENV}/bin/python $ROOT/setup.py test_lite
-fi
+$PYTHON $ROOT/src/python/grpcio_tests/setup.py test_lite
mkdir -p $ROOT/reports
rm -rf $ROOT/reports/python-coverage
diff --git a/tools/run_tests/run_stress_tests.py b/tools/run_tests/run_stress_tests.py
index e42ee24ffb..de4a22877c 100755
--- a/tools/run_tests/run_stress_tests.py
+++ b/tools/run_tests/run_stress_tests.py
@@ -29,6 +29,8 @@
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Run stress test in C++"""
+from __future__ import print_function
+
import argparse
import atexit
import dockerjob
@@ -93,7 +95,7 @@ def docker_run_cmdline(cmdline, image, docker_args=[], cwd=None, environ=None):
# turn environ into -e docker args
if environ:
- for k, v in environ.iteritems():
+ for k, v in environ.items():
docker_cmdline += ['-e', '%s=%s' % (k, v)]
# set working directory
@@ -140,7 +142,7 @@ def cloud_to_cloud_jobspec(language,
'--num_channels_per_server=%s' % num_channels_per_server,
'--metrics_port=%s' % metrics_port
]))
- print cmdline
+ print(cmdline)
cwd = language.client_cwd
environ = language.global_env()
if docker_image:
@@ -287,7 +289,7 @@ try:
(server_host, server_port) = server[1].split(':')
server_addresses[server_name] = (server_host, server_port)
- for server_name, server_address in server_addresses.iteritems():
+ for server_name, server_address in server_addresses.items():
(server_host, server_port) = server_address
for language in languages:
test_job = cloud_to_cloud_jobspec(
@@ -302,7 +304,7 @@ try:
jobs.append(test_job)
if not jobs:
- print 'No jobs to run.'
+ print('No jobs to run.')
for image in docker_images.itervalues():
dockerjob.remove_image(image, skip_nonexistent=True)
sys.exit(1)
@@ -317,12 +319,12 @@ try:
finally:
# Check if servers are still running.
- for server, job in server_jobs.iteritems():
+ for server, job in server_jobs.items():
if not job.is_running():
- print 'Server "%s" has exited prematurely.' % server
+ print('Server "%s" has exited prematurely.' % server)
dockerjob.finish_jobs([j for j in server_jobs.itervalues()])
for image in docker_images.itervalues():
- print 'Removing docker image %s' % image
+ print('Removing docker image %s' % image)
dockerjob.remove_image(image)
diff --git a/tools/run_tests/run_tests.py b/tools/run_tests/run_tests.py
index c1254275e6..542415d908 100755
--- a/tools/run_tests/run_tests.py
+++ b/tools/run_tests/run_tests.py
@@ -30,13 +30,18 @@
"""Run tests in parallel."""
+from __future__ import print_function
+
import argparse
import ast
+import collections
import glob
import itertools
import json
import multiprocessing
import os
+import os.path
+import pipes
import platform
import random
import re
@@ -46,7 +51,7 @@ import sys
import tempfile
import traceback
import time
-import urllib2
+from six.moves import urllib
import uuid
import jobset
@@ -58,7 +63,9 @@ _ROOT = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), '../..'))
os.chdir(_ROOT)
-_FORCE_ENVIRON_FOR_WRAPPERS = {}
+_FORCE_ENVIRON_FOR_WRAPPERS = {
+ 'GRPC_VERBOSITY': 'DEBUG',
+}
_POLLING_STRATEGIES = {
@@ -70,6 +77,9 @@ def platform_string():
return jobset.platform_string()
+_DEFAULT_TIMEOUT_SECONDS = 5 * 60
+
+
# SimpleConfig: just compile with CONFIG=config, and run the binary to test
class Config(object):
@@ -82,7 +92,7 @@ class Config(object):
self.tool_prefix = tool_prefix
self.timeout_multiplier = timeout_multiplier
- def job_spec(self, cmdline, timeout_seconds=5*60,
+ def job_spec(self, cmdline, timeout_seconds=_DEFAULT_TIMEOUT_SECONDS,
shortname=None, environ={}, cpu_cost=1.0, flaky=False):
"""Construct a jobset.JobSpec for a test under this config
@@ -91,7 +101,7 @@ class Config(object):
would like to run
"""
actual_environ = self.environ.copy()
- for k, v in environ.iteritems():
+ for k, v in environ.items():
actual_environ[k] = v
return jobset.JobSpec(cmdline=self.tool_prefix + cmdline,
shortname=shortname,
@@ -156,8 +166,9 @@ class CLanguage(object):
for polling_strategy in polling_strategies:
env={'GRPC_DEFAULT_SSL_ROOTS_FILE_PATH':
_ROOT + '/src/core/lib/tsi/test_creds/ca.pem',
- 'GRPC_POLL_STRATEGY': polling_strategy}
- shortname_ext = '' if polling_strategy=='all' else ' polling=%s' % polling_strategy
+ 'GRPC_POLL_STRATEGY': polling_strategy,
+ 'GRPC_VERBOSITY': 'DEBUG'}
+ shortname_ext = '' if polling_strategy=='all' else ' GRPC_POLL_STRATEGY=%s' % polling_strategy
if self.config.build_config in target['exclude_configs']:
continue
if self.platform == 'windows':
@@ -188,28 +199,26 @@ class CLanguage(object):
assert line[1] == ' '
test = base + line.strip()
cmdline = [binary] + ['--gtest_filter=%s' % test]
- out.append(self.config.job_spec(cmdline, [binary],
- shortname='%s:%s %s' % (binary, test, shortname_ext),
+ out.append(self.config.job_spec(cmdline,
+ shortname='%s --gtest_filter=%s %s' % (binary, test, shortname_ext),
cpu_cost=target['cpu_cost'],
environ=env))
else:
cmdline = [binary] + target['args']
- out.append(self.config.job_spec(cmdline, [binary],
- shortname=' '.join(cmdline) + shortname_ext,
+ out.append(self.config.job_spec(cmdline,
+ shortname=' '.join(
+ pipes.quote(arg)
+ for arg in cmdline) +
+ shortname_ext,
cpu_cost=target['cpu_cost'],
flaky=target.get('flaky', False),
+ timeout_seconds=target.get('timeout_seconds', _DEFAULT_TIMEOUT_SECONDS),
environ=env))
elif self.args.regex == '.*' or self.platform == 'windows':
- print '\nWARNING: binary not found, skipping', binary
+ print('\nWARNING: binary not found, skipping', binary)
return sorted(out)
def make_targets(self):
- test_regex = self.args.regex
- if self.platform != 'windows' and self.args.regex != '.*':
- # use the regex to minimize the number of things to build
- return [os.path.basename(target['name'])
- for target in get_c_tests(False, self.test_lang)
- if re.search(test_regex, '/' + target['name'])]
if self.platform == 'windows':
# don't build tools on windows just yet
return ['buildtests_%s' % self.make_target]
@@ -372,50 +381,78 @@ class PhpLanguage(object):
return 'php'
+class Php7Language(object):
+
+ def configure(self, config, args):
+ self.config = config
+ self.args = args
+ _check_compiler(self.args.compiler, ['default'])
+
+ def test_specs(self):
+ return [self.config.job_spec(['src/php/bin/run_tests.sh'], None,
+ environ=_FORCE_ENVIRON_FOR_WRAPPERS)]
+
+ def pre_build_steps(self):
+ return []
+
+ def make_targets(self):
+ return ['static_c', 'shared_c']
+
+ def make_options(self):
+ return []
+
+ def build_steps(self):
+ return [['tools/run_tests/build_php.sh']]
+
+ def post_tests_steps(self):
+ return [['tools/run_tests/post_tests_php.sh']]
+
+ def makefile_name(self):
+ return 'Makefile'
+
+ def dockerfile_dir(self):
+ return 'tools/dockerfile/test/php7_jessie_%s' % _docker_arch_suffix(self.args.arch)
+
+ def __str__(self):
+ return 'php7'
+
+
+class PythonConfig(collections.namedtuple('PythonConfig', [
+ 'name', 'build', 'run'])):
+ """Tuple of commands (named s.t. 'what it says on the tin' applies)"""
+
class PythonLanguage(object):
def configure(self, config, args):
self.config = config
self.args = args
- self._tox_envs = self._get_tox_envs(self.args.compiler)
+ self.pythons = self._get_pythons(self.args)
def test_specs(self):
# load list of known test suites
- with open('src/python/grpcio/tests/tests.json') as tests_json_file:
+ with open('src/python/grpcio_tests/tests/tests.json') as tests_json_file:
tests_json = json.load(tests_json_file)
environment = dict(_FORCE_ENVIRON_FOR_WRAPPERS)
- environment['PYTHONPATH'] = '{}:{}'.format(
- os.path.abspath('src/python/gens'),
- os.path.abspath('src/python/grpcio_health_checking'))
- if self.config.build_config != 'gcov':
- return [self.config.job_spec(
- ['tools/run_tests/run_python.sh', tox_env],
- environ=dict(environment.items() +
- [('GRPC_PYTHON_TESTRUNNER_FILTER', suite_name)]),
- shortname='%s.test.%s' % (tox_env, suite_name),
- timeout_seconds=5*60)
- for suite_name in tests_json
- for tox_env in self._tox_envs]
- else:
- return [self.config.job_spec(['tools/run_tests/run_python.sh', tox_env],
- environ=environment,
- shortname='%s.test.coverage' % tox_env,
- timeout_seconds=15*60)
- for tox_env in self._tox_envs]
-
+ return [self.config.job_spec(
+ config.run,
+ timeout_seconds=5*60,
+ environ=dict(list(environment.items()) +
+ [('GRPC_PYTHON_TESTRUNNER_FILTER', suite_name)]),
+ shortname='%s.test.%s' % (config.name, suite_name),)
+ for suite_name in tests_json
+ for config in self.pythons]
def pre_build_steps(self):
return []
def make_targets(self):
- return ['static_c', 'grpc_python_plugin', 'shared_c']
+ return []
def make_options(self):
return []
def build_steps(self):
- return [['tools/run_tests/build_python.sh', tox_env]
- for tox_env in self._tox_envs]
+ return [config.build for config in self.pythons]
def post_tests_steps(self):
return []
@@ -424,18 +461,61 @@ class PythonLanguage(object):
return 'Makefile'
def dockerfile_dir(self):
- return 'tools/dockerfile/test/python_jessie_%s' % _docker_arch_suffix(self.args.arch)
-
- def _get_tox_envs(self, compiler):
- """Returns name of tox environment based on selected compiler."""
- if compiler == 'default':
- return ('py27', 'py34')
- elif compiler == 'python2.7':
- return ('py27',)
- elif compiler == 'python3.4':
- return ('py34',)
+ return 'tools/dockerfile/test/python_%s_%s' % (self.python_manager_name(), _docker_arch_suffix(self.args.arch))
+
+ def python_manager_name(self):
+ return 'pyenv' if self.args.compiler in ['python3.5', 'python3.6'] else 'jessie'
+
+ def _get_pythons(self, args):
+ if args.arch == 'x86':
+ bits = '32'
else:
- raise Exception('Compiler %s not supported.' % compiler)
+ bits = '64'
+ if os.name == 'nt':
+ shell = ['bash']
+ builder = [os.path.abspath('tools/run_tests/build_python_msys2.sh')]
+ builder_prefix_arguments = ['MINGW{}'.format(bits)]
+ venv_relative_python = ['Scripts/python.exe']
+ toolchain = ['mingw32']
+ python_pattern_function = lambda major, minor, bits: (
+ '/c/Python{major}{minor}/python.exe'.format(major=major, minor=minor, bits=bits)
+ if bits == '64' else
+ '/c/Python{major}{minor}_{bits}bits/python.exe'.format(
+ major=major, minor=minor, bits=bits))
+ else:
+ shell = []
+ builder = [os.path.abspath('tools/run_tests/build_python.sh')]
+ builder_prefix_arguments = []
+ venv_relative_python = ['bin/python']
+ toolchain = ['unix']
+ # Bit-ness is handled by the test machine's environment
+ python_pattern_function = lambda major, minor, bits: 'python{major}.{minor}'.format(major=major, minor=minor)
+ runner = [os.path.abspath('tools/run_tests/run_python.sh')]
+ python_config_generator = lambda name, major, minor, bits: PythonConfig(
+ name,
+ shell + builder + builder_prefix_arguments
+ + [python_pattern_function(major=major, minor=minor, bits=bits)]
+ + [name] + venv_relative_python + toolchain,
+ shell + runner + [os.path.join(name, venv_relative_python[0])])
+ python27_config = python_config_generator(name='py27', major='2', minor='7', bits=bits)
+ python34_config = python_config_generator(name='py34', major='3', minor='4', bits=bits)
+ python35_config = python_config_generator(name='py35', major='3', minor='5', bits=bits)
+ python36_config = python_config_generator(name='py36', major='3', minor='6', bits=bits)
+ if args.compiler == 'default':
+ if os.name == 'nt':
+ return (python27_config,)
+ else:
+ return (python27_config, python34_config,)
+ elif args.compiler == 'python2.7':
+ return (python27_config,)
+ elif args.compiler == 'python3.4':
+ return (python34_config,)
+ elif args.compiler == 'python3.5':
+ return (python35_config,)
+ elif args.compiler == 'python3.6':
+ return (python36_config,)
+ else:
+ raise Exception('Compiler %s not supported.' % args.compiler)
def __str__(self):
return 'python'
@@ -621,17 +701,20 @@ class ObjCLanguage(object):
_check_compiler(self.args.compiler, ['default'])
def test_specs(self):
- return [self.config.job_spec(['src/objective-c/tests/run_tests.sh'], None,
- environ=_FORCE_ENVIRON_FOR_WRAPPERS),
+ return [self.config.job_spec(['src/objective-c/tests/run_tests.sh'],
+ timeout_seconds=None,
+ shortname='objc-tests',
+ environ=_FORCE_ENVIRON_FOR_WRAPPERS),
self.config.job_spec(['src/objective-c/tests/build_example_test.sh'],
- None, timeout_seconds=15*60,
+ timeout_seconds=15*60,
+ shortname='objc-examples-build',
environ=_FORCE_ENVIRON_FOR_WRAPPERS)]
def pre_build_steps(self):
return []
def make_targets(self):
- return ['grpc_objective_c_plugin', 'interop_server']
+ return ['interop_server']
def make_options(self):
return []
@@ -702,6 +785,7 @@ _LANGUAGES = {
'c': CLanguage('c', 'c'),
'node': NodeLanguage(),
'php': PhpLanguage(),
+ 'php7': Php7Language(),
'python': PythonLanguage(),
'ruby': RubyLanguage(),
'csharp': CSharpLanguage(),
@@ -724,7 +808,7 @@ def _windows_arch_option(arch):
elif arch == 'x64':
return '/p:Platform=x64'
else:
- print 'Architecture %s not supported.' % arch
+ print('Architecture %s not supported.' % arch)
sys.exit(1)
@@ -742,11 +826,11 @@ def _check_arch_option(arch):
elif runtime_arch == '32bit' and arch == 'x86':
return
else:
- print 'Architecture %s does not match current runtime architecture.' % arch
+ print('Architecture %s does not match current runtime architecture.' % arch)
sys.exit(1)
else:
if args.arch != 'default':
- print 'Architecture %s not supported on current platform.' % args.arch
+ print('Architecture %s not supported on current platform.' % args.arch)
sys.exit(1)
@@ -760,7 +844,7 @@ def _windows_build_bat(compiler):
elif compiler == 'vs2010':
return 'vsprojects\\build_vs2010.bat'
else:
- print 'Compiler %s not supported.' % compiler
+ print('Compiler %s not supported.' % compiler)
sys.exit(1)
@@ -774,7 +858,7 @@ def _windows_toolset_option(compiler):
elif compiler == 'vs2010':
return '/p:PlatformToolset=v100'
else:
- print 'Compiler %s not supported.' % compiler
+ print('Compiler %s not supported.' % compiler)
sys.exit(1)
@@ -785,7 +869,7 @@ def _docker_arch_suffix(arch):
elif arch == 'x86':
return 'x86'
else:
- print 'Architecture %s not supported with current settings.' % arch
+ print('Architecture %s not supported with current settings.' % arch)
sys.exit(1)
@@ -862,7 +946,7 @@ argp.add_argument('--compiler',
'gcc4.4', 'gcc4.6', 'gcc4.9', 'gcc5.3',
'clang3.4', 'clang3.5', 'clang3.6', 'clang3.7',
'vs2010', 'vs2013', 'vs2015',
- 'python2.7', 'python3.4',
+ 'python2.7', 'python3.4', 'python3.5', 'python3.6',
'node0.12', 'node4', 'node5',
'coreclr'],
default='default',
@@ -901,7 +985,7 @@ for spec in args.update_submodules:
branch = spec[1]
cwd = 'third_party/%s' % submodule
def git(cmd, cwd=cwd):
- print 'in %s: git %s' % (cwd, cmd)
+ print('in %s: git %s' % (cwd, cmd))
subprocess.check_call('git %s' % cmd, cwd=cwd, shell=True)
git('fetch')
git('checkout %s' % branch)
@@ -912,8 +996,8 @@ if need_to_regenerate_projects:
if jobset.platform_string() == 'linux':
subprocess.check_call('tools/buildgen/generate_projects.sh', shell=True)
else:
- print 'WARNING: may need to regenerate projects, but since we are not on'
- print ' Linux this step is being skipped. Compilation MAY fail.'
+ print('WARNING: may need to regenerate projects, but since we are not on')
+ print(' Linux this step is being skipped. Compilation MAY fail.')
# grab config
@@ -940,18 +1024,18 @@ for l in languages:
language_make_options=[]
if any(language.make_options() for language in languages):
if not 'gcov' in args.config and len(languages) != 1:
- print 'languages with custom make options cannot be built simultaneously with other languages'
+ print('languages with custom make options cannot be built simultaneously with other languages')
sys.exit(1)
else:
language_make_options = next(iter(languages)).make_options()
if args.use_docker:
if not args.travis:
- print 'Seen --use_docker flag, will run tests under docker.'
- print
- print 'IMPORTANT: The changes you are testing need to be locally committed'
- print 'because only the committed changes in the current branch will be'
- print 'copied to the docker environment.'
+ print('Seen --use_docker flag, will run tests under docker.')
+ print('')
+ print('IMPORTANT: The changes you are testing need to be locally committed')
+ print('because only the committed changes in the current branch will be')
+ print('copied to the docker environment.')
time.sleep(5)
dockerfile_dirs = set([l.dockerfile_dir() for l in languages])
@@ -1035,7 +1119,7 @@ build_steps = list(set(
for l in languages
for cmdline in l.pre_build_steps()))
if make_targets:
- make_commands = itertools.chain.from_iterable(make_jobspec(build_config, list(targets), makefile) for (makefile, targets) in make_targets.iteritems())
+ make_commands = itertools.chain.from_iterable(make_jobspec(build_config, list(targets), makefile) for (makefile, targets) in make_targets.items())
build_steps.extend(set(make_commands))
build_steps.extend(set(
jobset.JobSpec(cmdline, environ=build_step_environ(build_config), timeout_seconds=None)
@@ -1050,35 +1134,59 @@ runs_per_test = args.runs_per_test
forever = args.forever
+def _shut_down_legacy_server(legacy_server_port):
+ try:
+ version = int(urllib.request.urlopen(
+ 'http://localhost:%d/version_number' % legacy_server_port,
+ timeout=10).read())
+ except:
+ pass
+ else:
+ urllib.request.urlopen(
+ 'http://localhost:%d/quitquitquit' % legacy_server_port).read()
+
+
def _start_port_server(port_server_port):
# check if a compatible port server is running
# if incompatible (version mismatch) ==> start a new one
# if not running ==> start a new one
# otherwise, leave it up
try:
- version = int(urllib2.urlopen(
+ version = int(urllib.request.urlopen(
'http://localhost:%d/version_number' % port_server_port,
timeout=10).read())
- print 'detected port server running version %d' % version
+ print('detected port server running version %d' % version)
running = True
except Exception as e:
- print 'failed to detect port server: %s' % sys.exc_info()[0]
- print e.strerror
+ print('failed to detect port server: %s' % sys.exc_info()[0])
+ print(e.strerror)
running = False
if running:
current_version = int(subprocess.check_output(
[sys.executable, os.path.abspath('tools/run_tests/port_server.py'),
'dump_version']))
- print 'my port server is version %d' % current_version
+ print('my port server is version %d' % current_version)
running = (version >= current_version)
if not running:
- print 'port_server version mismatch: killing the old one'
- urllib2.urlopen('http://localhost:%d/quitquitquit' % port_server_port).read()
+ print('port_server version mismatch: killing the old one')
+ urllib.request.urlopen('http://localhost:%d/quitquitquit' % port_server_port).read()
time.sleep(1)
if not running:
fd, logfile = tempfile.mkstemp()
os.close(fd)
- print 'starting port_server, with log file %s' % logfile
+ print('starting port_server, with log file %s' % logfile)
args = [sys.executable, os.path.abspath('tools/run_tests/port_server.py'),
'-p', '%d' % port_server_port, '-l', logfile]
env = dict(os.environ)
@@ -1104,34 +1212,34 @@ def _start_port_server(port_server_port):
waits = 0
while True:
if waits > 10:
- print 'killing port server due to excessive start up waits'
+ print('killing port server due to excessive start up waits')
port_server.kill()
if port_server.poll() is not None:
- print 'port_server failed to start'
+ print('port_server failed to start')
# try one final time: maybe another build managed to start one
time.sleep(1)
try:
- urllib2.urlopen('http://localhost:%d/get' % port_server_port,
+ urllib.request.urlopen('http://localhost:%d/get' % port_server_port,
timeout=1).read()
- print 'last ditch attempt to contact port server succeeded'
+ print('last ditch attempt to contact port server succeeded')
break
except:
traceback.print_exc()
port_log = open(logfile, 'r').read()
- print port_log
+ print(port_log)
sys.exit(1)
try:
- urllib2.urlopen('http://localhost:%d/get' % port_server_port,
+ urllib.request.urlopen('http://localhost:%d/get' % port_server_port,
timeout=1).read()
- print 'port server is up and ready'
+ print('port server is up and ready')
break
except socket.timeout:
- print 'waiting for port_server: timeout'
+ print('waiting for port_server: timeout')
traceback.print_exc();
time.sleep(1)
waits += 1
- except urllib2.URLError:
- print 'waiting for port_server: urlerror'
+ except urllib.error.URLError:
+ print('waiting for port_server: urlerror')
traceback.print_exc();
time.sleep(1)
waits += 1
@@ -1186,7 +1294,7 @@ def _build_and_run(
# start antagonists
antagonists = [subprocess.Popen(['tools/run_tests/antagonist.py'])
for _ in range(0, args.antagonists)]
- port_server_port = 32767
+ port_server_port = 32766
_start_port_server(port_server_port)
resultset = None
num_test_failures = 0
@@ -1228,8 +1336,6 @@ def _build_and_run(
jobset.message(
'FLAKE', '%s [%d/%d runs flaked]' % (k, num_failures, num_runs),
do_newline=True)
- else:
- jobset.message('PASSED', k, do_newline=True)
finally:
for antagonist in antagonists:
antagonist.kill()
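
PythonConfig replaces the tox environment names with an explicit (name, build, run) triple, so each interpreter version carries its own virtualenv build command and test-runner invocation. A sketch of how the generator assembles one unix config, with the command layout taken from the hunk above:

    import collections
    import os

    PythonConfig = collections.namedtuple('PythonConfig', ['name', 'build', 'run'])

    def unix_python_config(name, major, minor):
        builder = [os.path.abspath('tools/run_tests/build_python.sh')]
        runner = [os.path.abspath('tools/run_tests/run_python.sh')]
        python = 'python{}.{}'.format(major, minor)
        # build: create the virtualenv named <name> with the given interpreter
        # run:   invoke the venv's python through run_python.sh
        return PythonConfig(name,
                            builder + [python, name, 'bin/python', 'unix'],
                            runner + [os.path.join(name, 'bin/python')])

    py27 = unix_python_config('py27', '2', '7')
    # py27.build == [...build_python.sh, 'python2.7', 'py27', 'bin/python', 'unix']
    # py27.run   == [...run_python.sh, 'py27/bin/python']
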
diff --git a/tools/run_tests/sanity/check_sources_and_headers.py b/tools/run_tests/sanity/check_sources_and_headers.py
index c028499ca6..28c1dc46d7 100755
--- a/tools/run_tests/sanity/check_sources_and_headers.py
+++ b/tools/run_tests/sanity/check_sources_and_headers.py
@@ -55,7 +55,8 @@ def target_has_header(target, name):
for dep in target['deps']:
if target_has_header(get_target(dep), name):
return True
- if name == 'src/core/lib/profiling/stap_probes.h':
+ if name in ['src/core/lib/profiling/stap_probes.h',
+ 'src/proto/grpc/reflection/v1alpha/reflection.grpc.pb.h']:
return True
return False
diff --git a/tools/run_tests/sanity/check_submodules.sh b/tools/run_tests/sanity/check_submodules.sh
index f2d7a1429e..b602d69564 100755
--- a/tools/run_tests/sanity/check_submodules.sh
+++ b/tools/run_tests/sanity/check_submodules.sh
@@ -45,7 +45,7 @@ cat << EOF | awk '{ print $1 }' | sort > $want_submodules
05b155ff59114735ec8cd089f669c4c3d8f59029 third_party/gflags (v2.1.0-45-g05b155f)
c99458533a9b4c743ed51537e25989ea55944908 third_party/googletest (release-1.7.0)
f8ac463766281625ad710900479130c7fcb4d63b third_party/nanopb (nanopb-0.3.4-29-gf8ac463)
- d4d13a4349e4e59d67f311185ddcc1890d956d7a third_party/protobuf (v3.0.0-beta-3.2)
+ bdeb215cab2985195325fcd5e70c3fa751f46e0f third_party/protobuf (v3.0.0-beta-3.3)
50893291621658f355bc5b4d450a8d06a563053d third_party/zlib (v1.2.8)
EOF
diff --git a/tools/run_tests/sources_and_headers.json b/tools/run_tests/sources_and_headers.json
index 65de89c0ab..5294dddda0 100644
--- a/tools/run_tests/sources_and_headers.json
+++ b/tools/run_tests/sources_and_headers.json
@@ -81,6 +81,23 @@
},
{
"deps": [
+ "gpr",
+ "gpr_test_util",
+ "grpc",
+ "grpc_test_util",
+ "test_tcp_server"
+ ],
+ "headers": [],
+ "language": "c",
+ "name": "bad_server_response_test",
+ "src": [
+ "test/core/end2end/bad_server_response_test.c"
+ ],
+ "third_party": false,
+ "type": "target"
+ },
+ {
+ "deps": [
"grpc",
"grpc_test_util"
],
@@ -132,6 +149,22 @@
],
"headers": [],
"language": "c",
+ "name": "census_resource_test",
+ "src": [
+ "test/core/census/resource_test.c"
+ ],
+ "third_party": false,
+ "type": "target"
+ },
+ {
+ "deps": [
+ "gpr",
+ "gpr_test_util",
+ "grpc",
+ "grpc_test_util"
+ ],
+ "headers": [],
+ "language": "c",
"name": "channel_create_test",
"src": [
"test/core/surface/channel_create_test.c"
@@ -805,9 +838,7 @@
{
"deps": [
"gpr",
- "gpr_test_util",
- "grpc",
- "grpc_test_util"
+ "grpc"
],
"headers": [],
"language": "c",
@@ -901,9 +932,7 @@
{
"deps": [
"gpr",
- "gpr_test_util",
- "grpc",
- "grpc_test_util"
+ "grpc"
],
"headers": [],
"language": "c",
@@ -933,9 +962,7 @@
{
"deps": [
"gpr",
- "gpr_test_util",
- "grpc",
- "grpc_test_util"
+ "grpc"
],
"headers": [],
"language": "c",
@@ -1674,7 +1701,7 @@
"language": "c",
"name": "timeout_encoding_test",
"src": [
- "test/core/transport/chttp2/timeout_encoding_test.c"
+ "test/core/transport/timeout_encoding_test.c"
],
"third_party": false,
"type": "target"
@@ -1812,22 +1839,6 @@
"gpr",
"gpr_test_util",
"grpc",
- "grpc_test_util"
- ],
- "headers": [],
- "language": "c",
- "name": "workqueue_test",
- "src": [
- "test/core/iomgr/workqueue_test.c"
- ],
- "third_party": false,
- "type": "target"
- },
- {
- "deps": [
- "gpr",
- "gpr_test_util",
- "grpc",
"grpc++",
"grpc++_test_util",
"grpc_test_util"
@@ -2138,6 +2149,7 @@
"gpr_test_util",
"grpc",
"grpc++",
+ "grpc++_reflection",
"grpc++_test_config",
"grpc++_test_util",
"grpc_cli_libs",
@@ -2258,6 +2270,27 @@
"grpc++_test_util",
"grpc_test_util"
],
+ "headers": [
+ "src/proto/grpc/lb/v1/load_balancer.grpc.pb.h",
+ "src/proto/grpc/lb/v1/load_balancer.pb.h"
+ ],
+ "language": "c++",
+ "name": "grpclb_test",
+ "src": [
+ "test/cpp/grpclb/grpclb_test.cc"
+ ],
+ "third_party": false,
+ "type": "target"
+ },
+ {
+ "deps": [
+ "gpr",
+ "gpr_test_util",
+ "grpc",
+ "grpc++",
+ "grpc++_test_util",
+ "grpc_test_util"
+ ],
"headers": [],
"language": "c++",
"name": "hybrid_end2end_test",
@@ -3614,9 +3647,9 @@
],
"headers": [],
"language": "c",
- "name": "h2_loadreporting_test",
+ "name": "h2_load_reporting_test",
"src": [
- "test/core/end2end/fixtures/h2_loadreporting.c"
+ "test/core/end2end/fixtures/h2_load_reporting.c"
],
"third_party": false,
"type": "target"
@@ -3886,9 +3919,9 @@
],
"headers": [],
"language": "c",
- "name": "h2_loadreporting_nosec_test",
+ "name": "h2_load_reporting_nosec_test",
"src": [
- "test/core/end2end/fixtures/h2_loadreporting.c"
+ "test/core/end2end/fixtures/h2_load_reporting.c"
],
"third_party": false,
"type": "target"
@@ -4181,6 +4214,7 @@
"gpr",
"grpc_base",
"grpc_lb_policy_grpclb",
+ "grpc_lb_policy_grpclb",
"grpc_lb_policy_pick_first",
"grpc_lb_policy_round_robin",
"grpc_load_reporting",
@@ -4234,6 +4268,7 @@
"gpr",
"gpr_test_util",
"grpc",
+ "grpc_base",
"grpc_test_util_base"
],
"headers": [
@@ -4275,6 +4310,7 @@
"gpr",
"grpc_base",
"grpc_lb_policy_grpclb",
+ "grpc_lb_policy_grpclb",
"grpc_lb_policy_pick_first",
"grpc_lb_policy_round_robin",
"grpc_load_reporting",
@@ -4365,25 +4401,19 @@
{
"deps": [
"grpc++",
- "grpc++_codegen_proto"
+ "grpc++_reflection_proto"
],
"headers": [
"include/grpc++/ext/proto_server_reflection_plugin.h",
- "include/grpc++/ext/reflection.grpc.pb.h",
- "include/grpc++/ext/reflection.pb.h",
"src/cpp/ext/proto_server_reflection.h"
],
"language": "c++",
"name": "grpc++_reflection",
"src": [
"include/grpc++/ext/proto_server_reflection_plugin.h",
- "include/grpc++/ext/reflection.grpc.pb.h",
- "include/grpc++/ext/reflection.pb.h",
"src/cpp/ext/proto_server_reflection.cc",
"src/cpp/ext/proto_server_reflection.h",
- "src/cpp/ext/proto_server_reflection_plugin.cc",
- "src/cpp/ext/reflection.grpc.pb.cc",
- "src/cpp/ext/reflection.pb.cc"
+ "src/cpp/ext/proto_server_reflection_plugin.cc"
],
"third_party": false,
"type": "lib"
@@ -4391,6 +4421,18 @@
{
"deps": [],
"headers": [
+ "src/proto/grpc/reflection/v1alpha/reflection.grpc.pb.h",
+ "src/proto/grpc/reflection/v1alpha/reflection.pb.h"
+ ],
+ "language": "c++",
+ "name": "grpc++_reflection_codegen",
+ "src": [],
+ "third_party": false,
+ "type": "lib"
+ },
+ {
+ "deps": [],
+ "headers": [
"test/cpp/util/test_config.h"
],
"language": "c++",
@@ -4465,11 +4507,13 @@
{
"deps": [
"grpc++",
+ "grpc++_reflection",
"grpc_plugin_support"
],
"headers": [
"test/cpp/util/cli_call.h",
- "test/cpp/util/proto_file_parser.h"
+ "test/cpp/util/proto_file_parser.h",
+ "test/cpp/util/proto_reflection_descriptor_database.h"
],
"language": "c++",
"name": "grpc_cli_libs",
@@ -4477,7 +4521,9 @@
"test/cpp/util/cli_call.cc",
"test/cpp/util/cli_call.h",
"test/cpp/util/proto_file_parser.cc",
- "test/cpp/util/proto_file_parser.h"
+ "test/cpp/util/proto_file_parser.h",
+ "test/cpp/util/proto_reflection_descriptor_database.cc",
+ "test/cpp/util/proto_reflection_descriptor_database.h"
],
"third_party": false,
"type": "lib"
@@ -5377,6 +5423,7 @@
"test/core/end2end/tests/default_host.c",
"test/core/end2end/tests/disappearing_server.c",
"test/core/end2end/tests/empty_batch.c",
+ "test/core/end2end/tests/filter_call_init_fails.c",
"test/core/end2end/tests/filter_causes_close.c",
"test/core/end2end/tests/graceful_server_shutdown.c",
"test/core/end2end/tests/high_initial_seqno.c",
@@ -5384,6 +5431,7 @@
"test/core/end2end/tests/idempotent_request.c",
"test/core/end2end/tests/invoke_large_request.c",
"test/core/end2end/tests/large_metadata.c",
+ "test/core/end2end/tests/load_reporting_hook.c",
"test/core/end2end/tests/max_concurrent_streams.c",
"test/core/end2end/tests/max_message_length.c",
"test/core/end2end/tests/negative_deadline.c",
@@ -5437,6 +5485,7 @@
"test/core/end2end/tests/default_host.c",
"test/core/end2end/tests/disappearing_server.c",
"test/core/end2end/tests/empty_batch.c",
+ "test/core/end2end/tests/filter_call_init_fails.c",
"test/core/end2end/tests/filter_causes_close.c",
"test/core/end2end/tests/graceful_server_shutdown.c",
"test/core/end2end/tests/high_initial_seqno.c",
@@ -5444,6 +5493,7 @@
"test/core/end2end/tests/idempotent_request.c",
"test/core/end2end/tests/invoke_large_request.c",
"test/core/end2end/tests/large_metadata.c",
+ "test/core/end2end/tests/load_reporting_hook.c",
"test/core/end2end/tests/max_concurrent_streams.c",
"test/core/end2end/tests/max_message_length.c",
"test/core/end2end/tests/negative_deadline.c",
@@ -5476,11 +5526,13 @@
"headers": [
"include/grpc/census.h",
"src/core/ext/census/aggregation.h",
+ "src/core/ext/census/base_resources.h",
"src/core/ext/census/census_interface.h",
"src/core/ext/census/census_rpc_stats.h",
"src/core/ext/census/gen/census.pb.h",
"src/core/ext/census/grpc_filter.h",
"src/core/ext/census/mlog.h",
+ "src/core/ext/census/resource.h",
"src/core/ext/census/rpc_metric_id.h"
],
"language": "c",
@@ -5488,6 +5540,8 @@
"src": [
"include/grpc/census.h",
"src/core/ext/census/aggregation.h",
+ "src/core/ext/census/base_resources.c",
+ "src/core/ext/census/base_resources.h",
"src/core/ext/census/census_interface.h",
"src/core/ext/census/census_rpc_stats.h",
"src/core/ext/census/context.c",
@@ -5502,6 +5556,8 @@
"src/core/ext/census/mlog.h",
"src/core/ext/census/operation.c",
"src/core/ext/census/placeholders.c",
+ "src/core/ext/census/resource.c",
+ "src/core/ext/census/resource.h",
"src/core/ext/census/rpc_metric_id.h",
"src/core/ext/census/tracing.c"
],
@@ -5701,6 +5757,7 @@
"src/core/lib/channel/compress_filter.h",
"src/core/lib/channel/connected_channel.h",
"src/core/lib/channel/context.h",
+ "src/core/lib/channel/handshaker.h",
"src/core/lib/channel/http_client_filter.h",
"src/core/lib/channel/http_server_filter.h",
"src/core/lib/compression/algorithm_metadata.h",
@@ -5771,6 +5828,7 @@
"src/core/lib/transport/metadata.h",
"src/core/lib/transport/metadata_batch.h",
"src/core/lib/transport/static_metadata.h",
+ "src/core/lib/transport/timeout_encoding.h",
"src/core/lib/transport/transport.h",
"src/core/lib/transport/transport_impl.h"
],
@@ -5794,6 +5852,8 @@
"src/core/lib/channel/connected_channel.c",
"src/core/lib/channel/connected_channel.h",
"src/core/lib/channel/context.h",
+ "src/core/lib/channel/handshaker.c",
+ "src/core/lib/channel/handshaker.h",
"src/core/lib/channel/http_client_filter.c",
"src/core/lib/channel/http_client_filter.h",
"src/core/lib/channel/http_server_filter.c",
@@ -5944,6 +6004,8 @@
"src/core/lib/transport/metadata_batch.h",
"src/core/lib/transport/static_metadata.c",
"src/core/lib/transport/static_metadata.h",
+ "src/core/lib/transport/timeout_encoding.c",
+ "src/core/lib/transport/timeout_encoding.h",
"src/core/lib/transport/transport.c",
"src/core/lib/transport/transport.h",
"src/core/lib/transport/transport_impl.h",
@@ -6052,12 +6114,15 @@
"nanopb"
],
"headers": [
+ "src/core/ext/lb_policy/grpclb/grpclb.h",
"src/core/ext/lb_policy/grpclb/load_balancer_api.h",
"src/core/ext/lb_policy/grpclb/proto/grpc/lb/v1/load_balancer.pb.h"
],
"language": "c",
"name": "grpc_lb_policy_grpclb",
"src": [
+ "src/core/ext/lb_policy/grpclb/grpclb.c",
+ "src/core/ext/lb_policy/grpclb/grpclb.h",
"src/core/ext/lb_policy/grpclb/load_balancer_api.c",
"src/core/ext/lb_policy/grpclb/load_balancer_api.h",
"src/core/ext/lb_policy/grpclb/proto/grpc/lb/v1/load_balancer.pb.c",
@@ -6303,7 +6368,6 @@
"src/core/ext/transport/chttp2/transport/internal.h",
"src/core/ext/transport/chttp2/transport/status_conversion.h",
"src/core/ext/transport/chttp2/transport/stream_map.h",
- "src/core/ext/transport/chttp2/transport/timeout_encoding.h",
"src/core/ext/transport/chttp2/transport/varint.h"
],
"language": "c",
@@ -6347,8 +6411,6 @@
"src/core/ext/transport/chttp2/transport/stream_lists.c",
"src/core/ext/transport/chttp2/transport/stream_map.c",
"src/core/ext/transport/chttp2/transport/stream_map.h",
- "src/core/ext/transport/chttp2/transport/timeout_encoding.c",
- "src/core/ext/transport/chttp2/transport/timeout_encoding.h",
"src/core/ext/transport/chttp2/transport/varint.c",
"src/core/ext/transport/chttp2/transport/varint.h",
"src/core/ext/transport/chttp2/transport/writing.c"
@@ -6756,5 +6818,24 @@
],
"third_party": false,
"type": "filegroup"
+ },
+ {
+ "deps": [
+ "grpc++_codegen_proto"
+ ],
+ "headers": [
+ "include/grpc++/ext/reflection.grpc.pb.h",
+ "include/grpc++/ext/reflection.pb.h"
+ ],
+ "language": "c++",
+ "name": "grpc++_reflection_proto",
+ "src": [
+ "include/grpc++/ext/reflection.grpc.pb.h",
+ "include/grpc++/ext/reflection.pb.h",
+ "src/cpp/ext/reflection.grpc.pb.cc",
+ "src/cpp/ext/reflection.pb.cc"
+ ],
+ "third_party": false,
+ "type": "filegroup"
}
]
diff --git a/tools/run_tests/task_runner.py b/tools/run_tests/task_runner.py
index b42aa17cbb..2e3fa443b9 100755
--- a/tools/run_tests/task_runner.py
+++ b/tools/run_tests/task_runner.py
@@ -30,6 +30,8 @@
"""Runs selected gRPC test/build tasks."""
+from __future__ import print_function
+
import argparse
import atexit
import jobset
@@ -111,7 +113,7 @@ build_jobs = []
for target in targets:
build_jobs.append(target.build_jobspec())
if not build_jobs:
- print 'Nothing to build.'
+ print('Nothing to build.')
sys.exit(1)
jobset.message('START', 'Building targets.', do_newline=True)
diff --git a/tools/run_tests/tests.json b/tools/run_tests/tests.json
index dedd55774b..f608aa96d9 100644
--- a/tools/run_tests/tests.json
+++ b/tools/run_tests/tests.json
@@ -98,6 +98,27 @@
"flaky": false,
"gtest": false,
"language": "c",
+ "name": "bad_server_response_test",
+ "platforms": [
+ "linux",
+ "mac",
+ "posix",
+ "windows"
+ ]
+ },
+ {
+ "args": [],
+ "ci_platforms": [
+ "linux",
+ "mac",
+ "posix",
+ "windows"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "gtest": false,
+ "language": "c",
"name": "bin_decoder_test",
"platforms": [
"linux",
@@ -161,6 +182,27 @@
"flaky": false,
"gtest": false,
"language": "c",
+ "name": "census_resource_test",
+ "platforms": [
+ "linux",
+ "mac",
+ "posix",
+ "windows"
+ ]
+ },
+ {
+ "args": [],
+ "ci_platforms": [
+ "linux",
+ "mac",
+ "posix",
+ "windows"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "gtest": false,
+ "language": "c",
"name": "channel_create_test",
"platforms": [
"linux",
@@ -1919,25 +1961,6 @@
"ci_platforms": [
"linux",
"mac",
- "posix"
- ],
- "cpu_cost": 1.0,
- "exclude_configs": [],
- "flaky": false,
- "gtest": false,
- "language": "c",
- "name": "workqueue_test",
- "platforms": [
- "linux",
- "mac",
- "posix"
- ]
- },
- {
- "args": [],
- "ci_platforms": [
- "linux",
- "mac",
"posix",
"windows"
],
@@ -2299,6 +2322,27 @@
"cpu_cost": 1.0,
"exclude_configs": [],
"flaky": false,
+ "gtest": false,
+ "language": "c++",
+ "name": "grpclb_test",
+ "platforms": [
+ "linux",
+ "mac",
+ "posix",
+ "windows"
+ ]
+ },
+ {
+ "args": [],
+ "ci_platforms": [
+ "linux",
+ "mac",
+ "posix",
+ "windows"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
"gtest": true,
"language": "c++",
"name": "hybrid_end2end_test",
@@ -4676,6 +4720,28 @@
},
{
"args": [
+ "filter_call_init_fails"
+ ],
+ "ci_platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_census_test",
+ "platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"filter_causes_close"
],
"ci_platforms": [
@@ -4830,6 +4896,28 @@
},
{
"args": [
+ "load_reporting_hook"
+ ],
+ "ci_platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_census_test",
+ "platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"max_concurrent_streams"
],
"ci_platforms": [
@@ -5556,6 +5644,28 @@
},
{
"args": [
+ "filter_call_init_fails"
+ ],
+ "ci_platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_compress_test",
+ "platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"filter_causes_close"
],
"ci_platforms": [
@@ -5710,6 +5820,28 @@
},
{
"args": [
+ "load_reporting_hook"
+ ],
+ "ci_platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_compress_test",
+ "platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"max_concurrent_streams"
],
"ci_platforms": [
@@ -6422,6 +6554,27 @@
},
{
"args": [
+ "filter_call_init_fails"
+ ],
+ "ci_platforms": [
+ "windows",
+ "linux",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_fakesec_test",
+ "platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"filter_causes_close"
],
"ci_platforms": [
@@ -6569,6 +6722,27 @@
},
{
"args": [
+ "load_reporting_hook"
+ ],
+ "ci_platforms": [
+ "windows",
+ "linux",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_fakesec_test",
+ "platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"max_concurrent_streams"
],
"ci_platforms": [
@@ -7188,6 +7362,26 @@
},
{
"args": [
+ "filter_call_init_fails"
+ ],
+ "ci_platforms": [
+ "linux",
+ "mac",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_fd_test",
+ "platforms": [
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"filter_causes_close"
],
"ci_platforms": [
@@ -7328,6 +7522,26 @@
},
{
"args": [
+ "load_reporting_hook"
+ ],
+ "ci_platforms": [
+ "linux",
+ "mac",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_fd_test",
+ "platforms": [
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"max_concurrent_streams"
],
"ci_platforms": [
@@ -7976,6 +8190,28 @@
},
{
"args": [
+ "filter_call_init_fails"
+ ],
+ "ci_platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_full_test",
+ "platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"filter_causes_close"
],
"ci_platforms": [
@@ -8130,6 +8366,28 @@
},
{
"args": [
+ "load_reporting_hook"
+ ],
+ "ci_platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_full_test",
+ "platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"max_concurrent_streams"
],
"ci_platforms": [
@@ -8772,6 +9030,22 @@
},
{
"args": [
+ "filter_call_init_fails"
+ ],
+ "ci_platforms": [
+ "linux"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_full+pipe_test",
+ "platforms": [
+ "linux"
+ ]
+ },
+ {
+ "args": [
"filter_causes_close"
],
"ci_platforms": [
@@ -8884,6 +9158,22 @@
},
{
"args": [
+ "load_reporting_hook"
+ ],
+ "ci_platforms": [
+ "linux"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_full+pipe_test",
+ "platforms": [
+ "linux"
+ ]
+ },
+ {
+ "args": [
"max_concurrent_streams"
],
"ci_platforms": [
@@ -9496,6 +9786,28 @@
},
{
"args": [
+ "filter_call_init_fails"
+ ],
+ "ci_platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_full+trace_test",
+ "platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"filter_causes_close"
],
"ci_platforms": [
@@ -9628,6 +9940,28 @@
},
{
"args": [
+ "load_reporting_hook"
+ ],
+ "ci_platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_full+trace_test",
+ "platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"max_concurrent_streams"
],
"ci_platforms": [
@@ -10058,7 +10392,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_test",
+ "name": "h2_load_reporting_test",
"platforms": [
"windows",
"linux",
@@ -10080,7 +10414,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_test",
+ "name": "h2_load_reporting_test",
"platforms": [
"windows",
"linux",
@@ -10102,7 +10436,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_test",
+ "name": "h2_load_reporting_test",
"platforms": [
"windows",
"linux",
@@ -10124,7 +10458,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_test",
+ "name": "h2_load_reporting_test",
"platforms": [
"windows",
"linux",
@@ -10146,7 +10480,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_test",
+ "name": "h2_load_reporting_test",
"platforms": [
"windows",
"linux",
@@ -10168,7 +10502,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_test",
+ "name": "h2_load_reporting_test",
"platforms": [
"windows",
"linux",
@@ -10190,7 +10524,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_test",
+ "name": "h2_load_reporting_test",
"platforms": [
"windows",
"linux",
@@ -10212,7 +10546,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_test",
+ "name": "h2_load_reporting_test",
"platforms": [
"windows",
"linux",
@@ -10234,7 +10568,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_test",
+ "name": "h2_load_reporting_test",
"platforms": [
"windows",
"linux",
@@ -10256,7 +10590,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_test",
+ "name": "h2_load_reporting_test",
"platforms": [
"windows",
"linux",
@@ -10278,7 +10612,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_test",
+ "name": "h2_load_reporting_test",
"platforms": [
"windows",
"linux",
@@ -10300,7 +10634,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_test",
+ "name": "h2_load_reporting_test",
"platforms": [
"windows",
"linux",
@@ -10322,7 +10656,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_test",
+ "name": "h2_load_reporting_test",
"platforms": [
"windows",
"linux",
@@ -10344,7 +10678,29 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_test",
+ "name": "h2_load_reporting_test",
+ "platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
+ "filter_call_init_fails"
+ ],
+ "ci_platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_load_reporting_test",
"platforms": [
"windows",
"linux",
@@ -10366,7 +10722,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_test",
+ "name": "h2_load_reporting_test",
"platforms": [
"windows",
"linux",
@@ -10388,7 +10744,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_test",
+ "name": "h2_load_reporting_test",
"platforms": [
"windows",
"linux",
@@ -10410,7 +10766,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_test",
+ "name": "h2_load_reporting_test",
"platforms": [
"windows",
"linux",
@@ -10432,7 +10788,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_test",
+ "name": "h2_load_reporting_test",
"platforms": [
"windows",
"linux",
@@ -10454,7 +10810,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_test",
+ "name": "h2_load_reporting_test",
"platforms": [
"windows",
"linux",
@@ -10476,7 +10832,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_test",
+ "name": "h2_load_reporting_test",
"platforms": [
"windows",
"linux",
@@ -10498,7 +10854,29 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_test",
+ "name": "h2_load_reporting_test",
+ "platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
+ "load_reporting_hook"
+ ],
+ "ci_platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_load_reporting_test",
"platforms": [
"windows",
"linux",
@@ -10520,7 +10898,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_test",
+ "name": "h2_load_reporting_test",
"platforms": [
"windows",
"linux",
@@ -10542,7 +10920,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_test",
+ "name": "h2_load_reporting_test",
"platforms": [
"windows",
"linux",
@@ -10564,7 +10942,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_test",
+ "name": "h2_load_reporting_test",
"platforms": [
"windows",
"linux",
@@ -10586,7 +10964,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_test",
+ "name": "h2_load_reporting_test",
"platforms": [
"windows",
"linux",
@@ -10608,7 +10986,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_test",
+ "name": "h2_load_reporting_test",
"platforms": [
"windows",
"linux",
@@ -10630,7 +11008,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_test",
+ "name": "h2_load_reporting_test",
"platforms": [
"windows",
"linux",
@@ -10652,7 +11030,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_test",
+ "name": "h2_load_reporting_test",
"platforms": [
"windows",
"linux",
@@ -10674,7 +11052,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_test",
+ "name": "h2_load_reporting_test",
"platforms": [
"windows",
"linux",
@@ -10696,7 +11074,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_test",
+ "name": "h2_load_reporting_test",
"platforms": [
"windows",
"linux",
@@ -10718,7 +11096,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_test",
+ "name": "h2_load_reporting_test",
"platforms": [
"windows",
"linux",
@@ -10740,7 +11118,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_test",
+ "name": "h2_load_reporting_test",
"platforms": [
"windows",
"linux",
@@ -10762,7 +11140,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_test",
+ "name": "h2_load_reporting_test",
"platforms": [
"windows",
"linux",
@@ -10784,7 +11162,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_test",
+ "name": "h2_load_reporting_test",
"platforms": [
"windows",
"linux",
@@ -10806,7 +11184,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_test",
+ "name": "h2_load_reporting_test",
"platforms": [
"windows",
"linux",
@@ -10828,7 +11206,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_test",
+ "name": "h2_load_reporting_test",
"platforms": [
"windows",
"linux",
@@ -10850,7 +11228,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_test",
+ "name": "h2_load_reporting_test",
"platforms": [
"windows",
"linux",
@@ -10872,7 +11250,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_test",
+ "name": "h2_load_reporting_test",
"platforms": [
"windows",
"linux",
@@ -10894,7 +11272,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_test",
+ "name": "h2_load_reporting_test",
"platforms": [
"windows",
"linux",
@@ -10916,7 +11294,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_test",
+ "name": "h2_load_reporting_test",
"platforms": [
"windows",
"linux",
@@ -11220,6 +11598,27 @@
},
{
"args": [
+ "filter_call_init_fails"
+ ],
+ "ci_platforms": [
+ "windows",
+ "linux",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_oauth2_test",
+ "platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"filter_causes_close"
],
"ci_platforms": [
@@ -11367,6 +11766,27 @@
},
{
"args": [
+ "load_reporting_hook"
+ ],
+ "ci_platforms": [
+ "windows",
+ "linux",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_oauth2_test",
+ "platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"max_concurrent_streams"
],
"ci_platforms": [
@@ -12018,6 +12438,27 @@
},
{
"args": [
+ "filter_call_init_fails"
+ ],
+ "ci_platforms": [
+ "windows",
+ "linux",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_proxy_test",
+ "platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"filter_causes_close"
],
"ci_platforms": [
@@ -12144,6 +12585,27 @@
},
{
"args": [
+ "load_reporting_hook"
+ ],
+ "ci_platforms": [
+ "windows",
+ "linux",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_proxy_test",
+ "platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"max_message_length"
],
"ci_platforms": [
@@ -12711,6 +13173,27 @@
},
{
"args": [
+ "filter_call_init_fails"
+ ],
+ "ci_platforms": [
+ "windows",
+ "linux",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_sockpair_test",
+ "platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"filter_causes_close"
],
"ci_platforms": [
@@ -12858,6 +13341,27 @@
},
{
"args": [
+ "load_reporting_hook"
+ ],
+ "ci_platforms": [
+ "windows",
+ "linux",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_sockpair_test",
+ "platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"max_concurrent_streams"
],
"ci_platforms": [
@@ -13446,6 +13950,27 @@
},
{
"args": [
+ "filter_call_init_fails"
+ ],
+ "ci_platforms": [
+ "windows",
+ "linux",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_sockpair+trace_test",
+ "platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"filter_causes_close"
],
"ci_platforms": [
@@ -13572,6 +14097,27 @@
},
{
"args": [
+ "load_reporting_hook"
+ ],
+ "ci_platforms": [
+ "windows",
+ "linux",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_sockpair+trace_test",
+ "platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"max_concurrent_streams"
],
"ci_platforms": [
@@ -14160,6 +14706,27 @@
},
{
"args": [
+ "filter_call_init_fails"
+ ],
+ "ci_platforms": [
+ "windows",
+ "linux",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_sockpair_1byte_test",
+ "platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"filter_causes_close"
],
"ci_platforms": [
@@ -14307,6 +14874,27 @@
},
{
"args": [
+ "load_reporting_hook"
+ ],
+ "ci_platforms": [
+ "windows",
+ "linux",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_sockpair_1byte_test",
+ "platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"max_concurrent_streams"
],
"ci_platforms": [
@@ -14972,6 +15560,28 @@
},
{
"args": [
+ "filter_call_init_fails"
+ ],
+ "ci_platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_ssl_test",
+ "platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"filter_causes_close"
],
"ci_platforms": [
@@ -15126,6 +15736,28 @@
},
{
"args": [
+ "load_reporting_hook"
+ ],
+ "ci_platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_ssl_test",
+ "platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"max_concurrent_streams"
],
"ci_platforms": [
@@ -15852,6 +16484,28 @@
},
{
"args": [
+ "filter_call_init_fails"
+ ],
+ "ci_platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_ssl_cert_test",
+ "platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"filter_causes_close"
],
"ci_platforms": [
@@ -16006,6 +16660,28 @@
},
{
"args": [
+ "load_reporting_hook"
+ ],
+ "ci_platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_ssl_cert_test",
+ "platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"max_concurrent_streams"
],
"ci_platforms": [
@@ -16676,6 +17352,27 @@
},
{
"args": [
+ "filter_call_init_fails"
+ ],
+ "ci_platforms": [
+ "windows",
+ "linux",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_ssl_proxy_test",
+ "platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"filter_causes_close"
],
"ci_platforms": [
@@ -16802,6 +17499,27 @@
},
{
"args": [
+ "load_reporting_hook"
+ ],
+ "ci_platforms": [
+ "windows",
+ "linux",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_ssl_proxy_test",
+ "platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"max_message_length"
],
"ci_platforms": [
@@ -17398,6 +18116,26 @@
},
{
"args": [
+ "filter_call_init_fails"
+ ],
+ "ci_platforms": [
+ "linux",
+ "mac",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_uds_test",
+ "platforms": [
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"filter_causes_close"
],
"ci_platforms": [
@@ -17538,6 +18276,26 @@
},
{
"args": [
+ "load_reporting_hook"
+ ],
+ "ci_platforms": [
+ "linux",
+ "mac",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_uds_test",
+ "platforms": [
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"max_concurrent_streams"
],
"ci_platforms": [
@@ -18204,6 +18962,28 @@
},
{
"args": [
+ "filter_call_init_fails"
+ ],
+ "ci_platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_census_nosec_test",
+ "platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"filter_causes_close"
],
"ci_platforms": [
@@ -18358,6 +19138,28 @@
},
{
"args": [
+ "load_reporting_hook"
+ ],
+ "ci_platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_census_nosec_test",
+ "platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"max_concurrent_streams"
],
"ci_platforms": [
@@ -19062,6 +19864,28 @@
},
{
"args": [
+ "filter_call_init_fails"
+ ],
+ "ci_platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_compress_nosec_test",
+ "platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"filter_causes_close"
],
"ci_platforms": [
@@ -19216,6 +20040,28 @@
},
{
"args": [
+ "load_reporting_hook"
+ ],
+ "ci_platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_compress_nosec_test",
+ "platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"max_concurrent_streams"
],
"ci_platforms": [
@@ -19834,6 +20680,26 @@
},
{
"args": [
+ "filter_call_init_fails"
+ ],
+ "ci_platforms": [
+ "linux",
+ "mac",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_fd_nosec_test",
+ "platforms": [
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"filter_causes_close"
],
"ci_platforms": [
@@ -19974,6 +20840,26 @@
},
{
"args": [
+ "load_reporting_hook"
+ ],
+ "ci_platforms": [
+ "linux",
+ "mac",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_fd_nosec_test",
+ "platforms": [
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"max_concurrent_streams"
],
"ci_platforms": [
@@ -20600,6 +21486,28 @@
},
{
"args": [
+ "filter_call_init_fails"
+ ],
+ "ci_platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_full_nosec_test",
+ "platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"filter_causes_close"
],
"ci_platforms": [
@@ -20754,6 +21662,28 @@
},
{
"args": [
+ "load_reporting_hook"
+ ],
+ "ci_platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_full_nosec_test",
+ "platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"max_concurrent_streams"
],
"ci_platforms": [
@@ -21380,6 +22310,22 @@
},
{
"args": [
+ "filter_call_init_fails"
+ ],
+ "ci_platforms": [
+ "linux"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_full+pipe_nosec_test",
+ "platforms": [
+ "linux"
+ ]
+ },
+ {
+ "args": [
"filter_causes_close"
],
"ci_platforms": [
@@ -21492,6 +22438,22 @@
},
{
"args": [
+ "load_reporting_hook"
+ ],
+ "ci_platforms": [
+ "linux"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_full+pipe_nosec_test",
+ "platforms": [
+ "linux"
+ ]
+ },
+ {
+ "args": [
"max_concurrent_streams"
],
"ci_platforms": [
@@ -22082,6 +23044,28 @@
},
{
"args": [
+ "filter_call_init_fails"
+ ],
+ "ci_platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_full+trace_nosec_test",
+ "platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"filter_causes_close"
],
"ci_platforms": [
@@ -22214,6 +23198,28 @@
},
{
"args": [
+ "load_reporting_hook"
+ ],
+ "ci_platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_full+trace_nosec_test",
+ "platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"max_concurrent_streams"
],
"ci_platforms": [
@@ -22644,7 +23650,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_nosec_test",
+ "name": "h2_load_reporting_nosec_test",
"platforms": [
"windows",
"linux",
@@ -22666,7 +23672,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_nosec_test",
+ "name": "h2_load_reporting_nosec_test",
"platforms": [
"windows",
"linux",
@@ -22688,7 +23694,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_nosec_test",
+ "name": "h2_load_reporting_nosec_test",
"platforms": [
"windows",
"linux",
@@ -22710,7 +23716,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_nosec_test",
+ "name": "h2_load_reporting_nosec_test",
"platforms": [
"windows",
"linux",
@@ -22732,7 +23738,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_nosec_test",
+ "name": "h2_load_reporting_nosec_test",
"platforms": [
"windows",
"linux",
@@ -22754,7 +23760,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_nosec_test",
+ "name": "h2_load_reporting_nosec_test",
"platforms": [
"windows",
"linux",
@@ -22776,7 +23782,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_nosec_test",
+ "name": "h2_load_reporting_nosec_test",
"platforms": [
"windows",
"linux",
@@ -22798,7 +23804,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_nosec_test",
+ "name": "h2_load_reporting_nosec_test",
"platforms": [
"windows",
"linux",
@@ -22820,7 +23826,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_nosec_test",
+ "name": "h2_load_reporting_nosec_test",
"platforms": [
"windows",
"linux",
@@ -22842,7 +23848,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_nosec_test",
+ "name": "h2_load_reporting_nosec_test",
"platforms": [
"windows",
"linux",
@@ -22864,7 +23870,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_nosec_test",
+ "name": "h2_load_reporting_nosec_test",
"platforms": [
"windows",
"linux",
@@ -22886,7 +23892,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_nosec_test",
+ "name": "h2_load_reporting_nosec_test",
"platforms": [
"windows",
"linux",
@@ -22908,7 +23914,29 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_nosec_test",
+ "name": "h2_load_reporting_nosec_test",
+ "platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
+ "filter_call_init_fails"
+ ],
+ "ci_platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_load_reporting_nosec_test",
"platforms": [
"windows",
"linux",
@@ -22930,7 +23958,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_nosec_test",
+ "name": "h2_load_reporting_nosec_test",
"platforms": [
"windows",
"linux",
@@ -22952,7 +23980,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_nosec_test",
+ "name": "h2_load_reporting_nosec_test",
"platforms": [
"windows",
"linux",
@@ -22974,7 +24002,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_nosec_test",
+ "name": "h2_load_reporting_nosec_test",
"platforms": [
"windows",
"linux",
@@ -22996,7 +24024,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_nosec_test",
+ "name": "h2_load_reporting_nosec_test",
"platforms": [
"windows",
"linux",
@@ -23018,7 +24046,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_nosec_test",
+ "name": "h2_load_reporting_nosec_test",
"platforms": [
"windows",
"linux",
@@ -23040,7 +24068,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_nosec_test",
+ "name": "h2_load_reporting_nosec_test",
"platforms": [
"windows",
"linux",
@@ -23062,7 +24090,29 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_nosec_test",
+ "name": "h2_load_reporting_nosec_test",
+ "platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
+ "load_reporting_hook"
+ ],
+ "ci_platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_load_reporting_nosec_test",
"platforms": [
"windows",
"linux",
@@ -23084,7 +24134,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_nosec_test",
+ "name": "h2_load_reporting_nosec_test",
"platforms": [
"windows",
"linux",
@@ -23106,7 +24156,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_nosec_test",
+ "name": "h2_load_reporting_nosec_test",
"platforms": [
"windows",
"linux",
@@ -23128,7 +24178,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_nosec_test",
+ "name": "h2_load_reporting_nosec_test",
"platforms": [
"windows",
"linux",
@@ -23150,7 +24200,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_nosec_test",
+ "name": "h2_load_reporting_nosec_test",
"platforms": [
"windows",
"linux",
@@ -23172,7 +24222,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_nosec_test",
+ "name": "h2_load_reporting_nosec_test",
"platforms": [
"windows",
"linux",
@@ -23194,7 +24244,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_nosec_test",
+ "name": "h2_load_reporting_nosec_test",
"platforms": [
"windows",
"linux",
@@ -23216,7 +24266,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_nosec_test",
+ "name": "h2_load_reporting_nosec_test",
"platforms": [
"windows",
"linux",
@@ -23238,7 +24288,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_nosec_test",
+ "name": "h2_load_reporting_nosec_test",
"platforms": [
"windows",
"linux",
@@ -23260,7 +24310,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_nosec_test",
+ "name": "h2_load_reporting_nosec_test",
"platforms": [
"windows",
"linux",
@@ -23282,7 +24332,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_nosec_test",
+ "name": "h2_load_reporting_nosec_test",
"platforms": [
"windows",
"linux",
@@ -23304,7 +24354,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_nosec_test",
+ "name": "h2_load_reporting_nosec_test",
"platforms": [
"windows",
"linux",
@@ -23326,7 +24376,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_nosec_test",
+ "name": "h2_load_reporting_nosec_test",
"platforms": [
"windows",
"linux",
@@ -23348,7 +24398,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_nosec_test",
+ "name": "h2_load_reporting_nosec_test",
"platforms": [
"windows",
"linux",
@@ -23370,7 +24420,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_nosec_test",
+ "name": "h2_load_reporting_nosec_test",
"platforms": [
"windows",
"linux",
@@ -23392,7 +24442,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_nosec_test",
+ "name": "h2_load_reporting_nosec_test",
"platforms": [
"windows",
"linux",
@@ -23414,7 +24464,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_nosec_test",
+ "name": "h2_load_reporting_nosec_test",
"platforms": [
"windows",
"linux",
@@ -23436,7 +24486,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_nosec_test",
+ "name": "h2_load_reporting_nosec_test",
"platforms": [
"windows",
"linux",
@@ -23458,7 +24508,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_nosec_test",
+ "name": "h2_load_reporting_nosec_test",
"platforms": [
"windows",
"linux",
@@ -23480,7 +24530,7 @@
"exclude_configs": [],
"flaky": false,
"language": "c",
- "name": "h2_loadreporting_nosec_test",
+ "name": "h2_load_reporting_nosec_test",
"platforms": [
"windows",
"linux",
@@ -23721,6 +24771,27 @@
},
{
"args": [
+ "filter_call_init_fails"
+ ],
+ "ci_platforms": [
+ "windows",
+ "linux",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_proxy_nosec_test",
+ "platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"filter_causes_close"
],
"ci_platforms": [
@@ -23847,6 +24918,27 @@
},
{
"args": [
+ "load_reporting_hook"
+ ],
+ "ci_platforms": [
+ "windows",
+ "linux",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_proxy_nosec_test",
+ "platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"max_message_length"
],
"ci_platforms": [
@@ -24393,6 +25485,27 @@
},
{
"args": [
+ "filter_call_init_fails"
+ ],
+ "ci_platforms": [
+ "windows",
+ "linux",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_sockpair_nosec_test",
+ "platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"filter_causes_close"
],
"ci_platforms": [
@@ -24540,6 +25653,27 @@
},
{
"args": [
+ "load_reporting_hook"
+ ],
+ "ci_platforms": [
+ "windows",
+ "linux",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_sockpair_nosec_test",
+ "platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"max_concurrent_streams"
],
"ci_platforms": [
@@ -25107,6 +26241,27 @@
},
{
"args": [
+ "filter_call_init_fails"
+ ],
+ "ci_platforms": [
+ "windows",
+ "linux",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_sockpair+trace_nosec_test",
+ "platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"filter_causes_close"
],
"ci_platforms": [
@@ -25233,6 +26388,27 @@
},
{
"args": [
+ "load_reporting_hook"
+ ],
+ "ci_platforms": [
+ "windows",
+ "linux",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_sockpair+trace_nosec_test",
+ "platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"max_concurrent_streams"
],
"ci_platforms": [
@@ -25820,6 +26996,29 @@
},
{
"args": [
+ "filter_call_init_fails"
+ ],
+ "ci_platforms": [
+ "windows",
+ "linux",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [
+ "msan"
+ ],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_sockpair_1byte_nosec_test",
+ "platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"filter_causes_close"
],
"ci_platforms": [
@@ -25981,6 +27180,29 @@
},
{
"args": [
+ "load_reporting_hook"
+ ],
+ "ci_platforms": [
+ "windows",
+ "linux",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [
+ "msan"
+ ],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_sockpair_1byte_nosec_test",
+ "platforms": [
+ "windows",
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"max_concurrent_streams"
],
"ci_platforms": [
@@ -26612,6 +27834,26 @@
},
{
"args": [
+ "filter_call_init_fails"
+ ],
+ "ci_platforms": [
+ "linux",
+ "mac",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_uds_nosec_test",
+ "platforms": [
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"filter_causes_close"
],
"ci_platforms": [
@@ -26752,6 +27994,26 @@
},
{
"args": [
+ "load_reporting_hook"
+ ],
+ "ci_platforms": [
+ "linux",
+ "mac",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "flaky": false,
+ "language": "c",
+ "name": "h2_uds_nosec_test",
+ "platforms": [
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"max_concurrent_streams"
],
"ci_platforms": [
@@ -27132,8 +28394,8 @@
},
{
"args": [
- "--scenario_json",
- "'{\"name\": \"cpp_generic_async_streaming_ping_pong_secure\", \"warmup_seconds\": 1, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"async_server_threads\": 1, \"core_limit\": 1, \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"payload_config\": {\"bytebuf_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"server_type\": \"ASYNC_GENERIC_SERVER\"}, \"client_config\": {\"client_type\": \"ASYNC_CLIENT\", \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"payload_config\": {\"bytebuf_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 1, \"async_client_threads\": 1, \"outstanding_rpcs_per_channel\": 1, \"rpc_type\": \"STREAMING\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}, \"num_clients\": 1}'"
+ "--scenarios_json",
+ "{\"scenarios\": [{\"name\": \"cpp_generic_async_streaming_ping_pong_secure\", \"warmup_seconds\": 0, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"async_server_threads\": 1, \"core_limit\": 1, \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"payload_config\": {\"bytebuf_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"server_type\": \"ASYNC_GENERIC_SERVER\"}, \"client_config\": {\"client_type\": \"ASYNC_CLIENT\", \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"payload_config\": {\"bytebuf_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 1, \"async_client_threads\": 1, \"outstanding_rpcs_per_channel\": 1, \"rpc_type\": \"STREAMING\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}, \"num_clients\": 1}]}"
],
"boringssl": true,
"ci_platforms": [
@@ -27154,12 +28416,13 @@
"posix",
"windows"
],
- "shortname": "json_run_localhost:cpp_generic_async_streaming_ping_pong_secure"
+ "shortname": "json_run_localhost:cpp_generic_async_streaming_ping_pong_secure",
+ "timeout_seconds": 180
},
{
"args": [
- "--scenario_json",
- "'{\"name\": \"cpp_protobuf_async_streaming_ping_pong_secure\", \"warmup_seconds\": 1, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"async_server_threads\": 1, \"core_limit\": 1, \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"server_type\": \"ASYNC_SERVER\"}, \"client_config\": {\"client_type\": \"ASYNC_CLIENT\", \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"payload_config\": {\"simple_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 1, \"async_client_threads\": 1, \"outstanding_rpcs_per_channel\": 1, \"rpc_type\": \"STREAMING\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}, \"num_clients\": 1}'"
+ "--scenarios_json",
+ "{\"scenarios\": [{\"name\": \"cpp_protobuf_async_streaming_ping_pong_secure\", \"warmup_seconds\": 0, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"async_server_threads\": 1, \"core_limit\": 1, \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"server_type\": \"ASYNC_SERVER\"}, \"client_config\": {\"client_type\": \"ASYNC_CLIENT\", \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"payload_config\": {\"simple_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 1, \"async_client_threads\": 1, \"outstanding_rpcs_per_channel\": 1, \"rpc_type\": \"STREAMING\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}, \"num_clients\": 1}]}"
],
"boringssl": true,
"ci_platforms": [
@@ -27180,12 +28443,13 @@
"posix",
"windows"
],
- "shortname": "json_run_localhost:cpp_protobuf_async_streaming_ping_pong_secure"
+ "shortname": "json_run_localhost:cpp_protobuf_async_streaming_ping_pong_secure",
+ "timeout_seconds": 180
},
{
"args": [
- "--scenario_json",
- "'{\"name\": \"cpp_protobuf_async_unary_ping_pong_secure\", \"warmup_seconds\": 1, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"async_server_threads\": 1, \"core_limit\": 1, \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"server_type\": \"ASYNC_SERVER\"}, \"client_config\": {\"client_type\": \"ASYNC_CLIENT\", \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"payload_config\": {\"simple_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 1, \"async_client_threads\": 1, \"outstanding_rpcs_per_channel\": 1, \"rpc_type\": \"UNARY\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}, \"num_clients\": 1}'"
+ "--scenarios_json",
+ "{\"scenarios\": [{\"name\": \"cpp_protobuf_async_unary_ping_pong_secure\", \"warmup_seconds\": 0, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"async_server_threads\": 1, \"core_limit\": 1, \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"server_type\": \"ASYNC_SERVER\"}, \"client_config\": {\"client_type\": \"ASYNC_CLIENT\", \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"payload_config\": {\"simple_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 1, \"async_client_threads\": 1, \"outstanding_rpcs_per_channel\": 1, \"rpc_type\": \"UNARY\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}, \"num_clients\": 1}]}"
],
"boringssl": true,
"ci_platforms": [
@@ -27206,12 +28470,13 @@
"posix",
"windows"
],
- "shortname": "json_run_localhost:cpp_protobuf_async_unary_ping_pong_secure"
+ "shortname": "json_run_localhost:cpp_protobuf_async_unary_ping_pong_secure",
+ "timeout_seconds": 180
},
{
"args": [
- "--scenario_json",
- "'{\"name\": \"cpp_protobuf_sync_unary_ping_pong_secure\", \"warmup_seconds\": 1, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"async_server_threads\": 1, \"core_limit\": 1, \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"server_type\": \"SYNC_SERVER\"}, \"client_config\": {\"client_type\": \"SYNC_CLIENT\", \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"payload_config\": {\"simple_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 1, \"async_client_threads\": 1, \"outstanding_rpcs_per_channel\": 1, \"rpc_type\": \"UNARY\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}, \"num_clients\": 1}'"
+ "--scenarios_json",
+ "{\"scenarios\": [{\"name\": \"cpp_protobuf_sync_unary_ping_pong_secure\", \"warmup_seconds\": 0, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"async_server_threads\": 1, \"core_limit\": 1, \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"server_type\": \"SYNC_SERVER\"}, \"client_config\": {\"client_type\": \"SYNC_CLIENT\", \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"payload_config\": {\"simple_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 1, \"async_client_threads\": 1, \"outstanding_rpcs_per_channel\": 1, \"rpc_type\": \"UNARY\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}, \"num_clients\": 1}]}"
],
"boringssl": true,
"ci_platforms": [
@@ -27232,12 +28497,13 @@
"posix",
"windows"
],
- "shortname": "json_run_localhost:cpp_protobuf_sync_unary_ping_pong_secure"
+ "shortname": "json_run_localhost:cpp_protobuf_sync_unary_ping_pong_secure",
+ "timeout_seconds": 180
},
{
"args": [
- "--scenario_json",
- "'{\"name\": \"cpp_protobuf_async_unary_qps_unconstrained_secure\", \"warmup_seconds\": 1, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"async_server_threads\": 0, \"core_limit\": 0, \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"server_type\": \"ASYNC_SERVER\"}, \"client_config\": {\"client_type\": \"ASYNC_CLIENT\", \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"payload_config\": {\"simple_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 64, \"async_client_threads\": 0, \"outstanding_rpcs_per_channel\": 100, \"rpc_type\": \"UNARY\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}, \"num_clients\": 0}'"
+ "--scenarios_json",
+ "{\"scenarios\": [{\"name\": \"cpp_protobuf_async_unary_qps_unconstrained_secure\", \"warmup_seconds\": 0, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"async_server_threads\": 0, \"core_limit\": 0, \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"server_type\": \"ASYNC_SERVER\"}, \"client_config\": {\"client_type\": \"ASYNC_CLIENT\", \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"payload_config\": {\"simple_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 64, \"async_client_threads\": 0, \"outstanding_rpcs_per_channel\": 100, \"rpc_type\": \"UNARY\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}, \"num_clients\": 0}]}"
],
"boringssl": true,
"ci_platforms": [
@@ -27258,12 +28524,13 @@
"posix",
"windows"
],
- "shortname": "json_run_localhost:cpp_protobuf_async_unary_qps_unconstrained_secure"
+ "shortname": "json_run_localhost:cpp_protobuf_async_unary_qps_unconstrained_secure",
+ "timeout_seconds": 180
},
{
"args": [
- "--scenario_json",
- "'{\"name\": \"cpp_protobuf_async_streaming_qps_unconstrained_secure\", \"warmup_seconds\": 1, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"async_server_threads\": 0, \"core_limit\": 0, \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"server_type\": \"ASYNC_SERVER\"}, \"client_config\": {\"client_type\": \"ASYNC_CLIENT\", \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"payload_config\": {\"simple_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 64, \"async_client_threads\": 0, \"outstanding_rpcs_per_channel\": 100, \"rpc_type\": \"STREAMING\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}, \"num_clients\": 0}'"
+ "--scenarios_json",
+ "{\"scenarios\": [{\"name\": \"cpp_protobuf_async_streaming_qps_unconstrained_secure\", \"warmup_seconds\": 0, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"async_server_threads\": 0, \"core_limit\": 0, \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"server_type\": \"ASYNC_SERVER\"}, \"client_config\": {\"client_type\": \"ASYNC_CLIENT\", \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"payload_config\": {\"simple_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 64, \"async_client_threads\": 0, \"outstanding_rpcs_per_channel\": 100, \"rpc_type\": \"STREAMING\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}, \"num_clients\": 0}]}"
],
"boringssl": true,
"ci_platforms": [
@@ -27284,12 +28551,13 @@
"posix",
"windows"
],
- "shortname": "json_run_localhost:cpp_protobuf_async_streaming_qps_unconstrained_secure"
+ "shortname": "json_run_localhost:cpp_protobuf_async_streaming_qps_unconstrained_secure",
+ "timeout_seconds": 180
},
{
"args": [
- "--scenario_json",
- "'{\"name\": \"cpp_generic_async_streaming_qps_unconstrained_secure\", \"warmup_seconds\": 1, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"async_server_threads\": 0, \"core_limit\": 0, \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"payload_config\": {\"bytebuf_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"server_type\": \"ASYNC_GENERIC_SERVER\"}, \"client_config\": {\"client_type\": \"ASYNC_CLIENT\", \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"payload_config\": {\"bytebuf_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 64, \"async_client_threads\": 0, \"outstanding_rpcs_per_channel\": 100, \"rpc_type\": \"STREAMING\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}, \"num_clients\": 0}'"
+ "--scenarios_json",
+ "{\"scenarios\": [{\"name\": \"cpp_generic_async_streaming_qps_unconstrained_secure\", \"warmup_seconds\": 0, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"async_server_threads\": 0, \"core_limit\": 0, \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"payload_config\": {\"bytebuf_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"server_type\": \"ASYNC_GENERIC_SERVER\"}, \"client_config\": {\"client_type\": \"ASYNC_CLIENT\", \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"payload_config\": {\"bytebuf_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 64, \"async_client_threads\": 0, \"outstanding_rpcs_per_channel\": 100, \"rpc_type\": \"STREAMING\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}, \"num_clients\": 0}]}"
],
"boringssl": true,
"ci_platforms": [
@@ -27310,12 +28578,13 @@
"posix",
"windows"
],
- "shortname": "json_run_localhost:cpp_generic_async_streaming_qps_unconstrained_secure"
+ "shortname": "json_run_localhost:cpp_generic_async_streaming_qps_unconstrained_secure",
+ "timeout_seconds": 180
},
{
"args": [
- "--scenario_json",
- "'{\"name\": \"cpp_generic_async_streaming_qps_one_server_core_secure\", \"warmup_seconds\": 1, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"async_server_threads\": 1, \"core_limit\": 1, \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"payload_config\": {\"bytebuf_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"server_type\": \"ASYNC_GENERIC_SERVER\"}, \"client_config\": {\"client_type\": \"ASYNC_CLIENT\", \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"payload_config\": {\"bytebuf_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 64, \"async_client_threads\": 0, \"outstanding_rpcs_per_channel\": 100, \"rpc_type\": \"STREAMING\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}, \"num_clients\": 0}'"
+ "--scenarios_json",
+ "{\"scenarios\": [{\"name\": \"cpp_generic_async_streaming_qps_one_server_core_secure\", \"warmup_seconds\": 0, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"async_server_threads\": 1, \"core_limit\": 1, \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"payload_config\": {\"bytebuf_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"server_type\": \"ASYNC_GENERIC_SERVER\"}, \"client_config\": {\"client_type\": \"ASYNC_CLIENT\", \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"payload_config\": {\"bytebuf_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 64, \"async_client_threads\": 0, \"outstanding_rpcs_per_channel\": 100, \"rpc_type\": \"STREAMING\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}, \"num_clients\": 0}]}"
],
"boringssl": true,
"ci_platforms": [
@@ -27336,12 +28605,13 @@
"posix",
"windows"
],
- "shortname": "json_run_localhost:cpp_generic_async_streaming_qps_one_server_core_secure"
+ "shortname": "json_run_localhost:cpp_generic_async_streaming_qps_one_server_core_secure",
+ "timeout_seconds": 180
},
{
"args": [
- "--scenario_json",
- "'{\"name\": \"cpp_generic_async_streaming_ping_pong_insecure\", \"warmup_seconds\": 1, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"async_server_threads\": 1, \"core_limit\": 1, \"security_params\": null, \"payload_config\": {\"bytebuf_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"server_type\": \"ASYNC_GENERIC_SERVER\"}, \"client_config\": {\"client_type\": \"ASYNC_CLIENT\", \"security_params\": null, \"payload_config\": {\"bytebuf_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 1, \"async_client_threads\": 1, \"outstanding_rpcs_per_channel\": 1, \"rpc_type\": \"STREAMING\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}, \"num_clients\": 1}'"
+ "--scenarios_json",
+ "{\"scenarios\": [{\"name\": \"cpp_generic_async_streaming_ping_pong_insecure\", \"warmup_seconds\": 0, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"async_server_threads\": 1, \"core_limit\": 1, \"security_params\": null, \"payload_config\": {\"bytebuf_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"server_type\": \"ASYNC_GENERIC_SERVER\"}, \"client_config\": {\"client_type\": \"ASYNC_CLIENT\", \"security_params\": null, \"payload_config\": {\"bytebuf_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 1, \"async_client_threads\": 1, \"outstanding_rpcs_per_channel\": 1, \"rpc_type\": \"STREAMING\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}, \"num_clients\": 1}]}"
],
"boringssl": true,
"ci_platforms": [
@@ -27362,12 +28632,13 @@
"posix",
"windows"
],
- "shortname": "json_run_localhost:cpp_generic_async_streaming_ping_pong_insecure"
+ "shortname": "json_run_localhost:cpp_generic_async_streaming_ping_pong_insecure",
+ "timeout_seconds": 180
},
{
"args": [
- "--scenario_json",
- "'{\"name\": \"cpp_protobuf_async_streaming_ping_pong_insecure\", \"warmup_seconds\": 1, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"async_server_threads\": 1, \"core_limit\": 1, \"security_params\": null, \"server_type\": \"ASYNC_SERVER\"}, \"client_config\": {\"client_type\": \"ASYNC_CLIENT\", \"security_params\": null, \"payload_config\": {\"simple_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 1, \"async_client_threads\": 1, \"outstanding_rpcs_per_channel\": 1, \"rpc_type\": \"STREAMING\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}, \"num_clients\": 1}'"
+ "--scenarios_json",
+ "{\"scenarios\": [{\"name\": \"cpp_protobuf_async_streaming_ping_pong_insecure\", \"warmup_seconds\": 0, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"async_server_threads\": 1, \"core_limit\": 1, \"security_params\": null, \"server_type\": \"ASYNC_SERVER\"}, \"client_config\": {\"client_type\": \"ASYNC_CLIENT\", \"security_params\": null, \"payload_config\": {\"simple_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 1, \"async_client_threads\": 1, \"outstanding_rpcs_per_channel\": 1, \"rpc_type\": \"STREAMING\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}, \"num_clients\": 1}]}"
],
"boringssl": true,
"ci_platforms": [
@@ -27388,12 +28659,13 @@
"posix",
"windows"
],
- "shortname": "json_run_localhost:cpp_protobuf_async_streaming_ping_pong_insecure"
+ "shortname": "json_run_localhost:cpp_protobuf_async_streaming_ping_pong_insecure",
+ "timeout_seconds": 180
},
{
"args": [
- "--scenario_json",
- "'{\"name\": \"cpp_protobuf_async_unary_ping_pong_insecure\", \"warmup_seconds\": 1, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"async_server_threads\": 1, \"core_limit\": 1, \"security_params\": null, \"server_type\": \"ASYNC_SERVER\"}, \"client_config\": {\"client_type\": \"ASYNC_CLIENT\", \"security_params\": null, \"payload_config\": {\"simple_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 1, \"async_client_threads\": 1, \"outstanding_rpcs_per_channel\": 1, \"rpc_type\": \"UNARY\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}, \"num_clients\": 1}'"
+ "--scenarios_json",
+ "{\"scenarios\": [{\"name\": \"cpp_protobuf_async_unary_ping_pong_insecure\", \"warmup_seconds\": 0, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"async_server_threads\": 1, \"core_limit\": 1, \"security_params\": null, \"server_type\": \"ASYNC_SERVER\"}, \"client_config\": {\"client_type\": \"ASYNC_CLIENT\", \"security_params\": null, \"payload_config\": {\"simple_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 1, \"async_client_threads\": 1, \"outstanding_rpcs_per_channel\": 1, \"rpc_type\": \"UNARY\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}, \"num_clients\": 1}]}"
],
"boringssl": true,
"ci_platforms": [
@@ -27414,12 +28686,13 @@
"posix",
"windows"
],
- "shortname": "json_run_localhost:cpp_protobuf_async_unary_ping_pong_insecure"
+ "shortname": "json_run_localhost:cpp_protobuf_async_unary_ping_pong_insecure",
+ "timeout_seconds": 180
},
{
"args": [
- "--scenario_json",
- "'{\"name\": \"cpp_protobuf_sync_unary_ping_pong_insecure\", \"warmup_seconds\": 1, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"async_server_threads\": 1, \"core_limit\": 1, \"security_params\": null, \"server_type\": \"SYNC_SERVER\"}, \"client_config\": {\"client_type\": \"SYNC_CLIENT\", \"security_params\": null, \"payload_config\": {\"simple_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 1, \"async_client_threads\": 1, \"outstanding_rpcs_per_channel\": 1, \"rpc_type\": \"UNARY\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}, \"num_clients\": 1}'"
+ "--scenarios_json",
+ "{\"scenarios\": [{\"name\": \"cpp_protobuf_sync_unary_ping_pong_insecure\", \"warmup_seconds\": 0, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"async_server_threads\": 1, \"core_limit\": 1, \"security_params\": null, \"server_type\": \"SYNC_SERVER\"}, \"client_config\": {\"client_type\": \"SYNC_CLIENT\", \"security_params\": null, \"payload_config\": {\"simple_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 1, \"async_client_threads\": 1, \"outstanding_rpcs_per_channel\": 1, \"rpc_type\": \"UNARY\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}, \"num_clients\": 1}]}"
],
"boringssl": true,
"ci_platforms": [
@@ -27440,12 +28713,13 @@
"posix",
"windows"
],
- "shortname": "json_run_localhost:cpp_protobuf_sync_unary_ping_pong_insecure"
+ "shortname": "json_run_localhost:cpp_protobuf_sync_unary_ping_pong_insecure",
+ "timeout_seconds": 180
},
{
"args": [
- "--scenario_json",
- "'{\"name\": \"cpp_protobuf_async_unary_qps_unconstrained_insecure\", \"warmup_seconds\": 1, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"async_server_threads\": 0, \"core_limit\": 0, \"security_params\": null, \"server_type\": \"ASYNC_SERVER\"}, \"client_config\": {\"client_type\": \"ASYNC_CLIENT\", \"security_params\": null, \"payload_config\": {\"simple_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 64, \"async_client_threads\": 0, \"outstanding_rpcs_per_channel\": 100, \"rpc_type\": \"UNARY\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}, \"num_clients\": 0}'"
+ "--scenarios_json",
+ "{\"scenarios\": [{\"name\": \"cpp_protobuf_async_unary_qps_unconstrained_insecure\", \"warmup_seconds\": 0, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"async_server_threads\": 0, \"core_limit\": 0, \"security_params\": null, \"server_type\": \"ASYNC_SERVER\"}, \"client_config\": {\"client_type\": \"ASYNC_CLIENT\", \"security_params\": null, \"payload_config\": {\"simple_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 64, \"async_client_threads\": 0, \"outstanding_rpcs_per_channel\": 100, \"rpc_type\": \"UNARY\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}, \"num_clients\": 0}]}"
],
"boringssl": true,
"ci_platforms": [
@@ -27466,12 +28740,13 @@
"posix",
"windows"
],
- "shortname": "json_run_localhost:cpp_protobuf_async_unary_qps_unconstrained_insecure"
+ "shortname": "json_run_localhost:cpp_protobuf_async_unary_qps_unconstrained_insecure",
+ "timeout_seconds": 180
},
{
"args": [
- "--scenario_json",
- "'{\"name\": \"cpp_protobuf_async_streaming_qps_unconstrained_insecure\", \"warmup_seconds\": 1, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"async_server_threads\": 0, \"core_limit\": 0, \"security_params\": null, \"server_type\": \"ASYNC_SERVER\"}, \"client_config\": {\"client_type\": \"ASYNC_CLIENT\", \"security_params\": null, \"payload_config\": {\"simple_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 64, \"async_client_threads\": 0, \"outstanding_rpcs_per_channel\": 100, \"rpc_type\": \"STREAMING\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}, \"num_clients\": 0}'"
+ "--scenarios_json",
+ "{\"scenarios\": [{\"name\": \"cpp_protobuf_async_streaming_qps_unconstrained_insecure\", \"warmup_seconds\": 0, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"async_server_threads\": 0, \"core_limit\": 0, \"security_params\": null, \"server_type\": \"ASYNC_SERVER\"}, \"client_config\": {\"client_type\": \"ASYNC_CLIENT\", \"security_params\": null, \"payload_config\": {\"simple_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 64, \"async_client_threads\": 0, \"outstanding_rpcs_per_channel\": 100, \"rpc_type\": \"STREAMING\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}, \"num_clients\": 0}]}"
],
"boringssl": true,
"ci_platforms": [
@@ -27492,12 +28767,13 @@
"posix",
"windows"
],
- "shortname": "json_run_localhost:cpp_protobuf_async_streaming_qps_unconstrained_insecure"
+ "shortname": "json_run_localhost:cpp_protobuf_async_streaming_qps_unconstrained_insecure",
+ "timeout_seconds": 180
},
{
"args": [
- "--scenario_json",
- "'{\"name\": \"cpp_generic_async_streaming_qps_unconstrained_insecure\", \"warmup_seconds\": 1, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"async_server_threads\": 0, \"core_limit\": 0, \"security_params\": null, \"payload_config\": {\"bytebuf_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"server_type\": \"ASYNC_GENERIC_SERVER\"}, \"client_config\": {\"client_type\": \"ASYNC_CLIENT\", \"security_params\": null, \"payload_config\": {\"bytebuf_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 64, \"async_client_threads\": 0, \"outstanding_rpcs_per_channel\": 100, \"rpc_type\": \"STREAMING\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}, \"num_clients\": 0}'"
+ "--scenarios_json",
+ "{\"scenarios\": [{\"name\": \"cpp_generic_async_streaming_qps_unconstrained_insecure\", \"warmup_seconds\": 0, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"async_server_threads\": 0, \"core_limit\": 0, \"security_params\": null, \"payload_config\": {\"bytebuf_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"server_type\": \"ASYNC_GENERIC_SERVER\"}, \"client_config\": {\"client_type\": \"ASYNC_CLIENT\", \"security_params\": null, \"payload_config\": {\"bytebuf_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 64, \"async_client_threads\": 0, \"outstanding_rpcs_per_channel\": 100, \"rpc_type\": \"STREAMING\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}, \"num_clients\": 0}]}"
],
"boringssl": true,
"ci_platforms": [
@@ -27518,12 +28794,13 @@
"posix",
"windows"
],
- "shortname": "json_run_localhost:cpp_generic_async_streaming_qps_unconstrained_insecure"
+ "shortname": "json_run_localhost:cpp_generic_async_streaming_qps_unconstrained_insecure",
+ "timeout_seconds": 180
},
{
"args": [
- "--scenario_json",
- "'{\"name\": \"cpp_generic_async_streaming_qps_one_server_core_insecure\", \"warmup_seconds\": 1, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"async_server_threads\": 1, \"core_limit\": 1, \"security_params\": null, \"payload_config\": {\"bytebuf_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"server_type\": \"ASYNC_GENERIC_SERVER\"}, \"client_config\": {\"client_type\": \"ASYNC_CLIENT\", \"security_params\": null, \"payload_config\": {\"bytebuf_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 64, \"async_client_threads\": 0, \"outstanding_rpcs_per_channel\": 100, \"rpc_type\": \"STREAMING\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}, \"num_clients\": 0}'"
+ "--scenarios_json",
+ "{\"scenarios\": [{\"name\": \"cpp_generic_async_streaming_qps_one_server_core_insecure\", \"warmup_seconds\": 0, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"async_server_threads\": 1, \"core_limit\": 1, \"security_params\": null, \"payload_config\": {\"bytebuf_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"server_type\": \"ASYNC_GENERIC_SERVER\"}, \"client_config\": {\"client_type\": \"ASYNC_CLIENT\", \"security_params\": null, \"payload_config\": {\"bytebuf_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 64, \"async_client_threads\": 0, \"outstanding_rpcs_per_channel\": 100, \"rpc_type\": \"STREAMING\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}, \"num_clients\": 0}]}"
],
"boringssl": true,
"ci_platforms": [
@@ -27544,7 +28821,8 @@
"posix",
"windows"
],
- "shortname": "json_run_localhost:cpp_generic_async_streaming_qps_one_server_core_insecure"
+ "shortname": "json_run_localhost:cpp_generic_async_streaming_qps_one_server_core_insecure",
+ "timeout_seconds": 180
},
{
"args": [