Diffstat (limited to 'tools')
-rw-r--r--  tools/bazel.rc | 68
-rwxr-xr-x  tools/buildgen/generate_build_additions.sh | 3
-rwxr-xr-x  tools/codegen/core/gen_static_metadata.py | 1
-rwxr-xr-x  tools/distrib/check_copyright.py | 4
-rwxr-xr-x  tools/distrib/check_include_guards.py | 1
-rwxr-xr-x  tools/distrib/check_nanopb_output.sh | 18
-rwxr-xr-x  tools/dockerfile/interoptest/grpc_interop_cxx/build_interop.sh | 2
-rw-r--r--  tools/dockerfile/interoptest/grpc_interop_go/Dockerfile | 2
-rw-r--r--  tools/dockerfile/interoptest/grpc_interop_java/build_interop.sh | 8
-rw-r--r--  tools/dockerfile/interoptest/grpc_interop_java_oracle8/build_interop.sh | 8
-rw-r--r--  tools/dockerfile/interoptest/lb_interop_fake_servers/Dockerfile | 34
-rw-r--r--  tools/dockerfile/interoptest/lb_interop_fake_servers/build_interop.sh | 35
-rw-r--r--  tools/dockerfile/test/bazel/Dockerfile | 7
-rw-r--r--  tools/dockerfile/test/python_stretch_2.7_x64/Dockerfile | 69
-rw-r--r--  tools/dockerfile/test/python_stretch_3.5_x64/Dockerfile (renamed from tools/dockerfile/test/python_pyenv_x64/Dockerfile) | 55
-rw-r--r--  tools/dockerfile/test/python_stretch_3.6_x64/Dockerfile | 72
-rw-r--r--  tools/dockerfile/test/python_stretch_3.7_x64/Dockerfile | 72
-rw-r--r--  tools/doxygen/Doxyfile.c++ | 9
-rw-r--r--  tools/doxygen/Doxyfile.c++.internal | 17
-rw-r--r--  tools/doxygen/Doxyfile.core | 4
-rw-r--r--  tools/doxygen/Doxyfile.core.internal | 34
-rwxr-xr-x  tools/gce/create_linux_kokoro_performance_worker_from_image.sh | 38
-rw-r--r--  tools/internal_ci/helper_scripts/prepare_build_grpclb_interop_rc | 33
-rw-r--r--  tools/internal_ci/helper_scripts/prepare_build_macos_rc | 3
-rwxr-xr-x  tools/internal_ci/linux/grpc_asan_on_foundry.sh | 6
-rwxr-xr-x  tools/internal_ci/linux/grpc_bazel_on_foundry_base.sh | 41
-rw-r--r--  tools/internal_ci/linux/grpc_bazel_on_foundry_dbg.sh | 2
-rw-r--r--  tools/internal_ci/linux/grpc_bazel_on_foundry_opt.sh | 2
-rw-r--r--  tools/internal_ci/linux/grpc_e2e_performance_singlevm.cfg | 25
-rwxr-xr-x  tools/internal_ci/linux/grpc_e2e_performance_singlevm.sh | 29
-rwxr-xr-x  tools/internal_ci/linux/grpc_full_performance_master.sh | 2
-rw-r--r--  tools/internal_ci/linux/grpc_interop_matrix.cfg | 6
-rwxr-xr-x  tools/internal_ci/linux/grpc_interop_matrix.sh | 2
-rw-r--r--  tools/internal_ci/linux/grpc_msan_on_foundry.sh | 66
-rwxr-xr-x  tools/internal_ci/linux/grpc_performance_profile_daily.sh | 4
-rwxr-xr-x  tools/internal_ci/linux/grpc_run_grpclb_interop_tests.sh | 26
-rw-r--r--  tools/internal_ci/linux/grpc_tsan_on_foundry.sh | 6
-rw-r--r--  tools/internal_ci/linux/grpc_ubsan_on_foundry.sh | 63
-rw-r--r--  tools/internal_ci/linux/grpclb_in_dns_interop.cfg | 25
-rw-r--r--  tools/internal_ci/linux/pull_request/grpc_asan_on_foundry.sh | 6
-rw-r--r--  tools/internal_ci/linux/pull_request/grpc_bazel_on_foundry_dbg.sh | 2
-rw-r--r--  tools/internal_ci/linux/pull_request/grpc_bazel_on_foundry_opt.sh | 2
-rw-r--r--  tools/internal_ci/linux/pull_request/grpc_interop_matrix_adhoc.cfg | 30
-rw-r--r--  tools/internal_ci/linux/pull_request/grpc_tsan_on_foundry.sh | 6
-rw-r--r--  tools/internal_ci/linux/pull_request/grpc_ubsan_on_foundry.sh | 57
-rwxr-xr-x  tools/internal_ci/linux/run_performance_profile_hourly.sh | 4
-rw-r--r--  tools/internal_ci/macos/grpc_basictests_objc_dbg.cfg | 31
-rw-r--r--  tools/internal_ci/macos/grpc_basictests_objc_opt.cfg | 31
-rw-r--r--  tools/internal_ci/macos/pull_request/grpc_basictests_objc_dbg.cfg | 31
-rw-r--r--  tools/internal_ci/macos/pull_request/grpc_basictests_objc_opt.cfg | 31
-rw-r--r--  tools/interop_matrix/client_matrix.py | 37
-rwxr-xr-x  tools/interop_matrix/run_interop_matrix_tests.py | 222
-rw-r--r--  tools/remote_build/README.md | 33
-rw-r--r--  tools/remote_build/kokoro.bazelrc | 38
-rw-r--r--  tools/remote_build/manual.bazelrc | 45
-rw-r--r--  tools/remote_build/rbe_common.bazelrc | 83
-rw-r--r--  tools/run_tests/generated/lb_interop_test_scenarios.json | 1167
-rw-r--r--  tools/run_tests/generated/sources_and_headers.json | 268
-rw-r--r--  tools/run_tests/generated/tests.json | 997
-rwxr-xr-x  tools/run_tests/lb_interop_tests/gen_build_yaml.py | 347
-rwxr-xr-x  tools/run_tests/performance/build_performance.sh | 14
-rwxr-xr-x  tools/run_tests/performance/run_qps_driver.sh | 2
-rw-r--r--  tools/run_tests/performance/scenario_config.py | 52
-rwxr-xr-x  tools/run_tests/python_utils/dockerjob.py | 27
-rw-r--r--  tools/run_tests/python_utils/upload_rbe_results.py | 2
-rwxr-xr-x  tools/run_tests/run_grpclb_interop_tests.py | 609
-rwxr-xr-x  tools/run_tests/run_interop_tests.py | 7
-rwxr-xr-x  tools/run_tests/run_performance_tests.py | 15
-rwxr-xr-x  tools/run_tests/run_tests.py | 23
-rwxr-xr-x  tools/run_tests/sanity/check_submodules.sh | 2
-rwxr-xr-x  tools/run_tests/sanity/core_banned_functions.py | 3
71 files changed, 3634 insertions, 1492 deletions
diff --git a/tools/bazel.rc b/tools/bazel.rc
index 39f8071535..d33e6e086b 100644
--- a/tools/bazel.rc
+++ b/tools/bazel.rc
@@ -1,51 +1,51 @@
+# bazelrc file
+# bazel >= 0.18 looks for %workspace%/.bazelrc (which redirects here)
+# Older bazel versions look for %workspace%/tools/bazel.rc (this file)
+# See https://github.com/bazelbuild/bazel/issues/6319
+
build --client_env=CC=clang
-build --copt -DGRPC_BAZEL_BUILD
+build --copt=-DGRPC_BAZEL_BUILD
-build:opt --copt -Wframe-larger-than=16384
+build:opt --copt=-Wframe-larger-than=16384
build:asan --strip=never
-build:asan --copt -fsanitize-coverage=edge
-build:asan --copt -fsanitize=address
-build:asan --copt -O0
-build:asan --copt -fno-omit-frame-pointer
-build:asan --copt -DGPR_NO_DIRECT_SYSCALLS
-build:asan --linkopt -fsanitize=address
+build:asan --copt=-fsanitize=address
+build:asan --copt=-O0
+build:asan --copt=-fno-omit-frame-pointer
+build:asan --copt=-DGPR_NO_DIRECT_SYSCALLS
+build:asan --linkopt=-fsanitize=address
build:asan --action_env=ASAN_OPTIONS=detect_leaks=1:color=always
build:asan --action_env=LSAN_OPTIONS=suppressions=test/core/util/lsan_suppressions.txt:report_objects=1
build:msan --strip=never
-build:msan --copt -fsanitize-coverage=edge
-build:msan --copt -fsanitize=memory
-build:msan --copt -O0
-build:msan --copt -fsanitize-memory-track-origins
-build:msan --copt -fsanitize-memory-use-after-dtor
-build:msan --copt -fno-omit-frame-pointer
-build:msan --copt -fPIE
-build:msan --copt -DGPR_NO_DIRECT_SYSCALLS
-build:msan --linkopt -fsanitize=memory
-build:msan --linkopt -fPIE
+build:msan --copt=-fsanitize=memory
+build:msan --copt=-O0
+build:msan --copt=-fsanitize-memory-track-origins
+build:msan --copt=-fsanitize-memory-use-after-dtor
+build:msan --copt=-fno-omit-frame-pointer
+build:msan --copt=-DGPR_NO_DIRECT_SYSCALLS
+build:msan --linkopt=-fsanitize=memory
build:msan --action_env=MSAN_OPTIONS=poison_in_dtor=1
build:tsan --strip=never
-build:tsan --copt -fsanitize=thread
-build:tsan --copt -fno-omit-frame-pointer
-build:tsan --copt -DGPR_NO_DIRECT_SYSCALLS
-build:tsan --copt -DGRPC_TSAN
-build:tsan --linkopt -fsanitize=thread
+build:tsan --copt=-fsanitize=thread
+build:tsan --copt=-fno-omit-frame-pointer
+build:tsan --copt=-DGPR_NO_DIRECT_SYSCALLS
+build:tsan --copt=-DGRPC_TSAN
+build:tsan --linkopt=-fsanitize=thread
build:tsan --action_env=TSAN_OPTIONS=suppressions=test/core/util/tsan_suppressions.txt:halt_on_error=1:second_deadlock_stack=1
build:ubsan --strip=never
-build:ubsan --copt -fsanitize-coverage=edge
-build:ubsan --copt -fsanitize=undefined
-build:ubsan --copt -fno-omit-frame-pointer
-build:ubsan --copt -DGRPC_UBSAN
-build:ubsan --copt -DNDEBUG
-build:ubsan --copt -fno-sanitize=function,vptr
-build:ubsan --linkopt -fsanitize=undefined
+build:ubsan --copt=-fsanitize=undefined
+build:ubsan --copt=-fno-omit-frame-pointer
+build:ubsan --copt=-DGRPC_UBSAN
+build:ubsan --copt=-DNDEBUG
+build:ubsan --copt=-fno-sanitize=function,vptr
+build:ubsan --linkopt=-fsanitize=undefined
build:ubsan --action_env=UBSAN_OPTIONS=halt_on_error=1:print_stacktrace=1:suppressions=test/core/util/ubsan_suppressions.txt
build:basicprof --strip=never
-build:basicprof --copt -DNDEBUG
-build:basicprof --copt -O2
-build:basicprof --copt -DGRPC_BASIC_PROFILER
-build:basicprof --copt -DGRPC_TIMERS_RDTSC
+build:basicprof --copt=-DNDEBUG
+build:basicprof --copt=-O2
+build:basicprof --copt=-DGRPC_BASIC_PROFILER
+build:basicprof --copt=-DGRPC_TIMERS_RDTSC
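
Note: the named configs above (asan, msan, tsan, ubsan, basicprof, opt) are selected with bazel's standard --config flag. A minimal local sketch, assuming the usual //test/... target pattern used elsewhere in this diff:

    # run the test tree under AddressSanitizer using the asan config defined above
    bazel test --config=asan //test/...
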
diff --git a/tools/buildgen/generate_build_additions.sh b/tools/buildgen/generate_build_additions.sh
index 693c02fdb2..5a1f4a598a 100755
--- a/tools/buildgen/generate_build_additions.sh
+++ b/tools/buildgen/generate_build_additions.sh
@@ -25,7 +25,8 @@ gen_build_yaml_dirs=" \
test/core/bad_ssl \
test/core/end2end \
test/cpp/naming \
- test/cpp/qps"
+ test/cpp/qps \
+ tools/run_tests/lb_interop_tests"
gen_build_files=""
for gen_build_yaml in $gen_build_yaml_dirs
do
diff --git a/tools/codegen/core/gen_static_metadata.py b/tools/codegen/core/gen_static_metadata.py
index f705a9bd41..adfd4a24f9 100755
--- a/tools/codegen/core/gen_static_metadata.py
+++ b/tools/codegen/core/gen_static_metadata.py
@@ -63,6 +63,7 @@ CONFIG = [
'grpc.max_response_message_bytes',
# well known method names
'/grpc.lb.v1.LoadBalancer/BalanceLoad',
+ '/grpc.health.v1.Health/Watch',
# compression algorithm names
'deflate',
'gzip',
diff --git a/tools/distrib/check_copyright.py b/tools/distrib/check_copyright.py
index 77f602ad1f..787bef1778 100755
--- a/tools/distrib/check_copyright.py
+++ b/tools/distrib/check_copyright.py
@@ -75,6 +75,8 @@ _EXEMPT = frozenset((
'examples/python/multiplex/route_guide_pb2_grpc.py',
'examples/python/route_guide/route_guide_pb2.py',
'examples/python/route_guide/route_guide_pb2_grpc.py',
+ 'src/core/ext/filters/client_channel/health/health.pb.h',
+ 'src/core/ext/filters/client_channel/health/health.pb.c',
'src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/load_balancer.pb.h',
'src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/load_balancer.pb.c',
'src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/google/protobuf/duration.pb.h',
@@ -87,8 +89,6 @@ _EXEMPT = frozenset((
'src/core/tsi/alts/handshaker/handshaker.pb.c',
'src/core/tsi/alts/handshaker/transport_security_common.pb.h',
'src/core/tsi/alts/handshaker/transport_security_common.pb.c',
- 'src/cpp/server/health/health.pb.h',
- 'src/cpp/server/health/health.pb.c',
# An older file originally from outside gRPC.
'src/php/tests/bootstrap.php',
diff --git a/tools/distrib/check_include_guards.py b/tools/distrib/check_include_guards.py
index 56f48af56a..b8d530cce0 100755
--- a/tools/distrib/check_include_guards.py
+++ b/tools/distrib/check_include_guards.py
@@ -156,6 +156,7 @@ argp.add_argument('--precommit', default=False, action='store_true')
args = argp.parse_args()
KNOWN_BAD = set([
+ 'src/core/ext/filters/client_channel/health/health.pb.h',
'src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/load_balancer.pb.h',
'src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/google/protobuf/duration.pb.h',
'src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/google/protobuf/timestamp.pb.h',
diff --git a/tools/distrib/check_nanopb_output.sh b/tools/distrib/check_nanopb_output.sh
index 6b98619c32..018cbb7b66 100755
--- a/tools/distrib/check_nanopb_output.sh
+++ b/tools/distrib/check_nanopb_output.sh
@@ -16,6 +16,7 @@
set -ex
readonly NANOPB_ALTS_TMP_OUTPUT="$(mktemp -d)"
+readonly NANOPB_HEALTH_TMP_OUTPUT="$(mktemp -d)"
readonly NANOPB_TMP_OUTPUT="$(mktemp -d)"
readonly PROTOBUF_INSTALL_PREFIX="$(mktemp -d)"
@@ -68,6 +69,23 @@ if ! diff -r "$NANOPB_TMP_OUTPUT" src/core/ext/filters/client_channel/lb_policy/
fi
#
+# checks for health.proto
+#
+readonly HEALTH_GRPC_OUTPUT_PATH='src/core/ext/filters/client_channel/health'
+# nanopb-compile the proto to a temp location
+./tools/codegen/core/gen_nano_proto.sh \
+ src/proto/grpc/health/v1/health.proto \
+ "$NANOPB_HEALTH_TMP_OUTPUT" \
+ "$HEALTH_GRPC_OUTPUT_PATH"
+# compare outputs to checked compiled code
+for NANOPB_OUTPUT_FILE in $NANOPB_HEALTH_TMP_OUTPUT/*.pb.*; do
+ if ! diff "$NANOPB_OUTPUT_FILE" "${HEALTH_GRPC_OUTPUT_PATH}/$(basename $NANOPB_OUTPUT_FILE)"; then
+ echo "Outputs differ: $NANOPB_HEALTH_TMP_OUTPUT vs $HEALTH_GRPC_OUTPUT_PATH"
+ exit 2
+ fi
+done
+
+#
# Checks for handshaker.proto and transport_security_common.proto
#
readonly HANDSHAKER_GRPC_OUTPUT_PATH='src/core/tsi/alts/handshaker'
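
Note: the new health.proto check regenerates the nanopb output into a temp directory and diffs it against the checked-in health.pb.{c,h}. A minimal sketch of running the whole check, assuming the gRPC repo root as the working directory (the script uses repo-relative paths):

    cd grpc   # repo root; path is an assumption
    tools/distrib/check_nanopb_output.sh
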
diff --git a/tools/dockerfile/interoptest/grpc_interop_cxx/build_interop.sh b/tools/dockerfile/interoptest/grpc_interop_cxx/build_interop.sh
index 2f31bea69b..fd549fb9e5 100755
--- a/tools/dockerfile/interoptest/grpc_interop_cxx/build_interop.sh
+++ b/tools/dockerfile/interoptest/grpc_interop_cxx/build_interop.sh
@@ -31,7 +31,7 @@ cd /var/local/git/grpc
make install-certs
# build C++ interop client & server
-make interop_client interop_server
+make interop_client interop_server -j2
# build C++ http2 client
make http2_client
diff --git a/tools/dockerfile/interoptest/grpc_interop_go/Dockerfile b/tools/dockerfile/interoptest/grpc_interop_go/Dockerfile
index b136259ce9..e5e68943a4 100644
--- a/tools/dockerfile/interoptest/grpc_interop_go/Dockerfile
+++ b/tools/dockerfile/interoptest/grpc_interop_go/Dockerfile
@@ -12,7 +12,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-FROM golang:latest
+FROM golang:1.11
# Using login shell removes Go from path, so we add it.
RUN ln -s /usr/local/go/bin/go /usr/local/bin
diff --git a/tools/dockerfile/interoptest/grpc_interop_java/build_interop.sh b/tools/dockerfile/interoptest/grpc_interop_java/build_interop.sh
index b651ac5b88..0e36c193c9 100644
--- a/tools/dockerfile/interoptest/grpc_interop_java/build_interop.sh
+++ b/tools/dockerfile/interoptest/grpc_interop_java/build_interop.sh
@@ -26,3 +26,11 @@ cd /var/local/git/grpc-java
./gradlew :grpc-interop-testing:installDist -PskipCodegen=true
+# enable extra java logging
+mkdir -p /var/local/grpc_java_logging
+echo "handlers = java.util.logging.ConsoleHandler
+java.util.logging.ConsoleHandler.level = ALL
+.level = FINE
+io.grpc.netty.NettyClientHandler = ALL
+io.grpc.netty.NettyServerHandler = ALL" > /var/local/grpc_java_logging/logconf.txt
+
diff --git a/tools/dockerfile/interoptest/grpc_interop_java_oracle8/build_interop.sh b/tools/dockerfile/interoptest/grpc_interop_java_oracle8/build_interop.sh
index 521111acaa..4c5ba4b7a3 100644
--- a/tools/dockerfile/interoptest/grpc_interop_java_oracle8/build_interop.sh
+++ b/tools/dockerfile/interoptest/grpc_interop_java_oracle8/build_interop.sh
@@ -25,4 +25,12 @@ cp -r /var/local/jenkins/service_account $HOME || true
cd /var/local/git/grpc-java
./gradlew :grpc-interop-testing:installDist -PskipCodegen=true
+
+# enable extra java logging
+mkdir -p /var/local/grpc_java_logging
+echo "handlers = java.util.logging.ConsoleHandler
+java.util.logging.ConsoleHandler.level = ALL
+.level = FINE
+io.grpc.netty.NettyClientHandler = ALL
+io.grpc.netty.NettyServerHandler = ALL" > /var/local/grpc_java_logging/logconf.txt
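
Note: the generated logconf.txt is a standard java.util.logging properties file; this diff does not show how the test driver consumes it, but a JVM would typically pick it up via the java.util.logging.config.file system property. A hypothetical sketch (the launcher path is illustrative, not taken from this diff):

    JVM_ARGS="-Djava.util.logging.config.file=/var/local/grpc_java_logging/logconf.txt" \
      ./build/install/grpc-interop-testing/bin/test-client ...
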
diff --git a/tools/dockerfile/interoptest/lb_interop_fake_servers/Dockerfile b/tools/dockerfile/interoptest/lb_interop_fake_servers/Dockerfile
new file mode 100644
index 0000000000..22963f7f83
--- /dev/null
+++ b/tools/dockerfile/interoptest/lb_interop_fake_servers/Dockerfile
@@ -0,0 +1,34 @@
+# Copyright 2015 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+FROM golang:1.10
+
+RUN apt-get update && apt-get install -y \
+ dnsutils \
+ git \
+ vim \
+ curl \
+ python-pip \
+ python-yaml \
+ make && apt-get clean
+
+RUN ln -s /usr/local/go/bin/go /usr/local/bin
+
+# Install Python packages from PyPI
+RUN pip install --upgrade pip==10.0.1
+RUN pip install virtualenv
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.5.2.post1 six==1.10.0 twisted==17.5.0
+
+# Define the default command.
+CMD ["bash"]
diff --git a/tools/dockerfile/interoptest/lb_interop_fake_servers/build_interop.sh b/tools/dockerfile/interoptest/lb_interop_fake_servers/build_interop.sh
new file mode 100644
index 0000000000..1846d51753
--- /dev/null
+++ b/tools/dockerfile/interoptest/lb_interop_fake_servers/build_interop.sh
@@ -0,0 +1,35 @@
+#!/bin/bash
+# Copyright 2015 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# Gets a built Go interop server, fake balancer server, and python
+# DNS server into a base image.
+set -e
+
+# Clone just the grpc-go source code without any dependencies.
+# We are cloning from a local git repo that contains the right revision
+# to test instead of using "go get" to download from Github directly.
+git clone --recursive /var/local/jenkins/grpc-go src/google.golang.org/grpc
+
+# Get all gRPC Go dependencies
+(cd src/google.golang.org/grpc && make deps && make testdeps)
+
+# Build the interop server and fake balancer
+(cd src/google.golang.org/grpc/interop/server && go install)
+(cd src/google.golang.org/grpc/interop/fake_grpclb && go install)
+
+# Clone the grpc/grpc repo to get the python DNS server.
+# Hack: we don't need to init submodules for the scripts we need.
+mkdir -p /var/local/git/grpc
+git clone /var/local/jenkins/grpc /var/local/git/grpc
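
Note: go install places the resulting binaries in $GOPATH/bin under the names of their package directories (server, fake_grpclb). A hypothetical sketch of starting them inside a container built from this image; the flag names are assumptions, not taken from this diff:

    $GOPATH/bin/fake_grpclb --port=... &   # fake balancer
    $GOPATH/bin/server --port=... &        # Go interop backend
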
diff --git a/tools/dockerfile/test/bazel/Dockerfile b/tools/dockerfile/test/bazel/Dockerfile
index 4f913dc396..0aa6209f4f 100644
--- a/tools/dockerfile/test/bazel/Dockerfile
+++ b/tools/dockerfile/test/bazel/Dockerfile
@@ -44,9 +44,10 @@ RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.5.2.post1 six==1.10.0 t
#========================
# Bazel installation
-RUN echo "deb [arch=amd64] http://storage.googleapis.com/bazel-apt stable jdk1.8" > /etc/apt/sources.list.d/bazel.list
-RUN curl https://bazel.build/bazel-release.pub.gpg | apt-key add -
-RUN apt-get -y update && apt-get -y install bazel=0.15.0 && apt-get clean
+
+RUN apt-get update && apt-get install -y wget && apt-get clean
+RUN wget -q https://github.com/bazelbuild/bazel/releases/download/0.17.1/bazel-0.17.1-linux-x86_64 -O /usr/local/bin/bazel
+RUN chmod 755 /usr/local/bin/bazel
RUN mkdir -p /var/local/jenkins
diff --git a/tools/dockerfile/test/python_stretch_2.7_x64/Dockerfile b/tools/dockerfile/test/python_stretch_2.7_x64/Dockerfile
new file mode 100644
index 0000000000..a7a8174db4
--- /dev/null
+++ b/tools/dockerfile/test/python_stretch_2.7_x64/Dockerfile
@@ -0,0 +1,69 @@
+# Copyright 2018 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+FROM debian:stretch
+
+# Install Git and basic packages.
+RUN apt-get update && apt-get install -y \
+ autoconf \
+ autotools-dev \
+ build-essential \
+ bzip2 \
+ ccache \
+ curl \
+ dnsutils \
+ gcc \
+ gcc-multilib \
+ git \
+ golang \
+ gyp \
+ lcov \
+ libc6 \
+ libc6-dbg \
+ libc6-dev \
+ libgtest-dev \
+ libtool \
+ make \
+ perl \
+ strace \
+ python-dev \
+ python-setuptools \
+ python-yaml \
+ telnet \
+ unzip \
+ wget \
+ zip && apt-get clean
+
+#================
+# Build profiling
+RUN apt-get update && apt-get install -y time && apt-get clean
+
+# Google Cloud platform API libraries
+RUN apt-get update && apt-get install -y python-pip && apt-get clean
+RUN pip install --upgrade google-api-python-client oauth2client
+
+# Install Python 2.7
+RUN apt-get update && apt-get install -y python2.7 python-all-dev
+RUN curl https://bootstrap.pypa.io/get-pip.py | python2.7
+
+# Add Debian 'testing' repository
+RUN echo 'deb http://ftp.de.debian.org/debian testing main' >> /etc/apt/sources.list
+RUN echo 'APT::Default-Release "stable";' | tee -a /etc/apt/apt.conf.d/00local
+
+
+RUN mkdir /var/local/jenkins
+
+# Define the default command.
+CMD ["bash"]
+
diff --git a/tools/dockerfile/test/python_pyenv_x64/Dockerfile b/tools/dockerfile/test/python_stretch_3.5_x64/Dockerfile
index d94ccc8c74..0e97e77e2f 100644
--- a/tools/dockerfile/test/python_pyenv_x64/Dockerfile
+++ b/tools/dockerfile/test/python_stretch_3.5_x64/Dockerfile
@@ -1,4 +1,4 @@
-# Copyright 2016 gRPC authors.
+# Copyright 2018 The gRPC Authors
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
@@ -13,7 +13,7 @@
# limitations under the License.
FROM debian:stretch
-
+
# Install Git and basic packages.
RUN apt-get update && apt-get install -y \
autoconf \
@@ -53,51 +53,20 @@ RUN apt-get update && apt-get install -y time && apt-get clean
RUN apt-get update && apt-get install -y python-pip && apt-get clean
RUN pip install --upgrade google-api-python-client oauth2client
-#====================
-# Python dependencies
-
-# Install dependencies
-
-RUN apt-get update && apt-get install -y \
- python-all-dev \
- python3-all-dev \
- python-pip
-
-# Install Python packages from PyPI
-RUN pip install --upgrade pip==10.0.1
-RUN pip install virtualenv
-RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.5.2.post1 six==1.10.0 twisted==17.5.0
-
-# Install dependencies for pyenv
-RUN apt-get update && apt-get install -y \
- libbz2-dev \
- libncurses5-dev \
- libncursesw5-dev \
- libreadline-dev \
- libsqlite3-dev \
- libssl-dev \
- llvm \
- mercurial \
- zlib1g-dev && apt-get clean
-
-# Install Pyenv and dev Python versions 3.{5,6,7}
-RUN curl -L https://raw.githubusercontent.com/yyuu/pyenv-installer/master/bin/pyenv-installer | bash
-ENV PATH /root/.pyenv/bin:$PATH
-RUN eval "$(pyenv init -)"
-RUN eval "$(pyenv virtualenv-init -)"
-RUN pyenv update
-RUN pyenv install 3.5-dev
-RUN pyenv install 3.6-dev
-RUN pyenv install 3.7-dev
-RUN pyenv install pypy-5.3.1
-RUN pyenv local 3.5-dev 3.6-dev 3.7-dev pypy-5.3.1
+# Install Python 2.7
+RUN apt-get update && apt-get install -y python2.7 python-all-dev
+RUN curl https://bootstrap.pypa.io/get-pip.py | python2.7
-# Install pip and virtualenv for Python 3.5
-RUN curl https://bootstrap.pypa.io/get-pip.py | python3.5
-RUN python3.5 -m pip install virtualenv
+# Add Debian 'testing' repository
+RUN echo 'deb http://ftp.de.debian.org/debian testing main' >> /etc/apt/sources.list
+RUN echo 'APT::Default-Release "stable";' | tee -a /etc/apt/apt.conf.d/00local
RUN mkdir /var/local/jenkins
# Define the default command.
CMD ["bash"]
+
+
+RUN apt-get update && apt-get install -y python3.5 python3-all-dev
+RUN curl https://bootstrap.pypa.io/get-pip.py | python3.5
diff --git a/tools/dockerfile/test/python_stretch_3.6_x64/Dockerfile b/tools/dockerfile/test/python_stretch_3.6_x64/Dockerfile
new file mode 100644
index 0000000000..9b16b2d3a1
--- /dev/null
+++ b/tools/dockerfile/test/python_stretch_3.6_x64/Dockerfile
@@ -0,0 +1,72 @@
+# Copyright 2018 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+FROM debian:stretch
+
+# Install Git and basic packages.
+RUN apt-get update && apt-get install -y \
+ autoconf \
+ autotools-dev \
+ build-essential \
+ bzip2 \
+ ccache \
+ curl \
+ dnsutils \
+ gcc \
+ gcc-multilib \
+ git \
+ golang \
+ gyp \
+ lcov \
+ libc6 \
+ libc6-dbg \
+ libc6-dev \
+ libgtest-dev \
+ libtool \
+ make \
+ perl \
+ strace \
+ python-dev \
+ python-setuptools \
+ python-yaml \
+ telnet \
+ unzip \
+ wget \
+ zip && apt-get clean
+
+#================
+# Build profiling
+RUN apt-get update && apt-get install -y time && apt-get clean
+
+# Google Cloud platform API libraries
+RUN apt-get update && apt-get install -y python-pip && apt-get clean
+RUN pip install --upgrade google-api-python-client oauth2client
+
+# Install Python 2.7
+RUN apt-get update && apt-get install -y python2.7 python-all-dev
+RUN curl https://bootstrap.pypa.io/get-pip.py | python2.7
+
+# Add Debian 'testing' repository
+RUN echo 'deb http://ftp.de.debian.org/debian testing main' >> /etc/apt/sources.list
+RUN echo 'APT::Default-Release "stable";' | tee -a /etc/apt/apt.conf.d/00local
+
+
+RUN mkdir /var/local/jenkins
+
+# Define the default command.
+CMD ["bash"]
+
+
+RUN apt-get update && apt-get -t testing install -y python3.6 python3-all-dev
+RUN curl https://bootstrap.pypa.io/get-pip.py | python3.6
diff --git a/tools/dockerfile/test/python_stretch_3.7_x64/Dockerfile b/tools/dockerfile/test/python_stretch_3.7_x64/Dockerfile
new file mode 100644
index 0000000000..add1cc509d
--- /dev/null
+++ b/tools/dockerfile/test/python_stretch_3.7_x64/Dockerfile
@@ -0,0 +1,72 @@
+# Copyright 2018 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+FROM debian:stretch
+
+# Install Git and basic packages.
+RUN apt-get update && apt-get install -y \
+ autoconf \
+ autotools-dev \
+ build-essential \
+ bzip2 \
+ ccache \
+ curl \
+ dnsutils \
+ gcc \
+ gcc-multilib \
+ git \
+ golang \
+ gyp \
+ lcov \
+ libc6 \
+ libc6-dbg \
+ libc6-dev \
+ libgtest-dev \
+ libtool \
+ make \
+ perl \
+ strace \
+ python-dev \
+ python-setuptools \
+ python-yaml \
+ telnet \
+ unzip \
+ wget \
+ zip && apt-get clean
+
+#================
+# Build profiling
+RUN apt-get update && apt-get install -y time && apt-get clean
+
+# Google Cloud platform API libraries
+RUN apt-get update && apt-get install -y python-pip && apt-get clean
+RUN pip install --upgrade google-api-python-client oauth2client
+
+# Install Python 2.7
+RUN apt-get update && apt-get install -y python2.7 python-all-dev
+RUN curl https://bootstrap.pypa.io/get-pip.py | python2.7
+
+# Add Debian 'testing' repository
+RUN echo 'deb http://ftp.de.debian.org/debian testing main' >> /etc/apt/sources.list
+RUN echo 'APT::Default-Release "stable";' | tee -a /etc/apt/apt.conf.d/00local
+
+
+RUN mkdir /var/local/jenkins
+
+# Define the default command.
+CMD ["bash"]
+
+
+RUN apt-get update && apt-get -t testing install -y python3.7 python3-all-dev
+RUN curl https://bootstrap.pypa.io/get-pip.py | python3.7
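
Note: the new python_stretch images share one pattern: a debian:stretch base with the common toolchain, plus a single Python version installed from the Debian 'testing' repository where stretch itself is too old. A hedged sketch of building and smoke-testing one of them locally (the image tag is arbitrary):

    docker build -t grpc_python_stretch_3.7 tools/dockerfile/test/python_stretch_3.7_x64
    docker run --rm grpc_python_stretch_3.7 python3.7 --version
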
diff --git a/tools/doxygen/Doxyfile.c++ b/tools/doxygen/Doxyfile.c++
index 40abd726c4..392113c284 100644
--- a/tools/doxygen/Doxyfile.c++
+++ b/tools/doxygen/Doxyfile.c++
@@ -764,7 +764,6 @@ INPUT = doc/PROTOCOL-HTTP2.md \
doc/PROTOCOL-WEB.md \
doc/binary-logging.md \
doc/c-style-guide.md \
-doc/combiner-explainer.md \
doc/command_line_tool.md \
doc/compression.md \
doc/compression_cookbook.md \
@@ -775,7 +774,6 @@ doc/cpp-style-guide.md \
doc/cpp/pending_api_cleanups.md \
doc/cpp/perf_notes.md \
doc/environment_variables.md \
-doc/epoll-polling-engine.md \
doc/fail_fast.md \
doc/fork_support.md \
doc/g_stands_for.md \
@@ -945,6 +943,8 @@ include/grpcpp/impl/codegen/async_unary_call.h \
include/grpcpp/impl/codegen/byte_buffer.h \
include/grpcpp/impl/codegen/call.h \
include/grpcpp/impl/codegen/call_hook.h \
+include/grpcpp/impl/codegen/call_op_set.h \
+include/grpcpp/impl/codegen/call_op_set_interface.h \
include/grpcpp/impl/codegen/callback_common.h \
include/grpcpp/impl/codegen/channel_interface.h \
include/grpcpp/impl/codegen/client_callback.h \
@@ -959,7 +959,9 @@ include/grpcpp/impl/codegen/core_codegen.h \
include/grpcpp/impl/codegen/core_codegen_interface.h \
include/grpcpp/impl/codegen/create_auth_context.h \
include/grpcpp/impl/codegen/grpc_library.h \
+include/grpcpp/impl/codegen/intercepted_channel.h \
include/grpcpp/impl/codegen/interceptor.h \
+include/grpcpp/impl/codegen/interceptor_common.h \
include/grpcpp/impl/codegen/metadata_map.h \
include/grpcpp/impl/codegen/method_handler_impl.h \
include/grpcpp/impl/codegen/proto_buffer_reader.h \
@@ -969,7 +971,9 @@ include/grpcpp/impl/codegen/rpc_method.h \
include/grpcpp/impl/codegen/rpc_service_method.h \
include/grpcpp/impl/codegen/security/auth_context.h \
include/grpcpp/impl/codegen/serialization_traits.h \
+include/grpcpp/impl/codegen/server_callback.h \
include/grpcpp/impl/codegen/server_context.h \
+include/grpcpp/impl/codegen/server_interceptor.h \
include/grpcpp/impl/codegen/server_interface.h \
include/grpcpp/impl/codegen/service_type.h \
include/grpcpp/impl/codegen/slice.h \
@@ -1005,6 +1009,7 @@ include/grpcpp/support/client_callback.h \
include/grpcpp/support/config.h \
include/grpcpp/support/proto_buffer_reader.h \
include/grpcpp/support/proto_buffer_writer.h \
+include/grpcpp/support/server_callback.h \
include/grpcpp/support/slice.h \
include/grpcpp/support/status.h \
include/grpcpp/support/status_code_enum.h \
diff --git a/tools/doxygen/Doxyfile.c++.internal b/tools/doxygen/Doxyfile.c++.internal
index 8fed272159..7f5f9bc0db 100644
--- a/tools/doxygen/Doxyfile.c++.internal
+++ b/tools/doxygen/Doxyfile.c++.internal
@@ -764,7 +764,6 @@ INPUT = doc/PROTOCOL-HTTP2.md \
doc/PROTOCOL-WEB.md \
doc/binary-logging.md \
doc/c-style-guide.md \
-doc/combiner-explainer.md \
doc/command_line_tool.md \
doc/compression.md \
doc/compression_cookbook.md \
@@ -775,7 +774,6 @@ doc/cpp-style-guide.md \
doc/cpp/pending_api_cleanups.md \
doc/cpp/perf_notes.md \
doc/environment_variables.md \
-doc/epoll-polling-engine.md \
doc/fail_fast.md \
doc/fork_support.md \
doc/g_stands_for.md \
@@ -946,6 +944,8 @@ include/grpcpp/impl/codegen/async_unary_call.h \
include/grpcpp/impl/codegen/byte_buffer.h \
include/grpcpp/impl/codegen/call.h \
include/grpcpp/impl/codegen/call_hook.h \
+include/grpcpp/impl/codegen/call_op_set.h \
+include/grpcpp/impl/codegen/call_op_set_interface.h \
include/grpcpp/impl/codegen/callback_common.h \
include/grpcpp/impl/codegen/channel_interface.h \
include/grpcpp/impl/codegen/client_callback.h \
@@ -961,7 +961,9 @@ include/grpcpp/impl/codegen/core_codegen.h \
include/grpcpp/impl/codegen/core_codegen_interface.h \
include/grpcpp/impl/codegen/create_auth_context.h \
include/grpcpp/impl/codegen/grpc_library.h \
+include/grpcpp/impl/codegen/intercepted_channel.h \
include/grpcpp/impl/codegen/interceptor.h \
+include/grpcpp/impl/codegen/interceptor_common.h \
include/grpcpp/impl/codegen/metadata_map.h \
include/grpcpp/impl/codegen/method_handler_impl.h \
include/grpcpp/impl/codegen/proto_buffer_reader.h \
@@ -971,7 +973,9 @@ include/grpcpp/impl/codegen/rpc_method.h \
include/grpcpp/impl/codegen/rpc_service_method.h \
include/grpcpp/impl/codegen/security/auth_context.h \
include/grpcpp/impl/codegen/serialization_traits.h \
+include/grpcpp/impl/codegen/server_callback.h \
include/grpcpp/impl/codegen/server_context.h \
+include/grpcpp/impl/codegen/server_interceptor.h \
include/grpcpp/impl/codegen/server_interface.h \
include/grpcpp/impl/codegen/service_type.h \
include/grpcpp/impl/codegen/slice.h \
@@ -1007,6 +1011,7 @@ include/grpcpp/support/client_callback.h \
include/grpcpp/support/config.h \
include/grpcpp/support/proto_buffer_reader.h \
include/grpcpp/support/proto_buffer_writer.h \
+include/grpcpp/support/server_callback.h \
include/grpcpp/support/slice.h \
include/grpcpp/support/status.h \
include/grpcpp/support/status_code_enum.h \
@@ -1014,6 +1019,8 @@ include/grpcpp/support/string_ref.h \
include/grpcpp/support/stub_options.h \
include/grpcpp/support/sync_stream.h \
include/grpcpp/support/time.h \
+src/core/ext/filters/client_channel/health/health.pb.c \
+src/core/ext/filters/client_channel/health/health.pb.h \
src/core/ext/transport/inproc/inproc_transport.h \
src/core/lib/avl/avl.h \
src/core/lib/backoff/backoff.h \
@@ -1176,6 +1183,7 @@ src/core/lib/transport/status_metadata.h \
src/core/lib/transport/timeout_encoding.h \
src/core/lib/transport/transport.h \
src/core/lib/transport/transport_impl.h \
+src/core/lib/uri/uri_parser.h \
src/cpp/README.md \
src/cpp/client/channel_cc.cc \
src/cpp/client/client_context.cc \
@@ -1210,8 +1218,6 @@ src/cpp/server/dynamic_thread_pool.cc \
src/cpp/server/dynamic_thread_pool.h \
src/cpp/server/health/default_health_check_service.cc \
src/cpp/server/health/default_health_check_service.h \
-src/cpp/server/health/health.pb.c \
-src/cpp/server/health/health.pb.h \
src/cpp/server/health/health_check_service.cc \
src/cpp/server/health/health_check_service_server_builder_option.cc \
src/cpp/server/insecure_server_credentials.cc \
@@ -1230,8 +1236,11 @@ src/cpp/util/status.cc \
src/cpp/util/string_ref.cc \
src/cpp/util/time_cc.cc \
third_party/nanopb/pb.h \
+third_party/nanopb/pb_common.c \
third_party/nanopb/pb_common.h \
+third_party/nanopb/pb_decode.c \
third_party/nanopb/pb_decode.h \
+third_party/nanopb/pb_encode.c \
third_party/nanopb/pb_encode.h
# This tag can be used to specify the character encoding of the source files
diff --git a/tools/doxygen/Doxyfile.core b/tools/doxygen/Doxyfile.core
index 973975ae28..b78fb607ad 100644
--- a/tools/doxygen/Doxyfile.core
+++ b/tools/doxygen/Doxyfile.core
@@ -764,13 +764,14 @@ INPUT = doc/PROTOCOL-HTTP2.md \
doc/PROTOCOL-WEB.md \
doc/binary-logging.md \
doc/c-style-guide.md \
-doc/combiner-explainer.md \
doc/command_line_tool.md \
doc/compression.md \
doc/compression_cookbook.md \
doc/connection-backoff-interop-test-description.md \
doc/connection-backoff.md \
doc/connectivity-semantics-and-api.md \
+doc/core/combiner-explainer.md \
+doc/core/epoll-polling-engine.md \
doc/core/grpc-client-server-polling-engine-usage.md \
doc/core/grpc-cq.md \
doc/core/grpc-error.md \
@@ -780,7 +781,6 @@ doc/core/pending_api_cleanups.md \
doc/core/transport_explainer.md \
doc/cpp-style-guide.md \
doc/environment_variables.md \
-doc/epoll-polling-engine.md \
doc/fail_fast.md \
doc/fork_support.md \
doc/g_stands_for.md \
diff --git a/tools/doxygen/Doxyfile.core.internal b/tools/doxygen/Doxyfile.core.internal
index d5986fde90..0ddfcbd4fb 100644
--- a/tools/doxygen/Doxyfile.core.internal
+++ b/tools/doxygen/Doxyfile.core.internal
@@ -764,13 +764,14 @@ INPUT = doc/PROTOCOL-HTTP2.md \
doc/PROTOCOL-WEB.md \
doc/binary-logging.md \
doc/c-style-guide.md \
-doc/combiner-explainer.md \
doc/command_line_tool.md \
doc/compression.md \
doc/compression_cookbook.md \
doc/connection-backoff-interop-test-description.md \
doc/connection-backoff.md \
doc/connectivity-semantics-and-api.md \
+doc/core/combiner-explainer.md \
+doc/core/epoll-polling-engine.md \
doc/core/grpc-client-server-polling-engine-usage.md \
doc/core/grpc-cq.md \
doc/core/grpc-error.md \
@@ -780,7 +781,6 @@ doc/core/pending_api_cleanups.md \
doc/core/transport_explainer.md \
doc/cpp-style-guide.md \
doc/environment_variables.md \
-doc/epoll-polling-engine.md \
doc/fail_fast.md \
doc/fork_support.md \
doc/g_stands_for.md \
@@ -885,6 +885,10 @@ src/core/ext/filters/client_channel/client_channel_factory.h \
src/core/ext/filters/client_channel/client_channel_plugin.cc \
src/core/ext/filters/client_channel/connector.cc \
src/core/ext/filters/client_channel/connector.h \
+src/core/ext/filters/client_channel/health/health.pb.c \
+src/core/ext/filters/client_channel/health/health.pb.h \
+src/core/ext/filters/client_channel/health/health_check_client.cc \
+src/core/ext/filters/client_channel/health/health_check_client.h \
src/core/ext/filters/client_channel/http_connect_handshaker.cc \
src/core/ext/filters/client_channel/http_connect_handshaker.h \
src/core/ext/filters/client_channel/http_proxy.cc \
@@ -910,6 +914,14 @@ src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/load_balan
src/core/ext/filters/client_channel/lb_policy/pick_first/pick_first.cc \
src/core/ext/filters/client_channel/lb_policy/round_robin/round_robin.cc \
src/core/ext/filters/client_channel/lb_policy/subchannel_list.h \
+src/core/ext/filters/client_channel/lb_policy/xds/xds.cc \
+src/core/ext/filters/client_channel/lb_policy/xds/xds.h \
+src/core/ext/filters/client_channel/lb_policy/xds/xds_channel.h \
+src/core/ext/filters/client_channel/lb_policy/xds/xds_channel_secure.cc \
+src/core/ext/filters/client_channel/lb_policy/xds/xds_client_stats.cc \
+src/core/ext/filters/client_channel/lb_policy/xds/xds_client_stats.h \
+src/core/ext/filters/client_channel/lb_policy/xds/xds_load_balancer_api.cc \
+src/core/ext/filters/client_channel/lb_policy/xds/xds_load_balancer_api.h \
src/core/ext/filters/client_channel/lb_policy_factory.cc \
src/core/ext/filters/client_channel/lb_policy_factory.h \
src/core/ext/filters/client_channel/lb_policy_registry.cc \
@@ -950,8 +962,6 @@ src/core/ext/filters/client_channel/subchannel.cc \
src/core/ext/filters/client_channel/subchannel.h \
src/core/ext/filters/client_channel/subchannel_index.cc \
src/core/ext/filters/client_channel/subchannel_index.h \
-src/core/ext/filters/client_channel/uri_parser.cc \
-src/core/ext/filters/client_channel/uri_parser.h \
src/core/ext/filters/deadline/deadline_filter.cc \
src/core/ext/filters/deadline/deadline_filter.h \
src/core/ext/filters/http/client/http_client_filter.cc \
@@ -1361,16 +1371,22 @@ src/core/lib/security/credentials/plugin/plugin_credentials.cc \
src/core/lib/security/credentials/plugin/plugin_credentials.h \
src/core/lib/security/credentials/ssl/ssl_credentials.cc \
src/core/lib/security/credentials/ssl/ssl_credentials.h \
-src/core/lib/security/security_connector/alts_security_connector.cc \
-src/core/lib/security/security_connector/alts_security_connector.h \
+src/core/lib/security/security_connector/alts/alts_security_connector.cc \
+src/core/lib/security/security_connector/alts/alts_security_connector.h \
+src/core/lib/security/security_connector/fake/fake_security_connector.cc \
+src/core/lib/security/security_connector/fake/fake_security_connector.h \
src/core/lib/security/security_connector/load_system_roots.h \
src/core/lib/security/security_connector/load_system_roots_fallback.cc \
src/core/lib/security/security_connector/load_system_roots_linux.cc \
src/core/lib/security/security_connector/load_system_roots_linux.h \
-src/core/lib/security/security_connector/local_security_connector.cc \
-src/core/lib/security/security_connector/local_security_connector.h \
+src/core/lib/security/security_connector/local/local_security_connector.cc \
+src/core/lib/security/security_connector/local/local_security_connector.h \
src/core/lib/security/security_connector/security_connector.cc \
src/core/lib/security/security_connector/security_connector.h \
+src/core/lib/security/security_connector/ssl/ssl_security_connector.cc \
+src/core/lib/security/security_connector/ssl/ssl_security_connector.h \
+src/core/lib/security/security_connector/ssl_utils.cc \
+src/core/lib/security/security_connector/ssl_utils.h \
src/core/lib/security/transport/auth_filters.h \
src/core/lib/security/transport/client_auth_filter.cc \
src/core/lib/security/transport/secure_endpoint.cc \
@@ -1460,6 +1476,8 @@ src/core/lib/transport/transport.cc \
src/core/lib/transport/transport.h \
src/core/lib/transport/transport_impl.h \
src/core/lib/transport/transport_op_string.cc \
+src/core/lib/uri/uri_parser.cc \
+src/core/lib/uri/uri_parser.h \
src/core/plugin_registry/grpc_plugin_registry.cc \
src/core/tsi/README.md \
src/core/tsi/alts/crypt/aes_gcm.cc \
diff --git a/tools/gce/create_linux_kokoro_performance_worker_from_image.sh b/tools/gce/create_linux_kokoro_performance_worker_from_image.sh
new file mode 100755
index 0000000000..0f7939be4c
--- /dev/null
+++ b/tools/gce/create_linux_kokoro_performance_worker_from_image.sh
@@ -0,0 +1,38 @@
+#!/bin/bash
+# Copyright 2018 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Creates a performance worker on GCE from an image that's used for kokoro
+# perf workers.
+
+set -ex
+
+cd "$(dirname "$0")"
+
+CLOUD_PROJECT=grpc-testing
+ZONE=us-central1-b # this zone allows 32core machines
+LATEST_PERF_WORKER_IMAGE=grpc-performance-kokoro-v2 # update if newer image exists
+
+INSTANCE_NAME="${1:-grpc-kokoro-performance-server}"
+MACHINE_TYPE="${2:-n1-standard-32}"
+
+gcloud compute instances create "$INSTANCE_NAME" \
+ --project="$CLOUD_PROJECT" \
+ --zone "$ZONE" \
+ --machine-type "$MACHINE_TYPE" \
+ --image-project "$CLOUD_PROJECT" \
+ --image "$LATEST_PERF_WORKER_IMAGE" \
+ --boot-disk-size 300 \
+ --scopes https://www.googleapis.com/auth/bigquery \
+ --tags=allow-ssh
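
Note: the new script takes the instance name and machine type as optional positional arguments, defaulting to grpc-kokoro-performance-server and n1-standard-32. For example (the worker name below is made up):

    tools/gce/create_linux_kokoro_performance_worker_from_image.sh \
      grpc-kokoro-performance-client-8core n1-standard-8
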
diff --git a/tools/internal_ci/helper_scripts/prepare_build_grpclb_interop_rc b/tools/internal_ci/helper_scripts/prepare_build_grpclb_interop_rc
new file mode 100644
index 0000000000..a8e350be58
--- /dev/null
+++ b/tools/internal_ci/helper_scripts/prepare_build_grpclb_interop_rc
@@ -0,0 +1,33 @@
+#!/bin/bash
+# Copyright 2017 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Source this rc script to prepare the environment for interop builds
+# This rc script must be used in the root directory of gRPC
+
+export LANG=en_US.UTF-8
+
+# Download Docker images from DockerHub
+export DOCKERHUB_ORGANIZATION=grpctesting
+
+git submodule update --init
+
+# Set up gRPC-Go and gRPC-Java to test
+git clone --recursive https://github.com/grpc/grpc-go ./../grpc-go
+git clone --recursive https://github.com/grpc/grpc-java ./../grpc-java
+
+# TODO(apolcyn): move to kokoro image?
+virtualenv env
+source env/bin/activate
+pip install twisted
diff --git a/tools/internal_ci/helper_scripts/prepare_build_macos_rc b/tools/internal_ci/helper_scripts/prepare_build_macos_rc
index 3212e80854..4b7477db14 100644
--- a/tools/internal_ci/helper_scripts/prepare_build_macos_rc
+++ b/tools/internal_ci/helper_scripts/prepare_build_macos_rc
@@ -93,6 +93,7 @@ git submodule update --init
# Store intermediate build files of ObjC tests into /tmpfs
mkdir /tmpfs/Build-ios-binary-size
ln -s /tmpfs/Build-ios-binary-size src/objective-c/examples/Sample/Build
-mkdir /tmpfs/DerivedData
+mkdir -p /tmpfs/DerivedData
rm -rf ~/Library/Developer/Xcode/DerivedData
+mkdir -p ~/Library/Developer/Xcode
ln -s /tmpfs/DerivedData ~/Library/Developer/Xcode/DerivedData
diff --git a/tools/internal_ci/linux/grpc_asan_on_foundry.sh b/tools/internal_ci/linux/grpc_asan_on_foundry.sh
index dfef004a60..87ec60c7c0 100755
--- a/tools/internal_ci/linux/grpc_asan_on_foundry.sh
+++ b/tools/internal_ci/linux/grpc_asan_on_foundry.sh
@@ -13,8 +13,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+set -ex
+
export UPLOAD_TEST_RESULTS=true
-EXTRA_FLAGS="--copt=-gmlt --strip=never --copt=-fsanitize=address --linkopt=-fsanitize=address --test_timeout=3600 --cache_test_results=no"
-EXCLUDE_TESTS="--test_tag_filters=-qps_json_driver,-json_run_localhost"
-github/grpc/tools/internal_ci/linux/grpc_bazel_on_foundry_base.sh "${EXTRA_FLAGS}" "${EXCLUDE_TESTS}"
+github/grpc/tools/internal_ci/linux/grpc_bazel_on_foundry_base.sh --config=asan --cache_test_results=no
diff --git a/tools/internal_ci/linux/grpc_bazel_on_foundry_base.sh b/tools/internal_ci/linux/grpc_bazel_on_foundry_base.sh
index bb2a85138c..74778d9d29 100755
--- a/tools/internal_ci/linux/grpc_bazel_on_foundry_base.sh
+++ b/tools/internal_ci/linux/grpc_bazel_on_foundry_base.sh
@@ -21,45 +21,28 @@ set -ex
mkdir -p ${KOKORO_KEYSTORE_DIR}
cp ${KOKORO_GFILE_DIR}/GrpcTesting-d0eeee2db331.json ${KOKORO_KEYSTORE_DIR}/4321_grpc-testing-service
-temp_dir=$(mktemp -d)
-ln -f "${KOKORO_GFILE_DIR}/bazel-latest-release" ${temp_dir}/bazel
-chmod 755 "${KOKORO_GFILE_DIR}/bazel-latest-release"
+# Download bazel
+temp_dir="$(mktemp -d)"
+wget -q https://github.com/bazelbuild/bazel/releases/download/0.17.1/bazel-0.17.1-linux-x86_64 -O "${temp_dir}/bazel"
+chmod 755 "${temp_dir}/bazel"
export PATH="${temp_dir}:${PATH}"
# This should show ${temp_dir}/bazel
which bazel
-chmod +x "${KOKORO_GFILE_DIR}/bazel_wrapper.py"
# change to grpc repo root
cd $(dirname $0)/../../..
source tools/internal_ci/helper_scripts/prepare_build_linux_rc
-export KOKORO_FOUNDRY_PROJECT_ID="projects/grpc-testing/instances/default_instance"
+# to get "bazel" link for kokoro build, we need to generate
+# invocation UUID, set an env var for bazel to pick it up
+# and upload "bazel_invocation_ids" file as artifact.
+export BAZEL_INTERNAL_INVOCATION_ID="$(uuidgen)"
+echo "${BAZEL_INTERNAL_INVOCATION_ID}" >"${KOKORO_ARTIFACTS_DIR}/bazel_invocation_ids"
-# TODO(adelez): implement size for test targets and change test_timeout back
-"${KOKORO_GFILE_DIR}/bazel_wrapper.py" \
- --host_jvm_args=-Dbazel.DigestFunction=SHA256 \
- test --jobs="200" \
- --test_output=errors \
- --verbose_failures=true \
- --keep_going \
- --remote_accept_cached=true \
- --spawn_strategy=remote \
- --remote_local_fallback=false \
- --remote_timeout=3600 \
- --strategy=Javac=remote \
- --strategy=Closure=remote \
- --genrule_strategy=remote \
- --experimental_strict_action_env=true \
- --crosstool_top=@com_github_bazelbuild_bazeltoolchains//configs/ubuntu16_04_clang/1.0/bazel_0.16.1/default:toolchain \
- --define GRPC_PORT_ISOLATED_RUNTIME=1 \
- --action_env=BAZEL_DO_NOT_DETECT_CPP_TOOLCHAIN=1 \
- --extra_toolchains=@com_github_bazelbuild_bazeltoolchains//configs/ubuntu16_04_clang/1.0/bazel_0.16.1/cpp:cc-toolchain-clang-x86_64-default \
- --extra_execution_platforms=//third_party/toolchains:rbe_ubuntu1604 \
- --host_platform=//third_party/toolchains:rbe_ubuntu1604 \
- --platforms=//third_party/toolchains:rbe_ubuntu1604 \
- --test_env=GRPC_VERBOSITY=debug \
- --remote_instance_name=projects/grpc-testing/instances/default_instance \
+bazel \
+ --bazelrc=tools/remote_build/kokoro.bazelrc \
+ test \
$@ \
-- //test/... || FAILED="true"
diff --git a/tools/internal_ci/linux/grpc_bazel_on_foundry_dbg.sh b/tools/internal_ci/linux/grpc_bazel_on_foundry_dbg.sh
index 51cb66f5b8..d3cd9c20cc 100644
--- a/tools/internal_ci/linux/grpc_bazel_on_foundry_dbg.sh
+++ b/tools/internal_ci/linux/grpc_bazel_on_foundry_dbg.sh
@@ -13,6 +13,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+set -ex
+
export UPLOAD_TEST_RESULTS=true
EXTRA_FLAGS="-c dbg --test_timeout=300,450,1200,3600 --cache_test_results=no"
github/grpc/tools/internal_ci/linux/grpc_bazel_on_foundry_base.sh "${EXTRA_FLAGS}"
diff --git a/tools/internal_ci/linux/grpc_bazel_on_foundry_opt.sh b/tools/internal_ci/linux/grpc_bazel_on_foundry_opt.sh
index cbba9067ad..48b3f9e674 100644
--- a/tools/internal_ci/linux/grpc_bazel_on_foundry_opt.sh
+++ b/tools/internal_ci/linux/grpc_bazel_on_foundry_opt.sh
@@ -13,6 +13,8 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+set -ex
+
export UPLOAD_TEST_RESULTS=true
EXTRA_FLAGS="-c opt --test_timeout=300,450,1200,3600 --cache_test_results=no"
github/grpc/tools/internal_ci/linux/grpc_bazel_on_foundry_base.sh "${EXTRA_FLAGS}"
diff --git a/tools/internal_ci/linux/grpc_e2e_performance_singlevm.cfg b/tools/internal_ci/linux/grpc_e2e_performance_singlevm.cfg
new file mode 100644
index 0000000000..c27baad3d7
--- /dev/null
+++ b/tools/internal_ci/linux/grpc_e2e_performance_singlevm.cfg
@@ -0,0 +1,25 @@
+# Copyright 2017 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Config file for the internal CI (in protobuf text format)
+
+# Location of the continuous shell script in repository.
+build_file: "grpc/tools/internal_ci/linux/grpc_e2e_performance_singlevm.sh"
+timeout_mins: 360
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.*"
+ regex: "**/perf_reports/**"
+ }
+}
diff --git a/tools/internal_ci/linux/grpc_e2e_performance_singlevm.sh b/tools/internal_ci/linux/grpc_e2e_performance_singlevm.sh
new file mode 100755
index 0000000000..21f9d48ac4
--- /dev/null
+++ b/tools/internal_ci/linux/grpc_e2e_performance_singlevm.sh
@@ -0,0 +1,29 @@
+#!/usr/bin/env bash
+# Copyright 2017 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+set -ex
+
+# Enter the gRPC repo root
+cd $(dirname $0)/../../..
+
+source tools/internal_ci/helper_scripts/prepare_build_linux_perf_multilang_rc
+
+# "smoketest" scenarios on a single VM (=no remote VM for running qps_workers)
+tools/run_tests/run_performance_tests.py \
+ -l c++ csharp ruby java python go php7 php7_protobuf_c node \
+ --netperf \
+ --category smoketest \
+ -u kbuilder \
+ --bq_result_table performance_test.performance_experiment_singlevm \
+ --xml_report reports/singlemachine/sponge_log.xml
diff --git a/tools/internal_ci/linux/grpc_full_performance_master.sh b/tools/internal_ci/linux/grpc_full_performance_master.sh
index 24ee71edd1..05a014b88a 100755
--- a/tools/internal_ci/linux/grpc_full_performance_master.sh
+++ b/tools/internal_ci/linux/grpc_full_performance_master.sh
@@ -21,7 +21,7 @@ source tools/internal_ci/helper_scripts/prepare_build_linux_perf_multilang_rc
# run 8core client vs 8core server
tools/run_tests/run_performance_tests.py \
- -l c++ csharp ruby java python go php7 php7_protobuf_c node node_purejs \
+ -l c++ csharp ruby java python go php7 php7_protobuf_c node \
--netperf \
--category scalable \
--remote_worker_host grpc-kokoro-performance-server-8core grpc-kokoro-performance-client-8core grpc-kokoro-performance-client2-8core \
diff --git a/tools/internal_ci/linux/grpc_interop_matrix.cfg b/tools/internal_ci/linux/grpc_interop_matrix.cfg
index 696a55c0df..e13c26e8ba 100644
--- a/tools/internal_ci/linux/grpc_interop_matrix.cfg
+++ b/tools/internal_ci/linux/grpc_interop_matrix.cfg
@@ -16,7 +16,6 @@
# Location of the continuous shell script in repository.
build_file: "grpc/tools/internal_ci/linux/grpc_interop_matrix.sh"
-# grpc_interop tests can take 1 hours to complete.
timeout_mins: 300
action {
define_artifacts {
@@ -24,3 +23,8 @@ action {
regex: "github/grpc/reports/**"
}
}
+
+env_vars {
+ key: "RUN_TESTS_FLAGS"
+ value: "--language=all --release=all --allow_flakes --report_file=sponge_log.xml --bq_result_table interop_results"
+}
diff --git a/tools/internal_ci/linux/grpc_interop_matrix.sh b/tools/internal_ci/linux/grpc_interop_matrix.sh
index 4c24c43488..a5220ea087 100755
--- a/tools/internal_ci/linux/grpc_interop_matrix.sh
+++ b/tools/internal_ci/linux/grpc_interop_matrix.sh
@@ -22,4 +22,4 @@ cd $(dirname $0)/../../..
source tools/internal_ci/helper_scripts/prepare_build_linux_rc
-tools/interop_matrix/run_interop_matrix_tests.py --language=all --release=all --allow_flakes --report_file=sponge_log.xml --bq_result_table interop_results $@
+tools/interop_matrix/run_interop_matrix_tests.py $RUN_TESTS_FLAGS
diff --git a/tools/internal_ci/linux/grpc_msan_on_foundry.sh b/tools/internal_ci/linux/grpc_msan_on_foundry.sh
index 1ef13ef0d4..c7e4b1fa1a 100644
--- a/tools/internal_ci/linux/grpc_msan_on_foundry.sh
+++ b/tools/internal_ci/linux/grpc_msan_on_foundry.sh
@@ -15,67 +15,5 @@
set -ex
-# A temporary solution to give Kokoro credentials.
-# The file name 4321_grpc-testing-service needs to match auth_credential in
-# the build config.
-# TODO: Use keystore.
-mkdir -p ${KOKORO_KEYSTORE_DIR}
-cp ${KOKORO_GFILE_DIR}/GrpcTesting-d0eeee2db331.json ${KOKORO_KEYSTORE_DIR}/4321_grpc-testing-service
-
-temp_dir=$(mktemp -d)
-ln -f "${KOKORO_GFILE_DIR}/bazel-latest-release" ${temp_dir}/bazel
-chmod 755 "${KOKORO_GFILE_DIR}/bazel-latest-release"
-export PATH="${temp_dir}:${PATH}"
-# This should show ${temp_dir}/bazel
-which bazel
-chmod +x "${KOKORO_GFILE_DIR}/bazel_wrapper.py"
-
-# change to grpc repo root
-cd $(dirname $0)/../../..
-
-source tools/internal_ci/helper_scripts/prepare_build_linux_rc
-
-export KOKORO_FOUNDRY_PROJECT_ID="projects/grpc-testing/instances/default_instance"
-
-"${KOKORO_GFILE_DIR}/bazel_wrapper.py" \
- --host_jvm_args=-Dbazel.DigestFunction=SHA256 \
- test --jobs="200" \
- --test_timeout="3600,3600,3600,3600" \
- --test_output=errors \
- --verbose_failures=true \
- --keep_going \
- --remote_accept_cached=true \
- --spawn_strategy=remote \
- --remote_local_fallback=false \
- --remote_timeout=3600 \
- --strategy=Javac=remote \
- --strategy=Closure=remote \
- --genrule_strategy=remote \
- --experimental_strict_action_env=true \
- --define GRPC_PORT_ISOLATED_RUNTIME=1 \
- --copt=-gmlt \
- --strip=never \
- --cxxopt=--stdlib=libc++ \
- --copt=-fsanitize=memory \
- --linkopt=-fsanitize=memory \
- --copt=-fsanitize-memory-track-origins \
- --action_env=LD_LIBRARY_PATH=/usr/local/lib:$LD_LIBRARY_PATH \
- --host_crosstool_top=@com_github_bazelbuild_bazeltoolchains//configs/ubuntu16_04_clang/1.0/bazel_0.16.1/default:toolchain \
- --crosstool_top=@com_github_bazelbuild_bazeltoolchains//configs/ubuntu16_04_clang/1.0/bazel_0.16.1/msan:toolchain \
- --action_env=BAZEL_DO_NOT_DETECT_CPP_TOOLCHAIN=1 \
- --extra_toolchains=@com_github_bazelbuild_bazeltoolchains//configs/ubuntu16_04_clang/1.0/bazel_0.16.1/cpp:cc-toolchain-clang-x86_64-default \
- --extra_execution_platforms=//third_party/toolchains:rbe_ubuntu1604 \
- --host_platform=//third_party/toolchains:rbe_ubuntu1604 \
- --platforms=//third_party/toolchains:rbe_ubuntu1604 \
- --test_env=GRPC_VERBOSITY=debug \
- --remote_instance_name=projects/grpc-testing/instances/default_instance \
- -- //test/... || FAILED="true"
-
-# Sleep to let ResultStore finish writing results before querying
-sleep 60
-python ./tools/run_tests/python_utils/upload_rbe_results.py
-
-if [ "$FAILED" != "" ]
-then
- exit 1
-fi
+export UPLOAD_TEST_RESULTS=true
+github/grpc/tools/internal_ci/linux/grpc_bazel_on_foundry_base.sh --config=msan --cache_test_results=no
diff --git a/tools/internal_ci/linux/grpc_performance_profile_daily.sh b/tools/internal_ci/linux/grpc_performance_profile_daily.sh
index 34d41bc04c..0e08699923 100755
--- a/tools/internal_ci/linux/grpc_performance_profile_daily.sh
+++ b/tools/internal_ci/linux/grpc_performance_profile_daily.sh
@@ -24,8 +24,8 @@ CPUS=`python -c 'import multiprocessing; print multiprocessing.cpu_count()'`
./tools/run_tests/start_port_server.py || true
-make CONFIG=opt memory_profile_test memory_profile_client memory_profile_server -j $CPUS
-bins/opt/memory_profile_test
+make CONFIG=opt memory_usage_test memory_usage_client memory_usage_server -j $CPUS
+bins/opt/memory_usage_test
bq load microbenchmarks.memory memory_usage.csv
tools/run_tests/run_microbenchmark.py --collect summary --bigquery_upload || FAILED="true"
diff --git a/tools/internal_ci/linux/grpc_run_grpclb_interop_tests.sh b/tools/internal_ci/linux/grpc_run_grpclb_interop_tests.sh
new file mode 100755
index 0000000000..806b5c947e
--- /dev/null
+++ b/tools/internal_ci/linux/grpc_run_grpclb_interop_tests.sh
@@ -0,0 +1,26 @@
+#!/bin/bash
+# Copyright 2017 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set -ex
+
+export LANG=en_US.UTF-8
+
+# Enter the gRPC repo root
+cd $(dirname $0)/../../..
+
+source tools/internal_ci/helper_scripts/prepare_build_linux_rc
+source tools/internal_ci/helper_scripts/prepare_build_grpclb_interop_rc
+
+tools/run_tests/run_grpclb_interop_tests.py -l all --scenarios_file=tools/run_tests/generated/lb_interop_test_scenarios.json
diff --git a/tools/internal_ci/linux/grpc_tsan_on_foundry.sh b/tools/internal_ci/linux/grpc_tsan_on_foundry.sh
index 366b5cbe34..fcf3095d48 100644
--- a/tools/internal_ci/linux/grpc_tsan_on_foundry.sh
+++ b/tools/internal_ci/linux/grpc_tsan_on_foundry.sh
@@ -13,7 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+set -ex
+
export UPLOAD_TEST_RESULTS=true
-EXTRA_FLAGS="--copt=-gmlt --strip=never --copt=-fsanitize=thread --linkopt=-fsanitize=thread --test_timeout=3600 --action_env=TSAN_OPTIONS=suppressions=test/core/util/tsan_suppressions.txt:halt_on_error=1:second_deadlock_stack=1 --cache_test_results=no"
-EXCLUDE_TESTS="--test_tag_filters=-qps_json_driver,-json_run_localhost"
-github/grpc/tools/internal_ci/linux/grpc_bazel_on_foundry_base.sh "${EXTRA_FLAGS}" "${EXCLUDE_TESTS}"
+github/grpc/tools/internal_ci/linux/grpc_bazel_on_foundry_base.sh --config=tsan --cache_test_results=no
diff --git a/tools/internal_ci/linux/grpc_ubsan_on_foundry.sh b/tools/internal_ci/linux/grpc_ubsan_on_foundry.sh
index e0ae9103c4..f45be4d1c9 100644
--- a/tools/internal_ci/linux/grpc_ubsan_on_foundry.sh
+++ b/tools/internal_ci/linux/grpc_ubsan_on_foundry.sh
@@ -15,64 +15,5 @@
set -ex
-# A temporary solution to give Kokoro credentials.
-# The file name 4321_grpc-testing-service needs to match auth_credential in
-# the build config.
-# TODO: Use keystore.
-mkdir -p ${KOKORO_KEYSTORE_DIR}
-cp ${KOKORO_GFILE_DIR}/GrpcTesting-d0eeee2db331.json ${KOKORO_KEYSTORE_DIR}/4321_grpc-testing-service
-
-temp_dir=$(mktemp -d)
-ln -f "${KOKORO_GFILE_DIR}/bazel-latest-release" ${temp_dir}/bazel
-chmod 755 "${KOKORO_GFILE_DIR}/bazel-latest-release"
-export PATH="${temp_dir}:${PATH}"
-# This should show ${temp_dir}/bazel
-which bazel
-chmod +x "${KOKORO_GFILE_DIR}/bazel_wrapper.py"
-
-# change to grpc repo root
-cd $(dirname $0)/../../..
-
-source tools/internal_ci/helper_scripts/prepare_build_linux_rc
-
-export KOKORO_FOUNDRY_PROJECT_ID="projects/grpc-testing/instances/default_instance"
-
-"${KOKORO_GFILE_DIR}/bazel_wrapper.py" \
- --host_jvm_args=-Dbazel.DigestFunction=SHA256 \
- test --jobs="200" \
- --test_timeout="3600,3600,3600,3600" \
- --test_output=errors \
- --verbose_failures=true \
- --keep_going \
- --remote_accept_cached=true \
- --spawn_strategy=remote \
- --remote_local_fallback=false \
- --remote_timeout=3600 \
- --strategy=Javac=remote \
- --strategy=Closure=remote \
- --genrule_strategy=remote \
- --experimental_strict_action_env=true \
- --define GRPC_PORT_ISOLATED_RUNTIME=1 \
- --copt=-gmlt \
- --strip=never \
- --copt=-fsanitize=undefined \
- --linkopt=-fsanitize=undefined \
- --crosstool_top=@com_github_bazelbuild_bazeltoolchains//configs/experimental/ubuntu16_04_clang/1.0/bazel_0.15.0/ubsan:toolchain \
- --action_env=BAZEL_DO_NOT_DETECT_CPP_TOOLCHAIN=1 \
- --extra_toolchains=@com_github_bazelbuild_bazeltoolchains//configs/ubuntu16_04_clang/1.0/bazel_0.16.1/cpp:cc-toolchain-clang-x86_64-default \
- --extra_execution_platforms=//third_party/toolchains:rbe_ubuntu1604 \
- --host_platform=//third_party/toolchains:rbe_ubuntu1604 \
- --platforms=//third_party/toolchains:rbe_ubuntu1604 \
- --cache_test_results=no \
- --test_env=GRPC_VERBOSITY=debug \
- --remote_instance_name=projects/grpc-testing/instances/default_instance \
- -- //test/... || FAILED="true"
-
-# Sleep to let ResultStore finish writing results before querying
-sleep 60
-python ./tools/run_tests/python_utils/upload_rbe_results.py
-
-if [ "$FAILED" != "" ]
-then
- exit 1
-fi
+export UPLOAD_TEST_RESULTS=true
+github/grpc/tools/internal_ci/linux/grpc_bazel_on_foundry_base.sh --config=ubsan --cache_test_results=no
diff --git a/tools/internal_ci/linux/grpclb_in_dns_interop.cfg b/tools/internal_ci/linux/grpclb_in_dns_interop.cfg
new file mode 100644
index 0000000000..6cd5f2e21a
--- /dev/null
+++ b/tools/internal_ci/linux/grpclb_in_dns_interop.cfg
@@ -0,0 +1,25 @@
+# Copyright 2017 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Config file for the internal CI (in protobuf text format)
+
+# Location of the continuous shell script in repository.
+build_file: "grpc/tools/internal_ci/linux/grpc_run_grpclb_interop_tests.sh"
+timeout_mins: 60
+action {
+ define_artifacts {
+ regex: "**/sponge_log.xml"
+ regex: "github/grpc/reports/**"
+ }
+}
diff --git a/tools/internal_ci/linux/pull_request/grpc_asan_on_foundry.sh b/tools/internal_ci/linux/pull_request/grpc_asan_on_foundry.sh
index 39c991f291..00f92921de 100644
--- a/tools/internal_ci/linux/pull_request/grpc_asan_on_foundry.sh
+++ b/tools/internal_ci/linux/pull_request/grpc_asan_on_foundry.sh
@@ -13,7 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-EXTRA_FLAGS="--copt=-gmlt --strip=never --copt=-fsanitize=address --linkopt=-fsanitize=address --test_timeout=3600"
-EXCLUDE_TESTS="--test_tag_filters=-qps_json_driver,-json_run_localhost"
-github/grpc/tools/internal_ci/linux/grpc_bazel_on_foundry_base.sh "${EXTRA_FLAGS}" "${EXCLUDE_TESTS}"
+set -ex
+
+github/grpc/tools/internal_ci/linux/grpc_bazel_on_foundry_base.sh --config=asan
diff --git a/tools/internal_ci/linux/pull_request/grpc_bazel_on_foundry_dbg.sh b/tools/internal_ci/linux/pull_request/grpc_bazel_on_foundry_dbg.sh
index 8e2aaebaee..eb1c7320a7 100644
--- a/tools/internal_ci/linux/pull_request/grpc_bazel_on_foundry_dbg.sh
+++ b/tools/internal_ci/linux/pull_request/grpc_bazel_on_foundry_dbg.sh
@@ -13,5 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+set -ex
+
EXTRA_FLAGS="-c dbg --test_timeout=300,450,1200,3600"
github/grpc/tools/internal_ci/linux/grpc_bazel_on_foundry_base.sh "${EXTRA_FLAGS}"
diff --git a/tools/internal_ci/linux/pull_request/grpc_bazel_on_foundry_opt.sh b/tools/internal_ci/linux/pull_request/grpc_bazel_on_foundry_opt.sh
index ded0d368a5..f179dc9483 100644
--- a/tools/internal_ci/linux/pull_request/grpc_bazel_on_foundry_opt.sh
+++ b/tools/internal_ci/linux/pull_request/grpc_bazel_on_foundry_opt.sh
@@ -13,5 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
+set -ex
+
EXTRA_FLAGS="-c opt --test_timeout=300,450,1200,3600"
github/grpc/tools/internal_ci/linux/grpc_bazel_on_foundry_base.sh "${EXTRA_FLAGS}"
diff --git a/tools/internal_ci/linux/pull_request/grpc_interop_matrix_adhoc.cfg b/tools/internal_ci/linux/pull_request/grpc_interop_matrix_adhoc.cfg
new file mode 100644
index 0000000000..6726384f18
--- /dev/null
+++ b/tools/internal_ci/linux/pull_request/grpc_interop_matrix_adhoc.cfg
@@ -0,0 +1,30 @@
+# Copyright 2017 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Config file for the internal CI (in protobuf text format)
+
+# Location of the continuous shell script in repository.
+build_file: "grpc/tools/internal_ci/linux/grpc_interop_matrix.sh"
+timeout_mins: 300
+action {
+ define_artifacts {
+ regex: "**/sponge_log.xml"
+ regex: "github/grpc/reports/**"
+ }
+}
+
+env_vars {
+ key: "RUN_TESTS_FLAGS"
+ value: "--language=all --release=all --allow_flakes --report_file=sponge_log.xml"
+}
diff --git a/tools/internal_ci/linux/pull_request/grpc_tsan_on_foundry.sh b/tools/internal_ci/linux/pull_request/grpc_tsan_on_foundry.sh
index 3dee115300..f3e98e63da 100644
--- a/tools/internal_ci/linux/pull_request/grpc_tsan_on_foundry.sh
+++ b/tools/internal_ci/linux/pull_request/grpc_tsan_on_foundry.sh
@@ -13,6 +13,6 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-EXTRA_FLAGS="--copt=-gmlt --strip=never --copt=-fsanitize=thread --linkopt=-fsanitize=thread --test_timeout=3600 --action_env=TSAN_OPTIONS=suppressions=test/core/util/tsan_suppressions.txt:halt_on_error=1:second_deadlock_stack=1"
-EXCLUDE_TESTS="--test_tag_filters=-qps_json_driver,-json_run_localhost"
-github/grpc/tools/internal_ci/linux/grpc_bazel_on_foundry_base.sh "${EXTRA_FLAGS}" "${EXCLUDE_TESTS}"
+set -ex
+
+github/grpc/tools/internal_ci/linux/grpc_bazel_on_foundry_base.sh --config=tsan
diff --git a/tools/internal_ci/linux/pull_request/grpc_ubsan_on_foundry.sh b/tools/internal_ci/linux/pull_request/grpc_ubsan_on_foundry.sh
index 8547fa4d93..b94935eab1 100644
--- a/tools/internal_ci/linux/pull_request/grpc_ubsan_on_foundry.sh
+++ b/tools/internal_ci/linux/pull_request/grpc_ubsan_on_foundry.sh
@@ -15,59 +15,4 @@
set -ex
-# A temporary solution to give Kokoro credentials.
-# The file name 4321_grpc-testing-service needs to match auth_credential in
-# the build config.
-# TODO: Use keystore.
-mkdir -p ${KOKORO_KEYSTORE_DIR}
-cp ${KOKORO_GFILE_DIR}/GrpcTesting-d0eeee2db331.json ${KOKORO_KEYSTORE_DIR}/4321_grpc-testing-service
-
-temp_dir=$(mktemp -d)
-ln -f "${KOKORO_GFILE_DIR}/bazel-latest-release" ${temp_dir}/bazel
-chmod 755 "${KOKORO_GFILE_DIR}/bazel-latest-release"
-export PATH="${temp_dir}:${PATH}"
-# This should show ${temp_dir}/bazel
-which bazel
-chmod +x "${KOKORO_GFILE_DIR}/bazel_wrapper.py"
-
-# change to grpc repo root
-cd $(dirname $0)/../../../..
-
-source tools/internal_ci/helper_scripts/prepare_build_linux_rc
-
-export KOKORO_FOUNDRY_PROJECT_ID="projects/grpc-testing/instances/default_instance"
-
-"${KOKORO_GFILE_DIR}/bazel_wrapper.py" \
- --host_jvm_args=-Dbazel.DigestFunction=SHA256 \
- test --jobs="200" \
- --test_timeout="3600,3600,3600,3600" \
- --test_output=errors \
- --verbose_failures=true \
- --keep_going \
- --remote_accept_cached=true \
- --spawn_strategy=remote \
- --remote_local_fallback=false \
- --remote_timeout=3600 \
- --strategy=Javac=remote \
- --strategy=Closure=remote \
- --genrule_strategy=remote \
- --experimental_strict_action_env=true \
- --define GRPC_PORT_ISOLATED_RUNTIME=1 \
- --copt=-gmlt \
- --strip=never \
- --copt=-fsanitize=undefined \
- --linkopt=-fsanitize=undefined \
- --crosstool_top=@com_github_bazelbuild_bazeltoolchains//configs/experimental/ubuntu16_04_clang/1.0/bazel_0.15.0/ubsan:toolchain \
- --action_env=BAZEL_DO_NOT_DETECT_CPP_TOOLCHAIN=1 \
- --extra_toolchains=@com_github_bazelbuild_bazeltoolchains//configs/ubuntu16_04_clang/1.0/bazel_0.16.1/cpp:cc-toolchain-clang-x86_64-default \
- --extra_execution_platforms=//third_party/toolchains:rbe_ubuntu1604 \
- --host_platform=//third_party/toolchains:rbe_ubuntu1604 \
- --platforms=//third_party/toolchains:rbe_ubuntu1604 \
- --test_env=GRPC_VERBOSITY=debug \
- --remote_instance_name=projects/grpc-testing/instances/default_instance \
- -- //test/... || FAILED="true"
-
-if [ "$FAILED" != "" ]
-then
- exit 1
-fi
+github/grpc/tools/internal_ci/linux/grpc_bazel_on_foundry_base.sh --config=ubsan
diff --git a/tools/internal_ci/linux/run_performance_profile_hourly.sh b/tools/internal_ci/linux/run_performance_profile_hourly.sh
index edf85c2e2c..2be9edf756 100755
--- a/tools/internal_ci/linux/run_performance_profile_hourly.sh
+++ b/tools/internal_ci/linux/run_performance_profile_hourly.sh
@@ -21,8 +21,8 @@ cd $(dirname $0)/../../..
CPUS=`python -c 'import multiprocessing; print multiprocessing.cpu_count()'`
-make CONFIG=opt memory_profile_test memory_profile_client memory_profile_server -j $CPUS
-bins/opt/memory_profile_test
+make CONFIG=opt memory_usage_test memory_usage_client memory_usage_server -j $CPUS
+bins/opt/memory_usage_test
bq load microbenchmarks.memory memory_usage.csv
tools/run_tests/run_microbenchmark.py --collect summary --bigquery_upload
diff --git a/tools/internal_ci/macos/grpc_basictests_objc_dbg.cfg b/tools/internal_ci/macos/grpc_basictests_objc_dbg.cfg
new file mode 100644
index 0000000000..068961234b
--- /dev/null
+++ b/tools/internal_ci/macos/grpc_basictests_objc_dbg.cfg
@@ -0,0 +1,31 @@
+# Copyright 2018 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Config file for the internal CI (in protobuf text format)
+
+# Location of the continuous shell script in repository.
+build_file: "grpc/tools/internal_ci/macos/grpc_run_tests_matrix.sh"
+gfile_resources: "/bigstore/grpc-testing-secrets/gcp_credentials/GrpcTesting-d0eeee2db331.json"
+timeout_mins: 120
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.*"
+ regex: "github/grpc/reports/**"
+ }
+}
+
+env_vars {
+ key: "RUN_TESTS_FLAGS"
+ value: "-f basictests macos objc dbg --internal_ci -j 1 --inner_jobs 4 --bq_result_table aggregate_results"
+}
diff --git a/tools/internal_ci/macos/grpc_basictests_objc_opt.cfg b/tools/internal_ci/macos/grpc_basictests_objc_opt.cfg
new file mode 100644
index 0000000000..927fa50deb
--- /dev/null
+++ b/tools/internal_ci/macos/grpc_basictests_objc_opt.cfg
@@ -0,0 +1,31 @@
+# Copyright 2018 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Config file for the internal CI (in protobuf text format)
+
+# Location of the continuous shell script in repository.
+build_file: "grpc/tools/internal_ci/macos/grpc_run_tests_matrix.sh"
+gfile_resources: "/bigstore/grpc-testing-secrets/gcp_credentials/GrpcTesting-d0eeee2db331.json"
+timeout_mins: 120
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.*"
+ regex: "github/grpc/reports/**"
+ }
+}
+
+env_vars {
+ key: "RUN_TESTS_FLAGS"
+ value: "-f basictests macos objc opt --internal_ci -j 1 --inner_jobs 4 --bq_result_table aggregate_results"
+}
diff --git a/tools/internal_ci/macos/pull_request/grpc_basictests_objc_dbg.cfg b/tools/internal_ci/macos/pull_request/grpc_basictests_objc_dbg.cfg
new file mode 100644
index 0000000000..775fd355a5
--- /dev/null
+++ b/tools/internal_ci/macos/pull_request/grpc_basictests_objc_dbg.cfg
@@ -0,0 +1,31 @@
+# Copyright 2018 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Config file for the internal CI (in protobuf text format)
+
+# Location of the continuous shell script in repository.
+build_file: "grpc/tools/internal_ci/macos/grpc_run_tests_matrix.sh"
+gfile_resources: "/bigstore/grpc-testing-secrets/gcp_credentials/GrpcTesting-d0eeee2db331.json"
+timeout_mins: 120
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.*"
+ regex: "github/grpc/reports/**"
+ }
+}
+
+env_vars {
+ key: "RUN_TESTS_FLAGS"
+ value: "-f basictests macos objc dbg --internal_ci -j 1 --inner_jobs 4 --max_time=3600"
+}
diff --git a/tools/internal_ci/macos/pull_request/grpc_basictests_objc_opt.cfg b/tools/internal_ci/macos/pull_request/grpc_basictests_objc_opt.cfg
new file mode 100644
index 0000000000..652ef1bb77
--- /dev/null
+++ b/tools/internal_ci/macos/pull_request/grpc_basictests_objc_opt.cfg
@@ -0,0 +1,31 @@
+# Copyright 2018 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Config file for the internal CI (in protobuf text format)
+
+# Location of the continuous shell script in repository.
+build_file: "grpc/tools/internal_ci/macos/grpc_run_tests_matrix.sh"
+gfile_resources: "/bigstore/grpc-testing-secrets/gcp_credentials/GrpcTesting-d0eeee2db331.json"
+timeout_mins: 120
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.*"
+ regex: "github/grpc/reports/**"
+ }
+}
+
+env_vars {
+ key: "RUN_TESTS_FLAGS"
+ value: "-f basictests macos objc opt --internal_ci -j 1 --inner_jobs 4 --max_time=3600"
+}
diff --git a/tools/interop_matrix/client_matrix.py b/tools/interop_matrix/client_matrix.py
index 15b53d1716..ff3344cd95 100644
--- a/tools/interop_matrix/client_matrix.py
+++ b/tools/interop_matrix/client_matrix.py
@@ -13,7 +13,7 @@
# See the License for the specific language governing permissions and
# limitations under the License.
-# Dictionaries used for client matrix testing.
+# Defines languages, runtimes and releases for backward compatibility testing
def get_github_repo(lang):
@@ -53,8 +53,7 @@ LANG_RUNTIME_MATRIX = {
'csharp': ['csharp', 'csharpcoreclr'],
}
-# Dictionary of releases per language. For each language, we need to provide
-# a release tag pointing to the latest build of the branch.
+# Dictionary of known releases for given language.
LANG_RELEASE_MATRIX = {
'cxx': [
{
@@ -102,6 +101,9 @@ LANG_RELEASE_MATRIX = {
{
'v1.15.0': None
},
+ {
+ 'v1.16.0': None
+ },
],
'go': [
{
@@ -138,7 +140,19 @@ LANG_RELEASE_MATRIX = {
'v1.11.3': None
},
{
- 'v1.12.0': None
+ 'v1.12.2': None
+ },
+ {
+ 'v1.13.0': None
+ },
+ {
+ 'v1.14.0': None
+ },
+ {
+ 'v1.15.0': None
+ },
+ {
+ 'v1.16.0': None
},
],
'java': [
@@ -190,6 +204,9 @@ LANG_RELEASE_MATRIX = {
{
'v1.15.0': None
},
+ {
+ 'v1.16.1': None
+ },
],
'python': [
{
@@ -237,6 +254,9 @@ LANG_RELEASE_MATRIX = {
{
'v1.15.0': None
},
+ {
+ 'v1.16.0': None
+ },
],
'node': [
{
@@ -328,6 +348,9 @@ LANG_RELEASE_MATRIX = {
{
'v1.15.0': None
},
+ {
+ 'v1.16.0': None
+ },
],
'php': [
{
@@ -375,6 +398,9 @@ LANG_RELEASE_MATRIX = {
{
'v1.15.0': None
},
+ {
+ 'v1.16.0': None
+ },
],
'csharp': [
{
@@ -427,6 +453,9 @@ LANG_RELEASE_MATRIX = {
{
'v1.15.0': None
},
+ {
+ 'v1.16.0': None
+ },
],
}
diff --git a/tools/interop_matrix/run_interop_matrix_tests.py b/tools/interop_matrix/run_interop_matrix_tests.py
index 6cd6f43167..6cf2a9b036 100755
--- a/tools/interop_matrix/run_interop_matrix_tests.py
+++ b/tools/interop_matrix/run_interop_matrix_tests.py
@@ -26,7 +26,7 @@ import subprocess
import sys
import uuid
-# Langauage Runtime Matrix
+# Language Runtime Matrix
import client_matrix
python_util_dir = os.path.abspath(
@@ -37,6 +37,9 @@ import jobset
import report_utils
import upload_test_results
+_TEST_TIMEOUT_SECONDS = 60
+_PULL_IMAGE_TIMEOUT_SECONDS = 15 * 60
+_MAX_PARALLEL_DOWNLOADS = 6
_LANGUAGES = client_matrix.LANG_RUNTIME_MATRIX.keys()
# All gRPC release tags, flattened, deduped and sorted.
_RELEASES = sorted(
@@ -45,7 +48,6 @@ _RELEASES = sorted(
client_matrix.get_release_tag_name(info)
for lang in client_matrix.LANG_RELEASE_MATRIX.values()
for info in lang)))
-_TEST_TIMEOUT = 60
argp = argparse.ArgumentParser(description='Run interop tests.')
argp.add_argument('-j', '--jobs', default=multiprocessing.cpu_count(), type=int)
@@ -56,7 +58,7 @@ argp.add_argument(
argp.add_argument(
'--release',
default='all',
- choices=['all', 'master'] + _RELEASES,
+ choices=['all'] + _RELEASES,
help='Release tags to test. When testing all '
'releases defined in client_matrix.py, use "all".')
argp.add_argument(
@@ -92,136 +94,154 @@ argp.add_argument(
nargs='?',
help='The gateway to backend services.')
-args = argp.parse_args()
-
-print(str(args))
-
-def find_all_images_for_lang(lang):
+def _get_test_images_for_lang(lang, release_arg, image_path_prefix):
"""Find docker images for a language across releases and runtimes.
Returns dictionary of list of (<tag>, <image-full-path>) keyed by runtime.
"""
- # Find all defined releases.
- if args.release == 'all':
- releases = ['master'] + client_matrix.get_release_tags(lang)
+ if release_arg == 'all':
+ # Use all defined releases for given language
+ releases = client_matrix.get_release_tags(lang)
else:
# Look for a particular release.
- if args.release not in ['master'
- ] + client_matrix.get_release_tags(lang):
+ if release_arg not in client_matrix.get_release_tags(lang):
jobset.message(
'SKIPPED',
- '%s for %s is not defined' % (args.release, lang),
+ 'release %s for %s is not defined' % (release_arg, lang),
do_newline=True)
return {}
- releases = [args.release]
+ releases = [release_arg]
- # TODO(jtattermusch): why do we need to query the existing images/tags?
- # From LANG_RUNTIME_MATRIX and LANG_RELEASE_MATRIX it should be obvious
- # which tags we want to test - and it should be an error if they are
- # missing.
# Images tuples keyed by runtime.
images = {}
for runtime in client_matrix.LANG_RUNTIME_MATRIX[lang]:
- image_path = '%s/grpc_interop_%s' % (args.gcr_path, runtime)
- output = subprocess.check_output([
- 'gcloud', 'beta', 'container', 'images', 'list-tags',
- '--format=json', image_path
- ])
- docker_image_list = json.loads(output)
- # All images should have a single tag or no tag.
- # TODO(adelez): Remove tagless images.
- tags = [i['tags'][0] for i in docker_image_list if i['tags']]
- jobset.message(
- 'START',
- 'Found images for %s: %s' % (image_path, tags),
- do_newline=True)
- skipped = len(docker_image_list) - len(tags)
- jobset.message(
- 'SKIPPED',
- 'Skipped images (no-tag/unknown-tag): %d' % skipped,
- do_newline=True)
- # Filter tags based on the releases.
- images[runtime] = [(tag, '%s:%s' % (image_path, tag))
- for tag in tags
- if tag in releases]
+ image_path = '%s/grpc_interop_%s' % (image_path_prefix, runtime)
+ images[runtime] = [
+ (tag, '%s:%s' % (image_path, tag)) for tag in releases
+ ]
return images
-# caches test cases (list of JobSpec) loaded from file. Keyed by lang and runtime.
-def find_test_cases(lang, runtime, release, suite_name):
- """Returns the list of test cases from testcase files per lang/release."""
+def _read_test_cases_file(lang, runtime, release):
+ """Read test cases from a bash-like file and return a list of commands"""
testcase_dir = os.path.join(os.path.dirname(__file__), 'testcases')
filename_prefix = lang
if lang == 'csharp':
+        # TODO(jtattermusch): remove this odd special case
filename_prefix = runtime
# Check to see if we need to use a particular version of test cases.
lang_version = '%s_%s' % (filename_prefix, release)
if lang_version in client_matrix.TESTCASES_VERSION_MATRIX:
- testcases = os.path.join(
+ testcase_file = os.path.join(
testcase_dir, client_matrix.TESTCASES_VERSION_MATRIX[lang_version])
else:
- testcases = os.path.join(testcase_dir, '%s__master' % filename_prefix)
+ # TODO(jtattermusch): remove the double-underscore, it is pointless
+ testcase_file = os.path.join(testcase_dir,
+ '%s__master' % filename_prefix)
+
+ lines = []
+ with open(testcase_file) as f:
+ for line in f.readlines():
+ line = re.sub('\\#.*$', '', line) # remove hash comments
+ line = line.strip()
+ if line and not line.startswith('echo'):
+                # Each non-empty line is treated as a test case command
+ lines.append(line)
+ return lines
+
+
+def _cleanup_docker_image(image):
+ jobset.message('START', 'Cleanup docker image %s' % image, do_newline=True)
+ dockerjob.remove_image(image, skip_nonexistent=True)
+
+
+args = argp.parse_args()
+
+
+# caches test cases (list of JobSpec) loaded from file. Keyed by lang and runtime.
+def _generate_test_case_jobspecs(lang, runtime, release, suite_name):
+ """Returns the list of test cases from testcase files per lang/release."""
+ testcase_lines = _read_test_cases_file(lang, runtime, release)
job_spec_list = []
- try:
- with open(testcases) as f:
- # Only line start with 'docker run' are test cases.
- for line in f.readlines():
- if line.startswith('docker run'):
- m = re.search('--test_case=(.*)"', line)
- shortname = m.group(1) if m else 'unknown_test'
- m = re.search(
- '--server_host_override=(.*).sandbox.googleapis.com',
- line)
- server = m.group(1) if m else 'unknown_server'
-
- # If server_host arg is not None, replace the original
- # server_host with the one provided or append to the end of
- # the command if server_host does not appear originally.
- if args.server_host:
- if line.find('--server_host=') > -1:
- line = re.sub('--server_host=[^ ]*',
- '--server_host=%s' % args.server_host,
- line)
- else:
- line = '%s --server_host=%s"' % (line[:-1],
- args.server_host)
- print(line)
-
- spec = jobset.JobSpec(
- cmdline=line,
- shortname='%s:%s:%s:%s' % (suite_name, lang, server,
- shortname),
- timeout_seconds=_TEST_TIMEOUT,
- shell=True,
- flake_retries=5 if args.allow_flakes else 0)
- job_spec_list.append(spec)
- jobset.message(
- 'START',
- 'Loaded %s tests from %s' % (len(job_spec_list), testcases),
- do_newline=True)
- except IOError as err:
- jobset.message('FAILED', err, do_newline=True)
+ for line in testcase_lines:
+        # TODO(jtattermusch): revisit the logic for updating test case commands;
+        # what is currently being done seems fragile.
+ m = re.search('--test_case=(.*)"', line)
+ shortname = m.group(1) if m else 'unknown_test'
+ m = re.search('--server_host_override=(.*).sandbox.googleapis.com',
+ line)
+ server = m.group(1) if m else 'unknown_server'
+
+ # If server_host arg is not None, replace the original
+ # server_host with the one provided or append to the end of
+ # the command if server_host does not appear originally.
+ if args.server_host:
+ if line.find('--server_host=') > -1:
+ line = re.sub('--server_host=[^ ]*',
+ '--server_host=%s' % args.server_host, line)
+ else:
+ line = '%s --server_host=%s"' % (line[:-1], args.server_host)
+
+ spec = jobset.JobSpec(
+ cmdline=line,
+ shortname='%s:%s:%s:%s' % (suite_name, lang, server, shortname),
+ timeout_seconds=_TEST_TIMEOUT_SECONDS,
+ shell=True,
+ flake_retries=5 if args.allow_flakes else 0)
+ job_spec_list.append(spec)
return job_spec_list
-_xml_report_tree = report_utils.new_junit_xml_tree()
+def _pull_images_for_lang(lang, images):
+ """Pull all images for given lang from container registry."""
+ jobset.message(
+ 'START', 'Downloading images for language "%s"' % lang, do_newline=True)
+ download_specs = []
+ for release, image in images:
+ # Pull the image and warm it up.
+        # The first time an image is used with "docker run", it takes time to unpack
+        # the image, and that delay would later cause our test cases to fail.
+ cmdline = [
+ 'time gcloud docker -- pull %s && time docker run --rm=true %s /bin/true'
+ % (image, image)
+ ]
+ spec = jobset.JobSpec(
+ cmdline=cmdline,
+ shortname='pull_image_%s' % (image),
+ timeout_seconds=_PULL_IMAGE_TIMEOUT_SECONDS,
+ shell=True)
+ download_specs.append(spec)
+ # too many image downloads at once tend to get stuck
+ max_pull_jobs = min(args.jobs, _MAX_PARALLEL_DOWNLOADS)
+ num_failures, resultset = jobset.run(
+ download_specs, newline_on_success=True, maxjobs=max_pull_jobs)
+ if num_failures:
+ jobset.message(
+ 'FAILED', 'Failed to download some images', do_newline=True)
+ return False
+ else:
+ jobset.message(
+ 'SUCCESS', 'All images downloaded successfully.', do_newline=True)
+ return True
-def run_tests_for_lang(lang, runtime, images):
+def _run_tests_for_lang(lang, runtime, images, xml_report_tree):
"""Find and run all test cases for a language.
images is a list of (<release-tag>, <image-full-path>) tuple.
"""
+ if not _pull_images_for_lang(lang, images):
+ jobset.message(
+ 'FAILED', 'Image download failed. Exiting.', do_newline=True)
+ return 1
+
total_num_failures = 0
- for image_tuple in images:
- release, image = image_tuple
- jobset.message('START', 'Testing %s' % image, do_newline=True)
- # Download the docker image before running each test case.
- subprocess.check_call(['gcloud', 'docker', '--', 'pull', image])
+ for release, image in images:
suite_name = '%s__%s_%s' % (lang, runtime, release)
- job_spec_list = find_test_cases(lang, runtime, release, suite_name)
+ job_spec_list = _generate_test_case_jobspecs(lang, runtime, release,
+ suite_name)
if not job_spec_list:
jobset.message(
@@ -242,28 +262,24 @@ def run_tests_for_lang(lang, runtime, images):
else:
jobset.message('SUCCESS', 'All tests passed', do_newline=True)
- report_utils.append_junit_xml_results(_xml_report_tree, resultset,
+ report_utils.append_junit_xml_results(xml_report_tree, resultset,
'grpc_interop_matrix', suite_name,
str(uuid.uuid4()))
if not args.keep:
- cleanup(image)
+ _cleanup_docker_image(image)
return total_num_failures
-def cleanup(image):
- jobset.message('START', 'Cleanup docker image %s' % image, do_newline=True)
- dockerjob.remove_image(image, skip_nonexistent=True)
-
-
languages = args.language if args.language != ['all'] else _LANGUAGES
total_num_failures = 0
+_xml_report_tree = report_utils.new_junit_xml_tree()
for lang in languages:
- docker_images = find_all_images_for_lang(lang)
+ docker_images = _get_test_images_for_lang(lang, args.release, args.gcr_path)
for runtime in sorted(docker_images.keys()):
- total_num_failures += run_tests_for_lang(lang, runtime,
- docker_images[runtime])
+ total_num_failures += _run_tests_for_lang(
+ lang, runtime, docker_images[runtime], _xml_report_tree)
report_utils.create_xml_report_file(_xml_report_tree, args.report_file)
diff --git a/tools/remote_build/README.md b/tools/remote_build/README.md
new file mode 100644
index 0000000000..c4d03547a2
--- /dev/null
+++ b/tools/remote_build/README.md
@@ -0,0 +1,33 @@
+# Running Remote Builds with bazel
+
+This allows you to run gRPC C/C++ remote builds and tests from your workstation with
+a configuration that is very similar to the one used by our Kokoro CI.
+
+Note that this only works for gRPC team members (it requires access to the
+remote build and execution cluster); others will need to rely on local test runs
+and on tests run by Kokoro CI.
+
+
+## Prerequisites
+
+- See [Installing Bazel](https://docs.bazel.build/versions/master/install.html) for instructions on how to install bazel on your system.
+
+- Set up application default credentials for running remote builds by following [RBE Credentials Setup](https://cloud.google.com/remote-build-execution/docs/getting-started#set_credentials).
+
+
+## Running remote build manually from dev workstation
+
+Run from repository root:
+```
+# manual run of bazel tests remotely on Foundry
+bazel --bazelrc=tools/remote_build/manual.bazelrc test -c opt //test/...
+```
+
+Sanitizer runs (asan, msan, tsan, ubsan):
+```
+# manual run of bazel tests remotely on Foundry with given sanitizer
+bazel --bazelrc=tools/remote_build/manual.bazelrc test --config=asan //test/...
+```
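+
+You can also layer additional bazel options on top of a sanitizer config; the
+`--jobs` and `--test_output` overrides below are only illustrative, not a
+required setup:
+```
+# illustrative: tsan run with lower parallelism and full test output
+bazel --bazelrc=tools/remote_build/manual.bazelrc test --config=tsan --jobs=50 --test_output=all //test/...
+```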
+
+Available command line options can be found in the
+[Bazel command line reference](https://docs.bazel.build/versions/master/command-line-reference.html).
diff --git a/tools/remote_build/kokoro.bazelrc b/tools/remote_build/kokoro.bazelrc
new file mode 100644
index 0000000000..11462bd301
--- /dev/null
+++ b/tools/remote_build/kokoro.bazelrc
@@ -0,0 +1,38 @@
+# Copyright 2018 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# bazelrc file for running gRPC tests on Kokoro using Foundry
+
+import %workspace%/tools/remote_build/rbe_common.bazelrc
+
+build --remote_cache=remotebuildexecution.googleapis.com
+build --remote_executor=remotebuildexecution.googleapis.com
+build --tls_enabled=true
+
+build --auth_enabled=true
+# magic location where kokoro script puts the credentials
+build --auth_credentials=/tmpfs/src/keystore/4321_grpc-testing-service
+build --auth_scope=https://www.googleapis.com/auth/cloud-source-tools
+
+build --bes_backend=buildeventservice.googleapis.com
+build --bes_best_effort=false
+build --bes_timeout=600s
+build --project_id=grpc-testing
+
+# required by kokoro for some reason
+build --test_env=USER=anon
+
+build --jobs=200
+build --test_output=errors
+build --keep_going=true
diff --git a/tools/remote_build/manual.bazelrc b/tools/remote_build/manual.bazelrc
new file mode 100644
index 0000000000..b4fdc70637
--- /dev/null
+++ b/tools/remote_build/manual.bazelrc
@@ -0,0 +1,45 @@
+# Copyright 2018 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# bazelrc file for running gRPC tests with Foundry (remote build execution)
+# manually from developer's workstation
+
+import %workspace%/tools/remote_build/rbe_common.bazelrc
+
+build --remote_cache=remotebuildexecution.googleapis.com
+build --remote_executor=remotebuildexecution.googleapis.com
+build --tls_enabled=true
+
+# Enable authentication. This will pick up application default credentials by
+# default. You can use --auth_credentials=some_file.json to use a service
+# account credential instead.
+# How to setup credentials:
+# See https://cloud.google.com/remote-build-execution/docs/getting-started#set_credentials
+build --auth_enabled=true
+
+# Set flags for uploading to BES in order to view results in the Bazel Build
+# Results UI.
+build --bes_backend="buildeventservice.googleapis.com"
+build --bes_timeout=60s
+build --bes_results_url="https://source.cloud.google.com/results/invocations/"
+build --project_id=grpc-testing
+
+build --jobs=100
+
+# TODO(jtattermusch): this should be part of the common config
+# but currently sanitizers use different test_timeout values
+build --test_timeout=300,450,1200,3600
+
+# print output for tests that fail (default is "summary")
+build --test_output=errors
diff --git a/tools/remote_build/rbe_common.bazelrc b/tools/remote_build/rbe_common.bazelrc
new file mode 100644
index 0000000000..e8c7f0b9cb
--- /dev/null
+++ b/tools/remote_build/rbe_common.bazelrc
@@ -0,0 +1,83 @@
+# Copyright 2018 The gRPC Authors
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# bazelrc with Foundry settings common to both manual runs and runs started by Kokoro
+# see https://github.com/bazelbuild/bazel-toolchains/tree/master/bazelrc
+# for examples and more documentation
+
+startup --host_jvm_args=-Dbazel.DigestFunction=SHA256
+
+build --crosstool_top=@com_github_bazelbuild_bazeltoolchains//configs/ubuntu16_04_clang/1.0/bazel_0.16.1/default:toolchain
+build --extra_toolchains=@com_github_bazelbuild_bazeltoolchains//configs/ubuntu16_04_clang/1.0/bazel_0.16.1/cpp:cc-toolchain-clang-x86_64-default
+# Use custom execution platforms defined in third_party/toolchains
+build --extra_execution_platforms=//third_party/toolchains:rbe_ubuntu1604
+build --host_platform=//third_party/toolchains:rbe_ubuntu1604
+build --platforms=//third_party/toolchains:rbe_ubuntu1604
+
+build --spawn_strategy=remote
+build --strategy=Javac=remote
+build --strategy=Closure=remote
+build --genrule_strategy=remote
+build --remote_timeout=3600
+
+build --remote_instance_name=projects/grpc-testing/instances/default_instance
+
+build --verbose_failures=true
+
+build --experimental_strict_action_env=true
+build --action_env=BAZEL_DO_NOT_DETECT_CPP_TOOLCHAIN=1
+
+# don't use port server
+build --define GRPC_PORT_ISOLATED_RUNTIME=1
+# without verbose gRPC logs the test outputs are not very useful
+test --test_env=GRPC_VERBOSITY=debug
+
+# address sanitizer: most settings are already in %workspace%/.bazelrc
+# we only need a few additional ones that are Foundry specific
+build:asan --copt=-gmlt
+# TODO(jtattermusch): use more reasonable test timeout
+build:asan --test_timeout=3600
+build:asan --test_tag_filters=-qps_json_driver,-json_run_localhost
+
+# memory sanitizer: most settings are already in %workspace%/.bazelrc
+# we only need a few additional ones that are Foundry specific
+build:msan --copt=-gmlt
+# TODO(jtattermusch): use more reasonable test timeout
+build:msan --test_timeout=3600
+build:msan --cxxopt=--stdlib=libc++
+# setting LD_LIBRARY_PATH is necessary
+# to avoid "libc++.so.1: cannot open shared object file"
+build:msan --action_env=LD_LIBRARY_PATH=/usr/local/lib
+build:msan --host_crosstool_top=@com_github_bazelbuild_bazeltoolchains//configs/ubuntu16_04_clang/1.0/bazel_0.16.1/default:toolchain
+# override the config-agnostic crosstool_top
+build:msan --crosstool_top=@com_github_bazelbuild_bazeltoolchains//configs/ubuntu16_04_clang/1.0/bazel_0.16.1/msan:toolchain
+
+# thread sanitizer: most settings are already in %workspace%/.bazelrc
+# we only need a few additional ones that are Foundry specific
+build:tsan --copt=-gmlt
+# TODO(jtattermusch): use more reasonable test timeout
+build:tsan --test_timeout=3600
+build:tsan --test_tag_filters=-qps_json_driver,-json_run_localhost
+
+# undefined behavior sanitizer: most settings are already in %workspace%/.bazelrc
+# we only need a few additional ones that are Foundry specific
+build:ubsan --copt=-gmlt
+# TODO(jtattermusch): use more reasonable test timeout
+build:ubsan --test_timeout=3600
+# override the config-agnostic crosstool_top
+build:ubsan --crosstool_top=@com_github_bazelbuild_bazeltoolchains//configs/experimental/ubuntu16_04_clang/1.0/bazel_0.16.1/ubsan:toolchain
+# TODO(jtattermusch): remove this once Foundry adds the env to the docker image.
+# ubsan needs a symbolizer to work properly; otherwise the suppression file doesn't work
+# and we get test failures.
+build:ubsan --action_env=UBSAN_SYMBOLIZER_PATH=/usr/local/bin/llvm-symbolizer
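+
+# Illustrative usage (not part of any CI job definition): the configs above are
+# selected with --config on top of one of the bazelrc files in this directory,
+# for example:
+#   bazel --bazelrc=tools/remote_build/manual.bazelrc test --config=msan //test/...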
diff --git a/tools/run_tests/generated/lb_interop_test_scenarios.json b/tools/run_tests/generated/lb_interop_test_scenarios.json
new file mode 100644
index 0000000000..4f956c568a
--- /dev/null
+++ b/tools/run_tests/generated/lb_interop_test_scenarios.json
@@ -0,0 +1,1167 @@
+
+[
+ {
+ "backend_configs": [],
+ "balancer_configs": [],
+ "cause_no_error_no_data_for_balancer_a_record": false,
+ "fallback_configs": [
+ {
+ "transport_sec": "insecure"
+ }
+ ],
+ "name": "no_balancer_because_lb_a_record_returns_nx_domain_insecure",
+ "skip_langs": [],
+ "transport_sec": "insecure"
+ },
+ {
+ "backend_configs": [],
+ "balancer_configs": [],
+ "cause_no_error_no_data_for_balancer_a_record": false,
+ "fallback_configs": [
+ {
+ "transport_sec": "alts"
+ }
+ ],
+ "name": "no_balancer_because_lb_a_record_returns_nx_domain_alts",
+ "skip_langs": [],
+ "transport_sec": "alts"
+ },
+ {
+ "backend_configs": [],
+ "balancer_configs": [],
+ "cause_no_error_no_data_for_balancer_a_record": false,
+ "fallback_configs": [
+ {
+ "transport_sec": "tls"
+ }
+ ],
+ "name": "no_balancer_because_lb_a_record_returns_nx_domain_tls",
+ "skip_langs": [],
+ "transport_sec": "tls"
+ },
+ {
+ "backend_configs": [],
+ "balancer_configs": [],
+ "cause_no_error_no_data_for_balancer_a_record": false,
+ "fallback_configs": [
+ {
+ "transport_sec": "tls"
+ }
+ ],
+ "name": "no_balancer_because_lb_a_record_returns_nx_domain_google_default_credentials",
+ "skip_langs": [],
+ "transport_sec": "google_default_credentials"
+ },
+ {
+ "backend_configs": [],
+ "balancer_configs": [],
+ "cause_no_error_no_data_for_balancer_a_record": true,
+ "fallback_configs": [
+ {
+ "transport_sec": "insecure"
+ }
+ ],
+ "name": "no_balancer_because_lb_a_record_returns_no_data_insecure",
+ "skip_langs": [],
+ "transport_sec": "insecure"
+ },
+ {
+ "backend_configs": [],
+ "balancer_configs": [],
+ "cause_no_error_no_data_for_balancer_a_record": true,
+ "fallback_configs": [
+ {
+ "transport_sec": "alts"
+ }
+ ],
+ "name": "no_balancer_because_lb_a_record_returns_no_data_alts",
+ "skip_langs": [],
+ "transport_sec": "alts"
+ },
+ {
+ "backend_configs": [],
+ "balancer_configs": [],
+ "cause_no_error_no_data_for_balancer_a_record": true,
+ "fallback_configs": [
+ {
+ "transport_sec": "tls"
+ }
+ ],
+ "name": "no_balancer_because_lb_a_record_returns_no_data_tls",
+ "skip_langs": [],
+ "transport_sec": "tls"
+ },
+ {
+ "backend_configs": [],
+ "balancer_configs": [],
+ "cause_no_error_no_data_for_balancer_a_record": true,
+ "fallback_configs": [
+ {
+ "transport_sec": "tls"
+ }
+ ],
+ "name": "no_balancer_because_lb_a_record_returns_no_data_google_default_credentials",
+ "skip_langs": [],
+ "transport_sec": "google_default_credentials"
+ },
+ {
+ "backend_configs": [
+ {
+ "transport_sec": "insecure"
+ }
+ ],
+ "balancer_configs": [
+ {
+ "short_stream": true,
+ "transport_sec": "insecure"
+ }
+ ],
+ "cause_no_error_no_data_for_balancer_a_record": false,
+ "fallback_configs": [],
+ "name": "client_referred_to_backend_insecure_short_stream_True",
+ "skip_langs": [
+ "java"
+ ],
+ "transport_sec": "insecure"
+ },
+ {
+ "backend_configs": [
+ {
+ "transport_sec": "alts"
+ }
+ ],
+ "balancer_configs": [
+ {
+ "short_stream": true,
+ "transport_sec": "alts"
+ }
+ ],
+ "cause_no_error_no_data_for_balancer_a_record": false,
+ "fallback_configs": [],
+ "name": "client_referred_to_backend_alts_short_stream_True",
+ "skip_langs": [
+ "java"
+ ],
+ "transport_sec": "alts"
+ },
+ {
+ "backend_configs": [
+ {
+ "transport_sec": "tls"
+ }
+ ],
+ "balancer_configs": [
+ {
+ "short_stream": true,
+ "transport_sec": "tls"
+ }
+ ],
+ "cause_no_error_no_data_for_balancer_a_record": false,
+ "fallback_configs": [],
+ "name": "client_referred_to_backend_tls_short_stream_True",
+ "skip_langs": [
+ "java",
+ "java"
+ ],
+ "transport_sec": "tls"
+ },
+ {
+ "backend_configs": [
+ {
+ "transport_sec": "alts"
+ }
+ ],
+ "balancer_configs": [
+ {
+ "short_stream": true,
+ "transport_sec": "alts"
+ }
+ ],
+ "cause_no_error_no_data_for_balancer_a_record": false,
+ "fallback_configs": [],
+ "name": "client_referred_to_backend_google_default_credentials_short_stream_True",
+ "skip_langs": [
+ "java"
+ ],
+ "transport_sec": "google_default_credentials"
+ },
+ {
+ "backend_configs": [
+ {
+ "transport_sec": "insecure"
+ }
+ ],
+ "balancer_configs": [
+ {
+ "short_stream": false,
+ "transport_sec": "insecure"
+ }
+ ],
+ "cause_no_error_no_data_for_balancer_a_record": false,
+ "fallback_configs": [],
+ "name": "client_referred_to_backend_insecure_short_stream_False",
+ "skip_langs": [],
+ "transport_sec": "insecure"
+ },
+ {
+ "backend_configs": [
+ {
+ "transport_sec": "alts"
+ }
+ ],
+ "balancer_configs": [
+ {
+ "short_stream": false,
+ "transport_sec": "alts"
+ }
+ ],
+ "cause_no_error_no_data_for_balancer_a_record": false,
+ "fallback_configs": [],
+ "name": "client_referred_to_backend_alts_short_stream_False",
+ "skip_langs": [],
+ "transport_sec": "alts"
+ },
+ {
+ "backend_configs": [
+ {
+ "transport_sec": "tls"
+ }
+ ],
+ "balancer_configs": [
+ {
+ "short_stream": false,
+ "transport_sec": "tls"
+ }
+ ],
+ "cause_no_error_no_data_for_balancer_a_record": false,
+ "fallback_configs": [],
+ "name": "client_referred_to_backend_tls_short_stream_False",
+ "skip_langs": [
+ "java"
+ ],
+ "transport_sec": "tls"
+ },
+ {
+ "backend_configs": [
+ {
+ "transport_sec": "alts"
+ }
+ ],
+ "balancer_configs": [
+ {
+ "short_stream": false,
+ "transport_sec": "alts"
+ }
+ ],
+ "cause_no_error_no_data_for_balancer_a_record": false,
+ "fallback_configs": [],
+ "name": "client_referred_to_backend_google_default_credentials_short_stream_False",
+ "skip_langs": [],
+ "transport_sec": "google_default_credentials"
+ },
+ {
+ "backend_configs": [
+ {
+ "transport_sec": "alts"
+ }
+ ],
+ "balancer_configs": [
+ {
+ "short_stream": true,
+ "transport_sec": "alts"
+ }
+ ],
+ "cause_no_error_no_data_for_balancer_a_record": false,
+ "fallback_configs": [
+ {
+ "transport_sec": "insecure"
+ }
+ ],
+ "name": "client_referred_to_backend_fallback_broken_alts_short_stream_True",
+ "skip_langs": [
+ "java"
+ ],
+ "transport_sec": "alts"
+ },
+ {
+ "backend_configs": [
+ {
+ "transport_sec": "tls"
+ }
+ ],
+ "balancer_configs": [
+ {
+ "short_stream": true,
+ "transport_sec": "tls"
+ }
+ ],
+ "cause_no_error_no_data_for_balancer_a_record": false,
+ "fallback_configs": [
+ {
+ "transport_sec": "insecure"
+ }
+ ],
+ "name": "client_referred_to_backend_fallback_broken_tls_short_stream_True",
+ "skip_langs": [
+ "java",
+ "java"
+ ],
+ "transport_sec": "tls"
+ },
+ {
+ "backend_configs": [
+ {
+ "transport_sec": "alts"
+ }
+ ],
+ "balancer_configs": [
+ {
+ "short_stream": true,
+ "transport_sec": "alts"
+ }
+ ],
+ "cause_no_error_no_data_for_balancer_a_record": false,
+ "fallback_configs": [
+ {
+ "transport_sec": "insecure"
+ }
+ ],
+ "name": "client_referred_to_backend_fallback_broken_google_default_credentials_short_stream_True",
+ "skip_langs": [
+ "java"
+ ],
+ "transport_sec": "google_default_credentials"
+ },
+ {
+ "backend_configs": [
+ {
+ "transport_sec": "alts"
+ }
+ ],
+ "balancer_configs": [
+ {
+ "short_stream": false,
+ "transport_sec": "alts"
+ }
+ ],
+ "cause_no_error_no_data_for_balancer_a_record": false,
+ "fallback_configs": [
+ {
+ "transport_sec": "insecure"
+ }
+ ],
+ "name": "client_referred_to_backend_fallback_broken_alts_short_stream_False",
+ "skip_langs": [],
+ "transport_sec": "alts"
+ },
+ {
+ "backend_configs": [
+ {
+ "transport_sec": "tls"
+ }
+ ],
+ "balancer_configs": [
+ {
+ "short_stream": false,
+ "transport_sec": "tls"
+ }
+ ],
+ "cause_no_error_no_data_for_balancer_a_record": false,
+ "fallback_configs": [
+ {
+ "transport_sec": "insecure"
+ }
+ ],
+ "name": "client_referred_to_backend_fallback_broken_tls_short_stream_False",
+ "skip_langs": [
+ "java"
+ ],
+ "transport_sec": "tls"
+ },
+ {
+ "backend_configs": [
+ {
+ "transport_sec": "alts"
+ }
+ ],
+ "balancer_configs": [
+ {
+ "short_stream": false,
+ "transport_sec": "alts"
+ }
+ ],
+ "cause_no_error_no_data_for_balancer_a_record": false,
+ "fallback_configs": [
+ {
+ "transport_sec": "insecure"
+ }
+ ],
+ "name": "client_referred_to_backend_fallback_broken_google_default_credentials_short_stream_False",
+ "skip_langs": [],
+ "transport_sec": "google_default_credentials"
+ },
+ {
+ "backend_configs": [
+ {
+ "transport_sec": "insecure"
+ },
+ {
+ "transport_sec": "insecure"
+ },
+ {
+ "transport_sec": "insecure"
+ },
+ {
+ "transport_sec": "insecure"
+ },
+ {
+ "transport_sec": "insecure"
+ }
+ ],
+ "balancer_configs": [
+ {
+ "short_stream": true,
+ "transport_sec": "insecure"
+ }
+ ],
+ "cause_no_error_no_data_for_balancer_a_record": false,
+ "fallback_configs": [],
+ "name": "client_referred_to_backend_multiple_backends_insecure_short_stream_True",
+ "skip_langs": [
+ "java"
+ ],
+ "transport_sec": "insecure"
+ },
+ {
+ "backend_configs": [
+ {
+ "transport_sec": "alts"
+ },
+ {
+ "transport_sec": "alts"
+ },
+ {
+ "transport_sec": "alts"
+ },
+ {
+ "transport_sec": "alts"
+ },
+ {
+ "transport_sec": "alts"
+ }
+ ],
+ "balancer_configs": [
+ {
+ "short_stream": true,
+ "transport_sec": "alts"
+ }
+ ],
+ "cause_no_error_no_data_for_balancer_a_record": false,
+ "fallback_configs": [],
+ "name": "client_referred_to_backend_multiple_backends_alts_short_stream_True",
+ "skip_langs": [
+ "java"
+ ],
+ "transport_sec": "alts"
+ },
+ {
+ "backend_configs": [
+ {
+ "transport_sec": "tls"
+ },
+ {
+ "transport_sec": "tls"
+ },
+ {
+ "transport_sec": "tls"
+ },
+ {
+ "transport_sec": "tls"
+ },
+ {
+ "transport_sec": "tls"
+ }
+ ],
+ "balancer_configs": [
+ {
+ "short_stream": true,
+ "transport_sec": "tls"
+ }
+ ],
+ "cause_no_error_no_data_for_balancer_a_record": false,
+ "fallback_configs": [],
+ "name": "client_referred_to_backend_multiple_backends_tls_short_stream_True",
+ "skip_langs": [
+ "java",
+ "java"
+ ],
+ "transport_sec": "tls"
+ },
+ {
+ "backend_configs": [
+ {
+ "transport_sec": "alts"
+ },
+ {
+ "transport_sec": "alts"
+ },
+ {
+ "transport_sec": "alts"
+ },
+ {
+ "transport_sec": "alts"
+ },
+ {
+ "transport_sec": "alts"
+ }
+ ],
+ "balancer_configs": [
+ {
+ "short_stream": true,
+ "transport_sec": "alts"
+ }
+ ],
+ "cause_no_error_no_data_for_balancer_a_record": false,
+ "fallback_configs": [],
+ "name": "client_referred_to_backend_multiple_backends_google_default_credentials_short_stream_True",
+ "skip_langs": [
+ "java"
+ ],
+ "transport_sec": "google_default_credentials"
+ },
+ {
+ "backend_configs": [
+ {
+ "transport_sec": "insecure"
+ },
+ {
+ "transport_sec": "insecure"
+ },
+ {
+ "transport_sec": "insecure"
+ },
+ {
+ "transport_sec": "insecure"
+ },
+ {
+ "transport_sec": "insecure"
+ }
+ ],
+ "balancer_configs": [
+ {
+ "short_stream": false,
+ "transport_sec": "insecure"
+ }
+ ],
+ "cause_no_error_no_data_for_balancer_a_record": false,
+ "fallback_configs": [],
+ "name": "client_referred_to_backend_multiple_backends_insecure_short_stream_False",
+ "skip_langs": [],
+ "transport_sec": "insecure"
+ },
+ {
+ "backend_configs": [
+ {
+ "transport_sec": "alts"
+ },
+ {
+ "transport_sec": "alts"
+ },
+ {
+ "transport_sec": "alts"
+ },
+ {
+ "transport_sec": "alts"
+ },
+ {
+ "transport_sec": "alts"
+ }
+ ],
+ "balancer_configs": [
+ {
+ "short_stream": false,
+ "transport_sec": "alts"
+ }
+ ],
+ "cause_no_error_no_data_for_balancer_a_record": false,
+ "fallback_configs": [],
+ "name": "client_referred_to_backend_multiple_backends_alts_short_stream_False",
+ "skip_langs": [],
+ "transport_sec": "alts"
+ },
+ {
+ "backend_configs": [
+ {
+ "transport_sec": "tls"
+ },
+ {
+ "transport_sec": "tls"
+ },
+ {
+ "transport_sec": "tls"
+ },
+ {
+ "transport_sec": "tls"
+ },
+ {
+ "transport_sec": "tls"
+ }
+ ],
+ "balancer_configs": [
+ {
+ "short_stream": false,
+ "transport_sec": "tls"
+ }
+ ],
+ "cause_no_error_no_data_for_balancer_a_record": false,
+ "fallback_configs": [],
+ "name": "client_referred_to_backend_multiple_backends_tls_short_stream_False",
+ "skip_langs": [
+ "java"
+ ],
+ "transport_sec": "tls"
+ },
+ {
+ "backend_configs": [
+ {
+ "transport_sec": "alts"
+ },
+ {
+ "transport_sec": "alts"
+ },
+ {
+ "transport_sec": "alts"
+ },
+ {
+ "transport_sec": "alts"
+ },
+ {
+ "transport_sec": "alts"
+ }
+ ],
+ "balancer_configs": [
+ {
+ "short_stream": false,
+ "transport_sec": "alts"
+ }
+ ],
+ "cause_no_error_no_data_for_balancer_a_record": false,
+ "fallback_configs": [],
+ "name": "client_referred_to_backend_multiple_backends_google_default_credentials_short_stream_False",
+ "skip_langs": [],
+ "transport_sec": "google_default_credentials"
+ },
+ {
+ "backend_configs": [],
+ "balancer_configs": [
+ {
+ "short_stream": true,
+ "transport_sec": "insecure"
+ }
+ ],
+ "cause_no_error_no_data_for_balancer_a_record": false,
+ "fallback_configs": [
+ {
+ "transport_sec": "insecure"
+ }
+ ],
+ "name": "client_falls_back_because_no_backends_insecure_short_stream_True",
+ "skip_langs": [
+ "go",
+ "java",
+ "java"
+ ],
+ "transport_sec": "insecure"
+ },
+ {
+ "backend_configs": [],
+ "balancer_configs": [
+ {
+ "short_stream": true,
+ "transport_sec": "alts"
+ }
+ ],
+ "cause_no_error_no_data_for_balancer_a_record": false,
+ "fallback_configs": [
+ {
+ "transport_sec": "alts"
+ }
+ ],
+ "name": "client_falls_back_because_no_backends_alts_short_stream_True",
+ "skip_langs": [
+ "go",
+ "java",
+ "java"
+ ],
+ "transport_sec": "alts"
+ },
+ {
+ "backend_configs": [],
+ "balancer_configs": [
+ {
+ "short_stream": true,
+ "transport_sec": "tls"
+ }
+ ],
+ "cause_no_error_no_data_for_balancer_a_record": false,
+ "fallback_configs": [
+ {
+ "transport_sec": "tls"
+ }
+ ],
+ "name": "client_falls_back_because_no_backends_tls_short_stream_True",
+ "skip_langs": [
+ "go",
+ "java",
+ "java",
+ "java"
+ ],
+ "transport_sec": "tls"
+ },
+ {
+ "backend_configs": [],
+ "balancer_configs": [
+ {
+ "short_stream": true,
+ "transport_sec": "alts"
+ }
+ ],
+ "cause_no_error_no_data_for_balancer_a_record": false,
+ "fallback_configs": [
+ {
+ "transport_sec": "tls"
+ }
+ ],
+ "name": "client_falls_back_because_no_backends_google_default_credentials_short_stream_True",
+ "skip_langs": [
+ "go",
+ "java",
+ "java"
+ ],
+ "transport_sec": "google_default_credentials"
+ },
+ {
+ "backend_configs": [],
+ "balancer_configs": [
+ {
+ "short_stream": false,
+ "transport_sec": "insecure"
+ }
+ ],
+ "cause_no_error_no_data_for_balancer_a_record": false,
+ "fallback_configs": [
+ {
+ "transport_sec": "insecure"
+ }
+ ],
+ "name": "client_falls_back_because_no_backends_insecure_short_stream_False",
+ "skip_langs": [
+ "go",
+ "java"
+ ],
+ "transport_sec": "insecure"
+ },
+ {
+ "backend_configs": [],
+ "balancer_configs": [
+ {
+ "short_stream": false,
+ "transport_sec": "alts"
+ }
+ ],
+ "cause_no_error_no_data_for_balancer_a_record": false,
+ "fallback_configs": [
+ {
+ "transport_sec": "alts"
+ }
+ ],
+ "name": "client_falls_back_because_no_backends_alts_short_stream_False",
+ "skip_langs": [
+ "go",
+ "java"
+ ],
+ "transport_sec": "alts"
+ },
+ {
+ "backend_configs": [],
+ "balancer_configs": [
+ {
+ "short_stream": false,
+ "transport_sec": "tls"
+ }
+ ],
+ "cause_no_error_no_data_for_balancer_a_record": false,
+ "fallback_configs": [
+ {
+ "transport_sec": "tls"
+ }
+ ],
+ "name": "client_falls_back_because_no_backends_tls_short_stream_False",
+ "skip_langs": [
+ "go",
+ "java",
+ "java"
+ ],
+ "transport_sec": "tls"
+ },
+ {
+ "backend_configs": [],
+ "balancer_configs": [
+ {
+ "short_stream": false,
+ "transport_sec": "alts"
+ }
+ ],
+ "cause_no_error_no_data_for_balancer_a_record": false,
+ "fallback_configs": [
+ {
+ "transport_sec": "tls"
+ }
+ ],
+ "name": "client_falls_back_because_no_backends_google_default_credentials_short_stream_False",
+ "skip_langs": [
+ "go",
+ "java"
+ ],
+ "transport_sec": "google_default_credentials"
+ },
+ {
+ "backend_configs": [],
+ "balancer_configs": [
+ {
+ "short_stream": false,
+ "transport_sec": "insecure"
+ }
+ ],
+ "cause_no_error_no_data_for_balancer_a_record": false,
+ "fallback_configs": [
+ {
+ "transport_sec": "alts"
+ }
+ ],
+ "name": "client_falls_back_because_balancer_connection_broken_alts",
+ "skip_langs": [],
+ "transport_sec": "alts"
+ },
+ {
+ "backend_configs": [],
+ "balancer_configs": [
+ {
+ "short_stream": false,
+ "transport_sec": "insecure"
+ }
+ ],
+ "cause_no_error_no_data_for_balancer_a_record": false,
+ "fallback_configs": [
+ {
+ "transport_sec": "tls"
+ }
+ ],
+ "name": "client_falls_back_because_balancer_connection_broken_tls",
+ "skip_langs": [
+ "java"
+ ],
+ "transport_sec": "tls"
+ },
+ {
+ "backend_configs": [],
+ "balancer_configs": [
+ {
+ "short_stream": false,
+ "transport_sec": "insecure"
+ }
+ ],
+ "cause_no_error_no_data_for_balancer_a_record": false,
+ "fallback_configs": [
+ {
+ "transport_sec": "tls"
+ }
+ ],
+ "name": "client_falls_back_because_balancer_connection_broken_google_default_credentials",
+ "skip_langs": [],
+ "transport_sec": "google_default_credentials"
+ },
+ {
+ "backend_configs": [
+ {
+ "transport_sec": "insecure"
+ }
+ ],
+ "balancer_configs": [
+ {
+ "short_stream": true,
+ "transport_sec": "insecure"
+ },
+ {
+ "short_stream": true,
+ "transport_sec": "insecure"
+ },
+ {
+ "short_stream": true,
+ "transport_sec": "insecure"
+ },
+ {
+ "short_stream": true,
+ "transport_sec": "insecure"
+ },
+ {
+ "short_stream": true,
+ "transport_sec": "insecure"
+ }
+ ],
+ "cause_no_error_no_data_for_balancer_a_record": false,
+ "fallback_configs": [],
+ "name": "client_referred_to_backend_multiple_balancers_insecure_short_stream_True",
+ "skip_langs": [
+ "java"
+ ],
+ "transport_sec": "insecure"
+ },
+ {
+ "backend_configs": [
+ {
+ "transport_sec": "alts"
+ }
+ ],
+ "balancer_configs": [
+ {
+ "short_stream": true,
+ "transport_sec": "alts"
+ },
+ {
+ "short_stream": true,
+ "transport_sec": "alts"
+ },
+ {
+ "short_stream": true,
+ "transport_sec": "alts"
+ },
+ {
+ "short_stream": true,
+ "transport_sec": "alts"
+ },
+ {
+ "short_stream": true,
+ "transport_sec": "alts"
+ }
+ ],
+ "cause_no_error_no_data_for_balancer_a_record": false,
+ "fallback_configs": [],
+ "name": "client_referred_to_backend_multiple_balancers_alts_short_stream_True",
+ "skip_langs": [
+ "java"
+ ],
+ "transport_sec": "alts"
+ },
+ {
+ "backend_configs": [
+ {
+ "transport_sec": "tls"
+ }
+ ],
+ "balancer_configs": [
+ {
+ "short_stream": true,
+ "transport_sec": "tls"
+ },
+ {
+ "short_stream": true,
+ "transport_sec": "tls"
+ },
+ {
+ "short_stream": true,
+ "transport_sec": "tls"
+ },
+ {
+ "short_stream": true,
+ "transport_sec": "tls"
+ },
+ {
+ "short_stream": true,
+ "transport_sec": "tls"
+ }
+ ],
+ "cause_no_error_no_data_for_balancer_a_record": false,
+ "fallback_configs": [],
+ "name": "client_referred_to_backend_multiple_balancers_tls_short_stream_True",
+ "skip_langs": [
+ "java",
+ "java"
+ ],
+ "transport_sec": "tls"
+ },
+ {
+ "backend_configs": [
+ {
+ "transport_sec": "alts"
+ }
+ ],
+ "balancer_configs": [
+ {
+ "short_stream": true,
+ "transport_sec": "alts"
+ },
+ {
+ "short_stream": true,
+ "transport_sec": "alts"
+ },
+ {
+ "short_stream": true,
+ "transport_sec": "alts"
+ },
+ {
+ "short_stream": true,
+ "transport_sec": "alts"
+ },
+ {
+ "short_stream": true,
+ "transport_sec": "alts"
+ }
+ ],
+ "cause_no_error_no_data_for_balancer_a_record": false,
+ "fallback_configs": [],
+ "name": "client_referred_to_backend_multiple_balancers_google_default_credentials_short_stream_True",
+ "skip_langs": [
+ "java"
+ ],
+ "transport_sec": "google_default_credentials"
+ },
+ {
+ "backend_configs": [
+ {
+ "transport_sec": "insecure"
+ }
+ ],
+ "balancer_configs": [
+ {
+ "short_stream": false,
+ "transport_sec": "insecure"
+ },
+ {
+ "short_stream": false,
+ "transport_sec": "insecure"
+ },
+ {
+ "short_stream": false,
+ "transport_sec": "insecure"
+ },
+ {
+ "short_stream": false,
+ "transport_sec": "insecure"
+ },
+ {
+ "short_stream": false,
+ "transport_sec": "insecure"
+ }
+ ],
+ "cause_no_error_no_data_for_balancer_a_record": false,
+ "fallback_configs": [],
+ "name": "client_referred_to_backend_multiple_balancers_insecure_short_stream_False",
+ "skip_langs": [],
+ "transport_sec": "insecure"
+ },
+ {
+ "backend_configs": [
+ {
+ "transport_sec": "alts"
+ }
+ ],
+ "balancer_configs": [
+ {
+ "short_stream": false,
+ "transport_sec": "alts"
+ },
+ {
+ "short_stream": false,
+ "transport_sec": "alts"
+ },
+ {
+ "short_stream": false,
+ "transport_sec": "alts"
+ },
+ {
+ "short_stream": false,
+ "transport_sec": "alts"
+ },
+ {
+ "short_stream": false,
+ "transport_sec": "alts"
+ }
+ ],
+ "cause_no_error_no_data_for_balancer_a_record": false,
+ "fallback_configs": [],
+ "name": "client_referred_to_backend_multiple_balancers_alts_short_stream_False",
+ "skip_langs": [],
+ "transport_sec": "alts"
+ },
+ {
+ "backend_configs": [
+ {
+ "transport_sec": "tls"
+ }
+ ],
+ "balancer_configs": [
+ {
+ "short_stream": false,
+ "transport_sec": "tls"
+ },
+ {
+ "short_stream": false,
+ "transport_sec": "tls"
+ },
+ {
+ "short_stream": false,
+ "transport_sec": "tls"
+ },
+ {
+ "short_stream": false,
+ "transport_sec": "tls"
+ },
+ {
+ "short_stream": false,
+ "transport_sec": "tls"
+ }
+ ],
+ "cause_no_error_no_data_for_balancer_a_record": false,
+ "fallback_configs": [],
+ "name": "client_referred_to_backend_multiple_balancers_tls_short_stream_False",
+ "skip_langs": [
+ "java"
+ ],
+ "transport_sec": "tls"
+ },
+ {
+ "backend_configs": [
+ {
+ "transport_sec": "alts"
+ }
+ ],
+ "balancer_configs": [
+ {
+ "short_stream": false,
+ "transport_sec": "alts"
+ },
+ {
+ "short_stream": false,
+ "transport_sec": "alts"
+ },
+ {
+ "short_stream": false,
+ "transport_sec": "alts"
+ },
+ {
+ "short_stream": false,
+ "transport_sec": "alts"
+ },
+ {
+ "short_stream": false,
+ "transport_sec": "alts"
+ }
+ ],
+ "cause_no_error_no_data_for_balancer_a_record": false,
+ "fallback_configs": [],
+ "name": "client_referred_to_backend_multiple_balancers_google_default_credentials_short_stream_False",
+ "skip_langs": [],
+ "transport_sec": "google_default_credentials"
+ }
+]
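
The scenario list above is generated data: every entry carries the same keys (`name`, `transport_sec`, `skip_langs`, `balancer_configs`, `backend_configs`, `fallback_configs`, `cause_no_error_no_data_for_balancer_a_record`), matching what the gen_build_yaml.py generator added later in this patch emits. A minimal sketch of how a consumer could sanity-check the file; the path below is a placeholder assumption, not a path defined in this patch:

```python
import json

# Placeholder path for illustration; point it at the generated scenario file.
SCENARIOS_PATH = "lb_interop_test_scenarios.json"

REQUIRED_KEYS = {
    "name", "transport_sec", "skip_langs", "balancer_configs",
    "backend_configs", "fallback_configs",
    "cause_no_error_no_data_for_balancer_a_record",
}


def check_scenarios(path=SCENARIOS_PATH):
    """Verify that every scenario entry in the generated list has the expected shape."""
    with open(path) as f:
        scenarios = json.load(f)
    for scenario in scenarios:
        missing = REQUIRED_KEYS - set(scenario)
        assert not missing, "%s missing keys: %s" % (scenario.get("name"), missing)
        for balancer in scenario["balancer_configs"]:
            # Each balancer entry declares its own transport and stream behaviour.
            assert {"transport_sec", "short_stream"} <= set(balancer)
    return len(scenarios)


if __name__ == "__main__":
    print("checked %d scenarios" % check_scenarios())
```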
diff --git a/tools/run_tests/generated/sources_and_headers.json b/tools/run_tests/generated/sources_and_headers.json
index b27f5b5037..f79b391681 100644
--- a/tools/run_tests/generated/sources_and_headers.json
+++ b/tools/run_tests/generated/sources_and_headers.json
@@ -1159,7 +1159,7 @@
"headers": [],
"is_filegroup": false,
"language": "c",
- "name": "handshake_client",
+ "name": "handshake_client_ssl",
"src": [
"test/core/handshake/client_ssl.cc"
],
@@ -1178,7 +1178,7 @@
],
"is_filegroup": false,
"language": "c",
- "name": "handshake_server",
+ "name": "handshake_server_ssl",
"src": [
"test/core/handshake/server_ssl.cc",
"test/core/handshake/server_ssl_common.cc",
@@ -1593,7 +1593,7 @@
"headers": [],
"is_filegroup": false,
"language": "c",
- "name": "memory_profile_client",
+ "name": "memory_usage_client",
"src": [
"test/core/memory_usage/client.cc"
],
@@ -1610,7 +1610,7 @@
"headers": [],
"is_filegroup": false,
"language": "c",
- "name": "memory_profile_server",
+ "name": "memory_usage_server",
"src": [
"test/core/memory_usage/server.cc"
],
@@ -1627,7 +1627,7 @@
"headers": [],
"is_filegroup": false,
"language": "c",
- "name": "memory_profile_test",
+ "name": "memory_usage_test",
"src": [
"test/core/memory_usage/memory_usage_test.cc"
],
@@ -3394,6 +3394,28 @@
"grpc++_test_util",
"grpc_test_util"
],
+ "headers": [
+ "test/cpp/end2end/interceptors_util.h"
+ ],
+ "is_filegroup": false,
+ "language": "c++",
+ "name": "client_interceptors_end2end_test",
+ "src": [
+ "test/cpp/end2end/client_interceptors_end2end_test.cc",
+ "test/cpp/end2end/interceptors_util.h"
+ ],
+ "third_party": false,
+ "type": "target"
+ },
+ {
+ "deps": [
+ "gpr",
+ "gpr_test_util",
+ "grpc",
+ "grpc++",
+ "grpc++_test_util",
+ "grpc_test_util"
+ ],
"headers": [],
"is_filegroup": false,
"language": "c++",
@@ -4702,6 +4724,28 @@
"deps": [
"gpr",
"gpr_test_util",
+ "grpc",
+ "grpc++",
+ "grpc++_test_util",
+ "grpc_test_util"
+ ],
+ "headers": [
+ "test/cpp/end2end/interceptors_util.h"
+ ],
+ "is_filegroup": false,
+ "language": "c++",
+ "name": "server_interceptors_end2end_test",
+ "src": [
+ "test/cpp/end2end/interceptors_util.h",
+ "test/cpp/end2end/server_interceptors_end2end_test.cc"
+ ],
+ "third_party": false,
+ "type": "target"
+ },
+ {
+ "deps": [
+ "gpr",
+ "gpr_test_util",
"grpc++_test_util_unsecure",
"grpc++_unsecure",
"grpc_test_util_unsecure",
@@ -6557,24 +6601,6 @@
},
{
"deps": [
- "end2end_nosec_tests",
- "gpr",
- "gpr_test_util",
- "grpc_test_util_unsecure",
- "grpc_unsecure"
- ],
- "headers": [],
- "is_filegroup": false,
- "language": "c",
- "name": "inproc_nosec_test",
- "src": [
- "test/core/end2end/fixtures/inproc.cc"
- ],
- "third_party": false,
- "type": "target"
- },
- {
- "deps": [
"gpr",
"gpr_test_util",
"grpc++_test_config",
@@ -7047,6 +7073,7 @@
"grpc_lb_policy_grpclb_secure",
"grpc_lb_policy_pick_first",
"grpc_lb_policy_round_robin",
+ "grpc_lb_policy_xds_secure",
"grpc_max_age_filter",
"grpc_message_size_filter",
"grpc_resolver_dns_ares",
@@ -7140,6 +7167,7 @@
"grpc_lb_policy_grpclb",
"grpc_lb_policy_pick_first",
"grpc_lb_policy_round_robin",
+ "grpc_lb_policy_xds",
"grpc_max_age_filter",
"grpc_message_size_filter",
"grpc_resolver_dns_ares",
@@ -9620,7 +9648,8 @@
"src/core/lib/transport/status_metadata.cc",
"src/core/lib/transport/timeout_encoding.cc",
"src/core/lib/transport/transport.cc",
- "src/core/lib/transport/transport_op_string.cc"
+ "src/core/lib/transport/transport_op_string.cc",
+ "src/core/lib/uri/uri_parser.cc"
],
"third_party": false,
"type": "filegroup"
@@ -9777,7 +9806,8 @@
"src/core/lib/transport/status_metadata.h",
"src/core/lib/transport/timeout_encoding.h",
"src/core/lib/transport/transport.h",
- "src/core/lib/transport/transport_impl.h"
+ "src/core/lib/transport/transport_impl.h",
+ "src/core/lib/uri/uri_parser.h"
],
"is_filegroup": true,
"language": "c",
@@ -9928,7 +9958,8 @@
"src/core/lib/transport/status_metadata.h",
"src/core/lib/transport/timeout_encoding.h",
"src/core/lib/transport/transport.h",
- "src/core/lib/transport/transport_impl.h"
+ "src/core/lib/transport/transport_impl.h",
+ "src/core/lib/uri/uri_parser.h"
],
"third_party": false,
"type": "filegroup"
@@ -9982,7 +10013,8 @@
"deps": [
"gpr",
"grpc_base",
- "grpc_deadline_filter"
+ "grpc_deadline_filter",
+ "health_proto"
],
"headers": [
"src/core/ext/filters/client_channel/backup_poller.h",
@@ -9990,6 +10022,7 @@
"src/core/ext/filters/client_channel/client_channel_channelz.h",
"src/core/ext/filters/client_channel/client_channel_factory.h",
"src/core/ext/filters/client_channel/connector.h",
+ "src/core/ext/filters/client_channel/health/health_check_client.h",
"src/core/ext/filters/client_channel/http_connect_handshaker.h",
"src/core/ext/filters/client_channel/http_proxy.h",
"src/core/ext/filters/client_channel/lb_policy.h",
@@ -10004,8 +10037,7 @@
"src/core/ext/filters/client_channel/resolver_registry.h",
"src/core/ext/filters/client_channel/retry_throttle.h",
"src/core/ext/filters/client_channel/subchannel.h",
- "src/core/ext/filters/client_channel/subchannel_index.h",
- "src/core/ext/filters/client_channel/uri_parser.h"
+ "src/core/ext/filters/client_channel/subchannel_index.h"
],
"is_filegroup": true,
"language": "c",
@@ -10023,6 +10055,8 @@
"src/core/ext/filters/client_channel/client_channel_plugin.cc",
"src/core/ext/filters/client_channel/connector.cc",
"src/core/ext/filters/client_channel/connector.h",
+ "src/core/ext/filters/client_channel/health/health_check_client.cc",
+ "src/core/ext/filters/client_channel/health/health_check_client.h",
"src/core/ext/filters/client_channel/http_connect_handshaker.cc",
"src/core/ext/filters/client_channel/http_connect_handshaker.h",
"src/core/ext/filters/client_channel/http_proxy.cc",
@@ -10051,9 +10085,7 @@
"src/core/ext/filters/client_channel/subchannel.cc",
"src/core/ext/filters/client_channel/subchannel.h",
"src/core/ext/filters/client_channel/subchannel_index.cc",
- "src/core/ext/filters/client_channel/subchannel_index.h",
- "src/core/ext/filters/client_channel/uri_parser.cc",
- "src/core/ext/filters/client_channel/uri_parser.h"
+ "src/core/ext/filters/client_channel/subchannel_index.h"
],
"third_party": false,
"type": "filegroup"
@@ -10137,6 +10169,7 @@
"grpc_base",
"grpc_client_channel",
"grpc_resolver_fake",
+ "grpclb_proto",
"nanopb"
],
"headers": [
@@ -10144,10 +10177,7 @@
"src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb.h",
"src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_channel.h",
"src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_client_stats.h",
- "src/core/ext/filters/client_channel/lb_policy/grpclb/load_balancer_api.h",
- "src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/google/protobuf/duration.pb.h",
- "src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/google/protobuf/timestamp.pb.h",
- "src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/load_balancer.pb.h"
+ "src/core/ext/filters/client_channel/lb_policy/grpclb/load_balancer_api.h"
],
"is_filegroup": true,
"language": "c",
@@ -10162,13 +10192,7 @@
"src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_client_stats.cc",
"src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_client_stats.h",
"src/core/ext/filters/client_channel/lb_policy/grpclb/load_balancer_api.cc",
- "src/core/ext/filters/client_channel/lb_policy/grpclb/load_balancer_api.h",
- "src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/google/protobuf/duration.pb.c",
- "src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/google/protobuf/duration.pb.h",
- "src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/google/protobuf/timestamp.pb.c",
- "src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/google/protobuf/timestamp.pb.h",
- "src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/load_balancer.pb.c",
- "src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/load_balancer.pb.h"
+ "src/core/ext/filters/client_channel/lb_policy/grpclb/load_balancer_api.h"
],
"third_party": false,
"type": "filegroup"
@@ -10180,6 +10204,7 @@
"grpc_client_channel",
"grpc_resolver_fake",
"grpc_secure",
+ "grpclb_proto",
"nanopb"
],
"headers": [
@@ -10187,10 +10212,7 @@
"src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb.h",
"src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_channel.h",
"src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_client_stats.h",
- "src/core/ext/filters/client_channel/lb_policy/grpclb/load_balancer_api.h",
- "src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/google/protobuf/duration.pb.h",
- "src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/google/protobuf/timestamp.pb.h",
- "src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/load_balancer.pb.h"
+ "src/core/ext/filters/client_channel/lb_policy/grpclb/load_balancer_api.h"
],
"is_filegroup": true,
"language": "c",
@@ -10205,13 +10227,7 @@
"src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_client_stats.cc",
"src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb_client_stats.h",
"src/core/ext/filters/client_channel/lb_policy/grpclb/load_balancer_api.cc",
- "src/core/ext/filters/client_channel/lb_policy/grpclb/load_balancer_api.h",
- "src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/google/protobuf/duration.pb.c",
- "src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/google/protobuf/duration.pb.h",
- "src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/google/protobuf/timestamp.pb.c",
- "src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/google/protobuf/timestamp.pb.h",
- "src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/load_balancer.pb.c",
- "src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/load_balancer.pb.h"
+ "src/core/ext/filters/client_channel/lb_policy/grpclb/load_balancer_api.h"
],
"third_party": false,
"type": "filegroup"
@@ -10254,6 +10270,69 @@
"deps": [
"gpr",
"grpc_base",
+ "grpc_client_channel",
+ "grpc_resolver_fake",
+ "grpclb_proto",
+ "nanopb"
+ ],
+ "headers": [
+ "src/core/ext/filters/client_channel/lb_policy/xds/xds.h",
+ "src/core/ext/filters/client_channel/lb_policy/xds/xds_channel.h",
+ "src/core/ext/filters/client_channel/lb_policy/xds/xds_client_stats.h",
+ "src/core/ext/filters/client_channel/lb_policy/xds/xds_load_balancer_api.h"
+ ],
+ "is_filegroup": true,
+ "language": "c",
+ "name": "grpc_lb_policy_xds",
+ "src": [
+ "src/core/ext/filters/client_channel/lb_policy/xds/xds.cc",
+ "src/core/ext/filters/client_channel/lb_policy/xds/xds.h",
+ "src/core/ext/filters/client_channel/lb_policy/xds/xds_channel.cc",
+ "src/core/ext/filters/client_channel/lb_policy/xds/xds_channel.h",
+ "src/core/ext/filters/client_channel/lb_policy/xds/xds_client_stats.cc",
+ "src/core/ext/filters/client_channel/lb_policy/xds/xds_client_stats.h",
+ "src/core/ext/filters/client_channel/lb_policy/xds/xds_load_balancer_api.cc",
+ "src/core/ext/filters/client_channel/lb_policy/xds/xds_load_balancer_api.h"
+ ],
+ "third_party": false,
+ "type": "filegroup"
+ },
+ {
+ "deps": [
+ "gpr",
+ "grpc_base",
+ "grpc_client_channel",
+ "grpc_resolver_fake",
+ "grpc_secure",
+ "grpclb_proto",
+ "nanopb"
+ ],
+ "headers": [
+ "src/core/ext/filters/client_channel/lb_policy/xds/xds.h",
+ "src/core/ext/filters/client_channel/lb_policy/xds/xds_channel.h",
+ "src/core/ext/filters/client_channel/lb_policy/xds/xds_client_stats.h",
+ "src/core/ext/filters/client_channel/lb_policy/xds/xds_load_balancer_api.h"
+ ],
+ "is_filegroup": true,
+ "language": "c",
+ "name": "grpc_lb_policy_xds_secure",
+ "src": [
+ "src/core/ext/filters/client_channel/lb_policy/xds/xds.cc",
+ "src/core/ext/filters/client_channel/lb_policy/xds/xds.h",
+ "src/core/ext/filters/client_channel/lb_policy/xds/xds_channel.h",
+ "src/core/ext/filters/client_channel/lb_policy/xds/xds_channel_secure.cc",
+ "src/core/ext/filters/client_channel/lb_policy/xds/xds_client_stats.cc",
+ "src/core/ext/filters/client_channel/lb_policy/xds/xds_client_stats.h",
+ "src/core/ext/filters/client_channel/lb_policy/xds/xds_load_balancer_api.cc",
+ "src/core/ext/filters/client_channel/lb_policy/xds/xds_load_balancer_api.h"
+ ],
+ "third_party": false,
+ "type": "filegroup"
+ },
+ {
+ "deps": [
+ "gpr",
+ "grpc_base",
"grpc_client_channel"
],
"headers": [
@@ -10395,6 +10474,7 @@
"headers": [
"include/grpc/grpc_security.h",
"src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb.h",
+ "src/core/ext/filters/client_channel/lb_policy/xds/xds.h",
"src/core/lib/security/context/security_context.h",
"src/core/lib/security/credentials/alts/alts_credentials.h",
"src/core/lib/security/credentials/composite/composite_credentials.h",
@@ -10409,11 +10489,14 @@
"src/core/lib/security/credentials/oauth2/oauth2_credentials.h",
"src/core/lib/security/credentials/plugin/plugin_credentials.h",
"src/core/lib/security/credentials/ssl/ssl_credentials.h",
- "src/core/lib/security/security_connector/alts_security_connector.h",
+ "src/core/lib/security/security_connector/alts/alts_security_connector.h",
+ "src/core/lib/security/security_connector/fake/fake_security_connector.h",
"src/core/lib/security/security_connector/load_system_roots.h",
"src/core/lib/security/security_connector/load_system_roots_linux.h",
- "src/core/lib/security/security_connector/local_security_connector.h",
+ "src/core/lib/security/security_connector/local/local_security_connector.h",
"src/core/lib/security/security_connector/security_connector.h",
+ "src/core/lib/security/security_connector/ssl/ssl_security_connector.h",
+ "src/core/lib/security/security_connector/ssl_utils.h",
"src/core/lib/security/transport/auth_filters.h",
"src/core/lib/security/transport/secure_endpoint.h",
"src/core/lib/security/transport/security_handshaker.h",
@@ -10427,6 +10510,7 @@
"src": [
"include/grpc/grpc_security.h",
"src/core/ext/filters/client_channel/lb_policy/grpclb/grpclb.h",
+ "src/core/ext/filters/client_channel/lb_policy/xds/xds.h",
"src/core/lib/http/httpcli_security_connector.cc",
"src/core/lib/security/context/security_context.cc",
"src/core/lib/security/context/security_context.h",
@@ -10458,16 +10542,22 @@
"src/core/lib/security/credentials/plugin/plugin_credentials.h",
"src/core/lib/security/credentials/ssl/ssl_credentials.cc",
"src/core/lib/security/credentials/ssl/ssl_credentials.h",
- "src/core/lib/security/security_connector/alts_security_connector.cc",
- "src/core/lib/security/security_connector/alts_security_connector.h",
+ "src/core/lib/security/security_connector/alts/alts_security_connector.cc",
+ "src/core/lib/security/security_connector/alts/alts_security_connector.h",
+ "src/core/lib/security/security_connector/fake/fake_security_connector.cc",
+ "src/core/lib/security/security_connector/fake/fake_security_connector.h",
"src/core/lib/security/security_connector/load_system_roots.h",
"src/core/lib/security/security_connector/load_system_roots_fallback.cc",
"src/core/lib/security/security_connector/load_system_roots_linux.cc",
"src/core/lib/security/security_connector/load_system_roots_linux.h",
- "src/core/lib/security/security_connector/local_security_connector.cc",
- "src/core/lib/security/security_connector/local_security_connector.h",
+ "src/core/lib/security/security_connector/local/local_security_connector.cc",
+ "src/core/lib/security/security_connector/local/local_security_connector.h",
"src/core/lib/security/security_connector/security_connector.cc",
"src/core/lib/security/security_connector/security_connector.h",
+ "src/core/lib/security/security_connector/ssl/ssl_security_connector.cc",
+ "src/core/lib/security/security_connector/ssl/ssl_security_connector.h",
+ "src/core/lib/security/security_connector/ssl_utils.cc",
+ "src/core/lib/security/security_connector/ssl_utils.h",
"src/core/lib/security/transport/auth_filters.h",
"src/core/lib/security/transport/client_auth_filter.cc",
"src/core/lib/security/transport/secure_endpoint.cc",
@@ -10923,6 +11013,46 @@
},
{
"deps": [
+ "nanopb"
+ ],
+ "headers": [
+ "src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/google/protobuf/duration.pb.h",
+ "src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/google/protobuf/timestamp.pb.h",
+ "src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/load_balancer.pb.h"
+ ],
+ "is_filegroup": true,
+ "language": "c",
+ "name": "grpclb_proto",
+ "src": [
+ "src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/google/protobuf/duration.pb.c",
+ "src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/google/protobuf/duration.pb.h",
+ "src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/google/protobuf/timestamp.pb.c",
+ "src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/google/protobuf/timestamp.pb.h",
+ "src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/load_balancer.pb.c",
+ "src/core/ext/filters/client_channel/lb_policy/grpclb/proto/grpc/lb/v1/load_balancer.pb.h"
+ ],
+ "third_party": false,
+ "type": "filegroup"
+ },
+ {
+ "deps": [
+ "nanopb"
+ ],
+ "headers": [
+ "src/core/ext/filters/client_channel/health/health.pb.h"
+ ],
+ "is_filegroup": true,
+ "language": "c",
+ "name": "health_proto",
+ "src": [
+ "src/core/ext/filters/client_channel/health/health.pb.c",
+ "src/core/ext/filters/client_channel/health/health.pb.h"
+ ],
+ "third_party": false,
+ "type": "filegroup"
+ },
+ {
+ "deps": [
"nanopb_headers"
],
"headers": [],
@@ -11068,6 +11198,8 @@
"include/grpcpp/impl/codegen/byte_buffer.h",
"include/grpcpp/impl/codegen/call.h",
"include/grpcpp/impl/codegen/call_hook.h",
+ "include/grpcpp/impl/codegen/call_op_set.h",
+ "include/grpcpp/impl/codegen/call_op_set_interface.h",
"include/grpcpp/impl/codegen/callback_common.h",
"include/grpcpp/impl/codegen/channel_interface.h",
"include/grpcpp/impl/codegen/client_callback.h",
@@ -11080,14 +11212,18 @@
"include/grpcpp/impl/codegen/core_codegen_interface.h",
"include/grpcpp/impl/codegen/create_auth_context.h",
"include/grpcpp/impl/codegen/grpc_library.h",
+ "include/grpcpp/impl/codegen/intercepted_channel.h",
"include/grpcpp/impl/codegen/interceptor.h",
+ "include/grpcpp/impl/codegen/interceptor_common.h",
"include/grpcpp/impl/codegen/metadata_map.h",
"include/grpcpp/impl/codegen/method_handler_impl.h",
"include/grpcpp/impl/codegen/rpc_method.h",
"include/grpcpp/impl/codegen/rpc_service_method.h",
"include/grpcpp/impl/codegen/security/auth_context.h",
"include/grpcpp/impl/codegen/serialization_traits.h",
+ "include/grpcpp/impl/codegen/server_callback.h",
"include/grpcpp/impl/codegen/server_context.h",
+ "include/grpcpp/impl/codegen/server_interceptor.h",
"include/grpcpp/impl/codegen/server_interface.h",
"include/grpcpp/impl/codegen/service_type.h",
"include/grpcpp/impl/codegen/slice.h",
@@ -11138,6 +11274,8 @@
"include/grpcpp/impl/codegen/byte_buffer.h",
"include/grpcpp/impl/codegen/call.h",
"include/grpcpp/impl/codegen/call_hook.h",
+ "include/grpcpp/impl/codegen/call_op_set.h",
+ "include/grpcpp/impl/codegen/call_op_set_interface.h",
"include/grpcpp/impl/codegen/callback_common.h",
"include/grpcpp/impl/codegen/channel_interface.h",
"include/grpcpp/impl/codegen/client_callback.h",
@@ -11150,14 +11288,18 @@
"include/grpcpp/impl/codegen/core_codegen_interface.h",
"include/grpcpp/impl/codegen/create_auth_context.h",
"include/grpcpp/impl/codegen/grpc_library.h",
+ "include/grpcpp/impl/codegen/intercepted_channel.h",
"include/grpcpp/impl/codegen/interceptor.h",
+ "include/grpcpp/impl/codegen/interceptor_common.h",
"include/grpcpp/impl/codegen/metadata_map.h",
"include/grpcpp/impl/codegen/method_handler_impl.h",
"include/grpcpp/impl/codegen/rpc_method.h",
"include/grpcpp/impl/codegen/rpc_service_method.h",
"include/grpcpp/impl/codegen/security/auth_context.h",
"include/grpcpp/impl/codegen/serialization_traits.h",
+ "include/grpcpp/impl/codegen/server_callback.h",
"include/grpcpp/impl/codegen/server_context.h",
+ "include/grpcpp/impl/codegen/server_interceptor.h",
"include/grpcpp/impl/codegen/server_interface.h",
"include/grpcpp/impl/codegen/service_type.h",
"include/grpcpp/impl/codegen/slice.h",
@@ -11215,6 +11357,7 @@
"grpc++_codegen_base",
"grpc_base_headers",
"grpc_transport_inproc_headers",
+ "health_proto",
"nanopb_headers"
],
"headers": [
@@ -11304,6 +11447,7 @@
"include/grpcpp/support/config.h",
"include/grpcpp/support/proto_buffer_reader.h",
"include/grpcpp/support/proto_buffer_writer.h",
+ "include/grpcpp/support/server_callback.h",
"include/grpcpp/support/slice.h",
"include/grpcpp/support/status.h",
"include/grpcpp/support/status_code_enum.h",
@@ -11315,7 +11459,6 @@
"src/cpp/common/channel_filter.h",
"src/cpp/server/dynamic_thread_pool.h",
"src/cpp/server/health/default_health_check_service.h",
- "src/cpp/server/health/health.pb.h",
"src/cpp/server/thread_pool_interface.h",
"src/cpp/thread_manager/thread_manager.h"
],
@@ -11409,6 +11552,7 @@
"include/grpcpp/support/config.h",
"include/grpcpp/support/proto_buffer_reader.h",
"include/grpcpp/support/proto_buffer_writer.h",
+ "include/grpcpp/support/server_callback.h",
"include/grpcpp/support/slice.h",
"include/grpcpp/support/status.h",
"include/grpcpp/support/status_code_enum.h",
@@ -11440,8 +11584,6 @@
"src/cpp/server/dynamic_thread_pool.h",
"src/cpp/server/health/default_health_check_service.cc",
"src/cpp/server/health/default_health_check_service.h",
- "src/cpp/server/health/health.pb.c",
- "src/cpp/server/health/health.pb.h",
"src/cpp/server/health/health_check_service.cc",
"src/cpp/server/health/health_check_service_server_builder_option.cc",
"src/cpp/server/server_builder.cc",
diff --git a/tools/run_tests/generated/tests.json b/tools/run_tests/generated/tests.json
index 4fdd26efa4..ef34cd6556 100644
--- a/tools/run_tests/generated/tests.json
+++ b/tools/run_tests/generated/tests.json
@@ -1447,7 +1447,7 @@
"flaky": false,
"gtest": false,
"language": "c",
- "name": "handshake_client",
+ "name": "handshake_client_ssl",
"platforms": [
"linux"
],
@@ -1467,7 +1467,7 @@
"flaky": false,
"gtest": false,
"language": "c",
- "name": "handshake_server",
+ "name": "handshake_server_ssl",
"platforms": [
"linux"
],
@@ -1879,7 +1879,7 @@
"flaky": false,
"gtest": false,
"language": "c",
- "name": "memory_profile_test",
+ "name": "memory_usage_test",
"platforms": [
"linux",
"mac",
@@ -4036,6 +4036,30 @@
"posix",
"windows"
],
+ "cpu_cost": 0.5,
+ "exclude_configs": [],
+ "exclude_iomgrs": [],
+ "flaky": false,
+ "gtest": true,
+ "language": "c++",
+ "name": "client_interceptors_end2end_test",
+ "platforms": [
+ "linux",
+ "mac",
+ "posix",
+ "windows"
+ ],
+ "uses_polling": true
+ },
+ {
+ "args": [],
+ "benchmark": false,
+ "ci_platforms": [
+ "linux",
+ "mac",
+ "posix",
+ "windows"
+ ],
"cpu_cost": 1.0,
"exclude_configs": [],
"exclude_iomgrs": [],
@@ -5126,6 +5150,30 @@
"posix",
"windows"
],
+ "cpu_cost": 0.5,
+ "exclude_configs": [],
+ "exclude_iomgrs": [],
+ "flaky": false,
+ "gtest": true,
+ "language": "c++",
+ "name": "server_interceptors_end2end_test",
+ "platforms": [
+ "linux",
+ "mac",
+ "posix",
+ "windows"
+ ],
+ "uses_polling": true
+ },
+ {
+ "args": [],
+ "benchmark": false,
+ "ci_platforms": [
+ "linux",
+ "mac",
+ "posix",
+ "windows"
+ ],
"cpu_cost": 1.0,
"exclude_configs": [],
"exclude_iomgrs": [],
@@ -56266,949 +56314,6 @@
},
{
"args": [
- "authority_not_supported"
- ],
- "ci_platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ],
- "cpu_cost": 1.0,
- "exclude_configs": [],
- "exclude_iomgrs": [],
- "flaky": false,
- "language": "c",
- "name": "inproc_nosec_test",
- "platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ]
- },
- {
- "args": [
- "binary_metadata"
- ],
- "ci_platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ],
- "cpu_cost": 0.1,
- "exclude_configs": [],
- "exclude_iomgrs": [],
- "flaky": false,
- "language": "c",
- "name": "inproc_nosec_test",
- "platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ]
- },
- {
- "args": [
- "cancel_after_accept"
- ],
- "ci_platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ],
- "cpu_cost": 0.1,
- "exclude_configs": [],
- "exclude_iomgrs": [],
- "flaky": false,
- "language": "c",
- "name": "inproc_nosec_test",
- "platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ]
- },
- {
- "args": [
- "cancel_after_client_done"
- ],
- "ci_platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ],
- "cpu_cost": 0.1,
- "exclude_configs": [],
- "exclude_iomgrs": [],
- "flaky": false,
- "language": "c",
- "name": "inproc_nosec_test",
- "platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ]
- },
- {
- "args": [
- "cancel_after_invoke"
- ],
- "ci_platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ],
- "cpu_cost": 0.1,
- "exclude_configs": [],
- "exclude_iomgrs": [],
- "flaky": false,
- "language": "c",
- "name": "inproc_nosec_test",
- "platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ]
- },
- {
- "args": [
- "cancel_after_round_trip"
- ],
- "ci_platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ],
- "cpu_cost": 0.1,
- "exclude_configs": [],
- "exclude_iomgrs": [],
- "flaky": false,
- "language": "c",
- "name": "inproc_nosec_test",
- "platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ]
- },
- {
- "args": [
- "cancel_before_invoke"
- ],
- "ci_platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ],
- "cpu_cost": 0.1,
- "exclude_configs": [],
- "exclude_iomgrs": [],
- "flaky": false,
- "language": "c",
- "name": "inproc_nosec_test",
- "platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ]
- },
- {
- "args": [
- "cancel_in_a_vacuum"
- ],
- "ci_platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ],
- "cpu_cost": 0.1,
- "exclude_configs": [],
- "exclude_iomgrs": [],
- "flaky": false,
- "language": "c",
- "name": "inproc_nosec_test",
- "platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ]
- },
- {
- "args": [
- "cancel_with_status"
- ],
- "ci_platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ],
- "cpu_cost": 0.1,
- "exclude_configs": [],
- "exclude_iomgrs": [],
- "flaky": false,
- "language": "c",
- "name": "inproc_nosec_test",
- "platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ]
- },
- {
- "args": [
- "channelz"
- ],
- "ci_platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ],
- "cpu_cost": 1.0,
- "exclude_configs": [],
- "exclude_iomgrs": [],
- "flaky": false,
- "language": "c",
- "name": "inproc_nosec_test",
- "platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ]
- },
- {
- "args": [
- "empty_batch"
- ],
- "ci_platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ],
- "cpu_cost": 0.1,
- "exclude_configs": [],
- "exclude_iomgrs": [],
- "flaky": false,
- "language": "c",
- "name": "inproc_nosec_test",
- "platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ]
- },
- {
- "args": [
- "filter_call_init_fails"
- ],
- "ci_platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ],
- "cpu_cost": 1.0,
- "exclude_configs": [],
- "exclude_iomgrs": [],
- "flaky": false,
- "language": "c",
- "name": "inproc_nosec_test",
- "platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ]
- },
- {
- "args": [
- "filter_causes_close"
- ],
- "ci_platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ],
- "cpu_cost": 0.1,
- "exclude_configs": [],
- "exclude_iomgrs": [],
- "flaky": false,
- "language": "c",
- "name": "inproc_nosec_test",
- "platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ]
- },
- {
- "args": [
- "filter_latency"
- ],
- "ci_platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ],
- "cpu_cost": 0.1,
- "exclude_configs": [],
- "exclude_iomgrs": [],
- "flaky": false,
- "language": "c",
- "name": "inproc_nosec_test",
- "platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ]
- },
- {
- "args": [
- "filter_status_code"
- ],
- "ci_platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ],
- "cpu_cost": 0.1,
- "exclude_configs": [],
- "exclude_iomgrs": [],
- "flaky": false,
- "language": "c",
- "name": "inproc_nosec_test",
- "platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ]
- },
- {
- "args": [
- "high_initial_seqno"
- ],
- "ci_platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ],
- "cpu_cost": 0.1,
- "exclude_configs": [],
- "exclude_iomgrs": [],
- "flaky": false,
- "language": "c",
- "name": "inproc_nosec_test",
- "platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ]
- },
- {
- "args": [
- "hpack_size"
- ],
- "ci_platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ],
- "cpu_cost": 0.1,
- "exclude_configs": [],
- "exclude_iomgrs": [],
- "flaky": false,
- "language": "c",
- "name": "inproc_nosec_test",
- "platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ]
- },
- {
- "args": [
- "idempotent_request"
- ],
- "ci_platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ],
- "cpu_cost": 1.0,
- "exclude_configs": [],
- "exclude_iomgrs": [],
- "flaky": false,
- "language": "c",
- "name": "inproc_nosec_test",
- "platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ]
- },
- {
- "args": [
- "invoke_large_request"
- ],
- "ci_platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ],
- "cpu_cost": 1.0,
- "exclude_configs": [],
- "exclude_iomgrs": [],
- "flaky": false,
- "language": "c",
- "name": "inproc_nosec_test",
- "platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ]
- },
- {
- "args": [
- "large_metadata"
- ],
- "ci_platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ],
- "cpu_cost": 1.0,
- "exclude_configs": [],
- "exclude_iomgrs": [],
- "flaky": false,
- "language": "c",
- "name": "inproc_nosec_test",
- "platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ]
- },
- {
- "args": [
- "max_message_length"
- ],
- "ci_platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ],
- "cpu_cost": 0.1,
- "exclude_configs": [],
- "exclude_iomgrs": [],
- "flaky": false,
- "language": "c",
- "name": "inproc_nosec_test",
- "platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ]
- },
- {
- "args": [
- "negative_deadline"
- ],
- "ci_platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ],
- "cpu_cost": 1.0,
- "exclude_configs": [],
- "exclude_iomgrs": [],
- "flaky": false,
- "language": "c",
- "name": "inproc_nosec_test",
- "platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ]
- },
- {
- "args": [
- "network_status_change"
- ],
- "ci_platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ],
- "cpu_cost": 0.1,
- "exclude_configs": [],
- "exclude_iomgrs": [],
- "flaky": false,
- "language": "c",
- "name": "inproc_nosec_test",
- "platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ]
- },
- {
- "args": [
- "no_error_on_hotpath"
- ],
- "ci_platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ],
- "cpu_cost": 1.0,
- "exclude_configs": [],
- "exclude_iomgrs": [],
- "flaky": false,
- "language": "c",
- "name": "inproc_nosec_test",
- "platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ]
- },
- {
- "args": [
- "no_logging"
- ],
- "ci_platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ],
- "cpu_cost": 1.0,
- "exclude_configs": [],
- "exclude_iomgrs": [],
- "flaky": false,
- "language": "c",
- "name": "inproc_nosec_test",
- "platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ]
- },
- {
- "args": [
- "no_op"
- ],
- "ci_platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ],
- "cpu_cost": 1.0,
- "exclude_configs": [],
- "exclude_iomgrs": [],
- "flaky": false,
- "language": "c",
- "name": "inproc_nosec_test",
- "platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ]
- },
- {
- "args": [
- "payload"
- ],
- "ci_platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ],
- "cpu_cost": 1.0,
- "exclude_configs": [],
- "exclude_iomgrs": [],
- "flaky": false,
- "language": "c",
- "name": "inproc_nosec_test",
- "platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ]
- },
- {
- "args": [
- "ping_pong_streaming"
- ],
- "ci_platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ],
- "cpu_cost": 0.1,
- "exclude_configs": [],
- "exclude_iomgrs": [],
- "flaky": false,
- "language": "c",
- "name": "inproc_nosec_test",
- "platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ]
- },
- {
- "args": [
- "registered_call"
- ],
- "ci_platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ],
- "cpu_cost": 1.0,
- "exclude_configs": [],
- "exclude_iomgrs": [],
- "flaky": false,
- "language": "c",
- "name": "inproc_nosec_test",
- "platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ]
- },
- {
- "args": [
- "request_with_flags"
- ],
- "ci_platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ],
- "cpu_cost": 0.1,
- "exclude_configs": [],
- "exclude_iomgrs": [],
- "flaky": false,
- "language": "c",
- "name": "inproc_nosec_test",
- "platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ]
- },
- {
- "args": [
- "request_with_payload"
- ],
- "ci_platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ],
- "cpu_cost": 0.1,
- "exclude_configs": [],
- "exclude_iomgrs": [],
- "flaky": false,
- "language": "c",
- "name": "inproc_nosec_test",
- "platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ]
- },
- {
- "args": [
- "resource_quota_server"
- ],
- "ci_platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ],
- "cpu_cost": 1.0,
- "exclude_configs": [],
- "exclude_iomgrs": [],
- "flaky": false,
- "language": "c",
- "name": "inproc_nosec_test",
- "platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ]
- },
- {
- "args": [
- "server_finishes_request"
- ],
- "ci_platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ],
- "cpu_cost": 0.1,
- "exclude_configs": [],
- "exclude_iomgrs": [],
- "flaky": false,
- "language": "c",
- "name": "inproc_nosec_test",
- "platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ]
- },
- {
- "args": [
- "shutdown_finishes_calls"
- ],
- "ci_platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ],
- "cpu_cost": 0.1,
- "exclude_configs": [],
- "exclude_iomgrs": [],
- "flaky": false,
- "language": "c",
- "name": "inproc_nosec_test",
- "platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ]
- },
- {
- "args": [
- "shutdown_finishes_tags"
- ],
- "ci_platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ],
- "cpu_cost": 0.1,
- "exclude_configs": [],
- "exclude_iomgrs": [],
- "flaky": false,
- "language": "c",
- "name": "inproc_nosec_test",
- "platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ]
- },
- {
- "args": [
- "simple_cacheable_request"
- ],
- "ci_platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ],
- "cpu_cost": 0.1,
- "exclude_configs": [],
- "exclude_iomgrs": [],
- "flaky": false,
- "language": "c",
- "name": "inproc_nosec_test",
- "platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ]
- },
- {
- "args": [
- "simple_metadata"
- ],
- "ci_platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ],
- "cpu_cost": 1.0,
- "exclude_configs": [],
- "exclude_iomgrs": [],
- "flaky": false,
- "language": "c",
- "name": "inproc_nosec_test",
- "platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ]
- },
- {
- "args": [
- "simple_request"
- ],
- "ci_platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ],
- "cpu_cost": 1.0,
- "exclude_configs": [],
- "exclude_iomgrs": [],
- "flaky": false,
- "language": "c",
- "name": "inproc_nosec_test",
- "platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ]
- },
- {
- "args": [
- "streaming_error_response"
- ],
- "ci_platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ],
- "cpu_cost": 0.1,
- "exclude_configs": [],
- "exclude_iomgrs": [],
- "flaky": false,
- "language": "c",
- "name": "inproc_nosec_test",
- "platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ]
- },
- {
- "args": [
- "trailing_metadata"
- ],
- "ci_platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ],
- "cpu_cost": 1.0,
- "exclude_configs": [],
- "exclude_iomgrs": [],
- "flaky": false,
- "language": "c",
- "name": "inproc_nosec_test",
- "platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ]
- },
- {
- "args": [
- "workaround_cronet_compression"
- ],
- "ci_platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ],
- "cpu_cost": 1.0,
- "exclude_configs": [],
- "exclude_iomgrs": [],
- "flaky": false,
- "language": "c",
- "name": "inproc_nosec_test",
- "platforms": [
- "windows",
- "linux",
- "mac",
- "posix"
- ]
- },
- {
- "args": [
"--scenarios_json",
"{\"scenarios\": [{\"name\": \"cpp_protobuf_async_unary_1channel_100rpcs_1MB\", \"warmup_seconds\": 0, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"async_server_threads\": 0, \"channel_args\": [{\"str_value\": \"throughput\", \"name\": \"grpc.optimization_target\"}], \"security_params\": null, \"threads_per_cq\": 0, \"server_type\": \"ASYNC_SERVER\"}, \"num_clients\": 1, \"client_config\": {\"security_params\": null, \"channel_args\": [{\"str_value\": \"throughput\", \"name\": \"grpc.optimization_target\"}], \"async_client_threads\": 0, \"outstanding_rpcs_per_channel\": 100, \"rpc_type\": \"UNARY\", \"payload_config\": {\"simple_params\": {\"resp_size\": 1048576, \"req_size\": 1048576}}, \"client_channels\": 1, \"threads_per_cq\": 0, \"load_params\": {\"closed_loop\": {}}, \"client_type\": \"ASYNC_CLIENT\", \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}}]}"
],
diff --git a/tools/run_tests/lb_interop_tests/gen_build_yaml.py b/tools/run_tests/lb_interop_tests/gen_build_yaml.py
new file mode 100755
index 0000000000..b7d655b75b
--- /dev/null
+++ b/tools/run_tests/lb_interop_tests/gen_build_yaml.py
@@ -0,0 +1,347 @@
+#!/usr/bin/env python2.7
+# Copyright 2015 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Generates the appropriate JSON data for LB interop test scenarios."""
+
+import json
+import os
+import yaml
+
+all_scenarios = []
+
+# TODO(https://github.com/grpc/grpc-go/issues/2347): enable
+# client_falls_back_because_no_backends_* scenarios for Java/Go.
+
+# TODO(https://github.com/grpc/grpc-java/issues/4887): enable
+# *short_stream* scenarios for Java.
+
+# TODO(https://github.com/grpc/grpc-java/issues/4912): enable
+# Java TLS tests involving TLS to the balancer.
+
+
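+# Maps a scenario-level transport_sec to per-server security as a
+# (balancer_sec, backend_sec, fallback_sec) tuple: with
+# 'google_default_credentials' the balancer and backends use ALTS while the
+# fallbacks use TLS; every other value is used unchanged for all three.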
+def server_sec(transport_sec):
+ if transport_sec == 'google_default_credentials':
+ return 'alts', 'alts', 'tls'
+ return transport_sec, transport_sec, transport_sec
+
+
+def generate_no_balancer_because_lb_a_record_returns_nx_domain():
+ all_configs = []
+ for transport_sec in [
+ 'insecure', 'alts', 'tls', 'google_default_credentials'
+ ]:
+ balancer_sec, backend_sec, fallback_sec = server_sec(transport_sec)
+ config = {
+ 'name':
+ 'no_balancer_because_lb_a_record_returns_nx_domain_%s' %
+ transport_sec,
+ 'skip_langs': [],
+ 'transport_sec':
+ transport_sec,
+ 'balancer_configs': [],
+ 'backend_configs': [],
+ 'fallback_configs': [{
+ 'transport_sec': fallback_sec,
+ }],
+ 'cause_no_error_no_data_for_balancer_a_record':
+ False,
+ }
+ all_configs.append(config)
+ return all_configs
+
+
+all_scenarios += generate_no_balancer_because_lb_a_record_returns_nx_domain()
+
+
+def generate_no_balancer_because_lb_a_record_returns_no_data():
+ all_configs = []
+ for transport_sec in [
+ 'insecure', 'alts', 'tls', 'google_default_credentials'
+ ]:
+ balancer_sec, backend_sec, fallback_sec = server_sec(transport_sec)
+ config = {
+ 'name':
+ 'no_balancer_because_lb_a_record_returns_no_data_%s' %
+ transport_sec,
+ 'skip_langs': [],
+ 'transport_sec':
+ transport_sec,
+ 'balancer_configs': [],
+ 'backend_configs': [],
+ 'fallback_configs': [{
+ 'transport_sec': fallback_sec,
+ }],
+ 'cause_no_error_no_data_for_balancer_a_record':
+ True,
+ }
+ all_configs.append(config)
+ return all_configs
+
+
+all_scenarios += generate_no_balancer_because_lb_a_record_returns_no_data()
+
+
+def generate_client_referred_to_backend():
+ all_configs = []
+ for balancer_short_stream in [True, False]:
+ for transport_sec in [
+ 'insecure', 'alts', 'tls', 'google_default_credentials'
+ ]:
+ balancer_sec, backend_sec, fallback_sec = server_sec(transport_sec)
+ skip_langs = []
+ if transport_sec == 'tls':
+ skip_langs += ['java']
+ if balancer_short_stream:
+ skip_langs += ['java']
+ config = {
+ 'name':
+ 'client_referred_to_backend_%s_short_stream_%s' %
+ (transport_sec, balancer_short_stream),
+ 'skip_langs':
+ skip_langs,
+ 'transport_sec':
+ transport_sec,
+ 'balancer_configs': [{
+ 'transport_sec': balancer_sec,
+ 'short_stream': balancer_short_stream,
+ }],
+ 'backend_configs': [{
+ 'transport_sec': backend_sec,
+ }],
+ 'fallback_configs': [],
+ 'cause_no_error_no_data_for_balancer_a_record':
+ False,
+ }
+ all_configs.append(config)
+ return all_configs
+
+
+all_scenarios += generate_client_referred_to_backend()
+
+
+def generate_client_referred_to_backend_fallback_broken():
+ all_configs = []
+ for balancer_short_stream in [True, False]:
+ for transport_sec in ['alts', 'tls', 'google_default_credentials']:
+ balancer_sec, backend_sec, fallback_sec = server_sec(transport_sec)
+ skip_langs = []
+ if transport_sec == 'tls':
+ skip_langs += ['java']
+ if balancer_short_stream:
+ skip_langs += ['java']
+ config = {
+ 'name':
+ 'client_referred_to_backend_fallback_broken_%s_short_stream_%s'
+ % (transport_sec, balancer_short_stream),
+ 'skip_langs':
+ skip_langs,
+ 'transport_sec':
+ transport_sec,
+ 'balancer_configs': [{
+ 'transport_sec': balancer_sec,
+ 'short_stream': balancer_short_stream,
+ }],
+ 'backend_configs': [{
+ 'transport_sec': backend_sec,
+ }],
+ 'fallback_configs': [{
+ 'transport_sec': 'insecure',
+ }],
+ 'cause_no_error_no_data_for_balancer_a_record':
+ False,
+ }
+ all_configs.append(config)
+ return all_configs
+
+
+all_scenarios += generate_client_referred_to_backend_fallback_broken()
+
+
+def generate_client_referred_to_backend_multiple_backends():
+ all_configs = []
+ for balancer_short_stream in [True, False]:
+ for transport_sec in [
+ 'insecure', 'alts', 'tls', 'google_default_credentials'
+ ]:
+ balancer_sec, backend_sec, fallback_sec = server_sec(transport_sec)
+ skip_langs = []
+ if transport_sec == 'tls':
+ skip_langs += ['java']
+ if balancer_short_stream:
+ skip_langs += ['java']
+ config = {
+ 'name':
+ 'client_referred_to_backend_multiple_backends_%s_short_stream_%s'
+ % (transport_sec, balancer_short_stream),
+ 'skip_langs':
+ skip_langs,
+ 'transport_sec':
+ transport_sec,
+ 'balancer_configs': [{
+ 'transport_sec': balancer_sec,
+ 'short_stream': balancer_short_stream,
+ }],
+ 'backend_configs': [{
+ 'transport_sec': backend_sec,
+ }, {
+ 'transport_sec': backend_sec,
+ }, {
+ 'transport_sec': backend_sec,
+ }, {
+ 'transport_sec': backend_sec,
+ }, {
+ 'transport_sec': backend_sec,
+ }],
+ 'fallback_configs': [],
+ 'cause_no_error_no_data_for_balancer_a_record':
+ False,
+ }
+ all_configs.append(config)
+ return all_configs
+
+
+all_scenarios += generate_client_referred_to_backend_multiple_backends()
+
+
+def generate_client_falls_back_because_no_backends():
+ all_configs = []
+ for balancer_short_stream in [True, False]:
+ for transport_sec in [
+ 'insecure', 'alts', 'tls', 'google_default_credentials'
+ ]:
+ balancer_sec, backend_sec, fallback_sec = server_sec(transport_sec)
+ skip_langs = ['go', 'java']
+ if transport_sec == 'tls':
+ skip_langs += ['java']
+ if balancer_short_stream:
+ skip_langs += ['java']
+ config = {
+ 'name':
+ 'client_falls_back_because_no_backends_%s_short_stream_%s' %
+ (transport_sec, balancer_short_stream),
+ 'skip_langs':
+ skip_langs,
+ 'transport_sec':
+ transport_sec,
+ 'balancer_configs': [{
+ 'transport_sec': balancer_sec,
+ 'short_stream': balancer_short_stream,
+ }],
+ 'backend_configs': [],
+ 'fallback_configs': [{
+ 'transport_sec': fallback_sec,
+ }],
+ 'cause_no_error_no_data_for_balancer_a_record':
+ False,
+ }
+ all_configs.append(config)
+ return all_configs
+
+
+all_scenarios += generate_client_falls_back_because_no_backends()
+
+
+def generate_client_falls_back_because_balancer_connection_broken():
+ all_configs = []
+ for transport_sec in ['alts', 'tls', 'google_default_credentials']:
+ balancer_sec, backend_sec, fallback_sec = server_sec(transport_sec)
+ skip_langs = []
+ if transport_sec == 'tls':
+ skip_langs = ['java']
+ config = {
+ 'name':
+ 'client_falls_back_because_balancer_connection_broken_%s' %
+ transport_sec,
+ 'skip_langs':
+ skip_langs,
+ 'transport_sec':
+ transport_sec,
+ 'balancer_configs': [{
+ 'transport_sec': 'insecure',
+ 'short_stream': False,
+ }],
+ 'backend_configs': [],
+ 'fallback_configs': [{
+ 'transport_sec': fallback_sec,
+ }],
+ 'cause_no_error_no_data_for_balancer_a_record':
+ False,
+ }
+ all_configs.append(config)
+ return all_configs
+
+
+all_scenarios += generate_client_falls_back_because_balancer_connection_broken()
+
+
+def generate_client_referred_to_backend_multiple_balancers():
+ all_configs = []
+ for balancer_short_stream in [True, False]:
+ for transport_sec in [
+ 'insecure', 'alts', 'tls', 'google_default_credentials'
+ ]:
+ balancer_sec, backend_sec, fallback_sec = server_sec(transport_sec)
+ skip_langs = []
+ if transport_sec == 'tls':
+ skip_langs += ['java']
+ if balancer_short_stream:
+ skip_langs += ['java']
+ config = {
+ 'name':
+ 'client_referred_to_backend_multiple_balancers_%s_short_stream_%s'
+ % (transport_sec, balancer_short_stream),
+ 'skip_langs':
+ skip_langs,
+ 'transport_sec':
+ transport_sec,
+ 'balancer_configs': [
+ {
+ 'transport_sec': balancer_sec,
+ 'short_stream': balancer_short_stream,
+ },
+ {
+ 'transport_sec': balancer_sec,
+ 'short_stream': balancer_short_stream,
+ },
+ {
+ 'transport_sec': balancer_sec,
+ 'short_stream': balancer_short_stream,
+ },
+ {
+ 'transport_sec': balancer_sec,
+ 'short_stream': balancer_short_stream,
+ },
+ {
+ 'transport_sec': balancer_sec,
+ 'short_stream': balancer_short_stream,
+ },
+ ],
+ 'backend_configs': [
+ {
+ 'transport_sec': backend_sec,
+ },
+ ],
+ 'fallback_configs': [],
+ 'cause_no_error_no_data_for_balancer_a_record':
+ False,
+ }
+ all_configs.append(config)
+ return all_configs
+
+
+all_scenarios += generate_client_referred_to_backend_multiple_balancers()
+
+print(yaml.dump({
+ 'lb_interop_test_scenarios': all_scenarios,
+}))
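
The script writes a single YAML document with one `lb_interop_test_scenarios` key to stdout, presumably consumed by the buildgen tooling. A hedged sketch of capturing and inspecting that output; the invocation is illustrative, not a documented entry point:

```python
import subprocess
import yaml

# Illustrative invocation; the path matches the new file added above.
SCRIPT = "tools/run_tests/lb_interop_tests/gen_build_yaml.py"


def load_scenarios():
    """Run the generator and parse the YAML it prints to stdout."""
    out = subprocess.check_output(["python", SCRIPT])
    return yaml.safe_load(out)["lb_interop_test_scenarios"]


if __name__ == "__main__":
    scenarios = load_scenarios()
    skipped_for_java = sum(1 for s in scenarios if "java" in s["skip_langs"])
    print("%d scenarios, %d skipped for java" % (len(scenarios), skipped_for_java))
```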
diff --git a/tools/run_tests/performance/build_performance.sh b/tools/run_tests/performance/build_performance.sh
index 9e6e72d97b..ab6bffdc34 100755
--- a/tools/run_tests/performance/build_performance.sh
+++ b/tools/run_tests/performance/build_performance.sh
@@ -25,10 +25,16 @@ CONFIG=${CONFIG:-opt}
# TODO(jtattermusch): C++ worker and driver are not buildable on Windows yet
if [ "$OSTYPE" != "msys" ]
then
- # TODO(jtattermusch): not embedding OpenSSL breaks the C# build because
- # grpc_csharp_ext needs OpenSSL embedded and some intermediate files from
- # this build will be reused.
- make CONFIG="${CONFIG}" EMBED_OPENSSL=true EMBED_ZLIB=true qps_worker qps_json_driver -j8
+ # Build C++ with cmake, because building with "make" disables the boringssl
+ # assembly optimizations that can have a huge impact on secure channel throughput.
+ mkdir -p cmake/build
+ cd cmake/build
+ cmake -DgRPC_BUILD_TESTS=ON -DCMAKE_BUILD_TYPE=Release ../..
+ make qps_worker qps_json_driver -j8
+ cd ../..
+ # unbreak subsequent make builds by restoring zconf.h (previously renamed by cmake build)
+ # See https://github.com/grpc/grpc/issues/11581
+ (cd third_party/zlib; git checkout zconf.h)
fi
PHP_ALREADY_BUILT=""
diff --git a/tools/run_tests/performance/run_qps_driver.sh b/tools/run_tests/performance/run_qps_driver.sh
index 2d9e310dec..47a03db026 100755
--- a/tools/run_tests/performance/run_qps_driver.sh
+++ b/tools/run_tests/performance/run_qps_driver.sh
@@ -17,7 +17,7 @@ set -ex
cd "$(dirname "$0")/../../.."
-bins/opt/qps_json_driver "$@"
+cmake/build/qps_json_driver "$@"
if [ "$BQ_RESULT_TABLE" != "" ]
then
diff --git a/tools/run_tests/performance/scenario_config.py b/tools/run_tests/performance/scenario_config.py
index 2e78bd07fb..481918c52e 100644
--- a/tools/run_tests/performance/scenario_config.py
+++ b/tools/run_tests/performance/scenario_config.py
@@ -231,7 +231,7 @@ class CXXLanguage:
self.safename = 'cxx'
def worker_cmdline(self):
- return ['bins/opt/qps_worker']
+ return ['cmake/build/qps_worker']
def worker_port_offset(self):
return 0
@@ -250,7 +250,7 @@ class CXXLanguage:
channels=1,
num_clients=1,
secure=False,
- categories=[SMOKETEST] + [INPROC] + [SCALABLE])
+ categories=[INPROC] + [SCALABLE])
yield _ping_pong_scenario(
'cpp_protobuf_async_streaming_from_client_1channel_1MB',
@@ -280,12 +280,12 @@ class CXXLanguage:
secure=False,
async_server_threads=16,
server_threads_per_cq=1,
- categories=[SMOKETEST] + [SCALABLE])
+ categories=[SCALABLE])
for secure in [True, False]:
secstr = 'secure' if secure else 'insecure'
- smoketest_categories = ([SMOKETEST]
- if secure else [INPROC]) + [SCALABLE]
+ smoketest_categories = ([SMOKETEST] if secure else [])
+ inproc_categories = ([INPROC] if not secure else [])
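+ # Secure variants can be tagged SMOKETEST and insecure ones INPROC; the
+ # scenarios below mix these lists with SCALABLE as appropriate.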
yield _ping_pong_scenario(
'cpp_generic_async_streaming_ping_pong_%s' % secstr,
@@ -295,7 +295,8 @@ class CXXLanguage:
use_generic_payload=True,
async_server_threads=1,
secure=secure,
- categories=smoketest_categories)
+ categories=smoketest_categories + inproc_categories +
+ [SCALABLE])
yield _ping_pong_scenario(
'cpp_generic_async_streaming_qps_unconstrained_%s' % secstr,
@@ -306,7 +307,8 @@ class CXXLanguage:
use_generic_payload=True,
secure=secure,
minimal_stack=not secure,
- categories=smoketest_categories + [SCALABLE])
+ categories=smoketest_categories + inproc_categories +
+ [SCALABLE])
for mps in geometric_progression(1, 20, 10):
yield _ping_pong_scenario(
@@ -320,7 +322,8 @@ class CXXLanguage:
secure=secure,
messages_per_stream=mps,
minimal_stack=not secure,
- categories=smoketest_categories + [SCALABLE])
+ categories=smoketest_categories + inproc_categories +
+ [SCALABLE])
for mps in geometric_progression(1, 200, math.sqrt(10)):
yield _ping_pong_scenario(
@@ -347,7 +350,7 @@ class CXXLanguage:
use_generic_payload=True,
secure=secure,
minimal_stack=not secure,
- categories=smoketest_categories + [SCALABLE],
+ categories=inproc_categories + [SCALABLE],
channels=1,
outstanding=100)
@@ -363,7 +366,7 @@ class CXXLanguage:
use_generic_payload=True,
secure=secure,
minimal_stack=not secure,
- categories=smoketest_categories + [SCALABLE])
+ categories=inproc_categories + [SCALABLE])
yield _ping_pong_scenario(
'cpp_generic_async_streaming_qps_unconstrained_1cq_%s' % secstr,
@@ -375,7 +378,8 @@ class CXXLanguage:
secure=secure,
client_threads_per_cq=1000000,
server_threads_per_cq=1000000,
- categories=smoketest_categories + [SCALABLE])
+ categories=smoketest_categories + inproc_categories +
+ [SCALABLE])
yield _ping_pong_scenario(
'cpp_generic_async_streaming_qps_unconstrained_2waysharedcq_%s'
@@ -388,7 +392,7 @@ class CXXLanguage:
secure=secure,
client_threads_per_cq=2,
server_threads_per_cq=2,
- categories=smoketest_categories + [SCALABLE])
+ categories=inproc_categories + [SCALABLE])
yield _ping_pong_scenario(
'cpp_protobuf_async_streaming_qps_unconstrained_1cq_%s' %
@@ -400,7 +404,7 @@ class CXXLanguage:
secure=secure,
client_threads_per_cq=1000000,
server_threads_per_cq=1000000,
- categories=smoketest_categories + [SCALABLE])
+ categories=inproc_categories + [SCALABLE])
yield _ping_pong_scenario(
'cpp_protobuf_async_streaming_qps_unconstrained_2waysharedcq_%s'
@@ -412,7 +416,7 @@ class CXXLanguage:
secure=secure,
client_threads_per_cq=2,
server_threads_per_cq=2,
- categories=smoketest_categories + [SCALABLE])
+ categories=inproc_categories + [SCALABLE])
yield _ping_pong_scenario(
'cpp_protobuf_async_unary_qps_unconstrained_1cq_%s' % secstr,
@@ -423,7 +427,8 @@ class CXXLanguage:
secure=secure,
client_threads_per_cq=1000000,
server_threads_per_cq=1000000,
- categories=smoketest_categories + [SCALABLE])
+ categories=smoketest_categories + inproc_categories +
+ [SCALABLE])
yield _ping_pong_scenario(
'cpp_protobuf_async_unary_qps_unconstrained_2waysharedcq_%s' %
@@ -435,7 +440,7 @@ class CXXLanguage:
secure=secure,
client_threads_per_cq=2,
server_threads_per_cq=2,
- categories=smoketest_categories + [SCALABLE])
+ categories=inproc_categories + [SCALABLE])
yield _ping_pong_scenario(
'cpp_generic_async_streaming_qps_one_server_core_%s' % secstr,
@@ -457,7 +462,8 @@ class CXXLanguage:
unconstrained_client='async',
secure=secure,
minimal_stack=not secure,
- categories=smoketest_categories + [SCALABLE],
+ categories=smoketest_categories + inproc_categories +
+ [SCALABLE],
excluded_poll_engines=['poll-cv'])
yield _ping_pong_scenario(
@@ -472,7 +478,7 @@ class CXXLanguage:
resp_size=8 * 1024 * 1024,
secure=secure,
minimal_stack=not secure,
- categories=smoketest_categories + [SCALABLE])
+ categories=inproc_categories + [SCALABLE])
yield _ping_pong_scenario(
'cpp_protobuf_async_client_sync_server_streaming_qps_unconstrained_%s'
@@ -483,7 +489,8 @@ class CXXLanguage:
unconstrained_client='async',
secure=secure,
minimal_stack=not secure,
- categories=smoketest_categories + [SCALABLE],
+ categories=smoketest_categories + inproc_categories +
+ [SCALABLE],
excluded_poll_engines=['poll-cv'])
yield _ping_pong_scenario(
@@ -495,7 +502,8 @@ class CXXLanguage:
resp_size=1024 * 1024,
secure=secure,
minimal_stack=not secure,
- categories=smoketest_categories + [SCALABLE])
+ categories=smoketest_categories + inproc_categories +
+ [SCALABLE])
for rpc_type in [
'unary', 'streaming', 'streaming_from_client',
@@ -538,7 +546,7 @@ class CXXLanguage:
minimal_stack=not secure,
server_threads_per_cq=3,
client_threads_per_cq=3,
- categories=smoketest_categories + [SCALABLE])
+ categories=inproc_categories + [SCALABLE])
# TODO(vjpai): Re-enable this test. It has a lot of timeouts
# and hasn't yet been conclusively identified as a test failure
@@ -565,7 +573,7 @@ class CXXLanguage:
secure=secure,
messages_per_stream=mps,
minimal_stack=not secure,
- categories=smoketest_categories + [SCALABLE])
+ categories=inproc_categories + [SCALABLE])
for mps in geometric_progression(1, 200, math.sqrt(10)):
yield _ping_pong_scenario(
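All of the scenario_config.py hunks above apply the same pattern: the old blanket smoketest_categories is split so that SMOKETEST is only attached to secure variants, INPROC only to insecure ones, and SCALABLE to both. A condensed sketch of that selection logic, using stand-in values for the category constants that scenario_config.py defines elsewhere:

# Stand-in values; scenario_config.py defines the real category constants.
SMOKETEST, INPROC, SCALABLE = 'smoketest', 'inproc', 'scalable'

for secure in (True, False):
    smoketest_categories = [SMOKETEST] if secure else []
    inproc_categories = [INPROC] if not secure else []
    # Most scenarios combine both lists with SCALABLE; a few use only one of them.
    print(secure, smoketest_categories + inproc_categories + [SCALABLE])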
diff --git a/tools/run_tests/python_utils/dockerjob.py b/tools/run_tests/python_utils/dockerjob.py
index 2d22dc13a0..5260f7b44b 100755
--- a/tools/run_tests/python_utils/dockerjob.py
+++ b/tools/run_tests/python_utils/dockerjob.py
@@ -20,6 +20,7 @@ import time
import uuid
import os
import subprocess
+import json
import jobset
@@ -54,6 +55,25 @@ def docker_mapped_port(cid, port, timeout_seconds=15):
cid))
+def docker_ip_address(cid, timeout_seconds=15):
+ """Get port mapped to internal given internal port for given container."""
+ started = time.time()
+ while time.time() - started < timeout_seconds:
+ cmd = 'docker inspect %s' % cid
+ try:
+ output = subprocess.check_output(cmd, stderr=_DEVNULL, shell=True)
+ json_info = json.loads(output)
+ assert len(json_info) == 1
+ out = json_info[0]['NetworkSettings']['IPAddress']
+ if not out:
+ continue
+ return out
+ except subprocess.CalledProcessError as e:
+ pass
+ raise Exception(
+ 'Non-retryable error: Failed to get ip address of container %s.' % cid)
+
+
def wait_for_healthy(cid, shortname, timeout_seconds):
"""Wait timeout_seconds for the container to become healthy"""
started = time.time()
@@ -74,10 +94,10 @@ def wait_for_healthy(cid, shortname, timeout_seconds):
(shortname, cid))
-def finish_jobs(jobs):
+def finish_jobs(jobs, suppress_failure=True):
"""Kills given docker containers and waits for corresponding jobs to finish"""
for job in jobs:
- job.kill(suppress_failure=True)
+ job.kill(suppress_failure=suppress_failure)
while any(job.is_running() for job in jobs):
time.sleep(1)
@@ -120,6 +140,9 @@ class DockerJob:
def mapped_port(self, port):
return docker_mapped_port(self._container_name, port)
+ def ip_address(self):
+ return docker_ip_address(self._container_name)
+
def wait_for_healthy(self, timeout_seconds):
wait_for_healthy(self._container_name, self._spec.shortname,
timeout_seconds)
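The new docker_ip_address() helper retries 'docker inspect' until the container reports a non-empty IPAddress, and DockerJob.ip_address() exposes it to callers. Stripped of the retry loop, the lookup reduces to the following sketch (assumes the container is already running; the helper name is hypothetical):

import json
import subprocess

def container_ip(cid):
    # 'docker inspect' prints a JSON array with one object per container id.
    info = json.loads(subprocess.check_output(['docker', 'inspect', cid]))
    return info[0]['NetworkSettings']['IPAddress']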
diff --git a/tools/run_tests/python_utils/upload_rbe_results.py b/tools/run_tests/python_utils/upload_rbe_results.py
index 74f329048f..3f3bd382bb 100644
--- a/tools/run_tests/python_utils/upload_rbe_results.py
+++ b/tools/run_tests/python_utils/upload_rbe_results.py
@@ -62,7 +62,7 @@ def _get_invocation_id():
bazel_id_directory = os.getenv('KOKORO_ARTIFACTS_DIR')
bazel_id_file = os.path.join(bazel_id_directory, 'bazel_invocation_ids')
assert os.path.isfile(bazel_id_file), 'bazel_invocation_ids file, written ' \
- 'by bazel_wrapper.py, expected but not found.'
+ 'by RBE initialization script, expected but not found.'
with open(bazel_id_file, 'r') as f:
return f.read().replace('\n', '')
diff --git a/tools/run_tests/run_grpclb_interop_tests.py b/tools/run_tests/run_grpclb_interop_tests.py
new file mode 100755
index 0000000000..3bfbcecf06
--- /dev/null
+++ b/tools/run_tests/run_grpclb_interop_tests.py
@@ -0,0 +1,609 @@
+#!/usr/bin/env python
+# Copyright 2015 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+"""Run interop (cross-language) tests in parallel."""
+
+from __future__ import print_function
+
+import argparse
+import atexit
+import itertools
+import json
+import multiprocessing
+import os
+import re
+import subprocess
+import sys
+import tempfile
+import time
+import uuid
+import six
+import traceback
+
+import python_utils.dockerjob as dockerjob
+import python_utils.jobset as jobset
+import python_utils.report_utils as report_utils
+
+# Docker runs can leave the terminal with echo disabled; restore it on exit.
+atexit.register(lambda: subprocess.call(['stty', 'echo']))
+
+ROOT = os.path.abspath(os.path.join(os.path.dirname(sys.argv[0]), '../..'))
+os.chdir(ROOT)
+
+_FALLBACK_SERVER_PORT = 443
+_BALANCER_SERVER_PORT = 12000
+_BACKEND_SERVER_PORT = 8080
+
+_TEST_TIMEOUT = 30
+
+_FAKE_SERVERS_SAFENAME = 'fake_servers'
+
+# Use a name that's verified by the test certs
+_SERVICE_NAME = 'server.test.google.fr'
+
+
+class CXXLanguage:
+
+ def __init__(self):
+ self.client_cwd = '/var/local/git/grpc'
+ self.safename = 'cxx'
+
+ def client_cmd(self, args):
+ return ['bins/opt/interop_client'] + args
+
+ def global_env(self):
+ # 1) Set c-ares as the resolver, to enable grpclb.
+ # 2) Turn on verbose logging.
+ # 3) Point GRPC_DEFAULT_SSL_ROOTS_FILE_PATH at the test CA so that
+ # GoogleDefaultCredentials can use the test CA.
+ return {
+ 'GRPC_DNS_RESOLVER':
+ 'ares',
+ 'GRPC_VERBOSITY':
+ 'DEBUG',
+ 'GRPC_TRACE':
+ 'client_channel,glb',
+ 'GRPC_DEFAULT_SSL_ROOTS_FILE_PATH':
+ '/var/local/git/grpc/src/core/tsi/test_creds/ca.pem',
+ }
+
+ def __str__(self):
+ return 'c++'
+
+
+class JavaLanguage:
+
+ def __init__(self):
+ self.client_cwd = '/var/local/git/grpc-java'
+ self.safename = str(self)
+
+ def client_cmd(self, args):
+ # Import our test CA into the set of CAs that the Java runtime
+ # of the docker container picks up, so that Java's
+ # GoogleDefaultCreds can use it.
+ pem_to_der_cmd = ('openssl x509 -outform der '
+ '-in /external_mount/src/core/tsi/test_creds/ca.pem '
+ '-out /tmp/test_ca.der')
+ keystore_import_cmd = (
+ 'keytool -import '
+ '-keystore /usr/lib/jvm/java-8-oracle/jre/lib/security/cacerts '
+ '-file /tmp/test_ca.der '
+ '-deststorepass changeit '
+ '-noprompt')
+ return [
+ 'bash', '-c', ('{pem_to_der_cmd} && '
+ '{keystore_import_cmd} && '
+ './run-test-client.sh {java_client_args}').format(
+ pem_to_der_cmd=pem_to_der_cmd,
+ keystore_import_cmd=keystore_import_cmd,
+ java_client_args=' '.join(args))
+ ]
+
+ def global_env(self):
+ # 1) Enable grpclb
+ # 2) Enable verbose logging
+ return {
+ 'JAVA_OPTS':
+ ('-Dio.grpc.internal.DnsNameResolverProvider.enable_grpclb=true '
+ '-Djava.util.logging.config.file=/var/local/grpc_java_logging/logconf.txt'
+ )
+ }
+
+ def __str__(self):
+ return 'java'
+
+
+class GoLanguage:
+
+ def __init__(self):
+ self.client_cwd = '/go/src/google.golang.org/grpc/interop/client'
+ self.safename = str(self)
+
+ def client_cmd(self, args):
+ # Copy the test CA file into the path that
+ # the Go runtime in the docker container will use, so
+ # that Go's GoogleDefaultCredentials can use it.
+ # See https://golang.org/src/crypto/x509/root_linux.go.
+ return [
+ 'bash', '-c', ('cp /external_mount/src/core/tsi/test_creds/ca.pem '
+ '/etc/ssl/certs/ca-certificates.crt && '
+ '/go/bin/client {go_client_args}'
+ ).format(go_client_args=' '.join(args))
+ ]
+
+ def global_env(self):
+ return {
+ 'GRPC_GO_LOG_VERBOSITY_LEVEL': '3',
+ 'GRPC_GO_LOG_SEVERITY_LEVEL': 'INFO'
+ }
+
+ def __str__(self):
+ return 'go'
+
+
+_LANGUAGES = {
+ 'c++': CXXLanguage(),
+ 'go': GoLanguage(),
+ 'java': JavaLanguage(),
+}
+
+
+def docker_run_cmdline(cmdline, image, docker_args, cwd, environ=None):
+ """Wraps given cmdline array to create 'docker run' cmdline from it."""
+ # turn environ into -e docker args
+ docker_cmdline = 'docker run -i --rm=true'.split()
+ if environ:
+ for k, v in environ.items():
+ docker_cmdline += ['-e', '%s=%s' % (k, v)]
+ return docker_cmdline + ['-w', cwd] + docker_args + [image] + cmdline
+
+
+def _job_kill_handler(job):
+ assert job._spec.container_name
+ dockerjob.docker_kill(job._spec.container_name)
+
+
+def transport_security_to_args(transport_security):
+ args = []
+ if transport_security == 'tls':
+ args += ['--use_tls=true']
+ elif transport_security == 'alts':
+ args += ['--use_tls=false', '--use_alts=true']
+ elif transport_security == 'insecure':
+ args += ['--use_tls=false']
+ elif transport_security == 'google_default_credentials':
+ args += ['--custom_credentials_type=google_default_credentials']
+ else:
+ print('Invalid transport security option.')
+ sys.exit(1)
+ return args
+
+
+def lb_client_interop_jobspec(language,
+ dns_server_ip,
+ docker_image,
+ transport_security='tls'):
+ """Runs a gRPC client under test in a docker container"""
+ interop_only_options = [
+ '--server_host=%s' % _SERVICE_NAME,
+ '--server_port=%d' % _FALLBACK_SERVER_PORT
+ ] + transport_security_to_args(transport_security)
+ # Don't set the server host override in any client;
+ # Go and Java default to no override.
+ # We're using a DNS server so there's no need.
+ if language.safename == 'c++':
+ interop_only_options += ['--server_host_override=""']
+ # Don't set --use_test_ca; we're configuring
+ # clients to use test CAs via alternate means.
+ interop_only_options += ['--use_test_ca=false']
+ client_args = language.client_cmd(interop_only_options)
+ container_name = dockerjob.random_name(
+ 'lb_interop_client_%s' % language.safename)
+ docker_cmdline = docker_run_cmdline(
+ client_args,
+ environ=language.global_env(),
+ image=docker_image,
+ cwd=language.client_cwd,
+ docker_args=[
+ '--dns=%s' % dns_server_ip,
+ '--net=host',
+ '--name=%s' % container_name,
+ '-v',
+ '{grpc_grpc_root_dir}:/external_mount:ro'.format(
+ grpc_grpc_root_dir=ROOT),
+ ])
+ jobset.message(
+ 'IDLE',
+ 'docker_cmdline:\b|%s|' % ' '.join(docker_cmdline),
+ do_newline=True)
+ test_job = jobset.JobSpec(
+ cmdline=docker_cmdline,
+ shortname=('lb_interop_client:%s' % language),
+ timeout_seconds=_TEST_TIMEOUT,
+ kill_handler=_job_kill_handler)
+ test_job.container_name = container_name
+ return test_job
+
+
+def fallback_server_jobspec(transport_security, shortname):
+ """Create jobspec for running a fallback server"""
+ cmdline = [
+ 'bin/server',
+ '--port=%d' % _FALLBACK_SERVER_PORT,
+ ] + transport_security_to_args(transport_security)
+ return grpc_server_in_docker_jobspec(
+ server_cmdline=cmdline, shortname=shortname)
+
+
+def backend_server_jobspec(transport_security, shortname):
+ """Create jobspec for running a backend server"""
+ cmdline = [
+ 'bin/server',
+ '--port=%d' % _BACKEND_SERVER_PORT,
+ ] + transport_security_to_args(transport_security)
+ return grpc_server_in_docker_jobspec(
+ server_cmdline=cmdline, shortname=shortname)
+
+
+def grpclb_jobspec(transport_security, short_stream, backend_addrs, shortname):
+ """Create jobspec for running a balancer server"""
+ cmdline = [
+ 'bin/fake_grpclb',
+ '--backend_addrs=%s' % ','.join(backend_addrs),
+ '--port=%d' % _BALANCER_SERVER_PORT,
+ '--short_stream=%s' % short_stream,
+ '--service_name=%s' % _SERVICE_NAME,
+ ] + transport_security_to_args(transport_security)
+ return grpc_server_in_docker_jobspec(
+ server_cmdline=cmdline, shortname=shortname)
+
+
+def grpc_server_in_docker_jobspec(server_cmdline, shortname):
+ container_name = dockerjob.random_name(shortname)
+ environ = {
+ 'GRPC_GO_LOG_VERBOSITY_LEVEL': '3',
+ 'GRPC_GO_LOG_SEVERITY_LEVEL': 'INFO',
+ }
+ docker_cmdline = docker_run_cmdline(
+ server_cmdline,
+ cwd='/go',
+ image=docker_images.get(_FAKE_SERVERS_SAFENAME),
+ environ=environ,
+ docker_args=['--name=%s' % container_name])
+ jobset.message(
+ 'IDLE',
+ 'docker_cmdline:\b|%s|' % ' '.join(docker_cmdline),
+ do_newline=True)
+ server_job = jobset.JobSpec(
+ cmdline=docker_cmdline, shortname=shortname, timeout_seconds=30 * 60)
+ server_job.container_name = container_name
+ return server_job
+
+
+def dns_server_in_docker_jobspec(grpclb_ips, fallback_ips, shortname,
+ cause_no_error_no_data_for_balancer_a_record):
+ container_name = dockerjob.random_name(shortname)
+ run_dns_server_cmdline = [
+ 'python',
+ 'test/cpp/naming/utils/run_dns_server_for_lb_interop_tests.py',
+ '--grpclb_ips=%s' % ','.join(grpclb_ips),
+ '--fallback_ips=%s' % ','.join(fallback_ips),
+ ]
+ if cause_no_error_no_data_for_balancer_a_record:
+ run_dns_server_cmdline.append(
+ '--cause_no_error_no_data_for_balancer_a_record')
+ docker_cmdline = docker_run_cmdline(
+ run_dns_server_cmdline,
+ cwd='/var/local/git/grpc',
+ image=docker_images.get(_FAKE_SERVERS_SAFENAME),
+ docker_args=['--name=%s' % container_name])
+ jobset.message(
+ 'IDLE',
+ 'docker_cmdline:\b|%s|' % ' '.join(docker_cmdline),
+ do_newline=True)
+ server_job = jobset.JobSpec(
+ cmdline=docker_cmdline, shortname=shortname, timeout_seconds=30 * 60)
+ server_job.container_name = container_name
+ return server_job
+
+
+def build_interop_image_jobspec(lang_safename, basename_prefix='grpc_interop'):
+ """Creates jobspec for building interop docker image for a language"""
+ tag = '%s_%s:%s' % (basename_prefix, lang_safename, uuid.uuid4())
+ env = {
+ 'INTEROP_IMAGE': tag,
+ 'BASE_NAME': '%s_%s' % (basename_prefix, lang_safename),
+ }
+ build_job = jobset.JobSpec(
+ cmdline=['tools/run_tests/dockerize/build_interop_image.sh'],
+ environ=env,
+ shortname='build_docker_%s' % lang_safename,
+ timeout_seconds=30 * 60)
+ build_job.tag = tag
+ return build_job
+
+
+argp = argparse.ArgumentParser(description='Run interop tests.')
+argp.add_argument(
+ '-l',
+ '--language',
+ choices=['all'] + sorted(_LANGUAGES),
+ nargs='+',
+ default=['all'],
+ help='Clients to run.')
+argp.add_argument('-j', '--jobs', default=multiprocessing.cpu_count(), type=int)
+argp.add_argument(
+ '-s',
+ '--scenarios_file',
+ default=None,
+ type=str,
+ help='File containing test scenarios as JSON configs.')
+argp.add_argument(
+ '-n',
+ '--scenario_name',
+ default=None,
+ type=str,
+ help=(
+ 'Useful for manual runs: specify the name of '
+ 'the scenario to run from scenarios_file. Run all scenarios if unset.'))
+argp.add_argument(
+ '--cxx_image_tag',
+ default=None,
+ type=str,
+ help=('Setting this skips the client docker image '
+ 'build step and runs the client from the named '
+ 'image. Only supports running one client language.'))
+argp.add_argument(
+ '--go_image_tag',
+ default=None,
+ type=str,
+ help=('Setting this skips the client docker image build '
+ 'step and runs the client from the named image. Only '
+ 'supports running one client language.'))
+argp.add_argument(
+ '--java_image_tag',
+ default=None,
+ type=str,
+ help=('Setting this skips the client docker image build '
+ 'step and runs the client from the named image. Only '
+ 'supports running one client language.'))
+argp.add_argument(
+ '--servers_image_tag',
+ default=None,
+ type=str,
+ help=('Setting this skips the fake servers docker image '
+ 'build step and runs the servers from the named image.'))
+argp.add_argument(
+ '--no_skips',
+ default=False,
+ type=bool,
+ nargs='?',
+ const=True,
+ help=('Useful for manual runs. Setting this overrides test '
+ '"skips" configured in test scenarios.'))
+argp.add_argument(
+ '--verbose',
+ default=False,
+ type=bool,
+ nargs='?',
+ const=True,
+ help='Increase logging.')
+args = argp.parse_args()
+
+docker_images = {}
+
+build_jobs = []
+if len(args.language) and args.language[0] == 'all':
+ languages = _LANGUAGES.keys()
+else:
+ languages = args.language
+for lang_name in languages:
+ l = _LANGUAGES[lang_name]
+ # First check if a pre-built image was supplied, and avoid
+ # rebuilding the particular docker image if so.
+ if lang_name == 'c++' and args.cxx_image_tag:
+ docker_images[str(l.safename)] = args.cxx_image_tag
+ elif lang_name == 'go' and args.go_image_tag:
+ docker_images[str(l.safename)] = args.go_image_tag
+ elif lang_name == 'java' and args.java_image_tag:
+ docker_images[str(l.safename)] = args.java_image_tag
+ else:
+ # Build the test client in docker and save the fully
+ # built image.
+ job = build_interop_image_jobspec(l.safename)
+ build_jobs.append(job)
+ docker_images[str(l.safename)] = job.tag
+
+# First check if a pre-built image was supplied.
+if args.servers_image_tag:
+ docker_images[_FAKE_SERVERS_SAFENAME] = args.servers_image_tag
+else:
+ # Build the test servers in docker and save the fully
+ # built image.
+ job = build_interop_image_jobspec(
+ _FAKE_SERVERS_SAFENAME, basename_prefix='lb_interop')
+ build_jobs.append(job)
+ docker_images[_FAKE_SERVERS_SAFENAME] = job.tag
+
+if build_jobs:
+ jobset.message('START', 'Building interop docker images.', do_newline=True)
+ print('Jobs to run: \n%s\n' % '\n'.join(str(j) for j in build_jobs))
+ num_failures, _ = jobset.run(
+ build_jobs, newline_on_success=True, maxjobs=args.jobs)
+ if num_failures == 0:
+ jobset.message(
+ 'SUCCESS', 'All docker images built successfully.', do_newline=True)
+ else:
+ jobset.message(
+ 'FAILED', 'Failed to build interop docker images.', do_newline=True)
+ sys.exit(1)
+
+
+def wait_until_dns_server_is_up(dns_server_ip):
+ """Probes the DNS server until it's running and safe for tests."""
+ for i in range(0, 30):
+ print('Health check: attempt to connect to DNS server over TCP.')
+ tcp_connect_subprocess = subprocess.Popen([
+ os.path.join(os.getcwd(), 'test/cpp/naming/utils/tcp_connect.py'),
+ '--server_host', dns_server_ip, '--server_port',
+ str(53), '--timeout',
+ str(1)
+ ])
+ tcp_connect_subprocess.communicate()
+ if tcp_connect_subprocess.returncode == 0:
+ print(('Health check: attempt to make an A-record '
+ 'query to DNS server.'))
+ dns_resolver_subprocess = subprocess.Popen(
+ [
+ os.path.join(os.getcwd(),
+ 'test/cpp/naming/utils/dns_resolver.py'),
+ '--qname', ('health-check-local-dns-server-is-alive.'
+ 'resolver-tests.grpctestingexp'),
+ '--server_host', dns_server_ip, '--server_port',
+ str(53)
+ ],
+ stdout=subprocess.PIPE)
+ dns_resolver_stdout, _ = dns_resolver_subprocess.communicate()
+ if dns_resolver_subprocess.returncode == 0:
+ if '123.123.123.123' in dns_resolver_stdout:
+ print(('DNS server is up! '
+ 'Successfully reached it over UDP and TCP.'))
+ return
+ time.sleep(0.1)
+ raise Exception(('Failed to reach DNS server over TCP and/or UDP. '
+ 'Exiting without running tests.'))
+
+
+def shortname(shortname_prefix, shortname, index):
+ return '%s_%s_%d' % (shortname_prefix, shortname, index)
+
+
+def run_one_scenario(scenario_config):
+ jobset.message('START', 'Run scenario: %s' % scenario_config['name'])
+ server_jobs = {}
+ server_addresses = {}
+ suppress_server_logs = True
+ try:
+ backend_addrs = []
+ fallback_ips = []
+ grpclb_ips = []
+ shortname_prefix = scenario_config['name']
+ # Start backends
+ for i in xrange(len(scenario_config['backend_configs'])):
+ backend_config = scenario_config['backend_configs'][i]
+ backend_shortname = shortname(shortname_prefix, 'backend_server', i)
+ backend_spec = backend_server_jobspec(
+ backend_config['transport_sec'], backend_shortname)
+ backend_job = dockerjob.DockerJob(backend_spec)
+ server_jobs[backend_shortname] = backend_job
+ backend_addrs.append('%s:%d' % (backend_job.ip_address(),
+ _BACKEND_SERVER_PORT))
+ # Start fallbacks
+ for i in xrange(len(scenario_config['fallback_configs'])):
+ fallback_config = scenario_config['fallback_configs'][i]
+ fallback_shortname = shortname(shortname_prefix, 'fallback_server',
+ i)
+ fallback_spec = fallback_server_jobspec(
+ fallback_config['transport_sec'], fallback_shortname)
+ fallback_job = dockerjob.DockerJob(fallback_spec)
+ server_jobs[fallback_shortname] = fallback_job
+ fallback_ips.append(fallback_job.ip_address())
+ # Start balancers
+ for i in xrange(len(scenario_config['balancer_configs'])):
+ balancer_config = scenario_config['balancer_configs'][i]
+ grpclb_shortname = shortname(shortname_prefix, 'grpclb_server', i)
+ grpclb_spec = grpclb_jobspec(balancer_config['transport_sec'],
+ balancer_config['short_stream'],
+ backend_addrs, grpclb_shortname)
+ grpclb_job = dockerjob.DockerJob(grpclb_spec)
+ server_jobs[grpclb_shortname] = grpclb_job
+ grpclb_ips.append(grpclb_job.ip_address())
+ # Start DNS server
+ dns_server_shortname = shortname(shortname_prefix, 'dns_server', 0)
+ dns_server_spec = dns_server_in_docker_jobspec(
+ grpclb_ips, fallback_ips, dns_server_shortname,
+ scenario_config['cause_no_error_no_data_for_balancer_a_record'])
+ dns_server_job = dockerjob.DockerJob(dns_server_spec)
+ server_jobs[dns_server_shortname] = dns_server_job
+ # Get the IP address of the docker container running the DNS server.
+ # The DNS server is running on port 53 of that IP address. Note we will
+ # point the DNS resolvers of grpc clients under test to our controlled
+ # DNS server by effectively modifying the /etc/resolve.conf "nameserver"
+ # lists of their docker containers.
+ dns_server_ip = dns_server_job.ip_address()
+ wait_until_dns_server_is_up(dns_server_ip)
+ # Run clients
+ jobs = []
+ for lang_name in languages:
+ # Skip languages that are known not to
+ # work for this test yet.
+ if not args.no_skips and lang_name in scenario_config.get(
+ 'skip_langs', []):
+ jobset.message('IDLE',
+ 'Skipping scenario: %s for language: %s\n' %
+ (scenario_config['name'], lang_name))
+ continue
+ lang = _LANGUAGES[lang_name]
+ test_job = lb_client_interop_jobspec(
+ lang,
+ dns_server_ip,
+ docker_image=docker_images.get(lang.safename),
+ transport_security=scenario_config['transport_sec'])
+ jobs.append(test_job)
+ jobset.message('IDLE', 'Jobs to run: \n%s\n' % '\n'.join(
+ str(job) for job in jobs))
+ num_failures, resultset = jobset.run(
+ jobs, newline_on_success=True, maxjobs=args.jobs)
+ report_utils.render_junit_xml_report(resultset, 'sponge_log.xml')
+ if num_failures:
+ suppress_server_logs = False
+ jobset.message(
+ 'FAILED',
+ 'Scenario: %s. Some tests failed' % scenario_config['name'],
+ do_newline=True)
+ else:
+ jobset.message(
+ 'SUCCESS',
+ 'Scenario: %s. All tests passed' % scenario_config['name'],
+ do_newline=True)
+ return num_failures
+ finally:
+ # Check if servers are still running.
+ for server, job in server_jobs.items():
+ if not job.is_running():
+ print('Server "%s" has exited prematurely.' % server)
+ suppress_failure = suppress_server_logs and not args.verbose
+ dockerjob.finish_jobs(
+ [j for j in six.itervalues(server_jobs)],
+ suppress_failure=suppress_failure)
+
+
+num_failures = 0
+with open(args.scenarios_file, 'r') as scenarios_input:
+ all_scenarios = json.loads(scenarios_input.read())
+ for scenario in all_scenarios:
+ if args.scenario_name:
+ if args.scenario_name != scenario['name']:
+ jobset.message('IDLE',
+ 'Skipping scenario: %s' % scenario['name'])
+ continue
+ num_failures += run_one_scenario(scenario)
+if num_failures == 0:
+ sys.exit(0)
+else:
+ sys.exit(1)
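For reference, run_one_scenario() above reads the fields below from each entry in --scenarios_file. The values are placeholders that only show the expected shape; the real scenarios come from the generator earlier in this change:

# Placeholder scenario, not one shipped with the repo.
example_scenario = {
    'name': 'example_tls_balancer',
    'transport_sec': 'tls',  # handed to the client under test
    'skip_langs': [],  # client languages to skip for this scenario
    'balancer_configs': [{'transport_sec': 'tls', 'short_stream': False}],
    'backend_configs': [{'transport_sec': 'insecure'}],
    'fallback_configs': [],
    'cause_no_error_no_data_for_balancer_a_record': False,
}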
diff --git a/tools/run_tests/run_interop_tests.py b/tools/run_tests/run_interop_tests.py
index 7ec02b707e..5722a88182 100755
--- a/tools/run_tests/run_interop_tests.py
+++ b/tools/run_tests/run_interop_tests.py
@@ -688,6 +688,10 @@ def write_cmdlog_maybe(cmdlog, filename):
if cmdlog:
with open(filename, 'w') as logfile:
logfile.write('#!/bin/bash\n')
+ logfile.write('# DO NOT MODIFY\n')
+ logfile.write(
+ '# This file is generated by run_interop_tests.py/create_testcases.sh\n'
+ )
logfile.writelines("%s\n" % line for line in cmdlog)
print('Command log written to file %s' % filename)
@@ -1378,8 +1382,7 @@ try:
transport_security='tls')
jobs.append(tls_test_job)
if str(language) in [
- 'c++',
- 'go',
+ 'go'
]: # Add more languages to the list to turn on tests.
google_default_creds_test_job = cloud_to_prod_jobspec(
language,
diff --git a/tools/run_tests/run_performance_tests.py b/tools/run_tests/run_performance_tests.py
index a746d531a1..c6e67eaf56 100755
--- a/tools/run_tests/run_performance_tests.py
+++ b/tools/run_tests/run_performance_tests.py
@@ -41,6 +41,11 @@ os.chdir(_ROOT)
_REMOTE_HOST_USERNAME = 'jenkins'
+_SCENARIO_TIMEOUT = 3 * 60
+_WORKER_TIMEOUT = 3 * 60
+_NETPERF_TIMEOUT = 60
+_QUIT_WORKER_TIMEOUT = 2 * 60
+
class QpsWorkerJob:
"""Encapsulates a qps worker server job."""
@@ -85,7 +90,7 @@ def create_qpsworker_job(language,
cmdline = perf_cmd + ['-o', '%s-perf.data' % perf_file_base_name
] + cmdline
- worker_timeout = 3 * 60
+ worker_timeout = _WORKER_TIMEOUT
if remote_host:
user_at_host = '%s@%s' % (_REMOTE_HOST_USERNAME, remote_host)
ssh_cmd = ['ssh']
@@ -131,7 +136,7 @@ def create_scenario_jobspec(scenario_json,
return jobset.JobSpec(
cmdline=[cmd],
shortname='qps_json_driver.%s' % scenario_json['name'],
- timeout_seconds=12 * 60,
+ timeout_seconds=_SCENARIO_TIMEOUT,
shell=True,
verbose_success=True)
@@ -139,7 +144,7 @@ def create_scenario_jobspec(scenario_json,
def create_quit_jobspec(workers, remote_host=None):
"""Runs quit using QPS driver."""
# setting QPS_WORKERS env variable here makes sure it works with SSH too.
- cmd = 'QPS_WORKERS="%s" bins/opt/qps_json_driver --quit' % ','.join(
+ cmd = 'QPS_WORKERS="%s" cmake/build/qps_json_driver --quit' % ','.join(
w.host_and_port for w in workers)
if remote_host:
user_at_host = '%s@%s' % (_REMOTE_HOST_USERNAME, remote_host)
@@ -149,7 +154,7 @@ def create_quit_jobspec(workers, remote_host=None):
return jobset.JobSpec(
cmdline=[cmd],
shortname='qps_json_driver.quit',
- timeout_seconds=3 * 60,
+ timeout_seconds=_QUIT_WORKER_TIMEOUT,
shell=True,
verbose_success=True)
@@ -181,7 +186,7 @@ def create_netperf_jobspec(server_host='localhost',
return jobset.JobSpec(
cmdline=[cmd],
shortname='netperf',
- timeout_seconds=60,
+ timeout_seconds=_NETPERF_TIMEOUT,
shell=True,
verbose_success=True)
diff --git a/tools/run_tests/run_tests.py b/tools/run_tests/run_tests.py
index c9b4c8b28b..44151f49fb 100755
--- a/tools/run_tests/run_tests.py
+++ b/tools/run_tests/run_tests.py
@@ -759,8 +759,10 @@ class PythonLanguage(object):
self.python_manager_name(), _docker_arch_suffix(self.args.arch))
def python_manager_name(self):
- if self.args.compiler in ['python3.5', 'python3.6']:
- return 'pyenv'
+ if self.args.compiler in [
+ 'python2.7', 'python3.5', 'python3.6', 'python3.7'
+ ]:
+ return 'stretch_' + self.args.compiler[len('python'):]
elif self.args.compiler == 'python_alpine':
return 'alpine'
else:
@@ -825,6 +827,12 @@ class PythonLanguage(object):
minor='6',
bits=bits,
config_vars=config_vars)
+ python37_config = _python_config_generator(
+ name='py37',
+ major='3',
+ minor='7',
+ bits=bits,
+ config_vars=config_vars)
pypy27_config = _pypy_config_generator(
name='pypy', major='2', config_vars=config_vars)
pypy32_config = _pypy_config_generator(
@@ -846,6 +854,8 @@ class PythonLanguage(object):
return (python35_config,)
elif args.compiler == 'python3.6':
return (python36_config,)
+ elif args.compiler == 'python3.7':
+ return (python37_config,)
elif args.compiler == 'pypy':
return (pypy27_config,)
elif args.compiler == 'pypy3':
@@ -858,6 +868,7 @@ class PythonLanguage(object):
python34_config,
python35_config,
python36_config,
+ python37_config,
)
else:
raise Exception('Compiler %s not supported.' % args.compiler)
@@ -1360,9 +1371,9 @@ argp.add_argument(
choices=[
'default', 'gcc4.4', 'gcc4.6', 'gcc4.8', 'gcc4.9', 'gcc5.3', 'gcc7.2',
'gcc_musl', 'clang3.4', 'clang3.5', 'clang3.6', 'clang3.7', 'clang7.0',
- 'python2.7', 'python3.4', 'python3.5', 'python3.6', 'pypy', 'pypy3',
- 'python_alpine', 'all_the_cpythons', 'electron1.3', 'electron1.6',
- 'coreclr', 'cmake', 'cmake_vs2015', 'cmake_vs2017'
+ 'python2.7', 'python3.4', 'python3.5', 'python3.6', 'python3.7', 'pypy',
+ 'pypy3', 'python_alpine', 'all_the_cpythons', 'electron1.3',
+ 'electron1.6', 'coreclr', 'cmake', 'cmake_vs2015', 'cmake_vs2017'
],
default='default',
help=
@@ -1506,7 +1517,7 @@ else:
lang_list = args.language
# We don't support code coverage on some languages
if 'gcov' in args.config:
- for bad in ['grpc-node', 'objc', 'sanity']:
+ for bad in ['csharp', 'grpc-node', 'objc', 'sanity']:
if bad in lang_list:
lang_list.remove(bad)
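The run_tests.py hunk maps each CPython compiler choice onto a 'stretch_X.Y' docker manager name by stripping the 'python' prefix. A quick sanity check of that mapping (the printed names are only the manager suffixes, not full image tags):

for compiler in ('python2.7', 'python3.5', 'python3.6', 'python3.7'):
    # Mirrors python_manager_name() above: 'pythonX.Y' -> 'stretch_X.Y'.
    print(compiler, '->', 'stretch_' + compiler[len('python'):])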
diff --git a/tools/run_tests/sanity/check_submodules.sh b/tools/run_tests/sanity/check_submodules.sh
index 2c7c140716..8ea53dfec5 100755
--- a/tools/run_tests/sanity/check_submodules.sh
+++ b/tools/run_tests/sanity/check_submodules.sh
@@ -30,7 +30,7 @@ cat << EOF | awk '{ print $1 }' | sort > "$want_submodules"
5b7683f49e1e9223cf9927b24f6fd3d6bd82e3f8 third_party/benchmark (v1.2.0)
73594cde8c9a52a102c4341c244c833aa61b9c06 third_party/bloaty (remotes/origin/wide-14-g73594cd)
b29b21a81b32ec273f118f589f46d56ad3332420 third_party/boringssl (remotes/origin/chromium-stable)
- 8149b351bf797bd80e063787886b7618f508e451 third_party/boringssl-with-bazel (version_for_cocoapods_10.0-434-g8149b351)
+ afc30d43eef92979b05776ec0963c9cede5fb80f third_party/boringssl-with-bazel (fips-20180716-116-gafc30d43e)
3be1924221e1326df520f8498d704a5c4c8d0cce third_party/cares/cares (cares-1_13_0)
30dbc81fb5ffdc98ea9b14b1918bfe4e8779b26e third_party/gflags (v2.2.0-5-g30dbc81)
ec44c6c1675c25b9827aacd08c02433cccde7780 third_party/googletest (release-1.8.0)
diff --git a/tools/run_tests/sanity/core_banned_functions.py b/tools/run_tests/sanity/core_banned_functions.py
index 8afd826453..2a5dcda5be 100755
--- a/tools/run_tests/sanity/core_banned_functions.py
+++ b/tools/run_tests/sanity/core_banned_functions.py
@@ -24,7 +24,8 @@ os.chdir(os.path.join(os.path.dirname(sys.argv[0]), '../../..'))
# map of banned function signature to whitelist
BANNED_EXCEPT = {
'grpc_resource_quota_ref(': ['src/core/lib/iomgr/resource_quota.cc'],
- 'grpc_resource_quota_unref(': ['src/core/lib/iomgr/resource_quota.cc'],
+ 'grpc_resource_quota_unref(':
+ ['src/core/lib/iomgr/resource_quota.cc', 'src/core/lib/surface/server.cc'],
'grpc_slice_buffer_destroy(': ['src/core/lib/slice/slice_buffer.cc'],
'grpc_slice_buffer_reset_and_unref(':
['src/core/lib/slice/slice_buffer.cc'],