path: root/tools
author     Craig Tiller <ctiller@google.com>   2017-09-18 08:57:19 -0700
committer  Craig Tiller <ctiller@google.com>   2017-09-18 08:57:19 -0700
commit     adbcec4f57f7e25859326a6daf8ad7e08f2805ca (patch)
tree       c029f503127946f0564134d343c8af9f88ac8da5 /tools
parent     7f10c299c7eb5d6688c20ae43c63b0548ca8acd1 (diff)
parent     a5f46e29bcee3068f5ec6691b7e06cf944430881 (diff)
Merge github.com:grpc/grpc into pollset_kick_stats
Diffstat (limited to 'tools')
-rw-r--r--  tools/buildgen/generate_build_additions.sh | 1
-rw-r--r--  tools/dockerfile/interoptest/grpc_interop_csharp/Dockerfile | 3
-rw-r--r--  tools/dockerfile/interoptest/grpc_interop_cxx/Dockerfile | 3
-rw-r--r--  tools/dockerfile/interoptest/grpc_interop_go/Dockerfile | 2
-rw-r--r--  tools/dockerfile/interoptest/grpc_interop_go1.7/Dockerfile | 2
-rw-r--r--  tools/dockerfile/interoptest/grpc_interop_go1.8/Dockerfile | 2
-rw-r--r--  tools/dockerfile/interoptest/grpc_interop_http2/Dockerfile | 2
-rw-r--r--  tools/dockerfile/interoptest/grpc_interop_java/Dockerfile | 2
-rw-r--r--  tools/dockerfile/interoptest/grpc_interop_java_oracle8/Dockerfile | 2
-rw-r--r--  tools/dockerfile/interoptest/grpc_interop_node/Dockerfile | 3
-rw-r--r--  tools/dockerfile/interoptest/grpc_interop_php/Dockerfile | 1
-rw-r--r--  tools/dockerfile/interoptest/grpc_interop_python/Dockerfile | 3
-rw-r--r--  tools/dockerfile/interoptest/grpc_interop_ruby/Dockerfile | 3
-rw-r--r--  tools/dockerfile/test/csharp_jessie_x64/Dockerfile | 3
-rw-r--r--  tools/dockerfile/test/cxx_jessie_x64/Dockerfile | 3
-rw-r--r--  tools/dockerfile/test/cxx_jessie_x86/Dockerfile | 3
-rw-r--r--  tools/dockerfile/test/cxx_ubuntu1404_x64/Dockerfile | 3
-rw-r--r--  tools/dockerfile/test/cxx_ubuntu1604_x64/Dockerfile | 3
-rw-r--r--  tools/dockerfile/test/fuzzer/Dockerfile | 3
-rw-r--r--  tools/dockerfile/test/multilang_jessie_x64/Dockerfile | 3
-rw-r--r--  tools/dockerfile/test/node_jessie_x64/Dockerfile | 3
-rw-r--r--  tools/dockerfile/test/php7_jessie_x64/Dockerfile | 2
-rw-r--r--  tools/dockerfile/test/php_jessie_x64/Dockerfile | 3
-rw-r--r--  tools/dockerfile/test/python_jessie_x64/Dockerfile | 3
-rw-r--r--  tools/dockerfile/test/python_pyenv_x64/Dockerfile | 3
-rw-r--r--  tools/dockerfile/test/ruby_jessie_x64/Dockerfile | 3
-rw-r--r--  tools/dockerfile/test/sanity/Dockerfile | 3
-rwxr-xr-x  tools/gce/create_interop_worker.sh | 3
-rwxr-xr-x  tools/gce/create_linux_performance_worker.sh | 3
-rwxr-xr-x  tools/gce/create_linux_worker.sh | 3
-rw-r--r--  tools/internal_ci/helper_scripts/prepare_build_linux_perf_rc | 15
-rw-r--r--  tools/internal_ci/helper_scripts/prepare_build_macos_rc | 9
-rw-r--r--  tools/internal_ci/helper_scripts/prepare_build_windows.bat | 8
-rwxr-xr-x  tools/internal_ci/linux/grpc_microbenchmark_diff.sh | 38
-rwxr-xr-x  tools/internal_ci/linux/grpc_run_tests_matrix.sh | 12
-rwxr-xr-x  tools/internal_ci/linux/grpc_trickle_diff.sh | 42
-rw-r--r--  tools/internal_ci/linux/pull_request/grpc_microbenchmark_diff.cfg | 25
-rw-r--r--  tools/internal_ci/linux/pull_request/grpc_trickle_diff.cfg | 25
-rwxr-xr-x  tools/internal_ci/macos/grpc_run_tests_matrix.sh | 5
-rw-r--r--  tools/internal_ci/windows/grpc_run_tests_matrix.bat | 10
-rw-r--r--  tools/profiling/microbenchmarks/bm_diff/bm_constants.py | 2
-rw-r--r--  tools/run_tests/generated/sources_and_headers.json | 80
-rw-r--r--  tools/run_tests/generated/tests.json | 46
-rw-r--r--  tools/run_tests/performance/massage_qps_stats.py | 25
-rw-r--r--  tools/run_tests/performance/scenario_result_schema.json | 250
-rw-r--r--  tools/run_tests/python_utils/filter_pull_request_tests.py | 2
-rwxr-xr-x  tools/run_tests/python_utils/jobset.py | 22
-rwxr-xr-x  tools/run_tests/run_performance_tests.py | 10
-rwxr-xr-x  tools/run_tests/run_tests.py | 6
49 files changed, 650 insertions(+), 61 deletions(-)
diff --git a/tools/buildgen/generate_build_additions.sh b/tools/buildgen/generate_build_additions.sh
index 7649145f86..693c02fdb2 100644
--- a/tools/buildgen/generate_build_additions.sh
+++ b/tools/buildgen/generate_build_additions.sh
@@ -24,6 +24,7 @@ gen_build_yaml_dirs=" \
test/core/bad_client \
test/core/bad_ssl \
test/core/end2end \
+ test/cpp/naming \
test/cpp/qps"
gen_build_files=""
for gen_build_yaml in $gen_build_yaml_dirs
diff --git a/tools/dockerfile/interoptest/grpc_interop_csharp/Dockerfile b/tools/dockerfile/interoptest/grpc_interop_csharp/Dockerfile
index 4ccfbc43c3..dbf58023c5 100644
--- a/tools/dockerfile/interoptest/grpc_interop_csharp/Dockerfile
+++ b/tools/dockerfile/interoptest/grpc_interop_csharp/Dockerfile
@@ -22,6 +22,7 @@ RUN apt-get update && apt-get install -y \
bzip2 \
ccache \
curl \
+ dnsutils \
gcc \
gcc-multilib \
git \
@@ -61,7 +62,7 @@ RUN apt-get update && apt-get install -y \
# Install Python packages from PyPI
RUN pip install pip --upgrade
RUN pip install virtualenv
-RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 twisted==17.5.0
#================
# C# dependencies
diff --git a/tools/dockerfile/interoptest/grpc_interop_cxx/Dockerfile b/tools/dockerfile/interoptest/grpc_interop_cxx/Dockerfile
index 824ecf4604..7cfe98cbc8 100644
--- a/tools/dockerfile/interoptest/grpc_interop_cxx/Dockerfile
+++ b/tools/dockerfile/interoptest/grpc_interop_cxx/Dockerfile
@@ -22,6 +22,7 @@ RUN apt-get update && apt-get install -y \
bzip2 \
ccache \
curl \
+ dnsutils \
gcc \
gcc-multilib \
git \
@@ -61,7 +62,7 @@ RUN apt-get update && apt-get install -y \
# Install Python packages from PyPI
RUN pip install pip --upgrade
RUN pip install virtualenv
-RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 twisted==17.5.0
#=================
# C++ dependencies
diff --git a/tools/dockerfile/interoptest/grpc_interop_go/Dockerfile b/tools/dockerfile/interoptest/grpc_interop_go/Dockerfile
index 5f7bedde73..febe2fa251 100644
--- a/tools/dockerfile/interoptest/grpc_interop_go/Dockerfile
+++ b/tools/dockerfile/interoptest/grpc_interop_go/Dockerfile
@@ -30,7 +30,7 @@ RUN apt-get update && apt-get install -y \
# Install Python packages from PyPI
RUN pip install pip --upgrade
RUN pip install virtualenv
-RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 twisted==17.5.0
# Define the default command.
CMD ["bash"]
diff --git a/tools/dockerfile/interoptest/grpc_interop_go1.7/Dockerfile b/tools/dockerfile/interoptest/grpc_interop_go1.7/Dockerfile
index 2bfa87c4ea..3a516cbb62 100644
--- a/tools/dockerfile/interoptest/grpc_interop_go1.7/Dockerfile
+++ b/tools/dockerfile/interoptest/grpc_interop_go1.7/Dockerfile
@@ -30,7 +30,7 @@ RUN apt-get update && apt-get install -y \
# Install Python packages from PyPI
RUN pip install pip --upgrade
RUN pip install virtualenv
-RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 twisted==17.5.0
# Define the default command.
CMD ["bash"]
diff --git a/tools/dockerfile/interoptest/grpc_interop_go1.8/Dockerfile b/tools/dockerfile/interoptest/grpc_interop_go1.8/Dockerfile
index 96391372ca..acb640a81d 100644
--- a/tools/dockerfile/interoptest/grpc_interop_go1.8/Dockerfile
+++ b/tools/dockerfile/interoptest/grpc_interop_go1.8/Dockerfile
@@ -30,7 +30,7 @@ RUN apt-get update && apt-get install -y \
# Install Python packages from PyPI
RUN pip install pip --upgrade
RUN pip install virtualenv
-RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 twisted==17.5.0
# Define the default command.
CMD ["bash"]
diff --git a/tools/dockerfile/interoptest/grpc_interop_http2/Dockerfile b/tools/dockerfile/interoptest/grpc_interop_http2/Dockerfile
index 27a924343f..354b7bfdb5 100644
--- a/tools/dockerfile/interoptest/grpc_interop_http2/Dockerfile
+++ b/tools/dockerfile/interoptest/grpc_interop_http2/Dockerfile
@@ -30,7 +30,7 @@ RUN apt-get update && apt-get install -y \
# Install Python packages from PyPI
RUN pip install pip --upgrade
RUN pip install virtualenv
-RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 twisted==17.5.0
RUN pip install twisted h2==2.6.1 hyper
diff --git a/tools/dockerfile/interoptest/grpc_interop_java/Dockerfile b/tools/dockerfile/interoptest/grpc_interop_java/Dockerfile
index 264a0f9271..92a542ff76 100644
--- a/tools/dockerfile/interoptest/grpc_interop_java/Dockerfile
+++ b/tools/dockerfile/interoptest/grpc_interop_java/Dockerfile
@@ -45,7 +45,7 @@ RUN apt-get update && apt-get install -y \
# Install Python packages from PyPI
RUN pip install pip --upgrade
RUN pip install virtualenv
-RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 twisted==17.5.0
# Trigger download of as many Gradle artifacts as possible.
diff --git a/tools/dockerfile/interoptest/grpc_interop_java_oracle8/Dockerfile b/tools/dockerfile/interoptest/grpc_interop_java_oracle8/Dockerfile
index 264a0f9271..92a542ff76 100644
--- a/tools/dockerfile/interoptest/grpc_interop_java_oracle8/Dockerfile
+++ b/tools/dockerfile/interoptest/grpc_interop_java_oracle8/Dockerfile
@@ -45,7 +45,7 @@ RUN apt-get update && apt-get install -y \
# Install Python packages from PyPI
RUN pip install pip --upgrade
RUN pip install virtualenv
-RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 twisted==17.5.0
# Trigger download of as many Gradle artifacts as possible.
diff --git a/tools/dockerfile/interoptest/grpc_interop_node/Dockerfile b/tools/dockerfile/interoptest/grpc_interop_node/Dockerfile
index c59b12d155..4343d56cb4 100644
--- a/tools/dockerfile/interoptest/grpc_interop_node/Dockerfile
+++ b/tools/dockerfile/interoptest/grpc_interop_node/Dockerfile
@@ -22,6 +22,7 @@ RUN apt-get update && apt-get install -y \
bzip2 \
ccache \
curl \
+ dnsutils \
gcc \
gcc-multilib \
git \
@@ -61,7 +62,7 @@ RUN apt-get update && apt-get install -y \
# Install Python packages from PyPI
RUN pip install pip --upgrade
RUN pip install virtualenv
-RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 twisted==17.5.0
#==================
# Node dependencies
diff --git a/tools/dockerfile/interoptest/grpc_interop_php/Dockerfile b/tools/dockerfile/interoptest/grpc_interop_php/Dockerfile
index f859a46514..d6e229f84a 100644
--- a/tools/dockerfile/interoptest/grpc_interop_php/Dockerfile
+++ b/tools/dockerfile/interoptest/grpc_interop_php/Dockerfile
@@ -22,6 +22,7 @@ RUN apt-get update && apt-get install -y \
bzip2 \
ccache \
curl \
+ dnsutils \
gcc \
gcc-multilib \
git \
diff --git a/tools/dockerfile/interoptest/grpc_interop_python/Dockerfile b/tools/dockerfile/interoptest/grpc_interop_python/Dockerfile
index 42b1107c11..271c6e75e9 100644
--- a/tools/dockerfile/interoptest/grpc_interop_python/Dockerfile
+++ b/tools/dockerfile/interoptest/grpc_interop_python/Dockerfile
@@ -22,6 +22,7 @@ RUN apt-get update && apt-get install -y \
bzip2 \
ccache \
curl \
+ dnsutils \
gcc \
gcc-multilib \
git \
@@ -61,7 +62,7 @@ RUN apt-get update && apt-get install -y \
# Install Python packages from PyPI
RUN pip install pip --upgrade
RUN pip install virtualenv
-RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 twisted==17.5.0
# Prepare ccache
RUN ln -s /usr/bin/ccache /usr/local/bin/gcc
diff --git a/tools/dockerfile/interoptest/grpc_interop_ruby/Dockerfile b/tools/dockerfile/interoptest/grpc_interop_ruby/Dockerfile
index 3c744b434e..7bcada68e0 100644
--- a/tools/dockerfile/interoptest/grpc_interop_ruby/Dockerfile
+++ b/tools/dockerfile/interoptest/grpc_interop_ruby/Dockerfile
@@ -22,6 +22,7 @@ RUN apt-get update && apt-get install -y \
bzip2 \
ccache \
curl \
+ dnsutils \
gcc \
gcc-multilib \
git \
@@ -61,7 +62,7 @@ RUN apt-get update && apt-get install -y \
# Install Python packages from PyPI
RUN pip install pip --upgrade
RUN pip install virtualenv
-RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 twisted==17.5.0
#==================
# Ruby dependencies
diff --git a/tools/dockerfile/test/csharp_jessie_x64/Dockerfile b/tools/dockerfile/test/csharp_jessie_x64/Dockerfile
index 60dc7eb21a..40d46fcf58 100644
--- a/tools/dockerfile/test/csharp_jessie_x64/Dockerfile
+++ b/tools/dockerfile/test/csharp_jessie_x64/Dockerfile
@@ -22,6 +22,7 @@ RUN apt-get update && apt-get install -y \
bzip2 \
ccache \
curl \
+ dnsutils \
gcc \
gcc-multilib \
git \
@@ -65,7 +66,7 @@ RUN apt-get update && apt-get install -y \
# Install Python packages from PyPI
RUN pip install pip --upgrade
RUN pip install virtualenv
-RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 twisted==17.5.0
#================
# C# dependencies
diff --git a/tools/dockerfile/test/cxx_jessie_x64/Dockerfile b/tools/dockerfile/test/cxx_jessie_x64/Dockerfile
index f9622dd757..888a37baca 100644
--- a/tools/dockerfile/test/cxx_jessie_x64/Dockerfile
+++ b/tools/dockerfile/test/cxx_jessie_x64/Dockerfile
@@ -22,6 +22,7 @@ RUN apt-get update && apt-get install -y \
bzip2 \
ccache \
curl \
+ dnsutils \
gcc \
gcc-multilib \
git \
@@ -65,7 +66,7 @@ RUN apt-get update && apt-get install -y \
# Install Python packages from PyPI
RUN pip install pip --upgrade
RUN pip install virtualenv
-RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 twisted==17.5.0
#=================
# C++ dependencies
diff --git a/tools/dockerfile/test/cxx_jessie_x86/Dockerfile b/tools/dockerfile/test/cxx_jessie_x86/Dockerfile
index 2d396ce2ff..319f1e1889 100644
--- a/tools/dockerfile/test/cxx_jessie_x86/Dockerfile
+++ b/tools/dockerfile/test/cxx_jessie_x86/Dockerfile
@@ -22,6 +22,7 @@ RUN apt-get update && apt-get install -y \
bzip2 \
ccache \
curl \
+ dnsutils \
gcc \
gcc-multilib \
git \
@@ -65,7 +66,7 @@ RUN apt-get update && apt-get install -y \
# Install Python packages from PyPI
RUN pip install pip --upgrade
RUN pip install virtualenv
-RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 twisted==17.5.0
#=================
# C++ dependencies
diff --git a/tools/dockerfile/test/cxx_ubuntu1404_x64/Dockerfile b/tools/dockerfile/test/cxx_ubuntu1404_x64/Dockerfile
index 349a6eff83..61f005d9da 100644
--- a/tools/dockerfile/test/cxx_ubuntu1404_x64/Dockerfile
+++ b/tools/dockerfile/test/cxx_ubuntu1404_x64/Dockerfile
@@ -22,6 +22,7 @@ RUN apt-get update && apt-get install -y \
bzip2 \
ccache \
curl \
+ dnsutils \
gcc \
gcc-multilib \
git \
@@ -65,7 +66,7 @@ RUN apt-get update && apt-get install -y \
# Install Python packages from PyPI
RUN pip install pip --upgrade
RUN pip install virtualenv
-RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 twisted==17.5.0
#=================
# C++ dependencies
diff --git a/tools/dockerfile/test/cxx_ubuntu1604_x64/Dockerfile b/tools/dockerfile/test/cxx_ubuntu1604_x64/Dockerfile
index bccfdba7cd..f35247eccb 100644
--- a/tools/dockerfile/test/cxx_ubuntu1604_x64/Dockerfile
+++ b/tools/dockerfile/test/cxx_ubuntu1604_x64/Dockerfile
@@ -22,6 +22,7 @@ RUN apt-get update && apt-get install -y \
bzip2 \
ccache \
curl \
+ dnsutils \
gcc \
gcc-multilib \
git \
@@ -65,7 +66,7 @@ RUN apt-get update && apt-get install -y \
# Install Python packages from PyPI
RUN pip install pip --upgrade
RUN pip install virtualenv
-RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 twisted==17.5.0
#=================
# C++ dependencies
diff --git a/tools/dockerfile/test/fuzzer/Dockerfile b/tools/dockerfile/test/fuzzer/Dockerfile
index 94c37b3f63..ce1badfeb1 100644
--- a/tools/dockerfile/test/fuzzer/Dockerfile
+++ b/tools/dockerfile/test/fuzzer/Dockerfile
@@ -22,6 +22,7 @@ RUN apt-get update && apt-get install -y \
bzip2 \
ccache \
curl \
+ dnsutils \
gcc \
gcc-multilib \
git \
@@ -65,7 +66,7 @@ RUN apt-get update && apt-get install -y \
# Install Python packages from PyPI
RUN pip install pip --upgrade
RUN pip install virtualenv
-RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 twisted==17.5.0
#=================
# C++ dependencies
diff --git a/tools/dockerfile/test/multilang_jessie_x64/Dockerfile b/tools/dockerfile/test/multilang_jessie_x64/Dockerfile
index b7373b5d9c..59fe4d8f93 100644
--- a/tools/dockerfile/test/multilang_jessie_x64/Dockerfile
+++ b/tools/dockerfile/test/multilang_jessie_x64/Dockerfile
@@ -22,6 +22,7 @@ RUN apt-get update && apt-get install -y \
bzip2 \
ccache \
curl \
+ dnsutils \
gcc \
gcc-multilib \
git \
@@ -122,7 +123,7 @@ RUN apt-get update && apt-get install -y \
# Install Python packages from PyPI
RUN pip install pip --upgrade
RUN pip install virtualenv
-RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 twisted==17.5.0
# Install coverage for Python test coverage reporting
RUN pip install coverage
diff --git a/tools/dockerfile/test/node_jessie_x64/Dockerfile b/tools/dockerfile/test/node_jessie_x64/Dockerfile
index 31602e594e..103be8412b 100644
--- a/tools/dockerfile/test/node_jessie_x64/Dockerfile
+++ b/tools/dockerfile/test/node_jessie_x64/Dockerfile
@@ -22,6 +22,7 @@ RUN apt-get update && apt-get install -y \
bzip2 \
ccache \
curl \
+ dnsutils \
gcc \
gcc-multilib \
git \
@@ -76,7 +77,7 @@ RUN apt-get update && apt-get install -y \
# Install Python packages from PyPI
RUN pip install pip --upgrade
RUN pip install virtualenv
-RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 twisted==17.5.0
#==================
# Node dependencies
diff --git a/tools/dockerfile/test/php7_jessie_x64/Dockerfile b/tools/dockerfile/test/php7_jessie_x64/Dockerfile
index e79305bb27..f6d426bcd6 100644
--- a/tools/dockerfile/test/php7_jessie_x64/Dockerfile
+++ b/tools/dockerfile/test/php7_jessie_x64/Dockerfile
@@ -77,7 +77,7 @@ RUN apt-get update && apt-get install -y \
# Install Python packages from PyPI
RUN pip install pip --upgrade
RUN pip install virtualenv
-RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 twisted==17.5.0
# Prepare ccache
RUN ln -s /usr/bin/ccache /usr/local/bin/gcc
diff --git a/tools/dockerfile/test/php_jessie_x64/Dockerfile b/tools/dockerfile/test/php_jessie_x64/Dockerfile
index 245a642367..ae82a8d99f 100644
--- a/tools/dockerfile/test/php_jessie_x64/Dockerfile
+++ b/tools/dockerfile/test/php_jessie_x64/Dockerfile
@@ -22,6 +22,7 @@ RUN apt-get update && apt-get install -y \
bzip2 \
ccache \
curl \
+ dnsutils \
gcc \
gcc-multilib \
git \
@@ -65,7 +66,7 @@ RUN apt-get update && apt-get install -y \
# Install Python packages from PyPI
RUN pip install pip --upgrade
RUN pip install virtualenv
-RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 twisted==17.5.0
#=================
# PHP dependencies
diff --git a/tools/dockerfile/test/python_jessie_x64/Dockerfile b/tools/dockerfile/test/python_jessie_x64/Dockerfile
index 5093b793b0..d5d781cd1a 100644
--- a/tools/dockerfile/test/python_jessie_x64/Dockerfile
+++ b/tools/dockerfile/test/python_jessie_x64/Dockerfile
@@ -22,6 +22,7 @@ RUN apt-get update && apt-get install -y \
bzip2 \
ccache \
curl \
+ dnsutils \
gcc \
gcc-multilib \
git \
@@ -65,7 +66,7 @@ RUN apt-get update && apt-get install -y \
# Install Python packages from PyPI
RUN pip install pip --upgrade
RUN pip install virtualenv
-RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 twisted==17.5.0
# Prepare ccache
RUN ln -s /usr/bin/ccache /usr/local/bin/gcc
diff --git a/tools/dockerfile/test/python_pyenv_x64/Dockerfile b/tools/dockerfile/test/python_pyenv_x64/Dockerfile
index ff8f0231cb..3b4ad12b6d 100644
--- a/tools/dockerfile/test/python_pyenv_x64/Dockerfile
+++ b/tools/dockerfile/test/python_pyenv_x64/Dockerfile
@@ -22,6 +22,7 @@ RUN apt-get update && apt-get install -y \
bzip2 \
ccache \
curl \
+ dnsutils \
gcc \
gcc-multilib \
git \
@@ -65,7 +66,7 @@ RUN apt-get update && apt-get install -y \
# Install Python packages from PyPI
RUN pip install pip --upgrade
RUN pip install virtualenv
-RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 twisted==17.5.0
# Install dependencies for pyenv
RUN apt-get update && apt-get install -y \
diff --git a/tools/dockerfile/test/ruby_jessie_x64/Dockerfile b/tools/dockerfile/test/ruby_jessie_x64/Dockerfile
index d77917c87f..3d879bb0c9 100644
--- a/tools/dockerfile/test/ruby_jessie_x64/Dockerfile
+++ b/tools/dockerfile/test/ruby_jessie_x64/Dockerfile
@@ -22,6 +22,7 @@ RUN apt-get update && apt-get install -y \
bzip2 \
ccache \
curl \
+ dnsutils \
gcc \
gcc-multilib \
git \
@@ -65,7 +66,7 @@ RUN apt-get update && apt-get install -y \
# Install Python packages from PyPI
RUN pip install pip --upgrade
RUN pip install virtualenv
-RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 twisted==17.5.0
#==================
# Ruby dependencies
diff --git a/tools/dockerfile/test/sanity/Dockerfile b/tools/dockerfile/test/sanity/Dockerfile
index 0f2f9ede58..44732a5ae4 100644
--- a/tools/dockerfile/test/sanity/Dockerfile
+++ b/tools/dockerfile/test/sanity/Dockerfile
@@ -22,6 +22,7 @@ RUN apt-get update && apt-get install -y \
bzip2 \
ccache \
curl \
+ dnsutils \
gcc \
gcc-multilib \
git \
@@ -65,7 +66,7 @@ RUN apt-get update && apt-get install -y \
# Install Python packages from PyPI
RUN pip install pip --upgrade
RUN pip install virtualenv
-RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0
+RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 twisted==17.5.0
#========================
# Sanity test dependencies
diff --git a/tools/gce/create_interop_worker.sh b/tools/gce/create_interop_worker.sh
index 76e4905272..3e59dc501a 100755
--- a/tools/gce/create_interop_worker.sh
+++ b/tools/gce/create_interop_worker.sh
@@ -33,7 +33,8 @@ gcloud compute instances create $INSTANCE_NAME \
--machine-type n1-standard-16 \
--image ubuntu-15-10 \
--boot-disk-size 1000 \
- --scopes https://www.googleapis.com/auth/xapi.zoo
+ --scopes https://www.googleapis.com/auth/xapi.zoo \
+ --tags=allow-ssh
echo 'Created GCE instance, waiting 60 seconds for it to come online.'
sleep 60
diff --git a/tools/gce/create_linux_performance_worker.sh b/tools/gce/create_linux_performance_worker.sh
index 7f53732c05..4270f5cbfb 100755
--- a/tools/gce/create_linux_performance_worker.sh
+++ b/tools/gce/create_linux_performance_worker.sh
@@ -36,7 +36,8 @@ gcloud compute instances create $INSTANCE_NAME \
--image-project ubuntu-os-cloud \
--image-family ubuntu-1704 \
--boot-disk-size 300 \
- --scopes https://www.googleapis.com/auth/bigquery
+ --scopes https://www.googleapis.com/auth/bigquery \
+ --tags=allow-ssh
echo 'Created GCE instance, waiting 60 seconds for it to come online.'
sleep 60
diff --git a/tools/gce/create_linux_worker.sh b/tools/gce/create_linux_worker.sh
index c96f3281fc..8bf44aa729 100755
--- a/tools/gce/create_linux_worker.sh
+++ b/tools/gce/create_linux_worker.sh
@@ -31,7 +31,8 @@ gcloud compute instances create $INSTANCE_NAME \
--image=ubuntu-1510 \
--image-project=grpc-testing \
--boot-disk-size 1000 \
- --scopes https://www.googleapis.com/auth/bigquery
+ --scopes https://www.googleapis.com/auth/bigquery \
+ --tags=allow-ssh
echo 'Created GCE instance, waiting 60 seconds for it to come online.'
sleep 60
diff --git a/tools/internal_ci/helper_scripts/prepare_build_linux_perf_rc b/tools/internal_ci/helper_scripts/prepare_build_linux_perf_rc
index 1b7779caa8..dc3c75a6a1 100644
--- a/tools/internal_ci/helper_scripts/prepare_build_linux_perf_rc
+++ b/tools/internal_ci/helper_scripts/prepare_build_linux_perf_rc
@@ -19,4 +19,19 @@
ulimit -n 32768
ulimit -c unlimited
+# Performance PR testing needs GH API key and PR metadata to comment results
+if [ -n "$KOKORO_GITHUB_PULL_REQUEST_NUMBER" ]; then
+ set +x
+ sudo apt-get install -y jq
+ export ghprbTargetBranch=$(curl -s https://api.github.com/repos/grpc/grpc/pulls/$KOKORO_GITHUB_PULL_REQUEST_NUMBER | jq -r .base.ref)
+
+ gsutil cp gs://grpc-testing-secrets/github_credentials/oauth_token.txt ~/
+ # TODO(matt-kwong): rename this to GITHUB_OAUTH_TOKEN after Jenkins deprecation
+ export JENKINS_OAUTH_TOKEN=$(cat ~/oauth_token.txt)
+ export ghprbPullId=$KOKORO_GITHUB_PULL_REQUEST_NUMBER
+ set -x
+fi
+
+sudo pip install tabulate
+
git submodule update --init
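
# [Editor's note, not part of the commit] The block added above resolves the PR's
# target branch by fetching the pull request from the GitHub API with curl and
# reading ".base.ref" with jq. For illustration only, a minimal Python sketch of
# the same lookup; the function name is hypothetical and this code is not in the
# repository.
#
#     import json
#     import urllib.request
#
#     def get_pr_target_branch(pr_number, repo="grpc/grpc"):
#         """Return the base branch (e.g. 'master') of a pull request."""
#         url = "https://api.github.com/repos/%s/pulls/%s" % (repo, pr_number)
#         with urllib.request.urlopen(url) as resp:
#             pr = json.load(resp)
#         return pr["base"]["ref"]
#
#     # Example: get_pr_target_branch(12345) might return "master"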
diff --git a/tools/internal_ci/helper_scripts/prepare_build_macos_rc b/tools/internal_ci/helper_scripts/prepare_build_macos_rc
index dd2741e595..bec529f85e 100644
--- a/tools/internal_ci/helper_scripts/prepare_build_macos_rc
+++ b/tools/internal_ci/helper_scripts/prepare_build_macos_rc
@@ -31,6 +31,13 @@ ulimit -a
pip install google-api-python-client --user python
export GOOGLE_APPLICATION_CREDENTIALS=${KOKORO_GFILE_DIR}/GrpcTesting-d0eeee2db331.json
+# If this is a PR using RUN_TESTS_FLAGS var, then add flags to filter tests
+if [ -n "$KOKORO_GITHUB_PULL_REQUEST_NUMBER" ] && [ -n "$RUN_TESTS_FLAGS" ]; then
+ brew install jq
+ ghprbTargetBranch=$(curl -s https://api.github.com/repos/grpc/grpc/pulls/$KOKORO_GITHUB_PULL_REQUEST_NUMBER | jq -r .base.ref)
+ export RUN_TESTS_FLAGS="$RUN_TESTS_FLAGS --filter_pr_tests --base_branch origin/$ghprbTargetBranch"
+fi
+
set +ex # rvm script is very verbose and exits with errorcode
source $HOME/.rvm/scripts/rvm
set -e # rvm commands are very verbose
@@ -46,7 +53,7 @@ pod repo update # needed by python
# python
brew install coreutils # we need grealpath
pip install virtualenv --user python
-pip install -U six tox setuptools --user python
+pip install -U six tox setuptools twisted pyyaml --user python
export PYTHONPATH=/Library/Python/3.4/site-packages
# set xcode version for Obj-C tests
diff --git a/tools/internal_ci/helper_scripts/prepare_build_windows.bat b/tools/internal_ci/helper_scripts/prepare_build_windows.bat
index 69e087e1db..f625755db9 100644
--- a/tools/internal_ci/helper_scripts/prepare_build_windows.bat
+++ b/tools/internal_ci/helper_scripts/prepare_build_windows.bat
@@ -18,6 +18,14 @@ set PATH=C:\tools\msys64\usr\bin;C:\Python27;%PATH%
bash tools/internal_ci/helper_scripts/gen_report_index.sh
+@rem If this is a PR using RUN_TESTS_FLAGS var, then add flags to filter tests
+if defined KOKORO_GITHUB_PULL_REQUEST_NUMBER if defined RUN_TESTS_FLAGS (
+ chocolatey install -y jq
+ for /f "usebackq delims=" %%x in (`curl -s https://api.github.com/repos/grpc/grpc/pulls/%KOKORO_GITHUB_PULL_REQUEST_NUMBER% ^| jq -r .base.ref`) do (
+ set RUN_TESTS_FLAGS=%RUN_TESTS_FLAGS% --filter_pr_tests --base_branch origin/%%x
+ )
+)
+
@rem Update DNS settings to:
@rem 1. allow resolving metadata.google.internal hostname
@rem 2. make fetching default GCE credential by oauth2client work
diff --git a/tools/internal_ci/linux/grpc_microbenchmark_diff.sh b/tools/internal_ci/linux/grpc_microbenchmark_diff.sh
new file mode 100755
index 0000000000..58ffcf336b
--- /dev/null
+++ b/tools/internal_ci/linux/grpc_microbenchmark_diff.sh
@@ -0,0 +1,38 @@
+#!/usr/bin/env bash
+# Copyright 2017 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# This script is invoked by Jenkins and runs a diff on the microbenchmarks
+set -ex
+
+# List of benchmarks that provide good signal for analyzing performance changes in pull requests
+BENCHMARKS_TO_RUN="bm_fullstack_unary_ping_pong bm_fullstack_streaming_ping_pong bm_fullstack_streaming_pump bm_closure bm_cq bm_call_create bm_error bm_chttp2_hpack bm_chttp2_transport bm_pollset bm_metadata"
+
+# Enter the gRPC repo root
+cd $(dirname $0)/../../..
+
+source tools/internal_ci/helper_scripts/prepare_build_linux_perf_rc
+
+tools/run_tests/start_port_server.py
+tools/jenkins/run_c_cpp_test.sh tools/profiling/microbenchmarks/bm_diff/bm_main.py \
+ -d origin/$ghprbTargetBranch \
+ -b $BENCHMARKS_TO_RUN || FAILED="true"
+
+# kill port_server.py to prevent the build from hanging
+ps aux | grep port_server\\.py | awk '{print $2}' | xargs kill -9
+
+if [ "$FAILED" != "" ]
+then
+ exit 1
+fi
diff --git a/tools/internal_ci/linux/grpc_run_tests_matrix.sh b/tools/internal_ci/linux/grpc_run_tests_matrix.sh
index 028704b3ff..bd1430b741 100755
--- a/tools/internal_ci/linux/grpc_run_tests_matrix.sh
+++ b/tools/internal_ci/linux/grpc_run_tests_matrix.sh
@@ -20,4 +20,14 @@ cd $(dirname $0)/../../..
source tools/internal_ci/helper_scripts/prepare_build_linux_rc
-tools/run_tests/run_tests_matrix.py $RUN_TESTS_FLAGS
+tools/run_tests/run_tests_matrix.py $RUN_TESTS_FLAGS || FAILED="true"
+
+# Reveal leftover processes that might be left behind by the build
+ps aux | grep -i kbuilder
+
+echo 'Exiting gRPC main test script.'
+
+if [ "$FAILED" != "" ]
+then
+ exit 1
+fi
diff --git a/tools/internal_ci/linux/grpc_trickle_diff.sh b/tools/internal_ci/linux/grpc_trickle_diff.sh
new file mode 100755
index 0000000000..624031afa7
--- /dev/null
+++ b/tools/internal_ci/linux/grpc_trickle_diff.sh
@@ -0,0 +1,42 @@
+#!/usr/bin/env bash
+# Copyright 2017 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+# This script is invoked by Jenkins and runs a diff on the microbenchmarks
+set -ex
+
+# List of benchmarks that provide good signal for analyzing performance changes in pull requests
+BENCHMARKS_TO_RUN="cli_transport_stalls_per_iteration cli_stream_stalls_per_iteration svr_transport_stalls_per_iteration svr_stream_stalls_per_iteration"
+
+# Enter the gRPC repo root
+cd $(dirname $0)/../../..
+
+source tools/internal_ci/helper_scripts/prepare_build_linux_perf_rc
+
+tools/run_tests/start_port_server.py
+tools/jenkins/run_c_cpp_test.sh tools/profiling/microbenchmarks/bm_diff/bm_main.py \
+ -d origin/$ghprbTargetBranch \
+ -b bm_fullstack_trickle \
+ -l 4 \
+ -t $BENCHMARKS_TO_RUN \
+ --no-counters \
+ --pr_comment_name trickle || FAILED="true"
+
+# kill port_server.py to prevent the build from hanging
+ps aux | grep port_server\\.py | awk '{print $2}' | xargs kill -9
+
+if [ "$FAILED" != "" ]
+then
+ exit 1
+fi
diff --git a/tools/internal_ci/linux/pull_request/grpc_microbenchmark_diff.cfg b/tools/internal_ci/linux/pull_request/grpc_microbenchmark_diff.cfg
new file mode 100644
index 0000000000..9269c345f0
--- /dev/null
+++ b/tools/internal_ci/linux/pull_request/grpc_microbenchmark_diff.cfg
@@ -0,0 +1,25 @@
+# Copyright 2017 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Config file for the internal CI (in protobuf text format)
+
+# Location of the continuous shell script in repository.
+build_file: "grpc/tools/internal_ci/linux/grpc_microbenchmark_diff.sh"
+timeout_mins: 120
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ regex: "github/grpc/reports/**"
+ }
+}
diff --git a/tools/internal_ci/linux/pull_request/grpc_trickle_diff.cfg b/tools/internal_ci/linux/pull_request/grpc_trickle_diff.cfg
new file mode 100644
index 0000000000..e86b3ab475
--- /dev/null
+++ b/tools/internal_ci/linux/pull_request/grpc_trickle_diff.cfg
@@ -0,0 +1,25 @@
+# Copyright 2017 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+# Config file for the internal CI (in protobuf text format)
+
+# Location of the continuous shell script in repository.
+build_file: "grpc/tools/internal_ci/linux/grpc_trickle_diff.sh"
+timeout_mins: 120
+action {
+ define_artifacts {
+ regex: "**/*sponge_log.xml"
+ regex: "github/grpc/reports/**"
+ }
+}
diff --git a/tools/internal_ci/macos/grpc_run_tests_matrix.sh b/tools/internal_ci/macos/grpc_run_tests_matrix.sh
index 9a43e4869b..8e7fd54a62 100755
--- a/tools/internal_ci/macos/grpc_run_tests_matrix.sh
+++ b/tools/internal_ci/macos/grpc_run_tests_matrix.sh
@@ -25,6 +25,11 @@ tools/run_tests/run_tests_matrix.py $RUN_TESTS_FLAGS || FAILED="true"
# kill port_server.py to prevent the build from hanging
ps aux | grep port_server\\.py | awk '{print $2}' | xargs kill -9
+# Reveal leftover processes that might be left behind by the build
+ps aux | grep -i kbuilder
+
+echo 'Exiting gRPC main test script.'
+
if [ "$FAILED" != "" ]
then
exit 1
diff --git a/tools/internal_ci/windows/grpc_run_tests_matrix.bat b/tools/internal_ci/windows/grpc_run_tests_matrix.bat
index 08d834f8b0..10627add25 100644
--- a/tools/internal_ci/windows/grpc_run_tests_matrix.bat
+++ b/tools/internal_ci/windows/grpc_run_tests_matrix.bat
@@ -17,8 +17,10 @@ cd /d %~dp0\..\..\..
call tools/internal_ci/helper_scripts/prepare_build_windows.bat
-python tools/run_tests/run_tests_matrix.py %RUN_TESTS_FLAGS% || goto :error
-goto :EOF
+python tools/run_tests/run_tests_matrix.py %RUN_TESTS_FLAGS%
+set RUNTESTS_EXITCODE=%errorlevel%
-:error
-exit /b %errorlevel%
+@rem Reveal leftover processes that might be left behind by the build
+tasklist /V
+
+exit /b %RUNTESTS_EXITCODE%
diff --git a/tools/profiling/microbenchmarks/bm_diff/bm_constants.py b/tools/profiling/microbenchmarks/bm_diff/bm_constants.py
index ad79a0a197..f8989b17fc 100644
--- a/tools/profiling/microbenchmarks/bm_diff/bm_constants.py
+++ b/tools/profiling/microbenchmarks/bm_diff/bm_constants.py
@@ -28,4 +28,4 @@ _INTERESTING = ('cpu_time', 'real_time', 'locks_per_iteration',
'atm_cas_per_iteration', 'atm_add_per_iteration',
'nows_per_iteration', 'cli_transport_stalls_per_iteration',
'cli_stream_stalls_per_iteration', 'svr_transport_stalls_per_iteration',
- 'svr_stream_stalls_per_iteration',)
+ 'svr_stream_stalls_per_iteration', 'http2_pings_sent_per_iteration')
diff --git a/tools/run_tests/generated/sources_and_headers.json b/tools/run_tests/generated/sources_and_headers.json
index 2f6e34bfb3..8d42c3e432 100644
--- a/tools/run_tests/generated/sources_and_headers.json
+++ b/tools/run_tests/generated/sources_and_headers.json
@@ -5631,6 +5631,86 @@
"deps": [
"gpr",
"gpr_test_util",
+ "grpc++_test_config",
+ "grpc++_test_util_unsecure",
+ "grpc++_unsecure",
+ "grpc_test_util_unsecure",
+ "grpc_unsecure"
+ ],
+ "headers": [],
+ "is_filegroup": false,
+ "language": "c++",
+ "name": "resolver_component_test_unsecure",
+ "src": [
+ "test/cpp/naming/resolver_component_test.cc"
+ ],
+ "third_party": false,
+ "type": "target"
+ },
+ {
+ "deps": [
+ "gpr",
+ "gpr_test_util",
+ "grpc",
+ "grpc++",
+ "grpc++_test_config",
+ "grpc++_test_util",
+ "grpc_test_util"
+ ],
+ "headers": [],
+ "is_filegroup": false,
+ "language": "c++",
+ "name": "resolver_component_test",
+ "src": [
+ "test/cpp/naming/resolver_component_test.cc"
+ ],
+ "third_party": false,
+ "type": "target"
+ },
+ {
+ "deps": [
+ "gpr",
+ "gpr_test_util",
+ "grpc",
+ "grpc++",
+ "grpc++_test_config",
+ "grpc++_test_util",
+ "grpc_test_util"
+ ],
+ "headers": [],
+ "is_filegroup": false,
+ "language": "c++",
+ "name": "resolver_component_tests_runner_invoker_unsecure",
+ "src": [
+ "test/cpp/naming/resolver_component_tests_runner_invoker.cc"
+ ],
+ "third_party": false,
+ "type": "target"
+ },
+ {
+ "deps": [
+ "gpr",
+ "gpr_test_util",
+ "grpc",
+ "grpc++",
+ "grpc++_test_config",
+ "grpc++_test_util",
+ "grpc_test_util"
+ ],
+ "headers": [],
+ "is_filegroup": false,
+ "language": "c++",
+ "name": "resolver_component_tests_runner_invoker",
+ "src": [
+ "test/cpp/naming/resolver_component_tests_runner_invoker.cc"
+ ],
+ "third_party": false,
+ "type": "target"
+ },
+ {
+ "deps": [
+ "gpr",
+ "gpr_test_util",
"grpc",
"grpc_test_util"
],
diff --git a/tools/run_tests/generated/tests.json b/tools/run_tests/generated/tests.json
index 45d7145e8e..72dfbc0b98 100644
--- a/tools/run_tests/generated/tests.json
+++ b/tools/run_tests/generated/tests.json
@@ -4406,6 +4406,52 @@
},
{
"args": [
+ "--test_bin_name=resolver_component_test_unsecure",
+ "--running_under_bazel=false"
+ ],
+ "ci_platforms": [
+ "linux",
+ "mac",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "exclude_iomgrs": [],
+ "flaky": false,
+ "gtest": false,
+ "language": "c++",
+ "name": "resolver_component_tests_runner_invoker_unsecure",
+ "platforms": [
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
+ "--test_bin_name=resolver_component_test",
+ "--running_under_bazel=false"
+ ],
+ "ci_platforms": [
+ "linux",
+ "mac",
+ "posix"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "exclude_iomgrs": [],
+ "flaky": false,
+ "gtest": false,
+ "language": "c++",
+ "name": "resolver_component_tests_runner_invoker",
+ "platforms": [
+ "linux",
+ "mac",
+ "posix"
+ ]
+ },
+ {
+ "args": [
"third_party/boringssl/crypto/aes/aes_tests.txt"
],
"boringssl": true,
diff --git a/tools/run_tests/performance/massage_qps_stats.py b/tools/run_tests/performance/massage_qps_stats.py
index 606660f700..85251eaaf3 100644
--- a/tools/run_tests/performance/massage_qps_stats.py
+++ b/tools/run_tests/performance/massage_qps_stats.py
@@ -22,6 +22,10 @@ def massage_qps_stats(scenario_result):
del stats["coreStats"]
stats["core_client_calls_created"] = massage_qps_stats_helpers.counter(core_stats, "client_calls_created")
stats["core_server_calls_created"] = massage_qps_stats_helpers.counter(core_stats, "server_calls_created")
+ stats["core_cqs_created"] = massage_qps_stats_helpers.counter(core_stats, "cqs_created")
+ stats["core_client_channels_created"] = massage_qps_stats_helpers.counter(core_stats, "client_channels_created")
+ stats["core_client_subchannels_created"] = massage_qps_stats_helpers.counter(core_stats, "client_subchannels_created")
+ stats["core_server_channels_created"] = massage_qps_stats_helpers.counter(core_stats, "server_channels_created")
stats["core_syscall_poll"] = massage_qps_stats_helpers.counter(core_stats, "syscall_poll")
stats["core_syscall_wait"] = massage_qps_stats_helpers.counter(core_stats, "syscall_wait")
stats["core_pollset_kick"] = massage_qps_stats_helpers.counter(core_stats, "pollset_kick")
@@ -49,6 +53,27 @@ def massage_qps_stats(scenario_result):
stats["core_http2_writes_offloaded"] = massage_qps_stats_helpers.counter(core_stats, "http2_writes_offloaded")
stats["core_http2_writes_continued"] = massage_qps_stats_helpers.counter(core_stats, "http2_writes_continued")
stats["core_http2_partial_writes"] = massage_qps_stats_helpers.counter(core_stats, "http2_partial_writes")
+ stats["core_http2_initiate_write_due_to_initial_write"] = massage_qps_stats_helpers.counter(core_stats, "http2_initiate_write_due_to_initial_write")
+ stats["core_http2_initiate_write_due_to_start_new_stream"] = massage_qps_stats_helpers.counter(core_stats, "http2_initiate_write_due_to_start_new_stream")
+ stats["core_http2_initiate_write_due_to_send_message"] = massage_qps_stats_helpers.counter(core_stats, "http2_initiate_write_due_to_send_message")
+ stats["core_http2_initiate_write_due_to_send_initial_metadata"] = massage_qps_stats_helpers.counter(core_stats, "http2_initiate_write_due_to_send_initial_metadata")
+ stats["core_http2_initiate_write_due_to_send_trailing_metadata"] = massage_qps_stats_helpers.counter(core_stats, "http2_initiate_write_due_to_send_trailing_metadata")
+ stats["core_http2_initiate_write_due_to_retry_send_ping"] = massage_qps_stats_helpers.counter(core_stats, "http2_initiate_write_due_to_retry_send_ping")
+ stats["core_http2_initiate_write_due_to_continue_pings"] = massage_qps_stats_helpers.counter(core_stats, "http2_initiate_write_due_to_continue_pings")
+ stats["core_http2_initiate_write_due_to_goaway_sent"] = massage_qps_stats_helpers.counter(core_stats, "http2_initiate_write_due_to_goaway_sent")
+ stats["core_http2_initiate_write_due_to_rst_stream"] = massage_qps_stats_helpers.counter(core_stats, "http2_initiate_write_due_to_rst_stream")
+ stats["core_http2_initiate_write_due_to_close_from_api"] = massage_qps_stats_helpers.counter(core_stats, "http2_initiate_write_due_to_close_from_api")
+ stats["core_http2_initiate_write_due_to_stream_flow_control"] = massage_qps_stats_helpers.counter(core_stats, "http2_initiate_write_due_to_stream_flow_control")
+ stats["core_http2_initiate_write_due_to_transport_flow_control"] = massage_qps_stats_helpers.counter(core_stats, "http2_initiate_write_due_to_transport_flow_control")
+ stats["core_http2_initiate_write_due_to_send_settings"] = massage_qps_stats_helpers.counter(core_stats, "http2_initiate_write_due_to_send_settings")
+ stats["core_http2_initiate_write_due_to_bdp_estimator_ping"] = massage_qps_stats_helpers.counter(core_stats, "http2_initiate_write_due_to_bdp_estimator_ping")
+ stats["core_http2_initiate_write_due_to_flow_control_unstalled_by_setting"] = massage_qps_stats_helpers.counter(core_stats, "http2_initiate_write_due_to_flow_control_unstalled_by_setting")
+ stats["core_http2_initiate_write_due_to_flow_control_unstalled_by_update"] = massage_qps_stats_helpers.counter(core_stats, "http2_initiate_write_due_to_flow_control_unstalled_by_update")
+ stats["core_http2_initiate_write_due_to_application_ping"] = massage_qps_stats_helpers.counter(core_stats, "http2_initiate_write_due_to_application_ping")
+ stats["core_http2_initiate_write_due_to_keepalive_ping"] = massage_qps_stats_helpers.counter(core_stats, "http2_initiate_write_due_to_keepalive_ping")
+ stats["core_http2_initiate_write_due_to_transport_flow_control_unstalled"] = massage_qps_stats_helpers.counter(core_stats, "http2_initiate_write_due_to_transport_flow_control_unstalled")
+ stats["core_http2_initiate_write_due_to_ping_response"] = massage_qps_stats_helpers.counter(core_stats, "http2_initiate_write_due_to_ping_response")
+ stats["core_http2_initiate_write_due_to_force_rst_stream"] = massage_qps_stats_helpers.counter(core_stats, "http2_initiate_write_due_to_force_rst_stream")
stats["core_combiner_locks_initiated"] = massage_qps_stats_helpers.counter(core_stats, "combiner_locks_initiated")
stats["core_combiner_locks_scheduled_items"] = massage_qps_stats_helpers.counter(core_stats, "combiner_locks_scheduled_items")
stats["core_combiner_locks_scheduled_final_items"] = massage_qps_stats_helpers.counter(core_stats, "combiner_locks_scheduled_final_items")
diff --git a/tools/run_tests/performance/scenario_result_schema.json b/tools/run_tests/performance/scenario_result_schema.json
index 11f41ab012..766a890cee 100644
--- a/tools/run_tests/performance/scenario_result_schema.json
+++ b/tools/run_tests/performance/scenario_result_schema.json
@@ -122,6 +122,26 @@
},
{
"mode": "NULLABLE",
+ "name": "core_cqs_created",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
+ "name": "core_client_channels_created",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
+ "name": "core_client_subchannels_created",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
+ "name": "core_server_channels_created",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
"name": "core_syscall_poll",
"type": "INTEGER"
},
@@ -257,6 +277,111 @@
},
{
"mode": "NULLABLE",
+ "name": "core_http2_initiate_write_due_to_initial_write",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
+ "name": "core_http2_initiate_write_due_to_start_new_stream",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
+ "name": "core_http2_initiate_write_due_to_send_message",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
+ "name": "core_http2_initiate_write_due_to_send_initial_metadata",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
+ "name": "core_http2_initiate_write_due_to_send_trailing_metadata",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
+ "name": "core_http2_initiate_write_due_to_retry_send_ping",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
+ "name": "core_http2_initiate_write_due_to_continue_pings",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
+ "name": "core_http2_initiate_write_due_to_goaway_sent",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
+ "name": "core_http2_initiate_write_due_to_rst_stream",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
+ "name": "core_http2_initiate_write_due_to_close_from_api",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
+ "name": "core_http2_initiate_write_due_to_stream_flow_control",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
+ "name": "core_http2_initiate_write_due_to_transport_flow_control",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
+ "name": "core_http2_initiate_write_due_to_send_settings",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
+ "name": "core_http2_initiate_write_due_to_bdp_estimator_ping",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
+ "name": "core_http2_initiate_write_due_to_flow_control_unstalled_by_setting",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
+ "name": "core_http2_initiate_write_due_to_flow_control_unstalled_by_update",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
+ "name": "core_http2_initiate_write_due_to_application_ping",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
+ "name": "core_http2_initiate_write_due_to_keepalive_ping",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
+ "name": "core_http2_initiate_write_due_to_transport_flow_control_unstalled",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
+ "name": "core_http2_initiate_write_due_to_ping_response",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
+ "name": "core_http2_initiate_write_due_to_force_rst_stream",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
"name": "core_combiner_locks_initiated",
"type": "INTEGER"
},
@@ -679,6 +804,26 @@
},
{
"mode": "NULLABLE",
+ "name": "core_cqs_created",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
+ "name": "core_client_channels_created",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
+ "name": "core_client_subchannels_created",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
+ "name": "core_server_channels_created",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
"name": "core_syscall_poll",
"type": "INTEGER"
},
@@ -814,6 +959,111 @@
},
{
"mode": "NULLABLE",
+ "name": "core_http2_initiate_write_due_to_initial_write",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
+ "name": "core_http2_initiate_write_due_to_start_new_stream",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
+ "name": "core_http2_initiate_write_due_to_send_message",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
+ "name": "core_http2_initiate_write_due_to_send_initial_metadata",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
+ "name": "core_http2_initiate_write_due_to_send_trailing_metadata",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
+ "name": "core_http2_initiate_write_due_to_retry_send_ping",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
+ "name": "core_http2_initiate_write_due_to_continue_pings",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
+ "name": "core_http2_initiate_write_due_to_goaway_sent",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
+ "name": "core_http2_initiate_write_due_to_rst_stream",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
+ "name": "core_http2_initiate_write_due_to_close_from_api",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
+ "name": "core_http2_initiate_write_due_to_stream_flow_control",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
+ "name": "core_http2_initiate_write_due_to_transport_flow_control",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
+ "name": "core_http2_initiate_write_due_to_send_settings",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
+ "name": "core_http2_initiate_write_due_to_bdp_estimator_ping",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
+ "name": "core_http2_initiate_write_due_to_flow_control_unstalled_by_setting",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
+ "name": "core_http2_initiate_write_due_to_flow_control_unstalled_by_update",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
+ "name": "core_http2_initiate_write_due_to_application_ping",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
+ "name": "core_http2_initiate_write_due_to_keepalive_ping",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
+ "name": "core_http2_initiate_write_due_to_transport_flow_control_unstalled",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
+ "name": "core_http2_initiate_write_due_to_ping_response",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
+ "name": "core_http2_initiate_write_due_to_force_rst_stream",
+ "type": "INTEGER"
+ },
+ {
+ "mode": "NULLABLE",
"name": "core_combiner_locks_initiated",
"type": "INTEGER"
},
diff --git a/tools/run_tests/python_utils/filter_pull_request_tests.py b/tools/run_tests/python_utils/filter_pull_request_tests.py
index 032c0701e2..4ad981237b 100644
--- a/tools/run_tests/python_utils/filter_pull_request_tests.py
+++ b/tools/run_tests/python_utils/filter_pull_request_tests.py
@@ -128,7 +128,7 @@ def _get_changed_files(base_branch):
# Get file changes between branch and merge-base of specified branch
# Not combined to be Windows friendly
base_commit = check_output(["git", "merge-base", base_branch, "HEAD"]).rstrip()
- return check_output(["git", "diff", base_commit, "--name-only"]).splitlines()
+ return check_output(["git", "diff", base_commit, "--name-only", "HEAD"]).splitlines()
def _can_skip_tests(file_names, triggers):
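
# [Editor's note, not part of the commit] The one-line change above pins the
# second endpoint of the diff to HEAD, so files that changed on the base branch
# after the merge-base no longer count as "changed by this PR". A minimal sketch
# of the resulting logic, assuming a git checkout of the PR head; the helper
# name here is hypothetical.
#
#     import subprocess
#
#     def changed_files(base_branch):
#         base_commit = subprocess.check_output(
#             ["git", "merge-base", base_branch, "HEAD"]).decode().strip()
#         out = subprocess.check_output(
#             ["git", "diff", base_commit, "--name-only", "HEAD"])
#         return out.decode().splitlines()
#
#     # Example: changed_files("origin/master") -> ["tools/run_tests/...", ...]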
diff --git a/tools/run_tests/python_utils/jobset.py b/tools/run_tests/python_utils/jobset.py
index 80e79444ea..062c79a0de 100755
--- a/tools/run_tests/python_utils/jobset.py
+++ b/tools/run_tests/python_utils/jobset.py
@@ -71,10 +71,8 @@ def platform_string():
if platform_string() == 'windows':
pass
else:
- have_alarm = False
def alarm_handler(unused_signum, unused_frame):
- global have_alarm
- have_alarm = False
+ pass
signal.signal(signal.SIGCHLD, lambda unused_signum, unused_frame: None)
signal.signal(signal.SIGALRM, alarm_handler)
@@ -367,10 +365,9 @@ class Jobset(object):
"""Manages one run of jobs."""
def __init__(self, check_cancelled, maxjobs, newline_on_success, travis,
- stop_on_failure, add_env, quiet_success, max_time, clear_alarms):
+ stop_on_failure, add_env, quiet_success, max_time):
self._running = set()
self._check_cancelled = check_cancelled
- self._clear_alarms = clear_alarms
self._cancelled = False
self._failures = 0
self._completed = 0
@@ -455,10 +452,7 @@ class Jobset(object):
if platform_string() == 'windows':
time.sleep(0.1)
else:
- global have_alarm
- if not have_alarm:
- have_alarm = True
- signal.alarm(10)
+ signal.alarm(10)
signal.pause()
def cancelled(self):
@@ -474,10 +468,7 @@ class Jobset(object):
while self._running:
if self.cancelled(): pass # poll cancellation
self.reap()
- # Clear the alarms when finished to avoid a race condition causing job
- # failures. Don't do this when running multi-VM tests because clearing
- # the alarms causes the test to stall
- if platform_string() != 'windows' and self._clear_alarms:
+ if platform_string() != 'windows':
signal.alarm(0)
return not self.cancelled() and self._failures == 0
@@ -507,8 +498,7 @@ def run(cmdlines,
add_env={},
skip_jobs=False,
quiet_success=False,
- max_time=-1,
- clear_alarms=True):
+ max_time=-1):
if skip_jobs:
resultset = {}
skipped_job_result = JobResult()
@@ -520,7 +510,7 @@ def run(cmdlines,
js = Jobset(check_cancelled,
maxjobs if maxjobs is not None else _DEFAULT_MAX_JOBS,
newline_on_success, travis, stop_on_failure, add_env,
- quiet_success, max_time, clear_alarms)
+ quiet_success, max_time)
for cmdline, remaining in tag_remaining(cmdlines):
if not js.start(cmdline):
break
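
# [Editor's note, not part of the commit] The jobset.py hunks above drop the
# have_alarm bookkeeping and the clear_alarms flag: the wait loop now simply
# re-arms a 10-second SIGALRM before each signal.pause(), the handler is a
# no-op, and the alarm is cancelled once all jobs are reaped. A standalone
# POSIX-only sketch of that wakeup pattern (not the real jobset code):
#
#     import signal
#
#     def _noop(unused_signum, unused_frame):
#         pass
#
#     signal.signal(signal.SIGALRM, _noop)
#     signal.signal(signal.SIGCHLD, _noop)
#
#     def wait_for_children(still_running, reap):
#         while still_running():
#             signal.alarm(10)   # re-arm the wakeup on every iteration
#             signal.pause()     # sleep until SIGCHLD or the alarm fires
#             reap()
#         signal.alarm(0)        # cancel any pending alarm once done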
diff --git a/tools/run_tests/run_performance_tests.py b/tools/run_tests/run_performance_tests.py
index 9b20fae78f..1bbab9e894 100755
--- a/tools/run_tests/run_performance_tests.py
+++ b/tools/run_tests/run_performance_tests.py
@@ -183,7 +183,7 @@ def archive_repo(languages):
jobset.message('START', 'Archiving local repository.', do_newline=True)
num_failures, _ = jobset.run(
- [archive_job], newline_on_success=True, maxjobs=1, clear_alarms=False)
+ [archive_job], newline_on_success=True, maxjobs=1)
if num_failures == 0:
jobset.message('SUCCESS',
'Archive with local repository created successfully.',
@@ -215,7 +215,7 @@ def prepare_remote_hosts(hosts, prepare_local=False):
timeout_seconds=prepare_timeout))
jobset.message('START', 'Preparing hosts.', do_newline=True)
num_failures, _ = jobset.run(
- prepare_jobs, newline_on_success=True, maxjobs=10, clear_alarms=False)
+ prepare_jobs, newline_on_success=True, maxjobs=10)
if num_failures == 0:
jobset.message('SUCCESS',
'Prepare step completed successfully.',
@@ -248,7 +248,7 @@ def build_on_remote_hosts(hosts, languages=scenario_config.LANGUAGES.keys(), bui
timeout_seconds=build_timeout))
jobset.message('START', 'Building.', do_newline=True)
num_failures, _ = jobset.run(
- build_jobs, newline_on_success=True, maxjobs=10, clear_alarms=False)
+ build_jobs, newline_on_success=True, maxjobs=10)
if num_failures == 0:
jobset.message('SUCCESS',
'Built successfully.',
@@ -414,7 +414,7 @@ def run_collect_perf_profile_jobs(hosts_and_base_names, scenario_name, flame_gra
perf_report_jobs.append(perf_report_processor_job(host, perf_base_name, output_filename, flame_graph_reports))
jobset.message('START', 'Collecting perf reports from qps workers', do_newline=True)
- failures, _ = jobset.run(perf_report_jobs, newline_on_success=True, maxjobs=1, clear_alarms=False)
+ failures, _ = jobset.run(perf_report_jobs, newline_on_success=True, maxjobs=1)
jobset.message('END', 'Collecting perf reports from qps workers', do_newline=True)
return failures
@@ -556,7 +556,7 @@ def main():
jobs = [scenario.jobspec]
if scenario.workers:
jobs.append(create_quit_jobspec(scenario.workers, remote_host=args.remote_driver_host))
- scenario_failures, resultset = jobset.run(jobs, newline_on_success=True, maxjobs=1, clear_alarms=False)
+ scenario_failures, resultset = jobset.run(jobs, newline_on_success=True, maxjobs=1)
total_scenario_failures += scenario_failures
merged_resultset = dict(itertools.chain(six.iteritems(merged_resultset),
six.iteritems(resultset)))
diff --git a/tools/run_tests/run_tests.py b/tools/run_tests/run_tests.py
index b66c5f7f71..591a0bebbd 100755
--- a/tools/run_tests/run_tests.py
+++ b/tools/run_tests/run_tests.py
@@ -80,7 +80,7 @@ def get_bqtest_data(limit=None):
SELECT
filtered_test_name,
SUM(result != 'PASSED' AND result != 'SKIPPED') > 0 as flaky,
- MAX(cpu_measured) as cpu
+ MAX(cpu_measured) + 0.01 as cpu
FROM (
SELECT
REGEXP_REPLACE(test_name, r'/\d+', '') AS filtered_test_name,
@@ -92,9 +92,7 @@ SELECT
AND platform = '"""+platform_string()+"""'
AND NOT REGEXP_MATCH(job_name, '.*portability.*') )
GROUP BY
- filtered_test_name
-HAVING
- flaky OR cpu > 0"""
+ filtered_test_name"""
if limit:
query += " limit {}".format(limit)
query_job = big_query_utils.sync_query_job(bq, 'grpc-testing', query)
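
# [Editor's note, not part of the commit] The query change above reports
# MAX(cpu_measured) + 0.01 and drops the HAVING filter, so every known test gets
# a row and a strictly positive cpu figure. A hedged sketch of how such rows
# could feed per-test settings, assuming each row exposes
# (filtered_test_name, flaky, cpu); the helper and row layout are hypothetical.
#
#     def build_overrides(rows):
#         flaky_tests = set()
#         cpu_cost = {}
#         for name, flaky, cpu in rows:
#             if flaky:
#                 flaky_tests.add(name)
#             cpu_cost[name] = cpu   # always > 0 thanks to the +0.01
#         return flaky_tests, cpu_cost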