Diffstat (limited to 'tools')
-rw-r--r--  tools/distrib/python/grpcio_tools/grpc_tools/protobuf_generated_well_known_types_embed.cc | 328
-rw-r--r--  tools/distrib/python/grpcio_tools/setup.py | 24
-rw-r--r--  tools/dockerfile/OWNERS | 2
-rw-r--r--  tools/dockerfile/distribtest/csharp_centos7_x64/Dockerfile | 3
-rw-r--r--  tools/dockerfile/distribtest/csharp_jessie_x64/Dockerfile | 4
-rw-r--r--  tools/dockerfile/distribtest/csharp_jessie_x86/Dockerfile | 4
-rw-r--r--  tools/dockerfile/distribtest/csharp_ubuntu1404_x64/Dockerfile | 4
-rw-r--r--  tools/dockerfile/distribtest/csharp_ubuntu1604_x64/Dockerfile | 4
-rw-r--r--  tools/doxygen/Doxyfile.c++ | 1
-rw-r--r--  tools/doxygen/Doxyfile.c++.internal | 2
-rw-r--r--  tools/doxygen/Doxyfile.core | 1
-rw-r--r--  tools/doxygen/Doxyfile.core.internal | 8
-rw-r--r--  tools/internal_ci/helper_scripts/prepare_build_macos_interop_rc | 11
-rwxr-xr-x  tools/internal_ci/linux/grpc_full_performance_master.sh | 2
-rwxr-xr-x  tools/internal_ci/macos/grpc_interop_toprod.sh | 2
-rw-r--r--  tools/interop_matrix/client_matrix.py | 10
-rw-r--r--  tools/package_hosting/build-201807.xsl | 116
-rw-r--r--  tools/run_tests/artifacts/artifact_targets.py | 15
-rwxr-xr-x  tools/run_tests/dockerize/build_and_run_docker.sh | 4
-rw-r--r--  tools/run_tests/generated/sources_and_headers.json | 28
-rw-r--r--  tools/run_tests/generated/tests.json | 26
-rw-r--r--  tools/run_tests/performance/OWNERS | 2
-rwxr-xr-x  tools/run_tests/performance/bq_upload_result.py | 14
-rwxr-xr-x  tools/run_tests/performance/build_performance.sh | 3
-rwxr-xr-x  tools/run_tests/performance/build_performance_node.sh | 28
-rwxr-xr-x  tools/run_tests/performance/run_worker_node.sh | 32
-rw-r--r--  tools/run_tests/performance/scenario_config.py | 102
-rw-r--r--  tools/run_tests/python_utils/upload_test_results.py | 14
-rwxr-xr-x  tools/run_tests/run_interop_tests.py | 80
-rwxr-xr-x  tools/run_tests/run_performance_tests.py | 14
-rwxr-xr-x  tools/run_tests/sanity/check_bad_dependencies.sh (renamed from tools/run_tests/sanity/check_unsecure.sh) | 4
-rw-r--r--  tools/run_tests/sanity/sanity_tests.yaml | 2
32 files changed, 416 insertions(+), 478 deletions(-)
diff --git a/tools/distrib/python/grpcio_tools/grpc_tools/protobuf_generated_well_known_types_embed.cc b/tools/distrib/python/grpcio_tools/grpc_tools/protobuf_generated_well_known_types_embed.cc
deleted file mode 100644
index ba93621e4f..0000000000
--- a/tools/distrib/python/grpcio_tools/grpc_tools/protobuf_generated_well_known_types_embed.cc
+++ /dev/null
@@ -1,328 +0,0 @@
-// Copyright 2017 gRPC authors.
-//
-// Licensed under the Apache License, Version 2.0 (the "License");
-// you may not use this file except in compliance with the License.
-// You may obtain a copy of the License at
-//
-// http://www.apache.org/licenses/LICENSE-2.0
-//
-// Unless required by applicable law or agreed to in writing, software
-// distributed under the License is distributed on an "AS IS" BASIS,
-// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
-// See the License for the specific language governing permissions and
-// limitations under the License.
-
-// HACK: Embed the generated well_known_types_js.cc to make
-// grpc-tools python package compilation easy.
-#include <google/protobuf/compiler/js/well_known_types_embed.h>
-struct FileToc well_known_types_js[] = {
-{"any.js",
- "// Protocol Buffers - Google's data interchange format\n"
- "// Copyright 2008 Google Inc. All rights reserved.\n"
- "// https://developers.google.com/protocol-buffers/\n"
- "//\n"
- "// Redistribution and use in source and binary forms, with or without\n"
- "// modification, are permitted provided that the following conditions are\n"
- "// met:\n"
- "//\n"
- "// * Redistributions of source code must retain the above copyright\n"
- "// notice, this list of conditions and the following disclaimer.\n"
- "// * Redistributions in binary form must reproduce the above\n"
- "// copyright notice, this list of conditions and the following disclaimer\n"
- "// in the documentation and/or other materials provided with the\n"
- "// distribution.\n"
- "// * Neither the name of Google Inc. nor the names of its\n"
- "// contributors may be used to endorse or promote products derived from\n"
- "// this software without specific prior written permission.\n"
- "//\n"
- "// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n"
- "// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n"
- "// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n"
- "// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n"
- "// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n"
- "// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n"
- "// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n"
- "// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n"
- "// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n"
- "// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n"
- "// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n"
- "\n"
- "/* This code will be inserted into generated code for\n"
- " * google/protobuf/any.proto. */\n"
- "\n"
- "/**\n"
- " * Returns the type name contained in this instance, if any.\n"
- " * @return {string|undefined}\n"
- " */\n"
- "proto.google.protobuf.Any.prototype.getTypeName = function() {\n"
- " return this.getTypeUrl().split('/').pop();\n"
- "};\n"
- "\n"
- "\n"
- "/**\n"
- " * Packs the given message instance into this Any.\n"
- " * @param {!Uint8Array} serialized The serialized data to pack.\n"
- " * @param {string} name The type name of this message object.\n"
- " * @param {string=} opt_typeUrlPrefix the type URL prefix.\n"
- " */\n"
- "proto.google.protobuf.Any.prototype.pack = function(serialized, name,\n"
- " opt_typeUrlPrefix) {\n"
- " if (!opt_typeUrlPrefix) {\n"
- " opt_typeUrlPrefix = 'type.googleapis.com/';\n"
- " }\n"
- "\n"
- " if (opt_typeUrlPrefix.substr(-1) != '/') {\n"
- " this.setTypeUrl(opt_typeUrlPrefix + '/' + name);\n"
- " } else {\n"
- " this.setTypeUrl(opt_typeUrlPrefix + name);\n"
- " }\n"
- "\n"
- " this.setValue(serialized);\n"
- "};\n"
- "\n"
- "\n"
- "/**\n"
- " * @template T\n"
- " * Unpacks this Any into the given message object.\n"
- " * @param {function(Uint8Array):T} deserialize Function that will deserialize\n"
- " * the binary data properly.\n"
- " * @param {string} name The expected type name of this message object.\n"
- " * @return {?T} If the name matched the expected name, returns the deserialized\n"
- " * object, otherwise returns undefined.\n"
- " */\n"
- "proto.google.protobuf.Any.prototype.unpack = function(deserialize, name) {\n"
- " if (this.getTypeName() == name) {\n"
- " return deserialize(this.getValue_asU8());\n"
- " } else {\n"
- " return null;\n"
- " }\n"
- "};\n"
-},
-{"struct.js",
- "// Protocol Buffers - Google's data interchange format\n"
- "// Copyright 2008 Google Inc. All rights reserved.\n"
- "// https://developers.google.com/protocol-buffers/\n"
- "//\n"
- "// Redistribution and use in source and binary forms, with or without\n"
- "// modification, are permitted provided that the following conditions are\n"
- "// met:\n"
- "//\n"
- "// * Redistributions of source code must retain the above copyright\n"
- "// notice, this list of conditions and the following disclaimer.\n"
- "// * Redistributions in binary form must reproduce the above\n"
- "// copyright notice, this list of conditions and the following disclaimer\n"
- "// in the documentation and/or other materials provided with the\n"
- "// distribution.\n"
- "// * Neither the name of Google Inc. nor the names of its\n"
- "// contributors may be used to endorse or promote products derived from\n"
- "// this software without specific prior written permission.\n"
- "//\n"
- "// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n"
- "// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n"
- "// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n"
- "// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n"
- "// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n"
- "// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n"
- "// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n"
- "// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n"
- "// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n"
- "// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n"
- "// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n"
- "\n"
- "/* This code will be inserted into generated code for\n"
- " * google/protobuf/struct.proto. */\n"
- "\n"
- "/**\n"
- " * Typedef representing plain JavaScript values that can go into a\n"
- " * Struct.\n"
- " * @typedef {null|number|string|boolean|Array|Object}\n"
- " */\n"
- "proto.google.protobuf.JavaScriptValue;\n"
- "\n"
- "\n"
- "/**\n"
- " * Converts this Value object to a plain JavaScript value.\n"
- " * @return {?proto.google.protobuf.JavaScriptValue} a plain JavaScript\n"
- " * value representing this Struct.\n"
- " */\n"
- "proto.google.protobuf.Value.prototype.toJavaScript = function() {\n"
- " var kindCase = proto.google.protobuf.Value.KindCase;\n"
- " switch (this.getKindCase()) {\n"
- " case kindCase.NULL_VALUE:\n"
- " return null;\n"
- " case kindCase.NUMBER_VALUE:\n"
- " return this.getNumberValue();\n"
- " case kindCase.STRING_VALUE:\n"
- " return this.getStringValue();\n"
- " case kindCase.BOOL_VALUE:\n"
- " return this.getBoolValue();\n"
- " case kindCase.STRUCT_VALUE:\n"
- " return this.getStructValue().toJavaScript();\n"
- " case kindCase.LIST_VALUE:\n"
- " return this.getListValue().toJavaScript();\n"
- " default:\n"
- " throw new Error('Unexpected struct type');\n"
- " }\n"
- "};\n"
- "\n"
- "\n"
- "/**\n"
- " * Converts this JavaScript value to a new Value proto.\n"
- " * @param {!proto.google.protobuf.JavaScriptValue} value The value to\n"
- " * convert.\n"
- " * @return {!proto.google.protobuf.Value} The newly constructed value.\n"
- " */\n"
- "proto.google.protobuf.Value.fromJavaScript = function(value) {\n"
- " var ret = new proto.google.protobuf.Value();\n"
- " switch (goog.typeOf(value)) {\n"
- " case 'string':\n"
- " ret.setStringValue(/** @type {string} */ (value));\n"
- " break;\n"
- " case 'number':\n"
- " ret.setNumberValue(/** @type {number} */ (value));\n"
- " break;\n"
- " case 'boolean':\n"
- " ret.setBoolValue(/** @type {boolean} */ (value));\n"
- " break;\n"
- " case 'null':\n"
- " ret.setNullValue(proto.google.protobuf.NullValue.NULL_VALUE);\n"
- " break;\n"
- " case 'array':\n"
- " ret.setListValue(proto.google.protobuf.ListValue.fromJavaScript(\n"
- " /** @type{!Array} */ (value)));\n"
- " break;\n"
- " case 'object':\n"
- " ret.setStructValue(proto.google.protobuf.Struct.fromJavaScript(\n"
- " /** @type{!Object} */ (value)));\n"
- " break;\n"
- " default:\n"
- " throw new Error('Unexpected struct type.');\n"
- " }\n"
- "\n"
- " return ret;\n"
- "};\n"
- "\n"
- "\n"
- "/**\n"
- " * Converts this ListValue object to a plain JavaScript array.\n"
- " * @return {!Array} a plain JavaScript array representing this List.\n"
- " */\n"
- "proto.google.protobuf.ListValue.prototype.toJavaScript = function() {\n"
- " var ret = [];\n"
- " var values = this.getValuesList();\n"
- "\n"
- " for (var i = 0; i < values.length; i++) {\n"
- " ret[i] = values[i].toJavaScript();\n"
- " }\n"
- "\n"
- " return ret;\n"
- "};\n"
- "\n"
- "\n"
- "/**\n"
- " * Constructs a ListValue protobuf from this plain JavaScript array.\n"
- " * @param {!Array} array a plain JavaScript array\n"
- " * @return {proto.google.protobuf.ListValue} a new ListValue object\n"
- " */\n"
- "proto.google.protobuf.ListValue.fromJavaScript = function(array) {\n"
- " var ret = new proto.google.protobuf.ListValue();\n"
- "\n"
- " for (var i = 0; i < array.length; i++) {\n"
- " ret.addValues(proto.google.protobuf.Value.fromJavaScript(array[i]));\n"
- " }\n"
- "\n"
- " return ret;\n"
- "};\n"
- "\n"
- "\n"
- "/**\n"
- " * Converts this Struct object to a plain JavaScript object.\n"
- " * @return {!Object<string, !proto.google.protobuf.JavaScriptValue>} a plain\n"
- " * JavaScript object representing this Struct.\n"
- " */\n"
- "proto.google.protobuf.Struct.prototype.toJavaScript = function() {\n"
- " var ret = {};\n"
- "\n"
- " this.getFieldsMap().forEach(function(value, key) {\n"
- " ret[key] = value.toJavaScript();\n"
- " });\n"
- "\n"
- " return ret;\n"
- "};\n"
- "\n"
- "\n"
- "/**\n"
- " * Constructs a Struct protobuf from this plain JavaScript object.\n"
- " * @param {!Object} obj a plain JavaScript object\n"
- " * @return {proto.google.protobuf.Struct} a new Struct object\n"
- " */\n"
- "proto.google.protobuf.Struct.fromJavaScript = function(obj) {\n"
- " var ret = new proto.google.protobuf.Struct();\n"
- " var map = ret.getFieldsMap();\n"
- "\n"
- " for (var property in obj) {\n"
- " var val = obj[property];\n"
- " map.set(property, proto.google.protobuf.Value.fromJavaScript(val));\n"
- " }\n"
- "\n"
- " return ret;\n"
- "};\n"
-},
-{"timestamp.js",
- "// Protocol Buffers - Google's data interchange format\n"
- "// Copyright 2008 Google Inc. All rights reserved.\n"
- "// https://developers.google.com/protocol-buffers/\n"
- "//\n"
- "// Redistribution and use in source and binary forms, with or without\n"
- "// modification, are permitted provided that the following conditions are\n"
- "// met:\n"
- "//\n"
- "// * Redistributions of source code must retain the above copyright\n"
- "// notice, this list of conditions and the following disclaimer.\n"
- "// * Redistributions in binary form must reproduce the above\n"
- "// copyright notice, this list of conditions and the following disclaimer\n"
- "// in the documentation and/or other materials provided with the\n"
- "// distribution.\n"
- "// * Neither the name of Google Inc. nor the names of its\n"
- "// contributors may be used to endorse or promote products derived from\n"
- "// this software without specific prior written permission.\n"
- "//\n"
- "// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n"
- "// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n"
- "// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n"
- "// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n"
- "// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n"
- "// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n"
- "// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n"
- "// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n"
- "// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n"
- "// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n"
- "// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n"
- "\n"
- "/* This code will be inserted into generated code for\n"
- " * google/protobuf/timestamp.proto. */\n"
- "\n"
- "/**\n"
- " * Returns a JavaScript 'Date' object corresponding to this Timestamp.\n"
- " * @return {!Date}\n"
- " */\n"
- "proto.google.protobuf.Timestamp.prototype.toDate = function() {\n"
- " var seconds = this.getSeconds();\n"
- " var nanos = this.getNanos();\n"
- "\n"
- " return new Date((seconds * 1000) + (nanos / 1000000));\n"
- "};\n"
- "\n"
- "\n"
- "/**\n"
- " * Sets the value of this Timestamp object to be the given Date.\n"
- " * @param {!Date} value The value to set.\n"
- " */\n"
- "proto.google.protobuf.Timestamp.prototype.fromDate = function(value) {\n"
- " var millis = value.getTime();\n"
- " this.setSeconds(Math.floor(value.getTime() / 1000));\n"
- " this.setNanos(value.getMilliseconds() * 1000000);\n"
- "};\n"
-},
- {NULL, NULL} // Terminate the list.
-};
diff --git a/tools/distrib/python/grpcio_tools/setup.py b/tools/distrib/python/grpcio_tools/setup.py
index c6bcee497f..c13dfe9ade 100644
--- a/tools/distrib/python/grpcio_tools/setup.py
+++ b/tools/distrib/python/grpcio_tools/setup.py
@@ -160,29 +160,7 @@ def extension_modules():
plugin_sources += [
os.path.join('grpc_tools', 'main.cc'),
os.path.join('grpc_root', 'src', 'compiler', 'python_generator.cc')
- ]
-
- #HACK: Substitute the embed.cc, which is a JS to C++
- # preprocessor with the generated code.
- # The generated code should not be material
- # to the parts of protoc we use (it affects
- # the JavaScript code generator, supposedly),
- # but we need to be cautious about it.
- cc_files_clone = list(CC_FILES)
- embed_cc_file = os.path.normpath('google/protobuf/compiler/js/embed.cc')
- well_known_types_file = os.path.normpath(
- 'google/protobuf/compiler/js/well_known_types_embed.cc')
- if embed_cc_file in cc_files_clone:
- cc_files_clone.remove(embed_cc_file)
- if well_known_types_file in cc_files_clone:
- cc_files_clone.remove(well_known_types_file)
- plugin_sources += [
- os.path.join('grpc_tools',
- 'protobuf_generated_well_known_types_embed.cc')
- ]
- plugin_sources += [
- os.path.join(CC_INCLUDE, cc_file) for cc_file in cc_files_clone
- ]
+ ] + [os.path.join(CC_INCLUDE, cc_file) for cc_file in CC_FILES]
plugin_ext = extension.Extension(
name='grpc_tools._protoc_compiler',
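The hunk above replaces the embed.cc substitution hack with a single list comprehension over CC_FILES. A minimal sketch of the resulting pattern, with hypothetical stand-ins for CC_INCLUDE and CC_FILES (in the real setup.py these come from a generated dependency list):

    import os

    # Hypothetical stand-ins for the values the real setup.py imports.
    CC_INCLUDE = os.path.join('third_party', 'protobuf', 'src')
    CC_FILES = ['google/protobuf/any.cc', 'google/protobuf/descriptor.cc']

    plugin_sources = [
        os.path.join('grpc_tools', 'main.cc'),
        os.path.join('grpc_root', 'src', 'compiler', 'python_generator.cc'),
    ]
    # Every protobuf source is now compiled as-is; nothing is filtered out and
    # no pre-generated well-known-types embed file is swapped in.
    plugin_sources += [os.path.join(CC_INCLUDE, cc_file) for cc_file in CC_FILES]
    print(plugin_sources)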
diff --git a/tools/dockerfile/OWNERS b/tools/dockerfile/OWNERS
index db4ab546a6..5f0ad58d61 100644
--- a/tools/dockerfile/OWNERS
+++ b/tools/dockerfile/OWNERS
@@ -7,5 +7,5 @@ set noparent
# for kokoro to be able to access the pre-built images.
@jtattermusch
-@matt-kwong
+@mehrdada
@nicolasnoble
diff --git a/tools/dockerfile/distribtest/csharp_centos7_x64/Dockerfile b/tools/dockerfile/distribtest/csharp_centos7_x64/Dockerfile
index 3e1faafdc0..e32b3cb5e2 100644
--- a/tools/dockerfile/distribtest/csharp_centos7_x64/Dockerfile
+++ b/tools/dockerfile/distribtest/csharp_centos7_x64/Dockerfile
@@ -22,3 +22,6 @@ RUN yum install -y mono-devel
RUN yum install -y nuget
RUN yum install -y unzip
+
+# Make sure the mono certificate store is up-to-date to prevent issues with nuget restore
+RUN curl https://curl.haxx.se/ca/cacert.pem > ~/cacert.pem && cert-sync ~/cacert.pem && rm -f ~/cacert.pem
diff --git a/tools/dockerfile/distribtest/csharp_jessie_x64/Dockerfile b/tools/dockerfile/distribtest/csharp_jessie_x64/Dockerfile
index 03fb7a5343..e95d781dfd 100644
--- a/tools/dockerfile/distribtest/csharp_jessie_x64/Dockerfile
+++ b/tools/dockerfile/distribtest/csharp_jessie_x64/Dockerfile
@@ -25,3 +25,7 @@ RUN apt-get update && apt-get install -y \
&& apt-get clean
RUN apt-get update && apt-get install -y unzip && apt-get clean
+
+# Make sure the mono certificate store is up-to-date to prevent issues with nuget restore
+RUN apt-get update && apt-get install -y curl && apt-get clean
+RUN curl https://curl.haxx.se/ca/cacert.pem > ~/cacert.pem && cert-sync ~/cacert.pem && rm -f ~/cacert.pem
diff --git a/tools/dockerfile/distribtest/csharp_jessie_x86/Dockerfile b/tools/dockerfile/distribtest/csharp_jessie_x86/Dockerfile
index f2fa61a691..aec936a5b8 100644
--- a/tools/dockerfile/distribtest/csharp_jessie_x86/Dockerfile
+++ b/tools/dockerfile/distribtest/csharp_jessie_x86/Dockerfile
@@ -25,3 +25,7 @@ RUN apt-get update && apt-get install -y \
&& apt-get clean
RUN apt-get update && apt-get install -y unzip && apt-get clean
+
+# Make sure the mono certificate store is up-to-date to prevent issues with nuget restore
+RUN apt-get update && apt-get install -y curl && apt-get clean
+RUN curl https://curl.haxx.se/ca/cacert.pem > ~/cacert.pem && cert-sync ~/cacert.pem && rm -f ~/cacert.pem
diff --git a/tools/dockerfile/distribtest/csharp_ubuntu1404_x64/Dockerfile b/tools/dockerfile/distribtest/csharp_ubuntu1404_x64/Dockerfile
index 3edc31e170..61ca1a08a4 100644
--- a/tools/dockerfile/distribtest/csharp_ubuntu1404_x64/Dockerfile
+++ b/tools/dockerfile/distribtest/csharp_ubuntu1404_x64/Dockerfile
@@ -38,3 +38,7 @@ RUN mkdir warmup \
&& dotnet new \
&& cd .. \
&& rm -rf warmup
+
+# Make sure the mono certificate store is up-to-date to prevent issues with nuget restore
+RUN apt-get update && apt-get install -y curl && apt-get clean
+RUN curl https://curl.haxx.se/ca/cacert.pem > ~/cacert.pem && cert-sync ~/cacert.pem && rm -f ~/cacert.pem
diff --git a/tools/dockerfile/distribtest/csharp_ubuntu1604_x64/Dockerfile b/tools/dockerfile/distribtest/csharp_ubuntu1604_x64/Dockerfile
index 1a58f9784b..93ee75cfcd 100644
--- a/tools/dockerfile/distribtest/csharp_ubuntu1604_x64/Dockerfile
+++ b/tools/dockerfile/distribtest/csharp_ubuntu1604_x64/Dockerfile
@@ -25,3 +25,7 @@ RUN apt-get update && apt-get install -y \
&& apt-get clean
RUN apt-get update && apt-get install -y unzip && apt-get clean
+
+# Make sure the mono certificate store is up-to-date to prevent issues with nuget restore
+RUN apt-get update && apt-get install -y curl && apt-get clean
+RUN curl https://curl.haxx.se/ca/cacert.pem > ~/cacert.pem && cert-sync ~/cacert.pem && rm -f ~/cacert.pem
diff --git a/tools/doxygen/Doxyfile.c++ b/tools/doxygen/Doxyfile.c++
index 2f06bda016..688c271fea 100644
--- a/tools/doxygen/Doxyfile.c++
+++ b/tools/doxygen/Doxyfile.c++
@@ -791,6 +791,7 @@ doc/server-reflection.md \
doc/server_reflection_tutorial.md \
doc/server_side_auth.md \
doc/service_config.md \
+doc/ssl-performance.md \
doc/status_ordering.md \
doc/statuscodes.md \
doc/unit_testing.md \
diff --git a/tools/doxygen/Doxyfile.c++.internal b/tools/doxygen/Doxyfile.c++.internal
index a46ebe6197..43ebf8cad9 100644
--- a/tools/doxygen/Doxyfile.c++.internal
+++ b/tools/doxygen/Doxyfile.c++.internal
@@ -791,6 +791,7 @@ doc/server-reflection.md \
doc/server_reflection_tutorial.md \
doc/server_side_auth.md \
doc/service_config.md \
+doc/ssl-performance.md \
doc/status_ordering.md \
doc/statuscodes.md \
doc/unit_testing.md \
@@ -1057,6 +1058,7 @@ src/core/lib/gprpp/fork.h \
src/core/lib/gprpp/inlined_vector.h \
src/core/lib/gprpp/manual_constructor.h \
src/core/lib/gprpp/memory.h \
+src/core/lib/gprpp/mutex_lock.h \
src/core/lib/gprpp/orphanable.h \
src/core/lib/gprpp/ref_counted.h \
src/core/lib/gprpp/ref_counted_ptr.h \
diff --git a/tools/doxygen/Doxyfile.core b/tools/doxygen/Doxyfile.core
index 4899eee3ea..aa75bc6828 100644
--- a/tools/doxygen/Doxyfile.core
+++ b/tools/doxygen/Doxyfile.core
@@ -793,6 +793,7 @@ doc/server-reflection.md \
doc/server_reflection_tutorial.md \
doc/server_side_auth.md \
doc/service_config.md \
+doc/ssl-performance.md \
doc/status_ordering.md \
doc/statuscodes.md \
doc/unit_testing.md \
diff --git a/tools/doxygen/Doxyfile.core.internal b/tools/doxygen/Doxyfile.core.internal
index a2b7e4fce1..c1706fd070 100644
--- a/tools/doxygen/Doxyfile.core.internal
+++ b/tools/doxygen/Doxyfile.core.internal
@@ -793,6 +793,7 @@ doc/server-reflection.md \
doc/server_reflection_tutorial.md \
doc/server_side_auth.md \
doc/service_config.md \
+doc/ssl-performance.md \
doc/status_ordering.md \
doc/statuscodes.md \
doc/unit_testing.md \
@@ -867,6 +868,7 @@ include/grpc/support/time.h \
include/grpc/support/workaround_list.h \
src/core/README.md \
src/core/ext/README.md \
+src/core/ext/filters/census/grpc_context.cc \
src/core/ext/filters/client_channel/README.md \
src/core/ext/filters/client_channel/backup_poller.cc \
src/core/ext/filters/client_channel/backup_poller.h \
@@ -1141,6 +1143,7 @@ src/core/lib/gprpp/fork.h \
src/core/lib/gprpp/inlined_vector.h \
src/core/lib/gprpp/manual_constructor.h \
src/core/lib/gprpp/memory.h \
+src/core/lib/gprpp/mutex_lock.h \
src/core/lib/gprpp/orphanable.h \
src/core/lib/gprpp/ref_counted.h \
src/core/lib/gprpp/ref_counted_ptr.h \
@@ -1355,6 +1358,10 @@ src/core/lib/security/credentials/ssl/ssl_credentials.cc \
src/core/lib/security/credentials/ssl/ssl_credentials.h \
src/core/lib/security/security_connector/alts_security_connector.cc \
src/core/lib/security/security_connector/alts_security_connector.h \
+src/core/lib/security/security_connector/load_system_roots.h \
+src/core/lib/security/security_connector/load_system_roots_fallback.cc \
+src/core/lib/security/security_connector/load_system_roots_linux.cc \
+src/core/lib/security/security_connector/load_system_roots_linux.h \
src/core/lib/security/security_connector/local_security_connector.cc \
src/core/lib/security/security_connector/local_security_connector.h \
src/core/lib/security/security_connector/security_connector.cc \
@@ -1517,7 +1524,6 @@ src/core/tsi/transport_security.h \
src/core/tsi/transport_security_grpc.cc \
src/core/tsi/transport_security_grpc.h \
src/core/tsi/transport_security_interface.h \
-src/cpp/ext/filters/census/grpc_context.cc \
third_party/nanopb/pb.h \
third_party/nanopb/pb_common.c \
third_party/nanopb/pb_common.h \
diff --git a/tools/internal_ci/helper_scripts/prepare_build_macos_interop_rc b/tools/internal_ci/helper_scripts/prepare_build_macos_interop_rc
index b0feeef363..43bc9609c7 100644
--- a/tools/internal_ci/helper_scripts/prepare_build_macos_interop_rc
+++ b/tools/internal_ci/helper_scripts/prepare_build_macos_interop_rc
@@ -17,17 +17,6 @@
# builds. This rc script must be used in the root directory of gRPC
# and is expected to be used before prepare_build_macos_rc
-export CONFIG=opt
-
-# Move gRPC repo to directory that Docker for Mac has drive access to
-mkdir /Users/kbuilder/workspace
-cp -R ./ /Users/kbuilder/workspace/grpc
-cd /Users/kbuilder/workspace/grpc
-
-# Needed for identifying Docker image sha1
-brew update
-brew install md5sha1sum
-
# Set up gRPC-Go and gRPC-Java to test
git clone --recursive https://github.com/grpc/grpc-go ./../grpc-go
git clone --recursive https://github.com/grpc/grpc-java ./../grpc-java
diff --git a/tools/internal_ci/linux/grpc_full_performance_master.sh b/tools/internal_ci/linux/grpc_full_performance_master.sh
index 4eddc18731..24ee71edd1 100755
--- a/tools/internal_ci/linux/grpc_full_performance_master.sh
+++ b/tools/internal_ci/linux/grpc_full_performance_master.sh
@@ -21,7 +21,7 @@ source tools/internal_ci/helper_scripts/prepare_build_linux_perf_multilang_rc
# run 8core client vs 8core server
tools/run_tests/run_performance_tests.py \
- -l c++ csharp ruby java python go php7 php7_protobuf_c \
+ -l c++ csharp ruby java python go php7 php7_protobuf_c node node_purejs \
--netperf \
--category scalable \
--remote_worker_host grpc-kokoro-performance-server-8core grpc-kokoro-performance-client-8core grpc-kokoro-performance-client2-8core \
diff --git a/tools/internal_ci/macos/grpc_interop_toprod.sh b/tools/internal_ci/macos/grpc_interop_toprod.sh
index 5ddabb9bf9..e748a62e76 100755
--- a/tools/internal_ci/macos/grpc_interop_toprod.sh
+++ b/tools/internal_ci/macos/grpc_interop_toprod.sh
@@ -18,8 +18,8 @@ set -ex
# change to grpc repo root
cd $(dirname $0)/../../..
-source tools/internal_ci/helper_scripts/prepare_build_macos_interop_rc
source tools/internal_ci/helper_scripts/prepare_build_macos_rc
+source tools/internal_ci/helper_scripts/prepare_build_macos_interop_rc
# using run_interop_tests.py without --use_docker, so we need to build first
tools/run_tests/run_tests.py -l c++ -c opt --build_only
diff --git a/tools/interop_matrix/client_matrix.py b/tools/interop_matrix/client_matrix.py
index dff33c88c0..bb9222d953 100644
--- a/tools/interop_matrix/client_matrix.py
+++ b/tools/interop_matrix/client_matrix.py
@@ -97,7 +97,7 @@ LANG_RELEASE_MATRIX = {
'v1.13.0': None
},
{
- 'v1.14.0': None
+ 'v1.14.1': None
},
],
'go': [
@@ -226,7 +226,7 @@ LANG_RELEASE_MATRIX = {
'v1.13.0': None
},
{
- 'v1.14.0': None
+ 'v1.14.1': None
},
],
'node': [
@@ -314,7 +314,7 @@ LANG_RELEASE_MATRIX = {
'v1.13.0': None
},
{
- 'v1.14.0': None
+ 'v1.14.1': None
},
],
'php': [
@@ -358,7 +358,7 @@ LANG_RELEASE_MATRIX = {
'v1.13.0': None
},
{
- 'v1.14.0': None
+ 'v1.14.1': None
},
],
'csharp': [
@@ -407,7 +407,7 @@ LANG_RELEASE_MATRIX = {
'v1.13.0': None
},
{
- 'v1.14.0': None
+ 'v1.14.1': None
},
],
}
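The client_matrix.py hunks bump each language's newest release entry from v1.14.0 to v1.14.1. A small sketch of querying such a matrix, assuming the list-of-single-key-dicts shape visible in the hunks (the full matrix covers many more releases):

    # Assumed shape, mirroring the fragments shown in the diff.
    LANG_RELEASE_MATRIX = {
        'python': [{'v1.13.0': None}, {'v1.14.1': None}],
        'csharp': [{'v1.13.0': None}, {'v1.14.1': None}],
    }

    def latest_release(lang):
        # Each entry is a single-key dict mapping a release tag to optional
        # per-release configuration (None when there is nothing special).
        return list(LANG_RELEASE_MATRIX[lang][-1].keys())[0]

    print(latest_release('python'))  # v1.14.1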
diff --git a/tools/package_hosting/build-201807.xsl b/tools/package_hosting/build-201807.xsl
index 69a190446f..f7571d178e 100644
--- a/tools/package_hosting/build-201807.xsl
+++ b/tools/package_hosting/build-201807.xsl
@@ -1,75 +1,74 @@
-<?xml version="1.0"?>
+<?xml version="1.0"?>
<xsl:stylesheet version="1.0" xmlns:xsl="http://www.w3.org/1999/XSL/Transform">
<xsl:template match="//build">
- <html>
- <head>
- <title>Artifacts for gRPC Build <xsl:value-of select="@id"/> </title>
- <link rel="stylesheet" type="text/css" href="/web-assets/style.css" />
- <link rel="apple-touch-icon" href="/web-assets/favicons/apple-touch-icon.png" sizes="180x180" />
- <link rel="icon" type="image/png" href="/web-assets/favicons/android-chrome-192x192.png" sizes="192x192" />
- <link rel="icon" type="image/png" href="/web-assets/favicons/favicon-32x32.png" sizes="32x32" />
- <link rel="icon" type="image/png" href="/web-assets/favicons/favicon-16x16.png" sizes="16x16" />
- <link rel="manifest" href="/web-assets/favicons/manifest.json" />
- <link rel="mask-icon" href="/web-assets/favicons/safari-pinned-tab.svg" color="#2DA6B0" />
- <meta name="msapplication-TileColor" content="#ffffff" />
- <meta name="msapplication-TileImage" content="/web-assets/favicons/mstile-150x150.png" />
+<html>
+ <head>
+ <title>Artifacts for gRPC Build <xsl:value-of select="@id"/></title>
+ <link rel="stylesheet" type="text/css" href="/web-assets/style.css" />
+ <link rel="apple-touch-icon" href="/web-assets/favicons/apple-touch-icon.png" sizes="180x180" />
+ <link rel="icon" type="image/png" href="/web-assets/favicons/android-chrome-192x192.png" sizes="192x192" />
+ <link rel="icon" type="image/png" href="/web-assets/favicons/favicon-32x32.png" sizes="32x32" />
+ <link rel="icon" type="image/png" href="/web-assets/favicons/favicon-16x16.png" sizes="16x16" />
+ <link rel="manifest" href="/web-assets/favicons/manifest.json" />
+ <link rel="mask-icon" href="/web-assets/favicons/safari-pinned-tab.svg" color="#2DA6B0" />
+ <meta name="msapplication-TileColor" content="#ffffff" />
+ <meta name="msapplication-TileImage" content="/web-assets/favicons/mstile-150x150.png" />
+ <meta name="og:title" content="gRPC Package Build"/>
+ <meta name="og:image" content="https://grpc.io/img/grpc_square_reverse_4x.png"/>
+ <meta name="og:description" content="gRPC Package Build"/>
+ </head>
+ <body bgcolor="#ffffff">
+ <div id="topbar">
+ <span class="title">Artifacts for gRPC Build <xsl:value-of select="@id"/></span>
+ </div>
+ <div id="main">
+ <div id="metadata">
+ <span class="fieldname">Build: </span> <a href='#'><xsl:value-of select="@id"/></a>
+ [<a href="https://source.cloud.google.com/results/invocations/{@id}">invocation</a>]<br />
+ <span class="fieldname">Timestamp: </span>
+ <xsl:value-of select="@timestamp"/> <br />
+ <span class="fieldname">Branch: </span>
+ <a href="https://github.com/grpc/grpc/tree/{./metadata/branch[text()]}">
+ <xsl:value-of select="./metadata/branch[text()]" />
+ </a><br />
+ <span class="fieldname">Commit: </span>
+ <a href="https://github.com/grpc/grpc/tree/{./metadata/commit[text()]}">
+ <xsl:value-of select="./metadata/commit[text()]" /><br /></a>
+ </div>
+ <xsl:apply-templates select="artifacts" />
+ <br />
+ <br />
- <meta name="og:title" content="gRPC Package Build"/>
- <meta name="og:image" content="https://grpc.io/img/grpc_square_reverse_4x.png"/>
- <meta name="og:description" content="gRPC Package Build"/>
- </head>
- <body bgcolor="#ffffff">
- <div id="topbar">
- <span class="title">Artifacts for gRPC Build <xsl:value-of select="@id"/></span>
- </div>
- <div id="main">
- <div id="metadata">
- <span class="fieldname">Build: </span> <a href='#'><xsl:value-of select="@id"/></a>
- [<a href="https://source.cloud.google.com/results/invocations/{@id}">invocation</a>]<br />
- <span class="fieldname">Timestamp: </span>
- <xsl:value-of select="@timestamp"/> <br />
- <span class="fieldname">Branch: </span>
- <a href="https://github.com/grpc/grpc/tree/{./metadata/branch[text()]}">
- <xsl:value-of select="./metadata/branch[text()]" />
- </a><br />
- <span class="fieldname">Commit: </span>
- <a href="https://github.com/grpc/grpc/tree/{./metadata/commit[text()]}">
- <xsl:value-of select="./metadata/commit[text()]" /><br /></a>
- </div>
- <xsl:apply-templates select="artifacts" />
- <br />
- <br />
-
- <p class="description"><a href="https://grpc.io">gRPC</a> is a <a href="https://www.cncf.io" class="external">Cloud Native Computing Foundation</a> project. <a href="https://policies.google.com/privacy" class="external">Privacy Policy</a>.</p>
- <p class="description">
- Copyright &#169;&#160;<xsl:value-of select="substring(@timestamp, 1, 4)" />&#160;<a href="https://github.com/grpc/grpc/blob/{./metadata/commit[text()]}/AUTHORS">The gRPC Authors</a></p>
- <br />
- <br />
- </div>
- </body>
- </html>
+ <p class="description"><a href="https://grpc.io">gRPC</a> is a <a href="https://www.cncf.io" class="external">Cloud Native Computing Foundation</a> project. <a href="https://policies.google.com/privacy" class="external">Privacy Policy</a>.</p>
+ <p class="description">
+ Copyright &#169;&#160;<xsl:value-of select="substring(@timestamp, 1, 4)" />&#160;<a href="https://github.com/grpc/grpc/blob/{./metadata/commit[text()]}/AUTHORS">The gRPC Authors</a></p>
+ <br />
+ <br />
+ </div>
+ </body>
+</html>
</xsl:template>
<xsl:template match="artifacts">
<h2> gRPC <code>protoc</code> Plugins </h2>
<table>
-<xsl:apply-templates select="artifact[@type='protoc']">
- <xsl:sort select="artifact/@name" />
+ <xsl:apply-templates select="artifact[@type='protoc']">
+ <xsl:sort select="@name" />
</xsl:apply-templates>
</table>
<h2> C# </h2>
<table>
-<xsl:apply-templates select="artifact[@type='csharp']">
- <xsl:sort select="artifact/@name" />
+ <xsl:apply-templates select="artifact[@type='csharp']">
+ <xsl:sort select="@name" />
</xsl:apply-templates>
</table>
<h2> PHP </h2>
<table>
-<xsl:apply-templates select="artifact[@type='php']">
- <xsl:sort select="artifact/@name" />
+ <xsl:apply-templates select="artifact[@type='php']">
+ <xsl:sort select="@name" />
</xsl:apply-templates>
</table>
@@ -87,24 +86,23 @@ document.write("<p><code>" +
</script>
<table>
<xsl:apply-templates select="artifact[@type='python']">
- <xsl:sort select="artifact/@name" />
+ <xsl:sort select="@name" />
</xsl:apply-templates>
</table>
<h2> Ruby </h2>
<table>
-<xsl:apply-templates select="artifact[@type='ruby']">
- <xsl:sort select="artifact/@name" />
+ <xsl:apply-templates select="artifact[@type='ruby']">
+ <xsl:sort select="@name" />
</xsl:apply-templates>
</table>
-
</xsl:template>
<xsl:template match="artifact">
<tr>
-<td class="name"> <a href="{@path}"><xsl:value-of select="@name" /></a> </td>
-<td class="hash"> <xsl:value-of select="@sha256"/> </td>
+ <td class="name"><a href="{@path}"><xsl:value-of select="@name" /></a></td>
+ <td class="hash"><xsl:value-of select="@sha256"/></td>
</tr>
</xsl:template>
diff --git a/tools/run_tests/artifacts/artifact_targets.py b/tools/run_tests/artifacts/artifact_targets.py
index a37098d9bf..bdeb258e1f 100644
--- a/tools/run_tests/artifacts/artifact_targets.py
+++ b/tools/run_tests/artifacts/artifact_targets.py
@@ -290,15 +290,9 @@ class PHPArtifact:
return []
def build_jobspec(self):
- if self.platform == 'linux':
- return create_docker_jobspec(
- self.name, 'tools/dockerfile/grpc_artifact_linux_{}'.format(
- self.arch),
- 'tools/run_tests/artifacts/build_artifact_php.sh')
- else:
- return create_jobspec(
- self.name, ['tools/run_tests/artifacts/build_artifact_php.sh'],
- use_workspace=True)
+ return create_docker_jobspec(
+ self.name, 'tools/dockerfile/grpc_artifact_linux_{}'.format(
+ self.arch), 'tools/run_tests/artifacts/build_artifact_php.sh')
class ProtocArtifact:
@@ -400,6 +394,5 @@ def targets():
PythonArtifact('windows', 'x64', 'Python37'),
RubyArtifact('linux', 'x64'),
RubyArtifact('macos', 'x64'),
- PHPArtifact('linux', 'x64'),
- PHPArtifact('macos', 'x64')
+ PHPArtifact('linux', 'x64')
])
diff --git a/tools/run_tests/dockerize/build_and_run_docker.sh b/tools/run_tests/dockerize/build_and_run_docker.sh
index 4ef74085f9..3f01fbc7b7 100755
--- a/tools/run_tests/dockerize/build_and_run_docker.sh
+++ b/tools/run_tests/dockerize/build_and_run_docker.sh
@@ -73,6 +73,10 @@ docker run \
# Copy output artifacts
if [ "$OUTPUT_DIR" != "" ]
then
+ # Create the artifact directory in advance to avoid a race in "docker cp" if tasks
+ # that were running in parallel finish at the same time.
+ # see https://github.com/grpc/grpc/issues/16155
+ mkdir -p "$git_root/$OUTPUT_DIR"
docker cp "$CONTAINER_NAME:/var/local/git/grpc/$OUTPUT_DIR" "$git_root" || FAILED="true"
fi
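The mkdir -p added above makes artifact-directory creation idempotent, so parallel tasks that finish at the same time cannot race inside docker cp. A hedged sketch of the same pre-create-then-copy pattern in Python, with hypothetical paths and container name:

    import os
    import subprocess

    git_root = os.path.abspath('.')            # hypothetical checkout root
    output_dir = 'reports/artifact_output'     # hypothetical OUTPUT_DIR
    container_name = 'build_and_run_docker'    # hypothetical CONTAINER_NAME

    # exist_ok=True makes the call idempotent: two jobs creating the same
    # directory concurrently both succeed instead of one failing.
    os.makedirs(os.path.join(git_root, output_dir), exist_ok=True)

    subprocess.check_call([
        'docker', 'cp',
        '%s:/var/local/git/grpc/%s' % (container_name, output_dir),
        git_root,
    ])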
diff --git a/tools/run_tests/generated/sources_and_headers.json b/tools/run_tests/generated/sources_and_headers.json
index 2e1acc61f4..3cb3fe20da 100644
--- a/tools/run_tests/generated/sources_and_headers.json
+++ b/tools/run_tests/generated/sources_and_headers.json
@@ -3709,6 +3709,23 @@
},
{
"deps": [
+ "gpr",
+ "gpr_test_util",
+ "grpc",
+ "grpc_test_util"
+ ],
+ "headers": [],
+ "is_filegroup": false,
+ "language": "c++",
+ "name": "grpc_linux_system_roots_test",
+ "src": [
+ "test/core/security/linux_system_roots_test.cc"
+ ],
+ "third_party": false,
+ "type": "target"
+ },
+ {
+ "deps": [
"grpc_plugin_support"
],
"headers": [],
@@ -9171,7 +9188,7 @@
"name": "census",
"src": [
"include/grpc/census.h",
- "src/cpp/ext/filters/census/grpc_context.cc"
+ "src/core/ext/filters/census/grpc_context.cc"
],
"third_party": false,
"type": "filegroup"
@@ -9290,6 +9307,7 @@
"src/core/lib/gprpp/fork.h",
"src/core/lib/gprpp/manual_constructor.h",
"src/core/lib/gprpp/memory.h",
+ "src/core/lib/gprpp/mutex_lock.h",
"src/core/lib/gprpp/thd.h",
"src/core/lib/profiling/timers.h"
],
@@ -9337,6 +9355,7 @@
"src/core/lib/gprpp/fork.h",
"src/core/lib/gprpp/manual_constructor.h",
"src/core/lib/gprpp/memory.h",
+ "src/core/lib/gprpp/mutex_lock.h",
"src/core/lib/gprpp/thd.h",
"src/core/lib/profiling/timers.h"
],
@@ -9905,6 +9924,7 @@
"src/core/lib/iomgr/endpoint_cfstream.h",
"src/core/lib/iomgr/error_cfstream.cc",
"src/core/lib/iomgr/error_cfstream.h",
+ "src/core/lib/iomgr/iomgr_posix_cfstream.cc",
"src/core/lib/iomgr/tcp_client_cfstream.cc"
],
"third_party": false,
@@ -10360,6 +10380,8 @@
"src/core/lib/security/credentials/plugin/plugin_credentials.h",
"src/core/lib/security/credentials/ssl/ssl_credentials.h",
"src/core/lib/security/security_connector/alts_security_connector.h",
+ "src/core/lib/security/security_connector/load_system_roots.h",
+ "src/core/lib/security/security_connector/load_system_roots_linux.h",
"src/core/lib/security/security_connector/local_security_connector.h",
"src/core/lib/security/security_connector/security_connector.h",
"src/core/lib/security/transport/auth_filters.h",
@@ -10408,6 +10430,10 @@
"src/core/lib/security/credentials/ssl/ssl_credentials.h",
"src/core/lib/security/security_connector/alts_security_connector.cc",
"src/core/lib/security/security_connector/alts_security_connector.h",
+ "src/core/lib/security/security_connector/load_system_roots.h",
+ "src/core/lib/security/security_connector/load_system_roots_fallback.cc",
+ "src/core/lib/security/security_connector/load_system_roots_linux.cc",
+ "src/core/lib/security/security_connector/load_system_roots_linux.h",
"src/core/lib/security/security_connector/local_security_connector.cc",
"src/core/lib/security/security_connector/local_security_connector.h",
"src/core/lib/security/security_connector/security_connector.cc",
diff --git a/tools/run_tests/generated/tests.json b/tools/run_tests/generated/tests.json
index cf3b54e044..a51be28ad5 100644
--- a/tools/run_tests/generated/tests.json
+++ b/tools/run_tests/generated/tests.json
@@ -3483,7 +3483,7 @@
"mac",
"posix"
],
- "uses_polling": true
+ "uses_polling": false
},
{
"args": [],
@@ -4348,6 +4348,30 @@
"flaky": false,
"gtest": true,
"language": "c++",
+ "name": "grpc_linux_system_roots_test",
+ "platforms": [
+ "linux",
+ "mac",
+ "posix",
+ "windows"
+ ],
+ "uses_polling": true
+ },
+ {
+ "args": [],
+ "benchmark": false,
+ "ci_platforms": [
+ "linux",
+ "mac",
+ "posix",
+ "windows"
+ ],
+ "cpu_cost": 1.0,
+ "exclude_configs": [],
+ "exclude_iomgrs": [],
+ "flaky": false,
+ "gtest": true,
+ "language": "c++",
"name": "grpc_tool_test",
"platforms": [
"linux",
diff --git a/tools/run_tests/performance/OWNERS b/tools/run_tests/performance/OWNERS
index fc1d6eec9c..9cf8c13111 100644
--- a/tools/run_tests/performance/OWNERS
+++ b/tools/run_tests/performance/OWNERS
@@ -5,5 +5,5 @@ set noparent
# to update the BigQuery schema
@ncteisen
-@matt-kwong
+@apolcyn
@jtattermusch
diff --git a/tools/run_tests/performance/bq_upload_result.py b/tools/run_tests/performance/bq_upload_result.py
index 6702587557..b442f0cf83 100755
--- a/tools/run_tests/performance/bq_upload_result.py
+++ b/tools/run_tests/performance/bq_upload_result.py
@@ -128,14 +128,16 @@ def _flatten_result_inplace(scenario_result):
def _populate_metadata_inplace(scenario_result):
"""Populates metadata based on environment variables set by Jenkins."""
- # NOTE: Grabbing the Jenkins environment variables will only work if the
- # driver is running locally on the same machine where Jenkins has started
+ # NOTE: Grabbing the Kokoro environment variables will only work if the
+ # driver is running locally on the same machine where Kokoro has started
# the job. For our setup, this is currently the case, so just assume that.
- build_number = os.getenv('BUILD_NUMBER')
- build_url = os.getenv('BUILD_URL')
- job_name = os.getenv('JOB_NAME')
- git_commit = os.getenv('GIT_COMMIT')
+ build_number = os.getenv('KOKORO_BUILD_NUMBER')
+ build_url = 'https://source.cloud.google.com/results/invocations/%s' % os.getenv(
+ 'KOKORO_BUILD_ID')
+ job_name = os.getenv('KOKORO_JOB_NAME')
+ git_commit = os.getenv('KOKORO_GIT_COMMIT')
# actual commit is the actual head of PR that is getting tested
+ # TODO(jtattermusch): unclear how to obtain on Kokoro
git_actual_commit = os.getenv('ghprbActualCommit')
utc_timestamp = str(calendar.timegm(time.gmtime()))
diff --git a/tools/run_tests/performance/build_performance.sh b/tools/run_tests/performance/build_performance.sh
index 22e0ca9fa0..35d9e90598 100755
--- a/tools/run_tests/performance/build_performance.sh
+++ b/tools/run_tests/performance/build_performance.sh
@@ -55,6 +55,9 @@ do
"csharp")
python tools/run_tests/run_tests.py -l "$language" -c "$CONFIG" --build_only -j 8 --compiler coreclr
;;
+ "node"|"node_purejs")
+ tools/run_tests/performance/build_performance_node.sh
+ ;;
*)
python tools/run_tests/run_tests.py -l "$language" -c "$CONFIG" --build_only -j 8
;;
diff --git a/tools/run_tests/performance/build_performance_node.sh b/tools/run_tests/performance/build_performance_node.sh
new file mode 100755
index 0000000000..12e0872264
--- /dev/null
+++ b/tools/run_tests/performance/build_performance_node.sh
@@ -0,0 +1,28 @@
+#!/bin/bash
+# Copyright 2018 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+set +ex
+
+. "$HOME/.nvm/nvm.sh"
+
+nvm install 10
+
+set -ex
+
+cd "$(dirname "$0")/../../../../grpc-node"
+
+npm install
+
+./node_modules/.bin/gulp setup
diff --git a/tools/run_tests/performance/run_worker_node.sh b/tools/run_tests/performance/run_worker_node.sh
new file mode 100755
index 0000000000..3e5dd18edb
--- /dev/null
+++ b/tools/run_tests/performance/run_worker_node.sh
@@ -0,0 +1,32 @@
+#!/bin/bash
+# Copyright 2018 gRPC authors.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+. "$HOME/.nvm/nvm.sh"
+
+nvm use 10
+
+set -ex
+
+fixture=$1
+
+shift
+
+# Enter repo root
+cd "$(dirname "$0")/../../.."
+
+# Enter the grpc-node repo root (expected to be next to grpc repo root)
+cd ../grpc-node
+
+node -r "./test/fixtures/$fixture" test/performance/worker.js "$@"
diff --git a/tools/run_tests/performance/scenario_config.py b/tools/run_tests/performance/scenario_config.py
index f05753154e..2e78bd07fb 100644
--- a/tools/run_tests/performance/scenario_config.py
+++ b/tools/run_tests/performance/scenario_config.py
@@ -1151,6 +1151,106 @@ class GoLanguage:
return 'go'
+class NodeLanguage:
+
+ def __init__(self, node_purejs=False):
+ pass
+ self.node_purejs = node_purejs
+ self.safename = str(self)
+
+ def worker_cmdline(self):
+ fixture = 'native_js' if self.node_purejs else 'native_native'
+ return [
+ 'tools/run_tests/performance/run_worker_node.sh', fixture,
+ '--benchmark_impl=grpc'
+ ]
+
+ def worker_port_offset(self):
+ if self.node_purejs:
+ return 1100
+ return 1000
+
+ def scenarios(self):
+ node_implementation = 'node_purejs' if self.node_purejs else 'node'
+ for secure in [True, False]:
+ secstr = 'secure' if secure else 'insecure'
+ smoketest_categories = ([SMOKETEST] if secure else []) + [SCALABLE]
+
+ yield _ping_pong_scenario(
+ '%s_to_node_generic_async_streaming_ping_pong_%s' %
+ (node_implementation, secstr),
+ rpc_type='STREAMING',
+ client_type='ASYNC_CLIENT',
+ server_type='ASYNC_GENERIC_SERVER',
+ server_language='node',
+ use_generic_payload=True,
+ async_server_threads=1,
+ secure=secure,
+ categories=smoketest_categories)
+
+ yield _ping_pong_scenario(
+ '%s_to_node_protobuf_async_streaming_ping_pong_%s' %
+ (node_implementation, secstr),
+ rpc_type='STREAMING',
+ client_type='ASYNC_CLIENT',
+ server_type='ASYNC_SERVER',
+ server_language='node',
+ async_server_threads=1,
+ secure=secure)
+
+ yield _ping_pong_scenario(
+ '%s_to_node_protobuf_async_unary_ping_pong_%s' %
+ (node_implementation, secstr),
+ rpc_type='UNARY',
+ client_type='ASYNC_CLIENT',
+ server_type='ASYNC_SERVER',
+ server_language='node',
+ async_server_threads=1,
+ secure=secure,
+ categories=smoketest_categories)
+
+ yield _ping_pong_scenario(
+ '%s_to_node_protobuf_async_unary_qps_unconstrained_%s' %
+ (node_implementation, secstr),
+ rpc_type='UNARY',
+ client_type='ASYNC_CLIENT',
+ server_type='ASYNC_SERVER',
+ server_language='node',
+ unconstrained_client='async',
+ secure=secure,
+ categories=smoketest_categories + [SCALABLE])
+
+ yield _ping_pong_scenario(
+ '%s_to_node_protobuf_async_streaming_qps_unconstrained_%s' %
+ (node_implementation, secstr),
+ rpc_type='STREAMING',
+ client_type='ASYNC_CLIENT',
+ server_type='ASYNC_SERVER',
+ server_language='node',
+ unconstrained_client='async',
+ secure=secure,
+ categories=[SCALABLE])
+
+ yield _ping_pong_scenario(
+ '%s_to_node_generic_async_streaming_qps_unconstrained_%s' %
+ (node_implementation, secstr),
+ rpc_type='STREAMING',
+ client_type='ASYNC_CLIENT',
+ server_type='ASYNC_GENERIC_SERVER',
+ server_language='node',
+ unconstrained_client='async',
+ use_generic_payload=True,
+ secure=secure,
+ categories=[SCALABLE])
+
+ # TODO(murgatroid99): add scenarios node vs C++
+
+ def __str__(self):
+ if self.node_purejs:
+ return 'node_purejs'
+ return 'node'
+
+
LANGUAGES = {
'c++': CXXLanguage(),
'csharp': CSharpLanguage(),
@@ -1160,4 +1260,6 @@ LANGUAGES = {
'java': JavaLanguage(),
'python': PythonLanguage(),
'go': GoLanguage(),
+ 'node': NodeLanguage(),
+ 'node_purejs': NodeLanguage(node_purejs=True)
}
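NodeLanguage registers two variants in LANGUAGES and emits its scenarios lazily from a generator. A reduced, runnable sketch of that shape; the real class builds full scenario dicts through _ping_pong_scenario, which is not reproduced here:

    class NodeLanguage(object):
        """Reduced stand-in for the scenario_config.py class added above."""

        def __init__(self, node_purejs=False):
            self.node_purejs = node_purejs
            self.safename = str(self)

        def scenarios(self):
            # The real implementation yields dicts built by _ping_pong_scenario();
            # only the generated scenario names are shown here.
            impl = 'node_purejs' if self.node_purejs else 'node'
            for secure in [True, False]:
                secstr = 'secure' if secure else 'insecure'
                yield '%s_to_node_protobuf_async_unary_ping_pong_%s' % (impl, secstr)

        def __str__(self):
            return 'node_purejs' if self.node_purejs else 'node'

    LANGUAGES = {
        'node': NodeLanguage(),
        'node_purejs': NodeLanguage(node_purejs=True),
    }

    for name, lang in LANGUAGES.items():
        print(name, list(lang.scenarios()))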
diff --git a/tools/run_tests/python_utils/upload_test_results.py b/tools/run_tests/python_utils/upload_test_results.py
index cbb4c32a2a..9d99703725 100644
--- a/tools/run_tests/python_utils/upload_test_results.py
+++ b/tools/run_tests/python_utils/upload_test_results.py
@@ -68,15 +68,13 @@ _INTEROP_RESULTS_SCHEMA = [
def _get_build_metadata(test_results):
- """Add Jenkins/Kokoro build metadata to test_results based on environment
- variables set by Jenkins/Kokoro.
+ """Add Kokoro build metadata to test_results based on environment
+ variables set by Kokoro.
"""
- build_id = os.getenv('BUILD_ID') or os.getenv('KOKORO_BUILD_NUMBER')
- build_url = os.getenv('BUILD_URL')
- if os.getenv('KOKORO_BUILD_ID'):
- build_url = 'https://source.cloud.google.com/results/invocations/%s' % os.getenv(
- 'KOKORO_BUILD_ID')
- job_name = os.getenv('JOB_BASE_NAME') or os.getenv('KOKORO_JOB_NAME')
+ build_id = os.getenv('KOKORO_BUILD_NUMBER')
+ build_url = 'https://source.cloud.google.com/results/invocations/%s' % os.getenv(
+ 'KOKORO_BUILD_ID')
+ job_name = os.getenv('KOKORO_JOB_NAME')
if build_id:
test_results['build_id'] = build_id
diff --git a/tools/run_tests/run_interop_tests.py b/tools/run_tests/run_interop_tests.py
index aa58107ced..22055d58e8 100755
--- a/tools/run_tests/run_interop_tests.py
+++ b/tools/run_tests/run_interop_tests.py
@@ -637,13 +637,13 @@ _LANGUAGES_WITH_HTTP2_CLIENTS_FOR_HTTP2_SERVER_TEST_CASES = [
'java', 'go', 'python', 'c++'
]
-#TODO: Add c++ when c++ ALTS interop client is ready.
_LANGUAGES_FOR_ALTS_TEST_CASES = ['java', 'go', 'c++']
-#TODO: Add c++ when c++ ALTS interop server is ready.
_SERVERS_FOR_ALTS_TEST_CASES = ['java', 'go', 'c++']
-_TRANSPORT_SECURITY_OPTIONS = ['tls', 'alts', 'insecure']
+_TRANSPORT_SECURITY_OPTIONS = [
+ 'tls', 'alts', 'google_default_credentials', 'insecure'
+]
DOCKER_WORKDIR_ROOT = '/var/local/git/grpc'
@@ -724,6 +724,9 @@ def auth_options(language, test_case, service_account_key_file=None):
key_file_arg = '--service_account_key_file=%s' % service_account_key_file
default_account_arg = '--default_service_account=830293263384-compute@developer.gserviceaccount.com'
+ # TODO: When using google_default_credentials outside of cloud-to-prod, the environment variable
+ # 'GOOGLE_APPLICATION_CREDENTIALS' needs to be set for the test case
+ # 'jwt_token_creds' to work.
if test_case in ['jwt_token_creds', 'per_rpc_creds', 'oauth2_auth_token']:
if language in [
'csharp', 'csharpcoreclr', 'node', 'php', 'php7', 'python',
@@ -763,15 +766,25 @@ def cloud_to_prod_jobspec(language,
docker_image=None,
auth=False,
manual_cmd_log=None,
- service_account_key_file=None):
+ service_account_key_file=None,
+ transport_security='tls'):
"""Creates jobspec for cloud-to-prod interop test"""
container_name = None
cmdargs = [
'--server_host=%s' % server_host,
'--server_host_override=%s' % server_host, '--server_port=443',
- '--use_tls=true',
'--test_case=%s' % test_case
]
+ if transport_security == 'tls':
+ transport_security_options = ['--use_tls=true']
+ elif transport_security == 'google_default_credentials' and language == 'c++':
+ transport_security_options = [
+ '--custom_credentials_type=google_default_credentials'
+ ]
+ else:
+ print('Invalid transport security option.')
+ sys.exit(1)
+ cmdargs = cmdargs + transport_security_options
environ = dict(language.cloud_to_prod_env(), **language.global_env())
if auth:
auth_cmdargs, auth_env = auth_options(language, test_case,
@@ -1285,14 +1298,16 @@ try:
jobs = []
if args.cloud_to_prod:
- if args.transport_security != 'tls':
- print('TLS is always enabled for cloud_to_prod scenarios.')
+ if args.transport_security not in ['tls', 'google_default_credentials']:
+ print(
+ 'TLS or google default credential is always enabled for cloud_to_prod scenarios.'
+ )
for server_host_nickname in args.prod_servers:
for language in languages:
for test_case in _TEST_CASES:
if not test_case in language.unimplemented_test_cases():
if not test_case in _SKIP_ADVANCED + _SKIP_COMPRESSION:
- test_job = cloud_to_prod_jobspec(
+ tls_test_job = cloud_to_prod_jobspec(
language,
test_case,
server_host_nickname,
@@ -1300,8 +1315,23 @@ try:
docker_image=docker_images.get(str(language)),
manual_cmd_log=client_manual_cmd_log,
service_account_key_file=args.
- service_account_key_file)
- jobs.append(test_job)
+ service_account_key_file,
+ transport_security='tls')
+ jobs.append(tls_test_job)
+ if language == 'c++':
+ google_default_creds_test_job = cloud_to_prod_jobspec(
+ language,
+ test_case,
+ server_host_nickname,
+ prod_servers[server_host_nickname],
+ docker_image=docker_images.get(
+ str(language)),
+ manual_cmd_log=client_manual_cmd_log,
+ service_account_key_file=args.
+ service_account_key_file,
+ transport_security=
+ 'google_default_credentials')
+ jobs.append(google_default_creds_test_job)
if args.http2_interop:
for test_case in _HTTP2_TEST_CASES:
@@ -1312,12 +1342,15 @@ try:
prod_servers[server_host_nickname],
docker_image=docker_images.get(str(http2Interop)),
manual_cmd_log=client_manual_cmd_log,
- service_account_key_file=args.service_account_key_file)
+ service_account_key_file=args.service_account_key_file,
+ transport_security=args.transport_security)
jobs.append(test_job)
if args.cloud_to_prod_auth:
- if args.transport_security != 'tls':
- print('TLS is always enabled for cloud_to_prod scenarios.')
+ if args.transport_security not in ['tls', 'google_default_credentials']:
+ print(
+ 'TLS or google default credential is always enabled for cloud_to_prod scenarios.'
+ )
for server_host_nickname in args.prod_servers:
for language in languages:
for test_case in _AUTH_TEST_CASES:
@@ -1325,7 +1358,7 @@ try:
not compute_engine_creds_required(
language, test_case)):
if not test_case in language.unimplemented_test_cases():
- test_job = cloud_to_prod_jobspec(
+ tls_test_job = cloud_to_prod_jobspec(
language,
test_case,
server_host_nickname,
@@ -1334,8 +1367,23 @@ try:
auth=True,
manual_cmd_log=client_manual_cmd_log,
service_account_key_file=args.
- service_account_key_file)
- jobs.append(test_job)
+ service_account_key_file,
+ transport_security='tls')
+ jobs.append(tls_test_job)
+ if language == 'c++':
+ google_default_creds_test_job = cloud_to_prod_jobspec(
+ language,
+ test_case,
+ server_host_nickname,
+ prod_servers[server_host_nickname],
+ docker_image=docker_images.get(
+ str(language)),
+ manual_cmd_log=client_manual_cmd_log,
+ service_account_key_file=args.
+ service_account_key_file,
+ transport_security=
+ 'google_default_credentials')
+ jobs.append(google_default_creds_test_job)
for server in args.override_server:
server_name = server[0]
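The new transport_security parameter selects the interop client flags for each cloud-to-prod job. A reduced sketch of the flag selection, assuming the plain-assignment form of the hunk above and a placeholder server host:

    import sys

    def transport_security_flags(transport_security, language):
        """Maps the requested transport security mode to interop client flags."""
        if transport_security == 'tls':
            return ['--use_tls=true']
        elif transport_security == 'google_default_credentials' and language == 'c++':
            return ['--custom_credentials_type=google_default_credentials']
        else:
            print('Invalid transport security option.')
            sys.exit(1)

    # Placeholder host; the real jobspec passes the configured prod server.
    cmdargs = ['--server_host=prod.example.com', '--server_port=443',
               '--test_case=large_unary']
    cmdargs += transport_security_flags('google_default_credentials', 'c++')
    print(cmdargs)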
diff --git a/tools/run_tests/run_performance_tests.py b/tools/run_tests/run_performance_tests.py
index 9a9f74e9e5..04e706fa5b 100755
--- a/tools/run_tests/run_performance_tests.py
+++ b/tools/run_tests/run_performance_tests.py
@@ -189,11 +189,19 @@ def create_netperf_jobspec(server_host='localhost',
def archive_repo(languages):
"""Archives local version of repo including submodules."""
- cmdline = ['tar', '-cf', '../grpc.tar', '../grpc/']
+ # Directory contains symlinks that can't be correctly untarred on Windows
+ # so we just skip them as a workaround.
+ # See https://github.com/grpc/grpc/issues/16334
+ bad_symlinks_dir = '../grpc/third_party/libcxx/test/std/experimental/filesystem/Inputs/static_test_env'
+ cmdline = [
+ 'tar', '--exclude', bad_symlinks_dir, '-cf', '../grpc.tar', '../grpc/'
+ ]
if 'java' in languages:
cmdline.append('../grpc-java')
if 'go' in languages:
cmdline.append('../grpc-go')
+ if 'node' in languages or 'node_purejs' in languages:
+ cmdline.append('../grpc-node')
archive_job = jobset.JobSpec(
cmdline=cmdline, shortname='archive_repo', timeout_seconds=3 * 60)
@@ -247,9 +255,9 @@ def build_on_remote_hosts(hosts,
languages=scenario_config.LANGUAGES.keys(),
build_local=False):
"""Builds performance worker on remote hosts (and maybe also locally)."""
- build_timeout = 15 * 60
+ build_timeout = 45 * 60
# Kokoro VMs (which are local only) do not have caching, so they need more time to build
- local_build_timeout = 30 * 60
+ local_build_timeout = 60 * 60
build_jobs = []
for host in hosts:
user_at_host = '%s@%s' % (_REMOTE_HOST_USERNAME, host)
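archive_repo now excludes a libcxx test directory whose symlinks break untarring on Windows and ships ../grpc-node whenever node or node_purejs workers are requested. A self-contained sketch of the command-line construction:

    def archive_cmdline(languages):
        """Builds the tar command line used to ship local repos to the workers."""
        # Symlinks under this libcxx test directory do not untar cleanly on
        # Windows, so the archive skips them (see grpc/grpc#16334).
        bad_symlinks_dir = ('../grpc/third_party/libcxx/test/std/experimental/'
                            'filesystem/Inputs/static_test_env')
        cmdline = ['tar', '--exclude', bad_symlinks_dir,
                   '-cf', '../grpc.tar', '../grpc/']
        if 'java' in languages:
            cmdline.append('../grpc-java')
        if 'go' in languages:
            cmdline.append('../grpc-go')
        if 'node' in languages or 'node_purejs' in languages:
            cmdline.append('../grpc-node')
        return cmdline

    print(archive_cmdline(['c++', 'node']))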
diff --git a/tools/run_tests/sanity/check_unsecure.sh b/tools/run_tests/sanity/check_bad_dependencies.sh
index cca1235479..5ae0e02c81 100755
--- a/tools/run_tests/sanity/check_unsecure.sh
+++ b/tools/run_tests/sanity/check_bad_dependencies.sh
@@ -23,5 +23,9 @@ test "$(bazel query 'somepath("//:grpc++_unsecure", "//external:libssl")' 2>/dev
test "$(bazel query 'somepath("//:grpc++_codegen_proto", "//external:libssl")' 2>/dev/null | wc -l)" -eq 0 || exit 1
test "$(bazel query 'somepath("//test/cpp/microbenchmarks:helpers", "//external:libssl")' 2>/dev/null | wc -l)" -eq 0 || exit 1
+# Make sure that core doesn't depend on anything in C++ library
+
+test "$(bazel query 'deps("//:grpc")' 2>/dev/null | egrep 'src/cpp|include/grpcpp' | wc -l)" -eq 0 || exit 1
+
exit 0
diff --git a/tools/run_tests/sanity/sanity_tests.yaml b/tools/run_tests/sanity/sanity_tests.yaml
index 72bfad90bc..fd9b34a198 100644
--- a/tools/run_tests/sanity/sanity_tests.yaml
+++ b/tools/run_tests/sanity/sanity_tests.yaml
@@ -1,4 +1,5 @@
# a set of tests that are run in parallel for sanity tests
+- script: tools/run_tests/sanity/check_bad_dependencies.sh
- script: tools/run_tests/sanity/check_bazel_workspace.py
- script: tools/run_tests/sanity/check_cache_mk.sh
- script: tools/run_tests/sanity/check_owners.sh
@@ -6,7 +7,6 @@
- script: tools/run_tests/sanity/check_submodules.sh
- script: tools/run_tests/sanity/check_test_filtering.py
- script: tools/run_tests/sanity/check_tracer_sanity.py
-- script: tools/run_tests/sanity/check_unsecure.sh
- script: tools/run_tests/sanity/core_banned_functions.py
- script: tools/run_tests/sanity/core_untyped_structs.sh
- script: tools/run_tests/sanity/check_deprecated_grpc++.py