author     Mark D. Roth <roth@google.com>    2017-03-15 08:06:21 -0700
committer  Mark D. Roth <roth@google.com>    2017-03-15 08:06:21 -0700
commit     3a91707acefc5e099f37ee24c19abfd16fabef8b (patch)
tree       a4a7837b399ca19105a0e807ea93a12d7814bc8e /tools
parent     02612c163e949ed70a399cf9dd311457bb29c902 (diff)
parent     3cafec2f55f66e3df61569579c8259b2e5b5ee00 (diff)
Merge remote-tracking branch 'upstream/master' into retry_throttle
Diffstat (limited to 'tools')
97 files changed, 1882 insertions, 954 deletions
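The first hunk below fixes Bazel detection in `bazel_deps.sh`: the old test `[ "bazel version" ]` only checks that a non-empty string literal exists, so it is always true, while the new `[ -x "$(command -v bazel)" ]` verifies that a `bazel` executable is actually on the PATH before skipping the Docker fallback. A minimal sketch of the same check in Python (illustrative only — `bazel_deps` is a hypothetical name, and the real script stays in shell):

```python
import shutil
import subprocess

def bazel_deps(target: str) -> None:
    # shutil.which mirrors `command -v`: it returns the executable's
    # path when bazel is on PATH, and None when it is not installed.
    if shutil.which("bazel"):
        subprocess.run(["bazel", "query", "deps(%s)" % target],
                       cwd="third_party/protobuf", check=True)
    else:
        # The shell script falls back to running bazel inside Docker here.
        raise RuntimeError("bazel not found; use the dockerized fallback")
```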
diff --git a/tools/distrib/python/bazel_deps.sh b/tools/distrib/python/bazel_deps.sh index de3ee07970..f6d42d29eb 100755 --- a/tools/distrib/python/bazel_deps.sh +++ b/tools/distrib/python/bazel_deps.sh @@ -33,14 +33,14 @@ cd $(dirname $0)/../../../ # First check if bazel is installed on the machine. If it is, then we don't need # to invoke the docker bazel. -if [ "bazel version" ] +if [ -x "$(command -v bazel)" ] then cd third_party/protobuf bazel query 'deps('$1')' else - docker build -t bazel `realpath ./tools/dockerfile/bazel/` - docker run -v "`realpath .`:/src/grpc/" \ - -w /src/grpc/third_party/protobuf \ - bazel \ + docker build -t bazel_local_img tools/dockerfile/test/sanity + docker run -v "$(realpath .):/src/grpc/:ro" \ + -w /src/grpc/third_party/protobuf \ + bazel_local_img \ bazel query 'deps('$1')' fi diff --git a/tools/distrib/python/grpcio_tools/grpc_tools/protobuf_generated_well_known_types_embed.cc b/tools/distrib/python/grpcio_tools/grpc_tools/protobuf_generated_well_known_types_embed.cc new file mode 100644 index 0000000000..682837a27f --- /dev/null +++ b/tools/distrib/python/grpcio_tools/grpc_tools/protobuf_generated_well_known_types_embed.cc @@ -0,0 +1,343 @@ +// Copyright 2017, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// HACK: Embed the generated well_known_types_js.cc to make +// grpc-tools python package compilation easy. +#include <google/protobuf/compiler/js/well_known_types_embed.h> +struct FileToc well_known_types_js[] = { +{"any.js", + "// Protocol Buffers - Google's data interchange format\n" + "// Copyright 2008 Google Inc. 
All rights reserved.\n" + "// https://developers.google.com/protocol-buffers/\n" + "//\n" + "// Redistribution and use in source and binary forms, with or without\n" + "// modification, are permitted provided that the following conditions are\n" + "// met:\n" + "//\n" + "// * Redistributions of source code must retain the above copyright\n" + "// notice, this list of conditions and the following disclaimer.\n" + "// * Redistributions in binary form must reproduce the above\n" + "// copyright notice, this list of conditions and the following disclaimer\n" + "// in the documentation and/or other materials provided with the\n" + "// distribution.\n" + "// * Neither the name of Google Inc. nor the names of its\n" + "// contributors may be used to endorse or promote products derived from\n" + "// this software without specific prior written permission.\n" + "//\n" + "// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n" + "// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n" + "// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n" + "// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n" + "// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n" + "// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n" + "// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n" + "// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n" + "// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n" + "// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n" + "// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n" + "\n" + "/* This code will be inserted into generated code for\n" + " * google/protobuf/any.proto. 
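The any.js helpers embedded below operate purely on the type URL string: `getTypeName()` takes the last path segment, and `pack()` joins a prefix (`type.googleapis.com/` by default) with the message's type name. The same string logic in Python, as a hedged sketch with hypothetical helper names:

```python
def any_type_name(type_url: str) -> str:
    # getTypeName(): the type name is everything after the last '/'.
    return type_url.split("/")[-1]

def any_type_url(name: str, prefix: str = "type.googleapis.com/") -> str:
    # pack(): add a '/' only if the prefix does not already end with one.
    return prefix + name if prefix.endswith("/") else prefix + "/" + name

assert any_type_name("type.googleapis.com/google.protobuf.Duration") \
    == "google.protobuf.Duration"
```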
*/\n" + "\n" + "/**\n" + " * Returns the type name contained in this instance, if any.\n" + " * @return {string|undefined}\n" + " */\n" + "proto.google.protobuf.Any.prototype.getTypeName = function() {\n" + " return this.getTypeUrl().split('/').pop();\n" + "};\n" + "\n" + "\n" + "/**\n" + " * Packs the given message instance into this Any.\n" + " * @param {!Uint8Array} serialized The serialized data to pack.\n" + " * @param {string} name The type name of this message object.\n" + " * @param {string=} opt_typeUrlPrefix the type URL prefix.\n" + " */\n" + "proto.google.protobuf.Any.prototype.pack = function(serialized, name,\n" + " opt_typeUrlPrefix) {\n" + " if (!opt_typeUrlPrefix) {\n" + " opt_typeUrlPrefix = 'type.googleapis.com/';\n" + " }\n" + "\n" + " if (opt_typeUrlPrefix.substr(-1) != '/') {\n" + " this.setTypeUrl(opt_typeUrlPrefix + '/' + name);\n" + " } else {\n" + " this.setTypeUrl(opt_typeUrlPrefix + name);\n" + " }\n" + "\n" + " this.setValue(serialized);\n" + "};\n" + "\n" + "\n" + "/**\n" + " * @template T\n" + " * Unpacks this Any into the given message object.\n" + " * @param {function(Uint8Array):T} deserialize Function that will deserialize\n" + " * the binary data properly.\n" + " * @param {string} name The expected type name of this message object.\n" + " * @return {?T} If the name matched the expected name, returns the deserialized\n" + " * object, otherwise returns undefined.\n" + " */\n" + "proto.google.protobuf.Any.prototype.unpack = function(deserialize, name) {\n" + " if (this.getTypeName() == name) {\n" + " return deserialize(this.getValue_asU8());\n" + " } else {\n" + " return null;\n" + " }\n" + "};\n" +}, +{"struct.js", + "// Protocol Buffers - Google's data interchange format\n" + "// Copyright 2008 Google Inc. All rights reserved.\n" + "// https://developers.google.com/protocol-buffers/\n" + "//\n" + "// Redistribution and use in source and binary forms, with or without\n" + "// modification, are permitted provided that the following conditions are\n" + "// met:\n" + "//\n" + "// * Redistributions of source code must retain the above copyright\n" + "// notice, this list of conditions and the following disclaimer.\n" + "// * Redistributions in binary form must reproduce the above\n" + "// copyright notice, this list of conditions and the following disclaimer\n" + "// in the documentation and/or other materials provided with the\n" + "// distribution.\n" + "// * Neither the name of Google Inc. nor the names of its\n" + "// contributors may be used to endorse or promote products derived from\n" + "// this software without specific prior written permission.\n" + "//\n" + "// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n" + "// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n" + "// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n" + "// A PARTICULAR PURPOSE ARE DISCLAIMED. 
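The struct.js code that follows converts Value protos to plain JavaScript values by switching on the kind case, and converts back by switching on the runtime type, recursing through ListValue and Struct. A rough Python analogue of that two-way dispatch (a sketch over plain lists and dicts, with tagged tuples standing in for Value's kind cases — not the protobuf API):

```python
def value_from_native(value):
    # Mirrors Value.fromJavaScript(): dispatch on the runtime type and
    # recurse into containers. bool is tested before int/float because
    # bool is a subclass of int in Python.
    if value is None:
        return ("null_value", None)
    if isinstance(value, bool):
        return ("bool_value", value)
    if isinstance(value, (int, float)):
        return ("number_value", float(value))
    if isinstance(value, str):
        return ("string_value", value)
    if isinstance(value, list):
        return ("list_value", [value_from_native(v) for v in value])
    if isinstance(value, dict):
        return ("struct_value",
                {k: value_from_native(v) for k, v in value.items()})
    raise TypeError("Unexpected struct type.")
```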
IN NO EVENT SHALL THE COPYRIGHT\n" + "// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n" + "// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n" + "// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n" + "// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n" + "// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n" + "// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n" + "// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n" + "\n" + "/* This code will be inserted into generated code for\n" + " * google/protobuf/struct.proto. */\n" + "\n" + "/**\n" + " * Typedef representing plain JavaScript values that can go into a\n" + " * Struct.\n" + " * @typedef {null|number|string|boolean|Array|Object}\n" + " */\n" + "proto.google.protobuf.JavaScriptValue;\n" + "\n" + "\n" + "/**\n" + " * Converts this Value object to a plain JavaScript value.\n" + " * @return {?proto.google.protobuf.JavaScriptValue} a plain JavaScript\n" + " * value representing this Struct.\n" + " */\n" + "proto.google.protobuf.Value.prototype.toJavaScript = function() {\n" + " var kindCase = proto.google.protobuf.Value.KindCase;\n" + " switch (this.getKindCase()) {\n" + " case kindCase.NULL_VALUE:\n" + " return null;\n" + " case kindCase.NUMBER_VALUE:\n" + " return this.getNumberValue();\n" + " case kindCase.STRING_VALUE:\n" + " return this.getStringValue();\n" + " case kindCase.BOOL_VALUE:\n" + " return this.getBoolValue();\n" + " case kindCase.STRUCT_VALUE:\n" + " return this.getStructValue().toJavaScript();\n" + " case kindCase.LIST_VALUE:\n" + " return this.getListValue().toJavaScript();\n" + " default:\n" + " throw new Error('Unexpected struct type');\n" + " }\n" + "};\n" + "\n" + "\n" + "/**\n" + " * Converts this JavaScript value to a new Value proto.\n" + " * @param {!proto.google.protobuf.JavaScriptValue} value The value to\n" + " * convert.\n" + " * @return {!proto.google.protobuf.Value} The newly constructed value.\n" + " */\n" + "proto.google.protobuf.Value.fromJavaScript = function(value) {\n" + " var ret = new proto.google.protobuf.Value();\n" + " switch (goog.typeOf(value)) {\n" + " case 'string':\n" + " ret.setStringValue(/** @type {string} */ (value));\n" + " break;\n" + " case 'number':\n" + " ret.setNumberValue(/** @type {number} */ (value));\n" + " break;\n" + " case 'boolean':\n" + " ret.setBoolValue(/** @type {boolean} */ (value));\n" + " break;\n" + " case 'null':\n" + " ret.setNullValue(proto.google.protobuf.NullValue.NULL_VALUE);\n" + " break;\n" + " case 'array':\n" + " ret.setListValue(proto.google.protobuf.ListValue.fromJavaScript(\n" + " /** @type{!Array} */ (value)));\n" + " break;\n" + " case 'object':\n" + " ret.setStructValue(proto.google.protobuf.Struct.fromJavaScript(\n" + " /** @type{!Object} */ (value)));\n" + " break;\n" + " default:\n" + " throw new Error('Unexpected struct type.');\n" + " }\n" + "\n" + " return ret;\n" + "};\n" + "\n" + "\n" + "/**\n" + " * Converts this ListValue object to a plain JavaScript array.\n" + " * @return {!Array} a plain JavaScript array representing this List.\n" + " */\n" + "proto.google.protobuf.ListValue.prototype.toJavaScript = function() {\n" + " var ret = [];\n" + " var values = this.getValuesList();\n" + "\n" + " for (var i = 0; i < values.length; i++) {\n" + " ret[i] = values[i].toJavaScript();\n" + " }\n" + "\n" + " return ret;\n" + "};\n" + "\n" + "\n" + "/**\n" + " * Constructs a ListValue protobuf 
from this plain JavaScript array.\n" + " * @param {!Array} array a plain JavaScript array\n" + " * @return {proto.google.protobuf.ListValue} a new ListValue object\n" + " */\n" + "proto.google.protobuf.ListValue.fromJavaScript = function(array) {\n" + " var ret = new proto.google.protobuf.ListValue();\n" + "\n" + " for (var i = 0; i < array.length; i++) {\n" + " ret.addValues(proto.google.protobuf.Value.fromJavaScript(array[i]));\n" + " }\n" + "\n" + " return ret;\n" + "};\n" + "\n" + "\n" + "/**\n" + " * Converts this Struct object to a plain JavaScript object.\n" + " * @return {!Object<string, !proto.google.protobuf.JavaScriptValue>} a plain\n" + " * JavaScript object representing this Struct.\n" + " */\n" + "proto.google.protobuf.Struct.prototype.toJavaScript = function() {\n" + " var ret = {};\n" + "\n" + " this.getFieldsMap().forEach(function(value, key) {\n" + " ret[key] = value.toJavaScript();\n" + " });\n" + "\n" + " return ret;\n" + "};\n" + "\n" + "\n" + "/**\n" + " * Constructs a Struct protobuf from this plain JavaScript object.\n" + " * @param {!Object} obj a plain JavaScript object\n" + " * @return {proto.google.protobuf.Struct} a new Struct object\n" + " */\n" + "proto.google.protobuf.Struct.fromJavaScript = function(obj) {\n" + " var ret = new proto.google.protobuf.Struct();\n" + " var map = ret.getFieldsMap();\n" + "\n" + " for (var property in obj) {\n" + " var val = obj[property];\n" + " map.set(property, proto.google.protobuf.Value.fromJavaScript(val));\n" + " }\n" + "\n" + " return ret;\n" + "};\n" +}, +{"timestamp.js", + "// Protocol Buffers - Google's data interchange format\n" + "// Copyright 2008 Google Inc. All rights reserved.\n" + "// https://developers.google.com/protocol-buffers/\n" + "//\n" + "// Redistribution and use in source and binary forms, with or without\n" + "// modification, are permitted provided that the following conditions are\n" + "// met:\n" + "//\n" + "// * Redistributions of source code must retain the above copyright\n" + "// notice, this list of conditions and the following disclaimer.\n" + "// * Redistributions in binary form must reproduce the above\n" + "// copyright notice, this list of conditions and the following disclaimer\n" + "// in the documentation and/or other materials provided with the\n" + "// distribution.\n" + "// * Neither the name of Google Inc. nor the names of its\n" + "// contributors may be used to endorse or promote products derived from\n" + "// this software without specific prior written permission.\n" + "//\n" + "// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n" + "// \"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\n" + "// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\n" + "// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\n" + "// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\n" + "// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\n" + "// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\n" + "// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\n" + "// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n" + "// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\n" + "// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n" + "\n" + "/* This code will be inserted into generated code for\n" + " * google/protobuf/timestamp.proto. 
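The timestamp.js helpers just below convert between a Timestamp's (seconds, nanos) pair and JavaScript's millisecond-based Date. The same arithmetic in Python, as a sketch with hypothetical function names (Date.getMilliseconds() is approximated by the sub-second remainder):

```python
import math

def timestamp_to_millis(seconds: int, nanos: int) -> float:
    # toDate(): seconds scale up by 1000; nanos scale down by 1e6.
    return seconds * 1000 + nanos / 1_000_000

def millis_to_timestamp(millis: float) -> tuple:
    # fromDate(): whole seconds first, then the leftover milliseconds
    # promoted to nanoseconds.
    seconds = math.floor(millis / 1000)
    nanos = int(millis - seconds * 1000) * 1_000_000
    return seconds, nanos
```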
*/\n" + "\n" + "/**\n" + " * Returns a JavaScript 'Date' object corresponding to this Timestamp.\n" + " * @return {!Date}\n" + " */\n" + "proto.google.protobuf.Timestamp.prototype.toDate = function() {\n" + " var seconds = this.getSeconds();\n" + " var nanos = this.getNanos();\n" + "\n" + " return new Date((seconds * 1000) + (nanos / 1000000));\n" + "};\n" + "\n" + "\n" + "/**\n" + " * Sets the value of this Timestamp object to be the given Date.\n" + " * @param {!Date} value The value to set.\n" + " */\n" + "proto.google.protobuf.Timestamp.prototype.fromDate = function(value) {\n" + " var millis = value.getTime();\n" + " this.setSeconds(Math.floor(value.getTime() / 1000));\n" + " this.setNanos(value.getMilliseconds() * 1000000);\n" + "};\n" +}, + {NULL, NULL} // Terminate the list. +}; diff --git a/tools/distrib/python/grpcio_tools/grpc_version.py b/tools/distrib/python/grpcio_tools/grpc_version.py index 263785b774..ad35b90bad 100644 --- a/tools/distrib/python/grpcio_tools/grpc_version.py +++ b/tools/distrib/python/grpcio_tools/grpc_version.py @@ -29,4 +29,4 @@ # AUTO-GENERATED FROM `$REPO_ROOT/templates/tools/distrib/python/grpcio_tools/grpc_version.py.template`!!! -VERSION='1.2.0.dev0' +VERSION='1.3.0.dev0' diff --git a/tools/distrib/python/grpcio_tools/protoc_lib_deps.py b/tools/distrib/python/grpcio_tools/protoc_lib_deps.py index 569328e57e..c2aa6198b3 100644 --- a/tools/distrib/python/grpcio_tools/protoc_lib_deps.py +++ b/tools/distrib/python/grpcio_tools/protoc_lib_deps.py @@ -29,7 +29,7 @@ # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. # AUTO-GENERATED BY make_grpcio_tools.py! -CC_FILES=['google/protobuf/compiler/zip_writer.cc', 'google/protobuf/compiler/subprocess.cc', 'google/protobuf/compiler/ruby/ruby_generator.cc', 'google/protobuf/compiler/python/python_generator.cc', 'google/protobuf/compiler/plugin.pb.cc', 'google/protobuf/compiler/plugin.cc', 'google/protobuf/compiler/php/php_generator.cc', 'google/protobuf/compiler/objectivec/objectivec_primitive_field.cc', 'google/protobuf/compiler/objectivec/objectivec_oneof.cc', 'google/protobuf/compiler/objectivec/objectivec_message_field.cc', 'google/protobuf/compiler/objectivec/objectivec_message.cc', 'google/protobuf/compiler/objectivec/objectivec_map_field.cc', 'google/protobuf/compiler/objectivec/objectivec_helpers.cc', 'google/protobuf/compiler/objectivec/objectivec_generator.cc', 'google/protobuf/compiler/objectivec/objectivec_file.cc', 'google/protobuf/compiler/objectivec/objectivec_field.cc', 'google/protobuf/compiler/objectivec/objectivec_extension.cc', 'google/protobuf/compiler/objectivec/objectivec_enum_field.cc', 'google/protobuf/compiler/objectivec/objectivec_enum.cc', 'google/protobuf/compiler/js/js_generator.cc', 'google/protobuf/compiler/javanano/javanano_primitive_field.cc', 'google/protobuf/compiler/javanano/javanano_message_field.cc', 'google/protobuf/compiler/javanano/javanano_message.cc', 'google/protobuf/compiler/javanano/javanano_map_field.cc', 'google/protobuf/compiler/javanano/javanano_helpers.cc', 'google/protobuf/compiler/javanano/javanano_generator.cc', 'google/protobuf/compiler/javanano/javanano_file.cc', 'google/protobuf/compiler/javanano/javanano_field.cc', 'google/protobuf/compiler/javanano/javanano_extension.cc', 'google/protobuf/compiler/javanano/javanano_enum_field.cc', 'google/protobuf/compiler/javanano/javanano_enum.cc', 'google/protobuf/compiler/java/java_string_field_lite.cc', 'google/protobuf/compiler/java/java_string_field.cc', 
'google/protobuf/compiler/java/java_shared_code_generator.cc', 'google/protobuf/compiler/java/java_service.cc', 'google/protobuf/compiler/java/java_primitive_field_lite.cc', 'google/protobuf/compiler/java/java_primitive_field.cc', 'google/protobuf/compiler/java/java_name_resolver.cc', 'google/protobuf/compiler/java/java_message_lite.cc', 'google/protobuf/compiler/java/java_message_field_lite.cc', 'google/protobuf/compiler/java/java_message_field.cc', 'google/protobuf/compiler/java/java_message_builder_lite.cc', 'google/protobuf/compiler/java/java_message_builder.cc', 'google/protobuf/compiler/java/java_message.cc', 'google/protobuf/compiler/java/java_map_field_lite.cc', 'google/protobuf/compiler/java/java_map_field.cc', 'google/protobuf/compiler/java/java_lazy_message_field_lite.cc', 'google/protobuf/compiler/java/java_lazy_message_field.cc', 'google/protobuf/compiler/java/java_helpers.cc', 'google/protobuf/compiler/java/java_generator_factory.cc', 'google/protobuf/compiler/java/java_generator.cc', 'google/protobuf/compiler/java/java_file.cc', 'google/protobuf/compiler/java/java_field.cc', 'google/protobuf/compiler/java/java_extension_lite.cc', 'google/protobuf/compiler/java/java_extension.cc', 'google/protobuf/compiler/java/java_enum_lite.cc', 'google/protobuf/compiler/java/java_enum_field_lite.cc', 'google/protobuf/compiler/java/java_enum_field.cc', 'google/protobuf/compiler/java/java_enum.cc', 'google/protobuf/compiler/java/java_doc_comment.cc', 'google/protobuf/compiler/java/java_context.cc', 'google/protobuf/compiler/csharp/csharp_wrapper_field.cc', 'google/protobuf/compiler/csharp/csharp_source_generator_base.cc', 'google/protobuf/compiler/csharp/csharp_repeated_primitive_field.cc', 'google/protobuf/compiler/csharp/csharp_repeated_message_field.cc', 'google/protobuf/compiler/csharp/csharp_repeated_enum_field.cc', 'google/protobuf/compiler/csharp/csharp_reflection_class.cc', 'google/protobuf/compiler/csharp/csharp_primitive_field.cc', 'google/protobuf/compiler/csharp/csharp_message_field.cc', 'google/protobuf/compiler/csharp/csharp_message.cc', 'google/protobuf/compiler/csharp/csharp_map_field.cc', 'google/protobuf/compiler/csharp/csharp_helpers.cc', 'google/protobuf/compiler/csharp/csharp_generator.cc', 'google/protobuf/compiler/csharp/csharp_field_base.cc', 'google/protobuf/compiler/csharp/csharp_enum_field.cc', 'google/protobuf/compiler/csharp/csharp_enum.cc', 'google/protobuf/compiler/csharp/csharp_doc_comment.cc', 'google/protobuf/compiler/cpp/cpp_string_field.cc', 'google/protobuf/compiler/cpp/cpp_service.cc', 'google/protobuf/compiler/cpp/cpp_primitive_field.cc', 'google/protobuf/compiler/cpp/cpp_message_field.cc', 'google/protobuf/compiler/cpp/cpp_message.cc', 'google/protobuf/compiler/cpp/cpp_map_field.cc', 'google/protobuf/compiler/cpp/cpp_helpers.cc', 'google/protobuf/compiler/cpp/cpp_generator.cc', 'google/protobuf/compiler/cpp/cpp_file.cc', 'google/protobuf/compiler/cpp/cpp_field.cc', 'google/protobuf/compiler/cpp/cpp_extension.cc', 'google/protobuf/compiler/cpp/cpp_enum_field.cc', 'google/protobuf/compiler/cpp/cpp_enum.cc', 'google/protobuf/compiler/command_line_interface.cc', 'google/protobuf/compiler/code_generator.cc', 'google/protobuf/wrappers.pb.cc', 'google/protobuf/wire_format.cc', 'google/protobuf/util/type_resolver_util.cc', 'google/protobuf/util/time_util.cc', 'google/protobuf/util/message_differencer.cc', 'google/protobuf/util/json_util.cc', 'google/protobuf/util/internal/utility.cc', 'google/protobuf/util/internal/type_info_test_helper.cc', 
'google/protobuf/util/internal/type_info.cc', 'google/protobuf/util/internal/protostream_objectwriter.cc', 'google/protobuf/util/internal/protostream_objectsource.cc', 'google/protobuf/util/internal/proto_writer.cc', 'google/protobuf/util/internal/object_writer.cc', 'google/protobuf/util/internal/json_stream_parser.cc', 'google/protobuf/util/internal/json_objectwriter.cc', 'google/protobuf/util/internal/json_escaping.cc', 'google/protobuf/util/internal/field_mask_utility.cc', 'google/protobuf/util/internal/error_listener.cc', 'google/protobuf/util/internal/default_value_objectwriter.cc', 'google/protobuf/util/internal/datapiece.cc', 'google/protobuf/util/field_mask_util.cc', 'google/protobuf/util/field_comparator.cc', 'google/protobuf/unknown_field_set.cc', 'google/protobuf/type.pb.cc', 'google/protobuf/timestamp.pb.cc', 'google/protobuf/text_format.cc', 'google/protobuf/stubs/substitute.cc', 'google/protobuf/stubs/mathlimits.cc', 'google/protobuf/struct.pb.cc', 'google/protobuf/source_context.pb.cc', 'google/protobuf/service.cc', 'google/protobuf/reflection_ops.cc', 'google/protobuf/message.cc', 'google/protobuf/map_field.cc', 'google/protobuf/io/zero_copy_stream_impl.cc', 'google/protobuf/io/tokenizer.cc', 'google/protobuf/io/strtod.cc', 'google/protobuf/io/printer.cc', 'google/protobuf/io/gzip_stream.cc', 'google/protobuf/generated_message_reflection.cc', 'google/protobuf/field_mask.pb.cc', 'google/protobuf/extension_set_heavy.cc', 'google/protobuf/empty.pb.cc', 'google/protobuf/dynamic_message.cc', 'google/protobuf/duration.pb.cc', 'google/protobuf/descriptor_database.cc', 'google/protobuf/descriptor.pb.cc', 'google/protobuf/descriptor.cc', 'google/protobuf/compiler/parser.cc', 'google/protobuf/compiler/importer.cc', 'google/protobuf/api.pb.cc', 'google/protobuf/any.pb.cc', 'google/protobuf/any.cc', 'google/protobuf/wire_format_lite.cc', 'google/protobuf/stubs/time.cc', 'google/protobuf/stubs/strutil.cc', 'google/protobuf/stubs/structurally_valid.cc', 'google/protobuf/stubs/stringprintf.cc', 'google/protobuf/stubs/stringpiece.cc', 'google/protobuf/stubs/statusor.cc', 'google/protobuf/stubs/status.cc', 'google/protobuf/stubs/once.cc', 'google/protobuf/stubs/int128.cc', 'google/protobuf/stubs/common.cc', 'google/protobuf/stubs/bytestream.cc', 'google/protobuf/stubs/atomicops_internals_x86_msvc.cc', 'google/protobuf/stubs/atomicops_internals_x86_gcc.cc', 'google/protobuf/repeated_field.cc', 'google/protobuf/message_lite.cc', 'google/protobuf/io/zero_copy_stream_impl_lite.cc', 'google/protobuf/io/zero_copy_stream.cc', 'google/protobuf/io/coded_stream.cc', 'google/protobuf/generated_message_util.cc', 'google/protobuf/extension_set.cc', 'google/protobuf/arenastring.cc', 'google/protobuf/arena.cc'] +CC_FILES=['google/protobuf/compiler/zip_writer.cc', 'google/protobuf/compiler/subprocess.cc', 'google/protobuf/compiler/ruby/ruby_generator.cc', 'google/protobuf/compiler/python/python_generator.cc', 'google/protobuf/compiler/plugin.pb.cc', 'google/protobuf/compiler/plugin.cc', 'google/protobuf/compiler/php/php_generator.cc', 'google/protobuf/compiler/objectivec/objectivec_primitive_field.cc', 'google/protobuf/compiler/objectivec/objectivec_oneof.cc', 'google/protobuf/compiler/objectivec/objectivec_message_field.cc', 'google/protobuf/compiler/objectivec/objectivec_message.cc', 'google/protobuf/compiler/objectivec/objectivec_map_field.cc', 'google/protobuf/compiler/objectivec/objectivec_helpers.cc', 'google/protobuf/compiler/objectivec/objectivec_generator.cc', 
'google/protobuf/compiler/objectivec/objectivec_file.cc', 'google/protobuf/compiler/objectivec/objectivec_field.cc', 'google/protobuf/compiler/objectivec/objectivec_extension.cc', 'google/protobuf/compiler/objectivec/objectivec_enum_field.cc', 'google/protobuf/compiler/objectivec/objectivec_enum.cc', 'google/protobuf/compiler/js/well_known_types_embed.cc', 'google/protobuf/compiler/js/js_generator.cc', 'google/protobuf/compiler/javanano/javanano_primitive_field.cc', 'google/protobuf/compiler/javanano/javanano_message_field.cc', 'google/protobuf/compiler/javanano/javanano_message.cc', 'google/protobuf/compiler/javanano/javanano_map_field.cc', 'google/protobuf/compiler/javanano/javanano_helpers.cc', 'google/protobuf/compiler/javanano/javanano_generator.cc', 'google/protobuf/compiler/javanano/javanano_file.cc', 'google/protobuf/compiler/javanano/javanano_field.cc', 'google/protobuf/compiler/javanano/javanano_extension.cc', 'google/protobuf/compiler/javanano/javanano_enum_field.cc', 'google/protobuf/compiler/javanano/javanano_enum.cc', 'google/protobuf/compiler/java/java_string_field_lite.cc', 'google/protobuf/compiler/java/java_string_field.cc', 'google/protobuf/compiler/java/java_shared_code_generator.cc', 'google/protobuf/compiler/java/java_service.cc', 'google/protobuf/compiler/java/java_primitive_field_lite.cc', 'google/protobuf/compiler/java/java_primitive_field.cc', 'google/protobuf/compiler/java/java_name_resolver.cc', 'google/protobuf/compiler/java/java_message_lite.cc', 'google/protobuf/compiler/java/java_message_field_lite.cc', 'google/protobuf/compiler/java/java_message_field.cc', 'google/protobuf/compiler/java/java_message_builder_lite.cc', 'google/protobuf/compiler/java/java_message_builder.cc', 'google/protobuf/compiler/java/java_message.cc', 'google/protobuf/compiler/java/java_map_field_lite.cc', 'google/protobuf/compiler/java/java_map_field.cc', 'google/protobuf/compiler/java/java_lazy_message_field_lite.cc', 'google/protobuf/compiler/java/java_lazy_message_field.cc', 'google/protobuf/compiler/java/java_helpers.cc', 'google/protobuf/compiler/java/java_generator_factory.cc', 'google/protobuf/compiler/java/java_generator.cc', 'google/protobuf/compiler/java/java_file.cc', 'google/protobuf/compiler/java/java_field.cc', 'google/protobuf/compiler/java/java_extension_lite.cc', 'google/protobuf/compiler/java/java_extension.cc', 'google/protobuf/compiler/java/java_enum_lite.cc', 'google/protobuf/compiler/java/java_enum_field_lite.cc', 'google/protobuf/compiler/java/java_enum_field.cc', 'google/protobuf/compiler/java/java_enum.cc', 'google/protobuf/compiler/java/java_doc_comment.cc', 'google/protobuf/compiler/java/java_context.cc', 'google/protobuf/compiler/csharp/csharp_wrapper_field.cc', 'google/protobuf/compiler/csharp/csharp_source_generator_base.cc', 'google/protobuf/compiler/csharp/csharp_repeated_primitive_field.cc', 'google/protobuf/compiler/csharp/csharp_repeated_message_field.cc', 'google/protobuf/compiler/csharp/csharp_repeated_enum_field.cc', 'google/protobuf/compiler/csharp/csharp_reflection_class.cc', 'google/protobuf/compiler/csharp/csharp_primitive_field.cc', 'google/protobuf/compiler/csharp/csharp_message_field.cc', 'google/protobuf/compiler/csharp/csharp_message.cc', 'google/protobuf/compiler/csharp/csharp_map_field.cc', 'google/protobuf/compiler/csharp/csharp_helpers.cc', 'google/protobuf/compiler/csharp/csharp_generator.cc', 'google/protobuf/compiler/csharp/csharp_field_base.cc', 'google/protobuf/compiler/csharp/csharp_enum_field.cc', 
'google/protobuf/compiler/csharp/csharp_enum.cc', 'google/protobuf/compiler/csharp/csharp_doc_comment.cc', 'google/protobuf/compiler/cpp/cpp_string_field.cc', 'google/protobuf/compiler/cpp/cpp_service.cc', 'google/protobuf/compiler/cpp/cpp_primitive_field.cc', 'google/protobuf/compiler/cpp/cpp_message_field.cc', 'google/protobuf/compiler/cpp/cpp_message.cc', 'google/protobuf/compiler/cpp/cpp_map_field.cc', 'google/protobuf/compiler/cpp/cpp_helpers.cc', 'google/protobuf/compiler/cpp/cpp_generator.cc', 'google/protobuf/compiler/cpp/cpp_file.cc', 'google/protobuf/compiler/cpp/cpp_field.cc', 'google/protobuf/compiler/cpp/cpp_extension.cc', 'google/protobuf/compiler/cpp/cpp_enum_field.cc', 'google/protobuf/compiler/cpp/cpp_enum.cc', 'google/protobuf/compiler/command_line_interface.cc', 'google/protobuf/compiler/code_generator.cc', 'google/protobuf/wrappers.pb.cc', 'google/protobuf/wire_format.cc', 'google/protobuf/util/type_resolver_util.cc', 'google/protobuf/util/time_util.cc', 'google/protobuf/util/message_differencer.cc', 'google/protobuf/util/json_util.cc', 'google/protobuf/util/internal/utility.cc', 'google/protobuf/util/internal/type_info_test_helper.cc', 'google/protobuf/util/internal/type_info.cc', 'google/protobuf/util/internal/protostream_objectwriter.cc', 'google/protobuf/util/internal/protostream_objectsource.cc', 'google/protobuf/util/internal/proto_writer.cc', 'google/protobuf/util/internal/object_writer.cc', 'google/protobuf/util/internal/json_stream_parser.cc', 'google/protobuf/util/internal/json_objectwriter.cc', 'google/protobuf/util/internal/json_escaping.cc', 'google/protobuf/util/internal/field_mask_utility.cc', 'google/protobuf/util/internal/error_listener.cc', 'google/protobuf/util/internal/default_value_objectwriter.cc', 'google/protobuf/util/internal/datapiece.cc', 'google/protobuf/util/field_mask_util.cc', 'google/protobuf/util/field_comparator.cc', 'google/protobuf/unknown_field_set.cc', 'google/protobuf/type.pb.cc', 'google/protobuf/timestamp.pb.cc', 'google/protobuf/text_format.cc', 'google/protobuf/stubs/substitute.cc', 'google/protobuf/stubs/mathlimits.cc', 'google/protobuf/struct.pb.cc', 'google/protobuf/source_context.pb.cc', 'google/protobuf/service.cc', 'google/protobuf/reflection_ops.cc', 'google/protobuf/message.cc', 'google/protobuf/map_field.cc', 'google/protobuf/io/zero_copy_stream_impl.cc', 'google/protobuf/io/tokenizer.cc', 'google/protobuf/io/strtod.cc', 'google/protobuf/io/printer.cc', 'google/protobuf/io/gzip_stream.cc', 'google/protobuf/generated_message_reflection.cc', 'google/protobuf/field_mask.pb.cc', 'google/protobuf/extension_set_heavy.cc', 'google/protobuf/empty.pb.cc', 'google/protobuf/dynamic_message.cc', 'google/protobuf/duration.pb.cc', 'google/protobuf/descriptor_database.cc', 'google/protobuf/descriptor.pb.cc', 'google/protobuf/descriptor.cc', 'google/protobuf/compiler/parser.cc', 'google/protobuf/compiler/importer.cc', 'google/protobuf/api.pb.cc', 'google/protobuf/any.pb.cc', 'google/protobuf/any.cc', 'google/protobuf/wire_format_lite.cc', 'google/protobuf/stubs/time.cc', 'google/protobuf/stubs/strutil.cc', 'google/protobuf/stubs/structurally_valid.cc', 'google/protobuf/stubs/stringprintf.cc', 'google/protobuf/stubs/stringpiece.cc', 'google/protobuf/stubs/statusor.cc', 'google/protobuf/stubs/status.cc', 'google/protobuf/stubs/once.cc', 'google/protobuf/stubs/int128.cc', 'google/protobuf/stubs/common.cc', 'google/protobuf/stubs/bytestream.cc', 'google/protobuf/stubs/atomicops_internals_x86_msvc.cc', 
'google/protobuf/stubs/atomicops_internals_x86_gcc.cc', 'google/protobuf/repeated_field.cc', 'google/protobuf/message_lite.cc', 'google/protobuf/io/zero_copy_stream_impl_lite.cc', 'google/protobuf/io/zero_copy_stream.cc', 'google/protobuf/io/coded_stream.cc', 'google/protobuf/generated_message_util.cc', 'google/protobuf/extension_set.cc', 'google/protobuf/arenastring.cc', 'google/protobuf/arena.cc', 'google/protobuf/compiler/js/embed.cc'] PROTO_FILES=['google/protobuf/wrappers.proto', 'google/protobuf/type.proto', 'google/protobuf/timestamp.proto', 'google/protobuf/struct.proto', 'google/protobuf/source_context.proto', 'google/protobuf/field_mask.proto', 'google/protobuf/empty.proto', 'google/protobuf/duration.proto', 'google/protobuf/descriptor.proto', 'google/protobuf/compiler/plugin.proto', 'google/protobuf/api.proto', 'google/protobuf/any.proto'] CC_INCLUDE='third_party/protobuf/src' diff --git a/tools/distrib/python/grpcio_tools/setup.py b/tools/distrib/python/grpcio_tools/setup.py index 502d7ef27b..ed27f1f835 100644 --- a/tools/distrib/python/grpcio_tools/setup.py +++ b/tools/distrib/python/grpcio_tools/setup.py @@ -157,11 +157,28 @@ def extension_modules(): plugin_sources = [os.path.join('grpc_tools', '_protoc_compiler.pyx')] else: plugin_sources = [os.path.join('grpc_tools', '_protoc_compiler.cpp')] + plugin_sources += [ os.path.join('grpc_tools', 'main.cc'), - os.path.join('grpc_root', 'src', 'compiler', 'python_generator.cc')] + [ - os.path.join(CC_INCLUDE, cc_file) - for cc_file in CC_FILES] + os.path.join('grpc_root', 'src', 'compiler', 'python_generator.cc')] + + #HACK: Substitute the embed.cc, which is a JS to C++ + # preprocessor with the generated code. + # The generated code should not be material + # to the parts of protoc we use (it affects + # the JavaScript code generator, supposedly), + # but we need to be cautious about it. + cc_files_clone = list(CC_FILES) + embed_cc_file = os.path.normpath('google/protobuf/compiler/js/embed.cc') + well_known_types_file = os.path.normpath( + 'google/protobuf/compiler/js/well_known_types_embed.cc') + if embed_cc_file in cc_files_clone: + cc_files_clone.remove(embed_cc_file) + if well_known_types_file in cc_files_clone: + cc_files_clone.remove(well_known_types_file) + plugin_sources += [os.path.join('grpc_tools', 'protobuf_generated_well_known_types_embed.cc')] + plugin_sources += [os.path.join(CC_INCLUDE, cc_file) for cc_file in cc_files_clone] + plugin_ext = extension.Extension( name='grpc_tools._protoc_compiler', sources=plugin_sources, @@ -190,7 +207,7 @@ setuptools.setup( ext_modules=extension_modules(), packages=setuptools.find_packages('.'), install_requires=[ - 'protobuf>=3.0.0', + 'protobuf>=3.2.0', 'grpcio>={version}'.format(version=grpc_version.VERSION), ], package_data=package_data(), diff --git a/tools/distrib/yapf_code.sh b/tools/distrib/yapf_code.sh index 007b14810e..f28a1ce8ba 100755 --- a/tools/distrib/yapf_code.sh +++ b/tools/distrib/yapf_code.sh @@ -31,31 +31,48 @@ set -ex # change to root directory -cd $(dirname $0)/../.. +cd "$(dirname "${0}")/../.." 
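The setup.py hunk above works around protobuf's JS-to-C++ embedding step: `embed.cc` and `well_known_types_embed.cc` are dropped from the generated-source list, and the pregenerated `protobuf_generated_well_known_types_embed.cc` shipped under `grpc_tools` is compiled instead. A condensed sketch of that substitution (CC_FILES abridged to three entries for illustration):

```python
import os

CC_FILES = [  # abridged; the full list lives in protoc_lib_deps.py
    'google/protobuf/compiler/js/embed.cc',
    'google/protobuf/compiler/js/well_known_types_embed.cc',
    'google/protobuf/compiler/js/js_generator.cc',
]
CC_INCLUDE = 'third_party/protobuf/src'

cc_files_clone = list(CC_FILES)
for generated in ('google/protobuf/compiler/js/embed.cc',
                  'google/protobuf/compiler/js/well_known_types_embed.cc'):
    generated = os.path.normpath(generated)
    if generated in cc_files_clone:
        cc_files_clone.remove(generated)

plugin_sources = [os.path.join(
    'grpc_tools', 'protobuf_generated_well_known_types_embed.cc')]
plugin_sources += [os.path.join(CC_INCLUDE, f) for f in cc_files_clone]
```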
-DIRS=src/python -EXCLUSIONS='src/python/grpcio/grpc_*.py src/python/grpcio_health_checking/grpc_*.py src/python/grpcio_reflection/grpc_*.py src/python/grpcio_tests/grpc_*.py' +DIRS=( + 'src/python' +) +EXCLUSIONS=( + 'grpcio/grpc_*.py' + 'grpcio_health_checking/grpc_*.py' + 'grpcio_reflection/grpc_*.py' + 'grpcio_tests/grpc_*.py' +) -VIRTUALENV=python_format_venv +VIRTUALENV=yapf_virtual_environment virtualenv $VIRTUALENV -PYTHON=`realpath $VIRTUALENV/bin/python` -$PYTHON -m pip install futures +PYTHON=$(realpath "${VIRTUALENV}/bin/python") +$PYTHON -m pip install --upgrade pip +$PYTHON -m pip install --upgrade futures $PYTHON -m pip install yapf==0.16.0 -exclusion_args="" -for exclusion in $EXCLUSIONS; do - exclusion_args="$exclusion_args --exclude $exclusion" -done +yapf() { + local exclusion exclusion_args=() + for exclusion in "${EXCLUSIONS[@]}"; do + exclusion_args+=( "--exclude" "$1/${exclusion}" ) + done + $PYTHON -m yapf -i -r --style=setup.cfg -p "${exclusion_args[@]}" "${1}" +} -script_result=0 -for dir in $DIRS; do - tempdir=`mktemp -d` - cp -RT $dir $tempdir - $PYTHON -m yapf -i -r -p $exclusion_args $dir - if ! diff -r $dir $tempdir; then - script_result=1 - fi - rm -rf $tempdir -done -exit $script_result +if [[ -z "${TEST}" ]]; then + for dir in "${DIRS[@]}"; do + yapf "${dir}" + done +else + ok=yes + for dir in "${DIRS[@]}"; do + tempdir=$(mktemp -d) + cp -RT "${dir}" "${tempdir}" + yapf "${tempdir}" + diff -ru "${dir}" "${tempdir}" || ok=no + rm -rf "${tempdir}" + done + if [[ ${ok} == no ]]; then + false + fi +fi diff --git a/tools/dockerfile/grpc_artifact_linux_x64/Dockerfile b/tools/dockerfile/grpc_artifact_linux_x64/Dockerfile index 669e3557b8..3479944717 100644 --- a/tools/dockerfile/grpc_artifact_linux_x64/Dockerfile +++ b/tools/dockerfile/grpc_artifact_linux_x64/Dockerfile @@ -76,7 +76,7 @@ RUN apt-get update && apt-get install -y \ RUN pip install pip --upgrade RUN pip install virtualenv -RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 +RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 ################## diff --git a/tools/dockerfile/grpc_artifact_linux_x86/Dockerfile b/tools/dockerfile/grpc_artifact_linux_x86/Dockerfile index 860b8f4fb9..75d156f6d8 100644 --- a/tools/dockerfile/grpc_artifact_linux_x86/Dockerfile +++ b/tools/dockerfile/grpc_artifact_linux_x86/Dockerfile @@ -76,7 +76,7 @@ RUN apt-get update && apt-get install -y \ RUN pip install pip --upgrade RUN pip install virtualenv -RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 +RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 ################## diff --git a/tools/dockerfile/interoptest/grpc_interop_csharp/Dockerfile b/tools/dockerfile/interoptest/grpc_interop_csharp/Dockerfile index 087cc4e2bb..14a2468abc 100644 --- a/tools/dockerfile/interoptest/grpc_interop_csharp/Dockerfile +++ b/tools/dockerfile/interoptest/grpc_interop_csharp/Dockerfile @@ -76,7 +76,7 @@ RUN apt-get update && apt-get install -y \ # Install Python packages from PyPI RUN pip install pip --upgrade RUN pip install virtualenv -RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0 +RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 #================ # C# dependencies diff --git a/tools/dockerfile/interoptest/grpc_interop_csharpcoreclr/Dockerfile b/tools/dockerfile/interoptest/grpc_interop_csharpcoreclr/Dockerfile index efe6e39118..c26c9a2826 100644 --- a/tools/dockerfile/interoptest/grpc_interop_csharpcoreclr/Dockerfile +++ 
b/tools/dockerfile/interoptest/grpc_interop_csharpcoreclr/Dockerfile @@ -76,7 +76,7 @@ RUN apt-get update && apt-get install -y \ # Install Python packages from PyPI RUN pip install pip --upgrade RUN pip install virtualenv -RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0 +RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 #================ # C# dependencies @@ -99,8 +99,12 @@ RUN nuget update -self # Install dotnet SDK based on https://www.microsoft.com/net/core#debian RUN apt-get update && apt-get install -y curl libunwind8 gettext -RUN curl -sSL -o dotnet.tar.gz https://go.microsoft.com/fwlink/?LinkID=809130 -RUN mkdir -p /opt/dotnet && tar zxf dotnet.tar.gz -C /opt/dotnet +# dotnet-dev-1.0.0-preview2-003121 +RUN curl -sSL -o dotnet100.tar.gz https://go.microsoft.com/fwlink/?LinkID=809130 +RUN mkdir -p /opt/dotnet && tar zxf dotnet100.tar.gz -C /opt/dotnet +# dotnet-dev-1.0.1 +RUN curl -sSL -o dotnet101.tar.gz https://go.microsoft.com/fwlink/?LinkID=843453 +RUN mkdir -p /opt/dotnet && tar zxf dotnet101.tar.gz -C /opt/dotnet RUN ln -s /opt/dotnet/dotnet /usr/local/bin # Trigger the population of the local package cache diff --git a/tools/dockerfile/interoptest/grpc_interop_cxx/Dockerfile b/tools/dockerfile/interoptest/grpc_interop_cxx/Dockerfile index aa77d5f127..2d10e3fdfe 100644 --- a/tools/dockerfile/interoptest/grpc_interop_cxx/Dockerfile +++ b/tools/dockerfile/interoptest/grpc_interop_cxx/Dockerfile @@ -76,7 +76,7 @@ RUN apt-get update && apt-get install -y \ # Install Python packages from PyPI RUN pip install pip --upgrade RUN pip install virtualenv -RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0 +RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 #================= # C++ dependencies diff --git a/tools/dockerfile/interoptest/grpc_interop_cxx/build_interop.sh b/tools/dockerfile/interoptest/grpc_interop_cxx/build_interop.sh index 7a7ca0d3d1..0a61334103 100755 --- a/tools/dockerfile/interoptest/grpc_interop_cxx/build_interop.sh +++ b/tools/dockerfile/interoptest/grpc_interop_cxx/build_interop.sh @@ -47,3 +47,6 @@ make install-certs # build C++ interop client & server make interop_client interop_server + +# build C++ http2 client +make http2_client diff --git a/tools/dockerfile/interoptest/grpc_interop_go/Dockerfile b/tools/dockerfile/interoptest/grpc_interop_go/Dockerfile index 05e963d1e6..d3bf071c72 100644 --- a/tools/dockerfile/interoptest/grpc_interop_go/Dockerfile +++ b/tools/dockerfile/interoptest/grpc_interop_go/Dockerfile @@ -45,7 +45,7 @@ RUN apt-get update && apt-get install -y \ # Install Python packages from PyPI RUN pip install pip --upgrade RUN pip install virtualenv -RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0 +RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 # Define the default command. 
CMD ["bash"] diff --git a/tools/dockerfile/interoptest/grpc_interop_http2/Dockerfile b/tools/dockerfile/interoptest/grpc_interop_http2/Dockerfile index 3a5e15d21b..66d9b4f640 100644 --- a/tools/dockerfile/interoptest/grpc_interop_http2/Dockerfile +++ b/tools/dockerfile/interoptest/grpc_interop_http2/Dockerfile @@ -45,7 +45,7 @@ RUN apt-get update && apt-get install -y \ # Install Python packages from PyPI RUN pip install pip --upgrade RUN pip install virtualenv -RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0 +RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 RUN pip install twisted h2 diff --git a/tools/dockerfile/interoptest/grpc_interop_java/Dockerfile b/tools/dockerfile/interoptest/grpc_interop_java/Dockerfile index b5fe54f991..2023467d59 100644 --- a/tools/dockerfile/interoptest/grpc_interop_java/Dockerfile +++ b/tools/dockerfile/interoptest/grpc_interop_java/Dockerfile @@ -60,7 +60,7 @@ RUN apt-get update && apt-get install -y \ # Install Python packages from PyPI RUN pip install pip --upgrade RUN pip install virtualenv -RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0 +RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 # Trigger download of as many Gradle artifacts as possible. diff --git a/tools/dockerfile/interoptest/grpc_interop_node/Dockerfile b/tools/dockerfile/interoptest/grpc_interop_node/Dockerfile index d9a7501829..9945260ea4 100644 --- a/tools/dockerfile/interoptest/grpc_interop_node/Dockerfile +++ b/tools/dockerfile/interoptest/grpc_interop_node/Dockerfile @@ -76,7 +76,7 @@ RUN apt-get update && apt-get install -y \ # Install Python packages from PyPI RUN pip install pip --upgrade RUN pip install virtualenv -RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0 +RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 #================== # Node dependencies diff --git a/tools/dockerfile/interoptest/grpc_interop_python/Dockerfile b/tools/dockerfile/interoptest/grpc_interop_python/Dockerfile index 10a88916ad..94c17078d3 100644 --- a/tools/dockerfile/interoptest/grpc_interop_python/Dockerfile +++ b/tools/dockerfile/interoptest/grpc_interop_python/Dockerfile @@ -76,7 +76,7 @@ RUN apt-get update && apt-get install -y \ # Install Python packages from PyPI RUN pip install pip --upgrade RUN pip install virtualenv -RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0 +RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 # Prepare ccache RUN ln -s /usr/bin/ccache /usr/local/bin/gcc diff --git a/tools/dockerfile/interoptest/grpc_interop_ruby/Dockerfile b/tools/dockerfile/interoptest/grpc_interop_ruby/Dockerfile index dae64e5c8c..679c8ff47a 100644 --- a/tools/dockerfile/interoptest/grpc_interop_ruby/Dockerfile +++ b/tools/dockerfile/interoptest/grpc_interop_ruby/Dockerfile @@ -76,7 +76,7 @@ RUN apt-get update && apt-get install -y \ # Install Python packages from PyPI RUN pip install pip --upgrade RUN pip install virtualenv -RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0 +RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 #================== # Ruby dependencies diff --git a/tools/dockerfile/push_testing_images.sh b/tools/dockerfile/push_testing_images.sh index 9dceb29a87..973e045ffe 100755 --- a/tools/dockerfile/push_testing_images.sh +++ b/tools/dockerfile/push_testing_images.sh @@ -44,7 +44,7 @@ cd - DOCKERHUB_ORGANIZATION=grpctesting -for DOCKERFILE_DIR in 
tools/dockerfile/test/fuzzer tools/dockerfile/test/sanity +for DOCKERFILE_DIR in tools/dockerfile/test/* do # Generate image name based on Dockerfile checksum. That works well as long # as can count on dockerfiles being written in a way that changing the logical diff --git a/tools/dockerfile/stress_test/grpc_interop_stress_csharp/Dockerfile b/tools/dockerfile/stress_test/grpc_interop_stress_csharp/Dockerfile index 328825392b..cd1e934341 100644 --- a/tools/dockerfile/stress_test/grpc_interop_stress_csharp/Dockerfile +++ b/tools/dockerfile/stress_test/grpc_interop_stress_csharp/Dockerfile @@ -76,7 +76,7 @@ RUN apt-get update && apt-get install -y \ # Install Python packages from PyPI RUN pip install pip --upgrade RUN pip install virtualenv -RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0 +RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 # Prepare ccache RUN ln -s /usr/bin/ccache /usr/local/bin/gcc diff --git a/tools/dockerfile/stress_test/grpc_interop_stress_cxx/Dockerfile b/tools/dockerfile/stress_test/grpc_interop_stress_cxx/Dockerfile index e082da648b..d0f66d9955 100644 --- a/tools/dockerfile/stress_test/grpc_interop_stress_cxx/Dockerfile +++ b/tools/dockerfile/stress_test/grpc_interop_stress_cxx/Dockerfile @@ -76,7 +76,7 @@ RUN apt-get update && apt-get install -y \ # Install Python packages from PyPI RUN pip install pip --upgrade RUN pip install virtualenv -RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0 +RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 # Prepare ccache RUN ln -s /usr/bin/ccache /usr/local/bin/gcc diff --git a/tools/dockerfile/stress_test/grpc_interop_stress_go/Dockerfile b/tools/dockerfile/stress_test/grpc_interop_stress_go/Dockerfile index 1e2b7d8c67..bbf7de7f91 100644 --- a/tools/dockerfile/stress_test/grpc_interop_stress_go/Dockerfile +++ b/tools/dockerfile/stress_test/grpc_interop_stress_go/Dockerfile @@ -47,7 +47,7 @@ RUN apt-get update && apt-get install -y \ # Install Python packages from PyPI RUN pip install pip --upgrade RUN pip install virtualenv -RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0 +RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 # Using login shell removes Go from path, so we add it. 
RUN ln -s /usr/local/go/bin/go /usr/local/bin diff --git a/tools/dockerfile/stress_test/grpc_interop_stress_java/Dockerfile b/tools/dockerfile/stress_test/grpc_interop_stress_java/Dockerfile index 0c17ff595e..229ea469c4 100644 --- a/tools/dockerfile/stress_test/grpc_interop_stress_java/Dockerfile +++ b/tools/dockerfile/stress_test/grpc_interop_stress_java/Dockerfile @@ -76,7 +76,7 @@ RUN apt-get update && apt-get install -y \ # Install Python packages from PyPI RUN pip install pip --upgrade RUN pip install virtualenv -RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0 +RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 # Prepare ccache RUN ln -s /usr/bin/ccache /usr/local/bin/gcc diff --git a/tools/dockerfile/stress_test/grpc_interop_stress_node/Dockerfile b/tools/dockerfile/stress_test/grpc_interop_stress_node/Dockerfile index 0594f69a5b..5fd0bc0eb2 100644 --- a/tools/dockerfile/stress_test/grpc_interop_stress_node/Dockerfile +++ b/tools/dockerfile/stress_test/grpc_interop_stress_node/Dockerfile @@ -76,7 +76,7 @@ RUN apt-get update && apt-get install -y \ # Install Python packages from PyPI RUN pip install pip --upgrade RUN pip install virtualenv -RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0 +RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 #================== # Node dependencies diff --git a/tools/dockerfile/stress_test/grpc_interop_stress_php/Dockerfile b/tools/dockerfile/stress_test/grpc_interop_stress_php/Dockerfile index 0fe9c151e5..b5198b4652 100644 --- a/tools/dockerfile/stress_test/grpc_interop_stress_php/Dockerfile +++ b/tools/dockerfile/stress_test/grpc_interop_stress_php/Dockerfile @@ -76,7 +76,7 @@ RUN apt-get update && apt-get install -y \ # Install Python packages from PyPI RUN pip install pip --upgrade RUN pip install virtualenv -RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0 +RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 #================== # Ruby dependencies diff --git a/tools/dockerfile/stress_test/grpc_interop_stress_python/Dockerfile b/tools/dockerfile/stress_test/grpc_interop_stress_python/Dockerfile index 20d2d3f57b..8e1de51f33 100644 --- a/tools/dockerfile/stress_test/grpc_interop_stress_python/Dockerfile +++ b/tools/dockerfile/stress_test/grpc_interop_stress_python/Dockerfile @@ -93,7 +93,7 @@ RUN apt-get update && apt-get install -y \ # Install Python packages from PyPI RUN pip install pip --upgrade RUN pip install virtualenv -RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0 +RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 RUN pip install coverage diff --git a/tools/dockerfile/stress_test/grpc_interop_stress_ruby/Dockerfile b/tools/dockerfile/stress_test/grpc_interop_stress_ruby/Dockerfile index f459153fe5..9d291aac58 100644 --- a/tools/dockerfile/stress_test/grpc_interop_stress_ruby/Dockerfile +++ b/tools/dockerfile/stress_test/grpc_interop_stress_ruby/Dockerfile @@ -76,7 +76,7 @@ RUN apt-get update && apt-get install -y \ # Install Python packages from PyPI RUN pip install pip --upgrade RUN pip install virtualenv -RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0 +RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 # Prepare ccache RUN ln -s /usr/bin/ccache /usr/local/bin/gcc diff --git a/tools/dockerfile/test/csharp_coreclr_x64/Dockerfile b/tools/dockerfile/test/csharp_coreclr_x64/Dockerfile index 
efe6e39118..c26c9a2826 100644 --- a/tools/dockerfile/test/csharp_coreclr_x64/Dockerfile +++ b/tools/dockerfile/test/csharp_coreclr_x64/Dockerfile @@ -76,7 +76,7 @@ RUN apt-get update && apt-get install -y \ # Install Python packages from PyPI RUN pip install pip --upgrade RUN pip install virtualenv -RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0 +RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 #================ # C# dependencies @@ -99,8 +99,12 @@ RUN nuget update -self # Install dotnet SDK based on https://www.microsoft.com/net/core#debian RUN apt-get update && apt-get install -y curl libunwind8 gettext -RUN curl -sSL -o dotnet.tar.gz https://go.microsoft.com/fwlink/?LinkID=809130 -RUN mkdir -p /opt/dotnet && tar zxf dotnet.tar.gz -C /opt/dotnet +# dotnet-dev-1.0.0-preview2-003121 +RUN curl -sSL -o dotnet100.tar.gz https://go.microsoft.com/fwlink/?LinkID=809130 +RUN mkdir -p /opt/dotnet && tar zxf dotnet100.tar.gz -C /opt/dotnet +# dotnet-dev-1.0.1 +RUN curl -sSL -o dotnet101.tar.gz https://go.microsoft.com/fwlink/?LinkID=843453 +RUN mkdir -p /opt/dotnet && tar zxf dotnet101.tar.gz -C /opt/dotnet RUN ln -s /opt/dotnet/dotnet /usr/local/bin # Trigger the population of the local package cache diff --git a/tools/dockerfile/test/csharp_jessie_x64/Dockerfile b/tools/dockerfile/test/csharp_jessie_x64/Dockerfile index 087cc4e2bb..14a2468abc 100644 --- a/tools/dockerfile/test/csharp_jessie_x64/Dockerfile +++ b/tools/dockerfile/test/csharp_jessie_x64/Dockerfile @@ -76,7 +76,7 @@ RUN apt-get update && apt-get install -y \ # Install Python packages from PyPI RUN pip install pip --upgrade RUN pip install virtualenv -RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0 +RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 #================ # C# dependencies diff --git a/tools/dockerfile/test/cxx_jessie_x64/Dockerfile b/tools/dockerfile/test/cxx_jessie_x64/Dockerfile index e968a0f589..4bb97c7aa9 100644 --- a/tools/dockerfile/test/cxx_jessie_x64/Dockerfile +++ b/tools/dockerfile/test/cxx_jessie_x64/Dockerfile @@ -76,7 +76,7 @@ RUN apt-get update && apt-get install -y \ # Install Python packages from PyPI RUN pip install pip --upgrade RUN pip install virtualenv -RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0 +RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 #================= # C++ dependencies diff --git a/tools/dockerfile/test/cxx_jessie_x86/Dockerfile b/tools/dockerfile/test/cxx_jessie_x86/Dockerfile index f985480254..c4b710b5df 100644 --- a/tools/dockerfile/test/cxx_jessie_x86/Dockerfile +++ b/tools/dockerfile/test/cxx_jessie_x86/Dockerfile @@ -76,7 +76,7 @@ RUN apt-get update && apt-get install -y \ # Install Python packages from PyPI RUN pip install pip --upgrade RUN pip install virtualenv -RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0 +RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 #================= # C++ dependencies diff --git a/tools/dockerfile/test/cxx_ubuntu1404_x64/Dockerfile b/tools/dockerfile/test/cxx_ubuntu1404_x64/Dockerfile index 2b3f4af3e6..bd742dff34 100644 --- a/tools/dockerfile/test/cxx_ubuntu1404_x64/Dockerfile +++ b/tools/dockerfile/test/cxx_ubuntu1404_x64/Dockerfile @@ -76,7 +76,7 @@ RUN apt-get update && apt-get install -y \ # Install Python packages from PyPI RUN pip install pip --upgrade RUN pip install virtualenv -RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 
six==1.10.0 +RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 #================= # C++ dependencies diff --git a/tools/dockerfile/test/cxx_ubuntu1604_x64/Dockerfile b/tools/dockerfile/test/cxx_ubuntu1604_x64/Dockerfile index 2d282276d3..bc46b3055a 100644 --- a/tools/dockerfile/test/cxx_ubuntu1604_x64/Dockerfile +++ b/tools/dockerfile/test/cxx_ubuntu1604_x64/Dockerfile @@ -76,7 +76,7 @@ RUN apt-get update && apt-get install -y \ # Install Python packages from PyPI RUN pip install pip --upgrade RUN pip install virtualenv -RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0 +RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 #================= # C++ dependencies diff --git a/tools/dockerfile/test/cxx_wheezy_x64/Dockerfile b/tools/dockerfile/test/cxx_wheezy_x64/Dockerfile index f22fcacc13..f7d7f542c1 100644 --- a/tools/dockerfile/test/cxx_wheezy_x64/Dockerfile +++ b/tools/dockerfile/test/cxx_wheezy_x64/Dockerfile @@ -76,7 +76,7 @@ RUN apt-get update && apt-get install -y \ # Install Python packages from PyPI RUN pip install pip --upgrade RUN pip install virtualenv -RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0 +RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 #================= # C++ dependencies diff --git a/tools/dockerfile/test/fuzzer/Dockerfile b/tools/dockerfile/test/fuzzer/Dockerfile index bd04f07cea..b398b70b64 100644 --- a/tools/dockerfile/test/fuzzer/Dockerfile +++ b/tools/dockerfile/test/fuzzer/Dockerfile @@ -76,7 +76,7 @@ RUN apt-get update && apt-get install -y \ # Install Python packages from PyPI RUN pip install pip --upgrade RUN pip install virtualenv -RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0 +RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 #================= # C++ dependencies diff --git a/tools/dockerfile/test/multilang_jessie_x64/Dockerfile b/tools/dockerfile/test/multilang_jessie_x64/Dockerfile index 2540b52ec8..5d0c1686f1 100644 --- a/tools/dockerfile/test/multilang_jessie_x64/Dockerfile +++ b/tools/dockerfile/test/multilang_jessie_x64/Dockerfile @@ -133,7 +133,7 @@ RUN apt-get update && apt-get install -y \ # Install Python packages from PyPI RUN pip install pip --upgrade RUN pip install virtualenv -RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0 +RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 # Prepare ccache RUN ln -s /usr/bin/ccache /usr/local/bin/gcc diff --git a/tools/dockerfile/test/node_jessie_x64/Dockerfile b/tools/dockerfile/test/node_jessie_x64/Dockerfile index 7f93933eca..4595aa6bea 100644 --- a/tools/dockerfile/test/node_jessie_x64/Dockerfile +++ b/tools/dockerfile/test/node_jessie_x64/Dockerfile @@ -87,7 +87,7 @@ RUN apt-get update && apt-get install -y \ # Install Python packages from PyPI RUN pip install pip --upgrade RUN pip install virtualenv -RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0 +RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 #================== # Node dependencies diff --git a/tools/dockerfile/test/php7_jessie_x64/Dockerfile b/tools/dockerfile/test/php7_jessie_x64/Dockerfile index 221338956e..0e2c103afd 100644 --- a/tools/dockerfile/test/php7_jessie_x64/Dockerfile +++ b/tools/dockerfile/test/php7_jessie_x64/Dockerfile @@ -88,7 +88,7 @@ RUN apt-get update && apt-get install -y \ # Install Python packages from PyPI RUN pip install pip --upgrade RUN pip install 
virtualenv -RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0 +RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 # Prepare ccache RUN ln -s /usr/bin/ccache /usr/local/bin/gcc diff --git a/tools/dockerfile/test/php_jessie_x64/Dockerfile b/tools/dockerfile/test/php_jessie_x64/Dockerfile index 17ea36b76c..c6f3dde39a 100644 --- a/tools/dockerfile/test/php_jessie_x64/Dockerfile +++ b/tools/dockerfile/test/php_jessie_x64/Dockerfile @@ -76,7 +76,7 @@ RUN apt-get update && apt-get install -y \ # Install Python packages from PyPI RUN pip install pip --upgrade RUN pip install virtualenv -RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0 +RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 #================= # PHP dependencies diff --git a/tools/dockerfile/test/python_jessie_x64/Dockerfile b/tools/dockerfile/test/python_jessie_x64/Dockerfile index 10a88916ad..94c17078d3 100644 --- a/tools/dockerfile/test/python_jessie_x64/Dockerfile +++ b/tools/dockerfile/test/python_jessie_x64/Dockerfile @@ -76,7 +76,7 @@ RUN apt-get update && apt-get install -y \ # Install Python packages from PyPI RUN pip install pip --upgrade RUN pip install virtualenv -RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0 +RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 # Prepare ccache RUN ln -s /usr/bin/ccache /usr/local/bin/gcc diff --git a/tools/dockerfile/test/python_pyenv_x64/Dockerfile b/tools/dockerfile/test/python_pyenv_x64/Dockerfile index ecd785a86d..435a9fdc97 100644 --- a/tools/dockerfile/test/python_pyenv_x64/Dockerfile +++ b/tools/dockerfile/test/python_pyenv_x64/Dockerfile @@ -76,7 +76,7 @@ RUN apt-get update && apt-get install -y \ # Install Python packages from PyPI RUN pip install pip --upgrade RUN pip install virtualenv -RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0 +RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 # Install dependencies for pyenv RUN apt-get update && apt-get install -y \ @@ -92,6 +92,9 @@ RUN apt-get update && apt-get install -y \ # Install Pyenv and dev Python versions 3.5 and 3.6 RUN curl -L https://raw.githubusercontent.com/yyuu/pyenv-installer/master/bin/pyenv-installer | bash +ENV PATH /root/.pyenv/bin:$PATH +RUN eval "$(pyenv init -)" +RUN eval "$(pyenv virtualenv-init -)" RUN pyenv update RUN pyenv install 3.5-dev RUN pyenv install 3.6-dev diff --git a/tools/dockerfile/test/ruby_jessie_x64/Dockerfile b/tools/dockerfile/test/ruby_jessie_x64/Dockerfile index dae64e5c8c..679c8ff47a 100644 --- a/tools/dockerfile/test/ruby_jessie_x64/Dockerfile +++ b/tools/dockerfile/test/ruby_jessie_x64/Dockerfile @@ -76,7 +76,7 @@ RUN apt-get update && apt-get install -y \ # Install Python packages from PyPI RUN pip install pip --upgrade RUN pip install virtualenv -RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0 +RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 #================== # Ruby dependencies diff --git a/tools/dockerfile/test/sanity/Dockerfile b/tools/dockerfile/test/sanity/Dockerfile index 811384fda1..0da2a1914a 100644 --- a/tools/dockerfile/test/sanity/Dockerfile +++ b/tools/dockerfile/test/sanity/Dockerfile @@ -76,7 +76,7 @@ RUN apt-get update && apt-get install -y \ # Install Python packages from PyPI RUN pip install pip --upgrade RUN pip install virtualenv -RUN pip install futures==2.2.0 enum34==1.0.4 protobuf==3.0.0a2 six==1.10.0 +RUN pip install 
futures==2.2.0 enum34==1.0.4 protobuf==3.2.0 six==1.10.0 #======================== # Sanity test dependencies diff --git a/tools/doxygen/Doxyfile.c++ b/tools/doxygen/Doxyfile.c++ index 4d2c429957..b5021ede15 100644 --- a/tools/doxygen/Doxyfile.c++ +++ b/tools/doxygen/Doxyfile.c++ @@ -40,7 +40,7 @@ PROJECT_NAME = "GRPC C++" # could be handy for archiving the generated documentation or if some version # control system is used. -PROJECT_NUMBER = 1.2.0-dev +PROJECT_NUMBER = 1.3.0-dev # Using the PROJECT_BRIEF tag one can provide an optional one line description # for a project that appears at the top of each page and should give viewer a @@ -786,7 +786,9 @@ doc/naming.md \ doc/negative-http2-interop-test-descriptions.md \ doc/server-reflection.md \ doc/server_reflection_tutorial.md \ +doc/server_side_auth.md \ doc/service_config.md \ +doc/status_ordering.md \ doc/statuscodes.md \ doc/stress_test_framework.md \ doc/wait-for-ready.md \ @@ -802,6 +804,7 @@ include/grpc++/generic/generic_stub.h \ include/grpc++/grpc++.h \ include/grpc++/health_check_service_interface.h \ include/grpc++/impl/call.h \ +include/grpc++/impl/channel_argument_option.h \ include/grpc++/impl/client_unary_call.h \ include/grpc++/impl/codegen/async_stream.h \ include/grpc++/impl/codegen/async_unary_call.h \ diff --git a/tools/doxygen/Doxyfile.c++.internal b/tools/doxygen/Doxyfile.c++.internal index 498f572dc6..758ba2402a 100644 --- a/tools/doxygen/Doxyfile.c++.internal +++ b/tools/doxygen/Doxyfile.c++.internal @@ -40,7 +40,7 @@ PROJECT_NAME = "GRPC C++" # could be handy for archiving the generated documentation or if some version # control system is used. -PROJECT_NUMBER = 1.2.0-dev +PROJECT_NUMBER = 1.3.0-dev # Using the PROJECT_BRIEF tag one can provide an optional one line description # for a project that appears at the top of each page and should give viewer a @@ -786,7 +786,9 @@ doc/naming.md \ doc/negative-http2-interop-test-descriptions.md \ doc/server-reflection.md \ doc/server_reflection_tutorial.md \ +doc/server_side_auth.md \ doc/service_config.md \ +doc/status_ordering.md \ doc/statuscodes.md \ doc/stress_test_framework.md \ doc/wait-for-ready.md \ @@ -802,6 +804,7 @@ include/grpc++/generic/generic_stub.h \ include/grpc++/grpc++.h \ include/grpc++/health_check_service_interface.h \ include/grpc++/impl/call.h \ +include/grpc++/impl/channel_argument_option.h \ include/grpc++/impl/client_unary_call.h \ include/grpc++/impl/codegen/async_stream.h \ include/grpc++/impl/codegen/async_unary_call.h \ @@ -913,6 +916,7 @@ src/cpp/common/secure_channel_arguments.cc \ src/cpp/common/secure_create_auth_context.cc \ src/cpp/common/version_cc.cc \ src/cpp/server/async_generic_service.cc \ +src/cpp/server/channel_argument_option.cc \ src/cpp/server/create_default_thread_pool.cc \ src/cpp/server/dynamic_thread_pool.cc \ src/cpp/server/dynamic_thread_pool.h \ diff --git a/tools/doxygen/Doxyfile.core b/tools/doxygen/Doxyfile.core index fa41e56b43..cd3f2af44c 100644 --- a/tools/doxygen/Doxyfile.core +++ b/tools/doxygen/Doxyfile.core @@ -785,7 +785,9 @@ doc/naming.md \ doc/negative-http2-interop-test-descriptions.md \ doc/server-reflection.md \ doc/server_reflection_tutorial.md \ +doc/server_side_auth.md \ doc/service_config.md \ +doc/status_ordering.md \ doc/statuscodes.md \ doc/stress_test_framework.md \ doc/wait-for-ready.md \ diff --git a/tools/doxygen/Doxyfile.core.internal b/tools/doxygen/Doxyfile.core.internal index 91984348db..fbe1f7f78e 100644 --- a/tools/doxygen/Doxyfile.core.internal +++ 
b/tools/doxygen/Doxyfile.core.internal @@ -785,7 +785,9 @@ doc/naming.md \ doc/negative-http2-interop-test-descriptions.md \ doc/server-reflection.md \ doc/server_reflection_tutorial.md \ +doc/server_side_auth.md \ doc/service_config.md \ +doc/status_ordering.md \ doc/statuscodes.md \ doc/stress_test_framework.md \ doc/wait-for-ready.md \ diff --git a/tools/gce/linux_performance_worker_init.sh b/tools/gce/linux_performance_worker_init.sh index ab29e015e0..3380f3de3e 100755 --- a/tools/gce/linux_performance_worker_init.sh +++ b/tools/gce/linux_performance_worker_init.sh @@ -142,7 +142,7 @@ gem install bundler # Significant performance improvements with grpc-go have been observed after # upgrading from go 1.5 to a later version, so a later go version is preferred. # Following go install instructions from https://golang.org/doc/install -GO_VERSION=1.7.1 +GO_VERSION=1.8 OS=linux ARCH=amd64 curl -O https://storage.googleapis.com/golang/go${GO_VERSION}.${OS}-${ARCH}.tar.gz diff --git a/tools/internal_ci/linux/grpc_master.cfg b/tools/internal_ci/linux/grpc_master.cfg index 7536a91a67..6c94c3b4d8 100644 --- a/tools/internal_ci/linux/grpc_master.cfg +++ b/tools/internal_ci/linux/grpc_master.cfg @@ -34,6 +34,6 @@ build_file: "grpc/tools/internal_ci/linux/grpc_master.sh" timeout_mins: 240 action { define_artifacts { - regex: "**/sponge_log.xml" + regex: "**/*sponge_log.xml" } } diff --git a/tools/internal_ci/linux/grpc_master.sh b/tools/internal_ci/linux/grpc_master.sh index d01d6375e9..9ecf123959 100755 --- a/tools/internal_ci/linux/grpc_master.sh +++ b/tools/internal_ci/linux/grpc_master.sh @@ -45,6 +45,8 @@ ulimit -n 2000 git submodule update --init +# download docker images from dockerhub +export DOCKERHUB_ORGANIZATION=grpctesting tools/run_tests/run_tests.py -l c -t -x sponge_log.xml || FAILED="true" # kill port_server.py to prevent the build from hanging diff --git a/tools/internal_ci/linux/grpc_master_sanitizers.cfg b/tools/internal_ci/linux/grpc_master_sanitizers.cfg new file mode 100644 index 0000000000..a2a9407128 --- /dev/null +++ b/tools/internal_ci/linux/grpc_master_sanitizers.cfg @@ -0,0 +1,39 @@ +# Copyright 2017, Google Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
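(An illustrative aside on the artifact-pattern fix above: the old glob **/sponge_log.xml only matched files literally named sponge_log.xml, while **/*sponge_log.xml also catches prefixed report names such as c_windows_dbg_sponge_log.xml produced by the per-language runs. The internal CI's glob dialect is its own; a rough sketch of the intent using Python's fnmatch:

from fnmatch import fnmatch

path = 'workspace/c_windows_dbg_sponge_log.xml'
print(fnmatch(path, '**/sponge_log.xml'))   # False: needs a literal /sponge_log.xml suffix
print(fnmatch(path, '**/*sponge_log.xml'))  # True: any prefix before sponge_log.xml is allowed
)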
IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# Config file for the internal CI (in protobuf text format) + +# Location of the continuous shell script in repository. +build_file: "grpc/tools/internal_ci/linux/grpc_master_sanitizers.sh" +timeout_mins: 1440 +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} diff --git a/tools/dockerfile/bazel/Dockerfile b/tools/internal_ci/linux/grpc_master_sanitizers.sh index 2a80a4d4d5..d22387fb20 100644..100755 --- a/tools/dockerfile/bazel/Dockerfile +++ b/tools/internal_ci/linux/grpc_master_sanitizers.sh @@ -1,4 +1,5 @@ -# Copyright 2015, Google Inc. +#!/bin/bash +# Copyright 2017, Google Inc. # All rights reserved. # # Redistribution and use in source and binary forms, with or without @@ -27,26 +28,13 @@ # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -FROM ubuntu:wily -RUN apt-get update -RUN apt-get -y install software-properties-common python-software-properties -RUN add-apt-repository ppa:webupd8team/java -RUN apt-get update -RUN apt-get -y install \ - vim \ - wget \ - openjdk-8-jdk \ - pkg-config \ - zip \ - g++ \ - zlib1g-dev \ - unzip \ - git +set -ex -RUN git clone https://github.com/bazelbuild/bazel.git /bazel -RUN cd /bazel && ./compile.sh +# change to grpc repo root +cd $(dirname $0)/../../.. -RUN ln -s /bazel/output/bazel /bin/ +git submodule update --init -# ensure the installation has been extracted -RUN bazel +# download docker images from dockerhub +export DOCKERHUB_ORGANIZATION=grpctesting +tools/run_tests/run_tests_matrix.py -f sanitizers linux diff --git a/tools/internal_ci/linux/grpc_portability.cfg b/tools/internal_ci/linux/grpc_portability.cfg new file mode 100644 index 0000000000..0c25661d62 --- /dev/null +++ b/tools/internal_ci/linux/grpc_portability.cfg @@ -0,0 +1,39 @@ +# Copyright 2017, Google Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
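(An illustrative aside: as their header comments note, these .cfg files are protobuf text format read by the internal CI, and each one names a build script, a timeout, and an artifact pattern. A minimal sketch of pulling those fields out, assuming only the flat key/value layout visible above rather than the CI's real parser:

import re

cfg = open('tools/internal_ci/linux/grpc_master_sanitizers.cfg').read()
build_file = re.search(r'build_file:\s*"([^"]+)"', cfg).group(1)
timeout_mins = int(re.search(r'timeout_mins:\s*(\d+)', cfg).group(1))
artifacts = re.search(r'regex:\s*"([^"]+)"', cfg).group(1)
print('%s, %d min, artifacts=%s' % (build_file, timeout_mins, artifacts))
)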
IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# Config file for the internal CI (in protobuf text format) + +# Location of the continuous shell script in repository. +build_file: "grpc/tools/internal_ci/linux/grpc_portability.sh" +timeout_mins: 360 +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} diff --git a/tools/internal_ci/linux/grpc_portability.sh b/tools/internal_ci/linux/grpc_portability.sh new file mode 100755 index 0000000000..58d3c58e70 --- /dev/null +++ b/tools/internal_ci/linux/grpc_portability.sh @@ -0,0 +1,40 @@ +#!/bin/bash +# Copyright 2017, Google Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +set -ex + +# change to grpc repo root +cd $(dirname $0)/../../.. 
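(An illustrative aside: the new CI scripts share one preamble: resolve the repository root relative to the script's own path, update submodules, then set DOCKERHUB_ORGANIZATION so that, per the scripts' own comments, docker images are downloaded from dockerhub instead of rebuilt locally. The same root-relative idiom sketched in Python for comparison; the real scripts are bash:

import os

# equivalent of `cd $(dirname $0)/../../..`: three directories up from this file
repo_root = os.path.abspath(os.path.join(os.path.dirname(__file__), '..', '..', '..'))
os.chdir(repo_root)
# read by the run_tests tooling to fetch prebuilt images (per the comment above)
os.environ['DOCKERHUB_ORGANIZATION'] = 'grpctesting'
)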
+ +git submodule update --init + +# download docker images from dockerhub +export DOCKERHUB_ORGANIZATION=grpctesting +tools/run_tests/run_tests_matrix.py -f portability linux diff --git a/tools/internal_ci/linux/grpc_portability_build_only.cfg b/tools/internal_ci/linux/grpc_portability_build_only.cfg index ce5be5abe9..4d3dda4082 100644 --- a/tools/internal_ci/linux/grpc_portability_build_only.cfg +++ b/tools/internal_ci/linux/grpc_portability_build_only.cfg @@ -34,6 +34,6 @@ build_file: "grpc/tools/internal_ci/linux/grpc_portability_build_only.sh" timeout_mins: 180 action { define_artifacts { - regex: "**report**.xml" + regex: "**/*sponge_log.xml" } } diff --git a/tools/internal_ci/linux/grpc_portability_build_only.sh b/tools/internal_ci/linux/grpc_portability_build_only.sh index ebdc0e82d7..80b5c4cb96 100644..100755 --- a/tools/internal_ci/linux/grpc_portability_build_only.sh +++ b/tools/internal_ci/linux/grpc_portability_build_only.sh @@ -37,5 +37,4 @@ git submodule update --init # download docker images from dockerhub export DOCKERHUB_ORGANIZATION=grpctesting - -tools/jenkins/run_jenkins_matrix.sh -f portability linux --build_only +tools/run_tests/run_tests_matrix.py -f portability linux --build_only diff --git a/tools/internal_ci/linux/grpc_pull_request_sanity.cfg b/tools/internal_ci/linux/grpc_pull_request_sanity.cfg index 511f2d6b35..1abf6ac600 100644 --- a/tools/internal_ci/linux/grpc_pull_request_sanity.cfg +++ b/tools/internal_ci/linux/grpc_pull_request_sanity.cfg @@ -34,6 +34,6 @@ build_file: "grpc/tools/internal_ci/linux/grpc_sanity.sh" timeout_mins: 30 action { define_artifacts { - regex: "**/sponge_log.xml" + regex: "**/*sponge_log.xml" } } diff --git a/tools/internal_ci/windows/grpc_master.bat b/tools/internal_ci/windows/grpc_master.bat index 4041c50313..8943390a8d 100644 --- a/tools/internal_ci/windows/grpc_master.bat +++ b/tools/internal_ci/windows/grpc_master.bat @@ -27,18 +27,17 @@ @rem (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE @rem OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. -setlocal +@rem make sure msys binaries are preferred over cygwin binaries +@rem set path to python 2.7 +set PATH=C:\tools\msys64\usr\bin;C:\Python27;%PATH% @rem enter repo root cd /d %~dp0\..\..\.. 
git submodule update --init -sh tools\run_tests\helper_scripts\run_tests_in_workspace.sh -t -j 4 -x c_windows_dbg_sponge_log.xml --report_suite_name c_windows_dbg -l c -c dbg -sh tools\run_tests\helper_scripts\run_tests_in_workspace.sh -t -j 4 -x c_windows_opt_sponge_log.xml --report_suite_name c_windows_opt -l c -c opt -sh tools\run_tests\helper_scripts\run_tests_in_workspace.sh -t -j 4 -x csharp_windows_dbg_sponge_log.xml --report_suite_name csharp_windows_dbg -l csharp -c dbg -sh tools\run_tests\helper_scripts\run_tests_in_workspace.sh -t -j 4 -x csharp_windows_opt_sponge_log.xml --report_suite_name csharp_windows_opt -l csharp -c opt -sh tools\run_tests\helper_scripts\run_tests_in_workspace.sh -t -j 4 -x node_windows_dbg_sponge_log.xml --report_suite_name node_windows_dbg -l node -c dbg -sh tools\run_tests\helper_scripts\run_tests_in_workspace.sh -t -j 4 -x node_windows_opt_sponge_log.xml --report_suite_name node_windows_opt -l node -c opt -sh tools\run_tests\helper_scripts\run_tests_in_workspace.sh -t -j 4 -x python_windows_dbg_sponge_log.xml --report_suite_name python_windows_dbg -l python -c dbg -sh tools\run_tests\helper_scripts\run_tests_in_workspace.sh -t -j 4 -x python_windows_opt_sponge_log.xml --report_suite_name python_windows_opt -l python -c opt +python tools/run_tests/run_tests_matrix.py -f basictests windows -j 1 --inner_jobs 8 || goto :error +goto :EOF + +:error +exit /b %errorlevel% diff --git a/tools/internal_ci/windows/grpc_master.cfg b/tools/internal_ci/windows/grpc_master.cfg index f90af11308..21a9d6a985 100644 --- a/tools/internal_ci/windows/grpc_master.cfg +++ b/tools/internal_ci/windows/grpc_master.cfg @@ -34,6 +34,6 @@ build_file: "grpc/tools/internal_ci/windows/grpc_master.bat" timeout_mins: 360 action { define_artifacts { - regex: "**sponge_log.xml" + regex: "**/*sponge_log.xml" } } diff --git a/tools/internal_ci/windows/grpc_portability_master.bat b/tools/internal_ci/windows/grpc_portability_master.bat new file mode 100644 index 0000000000..b98c70146c --- /dev/null +++ b/tools/internal_ci/windows/grpc_portability_master.bat @@ -0,0 +1,43 @@ +@rem Copyright 2017, Google Inc. +@rem All rights reserved. +@rem +@rem Redistribution and use in source and binary forms, with or without +@rem modification, are permitted provided that the following conditions are +@rem met: +@rem +@rem * Redistributions of source code must retain the above copyright +@rem notice, this list of conditions and the following disclaimer. +@rem * Redistributions in binary form must reproduce the above +@rem copyright notice, this list of conditions and the following disclaimer +@rem in the documentation and/or other materials provided with the +@rem distribution. +@rem * Neither the name of Google Inc. nor the names of its +@rem contributors may be used to endorse or promote products derived from +@rem this software without specific prior written permission. +@rem +@rem THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +@rem "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +@rem LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +@rem A PARTICULAR PURPOSE ARE DISCLAIMED. 
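(An illustrative aside: the Windows rewrite above replaces eight hand-enumerated run_tests_in_workspace.sh invocations with one run_tests_matrix.py call filtered by tags, -f basictests windows, one outer job and eight inner jobs, with || goto :error propagating the exit code. A simplified model of that kind of tag filtering; job names here are hypothetical and the real logic lives in tools/run_tests/run_tests_matrix.py:

# keep a job only if its labels cover every requested filter tag
jobs = [
    {'name': 'c_windows_dbg', 'labels': {'basictests', 'windows', 'c', 'dbg'}},
    {'name': 'csharp_windows_opt', 'labels': {'basictests', 'windows', 'csharp', 'opt'}},
    {'name': 'portability_linux', 'labels': {'portability', 'linux'}},
]
filters = {'basictests', 'windows'}
print([j['name'] for j in jobs if filters <= j['labels']])
# ['c_windows_dbg', 'csharp_windows_opt']
)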
IN NO EVENT SHALL THE COPYRIGHT +@rem OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +@rem SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +@rem LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +@rem DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +@rem THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +@rem (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +@rem OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +@rem make sure msys binaries are preferred over cygwin binaries +@rem set path to python 2.7 +set PATH=C:\tools\msys64\usr\bin;C:\Python27;%PATH% + +@rem enter repo root +cd /d %~dp0\..\..\.. + +git submodule update --init + +python tools/run_tests/run_tests_matrix.py -f portability windows -j 1 --inner_jobs 8 || goto :error +goto :EOF + +:error +exit /b %errorlevel% diff --git a/tools/internal_ci/windows/grpc_portability_master.cfg b/tools/internal_ci/windows/grpc_portability_master.cfg new file mode 100644 index 0000000000..10d8e98591 --- /dev/null +++ b/tools/internal_ci/windows/grpc_portability_master.cfg @@ -0,0 +1,39 @@ +# Copyright 2017, Google Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +# Config file for the internal CI (in protobuf text format) + +# Location of the continuous shell script in repository. +build_file: "grpc/tools/internal_ci/windows/grpc_portability_master.bat" +timeout_mins: 360 +action { + define_artifacts { + regex: "**/*sponge_log.xml" + } +} diff --git a/tools/jenkins/run_full_performance_released.sh b/tools/jenkins/run_full_performance_released.sh new file mode 100755 index 0000000000..ae2289e32c --- /dev/null +++ b/tools/jenkins/run_full_performance_released.sh @@ -0,0 +1,73 @@ +#!/usr/bin/env bash +# Copyright 2017, Google Inc. +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +# +# A frozen version of run_full_performance.sh that runs full performance test +# suite for the latest released stable version of gRPC. +set -ex + +# Enter the gRPC repo root +cd $(dirname $0)/../.. + +# run 8core client vs 8core server +tools/run_tests/run_performance_tests.py \ + -l c++ csharp node ruby java python go node_express \ + --netperf \ + --category scalable \ + --bq_result_table performance_released.performance_experiment \ + --remote_worker_host grpc-performance-server-8core grpc-performance-client-8core grpc-performance-client2-8core \ + --xml_report report_8core.xml \ + || EXIT_CODE=1 + +# prevent pushing leftover build files to remote hosts in the next step. +git clean -fdxq --exclude='report*.xml' + +# scalability with 32cores (and upload to a different BQ table) +tools/run_tests/run_performance_tests.py \ + -l c++ java csharp go \ + --netperf \ + --category scalable \ + --bq_result_table performance_released.performance_experiment_32core \ + --remote_worker_host grpc-performance-server-32core grpc-performance-client-32core grpc-performance-client2-32core \ + --xml_report report_32core.xml \ + || EXIT_CODE=1 + +# prevent pushing leftover build files to remote hosts in the next step. 
+git clean -fdxq --exclude='report*.xml' + +# selected scenarios on Windows +tools/run_tests/run_performance_tests.py \ + -l csharp \ + --category scalable \ + --bq_result_table performance_released.performance_experiment_windows \ + --remote_worker_host grpc-performance-windows1 grpc-performance-windows2 \ + --xml_report report_windows.xml \ + || EXIT_CODE=1 + +exit $EXIT_CODE diff --git a/tools/profiling/latency_profile/run_latency_profile.sh b/tools/profiling/latency_profile/run_latency_profile.sh index e9baee0957..41423fc3c1 100755 --- a/tools/profiling/latency_profile/run_latency_profile.sh +++ b/tools/profiling/latency_profile/run_latency_profile.sh @@ -44,4 +44,9 @@ else PYTHON=python2.7 fi +make CONFIG=opt memory_profile_test memory_profile_client memory_profile_server +bins/opt/memory_profile_test +bq load microbenchmarks.memory memory_usage.csv + $PYTHON tools/run_tests/run_microbenchmark.py --collect summary perf latency --bigquery_upload + diff --git a/tools/profiling/microbenchmarks/bm2bq.py b/tools/profiling/microbenchmarks/bm2bq.py index 76ed0fef0d..ffb11f57d8 100755 --- a/tools/profiling/microbenchmarks/bm2bq.py +++ b/tools/profiling/microbenchmarks/bm2bq.py @@ -36,7 +36,7 @@ import sys import json import csv -import os +import bm_json columns = [ ('jenkins_build', 'integer'), @@ -73,6 +73,14 @@ columns = [ ('framing_bytes_per_iteration', 'float'), ] +SANITIZE = { + 'integer': int, + 'float': float, + 'boolean': bool, + 'string': str, + 'timestamp': str, +} + if sys.argv[1] == '--schema': print ',\n'.join('%s:%s' % (k, t.upper()) for k, t in columns) sys.exit(0) @@ -88,143 +96,11 @@ else: writer = csv.DictWriter(sys.stdout, [c for c,t in columns]) -bm_specs = { - 'BM_UnaryPingPong': { - 'tpl': ['fixture', 'client_mutator', 'server_mutator'], - 'dyn': ['request_size', 'response_size'], - }, - 'BM_PumpStreamClientToServer': { - 'tpl': ['fixture'], - 'dyn': ['request_size'], - }, - 'BM_PumpStreamServerToClient': { - 'tpl': ['fixture'], - 'dyn': ['request_size'], - }, - 'BM_StreamingPingPong': { - 'tpl': ['fixture', 'client_mutator', 'server_mutator'], - 'dyn': ['request_size', 'request_count'], - }, - 'BM_StreamingPingPongMsgs': { - 'tpl': ['fixture', 'client_mutator', 'server_mutator'], - 'dyn': ['request_size'], - }, - 'BM_PumpStreamServerToClient_Trickle': { - 'tpl': [], - 'dyn': ['request_size', 'bandwidth_kilobits'], - }, - 'BM_ErrorStringOnNewError': { - 'tpl': ['fixture'], - 'dyn': [], - }, - 'BM_ErrorStringRepeatedly': { - 'tpl': ['fixture'], - 'dyn': [], - }, - 'BM_ErrorGetStatus': { - 'tpl': ['fixture'], - 'dyn': [], - }, - 'BM_ErrorGetStatusCode': { - 'tpl': ['fixture'], - 'dyn': [], - }, - 'BM_ErrorHttpError': { - 'tpl': ['fixture'], - 'dyn': [], - }, - 'BM_HasClearGrpcStatus': { - 'tpl': ['fixture'], - 'dyn': [], - }, - 'BM_IsolatedFilter' : { - 'tpl': ['fixture', 'client_mutator'], - 'dyn': [], - }, - 'BM_HpackEncoderEncodeHeader' : { - 'tpl': ['fixture'], - 'dyn': ['end_of_stream', 'request_size'], - }, - 'BM_HpackParserParseHeader' : { - 'tpl': ['fixture'], - 'dyn': [], - }, -} - -def numericalize(s): - if not s: return '' - if s[-1] == 'k': - return int(s[:-1]) * 1024 - if s[-1] == 'M': - return int(s[:-1]) * 1024 * 1024 - if 0 <= (ord(s[-1]) - ord('0')) <= 9: - return int(s) - assert 'not a number: %s' % s - -def parse_name(name): - if '<' not in name and '/' not in name and name not in bm_specs: - return {'name': name} - rest = name - out = {} - tpl_args = [] - dyn_args = [] - if '<' in rest: - tpl_bit = rest[rest.find('<') + 1 : rest.rfind('>')] - arg = '' 
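(An illustrative aside: the bm2bq.py rewrite just below strips out the benchmark-name parser, which moves to the shared bm_json module, and keeps only the BigQuery-specific step: filter each expanded row down to the declared columns and coerce every kept value through SANITIZE so it matches its SQL type. The effect, with illustrative values:

# SANITIZE maps a column's SQL type to a Python cast, as defined above
SANITIZE = {'integer': int, 'float': float, 'boolean': bool,
            'string': str, 'timestamp': str}
columns = [('iterations', 'integer'), ('cpu_time', 'float')]
row = {'iterations': '524288', 'cpu_time': '12.5', 'unrelated': 'dropped'}
sane_row = dict((name, SANITIZE[t](row[name])) for name, t in columns
                if name in row and row[name] != '')
print(sane_row)  # {'iterations': 524288, 'cpu_time': 12.5}
)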
- nesting = 0 - for c in tpl_bit: - if c == '<': - nesting += 1 - arg += c - elif c == '>': - nesting -= 1 - arg += c - elif c == ',': - if nesting == 0: - tpl_args.append(arg.strip()) - arg = '' - else: - arg += c - else: - arg += c - tpl_args.append(arg.strip()) - rest = rest[:rest.find('<')] + rest[rest.rfind('>') + 1:] - if '/' in rest: - s = rest.split('/') - rest = s[0] - dyn_args = s[1:] - name = rest - assert name in bm_specs, 'bm_specs needs to be expanded for %s' % name - assert len(dyn_args) == len(bm_specs[name]['dyn']) - assert len(tpl_args) == len(bm_specs[name]['tpl']) - out['name'] = name - out.update(dict((k, numericalize(v)) for k, v in zip(bm_specs[name]['dyn'], dyn_args))) - out.update(dict(zip(bm_specs[name]['tpl'], tpl_args))) - return out +for row in bm_json.expand_json(js, js2): + sane_row = {} + for name, sql_type in columns: + if name in row: + if row[name] == '': continue + sane_row[name] = SANITIZE[sql_type](row[name]) + writer.writerow(sane_row) -for bm in js['benchmarks']: - context = js['context'] - if 'label' in bm: - labels_list = [s.split(':') for s in bm['label'].strip().split(' ') if len(s) and s[0] != '#'] - for el in labels_list: - el[0] = el[0].replace('/iter', '_per_iteration') - labels = dict(labels_list) - else: - labels = {} - row = { - 'jenkins_build': os.environ.get('BUILD_NUMBER', ''), - 'jenkins_job': os.environ.get('JOB_NAME', ''), - } - row.update(context) - row.update(bm) - row.update(parse_name(row['name'])) - row.update(labels) - if 'label' in row: - del row['label'] - if js2: - for bm2 in js2['benchmarks']: - if bm['name'] == bm2['name']: - row['cpu_time'] = bm2['cpu_time'] - row['real_time'] = bm2['real_time'] - row['iterations'] = bm2['iterations'] - writer.writerow(row) diff --git a/tools/profiling/microbenchmarks/bm_diff.py b/tools/profiling/microbenchmarks/bm_diff.py new file mode 100755 index 0000000000..14ea24f65e --- /dev/null +++ b/tools/profiling/microbenchmarks/bm_diff.py @@ -0,0 +1,118 @@ +#!/usr/bin/env python2.7 +# Copyright 2017, Google Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. 
IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import sys
+import json
+import bm_json
+import tabulate
+import argparse
+
+def changed_ratio(n, o):
+  if float(o) <= .0001: o = 0
+  if float(n) <= .0001: n = 0
+  if o == 0 and n == 0: return 0
+  if o == 0: return 100
+  return (float(n)-float(o))/float(o)
+
+def min_change(pct):
+  return lambda n, o: abs(changed_ratio(n,o)) > pct/100.0
+
+_INTERESTING = {
+  'cpu_time': min_change(10),
+  'real_time': min_change(10),
+  'locks_per_iteration': min_change(5),
+  'allocs_per_iteration': min_change(5),
+  'writes_per_iteration': min_change(5),
+  'atm_cas_per_iteration': min_change(1),
+  'atm_add_per_iteration': min_change(5),
+}
+
+argp = argparse.ArgumentParser(description='Perform diff on microbenchmarks')
+argp.add_argument('-t', '--track',
+                  choices=sorted(_INTERESTING.keys()),
+                  nargs='+',
+                  default=sorted(_INTERESTING.keys()),
+                  help='Which metrics to track')
+argp.add_argument('files', metavar='bm_file.json', type=str, nargs=4,
+                  help='files to diff. ')
+args = argp.parse_args()
+
+with open(args.files[0]) as f:
+  js_new_ctr = json.loads(f.read())
+with open(args.files[1]) as f:
+  js_new_opt = json.loads(f.read())
+with open(args.files[2]) as f:
+  js_old_ctr = json.loads(f.read())
+with open(args.files[3]) as f:
+  js_old_opt = json.loads(f.read())
+
+new = {}
+old = {}
+
+for row in bm_json.expand_json(js_new_ctr, js_new_opt):
+  new[row['cpp_name']] = row
+for row in bm_json.expand_json(js_old_ctr, js_old_opt):
+  old[row['cpp_name']] = row
+
+changed = []
+for fld in args.track:
+  chk = _INTERESTING[fld]
+  for bm in new.keys():
+    if bm not in old: continue
+    n = new[bm]
+    o = old[bm]
+    if fld not in n or fld not in o: continue
+    if chk(n[fld], o[fld]):
+      changed.append((fld, chk))
+      break
+
+headers = ['Benchmark'] + [c[0] for c in changed] + ['Details']
+rows = []
+for bm in sorted(new.keys()):
+  if bm not in old: continue
+  row = [bm]
+  any_changed = False
+  n = new[bm]
+  o = old[bm]
+  details = ''
+  for fld in args.track:
+    chk = _INTERESTING[fld]
+    if fld not in n or fld not in o: continue
+    if chk(n[fld], o[fld]):
+      row.append(changed_ratio(n[fld], o[fld]))
+      if details: details += ', '
+      details += '%s:%r-->%r' % (fld, float(o[fld]), float(n[fld]))
+      any_changed = True
+    else:
+      row.append('')
+  if any_changed:
+    row.append(details)
+    rows.append(row)
+print tabulate.tabulate(rows, headers=headers, floatfmt='+.2f')
diff --git a/tools/profiling/microbenchmarks/bm_json.py b/tools/profiling/microbenchmarks/bm_json.py
new file mode 100644
index 0000000000..4695f829f4
--- /dev/null
+++ b/tools/profiling/microbenchmarks/bm_json.py
@@ -0,0 +1,184 @@
+# Copyright 2017, Google Inc.
+# All rights reserved.
+#
+# Redistribution and use in source and binary forms, with or without
+# modification, are permitted provided that the following conditions are
+# met:
+#
+# * Redistributions of source code must retain the above copyright
+# notice, this list of conditions and the following disclaimer.
+# * Redistributions in binary form must reproduce the above
+# copyright notice, this list of conditions and the following disclaimer
+# in the documentation and/or other materials provided with the
+# distribution.
+# * Neither the name of Google Inc. nor the names of its
+# contributors may be used to endorse or promote products derived from
+# this software without specific prior written permission.
+#
+# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
+# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
+# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
+# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
+# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
+# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
+# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
+# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
+# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
+# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
+# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
+
+import os
+
+_BM_SPECS = {
+  'BM_UnaryPingPong': {
+    'tpl': ['fixture', 'client_mutator', 'server_mutator'],
+    'dyn': ['request_size', 'response_size'],
+  },
+  'BM_PumpStreamClientToServer': {
+    'tpl': ['fixture'],
+    'dyn': ['request_size'],
+  },
+  'BM_PumpStreamServerToClient': {
+    'tpl': ['fixture'],
+    'dyn': ['request_size'],
+  },
+  'BM_StreamingPingPong': {
+    'tpl': ['fixture', 'client_mutator', 'server_mutator'],
+    'dyn': ['request_size', 'request_count'],
+  },
+  'BM_StreamingPingPongMsgs': {
+    'tpl': ['fixture', 'client_mutator', 'server_mutator'],
+    'dyn': ['request_size'],
+  },
+  'BM_PumpStreamServerToClient_Trickle': {
+    'tpl': [],
+    'dyn': ['request_size', 'bandwidth_kilobits'],
+  },
+  'BM_ErrorStringOnNewError': {
+    'tpl': ['fixture'],
+    'dyn': [],
+  },
+  'BM_ErrorStringRepeatedly': {
+    'tpl': ['fixture'],
+    'dyn': [],
+  },
+  'BM_ErrorGetStatus': {
+    'tpl': ['fixture'],
+    'dyn': [],
+  },
+  'BM_ErrorGetStatusCode': {
+    'tpl': ['fixture'],
+    'dyn': [],
+  },
+  'BM_ErrorHttpError': {
+    'tpl': ['fixture'],
+    'dyn': [],
+  },
+  'BM_HasClearGrpcStatus': {
+    'tpl': ['fixture'],
+    'dyn': [],
+  },
+  'BM_IsolatedFilter': {
+    'tpl': ['fixture', 'client_mutator'],
+    'dyn': [],
+  },
+  'BM_HpackEncoderEncodeHeader': {
+    'tpl': ['fixture'],
+    'dyn': ['end_of_stream', 'request_size'],
+  },
+  'BM_HpackParserParseHeader': {
+    'tpl': ['fixture'],
+    'dyn': [],
+  },
+  'BM_CallCreateDestroy': {
+    'tpl': ['fixture'],
+    'dyn': [],
+  },
+  'BM_Zalloc': {
+    'tpl': [],
+    'dyn': ['request_size'],
+  },
+  'BM_PollEmptyPollset_SpeedOfLight': {
+    'tpl': [],
+    'dyn': ['request_size', 'request_count'],
+  }
+}
+
+def numericalize(s):
+  if not s: return ''
+  if s[-1] == 'k':
+    return float(s[:-1]) * 1024
+  if s[-1] == 'M':
+    return float(s[:-1]) * 1024 * 1024
+  if 0 <= (ord(s[-1]) - ord('0')) <= 9:
+    return float(s)
+  assert 'not a number: %s' % s
+
+def parse_name(name):
+  cpp_name = name
+  if '<' not in name and '/' not in name and name not in _BM_SPECS:
+    return {'name': name, 'cpp_name': name}
+  rest = name
+  out = {}
+  tpl_args = []
+  dyn_args = []
+  if '<' in rest:
+    tpl_bit = rest[rest.find('<') + 1 : rest.rfind('>')]
+    arg = ''
+    nesting = 0
+    for c in tpl_bit:
+      if c == '<':
+        nesting += 1
+        arg += c
+      elif c == '>':
+        nesting -= 1
+        arg += c
+      elif c == ',':
+        if nesting == 0:
+          tpl_args.append(arg.strip())
+          arg = ''
+        else:
+          arg += c
+      else:
+        arg += c
+    tpl_args.append(arg.strip())
+    rest = rest[:rest.find('<')] + rest[rest.rfind('>') + 1:]
+  if '/' in rest:
+    s = rest.split('/')
+    rest = s[0]
+    dyn_args = s[1:]
+  name = rest
+  assert name in _BM_SPECS, '_BM_SPECS needs to be expanded for %s' % name
+  assert len(dyn_args) == len(_BM_SPECS[name]['dyn'])
+  assert len(tpl_args) == len(_BM_SPECS[name]['tpl'])
+  out['name'] = name
+  out['cpp_name'] = cpp_name
+  out.update(dict((k, numericalize(v)) for k, v in zip(_BM_SPECS[name]['dyn'], dyn_args)))
+  out.update(dict(zip(_BM_SPECS[name]['tpl'], tpl_args)))
+  return out
+
+def expand_json(js, js2 = None):
+  for bm in js['benchmarks']:
+    context = js['context']
+    if 'label' in bm:
+      labels_list = [s.split(':') for s in bm['label'].strip().split(' ') if len(s) and s[0] != '#']
+      for el in labels_list:
+        el[0] = el[0].replace('/iter', '_per_iteration')
+      labels = dict(labels_list)
+    else:
+      labels = {}
+    row = {
+      'jenkins_build': os.environ.get('BUILD_NUMBER', ''),
+      'jenkins_job': os.environ.get('JOB_NAME', ''),
+    }
+    row.update(context)
+    row.update(bm)
+    row.update(parse_name(row['name']))
+    row.update(labels)
+    if js2:
+      for bm2 in js2['benchmarks']:
+        if bm['name'] == bm2['name']:
+          row['cpu_time'] = bm2['cpu_time']
+          row['real_time'] = bm2['real_time']
+          row['iterations'] = bm2['iterations']
+    yield row
diff --git a/tools/run_tests/artifacts/artifact_targets.py b/tools/run_tests/artifacts/artifact_targets.py
index e0658f4678..04702bacca 100644
--- a/tools/run_tests/artifacts/artifact_targets.py
+++ b/tools/run_tests/artifacts/artifact_targets.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2.7
+#!/usr/bin/env python
 # Copyright 2016, Google Inc.
 # All rights reserved.
 #
diff --git a/tools/run_tests/artifacts/distribtest_targets.py b/tools/run_tests/artifacts/distribtest_targets.py
index a7535b3852..90bbde83cf 100644
--- a/tools/run_tests/artifacts/distribtest_targets.py
+++ b/tools/run_tests/artifacts/distribtest_targets.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2.7
+#!/usr/bin/env python
 # Copyright 2016, Google Inc.
 # All rights reserved.
 #
diff --git a/tools/run_tests/artifacts/package_targets.py b/tools/run_tests/artifacts/package_targets.py
index d490f571c3..2547f2073c 100644
--- a/tools/run_tests/artifacts/package_targets.py
+++ b/tools/run_tests/artifacts/package_targets.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2.7
+#!/usr/bin/env python
 # Copyright 2016, Google Inc.
 # All rights reserved.
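(An illustrative aside: the split gives bm_diff.py and bm2bq.py one shared decoder. bm_json.parse_name recovers schema fields from a Google Benchmark name via _BM_SPECS, and bm_diff only reports a metric once it moves past its per-metric threshold from _INTERESTING. A usage sketch, with illustrative benchmark arguments:

import bm_json

print(bm_json.parse_name('BM_UnaryPingPong<TCP, NoOpMutator, NoOpMutator>/8/128'))
# {'name': 'BM_UnaryPingPong',
#  'cpp_name': 'BM_UnaryPingPong<TCP, NoOpMutator, NoOpMutator>/8/128',
#  'fixture': 'TCP', 'client_mutator': 'NoOpMutator',
#  'server_mutator': 'NoOpMutator',
#  'request_size': 8.0, 'response_size': 128.0}
print(bm_json.numericalize('64k'))  # 65536.0 -- suffixes k/M scale by 1024
# bm_diff's thresholds then gate reporting: with min_change(10) on cpu_time,
# changed_ratio(12.0, 10.0) == +0.20 is reported; 10.5 vs 10.0 (+0.05) is not.
)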
# diff --git a/tools/run_tests/generated/sources_and_headers.json b/tools/run_tests/generated/sources_and_headers.json index 7ebe40331c..b2f9078c05 100644 --- a/tools/run_tests/generated/sources_and_headers.json +++ b/tools/run_tests/generated/sources_and_headers.json @@ -431,6 +431,23 @@ "headers": [], "is_filegroup": false, "language": "c", + "name": "error_test", + "src": [ + "test/core/iomgr/error_test.c" + ], + "third_party": false, + "type": "target" + }, + { + "deps": [ + "gpr", + "gpr_test_util", + "grpc", + "grpc_test_util" + ], + "headers": [], + "is_filegroup": false, + "language": "c", "name": "ev_epoll_linux_test", "src": [ "test/core/iomgr/ev_epoll_linux_test.c" @@ -1667,6 +1684,23 @@ "headers": [], "is_filegroup": false, "language": "c", + "name": "parse_address_test", + "src": [ + "test/core/client_channel/parse_address_test.c" + ], + "third_party": false, + "type": "target" + }, + { + "deps": [ + "gpr", + "gpr_test_util", + "grpc", + "grpc_test_util" + ], + "headers": [], + "is_filegroup": false, + "language": "c", "name": "percent_decode_fuzzer", "src": [ "test/core/slice/percent_decode_fuzzer.c" @@ -2042,6 +2076,23 @@ "headers": [], "is_filegroup": false, "language": "c", + "name": "stream_owned_slice_test", + "src": [ + "test/core/transport/stream_owned_slice_test.c" + ], + "third_party": false, + "type": "target" + }, + { + "deps": [ + "gpr", + "gpr_test_util", + "grpc", + "grpc_test_util" + ], + "headers": [], + "is_filegroup": false, + "language": "c", "name": "tcp_client_posix_test", "src": [ "test/core/iomgr/tcp_client_posix_test.c" @@ -2386,6 +2437,7 @@ "grpc", "grpc++", "grpc++_test_util", + "grpc_benchmark", "grpc_test_util" ], "headers": [], @@ -2406,6 +2458,7 @@ "grpc", "grpc++", "grpc++_test_util", + "grpc_benchmark", "grpc_test_util" ], "headers": [], @@ -2426,6 +2479,7 @@ "grpc", "grpc++", "grpc++_test_util", + "grpc_benchmark", "grpc_test_util" ], "headers": [], @@ -2446,6 +2500,7 @@ "grpc", "grpc++", "grpc++_test_util", + "grpc_benchmark", "grpc_test_util" ], "headers": [], @@ -2466,6 +2521,7 @@ "grpc", "grpc++", "grpc++_test_util", + "grpc_benchmark", "grpc_test_util" ], "headers": [], @@ -2486,14 +2542,15 @@ "grpc", "grpc++", "grpc++_test_util", + "grpc_benchmark", "grpc_test_util" ], "headers": [], "is_filegroup": false, "language": "c++", - "name": "bm_fullstack", + "name": "bm_fullstack_streaming_ping_pong", "src": [ - "test/cpp/microbenchmarks/bm_fullstack.cc" + "test/cpp/microbenchmarks/bm_fullstack_streaming_ping_pong.cc" ], "third_party": false, "type": "target" @@ -2504,6 +2561,72 @@ "gpr", "gpr_test_util", "grpc", + "grpc++", + "grpc++_test_util", + "grpc_benchmark", + "grpc_test_util" + ], + "headers": [], + "is_filegroup": false, + "language": "c++", + "name": "bm_fullstack_streaming_pump", + "src": [ + "test/cpp/microbenchmarks/bm_fullstack_streaming_pump.cc" + ], + "third_party": false, + "type": "target" + }, + { + "deps": [ + "benchmark", + "gpr", + "gpr_test_util", + "grpc", + "grpc++", + "grpc++_test_util", + "grpc_benchmark", + "grpc_test_util" + ], + "headers": [], + "is_filegroup": false, + "language": "c++", + "name": "bm_fullstack_trickle", + "src": [ + "test/cpp/microbenchmarks/bm_fullstack_trickle.cc" + ], + "third_party": false, + "type": "target" + }, + { + "deps": [ + "benchmark", + "gpr", + "gpr_test_util", + "grpc", + "grpc++", + "grpc++_test_util", + "grpc_benchmark", + "grpc_test_util" + ], + "headers": [], + "is_filegroup": false, + "language": "c++", + "name": "bm_fullstack_unary_ping_pong", + "src": [ + 
"test/cpp/microbenchmarks/bm_fullstack_unary_ping_pong.cc" + ], + "third_party": false, + "type": "target" + }, + { + "deps": [ + "benchmark", + "gpr", + "gpr_test_util", + "grpc", + "grpc++", + "grpc++_test_util", + "grpc_benchmark", "grpc_test_util" ], "headers": [], @@ -2518,6 +2641,27 @@ }, { "deps": [ + "benchmark", + "gpr", + "gpr_test_util", + "grpc", + "grpc++", + "grpc++_test_util", + "grpc_benchmark", + "grpc_test_util" + ], + "headers": [], + "is_filegroup": false, + "language": "c++", + "name": "bm_pollset", + "src": [ + "test/cpp/microbenchmarks/bm_pollset.cc" + ], + "third_party": false, + "type": "target" + }, + { + "deps": [ "gpr", "grpc", "grpc++" @@ -3461,6 +3605,30 @@ "gpr_test_util", "grpc", "grpc++", + "grpc++_test_util", + "grpc_test_util" + ], + "headers": [ + "src/proto/grpc/testing/echo.grpc.pb.h", + "src/proto/grpc/testing/echo.pb.h", + "src/proto/grpc/testing/echo_messages.grpc.pb.h", + "src/proto/grpc/testing/echo_messages.pb.h" + ], + "is_filegroup": false, + "language": "c++", + "name": "server_builder_test", + "src": [ + "test/cpp/server/server_builder_test.cc" + ], + "third_party": false, + "type": "target" + }, + { + "deps": [ + "gpr", + "gpr_test_util", + "grpc", + "grpc++", "grpc++_test", "grpc_test_util" ], @@ -5709,6 +5877,30 @@ }, { "deps": [ + "benchmark", + "grpc", + "grpc++", + "grpc_test_util" + ], + "headers": [ + "test/cpp/microbenchmarks/fullstack_context_mutators.h", + "test/cpp/microbenchmarks/fullstack_fixtures.h", + "test/cpp/microbenchmarks/helpers.h" + ], + "is_filegroup": false, + "language": "c++", + "name": "grpc_benchmark", + "src": [ + "test/cpp/microbenchmarks/fullstack_context_mutators.h", + "test/cpp/microbenchmarks/fullstack_fixtures.h", + "test/cpp/microbenchmarks/helpers.cc", + "test/cpp/microbenchmarks/helpers.h" + ], + "third_party": false, + "type": "lib" + }, + { + "deps": [ "grpc++", "grpc++_config_proto", "grpc++_proto_reflection_desc_db", @@ -7915,7 +8107,7 @@ "headers": [ "test/core/end2end/cq_verifier.h", "test/core/end2end/fake_resolver.h", - "test/core/end2end/fixtures/http_proxy.h", + "test/core/end2end/fixtures/http_proxy_fixture.h", "test/core/end2end/fixtures/proxy.h", "test/core/iomgr/endpoint_tests.h", "test/core/util/debugger_macros.h", @@ -7937,8 +8129,8 @@ "test/core/end2end/cq_verifier.h", "test/core/end2end/fake_resolver.c", "test/core/end2end/fake_resolver.h", - "test/core/end2end/fixtures/http_proxy.c", - "test/core/end2end/fixtures/http_proxy.h", + "test/core/end2end/fixtures/http_proxy_fixture.c", + "test/core/end2end/fixtures/http_proxy_fixture.h", "test/core/end2end/fixtures/proxy.c", "test/core/end2end/fixtures/proxy.h", "test/core/iomgr/endpoint_tests.c", @@ -7955,12 +8147,10 @@ "test/core/util/parse_hexstring.h", "test/core/util/passthru_endpoint.c", "test/core/util/passthru_endpoint.h", + "test/core/util/port.c", "test/core/util/port.h", - "test/core/util/port_posix.c", "test/core/util/port_server_client.c", "test/core/util/port_server_client.h", - "test/core/util/port_uv.c", - "test/core/util/port_windows.c", "test/core/util/slice_splitter.c", "test/core/util/slice_splitter.h", "test/core/util/trickle_endpoint.c", @@ -8256,6 +8446,7 @@ "include/grpc++/grpc++.h", "include/grpc++/health_check_service_interface.h", "include/grpc++/impl/call.h", + "include/grpc++/impl/channel_argument_option.h", "include/grpc++/impl/client_unary_call.h", "include/grpc++/impl/codegen/core_codegen.h", "include/grpc++/impl/grpc_library.h", @@ -8312,6 +8503,7 @@ "include/grpc++/grpc++.h", 
"include/grpc++/health_check_service_interface.h", "include/grpc++/impl/call.h", + "include/grpc++/impl/channel_argument_option.h", "include/grpc++/impl/client_unary_call.h", "include/grpc++/impl/codegen/core_codegen.h", "include/grpc++/impl/grpc_library.h", @@ -8361,6 +8553,7 @@ "src/cpp/common/rpc_method.cc", "src/cpp/common/version_cc.cc", "src/cpp/server/async_generic_service.cc", + "src/cpp/server/channel_argument_option.cc", "src/cpp/server/create_default_thread_pool.cc", "src/cpp/server/dynamic_thread_pool.cc", "src/cpp/server/dynamic_thread_pool.h", diff --git a/tools/run_tests/generated/tests.json b/tools/run_tests/generated/tests.json index ab7938d0ca..afb57d4f1f 100644 --- a/tools/run_tests/generated/tests.json +++ b/tools/run_tests/generated/tests.json @@ -518,6 +518,28 @@ { "args": [], "ci_platforms": [ + "linux", + "mac", + "posix", + "windows" + ], + "cpu_cost": 30, + "exclude_configs": [], + "exclude_iomgrs": [], + "flaky": false, + "gtest": false, + "language": "c", + "name": "error_test", + "platforms": [ + "linux", + "mac", + "posix", + "windows" + ] + }, + { + "args": [], + "ci_platforms": [ "linux" ], "cpu_cost": 1.0, @@ -1733,6 +1755,28 @@ "flaky": false, "gtest": false, "language": "c", + "name": "parse_address_test", + "platforms": [ + "linux", + "mac", + "posix", + "windows" + ] + }, + { + "args": [], + "ci_platforms": [ + "linux", + "mac", + "posix", + "windows" + ], + "cpu_cost": 1.0, + "exclude_configs": [], + "exclude_iomgrs": [], + "flaky": false, + "gtest": false, + "language": "c", "name": "percent_encoding_test", "platforms": [ "linux", @@ -1768,7 +1812,9 @@ ], "cpu_cost": 1.0, "exclude_configs": [], - "exclude_iomgrs": [], + "exclude_iomgrs": [ + "uv" + ], "flaky": false, "gtest": false, "language": "c", @@ -2120,6 +2166,28 @@ "ci_platforms": [ "linux", "mac", + "posix", + "windows" + ], + "cpu_cost": 1.0, + "exclude_configs": [], + "exclude_iomgrs": [], + "flaky": false, + "gtest": false, + "language": "c", + "name": "stream_owned_slice_test", + "platforms": [ + "linux", + "mac", + "posix", + "windows" + ] + }, + { + "args": [], + "ci_platforms": [ + "linux", + "mac", "posix" ], "cpu_cost": 0.5, @@ -2668,7 +2736,88 @@ "flaky": false, "gtest": false, "language": "c++", - "name": "bm_fullstack", + "name": "bm_fullstack_streaming_ping_pong", + "platforms": [ + "linux", + "mac", + "posix" + ], + "timeout_seconds": 1200 + }, + { + "args": [ + "--benchmark_min_time=0" + ], + "ci_platforms": [ + "linux", + "mac", + "posix" + ], + "cpu_cost": 1.0, + "exclude_configs": [], + "exclude_iomgrs": [], + "excluded_poll_engines": [ + "poll", + "poll-cv" + ], + "flaky": false, + "gtest": false, + "language": "c++", + "name": "bm_fullstack_streaming_pump", + "platforms": [ + "linux", + "mac", + "posix" + ], + "timeout_seconds": 1200 + }, + { + "args": [ + "--benchmark_min_time=0" + ], + "ci_platforms": [ + "linux", + "mac", + "posix" + ], + "cpu_cost": 1.0, + "exclude_configs": [], + "exclude_iomgrs": [], + "excluded_poll_engines": [ + "poll", + "poll-cv" + ], + "flaky": false, + "gtest": false, + "language": "c++", + "name": "bm_fullstack_trickle", + "platforms": [ + "linux", + "mac", + "posix" + ], + "timeout_seconds": 1200 + }, + { + "args": [ + "--benchmark_min_time=0" + ], + "ci_platforms": [ + "linux", + "mac", + "posix" + ], + "cpu_cost": 1.0, + "exclude_configs": [], + "exclude_iomgrs": [], + "excluded_poll_engines": [ + "poll", + "poll-cv" + ], + "flaky": false, + "gtest": false, + "language": "c++", + "name": "bm_fullstack_unary_ping_pong", "platforms": [ 
"linux", "mac", @@ -2699,6 +2848,28 @@ ] }, { + "args": [ + "--benchmark_min_time=0" + ], + "ci_platforms": [ + "linux", + "mac", + "posix" + ], + "cpu_cost": 1.0, + "exclude_configs": [], + "exclude_iomgrs": [], + "flaky": false, + "gtest": false, + "language": "c++", + "name": "bm_pollset", + "platforms": [ + "linux", + "mac", + "posix" + ] + }, + { "args": [], "ci_platforms": [ "linux", @@ -3366,6 +3537,28 @@ "flaky": false, "gtest": true, "language": "c++", + "name": "server_builder_test", + "platforms": [ + "linux", + "mac", + "posix", + "windows" + ] + }, + { + "args": [], + "ci_platforms": [ + "linux", + "mac", + "posix", + "windows" + ], + "cpu_cost": 1.0, + "exclude_configs": [], + "exclude_iomgrs": [], + "flaky": false, + "gtest": true, + "language": "c++", "name": "server_context_test_spouse_test", "platforms": [ "linux", @@ -39470,31 +39663,6 @@ { "args": [ "--scenarios_json", - "{\"scenarios\": [{\"name\": \"cpp_protobuf_sync_unary_qps_unconstrained_secure_500kib_resource_quota\", \"warmup_seconds\": 0, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"resource_quota_size\": 512000, \"async_server_threads\": 0, \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"server_type\": \"SYNC_SERVER\"}, \"num_clients\": 0, \"client_config\": {\"client_type\": \"SYNC_CLIENT\", \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"payload_config\": {\"simple_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 64, \"async_client_threads\": 0, \"outstanding_rpcs_per_channel\": 16, \"rpc_type\": \"UNARY\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}}]}" - ], - "boringssl": true, - "ci_platforms": [ - "linux" - ], - "cpu_cost": 1024, - "defaults": "boringssl", - "exclude_configs": [ - "tsan", - "asan" - ], - "excluded_poll_engines": [], - "flaky": false, - "language": "c++", - "name": "json_run_localhost", - "platforms": [ - "linux" - ], - "shortname": "json_run_localhost:cpp_protobuf_sync_unary_qps_unconstrained_secure_500kib_resource_quota", - "timeout_seconds": 360 - }, - { - "args": [ - "--scenarios_json", "{\"scenarios\": [{\"name\": \"cpp_protobuf_async_unary_ping_pong_secure\", \"warmup_seconds\": 0, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"async_server_threads\": 1, \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"server_type\": \"ASYNC_SERVER\"}, \"num_clients\": 1, \"client_config\": {\"client_type\": \"ASYNC_CLIENT\", \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"payload_config\": {\"simple_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 1, \"async_client_threads\": 1, \"outstanding_rpcs_per_channel\": 1, \"rpc_type\": \"UNARY\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}}]}" ], "boringssl": true, @@ -39545,31 +39713,6 @@ { "args": [ "--scenarios_json", - "{\"scenarios\": [{\"name\": \"cpp_protobuf_async_unary_qps_unconstrained_secure_500kib_resource_quota\", \"warmup_seconds\": 0, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"resource_quota_size\": 512000, \"async_server_threads\": 0, \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"server_type\": \"ASYNC_SERVER\"}, \"num_clients\": 0, 
\"client_config\": {\"client_type\": \"ASYNC_CLIENT\", \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"payload_config\": {\"simple_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 64, \"async_client_threads\": 0, \"outstanding_rpcs_per_channel\": 100, \"rpc_type\": \"UNARY\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}}]}" - ], - "boringssl": true, - "ci_platforms": [ - "linux" - ], - "cpu_cost": "capacity", - "defaults": "boringssl", - "exclude_configs": [ - "tsan", - "asan" - ], - "excluded_poll_engines": [], - "flaky": false, - "language": "c++", - "name": "json_run_localhost", - "platforms": [ - "linux" - ], - "shortname": "json_run_localhost:cpp_protobuf_async_unary_qps_unconstrained_secure_500kib_resource_quota", - "timeout_seconds": 360 - }, - { - "args": [ - "--scenarios_json", "{\"scenarios\": [{\"name\": \"cpp_protobuf_sync_streaming_ping_pong_secure\", \"warmup_seconds\": 0, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"async_server_threads\": 1, \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"server_type\": \"SYNC_SERVER\"}, \"num_clients\": 1, \"client_config\": {\"client_type\": \"SYNC_CLIENT\", \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"payload_config\": {\"simple_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 1, \"async_client_threads\": 1, \"outstanding_rpcs_per_channel\": 1, \"rpc_type\": \"STREAMING\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}}]}" ], "boringssl": true, @@ -39620,31 +39763,6 @@ { "args": [ "--scenarios_json", - "{\"scenarios\": [{\"name\": \"cpp_protobuf_sync_streaming_qps_unconstrained_secure_500kib_resource_quota\", \"warmup_seconds\": 0, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"resource_quota_size\": 512000, \"async_server_threads\": 0, \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"server_type\": \"SYNC_SERVER\"}, \"num_clients\": 0, \"client_config\": {\"client_type\": \"SYNC_CLIENT\", \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"payload_config\": {\"simple_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 64, \"async_client_threads\": 0, \"outstanding_rpcs_per_channel\": 16, \"rpc_type\": \"STREAMING\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}}]}" - ], - "boringssl": true, - "ci_platforms": [ - "linux" - ], - "cpu_cost": 1024, - "defaults": "boringssl", - "exclude_configs": [ - "tsan", - "asan" - ], - "excluded_poll_engines": [], - "flaky": false, - "language": "c++", - "name": "json_run_localhost", - "platforms": [ - "linux" - ], - "shortname": "json_run_localhost:cpp_protobuf_sync_streaming_qps_unconstrained_secure_500kib_resource_quota", - "timeout_seconds": 360 - }, - { - "args": [ - "--scenarios_json", "{\"scenarios\": [{\"name\": \"cpp_protobuf_async_streaming_ping_pong_secure\", \"warmup_seconds\": 0, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"async_server_threads\": 1, \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"server_type\": \"ASYNC_SERVER\"}, \"num_clients\": 1, 
\"client_config\": {\"client_type\": \"ASYNC_CLIENT\", \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"payload_config\": {\"simple_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 1, \"async_client_threads\": 1, \"outstanding_rpcs_per_channel\": 1, \"rpc_type\": \"STREAMING\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}}]}" ], "boringssl": true, @@ -39695,31 +39813,6 @@ { "args": [ "--scenarios_json", - "{\"scenarios\": [{\"name\": \"cpp_protobuf_async_streaming_qps_unconstrained_secure_500kib_resource_quota\", \"warmup_seconds\": 0, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"resource_quota_size\": 512000, \"async_server_threads\": 0, \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"server_type\": \"ASYNC_SERVER\"}, \"num_clients\": 0, \"client_config\": {\"client_type\": \"ASYNC_CLIENT\", \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"payload_config\": {\"simple_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 64, \"async_client_threads\": 0, \"outstanding_rpcs_per_channel\": 100, \"rpc_type\": \"STREAMING\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}}]}" - ], - "boringssl": true, - "ci_platforms": [ - "linux" - ], - "cpu_cost": "capacity", - "defaults": "boringssl", - "exclude_configs": [ - "tsan", - "asan" - ], - "excluded_poll_engines": [], - "flaky": false, - "language": "c++", - "name": "json_run_localhost", - "platforms": [ - "linux" - ], - "shortname": "json_run_localhost:cpp_protobuf_async_streaming_qps_unconstrained_secure_500kib_resource_quota", - "timeout_seconds": 360 - }, - { - "args": [ - "--scenarios_json", "{\"scenarios\": [{\"name\": \"cpp_generic_async_streaming_ping_pong_insecure\", \"warmup_seconds\": 0, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"async_server_threads\": 1, \"security_params\": null, \"payload_config\": {\"bytebuf_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"server_type\": \"ASYNC_GENERIC_SERVER\"}, \"num_clients\": 1, \"client_config\": {\"client_type\": \"ASYNC_CLIENT\", \"security_params\": null, \"payload_config\": {\"bytebuf_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 1, \"async_client_threads\": 1, \"outstanding_rpcs_per_channel\": 1, \"rpc_type\": \"STREAMING\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}}]}" ], "boringssl": true, @@ -39899,31 +39992,6 @@ { "args": [ "--scenarios_json", - "{\"scenarios\": [{\"name\": \"cpp_protobuf_sync_unary_qps_unconstrained_insecure_500kib_resource_quota\", \"warmup_seconds\": 0, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"resource_quota_size\": 512000, \"async_server_threads\": 0, \"security_params\": null, \"server_type\": \"SYNC_SERVER\"}, \"num_clients\": 0, \"client_config\": {\"client_type\": \"SYNC_CLIENT\", \"security_params\": null, \"payload_config\": {\"simple_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 64, \"async_client_threads\": 0, \"outstanding_rpcs_per_channel\": 16, \"rpc_type\": \"UNARY\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}}]}" - ], - "boringssl": true, - "ci_platforms": [ - 
"linux" - ], - "cpu_cost": 1024, - "defaults": "boringssl", - "exclude_configs": [ - "tsan", - "asan" - ], - "excluded_poll_engines": [], - "flaky": false, - "language": "c++", - "name": "json_run_localhost", - "platforms": [ - "linux" - ], - "shortname": "json_run_localhost:cpp_protobuf_sync_unary_qps_unconstrained_insecure_500kib_resource_quota", - "timeout_seconds": 360 - }, - { - "args": [ - "--scenarios_json", "{\"scenarios\": [{\"name\": \"cpp_protobuf_async_unary_ping_pong_insecure\", \"warmup_seconds\": 0, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"async_server_threads\": 1, \"security_params\": null, \"server_type\": \"ASYNC_SERVER\"}, \"num_clients\": 1, \"client_config\": {\"client_type\": \"ASYNC_CLIENT\", \"security_params\": null, \"payload_config\": {\"simple_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 1, \"async_client_threads\": 1, \"outstanding_rpcs_per_channel\": 1, \"rpc_type\": \"UNARY\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}}]}" ], "boringssl": true, @@ -39974,31 +40042,6 @@ { "args": [ "--scenarios_json", - "{\"scenarios\": [{\"name\": \"cpp_protobuf_async_unary_qps_unconstrained_insecure_500kib_resource_quota\", \"warmup_seconds\": 0, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"resource_quota_size\": 512000, \"async_server_threads\": 0, \"security_params\": null, \"server_type\": \"ASYNC_SERVER\"}, \"num_clients\": 0, \"client_config\": {\"client_type\": \"ASYNC_CLIENT\", \"security_params\": null, \"payload_config\": {\"simple_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 64, \"async_client_threads\": 0, \"outstanding_rpcs_per_channel\": 100, \"rpc_type\": \"UNARY\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}}]}" - ], - "boringssl": true, - "ci_platforms": [ - "linux" - ], - "cpu_cost": "capacity", - "defaults": "boringssl", - "exclude_configs": [ - "tsan", - "asan" - ], - "excluded_poll_engines": [], - "flaky": false, - "language": "c++", - "name": "json_run_localhost", - "platforms": [ - "linux" - ], - "shortname": "json_run_localhost:cpp_protobuf_async_unary_qps_unconstrained_insecure_500kib_resource_quota", - "timeout_seconds": 360 - }, - { - "args": [ - "--scenarios_json", "{\"scenarios\": [{\"name\": \"cpp_protobuf_sync_streaming_ping_pong_insecure\", \"warmup_seconds\": 0, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"async_server_threads\": 1, \"security_params\": null, \"server_type\": \"SYNC_SERVER\"}, \"num_clients\": 1, \"client_config\": {\"client_type\": \"SYNC_CLIENT\", \"security_params\": null, \"payload_config\": {\"simple_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 1, \"async_client_threads\": 1, \"outstanding_rpcs_per_channel\": 1, \"rpc_type\": \"STREAMING\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}}]}" ], "boringssl": true, @@ -40049,31 +40092,6 @@ { "args": [ "--scenarios_json", - "{\"scenarios\": [{\"name\": \"cpp_protobuf_sync_streaming_qps_unconstrained_insecure_500kib_resource_quota\", \"warmup_seconds\": 0, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"resource_quota_size\": 512000, \"async_server_threads\": 0, \"security_params\": null, \"server_type\": \"SYNC_SERVER\"}, \"num_clients\": 0, \"client_config\": {\"client_type\": \"SYNC_CLIENT\", 
\"security_params\": null, \"payload_config\": {\"simple_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 64, \"async_client_threads\": 0, \"outstanding_rpcs_per_channel\": 16, \"rpc_type\": \"STREAMING\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}}]}" - ], - "boringssl": true, - "ci_platforms": [ - "linux" - ], - "cpu_cost": 1024, - "defaults": "boringssl", - "exclude_configs": [ - "tsan", - "asan" - ], - "excluded_poll_engines": [], - "flaky": false, - "language": "c++", - "name": "json_run_localhost", - "platforms": [ - "linux" - ], - "shortname": "json_run_localhost:cpp_protobuf_sync_streaming_qps_unconstrained_insecure_500kib_resource_quota", - "timeout_seconds": 360 - }, - { - "args": [ - "--scenarios_json", "{\"scenarios\": [{\"name\": \"cpp_protobuf_async_streaming_ping_pong_insecure\", \"warmup_seconds\": 0, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"async_server_threads\": 1, \"security_params\": null, \"server_type\": \"ASYNC_SERVER\"}, \"num_clients\": 1, \"client_config\": {\"client_type\": \"ASYNC_CLIENT\", \"security_params\": null, \"payload_config\": {\"simple_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 1, \"async_client_threads\": 1, \"outstanding_rpcs_per_channel\": 1, \"rpc_type\": \"STREAMING\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}}]}" ], "boringssl": true, @@ -40124,31 +40142,6 @@ { "args": [ "--scenarios_json", - "{\"scenarios\": [{\"name\": \"cpp_protobuf_async_streaming_qps_unconstrained_insecure_500kib_resource_quota\", \"warmup_seconds\": 0, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"resource_quota_size\": 512000, \"async_server_threads\": 0, \"security_params\": null, \"server_type\": \"ASYNC_SERVER\"}, \"num_clients\": 0, \"client_config\": {\"client_type\": \"ASYNC_CLIENT\", \"security_params\": null, \"payload_config\": {\"simple_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 64, \"async_client_threads\": 0, \"outstanding_rpcs_per_channel\": 100, \"rpc_type\": \"STREAMING\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}}]}" - ], - "boringssl": true, - "ci_platforms": [ - "linux" - ], - "cpu_cost": "capacity", - "defaults": "boringssl", - "exclude_configs": [ - "tsan", - "asan" - ], - "excluded_poll_engines": [], - "flaky": false, - "language": "c++", - "name": "json_run_localhost", - "platforms": [ - "linux" - ], - "shortname": "json_run_localhost:cpp_protobuf_async_streaming_qps_unconstrained_insecure_500kib_resource_quota", - "timeout_seconds": 360 - }, - { - "args": [ - "--scenarios_json", "{\"scenarios\": [{\"name\": \"cpp_generic_async_streaming_ping_pong_secure\", \"warmup_seconds\": 0, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"async_server_threads\": 1, \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"payload_config\": {\"bytebuf_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"server_type\": \"ASYNC_GENERIC_SERVER\"}, \"num_clients\": 1, \"client_config\": {\"client_type\": \"ASYNC_CLIENT\", \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"payload_config\": {\"bytebuf_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 1, \"async_client_threads\": 1, 
\"outstanding_rpcs_per_channel\": 1, \"rpc_type\": \"STREAMING\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}}]}" ], "boringssl": true, @@ -40405,42 +40398,6 @@ { "args": [ "--scenarios_json", - "{\"scenarios\": [{\"name\": \"cpp_protobuf_sync_unary_qps_unconstrained_secure_500kib_resource_quota\", \"warmup_seconds\": 0, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"resource_quota_size\": 512000, \"async_server_threads\": 0, \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"server_type\": \"SYNC_SERVER\"}, \"num_clients\": 0, \"client_config\": {\"client_type\": \"SYNC_CLIENT\", \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"payload_config\": {\"simple_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 64, \"async_client_threads\": 0, \"outstanding_rpcs_per_channel\": 1, \"rpc_type\": \"UNARY\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}}]}" - ], - "boringssl": true, - "ci_platforms": [ - "linux" - ], - "cpu_cost": 64, - "defaults": "boringssl", - "exclude_configs": [ - "asan-noleaks", - "asan-trace-cmp", - "basicprof", - "counters", - "dbg", - "gcov", - "helgrind", - "memcheck", - "msan", - "mutrace", - "opt", - "stapprof", - "ubsan" - ], - "excluded_poll_engines": [], - "flaky": false, - "language": "c++", - "name": "json_run_localhost", - "platforms": [ - "linux" - ], - "shortname": "json_run_localhost:cpp_protobuf_sync_unary_qps_unconstrained_secure_500kib_resource_quota_low_thread_count", - "timeout_seconds": 360 - }, - { - "args": [ - "--scenarios_json", "{\"scenarios\": [{\"name\": \"cpp_protobuf_async_unary_ping_pong_secure\", \"warmup_seconds\": 0, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"async_server_threads\": 1, \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"server_type\": \"ASYNC_SERVER\"}, \"num_clients\": 1, \"client_config\": {\"client_type\": \"ASYNC_CLIENT\", \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"payload_config\": {\"simple_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 1, \"async_client_threads\": 1, \"outstanding_rpcs_per_channel\": 1, \"rpc_type\": \"UNARY\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}}]}" ], "boringssl": true, @@ -40513,42 +40470,6 @@ { "args": [ "--scenarios_json", - "{\"scenarios\": [{\"name\": \"cpp_protobuf_async_unary_qps_unconstrained_secure_500kib_resource_quota\", \"warmup_seconds\": 0, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"resource_quota_size\": 512000, \"async_server_threads\": 0, \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"server_type\": \"ASYNC_SERVER\"}, \"num_clients\": 0, \"client_config\": {\"client_type\": \"ASYNC_CLIENT\", \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"payload_config\": {\"simple_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 64, \"async_client_threads\": 0, \"outstanding_rpcs_per_channel\": 100, \"rpc_type\": \"UNARY\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}}]}" - ], - 
"boringssl": true, - "ci_platforms": [ - "linux" - ], - "cpu_cost": "capacity", - "defaults": "boringssl", - "exclude_configs": [ - "asan-noleaks", - "asan-trace-cmp", - "basicprof", - "counters", - "dbg", - "gcov", - "helgrind", - "memcheck", - "msan", - "mutrace", - "opt", - "stapprof", - "ubsan" - ], - "excluded_poll_engines": [], - "flaky": false, - "language": "c++", - "name": "json_run_localhost", - "platforms": [ - "linux" - ], - "shortname": "json_run_localhost:cpp_protobuf_async_unary_qps_unconstrained_secure_500kib_resource_quota_low_thread_count", - "timeout_seconds": 360 - }, - { - "args": [ - "--scenarios_json", "{\"scenarios\": [{\"name\": \"cpp_protobuf_sync_streaming_ping_pong_secure\", \"warmup_seconds\": 0, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"async_server_threads\": 1, \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"server_type\": \"SYNC_SERVER\"}, \"num_clients\": 1, \"client_config\": {\"client_type\": \"SYNC_CLIENT\", \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"payload_config\": {\"simple_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 1, \"async_client_threads\": 1, \"outstanding_rpcs_per_channel\": 1, \"rpc_type\": \"STREAMING\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}}]}" ], "boringssl": true, @@ -40621,42 +40542,6 @@ { "args": [ "--scenarios_json", - "{\"scenarios\": [{\"name\": \"cpp_protobuf_sync_streaming_qps_unconstrained_secure_500kib_resource_quota\", \"warmup_seconds\": 0, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"resource_quota_size\": 512000, \"async_server_threads\": 0, \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"server_type\": \"SYNC_SERVER\"}, \"num_clients\": 0, \"client_config\": {\"client_type\": \"SYNC_CLIENT\", \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"payload_config\": {\"simple_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 64, \"async_client_threads\": 0, \"outstanding_rpcs_per_channel\": 1, \"rpc_type\": \"STREAMING\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}}]}" - ], - "boringssl": true, - "ci_platforms": [ - "linux" - ], - "cpu_cost": 64, - "defaults": "boringssl", - "exclude_configs": [ - "asan-noleaks", - "asan-trace-cmp", - "basicprof", - "counters", - "dbg", - "gcov", - "helgrind", - "memcheck", - "msan", - "mutrace", - "opt", - "stapprof", - "ubsan" - ], - "excluded_poll_engines": [], - "flaky": false, - "language": "c++", - "name": "json_run_localhost", - "platforms": [ - "linux" - ], - "shortname": "json_run_localhost:cpp_protobuf_sync_streaming_qps_unconstrained_secure_500kib_resource_quota_low_thread_count", - "timeout_seconds": 360 - }, - { - "args": [ - "--scenarios_json", "{\"scenarios\": [{\"name\": \"cpp_protobuf_async_streaming_ping_pong_secure\", \"warmup_seconds\": 0, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"async_server_threads\": 1, \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"server_type\": \"ASYNC_SERVER\"}, \"num_clients\": 1, \"client_config\": {\"client_type\": \"ASYNC_CLIENT\", \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, 
\"payload_config\": {\"simple_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 1, \"async_client_threads\": 1, \"outstanding_rpcs_per_channel\": 1, \"rpc_type\": \"STREAMING\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}}]}" ], "boringssl": true, @@ -40729,42 +40614,6 @@ { "args": [ "--scenarios_json", - "{\"scenarios\": [{\"name\": \"cpp_protobuf_async_streaming_qps_unconstrained_secure_500kib_resource_quota\", \"warmup_seconds\": 0, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"resource_quota_size\": 512000, \"async_server_threads\": 0, \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"server_type\": \"ASYNC_SERVER\"}, \"num_clients\": 0, \"client_config\": {\"client_type\": \"ASYNC_CLIENT\", \"security_params\": {\"use_test_ca\": true, \"server_host_override\": \"foo.test.google.fr\"}, \"payload_config\": {\"simple_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 64, \"async_client_threads\": 0, \"outstanding_rpcs_per_channel\": 100, \"rpc_type\": \"STREAMING\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}}]}" - ], - "boringssl": true, - "ci_platforms": [ - "linux" - ], - "cpu_cost": "capacity", - "defaults": "boringssl", - "exclude_configs": [ - "asan-noleaks", - "asan-trace-cmp", - "basicprof", - "counters", - "dbg", - "gcov", - "helgrind", - "memcheck", - "msan", - "mutrace", - "opt", - "stapprof", - "ubsan" - ], - "excluded_poll_engines": [], - "flaky": false, - "language": "c++", - "name": "json_run_localhost", - "platforms": [ - "linux" - ], - "shortname": "json_run_localhost:cpp_protobuf_async_streaming_qps_unconstrained_secure_500kib_resource_quota_low_thread_count", - "timeout_seconds": 360 - }, - { - "args": [ - "--scenarios_json", "{\"scenarios\": [{\"name\": \"cpp_generic_async_streaming_ping_pong_insecure\", \"warmup_seconds\": 0, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"async_server_threads\": 1, \"security_params\": null, \"payload_config\": {\"bytebuf_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"server_type\": \"ASYNC_GENERIC_SERVER\"}, \"num_clients\": 1, \"client_config\": {\"client_type\": \"ASYNC_CLIENT\", \"security_params\": null, \"payload_config\": {\"bytebuf_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 1, \"async_client_threads\": 1, \"outstanding_rpcs_per_channel\": 1, \"rpc_type\": \"STREAMING\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}}]}" ], "boringssl": true, @@ -41021,42 +40870,6 @@ { "args": [ "--scenarios_json", - "{\"scenarios\": [{\"name\": \"cpp_protobuf_sync_unary_qps_unconstrained_insecure_500kib_resource_quota\", \"warmup_seconds\": 0, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"resource_quota_size\": 512000, \"async_server_threads\": 0, \"security_params\": null, \"server_type\": \"SYNC_SERVER\"}, \"num_clients\": 0, \"client_config\": {\"client_type\": \"SYNC_CLIENT\", \"security_params\": null, \"payload_config\": {\"simple_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 64, \"async_client_threads\": 0, \"outstanding_rpcs_per_channel\": 1, \"rpc_type\": \"UNARY\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}}]}" - ], - "boringssl": true, - 
"ci_platforms": [ - "linux" - ], - "cpu_cost": 64, - "defaults": "boringssl", - "exclude_configs": [ - "asan-noleaks", - "asan-trace-cmp", - "basicprof", - "counters", - "dbg", - "gcov", - "helgrind", - "memcheck", - "msan", - "mutrace", - "opt", - "stapprof", - "ubsan" - ], - "excluded_poll_engines": [], - "flaky": false, - "language": "c++", - "name": "json_run_localhost", - "platforms": [ - "linux" - ], - "shortname": "json_run_localhost:cpp_protobuf_sync_unary_qps_unconstrained_insecure_500kib_resource_quota_low_thread_count", - "timeout_seconds": 360 - }, - { - "args": [ - "--scenarios_json", "{\"scenarios\": [{\"name\": \"cpp_protobuf_async_unary_ping_pong_insecure\", \"warmup_seconds\": 0, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"async_server_threads\": 1, \"security_params\": null, \"server_type\": \"ASYNC_SERVER\"}, \"num_clients\": 1, \"client_config\": {\"client_type\": \"ASYNC_CLIENT\", \"security_params\": null, \"payload_config\": {\"simple_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 1, \"async_client_threads\": 1, \"outstanding_rpcs_per_channel\": 1, \"rpc_type\": \"UNARY\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}}]}" ], "boringssl": true, @@ -41129,42 +40942,6 @@ { "args": [ "--scenarios_json", - "{\"scenarios\": [{\"name\": \"cpp_protobuf_async_unary_qps_unconstrained_insecure_500kib_resource_quota\", \"warmup_seconds\": 0, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"resource_quota_size\": 512000, \"async_server_threads\": 0, \"security_params\": null, \"server_type\": \"ASYNC_SERVER\"}, \"num_clients\": 0, \"client_config\": {\"client_type\": \"ASYNC_CLIENT\", \"security_params\": null, \"payload_config\": {\"simple_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 64, \"async_client_threads\": 0, \"outstanding_rpcs_per_channel\": 100, \"rpc_type\": \"UNARY\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}}]}" - ], - "boringssl": true, - "ci_platforms": [ - "linux" - ], - "cpu_cost": "capacity", - "defaults": "boringssl", - "exclude_configs": [ - "asan-noleaks", - "asan-trace-cmp", - "basicprof", - "counters", - "dbg", - "gcov", - "helgrind", - "memcheck", - "msan", - "mutrace", - "opt", - "stapprof", - "ubsan" - ], - "excluded_poll_engines": [], - "flaky": false, - "language": "c++", - "name": "json_run_localhost", - "platforms": [ - "linux" - ], - "shortname": "json_run_localhost:cpp_protobuf_async_unary_qps_unconstrained_insecure_500kib_resource_quota_low_thread_count", - "timeout_seconds": 360 - }, - { - "args": [ - "--scenarios_json", "{\"scenarios\": [{\"name\": \"cpp_protobuf_sync_streaming_ping_pong_insecure\", \"warmup_seconds\": 0, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"async_server_threads\": 1, \"security_params\": null, \"server_type\": \"SYNC_SERVER\"}, \"num_clients\": 1, \"client_config\": {\"client_type\": \"SYNC_CLIENT\", \"security_params\": null, \"payload_config\": {\"simple_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 1, \"async_client_threads\": 1, \"outstanding_rpcs_per_channel\": 1, \"rpc_type\": \"STREAMING\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}}]}" ], "boringssl": true, @@ -41237,42 +41014,6 @@ { "args": [ "--scenarios_json", - "{\"scenarios\": [{\"name\": 
\"cpp_protobuf_sync_streaming_qps_unconstrained_insecure_500kib_resource_quota\", \"warmup_seconds\": 0, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"resource_quota_size\": 512000, \"async_server_threads\": 0, \"security_params\": null, \"server_type\": \"SYNC_SERVER\"}, \"num_clients\": 0, \"client_config\": {\"client_type\": \"SYNC_CLIENT\", \"security_params\": null, \"payload_config\": {\"simple_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 64, \"async_client_threads\": 0, \"outstanding_rpcs_per_channel\": 1, \"rpc_type\": \"STREAMING\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}}]}" - ], - "boringssl": true, - "ci_platforms": [ - "linux" - ], - "cpu_cost": 64, - "defaults": "boringssl", - "exclude_configs": [ - "asan-noleaks", - "asan-trace-cmp", - "basicprof", - "counters", - "dbg", - "gcov", - "helgrind", - "memcheck", - "msan", - "mutrace", - "opt", - "stapprof", - "ubsan" - ], - "excluded_poll_engines": [], - "flaky": false, - "language": "c++", - "name": "json_run_localhost", - "platforms": [ - "linux" - ], - "shortname": "json_run_localhost:cpp_protobuf_sync_streaming_qps_unconstrained_insecure_500kib_resource_quota_low_thread_count", - "timeout_seconds": 360 - }, - { - "args": [ - "--scenarios_json", "{\"scenarios\": [{\"name\": \"cpp_protobuf_async_streaming_ping_pong_insecure\", \"warmup_seconds\": 0, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"async_server_threads\": 1, \"security_params\": null, \"server_type\": \"ASYNC_SERVER\"}, \"num_clients\": 1, \"client_config\": {\"client_type\": \"ASYNC_CLIENT\", \"security_params\": null, \"payload_config\": {\"simple_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 1, \"async_client_threads\": 1, \"outstanding_rpcs_per_channel\": 1, \"rpc_type\": \"STREAMING\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}}]}" ], "boringssl": true, @@ -41344,42 +41085,6 @@ }, { "args": [ - "--scenarios_json", - "{\"scenarios\": [{\"name\": \"cpp_protobuf_async_streaming_qps_unconstrained_insecure_500kib_resource_quota\", \"warmup_seconds\": 0, \"benchmark_seconds\": 1, \"num_servers\": 1, \"server_config\": {\"resource_quota_size\": 512000, \"async_server_threads\": 0, \"security_params\": null, \"server_type\": \"ASYNC_SERVER\"}, \"num_clients\": 0, \"client_config\": {\"client_type\": \"ASYNC_CLIENT\", \"security_params\": null, \"payload_config\": {\"simple_params\": {\"resp_size\": 0, \"req_size\": 0}}, \"client_channels\": 64, \"async_client_threads\": 0, \"outstanding_rpcs_per_channel\": 100, \"rpc_type\": \"STREAMING\", \"load_params\": {\"closed_loop\": {}}, \"histogram_params\": {\"max_possible\": 60000000000.0, \"resolution\": 0.01}}}]}" - ], - "boringssl": true, - "ci_platforms": [ - "linux" - ], - "cpu_cost": "capacity", - "defaults": "boringssl", - "exclude_configs": [ - "asan-noleaks", - "asan-trace-cmp", - "basicprof", - "counters", - "dbg", - "gcov", - "helgrind", - "memcheck", - "msan", - "mutrace", - "opt", - "stapprof", - "ubsan" - ], - "excluded_poll_engines": [], - "flaky": false, - "language": "c++", - "name": "json_run_localhost", - "platforms": [ - "linux" - ], - "shortname": "json_run_localhost:cpp_protobuf_async_streaming_qps_unconstrained_insecure_500kib_resource_quota_low_thread_count", - "timeout_seconds": 360 - }, - { - "args": [ "test/core/end2end/fuzzers/api_fuzzer_corpus/00.bin" 
], "ci_platforms": [ @@ -80562,6 +80267,28 @@ }, { "args": [ + "test/core/end2end/fuzzers/api_fuzzer_corpus/clusterfuzz-testcase-6520142139752448" + ], + "ci_platforms": [ + "linux" + ], + "cpu_cost": 0.1, + "exclude_configs": [ + "tsan" + ], + "exclude_iomgrs": [ + "uv" + ], + "flaky": false, + "language": "c", + "name": "api_fuzzer_one_entry", + "platforms": [ + "linux" + ], + "uses_polling": false + }, + { + "args": [ "test/core/end2end/fuzzers/api_fuzzer_corpus/crash-0597bbdd657fa4ed14443994c9147a1a7bbc205f" ], "ci_platforms": [ diff --git a/tools/run_tests/helper_scripts/pre_build_csharp.bat b/tools/run_tests/helper_scripts/pre_build_csharp.bat index f37f63b584..99df1c6626 100644 --- a/tools/run_tests/helper_scripts/pre_build_csharp.bat +++ b/tools/run_tests/helper_scripts/pre_build_csharp.bat @@ -89,18 +89,6 @@ if exist %NUGET% ( %NUGET% restore -PackagesDirectory ../packages || goto :error cd .. - cd Grpc.IntegrationTesting.Client || goto :error - %NUGET% restore -PackagesDirectory ../packages || goto :error - cd .. - - cd Grpc.IntegrationTesting.QpsWorker || goto :error - %NUGET% restore -PackagesDirectory ../packages || goto :error - cd .. - - cd Grpc.IntegrationTesting.StressClient || goto :error - %NUGET% restore -PackagesDirectory ../packages || goto :error - cd .. - cd Grpc.IntegrationTesting || goto :error %NUGET% restore -PackagesDirectory ../packages || goto :error diff --git a/tools/run_tests/helper_scripts/pre_build_csharp.sh b/tools/run_tests/helper_scripts/pre_build_csharp.sh index 1f808556f4..d7665e15af 100755 --- a/tools/run_tests/helper_scripts/pre_build_csharp.sh +++ b/tools/run_tests/helper_scripts/pre_build_csharp.sh @@ -73,18 +73,6 @@ then nuget restore -PackagesDirectory ../packages cd .. - cd Grpc.IntegrationTesting.Client - nuget restore -PackagesDirectory ../packages - cd .. - - cd Grpc.IntegrationTesting.QpsWorker - nuget restore -PackagesDirectory ../packages - cd .. - - cd Grpc.IntegrationTesting.StressClient - nuget restore -PackagesDirectory ../packages - cd .. - cd Grpc.IntegrationTesting nuget restore -PackagesDirectory ../packages cd .. diff --git a/tools/run_tests/performance/bq_upload_result.py b/tools/run_tests/performance/bq_upload_result.py index 89d2a9b320..3bac1199a7 100755 --- a/tools/run_tests/performance/bq_upload_result.py +++ b/tools/run_tests/performance/bq_upload_result.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python2.7 +#!/usr/bin/env python # Copyright 2016, Google Inc. # All rights reserved. # @@ -30,6 +30,8 @@ # Uploads performance benchmark result file to bigquery. +from __future__ import print_function + import argparse import calendar import json @@ -70,7 +72,7 @@ def _upload_netperf_latency_csv_to_bigquery(dataset_id, table_id, result_file): _create_results_table(bq, dataset_id, table_id) if not _insert_result(bq, dataset_id, table_id, scenario_result, flatten=False): - print 'Error uploading result to bigquery.' + print('Error uploading result to bigquery.') sys.exit(1) @@ -82,7 +84,7 @@ def _upload_scenario_result_to_bigquery(dataset_id, table_id, result_file): _create_results_table(bq, dataset_id, table_id) if not _insert_result(bq, dataset_id, table_id, scenario_result): - print 'Error uploading result to bigquery.' 
+ print('Error uploading result to bigquery.') sys.exit(1) @@ -179,4 +181,4 @@ if args.file_format == 'netperf_latency_csv': _upload_netperf_latency_csv_to_bigquery(dataset_id, table_id, args.file_to_upload) else: _upload_scenario_result_to_bigquery(dataset_id, table_id, args.file_to_upload) -print 'Successfully uploaded %s to BigQuery.\n' % args.file_to_upload +print('Successfully uploaded %s to BigQuery.\n' % args.file_to_upload) diff --git a/tools/run_tests/performance/scenario_config.py b/tools/run_tests/performance/scenario_config.py index 865125fcd7..1d91b61ba4 100644 --- a/tools/run_tests/performance/scenario_config.py +++ b/tools/run_tests/performance/scenario_config.py @@ -275,15 +275,18 @@ class CXXLanguage: secure=secure, categories=smoketest_categories+[SCALABLE]) - yield _ping_pong_scenario( - 'cpp_protobuf_%s_%s_qps_unconstrained_%s_500kib_resource_quota' % (synchronicity, rpc_type, secstr), - rpc_type=rpc_type.upper(), - client_type='%s_CLIENT' % synchronicity.upper(), - server_type='%s_SERVER' % synchronicity.upper(), - unconstrained_client=synchronicity, - secure=secure, - categories=smoketest_categories+[SCALABLE], - resource_quota_size=500*1024) + # TODO(vjpai): Re-enable this test. It has a lot of timeouts + # and hasn't yet been conclusively identified as a test failure + # or race in the library + # yield _ping_pong_scenario( + # 'cpp_protobuf_%s_%s_qps_unconstrained_%s_500kib_resource_quota' % (synchronicity, rpc_type, secstr), + # rpc_type=rpc_type.upper(), + # client_type='%s_CLIENT' % synchronicity.upper(), + # server_type='%s_SERVER' % synchronicity.upper(), + # unconstrained_client=synchronicity, + # secure=secure, + # categories=smoketest_categories+[SCALABLE], + # resource_quota_size=500*1024) for channels in geometric_progression(1, 20000, math.sqrt(10)): for outstanding in geometric_progression(1, 200000, math.sqrt(10)): @@ -413,11 +416,12 @@ class NodeLanguage: client_type='ASYNC_CLIENT', server_type='async_server', client_language='c++') - yield _ping_pong_scenario( - 'node_protobuf_async_unary_qps_unconstrained', rpc_type='UNARY', - client_type='ASYNC_CLIENT', server_type='ASYNC_SERVER', - unconstrained_client='async', - categories=[SCALABLE, SMOKETEST]) + # TODO(murgatroid99): fix bugs with this scenario and re-enable it + # yield _ping_pong_scenario( + # 'node_protobuf_async_unary_qps_unconstrained', rpc_type='UNARY', + # client_type='ASYNC_CLIENT', server_type='ASYNC_SERVER', + # unconstrained_client='async', + # categories=[SCALABLE, SMOKETEST]) # TODO(jtattermusch): make this scenario work #yield _ping_pong_scenario( diff --git a/tools/run_tests/python_utils/antagonist.py b/tools/run_tests/python_utils/antagonist.py index 857addfb38..111839ccf9 100755 --- a/tools/run_tests/python_utils/antagonist.py +++ b/tools/run_tests/python_utils/antagonist.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python2.7 +#!/usr/bin/env python # Copyright 2015, Google Inc. # All rights reserved. # diff --git a/tools/run_tests/python_utils/filter_pull_request_tests.py b/tools/run_tests/python_utils/filter_pull_request_tests.py index 3734f025d5..e013376295 100644 --- a/tools/run_tests/python_utils/filter_pull_request_tests.py +++ b/tools/run_tests/python_utils/filter_pull_request_tests.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python2.7 +#!/usr/bin/env python # Copyright 2015, Google Inc. # All rights reserved. 
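For context on the scenario sweep above: scenario_config.py drives its channel and outstanding-RPC sweeps through a geometric_progression() helper. A minimal sketch consistent with the calls geometric_progression(1, 20000, math.sqrt(10)) shown in that hunk (the file's actual helper may differ in detail):

    import math

    def geometric_progression(start, stop, step):
        # Yield integer sample points start, start*step, start*step**2, ...
        # stopping before 'stop'; used to sweep channel and RPC counts.
        n = start
        while n < stop:
            yield int(round(n))
            n *= step

    # Roughly half-decade steps across the sweep range:
    assert list(geometric_progression(1, 20000, math.sqrt(10)))[:4] == [1, 3, 10, 32]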
# @@ -30,7 +30,10 @@ """Filter out tests based on file differences compared to merge target branch""" +from __future__ import print_function + import re +import six from subprocess import check_output @@ -125,7 +128,7 @@ _WHITELIST_DICT = { } # Add all triggers to their respective test suites -for trigger, test_suites in _WHITELIST_DICT.iteritems(): +for trigger, test_suites in six.iteritems(_WHITELIST_DICT): for test_suite in test_suites: test_suite.add_trigger(trigger) diff --git a/tools/run_tests/python_utils/jobset.py b/tools/run_tests/python_utils/jobset.py index f3047431e2..5d812f28ee 100755 --- a/tools/run_tests/python_utils/jobset.py +++ b/tools/run_tests/python_utils/jobset.py @@ -476,13 +476,13 @@ def run(cmdlines, skip_jobs=False, quiet_success=False): if skip_jobs: - results = {} + resultset = {} skipped_job_result = JobResult() skipped_job_result.state = 'SKIPPED' for job in cmdlines: message('SKIPPED', job.shortname, do_newline=True) - results[job.shortname] = [skipped_job_result] - return results + resultset[job.shortname] = [skipped_job_result] + return 0, resultset js = Jobset(check_cancelled, maxjobs if maxjobs is not None else _DEFAULT_MAX_JOBS, newline_on_success, travis, stop_on_failure, add_env, diff --git a/tools/run_tests/python_utils/port_server.py b/tools/run_tests/python_utils/port_server.py index e9b3f7ff79..dbd32efc0e 100755 --- a/tools/run_tests/python_utils/port_server.py +++ b/tools/run_tests/python_utils/port_server.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python2.7 +#!/usr/bin/env python # Copyright 2015, Google Inc. # All rights reserved. # diff --git a/tools/run_tests/python_utils/report_utils.py b/tools/run_tests/python_utils/report_utils.py index 9dad60408f..131772f55f 100644 --- a/tools/run_tests/python_utils/report_utils.py +++ b/tools/run_tests/python_utils/report_utils.py @@ -40,6 +40,7 @@ except (ImportError): import os import string import xml.etree.cElementTree as ET +import six def _filter_msg(msg, output_format): @@ -63,7 +64,7 @@ def render_junit_xml_report(resultset, xml_report, suite_package='grpc', root = ET.Element('testsuites') testsuite = ET.SubElement(root, 'testsuite', id='1', package=suite_package, name=suite_name) - for shortname, results in resultset.iteritems(): + for shortname, results in six.iteritems(resultset): for result in results: xml_test = ET.SubElement(testsuite, 'testcase', name=shortname) if result.elapsed_time: diff --git a/tools/run_tests/python_utils/start_port_server.py b/tools/run_tests/python_utils/start_port_server.py index 4c9f6aac63..deb7354438 100644 --- a/tools/run_tests/python_utils/start_port_server.py +++ b/tools/run_tests/python_utils/start_port_server.py @@ -40,7 +40,10 @@ import tempfile import time -def start_port_server(port_server_port): +# must be synchronized with test/core/utils/port_server_client.h +_PORT_SERVER_PORT = 32766 + +def start_port_server(): # check if a compatible port server is running # if incompatible (version mismatch) ==> start a new one # if not running ==> start a new one @@ -48,7 +51,7 @@ def start_port_server(port_server_port): try: version = int( urllib.request.urlopen( - 'http://localhost:%d/version_number' % port_server_port, + 'http://localhost:%d/version_number' % _PORT_SERVER_PORT, timeout=10).read()) logging.info('detected port server running version %d', version) running = True @@ -67,7 +70,7 @@ def start_port_server(port_server_port): if not running: logging.info('port_server version mismatch: killing the old one') urllib.request.urlopen('http://localhost:%d/quitquitquit' 
% - port_server_port).read() + _PORT_SERVER_PORT).read() time.sleep(1) if not running: fd, logfile = tempfile.mkstemp() @@ -76,7 +79,7 @@ def start_port_server(port_server_port): args = [ sys.executable, os.path.abspath('tools/run_tests/python_utils/port_server.py'), - '-p', '%d' % port_server_port, '-l', logfile + '-p', '%d' % _PORT_SERVER_PORT, '-l', logfile ] env = dict(os.environ) env['BUILD_ID'] = 'pleaseDontKillMeJenkins' @@ -107,7 +110,7 @@ def start_port_server(port_server_port): time.sleep(1) try: urllib.request.urlopen( - 'http://localhost:%d/get' % port_server_port, + 'http://localhost:%d/get' % _PORT_SERVER_PORT, timeout=1).read() logging.info( 'last ditch attempt to contact port server succeeded') @@ -119,7 +122,7 @@ def start_port_server(port_server_port): print(port_log) sys.exit(1) try: - port_server_url = 'http://localhost:%d/get' % port_server_port + port_server_url = 'http://localhost:%d/get' % _PORT_SERVER_PORT urllib.request.urlopen(port_server_url, timeout=1).read() logging.info('port server is up and ready') break diff --git a/tools/run_tests/run_build_statistics.py b/tools/run_tests/run_build_statistics.py index 654cf95a38..dd11a45e78 100755 --- a/tools/run_tests/run_build_statistics.py +++ b/tools/run_tests/run_build_statistics.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python2.7 +#!/usr/bin/env python # Copyright 2016, Google Inc. # All rights reserved. # @@ -30,6 +30,8 @@ """Tool to get build statistics from Jenkins and upload to BigQuery.""" +from __future__ import print_function + import argparse import jenkinsapi from jenkinsapi.custom_exceptions import JenkinsAPIException @@ -243,6 +245,6 @@ for build_name in _BUILDS.keys() if 'all' in args.builds else args.builds: rows = [big_query_utils.make_row(build_number, build_result)] if not big_query_utils.insert_rows(bq, _PROJECT_ID, _DATASET_ID, build_name, rows): - print '====> Error uploading result to bigquery.' + print('====> Error uploading result to bigquery.') sys.exit(1) diff --git a/tools/run_tests/run_interop_tests.py b/tools/run_tests/run_interop_tests.py index d29bfa9b0c..29d5a9aaa0 100755 --- a/tools/run_tests/run_interop_tests.py +++ b/tools/run_tests/run_interop_tests.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python2.7 +#!/usr/bin/env python # Copyright 2015, Google Inc. # All rights reserved. 
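The start_port_server.py hunk above moves the port number into the module as _PORT_SERVER_PORT = 32766 and probes for an already-running server before spawning a new one. A compact sketch of that probe (the function name and structure here are illustrative, not the file's exact code):

    try:
        from urllib.request import urlopen  # Python 3
    except ImportError:
        from urllib2 import urlopen         # Python 2

    _PORT_SERVER_PORT = 32766  # keep in sync with test/core/utils/port_server_client.h

    def detect_port_server(timeout=10):
        # Ask a possibly-running port server for its version number;
        # None means nothing answered and a fresh server must be started.
        try:
            url = 'http://localhost:%d/version_number' % _PORT_SERVER_PORT
            return int(urlopen(url, timeout=timeout).read())
        except Exception:
            return None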
# @@ -44,6 +44,7 @@ import sys import tempfile import time import uuid +import six import python_utils.dockerjob as dockerjob import python_utils.jobset as jobset @@ -77,11 +78,15 @@ class CXXLanguage: def __init__(self): self.client_cwd = None self.server_cwd = None + self.http2_cwd = None self.safename = 'cxx' def client_cmd(self, args): return ['bins/opt/interop_client'] + args + def client_cmd_http2interop(self, args): + return ['bins/opt/http2_client'] + args + def cloud_to_prod_env(self): return {} @@ -474,7 +479,7 @@ _HTTP2_TEST_CASES = ['tls', 'framing'] _HTTP2_BADSERVER_TEST_CASES = ['rst_after_header', 'rst_after_data', 'rst_during_data', 'goaway', 'ping', 'max_streams'] -_LANGUAGES_FOR_HTTP2_BADSERVER_TESTS = ['java', 'go', 'python'] +_LANGUAGES_FOR_HTTP2_BADSERVER_TESTS = ['java', 'go', 'python', 'c++'] DOCKER_WORKDIR_ROOT = '/var/local/git/grpc' @@ -497,6 +502,28 @@ def docker_run_cmdline(cmdline, image, docker_args=[], cwd=None, environ=None): return docker_cmdline +def manual_cmdline(docker_cmdline): + """Returns docker cmdline adjusted for manual invocation.""" + print_cmdline = [] + for item in docker_cmdline: + if item.startswith('--name='): + continue + # add quotes when necessary + if any(character.isspace() for character in item): + item = "\"%s\"" % item + print_cmdline.append(item) + return ' '.join(print_cmdline) + + +def write_cmdlog_maybe(cmdlog, filename): + """Writes the cmdlog into a bash script file, if cmdlog is not None.""" + if cmdlog: + with open(filename, 'w') as logfile: + logfile.write('#!/bin/bash\n') + logfile.writelines("%s\n" % line for line in cmdlog) + print('Command log written to file %s' % filename) + + def bash_cmdline(cmdline): """Creates bash -c cmdline from args list.""" # Use login shell: @@ -547,7 +574,8 @@ def _job_kill_handler(job): def cloud_to_prod_jobspec(language, test_case, server_host_name, - server_host_detail, docker_image=None, auth=False): + server_host_detail, docker_image=None, auth=False, + manual_cmd_log=None): """Creates jobspec for cloud-to-prod interop test""" container_name = None cmdargs = [ @@ -572,7 +600,9 @@ def cloud_to_prod_jobspec(language, test_case, server_host_name, cwd=cwd, environ=environ, docker_args=['--net=host', - '--name', container_name]) + '--name=%s' % container_name]) + if manual_cmd_log is not None: + manual_cmd_log.append(manual_cmdline(cmdline)) cwd = None environ = None @@ -593,7 +623,8 @@ def cloud_to_prod_jobspec(language, test_case, server_host_name, def cloud_to_cloud_jobspec(language, test_case, server_name, server_host, - server_port, docker_image=None, insecure=False): + server_port, docker_image=None, insecure=False, + manual_cmd_log=None): """Creates jobspec for cloud-to-cloud interop test""" interop_only_options = [ '--server_host_override=foo.test.google.fr', @@ -624,7 +655,9 @@ def cloud_to_cloud_jobspec(language, test_case, server_name, server_host, environ=environ, cwd=cwd, docker_args=['--net=host', - '--name', container_name]) + '--name=%s' % container_name]) + if manual_cmd_log is not None: + manual_cmd_log.append(manual_cmdline(cmdline)) cwd = None test_job = jobset.JobSpec( @@ -642,7 +675,7 @@ def cloud_to_cloud_jobspec(language, test_case, server_name, server_host, return test_job -def server_jobspec(language, docker_image, insecure=False): +def server_jobspec(language, docker_image, insecure=False, manual_cmd_log=None): """Create jobspec for running a server""" container_name = dockerjob.random_name('interop_server_%s' % language.safename) cmdline = bash_cmdline( @@ -672,7 +705,9 @@
def server_jobspec(language, docker_image, insecure=False): cwd=language.server_cwd, environ=environ, docker_args=port_args + - ['--name', container_name]) + ['--name=%s' % container_name]) + if manual_cmd_log is not None: + manual_cmd_log.append(manual_cmdline(docker_cmdline)) server_job = jobset.JobSpec( cmdline=docker_cmdline, environ=environ, @@ -802,6 +837,14 @@ argp.add_argument('--allow_flakes', action='store_const', const=True, help='Allow flaky tests to show as passing (re-runs failed tests up to five times)') +argp.add_argument('--manual_run', + default=False, + action='store_const', + const=True, + help='Prepare things for running interop tests manually. ' + + 'Preserve docker images after building them and skip ' + 'actually running the tests. Only print commands to run by ' + + 'hand.') argp.add_argument('--http2_interop', default=False, action='store_const', @@ -833,13 +876,17 @@ if args.use_docker: print('copied to the docker environment.') time.sleep(5) +if args.manual_run and not args.use_docker: + print('--manual_run is only supported with --use_docker option enabled.') + sys.exit(1) + if not args.use_docker and servers: print('Running interop servers is only supported with --use_docker option enabled.') sys.exit(1) languages = set(_LANGUAGES[l] for l in itertools.chain.from_iterable( - _LANGUAGES.iterkeys() if x == 'all' else [x] + six.iterkeys(_LANGUAGES) if x == 'all' else [x] for x in args.language)) languages_http2_badserver_interop = set() @@ -879,28 +926,40 @@ if args.use_docker: else: jobset.message('FAILED', 'Failed to build interop docker images.', do_newline=True) - for image in docker_images.itervalues(): + for image in six.itervalues(docker_images): dockerjob.remove_image(image, skip_nonexistent=True) sys.exit(1) +server_manual_cmd_log = [] if args.manual_run else None +client_manual_cmd_log = [] if args.manual_run else None + # Start interop servers. 
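To make the new --manual_run plumbing concrete: manual_cmdline() strips the --name= argument and re-quotes whitespace-containing items so the logged command can be pasted straight into a shell. An illustration, with the helper's logic inlined for a self-contained run (the container and image names below are invented for the example):

    def manual_cmdline(docker_cmdline):
        # Same logic as the helper added above: drop --name=..., quote items
        # containing whitespace, join into one shell-pasteable line.
        print_cmdline = []
        for item in docker_cmdline:
            if item.startswith('--name='):
                continue
            if any(character.isspace() for character in item):
                item = '"%s"' % item
            print_cmdline.append(item)
        return ' '.join(print_cmdline)

    cmdline = ['docker', 'run', '--net=host',
               '--name=interop_server_cxx_f00ba4',  # invented container name
               'grpc_interop_cxx',                  # hypothetical image name
               'bash', '-c', 'bins/opt/interop_client --use_tls=true']
    print(manual_cmdline(cmdline))
    # docker run --net=host grpc_interop_cxx bash -c "bins/opt/interop_client --use_tls=true"

write_cmdlog_maybe() then prepends '#!/bin/bash' and writes one such line per job, yielding the runnable interop_server_cmds.sh / interop_client_cmds.sh scripts used below.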
-server_jobs={} -server_addresses={} +server_jobs = {} +server_addresses = {} try: for s in servers: lang = str(s) spec = server_jobspec(_LANGUAGES[lang], docker_images.get(lang), - args.insecure) - job = dockerjob.DockerJob(spec) - server_jobs[lang] = job - server_addresses[lang] = ('localhost', job.mapped_port(_DEFAULT_SERVER_PORT)) + args.insecure, manual_cmd_log=server_manual_cmd_log) + if not args.manual_run: + job = dockerjob.DockerJob(spec) + server_jobs[lang] = job + server_addresses[lang] = ('localhost', job.mapped_port(_DEFAULT_SERVER_PORT)) + else: + # don't run the server, set server port to a placeholder value + server_addresses[lang] = ('localhost', '${SERVER_PORT}') if args.http2_badserver_interop: # launch a HTTP2 server emulator that creates edge cases lang = str(http2InteropServer) - spec = server_jobspec(http2InteropServer, docker_images.get(lang)) - job = dockerjob.DockerJob(spec) - server_jobs[lang] = job + spec = server_jobspec(http2InteropServer, docker_images.get(lang), + manual_cmd_log=server_manual_cmd_log) + if not args.manual_run: + job = dockerjob.DockerJob(spec) + server_jobs[lang] = job + else: + # don't run the server, set server port to a placeholder value + server_addresses[lang] = ('localhost', '${SERVER_PORT}') jobs = [] if args.cloud_to_prod: @@ -914,7 +973,8 @@ try: test_job = cloud_to_prod_jobspec( language, test_case, server_host_name, prod_servers[server_host_name], - docker_image=docker_images.get(str(language))) + docker_image=docker_images.get(str(language)), + manual_cmd_log=client_manual_cmd_log) jobs.append(test_job) if args.http2_interop: @@ -922,7 +982,8 @@ try: test_job = cloud_to_prod_jobspec( http2Interop, test_case, server_host_name, prod_servers[server_host_name], - docker_image=docker_images.get(str(http2Interop))) + docker_image=docker_images.get(str(http2Interop)), + manual_cmd_log=client_manual_cmd_log) jobs.append(test_job) if args.cloud_to_prod_auth: @@ -935,7 +996,8 @@ try: test_job = cloud_to_prod_jobspec( language, test_case, server_host_name, prod_servers[server_host_name], - docker_image=docker_images.get(str(language)), auth=True) + docker_image=docker_images.get(str(language)), auth=True, + manual_cmd_log=client_manual_cmd_log) jobs.append(test_job) for server in args.override_server: @@ -959,7 +1021,8 @@ try: server_host, server_port, docker_image=docker_images.get(str(language)), - insecure=args.insecure) + insecure=args.insecure, + manual_cmd_log=client_manual_cmd_log) jobs.append(test_job) if args.http2_interop: @@ -973,7 +1036,8 @@ try: server_host, server_port, docker_image=docker_images.get(str(http2Interop)), - insecure=args.insecure) + insecure=args.insecure, + manual_cmd_log=client_manual_cmd_log) jobs.append(test_job) if args.http2_badserver_interop: @@ -984,22 +1048,30 @@ try: str(http2InteropServer), 'localhost', _DEFAULT_SERVER_PORT, - docker_image=docker_images.get(str(language))) + docker_image=docker_images.get(str(language)), + manual_cmd_log=client_manual_cmd_log) jobs.append(test_job) if not jobs: print('No jobs to run.') - for image in docker_images.itervalues(): + for image in six.itervalues(docker_images): dockerjob.remove_image(image, skip_nonexistent=True) sys.exit(1) + if args.manual_run: + print('All tests will be skipped since the --manual_run option is active.') + num_failures, resultset = jobset.run(jobs, newline_on_success=True, - maxjobs=args.jobs) + maxjobs=args.jobs, + skip_jobs=args.manual_run) if num_failures: jobset.message('FAILED', 'Some tests failed', do_newline=True) else:
jobset.message('SUCCESS', 'All tests passed', do_newline=True) + write_cmdlog_maybe(server_manual_cmd_log, 'interop_server_cmds.sh') + write_cmdlog_maybe(client_manual_cmd_log, 'interop_client_cmds.sh') + report_utils.render_junit_xml_report(resultset, 'report.xml') for name, job in resultset.items(): @@ -1022,8 +1094,11 @@ finally: if not job.is_running(): print('Server "%s" has exited prematurely.' % server) - dockerjob.finish_jobs([j for j in server_jobs.itervalues()]) + dockerjob.finish_jobs([j for j in six.itervalues(server_jobs)]) - for image in docker_images.itervalues(): - print('Removing docker image %s' % image) - dockerjob.remove_image(image) + for image in six.itervalues(docker_images): + if not args.manual_run: + print('Removing docker image %s' % image) + dockerjob.remove_image(image) + else: + print('Preserving docker image: %s' % image) diff --git a/tools/run_tests/run_microbenchmark.py b/tools/run_tests/run_microbenchmark.py index c6cc60715e..57b2636e56 100755 --- a/tools/run_tests/run_microbenchmark.py +++ b/tools/run_tests/run_microbenchmark.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python2.7 +#!/usr/bin/env python # Copyright 2017, Google Inc. # All rights reserved. # @@ -44,8 +44,7 @@ os.chdir(os.path.join(os.path.dirname(sys.argv[0]), '../..')) if not os.path.exists('reports'): os.makedirs('reports') -port_server_port = 32766 -start_port_server.start_port_server(port_server_port) +start_port_server.start_port_server() def fnize(s): out = '' @@ -110,8 +109,7 @@ def collect_latency(bm_name, args): if len(benchmarks) >= min(16, multiprocessing.cpu_count()): # run up to half the cpu count: each benchmark can use up to two cores # (one for the microbenchmark, one for the data flush) - jobset.run(benchmarks, maxjobs=max(1, multiprocessing.cpu_count()/2), - add_env={'GRPC_TEST_PORT_SERVER': 'localhost:%d' % port_server_port}) + jobset.run(benchmarks, maxjobs=max(1, multiprocessing.cpu_count()/2)) jobset.run(profile_analysis, maxjobs=multiprocessing.cpu_count()) jobset.run(cleanup, maxjobs=multiprocessing.cpu_count()) benchmarks = [] @@ -119,8 +117,7 @@ def collect_latency(bm_name, args): cleanup = [] # run the remaining benchmarks that weren't flushed if len(benchmarks): - jobset.run(benchmarks, maxjobs=max(1, multiprocessing.cpu_count()/2), - add_env={'GRPC_TEST_PORT_SERVER': 'localhost:%d' % port_server_port}) + jobset.run(benchmarks, maxjobs=max(1, multiprocessing.cpu_count()/2)) jobset.run(profile_analysis, maxjobs=multiprocessing.cpu_count()) jobset.run(cleanup, maxjobs=multiprocessing.cpu_count()) @@ -156,8 +153,7 @@ def collect_perf(bm_name, args): if len(benchmarks) >= 20: # run up to half the cpu count: each benchmark can use up to two cores # (one for the microbenchmark, one for the data flush) - jobset.run(benchmarks, maxjobs=1, - add_env={'GRPC_TEST_PORT_SERVER': 'localhost:%d' % port_server_port}) + jobset.run(benchmarks, maxjobs=1) jobset.run(profile_analysis, maxjobs=multiprocessing.cpu_count()) jobset.run(cleanup, maxjobs=multiprocessing.cpu_count()) benchmarks = [] @@ -165,17 +161,16 @@ def collect_perf(bm_name, args): cleanup = [] # run the remaining benchmarks that weren't flushed if len(benchmarks): - jobset.run(benchmarks, maxjobs=1, - add_env={'GRPC_TEST_PORT_SERVER': 'localhost:%d' % port_server_port}) + jobset.run(benchmarks, maxjobs=1) jobset.run(profile_analysis, maxjobs=multiprocessing.cpu_count()) jobset.run(cleanup, maxjobs=multiprocessing.cpu_count()) -def run_summary(cfg): +def run_summary(bm_name, cfg, base_json_name): subprocess.check_call( ['make', 
bm_name, 'CONFIG=%s' % cfg, '-j', '%d' % multiprocessing.cpu_count()]) cmd = ['bins/%s/%s' % (cfg, bm_name), - '--benchmark_out=out.%s.json' % cfg, + '--benchmark_out=%s.%s.json' % (base_json_name, cfg), '--benchmark_out_format=json'] if args.summary_time is not None: cmd += ['--benchmark_min_time=%d' % args.summary_time] @@ -183,13 +178,15 @@ def run_summary(cfg): def collect_summary(bm_name, args): heading('Summary: %s [no counters]' % bm_name) - text(run_summary('opt')) + text(run_summary(bm_name, 'opt', bm_name)) heading('Summary: %s [with counters]' % bm_name) - text(run_summary('counters')) + text(run_summary(bm_name, 'counters', bm_name)) if args.bigquery_upload: - with open('out.csv', 'w') as f: - f.write(subprocess.check_output(['tools/profiling/microbenchmarks/bm2bq.py', 'out.counters.json', 'out.opt.json'])) - subprocess.check_call(['bq', 'load', 'microbenchmarks.microbenchmarks', 'out.csv']) + with open('%s.csv' % bm_name, 'w') as f: + f.write(subprocess.check_output(['tools/profiling/microbenchmarks/bm2bq.py', + '%s.counters.json' % bm_name, + '%s.opt.json' % bm_name])) + subprocess.check_call(['bq', 'load', 'microbenchmarks.microbenchmarks', '%s.csv' % bm_name]) collectors = { 'latency': collect_latency, @@ -200,20 +197,28 @@ collectors = { argp = argparse.ArgumentParser(description='Collect data from microbenchmarks') argp.add_argument('-c', '--collect', choices=sorted(collectors.keys()), - nargs='+', + nargs='*', default=sorted(collectors.keys()), help='Which collectors should be run against each benchmark') argp.add_argument('-b', '--benchmarks', - default=['bm_fullstack', + default=['bm_fullstack_unary_ping_pong', + 'bm_fullstack_streaming_ping_pong', + 'bm_fullstack_streaming_pump', 'bm_closure', 'bm_cq', 'bm_call_create', 'bm_error', 'bm_chttp2_hpack', - 'bm_metadata'], + 'bm_metadata', + 'bm_fullstack_trickle', + ], nargs='+', type=str, help='Which microbenchmarks should be run') +argp.add_argument('--diff_perf', + default=None, + type=str, + help='Diff microbenchmarks against this git revision') argp.add_argument('--bigquery_upload', default=False, action='store_const', @@ -225,10 +230,39 @@ argp.add_argument('--summary_time', help='Minimum time to run benchmarks for the summary collection') args = argp.parse_args() -for bm_name in args.benchmarks: +try: for collect in args.collect: - collectors[collect](bm_name, args) - -index_html += "</body>\n</html>\n" -with open('reports/index.html', 'w') as f: - f.write(index_html) + for bm_name in args.benchmarks: + collectors[collect](bm_name, args) + if args.diff_perf: + if 'summary' not in args.collect: + for bm_name in args.benchmarks: + run_summary(bm_name, 'opt', bm_name) + run_summary(bm_name, 'counters', bm_name) + where_am_i = subprocess.check_output(['git', 'rev-parse', '--abbrev-ref', 'HEAD']).strip() + subprocess.check_call(['git', 'checkout', args.diff_perf]) + comparables = [] + subprocess.check_call(['make', 'clean']) + try: + for bm_name in args.benchmarks: + try: + run_summary(bm_name, 'opt', '%s.old' % bm_name) + run_summary(bm_name, 'counters', '%s.old' % bm_name) + comparables.append(bm_name) + except subprocess.CalledProcessError, e: + pass + finally: + subprocess.check_call(['git', 'checkout', where_am_i]) + for bm_name in comparables: + diff = subprocess.check_output(['tools/profiling/microbenchmarks/bm_diff.py', + '%s.counters.json' % bm_name, + '%s.opt.json' % bm_name, + '%s.old.counters.json' % bm_name, + '%s.old.opt.json' % bm_name]).strip() + if diff: + heading('Performance diff: %s' % bm_name) + 
text(diff) +finally: + index_html += "</body>\n</html>\n" + with open('reports/index.html', 'w') as f: + f.write(index_html) diff --git a/tools/run_tests/run_performance_tests.py b/tools/run_tests/run_performance_tests.py index 7c04d228ba..35d20be5b7 100755 --- a/tools/run_tests/run_performance_tests.py +++ b/tools/run_tests/run_performance_tests.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python2.7 +#!/usr/bin/env python # Copyright 2016, Google Inc. # All rights reserved. # @@ -46,6 +46,7 @@ import tempfile import time import traceback import uuid +import six import performance.scenario_config as scenario_config import python_utils.jobset as jobset @@ -101,7 +102,7 @@ def create_qpsworker_job(language, shortname=None, port=10000, remote_host=None, user_at_host = '%s@%s' % (_REMOTE_HOST_USERNAME, remote_host) ssh_cmd = ['ssh'] cmdline = ['timeout', '%s' % (worker_timeout + 30)] + cmdline - ssh_cmd.extend([str(user_at_host), 'cd ~/performance_workspace/grpc/ && %s' % ' '.join(cmdline)]) + ssh_cmd.extend([str(user_at_host), 'cd ~/performance_workspace/grpc/ && tools/run_tests/start_port_server.py && %s' % ' '.join(cmdline)]) cmdline = ssh_cmd jobspec = jobset.JobSpec( @@ -502,8 +503,8 @@ args = argp.parse_args() languages = set(scenario_config.LANGUAGES[l] for l in itertools.chain.from_iterable( - scenario_config.LANGUAGES.iterkeys() if x == 'all' else [x] - for x in args.language)) + six.iterkeys(scenario_config.LANGUAGES) if x == 'all' + else [x] for x in args.language)) # Put together set of remote hosts where to run and build @@ -572,8 +573,8 @@ for scenario in scenarios: jobs.append(create_quit_jobspec(scenario.workers, remote_host=args.remote_driver_host)) scenario_failures, resultset = jobset.run(jobs, newline_on_success=True, maxjobs=1) total_scenario_failures += scenario_failures - merged_resultset = dict(itertools.chain(merged_resultset.iteritems(), - resultset.iteritems())) + merged_resultset = dict(itertools.chain(six.iteritems(merged_resultset), + six.iteritems(resultset))) finally: # Consider qps workers that need to be killed as failures qps_workers_killed += finish_qps_workers(scenario.workers) diff --git a/tools/run_tests/run_stress_tests.py b/tools/run_tests/run_stress_tests.py index a94a615b88..4eea02118e 100755 --- a/tools/run_tests/run_stress_tests.py +++ b/tools/run_tests/run_stress_tests.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python2.7 +#!/usr/bin/env python # Copyright 2015, Google Inc. # All rights reserved. 
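The run_summary() refactoring above threads a base_json_name parameter through so that each benchmark writes its own output files instead of every run clobbering a shared out.opt.json / out.counters.json; the '<bm>.old' base names used by --diff_perf depend on this. A minimal sketch of the naming scheme, assuming nothing beyond what the hunks show:

    def output_json_name(base_json_name, cfg):
        # run_summary() passes '--benchmark_out=%s.%s.json' % (base_json_name, cfg)
        return '%s.%s.json' % (base_json_name, cfg)

    assert output_json_name('bm_closure', 'opt') == 'bm_closure.opt.json'
    assert output_json_name('bm_closure.old', 'counters') == 'bm_closure.old.counters.json'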
# @@ -43,6 +43,7 @@ import sys import tempfile import time import uuid +import six import python_utils.dockerjob as dockerjob import python_utils.jobset as jobset @@ -239,9 +240,8 @@ servers = set( for s in itertools.chain.from_iterable(_SERVERS if x == 'all' else [x] for x in args.server)) -languages = set(_LANGUAGES[l] - for l in itertools.chain.from_iterable(_LANGUAGES.iterkeys( - ) if x == 'all' else [x] for x in args.language)) +languages = set(_LANGUAGES[l] for l in itertools.chain.from_iterable( + six.iterkeys(_LANGUAGES) if x == 'all' else [x] for x in args.language)) docker_images = {} # languages for which to build docker images @@ -267,7 +267,7 @@ if build_jobs: jobset.message('FAILED', 'Failed to build interop docker images.', do_newline=True) - for image in docker_images.itervalues(): + for image in six.itervalues(docker_images): dockerjob.remove_image(image, skip_nonexistent=True) sys.exit(1) @@ -306,7 +306,7 @@ try: if not jobs: print('No jobs to run.') - for image in docker_images.itervalues(): + for image in six.itervalues(docker_images): dockerjob.remove_image(image, skip_nonexistent=True) sys.exit(1) @@ -324,8 +324,8 @@ finally: if not job.is_running(): print('Server "%s" has exited prematurely.' % server) - dockerjob.finish_jobs([j for j in server_jobs.itervalues()]) + dockerjob.finish_jobs([j for j in six.itervalues(server_jobs)]) - for image in docker_images.itervalues(): + for image in six.itervalues(docker_images): print('Removing docker image %s' % image) dockerjob.remove_image(image) diff --git a/tools/run_tests/run_tests.py b/tools/run_tests/run_tests.py index 9741624c4f..cba91a9a63 100755 --- a/tools/run_tests/run_tests.py +++ b/tools/run_tests/run_tests.py @@ -54,6 +54,7 @@ import traceback import time from six.moves import urllib import uuid +import six import python_utils.jobset as jobset import python_utils.report_utils as report_utils @@ -692,18 +693,11 @@ class RubyLanguage(object): _check_compiler(self.args.compiler, ['default']) def test_specs(self): - #TODO(apolcyn) turn mac ruby tests back on once ruby 2.4 issues done - if platform_string() == 'mac': - print('skipping ruby test_specs on mac until running on 2.4') - return [] return [self.config.job_spec(['tools/run_tests/helper_scripts/run_ruby.sh'], timeout_seconds=10*60, environ=_FORCE_ENVIRON_FOR_WRAPPERS)] def pre_build_steps(self): - if platform_string() == 'mac': - print('skipping ruby pre_build_steps on mac until running on 2.4') - return [] return [['tools/run_tests/helper_scripts/pre_build_ruby.sh']] def make_targets(self): @@ -713,15 +707,9 @@ class RubyLanguage(object): return [] def build_steps(self): - if platform_string() == 'mac': - print('skipping ruby build_steps on mac until running on 2.4') - return [] return [['tools/run_tests/helper_scripts/build_ruby.sh']] def post_tests_steps(self): - if platform_string() == 'mac': - print('skipping ruby post_test_steps on mac until running on 2.4') - return [] return [['tools/run_tests/helper_scripts/post_tests_ruby.sh']] def makefile_name(self): @@ -784,7 +772,7 @@ class CSharpLanguage(object): runtime_cmd = ['mono'] specs = [] - for assembly in tests_by_assembly.iterkeys(): + for assembly in six.iterkeys(tests_by_assembly): assembly_file = 'src/csharp/%s/%s/%s%s' % (assembly, assembly_subdir, assembly, @@ -1311,7 +1299,9 @@ if args.use_docker: if not args.travis: env['TTY_FLAG'] = '-t' # enables Ctrl-C when not on Jenkins. 
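A pattern repeated throughout these files replaces the Python 2-only dict iterator methods (itervalues(), iterkeys(), iteritems()) with their six equivalents, which behave sensibly under both interpreters. A minimal sketch of the idiom; the dict contents here are invented for illustration:

    import six

    docker_images = {'go': 'grpc_interop_go', 'java': 'grpc_interop_java'}  # hypothetical

    # Python 2 only: docker_images.itervalues()
    # Portable: d.itervalues() on Python 2, iter(d.values()) on Python 3
    for image in six.itervalues(docker_images):
        print('Removing docker image %s' % image)

six.iterkeys() and six.iteritems() in the neighboring hunks follow the same pattern.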
- run_shell_command('tools/run_tests/dockerize/build_docker_and_run_tests.sh', env=env) + subprocess.check_call('tools/run_tests/dockerize/build_docker_and_run_tests.sh', + shell=True, + env=env) sys.exit(0) _check_arch_option(args.arch) @@ -1451,8 +1441,7 @@ def _build_and_run( # start antagonists antagonists = [subprocess.Popen(['tools/run_tests/python_utils/antagonist.py']) for _ in range(0, args.antagonists)] - port_server_port = 32766 - start_port_server.start_port_server(port_server_port) + start_port_server.start_port_server() resultset = None num_test_failures = 0 try: @@ -1479,10 +1468,9 @@ def _build_and_run( sample_size = int(num_jobs * args.sample_percent/100.0) massaged_one_run = random.sample(massaged_one_run, sample_size) if not isclose(args.sample_percent, 100.0): + assert args.runs_per_test == 1, "Can't do sampling (-p) over multiple runs (-n)." print("Running %d tests out of %d (~%d%%)" % (sample_size, num_jobs, args.sample_percent)) - else: - assert args.runs_per_test == 1, "Can't do sampling (-p) over multiple runs (-n)." if infinite_runs: assert len(massaged_one_run) > 0, 'Must have at least one test for a -n inf run' runs_sequence = (itertools.repeat(massaged_one_run) if infinite_runs @@ -1495,7 +1483,6 @@ def _build_and_run( all_runs, check_cancelled, newline_on_success=newline_on_success, travis=args.travis, maxjobs=args.jobs, stop_on_failure=args.stop_on_failure, - add_env={'GRPC_TEST_PORT_SERVER': 'localhost:%d' % port_server_port}, quiet_success=args.quiet_success) if resultset: for k, v in sorted(resultset.items()): diff --git a/tools/run_tests/run_tests_matrix.py b/tools/run_tests/run_tests_matrix.py index bc4fdaba71..b9bf6e5037 100755 --- a/tools/run_tests/run_tests_matrix.py +++ b/tools/run_tests/run_tests_matrix.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python2.7 +#!/usr/bin/env python # Copyright 2015, Google Inc. # All rights reserved. 
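The sampling hunk in run_tests.py above also fixes inverted logic: the guard against combining -p (sampling) with -n (multiple runs) previously sat in the else branch, so it only fired in the harmless 100% case. A sketch of the corrected check, with a local isclose() standing in for the script's float-comparison helper (an assumption, since that helper's definition is outside this diff):

    def isclose(a, b, rel_tol=1e-09, abs_tol=0.0):
        # tolerant float equality, in the spirit of math.isclose
        return abs(a - b) <= max(rel_tol * max(abs(a), abs(b)), abs_tol)

    def check_sampling(sample_percent, runs_per_test):
        if not isclose(sample_percent, 100.0):
            # only meaningful when sampling is actually requested
            assert runs_per_test == 1, "Can't do sampling (-p) over multiple runs (-n)."

    check_sampling(100.0, 5)  # ok: no sampling, any number of runs
    check_sampling(50.0, 1)   # ok: sampling over a single run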
# @@ -30,6 +30,8 @@ """Run test matrix.""" +from __future__ import print_function + import argparse import multiprocessing import os @@ -49,6 +51,9 @@ _RUNTESTS_TIMEOUT = 4*60*60 # Number of jobs assigned to each run_tests.py instance _DEFAULT_INNER_JOBS = 2 +# report suffix is important for reports to get picked up by internal CI +_REPORT_SUFFIX = 'sponge_log.xml' + def _docker_jobspec(name, runtests_args=[], inner_jobs=_DEFAULT_INNER_JOBS): """Run a single instance of run_tests.py in a docker container""" @@ -57,7 +62,7 @@ def _docker_jobspec(name, runtests_args=[], inner_jobs=_DEFAULT_INNER_JOBS): '--use_docker', '-t', '-j', str(inner_jobs), - '-x', 'report_%s.xml' % name, + '-x', 'report_%s_%s' % (name, _REPORT_SUFFIX), '--report_suite_name', '%s' % name] + runtests_args, shortname='run_tests_%s' % name, timeout_seconds=_RUNTESTS_TIMEOUT) @@ -70,10 +75,11 @@ def _workspace_jobspec(name, runtests_args=[], workspace_name=None, inner_jobs=_ workspace_name = 'workspace_%s' % name env = {'WORKSPACE_NAME': workspace_name} test_job = jobset.JobSpec( - cmdline=['tools/run_tests/helper_scripts/run_tests_in_workspace.sh', + cmdline=['bash', + 'tools/run_tests/helper_scripts/run_tests_in_workspace.sh', '-t', '-j', str(inner_jobs), - '-x', '../report_%s.xml' % name, + '-x', '../report_%s_%s' % (name, _REPORT_SUFFIX), '--report_suite_name', '%s' % name] + runtests_args, environ=env, shortname='run_tests_%s' % name, @@ -91,19 +97,19 @@ def _generate_jobs(languages, configs, platforms, iomgr_platform = 'native', for config in configs: name = '%s_%s_%s_%s' % (language, platform, config, iomgr_platform) runtests_args = ['-l', language, - '-c', config] + '-c', config, + '--iomgr_platform', iomgr_platform] if arch or compiler: name += '_%s_%s' % (arch, compiler) runtests_args += ['--arch', arch, '--compiler', compiler] - runtests_args += extra_args if platform == 'linux': job = _docker_jobspec(name=name, runtests_args=runtests_args, inner_jobs=inner_jobs) else: job = _workspace_jobspec(name=name, runtests_args=runtests_args, inner_jobs=inner_jobs) - job.labels = [platform, config, language] + labels + job.labels = [platform, config, language, iomgr_platform] + labels result.append(job) return result @@ -411,10 +417,10 @@ if __name__ == "__main__": maxjobs=args.jobs) # Merge skipped tests into results to show skipped tests on report.xml if skipped_jobs: - skipped_results = jobset.run(skipped_jobs, - skip_jobs=True) + ignored_num_skipped_failures, skipped_results = jobset.run( + skipped_jobs, skip_jobs=True) resultset.update(skipped_results) - report_utils.render_junit_xml_report(resultset, 'report.xml', + report_utils.render_junit_xml_report(resultset, 'report_%s' % _REPORT_SUFFIX, suite_name='aggregate_tests') if num_failures == 0: diff --git a/tools/run_tests/sanity/check_sources_and_headers.py b/tools/run_tests/sanity/check_sources_and_headers.py index a86db02b80..f2e0bfeb3d 100755 --- a/tools/run_tests/sanity/check_sources_and_headers.py +++ b/tools/run_tests/sanity/check_sources_and_headers.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python2.7 +#!/usr/bin/env python # Copyright 2015, Google Inc. # All rights reserved. # @@ -28,6 +28,8 @@ # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
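Two details in the run_tests_matrix.py hunks above are easy to miss: jobset.run() returns a (num_failures, resultset) pair, which the skipped-jobs call now unpacks instead of binding the whole tuple to skipped_results, and every report file name gains _REPORT_SUFFIX so internal CI picks the reports up. A sketch of the resulting naming, with the job name invented for illustration:

    _REPORT_SUFFIX = 'sponge_log.xml'

    def report_name(name):
        # mirrors the '-x', 'report_%s_%s' % (name, _REPORT_SUFFIX) argument
        return 'report_%s_%s' % (name, _REPORT_SUFFIX)

    assert report_name('c_linux_opt_native') == 'report_c_linux_opt_native_sponge_log.xml'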
+from __future__ import print_function
+
 import json
 import os
 import re
diff --git a/tools/run_tests/sanity/check_submodules.sh b/tools/run_tests/sanity/check_submodules.sh
index cfe4e2731c..3c5ba16b93 100755
--- a/tools/run_tests/sanity/check_submodules.sh
+++ b/tools/run_tests/sanity/check_submodules.sh
@@ -46,7 +46,7 @@ cat << EOF | awk '{ print $1 }' | sort > $want_submodules
  886e7d75368e3f4fab3f4d0d3584e4abfc557755 third_party/boringssl-with-bazel (version_for_cocoapods_7.0-857-g886e7d7)
  30dbc81fb5ffdc98ea9b14b1918bfe4e8779b26e third_party/gflags (v2.2.0)
  c99458533a9b4c743ed51537e25989ea55944908 third_party/googletest (release-1.7.0)
- a428e42072765993ff674fda72863c9f1aa2d268 third_party/protobuf (v3.1.0-alpha-1)
+ 593e917c176b5bc5aafa57bf9f6030d749d91cd5 third_party/protobuf (v3.1.0-alpha-1-326-g593e917)
  bcad91771b7f0bff28a1cac1981d7ef2b9bcef3c third_party/thrift (bcad917)
  50893291621658f355bc5b4d450a8d06a563053d third_party/zlib (v1.2.8)
 EOF
diff --git a/tools/run_tests/sanity/check_test_filtering.py b/tools/run_tests/sanity/check_test_filtering.py
index 290a6e2ddf..ba03f11209 100755
--- a/tools/run_tests/sanity/check_test_filtering.py
+++ b/tools/run_tests/sanity/check_test_filtering.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2.7
+#!/usr/bin/env python
 # Copyright 2016, Google Inc.
 # All rights reserved.
@@ -62,7 +62,7 @@ class TestFilteringTest(unittest.TestCase):
     def _get_changed_files(foo):
       return changed_files
     filter_pull_request_tests._get_changed_files = _get_changed_files
-    print
+    print()
     filtered_jobs = filter_pull_request_tests.filter_tests(all_jobs, "test")
 
     # Make sure sanity tests aren't being filtered out
diff --git a/tools/run_tests/sanity/check_version.py b/tools/run_tests/sanity/check_version.py
index e62f390818..d247260dbb 100755
--- a/tools/run_tests/sanity/check_version.py
+++ b/tools/run_tests/sanity/check_version.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2.7
+#!/usr/bin/env python
 # Copyright 2016, Google Inc.
 # All rights reserved.
@@ -29,6 +29,8 @@
 # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
 # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
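The same two-line migration recurs through the sanity scripts: swap the python2.7 shebang for plain python and, wherever the file prints anything, import print_function so the function form works on both interpreters, as the check_version.py hunks that follow show. A minimal sketch of the idiom:

    from __future__ import print_function  # harmless no-op on Python 3

    # Python 2 statement form (a syntax error once the import is in place):
    #   print 'RELEASE branch'
    print('RELEASE branch')  # valid on Python 2 (with the import) and on Python 3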
+from __future__ import print_function + import sys import yaml import os @@ -48,13 +50,13 @@ try: 'git rev-parse --abbrev-ref HEAD', shell=True) except: - print 'WARNING: not a git repository' + print('WARNING: not a git repository') branch_name = None if branch_name is not None: m = re.match(r'^release-([0-9]+)_([0-9]+)$', branch_name) if m: - print 'RELEASE branch' + print('RELEASE branch') # version number should align with the branched version check_version = lambda version: ( version.major == int(m.group(1)) and @@ -78,7 +80,7 @@ settings = build_yaml['settings'] top_version = Version(settings['version']) if not check_version(top_version): errors += 1 - print warning % ('version', top_version) + print(warning % ('version', top_version)) for tag, value in settings.iteritems(): if re.match(r'^[a-z]+_version$', tag): @@ -86,12 +88,14 @@ for tag, value in settings.iteritems(): if tag != 'core_version': if value.major != top_version.major: errors += 1 - print 'major version mismatch on %s: %d vs %d' % (tag, value.major, top_version.major) + print('major version mismatch on %s: %d vs %d' % (tag, value.major, + top_version.major)) if value.minor != top_version.minor: errors += 1 - print 'minor version mismatch on %s: %d vs %d' % (tag, value.minor, top_version.minor) + print('minor version mismatch on %s: %d vs %d' % (tag, value.minor, + top_version.minor)) if not check_version(value): errors += 1 - print warning % (tag, value) + print(warning % (tag, value)) sys.exit(errors) diff --git a/tools/run_tests/sanity/core_banned_functions.py b/tools/run_tests/sanity/core_banned_functions.py index afac10bf80..c3c3cbec76 100755 --- a/tools/run_tests/sanity/core_banned_functions.py +++ b/tools/run_tests/sanity/core_banned_functions.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python2.7 +#!/usr/bin/env python # Copyright 2016, Google Inc. # All rights reserved. @@ -29,6 +29,8 @@ # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. +from __future__ import print_function + import os import sys @@ -42,6 +44,12 @@ BANNED_EXCEPT = { 'grpc_slice_buffer_reset_and_unref(': ['src/core/lib/slice/slice_buffer.c'], 'grpc_slice_ref(': ['src/core/lib/slice/slice.c'], 'grpc_slice_unref(': ['src/core/lib/slice/slice.c'], + 'grpc_error_create(': ['src/core/lib/iomgr/error.c'], + 'grpc_error_ref(': ['src/core/lib/iomgr/error.c'], + 'grpc_error_unref(': ['src/core/lib/iomgr/error.c'], + 'grpc_os_error(': ['src/core/lib/iomgr/error.c'], + 'grpc_wsa_error(': ['src/core/lib/iomgr/error.c'], + 'grpc_log_if_error(': ['src/core/lib/iomgr/error.c'], } errors = 0 @@ -54,7 +62,7 @@ for root, dirs, files in os.walk('src/core'): for banned, exceptions in BANNED_EXCEPT.items(): if path in exceptions: continue if banned in text: - print 'Illegal use of "%s" in %s' % (banned, path) + print('Illegal use of "%s" in %s' % (banned, path)) errors += 1 assert errors == 0 diff --git a/tools/run_tests/start_port_server.py b/tools/run_tests/start_port_server.py new file mode 100755 index 0000000000..bfd72222b6 --- /dev/null +++ b/tools/run_tests/start_port_server.py @@ -0,0 +1,48 @@ +#!/usr/bin/env python + +# Copyright 2017, Google Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. 
+# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +""" +Wrapper around port server starting code. + +Used by developers who wish to run individual C/C++ tests outside of the +run_tests.py infrastructure. + +The path to this file is called out in test/core/util/port.c, and printed as +an error message to users. +""" + +from __future__ import print_function + +import python_utils.start_port_server as start_port_server + +start_port_server.start_port_server() + +print("Port server started successfully") diff --git a/tools/run_tests/stress_test/print_summary.py b/tools/run_tests/stress_test/print_summary.py index cb1a33961e..6f4ada2f4f 100755 --- a/tools/run_tests/stress_test/print_summary.py +++ b/tools/run_tests/stress_test/print_summary.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python2.7 +#!/usr/bin/env python # Copyright 2016, Google Inc. # All rights reserved. # diff --git a/tools/run_tests/stress_test/run_on_gke.py b/tools/run_tests/stress_test/run_on_gke.py index e2be76e245..b190ebde7a 100755 --- a/tools/run_tests/stress_test/run_on_gke.py +++ b/tools/run_tests/stress_test/run_on_gke.py @@ -1,4 +1,4 @@ -#!/usr/bin/env python2.7 +#!/usr/bin/env python # Copyright 2015-2016, Google Inc. # All rights reserved. # @@ -27,6 +27,9 @@ # THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
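The new tools/run_tests/start_port_server.py pairs with the earlier hunks that deleted port_server_port = 32766 and the GRPC_TEST_PORT_SERVER add_env entries: the port choice now lives inside the shared helper. A before/after sketch, assuming only what those hunks show:

    import python_utils.start_port_server as start_port_server

    # Before: each caller hard-coded a port and exported it to every job:
    #   port_server_port = 32766
    #   start_port_server.start_port_server(port_server_port)
    #   jobset.run(benchmarks, maxjobs=...,
    #              add_env={'GRPC_TEST_PORT_SERVER': 'localhost:%d' % port_server_port})

    # After: the helper owns the port and jobs need no extra environment:
    start_port_server.start_port_server()
    # jobset.run(benchmarks, maxjobs=...)

The standalone wrapper exists so that developers running individual C/C++ tests by hand can start the port server the same way, per its docstring above.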
+ +from __future__ import print_function + import argparse import datetime import json @@ -124,23 +127,24 @@ class DockerImage: return 'gcr.io/%s/%s' % (project_id, image_name) def build_image(self): - print 'Building docker image: %s (tag: %s)' % (self.image_name, - self.tag_name) + print('Building docker image: %s (tag: %s)' % (self.image_name, + self.tag_name)) os.environ['INTEROP_IMAGE'] = self.image_name os.environ['INTEROP_IMAGE_REPOSITORY_TAG'] = self.tag_name os.environ['BASE_NAME'] = self.dockerfile_dir os.environ['BUILD_TYPE'] = self.build_type - print 'DEBUG: path: ', self.build_script_path + print('DEBUG: path: ', self.build_script_path) if subprocess.call(args=[self.build_script_path]) != 0: - print 'Error in building the Docker image' + print('Error in building the Docker image') return False return True def push_to_gke_registry(self): cmd = ['gcloud', 'docker', 'push', self.tag_name] - print 'Pushing %s to the GKE registry..' % self.tag_name + print('Pushing %s to the GKE registry..' % self.tag_name) if subprocess.call(args=cmd) != 0: - print 'Error in pushing the image %s to the GKE registry' % self.tag_name + print('Error in pushing the image %s to the GKE registry' % + self.tag_name) return False return True @@ -199,11 +203,11 @@ class Gke: cmd = ['kubectl', 'proxy', '--port=%d' % port] self.p = subprocess.Popen(args=cmd) time.sleep(2) - print '\nStarted kubernetes proxy on port: %d' % port + print('\nStarted kubernetes proxy on port: %d' % port) def __del__(self): if self.p is not None: - print 'Shutting down Kubernetes proxy..' + print('Shutting down Kubernetes proxy..') self.p.kill() def __init__(self, project_id, run_id, dataset_id, summary_table_id, @@ -253,7 +257,7 @@ class Gke: for pod_name in server_pod_spec.pod_names(): server_env['POD_NAME'] = pod_name - print 'Creating server: %s' % pod_name + print('Creating server: %s' % pod_name) is_success = kubernetes_api.create_pod_and_service( 'localhost', self.kubernetes_port, @@ -267,11 +271,11 @@ class Gke: True # Headless = True for server to that GKE creates a DNS record for pod_name ) if not is_success: - print 'Error in launching server: %s' % pod_name + print('Error in launching server: %s' % pod_name) break if is_success: - print 'Successfully created server(s)' + print('Successfully created server(s)') return is_success @@ -301,7 +305,7 @@ class Gke: for pod_name in client_pod_spec.pod_names(): client_env['POD_NAME'] = pod_name - print 'Creating client: %s' % pod_name + print('Creating client: %s' % pod_name) is_success = kubernetes_api.create_pod_and_service( 'localhost', self.kubernetes_port, @@ -316,18 +320,18 @@ class Gke: ) if not is_success: - print 'Error in launching client %s' % pod_name + print('Error in launching client %s' % pod_name) break if is_success: - print 'Successfully created all client(s)' + print('Successfully created all client(s)') return is_success def _delete_pods(self, pod_name_list): is_success = True for pod_name in pod_name_list: - print 'Deleting %s' % pod_name + print('Deleting %s' % pod_name) is_success = kubernetes_api.delete_pod_and_service( 'localhost', self.kubernetes_port, @@ -335,11 +339,11 @@ class Gke: pod_name) if not is_success: - print 'Error in deleting pod %s' % pod_name + print('Error in deleting pod %s' % pod_name) break if is_success: - print 'Successfully deleted all pods' + print('Successfully deleted all pods') return is_success @@ -353,7 +357,7 @@ class Gke: class Config: def __init__(self, config_filename, gcp_project_id): - print 'Loading configuration...' 
+    print('Loading configuration...')
 
     config_dict = self._load_config(config_filename)
 
     self.global_settings = self._parse_global_settings(config_dict,
@@ -367,7 +371,7 @@ class Config:
     self.client_pod_specs_dict = self._parse_client_pod_specs(
         config_dict, self.docker_images_dict, self.client_templates_dict,
         self.server_pod_specs_dict)
-    print 'Loaded Configuaration.'
+    print('Loaded Configuaration.')
 
   def _parse_global_settings(self, config_dict, gcp_project_id):
     global_settings_dict = config_dict['globalSettings']
@@ -540,8 +544,8 @@ def run_tests(config):
   # run id. This is useful in debugging when looking at records in Biq query)
   run_id = datetime.datetime.now().strftime('%Y_%m_%d_%H_%M_%S')
   dataset_id = '%s_%s' % (config.global_settings.dataset_id_prefix, run_id)
-  print 'Run id:', run_id
-  print 'Dataset id:', dataset_id
+  print('Run id:', run_id)
+  print('Dataset id:', dataset_id)
 
   bq_helper = BigQueryHelper(run_id, '', '',
                              config.global_settings.gcp_project_id, dataset_id,
@@ -557,7 +561,7 @@ def run_tests(config):
   is_success = True
 
   try:
-    print 'Launching servers..'
+    print('Launching servers..')
     for name, server_pod_spec in config.server_pod_specs_dict.iteritems():
       if not gke.launch_servers(server_pod_spec):
         is_success = False  # is_success is checked in the 'finally' block
@@ -579,11 +583,12 @@ def run_tests(config):
     start_time = datetime.datetime.now()
     end_time = start_time + datetime.timedelta(
         seconds=config.global_settings.test_duration_secs)
-    print 'Running the test until %s' % end_time.isoformat()
+    print('Running the test until %s' % end_time.isoformat())
 
     while True:
       if datetime.datetime.now() > end_time:
-        print 'Test was run for %d seconds' % config.global_settings.test_duration_secs
+        print('Test was run for %d seconds' %
+              config.global_settings.test_duration_secs)
         break
 
       # Check if either stress server or clients have failed (btw, the bq_helper
@@ -591,11 +596,12 @@ def run_tests(config):
       # have a failure status)
       if bq_helper.check_if_any_tests_failed():
         is_success = False
-        print 'Some tests failed.'
+        print('Some tests failed.')
         break  # Don't 'return' here. We still want to call bq_helper to print qps/summary tables
 
       # Tests running fine. Wait until next poll time to check the status
-      print 'Sleeping for %d seconds..' % config.global_settings.test_poll_interval_secs
+      print('Sleeping for %d seconds..' %
+            config.global_settings.test_poll_interval_secs)
       time.sleep(config.global_settings.test_poll_interval_secs)
 
   # Print BiqQuery tables
diff --git a/tools/run_tests/task_runner.py b/tools/run_tests/task_runner.py
index fdc4668222..0ec7efbbee 100755
--- a/tools/run_tests/task_runner.py
+++ b/tools/run_tests/task_runner.py
@@ -1,4 +1,4 @@
-#!/usr/bin/env python2.7
+#!/usr/bin/env python
 # Copyright 2016, Google Inc.
 # All rights reserved.
 #
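Stepping back, the largest behavioral addition in this section is run_microbenchmark.py's --diff_perf mode: record the current branch, check out the comparison revision, rebuild from clean, rerun the summaries under '<bm>.old' base names, restore the original checkout in a finally block, and hand both JSON sets to bm_diff.py. A condensed, self-contained sketch of that flow; run_summary is passed in as a parameter here because it is the script's own helper shown earlier, and the except clause uses the Python 3-compatible spelling (the patch itself writes the Python 2-only 'except subprocess.CalledProcessError, e:', a latent syntax error now that the shebang is the generic python):

    import subprocess

    def diff_perf(benchmarks, revision, run_summary):
        where_am_i = subprocess.check_output(
            ['git', 'rev-parse', '--abbrev-ref', 'HEAD']).strip()
        subprocess.check_call(['git', 'checkout', revision])
        subprocess.check_call(['make', 'clean'])
        comparables = []
        try:
            for bm_name in benchmarks:
                try:
                    # benchmark the old revision into '<bm>.old.{opt,counters}.json'
                    run_summary(bm_name, 'opt', '%s.old' % bm_name)
                    run_summary(bm_name, 'counters', '%s.old' % bm_name)
                    comparables.append(bm_name)
                except subprocess.CalledProcessError:
                    pass  # a benchmark may not exist at the old revision; skip it
        finally:
            # always return to the original branch, even if a build failed
            subprocess.check_call(['git', 'checkout', where_am_i])
        return comparables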