From 8f4b1b032e886a39f91b6950874a54372bc30efd Mon Sep 17 00:00:00 2001 From: Jan Tattermusch Date: Thu, 30 Jul 2015 17:22:10 -0700 Subject: bump C# version to 0.6.1 and core version to 0.10.1 Conflicts: src/csharp/Grpc.Core/Version.cs --- tools/doxygen/Doxyfile.c++ | 2 +- tools/doxygen/Doxyfile.c++.internal | 2 +- tools/doxygen/Doxyfile.core | 2 +- tools/doxygen/Doxyfile.core.internal | 2 +- 4 files changed, 4 insertions(+), 4 deletions(-) (limited to 'tools') diff --git a/tools/doxygen/Doxyfile.c++ b/tools/doxygen/Doxyfile.c++ index 785779beb5..4a17fb7365 100644 --- a/tools/doxygen/Doxyfile.c++ +++ b/tools/doxygen/Doxyfile.c++ @@ -40,7 +40,7 @@ PROJECT_NAME = "GRPC C++" # could be handy for archiving the generated documentation or if some version # control system is used. -PROJECT_NUMBER = 0.10.0.0 +PROJECT_NUMBER = 0.10.1.0 # Using the PROJECT_BRIEF tag one can provide an optional one line description # for a project that appears at the top of each page and should give viewer a diff --git a/tools/doxygen/Doxyfile.c++.internal b/tools/doxygen/Doxyfile.c++.internal index 5cf6168388..297a21ae22 100644 --- a/tools/doxygen/Doxyfile.c++.internal +++ b/tools/doxygen/Doxyfile.c++.internal @@ -40,7 +40,7 @@ PROJECT_NAME = "GRPC C++" # could be handy for archiving the generated documentation or if some version # control system is used. -PROJECT_NUMBER = 0.10.0.0 +PROJECT_NUMBER = 0.10.1.0 # Using the PROJECT_BRIEF tag one can provide an optional one line description # for a project that appears at the top of each page and should give viewer a diff --git a/tools/doxygen/Doxyfile.core b/tools/doxygen/Doxyfile.core index 1bfd787528..38107b392e 100644 --- a/tools/doxygen/Doxyfile.core +++ b/tools/doxygen/Doxyfile.core @@ -40,7 +40,7 @@ PROJECT_NAME = "GRPC Core" # could be handy for archiving the generated documentation or if some version # control system is used. -PROJECT_NUMBER = 0.10.0.0 +PROJECT_NUMBER = 0.10.1.0 # Using the PROJECT_BRIEF tag one can provide an optional one line description # for a project that appears at the top of each page and should give viewer a diff --git a/tools/doxygen/Doxyfile.core.internal b/tools/doxygen/Doxyfile.core.internal index 9247beb86b..487b81a93d 100644 --- a/tools/doxygen/Doxyfile.core.internal +++ b/tools/doxygen/Doxyfile.core.internal @@ -40,7 +40,7 @@ PROJECT_NAME = "GRPC Core" # could be handy for archiving the generated documentation or if some version # control system is used. 
-PROJECT_NUMBER = 0.10.0.0 +PROJECT_NUMBER = 0.10.1.0 # Using the PROJECT_BRIEF tag one can provide an optional one line description # for a project that appears at the top of each page and should give viewer a -- cgit v1.2.3 From ed96983e22a32abd533733f3cc04f6f3b1e2ef47 Mon Sep 17 00:00:00 2001 From: Masood Malekghassemi Date: Mon, 3 Aug 2015 14:54:44 -0700 Subject: Move Python protoc plugin tests to grpcio_test --- .../grpcio_test/grpc_protoc_plugin/__init__.py | 30 ++ .../grpc_protoc_plugin/python_plugin_test.py | 541 +++++++++++++++++++++ .../grpcio_test/grpc_protoc_plugin/test.proto | 139 ++++++ src/python/grpcio_test/setup.py | 11 +- test/compiler/python_plugin_test.py | 537 -------------------- test/compiler/test.proto | 139 ------ tools/run_tests/run_python.sh | 1 + 7 files changed, 719 insertions(+), 679 deletions(-) create mode 100644 src/python/grpcio_test/grpc_protoc_plugin/__init__.py create mode 100644 src/python/grpcio_test/grpc_protoc_plugin/python_plugin_test.py create mode 100644 src/python/grpcio_test/grpc_protoc_plugin/test.proto delete mode 100644 test/compiler/python_plugin_test.py delete mode 100644 test/compiler/test.proto (limited to 'tools') diff --git a/src/python/grpcio_test/grpc_protoc_plugin/__init__.py b/src/python/grpcio_test/grpc_protoc_plugin/__init__.py new file mode 100644 index 0000000000..7086519106 --- /dev/null +++ b/src/python/grpcio_test/grpc_protoc_plugin/__init__.py @@ -0,0 +1,30 @@ +# Copyright 2015, Google Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + diff --git a/src/python/grpcio_test/grpc_protoc_plugin/python_plugin_test.py b/src/python/grpcio_test/grpc_protoc_plugin/python_plugin_test.py new file mode 100644 index 0000000000..b200d129a9 --- /dev/null +++ b/src/python/grpcio_test/grpc_protoc_plugin/python_plugin_test.py @@ -0,0 +1,541 @@ +# Copyright 2015, Google Inc. +# All rights reserved. 
+# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +import argparse +import contextlib +import distutils.spawn +import errno +import itertools +import os +import pkg_resources +import shutil +import subprocess +import sys +import tempfile +import threading +import time +import unittest + +from grpc.framework.alpha import exceptions +from grpc.framework.foundation import future + +# Identifiers of entities we expect to find in the generated module. +SERVICER_IDENTIFIER = 'EarlyAdopterTestServiceServicer' +SERVER_IDENTIFIER = 'EarlyAdopterTestServiceServer' +STUB_IDENTIFIER = 'EarlyAdopterTestServiceStub' +SERVER_FACTORY_IDENTIFIER = 'early_adopter_create_TestService_server' +STUB_FACTORY_IDENTIFIER = 'early_adopter_create_TestService_stub' + +# The timeout used in tests of RPCs that are supposed to expire. +SHORT_TIMEOUT = 2 +# The timeout used in tests of RPCs that are not supposed to expire. The +# absurdly large value doesn't matter since no passing execution of this test +# module will ever wait the duration. 
+LONG_TIMEOUT = 600 +NO_DELAY = 0 + + +class _ServicerMethods(object): + + def __init__(self, test_pb2, delay): + self._condition = threading.Condition() + self._delay = delay + self._paused = False + self._fail = False + self._test_pb2 = test_pb2 + + @contextlib.contextmanager + def pause(self): # pylint: disable=invalid-name + with self._condition: + self._paused = True + yield + with self._condition: + self._paused = False + self._condition.notify_all() + + @contextlib.contextmanager + def fail(self): # pylint: disable=invalid-name + with self._condition: + self._fail = True + yield + with self._condition: + self._fail = False + + def _control(self): # pylint: disable=invalid-name + with self._condition: + if self._fail: + raise ValueError() + while self._paused: + self._condition.wait() + time.sleep(self._delay) + + def UnaryCall(self, request, unused_rpc_context): + response = self._test_pb2.SimpleResponse() + response.payload.payload_type = self._test_pb2.COMPRESSABLE + response.payload.payload_compressable = 'a' * request.response_size + self._control() + return response + + def StreamingOutputCall(self, request, unused_rpc_context): + for parameter in request.response_parameters: + response = self._test_pb2.StreamingOutputCallResponse() + response.payload.payload_type = self._test_pb2.COMPRESSABLE + response.payload.payload_compressable = 'a' * parameter.size + self._control() + yield response + + def StreamingInputCall(self, request_iter, unused_rpc_context): + response = self._test_pb2.StreamingInputCallResponse() + aggregated_payload_size = 0 + for request in request_iter: + aggregated_payload_size += len(request.payload.payload_compressable) + response.aggregated_payload_size = aggregated_payload_size + self._control() + return response + + def FullDuplexCall(self, request_iter, unused_rpc_context): + for request in request_iter: + for parameter in request.response_parameters: + response = self._test_pb2.StreamingOutputCallResponse() + response.payload.payload_type = self._test_pb2.COMPRESSABLE + response.payload.payload_compressable = 'a' * parameter.size + self._control() + yield response + + def HalfDuplexCall(self, request_iter, unused_rpc_context): + responses = [] + for request in request_iter: + for parameter in request.response_parameters: + response = self._test_pb2.StreamingOutputCallResponse() + response.payload.payload_type = self._test_pb2.COMPRESSABLE + response.payload.payload_compressable = 'a' * parameter.size + self._control() + responses.append(response) + for response in responses: + yield response + + +@contextlib.contextmanager +def _CreateService(test_pb2, delay): + """Provides a servicer backend and a stub. + + The servicer is just the implementation + of the actual servicer passed to the face player of the python RPC + implementation; the two are detached. + + Non-zero delay puts a delay on each call to the servicer, representative of + communication latency. Timeout is the default timeout for the stub while + waiting for the service. + + Args: + test_pb2: The test_pb2 module generated by this test. + delay: Delay in seconds per response from the servicer. + + Yields: + A (servicer_methods, servicer, stub) three-tuple where servicer_methods is + the back-end of the service bound to the stub and the server and stub + are both activated and ready for use. 
+ """ + servicer_methods = _ServicerMethods(test_pb2, delay) + + class Servicer(getattr(test_pb2, SERVICER_IDENTIFIER)): + + def UnaryCall(self, request, context): + return servicer_methods.UnaryCall(request, context) + + def StreamingOutputCall(self, request, context): + return servicer_methods.StreamingOutputCall(request, context) + + def StreamingInputCall(self, request_iter, context): + return servicer_methods.StreamingInputCall(request_iter, context) + + def FullDuplexCall(self, request_iter, context): + return servicer_methods.FullDuplexCall(request_iter, context) + + def HalfDuplexCall(self, request_iter, context): + return servicer_methods.HalfDuplexCall(request_iter, context) + + servicer = Servicer() + server = getattr( + test_pb2, SERVER_FACTORY_IDENTIFIER)(servicer, 0) + with server: + port = server.port() + stub = getattr(test_pb2, STUB_FACTORY_IDENTIFIER)('localhost', port) + with stub: + yield servicer_methods, stub, server + + +def _streaming_input_request_iterator(test_pb2): + for _ in range(3): + request = test_pb2.StreamingInputCallRequest() + request.payload.payload_type = test_pb2.COMPRESSABLE + request.payload.payload_compressable = 'a' + yield request + + +def _streaming_output_request(test_pb2): + request = test_pb2.StreamingOutputCallRequest() + sizes = [1, 2, 3] + request.response_parameters.add(size=sizes[0], interval_us=0) + request.response_parameters.add(size=sizes[1], interval_us=0) + request.response_parameters.add(size=sizes[2], interval_us=0) + return request + + +def _full_duplex_request_iterator(test_pb2): + request = test_pb2.StreamingOutputCallRequest() + request.response_parameters.add(size=1, interval_us=0) + yield request + request = test_pb2.StreamingOutputCallRequest() + request.response_parameters.add(size=2, interval_us=0) + request.response_parameters.add(size=3, interval_us=0) + yield request + + +class PythonPluginTest(unittest.TestCase): + """Test case for the gRPC Python protoc-plugin. + + While reading these tests, remember that the futures API + (`stub.method.async()`) only gives futures for the *non-streaming* responses, + else it behaves like its blocking cousin. + """ + + def setUp(self): + # Assume that the appropriate protoc and grpc_python_plugins are on the + # path. + protoc_command = 'protoc' + protoc_plugin_filename = distutils.spawn.find_executable( + 'grpc_python_plugin') + test_proto_filename = pkg_resources.resource_filename( + 'grpc_protoc_plugin', 'test.proto') + if not os.path.isfile(protoc_command): + # Assume that if we haven't built protoc that it's on the system. + protoc_command = 'protoc' + + # Ensure that the output directory exists. + self.outdir = tempfile.mkdtemp() + + # Invoke protoc with the plugin. + cmd = [ + protoc_command, + '--plugin=protoc-gen-python-grpc=%s' % protoc_plugin_filename, + '-I .', + '--python_out=%s' % self.outdir, + '--python-grpc_out=%s' % self.outdir, + os.path.basename(test_proto_filename), + ] + subprocess.check_call(' '.join(cmd), shell=True, env=os.environ, + cwd=os.path.dirname(test_proto_filename)) + sys.path.append(self.outdir) + + def tearDown(self): + try: + shutil.rmtree(self.outdir) + except OSError as exc: + if exc.errno != errno.ENOENT: + raise + + # TODO(atash): Figure out which of these tests is hanging flakily with small + # probability. + + def testImportAttributes(self): + # check that we can access the generated module and its members. 
+ import test_pb2 # pylint: disable=g-import-not-at-top + self.assertIsNotNone(getattr(test_pb2, SERVICER_IDENTIFIER, None)) + self.assertIsNotNone(getattr(test_pb2, SERVER_IDENTIFIER, None)) + self.assertIsNotNone(getattr(test_pb2, STUB_IDENTIFIER, None)) + self.assertIsNotNone(getattr(test_pb2, SERVER_FACTORY_IDENTIFIER, None)) + self.assertIsNotNone(getattr(test_pb2, STUB_FACTORY_IDENTIFIER, None)) + + def testUpDown(self): + import test_pb2 + with _CreateService( + test_pb2, NO_DELAY) as (servicer, stub, unused_server): + request = test_pb2.SimpleRequest(response_size=13) + + def testUnaryCall(self): + import test_pb2 # pylint: disable=g-import-not-at-top + with _CreateService(test_pb2, NO_DELAY) as (methods, stub, unused_server): + timeout = 6 # TODO(issue 2039): LONG_TIMEOUT like the other methods. + request = test_pb2.SimpleRequest(response_size=13) + response = stub.UnaryCall(request, timeout) + expected_response = methods.UnaryCall(request, 'not a real RpcContext!') + self.assertEqual(expected_response, response) + + def testUnaryCallAsync(self): + import test_pb2 # pylint: disable=g-import-not-at-top + request = test_pb2.SimpleRequest(response_size=13) + with _CreateService(test_pb2, NO_DELAY) as ( + methods, stub, unused_server): + # Check that the call does not block waiting for the server to respond. + with methods.pause(): + response_future = stub.UnaryCall.async(request, LONG_TIMEOUT) + response = response_future.result() + expected_response = methods.UnaryCall(request, 'not a real RpcContext!') + self.assertEqual(expected_response, response) + + def testUnaryCallAsyncExpired(self): + import test_pb2 # pylint: disable=g-import-not-at-top + with _CreateService(test_pb2, NO_DELAY) as ( + methods, stub, unused_server): + request = test_pb2.SimpleRequest(response_size=13) + with methods.pause(): + response_future = stub.UnaryCall.async(request, SHORT_TIMEOUT) + with self.assertRaises(exceptions.ExpirationError): + response_future.result() + + @unittest.skip('TODO(atash,nathaniel): figure out why this flakily hangs ' + 'forever and fix.') + def testUnaryCallAsyncCancelled(self): + import test_pb2 # pylint: disable=g-import-not-at-top + request = test_pb2.SimpleRequest(response_size=13) + with _CreateService(test_pb2, NO_DELAY) as ( + methods, stub, unused_server): + with methods.pause(): + response_future = stub.UnaryCall.async(request, 1) + response_future.cancel() + self.assertTrue(response_future.cancelled()) + + def testUnaryCallAsyncFailed(self): + import test_pb2 # pylint: disable=g-import-not-at-top + request = test_pb2.SimpleRequest(response_size=13) + with _CreateService(test_pb2, NO_DELAY) as ( + methods, stub, unused_server): + with methods.fail(): + response_future = stub.UnaryCall.async(request, LONG_TIMEOUT) + self.assertIsNotNone(response_future.exception()) + + def testStreamingOutputCall(self): + import test_pb2 # pylint: disable=g-import-not-at-top + request = _streaming_output_request(test_pb2) + with _CreateService(test_pb2, NO_DELAY) as (methods, stub, unused_server): + responses = stub.StreamingOutputCall(request, LONG_TIMEOUT) + expected_responses = methods.StreamingOutputCall( + request, 'not a real RpcContext!') + for expected_response, response in itertools.izip_longest( + expected_responses, responses): + self.assertEqual(expected_response, response) + + @unittest.skip('TODO(atash,nathaniel): figure out why this flakily hangs ' + 'forever and fix.') + def testStreamingOutputCallExpired(self): + import test_pb2 # pylint: disable=g-import-not-at-top + 
request = _streaming_output_request(test_pb2) + with _CreateService(test_pb2, NO_DELAY) as ( + methods, stub, unused_server): + with methods.pause(): + responses = stub.StreamingOutputCall(request, SHORT_TIMEOUT) + with self.assertRaises(exceptions.ExpirationError): + list(responses) + + @unittest.skip('TODO(atash,nathaniel): figure out why this flakily hangs ' + 'forever and fix.') + def testStreamingOutputCallCancelled(self): + import test_pb2 # pylint: disable=g-import-not-at-top + request = _streaming_output_request(test_pb2) + with _CreateService(test_pb2, NO_DELAY) as ( + unused_methods, stub, unused_server): + responses = stub.StreamingOutputCall(request, SHORT_TIMEOUT) + next(responses) + responses.cancel() + with self.assertRaises(future.CancelledError): + next(responses) + + @unittest.skip('TODO(atash,nathaniel): figure out why this times out ' + 'instead of raising the proper error.') + def testStreamingOutputCallFailed(self): + import test_pb2 # pylint: disable=g-import-not-at-top + request = _streaming_output_request(test_pb2) + with _CreateService(test_pb2, NO_DELAY) as ( + methods, stub, unused_server): + with methods.fail(): + responses = stub.StreamingOutputCall(request, 1) + self.assertIsNotNone(responses) + with self.assertRaises(exceptions.ServicerError): + next(responses) + + @unittest.skip('TODO(atash,nathaniel): figure out why this flakily hangs ' + 'forever and fix.') + def testStreamingInputCall(self): + import test_pb2 # pylint: disable=g-import-not-at-top + with _CreateService(test_pb2, NO_DELAY) as (methods, stub, unused_server): + response = stub.StreamingInputCall( + _streaming_input_request_iterator(test_pb2), LONG_TIMEOUT) + expected_response = methods.StreamingInputCall( + _streaming_input_request_iterator(test_pb2), 'not a real RpcContext!') + self.assertEqual(expected_response, response) + + def testStreamingInputCallAsync(self): + import test_pb2 # pylint: disable=g-import-not-at-top + with _CreateService(test_pb2, NO_DELAY) as ( + methods, stub, unused_server): + with methods.pause(): + response_future = stub.StreamingInputCall.async( + _streaming_input_request_iterator(test_pb2), LONG_TIMEOUT) + response = response_future.result() + expected_response = methods.StreamingInputCall( + _streaming_input_request_iterator(test_pb2), 'not a real RpcContext!') + self.assertEqual(expected_response, response) + + def testStreamingInputCallAsyncExpired(self): + import test_pb2 # pylint: disable=g-import-not-at-top + with _CreateService(test_pb2, NO_DELAY) as ( + methods, stub, unused_server): + with methods.pause(): + response_future = stub.StreamingInputCall.async( + _streaming_input_request_iterator(test_pb2), SHORT_TIMEOUT) + with self.assertRaises(exceptions.ExpirationError): + response_future.result() + self.assertIsInstance( + response_future.exception(), exceptions.ExpirationError) + + def testStreamingInputCallAsyncCancelled(self): + import test_pb2 # pylint: disable=g-import-not-at-top + with _CreateService(test_pb2, NO_DELAY) as ( + methods, stub, unused_server): + with methods.pause(): + timeout = 6 # TODO(issue 2039): LONG_TIMEOUT like the other methods. 
+ response_future = stub.StreamingInputCall.async( + _streaming_input_request_iterator(test_pb2), timeout) + response_future.cancel() + self.assertTrue(response_future.cancelled()) + with self.assertRaises(future.CancelledError): + response_future.result() + + def testStreamingInputCallAsyncFailed(self): + import test_pb2 # pylint: disable=g-import-not-at-top + with _CreateService(test_pb2, NO_DELAY) as ( + methods, stub, unused_server): + with methods.fail(): + response_future = stub.StreamingInputCall.async( + _streaming_input_request_iterator(test_pb2), SHORT_TIMEOUT) + self.assertIsNotNone(response_future.exception()) + + def testFullDuplexCall(self): + import test_pb2 # pylint: disable=g-import-not-at-top + with _CreateService(test_pb2, NO_DELAY) as (methods, stub, unused_server): + responses = stub.FullDuplexCall( + _full_duplex_request_iterator(test_pb2), LONG_TIMEOUT) + expected_responses = methods.FullDuplexCall( + _full_duplex_request_iterator(test_pb2), 'not a real RpcContext!') + for expected_response, response in itertools.izip_longest( + expected_responses, responses): + self.assertEqual(expected_response, response) + + @unittest.skip('TODO(atash,nathaniel): figure out why this flakily hangs ' + 'forever and fix.') + def testFullDuplexCallExpired(self): + import test_pb2 # pylint: disable=g-import-not-at-top + request_iterator = _full_duplex_request_iterator(test_pb2) + with _CreateService(test_pb2, NO_DELAY) as ( + methods, stub, unused_server): + with methods.pause(): + responses = stub.FullDuplexCall(request_iterator, SHORT_TIMEOUT) + with self.assertRaises(exceptions.ExpirationError): + list(responses) + + @unittest.skip('TODO(atash,nathaniel): figure out why this flakily hangs ' + 'forever and fix.') + def testFullDuplexCallCancelled(self): + import test_pb2 # pylint: disable=g-import-not-at-top + with _CreateService(test_pb2, NO_DELAY) as (methods, stub, unused_server): + request_iterator = _full_duplex_request_iterator(test_pb2) + responses = stub.FullDuplexCall(request_iterator, LONG_TIMEOUT) + next(responses) + responses.cancel() + with self.assertRaises(future.CancelledError): + next(responses) + + @unittest.skip('TODO(atash,nathaniel): figure out why this hangs forever ' + 'and fix.') + def testFullDuplexCallFailed(self): + import test_pb2 # pylint: disable=g-import-not-at-top + request_iterator = _full_duplex_request_iterator(test_pb2) + with _CreateService(test_pb2, NO_DELAY) as ( + methods, stub, unused_server): + with methods.fail(): + responses = stub.FullDuplexCall(request_iterator, LONG_TIMEOUT) + self.assertIsNotNone(responses) + with self.assertRaises(exceptions.ServicerError): + next(responses) + + @unittest.skip('TODO(atash,nathaniel): figure out why this flakily hangs ' + 'forever and fix.') + def testHalfDuplexCall(self): + import test_pb2 # pylint: disable=g-import-not-at-top + with _CreateService(test_pb2, NO_DELAY) as ( + methods, stub, unused_server): + def half_duplex_request_iterator(): + request = test_pb2.StreamingOutputCallRequest() + request.response_parameters.add(size=1, interval_us=0) + yield request + request = test_pb2.StreamingOutputCallRequest() + request.response_parameters.add(size=2, interval_us=0) + request.response_parameters.add(size=3, interval_us=0) + yield request + responses = stub.HalfDuplexCall( + half_duplex_request_iterator(), LONG_TIMEOUT) + expected_responses = methods.HalfDuplexCall( + half_duplex_request_iterator(), 'not a real RpcContext!') + for check in itertools.izip_longest(expected_responses, responses): + 
expected_response, response = check + self.assertEqual(expected_response, response) + + def testHalfDuplexCallWedged(self): + import test_pb2 # pylint: disable=g-import-not-at-top + condition = threading.Condition() + wait_cell = [False] + @contextlib.contextmanager + def wait(): # pylint: disable=invalid-name + # Where's Python 3's 'nonlocal' statement when you need it? + with condition: + wait_cell[0] = True + yield + with condition: + wait_cell[0] = False + condition.notify_all() + def half_duplex_request_iterator(): + request = test_pb2.StreamingOutputCallRequest() + request.response_parameters.add(size=1, interval_us=0) + yield request + with condition: + while wait_cell[0]: + condition.wait() + with _CreateService(test_pb2, NO_DELAY) as (methods, stub, unused_server): + with wait(): + responses = stub.HalfDuplexCall( + half_duplex_request_iterator(), SHORT_TIMEOUT) + # half-duplex waits for the client to send all info + with self.assertRaises(exceptions.ExpirationError): + next(responses) + + +if __name__ == '__main__': + os.chdir(os.path.dirname(sys.argv[0])) + unittest.main(verbosity=2) diff --git a/src/python/grpcio_test/grpc_protoc_plugin/test.proto b/src/python/grpcio_test/grpc_protoc_plugin/test.proto new file mode 100644 index 0000000000..ed7c6a7b79 --- /dev/null +++ b/src/python/grpcio_test/grpc_protoc_plugin/test.proto @@ -0,0 +1,139 @@ +// Copyright 2015, Google Inc. +// All rights reserved. +// +// Redistribution and use in source and binary forms, with or without +// modification, are permitted provided that the following conditions are +// met: +// +// * Redistributions of source code must retain the above copyright +// notice, this list of conditions and the following disclaimer. +// * Redistributions in binary form must reproduce the above +// copyright notice, this list of conditions and the following disclaimer +// in the documentation and/or other materials provided with the +// distribution. +// * Neither the name of Google Inc. nor the names of its +// contributors may be used to endorse or promote products derived from +// this software without specific prior written permission. +// +// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +// An integration test service that covers all the method signature permutations +// of unary/streaming requests/responses. +// This file is duplicated around the code base. See GitHub issue #526. +syntax = "proto2"; + +package grpc.testing; + +enum PayloadType { + // Compressable text format. + COMPRESSABLE= 1; + + // Uncompressable binary format. + UNCOMPRESSABLE = 2; + + // Randomly chosen from all other formats defined in this enum. 
+ RANDOM = 3; +} + +message Payload { + required PayloadType payload_type = 1; + oneof payload_body { + string payload_compressable = 2; + bytes payload_uncompressable = 3; + } +} + +message SimpleRequest { + // Desired payload type in the response from the server. + // If response_type is RANDOM, server randomly chooses one from other formats. + optional PayloadType response_type = 1 [default=COMPRESSABLE]; + + // Desired payload size in the response from the server. + // If response_type is COMPRESSABLE, this denotes the size before compression. + optional int32 response_size = 2; + + // Optional input payload sent along with the request. + optional Payload payload = 3; +} + +message SimpleResponse { + optional Payload payload = 1; +} + +message StreamingInputCallRequest { + // Optional input payload sent along with the request. + optional Payload payload = 1; + + // Not expecting any payload from the response. +} + +message StreamingInputCallResponse { + // Aggregated size of payloads received from the client. + optional int32 aggregated_payload_size = 1; +} + +message ResponseParameters { + // Desired payload sizes in responses from the server. + // If response_type is COMPRESSABLE, this denotes the size before compression. + required int32 size = 1; + + // Desired interval between consecutive responses in the response stream in + // microseconds. + required int32 interval_us = 2; +} + +message StreamingOutputCallRequest { + // Desired payload type in the response from the server. + // If response_type is RANDOM, the payload from each response in the stream + // might be of different types. This is to simulate a mixed type of payload + // stream. + optional PayloadType response_type = 1 [default=COMPRESSABLE]; + + repeated ResponseParameters response_parameters = 2; + + // Optional input payload sent along with the request. + optional Payload payload = 3; +} + +message StreamingOutputCallResponse { + optional Payload payload = 1; +} + +service TestService { + // One request followed by one response. + // The server returns the client payload as-is. + rpc UnaryCall(SimpleRequest) returns (SimpleResponse); + + // One request followed by a sequence of responses (streamed download). + // The server returns the payload with client desired type and sizes. + rpc StreamingOutputCall(StreamingOutputCallRequest) + returns (stream StreamingOutputCallResponse); + + // A sequence of requests followed by one response (streamed upload). + // The server returns the aggregated size of client payload as the result. + rpc StreamingInputCall(stream StreamingInputCallRequest) + returns (StreamingInputCallResponse); + + // A sequence of requests with each request served by the server immediately. + // As one request could lead to multiple responses, this interface + // demonstrates the idea of full duplexing. + rpc FullDuplexCall(stream StreamingOutputCallRequest) + returns (stream StreamingOutputCallResponse); + + // A sequence of requests followed by a sequence of responses. + // The server buffers all the client requests and then serves them in order. A + // stream of responses are returned to the client when the server starts with + // first request. 
+ rpc HalfDuplexCall(stream StreamingOutputCallRequest) + returns (stream StreamingOutputCallResponse); +} diff --git a/src/python/grpcio_test/setup.py b/src/python/grpcio_test/setup.py index 925c32720f..a6203cae2d 100644 --- a/src/python/grpcio_test/setup.py +++ b/src/python/grpcio_test/setup.py @@ -48,8 +48,13 @@ _PACKAGE_DIRECTORIES = { _PACKAGE_DATA = { 'grpc_interop': [ - 'credentials/ca.pem', 'credentials/server1.key', - 'credentials/server1.pem',] + 'credentials/ca.pem', + 'credentials/server1.key', + 'credentials/server1.pem', + ], + 'grpc_protoc_plugin': [ + 'test.proto', + ], } _SETUP_REQUIRES = ( @@ -75,5 +80,5 @@ setuptools.setup( package_data=_PACKAGE_DATA, install_requires=_INSTALL_REQUIRES + _SETUP_REQUIRES, setup_requires=_SETUP_REQUIRES, - cmdclass=_COMMAND_CLASS + cmdclass=_COMMAND_CLASS, ) diff --git a/test/compiler/python_plugin_test.py b/test/compiler/python_plugin_test.py deleted file mode 100644 index 0e58d912b9..0000000000 --- a/test/compiler/python_plugin_test.py +++ /dev/null @@ -1,537 +0,0 @@ -# Copyright 2015, Google Inc. -# All rights reserved. -# -# Redistribution and use in source and binary forms, with or without -# modification, are permitted provided that the following conditions are -# met: -# -# * Redistributions of source code must retain the above copyright -# notice, this list of conditions and the following disclaimer. -# * Redistributions in binary form must reproduce the above -# copyright notice, this list of conditions and the following disclaimer -# in the documentation and/or other materials provided with the -# distribution. -# * Neither the name of Google Inc. nor the names of its -# contributors may be used to endorse or promote products derived from -# this software without specific prior written permission. -# -# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -import argparse -import contextlib -import errno -import itertools -import os -import shutil -import subprocess -import sys -import tempfile -import threading -import time -import unittest - -from grpc.framework.alpha import exceptions -from grpc.framework.foundation import future - -# Identifiers of entities we expect to find in the generated module. -SERVICER_IDENTIFIER = 'EarlyAdopterTestServiceServicer' -SERVER_IDENTIFIER = 'EarlyAdopterTestServiceServer' -STUB_IDENTIFIER = 'EarlyAdopterTestServiceStub' -SERVER_FACTORY_IDENTIFIER = 'early_adopter_create_TestService_server' -STUB_FACTORY_IDENTIFIER = 'early_adopter_create_TestService_stub' - -# The timeout used in tests of RPCs that are supposed to expire. -SHORT_TIMEOUT = 2 -# The timeout used in tests of RPCs that are not supposed to expire. The -# absurdly large value doesn't matter since no passing execution of this test -# module will ever wait the duration. 
-LONG_TIMEOUT = 600 -NO_DELAY = 0 - -# Build mode environment variable set by tools/run_tests/run_tests.py. -_build_mode = os.environ['CONFIG'] - - -class _ServicerMethods(object): - - def __init__(self, test_pb2, delay): - self._condition = threading.Condition() - self._delay = delay - self._paused = False - self._fail = False - self._test_pb2 = test_pb2 - - @contextlib.contextmanager - def pause(self): # pylint: disable=invalid-name - with self._condition: - self._paused = True - yield - with self._condition: - self._paused = False - self._condition.notify_all() - - @contextlib.contextmanager - def fail(self): # pylint: disable=invalid-name - with self._condition: - self._fail = True - yield - with self._condition: - self._fail = False - - def _control(self): # pylint: disable=invalid-name - with self._condition: - if self._fail: - raise ValueError() - while self._paused: - self._condition.wait() - time.sleep(self._delay) - - def UnaryCall(self, request, unused_rpc_context): - response = self._test_pb2.SimpleResponse() - response.payload.payload_type = self._test_pb2.COMPRESSABLE - response.payload.payload_compressable = 'a' * request.response_size - self._control() - return response - - def StreamingOutputCall(self, request, unused_rpc_context): - for parameter in request.response_parameters: - response = self._test_pb2.StreamingOutputCallResponse() - response.payload.payload_type = self._test_pb2.COMPRESSABLE - response.payload.payload_compressable = 'a' * parameter.size - self._control() - yield response - - def StreamingInputCall(self, request_iter, unused_rpc_context): - response = self._test_pb2.StreamingInputCallResponse() - aggregated_payload_size = 0 - for request in request_iter: - aggregated_payload_size += len(request.payload.payload_compressable) - response.aggregated_payload_size = aggregated_payload_size - self._control() - return response - - def FullDuplexCall(self, request_iter, unused_rpc_context): - for request in request_iter: - for parameter in request.response_parameters: - response = self._test_pb2.StreamingOutputCallResponse() - response.payload.payload_type = self._test_pb2.COMPRESSABLE - response.payload.payload_compressable = 'a' * parameter.size - self._control() - yield response - - def HalfDuplexCall(self, request_iter, unused_rpc_context): - responses = [] - for request in request_iter: - for parameter in request.response_parameters: - response = self._test_pb2.StreamingOutputCallResponse() - response.payload.payload_type = self._test_pb2.COMPRESSABLE - response.payload.payload_compressable = 'a' * parameter.size - self._control() - responses.append(response) - for response in responses: - yield response - - -@contextlib.contextmanager -def _CreateService(test_pb2, delay): - """Provides a servicer backend and a stub. - - The servicer is just the implementation - of the actual servicer passed to the face player of the python RPC - implementation; the two are detached. - - Non-zero delay puts a delay on each call to the servicer, representative of - communication latency. Timeout is the default timeout for the stub while - waiting for the service. - - Args: - test_pb2: The test_pb2 module generated by this test. - delay: Delay in seconds per response from the servicer. - - Yields: - A (servicer_methods, servicer, stub) three-tuple where servicer_methods is - the back-end of the service bound to the stub and the server and stub - are both activated and ready for use. 
- """ - servicer_methods = _ServicerMethods(test_pb2, delay) - - class Servicer(getattr(test_pb2, SERVICER_IDENTIFIER)): - - def UnaryCall(self, request, context): - return servicer_methods.UnaryCall(request, context) - - def StreamingOutputCall(self, request, context): - return servicer_methods.StreamingOutputCall(request, context) - - def StreamingInputCall(self, request_iter, context): - return servicer_methods.StreamingInputCall(request_iter, context) - - def FullDuplexCall(self, request_iter, context): - return servicer_methods.FullDuplexCall(request_iter, context) - - def HalfDuplexCall(self, request_iter, context): - return servicer_methods.HalfDuplexCall(request_iter, context) - - servicer = Servicer() - server = getattr( - test_pb2, SERVER_FACTORY_IDENTIFIER)(servicer, 0) - with server: - port = server.port() - stub = getattr(test_pb2, STUB_FACTORY_IDENTIFIER)('localhost', port) - with stub: - yield servicer_methods, stub, server - - -def _streaming_input_request_iterator(test_pb2): - for _ in range(3): - request = test_pb2.StreamingInputCallRequest() - request.payload.payload_type = test_pb2.COMPRESSABLE - request.payload.payload_compressable = 'a' - yield request - - -def _streaming_output_request(test_pb2): - request = test_pb2.StreamingOutputCallRequest() - sizes = [1, 2, 3] - request.response_parameters.add(size=sizes[0], interval_us=0) - request.response_parameters.add(size=sizes[1], interval_us=0) - request.response_parameters.add(size=sizes[2], interval_us=0) - return request - - -def _full_duplex_request_iterator(test_pb2): - request = test_pb2.StreamingOutputCallRequest() - request.response_parameters.add(size=1, interval_us=0) - yield request - request = test_pb2.StreamingOutputCallRequest() - request.response_parameters.add(size=2, interval_us=0) - request.response_parameters.add(size=3, interval_us=0) - yield request - - -class PythonPluginTest(unittest.TestCase): - """Test case for the gRPC Python protoc-plugin. - - While reading these tests, remember that the futures API - (`stub.method.async()`) only gives futures for the *non-streaming* responses, - else it behaves like its blocking cousin. - """ - - def setUp(self): - protoc_command = '../../bins/%s/protobuf/protoc' % _build_mode - protoc_plugin_filename = '../../bins/%s/grpc_python_plugin' % _build_mode - test_proto_filename = './test.proto' - if not os.path.isfile(protoc_command): - # Assume that if we haven't built protoc that it's on the system. - protoc_command = 'protoc' - - # Ensure that the output directory exists. - self.outdir = tempfile.mkdtemp() - - # Invoke protoc with the plugin. - cmd = [ - protoc_command, - '--plugin=protoc-gen-python-grpc=%s' % protoc_plugin_filename, - '-I %s' % os.path.dirname(test_proto_filename), - '--python_out=%s' % self.outdir, - '--python-grpc_out=%s' % self.outdir, - os.path.basename(test_proto_filename), - ] - subprocess.call(' '.join(cmd), shell=True) - sys.path.append(self.outdir) - - def tearDown(self): - try: - shutil.rmtree(self.outdir) - except OSError as exc: - if exc.errno != errno.ENOENT: - raise - - # TODO(atash): Figure out which of these tests is hanging flakily with small - # probability. - - def testImportAttributes(self): - # check that we can access the generated module and its members. 
- import test_pb2 # pylint: disable=g-import-not-at-top - self.assertIsNotNone(getattr(test_pb2, SERVICER_IDENTIFIER, None)) - self.assertIsNotNone(getattr(test_pb2, SERVER_IDENTIFIER, None)) - self.assertIsNotNone(getattr(test_pb2, STUB_IDENTIFIER, None)) - self.assertIsNotNone(getattr(test_pb2, SERVER_FACTORY_IDENTIFIER, None)) - self.assertIsNotNone(getattr(test_pb2, STUB_FACTORY_IDENTIFIER, None)) - - def testUpDown(self): - import test_pb2 - with _CreateService( - test_pb2, NO_DELAY) as (servicer, stub, unused_server): - request = test_pb2.SimpleRequest(response_size=13) - - def testUnaryCall(self): - import test_pb2 # pylint: disable=g-import-not-at-top - with _CreateService(test_pb2, NO_DELAY) as (methods, stub, unused_server): - timeout = 6 # TODO(issue 2039): LONG_TIMEOUT like the other methods. - request = test_pb2.SimpleRequest(response_size=13) - response = stub.UnaryCall(request, timeout) - expected_response = methods.UnaryCall(request, 'not a real RpcContext!') - self.assertEqual(expected_response, response) - - def testUnaryCallAsync(self): - import test_pb2 # pylint: disable=g-import-not-at-top - request = test_pb2.SimpleRequest(response_size=13) - with _CreateService(test_pb2, NO_DELAY) as ( - methods, stub, unused_server): - # Check that the call does not block waiting for the server to respond. - with methods.pause(): - response_future = stub.UnaryCall.async(request, LONG_TIMEOUT) - response = response_future.result() - expected_response = methods.UnaryCall(request, 'not a real RpcContext!') - self.assertEqual(expected_response, response) - - def testUnaryCallAsyncExpired(self): - import test_pb2 # pylint: disable=g-import-not-at-top - with _CreateService(test_pb2, NO_DELAY) as ( - methods, stub, unused_server): - request = test_pb2.SimpleRequest(response_size=13) - with methods.pause(): - response_future = stub.UnaryCall.async(request, SHORT_TIMEOUT) - with self.assertRaises(exceptions.ExpirationError): - response_future.result() - - @unittest.skip('TODO(atash,nathaniel): figure out why this flakily hangs ' - 'forever and fix.') - def testUnaryCallAsyncCancelled(self): - import test_pb2 # pylint: disable=g-import-not-at-top - request = test_pb2.SimpleRequest(response_size=13) - with _CreateService(test_pb2, NO_DELAY) as ( - methods, stub, unused_server): - with methods.pause(): - response_future = stub.UnaryCall.async(request, 1) - response_future.cancel() - self.assertTrue(response_future.cancelled()) - - def testUnaryCallAsyncFailed(self): - import test_pb2 # pylint: disable=g-import-not-at-top - request = test_pb2.SimpleRequest(response_size=13) - with _CreateService(test_pb2, NO_DELAY) as ( - methods, stub, unused_server): - with methods.fail(): - response_future = stub.UnaryCall.async(request, LONG_TIMEOUT) - self.assertIsNotNone(response_future.exception()) - - def testStreamingOutputCall(self): - import test_pb2 # pylint: disable=g-import-not-at-top - request = _streaming_output_request(test_pb2) - with _CreateService(test_pb2, NO_DELAY) as (methods, stub, unused_server): - responses = stub.StreamingOutputCall(request, LONG_TIMEOUT) - expected_responses = methods.StreamingOutputCall( - request, 'not a real RpcContext!') - for expected_response, response in itertools.izip_longest( - expected_responses, responses): - self.assertEqual(expected_response, response) - - @unittest.skip('TODO(atash,nathaniel): figure out why this flakily hangs ' - 'forever and fix.') - def testStreamingOutputCallExpired(self): - import test_pb2 # pylint: disable=g-import-not-at-top - 
request = _streaming_output_request(test_pb2) - with _CreateService(test_pb2, NO_DELAY) as ( - methods, stub, unused_server): - with methods.pause(): - responses = stub.StreamingOutputCall(request, SHORT_TIMEOUT) - with self.assertRaises(exceptions.ExpirationError): - list(responses) - - @unittest.skip('TODO(atash,nathaniel): figure out why this flakily hangs ' - 'forever and fix.') - def testStreamingOutputCallCancelled(self): - import test_pb2 # pylint: disable=g-import-not-at-top - request = _streaming_output_request(test_pb2) - with _CreateService(test_pb2, NO_DELAY) as ( - unused_methods, stub, unused_server): - responses = stub.StreamingOutputCall(request, SHORT_TIMEOUT) - next(responses) - responses.cancel() - with self.assertRaises(future.CancelledError): - next(responses) - - @unittest.skip('TODO(atash,nathaniel): figure out why this times out ' - 'instead of raising the proper error.') - def testStreamingOutputCallFailed(self): - import test_pb2 # pylint: disable=g-import-not-at-top - request = _streaming_output_request(test_pb2) - with _CreateService(test_pb2, NO_DELAY) as ( - methods, stub, unused_server): - with methods.fail(): - responses = stub.StreamingOutputCall(request, 1) - self.assertIsNotNone(responses) - with self.assertRaises(exceptions.ServicerError): - next(responses) - - @unittest.skip('TODO(atash,nathaniel): figure out why this flakily hangs ' - 'forever and fix.') - def testStreamingInputCall(self): - import test_pb2 # pylint: disable=g-import-not-at-top - with _CreateService(test_pb2, NO_DELAY) as (methods, stub, unused_server): - response = stub.StreamingInputCall( - _streaming_input_request_iterator(test_pb2), LONG_TIMEOUT) - expected_response = methods.StreamingInputCall( - _streaming_input_request_iterator(test_pb2), 'not a real RpcContext!') - self.assertEqual(expected_response, response) - - def testStreamingInputCallAsync(self): - import test_pb2 # pylint: disable=g-import-not-at-top - with _CreateService(test_pb2, NO_DELAY) as ( - methods, stub, unused_server): - with methods.pause(): - response_future = stub.StreamingInputCall.async( - _streaming_input_request_iterator(test_pb2), LONG_TIMEOUT) - response = response_future.result() - expected_response = methods.StreamingInputCall( - _streaming_input_request_iterator(test_pb2), 'not a real RpcContext!') - self.assertEqual(expected_response, response) - - def testStreamingInputCallAsyncExpired(self): - import test_pb2 # pylint: disable=g-import-not-at-top - with _CreateService(test_pb2, NO_DELAY) as ( - methods, stub, unused_server): - with methods.pause(): - response_future = stub.StreamingInputCall.async( - _streaming_input_request_iterator(test_pb2), SHORT_TIMEOUT) - with self.assertRaises(exceptions.ExpirationError): - response_future.result() - self.assertIsInstance( - response_future.exception(), exceptions.ExpirationError) - - def testStreamingInputCallAsyncCancelled(self): - import test_pb2 # pylint: disable=g-import-not-at-top - with _CreateService(test_pb2, NO_DELAY) as ( - methods, stub, unused_server): - with methods.pause(): - timeout = 6 # TODO(issue 2039): LONG_TIMEOUT like the other methods. 
- response_future = stub.StreamingInputCall.async( - _streaming_input_request_iterator(test_pb2), timeout) - response_future.cancel() - self.assertTrue(response_future.cancelled()) - with self.assertRaises(future.CancelledError): - response_future.result() - - def testStreamingInputCallAsyncFailed(self): - import test_pb2 # pylint: disable=g-import-not-at-top - with _CreateService(test_pb2, NO_DELAY) as ( - methods, stub, unused_server): - with methods.fail(): - response_future = stub.StreamingInputCall.async( - _streaming_input_request_iterator(test_pb2), SHORT_TIMEOUT) - self.assertIsNotNone(response_future.exception()) - - def testFullDuplexCall(self): - import test_pb2 # pylint: disable=g-import-not-at-top - with _CreateService(test_pb2, NO_DELAY) as (methods, stub, unused_server): - responses = stub.FullDuplexCall( - _full_duplex_request_iterator(test_pb2), LONG_TIMEOUT) - expected_responses = methods.FullDuplexCall( - _full_duplex_request_iterator(test_pb2), 'not a real RpcContext!') - for expected_response, response in itertools.izip_longest( - expected_responses, responses): - self.assertEqual(expected_response, response) - - @unittest.skip('TODO(atash,nathaniel): figure out why this flakily hangs ' - 'forever and fix.') - def testFullDuplexCallExpired(self): - import test_pb2 # pylint: disable=g-import-not-at-top - request_iterator = _full_duplex_request_iterator(test_pb2) - with _CreateService(test_pb2, NO_DELAY) as ( - methods, stub, unused_server): - with methods.pause(): - responses = stub.FullDuplexCall(request_iterator, SHORT_TIMEOUT) - with self.assertRaises(exceptions.ExpirationError): - list(responses) - - @unittest.skip('TODO(atash,nathaniel): figure out why this flakily hangs ' - 'forever and fix.') - def testFullDuplexCallCancelled(self): - import test_pb2 # pylint: disable=g-import-not-at-top - with _CreateService(test_pb2, NO_DELAY) as (methods, stub, unused_server): - request_iterator = _full_duplex_request_iterator(test_pb2) - responses = stub.FullDuplexCall(request_iterator, LONG_TIMEOUT) - next(responses) - responses.cancel() - with self.assertRaises(future.CancelledError): - next(responses) - - @unittest.skip('TODO(atash,nathaniel): figure out why this hangs forever ' - 'and fix.') - def testFullDuplexCallFailed(self): - import test_pb2 # pylint: disable=g-import-not-at-top - request_iterator = _full_duplex_request_iterator(test_pb2) - with _CreateService(test_pb2, NO_DELAY) as ( - methods, stub, unused_server): - with methods.fail(): - responses = stub.FullDuplexCall(request_iterator, LONG_TIMEOUT) - self.assertIsNotNone(responses) - with self.assertRaises(exceptions.ServicerError): - next(responses) - - @unittest.skip('TODO(atash,nathaniel): figure out why this flakily hangs ' - 'forever and fix.') - def testHalfDuplexCall(self): - import test_pb2 # pylint: disable=g-import-not-at-top - with _CreateService(test_pb2, NO_DELAY) as ( - methods, stub, unused_server): - def half_duplex_request_iterator(): - request = test_pb2.StreamingOutputCallRequest() - request.response_parameters.add(size=1, interval_us=0) - yield request - request = test_pb2.StreamingOutputCallRequest() - request.response_parameters.add(size=2, interval_us=0) - request.response_parameters.add(size=3, interval_us=0) - yield request - responses = stub.HalfDuplexCall( - half_duplex_request_iterator(), LONG_TIMEOUT) - expected_responses = methods.HalfDuplexCall( - half_duplex_request_iterator(), 'not a real RpcContext!') - for check in itertools.izip_longest(expected_responses, responses): - 
expected_response, response = check - self.assertEqual(expected_response, response) - - def testHalfDuplexCallWedged(self): - import test_pb2 # pylint: disable=g-import-not-at-top - condition = threading.Condition() - wait_cell = [False] - @contextlib.contextmanager - def wait(): # pylint: disable=invalid-name - # Where's Python 3's 'nonlocal' statement when you need it? - with condition: - wait_cell[0] = True - yield - with condition: - wait_cell[0] = False - condition.notify_all() - def half_duplex_request_iterator(): - request = test_pb2.StreamingOutputCallRequest() - request.response_parameters.add(size=1, interval_us=0) - yield request - with condition: - while wait_cell[0]: - condition.wait() - with _CreateService(test_pb2, NO_DELAY) as (methods, stub, unused_server): - with wait(): - responses = stub.HalfDuplexCall( - half_duplex_request_iterator(), SHORT_TIMEOUT) - # half-duplex waits for the client to send all info - with self.assertRaises(exceptions.ExpirationError): - next(responses) - - -if __name__ == '__main__': - os.chdir(os.path.dirname(sys.argv[0])) - unittest.main(verbosity=2) diff --git a/test/compiler/test.proto b/test/compiler/test.proto deleted file mode 100644 index ed7c6a7b79..0000000000 --- a/test/compiler/test.proto +++ /dev/null @@ -1,139 +0,0 @@ -// Copyright 2015, Google Inc. -// All rights reserved. -// -// Redistribution and use in source and binary forms, with or without -// modification, are permitted provided that the following conditions are -// met: -// -// * Redistributions of source code must retain the above copyright -// notice, this list of conditions and the following disclaimer. -// * Redistributions in binary form must reproduce the above -// copyright notice, this list of conditions and the following disclaimer -// in the documentation and/or other materials provided with the -// distribution. -// * Neither the name of Google Inc. nor the names of its -// contributors may be used to endorse or promote products derived from -// this software without specific prior written permission. -// -// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS -// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT -// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR -// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT -// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, -// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT -// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, -// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY -// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT -// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE -// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - -// An integration test service that covers all the method signature permutations -// of unary/streaming requests/responses. -// This file is duplicated around the code base. See GitHub issue #526. -syntax = "proto2"; - -package grpc.testing; - -enum PayloadType { - // Compressable text format. - COMPRESSABLE= 1; - - // Uncompressable binary format. - UNCOMPRESSABLE = 2; - - // Randomly chosen from all other formats defined in this enum. 
- RANDOM = 3; -} - -message Payload { - required PayloadType payload_type = 1; - oneof payload_body { - string payload_compressable = 2; - bytes payload_uncompressable = 3; - } -} - -message SimpleRequest { - // Desired payload type in the response from the server. - // If response_type is RANDOM, server randomly chooses one from other formats. - optional PayloadType response_type = 1 [default=COMPRESSABLE]; - - // Desired payload size in the response from the server. - // If response_type is COMPRESSABLE, this denotes the size before compression. - optional int32 response_size = 2; - - // Optional input payload sent along with the request. - optional Payload payload = 3; -} - -message SimpleResponse { - optional Payload payload = 1; -} - -message StreamingInputCallRequest { - // Optional input payload sent along with the request. - optional Payload payload = 1; - - // Not expecting any payload from the response. -} - -message StreamingInputCallResponse { - // Aggregated size of payloads received from the client. - optional int32 aggregated_payload_size = 1; -} - -message ResponseParameters { - // Desired payload sizes in responses from the server. - // If response_type is COMPRESSABLE, this denotes the size before compression. - required int32 size = 1; - - // Desired interval between consecutive responses in the response stream in - // microseconds. - required int32 interval_us = 2; -} - -message StreamingOutputCallRequest { - // Desired payload type in the response from the server. - // If response_type is RANDOM, the payload from each response in the stream - // might be of different types. This is to simulate a mixed type of payload - // stream. - optional PayloadType response_type = 1 [default=COMPRESSABLE]; - - repeated ResponseParameters response_parameters = 2; - - // Optional input payload sent along with the request. - optional Payload payload = 3; -} - -message StreamingOutputCallResponse { - optional Payload payload = 1; -} - -service TestService { - // One request followed by one response. - // The server returns the client payload as-is. - rpc UnaryCall(SimpleRequest) returns (SimpleResponse); - - // One request followed by a sequence of responses (streamed download). - // The server returns the payload with client desired type and sizes. - rpc StreamingOutputCall(StreamingOutputCallRequest) - returns (stream StreamingOutputCallResponse); - - // A sequence of requests followed by one response (streamed upload). - // The server returns the aggregated size of client payload as the result. - rpc StreamingInputCall(stream StreamingInputCallRequest) - returns (StreamingInputCallResponse); - - // A sequence of requests with each request served by the server immediately. - // As one request could lead to multiple responses, this interface - // demonstrates the idea of full duplexing. - rpc FullDuplexCall(stream StreamingOutputCallRequest) - returns (stream StreamingOutputCallResponse); - - // A sequence of requests followed by a sequence of responses. - // The server buffers all the client requests and then serves them in order. A - // stream of responses are returned to the client when the server starts with - // first request. 
- rpc HalfDuplexCall(stream StreamingOutputCallRequest) - returns (stream StreamingOutputCallResponse); -} diff --git a/tools/run_tests/run_python.sh b/tools/run_tests/run_python.sh index 5ffd4460b9..6f80219b0e 100755 --- a/tools/run_tests/run_python.sh +++ b/tools/run_tests/run_python.sh @@ -37,5 +37,6 @@ ROOT=`pwd` GRPCIO_TEST=$ROOT/src/python/grpcio_test export LD_LIBRARY_PATH=$ROOT/libs/$CONFIG export DYLD_LIBRARY_PATH=$ROOT/libs/$CONFIG +export PATH=$ROOT/bins/$CONFIG:$ROOT/bins/$CONFIG/protobuf:$PATH source "python"$PYVER"_virtual_environment"/bin/activate "python"$PYVER $GRPCIO_TEST/setup.py test -a "-n8 --cov=grpc --junitxml=./report.xml" -- cgit v1.2.3 From c897798fb163cf82f8070512b9b39d366d4b1118 Mon Sep 17 00:00:00 2001 From: Donna Dionne Date: Fri, 7 Aug 2015 11:22:07 -0700 Subject: adding node, ruby, and php to cloud to prod interop tests. --- tools/run_tests/run_interops.py | 18 +++++++----------- tools/run_tests/run_interops_build.sh | 36 +++++++++++++++++++++++++++++++---- tools/run_tests/run_interops_test.sh | 11 +++++++++++ 3 files changed, 50 insertions(+), 15 deletions(-) (limited to 'tools') diff --git a/tools/run_tests/run_interops.py b/tools/run_tests/run_interops.py index 1cf268526d..4e6b5ce2f6 100755 --- a/tools/run_tests/run_interops.py +++ b/tools/run_tests/run_interops.py @@ -4,24 +4,20 @@ import jobset argp = argparse.ArgumentParser(description='Run interop tests.') argp.add_argument('-l', '--language', - choices=['build_only', 'c++'], - nargs='+', - default=['build_only']) + default='c++') args = argp.parse_args() # build job -build_steps = 'tools/run_tests/run_interops_build.sh' -build_job = jobset.JobSpec(cmdline=build_steps, shortname='build') +build_job = jobset.JobSpec(cmdline=['tools/run_tests/run_interops_build.sh', '%s' % args.language], shortname='build') -# test jobs +# test jobs, each test is a separate job to run in parallel _TESTS = ['large_unary', 'empty_unary', 'ping_pong', 'client_streaming', 'server_streaming'] jobs = [] jobNumber = 0 -for lang in args.language: - for test in _TESTS: - test_job = jobset.JobSpec(cmdline=['tools/run_tests/run_interops_test.sh', '%s' % lang, '%s' % test], shortname=test) - jobs.append(test_job) - jobNumber+=1 +for test in _TESTS: + test_job = jobset.JobSpec(cmdline=['tools/run_tests/run_interops_test.sh', '%s' % args.language, '%s' % test], shortname=test) + jobs.append(test_job) + jobNumber+=1 root = ET.Element('testsuites') testsuite = ET.SubElement(root, 'testsuite', id='1', package='grpc', name='tests') diff --git a/tools/run_tests/run_interops_build.sh b/tools/run_tests/run_interops_build.sh index 23441a5300..ff1a26cf89 100755 --- a/tools/run_tests/run_interops_build.sh +++ b/tools/run_tests/run_interops_build.sh @@ -29,6 +29,8 @@ # (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE # OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
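With run_interops.py above now taking a single --language value and scheduling one jobset job per test case, and with run_interops_build.sh below dispatching the docker build on that same value, the end-to-end flow of one run can be sketched as a plain script. This is a serialized illustration only (the real runner drives the same commands through jobset so the test-case jobs run in parallel); the language value is whatever the caller passes in, with c++ as the default.

    #!/bin/bash
    # Flattened view of the jobs run_interops.py schedules for one language.
    # The loop runs the cases one after another purely for readability; the
    # Python runner submits them as independent, parallel jobs instead.
    set -e
    lang=c++   # run_interops_build.sh also understands node, ruby and php

    # Single build job: builds the language-specific interop docker image.
    tools/run_tests/run_interops_build.sh "$lang"

    # One test job per case, matching the _TESTS list in run_interops.py.
    for test_case in large_unary empty_unary ping_pong client_streaming server_streaming; do
      tools/run_tests/run_interops_test.sh "$lang" "$test_case"
    done
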
+language=$1 + set -e #clean up any old docker files and start mirroring repository if not started already @@ -40,8 +42,34 @@ sudo docker run -d -e GCS_BUCKET=docker-interop-images -e STORAGE_PATH=/admin/d #prepare building by pulling down base images and necessary files sudo docker pull 0.0.0.0:5000/grpc/base sudo docker tag -f 0.0.0.0:5000/grpc/base grpc/base -gsutil cp -R gs://docker-interop-images/admin/service_account tools/dockerfile/grpc_cxx -gsutil cp -R gs://docker-interop-images/admin/cacerts tools/dockerfile/grpc_cxx -#build docker file, add more languages later -sudo docker build --no-cache -t grpc/cxx tools/dockerfile/grpc_cxx +if [ "$language" = "c++" ] +then + gsutil cp -R gs://docker-interop-images/admin/service_account tools/dockerfile/grpc_cxx + gsutil cp -R gs://docker-interop-images/admin/cacerts tools/dockerfile/grpc_cxx + sudo docker build --no-cache -t grpc/cxx tools/dockerfile/grpc_cxx +elif [ "$language" = "node" ] +then + sudo docker pull 0.0.0.0:5000/grpc/node_base + sudo docker tag -f 0.0.0.0:5000/grpc/node_base grpc/node_base + gsutil cp -R gs://docker-interop-images/admin/service_account tools/dockerfile/grpc_node + gsutil cp -R gs://docker-interop-images/admin/cacerts tools/dockerfile/grpc_node + sudo docker build --no-cache -t grpc/node tools/dockerfile/grpc_node +elif [ "$language" = "ruby" ] +then + sudo docker pull 0.0.0.0:5000/grpc/ruby_base + sudo docker tag -f 0.0.0.0:5000/grpc/ruby_base grpc/ruby_base + gsutil cp -R gs://docker-interop-images/admin/service_account tools/dockerfile/grpc_ruby + gsutil cp -R gs://docker-interop-images/admin/cacerts tools/dockerfile/grpc_ruby + sudo docker build --no-cache -t grpc/ruby tools/dockerfile/grpc_ruby +elif [ "$language" = "php" ] +then + sudo docker pull 0.0.0.0:5000/grpc/php_base + sudo docker tag -f 0.0.0.0:5000/grpc/php_base grpc/php_base + gsutil cp -R gs://docker-interop-images/admin/service_account tools/dockerfile/grpc_php + gsutil cp -R gs://docker-interop-images/admin/cacerts tools/dockerfile/grpc_php + sudo docker build --no-cache -t grpc/php tools/dockerfile/grpc_php +else + echo "interop testss not added for $language" + exit 1 +fi diff --git a/tools/run_tests/run_interops_test.sh b/tools/run_tests/run_interops_test.sh index 1d0eedad85..9be253af46 100755 --- a/tools/run_tests/run_interops_test.sh +++ b/tools/run_tests/run_interops_test.sh @@ -36,6 +36,17 @@ set -e if [ "$language" = "c++" ] then sudo docker run grpc/cxx /var/local/git/grpc/bins/opt/interop_client --enable_ssl --use_prod_roots --server_host_override=grpc-test.sandbox.google.com --server_host=grpc-test.sandbox.google.com --server_port=443 --test_case=$test_case +elif [ "$language" = "node" ] +then + sudo docker run grpc/node /usr/bin/nodejs /var/local/git/grpc/src/node/interop/interop_client.js --use_tls=true --use_test_ca=true --server_port=443 --server_host=grpc-test.sandbox.google.com --server_host_override=grpc-test.sandbox.google.com --test_case=$test_case +elif [ "$language" = "ruby" ] +then + cmd_prefix="SSL_CERT_FILE=/cacerts/roots.pem ruby /var/local/git/grpc/src/ruby/bin/interop/interop_client.rb --use_tls --server_port=443 --server_host=grpc-test.sandbox.google.com --server_host_override=grpc-test.sandbox.google.com " + cmd="$cmd_prefix --test_case=$test_case" + sudo docker run grpc/ruby bin/bash -l -c '$cmd' +elif [ "$language" = "php" ] +then + sudo docker run -e SSL_CERT_FILE=/cacerts/roots.pem grpc/php /var/local/git/grpc/src/php/bin/interop_client.sh --server_port=443 --server_host=grpc-test.sandbox.google.com 
--server_host_override=grpc-test.sandbox.google.com --test_case=$test_case else echo "interop testss not added for $language" exit 1 -- cgit v1.2.3 From ffc8a6b4318fdf14516b4b0a816e118131c2f209 Mon Sep 17 00:00:00 2001 From: yang-g Date: Mon, 10 Aug 2015 12:26:39 -0700 Subject: move auth property iterator declaration into auth_context.h --- BUILD | 2 - Makefile | 2 - build.json | 1 - include/grpc++/auth_context.h | 32 ++++++++- include/grpc++/auth_property_iterator.h | 77 ---------------------- src/cpp/common/auth_property_iterator.cc | 2 +- tools/doxygen/Doxyfile.c++ | 1 - tools/doxygen/Doxyfile.c++.internal | 1 - tools/run_tests/sources_and_headers.json | 4 -- vsprojects/grpc++/grpc++.vcxproj | 1 - vsprojects/grpc++/grpc++.vcxproj.filters | 3 - vsprojects/grpc++_unsecure/grpc++_unsecure.vcxproj | 1 - .../grpc++_unsecure.vcxproj.filters | 3 - 13 files changed, 32 insertions(+), 98 deletions(-) delete mode 100644 include/grpc++/auth_property_iterator.h (limited to 'tools') diff --git a/BUILD b/BUILD index dcabd648e4..9eaabbbccd 100644 --- a/BUILD +++ b/BUILD @@ -690,7 +690,6 @@ cc_library( "include/grpc++/async_generic_service.h", "include/grpc++/async_unary_call.h", "include/grpc++/auth_context.h", - "include/grpc++/auth_property_iterator.h", "include/grpc++/byte_buffer.h", "include/grpc++/channel_arguments.h", "include/grpc++/channel_interface.h", @@ -778,7 +777,6 @@ cc_library( "include/grpc++/async_generic_service.h", "include/grpc++/async_unary_call.h", "include/grpc++/auth_context.h", - "include/grpc++/auth_property_iterator.h", "include/grpc++/byte_buffer.h", "include/grpc++/channel_arguments.h", "include/grpc++/channel_interface.h", diff --git a/Makefile b/Makefile index 181194f78f..5cdc46241e 100644 --- a/Makefile +++ b/Makefile @@ -4484,7 +4484,6 @@ PUBLIC_HEADERS_CXX += \ include/grpc++/async_generic_service.h \ include/grpc++/async_unary_call.h \ include/grpc++/auth_context.h \ - include/grpc++/auth_property_iterator.h \ include/grpc++/byte_buffer.h \ include/grpc++/channel_arguments.h \ include/grpc++/channel_interface.h \ @@ -4728,7 +4727,6 @@ PUBLIC_HEADERS_CXX += \ include/grpc++/async_generic_service.h \ include/grpc++/async_unary_call.h \ include/grpc++/auth_context.h \ - include/grpc++/auth_property_iterator.h \ include/grpc++/byte_buffer.h \ include/grpc++/channel_arguments.h \ include/grpc++/channel_interface.h \ diff --git a/build.json b/build.json index 515cecdc5a..252694a663 100644 --- a/build.json +++ b/build.json @@ -33,7 +33,6 @@ "include/grpc++/async_generic_service.h", "include/grpc++/async_unary_call.h", "include/grpc++/auth_context.h", - "include/grpc++/auth_property_iterator.h", "include/grpc++/byte_buffer.h", "include/grpc++/channel_arguments.h", "include/grpc++/channel_interface.h", diff --git a/include/grpc++/auth_context.h b/include/grpc++/auth_context.h index c42105b927..f8ea8ad6f4 100644 --- a/include/grpc++/auth_context.h +++ b/include/grpc++/auth_context.h @@ -34,12 +34,42 @@ #ifndef GRPCXX_AUTH_CONTEXT_H #define GRPCXX_AUTH_CONTEXT_H +#include #include -#include #include +struct grpc_auth_context; +struct grpc_auth_property; +struct grpc_auth_property_iterator; + namespace grpc { +class SecureAuthContext; + +typedef std::pair AuthProperty; + +class AuthPropertyIterator + : public std::iterator { + public: + ~AuthPropertyIterator(); + AuthPropertyIterator& operator++(); + AuthPropertyIterator operator++(int); + bool operator==(const AuthPropertyIterator& rhs) const; + bool operator!=(const AuthPropertyIterator& rhs) const; + const AuthProperty 
operator*(); + + protected: + AuthPropertyIterator(); + AuthPropertyIterator(const grpc_auth_property* property, + const grpc_auth_property_iterator* iter); + private: + friend class SecureAuthContext; + const grpc_auth_property* property_; + // The following items form a grpc_auth_property_iterator. + const grpc_auth_context* ctx_; + size_t index_; + const char* name_; +}; class AuthContext { public: diff --git a/include/grpc++/auth_property_iterator.h b/include/grpc++/auth_property_iterator.h deleted file mode 100644 index c7870c46be..0000000000 --- a/include/grpc++/auth_property_iterator.h +++ /dev/null @@ -1,77 +0,0 @@ -/* - * - * Copyright 2015, Google Inc. - * All rights reserved. - * - * Redistribution and use in source and binary forms, with or without - * modification, are permitted provided that the following conditions are - * met: - * - * * Redistributions of source code must retain the above copyright - * notice, this list of conditions and the following disclaimer. - * * Redistributions in binary form must reproduce the above - * copyright notice, this list of conditions and the following disclaimer - * in the documentation and/or other materials provided with the - * distribution. - * * Neither the name of Google Inc. nor the names of its - * contributors may be used to endorse or promote products derived from - * this software without specific prior written permission. - * - * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS - * "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT - * LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR - * A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT - * OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, - * SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT - * LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, - * DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY - * THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT - * (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE - * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. - * - */ - -#ifndef GRPCXX_AUTH_PROPERTY_ITERATOR_H -#define GRPCXX_AUTH_PROPERTY_ITERATOR_H - -#include -#include - -#include - -struct grpc_auth_context; -struct grpc_auth_property; -struct grpc_auth_property_iterator; - -namespace grpc { -class SecureAuthContext; - -typedef std::pair AuthProperty; - -class AuthPropertyIterator - : public std::iterator { - public: - ~AuthPropertyIterator(); - AuthPropertyIterator& operator++(); - AuthPropertyIterator operator++(int); - bool operator==(const AuthPropertyIterator& rhs) const; - bool operator!=(const AuthPropertyIterator& rhs) const; - const AuthProperty operator*(); - - protected: - AuthPropertyIterator(); - AuthPropertyIterator(const grpc_auth_property* property, - const grpc_auth_property_iterator* iter); - private: - friend class SecureAuthContext; - const grpc_auth_property* property_; - // The following items form a grpc_auth_property_iterator. 
- const grpc_auth_context* ctx_; - size_t index_; - const char* name_; -}; - -} // namespace grpc - - #endif // GRPCXX_AUTH_PROPERTY_ITERATOR_H - diff --git a/src/cpp/common/auth_property_iterator.cc b/src/cpp/common/auth_property_iterator.cc index e706c6c921..ba88983515 100644 --- a/src/cpp/common/auth_property_iterator.cc +++ b/src/cpp/common/auth_property_iterator.cc @@ -31,7 +31,7 @@ * */ -#include +#include #include diff --git a/tools/doxygen/Doxyfile.c++ b/tools/doxygen/Doxyfile.c++ index eb14925fff..374125ad19 100644 --- a/tools/doxygen/Doxyfile.c++ +++ b/tools/doxygen/Doxyfile.c++ @@ -763,7 +763,6 @@ WARN_LOGFILE = INPUT = include/grpc++/async_generic_service.h \ include/grpc++/async_unary_call.h \ include/grpc++/auth_context.h \ -include/grpc++/auth_property_iterator.h \ include/grpc++/byte_buffer.h \ include/grpc++/channel_arguments.h \ include/grpc++/channel_interface.h \ diff --git a/tools/doxygen/Doxyfile.c++.internal b/tools/doxygen/Doxyfile.c++.internal index 3d6649bef6..c7f8e292bd 100644 --- a/tools/doxygen/Doxyfile.c++.internal +++ b/tools/doxygen/Doxyfile.c++.internal @@ -763,7 +763,6 @@ WARN_LOGFILE = INPUT = include/grpc++/async_generic_service.h \ include/grpc++/async_unary_call.h \ include/grpc++/auth_context.h \ -include/grpc++/auth_property_iterator.h \ include/grpc++/byte_buffer.h \ include/grpc++/channel_arguments.h \ include/grpc++/channel_interface.h \ diff --git a/tools/run_tests/sources_and_headers.json b/tools/run_tests/sources_and_headers.json index 5d23bf9e88..d2c3ec3add 100644 --- a/tools/run_tests/sources_and_headers.json +++ b/tools/run_tests/sources_and_headers.json @@ -13051,7 +13051,6 @@ "include/grpc++/async_generic_service.h", "include/grpc++/async_unary_call.h", "include/grpc++/auth_context.h", - "include/grpc++/auth_property_iterator.h", "include/grpc++/byte_buffer.h", "include/grpc++/channel_arguments.h", "include/grpc++/channel_interface.h", @@ -13102,7 +13101,6 @@ "include/grpc++/async_generic_service.h", "include/grpc++/async_unary_call.h", "include/grpc++/auth_context.h", - "include/grpc++/auth_property_iterator.h", "include/grpc++/byte_buffer.h", "include/grpc++/channel_arguments.h", "include/grpc++/channel_interface.h", @@ -13227,7 +13225,6 @@ "include/grpc++/async_generic_service.h", "include/grpc++/async_unary_call.h", "include/grpc++/auth_context.h", - "include/grpc++/auth_property_iterator.h", "include/grpc++/byte_buffer.h", "include/grpc++/channel_arguments.h", "include/grpc++/channel_interface.h", @@ -13275,7 +13272,6 @@ "include/grpc++/async_generic_service.h", "include/grpc++/async_unary_call.h", "include/grpc++/auth_context.h", - "include/grpc++/auth_property_iterator.h", "include/grpc++/byte_buffer.h", "include/grpc++/channel_arguments.h", "include/grpc++/channel_interface.h", diff --git a/vsprojects/grpc++/grpc++.vcxproj b/vsprojects/grpc++/grpc++.vcxproj index 58474511fc..929bc1500e 100644 --- a/vsprojects/grpc++/grpc++.vcxproj +++ b/vsprojects/grpc++/grpc++.vcxproj @@ -216,7 +216,6 @@ - diff --git a/vsprojects/grpc++/grpc++.vcxproj.filters b/vsprojects/grpc++/grpc++.vcxproj.filters index 2a8ee08b08..0408fb46a5 100644 --- a/vsprojects/grpc++/grpc++.vcxproj.filters +++ b/vsprojects/grpc++/grpc++.vcxproj.filters @@ -105,9 +105,6 @@ include\grpc++ - - include\grpc++ - include\grpc++ diff --git a/vsprojects/grpc++_unsecure/grpc++_unsecure.vcxproj b/vsprojects/grpc++_unsecure/grpc++_unsecure.vcxproj index 0d989c4a93..2ff252e04e 100644 --- a/vsprojects/grpc++_unsecure/grpc++_unsecure.vcxproj +++ 
b/vsprojects/grpc++_unsecure/grpc++_unsecure.vcxproj @@ -216,7 +216,6 @@ - diff --git a/vsprojects/grpc++_unsecure/grpc++_unsecure.vcxproj.filters b/vsprojects/grpc++_unsecure/grpc++_unsecure.vcxproj.filters index 71d42e5c6d..b4fae7741c 100644 --- a/vsprojects/grpc++_unsecure/grpc++_unsecure.vcxproj.filters +++ b/vsprojects/grpc++_unsecure/grpc++_unsecure.vcxproj.filters @@ -90,9 +90,6 @@ include\grpc++ - - include\grpc++ - include\grpc++ -- cgit v1.2.3 From 689b70596d8ecb89c0c69c114aec7fc3f8d7d6dc Mon Sep 17 00:00:00 2001 From: Donna Dionne Date: Wed, 12 Aug 2015 13:13:00 -0700 Subject: adding build.sh for go so that go can test with private changes and private docker images. fixing a go docker image build error. --- tools/dockerfile/grpc_go/Dockerfile | 2 ++ tools/dockerfile/grpc_go/build.sh | 35 +++++++++++++++++++++++++++++++++++ 2 files changed, 37 insertions(+) create mode 100755 tools/dockerfile/grpc_go/build.sh (limited to 'tools') diff --git a/tools/dockerfile/grpc_go/Dockerfile b/tools/dockerfile/grpc_go/Dockerfile index 06bb3e2d5e..7f3bd8719c 100644 --- a/tools/dockerfile/grpc_go/Dockerfile +++ b/tools/dockerfile/grpc_go/Dockerfile @@ -32,6 +32,8 @@ FROM golang:1.4 # Get the source from GitHub RUN go get google.golang.org/grpc +RUN go get golang.org/x/oauth2 +RUN go get google.golang.org/cloud # Add a service_account directory containing the auth creds file ADD service_account service_account diff --git a/tools/dockerfile/grpc_go/build.sh b/tools/dockerfile/grpc_go/build.sh new file mode 100755 index 0000000000..a3887c731b --- /dev/null +++ b/tools/dockerfile/grpc_go/build.sh @@ -0,0 +1,35 @@ +#!/bin/bash +# Copyright 2015, Google Inc. +# All rights reserved. +# +# Redistribution and use in source and binary forms, with or without +# modification, are permitted provided that the following conditions are +# met: +# +# * Redistributions of source code must retain the above copyright +# notice, this list of conditions and the following disclaimer. +# * Redistributions in binary form must reproduce the above +# copyright notice, this list of conditions and the following disclaimer +# in the documentation and/or other materials provided with the +# distribution. +# * Neither the name of Google Inc. nor the names of its +# contributors may be used to endorse or promote products derived from +# this software without specific prior written permission. +# +# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS +# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT +# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR +# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT +# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, +# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT +# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, +# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY +# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT +# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE +# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + +cp -R /var/local/git-clone/grpc-go/. 
/go/ +go get golang.org/x/oauth2 +go get google.golang.org/cloud +cd src/google.golang.org/grpc/interop/client && go install +cd src/google.golang.org/grpc/interop/server && go install -- cgit v1.2.3 From 0218dcae0eb1bc2866750bbdd6ffc5801ce9295d Mon Sep 17 00:00:00 2001 From: Donna Dionne Date: Wed, 12 Aug 2015 14:11:49 -0700 Subject: small fix for go docker private image building --- tools/dockerfile/grpc_go/build.sh | 1 - 1 file changed, 1 deletion(-) (limited to 'tools') diff --git a/tools/dockerfile/grpc_go/build.sh b/tools/dockerfile/grpc_go/build.sh index a3887c731b..254942fe01 100755 --- a/tools/dockerfile/grpc_go/build.sh +++ b/tools/dockerfile/grpc_go/build.sh @@ -32,4 +32,3 @@ cp -R /var/local/git-clone/grpc-go/. /go/ go get golang.org/x/oauth2 go get google.golang.org/cloud cd src/google.golang.org/grpc/interop/client && go install -cd src/google.golang.org/grpc/interop/server && go install -- cgit v1.2.3
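Taken together, the last two commits let the Go interop client be compiled from a private grpc-go checkout instead of the sources that go get pulls from GitHub, in line with the commit message's goal of testing private changes with private docker images. A rough sketch of how build.sh is meant to be driven is below; the grpc/go image tag, the host-side paths and the bash invocation are assumptions, since only the /var/local/git-clone/grpc-go source path and the /go GOPATH come from the script itself.

    #!/bin/bash
    # Sketch: building the Go interop image and compiling a private grpc-go
    # checkout inside it (assumed image tag and mount points).
    set -e

    # Build the image; the Dockerfile now also pulls the oauth2 and cloud
    # packages the interop client depends on.
    sudo docker build --no-cache -t grpc/go tools/dockerfile/grpc_go

    # Mount a private grpc-go checkout at the path build.sh copies from, and
    # mount build.sh itself, then run it so the local changes get installed.
    # build.sh's relative cd relies on the golang base image's /go workdir.
    sudo docker run \
      -v "$HOME/grpc-go:/var/local/git-clone/grpc-go" \
      -v "$PWD/tools/dockerfile/grpc_go/build.sh:/var/local/build.sh" \
      grpc/go /bin/bash -c "/var/local/build.sh"

    # After build.sh runs, go install leaves the client under /go/bin (the
    # binary name is assumed to be "client"), ready to be pointed at
    # grpc-test.sandbox.google.com like the other languages' clients.
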