author     thinkerou <thinkerou@gmail.com>    2016-05-12 14:03:00 +0800
committer  thinkerou <thinkerou@gmail.com>    2016-05-12 14:03:00 +0800
commit     e43e27de3c6da208903c06a36fc6c23debcd172c (patch)
tree       f0e6f9c90bc2475180f6229fb95a4dcc24f264c2 /src
parent     161b03f5cf8dbd057e27c263722c34c702b3dc92 (diff)
parent     1acbe3e8a8cfa8d35843b0007b9cd79d91c62c16 (diff)
Merge remote-tracking branch 'upstream/master'
Diffstat (limited to 'src')
-rw-r--r--  src/cpp/server/server.cc                                  19
-rw-r--r--  src/cpp/server/server_builder.cc                          36
-rw-r--r--  src/csharp/Grpc.Examples.Tests/MathClientServerTests.cs    2
-rw-r--r--  src/csharp/Grpc.Examples/MathExamples.cs                  38
-rw-r--r--  src/csharp/Grpc.Examples/MathServiceImpl.cs               29
-rw-r--r--  src/csharp/Grpc.IntegrationTesting/StressTestClient.cs     2
-rwxr-xr-x  src/node/tools/bin/protoc.js                               7
-rwxr-xr-x  src/node/tools/bin/protoc_plugin.js                        8
-rw-r--r--  src/proto/grpc/testing/echo_messages.proto                 7
-rw-r--r--  src/python/grpcio/tests/qps/benchmark_client.py           60
-rw-r--r--  src/python/grpcio/tests/qps/client_runner.py               2
-rw-r--r--  src/python/grpcio/tests/qps/worker_server.py               5
-rw-r--r--  src/ruby/lib/grpc/generic/client_stub.rb                   7
-rw-r--r--  src/ruby/tools/grpc-tools.gemspec                          2
14 files changed, 188 insertions(+), 36 deletions(-)
diff --git a/src/cpp/server/server.cc b/src/cpp/server/server.cc
index fafe31e84c..f955a31494 100644
--- a/src/cpp/server/server.cc
+++ b/src/cpp/server/server.cc
@@ -33,6 +33,7 @@
#include <grpc++/server.h>
+#include <sstream>
#include <utility>
#include <grpc++/completion_queue.h>
@@ -41,6 +42,7 @@
#include <grpc++/impl/grpc_library.h>
#include <grpc++/impl/method_handler_impl.h>
#include <grpc++/impl/rpc_service_method.h>
+#include <grpc++/impl/server_initializer.h>
#include <grpc++/impl/service_type.h>
#include <grpc++/security/server_credentials.h>
#include <grpc++/server_context.h>
@@ -284,7 +286,8 @@ Server::Server(ThreadPoolInterface* thread_pool, bool thread_pool_owned,
has_generic_service_(false),
server_(nullptr),
thread_pool_(thread_pool),
- thread_pool_owned_(thread_pool_owned) {
+ thread_pool_owned_(thread_pool_owned),
+ server_initializer_(new ServerInitializer(this)) {
g_gli_initializer.summon();
gpr_once_init(&g_once_init_callbacks, InitGlobalCallbacks);
global_callbacks_ = g_callbacks;
@@ -341,6 +344,7 @@ bool Server::RegisterService(const grpc::string* host, Service* service) {
"Can only register an asynchronous service against one server.");
service->server_ = this;
}
+ const char* method_name = nullptr;
for (auto it = service->methods_.begin(); it != service->methods_.end();
++it) {
if (it->get() == nullptr) { // Handled by generic service if any.
@@ -360,6 +364,17 @@ bool Server::RegisterService(const grpc::string* host, Service* service) {
} else {
sync_methods_->emplace_back(method, tag);
}
+ method_name = method->name();
+ }
+
+ // Parse service name.
+ if (method_name != nullptr) {
+ std::stringstream ss(method_name);
+ grpc::string service_name;
+ if (std::getline(ss, service_name, '/') &&
+ std::getline(ss, service_name, '/')) {
+ services_.push_back(service_name);
+ }
}
return true;
}
@@ -598,4 +613,6 @@ void Server::RunRpc() {
}
}
+ServerInitializer* Server::initializer() { return server_initializer_.get(); }
+
} // namespace grpc
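
The service-name parsing added to Server::RegisterService above relies on fully qualified gRPC method names of the form "/package.Service/Method": the first std::getline consumes the empty token before the leading '/', the second yields the service name. A standalone sketch of the same logic, with a hypothetical method name, for reference:

    #include <iostream>
    #include <sstream>
    #include <string>

    int main() {
      // Fully qualified gRPC method names look like "/package.Service/Method".
      const std::string method_name = "/grpc.testing.EchoTestService/Echo";
      std::stringstream ss(method_name);
      std::string service_name;
      // The first getline reads the empty token before the leading '/',
      // the second reads up to the next '/' and yields the service name.
      if (std::getline(ss, service_name, '/') &&
          std::getline(ss, service_name, '/')) {
        std::cout << "service: " << service_name << std::endl;
      }
      return 0;
    }

With the name above this prints "service: grpc.testing.EchoTestService", which is what gets pushed into services_.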
diff --git a/src/cpp/server/server_builder.cc b/src/cpp/server/server_builder.cc
index 68cc38258c..9658a56745 100644
--- a/src/cpp/server/server_builder.cc
+++ b/src/cpp/server/server_builder.cc
@@ -41,9 +41,23 @@
namespace grpc {
+static std::vector<std::unique_ptr<ServerBuilderPlugin> (*)()>*
+ g_plugin_factory_list;
+static gpr_once once_init_plugin_list = GPR_ONCE_INIT;
+
+static void do_plugin_list_init(void) {
+ g_plugin_factory_list =
+ new std::vector<std::unique_ptr<ServerBuilderPlugin> (*)()>();
+}
+
ServerBuilder::ServerBuilder()
: max_message_size_(-1), generic_service_(nullptr) {
grpc_compression_options_init(&compression_options_);
+ gpr_once_init(&once_init_plugin_list, do_plugin_list_init);
+ for (auto factory : (*g_plugin_factory_list)) {
+ std::unique_ptr<ServerBuilderPlugin> plugin = factory();
+ plugins_[plugin->name()] = std::move(plugin);
+ }
}
std::unique_ptr<ServerCompletionQueue> ServerBuilder::AddCompletionQueue() {
@@ -96,6 +110,15 @@ std::unique_ptr<Server> ServerBuilder::BuildAndStart() {
ChannelArguments args;
for (auto option = options_.begin(); option != options_.end(); ++option) {
(*option)->UpdateArguments(&args);
+ (*option)->UpdatePlugins(&plugins_);
+ }
+ if (thread_pool == nullptr) {
+ for (auto plugin = plugins_.begin(); plugin != plugins_.end(); plugin++) {
+ if ((*plugin).second->has_sync_methods()) {
+ thread_pool.reset(CreateDefaultThreadPool());
+ break;
+ }
+ }
}
if (max_message_size_ > 0) {
args.SetInt(GRPC_ARG_MAX_MESSAGE_LENGTH, max_message_size_);
@@ -104,6 +127,7 @@ std::unique_ptr<Server> ServerBuilder::BuildAndStart() {
compression_options_.enabled_algorithms_bitset);
std::unique_ptr<Server> server(
new Server(thread_pool.release(), true, max_message_size_, &args));
+ ServerInitializer* initializer = server->initializer();
for (auto cq = cqs_.begin(); cq != cqs_.end(); ++cq) {
grpc_server_register_completion_queue(server->server_, (*cq)->cq(),
nullptr);
@@ -114,6 +138,9 @@ std::unique_ptr<Server> ServerBuilder::BuildAndStart() {
return nullptr;
}
}
+ for (auto plugin = plugins_.begin(); plugin != plugins_.end(); plugin++) {
+ (*plugin).second->InitServer(initializer);
+ }
if (generic_service_) {
server->RegisterAsyncGenericService(generic_service_);
} else {
@@ -137,7 +164,16 @@ std::unique_ptr<Server> ServerBuilder::BuildAndStart() {
if (!server->Start(cqs_data, cqs_.size())) {
return nullptr;
}
+ for (auto plugin = plugins_.begin(); plugin != plugins_.end(); plugin++) {
+ (*plugin).second->Finish(initializer);
+ }
return server;
}
+void ServerBuilder::InternalAddPluginFactory(
+ std::unique_ptr<ServerBuilderPlugin> (*CreatePlugin)()) {
+ gpr_once_init(&once_init_plugin_list, do_plugin_list_init);
+ (*g_plugin_factory_list).push_back(CreatePlugin);
+}
+
} // namespace grpc
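
The plugin support above follows a static-registration pattern: factories are appended to a lazily created global list, and every ServerBuilder instantiates one plugin per factory in its constructor, then drives InitServer before start-up and Finish after. A simplified sketch of that pattern outside of gRPC, using std::call_once in place of gpr_once (all names below are illustrative, not the gRPC API):

    #include <iostream>
    #include <map>
    #include <memory>
    #include <mutex>
    #include <string>
    #include <vector>

    struct Plugin {
      virtual ~Plugin() {}
      virtual std::string name() const = 0;
      virtual void InitServer() = 0;  // called before the server starts
      virtual void Finish() = 0;      // called after the server starts
    };

    typedef std::unique_ptr<Plugin> (*PluginFactory)();

    static std::vector<PluginFactory>* g_factories;
    static std::once_flag g_factories_once;
    static void InitFactoryList() { g_factories = new std::vector<PluginFactory>(); }

    // Analogue of InternalAddPluginFactory: the list is created on first use.
    void AddPluginFactory(PluginFactory factory) {
      std::call_once(g_factories_once, InitFactoryList);
      g_factories->push_back(factory);
    }

    class Builder {
     public:
      Builder() {
        std::call_once(g_factories_once, InitFactoryList);
        for (PluginFactory factory : *g_factories) {
          std::unique_ptr<Plugin> plugin = factory();
          plugins_[plugin->name()] = std::move(plugin);
        }
      }
      void BuildAndStart() {
        for (auto& p : plugins_) p.second->InitServer();
        // ... create and start the server here ...
        for (auto& p : plugins_) p.second->Finish();
      }
     private:
      std::map<std::string, std::unique_ptr<Plugin>> plugins_;
    };

    struct LoggingPlugin : Plugin {
      std::string name() const override { return "logging"; }
      void InitServer() override { std::cout << "logging: init\n"; }
      void Finish() override { std::cout << "logging: finish\n"; }
    };

    int main() {
      AddPluginFactory([]() -> std::unique_ptr<Plugin> {
        return std::unique_ptr<Plugin>(new LoggingPlugin);
      });
      Builder builder;
      builder.BuildAndStart();
      return 0;
    }

Registering the factory rather than a plugin instance is what lets every Builder get its own plugin objects while keeping registration a one-time, process-wide step.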
diff --git a/src/csharp/Grpc.Examples.Tests/MathClientServerTests.cs b/src/csharp/Grpc.Examples.Tests/MathClientServerTests.cs
index 875202b950..ee11105efe 100644
--- a/src/csharp/Grpc.Examples.Tests/MathClientServerTests.cs
+++ b/src/csharp/Grpc.Examples.Tests/MathClientServerTests.cs
@@ -92,7 +92,7 @@ namespace Math.Tests
public void DivByZero()
{
var ex = Assert.Throws<RpcException>(() => client.Div(new DivArgs { Dividend = 0, Divisor = 0 }));
- Assert.AreEqual(StatusCode.Unknown, ex.Status.StatusCode);
+ Assert.AreEqual(StatusCode.InvalidArgument, ex.Status.StatusCode);
}
[Test]
diff --git a/src/csharp/Grpc.Examples/MathExamples.cs b/src/csharp/Grpc.Examples/MathExamples.cs
index 6075420974..d260830b94 100644
--- a/src/csharp/Grpc.Examples/MathExamples.cs
+++ b/src/csharp/Grpc.Examples/MathExamples.cs
@@ -32,6 +32,7 @@
using System;
using System.Collections.Generic;
using System.Threading.Tasks;
+using Grpc.Core;
using Grpc.Core.Utils;
namespace Math
@@ -109,5 +110,42 @@ namespace Math
DivReply result = await client.DivAsync(new DivArgs { Dividend = sum.Num_, Divisor = numbers.Count });
Console.WriteLine("Avg Result: " + result);
}
+
+ /// <summary>
+ /// Shows how to handle a call ending with non-OK status.
+ /// </summary>
+ public static async Task HandleErrorExample(Math.MathClient client)
+ {
+ try
+ {
+ DivReply result = await client.DivAsync(new DivArgs { Dividend = 5, Divisor = 0 });
+ }
+ catch (RpcException ex)
+ {
+ Console.WriteLine(string.Format("RPC ended with status {0}", ex.Status));
+ }
+ }
+
+ /// <summary>
+ /// Shows how to send request headers and how to access response headers
+ /// and response trailers.
+ /// </summary>
+ public static async Task MetadataExample(Math.MathClient client)
+ {
+ var requestHeaders = new Metadata
+ {
+ { "custom-header", "custom-value" }
+ };
+
+ var call = client.DivAsync(new DivArgs { Dividend = 5, Divisor = 0 }, requestHeaders);
+
+ // Get response headers
+ Metadata responseHeaders = await call.ResponseHeadersAsync;
+
+ var result = await call;
+
+ // Get response trailers after the call has finished.
+ Metadata responseTrailers = call.GetTrailers();
+ }
}
}
diff --git a/src/csharp/Grpc.Examples/MathServiceImpl.cs b/src/csharp/Grpc.Examples/MathServiceImpl.cs
index 79c56e57a8..a28020f62f 100644
--- a/src/csharp/Grpc.Examples/MathServiceImpl.cs
+++ b/src/csharp/Grpc.Examples/MathServiceImpl.cs
@@ -52,23 +52,15 @@ namespace Math
public override async Task Fib(FibArgs request, IServerStreamWriter<Num> responseStream, ServerCallContext context)
{
- if (request.Limit <= 0)
- {
- // keep streaming the sequence until cancelled.
- IEnumerator<Num> fibEnumerator = FibInternal(long.MaxValue).GetEnumerator();
- while (!context.CancellationToken.IsCancellationRequested && fibEnumerator.MoveNext())
- {
- await responseStream.WriteAsync(fibEnumerator.Current);
- await Task.Delay(100);
- }
- }
+ var limit = request.Limit > 0 ? request.Limit : long.MaxValue;
+ var fibEnumerator = FibInternal(limit).GetEnumerator();
- if (request.Limit > 0)
+ // Keep streaming the sequence until the call is cancelled.
+ // Use the CancellationToken from ServerCallContext to detect cancellation.
+ while (!context.CancellationToken.IsCancellationRequested && fibEnumerator.MoveNext())
{
- foreach (var num in FibInternal(request.Limit))
- {
- await responseStream.WriteAsync(num);
- }
+ await responseStream.WriteAsync(fibEnumerator.Current);
+ await Task.Delay(100);
}
}
@@ -89,6 +81,13 @@ namespace Math
static DivReply DivInternal(DivArgs args)
{
+ if (args.Divisor == 0)
+ {
+ // One can finish the RPC with a non-OK status by throwing an RpcException instance.
+ // Alternatively, the resulting status can be set using ServerCallContext.Status.
+ throw new RpcException(new Status(StatusCode.InvalidArgument, "Division by zero"));
+ }
+
long quotient = args.Dividend / args.Divisor;
long remainder = args.Dividend % args.Divisor;
return new DivReply { Quotient = quotient, Remainder = remainder };
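
For comparison with the RpcException pattern above: a synchronous gRPC C++ handler reports the same condition by returning a non-OK grpc::Status instead of throwing. A minimal hedged sketch (a free function standing in for a real generated-service override):

    #include <grpc++/grpc++.h>

    // Illustrative only: in generated code this logic would live in an override
    // of the synchronous service method, and the returned Status ends the RPC.
    grpc::Status Div(long dividend, long divisor, long* quotient, long* remainder) {
      if (divisor == 0) {
        // Equivalent of throwing RpcException(StatusCode.InvalidArgument, ...) in C#.
        return grpc::Status(grpc::StatusCode::INVALID_ARGUMENT, "Division by zero");
      }
      *quotient = dividend / divisor;
      *remainder = dividend % divisor;
      return grpc::Status::OK;
    }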
diff --git a/src/csharp/Grpc.IntegrationTesting/StressTestClient.cs b/src/csharp/Grpc.IntegrationTesting/StressTestClient.cs
index 8db691cb04..4d6ca7ece5 100644
--- a/src/csharp/Grpc.IntegrationTesting/StressTestClient.cs
+++ b/src/csharp/Grpc.IntegrationTesting/StressTestClient.cs
@@ -311,7 +311,7 @@ namespace Grpc.IntegrationTesting
var snapshot = histogram.GetSnapshot(true);
var elapsedSnapshot = wallClockStopwatch.GetElapsedSnapshot(true);
- return (long) (snapshot.Count / elapsedSnapshot.Seconds);
+ return (long) (snapshot.Count / elapsedSnapshot.TotalSeconds);
}
}
}
diff --git a/src/node/tools/bin/protoc.js b/src/node/tools/bin/protoc.js
index 4d50c94b0f..53fc5dc428 100755
--- a/src/node/tools/bin/protoc.js
+++ b/src/node/tools/bin/protoc.js
@@ -47,10 +47,11 @@ var exe_ext = process.platform === 'win32' ? '.exe' : '';
var protoc = path.resolve(__dirname, 'protoc' + exe_ext);
-execFile(protoc, process.argv.slice(2), function(error, stdout, stderr) {
+var child_process = execFile(protoc, process.argv.slice(2), function(error, stdout, stderr) {
if (error) {
throw error;
}
- console.log(stdout);
- console.log(stderr);
});
+
+child_process.stdout.pipe(process.stdout);
+child_process.stderr.pipe(process.stderr);
diff --git a/src/node/tools/bin/protoc_plugin.js b/src/node/tools/bin/protoc_plugin.js
index 281ec0d85e..857882e1c3 100755
--- a/src/node/tools/bin/protoc_plugin.js
+++ b/src/node/tools/bin/protoc_plugin.js
@@ -47,10 +47,12 @@ var exe_ext = process.platform === 'win32' ? '.exe' : '';
var plugin = path.resolve(__dirname, 'grpc_node_plugin' + exe_ext);
-execFile(plugin, process.argv.slice(2), function(error, stdout, stderr) {
+var child_process = execFile(plugin, process.argv.slice(2), {encoding: 'buffer'}, function(error, stdout, stderr) {
if (error) {
throw error;
}
- console.log(stdout);
- console.log(stderr);
});
+
+process.stdin.pipe(child_process.stdin);
+child_process.stdout.pipe(process.stdout);
+child_process.stderr.pipe(process.stderr);
diff --git a/src/proto/grpc/testing/echo_messages.proto b/src/proto/grpc/testing/echo_messages.proto
index 1be1966f10..b405acf043 100644
--- a/src/proto/grpc/testing/echo_messages.proto
+++ b/src/proto/grpc/testing/echo_messages.proto
@@ -32,6 +32,12 @@ syntax = "proto3";
package grpc.testing;
+// Message to be echoed back, serialized in a response trailer.
+message DebugInfo {
+ repeated string stack_entries = 1;
+ string detail = 2;
+}
+
message RequestParams {
bool echo_deadline = 1;
int32 client_cancel_after_us = 2;
@@ -43,6 +49,7 @@ message RequestParams {
string expected_client_identity = 8; // will force check_auth_context.
bool skip_cancelled_check = 9;
string expected_transport_security_type = 10;
+ DebugInfo debug_info = 11;
}
message EchoRequest {
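
The comment on DebugInfo says it is meant to be echoed back serialized in a trailer. One way a C++ handler could do that, sketched under the assumption that the generated header for this proto is available (the metadata key below is hypothetical; binary-valued keys must end in "-bin"):

    #include <grpc++/grpc++.h>
    #include "src/proto/grpc/testing/echo_messages.pb.h"

    // Illustrative helper: serialize a DebugInfo proto into the trailing metadata.
    void AttachDebugInfo(grpc::ServerContext* context, const std::string& detail) {
      grpc::testing::DebugInfo info;
      info.add_stack_entries("handler_frame");  // hypothetical stack entry
      info.set_detail(detail);
      // Binary metadata values require a key with the "-bin" suffix.
      context->AddTrailingMetadata("debug-info-bin", info.SerializeAsString());
    }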
diff --git a/src/python/grpcio/tests/qps/benchmark_client.py b/src/python/grpcio/tests/qps/benchmark_client.py
index eed0b0c6da..b372ea01ad 100644
--- a/src/python/grpcio/tests/qps/benchmark_client.py
+++ b/src/python/grpcio/tests/qps/benchmark_client.py
@@ -39,6 +39,7 @@ except ImportError:
from concurrent import futures
from grpc.beta import implementations
+from grpc.framework.interfaces.face import face
from src.proto.grpc.testing import messages_pb2
from src.proto.grpc.testing import services_pb2
from tests.unit import resources
@@ -141,10 +142,10 @@ class UnaryAsyncBenchmarkClient(BenchmarkClient):
self._stub = None
-class StreamingAsyncBenchmarkClient(BenchmarkClient):
+class StreamingSyncBenchmarkClient(BenchmarkClient):
def __init__(self, server, config, hist):
- super(StreamingAsyncBenchmarkClient, self).__init__(server, config, hist)
+ super(StreamingSyncBenchmarkClient, self).__init__(server, config, hist)
self._is_streaming = False
self._pool = futures.ThreadPoolExecutor(max_workers=1)
# Use a thread-safe queue to put requests on the stream
@@ -167,12 +168,12 @@ class StreamingAsyncBenchmarkClient(BenchmarkClient):
def _request_stream(self):
self._is_streaming = True
if self._generic:
- response_stream = self._stub.inline_stream_stream(
- 'grpc.testing.BenchmarkService', 'StreamingCall',
- self._request_generator(), _TIMEOUT)
+ stream_callable = self._stub.stream_stream(
+ 'grpc.testing.BenchmarkService', 'StreamingCall')
else:
- response_stream = self._stub.StreamingCall(self._request_generator(),
- _TIMEOUT)
+ stream_callable = self._stub.StreamingCall
+
+ response_stream = stream_callable(self._request_generator(), _TIMEOUT)
for _ in response_stream:
end_time = time.time()
self._handle_response(end_time - self._send_time_queue.get_nowait())
@@ -184,3 +185,48 @@ class StreamingAsyncBenchmarkClient(BenchmarkClient):
yield request
except queue.Empty:
pass
+
+
+class AsyncReceiver(face.ResponseReceiver):
+ """Receiver for async stream responses."""
+
+ def __init__(self, send_time_queue, response_handler):
+ self._send_time_queue = send_time_queue
+ self._response_handler = response_handler
+
+ def initial_metadata(self, initial_metadata):
+ pass
+
+ def response(self, response):
+ end_time = time.time()
+ self._response_handler(end_time - self._send_time_queue.get_nowait())
+
+ def complete(self, terminal_metadata, code, details):
+ pass
+
+
+class StreamingAsyncBenchmarkClient(BenchmarkClient):
+
+ def __init__(self, server, config, hist):
+ super(StreamingAsyncBenchmarkClient, self).__init__(server, config, hist)
+ self._send_time_queue = queue.Queue()
+ self._receiver = AsyncReceiver(self._send_time_queue, self._handle_response)
+ self._rendezvous = None
+
+ def send_request(self):
+ if self._rendezvous is not None:
+ self._send_time_queue.put(time.time())
+ self._rendezvous.consume(self._request)
+
+ def start(self):
+ if self._generic:
+ stream_callable = self._stub.stream_stream(
+ 'grpc.testing.BenchmarkService', 'StreamingCall')
+ else:
+ stream_callable = self._stub.StreamingCall
+ self._rendezvous = stream_callable.event(
+ self._receiver, lambda *args: None, _TIMEOUT)
+
+ def stop(self):
+ self._rendezvous.terminate()
+ self._rendezvous = None
diff --git a/src/python/grpcio/tests/qps/client_runner.py b/src/python/grpcio/tests/qps/client_runner.py
index a36c30ccc0..1ede7d2af1 100644
--- a/src/python/grpcio/tests/qps/client_runner.py
+++ b/src/python/grpcio/tests/qps/client_runner.py
@@ -89,9 +89,9 @@ class ClosedLoopClientRunner(ClientRunner):
def start(self):
self._is_running = True
+ self._client.start()
for _ in xrange(self._request_count):
self._client.send_request()
- self._client.start()
def stop(self):
self._is_running = False
diff --git a/src/python/grpcio/tests/qps/worker_server.py b/src/python/grpcio/tests/qps/worker_server.py
index 0b3acc14e7..1f9af5482c 100644
--- a/src/python/grpcio/tests/qps/worker_server.py
+++ b/src/python/grpcio/tests/qps/worker_server.py
@@ -146,8 +146,9 @@ class WorkerServer(services_pb2.BetaWorkerServiceServicer):
if config.rpc_type == control_pb2.UNARY:
client = benchmark_client.UnarySyncBenchmarkClient(
server, config, qps_data)
- else:
- raise Exception('STREAMING SYNC client not supported')
+ elif config.rpc_type == control_pb2.STREAMING:
+ client = benchmark_client.StreamingSyncBenchmarkClient(
+ server, config, qps_data)
elif config.client_type == control_pb2.ASYNC_CLIENT:
if config.rpc_type == control_pb2.UNARY:
client = benchmark_client.UnaryAsyncBenchmarkClient(
diff --git a/src/ruby/lib/grpc/generic/client_stub.rb b/src/ruby/lib/grpc/generic/client_stub.rb
index 68e167a69f..12946fe819 100644
--- a/src/ruby/lib/grpc/generic/client_stub.rb
+++ b/src/ruby/lib/grpc/generic/client_stub.rb
@@ -49,7 +49,12 @@ module GRPC
fail(TypeError, '!Channel') unless alt_chan.is_a?(Core::Channel)
return alt_chan
end
- kw['grpc.primary_user_agent'] = "grpc-ruby/#{VERSION}"
+ if kw['grpc.primary_user_agent'].nil?
+ kw['grpc.primary_user_agent'] = ''
+ else
+ kw['grpc.primary_user_agent'] += ' '
+ end
+ kw['grpc.primary_user_agent'] += "grpc-ruby/#{VERSION}"
unless creds.is_a?(Core::ChannelCredentials) || creds.is_a?(Symbol)
fail(TypeError, '!ChannelCredentials or Symbol')
end
diff --git a/src/ruby/tools/grpc-tools.gemspec b/src/ruby/tools/grpc-tools.gemspec
index af904de4a9..9fa4b66392 100644
--- a/src/ruby/tools/grpc-tools.gemspec
+++ b/src/ruby/tools/grpc-tools.gemspec
@@ -18,5 +18,5 @@ Gem::Specification.new do |s|
s.platform = Gem::Platform::RUBY
- s.executables = %w( protoc.rb protoc_grpc_ruby_plugin.rb )
+ s.executables = %w( grpc_tools_ruby_protoc.rb grpc_tools_ruby_protoc_plugin.rb )
end