author    murgatroid99 <mlumish@google.com>  2016-05-16 12:21:39 -0700
committer murgatroid99 <mlumish@google.com>  2016-05-16 12:21:39 -0700
commit    b19f181f3e01f346c7b760d75bfe58e53561ddc1 (patch)
tree      f147393ba7fdc381825f5642afcdf61a865e9ad1 /src/ruby/lib
parent    a8663fa55433732ea0f6e63c2fd586db0ea6f73f (diff)
Avoid using unspecified keyword args where possible
Diffstat (limited to 'src/ruby/lib')
-rw-r--r--  src/ruby/lib/grpc/errors.rb                5
-rw-r--r--  src/ruby/lib/grpc/generic/active_call.rb  65
-rw-r--r--  src/ruby/lib/grpc/generic/client_stub.rb  85
-rw-r--r--  src/ruby/lib/grpc/generic/rpc_desc.rb      8
-rw-r--r--  src/ruby/lib/grpc/generic/rpc_server.rb   19
-rw-r--r--  src/ruby/lib/grpc/generic/service.rb      16
6 files changed, 80 insertions(+), 118 deletions(-)
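The pattern applied throughout is the same: a splatted **kw parameter that silently doubled as metadata (or channel/server arguments) becomes an explicit Hash parameter, either positional or keyword. A minimal sketch of the before/after calling convention; old_rpc_call and new_rpc_call are illustrative names only, not part of the library:

  # before: stray keywords were silently collected and treated as metadata
  def old_rpc_call(req, **kw)
    puts "sending #{req} with metadata #{kw.inspect}"   # kw is whatever was left over
  end
  old_rpc_call('ping', x_user: 'me')                    # only Symbol keys fit this shape

  # after: metadata is an explicit, documented parameter
  def new_rpc_call(req, metadata: {})
    puts "sending #{req} with metadata #{metadata.inspect}"
  end
  new_rpc_call('ping', metadata: { 'x-user' => 'me' })  # any key type, and the intent is clear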
diff --git a/src/ruby/lib/grpc/errors.rb b/src/ruby/lib/grpc/errors.rb
index a1dd1e3e9d..23b2bb7e12 100644
--- a/src/ruby/lib/grpc/errors.rb
+++ b/src/ruby/lib/grpc/errors.rb
@@ -40,11 +40,12 @@ module GRPC
# @param code [Numeric] the status code
# @param details [String] the details of the exception
- def initialize(code, details = 'unknown cause', **kw)
+ # @param metadata [Hash] the error's metadata
+ def initialize(code, details = 'unknown cause', metadata = {})
super("#{code}:#{details}")
@code = code
@details = details
- @metadata = kw
+ @metadata = metadata
end
# Converts the exception to a GRPC::Status for use in the networking
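With this change, GRPC::BadStatus carries its metadata as an ordinary third positional argument instead of splatted keywords. A hedged illustration; the status code, message, and trailer values are made up:

  # raise an error whose metadata is intended to reach the client as trailing metadata
  fail GRPC::BadStatus.new(GRPC::Core::StatusCodes::NOT_FOUND,
                           'resource missing',
                           'resource-id' => '42')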
diff --git a/src/ruby/lib/grpc/generic/active_call.rb b/src/ruby/lib/grpc/generic/active_call.rb
index fd20a86144..9dc06e6b50 100644
--- a/src/ruby/lib/grpc/generic/active_call.rb
+++ b/src/ruby/lib/grpc/generic/active_call.rb
@@ -45,7 +45,7 @@ class Struct
# raise BadStatus, propagating the metadata if present.
md = status.metadata
with_sym_keys = Hash[md.each_pair.collect { |x, y| [x.to_sym, y] }]
- fail GRPC::BadStatus.new(status.code, status.details, **with_sym_keys)
+ fail GRPC::BadStatus.new(status.code, status.details, with_sym_keys)
end
status
end
@@ -77,14 +77,15 @@ module GRPC
#
# @param call [Call] a call on which to start an invocation
# @param q [CompletionQueue] the completion queue
- def self.client_invoke(call, q, **kw)
+ # @param metadata [Hash] the metadata
+ def self.client_invoke(call, q, metadata = {})
fail(TypeError, '!Core::Call') unless call.is_a? Core::Call
unless q.is_a? Core::CompletionQueue
fail(TypeError, '!Core::CompletionQueue')
end
metadata_tag = Object.new
call.run_batch(q, metadata_tag, INFINITE_FUTURE,
- SEND_INITIAL_METADATA => kw)
+ SEND_INITIAL_METADATA => metadata)
metadata_tag
end
@@ -216,13 +217,12 @@ module GRPC
# @param details [String] details
# @param assert_finished [true, false] when true(default), waits for
# FINISHED.
- #
- # == Keyword Arguments ==
- # any keyword arguments are treated as metadata to be sent to the server
- # if a keyword value is a list, multiple metadata for it's key are sent
- def send_status(code = OK, details = '', assert_finished = false, **kw)
+ # @param metadata [Hash] metadata to send to the server. If a value is a
+ # list, multiple metadata for its key are sent
+ def send_status(code = OK, details = '', assert_finished = false,
+ metadata: {})
ops = {
- SEND_STATUS_FROM_SERVER => Struct::Status.new(code, details, kw)
+ SEND_STATUS_FROM_SERVER => Struct::Status.new(code, details, metadata)
}
ops[RECV_CLOSE_ON_SERVER] = nil if assert_finished
@call.run_batch(@cq, self, INFINITE_FUTURE, ops)
@@ -316,14 +316,12 @@ module GRPC
# request_response sends a request to a GRPC server, and returns the
# response.
#
- # == Keyword Arguments ==
- # any keyword arguments are treated as metadata to be sent to the server
- # if a keyword value is a list, multiple metadata for it's key are sent
- #
# @param req [Object] the request sent to the server
+ # @param metadata [Hash] metadata to be sent to the server. If a value is
+ # a list, multiple metadata for its key are sent
# @return [Object] the response received from the server
- def request_response(req, **kw)
- start_call(**kw) unless @started
+ def request_response(req, metadata: {})
+ start_call(metadata) unless @started
remote_send(req)
writes_done(false)
response = remote_read
@@ -342,14 +340,12 @@ module GRPC
# array of marshallable objects; in typical case it will be an Enumerable
# that allows dynamic construction of the marshallable objects.
#
- # == Keyword Arguments ==
- # any keyword arguments are treated as metadata to be sent to the server
- # if a keyword value is a list, multiple metadata for it's key are sent
- #
# @param requests [Object] an Enumerable of requests to send
+ # @param metadata [Hash] metadata to be sent to the server. If a value is
+ # a list, multiple metadata for its key are sent
# @return [Object] the response received from the server
- def client_streamer(requests, **kw)
- start_call(**kw) unless @started
+ def client_streamer(requests, metadata: {})
+ start_call(metadata) unless @started
requests.each { |r| remote_send(r) }
writes_done(false)
response = remote_read
@@ -370,15 +366,12 @@ module GRPC
# it is executed with each response as the argument and no result is
# returned.
#
- # == Keyword Arguments ==
- # any keyword arguments are treated as metadata to be sent to the server
- # if a keyword value is a list, multiple metadata for it's key are sent
- # any keyword arguments are treated as metadata to be sent to the server.
- #
# @param req [Object] the request sent to the server
+ # @param metadata [Hash] metadata to be sent to the server. If a value is
+ # a list, multiple metadata for its key are sent
# @return [Enumerator|nil] a response Enumerator
- def server_streamer(req, **kw)
- start_call(**kw) unless @started
+ def server_streamer(req, metadata: {})
+ start_call(metadata) unless @started
remote_send(req)
writes_done(false)
replies = enum_for(:each_remote_read_then_finish)
@@ -412,14 +405,12 @@ module GRPC
# the_call#writes_done has been called, otherwise the block will loop
# forever.
#
- # == Keyword Arguments ==
- # any keyword arguments are treated as metadata to be sent to the server
- # if a keyword value is a list, multiple metadata for it's key are sent
- #
# @param requests [Object] an Enumerable of requests to send
+ # @param metadata [Hash] metadata to be sent to the server. If a value is
+ # a list, multiple metadata for its key are sent
# @return [Enumerator, nil] a response Enumerator
- def bidi_streamer(requests, **kw, &blk)
- start_call(**kw) unless @started
+ def bidi_streamer(requests, metadata: {}, &blk)
+ start_call(metadata) unless @started
bd = BidiCall.new(@call, @cq, @marshal, @unmarshal,
metadata_tag: @metadata_tag)
@metadata_tag = nil # run_on_client ensures metadata is read
@@ -458,9 +449,11 @@ module GRPC
private
# Starts the call if not already started
- def start_call(**kw)
+ # @param metadata [Hash] metadata to be sent to the server. If a value is
+ # a list, multiple metadata for its key are sent
+ def start_call(metadata = {})
return if @started
- @metadata_tag = ActiveCall.client_invoke(@call, @cq, **kw)
+ @metadata_tag = ActiveCall.client_invoke(@call, @cq, metadata)
@started = true
end
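At the ActiveCall level the client-side entry points now spell metadata out as a metadata: keyword, and send_status takes it the same way on the server side. A rough sketch of the new call shapes; active_call, req, and the header values are placeholders for a real call object and message:

  # client side: metadata rides along as a named keyword argument
  resp = active_call.request_response(req, metadata: { 'x-request-id' => 'abc' })

  # server side: trailing metadata is also passed under the metadata: keyword
  active_call.send_status(GRPC::Core::StatusCodes::OK, 'done', true,
                          metadata: { 'elapsed-ms' => '12' })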
diff --git a/src/ruby/lib/grpc/generic/client_stub.rb b/src/ruby/lib/grpc/generic/client_stub.rb
index 12946fe819..cddca13d17 100644
--- a/src/ruby/lib/grpc/generic/client_stub.rb
+++ b/src/ruby/lib/grpc/generic/client_stub.rb
@@ -44,21 +44,21 @@ module GRPC
# setup_channel is used by #initialize to construct a channel from its
# arguments.
- def self.setup_channel(alt_chan, host, creds, **kw)
+ def self.setup_channel(alt_chan, host, creds, channel_args = {})
unless alt_chan.nil?
fail(TypeError, '!Channel') unless alt_chan.is_a?(Core::Channel)
return alt_chan
end
- if kw['grpc.primary_user_agent'].nil?
- kw['grpc.primary_user_agent'] = ''
+ if channel_args['grpc.primary_user_agent'].nil?
+ channel_args['grpc.primary_user_agent'] = ''
else
- kw['grpc.primary_user_agent'] += ' '
+ channel_args['grpc.primary_user_agent'] += ' '
end
- kw['grpc.primary_user_agent'] += "grpc-ruby/#{VERSION}"
+ channel_args['grpc.primary_user_agent'] += "grpc-ruby/#{VERSION}"
unless creds.is_a?(Core::ChannelCredentials) || creds.is_a?(Symbol)
fail(TypeError, '!ChannelCredentials or Symbol')
end
- Core::Channel.new(host, kw, creds)
+ Core::Channel.new(host, channel_args, creds)
end
# Allows users of the stub to modify the propagate mask.
@@ -96,15 +96,16 @@ module GRPC
# :this_channel_is_insecure
# @param channel_override [Core::Channel] a pre-created channel
# @param timeout [Number] the default timeout to use in requests
- # @param kw [KeywordArgs]the channel arguments
+ # @param channel_args [Hash] the channel arguments
def initialize(host, q, creds,
channel_override: nil,
timeout: nil,
propagate_mask: nil,
- **kw)
+ channel_args: {})
fail(TypeError, '!CompletionQueue') unless q.is_a?(Core::CompletionQueue)
- @ch = ClientStub.setup_channel(channel_override, host, creds, **kw)
- alt_host = kw[Core::Channel::SSL_TARGET]
+ @ch = ClientStub.setup_channel(channel_override, host, creds,
+ channel_args)
+ alt_host = channel_args[Core::Channel::SSL_TARGET]
@host = alt_host.nil? ? host : alt_host
@propagate_mask = propagate_mask
@timeout = timeout.nil? ? DEFAULT_TIMEOUT : timeout
@@ -135,42 +136,35 @@ module GRPC
# If return_op is true, the call returns an Operation, calling execute
# on the Operation returns the response.
#
- # == Keyword Args ==
- #
- # Unspecified keyword arguments are treated as metadata to be sent to the
- # server.
- #
# @param method [String] the RPC method to call on the GRPC server
# @param req [Object] the request sent to the server
# @param marshal [Function] f(obj)->string that marshals requests
# @param unmarshal [Function] f(string)->obj that unmarshals responses
- # @param timeout [Numeric] (optional) the max completion time in seconds
# @param deadline [Time] (optional) the time the request should complete
+ # @param return_op [true|false] return an Operation if true
# @param parent [Core::Call] a prior call whose reserved metadata
# will be propagated by this one.
# @param credentials [Core::CallCredentials] credentials to use when making
# the call
- # @param return_op [true|false] return an Operation if true
+ # @param metadata [Hash] metadata to be sent to the server
# @return [Object] the response received from the server
def request_response(method, req, marshal, unmarshal,
deadline: nil,
- timeout: nil,
return_op: false,
parent: nil,
credentials: nil,
- **kw)
+ metadata: {})
c = new_active_call(method, marshal, unmarshal,
deadline: deadline,
- timeout: timeout,
parent: parent,
credentials: credentials)
- return c.request_response(req, **kw) unless return_op
+ return c.request_response(req, metadata: metadata) unless return_op
# return the operation view of the active_call; define #execute as a
# new method for this instance that invokes #request_response.
op = c.operation
op.define_singleton_method(:execute) do
- c.request_response(req, **kw)
+ c.request_response(req, metadata: metadata)
end
op
end
@@ -205,42 +199,35 @@ module GRPC
#
# If return_op is true, the call returns the response.
#
- # == Keyword Args ==
- #
- # Unspecified keyword arguments are treated as metadata to be sent to the
- # server.
- #
# @param method [String] the RPC method to call on the GRPC server
# @param requests [Object] an Enumerable of requests to send
# @param marshal [Function] f(obj)->string that marshals requests
# @param unmarshal [Function] f(string)->obj that unmarshals responses
- # @param timeout [Numeric] (optional) the max completion time in seconds
# @param deadline [Time] (optional) the time the request should complete
# @param return_op [true|false] return an Operation if true
# @param parent [Core::Call] a prior call whose reserved metadata
# will be propagated by this one.
# @param credentials [Core::CallCredentials] credentials to use when making
# the call
+ # @param metadata [Hash] metadata to be sent to the server
# @return [Object|Operation] the response received from the server
def client_streamer(method, requests, marshal, unmarshal,
deadline: nil,
- timeout: nil,
return_op: false,
parent: nil,
credentials: nil,
- **kw)
+ metadata: {})
c = new_active_call(method, marshal, unmarshal,
deadline: deadline,
- timeout: timeout,
parent: parent,
credentials: credentials)
- return c.client_streamer(requests, **kw) unless return_op
+ return c.client_streamer(requests, metadata: metadata) unless return_op
# return the operation view of the active_call; define #execute as a
# new method for this instance that invokes #client_streamer.
op = c.operation
op.define_singleton_method(:execute) do
- c.client_streamer(requests, **kw)
+ c.client_streamer(requests, metadata: metadata)
end
op
end
@@ -292,35 +279,33 @@ module GRPC
# @param req [Object] the request sent to the server
# @param marshal [Function] f(obj)->string that marshals requests
# @param unmarshal [Function] f(string)->obj that unmarshals responses
- # @param timeout [Numeric] (optional) the max completion time in seconds
# @param deadline [Time] (optional) the time the request should complete
# @param return_op [true|false] return an Operation if true
# @param parent [Core::Call] a prior call whose reserved metadata
# will be propagated by this one.
# @param credentials [Core::CallCredentials] credentials to use when making
# the call
+ # @param metadata [Hash] metadata to be sent to the server
# @param blk [Block] when provided, is executed for each response
# @return [Enumerator|Operation|nil] as discussed above
def server_streamer(method, req, marshal, unmarshal,
deadline: nil,
- timeout: nil,
return_op: false,
parent: nil,
credentials: nil,
- **kw,
+ metadata: {},
&blk)
c = new_active_call(method, marshal, unmarshal,
deadline: deadline,
- timeout: timeout,
parent: parent,
credentials: credentials)
- return c.server_streamer(req, **kw, &blk) unless return_op
+ return c.server_streamer(req, metadata: metadata, &blk) unless return_op
# return the operation view of the active_call; define #execute
# as a new method for this instance that invokes #server_streamer
op = c.operation
op.define_singleton_method(:execute) do
- c.server_streamer(req, **kw, &blk)
+ c.server_streamer(req, metadata: metadata, &blk)
end
op
end
@@ -391,11 +376,6 @@ module GRPC
# * the deadline is exceeded
#
#
- # == Keyword Args ==
- #
- # Unspecified keyword arguments are treated as metadata to be sent to the
- # server.
- #
# == Return Value ==
#
# if the return_op is false, the return value is an Enumerator of the
@@ -411,36 +391,35 @@ module GRPC
# @param requests [Object] an Enumerable of requests to send
# @param marshal [Function] f(obj)->string that marshals requests
# @param unmarshal [Function] f(string)->obj that unmarshals responses
- # @param timeout [Numeric] (optional) the max completion time in seconds
# @param deadline [Time] (optional) the time the request should complete
# @param parent [Core::Call] a prior call whose reserved metadata
# will be propagated by this one.
# @param credentials [Core::CallCredentials] credentials to use when making
# the call
# @param return_op [true|false] return an Operation if true
+ # @param metadata [Hash] metadata to be sent to the server
# @param blk [Block] when provided, is executed for each response
# @return [Enumerator|nil|Operation] as discussed above
def bidi_streamer(method, requests, marshal, unmarshal,
deadline: nil,
- timeout: nil,
return_op: false,
parent: nil,
credentials: nil,
- **kw,
+ metadata: {},
&blk)
c = new_active_call(method, marshal, unmarshal,
deadline: deadline,
- timeout: timeout,
parent: parent,
credentials: credentials)
- return c.bidi_streamer(requests, **kw, &blk) unless return_op
+ return c.bidi_streamer(requests, metadata: metadata,
+ &blk) unless return_op
# return the operation view of the active_call; define #execute
# as a new method for this instance that invokes #bidi_streamer
op = c.operation
op.define_singleton_method(:execute) do
- c.bidi_streamer(requests, **kw, &blk)
+ c.bidi_streamer(requests, metadata: metadata, &blk)
end
op
end
@@ -457,12 +436,10 @@ module GRPC
# @param timeout [TimeConst]
def new_active_call(method, marshal, unmarshal,
deadline: nil,
- timeout: nil,
parent: nil,
credentials: nil)
- if deadline.nil?
- deadline = from_relative_time(timeout.nil? ? @timeout : timeout)
- end
+
+ deadline = from_relative_time(@timeout) if deadline.nil?
# Provide each new client call with its own completion queue
call_queue = Core::CompletionQueue.new
call = @ch.create_call(call_queue,
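For ClientStub, channel arguments move from loose keywords to a channel_args: hash, per-call metadata moves to metadata:, and the per-call timeout: option is dropped in favour of deadline: (or the stub-wide timeout). A hedged construction sketch; the host, codec procs, request payload, and argument values are all stand-ins:

  require 'grpc'

  marshal   = ->(obj) { obj.to_s }   # stand-in codec procs
  unmarshal = ->(str) { str }
  q    = GRPC::Core::CompletionQueue.new
  stub = GRPC::ClientStub.new('localhost:50051', q, :this_channel_is_insecure,
                              channel_args: { 'grpc.primary_user_agent' => 'my-app/1.0' })
  resp = stub.request_response('/pkg.Service/Method', 'payload', marshal, unmarshal,
                               deadline: Time.now + 5,
                               metadata: { 'x-request-id' => 'abc123' })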
diff --git a/src/ruby/lib/grpc/generic/rpc_desc.rb b/src/ruby/lib/grpc/generic/rpc_desc.rb
index cc21ffd3c5..5dc9202f2e 100644
--- a/src/ruby/lib/grpc/generic/rpc_desc.rb
+++ b/src/ruby/lib/grpc/generic/rpc_desc.rb
@@ -80,12 +80,12 @@ module GRPC
else # is a bidi_stream
active_call.run_server_bidi(mth)
end
- send_status(active_call, OK, 'OK', **active_call.output_metadata)
+ send_status(active_call, OK, 'OK', active_call.output_metadata)
rescue BadStatus => e
# this is raised by handlers that want GRPC to send an application error
# code and detail message and some additional app-specific metadata.
GRPC.logger.debug("app err:#{active_call}, status:#{e.code}:#{e.details}")
- send_status(active_call, e.code, e.details, **e.metadata)
+ send_status(active_call, e.code, e.details, e.metadata)
rescue Core::CallError => e
# This is raised by GRPC internals but should rarely, if ever happen.
# Log it, but don't notify the other endpoint..
@@ -135,10 +135,10 @@ module GRPC
"##{mth.name}: bad arg count; got:#{mth.arity}, want:#{want}, #{msg}"
end
- def send_status(active_client, code, details, **kw)
+ def send_status(active_client, code, details, metadata = {})
details = 'Not sure why' if details.nil?
GRPC.logger.debug("Sending status #{code}:#{details}")
- active_client.send_status(code, details, code == OK, **kw)
+ active_client.send_status(code, details, code == OK, metadata)
rescue StandardError => e
GRPC.logger.warn("Could not send status #{code}:#{details}")
GRPC.logger.warn(e)
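On the server path, the metadata attached to a BadStatus raised by a handler is now forwarded to send_status as a plain hash. A sketch of a handler relying on that; the service method and fields are hypothetical:

  # the metadata attached here is intended to reach the client as trailing status metadata
  def get_widget(req, _call)
    fail GRPC::BadStatus.new(GRPC::Core::StatusCodes::NOT_FOUND,
                             "no widget with id #{req.id}",
                             'widget-id' => req.id.to_s)
  end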
diff --git a/src/ruby/lib/grpc/generic/rpc_server.rb b/src/ruby/lib/grpc/generic/rpc_server.rb
index 238aaa9656..fdb7d7ce55 100644
--- a/src/ruby/lib/grpc/generic/rpc_server.rb
+++ b/src/ruby/lib/grpc/generic/rpc_server.rb
@@ -170,14 +170,6 @@ module GRPC
alt_cq
end
- # setup_srv is used by #initialize to constuct a Core::Server from its
- # arguments.
- def self.setup_srv(alt_srv, cq, **kw)
- return Core::Server.new(cq, kw) if alt_srv.nil?
- fail(TypeError, '!Server') unless alt_srv.is_a? Core::Server
- alt_srv
- end
-
# setup_connect_md_proc is used by #initialize to validate the
# connect_md_proc.
def self.setup_connect_md_proc(a_proc)
@@ -194,9 +186,6 @@ module GRPC
# instance; other arbitrary arguments are allowed and, when present, are used
# to configure the listening connection set up by the RpcServer.
#
- # * server_override: which if passed must be a [GRPC::Core::Server]. When
- # present.
- #
# * poll_period: when present, the server polls for new events with this
# period
#
@@ -218,13 +207,15 @@ module GRPC
# when non-nil is a proc for determining metadata to send back to the client
# on receiving an invocation req. The proc signature is:
# {key: val, ..} func(method_name, {key: val, ...})
+ #
+ # * server_args:
+ # A server arguments hash to be passed down to the underlying core server
def initialize(pool_size:DEFAULT_POOL_SIZE,
max_waiting_requests:DEFAULT_MAX_WAITING_REQUESTS,
poll_period:DEFAULT_POLL_PERIOD,
completion_queue_override:nil,
- server_override:nil,
connect_md_proc:nil,
- **kw)
+ server_args:{})
@connect_md_proc = RpcServer.setup_connect_md_proc(connect_md_proc)
@cq = RpcServer.setup_cq(completion_queue_override)
@max_waiting_requests = max_waiting_requests
@@ -236,7 +227,7 @@ module GRPC
# running_state can take 4 values: :not_started, :running, :stopping, and
# :stopped. State transitions can only proceed in that order.
@running_state = :not_started
- @server = RpcServer.setup_srv(server_override, @cq, **kw)
+ @server = Core::Server.new(@cq, server_args)
end
# stops a running server
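RpcServer likewise drops the server_override escape hatch and the keyword splat; core server arguments are now passed explicitly under server_args:. A hedged construction example; the argument key and the WidgetService handler class are illustrative:

  require 'grpc'

  server = GRPC::RpcServer.new(pool_size: 30,
                               poll_period: 1,
                               server_args: { 'grpc.max_concurrent_streams' => 100 })
  # server.handle(WidgetService)   # WidgetService stands in for a real service implementation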
diff --git a/src/ruby/lib/grpc/generic/service.rb b/src/ruby/lib/grpc/generic/service.rb
index 0a166e823e..f30242ee80 100644
--- a/src/ruby/lib/grpc/generic/service.rb
+++ b/src/ruby/lib/grpc/generic/service.rb
@@ -179,24 +179,24 @@ module GRPC
unmarshal = desc.unmarshal_proc(:output)
route = "/#{route_prefix}/#{name}"
if desc.request_response?
- define_method(mth_name) do |req, **kw|
+ define_method(mth_name) do |req, metadata = {}|
GRPC.logger.debug("calling #{@host}:#{route}")
- request_response(route, req, marshal, unmarshal, **kw)
+ request_response(route, req, marshal, unmarshal, metadata)
end
elsif desc.client_streamer?
- define_method(mth_name) do |reqs, **kw|
+ define_method(mth_name) do |reqs, metadata = {}|
GRPC.logger.debug("calling #{@host}:#{route}")
- client_streamer(route, reqs, marshal, unmarshal, **kw)
+ client_streamer(route, reqs, marshal, unmarshal, metadata)
end
elsif desc.server_streamer?
- define_method(mth_name) do |req, **kw, &blk|
+ define_method(mth_name) do |req, metadata = {}, &blk|
GRPC.logger.debug("calling #{@host}:#{route}")
- server_streamer(route, req, marshal, unmarshal, **kw, &blk)
+ server_streamer(route, req, marshal, unmarshal, metadata, &blk)
end
else # is a bidi_stream
- define_method(mth_name) do |reqs, **kw, &blk|
+ define_method(mth_name) do |reqs, metadata = {}, &blk|
GRPC.logger.debug("calling #{@host}:#{route}")
- bidi_streamer(route, reqs, marshal, unmarshal, **kw, &blk)
+ bidi_streamer(route, reqs, marshal, unmarshal, metadata, &blk)
end
end
end
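Generated service stubs follow suit: each stub method now takes its metadata as an explicit trailing hash rather than collecting stray keywords. A sketch of the intended calling convention against a hypothetical Widgets service; all class, field, and header names are made up:

  q     = GRPC::Core::CompletionQueue.new
  stub  = Widgets::Stub.new('localhost:50051', q, :this_channel_is_insecure)
  reply = stub.get_widget(Widgets::GetWidgetRequest.new(id: 42),
                          'x-request-id' => 'abc123')   # the trailing hash is the metadata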