grpc 1.58.3-aarch64-linux
- checksums.yaml +7 -0
- data/etc/roots.pem +4337 -0
- data/grpc_c.32-msvcrt.ruby +0 -0
- data/grpc_c.64-msvcrt.ruby +0 -0
- data/grpc_c.64-ucrt.ruby +0 -0
- data/src/ruby/bin/math_client.rb +140 -0
- data/src/ruby/bin/math_pb.rb +40 -0
- data/src/ruby/bin/math_server.rb +191 -0
- data/src/ruby/bin/math_services_pb.rb +51 -0
- data/src/ruby/bin/noproto_client.rb +93 -0
- data/src/ruby/bin/noproto_server.rb +97 -0
- data/src/ruby/ext/grpc/ext-export-truffleruby-with-ruby-abi-version.clang +2 -0
- data/src/ruby/ext/grpc/ext-export-truffleruby-with-ruby-abi-version.gcc +7 -0
- data/src/ruby/ext/grpc/ext-export-with-ruby-abi-version.clang +2 -0
- data/src/ruby/ext/grpc/ext-export-with-ruby-abi-version.gcc +7 -0
- data/src/ruby/ext/grpc/ext-export.clang +1 -0
- data/src/ruby/ext/grpc/ext-export.gcc +6 -0
- data/src/ruby/ext/grpc/extconf.rb +208 -0
- data/src/ruby/ext/grpc/rb_byte_buffer.c +65 -0
- data/src/ruby/ext/grpc/rb_byte_buffer.h +35 -0
- data/src/ruby/ext/grpc/rb_call.c +1075 -0
- data/src/ruby/ext/grpc/rb_call.h +57 -0
- data/src/ruby/ext/grpc/rb_call_credentials.c +340 -0
- data/src/ruby/ext/grpc/rb_call_credentials.h +31 -0
- data/src/ruby/ext/grpc/rb_channel.c +875 -0
- data/src/ruby/ext/grpc/rb_channel.h +35 -0
- data/src/ruby/ext/grpc/rb_channel_args.c +170 -0
- data/src/ruby/ext/grpc/rb_channel_args.h +42 -0
- data/src/ruby/ext/grpc/rb_channel_credentials.c +285 -0
- data/src/ruby/ext/grpc/rb_channel_credentials.h +37 -0
- data/src/ruby/ext/grpc/rb_completion_queue.c +101 -0
- data/src/ruby/ext/grpc/rb_completion_queue.h +36 -0
- data/src/ruby/ext/grpc/rb_compression_options.c +470 -0
- data/src/ruby/ext/grpc/rb_compression_options.h +29 -0
- data/src/ruby/ext/grpc/rb_enable_cpp.cc +22 -0
- data/src/ruby/ext/grpc/rb_event_thread.c +161 -0
- data/src/ruby/ext/grpc/rb_event_thread.h +22 -0
- data/src/ruby/ext/grpc/rb_grpc.c +496 -0
- data/src/ruby/ext/grpc/rb_grpc.h +83 -0
- data/src/ruby/ext/grpc/rb_grpc_imports.generated.c +599 -0
- data/src/ruby/ext/grpc/rb_grpc_imports.generated.h +904 -0
- data/src/ruby/ext/grpc/rb_loader.c +61 -0
- data/src/ruby/ext/grpc/rb_loader.h +25 -0
- data/src/ruby/ext/grpc/rb_server.c +405 -0
- data/src/ruby/ext/grpc/rb_server.h +32 -0
- data/src/ruby/ext/grpc/rb_server_credentials.c +258 -0
- data/src/ruby/ext/grpc/rb_server_credentials.h +37 -0
- data/src/ruby/ext/grpc/rb_xds_channel_credentials.c +217 -0
- data/src/ruby/ext/grpc/rb_xds_channel_credentials.h +37 -0
- data/src/ruby/ext/grpc/rb_xds_server_credentials.c +169 -0
- data/src/ruby/ext/grpc/rb_xds_server_credentials.h +37 -0
- data/src/ruby/lib/grpc/2.6/grpc_c.so +0 -0
- data/src/ruby/lib/grpc/2.7/grpc_c.so +0 -0
- data/src/ruby/lib/grpc/3.0/grpc_c.so +0 -0
- data/src/ruby/lib/grpc/3.1/grpc_c.so +0 -0
- data/src/ruby/lib/grpc/3.2/grpc_c.so +0 -0
- data/src/ruby/lib/grpc/core/status_codes.rb +135 -0
- data/src/ruby/lib/grpc/core/time_consts.rb +56 -0
- data/src/ruby/lib/grpc/errors.rb +277 -0
- data/src/ruby/lib/grpc/generic/active_call.rb +670 -0
- data/src/ruby/lib/grpc/generic/bidi_call.rb +237 -0
- data/src/ruby/lib/grpc/generic/client_stub.rb +503 -0
- data/src/ruby/lib/grpc/generic/interceptor_registry.rb +53 -0
- data/src/ruby/lib/grpc/generic/interceptors.rb +186 -0
- data/src/ruby/lib/grpc/generic/rpc_desc.rb +204 -0
- data/src/ruby/lib/grpc/generic/rpc_server.rb +551 -0
- data/src/ruby/lib/grpc/generic/service.rb +211 -0
- data/src/ruby/lib/grpc/google_rpc_status_utils.rb +40 -0
- data/src/ruby/lib/grpc/grpc.rb +24 -0
- data/src/ruby/lib/grpc/logconfig.rb +44 -0
- data/src/ruby/lib/grpc/notifier.rb +45 -0
- data/src/ruby/lib/grpc/structs.rb +15 -0
- data/src/ruby/lib/grpc/version.rb +18 -0
- data/src/ruby/lib/grpc.rb +37 -0
- data/src/ruby/pb/README.md +42 -0
- data/src/ruby/pb/generate_proto_ruby.sh +46 -0
- data/src/ruby/pb/grpc/health/checker.rb +75 -0
- data/src/ruby/pb/grpc/health/v1/health_pb.rb +42 -0
- data/src/ruby/pb/grpc/health/v1/health_services_pb.rb +62 -0
- data/src/ruby/pb/grpc/testing/duplicate/echo_duplicate_services_pb.rb +44 -0
- data/src/ruby/pb/grpc/testing/metrics_pb.rb +28 -0
- data/src/ruby/pb/grpc/testing/metrics_services_pb.rb +49 -0
- data/src/ruby/pb/src/proto/grpc/testing/empty_pb.rb +38 -0
- data/src/ruby/pb/src/proto/grpc/testing/messages_pb.rb +63 -0
- data/src/ruby/pb/src/proto/grpc/testing/test_pb.rb +40 -0
- data/src/ruby/pb/src/proto/grpc/testing/test_services_pb.rb +152 -0
- data/src/ruby/pb/test/client.rb +785 -0
- data/src/ruby/pb/test/server.rb +252 -0
- data/src/ruby/pb/test/xds_client.rb +415 -0
- data/src/ruby/spec/call_credentials_spec.rb +42 -0
- data/src/ruby/spec/call_spec.rb +180 -0
- data/src/ruby/spec/channel_connection_spec.rb +126 -0
- data/src/ruby/spec/channel_credentials_spec.rb +124 -0
- data/src/ruby/spec/channel_spec.rb +207 -0
- data/src/ruby/spec/client_auth_spec.rb +152 -0
- data/src/ruby/spec/client_server_spec.rb +676 -0
- data/src/ruby/spec/compression_options_spec.rb +149 -0
- data/src/ruby/spec/debug_message_spec.rb +134 -0
- data/src/ruby/spec/error_sanity_spec.rb +49 -0
- data/src/ruby/spec/errors_spec.rb +142 -0
- data/src/ruby/spec/generic/active_call_spec.rb +692 -0
- data/src/ruby/spec/generic/client_interceptors_spec.rb +153 -0
- data/src/ruby/spec/generic/client_stub_spec.rb +1083 -0
- data/src/ruby/spec/generic/interceptor_registry_spec.rb +65 -0
- data/src/ruby/spec/generic/rpc_desc_spec.rb +374 -0
- data/src/ruby/spec/generic/rpc_server_pool_spec.rb +127 -0
- data/src/ruby/spec/generic/rpc_server_spec.rb +748 -0
- data/src/ruby/spec/generic/server_interceptors_spec.rb +218 -0
- data/src/ruby/spec/generic/service_spec.rb +263 -0
- data/src/ruby/spec/google_rpc_status_utils_spec.rb +282 -0
- data/src/ruby/spec/pb/codegen/grpc/testing/package_options.proto +28 -0
- data/src/ruby/spec/pb/codegen/grpc/testing/package_options_import.proto +22 -0
- data/src/ruby/spec/pb/codegen/grpc/testing/package_options_import2.proto +23 -0
- data/src/ruby/spec/pb/codegen/grpc/testing/package_options_ruby_style.proto +41 -0
- data/src/ruby/spec/pb/codegen/grpc/testing/same_package_service_name.proto +27 -0
- data/src/ruby/spec/pb/codegen/grpc/testing/same_ruby_package_service_name.proto +29 -0
- data/src/ruby/spec/pb/codegen/package_option_spec.rb +98 -0
- data/src/ruby/spec/pb/duplicate/codegen_spec.rb +57 -0
- data/src/ruby/spec/pb/health/checker_spec.rb +236 -0
- data/src/ruby/spec/server_credentials_spec.rb +104 -0
- data/src/ruby/spec/server_spec.rb +231 -0
- data/src/ruby/spec/spec_helper.rb +61 -0
- data/src/ruby/spec/support/helpers.rb +107 -0
- data/src/ruby/spec/support/services.rb +160 -0
- data/src/ruby/spec/testdata/README +1 -0
- data/src/ruby/spec/testdata/ca.pem +20 -0
- data/src/ruby/spec/testdata/client.key +28 -0
- data/src/ruby/spec/testdata/client.pem +20 -0
- data/src/ruby/spec/testdata/server1.key +28 -0
- data/src/ruby/spec/testdata/server1.pem +22 -0
- data/src/ruby/spec/time_consts_spec.rb +74 -0
- data/src/ruby/spec/user_agent_spec.rb +74 -0
- metadata +406 -0
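The listing above shows that this platform gem ships precompiled grpc_c.so extensions for Ruby 2.6 through 3.2 on aarch64-linux, so no local C/C++ toolchain is needed at install time. As a rough sketch of how the gem is typically pulled in (the version constraint below is only an example):

# Gemfile
source 'https://rubygems.org'

gem 'grpc', '~> 1.58'

On an aarch64 Linux host, bundle install resolves to this platform-specific gem, and the gem loads the prebuilt extension matching the running Ruby's minor version.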
data/src/ruby/lib/grpc/generic/active_call.rb
@@ -0,0 +1,670 @@
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.

require 'forwardable'
require 'weakref'
require_relative 'bidi_call'

class Struct
  # BatchResult is the struct returned by calls to call#start_batch.
  class BatchResult
    # check_status returns the status, raising an error if the status
    # is non-nil and not OK.
    def check_status
      return nil if status.nil?
      if status.code != GRPC::Core::StatusCodes::OK
        GRPC.logger.debug("Failing with status #{status}")
        # raise BadStatus, propagating the metadata if present.
        fail GRPC::BadStatus.new_status_exception(
          status.code, status.details, status.metadata,
          status.debug_error_string)
      end
      status
    end
  end
end

# GRPC contains the General RPC module.
module GRPC
  # The ActiveCall class provides simple methods for sending marshallable
  # data to a call
  class ActiveCall # rubocop:disable Metrics/ClassLength
    include Core::TimeConsts
    include Core::CallOps
    extend Forwardable
    attr_reader :deadline, :metadata_sent, :metadata_to_send, :peer, :peer_cert
    def_delegators :@call, :cancel, :metadata, :write_flag, :write_flag=,
                   :trailing_metadata, :status

    # client_invoke begins a client invocation.
    #
    # Flow Control note: this blocks until flow control accepts that client
    # request can go ahead.
    #
    # deadline is the absolute deadline for the call.
    #
    # == Keyword Arguments ==
    # any keyword arguments are treated as metadata to be sent to the server
    # if a keyword value is a list, multiple metadata for its key are sent
    #
    # @param call [Call] a call on which to start an invocation
    # @param metadata [Hash] the metadata
    def self.client_invoke(call, metadata = {})
      fail(TypeError, '!Core::Call') unless call.is_a? Core::Call
      call.run_batch(SEND_INITIAL_METADATA => metadata)
    end

    # Creates an ActiveCall.
    #
    # ActiveCall should only be created after a call is accepted. That
    # means different things on a client and a server. On the client, the
    # call is accepted after calling call.invoke. On the server, this is
    # after call.accept.
    #
    # #initialize cannot determine if the call is accepted or not; so if a
    # call that's not accepted is used here, the error won't be visible until
    # the ActiveCall methods are called.
    #
    # deadline is the absolute deadline for the call.
    #
    # @param call [Call] the call used by the ActiveCall
    # @param marshal [Function] f(obj)->string that marshals requests
    # @param unmarshal [Function] f(string)->obj that unmarshals responses
    # @param deadline [Fixnum] the deadline for the call to complete
    # @param started [true|false] indicates that metadata was sent
    # @param metadata_received [true|false] indicates if metadata has already
    #     been received. Should always be true for server calls
    def initialize(call, marshal, unmarshal, deadline, started: true,
                   metadata_received: false, metadata_to_send: nil)
      fail(TypeError, '!Core::Call') unless call.is_a? Core::Call
      @call = call
      @deadline = deadline
      @marshal = marshal
      @unmarshal = unmarshal
      @metadata_received = metadata_received
      @metadata_sent = started
      @op_notifier = nil

      fail(ArgumentError, 'Already sent md') if started && metadata_to_send
      @metadata_to_send = metadata_to_send || {} unless started
      @send_initial_md_mutex = Mutex.new

      @output_stream_done = false
      @input_stream_done = false
      @call_finished = false
      @call_finished_mu = Mutex.new

      @client_call_executed = false
      @client_call_executed_mu = Mutex.new

      # set the peer now so that the accessor can still function
      # after the server closes the call
      @peer = call.peer
    end

    # Sends the initial metadata that has yet to be sent.
    # Does nothing if metadata has already been sent for this call.
    def send_initial_metadata(new_metadata = {})
      @send_initial_md_mutex.synchronize do
        return if @metadata_sent
        @metadata_to_send.merge!(new_metadata)
        ActiveCall.client_invoke(@call, @metadata_to_send)
        @metadata_sent = true
      end
    end

    # output_metadata provides access to a hash that can be used to
    # save metadata to be sent as a trailer
    def output_metadata
      @output_metadata ||= {}
    end

    # cancelled indicates if the call was cancelled
    def cancelled?
      !@call.status.nil? && @call.status.code == Core::StatusCodes::CANCELLED
    end

    # multi_req_view provides a restricted view of this ActiveCall for use
    # in a server client-streaming handler.
    def multi_req_view
      MultiReqView.new(self)
    end

    # single_req_view provides a restricted view of this ActiveCall for use in
    # a server request-response handler.
    def single_req_view
      SingleReqView.new(self)
    end

    # operation provides a restricted view of this ActiveCall for use as
    # an Operation.
    def operation
      @op_notifier = Notifier.new
      Operation.new(self)
    end

    ##
    # Returns a restricted view of this ActiveCall for use in interceptors
    #
    # @return [InterceptableView]
    #
    def interceptable
      InterceptableView.new(self)
    end

    def receive_and_check_status
      ops = { RECV_STATUS_ON_CLIENT => nil }
      ops[RECV_INITIAL_METADATA] = nil unless @metadata_received
      batch_result = @call.run_batch(ops)
      unless @metadata_received
        @call.metadata = batch_result.metadata
        @metadata_received = true
      end
      set_input_stream_done
      attach_status_results_and_complete_call(batch_result)
    end

    def attach_status_results_and_complete_call(recv_status_batch_result)
      unless recv_status_batch_result.status.nil?
        @call.trailing_metadata = recv_status_batch_result.status.metadata
      end
      @call.status = recv_status_batch_result.status

      # The RECV_STATUS in run_batch always succeeds
      # Check the status for a bad status or failed run batch
      recv_status_batch_result.check_status
    end

    # remote_send sends a request to the remote endpoint.
    #
    # It blocks until the remote endpoint accepts the message.
    #
    # @param req [Object, String] the object to send or its marshalled form.
    # @param marshalled [false, true] indicates if the object is already
    #   marshalled.
    def remote_send(req, marshalled = false)
      send_initial_metadata
      GRPC.logger.debug("sending #{req}, marshalled? #{marshalled}")
      payload = marshalled ? req : @marshal.call(req)
      @call.run_batch(SEND_MESSAGE => payload)
    end

    # send_status sends a status to the remote endpoint.
    #
    # @param code [int] the status code to send
    # @param details [String] details
    # @param assert_finished [true, false] when true, waits for
    #   FINISHED.
    # @param metadata [Hash] metadata to send to the server. If a value is a
    #   list, multiple metadata for its key are sent
    def send_status(code = OK, details = '', assert_finished = false,
                    metadata: {})
      send_initial_metadata
      ops = {
        SEND_STATUS_FROM_SERVER => Struct::Status.new(code, details, metadata)
      }
      ops[RECV_CLOSE_ON_SERVER] = nil if assert_finished
      @call.run_batch(ops)
      set_output_stream_done

      nil
    end

    # Intended for use on server-side calls when a single request from
    # the client is expected (i.e., unary and server-streaming RPC types).
    def read_unary_request
      req = remote_read
      set_input_stream_done
      req
    end

    def server_unary_response(req, trailing_metadata: {},
                              code: Core::StatusCodes::OK, details: 'OK')
      ops = {}
      ops[SEND_MESSAGE] = @marshal.call(req)
      ops[SEND_STATUS_FROM_SERVER] = Struct::Status.new(
        code, details, trailing_metadata)
      ops[RECV_CLOSE_ON_SERVER] = nil

      @send_initial_md_mutex.synchronize do
        ops[SEND_INITIAL_METADATA] = @metadata_to_send unless @metadata_sent
        @metadata_sent = true
      end

      @call.run_batch(ops)
      set_output_stream_done
    end

    # remote_read reads a response from the remote endpoint.
    #
    # It blocks until the remote endpoint replies with a message or status.
    # On receiving a message, it returns the response after unmarshalling it.
    # On receiving a status, it returns nil if the status is OK, otherwise
    # raising BadStatus
    def remote_read
      ops = { RECV_MESSAGE => nil }
      ops[RECV_INITIAL_METADATA] = nil unless @metadata_received
      batch_result = @call.run_batch(ops)
      unless @metadata_received
        @call.metadata = batch_result.metadata
        @metadata_received = true
      end
      get_message_from_batch_result(batch_result)
    rescue GRPC::Core::CallError => e
      GRPC.logger.info("remote_read: #{e}")
      nil
    end

    def get_message_from_batch_result(recv_message_batch_result)
      unless recv_message_batch_result.nil? ||
             recv_message_batch_result.message.nil?
        return @unmarshal.call(recv_message_batch_result.message)
      end
      GRPC.logger.debug('found nil; the final response has been sent')
      nil
    end

    # each_remote_read passes each response to the given block or returns an
    # enumerator of the responses if no block is given.
    # Used to generate the request enumerable for
    # server-side client-streaming RPC's.
    #
    # == Enumerator ==
    #
    # * #next blocks until the remote endpoint sends a READ or FINISHED
    # * for each read, enumerator#next yields the response
    # * on status
    #   * if it is OK, enumerator#next raises StopException
    #   * if it is not OK, enumerator#next raises RuntimeException
    #
    # == Block ==
    #
    # * if provided it is executed for each response
    # * the call blocks until no more responses are provided
    #
    # @return [Enumerator] if no block was given
    def each_remote_read
      return enum_for(:each_remote_read) unless block_given?
      begin
        loop do
          resp = remote_read
          break if resp.nil? # the last response was received
          yield resp
        end
      ensure
        set_input_stream_done
      end
    end

    # each_remote_read_then_finish passes each response to the given block or
    # returns an enumerator of the responses if no block is given.
    #
    # It is like each_remote_read, but it blocks on finishing on detecting
    # the final message.
    #
    # == Enumerator ==
    #
    # * #next blocks until the remote endpoint sends a READ or FINISHED
    # * for each read, enumerator#next yields the response
    # * on status
    #   * if it is OK, enumerator#next raises StopException
    #   * if it is not OK, enumerator#next raises RuntimeException
    #
    # == Block ==
    #
    # * if provided it is executed for each response
    # * the call blocks until no more responses are provided
    #
    # @return [Enumerator] if no block was given
    def each_remote_read_then_finish
      return enum_for(:each_remote_read_then_finish) unless block_given?
      loop do
        resp = remote_read
        break if resp.nil? # the last response was received
        yield resp
      end

      receive_and_check_status
    ensure
      set_input_stream_done
    end

    # request_response sends a request to a GRPC server, and returns the
    # response.
    #
    # @param req [Object] the request sent to the server
    # @param metadata [Hash] metadata to be sent to the server. If a value is
    #   a list, multiple metadata for its key are sent
    # @return [Object] the response received from the server
    def request_response(req, metadata: {})
      raise_error_if_already_executed
      ops = {
        SEND_MESSAGE => @marshal.call(req),
        SEND_CLOSE_FROM_CLIENT => nil,
        RECV_INITIAL_METADATA => nil,
        RECV_MESSAGE => nil,
        RECV_STATUS_ON_CLIENT => nil
      }
      @send_initial_md_mutex.synchronize do
        # Metadata might have already been sent if this is an operation view
        unless @metadata_sent
          ops[SEND_INITIAL_METADATA] = @metadata_to_send.merge!(metadata)
        end
        @metadata_sent = true
      end

      begin
        batch_result = @call.run_batch(ops)
        # no need to check for cancellation after a CallError because this
        # batch contains a RECV_STATUS op
      ensure
        set_input_stream_done
        set_output_stream_done
      end

      @call.metadata = batch_result.metadata
      attach_status_results_and_complete_call(batch_result)
      get_message_from_batch_result(batch_result)
    end

    # client_streamer sends a stream of requests to a GRPC server, and
    # returns a single response.
    #
    # requests provides an 'iterable' of Requests. I.e. it follows Ruby's
    # #each enumeration protocol. In the simplest case, requests will be an
    # array of marshallable objects; in the typical case it will be an
    # Enumerable that allows dynamic construction of the marshallable objects.
    #
    # @param requests [Object] an Enumerable of requests to send
    # @param metadata [Hash] metadata to be sent to the server. If a value is
    #   a list, multiple metadata for its key are sent
    # @return [Object] the response received from the server
    def client_streamer(requests, metadata: {})
      raise_error_if_already_executed
      begin
        send_initial_metadata(metadata)
        requests.each { |r| @call.run_batch(SEND_MESSAGE => @marshal.call(r)) }
      rescue GRPC::Core::CallError => e
        receive_and_check_status # check for Cancelled
        raise e
      rescue => e
        set_input_stream_done
        raise e
      ensure
        set_output_stream_done
      end

      batch_result = @call.run_batch(
        SEND_CLOSE_FROM_CLIENT => nil,
        RECV_INITIAL_METADATA => nil,
        RECV_MESSAGE => nil,
        RECV_STATUS_ON_CLIENT => nil
      )

      set_input_stream_done

      @call.metadata = batch_result.metadata
      attach_status_results_and_complete_call(batch_result)
      get_message_from_batch_result(batch_result)
    end

    # server_streamer sends one request to the GRPC server, which yields a
    # stream of responses.
    #
    # responses provides an enumerator over the streamed responses, i.e. it
    # follows Ruby's #each iteration protocol. The enumerator blocks while
    # waiting for each response, stops when the server signals that no
    # further responses will be supplied. If the implicit block is provided,
    # it is executed with each response as the argument and no result is
    # returned.
    #
    # @param req [Object] the request sent to the server
    # @param metadata [Hash] metadata to be sent to the server. If a value is
    #   a list, multiple metadata for its key are sent
    # @return [Enumerator|nil] a response Enumerator
    def server_streamer(req, metadata: {})
      raise_error_if_already_executed
      ops = {
        SEND_MESSAGE => @marshal.call(req),
        SEND_CLOSE_FROM_CLIENT => nil
      }
      @send_initial_md_mutex.synchronize do
        # Metadata might have already been sent if this is an operation view
        unless @metadata_sent
          ops[SEND_INITIAL_METADATA] = @metadata_to_send.merge!(metadata)
        end
        @metadata_sent = true
      end

      begin
        @call.run_batch(ops)
      rescue GRPC::Core::CallError => e
        receive_and_check_status # checks for Cancelled
        raise e
      rescue => e
        set_input_stream_done
        raise e
      ensure
        set_output_stream_done
      end

      replies = enum_for(:each_remote_read_then_finish)
      return replies unless block_given?
      replies.each { |r| yield r }
    end

    # bidi_streamer sends a stream of requests to the GRPC server, and yields
    # a stream of responses.
    #
    # This method takes an Enumerable of requests, and returns an enumerable
    # of responses.
    #
    # == requests ==
    #
    # requests provides an 'iterable' of Requests. I.e. it follows Ruby's
    # #each enumeration protocol. In the simplest case, requests will be an
    # array of marshallable objects; in the typical case it will be an
    # Enumerable that allows dynamic construction of the marshallable
    # objects.
    #
    # == responses ==
    #
    # This is an enumerator of responses. I.e., its #next method blocks
    # waiting for the next response. Also, if at any point the block needs
    # to consume all the remaining responses, this can be done using #each or
    # #collect. Calling #each or #collect should only be done if
    # the_call#writes_done has been called, otherwise the block will loop
    # forever.
    #
    # @param requests [Object] an Enumerable of requests to send
    # @param metadata [Hash] metadata to be sent to the server. If a value is
    #   a list, multiple metadata for its key are sent
    # @return [Enumerator, nil] a response Enumerator
    def bidi_streamer(requests, metadata: {}, &blk)
      raise_error_if_already_executed
      # Metadata might have already been sent if this is an operation view
      begin
        send_initial_metadata(metadata)
      rescue GRPC::Core::CallError => e
        batch_result = @call.run_batch(RECV_STATUS_ON_CLIENT => nil)
        set_input_stream_done
        set_output_stream_done
        attach_status_results_and_complete_call(batch_result)
        raise e
      rescue => e
        set_input_stream_done
        set_output_stream_done
        raise e
      end

      bd = BidiCall.new(@call,
                        @marshal,
                        @unmarshal,
                        metadata_received: @metadata_received)

      bd.run_on_client(requests,
                       proc { set_input_stream_done },
                       proc { set_output_stream_done },
                       &blk)
    end

    # run_server_bidi orchestrates a BiDi stream processing on a server.
    #
    # N.B. gen_each_reply is a func(Enumerable<Requests>)
    #
    # It takes an enumerable of requests as an arg, in case there is a
    # relationship between the stream of requests and the stream of replies.
    #
    # This does not mean that there must necessarily be one. E.g., the replies
    # produced by gen_each_reply could ignore the received_msgs
    #
    # @param mth [Proc] generates the BiDi stream replies
    # @param interception_ctx [InterceptionContext]
    #
    def run_server_bidi(mth, interception_ctx)
      view = multi_req_view
      bidi_call = BidiCall.new(
        @call,
        @marshal,
        @unmarshal,
        metadata_received: @metadata_received,
        req_view: view
      )
      requests = bidi_call.read_next_loop(proc { set_input_stream_done }, false)
      interception_ctx.intercept!(
        :bidi_streamer,
        call: view,
        method: mth,
        requests: requests
      ) do
        bidi_call.run_on_server(mth, requests)
      end
    end

    # Waits until an operation completes
    def wait
      return if @op_notifier.nil?
      GRPC.logger.debug("active_call.wait: on #{@op_notifier}")
      @op_notifier.wait
    end

    # Signals that an operation is done.
    # Only relevant on the client-side (this is a no-op on the server-side)
    def op_is_done
      return if @op_notifier.nil?
      @op_notifier.notify(self)
    end

    # Add to the metadata that will be sent from the server.
    # Fails if metadata has already been sent.
    # Unused by client calls.
    def merge_metadata_to_send(new_metadata = {})
      @send_initial_md_mutex.synchronize do
        fail('cant change metadata after already sent') if @metadata_sent
        @metadata_to_send.merge!(new_metadata)
      end
    end

    def attach_peer_cert(peer_cert)
      @peer_cert = peer_cert
    end

    private

    # To be called once the "input stream" has been completely
    # read through (i.e., done reading from client or received status)
    # note this is idempotent
    def set_input_stream_done
      @call_finished_mu.synchronize do
        @input_stream_done = true
        maybe_finish_and_close_call_locked
      end
    end

    # To be called once the "output stream" has been completely
    # sent through (i.e., done sending from client or sent status)
    # note this is idempotent
    def set_output_stream_done
      @call_finished_mu.synchronize do
        @output_stream_done = true
        maybe_finish_and_close_call_locked
      end
    end

    def maybe_finish_and_close_call_locked
      return unless @output_stream_done && @input_stream_done
      return if @call_finished
      @call_finished = true
      op_is_done
      @call.close
    end

    # Starts the call if not already started
    # @param metadata [Hash] metadata to be sent to the server. If a value is
    #   a list, multiple metadata for its key are sent
    def start_call(metadata = {})
      merge_metadata_to_send(metadata) && send_initial_metadata
    end

    def raise_error_if_already_executed
      @client_call_executed_mu.synchronize do
        if @client_call_executed
          fail GRPC::Core::CallError, 'attempting to re-run a call'
        end
        @client_call_executed = true
      end
    end

    def self.view_class(*visible_methods)
      Class.new do
        extend ::Forwardable
        def_delegators :@wrapped, *visible_methods

        # @param wrapped [ActiveCall] the call whose methods are shielded
        def initialize(wrapped)
          @wrapped = wrapped
        end
      end
    end

    # SingleReqView limits access to an ActiveCall's methods for use in server
    # handlers that receive just one request.
    SingleReqView = view_class(:cancelled?, :deadline, :metadata,
                               :output_metadata, :peer, :peer_cert,
                               :send_initial_metadata,
                               :metadata_to_send,
                               :merge_metadata_to_send,
                               :metadata_sent)

    # MultiReqView limits access to an ActiveCall's methods for use in
    # server client_streamer handlers.
    MultiReqView = view_class(:cancelled?, :deadline,
                              :each_remote_read, :metadata, :output_metadata,
                              :peer, :peer_cert,
                              :send_initial_metadata,
                              :metadata_to_send,
                              :merge_metadata_to_send,
                              :metadata_sent)

    # Operation limits access to an ActiveCall's methods for use as
    # an Operation on the client.
    Operation = view_class(:cancel, :cancelled?, :deadline, :execute,
                           :metadata, :status, :start_call, :wait, :write_flag,
                           :write_flag=, :trailing_metadata)

    # InterceptableView further limits access to an ActiveCall's methods
    # for use in interceptors on the client, exposing only the deadline
    InterceptableView = view_class(:deadline)
  end
end
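For context, a minimal sketch (not part of this gem's files) of how these ActiveCall entry points are usually reached: application code calls a generated service stub, which delegates to GRPC::ClientStub, which in turn drives ActiveCall#request_response and the streaming variants above. The Greeter service, method, and address below are hypothetical placeholders.

# Illustrative sketch only. Assumes a Greeter service compiled with
# grpc-tools into helloworld_services_pb.rb; names and port are examples.
require 'grpc'
require_relative 'helloworld_services_pb'

stub = Helloworld::Greeter::Stub.new('localhost:50051', :this_channel_is_insecure)

# A unary RPC: the generated stub delegates to GRPC::ClientStub#request_response,
# which runs the SEND_MESSAGE / RECV_MESSAGE / RECV_STATUS_ON_CLIENT batch via
# ActiveCall and either returns the unmarshalled reply or raises GRPC::BadStatus.
reply = stub.say_hello(Helloworld::HelloRequest.new(name: 'world'))
puts reply.message

Streaming calls follow the same shape: client_streamer accepts an Enumerable of requests, while server_streamer and bidi_streamer hand back an Enumerator backed by each_remote_read_then_finish and BidiCall as defined above.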