grpc 0.15.0-x86-linux → 1.0.0.pre1-x86-linux


Potentially problematic release.

Files changed (40)
  1. checksums.yaml +4 -4
  2. data/etc/roots.pem +784 -509
  3. data/grpc_c.32.ruby +0 -0
  4. data/grpc_c.64.ruby +0 -0
  5. data/src/ruby/ext/grpc/rb_byte_buffer.c +4 -1
  6. data/src/ruby/ext/grpc/rb_call.c +87 -54
  7. data/src/ruby/ext/grpc/rb_call.h +1 -1
  8. data/src/ruby/ext/grpc/rb_call_credentials.c +1 -30
  9. data/src/ruby/ext/grpc/rb_channel.c +25 -50
  10. data/src/ruby/ext/grpc/rb_channel_credentials.c +1 -31
  11. data/src/ruby/ext/grpc/rb_completion_queue.c +15 -134
  12. data/src/ruby/ext/grpc/rb_completion_queue.h +3 -7
  13. data/src/ruby/ext/grpc/rb_grpc.c +2 -4
  14. data/src/ruby/ext/grpc/rb_grpc_imports.generated.c +2 -0
  15. data/src/ruby/ext/grpc/rb_grpc_imports.generated.h +4 -1
  16. data/src/ruby/ext/grpc/rb_server.c +81 -133
  17. data/src/ruby/ext/grpc/rb_server_credentials.c +4 -33
  18. data/src/ruby/lib/grpc/2.0/grpc_c.so +0 -0
  19. data/src/ruby/lib/grpc/2.1/grpc_c.so +0 -0
  20. data/src/ruby/lib/grpc/2.2/grpc_c.so +0 -0
  21. data/src/ruby/lib/grpc/2.3/grpc_c.so +0 -0
  22. data/src/ruby/lib/grpc/generic/active_call.rb +40 -55
  23. data/src/ruby/lib/grpc/generic/bidi_call.rb +21 -23
  24. data/src/ruby/lib/grpc/generic/client_stub.rb +20 -15
  25. data/src/ruby/lib/grpc/generic/rpc_server.rb +15 -37
  26. data/src/ruby/lib/grpc/generic/service.rb +1 -1
  27. data/src/ruby/lib/grpc/grpc_c.so +0 -0
  28. data/src/ruby/lib/grpc/version.rb +1 -1
  29. data/src/ruby/pb/test/client.rb +25 -7
  30. data/src/ruby/pb/test/server.rb +7 -5
  31. data/src/ruby/spec/call_spec.rb +1 -2
  32. data/src/ruby/spec/channel_spec.rb +2 -3
  33. data/src/ruby/spec/client_server_spec.rb +74 -59
  34. data/src/ruby/spec/generic/active_call_spec.rb +66 -86
  35. data/src/ruby/spec/generic/client_stub_spec.rb +27 -48
  36. data/src/ruby/spec/generic/rpc_server_spec.rb +4 -34
  37. data/src/ruby/spec/pb/health/checker_spec.rb +0 -2
  38. data/src/ruby/spec/server_spec.rb +20 -24
  39. metadata +4 -6
  40. data/src/ruby/spec/completion_queue_spec.rb +0 -42

data/src/ruby/ext/grpc/rb_server_credentials.c

@@ -38,6 +38,7 @@

  #include <grpc/grpc.h>
  #include <grpc/grpc_security.h>
+ #include <grpc/support/log.h>

  #include "rb_grpc.h"

@@ -46,8 +47,8 @@
  static VALUE grpc_rb_cServerCredentials = Qnil;

  /* grpc_rb_server_credentials wraps a grpc_server_credentials. It provides a
- peer ruby object, 'mark' to minimize copying when a server credential is
- created from ruby. */
+ peer ruby object, 'mark' to hold references to objects involved in
+ constructing the server credentials. */
  typedef struct grpc_rb_server_credentials {
  /* Holder of ruby objects involved in constructing the server credentials */
  VALUE mark;
@@ -111,36 +112,6 @@ static VALUE grpc_rb_server_credentials_alloc(VALUE cls) {
  wrapper);
  }

- /* Clones ServerCredentials instances.
-
- Gives ServerCredentials a consistent implementation of Ruby's object copy/dup
- protocol. */
- static VALUE grpc_rb_server_credentials_init_copy(VALUE copy, VALUE orig) {
- grpc_rb_server_credentials *orig_ch = NULL;
- grpc_rb_server_credentials *copy_ch = NULL;
-
- if (copy == orig) {
- return copy;
- }
-
- /* Raise an error if orig is not a server_credentials object or a subclass. */
- if (TYPE(orig) != T_DATA ||
- RDATA(orig)->dfree != (RUBY_DATA_FUNC)grpc_rb_server_credentials_free) {
- rb_raise(rb_eTypeError, "not a %s",
- rb_obj_classname(grpc_rb_cServerCredentials));
- }
-
- TypedData_Get_Struct(orig, grpc_rb_server_credentials,
- &grpc_rb_server_credentials_data_type, orig_ch);
- TypedData_Get_Struct(copy, grpc_rb_server_credentials,
- &grpc_rb_server_credentials_data_type, copy_ch);
-
- /* use ruby's MEMCPY to make a byte-for-byte copy of the server_credentials
- wrapper object. */
- MEMCPY(copy_ch, orig_ch, grpc_rb_server_credentials, 1);
- return copy;
- }
-
  /* The attribute used on the mark object to preserve the pem_root_certs. */
  static ID id_pem_root_certs;

@@ -270,7 +241,7 @@ void Init_grpc_server_credentials() {
  rb_define_method(grpc_rb_cServerCredentials, "initialize",
  grpc_rb_server_credentials_init, 3);
  rb_define_method(grpc_rb_cServerCredentials, "initialize_copy",
- grpc_rb_server_credentials_init_copy, 1);
+ grpc_rb_cannot_init_copy, 1);

  id_pem_key_certs = rb_intern("__pem_key_certs");
  id_pem_root_certs = rb_intern("__pem_root_certs");
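
With `initialize_copy` now mapped to `grpc_rb_cannot_init_copy`, ServerCredentials objects can no longer be duplicated from Ruby. A minimal sketch of what callers should expect, assuming the usual three-argument constructor; the PEM paths below are placeholders:

    require 'grpc'

    creds = GRPC::Core::ServerCredentials.new(
      File.read('ca.pem'),                           # root certs (placeholder path)
      [{ private_key: File.read('server.key'),
         cert_chain: File.read('server.pem') }],     # key/cert pairs (placeholder paths)
      false)                                         # force_client_auth

    begin
      creds.dup  # initialize_copy is now grpc_rb_cannot_init_copy
    rescue StandardError => e
      puts "copying ServerCredentials is not supported: #{e.class}"
    end
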
data/src/ruby/lib/grpc/generic/active_call.rb

@@ -43,8 +43,7 @@ class Struct
  GRPC.logger.debug("Failing with status #{status}")
  # raise BadStatus, propagating the metadata if present.
  md = status.metadata
- with_sym_keys = Hash[md.each_pair.collect { |x, y| [x.to_sym, y] }]
- fail GRPC::BadStatus.new(status.code, status.details, with_sym_keys)
+ fail GRPC::BadStatus.new(status.code, status.details, md)
  end
  status
  end
@@ -61,7 +60,7 @@ module GRPC
  extend Forwardable
  attr_reader(:deadline)
  def_delegators :@call, :cancel, :metadata, :write_flag, :write_flag=,
- :peer, :peer_cert
+ :peer, :peer_cert, :trailing_metadata

  # client_invoke begins a client invocation.
  #
@@ -75,17 +74,10 @@ module GRPC
  # if a keyword value is a list, multiple metadata for it's key are sent
  #
  # @param call [Call] a call on which to start and invocation
- # @param q [CompletionQueue] the completion queue
  # @param metadata [Hash] the metadata
- def self.client_invoke(call, q, metadata = {})
+ def self.client_invoke(call, metadata = {})
  fail(TypeError, '!Core::Call') unless call.is_a? Core::Call
- unless q.is_a? Core::CompletionQueue
- fail(TypeError, '!Core::CompletionQueue')
- end
- metadata_tag = Object.new
- call.run_batch(q, metadata_tag, INFINITE_FUTURE,
- SEND_INITIAL_METADATA => metadata)
- metadata_tag
+ call.run_batch(SEND_INITIAL_METADATA => metadata)
  end

  # Creates an ActiveCall.
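
Internally, `client_invoke` now issues a single `run_batch` on the call, with no completion queue, tag, or deadline arguments. A minimal sketch of the new shape; the `call` object and metadata values are placeholders:

    # `call` is assumed to be a GRPC::Core::Call obtained from a channel.
    ops = { GRPC::Core::CallOps::SEND_INITIAL_METADATA => { 'x-request-id' => '42' } }
    call.run_batch(ops)  # previously: call.run_batch(q, tag, INFINITE_FUTURE, ops)
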
@@ -102,26 +94,21 @@ module GRPC
  # deadline is the absolute deadline for the call.
  #
  # @param call [Call] the call used by the ActiveCall
- # @param q [CompletionQueue] the completion queue used to accept
- # the call. This queue will be closed on call completion.
  # @param marshal [Function] f(obj)->string that marshal requests
  # @param unmarshal [Function] f(string)->obj that unmarshals responses
  # @param deadline [Fixnum] the deadline for the call to complete
- # @param metadata_tag [Object] the object use obtain metadata for clients
- # @param started [true|false] indicates if the call has begun
- def initialize(call, q, marshal, unmarshal, deadline, started: true,
- metadata_tag: nil)
+ # @param started [true|false] indicates that metadata was sent
+ # @param metadata_received [true|false] indicates if metadata has already
+ # been received. Should always be true for server calls
+ def initialize(call, marshal, unmarshal, deadline, started: true,
+ metadata_received: false)
  fail(TypeError, '!Core::Call') unless call.is_a? Core::Call
- unless q.is_a? Core::CompletionQueue
- fail(TypeError, '!Core::CompletionQueue')
- end
  @call = call
- @cq = q
  @deadline = deadline
  @marshal = marshal
- @started = started
  @unmarshal = unmarshal
- @metadata_tag = metadata_tag
+ @metadata_received = metadata_received
+ @metadata_sent = started
  @op_notifier = nil
  end

@@ -132,7 +119,7 @@ module GRPC
  end

  # cancelled indicates if the call was cancelled
- def cancelled
+ def cancelled?
  !@call.status.nil? && @call.status.code == Core::StatusCodes::CANCELLED
  end

@@ -168,8 +155,11 @@ module GRPC
  SEND_CLOSE_FROM_CLIENT => nil
  }
  ops[RECV_STATUS_ON_CLIENT] = nil if assert_finished
- batch_result = @call.run_batch(@cq, self, INFINITE_FUTURE, ops)
+ batch_result = @call.run_batch(ops)
  return unless assert_finished
+ unless batch_result.status.nil?
+ @call.trailing_metadata = batch_result.status.metadata
+ end
  @call.status = batch_result.status
  op_is_done
  batch_result.check_status
@@ -179,20 +169,14 @@ module GRPC
  #
  # It blocks until the remote endpoint acknowledges by sending a status.
  def finished
- batch_result = @call.run_batch(@cq, self, INFINITE_FUTURE,
- RECV_STATUS_ON_CLIENT => nil)
+ batch_result = @call.run_batch(RECV_STATUS_ON_CLIENT => nil)
  unless batch_result.status.nil?
- if @call.metadata.nil?
- @call.metadata = batch_result.status.metadata
- else
- @call.metadata.merge!(batch_result.status.metadata)
- end
+ @call.trailing_metadata = batch_result.status.metadata
  end
  @call.status = batch_result.status
  op_is_done
  batch_result.check_status
  @call.close
- @cq.close
  end

  # remote_send sends a request to the remote endpoint.
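
Status trailers now land on the call's `trailing_metadata` (delegated by `ActiveCall` and exposed on the `Operation` view further down) instead of being merged into `metadata`. A hedged sketch of reading them on the client; the stub, method path, request, and (un)marshal procs are placeholders:

    op = echo_stub.request_response('/echo.Echo/AnRpc', req,
                                    marshal, unmarshal,
                                    return_op: true,
                                    metadata: { 'x-request-id' => '42' })
    resp = op.execute                 # runs the RPC
    initial  = op.metadata            # initial metadata only
    trailers = op.trailing_metadata   # status trailers, kept separate now
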
@@ -203,9 +187,10 @@ module GRPC
  # @param marshalled [false, true] indicates if the object is already
  # marshalled.
  def remote_send(req, marshalled = false)
+ # TODO(murgatroid99): ensure metadata was sent
  GRPC.logger.debug("sending #{req}, marshalled? #{marshalled}")
  payload = marshalled ? req : @marshal.call(req)
- @call.run_batch(@cq, self, INFINITE_FUTURE, SEND_MESSAGE => payload)
+ @call.run_batch(SEND_MESSAGE => payload)
  end

  # send_status sends a status to the remote endpoint.
@@ -222,7 +207,7 @@ module GRPC
  SEND_STATUS_FROM_SERVER => Struct::Status.new(code, details, metadata)
  }
  ops[RECV_CLOSE_ON_SERVER] = nil if assert_finished
- @call.run_batch(@cq, self, INFINITE_FUTURE, ops)
+ @call.run_batch(ops)
  nil
  end

@@ -234,11 +219,11 @@ module GRPC
  # raising BadStatus
  def remote_read
  ops = { RECV_MESSAGE => nil }
- ops[RECV_INITIAL_METADATA] = nil unless @metadata_tag.nil?
- batch_result = @call.run_batch(@cq, self, INFINITE_FUTURE, ops)
- unless @metadata_tag.nil?
+ ops[RECV_INITIAL_METADATA] = nil unless @metadata_received
+ batch_result = @call.run_batch(ops)
+ unless @metadata_received
  @call.metadata = batch_result.metadata
- @metadata_tag = nil
+ @metadata_received = true
  end
  GRPC.logger.debug("received req: #{batch_result}")
  unless batch_result.nil? || batch_result.message.nil?
@@ -318,7 +303,7 @@ module GRPC
  # a list, multiple metadata for its key are sent
  # @return [Object] the response received from the server
  def request_response(req, metadata: {})
- start_call(metadata) unless @started
+ start_call(metadata)
  remote_send(req)
  writes_done(false)
  response = remote_read
@@ -342,7 +327,7 @@ module GRPC
  # a list, multiple metadata for its key are sent
  # @return [Object] the response received from the server
  def client_streamer(requests, metadata: {})
- start_call(metadata) unless @started
+ start_call(metadata)
  requests.each { |r| remote_send(r) }
  writes_done(false)
  response = remote_read
@@ -368,7 +353,7 @@ module GRPC
  # a list, multiple metadata for its key are sent
  # @return [Enumerator|nil] a response Enumerator
  def server_streamer(req, metadata: {})
- start_call(metadata) unless @started
+ start_call(metadata)
  remote_send(req)
  writes_done(false)
  replies = enum_for(:each_remote_read_then_finish)
@@ -407,10 +392,9 @@ module GRPC
  # a list, multiple metadata for its key are sent
  # @return [Enumerator, nil] a response Enumerator
  def bidi_streamer(requests, metadata: {}, &blk)
- start_call(metadata) unless @started
- bd = BidiCall.new(@call, @cq, @marshal, @unmarshal,
- metadata_tag: @metadata_tag)
- @metadata_tag = nil # run_on_client ensures metadata is read
+ start_call(metadata)
+ bd = BidiCall.new(@call, @marshal, @unmarshal,
+ metadata_received: @metadata_received)
  bd.run_on_client(requests, @op_notifier, &blk)
  end

@@ -426,7 +410,8 @@ module GRPC
  #
  # @param gen_each_reply [Proc] generates the BiDi stream replies
  def run_server_bidi(gen_each_reply)
- bd = BidiCall.new(@call, @cq, @marshal, @unmarshal)
+ bd = BidiCall.new(@call, @marshal, @unmarshal,
+ metadata_received: @metadata_received)
  bd.run_on_server(gen_each_reply)
  end

@@ -449,9 +434,9 @@ module GRPC
  # @param metadata [Hash] metadata to be sent to the server. If a value is
  # a list, multiple metadata for its key are sent
  def start_call(metadata = {})
- return if @started
- @metadata_tag = ActiveCall.client_invoke(@call, @cq, metadata)
- @started = true
+ return if @metadata_sent
+ @metadata_tag = ActiveCall.client_invoke(@call, metadata)
+ @metadata_sent = true
  end

  def self.view_class(*visible_methods)
@@ -468,18 +453,18 @@ module GRPC

  # SingleReqView limits access to an ActiveCall's methods for use in server
  # handlers that receive just one request.
- SingleReqView = view_class(:cancelled, :deadline, :metadata,
+ SingleReqView = view_class(:cancelled?, :deadline, :metadata,
  :output_metadata, :peer, :peer_cert)

  # MultiReqView limits access to an ActiveCall's methods for use in
  # server client_streamer handlers.
- MultiReqView = view_class(:cancelled, :deadline, :each_queued_msg,
+ MultiReqView = view_class(:cancelled?, :deadline, :each_queued_msg,
  :each_remote_read, :metadata, :output_metadata)

  # Operation limits access to an ActiveCall's methods for use as
  # a Operation on the client.
- Operation = view_class(:cancel, :cancelled, :deadline, :execute,
+ Operation = view_class(:cancel, :cancelled?, :deadline, :execute,
  :metadata, :status, :start_call, :wait, :write_flag,
- :write_flag=)
+ :write_flag=, :trailing_metadata)
  end
  end
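
Since the server-side views now expose `cancelled?` rather than `cancelled`, handlers should switch to the predicate form. A small sketch; the service and message names are placeholders:

    class EchoService
      # `call` is the SingleReqView handed to unary handlers.
      def an_rpc(req, call)
        GRPC.logger.info('client cancelled the call') if call.cancelled?  # previously: call.cancelled
        req
      end
    end
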
data/src/ruby/lib/grpc/generic/bidi_call.rb

@@ -52,23 +52,18 @@ module GRPC
  # deadline is the absolute deadline for the call.
  #
  # @param call [Call] the call used by the ActiveCall
- # @param q [CompletionQueue] the completion queue used to accept
- # the call
  # @param marshal [Function] f(obj)->string that marshal requests
  # @param unmarshal [Function] f(string)->obj that unmarshals responses
- # @param metadata_tag [Object] tag object used to collect metadata
- def initialize(call, q, marshal, unmarshal, metadata_tag: nil)
+ # @param metadata_received [true|false] indicates if metadata has already
+ # been received. Should always be true for server calls
+ def initialize(call, marshal, unmarshal, metadata_received: false)
  fail(ArgumentError, 'not a call') unless call.is_a? Core::Call
- unless q.is_a? Core::CompletionQueue
- fail(ArgumentError, 'not a CompletionQueue')
- end
  @call = call
- @cq = q
  @marshal = marshal
  @op_notifier = nil # signals completion on clients
  @readq = Queue.new
  @unmarshal = unmarshal
- @metadata_tag = metadata_tag
+ @metadata_received = metadata_received
  @reads_complete = false
  @writes_complete = false
  @complete = false
@@ -81,7 +76,7 @@ module GRPC
  # block that can be invoked with each response.
  #
  # @param requests the Enumerable of requests to send
- # @op_notifier a Notifier used to signal completion
+ # @param op_notifier a Notifier used to signal completion
  # @return an Enumerator of requests to yield
  def run_on_client(requests, op_notifier, &blk)
  @op_notifier = op_notifier
@@ -124,7 +119,6 @@ module GRPC
  @done_mutex.synchronize do
  return unless @reads_complete && @writes_complete && !@complete
  @call.close
- @cq.close
  @complete = true
  end
  end
@@ -132,11 +126,11 @@ module GRPC
  # performs a read using @call.run_batch, ensures metadata is set up
  def read_using_run_batch
  ops = { RECV_MESSAGE => nil }
- ops[RECV_INITIAL_METADATA] = nil unless @metadata_tag.nil?
- batch_result = @call.run_batch(@cq, self, INFINITE_FUTURE, ops)
- unless @metadata_tag.nil?
+ ops[RECV_INITIAL_METADATA] = nil unless @metadata_received
+ batch_result = @call.run_batch(ops)
+ unless @metadata_received
  @call.metadata = batch_result.metadata
- @metadata_tag = nil
+ @metadata_received = true
  end
  batch_result
  end
@@ -161,20 +155,26 @@

  def write_loop(requests, is_client: true)
  GRPC.logger.debug('bidi-write-loop: starting')
- write_tag = Object.new
  count = 0
  requests.each do |req|
  GRPC.logger.debug("bidi-write-loop: #{count}")
  count += 1
  payload = @marshal.call(req)
- @call.run_batch(@cq, write_tag, INFINITE_FUTURE,
- SEND_MESSAGE => payload)
+ # Fails if status already received
+ begin
+ @call.run_batch(SEND_MESSAGE => payload)
+ rescue GRPC::Core::CallError => e
+ # This is almost definitely caused by a status arriving while still
+ # writing. Don't re-throw the error
+ GRPC.logger.warn('bidi-write-loop: ended with error')
+ GRPC.logger.warn(e)
+ break
+ end
  end
  GRPC.logger.debug("bidi-write-loop: #{count} writes done")
  if is_client
  GRPC.logger.debug("bidi-write-loop: client sent #{count}, waiting")
- @call.run_batch(@cq, write_tag, INFINITE_FUTURE,
- SEND_CLOSE_FROM_CLIENT => nil)
+ @call.run_batch(SEND_CLOSE_FROM_CLIENT => nil)
  GRPC.logger.debug('bidi-write-loop: done')
  notify_done
  @writes_complete = true
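
The new guard matters because a `run_batch` write can raise `GRPC::Core::CallError` if the peer has already sent a final status while a message is still being written. The same pattern in isolation; `call` and `payload` are placeholders:

    begin
      call.run_batch(GRPC::Core::CallOps::SEND_MESSAGE => payload)
    rescue GRPC::Core::CallError => e
      # The peer most likely finished the call while we were still writing;
      # stop sending instead of propagating the error.
      GRPC.logger.warn("write aborted: #{e}")
    end
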
@@ -195,7 +195,6 @@ module GRPC
  Thread.new do
  GRPC.logger.debug('bidi-read-loop: starting')
  begin
- read_tag = Object.new
  count = 0
  # queue the initial read before beginning the loop
  loop do
@@ -208,8 +207,7 @@ module GRPC
  GRPC.logger.debug("bidi-read-loop: null batch #{batch_result}")

  if is_client
- batch_result = @call.run_batch(@cq, read_tag, INFINITE_FUTURE,
- RECV_STATUS_ON_CLIENT => nil)
+ batch_result = @call.run_batch(RECV_STATUS_ON_CLIENT => nil)
  @call.status = batch_result.status
  batch_result.check_status
  GRPC.logger.debug("bidi-read-loop: done status #{@call.status}")
data/src/ruby/lib/grpc/generic/client_stub.rb

@@ -34,7 +34,8 @@ require_relative '../version'
  module GRPC
  # rubocop:disable Metrics/ParameterLists

- # ClientStub represents an endpoint used to send requests to GRPC servers.
+ # ClientStub represents a client connection to a gRPC server, and can be used
+ # to send requests.
  class ClientStub
  include Core::StatusCodes
  include Core::TimeConsts
@@ -75,8 +76,9 @@ module GRPC
  # my_stub = ClientStub.new(example.host.com:50505,
  # :this_channel_is_insecure)
  #
- # Any arbitrary keyword arguments are treated as channel arguments used to
- # configure the RPC connection to the host.
+ # If a channel_override argument is passed, it will be used as the
+ # underlying channel. Otherwise, the channel_args argument will be used
+ # to construct a new underlying channel.
  #
  # There are some specific keyword args that are not used to configure the
  # channel:
@@ -90,19 +92,23 @@ module GRPC
  # when present, this is the default timeout used for calls
  #
  # @param host [String] the host the stub connects to
- # @param q [Core::CompletionQueue] used to wait for events - now deprecated
- # since each new active call gets its own separately
  # @param creds [Core::ChannelCredentials|Symbol] the channel credentials, or
- # :this_channel_is_insecure
+ # :this_channel_is_insecure, which explicitly indicates that the client
+ # should be created with an insecure connection. Note: this argument is
+ # ignored if the channel_override argument is provided.
  # @param channel_override [Core::Channel] a pre-created channel
  # @param timeout [Number] the default timeout to use in requests
- # @param channel_args [Hash] the channel arguments
- def initialize(host, q, creds,
+ # @param propagate_mask [Number] A bitwise combination of flags in
+ # GRPC::Core::PropagateMasks. Indicates how data should be propagated
+ # from parent server calls to child client calls if this client is being
+ # used within a gRPC server.
+ # @param channel_args [Hash] the channel arguments. Note: this argument is
+ # ignored if the channel_override argument is provided.
+ def initialize(host, creds,
  channel_override: nil,
  timeout: nil,
  propagate_mask: nil,
  channel_args: {})
- fail(TypeError, '!CompletionQueue') unless q.is_a?(Core::CompletionQueue)
  @ch = ClientStub.setup_channel(channel_override, host, creds,
  channel_args)
  alt_host = channel_args[Core::Channel::SSL_TARGET]
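
With the completion queue argument gone, stubs are constructed as `ClientStub.new(host, creds, ...)`. A hedged sketch against a local insecure server; the host, timeout, and channel args below are illustrative:

    require 'grpc'

    stub = GRPC::ClientStub.new('localhost:50051',
                                :this_channel_is_insecure,
                                timeout: 10,
                                channel_args: { 'grpc.primary_user_agent' => 'grpc-ruby-demo' })
    # previously: GRPC::ClientStub.new('localhost:50051',
    #                                  GRPC::Core::CompletionQueue.new,
    #                                  :this_channel_is_insecure)
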
@@ -392,11 +398,11 @@ module GRPC
  # @param marshal [Function] f(obj)->string that marshals requests
  # @param unmarshal [Function] f(string)->obj that unmarshals responses
  # @param deadline [Time] (optional) the time the request should complete
+ # @param return_op [true|false] return an Operation if true
  # @param parent [Core::Call] a prior call whose reserved metadata
  # will be propagated by this one.
  # @param credentials [Core::CallCredentials] credentials to use when making
  # the call
- # @param return_op [true|false] return an Operation if true
  # @param metadata [Hash] metadata to be sent to the server
  # @param blk [Block] when provided, is executed for each response
  # @return [Enumerator|nil|Operation] as discussed above
@@ -433,7 +439,8 @@ module GRPC
  # @param unmarshal [Function] f(string)->obj that unmarshals responses
  # @param parent [Grpc::Call] a parent call, available when calls are
  # made from server
- # @param timeout [TimeConst]
+ # @param credentials [Core::CallCredentials] credentials to use when making
+ # the call
  def new_active_call(method, marshal, unmarshal,
  deadline: nil,
  parent: nil,
@@ -441,15 +448,13 @@

  deadline = from_relative_time(@timeout) if deadline.nil?
  # Provide each new client call with its own completion queue
- call_queue = Core::CompletionQueue.new
- call = @ch.create_call(call_queue,
- parent, # parent call
+ call = @ch.create_call(parent, # parent call
  @propagate_mask, # propagation options
  method,
  nil, # host use nil,
  deadline)
  call.set_credentials! credentials unless credentials.nil?
- ActiveCall.new(call, call_queue, marshal, unmarshal, deadline,
+ ActiveCall.new(call, marshal, unmarshal, deadline,
  started: false)
  end
  end
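
`Channel#create_call` likewise drops its completion-queue argument, and the resulting call is wrapped without one. A sketch of driving the low-level API directly; the target, method path, deadline, and (un)marshal procs below are placeholder assumptions:

    require 'grpc'

    channel  = GRPC::Core::Channel.new('localhost:50051', {}, :this_channel_is_insecure)
    deadline = Time.now + 5

    call = channel.create_call(nil,                 # parent call
                               nil,                 # propagation mask
                               '/echo.Echo/AnRpc',  # fully qualified method (placeholder)
                               nil,                 # host override
                               deadline)

    # Placeholder (un)marshal procs; generated stubs use protobuf encode/decode.
    marshal   = ->(obj) { obj.to_s }
    unmarshal = ->(bytes) { bytes }

    active_call = GRPC::ActiveCall.new(call, marshal, unmarshal, deadline,
                                       started: false)
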