google-cloud-pubsub 0.26.0 → 0.27.0
- checksums.yaml +4 -4
- data/lib/google/cloud/pubsub.rb +67 -31
- data/lib/google/cloud/pubsub/async_publisher.rb +328 -0
- data/lib/google/cloud/pubsub/batch_publisher.rb +98 -0
- data/lib/google/cloud/pubsub/convert.rb +63 -0
- data/lib/google/cloud/pubsub/message.rb +13 -4
- data/lib/google/cloud/pubsub/project.rb +64 -184
- data/lib/google/cloud/pubsub/publish_result.rb +96 -0
- data/lib/google/cloud/pubsub/received_message.rb +36 -2
- data/lib/google/cloud/pubsub/service.rb +25 -28
- data/lib/google/cloud/pubsub/subscriber.rb +185 -0
- data/lib/google/cloud/pubsub/subscriber/async_pusher.rb +220 -0
- data/lib/google/cloud/pubsub/subscriber/enumerator_queue.rb +52 -0
- data/lib/google/cloud/pubsub/subscriber/stream.rb +376 -0
- data/lib/google/cloud/pubsub/subscription.rb +102 -72
- data/lib/google/cloud/pubsub/topic.rb +125 -32
- data/lib/google/cloud/pubsub/v1/publisher_client_config.json +1 -1
- data/lib/google/cloud/pubsub/v1/subscriber_client_config.json +10 -1
- data/lib/google/cloud/pubsub/version.rb +1 -1
- metadata +38 -3
- data/lib/google/cloud/pubsub/topic/publisher.rb +0 -86
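The headline changes in this release are asynchronous batched publishing (`Topic#publish_async`, backed by the new `AsyncPublisher`) and streaming message consumption (`Subscription#listen`, returning a `Subscriber`). A minimal sketch assembled from the doc comments in the diffs below (the output lines are illustrative):

```ruby
require "google/cloud/pubsub"

pubsub = Google::Cloud::Pubsub.new

# Asynchronous batched publishing via the new AsyncPublisher.
topic = pubsub.topic "my-topic"
topic.publish_async "task completed" do |result|
  if result.succeeded?
    puts "published: #{result.data}"
  else
    puts "failed: #{result.error}"
  end
end
topic.async_publisher.stop.wait!

# Streaming subscription via the new Subscriber.
sub = pubsub.subscription "my-topic-sub"
subscriber = sub.listen do |msg|
  # process msg
  msg.ack!
end
subscriber.start

# Shut down the subscriber when ready to stop receiving messages.
subscriber.stop.wait!
```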
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: abd5adf64822c8eefa9b20faf514b0af57414e64
+  data.tar.gz: a93679512d85b7c038980ce0144dad4c28dc87c6
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 0af752e4b5d18bc90dfab1fb702af73a092c128c800e53754bba04c16d6f2a6c12492b9f4e528120c97ba087e4b24fa38446534acd31334b503cc177232367d7
+  data.tar.gz: dae4bcaac46a4b3c2a85ef0f9e0292fec333ed8694eabf25d68cdbe9e4eda63744bf14b2b3dfc3c286d37bd25826815afa4c28d0e4c1b362299b306192b9c217
data/lib/google/cloud/pubsub.rb
CHANGED
@@ -147,7 +147,30 @@ module Google
 # this: :that
 # ```
 #
-#
+# Messages can also be published in batches asynchronously using
+# `publish_async`. (See {Google::Cloud::Pubsub::Topic#publish_async} and
+# {Google::Cloud::Pubsub::AsyncPublisher})
+#
+# ```ruby
+# require "google/cloud/pubsub"
+#
+# pubsub = Google::Cloud::Pubsub.new
+#
+# topic = pubsub.topic "my-topic"
+# topic.publish_async "task completed" do |result|
+#   if result.succeeded?
+#     log_publish_success result.data
+#   else
+#     log_publish_failure result.data, result.error
+#   end
+# end
+#
+# topic.async_publisher.stop.wait!
+# ```
+#
+# Or multiple messages can be published in batches at the same time by
+# passing a block to `publish`. (See
+# {Google::Cloud::Pubsub::BatchPublisher})
 #
 # ```ruby
 # require "google/cloud/pubsub"
@@ -199,6 +222,29 @@ module Google
 # msgs = sub.wait_for_messages
 # ```
 #
+# Messages can also be streamed from a subscription with a subscriber object
+# that can be created using `listen`. (See
+# {Google::Cloud::Pubsub::Subscription#listen} and
+# {Google::Cloud::Pubsub::Subscriber})
+#
+# ```ruby
+# require "google/cloud/pubsub"
+#
+# pubsub = Google::Cloud::Pubsub.new
+#
+# sub = pubsub.subscription "my-topic-sub"
+#
+# subscriber = sub.listen do |msg|
+#   # process msg
+#   msg.ack!
+# end
+#
+# subscriber.start
+#
+# # Shut down the subscriber when ready to stop receiving messages.
+# subscriber.stop.wait!
+# ```
+#
 # ## Acknowledging a Message
 #
 # Messages that are received can be acknowledged in Pub/Sub, marking the
@@ -311,9 +357,10 @@ module Google
 #
 # ## Listening for Messages
 #
-#
-#
-# {Google::Cloud::Pubsub::Subscription#listen}
+# A subscriber object can be created using `listen`, which streams messages
+# from the backend and processes them as they are received. (See
+# {Google::Cloud::Pubsub::Subscription#listen} and
+# {Google::Cloud::Pubsub::Subscriber})
 #
 # ```ruby
 # require "google/cloud/pubsub"
@@ -321,28 +368,22 @@ module Google
 # pubsub = Google::Cloud::Pubsub.new
 #
 # sub = pubsub.subscription "my-topic-sub"
-#
+#
+# subscriber = sub.listen do |msg|
 #   # process msg
+#   msg.ack!
 # end
-# ```
 #
-#
-# pulled per batch can be limited with the `max` option:
+# subscriber.start
 #
-#
-#
-#
-# pubsub = Google::Cloud::Pubsub.new
-#
-# sub = pubsub.subscription "my-topic-sub"
-# sub.listen max: 20 do |msg|
-#   # process msg
-# end
+# # Shut down the subscriber when ready to stop receiving messages.
+# subscriber.stop.wait!
 # ```
 #
-#
-#
-#
+# The subscriber object can be configured to control the number of
+# concurrent streams to open, the number of received messages to be
+# collected, and the number of threads each stream opens for concurrent
+# calls made to handle the received messages.
 #
 # ```ruby
 # require "google/cloud/pubsub"
@@ -350,19 +391,14 @@ module Google
 # pubsub = Google::Cloud::Pubsub.new
 #
 # sub = pubsub.subscription "my-topic-sub"
-# sub.listen autoack: true do |msg|
-#   # process msg
-# end
-# ```
 #
-#
-#
-#
-#
-#
-# require "google/cloud/pubsub"
+# subscriber = sub.listen threads: { callback: 16 } do |msg|
+#   # store the message somewhere before acknowledging
+#   store_in_backend msg.data # takes a few seconds
+#   msg.ack!
+# end
 #
-#
+# subscriber.start
 # ```
 #
 # ## Working Across Projects
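As the updated doc comments above show, the subscriber's thread usage can be tuned when it is created with `listen`. A small sketch of the `threads: { callback: ... }` option, with `store_in_backend` standing in for your own persistence call (as it does in the docs):

```ruby
require "google/cloud/pubsub"

pubsub = Google::Cloud::Pubsub.new
sub = pubsub.subscription "my-topic-sub"

# 16 callback threads let slow per-message work proceed concurrently
# while the stream keeps delivering messages.
subscriber = sub.listen threads: { callback: 16 } do |msg|
  store_in_backend msg.data # takes a few seconds
  msg.ack!
end

subscriber.start

# Shut down the subscriber when ready to stop receiving messages.
subscriber.stop.wait!
```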
data/lib/google/cloud/pubsub/async_publisher.rb
ADDED
@@ -0,0 +1,328 @@
+# Copyright 2017 Google Inc. All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+#     http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+
+
+require "monitor"
+require "concurrent"
+require "google/cloud/pubsub/publish_result"
+require "google/cloud/pubsub/service"
+
+module Google
+  module Cloud
+    module Pubsub
+      ##
+      # Used to publish multiple messages in batches to a topic. See
+      # {Google::Cloud::Pubsub::Topic#async_publisher}
+      #
+      # @example
+      #   require "google/cloud/pubsub"
+      #
+      #   pubsub = Google::Cloud::Pubsub.new
+      #
+      #   topic = pubsub.topic "my-topic"
+      #   topic.publish_async "task completed" do |result|
+      #     if result.succeeded?
+      #       log_publish_success result.data
+      #     else
+      #       log_publish_failure result.data, result.error
+      #     end
+      #   end
+      #
+      #   topic.async_publisher.stop.wait!
+      #
+      # @attr_reader [String] topic_name The name of the topic the messages
+      #   are published to. In the form of
+      #   "/projects/project-identifier/topics/topic-name".
+      # @attr_reader [Integer] max_bytes The maximum size of messages to be
+      #   collected before the batch is published. Default is 10,000,000
+      #   (10MB).
+      # @attr_reader [Integer] max_messages The maximum number of messages to
+      #   be collected before the batch is published. Default is 1,000.
+      # @attr_reader [Numeric] interval The number of seconds to collect
+      #   messages before the batch is published. Default is 0.25.
+      # @attr_reader [Numeric] publish_threads The number of threads used to
+      #   publish messages. Default is 4.
+      # @attr_reader [Numeric] callback_threads The number of threads to
+      #   handle the published messages' callbacks. Default is 8.
+      #
+      class AsyncPublisher
+        include MonitorMixin
+
+        attr_reader :topic_name, :max_bytes, :max_messages, :interval,
+                    :publish_threads, :callback_threads
+        ##
+        # @private Implementation accessors
+        attr_reader :service, :batch, :publish_thread_pool,
+                    :callback_thread_pool
+
+        ##
+        # @private Create a new instance of the object.
+        def initialize topic_name, service, max_bytes: 10000000,
+                       max_messages: 1000, interval: 0.25, threads: {}
+          @topic_name = service.topic_path topic_name
+          @service = service
+
+          @max_bytes = max_bytes
+          @max_messages = max_messages
+          @interval = interval
+          @publish_threads = (threads[:publish] || 4).to_i
+          @callback_threads = (threads[:callback] || 8).to_i
+
+          @cond = new_cond
+
+          # init MonitorMixin
+          super()
+        end
+
+        ##
+        # Add a message to the async publisher to be published to the topic.
+        # Messages will be collected in batches and published together.
+        # See {Google::Cloud::Pubsub::Topic#publish_async}
+        def publish data = nil, attributes = {}, &block
+          msg = create_pubsub_message data, attributes
+
+          synchronize do
+            fail "Can't publish when stopped." if @stopped
+
+            if @batch.nil?
+              @batch ||= Batch.new self
+              @batch.add msg, block
+            else
+              unless @batch.try_add msg, block
+                publish_batch!
+                @batch = Batch.new self
+                @batch.add msg, block
+              end
+            end
+
+            init_resources!
+
+            publish_batch! if @batch.ready?
+
+            @cond.signal
+          end
+          nil
+        end
+
+        ##
+        # Begins the process of stopping the publisher. Messages already in
+        # the queue will be published, but no new messages can be added. Use
+        # {#wait!} to block until the publisher is fully stopped and all
+        # pending messages have been published.
+        #
+        # @return [AsyncPublisher] returns self so calls can be chained.
+        def stop
+          synchronize do
+            break if @stopped
+
+            @stopped = true
+            publish_batch!
+            @cond.signal
+            @publish_thread_pool.shutdown if @publish_thread_pool
+          end
+
+          self
+        end
+
+        ##
+        # Blocks until the publisher is fully stopped, all pending messages
+        # have been published, and all callbacks have completed. Does not stop
+        # the publisher. To stop the publisher, first call {#stop} and then
+        # call {#wait!} to block until the publisher is stopped.
+        #
+        # @return [AsyncPublisher] returns self so calls can be chained.
+        def wait! timeout = nil
+          synchronize do
+            if @publish_thread_pool
+              @publish_thread_pool.wait_for_termination timeout
+            end
+
+            if @callback_thread_pool
+              @callback_thread_pool.shutdown
+              @callback_thread_pool.wait_for_termination timeout
+            end
+          end
+
+          self
+        end
+
+        ##
+        # Forces all messages in the current batch to be published
+        # immediately.
+        #
+        # @return [AsyncPublisher] returns self so calls can be chained.
+        def flush
+          synchronize do
+            publish_batch!
+            @cond.signal
+          end
+
+          self
+        end
+
+        ##
+        # Whether the publisher has been started.
+        #
+        # @return [boolean] `true` when started, `false` otherwise.
+        def started?
+          !stopped?
+        end
+
+        ##
+        # Whether the publisher has been stopped.
+        #
+        # @return [boolean] `true` when stopped, `false` otherwise.
+        def stopped?
+          synchronize { @stopped }
+        end
+
+        protected
+
+        def init_resources!
+          @first_published_at ||= Time.now
+          @publish_thread_pool ||= Concurrent::FixedThreadPool.new \
+            @publish_threads
+          @callback_thread_pool ||= Concurrent::FixedThreadPool.new \
+            @callback_threads
+          @thread ||= Thread.new { run_background }
+        end
+
+        def run_background
+          synchronize do
+            until @stopped
+              if @batch.nil?
+                @cond.wait
+                next
+              end
+
+              time_since_first_publish = Time.now - @first_published_at
+              if time_since_first_publish > @interval
+                # interval met, publish the batch...
+                publish_batch!
+                @cond.wait
+              else
+                # still waiting for the interval to publish the batch...
+                @cond.wait(@interval - time_since_first_publish)
+              end
+            end
+          end
+        end
+
+        def publish_batch!
+          return unless @batch
+
+          publish_batch_async @topic_name, @batch
+          @batch = nil
+          @first_published_at = nil
+        end
+
+        def publish_batch_async topic_name, batch
+          Concurrent::Future.new(executor: @publish_thread_pool) do
+            begin
+              grpc = @service.publish topic_name, batch.messages
+              batch.items.zip(Array(grpc.message_ids)) do |item, id|
+                next unless item.callback
+
+                item.msg.message_id = id
+                publish_result = PublishResult.from_grpc(item.msg)
+                execute_callback_async item.callback, publish_result
+              end
+            rescue => e
+              batch.items.each do |item|
+                next unless item.callback
+
+                publish_result = PublishResult.from_error(item.msg, e)
+                execute_callback_async item.callback, publish_result
+              end
+            end
+          end.execute
+        end
+
+        def execute_callback_async callback, publish_result
+          Concurrent::Future.new(executor: @callback_thread_pool) do
+            callback.call publish_result
+          end.execute
+        end
+
+        def create_pubsub_message data, attributes
+          attributes ||= {}
+          if data.is_a?(::Hash) && attributes.empty?
+            attributes = data
+            data = nil
+          end
+          # Convert IO-ish objects to strings
+          if data.respond_to?(:read) && data.respond_to?(:rewind)
+            data.rewind
+            data = data.read
+          end
+          # Convert data to encoded byte array to match the protobuf defn
+          data_bytes = String(data).dup.force_encoding("ASCII-8BIT").freeze
+
+          # Convert attributes to strings to match the protobuf definition
+          attributes = Hash[attributes.map { |k, v| [String(k), String(v)] }]
+
+          Google::Pubsub::V1::PubsubMessage.new data: data_bytes,
+                                                attributes: attributes
+        end
+
+        ##
+        # @private
+        class Batch
+          attr_reader :messages, :callbacks
+
+          def initialize publisher
+            @publisher = publisher
+            @messages = []
+            @callbacks = []
+          end
+
+          def add msg, callback
+            @messages << msg
+            @callbacks << callback
+          end
+
+          def try_add msg, callback
+            if total_message_count + 1 > @publisher.max_messages ||
+               total_message_bytes + msg.to_proto.size >= @publisher.max_bytes
+              return false
+            end
+            add msg, callback
+            true
+          end
+
+          def ready?
+            total_message_count >= @publisher.max_messages ||
+              total_message_bytes >= @publisher.max_bytes
+          end
+
+          def total_message_count
+            @messages.count
+          end
+
+          def total_message_bytes
+            @messages.map(&:to_proto).map(&:size).inject(0, :+)
+          end
+
+          def items
+            @messages.zip(@callbacks).map do |msg, callback|
+              Item.new msg, callback
+            end
+          end
+
+          Item = Struct.new :msg, :callback
+        end
+      end
+    end
+  end
+end
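For reference, a sketch of the publisher lifecycle exposed by the class above (`flush`, `stop`, `wait!`, `stopped?`). It assumes `Topic#async_publisher` returns the publisher created by the preceding `publish_async` calls, as in the class's own example; the Topic-side wiring lives in topic.rb, which is not reproduced in full here:

```ruby
require "google/cloud/pubsub"

pubsub = Google::Cloud::Pubsub.new
topic = pubsub.topic "my-topic"

# Messages are collected into a Batch and published from the publish
# thread pool once max_messages/max_bytes or the interval is reached.
10.times do |i|
  topic.publish_async "task #{i} completed" do |result|
    warn result.error.inspect unless result.succeeded?
  end
end

publisher = topic.async_publisher # created by publish_async above
publisher.flush                   # force the current batch out immediately
publisher.stop.wait!              # drain pending batches and callbacks
publisher.stopped?                # => true
```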