rdkafka 0.8.0 → 0.8.1
This page lists the files changed between rdkafka 0.8.0 and 0.8.1, followed by the diff of each changed file.
- checksums.yaml +4 -4
- data/.semaphore/semaphore.yml +23 -0
- data/CHANGELOG.md +6 -0
- data/README.md +5 -2
- data/docker-compose.yml +2 -0
- data/ext/Rakefile +1 -1
- data/lib/rdkafka.rb +7 -0
- data/lib/rdkafka/abstract_handle.rb +82 -0
- data/lib/rdkafka/admin.rb +144 -0
- data/lib/rdkafka/admin/create_topic_handle.rb +27 -0
- data/lib/rdkafka/admin/create_topic_report.rb +22 -0
- data/lib/rdkafka/admin/delete_topic_handle.rb +27 -0
- data/lib/rdkafka/admin/delete_topic_report.rb +22 -0
- data/lib/rdkafka/bindings.rb +44 -17
- data/lib/rdkafka/callbacks.rb +106 -0
- data/lib/rdkafka/config.rb +14 -1
- data/lib/rdkafka/consumer.rb +35 -5
- data/lib/rdkafka/error.rb +29 -3
- data/lib/rdkafka/metadata.rb +6 -5
- data/lib/rdkafka/producer.rb +13 -2
- data/lib/rdkafka/producer/delivery_handle.rb +7 -53
- data/lib/rdkafka/version.rb +1 -1
- data/spec/rdkafka/abstract_handle_spec.rb +114 -0
- data/spec/rdkafka/admin/create_topic_handle_spec.rb +52 -0
- data/spec/rdkafka/admin/create_topic_report_spec.rb +16 -0
- data/spec/rdkafka/admin/delete_topic_handle_spec.rb +52 -0
- data/spec/rdkafka/admin/delete_topic_report_spec.rb +16 -0
- data/spec/rdkafka/admin_spec.rb +192 -0
- data/spec/rdkafka/callbacks_spec.rb +20 -0
- data/spec/rdkafka/config_spec.rb +11 -0
- data/spec/rdkafka/consumer_spec.rb +34 -2
- data/spec/rdkafka/error_spec.rb +4 -0
- data/spec/rdkafka/metadata_spec.rb +78 -0
- data/spec/rdkafka/producer/delivery_handle_spec.rb +1 -41
- data/spec/rdkafka/producer_spec.rb +22 -0
- data/spec/spec_helper.rb +28 -11
- metadata +26 -3
- data/.travis.yml +0 -48
@@ -0,0 +1,106 @@
|
|
1
|
+
module Rdkafka
  # FFI callback functions plus the Ruby-side dispatchers they delegate to.
  # librdkafka invokes these from its background/poll threads.
  #
  # @private
  module Callbacks

    # Extracts attributes of a rd_kafka_topic_result_t
    #
    # @private
    class TopicResult
      attr_reader :result_error, :error_string, :result_name

      def initialize(topic_result_pointer)
        @result_error = Rdkafka::Bindings.rd_kafka_topic_result_error(topic_result_pointer)
        @error_string = Rdkafka::Bindings.rd_kafka_topic_result_error_string(topic_result_pointer)
        @result_name = Rdkafka::Bindings.rd_kafka_topic_result_name(topic_result_pointer)
      end

      # Builds one TopicResult per entry of a native array of
      # rd_kafka_topic_result_t pointers.
      #
      # @param count [Integer] number of entries in the native array
      # @param array_pointer [FFI::Pointer] pointer to the first entry
      # @return [Array<TopicResult>]
      def self.create_topic_results_from_array(count, array_pointer)
        # BUGFIX: the previous implementation advanced the pointer by one
        # *byte* per element ((array_pointer + (index - 1)).read_pointer),
        # which only happened to work because callers read element [0].
        # FFI's Pointer#+ is a byte offset; read_array_of_pointer advances
        # by the platform pointer size instead.
        array_pointer.read_array_of_pointer(count).map do |result_pointer|
          new(result_pointer)
        end
      end
    end

    # FFI Function used for Create Topic and Delete Topic callbacks
    BackgroundEventCallbackFunction = FFI::Function.new(
      :void, [:pointer, :pointer, :pointer]
    ) do |client_ptr, event_ptr, opaque_ptr|
      BackgroundEventCallback.call(client_ptr, event_ptr, opaque_ptr)
    end

    # Dispatches background events (admin operation results) to the handle
    # registered for them, then marks that handle as no longer pending.
    #
    # @private
    class BackgroundEventCallback
      def self.call(_, event_ptr, _)
        event_type = Rdkafka::Bindings.rd_kafka_event_type(event_ptr)
        if event_type == Rdkafka::Bindings::RD_KAFKA_EVENT_CREATETOPICS_RESULT
          process_create_topic(event_ptr)
        elsif event_type == Rdkafka::Bindings::RD_KAFKA_EVENT_DELETETOPICS_RESULT
          process_delete_topic(event_ptr)
        end
      end

      # NOTE(review): `private` has no effect on `def self.` methods in
      # Ruby; these helpers remain publicly callable. Kept as-is to avoid
      # changing the visible interface.
      private

      # Copies the first create-topic result into the waiting
      # CreateTopicHandle, if one is still registered.
      def self.process_create_topic(event_ptr)
        create_topics_result = Rdkafka::Bindings.rd_kafka_event_CreateTopics_result(event_ptr)

        # Get the number of create topic results.
        # BUGFIX: librdkafka writes the count through a size_t*, which is
        # 8 bytes on 64-bit platforms; the previous :int32 buffer was too
        # small and could be overrun. read_int remains safe for realistic
        # counts on little-endian platforms.
        pointer_to_size_t = FFI::MemoryPointer.new(:size_t)
        create_topic_result_array = Rdkafka::Bindings.rd_kafka_CreateTopics_result_topics(create_topics_result, pointer_to_size_t)
        create_topic_results = TopicResult.create_topic_results_from_array(pointer_to_size_t.read_int, create_topic_result_array)
        create_topic_handle_ptr = Rdkafka::Bindings.rd_kafka_event_opaque(event_ptr)

        if create_topic_handle = Rdkafka::Admin::CreateTopicHandle.remove(create_topic_handle_ptr.address)
          create_topic_handle[:response] = create_topic_results[0].result_error
          create_topic_handle[:error_string] = create_topic_results[0].error_string
          create_topic_handle[:result_name] = create_topic_results[0].result_name
          create_topic_handle[:pending] = false
        end
      end

      # Copies the first delete-topic result into the waiting
      # DeleteTopicHandle, if one is still registered.
      def self.process_delete_topic(event_ptr)
        delete_topics_result = Rdkafka::Bindings.rd_kafka_event_DeleteTopics_result(event_ptr)

        # Get the number of topic results.
        # BUGFIX: size_t-wide out-buffer, same rationale as in
        # process_create_topic above.
        pointer_to_size_t = FFI::MemoryPointer.new(:size_t)
        delete_topic_result_array = Rdkafka::Bindings.rd_kafka_DeleteTopics_result_topics(delete_topics_result, pointer_to_size_t)
        delete_topic_results = TopicResult.create_topic_results_from_array(pointer_to_size_t.read_int, delete_topic_result_array)
        delete_topic_handle_ptr = Rdkafka::Bindings.rd_kafka_event_opaque(event_ptr)

        if delete_topic_handle = Rdkafka::Admin::DeleteTopicHandle.remove(delete_topic_handle_ptr.address)
          delete_topic_handle[:response] = delete_topic_results[0].result_error
          delete_topic_handle[:error_string] = delete_topic_results[0].error_string
          delete_topic_handle[:result_name] = delete_topic_results[0].result_name
          delete_topic_handle[:pending] = false
        end
      end
    end

    # FFI Function used for Message Delivery callbacks

    DeliveryCallbackFunction = FFI::Function.new(
      :void, [:pointer, :pointer, :pointer]
    ) do |client_ptr, message_ptr, opaque_ptr|
      DeliveryCallback.call(client_ptr, message_ptr, opaque_ptr)
    end

    # Resolves a delivery report back to the DeliveryHandle that produced
    # the message (via the message's _private opaque) and, when configured,
    # invokes the user's delivery callback.
    #
    # @private
    class DeliveryCallback
      def self.call(_, message_ptr, opaque_ptr)
        message = Rdkafka::Bindings::Message.new(message_ptr)
        delivery_handle_ptr_address = message[:_private].address
        if delivery_handle = Rdkafka::Producer::DeliveryHandle.remove(delivery_handle_ptr_address)
          # Update delivery handle
          delivery_handle[:response] = message[:err]
          delivery_handle[:partition] = message[:partition]
          delivery_handle[:offset] = message[:offset]
          delivery_handle[:pending] = false
          # Call delivery callback on opaque
          if opaque = Rdkafka::Config.opaques[opaque_ptr.to_i]
            opaque.call_delivery_callback(Rdkafka::Producer::DeliveryReport.new(message[:partition], message[:offset], message[:err]))
          end
        end
      end
    end
  end
end
|
data/lib/rdkafka/config.rb
CHANGED
@@ -137,13 +137,26 @@ module Rdkafka
|
|
137
137
|
# Create Kafka config
|
138
138
|
config = native_config(opaque)
|
139
139
|
# Set callback to receive delivery reports on config
|
140
|
-
Rdkafka::Bindings.rd_kafka_conf_set_dr_msg_cb(config, Rdkafka::
|
140
|
+
Rdkafka::Bindings.rd_kafka_conf_set_dr_msg_cb(config, Rdkafka::Callbacks::DeliveryCallbackFunction)
|
141
141
|
# Return producer with Kafka client
|
142
142
|
Rdkafka::Producer.new(native_kafka(config, :rd_kafka_producer)).tap do |producer|
|
143
143
|
opaque.producer = producer
|
144
144
|
end
|
145
145
|
end
|
146
146
|
|
147
|
+
# Create an admin instance with this configuration.
|
148
|
+
#
|
149
|
+
# @raise [ConfigError] When the configuration contains invalid options
|
150
|
+
# @raise [ClientCreationError] When the native client cannot be created
|
151
|
+
#
|
152
|
+
# @return [Admin] The created admin instance
|
153
|
+
def admin
|
154
|
+
opaque = Opaque.new
|
155
|
+
config = native_config(opaque)
|
156
|
+
Rdkafka::Bindings.rd_kafka_conf_set_background_event_cb(config, Rdkafka::Callbacks::BackgroundEventCallbackFunction)
|
157
|
+
Rdkafka::Admin.new(native_kafka(config, :rd_kafka_producer))
|
158
|
+
end
|
159
|
+
|
147
160
|
# Error that is returned by the underlying rdkafka error if an invalid configuration option is present.
|
148
161
|
class ConfigError < RuntimeError; end
|
149
162
|
|
data/lib/rdkafka/consumer.rb
CHANGED
@@ -36,6 +36,8 @@ module Rdkafka
|
|
36
36
|
#
|
37
37
|
# @return [nil]
|
38
38
|
def subscribe(*topics)
|
39
|
+
closed_consumer_check(__method__)
|
40
|
+
|
39
41
|
# Create topic partition list with topics and no partition set
|
40
42
|
tpl = Rdkafka::Bindings.rd_kafka_topic_partition_list_new(topics.length)
|
41
43
|
|
@@ -49,7 +51,7 @@ module Rdkafka
|
|
49
51
|
raise Rdkafka::RdkafkaError.new(response, "Error subscribing to '#{topics.join(', ')}'")
|
50
52
|
end
|
51
53
|
ensure
|
52
|
-
Rdkafka::Bindings.rd_kafka_topic_partition_list_destroy(tpl)
|
54
|
+
Rdkafka::Bindings.rd_kafka_topic_partition_list_destroy(tpl) unless tpl.nil?
|
53
55
|
end
|
54
56
|
|
55
57
|
# Unsubscribe from all subscribed topics.
|
@@ -58,6 +60,8 @@ module Rdkafka
|
|
58
60
|
#
|
59
61
|
# @return [nil]
|
60
62
|
def unsubscribe
|
63
|
+
closed_consumer_check(__method__)
|
64
|
+
|
61
65
|
response = Rdkafka::Bindings.rd_kafka_unsubscribe(@native_kafka)
|
62
66
|
if response != 0
|
63
67
|
raise Rdkafka::RdkafkaError.new(response)
|
@@ -72,6 +76,8 @@ module Rdkafka
|
|
72
76
|
#
|
73
77
|
# @return [nil]
|
74
78
|
def pause(list)
|
79
|
+
closed_consumer_check(__method__)
|
80
|
+
|
75
81
|
unless list.is_a?(TopicPartitionList)
|
76
82
|
raise TypeError.new("list has to be a TopicPartitionList")
|
77
83
|
end
|
@@ -98,6 +104,8 @@ module Rdkafka
|
|
98
104
|
#
|
99
105
|
# @return [nil]
|
100
106
|
def resume(list)
|
107
|
+
closed_consumer_check(__method__)
|
108
|
+
|
101
109
|
unless list.is_a?(TopicPartitionList)
|
102
110
|
raise TypeError.new("list has to be a TopicPartitionList")
|
103
111
|
end
|
@@ -120,6 +128,8 @@ module Rdkafka
|
|
120
128
|
#
|
121
129
|
# @return [TopicPartitionList]
|
122
130
|
def subscription
|
131
|
+
closed_consumer_check(__method__)
|
132
|
+
|
123
133
|
ptr = FFI::MemoryPointer.new(:pointer)
|
124
134
|
response = Rdkafka::Bindings.rd_kafka_subscription(@native_kafka, ptr)
|
125
135
|
|
@@ -142,6 +152,8 @@ module Rdkafka
|
|
142
152
|
#
|
143
153
|
# @raise [RdkafkaError] When assigning fails
|
144
154
|
def assign(list)
|
155
|
+
closed_consumer_check(__method__)
|
156
|
+
|
145
157
|
unless list.is_a?(TopicPartitionList)
|
146
158
|
raise TypeError.new("list has to be a TopicPartitionList")
|
147
159
|
end
|
@@ -164,6 +176,8 @@ module Rdkafka
|
|
164
176
|
#
|
165
177
|
# @return [TopicPartitionList]
|
166
178
|
def assignment
|
179
|
+
closed_consumer_check(__method__)
|
180
|
+
|
167
181
|
ptr = FFI::MemoryPointer.new(:pointer)
|
168
182
|
response = Rdkafka::Bindings.rd_kafka_assignment(@native_kafka, ptr)
|
169
183
|
if response != 0
|
@@ -180,7 +194,7 @@ module Rdkafka
|
|
180
194
|
end
|
181
195
|
end
|
182
196
|
ensure
|
183
|
-
ptr.free
|
197
|
+
ptr.free unless ptr.nil?
|
184
198
|
end
|
185
199
|
|
186
200
|
# Return the current committed offset per partition for this consumer group.
|
@@ -193,6 +207,8 @@ module Rdkafka
|
|
193
207
|
#
|
194
208
|
# @return [TopicPartitionList]
|
195
209
|
def committed(list=nil, timeout_ms=1200)
|
210
|
+
closed_consumer_check(__method__)
|
211
|
+
|
196
212
|
if list.nil?
|
197
213
|
list = assignment
|
198
214
|
elsif !list.is_a?(TopicPartitionList)
|
@@ -222,6 +238,8 @@ module Rdkafka
|
|
222
238
|
#
|
223
239
|
# @return [Integer] The low and high watermark
|
224
240
|
def query_watermark_offsets(topic, partition, timeout_ms=200)
|
241
|
+
closed_consumer_check(__method__)
|
242
|
+
|
225
243
|
low = FFI::MemoryPointer.new(:int64, 1)
|
226
244
|
high = FFI::MemoryPointer.new(:int64, 1)
|
227
245
|
|
@@ -239,8 +257,8 @@ module Rdkafka
|
|
239
257
|
|
240
258
|
return low.read_array_of_int64(1).first, high.read_array_of_int64(1).first
|
241
259
|
ensure
|
242
|
-
low.free
|
243
|
-
high.free
|
260
|
+
low.free unless low.nil?
|
261
|
+
high.free unless high.nil?
|
244
262
|
end
|
245
263
|
|
246
264
|
# Calculate the consumer lag per partition for the provided topic partition list.
|
@@ -279,6 +297,7 @@ module Rdkafka
|
|
279
297
|
#
|
280
298
|
# @return [String, nil]
|
281
299
|
def cluster_id
|
300
|
+
closed_consumer_check(__method__)
|
282
301
|
Rdkafka::Bindings.rd_kafka_clusterid(@native_kafka)
|
283
302
|
end
|
284
303
|
|
@@ -288,6 +307,7 @@ module Rdkafka
|
|
288
307
|
#
|
289
308
|
# @return [String, nil]
|
290
309
|
def member_id
|
310
|
+
closed_consumer_check(__method__)
|
291
311
|
Rdkafka::Bindings.rd_kafka_memberid(@native_kafka)
|
292
312
|
end
|
293
313
|
|
@@ -301,6 +321,8 @@ module Rdkafka
|
|
301
321
|
#
|
302
322
|
# @return [nil]
|
303
323
|
def store_offset(message)
|
324
|
+
closed_consumer_check(__method__)
|
325
|
+
|
304
326
|
# rd_kafka_offset_store is one of the few calls that does not support
|
305
327
|
# a string as the topic, so create a native topic for it.
|
306
328
|
native_topic = Rdkafka::Bindings.rd_kafka_topic_new(
|
@@ -331,6 +353,8 @@ module Rdkafka
|
|
331
353
|
#
|
332
354
|
# @return [nil]
|
333
355
|
def seek(message)
|
356
|
+
closed_consumer_check(__method__)
|
357
|
+
|
334
358
|
# rd_kafka_offset_store is one of the few calls that does not support
|
335
359
|
# a string as the topic, so create a native topic for it.
|
336
360
|
native_topic = Rdkafka::Bindings.rd_kafka_topic_new(
|
@@ -369,6 +393,8 @@ module Rdkafka
|
|
369
393
|
#
|
370
394
|
# @return [nil]
|
371
395
|
def commit(list=nil, async=false)
|
396
|
+
closed_consumer_check(__method__)
|
397
|
+
|
372
398
|
if !list.nil? && !list.is_a?(TopicPartitionList)
|
373
399
|
raise TypeError.new("list has to be nil or a TopicPartitionList")
|
374
400
|
end
|
@@ -393,7 +419,7 @@ module Rdkafka
|
|
393
419
|
#
|
394
420
|
# @return [Message, nil] A message or nil if there was no new message within the timeout
|
395
421
|
def poll(timeout_ms)
|
396
|
-
|
422
|
+
closed_consumer_check(__method__)
|
397
423
|
|
398
424
|
message_ptr = Rdkafka::Bindings.rd_kafka_consumer_poll(@native_kafka, timeout_ms)
|
399
425
|
if message_ptr.null?
|
@@ -441,5 +467,9 @@ module Rdkafka
|
|
441
467
|
end
|
442
468
|
end
|
443
469
|
end
|
470
|
+
|
471
|
+
# Guard helper: raises when a public consumer method is invoked after
# the consumer has been closed.
#
# @param method [Symbol] name of the calling method, used in the error message
# @raise [ClosedConsumerError] if the native client handle has been released
def closed_consumer_check(method)
  return unless @native_kafka.nil?

  raise Rdkafka::ClosedConsumerError.new(method)
end
|
444
474
|
end
|
445
475
|
end
|
data/lib/rdkafka/error.rb
CHANGED
@@ -1,15 +1,27 @@
|
|
1
1
|
module Rdkafka
|
2
|
+
# Base error class.
|
3
|
+
class BaseError < RuntimeError; end
|
4
|
+
|
2
5
|
# Error returned by the underlying rdkafka library.
|
3
|
-
class RdkafkaError <
|
6
|
+
class RdkafkaError < BaseError
|
4
7
|
# The underlying raw error response
|
5
8
|
# @return [Integer]
|
6
|
-
attr_reader :rdkafka_response
|
9
|
+
attr_reader :rdkafka_response
|
10
|
+
|
11
|
+
# Prefix to be used for human readable representation
|
12
|
+
# @return [String]
|
13
|
+
attr_reader :message_prefix
|
14
|
+
|
15
|
+
# Error message sent by the broker
|
16
|
+
# @return [String]
|
17
|
+
attr_reader :broker_message
|
7
18
|
|
8
19
|
# @private
|
9
|
-
def initialize(response, message_prefix=nil)
|
20
|
+
def initialize(response, message_prefix=nil, broker_message: nil)
|
10
21
|
raise TypeError.new("Response has to be an integer") unless response.is_a? Integer
|
11
22
|
@rdkafka_response = response
|
12
23
|
@message_prefix = message_prefix
|
24
|
+
@broker_message = broker_message
|
13
25
|
end
|
14
26
|
|
15
27
|
# This error's code, for example `:partition_eof`, `:msg_size_too_large`.
|
@@ -57,4 +69,18 @@ module Rdkafka
|
|
57
69
|
@topic_partition_list = topic_partition_list
|
58
70
|
end
|
59
71
|
end
|
72
|
+
|
73
|
+
# Error class for public consumer method calls on a closed consumer.
|
74
|
+
class ClosedConsumerError < BaseError
|
75
|
+
def initialize(method)
|
76
|
+
super("Illegal call to #{method.to_s} on a closed consumer")
|
77
|
+
end
|
78
|
+
end
|
79
|
+
|
80
|
+
# Raised when a public producer method is called after the producer
# has been closed.
class ClosedProducerError < BaseError
  # @param method [Symbol, String] name of the method that was illegally called
  def initialize(method)
    super("Illegal call to #{method} on a closed producer")
  end
end
|
60
86
|
end
|
data/lib/rdkafka/metadata.rb
CHANGED
@@ -9,14 +9,15 @@ module Rdkafka
|
|
9
9
|
|
10
10
|
ptr = FFI::MemoryPointer.new(:pointer)
|
11
11
|
|
12
|
-
#
|
13
|
-
|
12
|
+
# If topic_flag is 1, we request info about *all* topics in the cluster. If topic_flag is 0,
|
13
|
+
# we only request info about locally known topics (or a single topic if one is passed in).
|
14
|
+
topic_flag = topic_name.nil? ? 1 : 0
|
14
15
|
|
15
16
|
# Retrieve the Metadata
|
16
17
|
result = Rdkafka::Bindings.rd_kafka_metadata(native_client, topic_flag, native_topic, ptr, 250)
|
17
18
|
|
18
19
|
# Error Handling
|
19
|
-
Rdkafka::
|
20
|
+
raise Rdkafka::RdkafkaError.new(result) unless result.zero?
|
20
21
|
|
21
22
|
metadata_from_native(ptr.read_pointer)
|
22
23
|
ensure
|
@@ -34,11 +35,11 @@ module Rdkafka
|
|
34
35
|
|
35
36
|
@topics = Array.new(metadata[:topics_count]) do |i|
|
36
37
|
topic = TopicMetadata.new(metadata[:topics_metadata] + (i * TopicMetadata.size))
|
37
|
-
Rdkafka::
|
38
|
+
raise Rdkafka::RdkafkaError.new(topic[:rd_kafka_resp_err]) unless topic[:rd_kafka_resp_err].zero?
|
38
39
|
|
39
40
|
partitions = Array.new(topic[:partition_count]) do |j|
|
40
41
|
partition = PartitionMetadata.new(topic[:partitions_metadata] + (j * PartitionMetadata.size))
|
41
|
-
Rdkafka::
|
42
|
+
raise Rdkafka::RdkafkaError.new(partition[:rd_kafka_resp_err]) unless partition[:rd_kafka_resp_err].zero?
|
42
43
|
partition.to_h
|
43
44
|
end
|
44
45
|
topic.to_h.merge!(partitions: partitions)
|
data/lib/rdkafka/producer.rb
CHANGED
@@ -11,6 +11,10 @@ module Rdkafka
|
|
11
11
|
def initialize(native_kafka)
|
12
12
|
@closing = false
|
13
13
|
@native_kafka = native_kafka
|
14
|
+
|
15
|
+
# Makes sure, that the producer gets closed before it gets GCed by Ruby
|
16
|
+
ObjectSpace.define_finalizer(self, proc { close })
|
17
|
+
|
14
18
|
# Start thread to poll client for delivery callbacks
|
15
19
|
@polling_thread = Thread.new do
|
16
20
|
loop do
|
@@ -55,7 +59,8 @@ module Rdkafka
|
|
55
59
|
# @return partition count [Integer,nil]
|
56
60
|
#
|
57
61
|
def partition_count(topic)
|
58
|
-
|
62
|
+
closed_producer_check(__method__)
|
63
|
+
Rdkafka::Metadata.new(@native_kafka, topic).topics&.first[:partition_count]
|
59
64
|
end
|
60
65
|
|
61
66
|
# Produces a message to a Kafka topic. The message is added to rdkafka's queue, call {DeliveryHandle#wait wait} on the returned delivery handle to make sure it is delivered.
|
@@ -74,6 +79,8 @@ module Rdkafka
|
|
74
79
|
#
|
75
80
|
# @return [DeliveryHandle] Delivery handle that can be used to wait for the result of producing this message
|
76
81
|
def produce(topic:, payload: nil, key: nil, partition: nil, partition_key: nil, timestamp: nil, headers: nil)
|
82
|
+
closed_producer_check(__method__)
|
83
|
+
|
77
84
|
# Start by checking and converting the input
|
78
85
|
|
79
86
|
# Get payload length
|
@@ -117,7 +124,7 @@ module Rdkafka
|
|
117
124
|
delivery_handle[:response] = -1
|
118
125
|
delivery_handle[:partition] = -1
|
119
126
|
delivery_handle[:offset] = -1
|
120
|
-
DeliveryHandle.register(delivery_handle
|
127
|
+
DeliveryHandle.register(delivery_handle)
|
121
128
|
|
122
129
|
args = [
|
123
130
|
:int, Rdkafka::Bindings::RD_KAFKA_VTYPE_TOPIC, :string, topic,
|
@@ -163,5 +170,9 @@ module Rdkafka
|
|
163
170
|
def call_delivery_callback(delivery_handle)
|
164
171
|
@delivery_callback.call(delivery_handle) if @delivery_callback
|
165
172
|
end
|
173
|
+
|
174
|
+
# Guard helper: raises when a public producer method is invoked after
# the producer has been closed.
#
# @param method [Symbol] name of the calling method, used in the error message
# @raise [ClosedProducerError] if the native client handle has been released
def closed_producer_check(method)
  return unless @native_kafka.nil?

  raise Rdkafka::ClosedProducerError.new(method)
end
|
166
177
|
end
|
167
178
|
end
|
@@ -2,67 +2,21 @@ module Rdkafka
|
|
2
2
|
class Producer
|
3
3
|
# Handle to wait for a delivery report which is returned when
|
4
4
|
# producing a message.
|
5
|
-
# Handle used to await the delivery report that librdkafka produces
# asynchronously for every message handed to the producer. Waiting,
# timeout handling and registration live in Rdkafka::AbstractHandle;
# this subclass only supplies the native layout and result conversion.
class DeliveryHandle < Rdkafka::AbstractHandle
  layout :pending,   :bool,
         :response,  :int,
         :partition, :int,
         :offset,    :int64

  # Human-readable name of the awaited operation, used in error messages.
  #
  # @return [String]
  def operation_name
    "delivery"
  end

  # Builds the final result once the handle is no longer pending.
  #
  # @return [DeliveryReport] report with the delivered message's partition and offset
  def create_result
    DeliveryReport.new(self[:partition], self[:offset])
  end
end
|
67
21
|
end
|
68
22
|
end
|