karafka-rdkafka 0.12.4 → 0.13.0.beta1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60) hide show
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.github/workflows/ci.yml +1 -1
  4. data/CHANGELOG.md +21 -2
  5. data/Gemfile +2 -0
  6. data/README.md +26 -0
  7. data/Rakefile +2 -0
  8. data/certs/cert_chain.pem +21 -21
  9. data/certs/karafka-pro.pem +11 -0
  10. data/ext/Rakefile +26 -53
  11. data/karafka-rdkafka.gemspec +2 -0
  12. data/lib/rdkafka/abstract_handle.rb +2 -0
  13. data/lib/rdkafka/admin/create_topic_handle.rb +2 -0
  14. data/lib/rdkafka/admin/create_topic_report.rb +2 -0
  15. data/lib/rdkafka/admin/delete_topic_handle.rb +2 -0
  16. data/lib/rdkafka/admin/delete_topic_report.rb +2 -0
  17. data/lib/rdkafka/admin.rb +95 -73
  18. data/lib/rdkafka/bindings.rb +52 -37
  19. data/lib/rdkafka/callbacks.rb +2 -0
  20. data/lib/rdkafka/config.rb +13 -10
  21. data/lib/rdkafka/consumer/headers.rb +24 -7
  22. data/lib/rdkafka/consumer/message.rb +3 -1
  23. data/lib/rdkafka/consumer/partition.rb +2 -0
  24. data/lib/rdkafka/consumer/topic_partition_list.rb +2 -0
  25. data/lib/rdkafka/consumer.rb +100 -44
  26. data/lib/rdkafka/error.rb +9 -0
  27. data/lib/rdkafka/metadata.rb +25 -2
  28. data/lib/rdkafka/native_kafka.rb +83 -0
  29. data/lib/rdkafka/producer/delivery_handle.rb +2 -0
  30. data/lib/rdkafka/producer/delivery_report.rb +3 -1
  31. data/lib/rdkafka/producer.rb +75 -12
  32. data/lib/rdkafka/version.rb +3 -1
  33. data/lib/rdkafka.rb +3 -1
  34. data/spec/rdkafka/abstract_handle_spec.rb +2 -0
  35. data/spec/rdkafka/admin/create_topic_handle_spec.rb +2 -0
  36. data/spec/rdkafka/admin/create_topic_report_spec.rb +2 -0
  37. data/spec/rdkafka/admin/delete_topic_handle_spec.rb +2 -0
  38. data/spec/rdkafka/admin/delete_topic_report_spec.rb +2 -0
  39. data/spec/rdkafka/admin_spec.rb +4 -3
  40. data/spec/rdkafka/bindings_spec.rb +2 -0
  41. data/spec/rdkafka/callbacks_spec.rb +2 -0
  42. data/spec/rdkafka/config_spec.rb +17 -2
  43. data/spec/rdkafka/consumer/headers_spec.rb +62 -0
  44. data/spec/rdkafka/consumer/message_spec.rb +2 -0
  45. data/spec/rdkafka/consumer/partition_spec.rb +2 -0
  46. data/spec/rdkafka/consumer/topic_partition_list_spec.rb +2 -0
  47. data/spec/rdkafka/consumer_spec.rb +124 -22
  48. data/spec/rdkafka/error_spec.rb +2 -0
  49. data/spec/rdkafka/metadata_spec.rb +2 -0
  50. data/spec/rdkafka/{producer/client_spec.rb → native_kafka_spec.rb} +13 -34
  51. data/spec/rdkafka/producer/delivery_handle_spec.rb +2 -0
  52. data/spec/rdkafka/producer/delivery_report_spec.rb +4 -2
  53. data/spec/rdkafka/producer_spec.rb +118 -17
  54. data/spec/spec_helper.rb +17 -1
  55. data.tar.gz.sig +0 -0
  56. metadata +33 -33
  57. metadata.gz.sig +0 -0
  58. data/bin/console +0 -11
  59. data/dist/librdkafka_2.0.2.tar.gz +0 -0
  60. data/lib/rdkafka/producer/client.rb +0 -47
@@ -1,3 +1,5 @@
1
+ # frozen_string_literal: true
2
+
1
3
  require "ffi"
2
4
  require "json"
3
5
  require "logger"
@@ -15,7 +17,7 @@ module Rdkafka
15
17
  end
16
18
  end
17
19
 
18
- ffi_lib File.join(File.dirname(__FILE__), "../../ext/librdkafka.#{lib_extension}")
20
+ ffi_lib File.join(__dir__, "../../ext/librdkafka.#{lib_extension}")
19
21
 
20
22
  RD_KAFKA_RESP_ERR__ASSIGN_PARTITIONS = -175
21
23
  RD_KAFKA_RESP_ERR__REVOKE_PARTITIONS = -174
@@ -33,15 +35,17 @@ module Rdkafka
33
35
 
34
36
  # Polling
35
37
 
38
+ attach_function :rd_kafka_flush, [:pointer, :int], :void, blocking: true
36
39
  attach_function :rd_kafka_poll, [:pointer, :int], :void, blocking: true
37
40
  attach_function :rd_kafka_outq_len, [:pointer], :int, blocking: true
38
41
 
39
42
  # Metadata
40
43
 
41
- attach_function :rd_kafka_memberid, [:pointer], :string
42
- attach_function :rd_kafka_clusterid, [:pointer], :string
43
- attach_function :rd_kafka_metadata, [:pointer, :int, :pointer, :pointer, :int], :int
44
- attach_function :rd_kafka_metadata_destroy, [:pointer], :void
44
+ attach_function :rd_kafka_name, [:pointer], :string, blocking: true
45
+ attach_function :rd_kafka_memberid, [:pointer], :string, blocking: true
46
+ attach_function :rd_kafka_clusterid, [:pointer], :string, blocking: true
47
+ attach_function :rd_kafka_metadata, [:pointer, :int, :pointer, :pointer, :int], :int, blocking: true
48
+ attach_function :rd_kafka_metadata_destroy, [:pointer], :void, blocking: true
45
49
 
46
50
  # Message struct
47
51
 
@@ -110,6 +114,7 @@ module Rdkafka
110
114
  attach_function :rd_kafka_conf_set_stats_cb, [:pointer, :stats_cb], :void
111
115
  callback :error_cb, [:pointer, :int, :string, :pointer], :void
112
116
  attach_function :rd_kafka_conf_set_error_cb, [:pointer, :error_cb], :void
117
+ attach_function :rd_kafka_rebalance_protocol, [:pointer], :string
113
118
 
114
119
  # Log queue
115
120
  attach_function :rd_kafka_set_log_queue, [:pointer, :pointer], :void
@@ -166,24 +171,28 @@ module Rdkafka
166
171
  ]
167
172
 
168
173
  attach_function :rd_kafka_new, [:kafka_type, :pointer, :pointer, :int], :pointer
174
+
169
175
  attach_function :rd_kafka_destroy, [:pointer], :void
170
176
 
171
177
  # Consumer
172
178
 
173
- attach_function :rd_kafka_subscribe, [:pointer, :pointer], :int
174
- attach_function :rd_kafka_unsubscribe, [:pointer], :int
175
- attach_function :rd_kafka_subscription, [:pointer, :pointer], :int
176
- attach_function :rd_kafka_assign, [:pointer, :pointer], :int
177
- attach_function :rd_kafka_assignment, [:pointer, :pointer], :int
178
- attach_function :rd_kafka_committed, [:pointer, :pointer, :int], :int
179
+ attach_function :rd_kafka_subscribe, [:pointer, :pointer], :int, blocking: true
180
+ attach_function :rd_kafka_unsubscribe, [:pointer], :int, blocking: true
181
+ attach_function :rd_kafka_subscription, [:pointer, :pointer], :int, blocking: true
182
+ attach_function :rd_kafka_assign, [:pointer, :pointer], :int, blocking: true
183
+ attach_function :rd_kafka_incremental_assign, [:pointer, :pointer], :int, blocking: true
184
+ attach_function :rd_kafka_incremental_unassign, [:pointer, :pointer], :int, blocking: true
185
+ attach_function :rd_kafka_assignment, [:pointer, :pointer], :int, blocking: true
186
+ attach_function :rd_kafka_assignment_lost, [:pointer], :int, blocking: true
187
+ attach_function :rd_kafka_committed, [:pointer, :pointer, :int], :int, blocking: true
179
188
  attach_function :rd_kafka_commit, [:pointer, :pointer, :bool], :int, blocking: true
180
- attach_function :rd_kafka_poll_set_consumer, [:pointer], :void
189
+ attach_function :rd_kafka_poll_set_consumer, [:pointer], :void, blocking: true
181
190
  attach_function :rd_kafka_consumer_poll, [:pointer, :int], :pointer, blocking: true
182
191
  attach_function :rd_kafka_consumer_close, [:pointer], :void, blocking: true
183
- attach_function :rd_kafka_offset_store, [:pointer, :int32, :int64], :int
184
- attach_function :rd_kafka_pause_partitions, [:pointer, :pointer], :int
185
- attach_function :rd_kafka_resume_partitions, [:pointer, :pointer], :int
186
- attach_function :rd_kafka_seek, [:pointer, :int32, :int64, :int], :int
192
+ attach_function :rd_kafka_offset_store, [:pointer, :int32, :int64], :int, blocking: true
193
+ attach_function :rd_kafka_pause_partitions, [:pointer, :pointer], :int, blocking: true
194
+ attach_function :rd_kafka_resume_partitions, [:pointer, :pointer], :int, blocking: true
195
+ attach_function :rd_kafka_seek, [:pointer, :int32, :int64, :int], :int, blocking: true
187
196
 
188
197
  # Headers
189
198
  attach_function :rd_kafka_header_get_all, [:pointer, :size_t, :pointer, :pointer, SizePtr], :int
@@ -192,30 +201,36 @@ module Rdkafka
192
201
  # Rebalance
193
202
 
194
203
  callback :rebalance_cb_function, [:pointer, :int, :pointer, :pointer], :void
195
- attach_function :rd_kafka_conf_set_rebalance_cb, [:pointer, :rebalance_cb_function], :void
204
+ attach_function :rd_kafka_conf_set_rebalance_cb, [:pointer, :rebalance_cb_function], :void, blocking: true
196
205
 
197
206
  RebalanceCallback = FFI::Function.new(
198
207
  :void, [:pointer, :int, :pointer, :pointer]
199
208
  ) do |client_ptr, code, partitions_ptr, opaque_ptr|
200
209
  case code
201
210
  when RD_KAFKA_RESP_ERR__ASSIGN_PARTITIONS
202
- Rdkafka::Bindings.rd_kafka_assign(client_ptr, partitions_ptr)
211
+ if Rdkafka::Bindings.rd_kafka_rebalance_protocol(client_ptr) == "COOPERATIVE"
212
+ Rdkafka::Bindings.rd_kafka_incremental_assign(client_ptr, partitions_ptr)
213
+ else
214
+ Rdkafka::Bindings.rd_kafka_assign(client_ptr, partitions_ptr)
215
+ end
203
216
  else # RD_KAFKA_RESP_ERR__REVOKE_PARTITIONS or errors
204
- Rdkafka::Bindings.rd_kafka_assign(client_ptr, FFI::Pointer::NULL)
217
+ if Rdkafka::Bindings.rd_kafka_rebalance_protocol(client_ptr) == "COOPERATIVE"
218
+ Rdkafka::Bindings.rd_kafka_incremental_unassign(client_ptr, partitions_ptr)
219
+ else
220
+ Rdkafka::Bindings.rd_kafka_assign(client_ptr, FFI::Pointer::NULL)
221
+ end
205
222
  end
206
223
 
207
224
  opaque = Rdkafka::Config.opaques[opaque_ptr.to_i]
208
225
  return unless opaque
209
226
 
210
227
  tpl = Rdkafka::Consumer::TopicPartitionList.from_native_tpl(partitions_ptr).freeze
211
- consumer = Rdkafka::Consumer.new(client_ptr)
212
-
213
228
  begin
214
229
  case code
215
230
  when RD_KAFKA_RESP_ERR__ASSIGN_PARTITIONS
216
- opaque.call_on_partitions_assigned(consumer, tpl)
231
+ opaque.call_on_partitions_assigned(tpl)
217
232
  when RD_KAFKA_RESP_ERR__REVOKE_PARTITIONS
218
- opaque.call_on_partitions_revoked(consumer, tpl)
233
+ opaque.call_on_partitions_revoked(tpl)
219
234
  end
220
235
  rescue Exception => err
221
236
  Rdkafka::Config.logger.error("Unhandled exception: #{err.class} - #{err.message}")
@@ -242,7 +257,7 @@ module Rdkafka
242
257
 
243
258
  RD_KAFKA_MSG_F_COPY = 0x2
244
259
 
245
- attach_function :rd_kafka_producev, [:pointer, :varargs], :int
260
+ attach_function :rd_kafka_producev, [:pointer, :varargs], :int, blocking: true
246
261
  callback :delivery_cb, [:pointer, :pointer, :pointer], :void
247
262
  attach_function :rd_kafka_conf_set_dr_msg_cb, [:pointer, :delivery_cb], :void
248
263
 
@@ -257,11 +272,11 @@ module Rdkafka
257
272
  # Return RD_KAFKA_PARTITION_UA(unassigned partition) when partition count is nil/zero.
258
273
  return -1 unless partition_count&.nonzero?
259
274
 
260
- str_ptr = FFI::MemoryPointer.from_string(str)
275
+ str_ptr = str.empty? ? FFI::MemoryPointer::NULL : FFI::MemoryPointer.from_string(str)
261
276
  method_name = PARTITIONERS.fetch(partitioner_name) do
262
277
  raise Rdkafka::Config::ConfigError.new("Unknown partitioner: #{partitioner_name}")
263
278
  end
264
- public_send(method_name, nil, str_ptr, str.size, partition_count, nil, nil)
279
+ public_send(method_name, nil, str_ptr, str.size > 0 ? str.size : 1, partition_count, nil, nil)
265
280
  end
266
281
 
267
282
  # Create Topics
@@ -269,23 +284,23 @@ module Rdkafka
269
284
  RD_KAFKA_ADMIN_OP_CREATETOPICS = 1 # rd_kafka_admin_op_t
270
285
  RD_KAFKA_EVENT_CREATETOPICS_RESULT = 100 # rd_kafka_event_type_t
271
286
 
272
- attach_function :rd_kafka_CreateTopics, [:pointer, :pointer, :size_t, :pointer, :pointer], :void
273
- attach_function :rd_kafka_NewTopic_new, [:pointer, :size_t, :size_t, :pointer, :size_t], :pointer
274
- attach_function :rd_kafka_NewTopic_set_config, [:pointer, :string, :string], :int32
275
- attach_function :rd_kafka_NewTopic_destroy, [:pointer], :void
276
- attach_function :rd_kafka_event_CreateTopics_result, [:pointer], :pointer
277
- attach_function :rd_kafka_CreateTopics_result_topics, [:pointer, :pointer], :pointer
287
+ attach_function :rd_kafka_CreateTopics, [:pointer, :pointer, :size_t, :pointer, :pointer], :void, blocking: true
288
+ attach_function :rd_kafka_NewTopic_new, [:pointer, :size_t, :size_t, :pointer, :size_t], :pointer, blocking: true
289
+ attach_function :rd_kafka_NewTopic_set_config, [:pointer, :string, :string], :int32, blocking: true
290
+ attach_function :rd_kafka_NewTopic_destroy, [:pointer], :void, blocking: true
291
+ attach_function :rd_kafka_event_CreateTopics_result, [:pointer], :pointer, blocking: true
292
+ attach_function :rd_kafka_CreateTopics_result_topics, [:pointer, :pointer], :pointer, blocking: true
278
293
 
279
294
  # Delete Topics
280
295
 
281
296
  RD_KAFKA_ADMIN_OP_DELETETOPICS = 2 # rd_kafka_admin_op_t
282
297
  RD_KAFKA_EVENT_DELETETOPICS_RESULT = 101 # rd_kafka_event_type_t
283
298
 
284
- attach_function :rd_kafka_DeleteTopics, [:pointer, :pointer, :size_t, :pointer, :pointer], :int32
285
- attach_function :rd_kafka_DeleteTopic_new, [:pointer], :pointer
286
- attach_function :rd_kafka_DeleteTopic_destroy, [:pointer], :void
287
- attach_function :rd_kafka_event_DeleteTopics_result, [:pointer], :pointer
288
- attach_function :rd_kafka_DeleteTopics_result_topics, [:pointer, :pointer], :pointer
299
+ attach_function :rd_kafka_DeleteTopics, [:pointer, :pointer, :size_t, :pointer, :pointer], :int32, blocking: true
300
+ attach_function :rd_kafka_DeleteTopic_new, [:pointer], :pointer, blocking: true
301
+ attach_function :rd_kafka_DeleteTopic_destroy, [:pointer], :void, blocking: true
302
+ attach_function :rd_kafka_event_DeleteTopics_result, [:pointer], :pointer, blocking: true
303
+ attach_function :rd_kafka_DeleteTopics_result_topics, [:pointer, :pointer], :pointer, blocking: true
289
304
 
290
305
  # Create partitions
291
306
  RD_KAFKA_ADMIN_OP_CREATEPARTITIONS = 3
@@ -1,3 +1,5 @@
1
+ # frozen_string_literal: true
2
+
1
3
  module Rdkafka
2
4
  module Callbacks
3
5
 
@@ -1,3 +1,5 @@
1
+ # frozen_string_literal: true
2
+
1
3
  require "logger"
2
4
 
3
5
  module Rdkafka
@@ -30,7 +32,6 @@ module Rdkafka
30
32
  @@logger
31
33
  end
32
34
 
33
-
34
35
  # Returns a queue whose contents will be passed to the configured logger. Each entry
35
36
  # should follow the format [Logger::Severity, String]. The benefit over calling the
36
37
  # logger directly is that this is safe to use from trap contexts.
@@ -47,7 +48,7 @@ module Rdkafka
47
48
  # @return [nil]
48
49
  def self.logger=(logger)
49
50
  raise NoLoggerError if logger.nil?
50
- @@logger=logger
51
+ @@logger = logger
51
52
  end
52
53
 
53
54
  # Set a callback that will be called every time the underlying client emits statistics.
@@ -156,13 +157,14 @@ module Rdkafka
156
157
  Rdkafka::Bindings.rd_kafka_conf_set_rebalance_cb(config, Rdkafka::Bindings::RebalanceCallback)
157
158
  end
158
159
 
160
+ # Create native client
159
161
  kafka = native_kafka(config, :rd_kafka_consumer)
160
162
 
161
163
  # Redirect the main queue to the consumer
162
164
  Rdkafka::Bindings.rd_kafka_poll_set_consumer(kafka)
163
165
 
164
166
  # Return consumer with Kafka client
165
- Rdkafka::Consumer.new(kafka)
167
+ Rdkafka::Consumer.new(Rdkafka::NativeKafka.new(kafka, run_polling_thread: false))
166
168
  end
167
169
 
168
170
  # Create a producer with this configuration.
@@ -179,7 +181,8 @@ module Rdkafka
179
181
  # Set callback to receive delivery reports on config
180
182
  Rdkafka::Bindings.rd_kafka_conf_set_dr_msg_cb(config, Rdkafka::Callbacks::DeliveryCallbackFunction)
181
183
  # Return producer with Kafka client
182
- Rdkafka::Producer.new(Rdkafka::Producer::Client.new(native_kafka(config, :rd_kafka_producer)), self[:partitioner]).tap do |producer|
184
+ partitioner_name = self[:partitioner] || self["partitioner"]
185
+ Rdkafka::Producer.new(Rdkafka::NativeKafka.new(native_kafka(config, :rd_kafka_producer), run_polling_thread: true), partitioner_name).tap do |producer|
183
186
  opaque.producer = producer
184
187
  end
185
188
  end
@@ -194,7 +197,7 @@ module Rdkafka
194
197
  opaque = Opaque.new
195
198
  config = native_config(opaque)
196
199
  Rdkafka::Bindings.rd_kafka_conf_set_background_event_cb(config, Rdkafka::Callbacks::BackgroundEventCallbackFunction)
197
- Rdkafka::Admin.new(native_kafka(config, :rd_kafka_producer))
200
+ Rdkafka::Admin.new(Rdkafka::NativeKafka.new(native_kafka(config, :rd_kafka_producer), run_polling_thread: true))
198
201
  end
199
202
 
200
203
  # Error that is returned by the underlying rdkafka error if an invalid configuration option is present.
@@ -210,7 +213,7 @@ module Rdkafka
210
213
 
211
214
  # This method is only intended to be used to create a client,
212
215
  # using it in another way will leak memory.
213
- def native_config(opaque=nil)
216
+ def native_config(opaque = nil)
214
217
  Rdkafka::Bindings.rd_kafka_conf_new.tap do |config|
215
218
  # Create config
216
219
  @config_hash.merge(REQUIRED_CONFIG).each do |key, value|
@@ -282,18 +285,18 @@ module Rdkafka
282
285
  producer.call_delivery_callback(delivery_report, delivery_handle) if producer
283
286
  end
284
287
 
285
- def call_on_partitions_assigned(consumer, list)
288
+ def call_on_partitions_assigned(list)
286
289
  return unless consumer_rebalance_listener
287
290
  return unless consumer_rebalance_listener.respond_to?(:on_partitions_assigned)
288
291
 
289
- consumer_rebalance_listener.on_partitions_assigned(consumer, list)
292
+ consumer_rebalance_listener.on_partitions_assigned(list)
290
293
  end
291
294
 
292
- def call_on_partitions_revoked(consumer, list)
295
+ def call_on_partitions_revoked(list)
293
296
  return unless consumer_rebalance_listener
294
297
  return unless consumer_rebalance_listener.respond_to?(:on_partitions_revoked)
295
298
 
296
- consumer_rebalance_listener.on_partitions_revoked(consumer, list)
299
+ consumer_rebalance_listener.on_partitions_revoked(list)
297
300
  end
298
301
  end
299
302
  end
@@ -1,10 +1,26 @@
1
+ # frozen_string_literal: true
2
+
1
3
  module Rdkafka
2
4
  class Consumer
3
- # A message headers
4
- class Headers
5
- # Reads a native kafka's message header into ruby's hash
5
+ # Interface to return headers for a consumer message
6
+ module Headers
7
+ class HashWithSymbolKeysTreatedLikeStrings < Hash
8
+ def [](key)
9
+ if key.is_a?(Symbol)
10
+ Kernel.warn("rdkafka deprecation warning: header access with Symbol key #{key.inspect} treated as a String. " \
11
+ "Please change your code to use String keys to avoid this warning. Symbol keys will break in version 1.")
12
+ super(key.to_s)
13
+ else
14
+ super
15
+ end
16
+ end
17
+ end
18
+
19
+ # Reads a librdkafka native message's headers and returns them as a Ruby Hash
20
+ #
21
+ # @param [librdkafka message] native_message
6
22
  #
7
- # @return [Hash<String, String>] a message headers
23
+ # @return [Hash<String, String>] headers Hash for the native_message
8
24
  #
9
25
  # @raise [Rdkafka::RdkafkaError] when fail to read headers
10
26
  #
@@ -24,7 +40,8 @@ module Rdkafka
24
40
  name_ptrptr = FFI::MemoryPointer.new(:pointer)
25
41
  value_ptrptr = FFI::MemoryPointer.new(:pointer)
26
42
  size_ptr = Rdkafka::Bindings::SizePtr.new
27
- headers = {}
43
+
44
+ headers = HashWithSymbolKeysTreatedLikeStrings.new
28
45
 
29
46
  idx = 0
30
47
  loop do
@@ -51,12 +68,12 @@ module Rdkafka
51
68
 
52
69
  value = value_ptr.read_string(size)
53
70
 
54
- headers[name.to_sym] = value
71
+ headers[name] = value
55
72
 
56
73
  idx += 1
57
74
  end
58
75
 
59
- headers
76
+ headers.freeze
60
77
  end
61
78
  end
62
79
  end
@@ -1,3 +1,5 @@
1
+ # frozen_string_literal: true
2
+
1
3
  module Rdkafka
2
4
  class Consumer
3
5
  # A message that was consumed from a topic.
@@ -18,7 +20,7 @@ module Rdkafka
18
20
  # @return [String, nil]
19
21
  attr_reader :key
20
22
 
21
- # This message's offset in it's partition
23
+ # This message's offset in its partition
22
24
  # @return [Integer]
23
25
  attr_reader :offset
24
26
 
@@ -1,3 +1,5 @@
1
+ # frozen_string_literal: true
2
+
1
3
  module Rdkafka
2
4
  class Consumer
3
5
  # Information about a partition, used in {TopicPartitionList}.
@@ -1,3 +1,5 @@
1
+ # frozen_string_literal: true
2
+
1
3
  module Rdkafka
2
4
  class Consumer
3
5
  # A list of topics with their partition information
@@ -1,3 +1,5 @@
1
+ # frozen_string_literal: true
2
+
1
3
  module Rdkafka
2
4
  # A consumer of Kafka messages. It uses the high-level consumer approach where the Kafka
3
5
  # brokers automatically assign partitions and load balance partitions over consumers that
@@ -14,18 +16,33 @@ module Rdkafka
14
16
  # @private
15
17
  def initialize(native_kafka)
16
18
  @native_kafka = native_kafka
17
- @closing = false
19
+ end
20
+
21
+ # @return [String] consumer name
22
+ def name
23
+ @name ||= @native_kafka.with_inner do |inner|
24
+ ::Rdkafka::Bindings.rd_kafka_name(inner)
25
+ end
26
+ end
27
+
28
+ def finalizer
29
+ ->(_) { close }
18
30
  end
19
31
 
20
32
  # Close this consumer
21
33
  # @return [nil]
22
34
  def close
23
- return unless @native_kafka
35
+ return if closed?
36
+ ObjectSpace.undefine_finalizer(self)
37
+ @native_kafka.with_inner do |inner|
38
+ Rdkafka::Bindings.rd_kafka_consumer_close(inner)
39
+ end
40
+ @native_kafka.close
41
+ end
24
42
 
25
- @closing = true
26
- Rdkafka::Bindings.rd_kafka_consumer_close(@native_kafka)
27
- Rdkafka::Bindings.rd_kafka_destroy(@native_kafka)
28
- @native_kafka = nil
43
+ # Whether this consumer has closed
44
+ def closed?
45
+ @native_kafka.closed?
29
46
  end
30
47
 
31
48
  # Subscribe to one or more topics letting Kafka handle partition assignments.
@@ -46,7 +63,9 @@ module Rdkafka
46
63
  end
47
64
 
48
65
  # Subscribe to topic partition list and check this was successful
49
- response = Rdkafka::Bindings.rd_kafka_subscribe(@native_kafka, tpl)
66
+ response = @native_kafka.with_inner do |inner|
67
+ Rdkafka::Bindings.rd_kafka_subscribe(inner, tpl)
68
+ end
50
69
  if response != 0
51
70
  raise Rdkafka::RdkafkaError.new(response, "Error subscribing to '#{topics.join(', ')}'")
52
71
  end
@@ -62,7 +81,9 @@ module Rdkafka
62
81
  def unsubscribe
63
82
  closed_consumer_check(__method__)
64
83
 
65
- response = Rdkafka::Bindings.rd_kafka_unsubscribe(@native_kafka)
84
+ response = @native_kafka.with_inner do |inner|
85
+ Rdkafka::Bindings.rd_kafka_unsubscribe(inner)
86
+ end
66
87
  if response != 0
67
88
  raise Rdkafka::RdkafkaError.new(response)
68
89
  end
@@ -85,7 +106,9 @@ module Rdkafka
85
106
  tpl = list.to_native_tpl
86
107
 
87
108
  begin
88
- response = Rdkafka::Bindings.rd_kafka_pause_partitions(@native_kafka, tpl)
109
+ response = @native_kafka.with_inner do |inner|
110
+ Rdkafka::Bindings.rd_kafka_pause_partitions(inner, tpl)
111
+ end
89
112
 
90
113
  if response != 0
91
114
  list = TopicPartitionList.from_native_tpl(tpl)
@@ -113,7 +136,9 @@ module Rdkafka
113
136
  tpl = list.to_native_tpl
114
137
 
115
138
  begin
116
- response = Rdkafka::Bindings.rd_kafka_resume_partitions(@native_kafka, tpl)
139
+ response = @native_kafka.with_inner do |inner|
140
+ Rdkafka::Bindings.rd_kafka_resume_partitions(inner, tpl)
141
+ end
117
142
  if response != 0
118
143
  raise Rdkafka::RdkafkaError.new(response, "Error resume '#{list.to_h}'")
119
144
  end
@@ -131,7 +156,9 @@ module Rdkafka
131
156
  closed_consumer_check(__method__)
132
157
 
133
158
  ptr = FFI::MemoryPointer.new(:pointer)
134
- response = Rdkafka::Bindings.rd_kafka_subscription(@native_kafka, ptr)
159
+ response = @native_kafka.with_inner do |inner|
160
+ Rdkafka::Bindings.rd_kafka_subscription(inner, ptr)
161
+ end
135
162
 
136
163
  if response != 0
137
164
  raise Rdkafka::RdkafkaError.new(response)
@@ -161,7 +188,9 @@ module Rdkafka
161
188
  tpl = list.to_native_tpl
162
189
 
163
190
  begin
164
- response = Rdkafka::Bindings.rd_kafka_assign(@native_kafka, tpl)
191
+ response = @native_kafka.with_inner do |inner|
192
+ Rdkafka::Bindings.rd_kafka_assign(inner, tpl)
193
+ end
165
194
  if response != 0
166
195
  raise Rdkafka::RdkafkaError.new(response, "Error assigning '#{list.to_h}'")
167
196
  end
@@ -179,7 +208,9 @@ module Rdkafka
179
208
  closed_consumer_check(__method__)
180
209
 
181
210
  ptr = FFI::MemoryPointer.new(:pointer)
182
- response = Rdkafka::Bindings.rd_kafka_assignment(@native_kafka, ptr)
211
+ response = @native_kafka.with_inner do |inner|
212
+ Rdkafka::Bindings.rd_kafka_assignment(inner, ptr)
213
+ end
183
214
  if response != 0
184
215
  raise Rdkafka::RdkafkaError.new(response)
185
216
  end
@@ -197,6 +228,15 @@ module Rdkafka
197
228
  ptr.free unless ptr.nil?
198
229
  end
199
230
 
231
+ # @return [Boolean] true if our current assignment has been lost involuntarily.
232
+ def assignment_lost?
233
+ closed_consumer_check(__method__)
234
+
235
+ @native_kafka.with_inner do |inner|
236
+ !Rdkafka::Bindings.rd_kafka_assignment_lost(inner).zero?
237
+ end
238
+ end
239
+
200
240
  # Return the current committed offset per partition for this consumer group.
201
241
  # The offset field of each requested partition will either be set to stored offset or to -1001 in case there was no stored offset for that partition.
202
242
  #
@@ -218,7 +258,9 @@ module Rdkafka
218
258
  tpl = list.to_native_tpl
219
259
 
220
260
  begin
221
- response = Rdkafka::Bindings.rd_kafka_committed(@native_kafka, tpl, timeout_ms)
261
+ response = @native_kafka.with_inner do |inner|
262
+ Rdkafka::Bindings.rd_kafka_committed(inner, tpl, timeout_ms)
263
+ end
222
264
  if response != 0
223
265
  raise Rdkafka::RdkafkaError.new(response)
224
266
  end
@@ -243,14 +285,16 @@ module Rdkafka
243
285
  low = FFI::MemoryPointer.new(:int64, 1)
244
286
  high = FFI::MemoryPointer.new(:int64, 1)
245
287
 
246
- response = Rdkafka::Bindings.rd_kafka_query_watermark_offsets(
247
- @native_kafka,
248
- topic,
249
- partition,
250
- low,
251
- high,
252
- timeout_ms,
253
- )
288
+ response = @native_kafka.with_inner do |inner|
289
+ Rdkafka::Bindings.rd_kafka_query_watermark_offsets(
290
+ inner,
291
+ topic,
292
+ partition,
293
+ low,
294
+ high,
295
+ timeout_ms,
296
+ )
297
+ end
254
298
  if response != 0
255
299
  raise Rdkafka::RdkafkaError.new(response, "Error querying watermark offsets for partition #{partition} of #{topic}")
256
300
  end
@@ -298,7 +342,9 @@ module Rdkafka
298
342
  # @return [String, nil]
299
343
  def cluster_id
300
344
  closed_consumer_check(__method__)
301
- Rdkafka::Bindings.rd_kafka_clusterid(@native_kafka)
345
+ @native_kafka.with_inner do |inner|
346
+ Rdkafka::Bindings.rd_kafka_clusterid(inner)
347
+ end
302
348
  end
303
349
 
304
350
  # Returns this client's broker-assigned group member id
@@ -308,7 +354,9 @@ module Rdkafka
308
354
  # @return [String, nil]
309
355
  def member_id
310
356
  closed_consumer_check(__method__)
311
- Rdkafka::Bindings.rd_kafka_memberid(@native_kafka)
357
+ @native_kafka.with_inner do |inner|
358
+ Rdkafka::Bindings.rd_kafka_memberid(inner)
359
+ end
312
360
  end
313
361
 
314
362
  # Store offset of a message to be used in the next commit of this consumer
@@ -325,11 +373,13 @@ module Rdkafka
325
373
 
326
374
  # rd_kafka_offset_store is one of the few calls that does not support
327
375
  # a string as the topic, so create a native topic for it.
328
- native_topic = Rdkafka::Bindings.rd_kafka_topic_new(
329
- @native_kafka,
330
- message.topic,
331
- nil
332
- )
376
+ native_topic = @native_kafka.with_inner do |inner|
377
+ Rdkafka::Bindings.rd_kafka_topic_new(
378
+ inner,
379
+ message.topic,
380
+ nil
381
+ )
382
+ end
333
383
  response = Rdkafka::Bindings.rd_kafka_offset_store(
334
384
  native_topic,
335
385
  message.partition,
@@ -357,11 +407,13 @@ module Rdkafka
357
407
 
358
408
  # rd_kafka_offset_store is one of the few calls that does not support
359
409
  # a string as the topic, so create a native topic for it.
360
- native_topic = Rdkafka::Bindings.rd_kafka_topic_new(
361
- @native_kafka,
362
- message.topic,
363
- nil
364
- )
410
+ native_topic = @native_kafka.with_inner do |inner|
411
+ Rdkafka::Bindings.rd_kafka_topic_new(
412
+ inner,
413
+ message.topic,
414
+ nil
415
+ )
416
+ end
365
417
  response = Rdkafka::Bindings.rd_kafka_seek(
366
418
  native_topic,
367
419
  message.partition,
@@ -402,7 +454,9 @@ module Rdkafka
402
454
  tpl = list ? list.to_native_tpl : nil
403
455
 
404
456
  begin
405
- response = Rdkafka::Bindings.rd_kafka_commit(@native_kafka, tpl, async)
457
+ response = @native_kafka.with_inner do |inner|
458
+ Rdkafka::Bindings.rd_kafka_commit(inner, tpl, async)
459
+ end
406
460
  if response != 0
407
461
  raise Rdkafka::RdkafkaError.new(response)
408
462
  end
@@ -421,7 +475,9 @@ module Rdkafka
421
475
  def poll(timeout_ms)
422
476
  closed_consumer_check(__method__)
423
477
 
424
- message_ptr = Rdkafka::Bindings.rd_kafka_consumer_poll(@native_kafka, timeout_ms)
478
+ message_ptr = @native_kafka.with_inner do |inner|
479
+ Rdkafka::Bindings.rd_kafka_consumer_poll(inner, timeout_ms)
480
+ end
425
481
  if message_ptr.null?
426
482
  nil
427
483
  else
@@ -436,7 +492,7 @@ module Rdkafka
436
492
  end
437
493
  ensure
438
494
  # Clean up rdkafka message if there is one
439
- if !message_ptr.nil? && !message_ptr.null?
495
+ if message_ptr && !message_ptr.null?
440
496
  Rdkafka::Bindings.rd_kafka_message_destroy(message_ptr)
441
497
  end
442
498
  end
@@ -459,7 +515,7 @@ module Rdkafka
459
515
  if message
460
516
  yield(message)
461
517
  else
462
- if @closing
518
+ if closed?
463
519
  break
464
520
  else
465
521
  next
@@ -468,10 +524,6 @@ module Rdkafka
468
524
  end
469
525
  end
470
526
 
471
- def closed_consumer_check(method)
472
- raise Rdkafka::ClosedConsumerError.new(method) if @native_kafka.nil?
473
- end
474
-
475
527
  # Poll for new messages and yield them in batches that may contain
476
528
  # messages from more than one partition.
477
529
  #
@@ -527,7 +579,7 @@ module Rdkafka
527
579
  bytes = 0
528
580
  end_time = monotonic_now + timeout_ms / 1000.0
529
581
  loop do
530
- break if @closing
582
+ break if closed?
531
583
  max_wait = end_time - monotonic_now
532
584
  max_wait_ms = if max_wait <= 0
533
585
  0 # should not block, but may retrieve a message
@@ -545,7 +597,7 @@ module Rdkafka
545
597
  end
546
598
  if message
547
599
  slice << message
548
- bytes += message.payload.bytesize
600
+ bytes += message.payload.bytesize if message.payload
549
601
  end
550
602
  if slice.size == max_items || bytes >= bytes_threshold || monotonic_now >= end_time - 0.001
551
603
  yield slice.dup, nil
@@ -561,5 +613,9 @@ module Rdkafka
561
613
  # needed because Time.now can go backwards
562
614
  Process.clock_gettime(Process::CLOCK_MONOTONIC)
563
615
  end
616
+
617
+ def closed_consumer_check(method)
618
+ raise Rdkafka::ClosedConsumerError.new(method) if closed?
619
+ end
564
620
  end
565
621
  end