rdkafka 0.12.0 → 0.15.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (86)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.github/FUNDING.yml +1 -0
  4. data/.github/workflows/ci.yml +57 -0
  5. data/.gitignore +4 -0
  6. data/.rspec +1 -0
  7. data/.ruby-gemset +1 -0
  8. data/.ruby-version +1 -0
  9. data/CHANGELOG.md +155 -93
  10. data/Gemfile +2 -0
  11. data/{LICENSE → MIT-LICENSE} +2 -1
  12. data/README.md +76 -29
  13. data/Rakefile +2 -0
  14. data/certs/cert_chain.pem +26 -0
  15. data/docker-compose.yml +18 -15
  16. data/ext/README.md +1 -1
  17. data/ext/Rakefile +46 -27
  18. data/lib/rdkafka/abstract_handle.rb +41 -25
  19. data/lib/rdkafka/admin/acl_binding_result.rb +51 -0
  20. data/lib/rdkafka/admin/create_acl_handle.rb +28 -0
  21. data/lib/rdkafka/admin/create_acl_report.rb +24 -0
  22. data/lib/rdkafka/admin/create_partitions_handle.rb +27 -0
  23. data/lib/rdkafka/admin/create_partitions_report.rb +6 -0
  24. data/lib/rdkafka/admin/create_topic_handle.rb +2 -0
  25. data/lib/rdkafka/admin/create_topic_report.rb +2 -0
  26. data/lib/rdkafka/admin/delete_acl_handle.rb +30 -0
  27. data/lib/rdkafka/admin/delete_acl_report.rb +23 -0
  28. data/lib/rdkafka/admin/delete_groups_handle.rb +28 -0
  29. data/lib/rdkafka/admin/delete_groups_report.rb +24 -0
  30. data/lib/rdkafka/admin/delete_topic_handle.rb +2 -0
  31. data/lib/rdkafka/admin/delete_topic_report.rb +2 -0
  32. data/lib/rdkafka/admin/describe_acl_handle.rb +30 -0
  33. data/lib/rdkafka/admin/describe_acl_report.rb +23 -0
  34. data/lib/rdkafka/admin.rb +494 -35
  35. data/lib/rdkafka/bindings.rb +180 -41
  36. data/lib/rdkafka/callbacks.rb +202 -1
  37. data/lib/rdkafka/config.rb +62 -25
  38. data/lib/rdkafka/consumer/headers.rb +24 -9
  39. data/lib/rdkafka/consumer/message.rb +3 -1
  40. data/lib/rdkafka/consumer/partition.rb +2 -0
  41. data/lib/rdkafka/consumer/topic_partition_list.rb +13 -8
  42. data/lib/rdkafka/consumer.rb +243 -111
  43. data/lib/rdkafka/error.rb +15 -0
  44. data/lib/rdkafka/helpers/time.rb +14 -0
  45. data/lib/rdkafka/metadata.rb +25 -2
  46. data/lib/rdkafka/native_kafka.rb +120 -0
  47. data/lib/rdkafka/producer/delivery_handle.rb +16 -2
  48. data/lib/rdkafka/producer/delivery_report.rb +22 -2
  49. data/lib/rdkafka/producer.rb +151 -21
  50. data/lib/rdkafka/version.rb +5 -3
  51. data/lib/rdkafka.rb +24 -2
  52. data/rdkafka.gemspec +21 -5
  53. data/renovate.json +6 -0
  54. data/spec/rdkafka/abstract_handle_spec.rb +1 -1
  55. data/spec/rdkafka/admin/create_acl_handle_spec.rb +56 -0
  56. data/spec/rdkafka/admin/create_acl_report_spec.rb +18 -0
  57. data/spec/rdkafka/admin/create_topic_handle_spec.rb +1 -1
  58. data/spec/rdkafka/admin/create_topic_report_spec.rb +1 -1
  59. data/spec/rdkafka/admin/delete_acl_handle_spec.rb +85 -0
  60. data/spec/rdkafka/admin/delete_acl_report_spec.rb +72 -0
  61. data/spec/rdkafka/admin/delete_topic_handle_spec.rb +1 -1
  62. data/spec/rdkafka/admin/delete_topic_report_spec.rb +1 -1
  63. data/spec/rdkafka/admin/describe_acl_handle_spec.rb +85 -0
  64. data/spec/rdkafka/admin/describe_acl_report_spec.rb +73 -0
  65. data/spec/rdkafka/admin_spec.rb +209 -5
  66. data/spec/rdkafka/bindings_spec.rb +2 -1
  67. data/spec/rdkafka/callbacks_spec.rb +1 -1
  68. data/spec/rdkafka/config_spec.rb +24 -3
  69. data/spec/rdkafka/consumer/headers_spec.rb +60 -0
  70. data/spec/rdkafka/consumer/message_spec.rb +1 -1
  71. data/spec/rdkafka/consumer/partition_spec.rb +1 -1
  72. data/spec/rdkafka/consumer/topic_partition_list_spec.rb +20 -1
  73. data/spec/rdkafka/consumer_spec.rb +352 -61
  74. data/spec/rdkafka/error_spec.rb +1 -1
  75. data/spec/rdkafka/metadata_spec.rb +4 -3
  76. data/spec/rdkafka/{producer/client_spec.rb → native_kafka_spec.rb} +13 -35
  77. data/spec/rdkafka/producer/delivery_handle_spec.rb +4 -1
  78. data/spec/rdkafka/producer/delivery_report_spec.rb +11 -3
  79. data/spec/rdkafka/producer_spec.rb +234 -22
  80. data/spec/spec_helper.rb +20 -2
  81. data.tar.gz.sig +0 -0
  82. metadata +81 -17
  83. metadata.gz.sig +0 -0
  84. data/.semaphore/semaphore.yml +0 -23
  85. data/bin/console +0 -11
  86. data/lib/rdkafka/producer/client.rb +0 -47
@@ -1,9 +1,9 @@
1
- require "logger"
1
+ # frozen_string_literal: true
2
2
 
3
3
  module Rdkafka
4
4
  # Configuration for a Kafka consumer or producer. You can create an instance and use
5
5
  # the consumer and producer methods to create a client. Documentation of the available
6
- # configuration options is available on https://github.com/edenhill/librdkafka/blob/master/CONFIGURATION.md.
6
+ # configuration options is available on https://github.com/confluentinc/librdkafka/blob/master/CONFIGURATION.md.
7
7
  class Config
8
8
  # @private
9
9
  @@logger = Logger.new(STDOUT)
@@ -12,7 +12,7 @@ module Rdkafka
12
12
  # @private
13
13
  @@error_callback = nil
14
14
  # @private
15
- @@opaques = {}
15
+ @@opaques = ObjectSpace::WeakMap.new
16
16
  # @private
17
17
  @@log_queue = Queue.new
18
18
 
@@ -30,7 +30,6 @@ module Rdkafka
30
30
  @@logger
31
31
  end
32
32
 
33
-
34
33
  # Returns a queue whose contents will be passed to the configured logger. Each entry
35
34
  # should follow the format [Logger::Severity, String]. The benefit over calling the
36
35
  # logger directly is that this is safe to use from trap contexts.
@@ -47,18 +46,18 @@ module Rdkafka
47
46
  # @return [nil]
48
47
  def self.logger=(logger)
49
48
  raise NoLoggerError if logger.nil?
50
- @@logger=logger
49
+ @@logger = logger
51
50
  end
52
51
 
53
52
  # Set a callback that will be called every time the underlying client emits statistics.
54
53
  # You can configure if and how often this happens using `statistics.interval.ms`.
55
- # The callback is called with a hash that's documented here: https://github.com/edenhill/librdkafka/blob/master/STATISTICS.md
54
+ # The callback is called with a hash that's documented here: https://github.com/confluentinc/librdkafka/blob/master/STATISTICS.md
56
55
  #
57
56
  # @param callback [Proc, #call] The callback
58
57
  #
59
58
  # @return [nil]
60
59
  def self.statistics_callback=(callback)
61
- raise TypeError.new("Callback has to be callable") unless callback.respond_to?(:call)
60
+ raise TypeError.new("Callback has to be callable") unless callback.respond_to?(:call) || callback == nil
62
61
  @@statistics_callback = callback
63
62
  end
64
63
 
@@ -113,6 +112,7 @@ module Rdkafka
113
112
  def initialize(config_hash = {})
114
113
  @config_hash = DEFAULT_CONFIG.merge(config_hash)
115
114
  @consumer_rebalance_listener = nil
115
+ @consumer_poll_set = true
116
116
  end
117
117
 
118
118
  # Set a config option.
@@ -141,12 +141,28 @@ module Rdkafka
141
141
  @consumer_rebalance_listener = listener
142
142
  end
143
143
 
144
- # Create a consumer with this configuration.
144
+ # Should we use a single queue for the underlying consumer and events.
145
145
  #
146
- # @raise [ConfigError] When the configuration contains invalid options
147
- # @raise [ClientCreationError] When the native client cannot be created
146
+ # This is an advanced API that allows for more granular control of the polling process.
147
+ # When this value is set to `false` (`true` by default), there will be two queues that need to
148
+ # be polled:
149
+ # - main librdkafka queue for events
150
+ # - consumer queue with messages and rebalances
151
+ #
152
+ # It is recommended to use the defaults and only set it to `false` in advanced multi-threaded
153
+ # and complex cases where granular events handling control is needed.
154
+ #
155
+ # @param poll_set [Boolean]
156
+ def consumer_poll_set=(poll_set)
157
+ @consumer_poll_set = poll_set
158
+ end
159
+
160
+ # Creates a consumer with this configuration.
148
161
  #
149
162
  # @return [Consumer] The created consumer
163
+ #
164
+ # @raise [ConfigError] When the configuration contains invalid options
165
+ # @raise [ClientCreationError] When the native client cannot be created
150
166
  def consumer
151
167
  opaque = Opaque.new
152
168
  config = native_config(opaque)
@@ -156,21 +172,28 @@ module Rdkafka
156
172
  Rdkafka::Bindings.rd_kafka_conf_set_rebalance_cb(config, Rdkafka::Bindings::RebalanceCallback)
157
173
  end
158
174
 
175
+ # Create native client
159
176
  kafka = native_kafka(config, :rd_kafka_consumer)
160
177
 
161
- # Redirect the main queue to the consumer
162
- Rdkafka::Bindings.rd_kafka_poll_set_consumer(kafka)
178
+ # Redirect the main queue to the consumer queue
179
+ Rdkafka::Bindings.rd_kafka_poll_set_consumer(kafka) if @consumer_poll_set
163
180
 
164
181
  # Return consumer with Kafka client
165
- Rdkafka::Consumer.new(kafka)
182
+ Rdkafka::Consumer.new(
183
+ Rdkafka::NativeKafka.new(
184
+ kafka,
185
+ run_polling_thread: false,
186
+ opaque: opaque
187
+ )
188
+ )
166
189
  end
167
190
 
168
191
  # Create a producer with this configuration.
169
192
  #
193
+ # @return [Producer] The created producer
194
+ #
170
195
  # @raise [ConfigError] When the configuration contains invalid options
171
196
  # @raise [ClientCreationError] When the native client cannot be created
172
- #
173
- # @return [Producer] The created producer
174
197
  def producer
175
198
  # Create opaque
176
199
  opaque = Opaque.new
@@ -179,22 +202,36 @@ module Rdkafka
179
202
  # Set callback to receive delivery reports on config
180
203
  Rdkafka::Bindings.rd_kafka_conf_set_dr_msg_cb(config, Rdkafka::Callbacks::DeliveryCallbackFunction)
181
204
  # Return producer with Kafka client
182
- Rdkafka::Producer.new(Rdkafka::Producer::Client.new(native_kafka(config, :rd_kafka_producer)), self[:partitioner]).tap do |producer|
205
+ partitioner_name = self[:partitioner] || self["partitioner"]
206
+ Rdkafka::Producer.new(
207
+ Rdkafka::NativeKafka.new(
208
+ native_kafka(config, :rd_kafka_producer),
209
+ run_polling_thread: true,
210
+ opaque: opaque
211
+ ),
212
+ partitioner_name
213
+ ).tap do |producer|
183
214
  opaque.producer = producer
184
215
  end
185
216
  end
186
217
 
187
- # Create an admin instance with this configuration.
218
+ # Creates an admin instance with this configuration.
219
+ #
220
+ # @return [Admin] The created admin instance
188
221
  #
189
222
  # @raise [ConfigError] When the configuration contains invalid options
190
223
  # @raise [ClientCreationError] When the native client cannot be created
191
- #
192
- # @return [Admin] The created admin instance
193
224
  def admin
194
225
  opaque = Opaque.new
195
226
  config = native_config(opaque)
196
227
  Rdkafka::Bindings.rd_kafka_conf_set_background_event_cb(config, Rdkafka::Callbacks::BackgroundEventCallbackFunction)
197
- Rdkafka::Admin.new(native_kafka(config, :rd_kafka_producer))
228
+ Rdkafka::Admin.new(
229
+ Rdkafka::NativeKafka.new(
230
+ native_kafka(config, :rd_kafka_producer),
231
+ run_polling_thread: true,
232
+ opaque: opaque
233
+ )
234
+ )
198
235
  end
199
236
 
200
237
  # Error that is returned by the underlying rdkafka error if an invalid configuration option is present.
@@ -210,7 +247,7 @@ module Rdkafka
210
247
 
211
248
  # This method is only intended to be used to create a client,
212
249
  # using it in another way will leak memory.
213
- def native_config(opaque=nil)
250
+ def native_config(opaque = nil)
214
251
  Rdkafka::Bindings.rd_kafka_conf_new.tap do |config|
215
252
  # Create config
216
253
  @config_hash.merge(REQUIRED_CONFIG).each do |key, value|
@@ -282,18 +319,18 @@ module Rdkafka
282
319
  producer.call_delivery_callback(delivery_report, delivery_handle) if producer
283
320
  end
284
321
 
285
- def call_on_partitions_assigned(consumer, list)
322
+ def call_on_partitions_assigned(list)
286
323
  return unless consumer_rebalance_listener
287
324
  return unless consumer_rebalance_listener.respond_to?(:on_partitions_assigned)
288
325
 
289
- consumer_rebalance_listener.on_partitions_assigned(consumer, list)
326
+ consumer_rebalance_listener.on_partitions_assigned(list)
290
327
  end
291
328
 
292
- def call_on_partitions_revoked(consumer, list)
329
+ def call_on_partitions_revoked(list)
293
330
  return unless consumer_rebalance_listener
294
331
  return unless consumer_rebalance_listener.respond_to?(:on_partitions_revoked)
295
332
 
296
- consumer_rebalance_listener.on_partitions_revoked(consumer, list)
333
+ consumer_rebalance_listener.on_partitions_revoked(list)
297
334
  end
298
335
  end
299
336
  end
@@ -1,14 +1,28 @@
1
+ # frozen_string_literal: true
2
+
1
3
  module Rdkafka
2
4
  class Consumer
3
- # A message headers
4
- class Headers
5
- # Reads a native kafka's message header into ruby's hash
5
+ # Interface to return headers for a consumer message
6
+ module Headers
7
+ class HashWithSymbolKeysTreatedLikeStrings < Hash
8
+ def [](key)
9
+ if key.is_a?(Symbol)
10
+ Kernel.warn("rdkafka deprecation warning: header access with Symbol key #{key.inspect} treated as a String. " \
11
+ "Please change your code to use String keys to avoid this warning. Symbol keys will break in version 1.")
12
+ super(key.to_s)
13
+ else
14
+ super
15
+ end
16
+ end
17
+ end
18
+
19
+ # Reads a librdkafka native message's headers and returns them as a Ruby Hash
6
20
  #
7
- # @return [Hash<String, String>] a message headers
21
+ # @private
8
22
  #
23
+ # @param [Rdkafka::Bindings::Message] native_message
24
+ # @return [Hash<String, String>] headers Hash for the native_message
9
25
  # @raise [Rdkafka::RdkafkaError] when fail to read headers
10
- #
11
- # @private
12
26
  def self.from_native(native_message)
13
27
  headers_ptrptr = FFI::MemoryPointer.new(:pointer)
14
28
  err = Rdkafka::Bindings.rd_kafka_message_headers(native_message, headers_ptrptr)
@@ -24,7 +38,8 @@ module Rdkafka
24
38
  name_ptrptr = FFI::MemoryPointer.new(:pointer)
25
39
  value_ptrptr = FFI::MemoryPointer.new(:pointer)
26
40
  size_ptr = Rdkafka::Bindings::SizePtr.new
27
- headers = {}
41
+
42
+ headers = HashWithSymbolKeysTreatedLikeStrings.new
28
43
 
29
44
  idx = 0
30
45
  loop do
@@ -51,12 +66,12 @@ module Rdkafka
51
66
 
52
67
  value = value_ptr.read_string(size)
53
68
 
54
- headers[name.to_sym] = value
69
+ headers[name] = value
55
70
 
56
71
  idx += 1
57
72
  end
58
73
 
59
- headers
74
+ headers.freeze
60
75
  end
61
76
  end
62
77
  end
@@ -1,3 +1,5 @@
1
+ # frozen_string_literal: true
2
+
1
3
  module Rdkafka
2
4
  class Consumer
3
5
  # A message that was consumed from a topic.
@@ -18,7 +20,7 @@ module Rdkafka
18
20
  # @return [String, nil]
19
21
  attr_reader :key
20
22
 
21
- # This message's offset in it's partition
23
+ # This message's offset in its partition
22
24
  # @return [Integer]
23
25
  attr_reader :offset
24
26
 
@@ -1,3 +1,5 @@
1
+ # frozen_string_literal: true
2
+
1
3
  module Rdkafka
2
4
  class Consumer
3
5
  # Information about a partition, used in {TopicPartitionList}.
@@ -1,3 +1,5 @@
1
+ # frozen_string_literal: true
2
+
1
3
  module Rdkafka
2
4
  class Consumer
3
5
  # A list of topics with their partition information
@@ -34,6 +36,11 @@ module Rdkafka
34
36
  # Add a topic with optionally partitions to the list.
35
37
  # Calling this method multiple times for the same topic will overwrite the previous configuration.
36
38
  #
39
+ # @param topic [String] The topic's name
40
+ # @param partitions [Array<Integer>, Range<Integer>, Integer] The topic's partitions or partition count
41
+ #
42
+ # @return [nil]
43
+ #
37
44
  # @example Add a topic with unassigned partitions
38
45
  # tpl.add_topic("topic")
39
46
  #
@@ -43,10 +50,6 @@ module Rdkafka
43
50
  # @example Add a topic with all topics up to a count
44
51
  # tpl.add_topic("topic", 9)
45
52
  #
46
- # @param topic [String] The topic's name
47
- # @param partitions [Array<Integer>, Range<Integer>, Integer] The topic's partitions or partition count
48
- #
49
- # @return [nil]
50
53
  def add_topic(topic, partitions=nil)
51
54
  if partitions.nil?
52
55
  @data[topic.to_s] = nil
@@ -88,11 +91,11 @@ module Rdkafka
88
91
 
89
92
  # Create a new topic partition list based of a native one.
90
93
  #
94
+ # @private
95
+ #
91
96
  # @param pointer [FFI::Pointer] Optional pointer to an existing native list. Its contents will be copied.
92
97
  #
93
98
  # @return [TopicPartitionList]
94
- #
95
- # @private
96
99
  def self.from_native_tpl(pointer)
97
100
  # Data to be moved into the tpl
98
101
  data = {}
@@ -125,8 +128,8 @@ module Rdkafka
125
128
  #
126
129
  # The pointer will be cleaned by `rd_kafka_topic_partition_list_destroy` when GC releases it.
127
130
  #
128
- # @return [FFI::Pointer]
129
131
  # @private
132
+ # @return [FFI::Pointer]
130
133
  def to_native_tpl
131
134
  tpl = Rdkafka::Bindings.rd_kafka_topic_partition_list_new(count)
132
135
 
@@ -140,11 +143,13 @@ module Rdkafka
140
143
  )
141
144
 
142
145
  if p.offset
146
+ offset = p.offset.is_a?(Time) ? p.offset.to_f * 1_000 : p.offset
147
+
143
148
  Rdkafka::Bindings.rd_kafka_topic_partition_list_set_offset(
144
149
  tpl,
145
150
  topic,
146
151
  p.partition,
147
- p.offset
152
+ offset
148
153
  )
149
154
  end
150
155
  end