karafka-rdkafka 0.23.1.rc2-aarch64-linux-gnu → 0.24.0.rc1-aarch64-linux-gnu

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +11 -1
  3. data/Gemfile +9 -0
  4. data/docker-compose-ssl.yml +1 -1
  5. data/docker-compose.yml +1 -1
  6. data/ext/librdkafka.so +0 -0
  7. data/karafka-rdkafka.gemspec +0 -7
  8. data/lib/rdkafka/abstract_handle.rb +23 -5
  9. data/lib/rdkafka/admin/acl_binding_result.rb +1 -1
  10. data/lib/rdkafka/admin/config_resource_binding_result.rb +1 -0
  11. data/lib/rdkafka/admin/create_acl_handle.rb +3 -0
  12. data/lib/rdkafka/admin/create_acl_report.rb +3 -0
  13. data/lib/rdkafka/admin/create_partitions_handle.rb +3 -0
  14. data/lib/rdkafka/admin/create_partitions_report.rb +1 -0
  15. data/lib/rdkafka/admin/create_topic_handle.rb +3 -0
  16. data/lib/rdkafka/admin/create_topic_report.rb +3 -0
  17. data/lib/rdkafka/admin/delete_acl_handle.rb +3 -0
  18. data/lib/rdkafka/admin/delete_acl_report.rb +3 -0
  19. data/lib/rdkafka/admin/delete_groups_handle.rb +5 -0
  20. data/lib/rdkafka/admin/delete_groups_report.rb +3 -0
  21. data/lib/rdkafka/admin/delete_topic_handle.rb +3 -0
  22. data/lib/rdkafka/admin/delete_topic_report.rb +3 -0
  23. data/lib/rdkafka/admin/describe_acl_handle.rb +3 -0
  24. data/lib/rdkafka/admin/describe_acl_report.rb +3 -0
  25. data/lib/rdkafka/admin/describe_configs_handle.rb +3 -0
  26. data/lib/rdkafka/admin/describe_configs_report.rb +6 -0
  27. data/lib/rdkafka/admin/incremental_alter_configs_handle.rb +3 -0
  28. data/lib/rdkafka/admin/incremental_alter_configs_report.rb +6 -0
  29. data/lib/rdkafka/admin.rb +99 -103
  30. data/lib/rdkafka/bindings.rb +23 -25
  31. data/lib/rdkafka/callbacks.rb +62 -2
  32. data/lib/rdkafka/config.rb +20 -8
  33. data/lib/rdkafka/consumer/headers.rb +3 -2
  34. data/lib/rdkafka/consumer/message.rb +7 -3
  35. data/lib/rdkafka/consumer/partition.rb +6 -1
  36. data/lib/rdkafka/consumer/topic_partition_list.rb +5 -5
  37. data/lib/rdkafka/consumer.rb +24 -12
  38. data/lib/rdkafka/defaults.rb +84 -0
  39. data/lib/rdkafka/error.rb +46 -1
  40. data/lib/rdkafka/helpers/oauth.rb +11 -5
  41. data/lib/rdkafka/metadata.rb +29 -5
  42. data/lib/rdkafka/native_kafka.rb +26 -2
  43. data/lib/rdkafka/producer/delivery_report.rb +6 -2
  44. data/lib/rdkafka/producer/partitions_count_cache.rb +24 -14
  45. data/lib/rdkafka/producer/testing.rb +3 -3
  46. data/lib/rdkafka/producer.rb +60 -16
  47. data/lib/rdkafka/version.rb +6 -3
  48. data/lib/rdkafka.rb +2 -0
  49. data/renovate.json +1 -8
  50. metadata +3 -86
data/lib/rdkafka/callbacks.rb

@@ -1,6 +1,8 @@
 # frozen_string_literal: true
 
 module Rdkafka
+  # Callback handlers for librdkafka events
+  # @private
   module Callbacks
     # Extracts attributes of a rd_kafka_topic_result_t
     #
@@ -8,12 +10,16 @@ module Rdkafka
     class TopicResult
       attr_reader :result_error, :error_string, :result_name
 
+      # @param topic_result_pointer [FFI::Pointer] pointer to the topic result struct
       def initialize(topic_result_pointer)
         @result_error = Rdkafka::Bindings.rd_kafka_topic_result_error(topic_result_pointer)
         @error_string = Rdkafka::Bindings.rd_kafka_topic_result_error_string(topic_result_pointer)
         @result_name = Rdkafka::Bindings.rd_kafka_topic_result_name(topic_result_pointer)
       end
 
+      # @param count [Integer] number of results
+      # @param array_pointer [FFI::Pointer] pointer to the results array
+      # @return [Array<TopicResult>] array of topic results
       def self.create_topic_results_from_array(count, array_pointer)
         (1..count).map do |index|
           result_pointer = (array_pointer + (index - 1)).read_pointer
@@ -22,8 +28,13 @@ module Rdkafka
       end
     end
 
+    # Extracts attributes of rd_kafka_group_result_t
+    #
+    # @private
     class GroupResult
       attr_reader :result_error, :error_string, :result_name
+
+      # @param group_result_pointer [FFI::Pointer] pointer to the group result struct
       def initialize(group_result_pointer)
         native_error = Rdkafka::Bindings.rd_kafka_group_result_error(group_result_pointer)
 
@@ -37,6 +48,10 @@ module Rdkafka
 
         @result_name = Rdkafka::Bindings.rd_kafka_group_result_name(group_result_pointer)
       end
+
+      # @param count [Integer] number of results
+      # @param array_pointer [FFI::Pointer] pointer to the results array
+      # @return [Array<GroupResult>] array of group results
       def self.create_group_results_from_array(count, array_pointer)
         (1..count).map do |index|
           result_pointer = (array_pointer + (index - 1)).read_pointer
@@ -51,12 +66,16 @@ module Rdkafka
     class CreateAclResult
       attr_reader :result_error, :error_string
 
+      # @param acl_result_pointer [FFI::Pointer] pointer to the ACL result struct
      def initialize(acl_result_pointer)
         rd_kafka_error_pointer = Bindings.rd_kafka_acl_result_error(acl_result_pointer)
         @result_error = Rdkafka::Bindings.rd_kafka_error_code(rd_kafka_error_pointer)
         @error_string = Rdkafka::Bindings.rd_kafka_error_string(rd_kafka_error_pointer)
       end
 
+      # @param count [Integer] number of results
+      # @param array_pointer [FFI::Pointer] pointer to the results array
+      # @return [Array<CreateAclResult>] array of ACL results
       def self.create_acl_results_from_array(count, array_pointer)
         (1..count).map do |index|
           result_pointer = (array_pointer + (index - 1)).read_pointer
@@ -71,6 +90,7 @@ module Rdkafka
     class DeleteAclResult
       attr_reader :result_error, :error_string, :matching_acls, :matching_acls_count
 
+      # @param acl_result_pointer [FFI::Pointer] pointer to the delete ACL result response struct
       def initialize(acl_result_pointer)
         @matching_acls=[]
         rd_kafka_error_pointer = Rdkafka::Bindings.rd_kafka_DeleteAcls_result_response_error(acl_result_pointer)
@@ -84,6 +104,9 @@ module Rdkafka
         end
       end
 
+      # @param count [Integer] number of results
+      # @param array_pointer [FFI::Pointer] pointer to the results array
+      # @return [Array<DeleteAclResult>] array of delete ACL results
       def self.delete_acl_results_from_array(count, array_pointer)
         (1..count).map do |index|
           result_pointer = (array_pointer + (index - 1)).read_pointer
@@ -98,6 +121,7 @@ module Rdkafka
     class DescribeAclResult
       attr_reader :result_error, :error_string, :matching_acls, :matching_acls_count
 
+      # @param event_ptr [FFI::Pointer] pointer to the event
       def initialize(event_ptr)
         @matching_acls=[]
         @result_error = Rdkafka::Bindings.rd_kafka_event_error(event_ptr)
@@ -112,9 +136,13 @@ module Rdkafka
       end
     end
 
+    # Extracts attributes of rd_kafka_DescribeConfigs_result_t
+    #
+    # @private
     class DescribeConfigsResult
       attr_reader :result_error, :error_string, :results, :results_count
 
+      # @param event_ptr [FFI::Pointer] pointer to the event
       def initialize(event_ptr)
         @results=[]
         @result_error = Rdkafka::Bindings.rd_kafka_event_error(event_ptr)
@@ -130,9 +158,13 @@ module Rdkafka
       end
     end
 
+    # Extracts attributes of rd_kafka_IncrementalAlterConfigs_result_t
+    #
+    # @private
     class IncrementalAlterConfigsResult
       attr_reader :result_error, :error_string, :results, :results_count
 
+      # @param event_ptr [FFI::Pointer] pointer to the event
       def initialize(event_ptr)
         @results=[]
         @result_error = Rdkafka::Bindings.rd_kafka_event_error(event_ptr)
@@ -150,7 +182,11 @@ module Rdkafka
 
     # @private
     class BackgroundEventCallback
-      def self.call(_, event_ptr, _)
+      # Handles background events from librdkafka
+      # @param _client_ptr [FFI::Pointer] unused client pointer
+      # @param event_ptr [FFI::Pointer] pointer to the event
+      # @param _opaque_ptr [FFI::Pointer] unused opaque pointer
+      def self.call(_client_ptr, event_ptr, _opaque_ptr)
         case Rdkafka::Bindings.rd_kafka_event_type(event_ptr)
         when Rdkafka::Bindings::RD_KAFKA_EVENT_CREATETOPICS_RESULT
           process_create_topic(event_ptr)
@@ -175,6 +211,8 @@ module Rdkafka
 
       private
 
+      # Processes create topic result event
+      # @param event_ptr [FFI::Pointer] pointer to the event
       def self.process_create_topic(event_ptr)
         create_topics_result = Rdkafka::Bindings.rd_kafka_event_CreateTopics_result(event_ptr)
 
@@ -193,6 +231,8 @@ module Rdkafka
         end
       end
 
+      # Processes describe configs result event
+      # @param event_ptr [FFI::Pointer] pointer to the event
       def self.process_describe_configs(event_ptr)
         describe_configs = DescribeConfigsResult.new(event_ptr)
         describe_configs_handle_ptr = Rdkafka::Bindings.rd_kafka_event_opaque(event_ptr)
@@ -211,6 +251,8 @@ module Rdkafka
         end
       end
 
+      # Processes incremental alter configs result event
+      # @param event_ptr [FFI::Pointer] pointer to the event
       def self.process_incremental_alter_configs(event_ptr)
         incremental_alter = IncrementalAlterConfigsResult.new(event_ptr)
         incremental_alter_handle_ptr = Rdkafka::Bindings.rd_kafka_event_opaque(event_ptr)
@@ -229,6 +271,8 @@ module Rdkafka
         end
       end
 
+      # Processes delete groups result event
+      # @param event_ptr [FFI::Pointer] pointer to the event
       def self.process_delete_groups(event_ptr)
         delete_groups_result = Rdkafka::Bindings.rd_kafka_event_DeleteGroups_result(event_ptr)
 
@@ -247,6 +291,8 @@ module Rdkafka
         end
       end
 
+      # Processes delete topic result event
+      # @param event_ptr [FFI::Pointer] pointer to the event
       def self.process_delete_topic(event_ptr)
         delete_topics_result = Rdkafka::Bindings.rd_kafka_event_DeleteTopics_result(event_ptr)
 
@@ -265,6 +311,8 @@ module Rdkafka
         end
       end
 
+      # Processes create partitions result event
+      # @param event_ptr [FFI::Pointer] pointer to the event
       def self.process_create_partitions(event_ptr)
         create_partitionss_result = Rdkafka::Bindings.rd_kafka_event_CreatePartitions_result(event_ptr)
 
@@ -283,6 +331,8 @@ module Rdkafka
         end
       end
 
+      # Processes create ACL result event
+      # @param event_ptr [FFI::Pointer] pointer to the event
       def self.process_create_acl(event_ptr)
         create_acls_result = Rdkafka::Bindings.rd_kafka_event_CreateAcls_result(event_ptr)
 
@@ -300,6 +350,8 @@ module Rdkafka
         end
       end
 
+      # Processes delete ACL result event
+      # @param event_ptr [FFI::Pointer] pointer to the event
       def self.process_delete_acl(event_ptr)
         delete_acls_result = Rdkafka::Bindings.rd_kafka_event_DeleteAcls_result(event_ptr)
 
@@ -322,6 +374,8 @@ module Rdkafka
         end
       end
 
+      # Processes describe ACL result event
+      # @param event_ptr [FFI::Pointer] pointer to the event
       def self.process_describe_acl(event_ptr)
         describe_acl = DescribeAclResult.new(event_ptr)
         describe_acl_handle_ptr = Rdkafka::Bindings.rd_kafka_event_opaque(event_ptr)
@@ -342,7 +396,11 @@ module Rdkafka
 
     # @private
     class DeliveryCallback
-      def self.call(_, message_ptr, opaque_ptr)
+      # Handles message delivery callbacks
+      # @param _client_ptr [FFI::Pointer] unused client pointer
+      # @param message_ptr [FFI::Pointer] pointer to the delivered message
+      # @param opaque_ptr [FFI::Pointer] pointer to the opaque object for callback context
+      def self.call(_client_ptr, message_ptr, opaque_ptr)
         message = Rdkafka::Bindings::Message.new(message_ptr)
         delivery_handle_ptr_address = message[:_private].address
         if delivery_handle = Rdkafka::Producer::DeliveryHandle.remove(delivery_handle_ptr_address)
@@ -373,7 +431,9 @@ module Rdkafka
       end
     end
 
+    # @private
     @@mutex = Mutex.new
+    # @private
    @@current_pid = nil
 
     class << self
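
The DeliveryCallback above looks up the waiting DeliveryHandle and hands the report to any per-producer delivery callback. For orientation, a minimal sketch of the user-facing side, assuming the standard rdkafka-ruby Producer#delivery_callback= API (not part of this diff; broker address and topic are placeholders):

    producer = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092").producer

    # Invoked via Callbacks::DeliveryCallback once librdkafka reports delivery
    producer.delivery_callback = ->(report) {
      puts "Delivered to #{report.topic_name}/#{report.partition}@#{report.offset}"
    }

    producer.produce(topic: "events", payload: "hello").wait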
data/lib/rdkafka/config.rb

@@ -71,8 +71,7 @@ module Rdkafka
     # You can configure if and how often this happens using `statistics.interval.ms`.
     # The callback is called with a hash that's documented here: https://github.com/confluentinc/librdkafka/blob/master/STATISTICS.md
     #
-    # @param callback [Proc, #call] The callback
-    #
+    # @param callback [Proc, #call, nil] callable object or nil to clear
     # @return [nil]
     def self.statistics_callback=(callback)
       raise TypeError.new("Callback has to be callable") unless callback.respond_to?(:call) || callback == nil
@@ -90,8 +89,7 @@ module Rdkafka
     # If this callback is not set, global errors such as brokers becoming unavailable will only be sent to the logger, as defined by librdkafka.
     # The callback is called with an instance of RdKafka::Error.
     #
-    # @param callback [Proc, #call] The callback
-    #
+    # @param callback [Proc, #call] callable object to handle errors
     # @return [nil]
     def self.error_callback=(callback)
       raise TypeError.new("Callback has to be callable") unless callback.respond_to?(:call)
@@ -108,8 +106,7 @@ module Rdkafka
     # Sets the SASL/OAUTHBEARER token refresh callback.
     # This callback will be triggered when it is time to refresh the client's OAUTHBEARER token
     #
-    # @param callback [Proc, #call] The callback
-    #
+    # @param callback [Proc, #call, nil] callable object to handle token refresh or nil to clear
     # @return [nil]
     def self.oauthbearer_token_refresh_callback=(callback)
       raise TypeError.new("Callback has to be callable") unless callback.respond_to?(:call) || callback == nil
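
All three setters are class-level and accept anything that responds to `call`; per the updated signatures, `statistics_callback` and `oauthbearer_token_refresh_callback` also accept nil to clear a previously registered handler. A usage sketch (the stats key shown comes from librdkafka's STATISTICS.md):

    # Emitted only when `statistics.interval.ms` is set in the client config
    Rdkafka::Config.statistics_callback = ->(stats) {
      puts "librdkafka reply queue depth: #{stats['replyq']}"
    }

    Rdkafka::Config.error_callback = ->(error) {
      warn "Global librdkafka error: #{error}"
    }

    # Allowed where the signature includes nil
    Rdkafka::Config.statistics_callback = nil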
@@ -235,7 +232,7 @@ module Rdkafka
     #
     # @raise [ConfigError] When the configuration contains invalid options
     # @raise [ClientCreationError] When the native client cannot be created
-    def producer(native_kafka_auto_start: true, native_kafka_poll_timeout_ms: 100)
+    def producer(native_kafka_auto_start: true, native_kafka_poll_timeout_ms: Defaults::NATIVE_KAFKA_POLL_TIMEOUT_MS)
       # Create opaque
       opaque = Opaque.new
       # Create Kafka config
@@ -270,7 +267,7 @@ module Rdkafka
     #
     # @raise [ConfigError] When the configuration contains invalid options
     # @raise [ClientCreationError] When the native client cannot be created
-    def admin(native_kafka_auto_start: true, native_kafka_poll_timeout_ms: 100)
+    def admin(native_kafka_auto_start: true, native_kafka_poll_timeout_ms: Defaults::NATIVE_KAFKA_POLL_TIMEOUT_MS)
       opaque = Opaque.new
       config = native_config(opaque)
       Rdkafka::Bindings.rd_kafka_conf_set_background_event_cb(config, Rdkafka::Callbacks::BackgroundEventCallbackFunction)
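
Both factory methods now take their default poll interval from `Defaults::NATIVE_KAFKA_POLL_TIMEOUT_MS` (100 ms, see the new defaults.rb below) rather than a hard-coded literal; explicit overrides still work. A sketch (broker address hypothetical):

    config = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092")

    # Polls the background queue every 100 ms (the Defaults value)
    producer = config.producer

    # Explicit override for a tighter polling loop
    admin = config.admin(native_kafka_poll_timeout_ms: 50)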
@@ -301,6 +298,9 @@ module Rdkafka
 
     # This method is only intended to be used to create a client,
     # using it in another way will leak memory.
+    #
+    # @param opaque [Object, nil] optional opaque pointer for callbacks
+    # @return [FFI::Pointer] native rdkafka configuration pointer
     def native_config(opaque = nil)
       Rdkafka::Bindings.rd_kafka_conf_new.tap do |config|
         # Create config
@@ -345,6 +345,11 @@ module Rdkafka
       end
     end
 
+    # Creates a native Kafka handle
+    # @param config [FFI::Pointer] pointer to the native config
+    # @param type [Symbol] type of client (:rd_kafka_producer or :rd_kafka_consumer)
+    # @return [FFI::Pointer] pointer to the native Kafka handle
+    # @private
     def native_kafka(config, type)
       error_buffer = FFI::MemoryPointer.from_string(" " * 256)
       handle = Rdkafka::Bindings.rd_kafka_new(
@@ -374,10 +379,15 @@ module Rdkafka
     attr_accessor :producer
     attr_accessor :consumer_rebalance_listener
 
+    # Invokes the delivery callback on the producer if one is set
+    # @param delivery_report [Rdkafka::Producer::DeliveryReport] the delivery report
+    # @param delivery_handle [Rdkafka::Producer::DeliveryHandle] the delivery handle
     def call_delivery_callback(delivery_report, delivery_handle)
       producer.call_delivery_callback(delivery_report, delivery_handle) if producer
     end
 
+    # Invokes the on_partitions_assigned callback on the rebalance listener if set
+    # @param list [Rdkafka::Consumer::TopicPartitionList] the assigned partitions
     def call_on_partitions_assigned(list)
       return unless consumer_rebalance_listener
       return unless consumer_rebalance_listener.respond_to?(:on_partitions_assigned)
@@ -385,6 +395,8 @@ module Rdkafka
       consumer_rebalance_listener.on_partitions_assigned(list)
     end
 
+    # Invokes the on_partitions_revoked callback on the rebalance listener if set
+    # @param list [Rdkafka::Consumer::TopicPartitionList] the revoked partitions
     def call_on_partitions_revoked(list)
       return unless consumer_rebalance_listener
       return unless consumer_rebalance_listener.respond_to?(:on_partitions_revoked)
data/lib/rdkafka/consumer/headers.rb

@@ -4,6 +4,7 @@ module Rdkafka
   class Consumer
     # Interface to return headers for a consumer message
     module Headers
+      # Empty frozen hash used when there are no headers
      EMPTY_HEADERS = {}.freeze
 
       # Reads a librdkafka native message's headers and returns them as a Ruby Hash
@@ -12,8 +13,8 @@ module Rdkafka
       #
       # @private
       #
-      # @param [Rdkafka::Bindings::Message] native_message
-      # @return [Hash<String, String|Array<String>>] headers Hash for the native_message
+      # @param native_message [Rdkafka::Bindings::Message] the native message to read headers from
+      # @return [Hash{String => String, Array<String>}] headers Hash for the native_message
       # @raise [Rdkafka::RdkafkaError] when fail to read headers
       def self.from_native(native_message)
         headers_ptrptr = FFI::MemoryPointer.new(:pointer)
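
The retyped return value spells out that a header key seen once maps to a String, while a repeated key collapses into an Array of its values. Illustratively (header names and values are hypothetical; the shape follows the new signature):

    message = consumer.poll(250)
    message.headers
    # => { "trace-id" => "abc123", "retry-count" => ["1", "2"] }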
data/lib/rdkafka/consumer/message.rb

@@ -28,10 +28,11 @@ module Rdkafka
       # @return [Time, nil]
       attr_reader :timestamp
 
-      # @return [Hash<String, String>] a message headers
+      # @return [Hash{String => String}] message headers
       attr_reader :headers
 
       # @private
+      # @param native_message [Rdkafka::Bindings::Message] native message struct from librdkafka
       def initialize(native_message)
         # Set topic
         unless native_message[:rkt].null?
@@ -71,6 +72,11 @@ module Rdkafka
         "<Message in '#{topic}' with key '#{truncate(key)}', payload '#{truncate(payload)}', partition #{partition}, offset #{offset}, timestamp #{timestamp}#{is_headers}>"
       end
 
+      private
+
+      # Truncates a string for display purposes
+      # @param string [String, nil] the string to truncate
+      # @return [String, nil] truncated string or nil
       def truncate(string)
         if string && string.length > 40
           "#{string[0..39]}..."
@@ -79,8 +85,6 @@ module Rdkafka
         else
           string
         end
       end
 
-      private
-
     end
   end
 end
data/lib/rdkafka/consumer/partition.rb

@@ -21,7 +21,11 @@ module Rdkafka
       attr_reader :metadata
 
       # @private
-      def initialize(partition, offset, err = 0, metadata = nil)
+      # @param partition [Integer] partition number
+      # @param offset [Integer, nil] partition offset
+      # @param err [Integer] error code from librdkafka
+      # @param metadata [String, nil] partition metadata
+      def initialize(partition, offset, err = Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR, metadata = nil)
         @partition = partition
         @offset = offset
         @err = err
@@ -46,6 +50,7 @@ module Rdkafka
       end
 
       # Whether another partition is equal to this
+      # @param other [Object] object to compare with
       # @return [Boolean]
       def ==(other)
         self.class == other.class &&
data/lib/rdkafka/consumer/topic_partition_list.rb

@@ -49,7 +49,6 @@ module Rdkafka
       #
       # @example Add a topic with all topics up to a count
       #   tpl.add_topic("topic", 9)
-      #
       def add_topic(topic, partitions=nil)
         if partitions.nil?
           @data[topic.to_s] = nil
@@ -65,10 +64,8 @@ module Rdkafka
       # Calling this method multiple times for the same topic will overwrite the previous configuraton.
       #
       # @param topic [String] The topic's name
-      # @param partitions_with_offsets [Hash<Integer, Integer>] The topic's partitions and offsets
-      # @param partitions_with_offsets [Array<Consumer::Partition>] The topic's partitions with offsets
-      #   and metadata (if any)
-      #
+      # @param partitions_with_offsets [Hash{Integer => Integer}, Array<Consumer::Partition>] The topic's
+      #   partitions and offsets (Hash) or partitions with offsets and metadata (Array)
       # @return [nil]
       def add_topic_and_partitions_with_offsets(topic, partitions_with_offsets)
         @data[topic.to_s] = partitions_with_offsets.map do |p, o|
@@ -89,6 +86,9 @@ module Rdkafka
         "<TopicPartitionList: #{to_h}>"
       end
 
+      # Check equality with another TopicPartitionList
+      # @param other [TopicPartitionList] object to compare with
+      # @return [Boolean]
       def ==(other)
         self.to_h == other.to_h
       end
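
The merged @param makes clear that `add_topic_and_partitions_with_offsets` accepts either shape. A sketch of both (topic name and offsets hypothetical):

    tpl = Rdkafka::Consumer::TopicPartitionList.new

    # Hash form: partition number => offset
    tpl.add_topic_and_partitions_with_offsets("events", 0 => 100, 1 => 250)

    # Array form: Partition objects carrying offsets (and metadata, if any)
    tpl.add_topic_and_partitions_with_offsets(
      "events",
      [Rdkafka::Consumer::Partition.new(0, 100), Rdkafka::Consumer::Partition.new(1, 250)]
    )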
data/lib/rdkafka/consumer.rb

@@ -16,6 +16,7 @@ module Rdkafka
     include Helpers::OAuth
 
     # @private
+    # @param native_kafka [NativeKafka] wrapper around the native Kafka consumer handle
     def initialize(native_kafka)
       @native_kafka = native_kafka
     end
@@ -33,6 +34,8 @@ module Rdkafka
       end
     end
 
+    # @return [Proc] finalizer proc for closing the consumer
+    # @private
     def finalizer
       ->(_) { close }
     end
@@ -237,7 +240,7 @@ module Rdkafka
     # @param timeout_ms [Integer] The timeout for fetching this information.
     # @return [TopicPartitionList]
     # @raise [RdkafkaError] When getting the committed positions fails.
-    def committed(list=nil, timeout_ms=2_000)
+    def committed(list = nil, timeout_ms = Defaults::CONSUMER_COMMITTED_TIMEOUT_MS)
       closed_consumer_check(__method__)
 
       if list.nil?
@@ -292,7 +295,7 @@ module Rdkafka
     # @param timeout_ms [Integer] The timeout for querying the broker
     # @return [Integer] The low and high watermark
     # @raise [RdkafkaError] When querying the broker fails.
-    def query_watermark_offsets(topic, partition, timeout_ms=1000)
+    def query_watermark_offsets(topic, partition, timeout_ms = Defaults::CONSUMER_QUERY_WATERMARK_TIMEOUT_MS)
       closed_consumer_check(__method__)
 
       low = FFI::MemoryPointer.new(:int64, 1)
@@ -323,10 +326,10 @@ module Rdkafka
     #
     # @param topic_partition_list [TopicPartitionList] The list to calculate lag for.
     # @param watermark_timeout_ms [Integer] The timeout for each query watermark call.
-    # @return [Hash<String, Hash<Integer, Integer>>] A hash containing all topics with the lag
+    # @return [Hash{String => Hash{Integer => Integer}}] A hash containing all topics with the lag
     #   per partition
     # @raise [RdkafkaError] When querying the broker fails.
-    def lag(topic_partition_list, watermark_timeout_ms=1000)
+    def lag(topic_partition_list, watermark_timeout_ms = Defaults::CONSUMER_LAG_TIMEOUT_MS)
       out = {}
 
       topic_partition_list.to_h.each do |topic, partitions|
@@ -454,7 +457,7 @@ module Rdkafka
         native_topic,
         partition,
         offset,
-        0 # timeout
+        Defaults::CONSUMER_SEEK_TIMEOUT_MS
       )
 
       return nil if response == Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR
@@ -471,11 +474,10 @@ module Rdkafka
     # Lookup offset for the given partitions by timestamp.
     #
     # @param list [TopicPartitionList] The TopicPartitionList with timestamps instead of offsets
-    #
+    # @param timeout_ms [Integer] timeout in milliseconds for the operation
     # @return [TopicPartitionList]
-    #
     # @raise [RdKafkaError] When the OffsetForTimes lookup fails
-    def offsets_for_times(list, timeout_ms = 1000)
+    def offsets_for_times(list, timeout_ms = Defaults::CONSUMER_OFFSETS_FOR_TIMES_TIMEOUT_MS)
       closed_consumer_check(__method__)
 
       if !list.is_a?(TopicPartitionList)
@@ -583,7 +585,7 @@ module Rdkafka
     # @note This method technically should be called `#poll` and the current `#poll` should be
     #   called `#consumer_poll` though we keep the current naming convention to make it backward
     #   compatible.
-    def events_poll(timeout_ms = 0)
+    def events_poll(timeout_ms = Defaults::CONSUMER_EVENTS_POLL_TIMEOUT_MS)
       @native_kafka.with_inner do |inner|
         Rdkafka::Bindings.rd_kafka_poll(inner, timeout_ms)
       end
@@ -595,12 +597,13 @@ module Rdkafka
     # If `enable.partition.eof` is turned on in the config this will raise an error when an eof is
     # reached, so you probably want to disable that when using this method of iteration.
     #
+    # @param timeout_ms [Integer] The timeout for each poll
     # @yieldparam message [Message] Received message
     # @return [nil]
     # @raise [RdkafkaError] When polling fails
-    def each
+    def each(timeout_ms: Defaults::CONSUMER_POLL_TIMEOUT_MS)
       loop do
-        message = poll(250)
+        message = poll(timeout_ms)
         if message
           yield(message)
         else
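
With this change, `each` exposes the per-iteration poll timeout that was previously hard-coded to 250 ms, defaulting to `Defaults::CONSUMER_POLL_TIMEOUT_MS`. For example (topic name hypothetical):

    consumer.subscribe("events")

    # Poll every 100 ms instead of the 250 ms default
    consumer.each(timeout_ms: 100) do |message|
      puts "#{message.topic}/#{message.partition}@#{message.offset}"
    end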
@@ -613,7 +616,13 @@ module Rdkafka
       end
     end
 
-    # Deprecated. Please read the error message for more details.
+    # @deprecated This method has been removed due to data consistency concerns
+    # @param max_items [Integer] unused
+    # @param bytes_threshold [Numeric] unused
+    # @param timeout_ms [Integer] unused
+    # @param yield_on_error [Boolean] unused
+    # @param block [Proc] unused block
+    # @raise [NotImplementedError] Always raises as this method is no longer supported
     def each_batch(max_items: 100, bytes_threshold: Float::INFINITY, timeout_ms: 250, yield_on_error: false, &block)
       raise NotImplementedError, <<~ERROR
         `each_batch` has been removed due to data consistency concerns.
@@ -650,6 +659,9 @@ module Rdkafka
 
     private
 
+    # Checks if the consumer is closed and raises an error if so
+    # @param method [Symbol] name of the calling method for error context
+    # @raise [ClosedConsumerError] when the consumer is closed
     def closed_consumer_check(method)
       raise Rdkafka::ClosedConsumerError.new(method) if closed?
     end
data/lib/rdkafka/defaults.rb

@@ -0,0 +1,84 @@
+# frozen_string_literal: true
+
+module Rdkafka
+  # Provides default timeout and configuration values used throughout the library.
+  #
+  # These constants standardize timing values across consumers, producers, and admin clients.
+  # Values are specified in milliseconds (ms) unless otherwise noted.
+  module Defaults
+    # Consumer timeout for fetching committed offsets
+    # @see Consumer#committed
+    CONSUMER_COMMITTED_TIMEOUT_MS = 2_000
+
+    # Consumer timeout for querying watermark offsets
+    # @see Consumer#query_watermark_offsets
+    CONSUMER_QUERY_WATERMARK_TIMEOUT_MS = 1_000
+
+    # Consumer timeout for lag calculations
+    # @see Consumer#lag
+    CONSUMER_LAG_TIMEOUT_MS = 1_000
+
+    # Consumer timeout for offset-by-timestamp lookups
+    # @see Consumer#offsets_for_times
+    CONSUMER_OFFSETS_FOR_TIMES_TIMEOUT_MS = 1_000
+
+    # Consumer timeout for poll operations (used in each iteration)
+    # @see Consumer#each
+    CONSUMER_POLL_TIMEOUT_MS = 250
+
+    # Consumer timeout for seek operations (0 = non-blocking)
+    # @see Consumer#seek_by
+    CONSUMER_SEEK_TIMEOUT_MS = 0
+
+    # Consumer timeout for events_poll (0 = non-blocking async)
+    # @see Consumer#events_poll
+    CONSUMER_EVENTS_POLL_TIMEOUT_MS = 0
+
+    # Producer timeout for flush operations
+    # @see Producer#flush
+    PRODUCER_FLUSH_TIMEOUT_MS = 5_000
+
+    # Producer timeout for flush during purge
+    # @see Producer#purge
+    PRODUCER_PURGE_FLUSH_TIMEOUT_MS = 100
+
+    # Sleep interval used in producer purge loop
+    # @see Producer#purge
+    PRODUCER_PURGE_SLEEP_INTERVAL_MS = 1
+
+    # Timeout for transactional send_offsets_to_transaction
+    # @see Producer#send_offsets_to_transaction
+    PRODUCER_SEND_OFFSETS_TIMEOUT_MS = 5_000
+
+    # Default timeout for metadata requests
+    # @see Metadata#initialize
+    # @see Admin#metadata
+    METADATA_TIMEOUT_MS = 2_000
+
+    # Maximum retries for metadata requests on transient errors
+    # @see Metadata#initialize
+    METADATA_MAX_RETRIES = 10
+
+    # Base backoff time for metadata retry (100ms = 0.1s)
+    # @see Metadata#initialize
+    METADATA_RETRY_BACKOFF_BASE_MS = 100
+
+    # Default wait timeout for operation handles
+    # @see AbstractHandle#wait
+    HANDLE_WAIT_TIMEOUT_MS = 60_000
+
+    # Polling interval for NativeKafka background thread
+    # @see NativeKafka#initialize
+    # @see Config#producer
+    # @see Config#admin
+    NATIVE_KAFKA_POLL_TIMEOUT_MS = 100
+
+    # Sleep interval used in NativeKafka#synchronize wait loop
+    # @see NativeKafka#synchronize
+    NATIVE_KAFKA_SYNCHRONIZE_SLEEP_INTERVAL_MS = 10
+
+    # TTL for partitions count cache entries (30 seconds)
+    # @see Producer::PartitionsCountCache
+    PARTITIONS_COUNT_CACHE_TTL_MS = 30_000
+  end
+end
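
Because these constants now back the method defaults shown earlier, call sites can derive related values from them instead of repeating magic numbers. A sketch (assuming an open consumer on a hypothetical "events" topic):

    # Wait twice the library default when fetching committed offsets
    consumer.committed(nil, Rdkafka::Defaults::CONSUMER_COMMITTED_TIMEOUT_MS * 2)

    # Reuse the stock watermark timeout explicitly
    low, high = consumer.query_watermark_offsets(
      "events", 0, Rdkafka::Defaults::CONSUMER_QUERY_WATERMARK_TIMEOUT_MS
    )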