rdkafka 0.24.2 → 0.25.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (50)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +10 -0
  3. data/Gemfile +9 -0
  4. data/README.md +2 -1
  5. data/dist/{librdkafka-2.11.1.tar.gz → librdkafka-2.12.1.tar.gz} +0 -0
  6. data/docker-compose-ssl.yml +1 -1
  7. data/docker-compose.yml +1 -1
  8. data/lib/rdkafka/abstract_handle.rb +23 -5
  9. data/lib/rdkafka/admin/acl_binding_result.rb +1 -1
  10. data/lib/rdkafka/admin/config_resource_binding_result.rb +1 -0
  11. data/lib/rdkafka/admin/create_acl_handle.rb +3 -0
  12. data/lib/rdkafka/admin/create_acl_report.rb +3 -0
  13. data/lib/rdkafka/admin/create_partitions_handle.rb +3 -0
  14. data/lib/rdkafka/admin/create_partitions_report.rb +1 -0
  15. data/lib/rdkafka/admin/create_topic_handle.rb +3 -0
  16. data/lib/rdkafka/admin/create_topic_report.rb +3 -0
  17. data/lib/rdkafka/admin/delete_acl_handle.rb +3 -0
  18. data/lib/rdkafka/admin/delete_acl_report.rb +3 -0
  19. data/lib/rdkafka/admin/delete_groups_handle.rb +5 -0
  20. data/lib/rdkafka/admin/delete_groups_report.rb +3 -0
  21. data/lib/rdkafka/admin/delete_topic_handle.rb +3 -0
  22. data/lib/rdkafka/admin/delete_topic_report.rb +3 -0
  23. data/lib/rdkafka/admin/describe_acl_handle.rb +3 -0
  24. data/lib/rdkafka/admin/describe_acl_report.rb +3 -0
  25. data/lib/rdkafka/admin/describe_configs_handle.rb +3 -0
  26. data/lib/rdkafka/admin/describe_configs_report.rb +6 -0
  27. data/lib/rdkafka/admin/incremental_alter_configs_handle.rb +3 -0
  28. data/lib/rdkafka/admin/incremental_alter_configs_report.rb +6 -0
  29. data/lib/rdkafka/admin.rb +108 -112
  30. data/lib/rdkafka/bindings.rb +62 -29
  31. data/lib/rdkafka/callbacks.rb +71 -11
  32. data/lib/rdkafka/config.rb +20 -8
  33. data/lib/rdkafka/consumer/headers.rb +3 -2
  34. data/lib/rdkafka/consumer/message.rb +7 -3
  35. data/lib/rdkafka/consumer/partition.rb +6 -2
  36. data/lib/rdkafka/consumer/topic_partition_list.rb +8 -8
  37. data/lib/rdkafka/consumer.rb +40 -28
  38. data/lib/rdkafka/defaults.rb +106 -0
  39. data/lib/rdkafka/error.rb +16 -1
  40. data/lib/rdkafka/helpers/oauth.rb +11 -5
  41. data/lib/rdkafka/metadata.rb +29 -5
  42. data/lib/rdkafka/native_kafka.rb +26 -2
  43. data/lib/rdkafka/producer/delivery_report.rb +6 -2
  44. data/lib/rdkafka/producer/partitions_count_cache.rb +24 -14
  45. data/lib/rdkafka/producer.rb +49 -23
  46. data/lib/rdkafka/version.rb +6 -3
  47. data/lib/rdkafka.rb +1 -0
  48. data/rdkafka.gemspec +0 -7
  49. data/renovate.json +1 -8
  50. metadata +4 -87
@@ -1,6 +1,8 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  module Rdkafka
4
+ # Callback handlers for librdkafka events
5
+ # @private
4
6
  module Callbacks
5
7
  # Extracts attributes of a rd_kafka_topic_result_t
6
8
  #
@@ -8,12 +10,16 @@ module Rdkafka
8
10
  class TopicResult
9
11
  attr_reader :result_error, :error_string, :result_name
10
12
 
13
+ # @param topic_result_pointer [FFI::Pointer] pointer to the topic result struct
11
14
  def initialize(topic_result_pointer)
12
15
  @result_error = Rdkafka::Bindings.rd_kafka_topic_result_error(topic_result_pointer)
13
16
  @error_string = Rdkafka::Bindings.rd_kafka_topic_result_error_string(topic_result_pointer)
14
17
  @result_name = Rdkafka::Bindings.rd_kafka_topic_result_name(topic_result_pointer)
15
18
  end
16
19
 
20
+ # @param count [Integer] number of results
21
+ # @param array_pointer [FFI::Pointer] pointer to the results array
22
+ # @return [Array<TopicResult>] array of topic results
17
23
  def self.create_topic_results_from_array(count, array_pointer)
18
24
  (1..count).map do |index|
19
25
  result_pointer = (array_pointer + (index - 1)).read_pointer
@@ -22,13 +28,18 @@ module Rdkafka
22
28
  end
23
29
  end
24
30
 
31
+ # Extracts attributes of rd_kafka_group_result_t
32
+ #
33
+ # @private
25
34
  class GroupResult
26
35
  attr_reader :result_error, :error_string, :result_name
36
+
37
+ # @param group_result_pointer [FFI::Pointer] pointer to the group result struct
27
38
  def initialize(group_result_pointer)
28
39
  native_error = Rdkafka::Bindings.rd_kafka_group_result_error(group_result_pointer)
29
40
 
30
41
  if native_error.null?
31
- @result_error = 0
42
+ @result_error = Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR
32
43
  @error_string = FFI::Pointer::NULL
33
44
  else
34
45
  @result_error = native_error[:code]
@@ -37,6 +48,10 @@ module Rdkafka
37
48
 
38
49
  @result_name = Rdkafka::Bindings.rd_kafka_group_result_name(group_result_pointer)
39
50
  end
51
+
52
+ # @param count [Integer] number of results
53
+ # @param array_pointer [FFI::Pointer] pointer to the results array
54
+ # @return [Array<GroupResult>] array of group results
40
55
  def self.create_group_results_from_array(count, array_pointer)
41
56
  (1..count).map do |index|
42
57
  result_pointer = (array_pointer + (index - 1)).read_pointer
@@ -51,12 +66,16 @@ module Rdkafka
51
66
  class CreateAclResult
52
67
  attr_reader :result_error, :error_string
53
68
 
69
+ # @param acl_result_pointer [FFI::Pointer] pointer to the ACL result struct
54
70
  def initialize(acl_result_pointer)
55
71
  rd_kafka_error_pointer = Bindings.rd_kafka_acl_result_error(acl_result_pointer)
56
72
  @result_error = Rdkafka::Bindings.rd_kafka_error_code(rd_kafka_error_pointer)
57
73
  @error_string = Rdkafka::Bindings.rd_kafka_error_string(rd_kafka_error_pointer)
58
74
  end
59
75
 
76
+ # @param count [Integer] number of results
77
+ # @param array_pointer [FFI::Pointer] pointer to the results array
78
+ # @return [Array<CreateAclResult>] array of ACL results
60
79
  def self.create_acl_results_from_array(count, array_pointer)
61
80
  (1..count).map do |index|
62
81
  result_pointer = (array_pointer + (index - 1)).read_pointer
@@ -71,12 +90,13 @@ module Rdkafka
71
90
  class DeleteAclResult
72
91
  attr_reader :result_error, :error_string, :matching_acls, :matching_acls_count
73
92
 
93
+ # @param acl_result_pointer [FFI::Pointer] pointer to the delete ACL result response struct
74
94
  def initialize(acl_result_pointer)
75
95
  @matching_acls=[]
76
96
  rd_kafka_error_pointer = Rdkafka::Bindings.rd_kafka_DeleteAcls_result_response_error(acl_result_pointer)
77
97
  @result_error = Rdkafka::Bindings.rd_kafka_error_code(rd_kafka_error_pointer)
78
98
  @error_string = Rdkafka::Bindings.rd_kafka_error_string(rd_kafka_error_pointer)
79
- if @result_error == 0
99
+ if @result_error == Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR
80
100
  # Get the number of matching acls
81
101
  pointer_to_size_t = FFI::MemoryPointer.new(:int32)
82
102
  @matching_acls = Rdkafka::Bindings.rd_kafka_DeleteAcls_result_response_matching_acls(acl_result_pointer, pointer_to_size_t)
@@ -84,6 +104,9 @@ module Rdkafka
84
104
  end
85
105
  end
86
106
 
107
+ # @param count [Integer] number of results
108
+ # @param array_pointer [FFI::Pointer] pointer to the results array
109
+ # @return [Array<DeleteAclResult>] array of delete ACL results
87
110
  def self.delete_acl_results_from_array(count, array_pointer)
88
111
  (1..count).map do |index|
89
112
  result_pointer = (array_pointer + (index - 1)).read_pointer
@@ -98,11 +121,12 @@ module Rdkafka
98
121
  class DescribeAclResult
99
122
  attr_reader :result_error, :error_string, :matching_acls, :matching_acls_count
100
123
 
124
+ # @param event_ptr [FFI::Pointer] pointer to the event
101
125
  def initialize(event_ptr)
102
126
  @matching_acls=[]
103
127
  @result_error = Rdkafka::Bindings.rd_kafka_event_error(event_ptr)
104
128
  @error_string = Rdkafka::Bindings.rd_kafka_event_error_string(event_ptr)
105
- if @result_error == 0
129
+ if @result_error == Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR
106
130
  acl_describe_result = Rdkafka::Bindings.rd_kafka_event_DescribeAcls_result(event_ptr)
107
131
  # Get the number of matching acls
108
132
  pointer_to_size_t = FFI::MemoryPointer.new(:int32)
@@ -112,15 +136,19 @@ module Rdkafka
112
136
  end
113
137
  end
114
138
 
139
+ # Extracts attributes of rd_kafka_DescribeConfigs_result_t
140
+ #
141
+ # @private
115
142
  class DescribeConfigsResult
116
143
  attr_reader :result_error, :error_string, :results, :results_count
117
144
 
145
+ # @param event_ptr [FFI::Pointer] pointer to the event
118
146
  def initialize(event_ptr)
119
147
  @results=[]
120
148
  @result_error = Rdkafka::Bindings.rd_kafka_event_error(event_ptr)
121
149
  @error_string = Rdkafka::Bindings.rd_kafka_event_error_string(event_ptr)
122
150
 
123
- if @result_error == 0
151
+ if @result_error == Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR
124
152
  configs_describe_result = Rdkafka::Bindings.rd_kafka_event_DescribeConfigs_result(event_ptr)
125
153
  # Get the number of matching acls
126
154
  pointer_to_size_t = FFI::MemoryPointer.new(:int32)
@@ -130,15 +158,19 @@ module Rdkafka
130
158
  end
131
159
  end
132
160
 
161
+ # Extracts attributes of rd_kafka_IncrementalAlterConfigs_result_t
162
+ #
163
+ # @private
133
164
  class IncrementalAlterConfigsResult
134
165
  attr_reader :result_error, :error_string, :results, :results_count
135
166
 
167
+ # @param event_ptr [FFI::Pointer] pointer to the event
136
168
  def initialize(event_ptr)
137
169
  @results=[]
138
170
  @result_error = Rdkafka::Bindings.rd_kafka_event_error(event_ptr)
139
171
  @error_string = Rdkafka::Bindings.rd_kafka_event_error_string(event_ptr)
140
172
 
141
- if @result_error == 0
173
+ if @result_error == Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR
142
174
  incremental_alter_result = Rdkafka::Bindings.rd_kafka_event_IncrementalAlterConfigs_result(event_ptr)
143
175
  # Get the number of matching acls
144
176
  pointer_to_size_t = FFI::MemoryPointer.new(:int32)
@@ -150,7 +182,11 @@ module Rdkafka
150
182
 
151
183
  # @private
152
184
  class BackgroundEventCallback
153
- def self.call(_, event_ptr, _)
185
+ # Handles background events from librdkafka
186
+ # @param _client_ptr [FFI::Pointer] unused client pointer
187
+ # @param event_ptr [FFI::Pointer] pointer to the event
188
+ # @param _opaque_ptr [FFI::Pointer] unused opaque pointer
189
+ def self.call(_client_ptr, event_ptr, _opaque_ptr)
154
190
  case Rdkafka::Bindings.rd_kafka_event_type(event_ptr)
155
191
  when Rdkafka::Bindings::RD_KAFKA_EVENT_CREATETOPICS_RESULT
156
192
  process_create_topic(event_ptr)
@@ -175,6 +211,8 @@ module Rdkafka
175
211
 
176
212
  private
177
213
 
214
+ # Processes create topic result event
215
+ # @param event_ptr [FFI::Pointer] pointer to the event
178
216
  def self.process_create_topic(event_ptr)
179
217
  create_topics_result = Rdkafka::Bindings.rd_kafka_event_CreateTopics_result(event_ptr)
180
218
 
@@ -193,6 +231,8 @@ module Rdkafka
193
231
  end
194
232
  end
195
233
 
234
+ # Processes describe configs result event
235
+ # @param event_ptr [FFI::Pointer] pointer to the event
196
236
  def self.process_describe_configs(event_ptr)
197
237
  describe_configs = DescribeConfigsResult.new(event_ptr)
198
238
  describe_configs_handle_ptr = Rdkafka::Bindings.rd_kafka_event_opaque(event_ptr)
@@ -202,7 +242,7 @@ module Rdkafka
202
242
  describe_configs_handle[:response_string] = describe_configs.error_string
203
243
  describe_configs_handle[:pending] = false
204
244
 
205
- if describe_configs.result_error == 0
245
+ if describe_configs.result_error == Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR
206
246
  describe_configs_handle[:config_entries] = describe_configs.results
207
247
  describe_configs_handle[:entry_count] = describe_configs.results_count
208
248
  end
@@ -211,6 +251,8 @@ module Rdkafka
211
251
  end
212
252
  end
213
253
 
254
+ # Processes incremental alter configs result event
255
+ # @param event_ptr [FFI::Pointer] pointer to the event
214
256
  def self.process_incremental_alter_configs(event_ptr)
215
257
  incremental_alter = IncrementalAlterConfigsResult.new(event_ptr)
216
258
  incremental_alter_handle_ptr = Rdkafka::Bindings.rd_kafka_event_opaque(event_ptr)
@@ -220,7 +262,7 @@ module Rdkafka
220
262
  incremental_alter_handle[:response_string] = incremental_alter.error_string
221
263
  incremental_alter_handle[:pending] = false
222
264
 
223
- if incremental_alter.result_error == 0
265
+ if incremental_alter.result_error == Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR
224
266
  incremental_alter_handle[:config_entries] = incremental_alter.results
225
267
  incremental_alter_handle[:entry_count] = incremental_alter.results_count
226
268
  end
@@ -229,6 +271,8 @@ module Rdkafka
229
271
  end
230
272
  end
231
273
 
274
+ # Processes delete groups result event
275
+ # @param event_ptr [FFI::Pointer] pointer to the event
232
276
  def self.process_delete_groups(event_ptr)
233
277
  delete_groups_result = Rdkafka::Bindings.rd_kafka_event_DeleteGroups_result(event_ptr)
234
278
 
@@ -247,6 +291,8 @@ module Rdkafka
247
291
  end
248
292
  end
249
293
 
294
+ # Processes delete topic result event
295
+ # @param event_ptr [FFI::Pointer] pointer to the event
250
296
  def self.process_delete_topic(event_ptr)
251
297
  delete_topics_result = Rdkafka::Bindings.rd_kafka_event_DeleteTopics_result(event_ptr)
252
298
 
@@ -265,6 +311,8 @@ module Rdkafka
265
311
  end
266
312
  end
267
313
 
314
+ # Processes create partitions result event
315
+ # @param event_ptr [FFI::Pointer] pointer to the event
268
316
  def self.process_create_partitions(event_ptr)
269
317
  create_partitionss_result = Rdkafka::Bindings.rd_kafka_event_CreatePartitions_result(event_ptr)
270
318
 
@@ -283,6 +331,8 @@ module Rdkafka
283
331
  end
284
332
  end
285
333
 
334
+ # Processes create ACL result event
335
+ # @param event_ptr [FFI::Pointer] pointer to the event
286
336
  def self.process_create_acl(event_ptr)
287
337
  create_acls_result = Rdkafka::Bindings.rd_kafka_event_CreateAcls_result(event_ptr)
288
338
 
@@ -300,6 +350,8 @@ module Rdkafka
300
350
  end
301
351
  end
302
352
 
353
+ # Processes delete ACL result event
354
+ # @param event_ptr [FFI::Pointer] pointer to the event
303
355
  def self.process_delete_acl(event_ptr)
304
356
  delete_acls_result = Rdkafka::Bindings.rd_kafka_event_DeleteAcls_result(event_ptr)
305
357
 
@@ -313,7 +365,7 @@ module Rdkafka
313
365
  delete_acl_handle[:response] = delete_acl_results[0].result_error
314
366
  delete_acl_handle[:response_string] = delete_acl_results[0].error_string
315
367
 
316
- if delete_acl_results[0].result_error == 0
368
+ if delete_acl_results[0].result_error == Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR
317
369
  delete_acl_handle[:matching_acls] = delete_acl_results[0].matching_acls
318
370
  delete_acl_handle[:matching_acls_count] = delete_acl_results[0].matching_acls_count
319
371
  end
@@ -322,6 +374,8 @@ module Rdkafka
322
374
  end
323
375
  end
324
376
 
377
+ # Processes describe ACL result event
378
+ # @param event_ptr [FFI::Pointer] pointer to the event
325
379
  def self.process_describe_acl(event_ptr)
326
380
  describe_acl = DescribeAclResult.new(event_ptr)
327
381
  describe_acl_handle_ptr = Rdkafka::Bindings.rd_kafka_event_opaque(event_ptr)
@@ -330,7 +384,7 @@ module Rdkafka
330
384
  describe_acl_handle[:response] = describe_acl.result_error
331
385
  describe_acl_handle[:response_string] = describe_acl.error_string
332
386
 
333
- if describe_acl.result_error == 0
387
+ if describe_acl.result_error == Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR
334
388
  describe_acl_handle[:acls] = describe_acl.matching_acls
335
389
  describe_acl_handle[:acls_count] = describe_acl.matching_acls_count
336
390
  end
@@ -342,7 +396,11 @@ module Rdkafka
342
396
 
343
397
  # @private
344
398
  class DeliveryCallback
345
- def self.call(_, message_ptr, opaque_ptr)
399
+ # Handles message delivery callbacks
400
+ # @param _client_ptr [FFI::Pointer] unused client pointer
401
+ # @param message_ptr [FFI::Pointer] pointer to the delivered message
402
+ # @param opaque_ptr [FFI::Pointer] pointer to the opaque object for callback context
403
+ def self.call(_client_ptr, message_ptr, opaque_ptr)
346
404
  message = Rdkafka::Bindings::Message.new(message_ptr)
347
405
  delivery_handle_ptr_address = message[:_private].address
348
406
  if delivery_handle = Rdkafka::Producer::DeliveryHandle.remove(delivery_handle_ptr_address)
@@ -373,7 +431,9 @@ module Rdkafka
373
431
  end
374
432
  end
375
433
 
434
+ # @private
376
435
  @@mutex = Mutex.new
436
+ # @private
377
437
  @@current_pid = nil
378
438
 
379
439
  class << self
@@ -71,8 +71,7 @@ module Rdkafka
71
71
  # You can configure if and how often this happens using `statistics.interval.ms`.
72
72
  # The callback is called with a hash that's documented here: https://github.com/confluentinc/librdkafka/blob/master/STATISTICS.md
73
73
  #
74
- # @param callback [Proc, #call] The callback
75
- #
74
+ # @param callback [Proc, #call, nil] callable object or nil to clear
76
75
  # @return [nil]
77
76
  def self.statistics_callback=(callback)
78
77
  raise TypeError.new("Callback has to be callable") unless callback.respond_to?(:call) || callback == nil
@@ -90,8 +89,7 @@ module Rdkafka
90
89
  # If this callback is not set, global errors such as brokers becoming unavailable will only be sent to the logger, as defined by librdkafka.
91
90
  # The callback is called with an instance of RdKafka::Error.
92
91
  #
93
- # @param callback [Proc, #call] The callback
94
- #
92
+ # @param callback [Proc, #call] callable object to handle errors
95
93
  # @return [nil]
96
94
  def self.error_callback=(callback)
97
95
  raise TypeError.new("Callback has to be callable") unless callback.respond_to?(:call)
@@ -108,8 +106,7 @@ module Rdkafka
108
106
  # Sets the SASL/OAUTHBEARER token refresh callback.
109
107
  # This callback will be triggered when it is time to refresh the client's OAUTHBEARER token
110
108
  #
111
- # @param callback [Proc, #call] The callback
112
- #
109
+ # @param callback [Proc, #call, nil] callable object to handle token refresh or nil to clear
113
110
  # @return [nil]
114
111
  def self.oauthbearer_token_refresh_callback=(callback)
115
112
  raise TypeError.new("Callback has to be callable") unless callback.respond_to?(:call) || callback == nil
@@ -235,7 +232,7 @@ module Rdkafka
235
232
  #
236
233
  # @raise [ConfigError] When the configuration contains invalid options
237
234
  # @raise [ClientCreationError] When the native client cannot be created
238
- def producer(native_kafka_auto_start: true, native_kafka_poll_timeout_ms: 100)
235
+ def producer(native_kafka_auto_start: true, native_kafka_poll_timeout_ms: Defaults::NATIVE_KAFKA_POLL_TIMEOUT_MS)
239
236
  # Create opaque
240
237
  opaque = Opaque.new
241
238
  # Create Kafka config
@@ -270,7 +267,7 @@ module Rdkafka
270
267
  #
271
268
  # @raise [ConfigError] When the configuration contains invalid options
272
269
  # @raise [ClientCreationError] When the native client cannot be created
273
- def admin(native_kafka_auto_start: true, native_kafka_poll_timeout_ms: 100)
270
+ def admin(native_kafka_auto_start: true, native_kafka_poll_timeout_ms: Defaults::NATIVE_KAFKA_POLL_TIMEOUT_MS)
274
271
  opaque = Opaque.new
275
272
  config = native_config(opaque)
276
273
  Rdkafka::Bindings.rd_kafka_conf_set_background_event_cb(config, Rdkafka::Callbacks::BackgroundEventCallbackFunction)
@@ -301,6 +298,9 @@ module Rdkafka
301
298
 
302
299
  # This method is only intended to be used to create a client,
303
300
  # using it in another way will leak memory.
301
+ #
302
+ # @param opaque [Object, nil] optional opaque pointer for callbacks
303
+ # @return [FFI::Pointer] native rdkafka configuration pointer
304
304
  def native_config(opaque = nil)
305
305
  Rdkafka::Bindings.rd_kafka_conf_new.tap do |config|
306
306
  # Create config
@@ -345,6 +345,11 @@ module Rdkafka
345
345
  end
346
346
  end
347
347
 
348
+ # Creates a native Kafka handle
349
+ # @param config [FFI::Pointer] pointer to the native config
350
+ # @param type [Symbol] type of client (:rd_kafka_producer or :rd_kafka_consumer)
351
+ # @return [FFI::Pointer] pointer to the native Kafka handle
352
+ # @private
348
353
  def native_kafka(config, type)
349
354
  error_buffer = FFI::MemoryPointer.from_string(" " * 256)
350
355
  handle = Rdkafka::Bindings.rd_kafka_new(
@@ -374,10 +379,15 @@ module Rdkafka
374
379
  attr_accessor :producer
375
380
  attr_accessor :consumer_rebalance_listener
376
381
 
382
+ # Invokes the delivery callback on the producer if one is set
383
+ # @param delivery_report [Rdkafka::Producer::DeliveryReport] the delivery report
384
+ # @param delivery_handle [Rdkafka::Producer::DeliveryHandle] the delivery handle
377
385
  def call_delivery_callback(delivery_report, delivery_handle)
378
386
  producer.call_delivery_callback(delivery_report, delivery_handle) if producer
379
387
  end
380
388
 
389
+ # Invokes the on_partitions_assigned callback on the rebalance listener if set
390
+ # @param list [Rdkafka::Consumer::TopicPartitionList] the assigned partitions
381
391
  def call_on_partitions_assigned(list)
382
392
  return unless consumer_rebalance_listener
383
393
  return unless consumer_rebalance_listener.respond_to?(:on_partitions_assigned)
@@ -385,6 +395,8 @@ module Rdkafka
385
395
  consumer_rebalance_listener.on_partitions_assigned(list)
386
396
  end
387
397
 
398
+ # Invokes the on_partitions_revoked callback on the rebalance listener if set
399
+ # @param list [Rdkafka::Consumer::TopicPartitionList] the revoked partitions
388
400
  def call_on_partitions_revoked(list)
389
401
  return unless consumer_rebalance_listener
390
402
  return unless consumer_rebalance_listener.respond_to?(:on_partitions_revoked)
@@ -4,6 +4,7 @@ module Rdkafka
4
4
  class Consumer
5
5
  # Interface to return headers for a consumer message
6
6
  module Headers
7
+ # Empty frozen hash used when there are no headers
7
8
  EMPTY_HEADERS = {}.freeze
8
9
 
9
10
  # Reads a librdkafka native message's headers and returns them as a Ruby Hash
@@ -12,8 +13,8 @@ module Rdkafka
12
13
  #
13
14
  # @private
14
15
  #
15
- # @param [Rdkafka::Bindings::Message] native_message
16
- # @return [Hash<String, String|Array<String>>] headers Hash for the native_message
16
+ # @param native_message [Rdkafka::Bindings::Message] the native message to read headers from
17
+ # @return [Hash{String => String, Array<String>}] headers Hash for the native_message
17
18
  # @raise [Rdkafka::RdkafkaError] when fail to read headers
18
19
  def self.from_native(native_message)
19
20
  headers_ptrptr = FFI::MemoryPointer.new(:pointer)
@@ -28,10 +28,11 @@ module Rdkafka
28
28
  # @return [Time, nil]
29
29
  attr_reader :timestamp
30
30
 
31
- # @return [Hash<String, String>] a message headers
31
+ # @return [Hash{String => String}] message headers
32
32
  attr_reader :headers
33
33
 
34
34
  # @private
35
+ # @param native_message [Rdkafka::Bindings::Message] native message struct from librdkafka
35
36
  def initialize(native_message)
36
37
  # Set topic
37
38
  unless native_message[:rkt].null?
@@ -71,6 +72,11 @@ module Rdkafka
71
72
  "<Message in '#{topic}' with key '#{truncate(key)}', payload '#{truncate(payload)}', partition #{partition}, offset #{offset}, timestamp #{timestamp}#{is_headers}>"
72
73
  end
73
74
 
75
+ private
76
+
77
+ # Truncates a string for display purposes
78
+ # @param string [String, nil] the string to truncate
79
+ # @return [String, nil] truncated string or nil
74
80
  def truncate(string)
75
81
  if string && string.length > 40
76
82
  "#{string[0..39]}..."
@@ -79,8 +85,6 @@ module Rdkafka
79
85
  end
80
86
  end
81
87
 
82
- private
83
-
84
88
  end
85
89
  end
86
90
  end
@@ -17,7 +17,10 @@ module Rdkafka
17
17
  attr_reader :err
18
18
 
19
19
  # @private
20
- def initialize(partition, offset, err = 0)
20
+ # @param partition [Integer] partition number
21
+ # @param offset [Integer, nil] partition offset
22
+ # @param err [Integer] error code from librdkafka
23
+ def initialize(partition, offset, err = Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR)
21
24
  @partition = partition
22
25
  @offset = offset
23
26
  @err = err
@@ -28,7 +31,7 @@ module Rdkafka
28
31
  def to_s
29
32
  message = "<Partition #{partition}"
30
33
  message += " offset=#{offset}" if offset
31
- message += " err=#{err}" if err != 0
34
+ message += " err=#{err}" if err != Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR
32
35
  message += ">"
33
36
  message
34
37
  end
@@ -40,6 +43,7 @@ module Rdkafka
40
43
  end
41
44
 
42
45
  # Whether another partition is equal to this
46
+ # @param other [Object] object to compare with
43
47
  # @return [Boolean]
44
48
  def ==(other)
45
49
  self.class == other.class &&
@@ -49,7 +49,6 @@ module Rdkafka
49
49
  #
50
50
  # @example Add a topic with all topics up to a count
51
51
  # tpl.add_topic("topic", 9)
52
- #
53
52
  def add_topic(topic, partitions=nil)
54
53
  if partitions.nil?
55
54
  @data[topic.to_s] = nil
@@ -57,7 +56,7 @@ module Rdkafka
57
56
  if partitions.is_a? Integer
58
57
  partitions = (0..partitions - 1)
59
58
  end
60
- @data[topic.to_s] = partitions.map { |p| Partition.new(p, nil, 0) }
59
+ @data[topic.to_s] = partitions.map { |p| Partition.new(p, nil, Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR) }
61
60
  end
62
61
  end
63
62
 
@@ -65,10 +64,8 @@ module Rdkafka
65
64
  # Calling this method multiple times for the same topic will overwrite the previous configuration.
66
65
  #
67
66
  # @param topic [String] The topic's name
68
- # @param partitions_with_offsets [Hash<Integer, Integer>] The topic's partitions and offsets
69
- # @param partitions_with_offsets [Array<Consumer::Partition>] The topic's partitions with offsets
70
- # and metadata (if any)
71
- #
67
+ # @param partitions_with_offsets [Hash{Integer => Integer}, Array<Consumer::Partition>] The topic's
68
+ # partitions and offsets (Hash) or partitions with offsets and metadata (Array)
72
69
  # @return [nil]
73
70
  def add_topic_and_partitions_with_offsets(topic, partitions_with_offsets)
74
71
  @data[topic.to_s] = partitions_with_offsets.map do |p, o|
@@ -89,6 +86,9 @@ module Rdkafka
89
86
  "<TopicPartitionList: #{to_h}>"
90
87
  end
91
88
 
89
+ # Check equality with another TopicPartitionList
90
+ # @param other [TopicPartitionList] object to compare with
91
+ # @return [Boolean]
92
92
  def ==(other)
93
93
  self.to_h == other.to_h
94
94
  end
@@ -109,7 +109,7 @@ module Rdkafka
109
109
  native_tpl[:cnt].times do |i|
110
110
  ptr = native_tpl[:elems] + (i * Rdkafka::Bindings::TopicPartition.size)
111
111
  elem = Rdkafka::Bindings::TopicPartition.new(ptr)
112
- if elem[:partition] == -1
112
+ if elem[:partition] == Rdkafka::Bindings::RD_KAFKA_PARTITION_UA
113
113
  data[elem[:topic]] = nil
114
114
  else
115
115
  partitions = data[elem[:topic]] || []
@@ -161,7 +161,7 @@ module Rdkafka
161
161
  Rdkafka::Bindings.rd_kafka_topic_partition_list_add(
162
162
  tpl,
163
163
  topic,
164
- -1
164
+ Rdkafka::Bindings::RD_KAFKA_PARTITION_UA
165
165
  )
166
166
  end
167
167
  end