karafka-rdkafka 0.20.0.rc3-x86_64-linux-gnu

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (99)
  1. checksums.yaml +7 -0
  2. data/.github/CODEOWNERS +3 -0
  3. data/.github/FUNDING.yml +1 -0
  4. data/.github/workflows/ci_linux_x86_64_gnu.yml +248 -0
  5. data/.github/workflows/ci_macos_arm64.yml +301 -0
  6. data/.github/workflows/push_linux_x86_64_gnu.yml +60 -0
  7. data/.github/workflows/push_ruby.yml +37 -0
  8. data/.github/workflows/verify-action-pins.yml +16 -0
  9. data/.gitignore +15 -0
  10. data/.rspec +2 -0
  11. data/.ruby-gemset +1 -0
  12. data/.ruby-version +1 -0
  13. data/.yardopts +2 -0
  14. data/CHANGELOG.md +323 -0
  15. data/Gemfile +5 -0
  16. data/MIT-LICENSE +22 -0
  17. data/README.md +177 -0
  18. data/Rakefile +96 -0
  19. data/docker-compose.yml +25 -0
  20. data/ext/README.md +19 -0
  21. data/ext/Rakefile +131 -0
  22. data/ext/build_common.sh +361 -0
  23. data/ext/build_linux_x86_64_gnu.sh +306 -0
  24. data/ext/build_macos_arm64.sh +550 -0
  25. data/ext/librdkafka.so +0 -0
  26. data/karafka-rdkafka.gemspec +61 -0
  27. data/lib/rdkafka/abstract_handle.rb +116 -0
  28. data/lib/rdkafka/admin/acl_binding_result.rb +51 -0
  29. data/lib/rdkafka/admin/config_binding_result.rb +30 -0
  30. data/lib/rdkafka/admin/config_resource_binding_result.rb +18 -0
  31. data/lib/rdkafka/admin/create_acl_handle.rb +28 -0
  32. data/lib/rdkafka/admin/create_acl_report.rb +24 -0
  33. data/lib/rdkafka/admin/create_partitions_handle.rb +30 -0
  34. data/lib/rdkafka/admin/create_partitions_report.rb +6 -0
  35. data/lib/rdkafka/admin/create_topic_handle.rb +32 -0
  36. data/lib/rdkafka/admin/create_topic_report.rb +24 -0
  37. data/lib/rdkafka/admin/delete_acl_handle.rb +30 -0
  38. data/lib/rdkafka/admin/delete_acl_report.rb +23 -0
  39. data/lib/rdkafka/admin/delete_groups_handle.rb +28 -0
  40. data/lib/rdkafka/admin/delete_groups_report.rb +24 -0
  41. data/lib/rdkafka/admin/delete_topic_handle.rb +32 -0
  42. data/lib/rdkafka/admin/delete_topic_report.rb +24 -0
  43. data/lib/rdkafka/admin/describe_acl_handle.rb +30 -0
  44. data/lib/rdkafka/admin/describe_acl_report.rb +24 -0
  45. data/lib/rdkafka/admin/describe_configs_handle.rb +33 -0
  46. data/lib/rdkafka/admin/describe_configs_report.rb +48 -0
  47. data/lib/rdkafka/admin/incremental_alter_configs_handle.rb +33 -0
  48. data/lib/rdkafka/admin/incremental_alter_configs_report.rb +48 -0
  49. data/lib/rdkafka/admin.rb +832 -0
  50. data/lib/rdkafka/bindings.rb +582 -0
  51. data/lib/rdkafka/callbacks.rb +415 -0
  52. data/lib/rdkafka/config.rb +398 -0
  53. data/lib/rdkafka/consumer/headers.rb +79 -0
  54. data/lib/rdkafka/consumer/message.rb +86 -0
  55. data/lib/rdkafka/consumer/partition.rb +57 -0
  56. data/lib/rdkafka/consumer/topic_partition_list.rb +190 -0
  57. data/lib/rdkafka/consumer.rb +663 -0
  58. data/lib/rdkafka/error.rb +201 -0
  59. data/lib/rdkafka/helpers/oauth.rb +58 -0
  60. data/lib/rdkafka/helpers/time.rb +14 -0
  61. data/lib/rdkafka/metadata.rb +115 -0
  62. data/lib/rdkafka/native_kafka.rb +139 -0
  63. data/lib/rdkafka/producer/delivery_handle.rb +48 -0
  64. data/lib/rdkafka/producer/delivery_report.rb +45 -0
  65. data/lib/rdkafka/producer/partitions_count_cache.rb +216 -0
  66. data/lib/rdkafka/producer.rb +492 -0
  67. data/lib/rdkafka/version.rb +7 -0
  68. data/lib/rdkafka.rb +54 -0
  69. data/renovate.json +92 -0
  70. data/spec/rdkafka/abstract_handle_spec.rb +117 -0
  71. data/spec/rdkafka/admin/create_acl_handle_spec.rb +56 -0
  72. data/spec/rdkafka/admin/create_acl_report_spec.rb +18 -0
  73. data/spec/rdkafka/admin/create_topic_handle_spec.rb +54 -0
  74. data/spec/rdkafka/admin/create_topic_report_spec.rb +16 -0
  75. data/spec/rdkafka/admin/delete_acl_handle_spec.rb +85 -0
  76. data/spec/rdkafka/admin/delete_acl_report_spec.rb +72 -0
  77. data/spec/rdkafka/admin/delete_topic_handle_spec.rb +54 -0
  78. data/spec/rdkafka/admin/delete_topic_report_spec.rb +16 -0
  79. data/spec/rdkafka/admin/describe_acl_handle_spec.rb +85 -0
  80. data/spec/rdkafka/admin/describe_acl_report_spec.rb +73 -0
  81. data/spec/rdkafka/admin_spec.rb +769 -0
  82. data/spec/rdkafka/bindings_spec.rb +222 -0
  83. data/spec/rdkafka/callbacks_spec.rb +20 -0
  84. data/spec/rdkafka/config_spec.rb +258 -0
  85. data/spec/rdkafka/consumer/headers_spec.rb +73 -0
  86. data/spec/rdkafka/consumer/message_spec.rb +139 -0
  87. data/spec/rdkafka/consumer/partition_spec.rb +57 -0
  88. data/spec/rdkafka/consumer/topic_partition_list_spec.rb +248 -0
  89. data/spec/rdkafka/consumer_spec.rb +1299 -0
  90. data/spec/rdkafka/error_spec.rb +95 -0
  91. data/spec/rdkafka/metadata_spec.rb +79 -0
  92. data/spec/rdkafka/native_kafka_spec.rb +130 -0
  93. data/spec/rdkafka/producer/delivery_handle_spec.rb +60 -0
  94. data/spec/rdkafka/producer/delivery_report_spec.rb +25 -0
  95. data/spec/rdkafka/producer/partitions_count_cache_spec.rb +359 -0
  96. data/spec/rdkafka/producer/partitions_count_spec.rb +359 -0
  97. data/spec/rdkafka/producer_spec.rb +1234 -0
  98. data/spec/spec_helper.rb +181 -0
  99. metadata +244 -0
data/lib/rdkafka/config.rb
@@ -0,0 +1,398 @@
+ # frozen_string_literal: true
+
+ module Rdkafka
+   # Configuration for a Kafka consumer or producer. You can create an instance and use
+   # the consumer and producer methods to create a client. Documentation of the available
+   # configuration options is available on https://github.com/confluentinc/librdkafka/blob/master/CONFIGURATION.md.
+   class Config
+     # @private
+     @@logger = Logger.new(STDOUT)
+     # @private
+     @@statistics_callback = nil
+     # @private
+     @@error_callback = nil
+     # @private
+     @@opaques = ObjectSpace::WeakMap.new
+     # @private
+     @@log_queue = Queue.new
+     # We memoize the thread on the first log flush.
+     # This also allows us to restart the logger thread after forks.
+     @@log_thread = nil
+     # @private
+     @@log_mutex = Mutex.new
+     # @private
+     @@oauthbearer_token_refresh_callback = nil
+
+     # Returns the current logger, by default this is a logger to stdout.
+     #
+     # @return [Logger]
+     def self.logger
+       @@logger
+     end
+
+     # Makes sure that there is a thread consuming logs.
+     # We do not spawn the thread immediately; instead we check on each flush that it is
+     # still alive, so that it can be restarted after forking.
+     def self.ensure_log_thread
+       return if @@log_thread && @@log_thread.alive?
+
+       @@log_mutex.synchronize do
+         # Restart if dead (fork, crash)
+         @@log_thread = nil if @@log_thread && !@@log_thread.alive?
+
+         @@log_thread ||= Thread.start do
+           loop do
+             severity, msg = @@log_queue.pop
+             @@logger.add(severity, msg)
+           end
+         end
+       end
+     end
+
+     # Returns a queue whose contents will be passed to the configured logger. Each entry
+     # should follow the format [Logger::Severity, String]. The benefit over calling the
+     # logger directly is that this is safe to use from trap contexts.
+     #
+     # @return [Queue]
+     def self.log_queue
+       @@log_queue
+     end
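
The trap-context note above is the point of `log_queue`: Ruby's `Logger` takes a mutex, so calling it inside `Signal.trap` can raise or deadlock, while pushing onto a `Queue` is safe there and the log thread drains it later via `@@logger.add`. A minimal sketch (the signal name and message are illustrative):

    Signal.trap("TERM") do
      # Safe inside a trap handler; logging directly here would not be
      Rdkafka::Config.log_queue << [Logger::INFO, "rdkafka: TERM received, shutting down"]
    end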
+
+     # Set the logger that will be used for all logging output by this library.
+     #
+     # @param logger [Logger] The logger to be used
+     #
+     # @return [nil]
+     def self.logger=(logger)
+       raise NoLoggerError if logger.nil?
+       @@logger = logger
+     end
+
+     # Set a callback that will be called every time the underlying client emits statistics.
+     # You can configure if and how often this happens using `statistics.interval.ms`.
+     # The callback is called with a hash that's documented here: https://github.com/confluentinc/librdkafka/blob/master/STATISTICS.md
+     #
+     # @param callback [Proc, #call] The callback
+     #
+     # @return [nil]
+     def self.statistics_callback=(callback)
+       raise TypeError.new("Callback has to be callable") unless callback.respond_to?(:call) || callback == nil
+       @@statistics_callback = callback
+     end
+
+     # Returns the current statistics callback, by default this is nil.
+     #
+     # @return [Proc, nil]
+     def self.statistics_callback
+       @@statistics_callback
+     end
+
+     # Set a callback that will be called every time the underlying client emits an error.
+     # If this callback is not set, global errors such as brokers becoming unavailable will only be sent to the logger, as defined by librdkafka.
+     # The callback is called with an instance of Rdkafka::Error.
+     #
+     # @param callback [Proc, #call] The callback
+     #
+     # @return [nil]
+     def self.error_callback=(callback)
+       raise TypeError.new("Callback has to be callable") unless callback.respond_to?(:call)
+       @@error_callback = callback
+     end
+
+     # Returns the current error callback, by default this is nil.
+     #
+     # @return [Proc, nil]
+     def self.error_callback
+       @@error_callback
+     end
+
+     # Sets the SASL/OAUTHBEARER token refresh callback.
+     # This callback will be triggered when it is time to refresh the client's OAUTHBEARER token.
+     #
+     # @param callback [Proc, #call] The callback
+     #
+     # @return [nil]
+     def self.oauthbearer_token_refresh_callback=(callback)
+       raise TypeError.new("Callback has to be callable") unless callback.respond_to?(:call) || callback == nil
+       @@oauthbearer_token_refresh_callback = callback
+     end
+
+     # Returns the current OAUTHBEARER token refresh callback, by default this is nil.
+     #
+     # @return [Proc, nil]
+     def self.oauthbearer_token_refresh_callback
+       @@oauthbearer_token_refresh_callback
+     end
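
Together these class-level setters configure process-wide hooks before any client exists. A hedged sketch of wiring the statistics and error callbacks (the `msg_cnt` key assumes the librdkafka STATISTICS.md schema linked above; the handler bodies are illustrative):

    # Takes effect once `statistics.interval.ms` is set on the config (see below)
    Rdkafka::Config.statistics_callback = ->(stats) do
      # stats is a Hash parsed from librdkafka's JSON statistics payload
      puts "rdkafka queue depth: #{stats['msg_cnt']}"
    end

    Rdkafka::Config.error_callback = ->(error) do
      # Global errors (e.g. all brokers down) arrive here instead of only the log
      warn "rdkafka error: #{error}"
    end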
+
+     # @private
+     def self.opaques
+       @@opaques
+     end
+
+     # Default config that can be overwritten.
+     DEFAULT_CONFIG = {
+       # Request api version so advanced features work
+       :"api.version.request" => true
+     }.freeze
+
+     # Required config that cannot be overwritten.
+     REQUIRED_CONFIG = {
+       # Enable log queues so we get callbacks in our own Ruby threads
+       :"log.queue" => true
+     }.freeze
+
+     # Returns a new config with the provided options which are merged with {DEFAULT_CONFIG}.
+     #
+     # @param config_hash [Hash{String,Symbol => String}] The config options for rdkafka
+     #
+     # @return [Config]
+     def initialize(config_hash = {})
+       Callbacks.ensure_ffi_running
+
+       @config_hash = DEFAULT_CONFIG.merge(config_hash)
+       @consumer_rebalance_listener = nil
+       @consumer_poll_set = true
+     end
+
+     # Set a config option.
+     #
+     # @param key [String] The config option's key
+     # @param value [String] The config option's value
+     #
+     # @return [nil]
+     def []=(key, value)
+       @config_hash[key] = value
+     end
+
+     # Get a config option with the specified key.
+     #
+     # @param key [String] The config option's key
+     #
+     # @return [String, nil] The config option or `nil` if it is not present
+     def [](key)
+       @config_hash[key]
+     end
+
+     # Get notifications on partition assignment/revocation for the subscribed topics.
+     #
+     # @param listener [Object, #on_partitions_assigned, #on_partitions_revoked] listener instance
+     def consumer_rebalance_listener=(listener)
+       @consumer_rebalance_listener = listener
+     end
+
+     # Whether to use a single queue for the underlying consumer and events.
+     #
+     # This is an advanced API that allows for more granular control of the polling process.
+     # When this value is set to `false` (`true` by default), there will be two queues that need
+     # to be polled:
+     #   - the main librdkafka queue for events
+     #   - the consumer queue with messages and rebalances
+     #
+     # It is recommended to use the default and only set this to `false` in advanced
+     # multi-threaded and complex cases where granular control over event handling is needed.
+     #
+     # @param poll_set [Boolean]
+     def consumer_poll_set=(poll_set)
+       @consumer_poll_set = poll_set
+     end
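
A short sketch of building a configuration with the pieces above (the broker address, group id, and listener class are illustrative):

    config = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092")
    config["group.id"] = "example-group"          # needed by #consumer below
    config[:"statistics.interval.ms"] = 5_000     # enables the statistics callback

    # Rebalance notifications via the listener interface described above
    class ExampleRebalanceListener
      def on_partitions_assigned(list)
        puts "assigned: #{list}"
      end

      def on_partitions_revoked(list)
        puts "revoked: #{list}"
      end
    end

    config.consumer_rebalance_listener = ExampleRebalanceListener.new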
+
+     # Creates a consumer with this configuration.
+     #
+     # @param native_kafka_auto_start [Boolean] should the native Kafka operations be started
+     #   automatically. Defaults to true. Set to false only when doing complex initialization.
+     # @return [Consumer] The created consumer
+     #
+     # @raise [ConfigError] When the configuration contains invalid options
+     # @raise [ClientCreationError] When the native client cannot be created
+     def consumer(native_kafka_auto_start: true)
+       opaque = Opaque.new
+       config = native_config(opaque)
+
+       if @consumer_rebalance_listener
+         opaque.consumer_rebalance_listener = @consumer_rebalance_listener
+         Rdkafka::Bindings.rd_kafka_conf_set_rebalance_cb(config, Rdkafka::Bindings::RebalanceCallback)
+       end
+
+       # Create native client
+       kafka = native_kafka(config, :rd_kafka_consumer)
+
+       # Redirect the main queue to the consumer queue
+       Rdkafka::Bindings.rd_kafka_poll_set_consumer(kafka) if @consumer_poll_set
+
+       # Return consumer with Kafka client
+       Rdkafka::Consumer.new(
+         Rdkafka::NativeKafka.new(
+           kafka,
+           run_polling_thread: false,
+           opaque: opaque,
+           auto_start: native_kafka_auto_start
+         )
+       )
+     end
+
+     # Creates a producer with this configuration.
+     #
+     # @param native_kafka_auto_start [Boolean] should the native Kafka operations be started
+     #   automatically. Defaults to true. Set to false only when doing complex initialization.
+     # @param native_kafka_poll_timeout_ms [Integer] poll time in ms of the native Kafka
+     # @return [Producer] The created producer
+     #
+     # @raise [ConfigError] When the configuration contains invalid options
+     # @raise [ClientCreationError] When the native client cannot be created
+     def producer(native_kafka_auto_start: true, native_kafka_poll_timeout_ms: 100)
+       # Create opaque
+       opaque = Opaque.new
+       # Create Kafka config
+       config = native_config(opaque)
+       # Set callback to receive delivery reports on config
+       Rdkafka::Bindings.rd_kafka_conf_set_dr_msg_cb(config, Rdkafka::Callbacks::DeliveryCallbackFunction)
+       # Return producer with Kafka client
+       partitioner_name = self[:partitioner] || self["partitioner"]
+
+       kafka = native_kafka(config, :rd_kafka_producer)
+
+       Rdkafka::Producer.new(
+         Rdkafka::NativeKafka.new(
+           kafka,
+           run_polling_thread: true,
+           opaque: opaque,
+           auto_start: native_kafka_auto_start,
+           timeout_ms: native_kafka_poll_timeout_ms
+         ),
+         partitioner_name
+       ).tap do |producer|
+         opaque.producer = producer
+       end
+     end
+
+     # Creates an admin instance with this configuration.
+     #
+     # @param native_kafka_auto_start [Boolean] should the native Kafka operations be started
+     #   automatically. Defaults to true. Set to false only when doing complex initialization.
+     # @param native_kafka_poll_timeout_ms [Integer] poll time in ms of the native Kafka
+     # @return [Admin] The created admin instance
+     #
+     # @raise [ConfigError] When the configuration contains invalid options
+     # @raise [ClientCreationError] When the native client cannot be created
+     def admin(native_kafka_auto_start: true, native_kafka_poll_timeout_ms: 100)
+       opaque = Opaque.new
+       config = native_config(opaque)
+       Rdkafka::Bindings.rd_kafka_conf_set_background_event_cb(config, Rdkafka::Callbacks::BackgroundEventCallbackFunction)
+
+       kafka = native_kafka(config, :rd_kafka_producer)
+
+       Rdkafka::Admin.new(
+         Rdkafka::NativeKafka.new(
+           kafka,
+           run_polling_thread: true,
+           opaque: opaque,
+           auto_start: native_kafka_auto_start,
+           timeout_ms: native_kafka_poll_timeout_ms
+         )
+       )
+     end
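
With a config in hand, the three factory methods above each wrap a native librdkafka handle in a `NativeKafka`. A usage sketch, assuming the `Producer#produce`, `DeliveryHandle#wait`, and `Consumer#subscribe`/`#each` APIs from the rest of this gem (the topic name is illustrative):

    producer = config.producer
    handle = producer.produce(topic: "example-topic", payload: "hello")
    handle.wait                    # blocks until the delivery report arrives
    producer.close

    consumer = config.consumer     # uses the rebalance listener set earlier
    consumer.subscribe("example-topic")
    consumer.each { |message| puts message }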
+
+     # Error that is raised by the underlying rdkafka library if an invalid configuration option is present.
+     class ConfigError < RuntimeError; end
+
+     # Error that is raised by the underlying rdkafka library if the client cannot be created.
+     class ClientCreationError < RuntimeError; end
+
+     # Error that is raised when trying to set a nil logger
+     class NoLoggerError < RuntimeError; end
+
+     private
+
+     # This method is only intended to be used to create a client;
+     # using it in another way will leak memory.
+     def native_config(opaque = nil)
+       Rdkafka::Bindings.rd_kafka_conf_new.tap do |config|
+         # Create config
+         @config_hash.merge(REQUIRED_CONFIG).each do |key, value|
+           error_buffer = FFI::MemoryPointer.from_string(" " * 256)
+           result = Rdkafka::Bindings.rd_kafka_conf_set(
+             config,
+             key.to_s,
+             value.to_s,
+             error_buffer,
+             256
+           )
+           unless result == :config_ok
+             raise ConfigError.new(error_buffer.read_string)
+           end
+         end
+
+         # Set opaque pointer that's used as a proxy for callbacks
+         if opaque
+           pointer = ::FFI::Pointer.new(:pointer, opaque.object_id)
+           Rdkafka::Bindings.rd_kafka_conf_set_opaque(config, pointer)
+
+           # Store opaque with the pointer as key. We use this approach instead
+           # of trying to convert the pointer to a Ruby object because there is
+           # no risk of a segfault this way.
+           Rdkafka::Config.opaques[pointer.to_i] = opaque
+         end
+
+         # Set log callback
+         Rdkafka::Bindings.rd_kafka_conf_set_log_cb(config, Rdkafka::Bindings::LogCallback)
+
+         # Set stats callback
+         Rdkafka::Bindings.rd_kafka_conf_set_stats_cb(config, Rdkafka::Bindings::StatsCallback)
+
+         # Set error callback
+         Rdkafka::Bindings.rd_kafka_conf_set_error_cb(config, Rdkafka::Bindings::ErrorCallback)
+
+         # Set oauth callback
+         if Rdkafka::Config.oauthbearer_token_refresh_callback
+           Rdkafka::Bindings.rd_kafka_conf_set_oauthbearer_token_refresh_cb(config, Rdkafka::Bindings::OAuthbearerTokenRefreshCallback)
+         end
+       end
+     end
+
+     def native_kafka(config, type)
+       error_buffer = FFI::MemoryPointer.from_string(" " * 256)
+       handle = Rdkafka::Bindings.rd_kafka_new(
+         type,
+         config,
+         error_buffer,
+         256
+       )
+
+       if handle.null?
+         raise ClientCreationError.new(error_buffer.read_string)
+       end
+
+       # Redirect log to handle's queue
+       Rdkafka::Bindings.rd_kafka_set_log_queue(
+         handle,
+         Rdkafka::Bindings.rd_kafka_queue_get_main(handle)
+       )
+
+       # Return handle which should be closed using rd_kafka_destroy after usage.
+       handle
+     end
+   end
+
+   # @private
+   class Opaque
+     attr_accessor :producer
+     attr_accessor :consumer_rebalance_listener
+
+     def call_delivery_callback(delivery_report, delivery_handle)
+       producer.call_delivery_callback(delivery_report, delivery_handle) if producer
+     end
+
+     def call_on_partitions_assigned(list)
+       return unless consumer_rebalance_listener
+       return unless consumer_rebalance_listener.respond_to?(:on_partitions_assigned)
+
+       consumer_rebalance_listener.on_partitions_assigned(list)
+     end
+
+     def call_on_partitions_revoked(list)
+       return unless consumer_rebalance_listener
+       return unless consumer_rebalance_listener.respond_to?(:on_partitions_revoked)
+
+       consumer_rebalance_listener.on_partitions_revoked(list)
+     end
+   end
+ end
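
Because `native_config` pushes every option through `rd_kafka_conf_set`, a mistyped key surfaces as a `ConfigError` at client-creation time rather than being silently ignored. A sketch (the option name is deliberately invalid):

    begin
      Rdkafka::Config.new("not.a.real.option" => "x").producer
    rescue Rdkafka::Config::ConfigError => e
      puts "invalid config: #{e.message}"  # message comes from librdkafka's error buffer
    end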
data/lib/rdkafka/consumer/headers.rb
@@ -0,0 +1,79 @@
+ # frozen_string_literal: true
+
+ module Rdkafka
+   class Consumer
+     # Interface to return headers for a consumer message
+     module Headers
+       EMPTY_HEADERS = {}.freeze
+
+       # Reads a librdkafka native message's headers and returns them as a Ruby Hash
+       # where each key maps to either a String (single value) or Array<String> (multiple values)
+       # to support duplicate headers per KIP-82
+       #
+       # @private
+       #
+       # @param [Rdkafka::Bindings::Message] native_message
+       # @return [Hash<String, String|Array<String>>] headers Hash for the native_message
+       # @raise [Rdkafka::RdkafkaError] when the headers cannot be read
+       def self.from_native(native_message)
+         headers_ptrptr = FFI::MemoryPointer.new(:pointer)
+         err = Rdkafka::Bindings.rd_kafka_message_headers(native_message, headers_ptrptr)
+
+         if err == Rdkafka::Bindings::RD_KAFKA_RESP_ERR__NOENT
+           return EMPTY_HEADERS
+         elsif err != Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR
+           raise Rdkafka::RdkafkaError.new(err, "Error reading message headers")
+         end
+
+         headers_ptr = headers_ptrptr.read_pointer
+
+         name_ptrptr = FFI::MemoryPointer.new(:pointer)
+         value_ptrptr = FFI::MemoryPointer.new(:pointer)
+         size_ptr = Rdkafka::Bindings::SizePtr.new
+
+         headers = {}
+
+         idx = 0
+         loop do
+           err = Rdkafka::Bindings.rd_kafka_header_get_all(
+             headers_ptr,
+             idx,
+             name_ptrptr,
+             value_ptrptr,
+             size_ptr
+           )
+
+           if err == Rdkafka::Bindings::RD_KAFKA_RESP_ERR__NOENT
+             break
+           elsif err != Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR
+             raise Rdkafka::RdkafkaError.new(err, "Error reading a message header at index #{idx}")
+           end
+
+           name_ptr = name_ptrptr.read_pointer
+           name = name_ptr.respond_to?(:read_string_to_null) ? name_ptr.read_string_to_null : name_ptr.read_string
+
+           size = size_ptr[:value]
+
+           value_ptr = value_ptrptr.read_pointer
+           value = value_ptr.read_string(size)
+
+           if headers.key?(name)
+             # If we've seen this header before, convert to an array if needed and append
+             if headers[name].is_a?(Array)
+               headers[name] << value
+             else
+               headers[name] = [headers[name], value]
+             end
+           else
+             # First occurrence - store as a single value
+             headers[name] = value
+           end
+
+           idx += 1
+         end
+
+         headers.freeze
+       end
+     end
+   end
+ end
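
The duplicate-header handling above means a key's value type depends on the data. A hedged sketch of consuming the result (header names and values are illustrative):

    headers = message.headers
    # => { "trace-id" => "abc123", "retry" => ["1", "2"] }   # "retry" appeared twice
    Array(headers["retry"])     # => ["1", "2"]  - normalize when the arity is unknown
    Array(headers["trace-id"])  # => ["abc123"]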
data/lib/rdkafka/consumer/message.rb
@@ -0,0 +1,86 @@
+ # frozen_string_literal: true
+
+ module Rdkafka
+   class Consumer
+     # A message that was consumed from a topic.
+     class Message
+       # The topic this message was consumed from
+       # @return [String]
+       attr_reader :topic
+
+       # The partition this message was consumed from
+       # @return [Integer]
+       attr_reader :partition
+
+       # This message's payload
+       # @return [String, nil]
+       attr_reader :payload
+
+       # This message's key
+       # @return [String, nil]
+       attr_reader :key
+
+       # This message's offset in its partition
+       # @return [Integer]
+       attr_reader :offset
+
+       # This message's timestamp, if provided by the broker
+       # @return [Time, nil]
+       attr_reader :timestamp
+
+       # This message's headers
+       # @return [Hash<String, String>]
+       attr_reader :headers
+
+       # @private
+       def initialize(native_message)
+         # Set topic
+         unless native_message[:rkt].null?
+           @topic = Rdkafka::Bindings.rd_kafka_topic_name(native_message[:rkt])
+         end
+         # Set partition
+         @partition = native_message[:partition]
+         # Set payload
+         unless native_message[:payload].null?
+           @payload = native_message[:payload].read_string(native_message[:len])
+         end
+         # Set key
+         unless native_message[:key].null?
+           @key = native_message[:key].read_string(native_message[:key_len])
+         end
+         # Set offset
+         @offset = native_message[:offset]
+         # Set timestamp
+         raw_timestamp = Rdkafka::Bindings.rd_kafka_message_timestamp(native_message, nil)
+         @timestamp = if raw_timestamp && raw_timestamp > -1
+           # Calculate seconds and microseconds
+           seconds = raw_timestamp / 1000
+           milliseconds = (raw_timestamp - seconds * 1000) * 1000
+           Time.at(seconds, milliseconds)
+         else
+           nil
+         end
+
+         @headers = Headers.from_native(native_message)
+       end
+
+       # Human readable representation of this message.
+       # @return [String]
+       def to_s
+         is_headers = @headers.empty? ? "" : ", headers #{headers.size}"
+
+         "<Message in '#{topic}' with key '#{truncate(key)}', payload '#{truncate(payload)}', partition #{partition}, offset #{offset}, timestamp #{timestamp}#{is_headers}>"
+       end
+
+       def truncate(string)
+         if string && string.length > 40
+           "#{string[0..39]}..."
+         else
+           string
+         end
+       end
+
+       private
+
+     end
+   end
+ end
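
The timestamp branch above converts librdkafka's millisecond value into a `Time` with sub-second precision; note that the local variable named `milliseconds` actually holds microseconds, which is what the two-argument form of `Time.at` expects. A worked example (the raw value is illustrative):

    raw = 1_700_000_123_456                       # broker CreateTime in milliseconds
    seconds = raw / 1000                          # => 1_700_000_123
    microseconds = (raw - seconds * 1000) * 1000  # => 456_000
    Time.at(seconds, microseconds)                # => a Time with .456000 fractional seconds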
data/lib/rdkafka/consumer/partition.rb
@@ -0,0 +1,57 @@
+ # frozen_string_literal: true
+
+ module Rdkafka
+   class Consumer
+     # Information about a partition, used in {TopicPartitionList}.
+     class Partition
+       # Partition number
+       # @return [Integer]
+       attr_reader :partition
+
+       # Partition's offset
+       # @return [Integer, nil]
+       attr_reader :offset
+
+       # Partition's error code
+       # @return [Integer]
+       attr_reader :err
+
+       # Partition metadata in the context of a consumer
+       # @return [String, nil]
+       attr_reader :metadata
+
+       # @private
+       def initialize(partition, offset, err = 0, metadata = nil)
+         @partition = partition
+         @offset = offset
+         @err = err
+         @metadata = metadata
+       end
+
+       # Human readable representation of this partition.
+       # @return [String]
+       def to_s
+         message = "<Partition #{partition}"
+         message += " offset=#{offset}" if offset
+         message += " err=#{err}" if err != 0
+         message += " metadata=#{metadata}" if metadata != nil
+         message += ">"
+         message
+       end
+
+       # Human readable representation of this partition.
+       # @return [String]
+       def inspect
+         to_s
+       end
+
+       # Whether another partition is equal to this one
+       # @return [Boolean]
+       def ==(other)
+         self.class == other.class &&
+           self.partition == other.partition &&
+           self.offset == other.offset
+       end
+     end
+   end
+ end
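
Note that `==` above compares only class, partition number, and offset; `err` and `metadata` are deliberately ignored. A sketch (direct construction is marked `@private` and shown only for illustration):

    a = Rdkafka::Consumer::Partition.new(0, 100)
    b = Rdkafka::Consumer::Partition.new(0, 100, 5, "meta")
    a == b    # => true, despite differing err and metadata
    a.to_s    # => "<Partition 0 offset=100>"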