karafka-rdkafka 0.12.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (62)
  1. checksums.yaml +7 -0
  2. checksums.yaml.gz.sig +2 -0
  3. data/.gitignore +8 -0
  4. data/.rspec +1 -0
  5. data/.semaphore/semaphore.yml +23 -0
  6. data/.yardopts +2 -0
  7. data/CHANGELOG.md +104 -0
  8. data/Gemfile +3 -0
  9. data/Guardfile +19 -0
  10. data/LICENSE +21 -0
  11. data/README.md +114 -0
  12. data/Rakefile +96 -0
  13. data/bin/console +11 -0
  14. data/docker-compose.yml +24 -0
  15. data/ext/README.md +18 -0
  16. data/ext/Rakefile +62 -0
  17. data/lib/rdkafka/abstract_handle.rb +82 -0
  18. data/lib/rdkafka/admin/create_topic_handle.rb +27 -0
  19. data/lib/rdkafka/admin/create_topic_report.rb +22 -0
  20. data/lib/rdkafka/admin/delete_topic_handle.rb +27 -0
  21. data/lib/rdkafka/admin/delete_topic_report.rb +22 -0
  22. data/lib/rdkafka/admin.rb +155 -0
  23. data/lib/rdkafka/bindings.rb +312 -0
  24. data/lib/rdkafka/callbacks.rb +106 -0
  25. data/lib/rdkafka/config.rb +299 -0
  26. data/lib/rdkafka/consumer/headers.rb +63 -0
  27. data/lib/rdkafka/consumer/message.rb +84 -0
  28. data/lib/rdkafka/consumer/partition.rb +49 -0
  29. data/lib/rdkafka/consumer/topic_partition_list.rb +164 -0
  30. data/lib/rdkafka/consumer.rb +565 -0
  31. data/lib/rdkafka/error.rb +86 -0
  32. data/lib/rdkafka/metadata.rb +92 -0
  33. data/lib/rdkafka/producer/client.rb +47 -0
  34. data/lib/rdkafka/producer/delivery_handle.rb +22 -0
  35. data/lib/rdkafka/producer/delivery_report.rb +26 -0
  36. data/lib/rdkafka/producer.rb +178 -0
  37. data/lib/rdkafka/version.rb +5 -0
  38. data/lib/rdkafka.rb +22 -0
  39. data/rdkafka.gemspec +36 -0
  40. data/spec/rdkafka/abstract_handle_spec.rb +113 -0
  41. data/spec/rdkafka/admin/create_topic_handle_spec.rb +52 -0
  42. data/spec/rdkafka/admin/create_topic_report_spec.rb +16 -0
  43. data/spec/rdkafka/admin/delete_topic_handle_spec.rb +52 -0
  44. data/spec/rdkafka/admin/delete_topic_report_spec.rb +16 -0
  45. data/spec/rdkafka/admin_spec.rb +203 -0
  46. data/spec/rdkafka/bindings_spec.rb +134 -0
  47. data/spec/rdkafka/callbacks_spec.rb +20 -0
  48. data/spec/rdkafka/config_spec.rb +182 -0
  49. data/spec/rdkafka/consumer/message_spec.rb +139 -0
  50. data/spec/rdkafka/consumer/partition_spec.rb +57 -0
  51. data/spec/rdkafka/consumer/topic_partition_list_spec.rb +223 -0
  52. data/spec/rdkafka/consumer_spec.rb +1008 -0
  53. data/spec/rdkafka/error_spec.rb +89 -0
  54. data/spec/rdkafka/metadata_spec.rb +78 -0
  55. data/spec/rdkafka/producer/client_spec.rb +145 -0
  56. data/spec/rdkafka/producer/delivery_handle_spec.rb +42 -0
  57. data/spec/rdkafka/producer/delivery_report_spec.rb +17 -0
  58. data/spec/rdkafka/producer_spec.rb +525 -0
  59. data/spec/spec_helper.rb +139 -0
  60. data.tar.gz.sig +0 -0
  61. metadata +277 -0
  62. metadata.gz.sig +0 -0
data/lib/rdkafka/config.rb
@@ -0,0 +1,299 @@
+ require "logger"
+
+ module Rdkafka
+   # Configuration for a Kafka consumer or producer. You can create an instance and use
+   # the consumer and producer methods to create a client. Documentation of the available
+   # configuration options is available at https://github.com/edenhill/librdkafka/blob/master/CONFIGURATION.md.
+   class Config
+     # @private
+     @@logger = Logger.new(STDOUT)
+     # @private
+     @@statistics_callback = nil
+     # @private
+     @@error_callback = nil
+     # @private
+     @@opaques = {}
+     # @private
+     @@log_queue = Queue.new
+
+     Thread.start do
+       loop do
+         severity, msg = @@log_queue.pop
+         @@logger.add(severity, msg)
+       end
+     end
+
+     # Returns the current logger, by default this is a logger to stdout.
+     #
+     # @return [Logger]
+     def self.logger
+       @@logger
+     end
+
+     # Returns a queue whose contents will be passed to the configured logger. Each entry
+     # should follow the format [Logger::Severity, String]. The benefit over calling the
+     # logger directly is that this is safe to use from trap contexts.
+     #
+     # @return [Queue]
+     def self.log_queue
+       @@log_queue
+     end
+
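Note: the log queue exists because a Logger cannot be safely invoked from a signal trap context; pushing onto the queue is safe there, and the background thread started above drains it. A minimal sketch (the signal and message are illustrative):

Signal.trap("USR1") do
  # Logging directly from a trap handler can raise; enqueueing is trap-safe
  Rdkafka::Config.log_queue << [Logger::INFO, "USR1 received, still running"]
end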
+     # Set the logger that will be used for all logging output by this library.
+     #
+     # @param logger [Logger] The logger to be used
+     #
+     # @return [nil]
+     def self.logger=(logger)
+       raise NoLoggerError if logger.nil?
+       @@logger = logger
+     end
+
+     # Set a callback that will be called every time the underlying client emits statistics.
+     # You can configure if and how often this happens using `statistics.interval.ms`.
+     # The callback is called with a hash that's documented here: https://github.com/edenhill/librdkafka/blob/master/STATISTICS.md
+     #
+     # @param callback [Proc, #call] The callback
+     #
+     # @return [nil]
+     def self.statistics_callback=(callback)
+       raise TypeError.new("Callback has to be callable") unless callback.respond_to?(:call)
+       @@statistics_callback = callback
+     end
+
+     # Returns the current statistics callback, by default this is nil.
+     #
+     # @return [Proc, nil]
+     def self.statistics_callback
+       @@statistics_callback
+     end
+
+     # Set a callback that will be called every time the underlying client emits an error.
+     # If this callback is not set, global errors such as brokers becoming unavailable will only be sent to the logger, as defined by librdkafka.
+     # The callback is called with an instance of Rdkafka::RdkafkaError.
+     #
+     # @param callback [Proc, #call] The callback
+     #
+     # @return [nil]
+     def self.error_callback=(callback)
+       raise TypeError.new("Callback has to be callable") unless callback.respond_to?(:call)
+       @@error_callback = callback
+     end
+
+     # Returns the current error callback, by default this is nil.
+     #
+     # @return [Proc, nil]
+     def self.error_callback
+       @@error_callback
+     end
+
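Note: both callbacks are class-level and should be installed before any client is created. A minimal sketch (the `msg_cnt` statistics field read here is an assumption about the librdkafka statistics payload, and `statistics.interval.ms` must be set on the config for statistics to be emitted at all):

Rdkafka::Config.statistics_callback = ->(stats) do
  # stats is the decoded statistics hash emitted by librdkafka
  puts "producer queue depth: #{stats['msg_cnt']}"
end

Rdkafka::Config.error_callback = ->(error) do
  # error is an Rdkafka::RdkafkaError describing a client-level failure
  warn "rdkafka error: #{error}"
end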
+     # @private
+     def self.opaques
+       @@opaques
+     end
+
+     # Default config that can be overwritten.
+     DEFAULT_CONFIG = {
+       # Request api version so advanced features work
+       :"api.version.request" => true
+     }.freeze
+
+     # Required config that cannot be overwritten.
+     REQUIRED_CONFIG = {
+       # Enable log queues so we get callbacks in our own Ruby threads
+       :"log.queue" => true
+     }.freeze
+
+     # Returns a new config with the provided options which are merged with {DEFAULT_CONFIG}.
+     #
+     # @param config_hash [Hash{String,Symbol => String}] The config options for rdkafka
+     #
+     # @return [Config]
+     def initialize(config_hash = {})
+       @config_hash = DEFAULT_CONFIG.merge(config_hash)
+       @consumer_rebalance_listener = nil
+     end
+
+     # Set a config option.
+     #
+     # @param key [String] The config option's key
+     # @param value [String] The config option's value
+     #
+     # @return [nil]
+     def []=(key, value)
+       @config_hash[key] = value
+     end
+
+     # Get a config option with the specified key
+     #
+     # @param key [String] The config option's key
+     #
+     # @return [String, nil] The config option or `nil` if it is not present
+     def [](key)
+       @config_hash[key]
+     end
+
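Note: the index operators above are how a config is built up before creating a client; keys and values are stringified when handed to librdkafka (see native_config below), so string and symbol keys behave the same. A minimal sketch (broker address and group id are illustrative):

config = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092")
config[:"group.id"] = "example-group"      # symbol key, stringified on use
config["auto.offset.reset"] = "earliest"   # string key works the same way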
+     # Get notifications on partition assignment/revocation for the subscribed topics
+     #
+     # @param listener [Object, #on_partitions_assigned, #on_partitions_revoked] listener instance
+     def consumer_rebalance_listener=(listener)
+       @consumer_rebalance_listener = listener
+     end
+
+     # Create a consumer with this configuration.
+     #
+     # @raise [ConfigError] When the configuration contains invalid options
+     # @raise [ClientCreationError] When the native client cannot be created
+     #
+     # @return [Consumer] The created consumer
+     def consumer
+       opaque = Opaque.new
+       config = native_config(opaque)
+
+       if @consumer_rebalance_listener
+         opaque.consumer_rebalance_listener = @consumer_rebalance_listener
+         Rdkafka::Bindings.rd_kafka_conf_set_rebalance_cb(config, Rdkafka::Bindings::RebalanceCallback)
+       end
+
+       kafka = native_kafka(config, :rd_kafka_consumer)
+
+       # Redirect the main queue to the consumer
+       Rdkafka::Bindings.rd_kafka_poll_set_consumer(kafka)
+
+       # Return consumer with Kafka client
+       Rdkafka::Consumer.new(kafka)
+     end
+
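Note: a rebalance listener is any object that responds to the two hook methods; the Opaque guards at the bottom of this file make each hook optional. A minimal sketch, reusing the config built above (the class name is illustrative):

class ExampleRebalanceListener
  def on_partitions_assigned(consumer, list)
    puts "assigned: #{list}"
  end

  def on_partitions_revoked(consumer, list)
    puts "revoked: #{list}"
  end
end

config.consumer_rebalance_listener = ExampleRebalanceListener.new
consumer = config.consumer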
+     # Create a producer with this configuration.
+     #
+     # @raise [ConfigError] When the configuration contains invalid options
+     # @raise [ClientCreationError] When the native client cannot be created
+     #
+     # @return [Producer] The created producer
+     def producer
+       # Create opaque
+       opaque = Opaque.new
+       # Create Kafka config
+       config = native_config(opaque)
+       # Set callback to receive delivery reports on config
+       Rdkafka::Bindings.rd_kafka_conf_set_dr_msg_cb(config, Rdkafka::Callbacks::DeliveryCallbackFunction)
+       # Return producer with Kafka client
+       Rdkafka::Producer.new(Rdkafka::Producer::Client.new(native_kafka(config, :rd_kafka_producer)), self[:partitioner]).tap do |producer|
+         opaque.producer = producer
+       end
+     end
+
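Note: a producer created this way can be exercised end to end; this is a minimal sketch against the gem's public API, reusing the config from above (topic, payload and timeout are illustrative):

producer = config.producer
handle = producer.produce(topic: "example-topic", payload: "hello", key: "k1")
report = handle.wait(max_wait_timeout: 10)
puts "delivered to partition #{report.partition} at offset #{report.offset}"
producer.close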
+     # Create an admin instance with this configuration.
+     #
+     # @raise [ConfigError] When the configuration contains invalid options
+     # @raise [ClientCreationError] When the native client cannot be created
+     #
+     # @return [Admin] The created admin instance
+     def admin
+       opaque = Opaque.new
+       config = native_config(opaque)
+       Rdkafka::Bindings.rd_kafka_conf_set_background_event_cb(config, Rdkafka::Callbacks::BackgroundEventCallbackFunction)
+       Rdkafka::Admin.new(native_kafka(config, :rd_kafka_producer))
+     end
+
+     # Error that is raised by the underlying rdkafka library when an invalid configuration option is present.
+     class ConfigError < RuntimeError; end
+
+     # Error that is raised by the underlying rdkafka library when the client cannot be created.
+     class ClientCreationError < RuntimeError; end
+
+     # Error that is raised when trying to set a nil logger
+     class NoLoggerError < RuntimeError; end
+
+     private
+
+     # This method is only intended to be used to create a client,
+     # using it in another way will leak memory.
+     def native_config(opaque = nil)
+       Rdkafka::Bindings.rd_kafka_conf_new.tap do |config|
+         # Set the provided configuration options
+         @config_hash.merge(REQUIRED_CONFIG).each do |key, value|
+           error_buffer = FFI::MemoryPointer.from_string(" " * 256)
+           result = Rdkafka::Bindings.rd_kafka_conf_set(
+             config,
+             key.to_s,
+             value.to_s,
+             error_buffer,
+             256
+           )
+           unless result == :config_ok
+             raise ConfigError.new(error_buffer.read_string)
+           end
+         end
+
+         # Set opaque pointer that's used as a proxy for callbacks
+         if opaque
+           pointer = ::FFI::Pointer.new(:pointer, opaque.object_id)
+           Rdkafka::Bindings.rd_kafka_conf_set_opaque(config, pointer)
+
+           # Store opaque with the pointer as key. We use this approach instead
+           # of trying to convert the pointer to a Ruby object because there is
+           # no risk of a segfault this way.
+           Rdkafka::Config.opaques[pointer.to_i] = opaque
+         end
+
+         # Set log callback
+         Rdkafka::Bindings.rd_kafka_conf_set_log_cb(config, Rdkafka::Bindings::LogCallback)
+
+         # Set stats callback
+         Rdkafka::Bindings.rd_kafka_conf_set_stats_cb(config, Rdkafka::Bindings::StatsCallback)
+
+         # Set error callback
+         Rdkafka::Bindings.rd_kafka_conf_set_error_cb(config, Rdkafka::Bindings::ErrorCallback)
+       end
+     end
+
+     def native_kafka(config, type)
+       error_buffer = FFI::MemoryPointer.from_string(" " * 256)
+       handle = Rdkafka::Bindings.rd_kafka_new(
+         type,
+         config,
+         error_buffer,
+         256
+       )
+
+       if handle.null?
+         raise ClientCreationError.new(error_buffer.read_string)
+       end
+
+       # Redirect log to handle's queue
+       Rdkafka::Bindings.rd_kafka_set_log_queue(
+         handle,
+         Rdkafka::Bindings.rd_kafka_queue_get_main(handle)
+       )
+
+       # Return handle which should be closed using rd_kafka_destroy after usage.
+       handle
+     end
+   end
+
+   # @private
+   class Opaque
+     attr_accessor :producer
+     attr_accessor :consumer_rebalance_listener
+
+     def call_delivery_callback(delivery_report, delivery_handle)
+       producer.call_delivery_callback(delivery_report, delivery_handle) if producer
+     end
+
+     def call_on_partitions_assigned(consumer, list)
+       return unless consumer_rebalance_listener
+       return unless consumer_rebalance_listener.respond_to?(:on_partitions_assigned)
+
+       consumer_rebalance_listener.on_partitions_assigned(consumer, list)
+     end
+
+     def call_on_partitions_revoked(consumer, list)
+       return unless consumer_rebalance_listener
+       return unless consumer_rebalance_listener.respond_to?(:on_partitions_revoked)
+
+       consumer_rebalance_listener.on_partitions_revoked(consumer, list)
+     end
+   end
+ end
data/lib/rdkafka/consumer/headers.rb
@@ -0,0 +1,63 @@
+ module Rdkafka
+   class Consumer
+     # Message headers
+     class Headers
+       # Reads a native Kafka message's headers into a Ruby hash
+       #
+       # @return [Hash{Symbol => String}] the message headers
+       #
+       # @raise [Rdkafka::RdkafkaError] when the headers cannot be read
+       #
+       # @private
+       def self.from_native(native_message)
+         headers_ptrptr = FFI::MemoryPointer.new(:pointer)
+         err = Rdkafka::Bindings.rd_kafka_message_headers(native_message, headers_ptrptr)
+
+         if err == Rdkafka::Bindings::RD_KAFKA_RESP_ERR__NOENT
+           return {}
+         elsif err != Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR
+           raise Rdkafka::RdkafkaError.new(err, "Error reading message headers")
+         end
+
+         headers_ptr = headers_ptrptr.read_pointer
+
+         name_ptrptr = FFI::MemoryPointer.new(:pointer)
+         value_ptrptr = FFI::MemoryPointer.new(:pointer)
+         size_ptr = Rdkafka::Bindings::SizePtr.new
+         headers = {}
+
+         idx = 0
+         loop do
+           err = Rdkafka::Bindings.rd_kafka_header_get_all(
+             headers_ptr,
+             idx,
+             name_ptrptr,
+             value_ptrptr,
+             size_ptr
+           )
+
+           if err == Rdkafka::Bindings::RD_KAFKA_RESP_ERR__NOENT
+             break
+           elsif err != Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR
+             raise Rdkafka::RdkafkaError.new(err, "Error reading a message header at index #{idx}")
+           end
+
+           name_ptr = name_ptrptr.read_pointer
+           name = name_ptr.respond_to?(:read_string_to_null) ? name_ptr.read_string_to_null : name_ptr.read_string
+
+           size = size_ptr[:value]
+
+           value_ptr = value_ptrptr.read_pointer
+
+           value = value_ptr.read_string(size)
+
+           headers[name.to_sym] = value
+
+           idx += 1
+         end
+
+         headers
+       end
+     end
+   end
+ end
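Note: since names are symbolized above (`name.to_sym`), consumers look headers up with Symbol keys. A minimal sketch, reusing the config built in the previous file (topic and header name are illustrative):

consumer = config.consumer
consumer.subscribe("example-topic")
consumer.each do |message|
  # headers is a Hash with Symbol keys and String values
  trace_id = message.headers[:"trace-id"]
  puts "#{message.topic}/#{message.partition}@#{message.offset} trace=#{trace_id}"
end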
data/lib/rdkafka/consumer/message.rb
@@ -0,0 +1,84 @@
+ module Rdkafka
+   class Consumer
+     # A message that was consumed from a topic.
+     class Message
+       # The topic this message was consumed from
+       # @return [String]
+       attr_reader :topic
+
+       # The partition this message was consumed from
+       # @return [Integer]
+       attr_reader :partition
+
+       # This message's payload
+       # @return [String, nil]
+       attr_reader :payload
+
+       # This message's key
+       # @return [String, nil]
+       attr_reader :key
+
+       # This message's offset in its partition
+       # @return [Integer]
+       attr_reader :offset
+
+       # This message's timestamp, if provided by the broker
+       # @return [Time, nil]
+       attr_reader :timestamp
+
+       # This message's headers
+       # @return [Hash{Symbol => String}]
+       attr_reader :headers
+
+       # @private
+       def initialize(native_message)
+         # Set topic
+         unless native_message[:rkt].null?
+           @topic = Rdkafka::Bindings.rd_kafka_topic_name(native_message[:rkt])
+         end
+         # Set partition
+         @partition = native_message[:partition]
+         # Set payload
+         unless native_message[:payload].null?
+           @payload = native_message[:payload].read_string(native_message[:len])
+         end
+         # Set key
+         unless native_message[:key].null?
+           @key = native_message[:key].read_string(native_message[:key_len])
+         end
+         # Set offset
+         @offset = native_message[:offset]
+         # Set timestamp
+         raw_timestamp = Rdkafka::Bindings.rd_kafka_message_timestamp(native_message, nil)
+         @timestamp = if raw_timestamp && raw_timestamp > -1
+           # Convert the millisecond timestamp into seconds and microseconds for Time.at
+           seconds = raw_timestamp / 1000
+           microseconds = (raw_timestamp - seconds * 1000) * 1000
+           Time.at(seconds, microseconds)
+         else
+           nil
+         end
+
+         @headers = Headers.from_native(native_message)
+       end
+
+       # Human readable representation of this message.
+       # @return [String]
+       def to_s
+         headers_info = @headers.empty? ? "" : ", headers #{headers.size}"
+
+         "<Message in '#{topic}' with key '#{truncate(key)}', payload '#{truncate(payload)}', partition #{partition}, offset #{offset}, timestamp #{timestamp}#{headers_info}>"
+       end
+
+       def truncate(string)
+         if string && string.length > 40
+           "#{string[0..39]}..."
+         else
+           string
+         end
+       end
+
+       private
+     end
+   end
+ end
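Note: the broker timestamp arrives as an integer number of milliseconds since the epoch, which the constructor splits into the seconds-plus-microseconds form Time.at expects. The same arithmetic on a concrete value:

raw_timestamp = 1633024800123                           # milliseconds from the broker
seconds = raw_timestamp / 1000                          # => 1633024800
microseconds = (raw_timestamp - seconds * 1000) * 1000  # 123 ms => 123_000 us
Time.at(seconds, microseconds)                          # => 2021-09-30 18:00:00.000123 UTC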
data/lib/rdkafka/consumer/partition.rb
@@ -0,0 +1,49 @@
+ module Rdkafka
+   class Consumer
+     # Information about a partition, used in {TopicPartitionList}.
+     class Partition
+       # Partition number
+       # @return [Integer]
+       attr_reader :partition
+
+       # Partition's offset
+       # @return [Integer, nil]
+       attr_reader :offset
+
+       # Partition's error code
+       # @return [Integer]
+       attr_reader :err
+
+       # @private
+       def initialize(partition, offset, err = 0)
+         @partition = partition
+         @offset = offset
+         @err = err
+       end
+
+       # Human readable representation of this partition.
+       # @return [String]
+       def to_s
+         message = "<Partition #{partition}"
+         message += " offset=#{offset}" if offset
+         message += " err=#{err}" if err != 0
+         message += ">"
+         message
+       end
+
+       # Human readable representation of this partition.
+       # @return [String]
+       def inspect
+         to_s
+       end
+
+       # Whether another partition is equal to this one
+       # @return [Boolean]
+       def ==(other)
+         self.class == other.class &&
+           self.partition == other.partition &&
+           self.offset == other.offset
+       end
+     end
+   end
+ end
data/lib/rdkafka/consumer/topic_partition_list.rb
@@ -0,0 +1,164 @@
+ module Rdkafka
+   class Consumer
+     # A list of topics with their partition information
+     class TopicPartitionList
+       # Create a topic partition list.
+       #
+       # @param data [Hash{String => nil,Partition}] The topic and partition data or nil to create an empty list
+       #
+       # @return [TopicPartitionList]
+       def initialize(data = nil)
+         @data = data || {}
+       end
+
+       # Number of items in the list
+       # @return [Integer]
+       def count
+         i = 0
+         @data.each do |_topic, partitions|
+           if partitions
+             i += partitions.count
+           else
+             i += 1
+           end
+         end
+         i
+       end
+
+       # Whether this list is empty
+       # @return [Boolean]
+       def empty?
+         @data.empty?
+       end
+
+       # Add a topic, optionally with partitions, to the list.
+       # Calling this method multiple times for the same topic will overwrite the previous configuration.
+       #
+       # @example Add a topic with unassigned partitions
+       #   tpl.add_topic("topic")
+       #
+       # @example Add a topic with assigned partitions
+       #   tpl.add_topic("topic", (0..8))
+       #
+       # @example Add a topic with a partition count
+       #   tpl.add_topic("topic", 9)
+       #
+       # @param topic [String] The topic's name
+       # @param partitions [Array<Integer>, Range<Integer>, Integer] The topic's partitions or partition count
+       #
+       # @return [nil]
+       def add_topic(topic, partitions = nil)
+         if partitions.nil?
+           @data[topic.to_s] = nil
+         else
+           if partitions.is_a? Integer
+             partitions = (0..partitions - 1)
+           end
+           @data[topic.to_s] = partitions.map { |p| Partition.new(p, nil, 0) }
+         end
+       end
+
+       # Add a topic with the given partitions and offsets to the list.
+       # Calling this method multiple times for the same topic will overwrite the previous configuration.
+       #
+       # @param topic [String] The topic's name
+       # @param partitions_with_offsets [Hash<Integer, Integer>] The topic's partitions and offsets
+       #
+       # @return [nil]
+       def add_topic_and_partitions_with_offsets(topic, partitions_with_offsets)
+         @data[topic.to_s] = partitions_with_offsets.map { |p, o| Partition.new(p, o) }
+       end
+
+       # Return a `Hash` with the topics as keys and an array of partition information as the value if present.
+       #
+       # @return [Hash{String => Array<Partition>,nil}]
+       def to_h
+         @data
+       end
+
+       # Human readable representation of this list.
+       # @return [String]
+       def to_s
+         "<TopicPartitionList: #{to_h}>"
+       end
+
+       def ==(other)
+         self.to_h == other.to_h
+       end
+
+       # Create a new topic partition list based on a native one.
+       #
+       # @param pointer [FFI::Pointer] Optional pointer to an existing native list. Its contents will be copied.
+       #
+       # @return [TopicPartitionList]
+       #
+       # @private
+       def self.from_native_tpl(pointer)
+         # Data to be moved into the tpl
+         data = {}
+
+         # Create struct and copy its contents
+         native_tpl = Rdkafka::Bindings::TopicPartitionList.new(pointer)
+         native_tpl[:cnt].times do |i|
+           ptr = native_tpl[:elems] + (i * Rdkafka::Bindings::TopicPartition.size)
+           elem = Rdkafka::Bindings::TopicPartition.new(ptr)
+           if elem[:partition] == -1
+             data[elem[:topic]] = nil
+           else
+             partitions = data[elem[:topic]] || []
+             offset = if elem[:offset] == Rdkafka::Bindings::RD_KAFKA_OFFSET_INVALID
+               nil
+             else
+               elem[:offset]
+             end
+             partition = Partition.new(elem[:partition], offset, elem[:err])
+             partitions.push(partition)
+             data[elem[:topic]] = partitions
+           end
+         end
+
+         # Return the created object
+         TopicPartitionList.new(data)
+       end
+
+       # Create a native tpl with the contents of this object added.
+       #
+       # The pointer will be cleaned by `rd_kafka_topic_partition_list_destroy` when GC releases it.
+       #
+       # @return [FFI::Pointer]
+       # @private
+       def to_native_tpl
+         tpl = Rdkafka::Bindings.rd_kafka_topic_partition_list_new(count)
+
+         @data.each do |topic, partitions|
+           if partitions
+             partitions.each do |p|
+               Rdkafka::Bindings.rd_kafka_topic_partition_list_add(
+                 tpl,
+                 topic,
+                 p.partition
+               )
+
+               if p.offset
+                 Rdkafka::Bindings.rd_kafka_topic_partition_list_set_offset(
+                   tpl,
+                   topic,
+                   p.partition,
+                   p.offset
+                 )
+               end
+             end
+           else
+             Rdkafka::Bindings.rd_kafka_topic_partition_list_add(
+               tpl,
+               topic,
+               -1
+             )
+           end
+         end
+
+         tpl
+       end
+     end
+   end
+ end
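Note: putting the public pieces together, a list mixing bare partitions and explicit offsets can be built and inspected like this (topic names and offsets are illustrative):

tpl = Rdkafka::Consumer::TopicPartitionList.new
tpl.add_topic("example-topic", 3)   # partitions 0, 1 and 2, without offsets
tpl.add_topic_and_partitions_with_offsets("other-topic", 0 => 10, 1 => 20)
puts tpl.count   # => 5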