rdkafka 0.16.0.beta1 → 0.16.0

@@ -113,6 +113,42 @@ module Rdkafka
       end
     end
 
+    class DescribeConfigsResult
+      attr_reader :result_error, :error_string, :results, :results_count
+
+      def initialize(event_ptr)
+        @results = []
+        @result_error = Rdkafka::Bindings.rd_kafka_event_error(event_ptr)
+        @error_string = Rdkafka::Bindings.rd_kafka_event_error_string(event_ptr)
+
+        if @result_error == 0
+          configs_describe_result = Rdkafka::Bindings.rd_kafka_event_DescribeConfigs_result(event_ptr)
+          # Get the number of matching config resources
+          pointer_to_size_t = FFI::MemoryPointer.new(:int32)
+          @results = Rdkafka::Bindings.rd_kafka_DescribeConfigs_result_resources(configs_describe_result, pointer_to_size_t)
+          @results_count = pointer_to_size_t.read_int
+        end
+      end
+    end
+
+    class IncrementalAlterConfigsResult
+      attr_reader :result_error, :error_string, :results, :results_count
+
+      def initialize(event_ptr)
+        @results = []
+        @result_error = Rdkafka::Bindings.rd_kafka_event_error(event_ptr)
+        @error_string = Rdkafka::Bindings.rd_kafka_event_error_string(event_ptr)
+
+        if @result_error == 0
+          incremental_alter_result = Rdkafka::Bindings.rd_kafka_event_IncrementalAlterConfigs_result(event_ptr)
+          # Get the number of altered config resources
+          pointer_to_size_t = FFI::MemoryPointer.new(:int32)
+          @results = Rdkafka::Bindings.rd_kafka_IncrementalAlterConfigs_result_resources(incremental_alter_result, pointer_to_size_t)
+          @results_count = pointer_to_size_t.read_int
+        end
+      end
+    end
+
     # FFI Function used for Create Topic and Delete Topic callbacks
     BackgroundEventCallbackFunction = FFI::Function.new(
       :void, [:pointer, :pointer, :pointer]
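The two result classes above follow librdkafka's out-parameter convention: `rd_kafka_*_result_resources` returns a pointer to a C array and writes the element count into a caller-supplied buffer (named `pointer_to_size_t` here, though it is allocated as an `:int32`). A minimal sketch of the pattern, assuming an `event_ptr` already delivered for a DescribeConfigs event; it is not runnable standalone, since such events only arrive from a live admin client:

```ruby
count_ptr = FFI::MemoryPointer.new(:int32)
result    = Rdkafka::Bindings.rd_kafka_event_DescribeConfigs_result(event_ptr)
# Returns the resource array; the count lands in count_ptr as a side effect
resources = Rdkafka::Bindings.rd_kafka_DescribeConfigs_result_resources(result, count_ptr)
resources_count = count_ptr.read_int
```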
@@ -123,20 +159,24 @@ module Rdkafka
     # @private
     class BackgroundEventCallback
       def self.call(_, event_ptr, _)
-        event_type = Rdkafka::Bindings.rd_kafka_event_type(event_ptr)
-        if event_type == Rdkafka::Bindings::RD_KAFKA_EVENT_CREATETOPICS_RESULT
+        case Rdkafka::Bindings.rd_kafka_event_type(event_ptr)
+        when Rdkafka::Bindings::RD_KAFKA_EVENT_CREATETOPICS_RESULT
           process_create_topic(event_ptr)
-        elsif event_type == Rdkafka::Bindings::RD_KAFKA_EVENT_DELETETOPICS_RESULT
+        when Rdkafka::Bindings::RD_KAFKA_EVENT_DESCRIBECONFIGS_RESULT
+          process_describe_configs(event_ptr)
+        when Rdkafka::Bindings::RD_KAFKA_EVENT_INCREMENTALALTERCONFIGS_RESULT
+          process_incremental_alter_configs(event_ptr)
+        when Rdkafka::Bindings::RD_KAFKA_EVENT_DELETETOPICS_RESULT
           process_delete_topic(event_ptr)
-        elsif event_type == Rdkafka::Bindings::RD_KAFKA_ADMIN_OP_CREATEPARTITIONS_RESULT
+        when Rdkafka::Bindings::RD_KAFKA_ADMIN_OP_CREATEPARTITIONS_RESULT
           process_create_partitions(event_ptr)
-        elsif event_type == Rdkafka::Bindings::RD_KAFKA_EVENT_CREATEACLS_RESULT
+        when Rdkafka::Bindings::RD_KAFKA_EVENT_CREATEACLS_RESULT
           process_create_acl(event_ptr)
-        elsif event_type == Rdkafka::Bindings::RD_KAFKA_EVENT_DELETEACLS_RESULT
+        when Rdkafka::Bindings::RD_KAFKA_EVENT_DELETEACLS_RESULT
           process_delete_acl(event_ptr)
-        elsif event_type == Rdkafka::Bindings::RD_KAFKA_EVENT_DESCRIBEACLS_RESULT
+        when Rdkafka::Bindings::RD_KAFKA_EVENT_DESCRIBEACLS_RESULT
           process_describe_acl(event_ptr)
-        elsif event_type == Rdkafka::Bindings::RD_KAFKA_EVENT_DELETEGROUPS_RESULT
+        when Rdkafka::Bindings::RD_KAFKA_EVENT_DELETEGROUPS_RESULT
           process_delete_groups(event_ptr)
         end
       end
@@ -161,6 +201,42 @@ module Rdkafka
         end
       end
 
+      def self.process_describe_configs(event_ptr)
+        describe_configs = DescribeConfigsResult.new(event_ptr)
+        describe_configs_handle_ptr = Rdkafka::Bindings.rd_kafka_event_opaque(event_ptr)
+
+        if describe_configs_handle = Rdkafka::Admin::DescribeConfigsHandle.remove(describe_configs_handle_ptr.address)
+          describe_configs_handle[:response] = describe_configs.result_error
+          describe_configs_handle[:response_string] = describe_configs.error_string
+          describe_configs_handle[:pending] = false
+
+          if describe_configs.result_error == 0
+            describe_configs_handle[:config_entries] = describe_configs.results
+            describe_configs_handle[:entry_count] = describe_configs.results_count
+          end
+
+          describe_configs_handle.unlock
+        end
+      end
+
+      def self.process_incremental_alter_configs(event_ptr)
+        incremental_alter = IncrementalAlterConfigsResult.new(event_ptr)
+        incremental_alter_handle_ptr = Rdkafka::Bindings.rd_kafka_event_opaque(event_ptr)
+
+        if incremental_alter_handle = Rdkafka::Admin::IncrementalAlterConfigsHandle.remove(incremental_alter_handle_ptr.address)
+          incremental_alter_handle[:response] = incremental_alter.result_error
+          incremental_alter_handle[:response_string] = incremental_alter.error_string
+          incremental_alter_handle[:pending] = false
+
+          if incremental_alter.result_error == 0
+            incremental_alter_handle[:config_entries] = incremental_alter.results
+            incremental_alter_handle[:entry_count] = incremental_alter.results_count
+          end
+
+          incremental_alter_handle.unlock
+        end
+      end
+
       def self.process_delete_groups(event_ptr)
         delete_groups_result = Rdkafka::Bindings.rd_kafka_event_DeleteGroups_result(event_ptr)
 
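These callbacks complete handles registered when the admin request was issued: `rd_kafka_event_opaque` recovers the address under which the Ruby handle was stored, the struct fields are filled in, and `unlock` releases the waiting caller. From the caller's side the round trip looks roughly like the sketch below; the `#describe_configs` method name, its argument shape, and the report accessors are assumptions inferred from the handle and report files required elsewhere in this release:

```ruby
admin  = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092").admin
# 2 == RD_KAFKA_RESOURCE_TOPIC in librdkafka; the argument shape is an assumption
handle = admin.describe_configs([{ resource_type: 2, resource_name: "example-topic" }])

# Blocks until process_describe_configs unlocks the handle (or the wait times out)
report = handle.wait(max_wait_timeout: 60)
report.resources.each do |resource|
  resource.configs.each { |config| puts "#{config.name}=#{config.value}" }
end

admin.close
```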
@@ -263,7 +339,7 @@ module Rdkafka
           describe_acl_handle[:response_string] = describe_acl.error_string
 
           if describe_acl.result_error == 0
-            describe_acl_handle[:acls] = describe_acl.matching_acls
+            describe_acl_handle[:acls] = describe_acl.matching_acls
             describe_acl_handle[:acls_count] = describe_acl.matching_acls_count
           end
 
@@ -4,17 +4,7 @@ module Rdkafka
   class Consumer
     # Interface to return headers for a consumer message
     module Headers
-      class HashWithSymbolKeysTreatedLikeStrings < Hash
-        def [](key)
-          if key.is_a?(Symbol)
-            Kernel.warn("rdkafka deprecation warning: header access with Symbol key #{key.inspect} treated as a String. " \
-              "Please change your code to use String keys to avoid this warning. Symbol keys will break in version 1.")
-            super(key.to_s)
-          else
-            super
-          end
-        end
-      end
+      EMPTY_HEADERS = {}.freeze
 
       # Reads a librdkafka native message's headers and returns them as a Ruby Hash
      #
@@ -28,7 +18,7 @@ module Rdkafka
         err = Rdkafka::Bindings.rd_kafka_message_headers(native_message, headers_ptrptr)
 
         if err == Rdkafka::Bindings::RD_KAFKA_RESP_ERR__NOENT
-          return {}
+          return EMPTY_HEADERS
         elsif err != Rdkafka::Bindings::RD_KAFKA_RESP_ERR_NO_ERROR
           raise Rdkafka::RdkafkaError.new(err, "Error reading message headers")
         end
@@ -39,7 +29,7 @@ module Rdkafka
         value_ptrptr = FFI::MemoryPointer.new(:pointer)
         size_ptr = Rdkafka::Bindings::SizePtr.new
 
-        headers = HashWithSymbolKeysTreatedLikeStrings.new
+        headers = {}
 
         idx = 0
         loop do
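With `HashWithSymbolKeysTreatedLikeStrings` gone, `.from_native` now returns a plain `Hash` keyed by Strings, plus a shared frozen `EMPTY_HEADERS` when a message carries none. The previously deprecated Symbol fallback is removed outright, so, assuming a consumed `message`:

```ruby
message.headers              # => { "trace-id" => "abc123" } for example
message.headers["trace-id"]  # => "abc123"
message.headers[:"trace-id"] # => nil; Symbol keys no longer warn and fall back to Strings
```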
@@ -14,6 +14,10 @@ module Rdkafka
     # @return [Object, nil] label set during message production or nil by default
     attr_accessor :label
 
+    # @return [String] topic where we are trying to send the message
+    #   We use this instead of reading from `topic_name` pointer to save on memory allocations
+    attr_accessor :topic
+
     # @return [String] the name of the operation (e.g. "delivery")
     def operation_name
       "delivery"
@@ -26,7 +30,7 @@ module Rdkafka
         self[:offset],
         # For part of errors, we will not get a topic name reference and in cases like this
         # we should not return it
-        self[:topic_name].null? ? nil : self[:topic_name].read_string,
+        topic,
         self[:response] != 0 ? RdkafkaError.new(self[:response]) : nil,
         label
       )
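Carrying the topic as a Ruby String on the handle removes a `null?` check and a `read_string` allocation every time a report is built. Assuming a producer from this gem, the reported topic is now the very String handed to `#produce`:

```ruby
handle = producer.produce(topic: "events", payload: "hello")
report = handle.wait
report.topic_name # => "events", taken from handle.topic rather than an FFI read_string
```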
@@ -9,7 +9,15 @@ module Rdkafka
     # Cache partitions count for 30 seconds
     PARTITIONS_COUNT_TTL = 30
 
-    private_constant :PARTITIONS_COUNT_TTL
+    # Empty hash used as a default
+    EMPTY_HASH = {}.freeze
+
+    private_constant :PARTITIONS_COUNT_TTL, :EMPTY_HASH
+
+    # Raised when there was a critical issue when invoking rd_kafka_topic_new
+    # This is a temporary solution until https://github.com/karafka/rdkafka-ruby/issues/451 is
+    # resolved and this is normalized in all the places
+    class TopicHandleCreationError < RuntimeError; end
 
     # @private
     # Returns the current delivery callback, by default this is nil.
@@ -28,6 +36,8 @@ module Rdkafka
     # @param partitioner_name [String, nil] name of the partitioner we want to use or nil to use
     #   the "consistent_random" default
     def initialize(native_kafka, partitioner_name)
+      @topics_refs_map = {}
+      @topics_configs = {}
       @native_kafka = native_kafka
       @partitioner_name = partitioner_name || "consistent_random"
 
@@ -54,6 +64,52 @@ module Rdkafka
       end
     end
 
+    # Sets alternative set of configuration details that can be set per topic
+    # @note It is not allowed to re-set the same topic config twice because of the underlying
+    #   librdkafka caching
+    # @param topic [String] The topic name
+    # @param config [Hash] config we want to use per topic basis
+    # @param config_hash [Integer] hash of the config. We expect it here instead of computing it,
+    #   because it is already computed during the retrieval attempt in the `#produce` flow.
+    def set_topic_config(topic, config, config_hash)
+      # Ensure lock on topic reference just in case
+      @native_kafka.with_inner do |inner|
+        @topics_refs_map[topic] ||= {}
+        @topics_configs[topic] ||= {}
+
+        return if @topics_configs[topic].key?(config_hash)
+
+        # If config is empty, we create an empty reference that will be used with defaults
+        rd_topic_config = if config.empty?
+          nil
+        else
+          Rdkafka::Bindings.rd_kafka_topic_conf_new.tap do |topic_config|
+            config.each do |key, value|
+              error_buffer = FFI::MemoryPointer.new(:char, 256)
+              result = Rdkafka::Bindings.rd_kafka_topic_conf_set(
+                topic_config,
+                key.to_s,
+                value.to_s,
+                error_buffer,
+                256
+              )
+
+              unless result == :config_ok
+                raise Config::ConfigError.new(error_buffer.read_string)
+              end
+            end
+          end
+        end
+
+        topic_handle = Bindings.rd_kafka_topic_new(inner, topic, rd_topic_config)
+
+        raise TopicHandleCreationError.new("Error creating topic handle for topic #{topic}") if topic_handle.null?
+
+        @topics_configs[topic][config_hash] = config
+        @topics_refs_map[topic][config_hash] = topic_handle
+      end
+    end
+
     # Starts the native Kafka polling thread and kicks off the init polling
     # @note Not needed to run unless explicit start was disabled
     def start
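Callers are not expected to invoke `set_topic_config` directly; the new `topic_config:` argument on `#produce` (further down in this diff) computes the config's `#hash` and memoizes one librdkafka topic handle per `(topic, config)` pair. A hedged usage sketch, with broker address and topic name as assumptions:

```ruby
producer = Rdkafka::Config.new("bootstrap.servers" => "localhost:9092").producer

# The first dispatch with this config builds and caches a dedicated topic handle;
# later dispatches with an equal Hash reuse it at no extra allocation cost.
producer.produce(
  topic: "events",
  payload: "urgent",
  topic_config: { "message.timeout.ms" => 5_000 }
).wait

producer.close
```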
@@ -83,7 +139,18 @@ module Rdkafka
     def close
       return if closed?
       ObjectSpace.undefine_finalizer(self)
-      @native_kafka.close
+
+      @native_kafka.close do
+        # We need to remove the topics references objects before we destroy the producer,
+        # otherwise they would leak out
+        @topics_refs_map.each_value do |refs|
+          refs.each_value do |ref|
+            Rdkafka::Bindings.rd_kafka_topic_destroy(ref)
+          end
+        end
+      end
+
+      @topics_refs_map.clear
     end
 
     # Whether this producer has closed
@@ -182,11 +249,22 @@ module Rdkafka
     # @param timestamp [Time,Integer,nil] Optional timestamp of this message. Integer timestamp is in milliseconds since Jan 1 1970.
     # @param headers [Hash<String,String>] Optional message headers
     # @param label [Object, nil] a label that can be assigned when producing a message that will be part of the delivery handle and the delivery report
+    # @param topic_config [Hash] topic config for given message dispatch. Allows to send messages to topics with different configuration
     #
     # @return [DeliveryHandle] Delivery handle that can be used to wait for the result of producing this message
     #
     # @raise [RdkafkaError] When adding the message to rdkafka's queue failed
-    def produce(topic:, payload: nil, key: nil, partition: nil, partition_key: nil, timestamp: nil, headers: nil, label: nil)
+    def produce(
+      topic:,
+      payload: nil,
+      key: nil,
+      partition: nil,
+      partition_key: nil,
+      timestamp: nil,
+      headers: nil,
+      label: nil,
+      topic_config: EMPTY_HASH
+    )
       closed_producer_check(__method__)
 
       # Start by checking and converting the input
@@ -205,8 +283,20 @@ module Rdkafka
         key.bytesize
       end
 
+      topic_config_hash = topic_config.hash
+
+      # Checks if we have the rdkafka topic reference object ready. It saves us on object
+      # allocation and allows to use custom config on demand.
+      set_topic_config(topic, topic_config, topic_config_hash) unless @topics_refs_map.dig(topic, topic_config_hash)
+      topic_ref = @topics_refs_map.dig(topic, topic_config_hash)
+
       if partition_key
         partition_count = partition_count(topic)
+
+        # Check if there are no overrides for the partitioner and use the default one only when
+        # no per-topic one is present.
+        partitioner_name = @topics_configs.dig(topic, topic_config_hash, :partitioner) || @partitioner_name
+
         # If the topic is not present, set to -1
-        partition = Rdkafka::Bindings.partitioner(partition_key, partition_count, @partitioner_name) if partition_count.positive?
+        partition = Rdkafka::Bindings.partitioner(partition_key, partition_count, partitioner_name) if partition_count.positive?
       end
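Since the per-topic config is also consulted for a `:partitioner` key, a single dispatch can switch partitioning strategy without changing the producer-wide default. Note the key must be the Symbol `:partitioner` for the `dig` above to find it; `murmur2` is one of librdkafka's stock partitioners:

```ruby
producer.produce(
  topic: "events",
  payload: "payload",
  partition_key: "user-42",
  topic_config: { partitioner: "murmur2" }
).wait
```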
@@ -229,6 +319,7 @@ module Rdkafka
 
       delivery_handle = DeliveryHandle.new
       delivery_handle.label = label
+      delivery_handle.topic = topic
       delivery_handle[:pending] = true
       delivery_handle[:response] = -1
       delivery_handle[:partition] = -1
@@ -236,7 +327,7 @@ module Rdkafka
       DeliveryHandle.register(delivery_handle)
 
       args = [
-        :int, Rdkafka::Bindings::RD_KAFKA_VTYPE_TOPIC, :string, topic,
+        :int, Rdkafka::Bindings::RD_KAFKA_VTYPE_RKT, :pointer, topic_ref,
         :int, Rdkafka::Bindings::RD_KAFKA_VTYPE_MSGFLAGS, :int, Rdkafka::Bindings::RD_KAFKA_MSG_F_COPY,
         :int, Rdkafka::Bindings::RD_KAFKA_VTYPE_VALUE, :buffer_in, payload, :size_t, payload_size,
         :int, Rdkafka::Bindings::RD_KAFKA_VTYPE_KEY, :buffer_in, key, :size_t, key_size,
@@ -252,7 +343,7 @@ module Rdkafka
           args << :int << Rdkafka::Bindings::RD_KAFKA_VTYPE_HEADER
           args << :string << key
           args << :pointer << value
-          args << :size_t << value.bytes.size
+          args << :size_t << value.bytesize
         end
       end
 
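`value.bytes.size` materializes an Array of Integers just to count it, while `value.bytesize` reads the String's stored byte length in constant time; for multibyte header values the two agree on the result but not the cost:

```ruby
value = "żółć"   # 4 characters, 8 bytes in UTF-8
value.bytes.size # => 8, after allocating [197, 188, 195, 179, 197, 130, 196, 135]
value.bytesize   # => 8, O(1), no intermediate Array
```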
@@ -1,7 +1,7 @@
 # frozen_string_literal: true
 
 module Rdkafka
-  VERSION = "0.16.0.beta1"
+  VERSION = "0.16.0"
   LIBRDKAFKA_VERSION = "2.3.0"
   LIBRDKAFKA_SOURCE_SHA256 = "2d49c35c77eeb3d42fa61c43757fcbb6a206daa560247154e60642bcdcc14d12"
 end
data/lib/rdkafka.rb CHANGED
@@ -24,7 +24,13 @@ require "rdkafka/admin/delete_acl_handle"
 require "rdkafka/admin/delete_acl_report"
 require "rdkafka/admin/describe_acl_handle"
 require "rdkafka/admin/describe_acl_report"
+require "rdkafka/admin/describe_configs_handle"
+require "rdkafka/admin/describe_configs_report"
+require "rdkafka/admin/incremental_alter_configs_handle"
+require "rdkafka/admin/incremental_alter_configs_report"
 require "rdkafka/admin/acl_binding_result"
+require "rdkafka/admin/config_binding_result"
+require "rdkafka/admin/config_resource_binding_result"
 require "rdkafka/bindings"
 require "rdkafka/callbacks"
 require "rdkafka/config"
data/rdkafka.gemspec CHANGED
@@ -15,7 +15,7 @@ Gem::Specification.new do |gem|
   gem.name = 'rdkafka'
   gem.require_paths = ['lib']
   gem.version = Rdkafka::VERSION
-  gem.required_ruby_version = '>= 2.7'
+  gem.required_ruby_version = '>= 3.0'
   gem.extensions = %w(ext/Rakefile)
   gem.cert_chain = %w[certs/cert_chain.pem]